diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml index f68a7719..ca1872fc 100644 --- a/.github/workflows/python-publish.yml +++ b/.github/workflows/python-publish.yml @@ -25,7 +25,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v2 with: - python-version: '3.9.x' + python-version: '3.10.x' - name: Install dependencies run: | python -m pip install poetry diff --git a/.github/workflows/python-tests.yml b/.github/workflows/python-tests.yml index 949333dd..8b428a5d 100644 --- a/.github/workflows/python-tests.yml +++ b/.github/workflows/python-tests.yml @@ -15,7 +15,6 @@ jobs: strategy: matrix: python-version: - - '3.9.x' - '3.10.x' - '3.11.x' - '3.12.x' diff --git a/.github/workflows/type-checkers.yml b/.github/workflows/type-checkers.yml index 63a6ab91..3ca51ccf 100644 --- a/.github/workflows/type-checkers.yml +++ b/.github/workflows/type-checkers.yml @@ -8,7 +8,7 @@ jobs: strategy: fail-fast: true matrix: - python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + python-version: ["3.10", "3.11", "3.12", "3.13"] os: [ubuntu-latest] name: Python ${{ matrix.python-version }} test diff --git a/fastembed/common/model_description.py b/fastembed/common/model_description.py index f1d7ac45..caa17710 100644 --- a/fastembed/common/model_description.py +++ b/fastembed/common/model_description.py @@ -1,12 +1,12 @@ from dataclasses import dataclass, field from enum import Enum -from typing import Optional, Any +from typing import Any @dataclass(frozen=True) class ModelSource: - hf: Optional[str] = None - url: Optional[str] = None + hf: str | None = None + url: str | None = None _deprecated_tar_struct: bool = False @property @@ -33,8 +33,8 @@ class BaseModelDescription: @dataclass(frozen=True) class DenseModelDescription(BaseModelDescription): - dim: Optional[int] = None - tasks: Optional[dict[str, Any]] = field(default_factory=dict) + dim: int | None = None + tasks: dict[str, Any] | None = field(default_factory=dict) def __post_init__(self) -> None: assert self.dim is not None, "dim is required for dense model description" @@ -42,8 +42,8 @@ def __post_init__(self) -> None: @dataclass(frozen=True) class SparseModelDescription(BaseModelDescription): - requires_idf: Optional[bool] = None - vocab_size: Optional[int] = None + requires_idf: bool | None = None + vocab_size: int | None = None class PoolingType(str, Enum): diff --git a/fastembed/common/model_management.py b/fastembed/common/model_management.py index 34714dd2..5301def8 100644 --- a/fastembed/common/model_management.py +++ b/fastembed/common/model_management.py @@ -5,7 +5,7 @@ import tarfile from copy import deepcopy from pathlib import Path -from typing import Any, Optional, Union, TypeVar, Generic +from typing import Any, TypeVar, Generic import requests from huggingface_hub import snapshot_download, model_info, list_repo_tree @@ -180,8 +180,8 @@ def _verify_files_from_metadata( def _collect_file_metadata( model_dir: Path, repo_files: list[RepoFile] - ) -> dict[str, dict[str, Union[int, str]]]: - meta: dict[str, dict[str, Union[int, str]]] = {} + ) -> dict[str, dict[str, int | str]]: + meta: dict[str, dict[str, int | str]] = {} file_info_map = {f.path: f for f in repo_files} for file_path in model_dir.rglob("*"): if file_path.is_file() and file_path.name != cls.METADATA_FILE: @@ -193,9 +193,7 @@ def _collect_file_metadata( } return meta - def _save_file_metadata( - model_dir: Path, meta: dict[str, dict[str, Union[int, str]]] - ) -> None: + def _save_file_metadata(model_dir: Path, meta: dict[str, 
dict[str, int | str]]) -> None: try: if not model_dir.exists(): model_dir.mkdir(parents=True, exist_ok=True) @@ -397,7 +395,7 @@ def download_model(cls, model: T, cache_dir: str, retries: int = 3, **kwargs: An Path: The path to the downloaded model directory. """ local_files_only = kwargs.get("local_files_only", False) - specific_model_path: Optional[str] = kwargs.pop("specific_model_path", None) + specific_model_path: str | None = kwargs.pop("specific_model_path", None) if specific_model_path: return Path(specific_model_path) retries = 1 if local_files_only else retries diff --git a/fastembed/common/onnx_model.py b/fastembed/common/onnx_model.py index 1b589e18..490ed875 100644 --- a/fastembed/common/onnx_model.py +++ b/fastembed/common/onnx_model.py @@ -1,7 +1,7 @@ import warnings from dataclasses import dataclass from pathlib import Path -from typing import Any, Generic, Iterable, Optional, Sequence, Type, TypeVar +from typing import Any, Generic, Iterable, Sequence, Type, TypeVar import numpy as np import onnxruntime as ort @@ -19,8 +19,8 @@ @dataclass class OnnxOutputContext: model_output: NumpyArray - attention_mask: Optional[NDArray[np.int64]] = None - input_ids: Optional[NDArray[np.int64]] = None + attention_mask: NDArray[np.int64] | None = None + input_ids: NDArray[np.int64] | None = None class OnnxModel(Generic[T]): @@ -43,8 +43,8 @@ def _post_process_onnx_output(self, output: OnnxOutputContext, **kwargs: Any) -> raise NotImplementedError("Subclasses must implement this method") def __init__(self) -> None: - self.model: Optional[ort.InferenceSession] = None - self.tokenizer: Optional[Tokenizer] = None + self.model: ort.InferenceSession | None = None + self.tokenizer: Tokenizer | None = None def _preprocess_onnx_input( self, onnx_input: dict[str, NumpyArray], **kwargs: Any @@ -58,11 +58,11 @@ def _load_onnx_model( self, model_dir: Path, model_file: str, - threads: Optional[int], - providers: Optional[Sequence[OnnxProvider]] = None, + threads: int | None, + providers: Sequence[OnnxProvider] | None = None, cuda: bool = False, - device_id: Optional[int] = None, - extra_session_options: Optional[dict[str, Any]] = None, + device_id: int | None = None, + extra_session_options: dict[str, Any] | None = None, ) -> None: model_path = model_dir / model_file # List of Execution Providers: https://onnxruntime.ai/docs/execution-providers diff --git a/fastembed/common/types.py b/fastembed/common/types.py index a1adccbb..69d047fb 100644 --- a/fastembed/common/types.py +++ b/fastembed/common/types.py @@ -1,25 +1,20 @@ from pathlib import Path -import sys -from PIL import Image -from typing import Any, Union + +from typing import Any, TypeAlias import numpy as np from numpy.typing import NDArray - -if sys.version_info >= (3, 10): - from typing import TypeAlias -else: - from typing_extensions import TypeAlias +from PIL import Image -PathInput: TypeAlias = Union[str, Path] -ImageInput: TypeAlias = Union[PathInput, Image.Image] +PathInput: TypeAlias = str | Path +ImageInput: TypeAlias = PathInput | Image.Image -OnnxProvider: TypeAlias = Union[str, tuple[str, dict[Any, Any]]] -NumpyArray = Union[ - NDArray[np.float64], - NDArray[np.float32], - NDArray[np.float16], - NDArray[np.int8], - NDArray[np.int64], - NDArray[np.int32], -] +OnnxProvider: TypeAlias = str | tuple[str, dict[Any, Any]] +NumpyArray: TypeAlias = ( + NDArray[np.float64] + | NDArray[np.float32] + | NDArray[np.float16] + | NDArray[np.int8] + | NDArray[np.int64] + | NDArray[np.int32] +) diff --git a/fastembed/common/utils.py 
b/fastembed/common/utils.py index 02ff615b..b61a8b9c 100644 --- a/fastembed/common/utils.py +++ b/fastembed/common/utils.py @@ -5,7 +5,7 @@ import unicodedata from pathlib import Path from itertools import islice -from typing import Iterable, Optional, TypeVar +from typing import Iterable, TypeVar import numpy as np from numpy.typing import NDArray @@ -45,7 +45,7 @@ def iter_batch(iterable: Iterable[T], size: int) -> Iterable[list[T]]: yield b -def define_cache_dir(cache_dir: Optional[str] = None) -> Path: +def define_cache_dir(cache_dir: str | None = None) -> Path: """ Define the cache directory for fastembed """ diff --git a/fastembed/embedding.py b/fastembed/embedding.py index 4266de4c..37566691 100644 --- a/fastembed/embedding.py +++ b/fastembed/embedding.py @@ -1,4 +1,4 @@ -from typing import Optional, Any +from typing import Any from loguru import logger @@ -17,8 +17,8 @@ class JinaEmbedding(TextEmbedding): def __init__( self, model_name: str = "jinaai/jina-embeddings-v2-base-en", - cache_dir: Optional[str] = None, - threads: Optional[int] = None, + cache_dir: str | None = None, + threads: int | None = None, **kwargs: Any, ): super().__init__(model_name, cache_dir, threads, **kwargs) diff --git a/fastembed/image/image_embedding.py b/fastembed/image/image_embedding.py index 07c60181..057dfc21 100644 --- a/fastembed/image/image_embedding.py +++ b/fastembed/image/image_embedding.py @@ -1,4 +1,4 @@ -from typing import Any, Iterable, Optional, Sequence, Type, Union +from typing import Any, Iterable, Sequence, Type from dataclasses import asdict from fastembed.common.types import NumpyArray @@ -48,11 +48,11 @@ def _list_supported_models(cls) -> list[DenseModelDescription]: def __init__( self, model_name: str, - cache_dir: Optional[str] = None, - threads: Optional[int] = None, - providers: Optional[Sequence[OnnxProvider]] = None, + cache_dir: str | None = None, + threads: int | None = None, + providers: Sequence[OnnxProvider] | None = None, cuda: bool = False, - device_ids: Optional[list[int]] = None, + device_ids: list[int] | None = None, lazy_load: bool = False, **kwargs: Any, ): @@ -98,7 +98,7 @@ def get_embedding_size(cls, model_name: str) -> int: ValueError: If the model name is not found in the supported models. 
""" descriptions = cls._list_supported_models() - embedding_size: Optional[int] = None + embedding_size: int | None = None for description in descriptions: if description.model.lower() == model_name.lower(): embedding_size = description.dim @@ -113,9 +113,9 @@ def get_embedding_size(cls, model_name: str) -> int: def embed( self, - images: Union[ImageInput, Iterable[ImageInput]], + images: ImageInput | Iterable[ImageInput], batch_size: int = 16, - parallel: Optional[int] = None, + parallel: int | None = None, **kwargs: Any, ) -> Iterable[NumpyArray]: """ diff --git a/fastembed/image/image_embedding_base.py b/fastembed/image/image_embedding_base.py index 475395bb..3601d1e5 100644 --- a/fastembed/image/image_embedding_base.py +++ b/fastembed/image/image_embedding_base.py @@ -1,4 +1,4 @@ -from typing import Iterable, Optional, Any, Union +from typing import Iterable, Any from fastembed.common.model_description import DenseModelDescription from fastembed.common.types import NumpyArray @@ -10,21 +10,21 @@ class ImageEmbeddingBase(ModelManagement[DenseModelDescription]): def __init__( self, model_name: str, - cache_dir: Optional[str] = None, - threads: Optional[int] = None, + cache_dir: str | None = None, + threads: int | None = None, **kwargs: Any, ): self.model_name = model_name self.cache_dir = cache_dir self.threads = threads self._local_files_only = kwargs.pop("local_files_only", False) - self._embedding_size: Optional[int] = None + self._embedding_size: int | None = None def embed( self, - images: Union[ImageInput, Iterable[ImageInput]], + images: ImageInput | Iterable[ImageInput], batch_size: int = 16, - parallel: Optional[int] = None, + parallel: int | None = None, **kwargs: Any, ) -> Iterable[NumpyArray]: """ diff --git a/fastembed/image/onnx_embedding.py b/fastembed/image/onnx_embedding.py index ae0ce848..92165594 100644 --- a/fastembed/image/onnx_embedding.py +++ b/fastembed/image/onnx_embedding.py @@ -1,4 +1,4 @@ -from typing import Any, Iterable, Optional, Sequence, Type, Union +from typing import Any, Iterable, Sequence, Type from fastembed.common.types import NumpyArray @@ -63,14 +63,14 @@ class OnnxImageEmbedding(ImageEmbeddingBase, OnnxImageModel[NumpyArray]): def __init__( self, model_name: str, - cache_dir: Optional[str] = None, - threads: Optional[int] = None, - providers: Optional[Sequence[OnnxProvider]] = None, + cache_dir: str | None = None, + threads: int | None = None, + providers: Sequence[OnnxProvider] | None = None, cuda: bool = False, - device_ids: Optional[list[int]] = None, + device_ids: list[int] | None = None, lazy_load: bool = False, - device_id: Optional[int] = None, - specific_model_path: Optional[str] = None, + device_id: int | None = None, + specific_model_path: str | None = None, **kwargs: Any, ): """ @@ -105,7 +105,7 @@ def __init__( self.cuda = cuda # This device_id will be used if we need to load model in current process - self.device_id: Optional[int] = None + self.device_id: int | None = None if device_id is not None: self.device_id = device_id elif self.device_ids is not None: @@ -150,9 +150,9 @@ def _list_supported_models(cls) -> list[DenseModelDescription]: def embed( self, - images: Union[ImageInput, Iterable[ImageInput]], + images: ImageInput | Iterable[ImageInput], batch_size: int = 16, - parallel: Optional[int] = None, + parallel: int | None = None, **kwargs: Any, ) -> Iterable[NumpyArray]: """ diff --git a/fastembed/image/onnx_image_model.py b/fastembed/image/onnx_image_model.py index 2f4de833..118bd382 100644 --- 
a/fastembed/image/onnx_image_model.py +++ b/fastembed/image/onnx_image_model.py @@ -2,7 +2,7 @@ import os from multiprocessing import get_all_start_methods from pathlib import Path -from typing import Any, Iterable, Optional, Sequence, Type, Union +from typing import Any, Iterable, Sequence, Type import numpy as np from PIL import Image @@ -37,7 +37,7 @@ def _post_process_onnx_output(self, output: OnnxOutputContext, **kwargs: Any) -> def __init__(self) -> None: super().__init__() - self.processor: Optional[Compose] = None + self.processor: Compose | None = None def _preprocess_onnx_input( self, onnx_input: dict[str, NumpyArray], **kwargs: Any @@ -51,11 +51,11 @@ def _load_onnx_model( self, model_dir: Path, model_file: str, - threads: Optional[int], - providers: Optional[Sequence[OnnxProvider]] = None, + threads: int | None, + providers: Sequence[OnnxProvider] | None = None, cuda: bool = False, - device_id: Optional[int] = None, - extra_session_options: Optional[dict[str, Any]] = None, + device_id: int | None = None, + extra_session_options: dict[str, Any] | None = None, ) -> None: super()._load_onnx_model( model_dir=model_dir, @@ -93,15 +93,15 @@ def _embed_images( self, model_name: str, cache_dir: str, - images: Union[ImageInput, Iterable[ImageInput]], + images: ImageInput | Iterable[ImageInput], batch_size: int = 256, - parallel: Optional[int] = None, - providers: Optional[Sequence[OnnxProvider]] = None, + parallel: int | None = None, + providers: Sequence[OnnxProvider] | None = None, cuda: bool = False, - device_ids: Optional[list[int]] = None, + device_ids: list[int] | None = None, local_files_only: bool = False, - specific_model_path: Optional[str] = None, - extra_session_options: Optional[dict[str, Any]] = None, + specific_model_path: str | None = None, + extra_session_options: dict[str, Any] | None = None, **kwargs: Any, ) -> Iterable[T]: is_small = False diff --git a/fastembed/image/transform/functional.py b/fastembed/image/transform/functional.py index 66223fdf..b06ef46c 100644 --- a/fastembed/image/transform/functional.py +++ b/fastembed/image/transform/functional.py @@ -1,5 +1,3 @@ -from typing import Union - import numpy as np from PIL import Image @@ -15,7 +13,7 @@ def convert_to_rgb(image: Image.Image) -> Image.Image: def center_crop( - image: Union[Image.Image, NumpyArray], + image: Image.Image | NumpyArray, size: tuple[int, int], ) -> NumpyArray: if isinstance(image, np.ndarray): @@ -64,8 +62,8 @@ def center_crop( def normalize( image: NumpyArray, - mean: Union[float, list[float]], - std: Union[float, list[float]], + mean: float | list[float], + std: float | list[float], ) -> NumpyArray: num_channels = image.shape[1] if len(image.shape) == 4 else image.shape[0] @@ -96,8 +94,8 @@ def normalize( def resize( image: Image.Image, - size: Union[int, tuple[int, int]], - resample: Union[int, Image.Resampling] = Image.Resampling.BILINEAR, + size: int | tuple[int, int], + resample: int | Image.Resampling = Image.Resampling.BILINEAR, ) -> Image.Image: if isinstance(size, tuple): return image.resize(size, resample) @@ -117,7 +115,7 @@ def rescale(image: NumpyArray, scale: float, dtype: type = np.float32) -> NumpyA return (image * scale).astype(dtype) -def pil2ndarray(image: Union[Image.Image, NumpyArray]) -> NumpyArray: +def pil2ndarray(image: Image.Image | NumpyArray) -> NumpyArray: if isinstance(image, Image.Image): return np.asarray(image).transpose((2, 0, 1)) return image @@ -126,7 +124,7 @@ def pil2ndarray(image: Union[Image.Image, NumpyArray]) -> NumpyArray: def pad2square( 
image: Image.Image, size: int, - fill_color: Union[str, int, tuple[int, ...]] = 0, + fill_color: str | int | tuple[int, ...] = 0, ) -> Image.Image: height, width = image.height, image.width diff --git a/fastembed/image/transform/operators.py b/fastembed/image/transform/operators.py index 55a45074..857b1999 100644 --- a/fastembed/image/transform/operators.py +++ b/fastembed/image/transform/operators.py @@ -1,4 +1,4 @@ -from typing import Any, Union, Optional +from typing import Any from PIL import Image @@ -15,7 +15,7 @@ class Transform: - def __call__(self, images: list[Any]) -> Union[list[Image.Image], list[NumpyArray]]: + def __call__(self, images: list[Any]) -> list[Image.Image] | list[NumpyArray]: raise NotImplementedError("Subclasses must implement this method") @@ -33,7 +33,7 @@ def __call__(self, images: list[Image.Image]) -> list[NumpyArray]: class Normalize(Transform): - def __init__(self, mean: Union[float, list[float]], std: Union[float, list[float]]): + def __init__(self, mean: float | list[float], std: float | list[float]): self.mean = mean self.std = std @@ -44,7 +44,7 @@ def __call__(self, images: list[NumpyArray]) -> list[NumpyArray]: class Resize(Transform): def __init__( self, - size: Union[int, tuple[int, int]], + size: int | tuple[int, int], resample: Image.Resampling = Image.Resampling.BICUBIC, ): self.size = size @@ -63,7 +63,7 @@ def __call__(self, images: list[NumpyArray]) -> list[NumpyArray]: class PILtoNDarray(Transform): - def __call__(self, images: list[Union[Image.Image, NumpyArray]]) -> list[NumpyArray]: + def __call__(self, images: list[Image.Image | NumpyArray]) -> list[NumpyArray]: return [pil2ndarray(image) for image in images] @@ -71,7 +71,7 @@ class PadtoSquare(Transform): def __init__( self, size: int, - fill_color: Union[str, int, tuple[int, ...]], + fill_color: str | int | tuple[int, ...], ): self.size = size self.fill_color = fill_color @@ -87,8 +87,8 @@ def __init__(self, transforms: list[Transform]): self.transforms = transforms def __call__( - self, images: Union[list[Image.Image], list[NumpyArray]] - ) -> Union[list[NumpyArray], list[Image.Image]]: + self, images: list[Image.Image] | list[NumpyArray] + ) -> list[NumpyArray] | list[Image.Image]: for transform in self.transforms: images = transform(images) return images @@ -253,7 +253,7 @@ def _get_pad2square(transforms: list[Transform], config: dict[str, Any]) -> None ) @staticmethod - def _interpolation_resolver(resample: Optional[str] = None) -> Image.Resampling: + def _interpolation_resolver(resample: str | None = None) -> Image.Resampling: interpolation_map = { "nearest": Image.Resampling.NEAREST, "lanczos": Image.Resampling.LANCZOS, diff --git a/fastembed/late_interaction/colbert.py b/fastembed/late_interaction/colbert.py index 023b8c29..66a44e1e 100644 --- a/fastembed/late_interaction/colbert.py +++ b/fastembed/late_interaction/colbert.py @@ -1,5 +1,5 @@ import string -from typing import Any, Iterable, Optional, Sequence, Type, Union +from typing import Any, Iterable, Sequence, Type import numpy as np from tokenizers import Encoding, Tokenizer @@ -98,7 +98,7 @@ def _tokenize_documents(self, documents: list[str]) -> list[Encoding]: def token_count( self, - texts: Union[str, Iterable[str]], + texts: str | Iterable[str], batch_size: int = 1024, is_doc: bool = True, include_extension: bool = False, @@ -140,14 +140,14 @@ def _list_supported_models(cls) -> list[DenseModelDescription]: def __init__( self, model_name: str, - cache_dir: Optional[str] = None, - threads: Optional[int] = None, - 
providers: Optional[Sequence[OnnxProvider]] = None, + cache_dir: str | None = None, + threads: int | None = None, + providers: Sequence[OnnxProvider] | None = None, cuda: bool = False, - device_ids: Optional[list[int]] = None, + device_ids: list[int] | None = None, lazy_load: bool = False, - device_id: Optional[int] = None, - specific_model_path: Optional[str] = None, + device_id: int | None = None, + specific_model_path: str | None = None, **kwargs: Any, ): """ @@ -182,7 +182,7 @@ def __init__( self.cuda = cuda # This device_id will be used if we need to load model in current process - self.device_id: Optional[int] = None + self.device_id: int | None = None if device_id is not None: self.device_id = device_id elif self.device_ids is not None: @@ -198,11 +198,11 @@ def __init__( local_files_only=self._local_files_only, specific_model_path=self._specific_model_path, ) - self.mask_token_id: Optional[int] = None - self.pad_token_id: Optional[int] = None + self.mask_token_id: int | None = None + self.pad_token_id: int | None = None self.skip_list: set[int] = set() - self.query_tokenizer: Optional[Tokenizer] = None + self.query_tokenizer: Tokenizer | None = None if not self.lazy_load: self.load_onnx_model() @@ -238,9 +238,9 @@ def load_onnx_model(self) -> None: def embed( self, - documents: Union[str, Iterable[str]], + documents: str | Iterable[str], batch_size: int = 256, - parallel: Optional[int] = None, + parallel: int | None = None, **kwargs: Any, ) -> Iterable[NumpyArray]: """ @@ -273,7 +273,7 @@ def embed( **kwargs, ) - def query_embed(self, query: Union[str, Iterable[str]], **kwargs: Any) -> Iterable[NumpyArray]: + def query_embed(self, query: str | Iterable[str], **kwargs: Any) -> Iterable[NumpyArray]: if isinstance(query, str): query = [query] diff --git a/fastembed/late_interaction/late_interaction_embedding_base.py b/fastembed/late_interaction/late_interaction_embedding_base.py index ec37c79b..1ba7909e 100644 --- a/fastembed/late_interaction/late_interaction_embedding_base.py +++ b/fastembed/late_interaction/late_interaction_embedding_base.py @@ -1,4 +1,4 @@ -from typing import Iterable, Optional, Union, Any +from typing import Iterable, Any from fastembed.common.model_description import DenseModelDescription from fastembed.common.types import NumpyArray @@ -9,21 +9,21 @@ class LateInteractionTextEmbeddingBase(ModelManagement[DenseModelDescription]): def __init__( self, model_name: str, - cache_dir: Optional[str] = None, - threads: Optional[int] = None, + cache_dir: str | None = None, + threads: int | None = None, **kwargs: Any, ): self.model_name = model_name self.cache_dir = cache_dir self.threads = threads self._local_files_only = kwargs.pop("local_files_only", False) - self._embedding_size: Optional[int] = None + self._embedding_size: int | None = None def embed( self, - documents: Union[str, Iterable[str]], + documents: str | Iterable[str], batch_size: int = 256, - parallel: Optional[int] = None, + parallel: int | None = None, **kwargs: Any, ) -> Iterable[NumpyArray]: raise NotImplementedError() @@ -43,7 +43,7 @@ def passage_embed(self, texts: Iterable[str], **kwargs: Any) -> Iterable[NumpyAr # This is model-specific, so that different models can have specialized implementations yield from self.embed(texts, **kwargs) - def query_embed(self, query: Union[str, Iterable[str]], **kwargs: Any) -> Iterable[NumpyArray]: + def query_embed(self, query: str | Iterable[str], **kwargs: Any) -> Iterable[NumpyArray]: """ Embeds queries @@ -72,7 +72,7 @@ def embedding_size(self) -> int: def 
token_count( self, - texts: Union[str, Iterable[str]], + texts: str | Iterable[str], batch_size: int = 1024, **kwargs: Any, ) -> int: diff --git a/fastembed/late_interaction/late_interaction_text_embedding.py b/fastembed/late_interaction/late_interaction_text_embedding.py index 482a4331..ac99b03f 100644 --- a/fastembed/late_interaction/late_interaction_text_embedding.py +++ b/fastembed/late_interaction/late_interaction_text_embedding.py @@ -1,4 +1,4 @@ -from typing import Any, Iterable, Optional, Sequence, Type, Union +from typing import Any, Iterable, Sequence, Type from dataclasses import asdict from fastembed.common.model_description import DenseModelDescription @@ -51,11 +51,11 @@ def _list_supported_models(cls) -> list[DenseModelDescription]: def __init__( self, model_name: str, - cache_dir: Optional[str] = None, - threads: Optional[int] = None, - providers: Optional[Sequence[OnnxProvider]] = None, + cache_dir: str | None = None, + threads: int | None = None, + providers: Sequence[OnnxProvider] | None = None, cuda: bool = False, - device_ids: Optional[list[int]] = None, + device_ids: list[int] | None = None, lazy_load: bool = False, **kwargs: Any, ): @@ -101,7 +101,7 @@ def get_embedding_size(cls, model_name: str) -> int: ValueError: If the model name is not found in the supported models. """ descriptions = cls._list_supported_models() - embedding_size: Optional[int] = None + embedding_size: int | None = None for description in descriptions: if description.model.lower() == model_name.lower(): embedding_size = description.dim @@ -116,9 +116,9 @@ def get_embedding_size(cls, model_name: str) -> int: def embed( self, - documents: Union[str, Iterable[str]], + documents: str | Iterable[str], batch_size: int = 256, - parallel: Optional[int] = None, + parallel: int | None = None, **kwargs: Any, ) -> Iterable[NumpyArray]: """ @@ -138,7 +138,7 @@ def embed( """ yield from self.model.embed(documents, batch_size, parallel, **kwargs) - def query_embed(self, query: Union[str, Iterable[str]], **kwargs: Any) -> Iterable[NumpyArray]: + def query_embed(self, query: str | Iterable[str], **kwargs: Any) -> Iterable[NumpyArray]: """ Embeds queries @@ -154,7 +154,7 @@ def query_embed(self, query: Union[str, Iterable[str]], **kwargs: Any) -> Iterab def token_count( self, - texts: Union[str, Iterable[str]], + texts: str | Iterable[str], batch_size: int = 1024, is_doc: bool = True, include_extension: bool = False, diff --git a/fastembed/late_interaction/token_embeddings.py b/fastembed/late_interaction/token_embeddings.py index ec4844ba..55d3cd94 100644 --- a/fastembed/late_interaction/token_embeddings.py +++ b/fastembed/late_interaction/token_embeddings.py @@ -1,5 +1,5 @@ from dataclasses import asdict -from typing import Union, Iterable, Optional, Any, Type +from typing import Iterable, Any, Type from fastembed.common.model_description import DenseModelDescription, ModelSource from fastembed.common.onnx_model import OnnxOutputContext @@ -63,9 +63,9 @@ def _post_process_onnx_output( def embed( self, - documents: Union[str, Iterable[str]], + documents: str | Iterable[str], batch_size: int = 256, - parallel: Optional[int] = None, + parallel: int | None = None, **kwargs: Any, ) -> Iterable[NumpyArray]: yield from super().embed(documents, batch_size=batch_size, parallel=parallel, **kwargs) diff --git a/fastembed/late_interaction_multimodal/colpali.py b/fastembed/late_interaction_multimodal/colpali.py index 7d0218fe..cc3273c4 100644 --- a/fastembed/late_interaction_multimodal/colpali.py +++ 
b/fastembed/late_interaction_multimodal/colpali.py @@ -1,4 +1,4 @@ -from typing import Any, Iterable, Optional, Sequence, Type, Union +from typing import Any, Iterable, Sequence, Type import numpy as np from tokenizers import Encoding @@ -46,14 +46,14 @@ class ColPali(LateInteractionMultimodalEmbeddingBase, OnnxMultimodalModel[NumpyA def __init__( self, model_name: str, - cache_dir: Optional[str] = None, - threads: Optional[int] = None, - providers: Optional[Sequence[OnnxProvider]] = None, + cache_dir: str | None = None, + threads: int | None = None, + providers: Sequence[OnnxProvider] | None = None, cuda: bool = False, - device_ids: Optional[list[int]] = None, + device_ids: list[int] | None = None, lazy_load: bool = False, - device_id: Optional[int] = None, - specific_model_path: Optional[str] = None, + device_id: int | None = None, + specific_model_path: str | None = None, **kwargs: Any, ): """ @@ -87,7 +87,7 @@ def __init__( self.cuda = cuda # This device_id will be used if we need to load model in current process - self.device_id: Optional[int] = None + self.device_id: int | None = None if device_id is not None: self.device_id = device_id elif self.device_ids is not None: @@ -174,7 +174,7 @@ def tokenize(self, documents: list[str], **kwargs: Any) -> list[Encoding]: def token_count( self, - texts: Union[str, Iterable[str]], + texts: str | Iterable[str], batch_size: int = 1024, include_extension: bool = False, **kwargs: Any, @@ -227,9 +227,9 @@ def _preprocess_onnx_image_input( def embed_text( self, - documents: Union[str, Iterable[str]], + documents: str | Iterable[str], batch_size: int = 256, - parallel: Optional[int] = None, + parallel: int | None = None, **kwargs: Any, ) -> Iterable[NumpyArray]: """ @@ -263,9 +263,9 @@ def embed_text( def embed_image( self, - images: Union[ImageInput, Iterable[ImageInput]], + images: ImageInput | Iterable[ImageInput], batch_size: int = 16, - parallel: Optional[int] = None, + parallel: int | None = None, **kwargs: Any, ) -> Iterable[NumpyArray]: """ diff --git a/fastembed/late_interaction_multimodal/late_interaction_multimodal_embedding.py b/fastembed/late_interaction_multimodal/late_interaction_multimodal_embedding.py index 01a57294..f960b2bb 100644 --- a/fastembed/late_interaction_multimodal/late_interaction_multimodal_embedding.py +++ b/fastembed/late_interaction_multimodal/late_interaction_multimodal_embedding.py @@ -1,4 +1,4 @@ -from typing import Any, Iterable, Optional, Sequence, Type, Union +from typing import Any, Iterable, Sequence, Type from dataclasses import asdict from fastembed.common import OnnxProvider, ImageInput @@ -54,11 +54,11 @@ def _list_supported_models(cls) -> list[DenseModelDescription]: def __init__( self, model_name: str, - cache_dir: Optional[str] = None, - threads: Optional[int] = None, - providers: Optional[Sequence[OnnxProvider]] = None, + cache_dir: str | None = None, + threads: int | None = None, + providers: Sequence[OnnxProvider] | None = None, cuda: bool = False, - device_ids: Optional[list[int]] = None, + device_ids: list[int] | None = None, lazy_load: bool = False, **kwargs: Any, ): @@ -104,7 +104,7 @@ def get_embedding_size(cls, model_name: str) -> int: ValueError: If the model name is not found in the supported models. 
""" descriptions = cls._list_supported_models() - embedding_size: Optional[int] = None + embedding_size: int | None = None for description in descriptions: if description.model.lower() == model_name.lower(): embedding_size = description.dim @@ -119,9 +119,9 @@ def get_embedding_size(cls, model_name: str) -> int: def embed_text( self, - documents: Union[str, Iterable[str]], + documents: str | Iterable[str], batch_size: int = 256, - parallel: Optional[int] = None, + parallel: int | None = None, **kwargs: Any, ) -> Iterable[NumpyArray]: """ @@ -142,9 +142,9 @@ def embed_text( def embed_image( self, - images: Union[ImageInput, Iterable[ImageInput]], + images: ImageInput | Iterable[ImageInput], batch_size: int = 16, - parallel: Optional[int] = None, + parallel: int | None = None, **kwargs: Any, ) -> Iterable[NumpyArray]: """ @@ -165,7 +165,7 @@ def embed_image( def token_count( self, - texts: Union[str, Iterable[str]], + texts: str | Iterable[str], batch_size: int = 1024, include_extension: bool = False, **kwargs: Any, diff --git a/fastembed/late_interaction_multimodal/late_interaction_multimodal_embedding_base.py b/fastembed/late_interaction_multimodal/late_interaction_multimodal_embedding_base.py index 0d148ce4..72a87fe5 100644 --- a/fastembed/late_interaction_multimodal/late_interaction_multimodal_embedding_base.py +++ b/fastembed/late_interaction_multimodal/late_interaction_multimodal_embedding_base.py @@ -1,4 +1,4 @@ -from typing import Iterable, Optional, Union, Any +from typing import Iterable, Any from fastembed.common import ImageInput @@ -11,21 +11,21 @@ class LateInteractionMultimodalEmbeddingBase(ModelManagement[DenseModelDescripti def __init__( self, model_name: str, - cache_dir: Optional[str] = None, - threads: Optional[int] = None, + cache_dir: str | None = None, + threads: int | None = None, **kwargs: Any, ): self.model_name = model_name self.cache_dir = cache_dir self.threads = threads self._local_files_only = kwargs.pop("local_files_only", False) - self._embedding_size: Optional[int] = None + self._embedding_size: int | None = None def embed_text( self, - documents: Union[str, Iterable[str]], + documents: str | Iterable[str], batch_size: int = 256, - parallel: Optional[int] = None, + parallel: int | None = None, **kwargs: Any, ) -> Iterable[NumpyArray]: """ @@ -47,9 +47,9 @@ def embed_text( def embed_image( self, - images: Union[ImageInput, Iterable[ImageInput]], + images: ImageInput | Iterable[ImageInput], batch_size: int = 16, - parallel: Optional[int] = None, + parallel: int | None = None, **kwargs: Any, ) -> Iterable[NumpyArray]: """ @@ -79,7 +79,7 @@ def embedding_size(self) -> int: def token_count( self, - texts: Union[str, Iterable[str]], + texts: str | Iterable[str], **kwargs: Any, ) -> int: """Returns the number of tokens in the texts.""" diff --git a/fastembed/late_interaction_multimodal/onnx_multimodal_model.py b/fastembed/late_interaction_multimodal/onnx_multimodal_model.py index 50a5d8b4..b6fc7a97 100644 --- a/fastembed/late_interaction_multimodal/onnx_multimodal_model.py +++ b/fastembed/late_interaction_multimodal/onnx_multimodal_model.py @@ -2,7 +2,7 @@ import os from multiprocessing import get_all_start_methods from pathlib import Path -from typing import Any, Iterable, Optional, Sequence, Type, Union +from typing import Any, Iterable, Sequence, Type import numpy as np from PIL import Image @@ -18,12 +18,12 @@ class OnnxMultimodalModel(OnnxModel[T]): - ONNX_OUTPUT_NAMES: Optional[list[str]] = None + ONNX_OUTPUT_NAMES: list[str] | None = None def __init__(self) 
-> None: super().__init__() - self.tokenizer: Optional[Tokenizer] = None - self.processor: Optional[Compose] = None + self.tokenizer: Tokenizer | None = None + self.processor: Compose | None = None self.special_token_to_id: dict[str, int] = {} def _preprocess_onnx_text_input( @@ -60,11 +60,11 @@ def _load_onnx_model( self, model_dir: Path, model_file: str, - threads: Optional[int], - providers: Optional[Sequence[OnnxProvider]] = None, + threads: int | None, + providers: Sequence[OnnxProvider] | None = None, cuda: bool = False, - device_id: Optional[int] = None, - extra_session_options: Optional[dict[str, Any]] = None, + device_id: int | None = None, + extra_session_options: dict[str, Any] | None = None, ) -> None: super()._load_onnx_model( model_dir=model_dir, @@ -116,15 +116,15 @@ def _embed_documents( self, model_name: str, cache_dir: str, - documents: Union[str, Iterable[str]], + documents: str | Iterable[str], batch_size: int = 256, - parallel: Optional[int] = None, - providers: Optional[Sequence[OnnxProvider]] = None, + parallel: int | None = None, + providers: Sequence[OnnxProvider] | None = None, cuda: bool = False, - device_ids: Optional[list[int]] = None, + device_ids: list[int] | None = None, local_files_only: bool = False, - specific_model_path: Optional[str] = None, - extra_session_options: Optional[dict[str, Any]] = None, + specific_model_path: str | None = None, + extra_session_options: dict[str, Any] | None = None, **kwargs: Any, ) -> Iterable[T]: is_small = False @@ -187,15 +187,15 @@ def _embed_images( self, model_name: str, cache_dir: str, - images: Union[Iterable[ImageInput], ImageInput], + images: Iterable[ImageInput] | ImageInput, batch_size: int = 256, - parallel: Optional[int] = None, - providers: Optional[Sequence[OnnxProvider]] = None, + parallel: int | None = None, + providers: Sequence[OnnxProvider] | None = None, cuda: bool = False, - device_ids: Optional[list[int]] = None, + device_ids: list[int] | None = None, local_files_only: bool = False, - specific_model_path: Optional[str] = None, - extra_session_options: Optional[dict[str, Any]] = None, + specific_model_path: str | None = None, + extra_session_options: dict[str, Any] | None = None, **kwargs: Any, ) -> Iterable[T]: is_small = False diff --git a/fastembed/parallel_processor.py b/fastembed/parallel_processor.py index 9a20a8e9..1632ac2c 100644 --- a/fastembed/parallel_processor.py +++ b/fastembed/parallel_processor.py @@ -8,7 +8,7 @@ from multiprocessing.process import BaseProcess from multiprocessing.sharedctypes import Synchronized as BaseValue from queue import Empty -from typing import Any, Iterable, Optional, Type +from typing import Any, Iterable, Type # Single item should be processed in less than: @@ -38,7 +38,7 @@ def _worker( output_queue: Queue, num_active_workers: BaseValue, worker_id: int, - kwargs: Optional[dict[str, Any]] = None, + kwargs: dict[str, Any] | None = None, ) -> None: """ A worker that pulls data pints off the input queue, and places the execution result on the output queue. 
@@ -93,21 +93,21 @@ def __init__( self, num_workers: int, worker: Type[Worker], - start_method: Optional[str] = None, - device_ids: Optional[list[int]] = None, + start_method: str | None = None, + device_ids: list[int] | None = None, cuda: bool = False, ): self.worker_class = worker self.num_workers = num_workers - self.input_queue: Optional[Queue] = None - self.output_queue: Optional[Queue] = None + self.input_queue: Queue | None = None + self.output_queue: Queue | None = None self.ctx: BaseContext = get_context(start_method) self.processes: list[BaseProcess] = [] self.queue_size = self.num_workers * max_internal_batch_size self.emergency_shutdown = False self.device_ids = device_ids self.cuda = cuda - self.num_active_workers: Optional[BaseValue] = None + self.num_active_workers: BaseValue | None = None def start(self, **kwargs: Any) -> None: self.input_queue = self.ctx.Queue(self.queue_size) @@ -220,7 +220,7 @@ def check_worker_health(self) -> None: f"Worker PID: {process.pid} terminated unexpectedly with code {process.exitcode}" ) - def join_or_terminate(self, timeout: Optional[int] = 1) -> None: + def join_or_terminate(self, timeout: int = 1) -> None: """ Emergency shutdown @param timeout: diff --git a/fastembed/postprocess/muvera.py b/fastembed/postprocess/muvera.py index 35dfaabe..3c647679 100644 --- a/fastembed/postprocess/muvera.py +++ b/fastembed/postprocess/muvera.py @@ -1,5 +1,3 @@ -from typing import Union - import numpy as np from fastembed.common.types import NumpyArray @@ -11,7 +9,7 @@ ) -MultiVectorModel = Union[LateInteractionTextEmbeddingBase, LateInteractionMultimodalEmbeddingBase] +MultiVectorModel = LateInteractionTextEmbeddingBase | LateInteractionMultimodalEmbeddingBase MAX_HAMMING_DISTANCE = 65 # 64 bits + 1 POPCOUNT_LUT = np.array([bin(x).count("1") for x in range(256)], dtype=np.uint8) diff --git a/fastembed/rerank/cross_encoder/custom_text_cross_encoder.py b/fastembed/rerank/cross_encoder/custom_text_cross_encoder.py index 8821cae7..a9fa3d79 100644 --- a/fastembed/rerank/cross_encoder/custom_text_cross_encoder.py +++ b/fastembed/rerank/cross_encoder/custom_text_cross_encoder.py @@ -1,4 +1,4 @@ -from typing import Optional, Sequence, Any +from typing import Sequence, Any from fastembed.common import OnnxProvider from fastembed.common.model_description import BaseModelDescription @@ -11,14 +11,14 @@ class CustomTextCrossEncoder(OnnxTextCrossEncoder): def __init__( self, model_name: str, - cache_dir: Optional[str] = None, - threads: Optional[int] = None, - providers: Optional[Sequence[OnnxProvider]] = None, + cache_dir: str | None = None, + threads: int | None = None, + providers: Sequence[OnnxProvider] | None = None, cuda: bool = False, - device_ids: Optional[list[int]] = None, + device_ids: list[int] | None = None, lazy_load: bool = False, - device_id: Optional[int] = None, - specific_model_path: Optional[str] = None, + device_id: int | None = None, + specific_model_path: str | None = None, **kwargs: Any, ): super().__init__( diff --git a/fastembed/rerank/cross_encoder/onnx_text_cross_encoder.py b/fastembed/rerank/cross_encoder/onnx_text_cross_encoder.py index 4a91a010..e077c216 100644 --- a/fastembed/rerank/cross_encoder/onnx_text_cross_encoder.py +++ b/fastembed/rerank/cross_encoder/onnx_text_cross_encoder.py @@ -1,4 +1,4 @@ -from typing import Any, Iterable, Optional, Sequence, Type +from typing import Any, Iterable, Sequence, Type from loguru import logger @@ -77,14 +77,14 @@ def _list_supported_models(cls) -> list[BaseModelDescription]: def __init__( self, 
model_name: str, - cache_dir: Optional[str] = None, - threads: Optional[int] = None, - providers: Optional[Sequence[OnnxProvider]] = None, + cache_dir: str | None = None, + threads: int | None = None, + providers: Sequence[OnnxProvider] | None = None, cuda: bool = False, - device_ids: Optional[list[int]] = None, + device_ids: list[int] | None = None, lazy_load: bool = False, - device_id: Optional[int] = None, - specific_model_path: Optional[str] = None, + device_id: int | None = None, + specific_model_path: str | None = None, **kwargs: Any, ): """ @@ -124,7 +124,7 @@ def __init__( ) # This device_id will be used if we need to load model in current process - self.device_id: Optional[int] = None + self.device_id: int | None = None if device_id is not None: self.device_id = device_id elif self.device_ids is not None: @@ -180,7 +180,7 @@ def rerank_pairs( self, pairs: Iterable[tuple[str, str]], batch_size: int = 64, - parallel: Optional[int] = None, + parallel: int | None = None, **kwargs: Any, ) -> Iterable[float]: yield from self._rerank_pairs( diff --git a/fastembed/rerank/cross_encoder/onnx_text_model.py b/fastembed/rerank/cross_encoder/onnx_text_model.py index 801c60dc..aa058322 100644 --- a/fastembed/rerank/cross_encoder/onnx_text_model.py +++ b/fastembed/rerank/cross_encoder/onnx_text_model.py @@ -1,7 +1,7 @@ import os from multiprocessing import get_all_start_methods from pathlib import Path -from typing import Any, Iterable, Optional, Sequence, Type +from typing import Any, Iterable, Sequence, Type import numpy as np from tokenizers import Encoding @@ -19,7 +19,7 @@ class OnnxCrossEncoderModel(OnnxModel[float]): - ONNX_OUTPUT_NAMES: Optional[list[str]] = None + ONNX_OUTPUT_NAMES: list[str] | None = None @classmethod def _get_worker_class(cls) -> Type["TextRerankerWorker"]: @@ -29,11 +29,11 @@ def _load_onnx_model( self, model_dir: Path, model_file: str, - threads: Optional[int], - providers: Optional[Sequence[OnnxProvider]] = None, + threads: int | None, + providers: Sequence[OnnxProvider] | None = None, cuda: bool = False, - device_id: Optional[int] = None, - extra_session_options: Optional[dict[str, Any]] = None, + device_id: int | None = None, + extra_session_options: dict[str, Any] | None = None, ) -> None: super()._load_onnx_model( model_dir=model_dir, @@ -92,13 +92,13 @@ def _rerank_pairs( cache_dir: str, pairs: Iterable[tuple[str, str]], batch_size: int, - parallel: Optional[int] = None, - providers: Optional[Sequence[OnnxProvider]] = None, + parallel: int | None = None, + providers: Sequence[OnnxProvider] | None = None, cuda: bool = False, - device_ids: Optional[list[int]] = None, + device_ids: list[int] | None = None, local_files_only: bool = False, - specific_model_path: Optional[str] = None, - extra_session_options: Optional[dict[str, Any]] = None, + specific_model_path: str | None = None, + extra_session_options: dict[str, Any] | None = None, **kwargs: Any, ) -> Iterable[float]: is_small = False diff --git a/fastembed/rerank/cross_encoder/text_cross_encoder.py b/fastembed/rerank/cross_encoder/text_cross_encoder.py index c6182084..2d3920a8 100644 --- a/fastembed/rerank/cross_encoder/text_cross_encoder.py +++ b/fastembed/rerank/cross_encoder/text_cross_encoder.py @@ -1,4 +1,4 @@ -from typing import Any, Iterable, Optional, Sequence, Type +from typing import Any, Iterable, Sequence, Type from dataclasses import asdict from fastembed.common import OnnxProvider @@ -53,11 +53,11 @@ def _list_supported_models(cls) -> list[BaseModelDescription]: def __init__( self, model_name: 
str, - cache_dir: Optional[str] = None, - threads: Optional[int] = None, - providers: Optional[Sequence[OnnxProvider]] = None, + cache_dir: str | None = None, + threads: int | None = None, + providers: Sequence[OnnxProvider] | None = None, cuda: bool = False, - device_ids: Optional[list[int]] = None, + device_ids: list[int] | None = None, lazy_load: bool = False, **kwargs: Any, ): @@ -102,7 +102,7 @@ def rerank_pairs( self, pairs: Iterable[tuple[str, str]], batch_size: int = 64, - parallel: Optional[int] = None, + parallel: int | None = None, **kwargs: Any, ) -> Iterable[float]: """ @@ -140,7 +140,7 @@ def add_custom_model( description: str = "", license: str = "", size_in_gb: float = 0.0, - additional_files: Optional[list[str]] = None, + additional_files: list[str] | None = None, ) -> None: registered_models = cls._list_supported_models() for registered_model in registered_models: diff --git a/fastembed/rerank/cross_encoder/text_cross_encoder_base.py b/fastembed/rerank/cross_encoder/text_cross_encoder_base.py index 7baffd0e..6c2660b0 100644 --- a/fastembed/rerank/cross_encoder/text_cross_encoder_base.py +++ b/fastembed/rerank/cross_encoder/text_cross_encoder_base.py @@ -1,4 +1,4 @@ -from typing import Any, Iterable, Optional +from typing import Any, Iterable from fastembed.common.model_description import BaseModelDescription from fastembed.common.model_management import ModelManagement @@ -8,8 +8,8 @@ class TextCrossEncoderBase(ModelManagement[BaseModelDescription]): def __init__( self, model_name: str, - cache_dir: Optional[str] = None, - threads: Optional[int] = None, + cache_dir: str | None = None, + threads: int | None = None, **kwargs: Any, ): self.model_name = model_name @@ -41,7 +41,7 @@ def rerank_pairs( self, pairs: Iterable[tuple[str, str]], batch_size: int = 64, - parallel: Optional[int] = None, + parallel: int | None = None, **kwargs: Any, ) -> Iterable[float]: """Rerank query-document pairs. 
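The annotation changes applied across the files above follow a single pattern: `Optional[X]` becomes `X | None`, `Union[A, B]` becomes `A | B` (PEP 604), and `TypeAlias` is imported from `typing` directly instead of `typing_extensions` (PEP 613). Both features require Python 3.10 at runtime (absent `from __future__ import annotations`), which is why 3.9 is removed from every CI matrix at the top of this diff. A minimal before/after sketch of the style — `load_model` here is a made-up helper for illustration, not a fastembed API:

```python
from pathlib import Path
from typing import Optional, Union, TypeAlias  # TypeAlias is in the stdlib since 3.10

# Old style (3.9-compatible), as removed by this diff:
PathInputOld = Union[str, Path]

def load_model_old(path: Optional[PathInputOld] = None, threads: Optional[int] = None) -> None: ...

# New style (requires 3.10+), as introduced by this diff:
PathInput: TypeAlias = str | Path

def load_model(path: PathInput | None = None, threads: int | None = None) -> None: ...
```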
diff --git a/fastembed/sparse/bm25.py b/fastembed/sparse/bm25.py index 8265a621..c31acd12 100644 --- a/fastembed/sparse/bm25.py +++ b/fastembed/sparse/bm25.py @@ -2,7 +2,7 @@ from collections import defaultdict from multiprocessing import get_all_start_methods from pathlib import Path -from typing import Any, Iterable, Optional, Type, Union +from typing import Any, Iterable, Type import mmh3 import numpy as np @@ -91,14 +91,14 @@ class Bm25(SparseTextEmbeddingBase): def __init__( self, model_name: str, - cache_dir: Optional[str] = None, + cache_dir: str | None = None, k: float = 1.2, b: float = 0.75, avg_len: float = 256.0, language: str = "english", token_max_length: int = 40, disable_stemmer: bool = False, - specific_model_path: Optional[str] = None, + specific_model_path: str | None = None, **kwargs: Any, ): super().__init__(model_name, cache_dir, **kwargs) @@ -158,11 +158,11 @@ def _embed_documents( self, model_name: str, cache_dir: str, - documents: Union[str, Iterable[str]], + documents: str | Iterable[str], batch_size: int = 256, - parallel: Optional[int] = None, + parallel: int | None = None, local_files_only: bool = False, - specific_model_path: Optional[str] = None, + specific_model_path: str | None = None, ) -> Iterable[SparseEmbedding]: is_small = False @@ -205,9 +205,9 @@ def _embed_documents( def embed( self, - documents: Union[str, Iterable[str]], + documents: str | Iterable[str], batch_size: int = 256, - parallel: Optional[int] = None, + parallel: int | None = None, **kwargs: Any, ) -> Iterable[SparseEmbedding]: """ @@ -268,7 +268,7 @@ def raw_embed( embeddings.append(SparseEmbedding.from_dict(token_id2value)) return embeddings - def token_count(self, texts: Union[str, Iterable[str]], **kwargs: Any) -> int: + def token_count(self, texts: str | Iterable[str], **kwargs: Any) -> int: token_num = 0 texts = [texts] if isinstance(texts, str) else texts for text in texts: @@ -311,9 +311,7 @@ def _term_frequency(self, tokens: list[str]) -> dict[int, float]: def compute_token_id(cls, token: str) -> int: return abs(mmh3.hash(token)) - def query_embed( - self, query: Union[str, Iterable[str]], **kwargs: Any - ) -> Iterable[SparseEmbedding]: + def query_embed(self, query: str | Iterable[str], **kwargs: Any) -> Iterable[SparseEmbedding]: """To emulate BM25 behaviour, we don't need to use weights in the query, and it's enough to just hash the tokens and assign a weight of 1.0 to them. 
""" diff --git a/fastembed/sparse/bm42.py b/fastembed/sparse/bm42.py index 536ba61e..ce564967 100644 --- a/fastembed/sparse/bm42.py +++ b/fastembed/sparse/bm42.py @@ -1,7 +1,7 @@ import math import string from pathlib import Path -from typing import Any, Iterable, Optional, Sequence, Type, Union +from typing import Any, Iterable, Sequence, Type import mmh3 import numpy as np @@ -65,15 +65,15 @@ class Bm42(SparseTextEmbeddingBase, OnnxTextModel[SparseEmbedding]): def __init__( self, model_name: str, - cache_dir: Optional[str] = None, - threads: Optional[int] = None, - providers: Optional[Sequence[OnnxProvider]] = None, + cache_dir: str | None = None, + threads: int | None = None, + providers: Sequence[OnnxProvider] | None = None, alpha: float = 0.5, cuda: bool = False, - device_ids: Optional[list[int]] = None, + device_ids: list[int] | None = None, lazy_load: bool = False, - device_id: Optional[int] = None, - specific_model_path: Optional[str] = None, + device_id: int | None = None, + specific_model_path: str | None = None, **kwargs: Any, ): """ @@ -110,7 +110,7 @@ def __init__( self.cuda = cuda # This device_id will be used if we need to load model in current process - self.device_id: Optional[int] = None + self.device_id: int | None = None if device_id is not None: self.device_id = device_id elif self.device_ids is not None: @@ -282,9 +282,9 @@ def _load_stopwords(cls, model_dir: Path) -> list[str]: def embed( self, - documents: Union[str, Iterable[str]], + documents: str | Iterable[str], batch_size: int = 256, - parallel: Optional[int] = None, + parallel: int | None = None, **kwargs: Any, ) -> Iterable[SparseEmbedding]: """ @@ -325,9 +325,7 @@ def _query_rehash(cls, tokens: Iterable[str]) -> dict[int, float]: result[token_id] = 1.0 return result - def query_embed( - self, query: Union[str, Iterable[str]], **kwargs: Any - ) -> Iterable[SparseEmbedding]: + def query_embed(self, query: str | Iterable[str], **kwargs: Any) -> Iterable[SparseEmbedding]: """ To emulate BM25 behaviour, we don't need to use smart weights in the query, and it's enough to just hash the tokens and assign a weight of 1.0 to them. 
@@ -353,7 +351,7 @@ def _get_worker_class(cls) -> Type[TextEmbeddingWorker[SparseEmbedding]]: return Bm42TextEmbeddingWorker def token_count( - self, texts: Union[str, Iterable[str]], batch_size: int = 1024, **kwargs: Any + self, texts: str | Iterable[str], batch_size: int = 1024, **kwargs: Any ) -> int: if not hasattr(self, "model") or self.model is None: self.load_onnx_model() # loads the tokenizer as well diff --git a/fastembed/sparse/minicoil.py b/fastembed/sparse/minicoil.py index 04d74793..611c38d4 100644 --- a/fastembed/sparse/minicoil.py +++ b/fastembed/sparse/minicoil.py @@ -1,6 +1,6 @@ from pathlib import Path -from typing import Any, Optional, Sequence, Iterable, Union, Type +from typing import Any, Sequence, Iterable, Type import numpy as np from numpy.typing import NDArray @@ -72,17 +72,17 @@ class MiniCOIL(SparseTextEmbeddingBase, OnnxTextModel[SparseEmbedding]): def __init__( self, model_name: str, - cache_dir: Optional[str] = None, - threads: Optional[int] = None, - providers: Optional[Sequence[OnnxProvider]] = None, + cache_dir: str | None = None, + threads: int | None = None, + providers: Sequence[OnnxProvider] | None = None, k: float = 1.2, b: float = 0.75, avg_len: float = 150.0, cuda: bool = False, - device_ids: Optional[list[int]] = None, + device_ids: list[int] | None = None, lazy_load: bool = False, - device_id: Optional[int] = None, - specific_model_path: Optional[str] = None, + device_id: int | None = None, + specific_model_path: str | None = None, **kwargs: Any, ): """ @@ -124,15 +124,15 @@ def __init__( self.avg_len = avg_len # Initialize class attributes - self.tokenizer: Optional[Tokenizer] = None + self.tokenizer: Tokenizer | None = None self.invert_vocab: dict[int, str] = {} self.special_tokens: set[str] = set() self.special_tokens_ids: set[int] = set() self.stopwords: set[str] = set() - self.vocab_resolver: Optional[VocabResolver] = None - self.encoder: Optional[Encoder] = None - self.output_dim: Optional[int] = None - self.sparse_vector_converter: Optional[SparseVectorConverter] = None + self.vocab_resolver: VocabResolver | None = None + self.encoder: Encoder | None = None + self.output_dim: int | None = None + self.sparse_vector_converter: SparseVectorConverter | None = None self.model_description = self._get_model_description(model_name) self.cache_dir = str(define_cache_dir(cache_dir)) @@ -188,15 +188,15 @@ def load_onnx_model(self) -> None: ) def token_count( - self, texts: Union[str, Iterable[str]], batch_size: int = 1024, **kwargs: Any + self, texts: str | Iterable[str], batch_size: int = 1024, **kwargs: Any ) -> int: return self._token_count(texts, batch_size=batch_size, **kwargs) def embed( self, - documents: Union[str, Iterable[str]], + documents: str | Iterable[str], batch_size: int = 256, - parallel: Optional[int] = None, + parallel: int | None = None, **kwargs: Any, ) -> Iterable[SparseEmbedding]: """ @@ -233,9 +233,7 @@ def embed( **kwargs, ) - def query_embed( - self, query: Union[str, Iterable[str]], **kwargs: Any - ) -> Iterable[SparseEmbedding]: + def query_embed(self, query: str | Iterable[str], **kwargs: Any) -> Iterable[SparseEmbedding]: """ Encode a list of queries into list of embeddings. 
""" diff --git a/fastembed/sparse/sparse_embedding_base.py b/fastembed/sparse/sparse_embedding_base.py index 47026f65..ea9a4907 100644 --- a/fastembed/sparse/sparse_embedding_base.py +++ b/fastembed/sparse/sparse_embedding_base.py @@ -1,5 +1,5 @@ from dataclasses import dataclass -from typing import Iterable, Optional, Union, Any +from typing import Iterable, Any import numpy as np from numpy.typing import NDArray @@ -12,7 +12,7 @@ @dataclass class SparseEmbedding: values: NumpyArray - indices: Union[NDArray[np.int64], NDArray[np.int32]] + indices: NDArray[np.int64] | NDArray[np.int32] def as_object(self) -> dict[str, NumpyArray]: return { @@ -35,8 +35,8 @@ class SparseTextEmbeddingBase(ModelManagement[SparseModelDescription]): def __init__( self, model_name: str, - cache_dir: Optional[str] = None, - threads: Optional[int] = None, + cache_dir: str | None = None, + threads: int | None = None, **kwargs: Any, ): self.model_name = model_name @@ -46,9 +46,9 @@ def __init__( def embed( self, - documents: Union[str, Iterable[str]], + documents: str | Iterable[str], batch_size: int = 256, - parallel: Optional[int] = None, + parallel: int | None = None, **kwargs: Any, ) -> Iterable[SparseEmbedding]: raise NotImplementedError() @@ -68,9 +68,7 @@ def passage_embed(self, texts: Iterable[str], **kwargs: Any) -> Iterable[SparseE # This is model-specific, so that different models can have specialized implementations yield from self.embed(texts, **kwargs) - def query_embed( - self, query: Union[str, Iterable[str]], **kwargs: Any - ) -> Iterable[SparseEmbedding]: + def query_embed(self, query: str | Iterable[str], **kwargs: Any) -> Iterable[SparseEmbedding]: """ Embeds queries @@ -87,6 +85,6 @@ def query_embed( else: yield from self.embed(query, **kwargs) - def token_count(self, texts: Union[str, Iterable[str]], **kwargs: Any) -> int: + def token_count(self, texts: str | Iterable[str], **kwargs: Any) -> int: """Returns the number of tokens in the texts.""" raise NotImplementedError("Subclasses must implement this method") diff --git a/fastembed/sparse/sparse_text_embedding.py b/fastembed/sparse/sparse_text_embedding.py index 6f51f69e..cbd59057 100644 --- a/fastembed/sparse/sparse_text_embedding.py +++ b/fastembed/sparse/sparse_text_embedding.py @@ -1,4 +1,4 @@ -from typing import Any, Iterable, Optional, Sequence, Type, Union +from typing import Any, Iterable, Sequence, Type from dataclasses import asdict from fastembed.common import OnnxProvider @@ -53,11 +53,11 @@ def _list_supported_models(cls) -> list[SparseModelDescription]: def __init__( self, model_name: str, - cache_dir: Optional[str] = None, - threads: Optional[int] = None, - providers: Optional[Sequence[OnnxProvider]] = None, + cache_dir: str | None = None, + threads: int | None = None, + providers: Sequence[OnnxProvider] | None = None, cuda: bool = False, - device_ids: Optional[list[int]] = None, + device_ids: list[int] | None = None, lazy_load: bool = False, **kwargs: Any, ): @@ -93,9 +93,9 @@ def __init__( def embed( self, - documents: Union[str, Iterable[str]], + documents: str | Iterable[str], batch_size: int = 256, - parallel: Optional[int] = None, + parallel: int | None = None, **kwargs: Any, ) -> Iterable[SparseEmbedding]: """ @@ -115,9 +115,7 @@ def embed( """ yield from self.model.embed(documents, batch_size, parallel, **kwargs) - def query_embed( - self, query: Union[str, Iterable[str]], **kwargs: Any - ) -> Iterable[SparseEmbedding]: + def query_embed(self, query: str | Iterable[str], **kwargs: Any) -> Iterable[SparseEmbedding]: """ 
Embeds queries @@ -130,7 +128,7 @@ def query_embed( yield from self.model.query_embed(query, **kwargs) def token_count( - self, texts: Union[str, Iterable[str]], batch_size: int = 1024, **kwargs: Any + self, texts: str | Iterable[str], batch_size: int = 1024, **kwargs: Any ) -> int: """Returns the number of tokens in the texts. diff --git a/fastembed/sparse/splade_pp.py b/fastembed/sparse/splade_pp.py index 8480cb10..9c739b2c 100644 --- a/fastembed/sparse/splade_pp.py +++ b/fastembed/sparse/splade_pp.py @@ -1,4 +1,4 @@ -from typing import Any, Iterable, Optional, Sequence, Type, Union +from typing import Any, Iterable, Sequence, Type import numpy as np from fastembed.common import OnnxProvider @@ -54,7 +54,7 @@ def _post_process_onnx_output( yield SparseEmbedding(values=scores, indices=indices) def token_count( - self, texts: Union[str, Iterable[str]], batch_size: int = 1024, **kwargs: Any + self, texts: str | Iterable[str], batch_size: int = 1024, **kwargs: Any ) -> int: return self._token_count(texts, batch_size=batch_size, **kwargs) @@ -70,14 +70,14 @@ def _list_supported_models(cls) -> list[SparseModelDescription]: def __init__( self, model_name: str, - cache_dir: Optional[str] = None, - threads: Optional[int] = None, - providers: Optional[Sequence[OnnxProvider]] = None, + cache_dir: str | None = None, + threads: int | None = None, + providers: Sequence[OnnxProvider] | None = None, cuda: bool = False, - device_ids: Optional[list[int]] = None, + device_ids: list[int] | None = None, lazy_load: bool = False, - device_id: Optional[int] = None, - specific_model_path: Optional[str] = None, + device_id: int | None = None, + specific_model_path: str | None = None, **kwargs: Any, ): """ @@ -111,7 +111,7 @@ def __init__( self.cuda = cuda # This device_id will be used if we need to load model in current process - self.device_id: Optional[int] = None + self.device_id: int | None = None if device_id is not None: self.device_id = device_id elif self.device_ids is not None: @@ -144,9 +144,9 @@ def load_onnx_model(self) -> None: def embed( self, - documents: Union[str, Iterable[str]], + documents: str | Iterable[str], batch_size: int = 256, - parallel: Optional[int] = None, + parallel: int | None = None, **kwargs: Any, ) -> Iterable[SparseEmbedding]: """ diff --git a/fastembed/sparse/utils/sparse_vectors_converter.py b/fastembed/sparse/utils/sparse_vectors_converter.py index 36a2e298..f856f0de 100644 --- a/fastembed/sparse/utils/sparse_vectors_converter.py +++ b/fastembed/sparse/utils/sparse_vectors_converter.py @@ -1,12 +1,11 @@ -from typing import Dict, List, Set -from py_rust_stemmers import SnowballStemmer -from fastembed.common.utils import get_all_punctuation, remove_non_alphanumeric -import mmh3 import copy from dataclasses import dataclass +import mmh3 import numpy as np +from py_rust_stemmers import SnowballStemmer +from fastembed.common.utils import get_all_punctuation, remove_non_alphanumeric from fastembed.sparse.sparse_embedding_base import SparseEmbedding GAP = 32000 @@ -16,16 +15,16 @@ @dataclass class WordEmbedding: word: str - forms: List[str] + forms: list[str] count: int word_id: int - embedding: List[float] + embedding: list[float] class SparseVectorConverter: def __init__( self, - stopwords: Set[str], + stopwords: set[str], stemmer: SnowballStemmer, k: float = 1.2, b: float = 0.75, @@ -58,15 +57,15 @@ def bm25_tf(self, num_occurrences: int, sentence_len: int) -> float: return res @classmethod - def normalize_vector(cls, vector: List[float]) -> List[float]: + def 
normalize_vector(cls, vector: list[float]) -> list[float]: norm = sum([x**2 for x in vector]) ** 0.5 if norm < 1e-8: return vector return [x / norm for x in vector] def clean_words( - self, sentence_embedding: Dict[str, WordEmbedding], token_max_length: int = 40 - ) -> Dict[str, WordEmbedding]: + self, sentence_embedding: dict[str, WordEmbedding], token_max_length: int = 40 + ) -> dict[str, WordEmbedding]: """ Clean miniCOIL-produced sentence_embedding, as unknown to the miniCOIL's stemmer tokens should fully resemble our BM25 token representation. @@ -85,7 +84,7 @@ def clean_words( } """ - new_sentence_embedding: Dict[str, WordEmbedding] = {} + new_sentence_embedding: dict[str, WordEmbedding] = {} for word, embedding in sentence_embedding.items(): # embedding = { @@ -127,7 +126,7 @@ def clean_words( def embedding_to_vector( self, - sentence_embedding: Dict[str, WordEmbedding], + sentence_embedding: dict[str, WordEmbedding], embedding_size: int, vocab_size: int, ) -> SparseEmbedding: @@ -156,14 +155,14 @@ def embedding_to_vector( """ - indices: List[int] = [] - values: List[float] = [] - + indices: list[int] = [] + values: list[float] = [] + # Example: # vocab_size = 10000 # embedding_size = 4 # GAP = 32000 - # + # # We want to start random words section from the bucket, that is guaranteed to not # include any vocab words. # We need (vocab_size * embedding_size) slots for vocab words. @@ -171,9 +170,7 @@ def embedding_to_vector( # Therefore, we can start random words from bucket (vocab_size * embedding_size) // GAP + 1 + 1 # ID at which the scope of OOV words starts - unknown_words_shift = ( - (vocab_size * embedding_size) // GAP + 2 - ) * GAP + unknown_words_shift = ((vocab_size * embedding_size) // GAP + 2) * GAP sentence_embedding_cleaned = self.clean_words(sentence_embedding) # Calculate sentence length after cleaning @@ -208,7 +205,7 @@ def embedding_to_vector( def embedding_to_vector_query( self, - sentence_embedding: Dict[str, WordEmbedding], + sentence_embedding: dict[str, WordEmbedding], embedding_size: int, vocab_size: int, ) -> SparseEmbedding: @@ -216,8 +213,8 @@ def embedding_to_vector_query( Same as `embedding_to_vector`, but no TF """ - indices: List[int] = [] - values: List[float] = [] + indices: list[int] = [] + values: list[float] = [] # ID at which the scope of OOV words starts unknown_words_shift = ((vocab_size * embedding_size) // GAP + 2) * GAP diff --git a/fastembed/text/custom_text_embedding.py b/fastembed/text/custom_text_embedding.py index bc8a8def..55692a22 100644 --- a/fastembed/text/custom_text_embedding.py +++ b/fastembed/text/custom_text_embedding.py @@ -1,4 +1,4 @@ -from typing import Optional, Sequence, Any, Iterable +from typing import Sequence, Any, Iterable from dataclasses import dataclass @@ -29,14 +29,14 @@ class CustomTextEmbedding(OnnxTextEmbedding): def __init__( self, model_name: str, - cache_dir: Optional[str] = None, - threads: Optional[int] = None, - providers: Optional[Sequence[OnnxProvider]] = None, + cache_dir: str | None = None, + threads: int | None = None, + providers: Sequence[OnnxProvider] | None = None, cuda: bool = False, - device_ids: Optional[list[int]] = None, + device_ids: list[int] | None = None, lazy_load: bool = False, - device_id: Optional[int] = None, - specific_model_path: Optional[str] = None, + device_id: int | None = None, + specific_model_path: str | None = None, **kwargs: Any, ): super().__init__( @@ -64,7 +64,7 @@ def _post_process_onnx_output( return self._normalize(self._pool(output.model_output, 
output.attention_mask)) def _pool( - self, embeddings: NumpyArray, attention_mask: Optional[NDArray[np.int64]] = None + self, embeddings: NumpyArray, attention_mask: NDArray[np.int64] | None = None ) -> NumpyArray: if self._pooling == PoolingType.CLS: return embeddings[:, 0] diff --git a/fastembed/text/multitask_embedding.py b/fastembed/text/multitask_embedding.py index d8b28737..73ce2b1b 100644 --- a/fastembed/text/multitask_embedding.py +++ b/fastembed/text/multitask_embedding.py @@ -1,5 +1,5 @@ from enum import Enum -from typing import Any, Type, Iterable, Union, Optional +from typing import Any, Type, Iterable import numpy as np @@ -45,11 +45,9 @@ class JinaEmbeddingV3(PooledNormalizedEmbedding): PASSAGE_TASK = Task.RETRIEVAL_PASSAGE QUERY_TASK = Task.RETRIEVAL_QUERY - def __init__(self, *args: Any, task_id: Optional[int] = None, **kwargs: Any): + def __init__(self, *args: Any, task_id: int | None = None, **kwargs: Any): super().__init__(*args, **kwargs) - self.default_task_id: Union[Task, int] = ( - task_id if task_id is not None else self.PASSAGE_TASK - ) + self.default_task_id: Task | int = task_id if task_id is not None else self.PASSAGE_TASK @classmethod def _get_worker_class(cls) -> Type[OnnxTextEmbeddingWorker]: @@ -62,7 +60,7 @@ def _list_supported_models(cls) -> list[DenseModelDescription]: def _preprocess_onnx_input( self, onnx_input: dict[str, NumpyArray], - task_id: Optional[Union[int, Task]] = None, + task_id: int | Task | None = None, **kwargs: Any, ) -> dict[str, NumpyArray]: if task_id is None: @@ -72,10 +70,10 @@ def _preprocess_onnx_input( def embed( self, - documents: Union[str, Iterable[str]], + documents: str | Iterable[str], batch_size: int = 256, - parallel: Optional[int] = None, - task_id: Optional[int] = None, + parallel: int | None = None, + task_id: int | None = None, **kwargs: Any, ) -> Iterable[NumpyArray]: task_id = ( @@ -83,7 +81,7 @@ def embed( ) # required for multiprocessing yield from super().embed(documents, batch_size, parallel, task_id=task_id, **kwargs) - def query_embed(self, query: Union[str, Iterable[str]], **kwargs: Any) -> Iterable[NumpyArray]: + def query_embed(self, query: str | Iterable[str], **kwargs: Any) -> Iterable[NumpyArray]: yield from super().embed(query, task_id=self.QUERY_TASK, **kwargs) def passage_embed(self, texts: Iterable[str], **kwargs: Any) -> Iterable[NumpyArray]: diff --git a/fastembed/text/onnx_embedding.py b/fastembed/text/onnx_embedding.py index 2e3fc7d2..773ad3ed 100644 --- a/fastembed/text/onnx_embedding.py +++ b/fastembed/text/onnx_embedding.py @@ -1,4 +1,4 @@ -from typing import Any, Iterable, Optional, Sequence, Type, Union +from typing import Any, Iterable, Sequence, Type from fastembed.common.types import NumpyArray, OnnxProvider from fastembed.common.onnx_model import OnnxOutputContext @@ -199,14 +199,14 @@ def _list_supported_models(cls) -> list[DenseModelDescription]: def __init__( self, model_name: str = "BAAI/bge-small-en-v1.5", - cache_dir: Optional[str] = None, - threads: Optional[int] = None, - providers: Optional[Sequence[OnnxProvider]] = None, + cache_dir: str | None = None, + threads: int | None = None, + providers: Sequence[OnnxProvider] | None = None, cuda: bool = False, - device_ids: Optional[list[int]] = None, + device_ids: list[int] | None = None, lazy_load: bool = False, - device_id: Optional[int] = None, - specific_model_path: Optional[str] = None, + device_id: int | None = None, + specific_model_path: str | None = None, **kwargs: Any, ): """ @@ -239,7 +239,7 @@ def __init__( self.cuda = cuda 
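Aside: returning to the `sparse_vectors_converter.py` hunk above, the `unknown_words_shift` comment walks through the bucket arithmetic, and the numbers are easy to check by hand. A self-contained sketch of that calculation, using the example constants from the comment (an illustration, not part of the patch):

GAP = 32000  # bucket width used for hashing out-of-vocabulary words


def unknown_words_shift(vocab_size: int, embedding_size: int) -> int:
    # Vocab ids occupy slots [0, vocab_size * embedding_size), which may extend into
    # bucket (vocab_size * embedding_size) // GAP; the OOV range starts two buckets
    # after that, guaranteeing it never overlaps the vocab slots.
    return ((vocab_size * embedding_size) // GAP + 2) * GAP


# With the example values from the comment: 10_000 * 4 = 40_000 slots,
# 40_000 // 32_000 = 1, so OOV ids start at (1 + 2) * 32_000 = 96_000.
assert unknown_words_shift(vocab_size=10_000, embedding_size=4) == 96_000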
# This device_id will be used if we need to load model in current process - self.device_id: Optional[int] = None + self.device_id: int | None = None if device_id is not None: self.device_id = device_id elif self.device_ids is not None: @@ -260,9 +260,9 @@ def __init__( def embed( self, - documents: Union[str, Iterable[str]], + documents: str | Iterable[str], batch_size: int = 256, - parallel: Optional[int] = None, + parallel: int | None = None, **kwargs: Any, ) -> Iterable[NumpyArray]: """ @@ -332,7 +332,7 @@ def load_onnx_model(self) -> None: ) def token_count( - self, texts: Union[str, Iterable[str]], batch_size: int = 1024, **kwargs: Any + self, texts: str | Iterable[str], batch_size: int = 1024, **kwargs: Any ) -> int: return self._token_count(texts, batch_size=batch_size, **kwargs) diff --git a/fastembed/text/onnx_text_model.py b/fastembed/text/onnx_text_model.py index 16dd6946..b4ecfac3 100644 --- a/fastembed/text/onnx_text_model.py +++ b/fastembed/text/onnx_text_model.py @@ -1,7 +1,7 @@ import os from multiprocessing import get_all_start_methods from pathlib import Path -from typing import Any, Iterable, Optional, Sequence, Type, Union +from typing import Any, Iterable, Sequence, Type import numpy as np from numpy.typing import NDArray @@ -15,7 +15,7 @@ class OnnxTextModel(OnnxModel[T]): - ONNX_OUTPUT_NAMES: Optional[list[str]] = None + ONNX_OUTPUT_NAMES: list[str] | None = None @classmethod def _get_worker_class(cls) -> Type["TextEmbeddingWorker[T]"]: @@ -35,12 +35,12 @@ def _post_process_onnx_output(self, output: OnnxOutputContext, **kwargs: Any) -> def __init__(self) -> None: super().__init__() - self.tokenizer: Optional[Tokenizer] = None + self.tokenizer: Tokenizer | None = None self.special_token_to_id: dict[str, int] = {} def _preprocess_onnx_input( self, onnx_input: dict[str, NumpyArray], **kwargs: Any - ) -> dict[str, Union[NumpyArray, NDArray[np.int64]]]: + ) -> dict[str, NumpyArray | NDArray[np.int64]]: """ Preprocess the onnx input. 
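Aside: the `token_count` overrides above all delegate to `_token_count`, whose body is only partially visible in these hunks. For orientation, counting tokens with a loaded `tokenizers.Tokenizer` generally looks like the following — a hedged approximation of the idea, not the library's actual implementation:

from typing import Iterable

from tokenizers import Tokenizer


def count_tokens(tokenizer: Tokenizer, texts: str | Iterable[str], batch_size: int = 1024) -> int:
    # Normalize the union input, then tokenize in batches and sum token ids per document.
    docs = [texts] if isinstance(texts, str) else list(texts)
    total = 0
    for start in range(0, len(docs), batch_size):
        encodings = tokenizer.encode_batch(docs[start : start + batch_size])
        total += sum(len(encoding.ids) for encoding in encodings)
    return total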
""" @@ -50,11 +50,11 @@ def _load_onnx_model( self, model_dir: Path, model_file: str, - threads: Optional[int], - providers: Optional[Sequence[OnnxProvider]] = None, + threads: int | None, + providers: Sequence[OnnxProvider] | None = None, cuda: bool = False, - device_id: Optional[int] = None, - extra_session_options: Optional[dict[str, Any]] = None, + device_id: int | None = None, + extra_session_options: dict[str, Any] | None = None, ) -> None: super()._load_onnx_model( model_dir=model_dir, @@ -104,15 +104,15 @@ def _embed_documents( self, model_name: str, cache_dir: str, - documents: Union[str, Iterable[str]], + documents: str | Iterable[str], batch_size: int = 256, - parallel: Optional[int] = None, - providers: Optional[Sequence[OnnxProvider]] = None, + parallel: int | None = None, + providers: Sequence[OnnxProvider] | None = None, cuda: bool = False, - device_ids: Optional[list[int]] = None, + device_ids: list[int] | None = None, local_files_only: bool = False, - specific_model_path: Optional[str] = None, - extra_session_options: Optional[dict[str, Any]] = None, + specific_model_path: str | None = None, + extra_session_options: dict[str, Any] | None = None, **kwargs: Any, ) -> Iterable[T]: is_small = False @@ -159,9 +159,7 @@ def _embed_documents( for batch in pool.ordered_map(iter_batch(documents, batch_size), **params): yield from self._post_process_onnx_output(batch, **kwargs) # type: ignore - def _token_count( - self, texts: Union[str, Iterable[str]], batch_size: int = 1024, **_: Any - ) -> int: + def _token_count(self, texts: str | Iterable[str], batch_size: int = 1024, **_: Any) -> int: if not hasattr(self, "model") or self.model is None: self.load_onnx_model() # loads the tokenizer as well diff --git a/fastembed/text/text_embedding.py b/fastembed/text/text_embedding.py index 0c58a7f3..54ece67d 100644 --- a/fastembed/text/text_embedding.py +++ b/fastembed/text/text_embedding.py @@ -1,5 +1,5 @@ import warnings -from typing import Any, Iterable, Optional, Sequence, Type, Union +from typing import Any, Iterable, Sequence, Type from dataclasses import asdict from fastembed.common.types import NumpyArray, OnnxProvider @@ -51,7 +51,7 @@ def add_custom_model( description: str = "", license: str = "", size_in_gb: float = 0.0, - additional_files: Optional[list[str]] = None, + additional_files: list[str] | None = None, ) -> None: registered_models = cls._list_supported_models() for registered_model in registered_models: @@ -79,11 +79,11 @@ def add_custom_model( def __init__( self, model_name: str = "BAAI/bge-small-en-v1.5", - cache_dir: Optional[str] = None, - threads: Optional[int] = None, - providers: Optional[Sequence[OnnxProvider]] = None, + cache_dir: str | None = None, + threads: int | None = None, + providers: Sequence[OnnxProvider] | None = None, cuda: bool = False, - device_ids: Optional[list[int]] = None, + device_ids: list[int] | None = None, lazy_load: bool = False, **kwargs: Any, ): @@ -149,7 +149,7 @@ def get_embedding_size(cls, model_name: str) -> int: ValueError: If the model name is not found in the supported models. 
""" descriptions = cls._list_supported_models() - embedding_size: Optional[int] = None + embedding_size: int | None = None for description in descriptions: if description.model.lower() == model_name.lower(): embedding_size = description.dim @@ -164,9 +164,9 @@ def get_embedding_size(cls, model_name: str) -> int: def embed( self, - documents: Union[str, Iterable[str]], + documents: str | Iterable[str], batch_size: int = 256, - parallel: Optional[int] = None, + parallel: int | None = None, **kwargs: Any, ) -> Iterable[NumpyArray]: """ @@ -186,7 +186,7 @@ def embed( """ yield from self.model.embed(documents, batch_size, parallel, **kwargs) - def query_embed(self, query: Union[str, Iterable[str]], **kwargs: Any) -> Iterable[NumpyArray]: + def query_embed(self, query: str | Iterable[str], **kwargs: Any) -> Iterable[NumpyArray]: """ Embeds queries @@ -214,7 +214,7 @@ def passage_embed(self, texts: Iterable[str], **kwargs: Any) -> Iterable[NumpyAr yield from self.model.passage_embed(texts, **kwargs) def token_count( - self, texts: Union[str, Iterable[str]], batch_size: int = 1024, **kwargs: Any + self, texts: str | Iterable[str], batch_size: int = 1024, **kwargs: Any ) -> int: """Returns the number of tokens in the texts. diff --git a/fastembed/text/text_embedding_base.py b/fastembed/text/text_embedding_base.py index a11ecae0..13bc4f73 100644 --- a/fastembed/text/text_embedding_base.py +++ b/fastembed/text/text_embedding_base.py @@ -1,4 +1,4 @@ -from typing import Iterable, Optional, Union, Any +from typing import Iterable, Any from fastembed.common.model_description import DenseModelDescription from fastembed.common.types import NumpyArray @@ -9,21 +9,21 @@ class TextEmbeddingBase(ModelManagement[DenseModelDescription]): def __init__( self, model_name: str, - cache_dir: Optional[str] = None, - threads: Optional[int] = None, + cache_dir: str | None = None, + threads: int | None = None, **kwargs: Any, ): self.model_name = model_name self.cache_dir = cache_dir self.threads = threads self._local_files_only = kwargs.pop("local_files_only", False) - self._embedding_size: Optional[int] = None + self._embedding_size: int | None = None def embed( self, - documents: Union[str, Iterable[str]], + documents: str | Iterable[str], batch_size: int = 256, - parallel: Optional[int] = None, + parallel: int | None = None, **kwargs: Any, ) -> Iterable[NumpyArray]: raise NotImplementedError() @@ -43,7 +43,7 @@ def passage_embed(self, texts: Iterable[str], **kwargs: Any) -> Iterable[NumpyAr # This is model-specific, so that different models can have specialized implementations yield from self.embed(texts, **kwargs) - def query_embed(self, query: Union[str, Iterable[str]], **kwargs: Any) -> Iterable[NumpyArray]: + def query_embed(self, query: str | Iterable[str], **kwargs: Any) -> Iterable[NumpyArray]: """ Embeds queries @@ -70,6 +70,6 @@ def embedding_size(self) -> int: """Returns embedding size for the current model""" raise NotImplementedError("Subclasses must implement this method") - def token_count(self, texts: Union[str, Iterable[str]], **kwargs: Any) -> int: + def token_count(self, texts: str | Iterable[str], **kwargs: Any) -> int: """Returns the number of tokens in the texts.""" raise NotImplementedError("Subclasses must implement this method") diff --git a/poetry.lock b/poetry.lock index b5417ea8..d6f688a6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -211,26 +211,6 @@ charset-normalizer = ["charset-normalizer"] html5lib = ["html5lib"] lxml = ["lxml"] -[[package]] -name = "bleach" -version = "6.2.0" 
-description = "An easy safelist-based HTML-sanitizing tool." -optional = false -python-versions = ">=3.9" -groups = ["dev", "docs"] -markers = "python_version < \"3.10\"" -files = [ - {file = "bleach-6.2.0-py3-none-any.whl", hash = "sha256:117d9c6097a7c3d22fd578fcd8d35ff1e125df6736f554da4e432fdd63f31e5e"}, - {file = "bleach-6.2.0.tar.gz", hash = "sha256:123e894118b8a599fd80d3ec1a6d4cc7ce4e5882b1317a7e1ba69b56e95f991f"}, -] - -[package.dependencies] -tinycss2 = {version = ">=1.1.0,<1.5", optional = true, markers = "extra == \"css\""} -webencodings = "*" - -[package.extras] -css = ["tinycss2 (>=1.1.0,<1.5)"] - [[package]] name = "bleach" version = "6.3.0" @@ -238,7 +218,6 @@ description = "An easy safelist-based HTML-sanitizing tool." optional = false python-versions = ">=3.10" groups = ["dev", "docs"] -markers = "python_version >= \"3.10\"" files = [ {file = "bleach-6.3.0-py3-none-any.whl", hash = "sha256:fe10ec77c93ddf3d13a73b035abaac7a9f5e436513864ccdad516693213c65d6"}, {file = "bleach-6.3.0.tar.gz", hash = "sha256:6f3b91b1c0a02bb9a78b5a454c92506aa0fdf197e1d5e114d2e00c6f64306d22"}, @@ -403,19 +382,6 @@ files = [ [package.dependencies] pycparser = {version = "*", markers = "implementation_name != \"PyPy\""} -[[package]] -name = "cfgv" -version = "3.4.0" -description = "Validate configuration and produce human readable error messages." -optional = false -python-versions = ">=3.8" -groups = ["dev"] -markers = "python_version < \"3.10\"" -files = [ - {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, - {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, -] - [[package]] name = "cfgv" version = "3.5.0" @@ -423,7 +389,6 @@ description = "Validate configuration and produce human readable error messages. 
optional = false python-versions = ">=3.10" groups = ["dev"] -markers = "python_version >= \"3.10\"" files = [ {file = "cfgv-3.5.0-py2.py3-none-any.whl", hash = "sha256:a8dc6b26ad22ff227d2634a65cb388215ce6cc96bbcc5cfde7641ae87e8dacc0"}, {file = "cfgv-3.5.0.tar.gz", hash = "sha256:d5b1034354820651caa73ede66a6294d6e95c1b00acc5e9b098e917404669132"}, @@ -552,22 +517,6 @@ files = [ {file = "charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a"}, ] -[[package]] -name = "click" -version = "8.1.8" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -groups = ["main", "docs"] -markers = "python_version < \"3.10\"" -files = [ - {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, - {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - [[package]] name = "click" version = "8.3.1" @@ -575,7 +524,6 @@ description = "Composable command line interface toolkit" optional = false python-versions = ">=3.10" groups = ["main", "docs"] -markers = "python_version >= \"3.10\"" files = [ {file = "click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6"}, {file = "click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a"}, @@ -733,7 +681,7 @@ description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" groups = ["main", "dev", "test"] -markers = "python_version <= \"3.10\"" +markers = "python_version == \"3.10\"" files = [ {file = "exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598"}, {file = "exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219"}, @@ -775,19 +723,6 @@ files = [ [package.extras] devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] -[[package]] -name = "filelock" -version = "3.19.1" -description = "A platform independent file lock." -optional = false -python-versions = ">=3.9" -groups = ["main", "dev"] -markers = "python_version < \"3.10\"" -files = [ - {file = "filelock-3.19.1-py3-none-any.whl", hash = "sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d"}, - {file = "filelock-3.19.1.tar.gz", hash = "sha256:66eda1888b0171c998b35be2bcc0f6d75c388a7ce20c3f3f37aa8e96c2dddf58"}, -] - [[package]] name = "filelock" version = "3.20.0" @@ -795,7 +730,6 @@ description = "A platform independent file lock." 
optional = false python-versions = ">=3.10" groups = ["main", "dev"] -markers = "python_version >= \"3.10\"" files = [ {file = "filelock-3.20.0-py3-none-any.whl", hash = "sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2"}, {file = "filelock-3.20.0.tar.gz", hash = "sha256:711e943b4ec6be42e1d4e6690b48dc175c822967466bb31c0c293f34334c13f4"}, @@ -841,14 +775,14 @@ files = [ [[package]] name = "fsspec" -version = "2025.10.0" +version = "2025.12.0" description = "File-system specification" optional = false -python-versions = ">=3.9" +python-versions = ">=3.10" groups = ["main"] files = [ - {file = "fsspec-2025.10.0-py3-none-any.whl", hash = "sha256:7c7712353ae7d875407f97715f0e1ffcc21e33d5b24556cb1e090ae9409ec61d"}, - {file = "fsspec-2025.10.0.tar.gz", hash = "sha256:b6789427626f068f9a83ca4e8a3cc050850b6c0f71f99ddb4f542b8266a26a59"}, + {file = "fsspec-2025.12.0-py3-none-any.whl", hash = "sha256:8bf1fe301b7d8acfa6e8571e3b1c3d158f909666642431cc78a1b7b4dbc5ec5b"}, + {file = "fsspec-2025.12.0.tar.gz", hash = "sha256:c505de011584597b1060ff778bb664c1bc022e87921b0e4f10cc9c44f9635973"}, ] [package.extras] @@ -926,7 +860,6 @@ files = [ [package.dependencies] gitdb = ">=4.0.1,<5" -typing-extensions = {version = ">=3.10.0.2", markers = "python_version < \"3.10\""} [package.extras] doc = ["sphinx (>=7.1.2,<7.2)", "sphinx-autodoc-typehints", "sphinx_rtd_theme"] @@ -1029,14 +962,14 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "huggingface-hub" -version = "1.1.7" +version = "1.2.1" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" optional = false python-versions = ">=3.9.0" groups = ["main"] files = [ - {file = "huggingface_hub-1.1.7-py3-none-any.whl", hash = "sha256:f3efa4779f4890e44c957bbbb0f197e6028887ad09f0cf95a21659fa7753605d"}, - {file = "huggingface_hub-1.1.7.tar.gz", hash = "sha256:3c84b6283caca928595f08fd42e9a572f17ec3501dec508c3f2939d94bfbd9d2"}, + {file = "huggingface_hub-1.2.1-py3-none-any.whl", hash = "sha256:8c74a41a16156337dfa1090873ca11f8c1d7b6efcbac9f6673d008a740207e6a"}, + {file = "huggingface_hub-1.2.1.tar.gz", hash = "sha256:1aced061fa1bd443c0ec80a4af432b8b70041d54860f7af334ceff599611a415"}, ] [package.dependencies] @@ -1108,44 +1041,6 @@ files = [ [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] -[[package]] -name = "importlib-metadata" -version = "8.7.0" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.9" -groups = ["dev", "docs"] -markers = "python_version < \"3.10\"" -files = [ - {file = "importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}, - {file = "importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000"}, -] - -[package.dependencies] -zipp = ">=3.20" - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -perf = ["ipython"] -test = ["flufl.flake8", "importlib_resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] -type = ["pytest-mypy"] - -[[package]] -name = "iniconfig" -version = "2.1.0" -description = "brain-dead simple config-ini 
parsing" -optional = false -python-versions = ">=3.8" -groups = ["test"] -markers = "python_version < \"3.10\"" -files = [ - {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, - {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, -] - [[package]] name = "iniconfig" version = "2.3.0" @@ -1153,47 +1048,11 @@ description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.10" groups = ["test"] -markers = "python_version >= \"3.10\"" files = [ {file = "iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12"}, {file = "iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730"}, ] -[[package]] -name = "ipykernel" -version = "6.31.0" -description = "IPython Kernel for Jupyter" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -markers = "python_version < \"3.10\"" -files = [ - {file = "ipykernel-6.31.0-py3-none-any.whl", hash = "sha256:abe5386f6ced727a70e0eb0cf1da801fa7c5fa6ff82147747d5a0406cd8c94af"}, - {file = "ipykernel-6.31.0.tar.gz", hash = "sha256:2372ce8bc1ff4f34e58cafed3a0feb2194b91fc7cad0fc72e79e47b45ee9e8f6"}, -] - -[package.dependencies] -appnope = {version = ">=0.1.2", markers = "platform_system == \"Darwin\""} -comm = ">=0.1.1" -debugpy = ">=1.6.5" -ipython = ">=7.23.1" -jupyter-client = ">=8.0.0" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -matplotlib-inline = ">=0.1" -nest-asyncio = ">=1.4" -packaging = ">=22" -psutil = ">=5.7" -pyzmq = ">=25" -tornado = ">=6.2" -traitlets = ">=5.4.0" - -[package.extras] -cov = ["coverage[toml]", "matplotlib", "pytest-cov", "trio"] -docs = ["intersphinx-registry", "myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] -pyqt5 = ["pyqt5"] -pyside6 = ["pyside6"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0,<9)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"] - [[package]] name = "ipykernel" version = "7.1.0" @@ -1201,7 +1060,6 @@ description = "IPython Kernel for Jupyter" optional = false python-versions = ">=3.10" groups = ["dev"] -markers = "python_version >= \"3.10\"" files = [ {file = "ipykernel-7.1.0-py3-none-any.whl", hash = "sha256:763b5ec6c5b7776f6a8d7ce09b267693b4e5ce75cb50ae696aaefb3c85e1ea4c"}, {file = "ipykernel-7.1.0.tar.gz", hash = "sha256:58a3fc88533d5930c3546dc7eac66c6d288acde4f801e2001e65edc5dc9cf0db"}, @@ -1229,45 +1087,6 @@ pyqt5 = ["pyqt5"] pyside6 = ["pyside6"] test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0,<9)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"] -[[package]] -name = "ipython" -version = "8.18.1" -description = "IPython: Productive Interactive Computing" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -markers = "python_version < \"3.10\"" -files = [ - {file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"}, - {file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -decorator = "*" -exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} -jedi = ">=0.16" -matplotlib-inline = "*" -pexpect = {version = ">4.3", markers = "sys_platform != 
\"win32\""} -prompt-toolkit = ">=3.0.41,<3.1.0" -pygments = ">=2.4.0" -stack-data = "*" -traitlets = ">=5" -typing-extensions = {version = "*", markers = "python_version < \"3.10\""} - -[package.extras] -all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] -black = ["black"] -doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] -kernel = ["ipykernel"] -nbconvert = ["nbconvert"] -nbformat = ["nbformat"] -notebook = ["ipywidgets", "notebook"] -parallel = ["ipyparallel"] -qtconsole = ["qtconsole"] -test = ["pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath"] -test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath", "trio"] - [[package]] name = "ipython" version = "8.37.0" @@ -1310,15 +1129,15 @@ test-extra = ["curio", "ipython[test]", "jupyter_ai", "matplotlib (!=3.2.0)", "n [[package]] name = "ipython" -version = "9.7.0" +version = "9.8.0" description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.11" groups = ["dev"] markers = "python_version >= \"3.11\"" files = [ - {file = "ipython-9.7.0-py3-none-any.whl", hash = "sha256:bce8ac85eb9521adc94e1845b4c03d88365fd6ac2f4908ec4ed1eb1b0a065f9f"}, - {file = "ipython-9.7.0.tar.gz", hash = "sha256:5f6de88c905a566c6a9d6c400a8fed54a638e1f7543d17aae2551133216b1e4e"}, + {file = "ipython-9.8.0-py3-none-any.whl", hash = "sha256:ebe6d1d58d7d988fbf23ff8ff6d8e1622cfdb194daf4b7b73b792c4ec3b85385"}, + {file = "ipython-9.8.0.tar.gz", hash = "sha256:8e4ce129a627eb9dd221c41b1d2cdaed4ef7c9da8c17c63f6f578fe231141f83"}, ] [package.dependencies] @@ -1486,49 +1305,26 @@ referencing = ">=0.31.0" [[package]] name = "jupyter-client" -version = "8.6.3" +version = "8.7.0" description = "Jupyter protocol implementation and client libraries" optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" groups = ["dev", "docs"] files = [ - {file = "jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f"}, - {file = "jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419"}, + {file = "jupyter_client-8.7.0-py3-none-any.whl", hash = "sha256:3671a94fd25e62f5f2f554f5e95389c2294d89822378a5f2dd24353e1494a9e0"}, + {file = "jupyter_client-8.7.0.tar.gz", hash = "sha256:3357212d9cbe01209e59190f67a3a7e1f387a4f4e88d1e0433ad84d7b262531d"}, ] [package.dependencies] -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-core = ">=5.1" python-dateutil = ">=2.8.2" -pyzmq = ">=23.0" -tornado = ">=6.2" +pyzmq = ">=25.0" +tornado = ">=6.4.1" traitlets = ">=5.3" [package.extras] docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko ; 
sys_platform == \"win32\"", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] - -[[package]] -name = "jupyter-core" -version = "5.8.1" -description = "Jupyter core package. A base package on which Jupyter projects rely." -optional = false -python-versions = ">=3.8" -groups = ["dev", "docs"] -markers = "python_version < \"3.10\"" -files = [ - {file = "jupyter_core-5.8.1-py3-none-any.whl", hash = "sha256:c28d268fc90fb53f1338ded2eb410704c5449a358406e8a948b75706e24863d0"}, - {file = "jupyter_core-5.8.1.tar.gz", hash = "sha256:0a5f9706f70e64786b75acba995988915ebd4601c8a52e534a40b51c95f59941"}, -] - -[package.dependencies] -platformdirs = ">=2.5" -pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} -traitlets = ">=5.3" - -[package.extras] -docs = ["intersphinx-registry", "myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-spelling", "traitlets"] -test = ["ipykernel", "pre-commit", "pytest (<9)", "pytest-cov", "pytest-timeout"] +test = ["anyio", "coverage", "ipykernel (>=6.14)", "mypy", "paramiko ; sys_platform == \"win32\"", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.6.2)", "pytest-timeout"] [[package]] name = "jupyter-core" @@ -1537,7 +1333,6 @@ description = "Jupyter core package. A base package on which Jupyter projects re optional = false python-versions = ">=3.10" groups = ["dev", "docs"] -markers = "python_version >= \"3.10\"" files = [ {file = "jupyter_core-5.9.1-py3-none-any.whl", hash = "sha256:ebf87fdc6073d142e114c72c9e29a9d7ca03fad818c5d300ce2adc1fb0743407"}, {file = "jupyter_core-5.9.1.tar.gz", hash = "sha256:4d09aaff303b9566c3ce657f580bd089ff5c91f5f89cf7d8846c3cdf465b5508"}, @@ -1591,7 +1386,6 @@ files = [ ] [package.dependencies] -importlib_metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} jupyter_server = ">=1.1.2" [[package]] @@ -1666,7 +1460,6 @@ files = [ [package.dependencies] async-lru = ">=1.0.0" httpx = ">=0.25.0,<1" -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} ipykernel = ">=6.5.0,<6.30.0 || >6.30.0" jinja2 = ">=3.0.3" jupyter-core = "*" @@ -1713,7 +1506,6 @@ files = [ [package.dependencies] babel = ">=2.10" -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} jinja2 = ">=3.0.3" json5 = ">=0.9.0" jsonschema = ">=4.18.0" @@ -1746,88 +1538,88 @@ regex = ["regex"] [[package]] name = "librt" -version = "0.6.3" +version = "0.7.3" description = "Mypyc runtime library" optional = false python-versions = ">=3.9" groups = ["types"] files = [ - {file = "librt-0.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:45660d26569cc22ed30adf583389d8a0d1b468f8b5e518fcf9bfe2cd298f9dd1"}, - {file = "librt-0.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:54f3b2177fb892d47f8016f1087d21654b44f7fc4cf6571c1c6b3ea531ab0fcf"}, - {file = "librt-0.6.3-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c5b31bed2c2f2fa1fcb4815b75f931121ae210dc89a3d607fb1725f5907f1437"}, - {file = "librt-0.6.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8f8ed5053ef9fb08d34f1fd80ff093ccbd1f67f147633a84cf4a7d9b09c0f089"}, - {file = "librt-0.6.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3f0e4bd9bcb0ee34fa3dbedb05570da50b285f49e52c07a241da967840432513"}, - {file = 
"librt-0.6.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d8f89c8d20dfa648a3f0a56861946eb00e5b00d6b00eea14bc5532b2fcfa8ef1"}, - {file = "librt-0.6.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ecc2c526547eacd20cb9fbba19a5268611dbc70c346499656d6cf30fae328977"}, - {file = "librt-0.6.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fbedeb9b48614d662822ee514567d2d49a8012037fc7b4cd63f282642c2f4b7d"}, - {file = "librt-0.6.3-cp310-cp310-win32.whl", hash = "sha256:0765b0fe0927d189ee14b087cd595ae636bef04992e03fe6dfdaa383866c8a46"}, - {file = "librt-0.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:8c659f9fb8a2f16dc4131b803fa0144c1dadcb3ab24bb7914d01a6da58ae2457"}, - {file = "librt-0.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:61348cc488b18d1b1ff9f3e5fcd5ac43ed22d3e13e862489d2267c2337285c08"}, - {file = "librt-0.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:64645b757d617ad5f98c08e07620bc488d4bced9ced91c6279cec418f16056fa"}, - {file = "librt-0.6.3-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:26b8026393920320bb9a811b691d73c5981385d537ffc5b6e22e53f7b65d4122"}, - {file = "librt-0.6.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d998b432ed9ffccc49b820e913c8f327a82026349e9c34fa3690116f6b70770f"}, - {file = "librt-0.6.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e18875e17ef69ba7dfa9623f2f95f3eda6f70b536079ee6d5763ecdfe6cc9040"}, - {file = "librt-0.6.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a218f85081fc3f70cddaed694323a1ad7db5ca028c379c214e3a7c11c0850523"}, - {file = "librt-0.6.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1ef42ff4edd369e84433ce9b188a64df0837f4f69e3d34d3b34d4955c599d03f"}, - {file = "librt-0.6.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0e0f2b79993fec23a685b3e8107ba5f8675eeae286675a216da0b09574fa1e47"}, - {file = "librt-0.6.3-cp311-cp311-win32.whl", hash = "sha256:fd98cacf4e0fabcd4005c452cb8a31750258a85cab9a59fb3559e8078da408d7"}, - {file = "librt-0.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:e17b5b42c8045867ca9d1f54af00cc2275198d38de18545edaa7833d7e9e4ac8"}, - {file = "librt-0.6.3-cp311-cp311-win_arm64.whl", hash = "sha256:87597e3d57ec0120a3e1d857a708f80c02c42ea6b00227c728efbc860f067c45"}, - {file = "librt-0.6.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:74418f718083009108dc9a42c21bf2e4802d49638a1249e13677585fcc9ca176"}, - {file = "librt-0.6.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:514f3f363d1ebc423357d36222c37e5c8e6674b6eae8d7195ac9a64903722057"}, - {file = "librt-0.6.3-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cf1115207a5049d1f4b7b4b72de0e52f228d6c696803d94843907111cbf80610"}, - {file = "librt-0.6.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ad8ba80cdcea04bea7b78fcd4925bfbf408961e9d8397d2ee5d3ec121e20c08c"}, - {file = "librt-0.6.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4018904c83eab49c814e2494b4e22501a93cdb6c9f9425533fe693c3117126f9"}, - {file = "librt-0.6.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8983c5c06ac9c990eac5eb97a9f03fe41dc7e9d7993df74d9e8682a1056f596c"}, - {file = "librt-0.6.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7769c579663a6f8dbf34878969ac71befa42067ce6bf78e6370bf0d1194997c"}, - {file = 
"librt-0.6.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d3c9a07eafdc70556f8c220da4a538e715668c0c63cabcc436a026e4e89950bf"}, - {file = "librt-0.6.3-cp312-cp312-win32.whl", hash = "sha256:38320386a48a15033da295df276aea93a92dfa94a862e06893f75ea1d8bbe89d"}, - {file = "librt-0.6.3-cp312-cp312-win_amd64.whl", hash = "sha256:c0ecf4786ad0404b072196b5df774b1bb23c8aacdcacb6c10b4128bc7b00bd01"}, - {file = "librt-0.6.3-cp312-cp312-win_arm64.whl", hash = "sha256:9f2a6623057989ebc469cd9cc8fe436c40117a0147627568d03f84aef7854c55"}, - {file = "librt-0.6.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9e716f9012148a81f02f46a04fc4c663420c6fbfeacfac0b5e128cf43b4413d3"}, - {file = "librt-0.6.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:669ff2495728009a96339c5ad2612569c6d8be4474e68f3f3ac85d7c3261f5f5"}, - {file = "librt-0.6.3-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:349b6873ebccfc24c9efd244e49da9f8a5c10f60f07575e248921aae2123fc42"}, - {file = "librt-0.6.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0c74c26736008481c9f6d0adf1aedb5a52aff7361fea98276d1f965c0256ee70"}, - {file = "librt-0.6.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:408a36ddc75e91918cb15b03460bdc8a015885025d67e68c6f78f08c3a88f522"}, - {file = "librt-0.6.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e61ab234624c9ffca0248a707feffe6fac2343758a36725d8eb8a6efef0f8c30"}, - {file = "librt-0.6.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:324462fe7e3896d592b967196512491ec60ca6e49c446fe59f40743d08c97917"}, - {file = "librt-0.6.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:36b2ec8c15030002c7f688b4863e7be42820d7c62d9c6eece3db54a2400f0530"}, - {file = "librt-0.6.3-cp313-cp313-win32.whl", hash = "sha256:25b1b60cb059471c0c0c803e07d0dfdc79e41a0a122f288b819219ed162672a3"}, - {file = "librt-0.6.3-cp313-cp313-win_amd64.whl", hash = "sha256:10a95ad074e2a98c9e4abc7f5b7d40e5ecbfa84c04c6ab8a70fabf59bd429b88"}, - {file = "librt-0.6.3-cp313-cp313-win_arm64.whl", hash = "sha256:17000df14f552e86877d67e4ab7966912224efc9368e998c96a6974a8d609bf9"}, - {file = "librt-0.6.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8e695f25d1a425ad7a272902af8ab8c8d66c1998b177e4b5f5e7b4e215d0c88a"}, - {file = "librt-0.6.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:3e84a4121a7ae360ca4da436548a9c1ca8ca134a5ced76c893cc5944426164bd"}, - {file = "librt-0.6.3-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:05f385a414de3f950886ea0aad8f109650d4b712cf9cc14cc17f5f62a9ab240b"}, - {file = "librt-0.6.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36a8e337461150b05ca2c7bdedb9e591dfc262c5230422cea398e89d0c746cdc"}, - {file = "librt-0.6.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcbe48f6a03979384f27086484dc2a14959be1613cb173458bd58f714f2c48f3"}, - {file = "librt-0.6.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4bca9e4c260233fba37b15c4ec2f78aa99c1a79fbf902d19dd4a763c5c3fb751"}, - {file = "librt-0.6.3-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:760c25ed6ac968e24803eb5f7deb17ce026902d39865e83036bacbf5cf242aa8"}, - {file = "librt-0.6.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4aa4a93a353ccff20df6e34fa855ae8fd788832c88f40a9070e3ddd3356a9f0e"}, - {file = "librt-0.6.3-cp314-cp314-win32.whl", hash = 
"sha256:cb92741c2b4ea63c09609b064b26f7f5d9032b61ae222558c55832ec3ad0bcaf"}, - {file = "librt-0.6.3-cp314-cp314-win_amd64.whl", hash = "sha256:fdcd095b1b812d756fa5452aca93b962cf620694c0cadb192cec2bb77dcca9a2"}, - {file = "librt-0.6.3-cp314-cp314-win_arm64.whl", hash = "sha256:822ca79e28720a76a935c228d37da6579edef048a17cd98d406a2484d10eda78"}, - {file = "librt-0.6.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:078cd77064d1640cb7b0650871a772956066174d92c8aeda188a489b58495179"}, - {file = "librt-0.6.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5cc22f7f5c0cc50ed69f4b15b9c51d602aabc4500b433aaa2ddd29e578f452f7"}, - {file = "librt-0.6.3-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:14b345eb7afb61b9fdcdfda6738946bd11b8e0f6be258666b0646af3b9bb5916"}, - {file = "librt-0.6.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d46aa46aa29b067f0b8b84f448fd9719aaf5f4c621cc279164d76a9dc9ab3e8"}, - {file = "librt-0.6.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1b51ba7d9d5d9001494769eca8c0988adce25d0a970c3ba3f2eb9df9d08036fc"}, - {file = "librt-0.6.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ced0925a18fddcff289ef54386b2fc230c5af3c83b11558571124bfc485b8c07"}, - {file = "librt-0.6.3-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:6bac97e51f66da2ca012adddbe9fd656b17f7368d439de30898f24b39512f40f"}, - {file = "librt-0.6.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b2922a0e8fa97395553c304edc3bd36168d8eeec26b92478e292e5d4445c1ef0"}, - {file = "librt-0.6.3-cp314-cp314t-win32.whl", hash = "sha256:f33462b19503ba68d80dac8a1354402675849259fb3ebf53b67de86421735a3a"}, - {file = "librt-0.6.3-cp314-cp314t-win_amd64.whl", hash = "sha256:04f8ce401d4f6380cfc42af0f4e67342bf34c820dae01343f58f472dbac75dcf"}, - {file = "librt-0.6.3-cp314-cp314t-win_arm64.whl", hash = "sha256:afb39550205cc5e5c935762c6bf6a2bb34f7d21a68eadb25e2db7bf3593fecc0"}, - {file = "librt-0.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09262cb2445b6f15d09141af20b95bb7030c6f13b00e876ad8fdd1a9045d6aa5"}, - {file = "librt-0.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:57705e8eec76c5b77130d729c0f70190a9773366c555c5457c51eace80afd873"}, - {file = "librt-0.6.3-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3ac2a7835434b31def8ed5355dd9b895bbf41642d61967522646d1d8b9681106"}, - {file = "librt-0.6.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:71f0a5918aebbea1e7db2179a8fe87e8a8732340d9e8b8107401fb407eda446e"}, - {file = "librt-0.6.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aa346e202e6e1ebc01fe1c69509cffe486425884b96cb9ce155c99da1ecbe0e9"}, - {file = "librt-0.6.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:92267f865c7bbd12327a0d394666948b9bf4b51308b52947c0cc453bfa812f5d"}, - {file = "librt-0.6.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:86605d5bac340beb030cbc35859325982a79047ebdfba1e553719c7126a2389d"}, - {file = "librt-0.6.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:98e4bbecbef8d2a60ecf731d735602feee5ac0b32117dbbc765e28b054bac912"}, - {file = "librt-0.6.3-cp39-cp39-win32.whl", hash = "sha256:3caa0634c02d5ff0b2ae4a28052e0d8c5f20d497623dc13f629bd4a9e2a6efad"}, - {file = "librt-0.6.3-cp39-cp39-win_amd64.whl", hash = 
"sha256:b47395091e7e0ece1e6ebac9b98bf0c9084d1e3d3b2739aa566be7e56e3f7bf2"}, - {file = "librt-0.6.3.tar.gz", hash = "sha256:c724a884e642aa2bbad52bb0203ea40406ad742368a5f90da1b220e970384aae"}, + {file = "librt-0.7.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2682162855a708e3270eba4b92026b93f8257c3e65278b456c77631faf0f4f7a"}, + {file = "librt-0.7.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:440c788f707c061d237c1e83edf6164ff19f5c0f823a3bf054e88804ebf971ec"}, + {file = "librt-0.7.3-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:399938edbd3d78339f797d685142dd8a623dfaded023cf451033c85955e4838a"}, + {file = "librt-0.7.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1975eda520957c6e0eb52d12968dd3609ffb7eef05d4223d097893d6daf1d8a7"}, + {file = "librt-0.7.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f9da128d0edf990cf0d2ca011b02cd6f639e79286774bd5b0351245cbb5a6e51"}, + {file = "librt-0.7.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e19acfde38cb532a560b98f473adc741c941b7a9bc90f7294bc273d08becb58b"}, + {file = "librt-0.7.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:7b4f57f7a0c65821c5441d98c47ff7c01d359b1e12328219709bdd97fdd37f90"}, + {file = "librt-0.7.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:256793988bff98040de23c57cf36e1f4c2f2dc3dcd17537cdac031d3b681db71"}, + {file = "librt-0.7.3-cp310-cp310-win32.whl", hash = "sha256:fcb72249ac4ea81a7baefcbff74df7029c3cb1cf01a711113fa052d563639c9c"}, + {file = "librt-0.7.3-cp310-cp310-win_amd64.whl", hash = "sha256:4887c29cadbdc50640179e3861c276325ff2986791e6044f73136e6e798ff806"}, + {file = "librt-0.7.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:687403cced6a29590e6be6964463835315905221d797bc5c934a98750fe1a9af"}, + {file = "librt-0.7.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:24d70810f6e2ea853ff79338001533716b373cc0f63e2a0be5bc96129edb5fb5"}, + {file = "librt-0.7.3-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bf8c7735fbfc0754111f00edda35cf9e98a8d478de6c47b04eaa9cef4300eaa7"}, + {file = "librt-0.7.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e32d43610dff472eab939f4d7fbdd240d1667794192690433672ae22d7af8445"}, + {file = "librt-0.7.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:adeaa886d607fb02563c1f625cf2ee58778a2567c0c109378da8f17ec3076ad7"}, + {file = "librt-0.7.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:572a24fc5958c61431da456a0ef1eeea6b4989d81eeb18b8e5f1f3077592200b"}, + {file = "librt-0.7.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6488e69d408b492e08bfb68f20c4a899a354b4386a446ecd490baff8d0862720"}, + {file = "librt-0.7.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ed028fc3d41adda916320712838aec289956c89b4f0a361ceadf83a53b4c047a"}, + {file = "librt-0.7.3-cp311-cp311-win32.whl", hash = "sha256:2cf9d73499486ce39eebbff5f42452518cc1f88d8b7ea4a711ab32962b176ee2"}, + {file = "librt-0.7.3-cp311-cp311-win_amd64.whl", hash = "sha256:35f1609e3484a649bb80431310ddbec81114cd86648f1d9482bc72a3b86ded2e"}, + {file = "librt-0.7.3-cp311-cp311-win_arm64.whl", hash = "sha256:550fdbfbf5bba6a2960b27376ca76d6aaa2bd4b1a06c4255edd8520c306fcfc0"}, + {file = "librt-0.7.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0fa9ac2e49a6bee56e47573a6786cb635e128a7b12a0dc7851090037c0d397a3"}, 
+ {file = "librt-0.7.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2e980cf1ed1a2420a6424e2ed884629cdead291686f1048810a817de07b5eb18"}, + {file = "librt-0.7.3-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e094e445c37c57e9ec612847812c301840239d34ccc5d153a982fa9814478c60"}, + {file = "librt-0.7.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aca73d70c3f553552ba9133d4a09e767dcfeee352d8d8d3eb3f77e38a3beb3ed"}, + {file = "librt-0.7.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c634a0a6db395fdaba0361aa78395597ee72c3aad651b9a307a3a7eaf5efd67e"}, + {file = "librt-0.7.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a59a69deeb458c858b8fea6acf9e2acd5d755d76cd81a655256bc65c20dfff5b"}, + {file = "librt-0.7.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d91e60ac44bbe3a77a67af4a4c13114cbe9f6d540337ce22f2c9eaf7454ca71f"}, + {file = "librt-0.7.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:703456146dc2bf430f7832fd1341adac5c893ec3c1430194fdcefba00012555c"}, + {file = "librt-0.7.3-cp312-cp312-win32.whl", hash = "sha256:b7c1239b64b70be7759554ad1a86288220bbb04d68518b527783c4ad3fb4f80b"}, + {file = "librt-0.7.3-cp312-cp312-win_amd64.whl", hash = "sha256:ef59c938f72bdbc6ab52dc50f81d0637fde0f194b02d636987cea2ab30f8f55a"}, + {file = "librt-0.7.3-cp312-cp312-win_arm64.whl", hash = "sha256:ff21c554304e8226bf80c3a7754be27c6c3549a9fec563a03c06ee8f494da8fc"}, + {file = "librt-0.7.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:56f2a47beda8409061bc1c865bef2d4bd9ff9255219402c0817e68ab5ad89aed"}, + {file = "librt-0.7.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:14569ac5dd38cfccf0a14597a88038fb16811a6fede25c67b79c6d50fc2c8fdc"}, + {file = "librt-0.7.3-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6038ccbd5968325a5d6fd393cf6e00b622a8de545f0994b89dd0f748dcf3e19e"}, + {file = "librt-0.7.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d39079379a9a28e74f4d57dc6357fa310a1977b51ff12239d7271ec7e71d67f5"}, + {file = "librt-0.7.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8837d5a52a2d7aa9f4c3220a8484013aed1d8ad75240d9a75ede63709ef89055"}, + {file = "librt-0.7.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:399bbd7bcc1633c3e356ae274a1deb8781c7bf84d9c7962cc1ae0c6e87837292"}, + {file = "librt-0.7.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8d8cf653e798ee4c4e654062b633db36984a1572f68c3aa25e364a0ddfbbb910"}, + {file = "librt-0.7.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2f03484b54bf4ae80ab2e504a8d99d20d551bfe64a7ec91e218010b467d77093"}, + {file = "librt-0.7.3-cp313-cp313-win32.whl", hash = "sha256:44b3689b040df57f492e02cd4f0bacd1b42c5400e4b8048160c9d5e866de8abe"}, + {file = "librt-0.7.3-cp313-cp313-win_amd64.whl", hash = "sha256:6b407c23f16ccc36614c136251d6b32bf30de7a57f8e782378f1107be008ddb0"}, + {file = "librt-0.7.3-cp313-cp313-win_arm64.whl", hash = "sha256:abfc57cab3c53c4546aee31859ef06753bfc136c9d208129bad23e2eca39155a"}, + {file = "librt-0.7.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:120dd21d46ff875e849f1aae19346223cf15656be489242fe884036b23d39e93"}, + {file = "librt-0.7.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1617bea5ab31266e152871208502ee943cb349c224846928a1173c864261375e"}, + {file = 
"librt-0.7.3-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:93b2a1f325fefa1482516ced160c8c7b4b8d53226763fa6c93d151fa25164207"}, + {file = "librt-0.7.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3d4801db8354436fd3936531e7f0e4feb411f62433a6b6cb32bb416e20b529f"}, + {file = "librt-0.7.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11ad45122bbed42cfc8b0597450660126ef28fd2d9ae1a219bc5af8406f95678"}, + {file = "librt-0.7.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:6b4e7bff1d76dd2b46443078519dc75df1b5e01562345f0bb740cea5266d8218"}, + {file = "librt-0.7.3-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:d86f94743a11873317094326456b23f8a5788bad9161fd2f0e52088c33564620"}, + {file = "librt-0.7.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:754a0d09997095ad764ccef050dd5bf26cbf457aab9effcba5890dad081d879e"}, + {file = "librt-0.7.3-cp314-cp314-win32.whl", hash = "sha256:fbd7351d43b80d9c64c3cfcb50008f786cc82cba0450e8599fdd64f264320bd3"}, + {file = "librt-0.7.3-cp314-cp314-win_amd64.whl", hash = "sha256:d376a35c6561e81d2590506804b428fc1075fcc6298fc5bb49b771534c0ba010"}, + {file = "librt-0.7.3-cp314-cp314-win_arm64.whl", hash = "sha256:cbdb3f337c88b43c3b49ca377731912c101178be91cb5071aac48faa898e6f8e"}, + {file = "librt-0.7.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9f0e0927efe87cd42ad600628e595a1a0aa1c64f6d0b55f7e6059079a428641a"}, + {file = "librt-0.7.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:020c6db391268bcc8ce75105cb572df8cb659a43fd347366aaa407c366e5117a"}, + {file = "librt-0.7.3-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7af7785f5edd1f418da09a8cdb9ec84b0213e23d597413e06525340bcce1ea4f"}, + {file = "librt-0.7.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8ccadf260bb46a61b9c7e89e2218f6efea9f3eeaaab4e3d1f58571890e54858e"}, + {file = "librt-0.7.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9883b2d819ce83f87ba82a746c81d14ada78784db431e57cc9719179847376e"}, + {file = "librt-0.7.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:59cb0470612d21fa1efddfa0dd710756b50d9c7fb6c1236bbf8ef8529331dc70"}, + {file = "librt-0.7.3-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:1fe603877e1865b5fd047a5e40379509a4a60204aa7aa0f72b16f7a41c3f0712"}, + {file = "librt-0.7.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5460d99ed30f043595bbdc888f542bad2caeb6226b01c33cda3ae444e8f82d42"}, + {file = "librt-0.7.3-cp314-cp314t-win32.whl", hash = "sha256:d09f677693328503c9e492e33e9601464297c01f9ebd966ea8fc5308f3069bfd"}, + {file = "librt-0.7.3-cp314-cp314t-win_amd64.whl", hash = "sha256:25711f364c64cab2c910a0247e90b51421e45dbc8910ceeb4eac97a9e132fc6f"}, + {file = "librt-0.7.3-cp314-cp314t-win_arm64.whl", hash = "sha256:a9f9b661f82693eb56beb0605156c7fca57f535704ab91837405913417d6990b"}, + {file = "librt-0.7.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cd8551aa21df6c60baa2624fd086ae7486bdde00c44097b32e1d1b1966e365e0"}, + {file = "librt-0.7.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6eb9295c730e26b849ed1f4022735f36863eb46b14b6e10604c1c39b8b5efaea"}, + {file = "librt-0.7.3-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3edbf257c40d21a42615e9e332a6b10a8bacaaf58250aed8552a14a70efd0d65"}, + {file = 
"librt-0.7.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7b29e97273bd6999e2bfe9fe3531b1f4f64effd28327bced048a33e49b99674a"}, + {file = "librt-0.7.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e40520c37926166c24d0c2e0f3bc3a5f46646c34bdf7b4ea9747c297d6ee809"}, + {file = "librt-0.7.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6bdd9adfca615903578d2060ee8a6eb1c24eaf54919ff0ddc820118e5718931b"}, + {file = "librt-0.7.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f57aca20e637750a2c18d979f7096e2c2033cc40cf7ed201494318de1182f135"}, + {file = "librt-0.7.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cad9971881e4fec00d96af7eaf4b63aa7a595696fc221808b0d3ce7ca9743258"}, + {file = "librt-0.7.3-cp39-cp39-win32.whl", hash = "sha256:170cdb8436188347af17bf9cccf3249ba581c933ed56d926497119d4cf730cec"}, + {file = "librt-0.7.3-cp39-cp39-win_amd64.whl", hash = "sha256:b278a9248a4e3260fee3db7613772ca9ab6763a129d6d6f29555e2f9b168216d"}, + {file = "librt-0.7.3.tar.gz", hash = "sha256:3ec50cf65235ff5c02c5b747748d9222e564ad48597122a361269dd3aa808798"}, ] [[package]] @@ -1870,26 +1662,6 @@ win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} [package.extras] dev = ["Sphinx (==8.1.3) ; python_version >= \"3.11\"", "build (==1.2.2) ; python_version >= \"3.11\"", "colorama (==0.4.5) ; python_version < \"3.8\"", "colorama (==0.4.6) ; python_version >= \"3.8\"", "exceptiongroup (==1.1.3) ; python_version >= \"3.7\" and python_version < \"3.11\"", "freezegun (==1.1.0) ; python_version < \"3.8\"", "freezegun (==1.5.0) ; python_version >= \"3.8\"", "mypy (==v0.910) ; python_version < \"3.6\"", "mypy (==v0.971) ; python_version == \"3.6\"", "mypy (==v1.13.0) ; python_version >= \"3.8\"", "mypy (==v1.4.1) ; python_version == \"3.7\"", "myst-parser (==4.0.0) ; python_version >= \"3.11\"", "pre-commit (==4.0.1) ; python_version >= \"3.9\"", "pytest (==6.1.2) ; python_version < \"3.8\"", "pytest (==8.3.2) ; python_version >= \"3.8\"", "pytest-cov (==2.12.1) ; python_version < \"3.8\"", "pytest-cov (==5.0.0) ; python_version == \"3.8\"", "pytest-cov (==6.0.0) ; python_version >= \"3.9\"", "pytest-mypy-plugins (==1.9.3) ; python_version >= \"3.6\" and python_version < \"3.8\"", "pytest-mypy-plugins (==3.1.0) ; python_version >= \"3.8\"", "sphinx-rtd-theme (==3.0.2) ; python_version >= \"3.11\"", "tox (==3.27.1) ; python_version < \"3.8\"", "tox (==4.23.2) ; python_version >= \"3.8\"", "twine (==6.0.1) ; python_version >= \"3.11\""] -[[package]] -name = "markdown" -version = "3.9" -description = "Python implementation of John Gruber's Markdown." -optional = false -python-versions = ">=3.9" -groups = ["docs"] -markers = "python_version < \"3.10\"" -files = [ - {file = "markdown-3.9-py3-none-any.whl", hash = "sha256:9f4d91ed810864ea88a6f32c07ba8bee1346c0cc1f6b1f9f6c822f2a9667d280"}, - {file = "markdown-3.9.tar.gz", hash = "sha256:d2900fe1782bd33bdbbd56859defef70c2e78fc46668f8eb9df3128138f2cb6a"}, -] - -[package.dependencies] -importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} - -[package.extras] -docs = ["mdx_gh_links (>=0.2)", "mkdocs (>=1.6)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] -testing = ["coverage", "pyyaml"] - [[package]] name = "markdown" version = "3.10" @@ -1897,7 +1669,6 @@ description = "Python implementation of John Gruber's Markdown." 
optional = false python-versions = ">=3.10" groups = ["docs"] -markers = "python_version >= \"3.10\"" files = [ {file = "markdown-3.10-py3-none-any.whl", hash = "sha256:b5b99d6951e2e4948d939255596523444c0e677c669700b1d17aa4a8a464cb7c"}, {file = "markdown-3.10.tar.gz", hash = "sha256:37062d4f2aa4b2b6b32aefb80faa300f82cc790cb949a35b8caede34f2b68c0e"}, @@ -2067,7 +1838,6 @@ files = [ click = ">=7.0" colorama = {version = ">=0.4", markers = "platform_system == \"Windows\""} ghp-import = ">=1.0" -importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} jinja2 = ">=2.11.1" markdown = ">=3.3.6" markupsafe = ">=2.0.1" @@ -2113,7 +1883,6 @@ files = [ ] [package.dependencies] -importlib-metadata = {version = ">=4.3", markers = "python_version < \"3.10\""} mergedeep = ">=1.3.4" platformdirs = ">=2.2.0" pyyaml = ">=5.1" @@ -2174,7 +1943,6 @@ files = [ [package.dependencies] click = ">=7.0" -importlib-metadata = {version = ">=4.6", markers = "python_version < \"3.10\""} Jinja2 = ">=2.11.1" Markdown = ">=3.3" MarkupSafe = ">=1.1" @@ -2182,7 +1950,6 @@ mkdocs = ">=1.4" mkdocs-autorefs = ">=0.3.1" platformdirs = ">=2.2.0" pymdown-extensions = ">=6.3" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.10\""} [package.extras] crystal = ["mkdocstrings-crystal (>=0.3.4)"] @@ -2258,7 +2025,6 @@ files = [ [package.dependencies] numpy = [ - {version = ">=1.21"}, {version = ">=1.21.2", markers = "python_version >= \"3.10\""}, {version = ">=1.23.3", markers = "python_version >= \"3.11\""}, {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, @@ -2538,7 +2304,6 @@ files = [ beautifulsoup4 = "*" bleach = {version = "!=5.0.0", extras = ["css"]} defusedxml = "*" -importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} jinja2 = ">=3.0" jupyter-core = ">=4.7" jupyterlab-pygments = "*" @@ -2648,62 +2413,6 @@ jupyter-server = ">=1.8,<3" [package.extras] test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync"] -[[package]] -name = "numpy" -version = "2.0.2" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.9" -groups = ["main", "dev"] -markers = "python_version < \"3.10\"" -files = [ - {file = "numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece"}, - {file = "numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04"}, - {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:8c5713284ce4e282544c68d1c3b2c7161d38c256d2eefc93c1d683cf47683e66"}, - {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:becfae3ddd30736fe1889a37f1f580e245ba79a5855bff5f2a29cb3ccc22dd7b"}, - {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2da5960c3cf0df7eafefd806d4e612c5e19358de82cb3c343631188991566ccd"}, - {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:496f71341824ed9f3d2fd36cf3ac57ae2e0165c143b55c3a035ee219413f3318"}, - {file = "numpy-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a61ec659f68ae254e4d237816e33171497e978140353c0c2038d46e63282d0c8"}, - {file = "numpy-2.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d731a1c6116ba289c1e9ee714b08a8ff882944d4ad631fd411106a30f083c326"}, - {file = "numpy-2.0.2-cp310-cp310-win32.whl", hash = 
"sha256:984d96121c9f9616cd33fbd0618b7f08e0cfc9600a7ee1d6fd9b239186d19d97"}, - {file = "numpy-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:c7b0be4ef08607dd04da4092faee0b86607f111d5ae68036f16cc787e250a131"}, - {file = "numpy-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:49ca4decb342d66018b01932139c0961a8f9ddc7589611158cb3c27cbcf76448"}, - {file = "numpy-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:11a76c372d1d37437857280aa142086476136a8c0f373b2e648ab2c8f18fb195"}, - {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:807ec44583fd708a21d4a11d94aedf2f4f3c3719035c76a2bbe1fe8e217bdc57"}, - {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:8cafab480740e22f8d833acefed5cc87ce276f4ece12fdaa2e8903db2f82897a"}, - {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a15f476a45e6e5a3a79d8a14e62161d27ad897381fecfa4a09ed5322f2085669"}, - {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13e689d772146140a252c3a28501da66dfecd77490b498b168b501835041f951"}, - {file = "numpy-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9ea91dfb7c3d1c56a0e55657c0afb38cf1eeae4544c208dc465c3c9f3a7c09f9"}, - {file = "numpy-2.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c1c9307701fec8f3f7a1e6711f9089c06e6284b3afbbcd259f7791282d660a15"}, - {file = "numpy-2.0.2-cp311-cp311-win32.whl", hash = "sha256:a392a68bd329eafac5817e5aefeb39038c48b671afd242710b451e76090e81f4"}, - {file = "numpy-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:286cd40ce2b7d652a6f22efdfc6d1edf879440e53e76a75955bc0c826c7e64dc"}, - {file = "numpy-2.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:df55d490dea7934f330006d0f81e8551ba6010a5bf035a249ef61a94f21c500b"}, - {file = "numpy-2.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8df823f570d9adf0978347d1f926b2a867d5608f434a7cff7f7908c6570dcf5e"}, - {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9a92ae5c14811e390f3767053ff54eaee3bf84576d99a2456391401323f4ec2c"}, - {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:a842d573724391493a97a62ebbb8e731f8a5dcc5d285dfc99141ca15a3302d0c"}, - {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05e238064fc0610c840d1cf6a13bf63d7e391717d247f1bf0318172e759e692"}, - {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0123ffdaa88fa4ab64835dcbde75dcdf89c453c922f18dced6e27c90d1d0ec5a"}, - {file = "numpy-2.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:96a55f64139912d61de9137f11bf39a55ec8faec288c75a54f93dfd39f7eb40c"}, - {file = "numpy-2.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec9852fb39354b5a45a80bdab5ac02dd02b15f44b3804e9f00c556bf24b4bded"}, - {file = "numpy-2.0.2-cp312-cp312-win32.whl", hash = "sha256:671bec6496f83202ed2d3c8fdc486a8fc86942f2e69ff0e986140339a63bcbe5"}, - {file = "numpy-2.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:cfd41e13fdc257aa5778496b8caa5e856dc4896d4ccf01841daee1d96465467a"}, - {file = "numpy-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9059e10581ce4093f735ed23f3b9d283b9d517ff46009ddd485f1747eb22653c"}, - {file = "numpy-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:423e89b23490805d2a5a96fe40ec507407b8ee786d66f7328be214f9679df6dd"}, - {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:2b2955fa6f11907cf7a70dab0d0755159bca87755e831e47932367fc8f2f2d0b"}, - 
{file = "numpy-2.0.2-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:97032a27bd9d8988b9a97a8c4d2c9f2c15a81f61e2f21404d7e8ef00cb5be729"}, - {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e795a8be3ddbac43274f18588329c72939870a16cae810c2b73461c40718ab1"}, - {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26b258c385842546006213344c50655ff1555a9338e2e5e02a0756dc3e803dd"}, - {file = "numpy-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fec9451a7789926bcf7c2b8d187292c9f93ea30284802a0ab3f5be8ab36865d"}, - {file = "numpy-2.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9189427407d88ff25ecf8f12469d4d39d35bee1db5d39fc5c168c6f088a6956d"}, - {file = "numpy-2.0.2-cp39-cp39-win32.whl", hash = "sha256:905d16e0c60200656500c95b6b8dca5d109e23cb24abc701d41c02d74c6b3afa"}, - {file = "numpy-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:a3f4ab0caa7f053f6797fcd4e1e25caee367db3112ef2b6ef82d749530768c73"}, - {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7f0a0c6f12e07fa94133c8a67404322845220c06a9e80e85999afe727f7438b8"}, - {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:312950fdd060354350ed123c0e25a71327d3711584beaef30cdaa93320c392d4"}, - {file = "numpy-2.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26df23238872200f63518dd2aa984cfca675d82469535dc7162dc2ee52d9dd5c"}, - {file = "numpy-2.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a46288ec55ebbd58947d31d72be2c63cbf839f0a63b49cb755022310792a3385"}, - {file = "numpy-2.0.2.tar.gz", hash = "sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78"}, -] - [[package]] name = "numpy" version = "2.2.6" @@ -2855,60 +2564,6 @@ files = [ {file = "numpy-2.3.5.tar.gz", hash = "sha256:784db1dcdab56bf0517743e746dfb0f885fc68d948aba86eeec2cba234bdf1c0"}, ] -[[package]] -name = "onnx" -version = "1.19.1" -description = "Open Neural Network Exchange" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -markers = "python_version < \"3.10\"" -files = [ - {file = "onnx-1.19.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:7343250cc5276cf439fe623b8f92e11cf0d1eebc733ae4a8b2e86903bb72ae68"}, - {file = "onnx-1.19.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1fb8f79de7f3920bb82b537f3c6ac70c0ce59f600471d9c3eed2b5f8b079b748"}, - {file = "onnx-1.19.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:92b9d2dece41cc84213dbbfd1acbc2a28c27108c53bd28ddb6d1043fbfcbd2d5"}, - {file = "onnx-1.19.1-cp310-cp310-win32.whl", hash = "sha256:c0b1a2b6bb19a0fc9f5de7661a547136d082c03c169a5215e18ff3ececd2a82f"}, - {file = "onnx-1.19.1-cp310-cp310-win_amd64.whl", hash = "sha256:1c0498c00db05fcdb3426697d330dcecc3f60020015065e2c76fa795f2c9a605"}, - {file = "onnx-1.19.1-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:17aaf5832126de0a5197a5864e4f09a764dd7681d3035135547959b4b6b77a09"}, - {file = "onnx-1.19.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:01b292a4d0b197c45d8184545bbc8ae1df83466341b604187c1b05902cb9c920"}, - {file = "onnx-1.19.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1839af08ab4a909e4af936b8149c27f8c64b96138981024e251906e0539d8bf9"}, - {file = "onnx-1.19.1-cp311-cp311-win32.whl", hash = "sha256:0bdbb676e3722bd32f9227c465d552689f49086f986a696419d865cb4e70b989"}, - {file = 
"onnx-1.19.1-cp311-cp311-win_amd64.whl", hash = "sha256:1346853df5c1e3ebedb2e794cf2a51e0f33759affd655524864ccbcddad7035b"}, - {file = "onnx-1.19.1-cp311-cp311-win_arm64.whl", hash = "sha256:2d69c280c0e665b7f923f499243b9bb84fe97970b7a4668afa0032045de602c8"}, - {file = "onnx-1.19.1-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:3612193a89ddbce5c4e86150869b9258780a82fb8c4ca197723a4460178a6ce9"}, - {file = "onnx-1.19.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6c2fd2f744e7a3880ad0c262efa2edf6d965d0bd02b8f327ec516ad4cb0f2f15"}, - {file = "onnx-1.19.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:485d3674d50d789e0ee72fa6f6e174ab81cb14c772d594f992141bd744729d8a"}, - {file = "onnx-1.19.1-cp312-cp312-win32.whl", hash = "sha256:638bc56ff1a5718f7441e887aeb4e450f37a81c6eac482040381b140bd9ba601"}, - {file = "onnx-1.19.1-cp312-cp312-win_amd64.whl", hash = "sha256:bc7e2e4e163e679721e547958b5a7db875bf822cad371b7c1304aa4401a7c7a4"}, - {file = "onnx-1.19.1-cp312-cp312-win_arm64.whl", hash = "sha256:17c215b1c0f20fe93b4cbe62668247c1d2294b9bc7f6be0ca9ced28e980c07b7"}, - {file = "onnx-1.19.1-cp313-cp313-macosx_12_0_universal2.whl", hash = "sha256:4e5f938c68c4dffd3e19e4fd76eb98d298174eb5ebc09319cdd0ec5fe50050dc"}, - {file = "onnx-1.19.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:86e20a5984b017feeef2dbf4ceff1c7c161ab9423254968dd77d3696c38691d0"}, - {file = "onnx-1.19.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d9c467f0f29993c12f330736af87972f30adb8329b515f39d63a0db929cb2c"}, - {file = "onnx-1.19.1-cp313-cp313-win32.whl", hash = "sha256:65eee353a51b4e4ca3e797784661e5376e2b209f17557e04921eac9166a8752e"}, - {file = "onnx-1.19.1-cp313-cp313-win_amd64.whl", hash = "sha256:c3bc87e38b53554b1fc9ef7b275c81c6f5c93c90a91935bb0aa8d4d498a6d48e"}, - {file = "onnx-1.19.1-cp313-cp313-win_arm64.whl", hash = "sha256:e41496f400afb980ec643d80d5164753a88a85234fa5c06afdeebc8b7d1ec252"}, - {file = "onnx-1.19.1-cp313-cp313t-macosx_12_0_universal2.whl", hash = "sha256:5f6274abf0fd74e80e78ecbb44bd44509409634525c89a9b38276c8af47dc0a2"}, - {file = "onnx-1.19.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:07dcd4d83584eb4bf8f21ac04c82643712e5e93ac2a0ed10121ec123cb127e1e"}, - {file = "onnx-1.19.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1975860c3e720db25d37f1619976582828264bdcc64fa7511c321ac4fc01add3"}, - {file = "onnx-1.19.1-cp313-cp313t-win_amd64.whl", hash = "sha256:9807d0e181f6070ee3a6276166acdc571575d1bd522fc7e89dba16fd6e7ffed9"}, - {file = "onnx-1.19.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b6ee83e6929d75005482d9f304c502ac7c9b8d6db153aa6b484dae74d0f28570"}, - {file = "onnx-1.19.1-cp39-cp39-macosx_12_0_universal2.whl", hash = "sha256:2980de39df1f5afd005a8aeb0b35703dbbab8e4012bcec1634febbdfb8654da8"}, - {file = "onnx-1.19.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bf35f7abc7096df2bb0171102fa7d89ba4a5f5407e3b352ee27bb5e1867e0f19"}, - {file = "onnx-1.19.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc81f200ed98bd0ced53c3f0fdb8164a42e2b8582a1fa9cb8aeb01b64367c7f4"}, - {file = "onnx-1.19.1-cp39-cp39-win32.whl", hash = "sha256:a2e51118c3db00b169cac8170d94d832c2ffe80935563ced596182d4baa6fcb4"}, - {file = "onnx-1.19.1-cp39-cp39-win_amd64.whl", hash = "sha256:4650d053c7c26e40a080b7378d61446958d6da4e217e1d0d422eb9264f8064ae"}, - {file 
= "onnx-1.19.1.tar.gz", hash = "sha256:737524d6eb3907d3499ea459c6f01c5a96278bb3a0f2ff8ae04786fb5d7f1ed5"}, -] - -[package.dependencies] -ml_dtypes = ">=0.5.0" -numpy = ">=1.22" -protobuf = ">=4.25.1" -typing_extensions = ">=4.7.1" - -[package.extras] -reference = ["Pillow"] - [[package]] name = "onnx" version = "1.20.0" @@ -2916,7 +2571,6 @@ description = "Open Neural Network Exchange" optional = false python-versions = ">=3.10" groups = ["dev"] -markers = "python_version >= \"3.10\"" files = [ {file = "onnx-1.20.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:7e706470f8b731af6d0347c4f01b8e0e1810855d0c71c467066a5bd7fa21704b"}, {file = "onnx-1.20.0-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3e941d0f3edd57e1d63e2562c74aec2803ead5b965e76ccc3d2b2bd4ae0ea054"}, @@ -2952,50 +2606,6 @@ typing_extensions = ">=4.7.1" [package.extras] reference = ["Pillow"] -[[package]] -name = "onnxruntime" -version = "1.19.2" -description = "ONNX Runtime is a runtime accelerator for Machine Learning models" -optional = false -python-versions = "*" -groups = ["main"] -markers = "python_version < \"3.10\"" -files = [ - {file = "onnxruntime-1.19.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:84fa57369c06cadd3c2a538ae2a26d76d583e7c34bdecd5769d71ca5c0fc750e"}, - {file = "onnxruntime-1.19.2-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bdc471a66df0c1cdef774accef69e9f2ca168c851ab5e4f2f3341512c7ef4666"}, - {file = "onnxruntime-1.19.2-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e3a4ce906105d99ebbe817f536d50a91ed8a4d1592553f49b3c23c4be2560ae6"}, - {file = "onnxruntime-1.19.2-cp310-cp310-win32.whl", hash = "sha256:4b3d723cc154c8ddeb9f6d0a8c0d6243774c6b5930847cc83170bfe4678fafb3"}, - {file = "onnxruntime-1.19.2-cp310-cp310-win_amd64.whl", hash = "sha256:17ed7382d2c58d4b7354fb2b301ff30b9bf308a1c7eac9546449cd122d21cae5"}, - {file = "onnxruntime-1.19.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:d863e8acdc7232d705d49e41087e10b274c42f09e259016a46f32c34e06dc4fd"}, - {file = "onnxruntime-1.19.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c1dfe4f660a71b31caa81fc298a25f9612815215a47b286236e61d540350d7b6"}, - {file = "onnxruntime-1.19.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a36511dc07c5c964b916697e42e366fa43c48cdb3d3503578d78cef30417cb84"}, - {file = "onnxruntime-1.19.2-cp311-cp311-win32.whl", hash = "sha256:50cbb8dc69d6befad4746a69760e5b00cc3ff0a59c6c3fb27f8afa20e2cab7e7"}, - {file = "onnxruntime-1.19.2-cp311-cp311-win_amd64.whl", hash = "sha256:1c3e5d415b78337fa0b1b75291e9ea9fb2a4c1f148eb5811e7212fed02cfffa8"}, - {file = "onnxruntime-1.19.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:68e7051bef9cfefcbb858d2d2646536829894d72a4130c24019219442b1dd2ed"}, - {file = "onnxruntime-1.19.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d2d366fbcc205ce68a8a3bde2185fd15c604d9645888703785b61ef174265168"}, - {file = "onnxruntime-1.19.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:477b93df4db467e9cbf34051662a4b27c18e131fa1836e05974eae0d6e4cf29b"}, - {file = "onnxruntime-1.19.2-cp312-cp312-win32.whl", hash = "sha256:9a174073dc5608fad05f7cf7f320b52e8035e73d80b0a23c80f840e5a97c0147"}, - {file = "onnxruntime-1.19.2-cp312-cp312-win_amd64.whl", hash = "sha256:190103273ea4507638ffc31d66a980594b237874b65379e273125150eb044857"}, - {file = 
"onnxruntime-1.19.2-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:636bc1d4cc051d40bc52e1f9da87fbb9c57d9d47164695dfb1c41646ea51ea66"}, - {file = "onnxruntime-1.19.2-cp38-cp38-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5bd8b875757ea941cbcfe01582970cc299893d1b65bd56731e326a8333f638a3"}, - {file = "onnxruntime-1.19.2-cp38-cp38-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b2046fc9560f97947bbc1acbe4c6d48585ef0f12742744307d3364b131ac5778"}, - {file = "onnxruntime-1.19.2-cp38-cp38-win32.whl", hash = "sha256:31c12840b1cde4ac1f7d27d540c44e13e34f2345cf3642762d2a3333621abb6a"}, - {file = "onnxruntime-1.19.2-cp38-cp38-win_amd64.whl", hash = "sha256:016229660adea180e9a32ce218b95f8f84860a200f0f13b50070d7d90e92956c"}, - {file = "onnxruntime-1.19.2-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:006c8d326835c017a9e9f74c9c77ebb570a71174a1e89fe078b29a557d9c3848"}, - {file = "onnxruntime-1.19.2-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:df2a94179a42d530b936f154615b54748239c2908ee44f0d722cb4df10670f68"}, - {file = "onnxruntime-1.19.2-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fae4b4de45894b9ce7ae418c5484cbf0341db6813effec01bb2216091c52f7fb"}, - {file = "onnxruntime-1.19.2-cp39-cp39-win32.whl", hash = "sha256:dc5430f473e8706fff837ae01323be9dcfddd3ea471c900a91fa7c9b807ec5d3"}, - {file = "onnxruntime-1.19.2-cp39-cp39-win_amd64.whl", hash = "sha256:38475e29a95c5f6c62c2c603d69fc7d4c6ccbf4df602bd567b86ae1138881c49"}, -] - -[package.dependencies] -coloredlogs = "*" -flatbuffers = "*" -numpy = ">=1.21.6" -packaging = "*" -protobuf = "*" -sympy = "*" - [[package]] name = "onnxruntime" version = "1.23.2" @@ -3003,7 +2613,6 @@ description = "ONNX Runtime is a runtime accelerator for Machine Learning models optional = false python-versions = ">=3.10" groups = ["main"] -markers = "python_version >= \"3.10\"" files = [ {file = "onnxruntime-1.23.2-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:a7730122afe186a784660f6ec5807138bf9d792fa1df76556b27307ea9ebcbe3"}, {file = "onnxruntime-1.23.2-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:b28740f4ecef1738ea8f807461dd541b8287d5650b5be33bca7b474e3cbd1f36"}, @@ -3125,7 +2734,7 @@ description = "Pexpect allows easy control of interactive console applications." 
optional = false python-versions = "*" groups = ["dev"] -markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\" or python_version < \"3.10\" and sys_platform != \"win32\"" +markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\"" files = [ {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, @@ -3134,105 +2743,6 @@ files = [ [package.dependencies] ptyprocess = ">=0.5" -[[package]] -name = "pillow" -version = "10.4.0" -description = "Python Imaging Library (Fork)" -optional = false -python-versions = ">=3.8" -groups = ["main", "docs"] -markers = "python_version < \"3.10\"" -files = [ - {file = "pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e"}, - {file = "pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc"}, - {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e"}, - {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46"}, - {file = "pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984"}, - {file = "pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141"}, - {file = "pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1"}, - {file = "pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c"}, - {file = "pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319"}, - {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d"}, - {file = 
"pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696"}, - {file = "pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496"}, - {file = "pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91"}, - {file = "pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22"}, - {file = "pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94"}, - {file = "pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a"}, - {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b"}, - {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9"}, - {file = "pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42"}, - {file = "pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a"}, - {file = "pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9"}, - {file = "pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3"}, - {file = "pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc"}, - {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a"}, - {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309"}, - {file = "pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060"}, - {file = 
"pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea"}, - {file = "pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d"}, - {file = "pillow-10.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8d4d5063501b6dd4024b8ac2f04962d661222d120381272deea52e3fc52d3736"}, - {file = "pillow-10.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c1ee6f42250df403c5f103cbd2768a28fe1a0ea1f0f03fe151c8741e1469c8b"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15e02e9bb4c21e39876698abf233c8c579127986f8207200bc8a8f6bb27acf2"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8d4bade9952ea9a77d0c3e49cbd8b2890a399422258a77f357b9cc9be8d680"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:43efea75eb06b95d1631cb784aa40156177bf9dd5b4b03ff38979e048258bc6b"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:950be4d8ba92aca4b2bb0741285a46bfae3ca699ef913ec8416c1b78eadd64cd"}, - {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d7480af14364494365e89d6fddc510a13e5a2c3584cb19ef65415ca57252fb84"}, - {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:73664fe514b34c8f02452ffb73b7a92c6774e39a647087f83d67f010eb9a0cf0"}, - {file = "pillow-10.4.0-cp38-cp38-win32.whl", hash = "sha256:e88d5e6ad0d026fba7bdab8c3f225a69f063f116462c49892b0149e21b6c0a0e"}, - {file = "pillow-10.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:5161eef006d335e46895297f642341111945e2c1c899eb406882a6c61a4357ab"}, - {file = "pillow-10.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ae24a547e8b711ccaaf99c9ae3cd975470e1a30caa80a6aaee9a2f19c05701d"}, - {file = "pillow-10.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:298478fe4f77a4408895605f3482b6cc6222c018b2ce565c2b6b9c354ac3229b"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:134ace6dc392116566980ee7436477d844520a26a4b1bd4053f6f47d096997fd"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:930044bb7679ab003b14023138b50181899da3f25de50e9dbee23b61b4de2126"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c76e5786951e72ed3686e122d14c5d7012f16c8303a674d18cdcd6d89557fc5b"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b2724fdb354a868ddf9a880cb84d102da914e99119211ef7ecbdc613b8c96b3c"}, - {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dbc6ae66518ab3c5847659e9988c3b60dc94ffb48ef9168656e0019a93dbf8a1"}, - {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:06b2f7898047ae93fad74467ec3d28fe84f7831370e3c258afa533f81ef7f3df"}, - {file = "pillow-10.4.0-cp39-cp39-win32.whl", hash = "sha256:7970285ab628a3779aecc35823296a7869f889b8329c16ad5a71e4901a3dc4ef"}, - {file = "pillow-10.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:961a7293b2457b405967af9c77dcaa43cc1a8cd50d23c532e62d48ab6cdd56f5"}, - {file = "pillow-10.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:32cda9e3d601a52baccb2856b8ea1fc213c90b340c542dcef77140dfa3278a9e"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4"}, - {file = 
"pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a02364621fe369e06200d4a16558e056fe2805d3468350df3aef21e00d26214b"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1b5dea9831a90e9d0721ec417a80d4cbd7022093ac38a568db2dd78363b00908"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b885f89040bb8c4a1573566bbb2f44f5c505ef6e74cec7ab9068c900047f04b"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87dd88ded2e6d74d31e1e0a99a726a6765cda32d00ba72dc37f0651f306daaa8"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2db98790afc70118bd0255c2eeb465e9767ecf1f3c25f9a1abb8ffc8cfd1fe0a"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f7baece4ce06bade126fb84b8af1c33439a76d8a6fd818970215e0560ca28c27"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cfdd747216947628af7b259d274771d84db2268ca062dd5faf373639d00113a3"}, - {file = "pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06"}, -] - -[package.extras] -docs = ["furo", "olefile", "sphinx (>=7.3)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] -fpx = ["olefile"] -mic = ["olefile"] -tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] -typing = ["typing-extensions ; python_version < \"3.10\""] -xmp = ["defusedxml"] - [[package]] name = "pillow" version = "11.3.0" @@ -3240,7 +2750,6 @@ description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.9" groups = ["main", "docs"] -markers = "python_version >= \"3.10\"" files = [ {file = "pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b9c17fd4ace828b3003dfd1e30bff24863e0eb59b535e8f80194d9cc7ecf860"}, {file = "pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:65dc69160114cdd0ca0f35cb434633c75e8e7fad4cf855177a05bf38678f73ad"}, @@ -3361,33 +2870,14 @@ xmp = ["defusedxml"] [[package]] name = "platformdirs" -version = "4.4.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
-optional = false -python-versions = ">=3.9" -groups = ["dev", "docs"] -markers = "python_version < \"3.10\"" -files = [ - {file = "platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85"}, - {file = "platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf"}, -] - -[package.extras] -docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] -type = ["mypy (>=1.14.1)"] - -[[package]] -name = "platformdirs" -version = "4.5.0" +version = "4.5.1" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.10" groups = ["dev", "docs"] -markers = "python_version >= \"3.10\"" files = [ - {file = "platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3"}, - {file = "platformdirs-4.5.0.tar.gz", hash = "sha256:70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312"}, + {file = "platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31"}, + {file = "platformdirs-4.5.1.tar.gz", hash = "sha256:61d5cdcc6065745cdd94f0f878977f8de9437be93de97c1c12f853c9c0cdcbda"}, ] [package.extras] @@ -3462,22 +2952,22 @@ wcwidth = "*" [[package]] name = "protobuf" -version = "6.33.1" +version = "6.33.2" description = "" optional = false python-versions = ">=3.9" groups = ["main", "dev"] files = [ - {file = "protobuf-6.33.1-cp310-abi3-win32.whl", hash = "sha256:f8d3fdbc966aaab1d05046d0240dd94d40f2a8c62856d41eaa141ff64a79de6b"}, - {file = "protobuf-6.33.1-cp310-abi3-win_amd64.whl", hash = "sha256:923aa6d27a92bf44394f6abf7ea0500f38769d4b07f4be41cb52bd8b1123b9ed"}, - {file = "protobuf-6.33.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:fe34575f2bdde76ac429ec7b570235bf0c788883e70aee90068e9981806f2490"}, - {file = "protobuf-6.33.1-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:f8adba2e44cde2d7618996b3fc02341f03f5bc3f2748be72dc7b063319276178"}, - {file = "protobuf-6.33.1-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:0f4cf01222c0d959c2b399142deb526de420be8236f22c71356e2a544e153c53"}, - {file = "protobuf-6.33.1-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:8fd7d5e0eb08cd5b87fd3df49bc193f5cfd778701f47e11d127d0afc6c39f1d1"}, - {file = "protobuf-6.33.1-cp39-cp39-win32.whl", hash = "sha256:023af8449482fa884d88b4563d85e83accab54138ae098924a985bcbb734a213"}, - {file = "protobuf-6.33.1-cp39-cp39-win_amd64.whl", hash = "sha256:df051de4fd7e5e4371334e234c62ba43763f15ab605579e04c7008c05735cd82"}, - {file = "protobuf-6.33.1-py3-none-any.whl", hash = "sha256:d595a9fd694fdeb061a62fbe10eb039cc1e444df81ec9bb70c7fc59ebcb1eafa"}, - {file = "protobuf-6.33.1.tar.gz", hash = "sha256:97f65757e8d09870de6fd973aeddb92f85435607235d20b2dfed93405d00c85b"}, + {file = "protobuf-6.33.2-cp310-abi3-win32.whl", hash = "sha256:87eb388bd2d0f78febd8f4c8779c79247b26a5befad525008e49a6955787ff3d"}, + {file = "protobuf-6.33.2-cp310-abi3-win_amd64.whl", hash = "sha256:fc2a0e8b05b180e5fc0dd1559fe8ebdae21a27e81ac77728fb6c42b12c7419b4"}, + {file = "protobuf-6.33.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d9b19771ca75935b3a4422957bc518b0cecb978b31d1dd12037b088f6bcc0e43"}, + {file = "protobuf-6.33.2-cp39-abi3-manylinux2014_aarch64.whl", hash 
= "sha256:b5d3b5625192214066d99b2b605f5783483575656784de223f00a8d00754fc0e"}, + {file = "protobuf-6.33.2-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:8cd7640aee0b7828b6d03ae518b5b4806fdfc1afe8de82f79c3454f8aef29872"}, + {file = "protobuf-6.33.2-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:1f8017c48c07ec5859106533b682260ba3d7c5567b1ca1f24297ce03384d1b4f"}, + {file = "protobuf-6.33.2-cp39-cp39-win32.whl", hash = "sha256:7109dcc38a680d033ffb8bf896727423528db9163be1b6a02d6a49606dcadbfe"}, + {file = "protobuf-6.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:2981c58f582f44b6b13173e12bb8656711189c2a70250845f264b877f00b1913"}, + {file = "protobuf-6.33.2-py3-none-any.whl", hash = "sha256:7636aad9bb01768870266de5dc009de2d1b936771b38a793f73cbbf279c91c5c"}, + {file = "protobuf-6.33.2.tar.gz", hash = "sha256:56dc370c91fbb8ac85bc13582c9e373569668a290aa2e66a590c2a0d35ddb9e4"}, ] [[package]] @@ -3520,7 +3010,7 @@ description = "Run a subprocess in a pseudo terminal" optional = false python-versions = "*" groups = ["dev"] -markers = "os_name != \"nt\" or sys_platform != \"win32\" and sys_platform != \"emscripten\" or sys_platform != \"win32\" and python_version < \"3.10\"" +markers = "os_name != \"nt\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" files = [ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, @@ -3646,14 +3136,14 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pymdown-extensions" -version = "10.17.2" +version = "10.18" description = "Extension pack for Python Markdown." optional = false python-versions = ">=3.9" groups = ["docs"] files = [ - {file = "pymdown_extensions-10.17.2-py3-none-any.whl", hash = "sha256:bffae79a2e8b9e44aef0d813583a8fea63457b7a23643a43988055b7b79b4992"}, - {file = "pymdown_extensions-10.17.2.tar.gz", hash = "sha256:26bb3d7688e651606260c90fb46409fbda70bf9fdc3623c7868643a1aeee4713"}, + {file = "pymdown_extensions-10.18-py3-none-any.whl", hash = "sha256:090bca72be43f7d3186374e23c782899dbef9dc153ef24c59dcd3c346f9ffcae"}, + {file = "pymdown_extensions-10.18.tar.gz", hash = "sha256:20252abe6367354b24191431617a072ee6be9f68c5afcc74ea5573508a61f9e5"}, ] [package.dependencies] @@ -3750,43 +3240,9 @@ files = [ {file = "python_json_logger-4.0.0.tar.gz", hash = "sha256:f58e68eb46e1faed27e0f574a55a0455eecd7b8a5b88b85a784519ba3cff047f"}, ] -[package.dependencies] -typing_extensions = {version = "*", markers = "python_version < \"3.10\""} - [package.extras] dev = ["backports.zoneinfo ; python_version < \"3.9\"", "black", "build", "freezegun", "mdx_truly_sane_lists", "mike", "mkdocs", "mkdocs-awesome-pages-plugin", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-material (>=8.5)", "mkdocstrings[python]", "msgspec ; implementation_name != \"pypy\"", "mypy", "orjson ; implementation_name != \"pypy\"", "pylint", "pytest", "tzdata", "validate-pyproject[all]"] -[[package]] -name = "pywin32" -version = "311" -description = "Python for Window Extensions" -optional = false -python-versions = "*" -groups = ["dev", "docs"] -markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\" and python_version < \"3.10\"" -files = [ - {file = "pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3"}, - {file = "pywin32-311-cp310-cp310-win_amd64.whl", hash = 
"sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b"}, - {file = "pywin32-311-cp310-cp310-win_arm64.whl", hash = "sha256:0502d1facf1fed4839a9a51ccbcc63d952cf318f78ffc00a7e78528ac27d7a2b"}, - {file = "pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151"}, - {file = "pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503"}, - {file = "pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2"}, - {file = "pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31"}, - {file = "pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067"}, - {file = "pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852"}, - {file = "pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d"}, - {file = "pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d"}, - {file = "pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a"}, - {file = "pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee"}, - {file = "pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87"}, - {file = "pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42"}, - {file = "pywin32-311-cp38-cp38-win32.whl", hash = "sha256:6c6f2969607b5023b0d9ce2541f8d2cbb01c4f46bc87456017cf63b73f1e2d8c"}, - {file = "pywin32-311-cp38-cp38-win_amd64.whl", hash = "sha256:c8015b09fb9a5e188f83b7b04de91ddca4658cee2ae6f3bc483f0b21a77ef6cd"}, - {file = "pywin32-311-cp39-cp39-win32.whl", hash = "sha256:aba8f82d551a942cb20d4a83413ccbac30790b50efb89a75e4f586ac0bb8056b"}, - {file = "pywin32-311-cp39-cp39-win_amd64.whl", hash = "sha256:e0c4cfb0621281fe40387df582097fd796e80430597cb9944f0ae70447bacd91"}, - {file = "pywin32-311-cp39-cp39-win_arm64.whl", hash = "sha256:62ea666235135fee79bb154e695f3ff67370afefd71bd7fea7512fc70ef31e3d"}, -] - [[package]] name = "pywinpty" version = "3.0.2" @@ -4010,24 +3466,6 @@ files = [ [package.dependencies] cffi = {version = "*", markers = "implementation_name == \"pypy\""} -[[package]] -name = "referencing" -version = "0.36.2" -description = "JSON Referencing + Python" -optional = false -python-versions = ">=3.9" -groups = ["dev", "docs"] -markers = "python_version < \"3.10\"" -files = [ - {file = "referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"}, - {file = "referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -rpds-py = ">=0.7.0" -typing-extensions = {version = ">=4.4.0", markers = "python_version < \"3.13\""} - [[package]] name = "referencing" version = "0.37.0" @@ -4035,7 +3473,6 @@ description = "JSON Referencing + Python" optional = false python-versions = ">=3.10" groups = ["dev", "docs"] -markers = "python_version >= \"3.10\"" files = [ {file = "referencing-0.37.0-py3-none-any.whl", hash = 
"sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231"}, {file = "referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8"}, @@ -4113,172 +3550,6 @@ lark = ">=1.2.2" [package.extras] testing = ["pytest (>=8.3.5)"] -[[package]] -name = "rpds-py" -version = "0.27.1" -description = "Python bindings to Rust's persistent data structures (rpds)" -optional = false -python-versions = ">=3.9" -groups = ["dev", "docs"] -markers = "python_version < \"3.10\"" -files = [ - {file = "rpds_py-0.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:68afeec26d42ab3b47e541b272166a0b4400313946871cba3ed3a4fc0cab1cef"}, - {file = "rpds_py-0.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74e5b2f7bb6fa38b1b10546d27acbacf2a022a8b5543efb06cfebc72a59c85be"}, - {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9024de74731df54546fab0bfbcdb49fae19159ecaecfc8f37c18d2c7e2c0bd61"}, - {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:31d3ebadefcd73b73928ed0b2fd696f7fefda8629229f81929ac9c1854d0cffb"}, - {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2e7f8f169d775dd9092a1743768d771f1d1300453ddfe6325ae3ab5332b4657"}, - {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d905d16f77eb6ab2e324e09bfa277b4c8e5e6b8a78a3e7ff8f3cdf773b4c013"}, - {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50c946f048209e6362e22576baea09193809f87687a95a8db24e5fbdb307b93a"}, - {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_31_riscv64.whl", hash = "sha256:3deab27804d65cd8289eb814c2c0e807c4b9d9916c9225e363cb0cf875eb67c1"}, - {file = "rpds_py-0.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8b61097f7488de4be8244c89915da8ed212832ccf1e7c7753a25a394bf9b1f10"}, - {file = "rpds_py-0.27.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8a3f29aba6e2d7d90528d3c792555a93497fe6538aa65eb675b44505be747808"}, - {file = "rpds_py-0.27.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dd6cd0485b7d347304067153a6dc1d73f7d4fd995a396ef32a24d24b8ac63ac8"}, - {file = "rpds_py-0.27.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6f4461bf931108c9fa226ffb0e257c1b18dc2d44cd72b125bec50ee0ab1248a9"}, - {file = "rpds_py-0.27.1-cp310-cp310-win32.whl", hash = "sha256:ee5422d7fb21f6a00c1901bf6559c49fee13a5159d0288320737bbf6585bd3e4"}, - {file = "rpds_py-0.27.1-cp310-cp310-win_amd64.whl", hash = "sha256:3e039aabf6d5f83c745d5f9a0a381d031e9ed871967c0a5c38d201aca41f3ba1"}, - {file = "rpds_py-0.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:be898f271f851f68b318872ce6ebebbc62f303b654e43bf72683dbdc25b7c881"}, - {file = "rpds_py-0.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62ac3d4e3e07b58ee0ddecd71d6ce3b1637de2d373501412df395a0ec5f9beb5"}, - {file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4708c5c0ceb2d034f9991623631d3d23cb16e65c83736ea020cdbe28d57c0a0e"}, - {file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:abfa1171a9952d2e0002aba2ad3780820b00cc3d9c98c6630f2e93271501f66c"}, - {file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b507d19f817ebaca79574b16eb2ae412e5c0835542c93fe9983f1e432aca195"}, - {file = 
"rpds_py-0.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168b025f8fd8d8d10957405f3fdcef3dc20f5982d398f90851f4abc58c566c52"}, - {file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb56c6210ef77caa58e16e8c17d35c63fe3f5b60fd9ba9d424470c3400bcf9ed"}, - {file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:d252f2d8ca0195faa707f8eb9368955760880b2b42a8ee16d382bf5dd807f89a"}, - {file = "rpds_py-0.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6e5e54da1e74b91dbc7996b56640f79b195d5925c2b78efaa8c5d53e1d88edde"}, - {file = "rpds_py-0.27.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ffce0481cc6e95e5b3f0a47ee17ffbd234399e6d532f394c8dce320c3b089c21"}, - {file = "rpds_py-0.27.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a205fdfe55c90c2cd8e540ca9ceba65cbe6629b443bc05db1f590a3db8189ff9"}, - {file = "rpds_py-0.27.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:689fb5200a749db0415b092972e8eba85847c23885c8543a8b0f5c009b1a5948"}, - {file = "rpds_py-0.27.1-cp311-cp311-win32.whl", hash = "sha256:3182af66048c00a075010bc7f4860f33913528a4b6fc09094a6e7598e462fe39"}, - {file = "rpds_py-0.27.1-cp311-cp311-win_amd64.whl", hash = "sha256:b4938466c6b257b2f5c4ff98acd8128ec36b5059e5c8f8372d79316b1c36bb15"}, - {file = "rpds_py-0.27.1-cp311-cp311-win_arm64.whl", hash = "sha256:2f57af9b4d0793e53266ee4325535a31ba48e2f875da81a9177c9926dfa60746"}, - {file = "rpds_py-0.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ae2775c1973e3c30316892737b91f9283f9908e3cc7625b9331271eaaed7dc90"}, - {file = "rpds_py-0.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2643400120f55c8a96f7c9d858f7be0c88d383cd4653ae2cf0d0c88f668073e5"}, - {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16323f674c089b0360674a4abd28d5042947d54ba620f72514d69be4ff64845e"}, - {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a1f4814b65eacac94a00fc9a526e3fdafd78e439469644032032d0d63de4881"}, - {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ba32c16b064267b22f1850a34051121d423b6f7338a12b9459550eb2096e7ec"}, - {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5c20f33fd10485b80f65e800bbe5f6785af510b9f4056c5a3c612ebc83ba6cb"}, - {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:466bfe65bd932da36ff279ddd92de56b042f2266d752719beb97b08526268ec5"}, - {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:41e532bbdcb57c92ba3be62c42e9f096431b4cf478da9bc3bc6ce5c38ab7ba7a"}, - {file = "rpds_py-0.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f149826d742b406579466283769a8ea448eed82a789af0ed17b0cd5770433444"}, - {file = "rpds_py-0.27.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:80c60cfb5310677bd67cb1e85a1e8eb52e12529545441b43e6f14d90b878775a"}, - {file = "rpds_py-0.27.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7ee6521b9baf06085f62ba9c7a3e5becffbc32480d2f1b351559c001c38ce4c1"}, - {file = "rpds_py-0.27.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a512c8263249a9d68cac08b05dd59d2b3f2061d99b322813cbcc14c3c7421998"}, - {file = "rpds_py-0.27.1-cp312-cp312-win32.whl", hash = "sha256:819064fa048ba01b6dadc5116f3ac48610435ac9a0058bbde98e569f9e785c39"}, - {file = 
"rpds_py-0.27.1-cp312-cp312-win_amd64.whl", hash = "sha256:d9199717881f13c32c4046a15f024971a3b78ad4ea029e8da6b86e5aa9cf4594"}, - {file = "rpds_py-0.27.1-cp312-cp312-win_arm64.whl", hash = "sha256:33aa65b97826a0e885ef6e278fbd934e98cdcfed80b63946025f01e2f5b29502"}, - {file = "rpds_py-0.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e4b9fcfbc021633863a37e92571d6f91851fa656f0180246e84cbd8b3f6b329b"}, - {file = "rpds_py-0.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1441811a96eadca93c517d08df75de45e5ffe68aa3089924f963c782c4b898cf"}, - {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55266dafa22e672f5a4f65019015f90336ed31c6383bd53f5e7826d21a0e0b83"}, - {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d78827d7ac08627ea2c8e02c9e5b41180ea5ea1f747e9db0915e3adf36b62dcf"}, - {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae92443798a40a92dc5f0b01d8a7c93adde0c4dc965310a29ae7c64d72b9fad2"}, - {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c46c9dd2403b66a2a3b9720ec4b74d4ab49d4fabf9f03dfdce2d42af913fe8d0"}, - {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2efe4eb1d01b7f5f1939f4ef30ecea6c6b3521eec451fb93191bf84b2a522418"}, - {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:15d3b4d83582d10c601f481eca29c3f138d44c92187d197aff663a269197c02d"}, - {file = "rpds_py-0.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4ed2e16abbc982a169d30d1a420274a709949e2cbdef119fe2ec9d870b42f274"}, - {file = "rpds_py-0.27.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a75f305c9b013289121ec0f1181931975df78738cdf650093e6b86d74aa7d8dd"}, - {file = "rpds_py-0.27.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:67ce7620704745881a3d4b0ada80ab4d99df390838839921f99e63c474f82cf2"}, - {file = "rpds_py-0.27.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d992ac10eb86d9b6f369647b6a3f412fc0075cfd5d799530e84d335e440a002"}, - {file = "rpds_py-0.27.1-cp313-cp313-win32.whl", hash = "sha256:4f75e4bd8ab8db624e02c8e2fc4063021b58becdbe6df793a8111d9343aec1e3"}, - {file = "rpds_py-0.27.1-cp313-cp313-win_amd64.whl", hash = "sha256:f9025faafc62ed0b75a53e541895ca272815bec18abe2249ff6501c8f2e12b83"}, - {file = "rpds_py-0.27.1-cp313-cp313-win_arm64.whl", hash = "sha256:ed10dc32829e7d222b7d3b93136d25a406ba9788f6a7ebf6809092da1f4d279d"}, - {file = "rpds_py-0.27.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:92022bbbad0d4426e616815b16bc4127f83c9a74940e1ccf3cfe0b387aba0228"}, - {file = "rpds_py-0.27.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:47162fdab9407ec3f160805ac3e154df042e577dd53341745fc7fb3f625e6d92"}, - {file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb89bec23fddc489e5d78b550a7b773557c9ab58b7946154a10a6f7a214a48b2"}, - {file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e48af21883ded2b3e9eb48cb7880ad8598b31ab752ff3be6457001d78f416723"}, - {file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f5b7bd8e219ed50299e58551a410b64daafb5017d54bbe822e003856f06a802"}, - {file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08f1e20bccf73b08d12d804d6e1c22ca5530e71659e6673bce31a6bb71c1e73f"}, - 
{file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dc5dceeaefcc96dc192e3a80bbe1d6c410c469e97bdd47494a7d930987f18b2"}, - {file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:d76f9cc8665acdc0c9177043746775aa7babbf479b5520b78ae4002d889f5c21"}, - {file = "rpds_py-0.27.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:134fae0e36022edad8290a6661edf40c023562964efea0cc0ec7f5d392d2aaef"}, - {file = "rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb11a4f1b2b63337cfd3b4d110af778a59aae51c81d195768e353d8b52f88081"}, - {file = "rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:13e608ac9f50a0ed4faec0e90ece76ae33b34c0e8656e3dceb9a7db994c692cd"}, - {file = "rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dd2135527aa40f061350c3f8f89da2644de26cd73e4de458e79606384f4f68e7"}, - {file = "rpds_py-0.27.1-cp313-cp313t-win32.whl", hash = "sha256:3020724ade63fe320a972e2ffd93b5623227e684315adce194941167fee02688"}, - {file = "rpds_py-0.27.1-cp313-cp313t-win_amd64.whl", hash = "sha256:8ee50c3e41739886606388ba3ab3ee2aae9f35fb23f833091833255a31740797"}, - {file = "rpds_py-0.27.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:acb9aafccaae278f449d9c713b64a9e68662e7799dbd5859e2c6b3c67b56d334"}, - {file = "rpds_py-0.27.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b7fb801aa7f845ddf601c49630deeeccde7ce10065561d92729bfe81bd21fb33"}, - {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe0dd05afb46597b9a2e11c351e5e4283c741237e7f617ffb3252780cca9336a"}, - {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b6dfb0e058adb12d8b1d1b25f686e94ffa65d9995a5157afe99743bf7369d62b"}, - {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed090ccd235f6fa8bb5861684567f0a83e04f52dfc2e5c05f2e4b1309fcf85e7"}, - {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf876e79763eecf3e7356f157540d6a093cef395b65514f17a356f62af6cc136"}, - {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12ed005216a51b1d6e2b02a7bd31885fe317e45897de81d86dcce7d74618ffff"}, - {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:ee4308f409a40e50593c7e3bb8cbe0b4d4c66d1674a316324f0c2f5383b486f9"}, - {file = "rpds_py-0.27.1-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b08d152555acf1f455154d498ca855618c1378ec810646fcd7c76416ac6dc60"}, - {file = "rpds_py-0.27.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:dce51c828941973a5684d458214d3a36fcd28da3e1875d659388f4f9f12cc33e"}, - {file = "rpds_py-0.27.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:c1476d6f29eb81aa4151c9a31219b03f1f798dc43d8af1250a870735516a1212"}, - {file = "rpds_py-0.27.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:3ce0cac322b0d69b63c9cdb895ee1b65805ec9ffad37639f291dd79467bee675"}, - {file = "rpds_py-0.27.1-cp314-cp314-win32.whl", hash = "sha256:dfbfac137d2a3d0725758cd141f878bf4329ba25e34979797c89474a89a8a3a3"}, - {file = "rpds_py-0.27.1-cp314-cp314-win_amd64.whl", hash = "sha256:a6e57b0abfe7cc513450fcf529eb486b6e4d3f8aee83e92eb5f1ef848218d456"}, - {file = "rpds_py-0.27.1-cp314-cp314-win_arm64.whl", hash = "sha256:faf8d146f3d476abfee026c4ae3bdd9ca14236ae4e4c310cbd1cf75ba33d24a3"}, - {file = 
"rpds_py-0.27.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:ba81d2b56b6d4911ce735aad0a1d4495e808b8ee4dc58715998741a26874e7c2"}, - {file = "rpds_py-0.27.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:84f7d509870098de0e864cad0102711c1e24e9b1a50ee713b65928adb22269e4"}, - {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e960fc78fecd1100539f14132425e1d5fe44ecb9239f8f27f079962021523e"}, - {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:62f85b665cedab1a503747617393573995dac4600ff51869d69ad2f39eb5e817"}, - {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fed467af29776f6556250c9ed85ea5a4dd121ab56a5f8b206e3e7a4c551e48ec"}, - {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2729615f9d430af0ae6b36cf042cb55c0936408d543fb691e1a9e36648fd35a"}, - {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b207d881a9aef7ba753d69c123a35d96ca7cb808056998f6b9e8747321f03b8"}, - {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:639fd5efec029f99b79ae47e5d7e00ad8a773da899b6309f6786ecaf22948c48"}, - {file = "rpds_py-0.27.1-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fecc80cb2a90e28af8a9b366edacf33d7a91cbfe4c2c4544ea1246e949cfebeb"}, - {file = "rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:42a89282d711711d0a62d6f57d81aa43a1368686c45bc1c46b7f079d55692734"}, - {file = "rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:cf9931f14223de59551ab9d38ed18d92f14f055a5f78c1d8ad6493f735021bbb"}, - {file = "rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f39f58a27cc6e59f432b568ed8429c7e1641324fbe38131de852cd77b2d534b0"}, - {file = "rpds_py-0.27.1-cp314-cp314t-win32.whl", hash = "sha256:d5fa0ee122dc09e23607a28e6d7b150da16c662e66409bbe85230e4c85bb528a"}, - {file = "rpds_py-0.27.1-cp314-cp314t-win_amd64.whl", hash = "sha256:6567d2bb951e21232c2f660c24cf3470bb96de56cdcb3f071a83feeaff8a2772"}, - {file = "rpds_py-0.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c918c65ec2e42c2a78d19f18c553d77319119bf43aa9e2edf7fb78d624355527"}, - {file = "rpds_py-0.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1fea2b1a922c47c51fd07d656324531adc787e415c8b116530a1d29c0516c62d"}, - {file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbf94c58e8e0cd6b6f38d8de67acae41b3a515c26169366ab58bdca4a6883bb8"}, - {file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2a8fed130ce946d5c585eddc7c8eeef0051f58ac80a8ee43bd17835c144c2cc"}, - {file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:037a2361db72ee98d829bc2c5b7cc55598ae0a5e0ec1823a56ea99374cfd73c1"}, - {file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5281ed1cc1d49882f9997981c88df1a22e140ab41df19071222f7e5fc4e72125"}, - {file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fd50659a069c15eef8aa3d64bbef0d69fd27bb4a50c9ab4f17f83a16cbf8905"}, - {file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_31_riscv64.whl", hash = "sha256:c4b676c4ae3921649a15d28ed10025548e9b561ded473aa413af749503c6737e"}, - {file = "rpds_py-0.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:079bc583a26db831a985c5257797b2b5d3affb0386e7ff886256762f82113b5e"}, - {file = "rpds_py-0.27.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4e44099bd522cba71a2c6b97f68e19f40e7d85399de899d66cdb67b32d7cb786"}, - {file = "rpds_py-0.27.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e202e6d4188e53c6661af813b46c37ca2c45e497fc558bacc1a7630ec2695aec"}, - {file = "rpds_py-0.27.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f41f814b8eaa48768d1bb551591f6ba45f87ac76899453e8ccd41dba1289b04b"}, - {file = "rpds_py-0.27.1-cp39-cp39-win32.whl", hash = "sha256:9e71f5a087ead99563c11fdaceee83ee982fd39cf67601f4fd66cb386336ee52"}, - {file = "rpds_py-0.27.1-cp39-cp39-win_amd64.whl", hash = "sha256:71108900c9c3c8590697244b9519017a400d9ba26a36c48381b3f64743a44aab"}, - {file = "rpds_py-0.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7ba22cb9693df986033b91ae1d7a979bc399237d45fccf875b76f62bb9e52ddf"}, - {file = "rpds_py-0.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5b640501be9288c77738b5492b3fd3abc4ba95c50c2e41273c8a1459f08298d3"}, - {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb08b65b93e0c6dd70aac7f7890a9c0938d5ec71d5cb32d45cf844fb8ae47636"}, - {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d7ff07d696a7a38152ebdb8212ca9e5baab56656749f3d6004b34ab726b550b8"}, - {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb7c72262deae25366e3b6c0c0ba46007967aea15d1eea746e44ddba8ec58dcc"}, - {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b002cab05d6339716b03a4a3a2ce26737f6231d7b523f339fa061d53368c9d8"}, - {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23f6b69d1c26c4704fec01311963a41d7de3ee0570a84ebde4d544e5a1859ffc"}, - {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:530064db9146b247351f2a0250b8f00b289accea4596a033e94be2389977de71"}, - {file = "rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7b90b0496570bd6b0321724a330d8b545827c4df2034b6ddfc5f5275f55da2ad"}, - {file = "rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:879b0e14a2da6a1102a3fc8af580fc1ead37e6d6692a781bd8c83da37429b5ab"}, - {file = "rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:0d807710df3b5faa66c731afa162ea29717ab3be17bdc15f90f2d9f183da4059"}, - {file = "rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:3adc388fc3afb6540aec081fa59e6e0d3908722771aa1e37ffe22b220a436f0b"}, - {file = "rpds_py-0.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c796c0c1cc68cb08b0284db4229f5af76168172670c74908fdbd4b7d7f515819"}, - {file = "rpds_py-0.27.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cdfe4bb2f9fe7458b7453ad3c33e726d6d1c7c0a72960bcc23800d77384e42df"}, - {file = "rpds_py-0.27.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:8fabb8fd848a5f75a2324e4a84501ee3a5e3c78d8603f83475441866e60b94a3"}, - {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eda8719d598f2f7f3e0f885cba8646644b55a187762bec091fa14a2b819746a9"}, - {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:3c64d07e95606ec402a0a1c511fe003873fa6af630bda59bac77fac8b4318ebc"}, - {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93a2ed40de81bcff59aabebb626562d48332f3d028ca2036f1d23cbb52750be4"}, - {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:387ce8c44ae94e0ec50532d9cb0edce17311024c9794eb196b90e1058aadeb66"}, - {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaf94f812c95b5e60ebaf8bfb1898a7d7cb9c1af5744d4a67fa47796e0465d4e"}, - {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:4848ca84d6ded9b58e474dfdbad4b8bfb450344c0551ddc8d958bf4b36aa837c"}, - {file = "rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2bde09cbcf2248b73c7c323be49b280180ff39fadcfe04e7b6f54a678d02a7cf"}, - {file = "rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:94c44ee01fd21c9058f124d2d4f0c9dc7634bec93cd4b38eefc385dabe71acbf"}, - {file = "rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:df8b74962e35c9249425d90144e721eed198e6555a0e22a563d29fe4486b51f6"}, - {file = "rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:dc23e6820e3b40847e2f4a7726462ba0cf53089512abe9ee16318c366494c17a"}, - {file = "rpds_py-0.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:aa8933159edc50be265ed22b401125c9eebff3171f570258854dbce3ecd55475"}, - {file = "rpds_py-0.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a50431bf02583e21bf273c71b89d710e7a710ad5e39c725b14e685610555926f"}, - {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78af06ddc7fe5cc0e967085a9115accee665fb912c22a3f54bad70cc65b05fe6"}, - {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:70d0738ef8fee13c003b100c2fbd667ec4f133468109b3472d249231108283a3"}, - {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2f6fd8a1cea5bbe599b6e78a6e5ee08db434fc8ffea51ff201c8765679698b3"}, - {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8177002868d1426305bb5de1e138161c2ec9eb2d939be38291d7c431c4712df8"}, - {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:008b839781d6c9bf3b6a8984d1d8e56f0ec46dc56df61fd669c49b58ae800400"}, - {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:a55b9132bb1ade6c734ddd2759c8dc132aa63687d259e725221f106b83a0e485"}, - {file = "rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a46fdec0083a26415f11d5f236b79fa1291c32aaa4a17684d82f7017a1f818b1"}, - {file = "rpds_py-0.27.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:8a63b640a7845f2bdd232eb0d0a4a2dd939bcdd6c57e6bb134526487f3160ec5"}, - {file = "rpds_py-0.27.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:7e32721e5d4922deaaf963469d795d5bde6093207c52fec719bd22e5d1bedbc4"}, - {file = "rpds_py-0.27.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:2c426b99a068601b5f4623573df7a7c3d72e87533a2dd2253353a03e7502566c"}, - {file = "rpds_py-0.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4fc9b7fe29478824361ead6e14e4f5aed570d477e06088826537e202d25fe859"}, - {file = "rpds_py-0.27.1.tar.gz", hash = 
"sha256:26a1c73171d10b7acccbded82bf6a586ab8203601e565badc74bbbf8bc5a10f8"}, -] - [[package]] name = "rpds-py" version = "0.30.0" @@ -4286,7 +3557,6 @@ description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.10" groups = ["dev", "docs"] -markers = "python_version >= \"3.10\"" files = [ {file = "rpds_py-0.30.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:679ae98e00c0e8d68a7fda324e16b90fd5260945b45d3b824c892cec9eea3288"}, {file = "rpds_py-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4cc2206b76b4f576934f0ed374b10d7ca5f457858b157ca52064bdfc26b9fc00"}, @@ -4407,31 +3677,31 @@ files = [ [[package]] name = "ruff" -version = "0.14.7" +version = "0.14.8" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" groups = ["test"] files = [ - {file = "ruff-0.14.7-py3-none-linux_armv6l.whl", hash = "sha256:b9d5cb5a176c7236892ad7224bc1e63902e4842c460a0b5210701b13e3de4fca"}, - {file = "ruff-0.14.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:3f64fe375aefaf36ca7d7250292141e39b4cea8250427482ae779a2aa5d90015"}, - {file = "ruff-0.14.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:93e83bd3a9e1a3bda64cb771c0d47cda0e0d148165013ae2d3554d718632d554"}, - {file = "ruff-0.14.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3838948e3facc59a6070795de2ae16e5786861850f78d5914a03f12659e88f94"}, - {file = "ruff-0.14.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:24c8487194d38b6d71cd0fd17a5b6715cda29f59baca1defe1e3a03240f851d1"}, - {file = "ruff-0.14.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:79c73db6833f058a4be8ffe4a0913b6d4ad41f6324745179bd2aa09275b01d0b"}, - {file = "ruff-0.14.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:12eb7014fccff10fc62d15c79d8a6be4d0c2d60fe3f8e4d169a0d2def75f5dad"}, - {file = "ruff-0.14.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c623bbdc902de7ff715a93fa3bb377a4e42dd696937bf95669118773dbf0c50"}, - {file = "ruff-0.14.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f53accc02ed2d200fa621593cdb3c1ae06aa9b2c3cae70bc96f72f0000ae97a9"}, - {file = "ruff-0.14.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:281f0e61a23fcdcffca210591f0f53aafaa15f9025b5b3f9706879aaa8683bc4"}, - {file = "ruff-0.14.7-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:dbbaa5e14148965b91cb090236931182ee522a5fac9bc5575bafc5c07b9f9682"}, - {file = "ruff-0.14.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1464b6e54880c0fe2f2d6eaefb6db15373331414eddf89d6b903767ae2458143"}, - {file = "ruff-0.14.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f217ed871e4621ea6128460df57b19ce0580606c23aeab50f5de425d05226784"}, - {file = "ruff-0.14.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6be02e849440ed3602d2eb478ff7ff07d53e3758f7948a2a598829660988619e"}, - {file = "ruff-0.14.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:19a0f116ee5e2b468dfe80c41c84e2bbd6b74f7b719bee86c2ecde0a34563bcc"}, - {file = "ruff-0.14.7-py3-none-win32.whl", hash = "sha256:e33052c9199b347c8937937163b9b149ef6ab2e4bb37b042e593da2e6f6cccfa"}, - {file = "ruff-0.14.7-py3-none-win_amd64.whl", hash = "sha256:e17a20ad0d3fad47a326d773a042b924d3ac31c6ca6deb6c72e9e6b5f661a7c6"}, - {file = "ruff-0.14.7-py3-none-win_arm64.whl", hash = "sha256:be4d653d3bea1b19742fcc6502354e32f65cd61ff2fbdb365803ef2c2aec6228"}, - 
{file = "ruff-0.14.7.tar.gz", hash = "sha256:3417deb75d23bd14a722b57b0a1435561db65f0ad97435b4cf9f85ffcef34ae5"}, + {file = "ruff-0.14.8-py3-none-linux_armv6l.whl", hash = "sha256:ec071e9c82eca417f6111fd39f7043acb53cd3fde9b1f95bbed745962e345afb"}, + {file = "ruff-0.14.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:8cdb162a7159f4ca36ce980a18c43d8f036966e7f73f866ac8f493b75e0c27e9"}, + {file = "ruff-0.14.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:2e2fcbefe91f9fad0916850edf0854530c15bd1926b6b779de47e9ab619ea38f"}, + {file = "ruff-0.14.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9d70721066a296f45786ec31916dc287b44040f553da21564de0ab4d45a869b"}, + {file = "ruff-0.14.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2c87e09b3cd9d126fc67a9ecd3b5b1d3ded2b9c7fce3f16e315346b9d05cfb52"}, + {file = "ruff-0.14.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d62cb310c4fbcb9ee4ac023fe17f984ae1e12b8a4a02e3d21489f9a2a5f730c"}, + {file = "ruff-0.14.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:1af35c2d62633d4da0521178e8a2641c636d2a7153da0bac1b30cfd4ccd91344"}, + {file = "ruff-0.14.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:25add4575ffecc53d60eed3f24b1e934493631b48ebbc6ebaf9d8517924aca4b"}, + {file = "ruff-0.14.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4c943d847b7f02f7db4201a0600ea7d244d8a404fbb639b439e987edcf2baf9a"}, + {file = "ruff-0.14.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb6e8bf7b4f627548daa1b69283dac5a296bfe9ce856703b03130732e20ddfe2"}, + {file = "ruff-0.14.8-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:7aaf2974f378e6b01d1e257c6948207aec6a9b5ba53fab23d0182efb887a0e4a"}, + {file = "ruff-0.14.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e5758ca513c43ad8a4ef13f0f081f80f08008f410790f3611a21a92421ab045b"}, + {file = "ruff-0.14.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f74f7ba163b6e85a8d81a590363bf71618847e5078d90827749bfda1d88c9cdf"}, + {file = "ruff-0.14.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:eed28f6fafcc9591994c42254f5a5c5ca40e69a30721d2ab18bb0bb3baac3ab6"}, + {file = "ruff-0.14.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:21d48fa744c9d1cb8d71eb0a740c4dd02751a5de9db9a730a8ef75ca34cf138e"}, + {file = "ruff-0.14.8-py3-none-win32.whl", hash = "sha256:15f04cb45c051159baebb0f0037f404f1dc2f15a927418f29730f411a79bc4e7"}, + {file = "ruff-0.14.8-py3-none-win_amd64.whl", hash = "sha256:9eeb0b24242b5bbff3011409a739929f497f3fb5fe3b5698aba5e77e8c833097"}, + {file = "ruff-0.14.8-py3-none-win_arm64.whl", hash = "sha256:965a582c93c63fe715fd3e3f8aa37c4b776777203d8e1d8aa3cc0c14424a4b99"}, + {file = "ruff-0.14.8.tar.gz", hash = "sha256:774ed0dd87d6ce925e3b8496feb3a00ac564bea52b9feb551ecd17e0a23d1eed"}, ] [[package]] @@ -4639,7 +3909,7 @@ description = "A lil' TOML parser" optional = false python-versions = ">=3.8" groups = ["dev", "test", "types"] -markers = "python_version <= \"3.10\"" +markers = "python_version == \"3.10\"" files = [ {file = "tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45"}, {file = "tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba"}, @@ -4775,7 +4045,7 @@ files = [ {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = 
"sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, ] -markers = {test = "python_version <= \"3.10\""} +markers = {test = "python_version == \"3.10\""} [[package]] name = "tzdata" @@ -4806,21 +4076,21 @@ dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake [[package]] name = "urllib3" -version = "2.5.0" +version = "2.6.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.9" groups = ["main", "dev", "docs"] files = [ - {file = "urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc"}, - {file = "urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760"}, + {file = "urllib3-2.6.1-py3-none-any.whl", hash = "sha256:e67d06fe947c36a7ca39f4994b08d73922d40e6cca949907be05efa6fd75110b"}, + {file = "urllib3-2.6.1.tar.gz", hash = "sha256:5379eb6e1aba4088bae84f8242960017ec8d8e3decf30480b3a1abdaa9671a3f"}, ] [package.extras] -brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] +brotli = ["brotli (>=1.2.0) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=1.2.0.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] +zstd = ["backports-zstd (>=1.0.0) ; python_version < \"3.14\""] [[package]] name = "virtualenv" @@ -4899,19 +4169,6 @@ files = [ {file = "wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605"}, ] -[[package]] -name = "webcolors" -version = "24.11.1" -description = "A library for working with the color formats defined by HTML and CSS." 
-optional = false
-python-versions = ">=3.9"
-groups = ["dev"]
-markers = "python_version < \"3.10\""
-files = [
- {file = "webcolors-24.11.1-py3-none-any.whl", hash = "sha256:515291393b4cdf0eb19c155749a096f779f7d909f7cceea072791cb9095b92e9"},
- {file = "webcolors-24.11.1.tar.gz", hash = "sha256:ecb3d768f32202af770477b8b65f318fa4f566c22948673a977b00d589dd80f6"},
-]
-
[[package]]
name = "webcolors"
version = "25.10.0"
@@ -4919,7 +4176,6 @@ description = "A library for working with the color formats defined by HTML and
optional = false
python-versions = ">=3.10"
groups = ["dev"]
-markers = "python_version >= \"3.10\""
files = [
{file = "webcolors-25.10.0-py3-none-any.whl", hash = "sha256:032c727334856fc0b968f63daa252a1ac93d33db2f5267756623c210e57a4f1d"},
{file = "webcolors-25.10.0.tar.gz", hash = "sha256:62abae86504f66d0f6364c2a8520de4a0c47b80c03fc3a5f1815fedbef7c19bf"},
@@ -4970,28 +4226,7 @@ files = [
[package.extras]
dev = ["black (>=19.3b0) ; python_version >= \"3.6\"", "pytest (>=4.6.2)"]
-[[package]]
-name = "zipp"
-version = "3.23.0"
-description = "Backport of pathlib-compatible object wrapper for zip files"
-optional = false
-python-versions = ">=3.9"
-groups = ["dev", "docs"]
-markers = "python_version < \"3.10\""
-files = [
- {file = "zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e"},
- {file = "zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166"},
-]
-
-[package.extras]
-check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""]
-cover = ["pytest-cov"]
-doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-enabler = ["pytest-enabler (>=2.2)"]
-test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more_itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"]
-type = ["pytest-mypy"]
-
[metadata]
lock-version = "2.1"
-python-versions = ">=3.9.0"
-content-hash = "b811f65bf6ccb499c887baae7ec176cbe07843adf218e71ff0564e2da93a5658"
+python-versions = ">=3.10.0"
+content-hash = "cdceed9de4790023c96f06433d9e31c3b72c4562eed63c46f08e7388efb9b349"
diff --git a/pyproject.toml b/pyproject.toml
index eea8ce8a..3b72a305 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -11,9 +11,8 @@ repository = "https://github.com/qdrant/fastembed"
keywords = ["vector", "embedding", "neural", "search", "qdrant", "sentence-transformers"]
[tool.poetry.dependencies]
-python = ">=3.9.0"
+python = ">=3.10.0"
numpy = [
- { version = ">=1.21,<2.1.0", python = "<3.10" },
{ version = ">=1.21,<2.3.0", python = ">=3.10,<3.11" },
{ version = ">=1.21", python = ">=3.11,<3.12" },
{ version = ">=1.26", python = ">=3.12,<3.13" },
@@ -21,7 +20,6 @@ numpy = [
{ version = ">=2.3.0", python = ">=3.14" },
]
onnxruntime = [
- { version = ">=1.17.0,<1.20.0", python = "<3.10" },
{ version = ">1.20.0", python = ">=3.13" },
{ version = ">=1.17.0,!=1.20.0", python = ">=3.10,<3.13" },
]
diff --git a/tests/test_multi_gpu.py b/tests/test_multi_gpu.py
index 32d6c94c..235fb0ea 100644
--- a/tests/test_multi_gpu.py
+++ b/tests/test_multi_gpu.py
@@ -1,5 +1,5 @@
import pytest
-from typing import Optional
+
from fastembed import (
TextEmbedding,
SparseTextEmbedding,
@@ -14,7 +14,7 @@
@pytest.mark.skip(reason="Requires a multi-gpu server")
@pytest.mark.parametrize("device_id", [None, 0, 1])
-def test_gpu_via_providers(device_id: Optional[int]) -> None:
+def test_gpu_via_providers(device_id: int | None) -> None:
docs = ["hello world", "flag embedding"] device_id = device_id if device_id is not None else 0 @@ -86,7 +86,7 @@ def test_gpu_via_providers(device_id: Optional[int]) -> None: @pytest.mark.skip(reason="Requires a multi-gpu server") @pytest.mark.parametrize("device_ids", [None, [0], [1], [0, 1]]) -def test_gpu_cuda_device_ids(device_ids: Optional[list[int]]) -> None: +def test_gpu_cuda_device_ids(device_ids: list[int] | None) -> None: docs = ["hello world", "flag embedding"] device_id = device_ids[0] if device_ids else 0 embedding_model = TextEmbedding( @@ -171,7 +171,7 @@ def test_gpu_cuda_device_ids(device_ids: Optional[list[int]]) -> None: @pytest.mark.parametrize( "device_ids,parallel", [(None, None), (None, 2), ([1], None), ([1], 1), ([1], 2), ([0, 1], 2)] ) -def test_multi_gpu_parallel_inference(device_ids: Optional[list[int]], parallel: int) -> None: +def test_multi_gpu_parallel_inference(device_ids: list[int] | None, parallel: int) -> None: docs = ["hello world", "flag embedding"] * 100 batch_size = 5 diff --git a/tests/utils.py b/tests/utils.py index 5fc0fd17..481c0504 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -3,12 +3,12 @@ from pathlib import Path from types import TracebackType -from typing import Union, Callable, Any, Type, Optional +from typing import Callable, Any, Type from fastembed.common.model_description import BaseModelDescription -def delete_model_cache(model_dir: Union[str, Path]) -> None: +def delete_model_cache(model_dir: str | Path) -> None: """Delete the model cache directory. If a model was downloaded from the HuggingFace model hub, then _model_dir is the dir to snapshots, removing @@ -42,7 +42,7 @@ def on_error( def should_test_model( model_desc: BaseModelDescription, autotest_model_name: str, - is_ci: Optional[str], + is_ci: str | None, is_manual: bool, ): """Determine if a model should be tested based on environment