diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..7db588e --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,4 @@ +* @shakefu + +# Ignore certain files so they can be auto-updated +.pre-commit-config.yaml diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 0000000..90e85f6 --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1 @@ +github: shakefu diff --git a/.github/actionlint.yaml b/.github/actionlint.yaml new file mode 100644 index 0000000..9261d80 --- /dev/null +++ b/.github/actionlint.yaml @@ -0,0 +1,2 @@ +self-hosted-runner: + labels: [] diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 0000000..17a8e57 --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,127 @@ +--- +name: CI + +on: + # Allow this config to be reused by other workflows in the repo + workflow_call: + pull_request: + branches: [main] + +concurrency: + group: ci-${{ github.event.number || 'main' }} + cancel-in-progress: true + +jobs: + preview: + name: Release preview + runs-on: ubuntu-latest + if: github.event_name == 'pull_request' + steps: + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 + with: + fetch-depth: 0 + - name: Generate notes + id: notes + uses: open-turo/actions-release/semantic-release@4d8a6b6aa2d051e7dba0429d8d08beef827b5ccd # v4 + with: + branches: ${{ github.head_ref }} + override-github-ref-name: ${{ github.head_ref }} + dry-run: true + ci: false + - name: Find Comment + uses: peter-evans/find-comment@a54c31d7fa095754bfef525c0c8e5e5674c4b4b1 # v2 + id: find + with: + issue-number: ${{ github.event.pull_request.number }} + body-includes: release-notes-preview + - name: Comment preview + uses: peter-evans/create-or-update-comment@67dcc547d311b736a8e6c5c236542148a47adc3d # v2 + if: steps.notes.outputs.new-release-notes != '' + with: + comment-id: ${{ steps.find.outputs.comment-id }} + issue-number: ${{ github.event.number }} + edit-mode: replace + body: | + + + ## Release notes preview + + ${{ steps.notes.outputs.new-release-notes }} + + lint: + name: Lint + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.10", "3.11", "3.12", "3.13"] + steps: + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 + - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v5 + with: + python-version: ${{ matrix.python-version }} + - uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # v3.0.1 + + test: + name: Test + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.10", "3.11", "3.12", "3.13"] + steps: + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 + - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v5 + with: + python-version: ${{ matrix.python-version }} + - uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0 + with: + version: "0.7.12" + python-version: ${{ matrix.python-version }} + enable-cache: true + cache-dependency-glob: "pyproject.toml" + - env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + # Pytest + uv sync --locked --all-extras --dev + uv run pytest --cov + uv run coveralls + + docs: + name: Docs + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 + - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v5 + with: + python-version-file: ".python-version" + - uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0 + with: + 
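+          # Pin uv and cache its environment keyed on pyproject.toml so the
+          # docs build reuses resolved dependencies (same pattern as the test
+          # job above, minus the Python version matrix).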
version: "0.7.12" + enable-cache: true + cache-dependency-glob: "pyproject.toml" + - env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + # Build docs + uv run script/docs + + checks: + name: Checks + needs: [lint, test, docs] + runs-on: ubuntu-latest + permissions: + issues: write + pull-requests: write + steps: + - name: Renovate / Auto-approve + if: github.actor == 'renovatebot' || github.actor == 'renovate[bot]' + uses: hmarr/auto-approve-action@44888193675f29a83e04faf4002fa8c0b537b1e4 # v3.2.1 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + - name: Renovate / Auto-merge + if: github.actor == 'renovatebot' || github.actor == 'renovate[bot]' + uses: pascalgn/automerge-action@22948e0bc22f0aa673800da838595a3e7347e584 # v0.15.6 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + MERGE_LABELS: dependencies + MERGE_METHOD: rebase diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml new file mode 100644 index 0000000..e7e3f07 --- /dev/null +++ b/.github/workflows/release.yaml @@ -0,0 +1,84 @@ +--- +name: Release + +on: + push: + branches: [main] + +concurrency: + group: release + +jobs: + ci: + name: CI + uses: ./.github/workflows/ci.yaml + + release: + name: Release + needs: ci + runs-on: ubuntu-latest + outputs: + version: ${{ steps.version.outputs.new-release-version }} + published: ${{ steps.version.outputs.new-release-published }} + steps: + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 + with: + fetch-depth: 0 + - id: version + uses: open-turo/actions-release/semantic-release@4d8a6b6aa2d051e7dba0429d8d08beef827b5ccd # v4 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + dry-run: true + ci: false + + publish: + name: Publish + needs: release + runs-on: ubuntu-latest + if: needs.release.outputs.published == 'true' + permissions: + contents: write + issues: write + pull-requests: write + id-token: write + steps: + - id: authenticate + uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2 + with: + app-id: ${{ secrets.BOT_CLIENT_ID }} + private-key: ${{ secrets.BOT_PRIVATE_KEY }} + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 + with: + token: ${{ steps.authenticate.outputs.token }} + fetch-depth: 0 + - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v5 + with: + python-version-file: ".python-version" + - uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0 + with: + version: "0.7.12" + enable-cache: true + cache-dependency-glob: "pyproject.toml" + - run: | + # Install dependencies + uv sync --locked --all-extras --dev + - id: version + uses: open-turo/actions-release/semantic-release@4d8a6b6aa2d051e7dba0429d8d08beef827b5ccd # v4 + with: + github-token: ${{ steps.authenticate.outputs.token }} + dry-run: true + ci: false + - run: | + # Update version + uv version ${{ steps.version.outputs.new-release-version }} + - uses: actions-js/push@master + with: + message: "chore: ${{ steps.version.outputs.new-release-version }} [skip actions]" + github_token: ${{ steps.authenticate.outputs.token }} + - run: | + # Build package + uv build + - uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc # release/v1 + - uses: open-turo/actions-release/semantic-release@4d8a6b6aa2d051e7dba0429d8d08beef827b5ccd # v4 + with: + github-token: ${{ steps.authenticate.outputs.token }} diff --git a/.gitignore b/.gitignore index 1c26085..23d5da4 100644 --- a/.gitignore +++ b/.gitignore @@ -18,7 +18,18 @@ pip-log.txt # Unit test / 
coverage reports .coverage +.coverage.* +coverage.xml +htmlcov/ .tox +.nox +.cache +.pytest_cache/ +.hypothesis/ +*.cover +*.py,cover +.coverage_html/ +.pytest_cache # Translations *.mo @@ -31,6 +42,12 @@ pip-log.txt # Sphinx docs/_build +docs/_output -# Ipython Notebooks -*.ipynb +# Cursor +.cursorrules +.cursor +.cursor/rules + +# VSCode +.vscode diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..6af6000 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,45 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v5.0.0 + hooks: + - id: check-yaml + - id: end-of-file-fixer + - id: trailing-whitespace + - repo: https://github.com/alessandrojcm/commitlint-pre-commit-hook + rev: v9.22.0 + hooks: + - id: commitlint + stages: [commit-msg] + additional_dependencies: ["@open-turo/commitlint-config-conventional"] + - repo: https://github.com/pre-commit/mirrors-prettier + rev: v4.0.0-alpha.8 + hooks: + - id: prettier + stages: [pre-commit] + # - repo: https://github.com/rhysd/actionlint + # rev: v1.7.7 + # hooks: + # - id: actionlint + # - repo: https://github.com/jumanjihouse/pre-commit-hooks + # rev: 3.0.0 # or specific git tag + # hooks: + # - id: shellcheck + # - id: shfmt + - repo: https://github.com/DavidAnson/markdownlint-cli2 + rev: v0.18.1 + hooks: + - id: markdownlint-cli2 + args: ["--fix", ".cursorrules"] + - repo: https://github.com/astral-sh/ruff-pre-commit + # Ruff version. + rev: v0.11.13 + hooks: + # Run the Ruff linter. + - id: ruff + # Run the Ruff formatter. + - id: ruff-format + - repo: https://github.com/astral-sh/uv-pre-commit + # uv version. + rev: 0.7.13 + hooks: + - id: uv-lock diff --git a/.python-version b/.python-version new file mode 100644 index 0000000..24ee5b1 --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.13 diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 0000000..3ffcc21 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,36 @@ +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Set the version of Python and other tools you might need +build: + os: ubuntu-24.04 + tools: + python: "3.12" + jobs: + pre_create_environment: + - asdf plugin add uv + - asdf install uv latest + - asdf global uv latest + create_environment: + - uv venv "${READTHEDOCS_VIRTUALENV_PATH}" + install: + - UV_PROJECT_ENVIRONMENT="${READTHEDOCS_VIRTUALENV_PATH}" uv sync --frozen --all-extras + +# Build documentation in the "docs/" directory with Sphinx +sphinx: + configuration: docs/conf.py + # Fail on all warnings to avoid broken references + fail_on_warning: true + +# Optionally build your docs in additional formats +formats: + - pdf + - epub + +# Python requirements required to build your documentation +python: + install: + - requirements: docs/requirements.txt diff --git a/.releaserc.json b/.releaserc.json new file mode 100644 index 0000000..d457a7d --- /dev/null +++ b/.releaserc.json @@ -0,0 +1,42 @@ +{ + "branches": ["main"], + "plugins": [ + "@semantic-release/commit-analyzer", + [ + "@semantic-release/release-notes-generator", + { + "preset": "conventionalcommits", + "presetConfig": { + "type": [ + { "type": "build", "section": "Build System", "hidden": false }, + { "type": "chore", "section": "Miscellaneous", "hidden": false }, + { + "type": "ci", + "section": "Continuous Integration", + "hidden": false + }, + { "type": "docs", "section": "Documentation", "hidden": false }, + { "type": "feat", 
"section": "Features", "hidden": false }, + { "type": "fix", "section": "Bug Fixes", "hidden": false }, + { + "type": "perf", + "section": "Performance Improvements", + "hidden": false + }, + { + "type": "refactor", + "section": "Code Refactoring", + "hidden": false + }, + { "type": "style", "section": "Styles", "hidden": false }, + { "type": "test", "section": "Tests", "hidden": false } + ] + }, + "writerOpts": { + "commitsSort": ["subject", "scope"] + } + } + ], + "@semantic-release/github" + ] +} diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index d40b7ec..0000000 --- a/.travis.yml +++ /dev/null @@ -1,18 +0,0 @@ -language: python -python: - - 2.7 - - 3.5 - - 3.6 -services: mongodb -env: - - PYMONGO_VERSION='<3' - - PYMONGO_VERSION='<3.6' - - PYMONGO_VERSION='' # Allow latest -install: - - pip install "pymongo $PYMONGO_VERSION" - - pip install . - - pip install coveralls coverage -script: > - coverage run --source=humbledb setup.py test - -after_success: coveralls diff --git a/LICENSE.rst b/LICENSE.rst index a4a3f3a..f4c0075 100644 --- a/LICENSE.rst +++ b/LICENSE.rst @@ -14,4 +14,3 @@ distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. - diff --git a/README.md b/README.md new file mode 100644 index 0000000..ac58001 --- /dev/null +++ b/README.md @@ -0,0 +1,120 @@ +# HumbleDB - MongoDB Object-Document Mapper + +HumbleDB solves the age-old MongoDB dilemma: write readable, maintainable code +or optimize for storage efficiency. With HumbleDB, you get both. This +lightweight ODM lets you use clear, descriptive attribute names in your Python +code while automatically mapping them to ultra-short database keys, +dramatically reducing document size and memory usage without sacrificing code +clarity. + +[![CI](https://github.com/shakefu/humbledb/actions/workflows/ci.yaml/badge.svg)](https://github.com/shakefu/humbledb/actions) +[![Coverage Status](https://coveralls.io/repos/shakefu/humbledb/badge.png?branch=master)](https://coveralls.io/r/shakefu/humbledb?branch=master) +[![GitHub Sponsors](https://img.shields.io/github/sponsors/shakefu?style=flat&logo=github&logoColor=white&labelColor=ea4aaa&color=ea4aaa)](https://github.com/sponsors/shakefu) + +> [!WARNING] Version 7.0.0 Breaking Changes +> This release updates to Pymongo 4.x, which introduces [breaking +> changes](https://pymongo.readthedocs.io/en/stable/migrate-to-pymongo4.html). +> While we've maintained all older 3.x and 2.x Pymongo methods with identical +> signatures where possible, there may be other breaking changes not covered by +> our test suite. If you encounter any issues during upgrade, please [open an +> issue](https://github.com/shakefu/humbledb/issues/new/choose). + +## Features + +**Efficient Storage**: Map readable attribute names to ultra-short database keys, +reducing document size and memory usage while maintaining code clarity. + +**Full Pymongo Compatibility**: Maintains backwards-compatible methods for +Pymongo 4.x including `insert`, `find_and_modify`, `save`, and other familiar +operations. + +**Maximum Flexibility**: Documents are also dictionaries - no schema +restrictions, maximum adaptability to changing requirements. + +**Thread & Greenlet Safe**: Built for concurrent applications with safe +connection handling and resource management. 
+ +**Context-Managed Connections**: [Connection paradigm](https://humbledb.readthedocs.io/en/latest/tutorial.html#configuring-connections) +minimizes socket usage from the connection pool through explicit context +managers. + +**Lightweight Design**: Thin wrapper around Pymongo that exposes the full power +of the underlying driver without performance overhead. + +## Quick Start + +### Define Your Document Schema + +Create a [`Document`](https://humbledb.readthedocs.io/en/latest/api.html#documents) +subclass with readable attribute names mapped to short database keys: + +```python +from humbledb import Mongo, Document + +class TestDoc(Document): + config_database = 'test' # Target database + config_collection = 'testdoc' # Target collection + test_key = 't' # Maps 'test_key' attribute to 't' in MongoDB + other_key = 'o' # Maps 'other_key' attribute to 'o' in MongoDB +``` + +### Create and Populate Documents + +Documents work like regular Python objects with attribute access, while storing +data efficiently: + +```python +doc = TestDoc() +doc.test_key = 'Hello' +doc.other_key = 'World' + +# View the actual MongoDB document structure +print(doc) +# TestDoc({'t': 'Hello', 'o': 'World'}) +``` + +### Flexible Data Access + +Access your data through mapped attributes or direct dictionary keys: + +```python +# Via mapped attributes (recommended) +print(doc.test_key) # 'Hello' + +# Via dictionary access +print(doc['t']) # 'Hello' +print(doc['o']) # 'World' +``` + +### Database Operations + +Use the [`Mongo`](https://humbledb.readthedocs.io/en/latest/api.html#mongodb-connections) +context manager for safe database operations: + +```python +# Insert document +with Mongo: + TestDoc.insert(doc) + +# Query documents +with Mongo: + found = TestDoc.find_one() + +print(found) +# TestDoc({'_id': ObjectId('50ad81586112797f89b99606'), 't': 'Hello', 'o': 'World'}) +``` + +See the documentation for more examples and detailed explanations. + +## Documentation + +The complete documentation can be found on . + +## License + +See LICENSE.rst. + +## Contributors + +- [shakefu](https://github.com/shakefu) (Creator, Maintainer) +- [paulnues](https://github.com/paulnues) diff --git a/README.rst b/README.rst deleted file mode 100644 index edb2e6d..0000000 --- a/README.rst +++ /dev/null @@ -1,80 +0,0 @@ -HumbleDB - MongoDB Object-Document Mapper -========================================= - -HumbleDB is an extremely lightweight ODM that works with pymongo to provide a -convenient and easy to use interface. It enforces strict explictness when a -connection to a MongoDB cluster or replica set is being used, by disallowing -any read or write interaction outside of a context manager's context block. - -.. image:: https://travis-ci.org/shakefu/humbledb.svg?branch=master - :target: https://travis-ci.org/shakefu/humbledb - -.. image:: https://coveralls.io/repos/shakefu/humbledb/badge.png?branch=master - :target: https://coveralls.io/r/shakefu/humbledb?branch=master - - - -Quick Example -------------- - -.. code-block:: python - - >>> from humbledb import Mongo, Document - >>> # config_database and config_collection are required attributes - >>> class TestDoc(Document): - ... config_database = 'test' - ... config_collection = 'testdoc' - ... test_key = 't' - ... other_key = 'o' - ... 
- >>> # When you create a Document instance, you can set its keys via any - >>> # mapped attributes you create - >>> doc = TestDoc() - >>> doc.test_key = 'Hello' - >>> doc.other_key = 'World' - >>> # The __repr__ for the instance shows the actual doc - >>> doc - TestDoc({'t': 'Hello', 'o': 'World'}) - >>> # A Document instance is also a dict, but you have to access the key - >>> # names directly - >>> doc['o'] - u'World' - >>> # Or use the mapped attribute - >>> doc[TestDoc.test_key] - u'Hello' - >>> # The Mongo class manages database connection and is a context manager - >>> with Mongo: - ... TestDoc.insert(doc) - ... - >>> with Mongo: - ... found = TestDoc.find_one() - ... - >>> found - TestDoc({u'_id': ObjectId('50ad81586112797f89b99606'), u't': u'Hello', u'o': u'World'}) - >>> doc - TestDoc({'_id': ObjectId('50ad81586112797f89b99606'), 't': 'Hello', 'o': 'World'}) - >>> found['_id'] - ObjectId('50ad81586112797f89b99606') - >>> found['t'] - u'Hello' - >>> found.test_key - u'Hello' - -See the documentation for more examples and detailed explanations. - -Documentation -------------- - -The complete documentation can be found on http://humbledb.readthedocs.org. - -License -------- - -See LICENSE.rst. - -Contributors ------------- - -* `shakefu `_ (Creator, Maintainer) -* `paulnues `_ - diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..437abb4 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,6 @@ +version: "3.8" +services: + mongodb: + image: mongo:latest + ports: + - "27017" diff --git a/docs/api.rst b/docs/api.rst index 26950f1..ad36db7 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -75,5 +75,3 @@ Helpers ======= .. autofunction:: humbledb.helpers.auto_increment - - diff --git a/docs/changes.rst b/docs/changes.rst index b1c6248..2253d07 100644 --- a/docs/changes.rst +++ b/docs/changes.rst @@ -124,7 +124,7 @@ This section contains all the changes that I can remember, by version. ----- - This release may break backwards compatibility. -- Total rewrite of the :module:`humbledb.report` module to make it much more +- Total rewrite of the :mod:`humbledb.report` module to make it much more useful. Sorry, but I'm fairly sure nobody was using it before anyway. @@ -212,7 +212,7 @@ This section contains all the changes that I can remember, by version. 3.0.2 ----- -- Fix bug with DocumentMeta accidentally getting extra ``name`` attribute, +- Fix bug with DocumentMeta accidentally getting extra ``name`` attribute, which in turn became available on Document, and would override mapping behavior. @@ -250,7 +250,7 @@ This section contains all the changes that I can remember, by version. ----- - Fix bug when old version by using pkg_resources.parse_version to check - pymongo version. + pymongo version. 2.2.0 ----- @@ -285,4 +285,3 @@ This section contains all the changes that I can remember, by version. ----- - First release fit for public consumption. - diff --git a/docs/conf.py b/docs/conf.py index f9fc7bb..7e849a4 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -10,6 +10,7 @@ # # All configuration values have a default; values that are commented out # serve to show the default. +from importlib.metadata import version as get_version # Uncomment this line if you need to use sys or os # import sys, os @@ -17,217 +18,213 @@ # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
-#sys.path.insert(0, os.path.abspath('.')) +# sys.path.insert(0, os.path.abspath('.')) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' +# needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode', - 'sphinx.ext.intersphinx'] +extensions = ["sphinx.ext.autodoc", "sphinx.ext.viewcode", "sphinx.ext.intersphinx"] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. -#source_encoding = 'utf-8-sig' +# source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = u'HumbleDB' -copyright = u'2012, Jake Alheid' +project = "HumbleDB" +copyright = "2012, Jake Alheid" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # -import humbledb + # The short X.Y version. -version = humbledb.__version__.split('-')[0] +version = get_version("humbledb").split("-")[0] # The full version, including alpha/beta/rc tags. -release = humbledb.__version__ +release = get_version("humbledb") # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. -#language = None +# language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ['_build'] +exclude_patterns = ["_build"] # The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). -#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] +# modindex_common_prefix = [] # -- Intersphinx configuration intersphinx_mapping = { - 'python': ('http://docs.python.org/2.7', None), - 'pymongo': ('http://api.mongodb.org/python/current/', None), - } + "python": ("https://docs.python.org/3", None), + "pymongo": ( + "https://www.mongodb.com/docs/languages/python/pymongo-driver/current/", + None, + ), +} # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. 
-html_theme = 'default' +html_theme = "default" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -#html_theme_options = {} +# html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] +# html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". -#html_title = None +# html_title = None # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. -#html_logo = None +# html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. -#html_favicon = None +# html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['static'] +html_static_path = ["static"] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' +# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -#html_sidebars = {} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_domain_indices = True +# html_domain_indices = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True +# html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True +# html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True +# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None +# html_file_suffix = None # Output file base name for HTML help builder. -htmlhelp_basename = 'doc' +htmlhelp_basename = "doc" # -- Options for LaTeX output -------------------------------------------------- latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). 
+ #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ - ('index', 'humbledb.tex', u'HumbleDB Documentation', - u'Jake Alheid', 'manual'), + ("index", "humbledb.tex", "HumbleDB Documentation", "Jake Alheid", "manual"), ] # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # If true, show page references after internal links. -#latex_show_pagerefs = False +# latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# latex_show_urls = False # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_domain_indices = True +# latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [ - ('index', '', u'HumbleDB Documentation', - [u'Author'], 1) -] +man_pages = [("index", "", "HumbleDB Documentation", ["Author"], 1)] # If true, show URL addresses after external links. -#man_show_urls = False +# man_show_urls = False # -- Options for Texinfo output ------------------------------------------------ @@ -236,59 +233,65 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - ('index', '', u'HumbleDB Documentation', - u'Jake Alheid', '', 'MongoDB Python Object-Document Mapper.', - 'Miscellaneous'), + ( + "index", + "", + "HumbleDB Documentation", + "Jake Alheid", + "", + "MongoDB Python Object-Document Mapper.", + "Miscellaneous", + ), ] # Documents to append as an appendix to all manuals. -#texinfo_appendices = [] +# texinfo_appendices = [] # If false, no module index is generated. -#texinfo_domain_indices = True +# texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' +# texinfo_show_urls = 'footnote' # -- Options for Epub output --------------------------------------------------- # Bibliographic Dublin Core info. -epub_title = u'HumbleDB Documentation' -epub_author = u'Jake Alheid' -epub_publisher = u'Jake Alheid' -epub_copyright = u'2012, Jake Alheid' +epub_title = "HumbleDB Documentation" +epub_author = "Jake Alheid" +epub_publisher = "Jake Alheid" +epub_copyright = "2012, Jake Alheid" # The language of the text. It defaults to the language option # or en if the language is not set. -#epub_language = '' +# epub_language = '' # The scheme of the identifier. Typical schemes are ISBN or URL. -#epub_scheme = '' +# epub_scheme = '' # The unique identifier of the text. This can be a ISBN number # or the project homepage. -#epub_identifier = '' +# epub_identifier = '' # A unique identification for the text. -#epub_uid = '' +# epub_uid = '' # A tuple containing the cover image and cover page html template filenames. -#epub_cover = () +# epub_cover = () # HTML files that should be inserted before the pages created by sphinx. # The format is a list of tuples containing the path and title. 
-#epub_pre_files = [] +# epub_pre_files = [] # HTML files shat should be inserted after the pages created by sphinx. # The format is a list of tuples containing the path and title. -#epub_post_files = [] +# epub_post_files = [] # A list of files that should not be packed into the epub file. -#epub_exclude_files = [] +# epub_exclude_files = [] # The depth of the table of contents in toc.ncx. -#epub_tocdepth = 3 +# epub_tocdepth = 3 # Allow duplicate toc entries. -#epub_tocdup = True +# epub_tocdup = True diff --git a/docs/index.rst b/docs/index.rst index 3e9a463..9acf8f4 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -19,7 +19,7 @@ HumbleDB: MongoDB micro-ODM =========================== HumbleDB is a thin wrapper around Pymongo_ for MongoDB_ that is designed to -make working with flexible schema documents easy and readable. +make working with flexible schema documents easy and readable. * **Readable:** Short document keys can be mapped to long attribute names to keep document size small and efficient, while providing completely @@ -73,7 +73,7 @@ power and flexibility of Pymongo underneath. It's called "Humble" because ... description = 'd' ... # Same for the 'v' key and the value attribute ... value = 'v' - ... + ... >>> # Create a new empty document >>> doc = HumbleDoc() >>> # Set some values in the document @@ -83,7 +83,7 @@ power and flexibility of Pymongo underneath. It's called "Humble" because >>> with Mongo: ... # The insert method (and others) are the same as the pymongo methods ... HumbleDoc.insert(doc) - ... + ... >>> # Newly created documents will have their _id field set, and you can see >>> # what the raw document would look like in MongoDB >>> doc @@ -92,7 +92,7 @@ power and flexibility of Pymongo underneath. It's called "Humble" because .. rubric:: What's going on here? -* ``config_database = 'humble'`` - This tells the document that it's stored +* ``config_database = 'humble'`` - This tells the document that it's stored in the ``'humble'`` database. * ``config_collection = 'examples'`` - This tells the document that it's part of the ``'examples'`` collection. @@ -102,7 +102,7 @@ power and flexibility of Pymongo underneath. It's called "Humble" because ``'v'``. * ``with Mongo:`` - This :class:`Mongo` context manager tells the document which MongoDB connection to use (see :ref:`Connecting to MongoDB - `). + `). * ``HumbleDoc.insert(doc)`` - This inserts ``doc`` into the HumbleDoc collection (see :ref:`Working with a Collection `). @@ -126,7 +126,7 @@ User's Guide changes .. include:: ../LICENSE.rst - + Indices and tables ================== @@ -134,4 +134,3 @@ Indices and tables * :ref:`genindex` * :ref:`modindex` * :ref:`search` - diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 775494e..00b8483 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -1067,6 +1067,3 @@ maximum array size for any single document. comments.remove(spec) # Remove all entries matching `spec` comments.clear() # Remove all entries - - - diff --git a/humbledb/__init__.py b/humbledb/__init__.py index 21b9f9b..bed20df 100644 --- a/humbledb/__init__.py +++ b/humbledb/__init__.py @@ -18,39 +18,43 @@ limitations under the License. """ -__version__ = '6.0.0' + +import importlib.metadata + +try: + __version__ = importlib.metadata.version("humbledb") +except importlib.metadata.PackageNotFoundError: + __version__ = "0.0.0-dev" # We only want to allow * imports for the most common classes. If you want # anything else, import it directly. 
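+# Note: `__version__` above falls back to "0.0.0-dev" only when the package
+# metadata is unavailable (e.g. running from a source tree that has not been
+# installed); normally it reports the installed distribution version.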
__all__ = [ - 'Index', - 'Mongo', - 'Document', - 'Embed', - ] + "Index", + "Mongo", + "Document", + "Embed", +] + +# Shortcuts to pymongo index directions +import pymongo # Shortcut to pytool.lang.UNSET from pytool.lang import UNSET -# Shortcuts to pymongo index directions -import pymongo DESC = pymongo.DESCENDING ASC = pymongo.ASCENDING del pymongo # Clean up the namespace -# Import shortcuts to HumbleDB document basics -from .index import Index -from .mongo import Mongo -from .document import Document, Embed - +# Exceptions module # Import array and report framework modules. These need to be imported last or # it causes with circular imports -from . import array -from . import report +from . import array, errors, report +from .document import Document, Embed -# Exceptions module -from . import errors +# Import shortcuts to HumbleDB document basics +from .index import Index +from .mongo import Mongo # To make Pyflakes happy array = array diff --git a/humbledb/_version.py b/humbledb/_version.py index 9b52ee8..01f94bd 100644 --- a/humbledb/_version.py +++ b/humbledb/_version.py @@ -2,51 +2,171 @@ This module contains version checking helpers. """ -import pkg_resources -import pymongo +import functools +from importlib.metadata import version as get_version +from packaging.version import Version -def _lt(version): - """ Return ``True`` if ``pymongo.version`` is less than `version`. +try: + import ssl +except ImportError: + ssl = None - :param str version: Version string + +PYMONGO = Version(get_version("pymongo")) + + +@functools.lru_cache(maxsize=16) +def _lt(version: str) -> bool: + """Return ``True`` if ``pymongo.version`` is less than `version`. + + :param str version: Version string """ - return (pkg_resources.parse_version(pymongo.version) < - pkg_resources.parse_version(version)) + return PYMONGO < Version(version) -def _gte(version): - """ Return ``True`` if ``pymongo.version`` is greater than or equal to - `version`. +@functools.lru_cache(maxsize=16) +def _gte(version: str) -> bool: + """Return ``True`` if ``pymongo.version`` is greater than or equal to + `version`. - :param str version: Version string + :param str version: Version string """ - return (pkg_resources.parse_version(pymongo.version) >= - pkg_resources.parse_version(version)) + return PYMONGO >= Version(version) -def _clean(kwargs): - """ Mutate `kwargs` to handle backwards incompatible version discrepancies. +def _clean(kwargs: dict) -> None: + """Mutate `kwargs` to handle backwards incompatible version discrepancies. - Currently only changes the `safe` param into `w`. If ``safe=False`` is - passed it is transformed into ``w=0``. + Currently only changes the `safe` param into `w`. If ``safe=False`` is + passed it is transformed into ``w=0``. - Otherwise `safe` is removed from the args. + Otherwise `safe` is removed from the args. """ - if _lt('3.0'): + if _lt("3.0"): + return + + if "safe" not in kwargs: + return + + if kwargs["safe"] is False: + del kwargs["safe"] + kwargs["w"] = 0 return - if 'safe' not in kwargs: + del kwargs["safe"] + + +def _clean_create_index(kwargs: dict) -> None: + """Mutate `kwargs` to handle backwards incompatible version discrepancies. 
+ + Versions older than 4.1: + + - Removes `comment` keyword argument + + Versions older than 3.11: + + - Removes `hidden` keyword argument + + Versions older than 3.6: + + - Removes `session` keyword argument + + Versions newer than 3.0: + + - Removes `cache_for` keyword argument + - Changes `drop_dups` to `dropDups` + - Changes `bucket_size` to `bucketSize` + - Changes `key_or_list` to `keys` + + """ + if _gte("3.0"): + kwargs.pop("cache_for", None) + + if "drop_dups" in kwargs: + kwargs["dropDups"] = kwargs.pop("drop_dups") + + if "bucket_size" in kwargs: + kwargs["bucketSize"] = kwargs.pop("bucket_size") + + if "key_or_list" in kwargs: + kwargs["keys"] = kwargs.pop("key_or_list") + + if _lt("2.3") and "cache_for" in kwargs: + kwargs["ttl"] = kwargs.pop("cache_for") + + if _lt("3.6"): + kwargs.pop("session", None) return - if kwargs['safe'] == False: - del kwargs['safe'] - kwargs['w'] = 0 + if _lt("3.11"): + kwargs.pop("hidden", None) return - del kwargs['safe'] + if _lt("4.1"): + kwargs.pop("comment", None) +def _clean_connection_kwargs(kwargs: dict) -> None: + """Mutate `kwargs` to handle backwards incompatible version discrepancies. + + Currently only changes the `safe` param into `w`. If ``safe=False`` is + passed it is transformed into ``w=0``. + + Otherwise `safe` is removed from the args. + """ + if _gte("2.1.0") and _lt("2.2.0"): + # This causes an error for the 2.1.x versions of Pymongo, so we + # remove it + kwargs.pop("auto_start_request") + kwargs.pop("use_greenlets") + + if _gte("3.0.0"): + # Handle removed keywords + kwargs.pop("use_greenlets") + kwargs.pop("auto_start_request") + + # Handle changed keywords + if "max_pool_size" in kwargs: + kwargs["maxPoolSize"] = kwargs.pop("max_pool_size") + + # Handle other 3.0 stuff + if kwargs.get("ssl"): + kwargs.setdefault("ssl_cert_reqs", ssl.CERT_NONE) + + if _gte("4.0.0"): + # SSL related keywords that have changed + if "ssl_pem_passphrase" in kwargs: + kwargs["tlsCertificateKeyFilePassword"] = kwargs.pop("ssl_pem_passphrase") + + if "ssl_ca_certs" in kwargs: + kwargs["tlsCAFile"] = kwargs.pop("ssl_ca_certs") + + if "ssl_crl_file" in kwargs: + kwargs["tlsCRLFile"] = kwargs.pop("ssl_crl_file") + + if "ssl_match_hostname" in kwargs: + kwargs["tlsAllowInvalidHostnames"] = kwargs.pop("ssl_match_hostname") + + if "ssl_certfile" in kwargs or "ssl_keyfile" in kwargs: + raise ValueError( + "ssl_certfile and ssl_keyfile are no longer supported. " + "Use tlsCertificateKeyFile instead (https://pymongo.readthedocs.io/en/stable/migrate-to-pymongo4.html#renamed-uri-options)." + ) + + if "ssl_cert_reqs" in kwargs: + ssl_cert_reqs = kwargs.pop("ssl_cert_reqs") + if ssl_cert_reqs == ssl.CERT_NONE or ssl_cert_reqs == ssl.CERT_OPTIONAL: + kwargs["tlsAllowInvalidCertificates"] = True + else: + kwargs["tlsCertificateKeyFilePassword"] = False + + # Other changed keywords + if "j" in kwargs: + kwargs["journal"] = kwargs.pop("j") + + if "wtimeout" in kwargs: + kwargs["wtimeoutMS"] = kwargs.pop("wtimeout") diff --git a/humbledb/array.py b/humbledb/array.py index b253425..acc1a29 100644 --- a/humbledb/array.py +++ b/humbledb/array.py @@ -1,19 +1,21 @@ import itertools -import six +from pytool.lang import UNSET import humbledb +from humbledb import _version +from humbledb.document import Document from humbledb.errors import NoConnection -from humbledb import Document, UNSET, _version class Page(Document): - """ Document class used by :class:`Array`. 
""" - size = 's' # Number of entries in this page + """Document class used by :class:`Array`.""" + + size = "s" # Number of entries in this page """ Number of entries currently in this page. """ - entries = 'e' # Array of entries + entries = "e" # Array of entries """ Array of entries. """ - _opts = {'safe': True} if _version._lt('3.0.0') else {} + _opts = {"safe": True} if _version._lt("3.0.0") else {} class ArrayMeta(type): @@ -23,45 +25,55 @@ class ArrayMeta(type): is specific to each Array subclass. """ + def __new__(mcs, name, bases, cls_dict): # Skip the Array base class - if name == 'Array' and bases == (object,): + if ( + name == "Array" + and not len(bases) + and mcs is ArrayMeta + and cls_dict["__qualname__"] == "Array" + ): return type.__new__(mcs, name, bases, cls_dict) # The dictionary for subclassing the Page document page_dict = {} # Check for required class members - for member in 'config_database', 'config_collection': + for member in "config_database", "config_collection": if member not in cls_dict: - raise TypeError("{!r} missing required {!r}".format(name, - member)) + raise TypeError("{!r} missing required {!r}".format(name, member)) # Move the config to the page page_dict[member] = cls_dict.pop(member) # Create our page subclass and assign to cls._page - cls_dict['_page'] = type(name + 'Page', (Page,), page_dict) + cls_dict["_page"] = type(name + "Page", (Page,), page_dict) # Return our new Array return type.__new__(mcs, name, bases, cls_dict) # Shortcut methods @property - def size(cls): return cls._page.size + def size(cls): + return cls._page.size @property - def entries(cls): return cls._page.entries + def entries(cls): + return cls._page.entries @property - def find(cls): return cls._page.find + def find(cls): + return cls._page.find @property - def update(cls): return cls._page.update + def update(cls): + return cls._page.update @property - def remove(cls): # This needs a try/except for nosetests - try: return cls._page.remove - except NoConnection: pass # Collection not available yet + def remove(cls): # This needs a try/except for tests + try: + return cls._page.remove + except NoConnection: + pass # Collection not available yet -@six.add_metaclass(ArrayMeta) -class Array(object): +class Array(metaclass=ArrayMeta): """ HumbleDB Array object. This helps manage paginated array documents in MongoDB. This class is designed to be inherited from, and not instantiated @@ -75,10 +87,11 @@ class Array(object): :param int page_count: Total number of pages that already exist (optional) """ + config_max_size = 100 """ Soft limit on the maximum number of entries per page. """ - config_page_marker = u'#' + config_page_marker = "#" """ Combined with the array_id and page number to create the page _id. 
""" config_padding = 0 @@ -105,8 +118,8 @@ def _id(self): @property def _id_regex(self): - _id = self._id.replace('.', '\.') - return {'$regex': '^' + _id} + _id = self._id.replace(".", "\.") + return {"$regex": "^" + _id} def new_page(self, page_number): """ @@ -122,7 +135,7 @@ def new_page(self, page_number): page._id = self.page_id(page_number) page.size = 0 page.entries = [] - page['padding'] = '0' * self.config_padding + page["padding"] = "0" * self.config_padding # Insert the new page try: # We need to do this as safe, because otherwise it may not be @@ -132,7 +145,7 @@ def new_page(self, page_number): # A race condition already created this page, so we are done return # Remove the padding - Page.update({'_id': page._id}, {'$unset': {'padding': 1}}, **Page._opts) + Page.update({"_id": page._id}, {"$unset": {"padding": 1}}, **Page._opts) def append(self, entry): """ @@ -152,8 +165,8 @@ def append(self, entry): self.new_page(self.page_count) # Shortcut page class Page = self._page - query = {'_id': self.page_id()} - modify = {'$inc': {Page.size: 1}, '$push': {Page.entries: entry}} + query = {"_id": self.page_id()} + modify = {"$inc": {Page.size: 1}, "$push": {Page.entries: entry}} fields = {Page.size: 1} # Append our entry to our page and get the page's size page = Page.find_and_modify(query, modify, new=True, fields=fields) @@ -181,56 +194,53 @@ def remove(self, spec): # Since we can't reliably use dot-notation when the query is against an # embedded document, we need to use the $elemMatch operator instead if isinstance(spec, dict): - query_spec = {'$elemMatch': spec} + query_spec = {"$elemMatch": spec} else: query_spec = spec # Update to set first instance matching ``spec`` on each page to # ``null`` (via $unset) - query = {'_id': self._id_regex, Page.entries: query_spec} - modify = {'$unset': {Page.entries+'.$': spec}, '$inc': {Page.size: -1}} + query = {"_id": self._id_regex, Page.entries: query_spec} + modify = {"$unset": {Page.entries + ".$": spec}, "$inc": {Page.size: -1}} result = Page.update(query, modify, multi=True) - if not result or not result.get('updatedExisting', None): + if not result or not result.get("updatedExisting", None): return # Update to remove all ``null`` entries from this array - query = {'_id': self._id_regex, Page.entries: None} - result = Page.update(query, {'$pull': {Page.entries: None}}, - multi=True) + query = {"_id": self._id_regex, Page.entries: None} + result = Page.update(query, {"$pull": {Page.entries: None}}, multi=True) # Check the result and return True if anything was modified - if result and result.get('updatedExisting', None): + if result and result.get("updatedExisting", None): return True def _all(self): - """ Return a cursor for iterating over all the pages. """ + """Return a cursor for iterating over all the pages.""" Page = self._page - return Page.find({'_id': self._id_regex}).sort('_id') + return Page.find({"_id": self._id_regex}).sort("_id") def all(self): - """ Return all entries in this array. """ + """Return all entries in this array.""" cursor = self._all() return list(itertools.chain.from_iterable(p.entries for p in cursor)) def clear(self): - """ Remove all documents in this array. """ + """Remove all documents in this array.""" self._page.remove({self._page._id: self._id_regex}) self.page_count = 0 def length(self): - """ Return the total number of items in this array. 
""" + """Return the total number of items in this array.""" # This is implemented rather than __len__ because it incurs a query, # and we don't want to query transparently Page = self._page - if _version._lt('3.0.0'): - cursor = Page.find({'_id': self._id_regex}, fields={Page.size: - 1, '_id': 0}) + if _version._lt("3.0.0"): + cursor = Page.find({"_id": self._id_regex}, fields={Page.size: 1, "_id": 0}) else: - cursor = Page.find({'_id': self._id_regex}, {Page.size: 1, '_id': - 0}) + cursor = Page.find({"_id": self._id_regex}, {Page.size: 1, "_id": 0}) return sum(p.size for p in cursor) def pages(self): - """ Return the total number of pages in this array. """ + """Return the total number of pages in this array.""" Page = self._page - return Page.find({'_id': self._id_regex}).count() + return Page.find({"_id": self._id_regex}).count() def __getitem__(self, index): """ @@ -240,8 +250,7 @@ def __getitem__(self, index): """ if not isinstance(index, (int, slice)): - raise TypeError("Array indices must be integers, not %s" % - type(index)) + raise TypeError("Array indices must be integers, not %s" % type(index)) Page = self._page # Shorthand the Page class # If we have an integer index, it's a simple query for the page number if isinstance(index, int): @@ -249,7 +258,7 @@ def __getitem__(self, index): raise IndexError("Array indices must be positive") # Page numbers are not zero indexed index += 1 - page = Page.find_one({'_id': self.page_id(index)}) + page = Page.find_one({"_id": self.page_id(index)}) if not page: raise IndexError("Array index out of range") return page.entries @@ -264,9 +273,8 @@ def __getitem__(self, index): # Page numbers are not zero indexed start = (index.start or 0) + 1 stop = (index.stop or 2**32) + 1 - start = '{}{:05d}'.format(self._id, start) - stop = '{}{:05d}'.format(self._id, stop) - cursor = Page.find({'_id': {'$gte': start, '$lt': stop}}) - return list(itertools.chain.from_iterable(p.entries for p in - cursor)) + start = "{}{:05d}".format(self._id, start) + stop = "{}{:05d}".format(self._id, stop) + cursor = Page.find({"_id": {"$gte": start, "$lt": stop}}) + return list(itertools.chain.from_iterable(p.entries for p in cursor)) # This comment will never be reached diff --git a/humbledb/cursor.py b/humbledb/cursor.py index 5d36065..3e1c995 100644 --- a/humbledb/cursor.py +++ b/humbledb/cursor.py @@ -1,28 +1,26 @@ -""" -""" -import six +""" """ + import pymongo from humbledb import _version class Cursor(pymongo.cursor.Cursor): - """ This subclass of :class:`pymongo.cursor.Cursor` is used to ensure that - documents are coerced to the correct type as they are returned by the - cursor. + """This subclass of :class:`pymongo.cursor.Cursor` is used to ensure that + documents are coerced to the correct type as they are returned by the + cursor. """ + # This class works by assigning a different class to this variable in a # cursor instance _doc_cls = dict def next(self): - if six.PY3 and _version._gte('3'): + if _version._gte("3"): doc = super().next() - elif six.PY3 and _version._lt('3'): - doc = super().__next__() else: - doc = super(Cursor, self).next() + doc = super().__next__() doc = self._doc_cls(doc) return doc @@ -33,24 +31,45 @@ def __getitem__(self, index): return self._doc_cls(doc) def __clone(self, deepcopy=True): - """ This is a direct copy of pymongo 2.4's __clone method. This is a - pretty fragile implementation since it relies on copying private - variables... + """This is a direct copy of pymongo 2.4's __clone method. 
This is a + pretty fragile implementation since it relies on copying private + variables... """ - clone = type(self)(self.__collection) - values_to_clone = ("spec", "fields", "skip", "limit", - "timeout", "snapshot", "tailable", - "ordering", "explain", "hint", "batch_size", - "max_scan", "as_class", "slave_okay", "await_data", - "partial", "manipulate", "read_preference", - "tag_sets", "secondary_acceptable_latency_ms", - "must_use_master", "uuid_subtype", "query_flags", - "kwargs") - more_values_to_clone = ('_doc_cls',) - data = dict((k, v) for k, v in six.iteritems(self.__dict__) - if (k.startswith('_Cursor__') and k[9:] in values_to_clone) - or (k in more_values_to_clone)) - if deepcopy and hasattr(self, '__deepcopy'): + clone = type(self)(self.collection) + values_to_clone = ( + "spec", + "fields", + "skip", + "limit", + "timeout", + "snapshot", + "tailable", + "ordering", + "explain", + "hint", + "batch_size", + "max_scan", + "as_class", + "slave_okay", + "await_data", + "partial", + "manipulate", + "read_preference", + "tag_sets", + "secondary_acceptable_latency_ms", + "must_use_master", + "uuid_subtype", + "query_flags", + "kwargs", + ) + more_values_to_clone = ("_doc_cls",) + data = dict( + (k, v) + for k, v in self.__dict__.items() + if (k.startswith("_Cursor__") and k[9:] in values_to_clone) + or (k in more_values_to_clone) + ) + if deepcopy and hasattr(self, "__deepcopy"): data = self.__deepcopy(data) clone.__dict__.update(data) return clone @@ -66,9 +85,14 @@ def clone(self): """ return self.__clone(True) + def count(self) -> int: + """ + Implements a backwards-compatible count taking the same arguments as :meth:`pymongo.cursor.Cursor.count` before :mod:`pymongo` 4.x. + """ + return self.collection.count_documents(self._query_spec()) + def __copy__(self): return self.__clone(deepcopy=False) def __deepcopy__(self, memo): return self.__clone(deepcopy=True) - diff --git a/humbledb/document.py b/humbledb/document.py index e941f4b..da5d77b 100644 --- a/humbledb/document.py +++ b/humbledb/document.py @@ -1,63 +1,65 @@ -""" -""" +""" """ + import logging from functools import wraps +from typing import Optional -import six -import pymongo import pyconfig -from six.moves import xrange +import pymongo from pytool.lang import UNSET from humbledb import _version +from humbledb.cursor import Cursor +from humbledb.errors import DatabaseMismatch, MissingConfig, NoConnection from humbledb.index import Index +from humbledb.maps import DictMap, ListMap, NameMap from humbledb.mongo import Mongo -from humbledb.cursor import Cursor -from humbledb.maps import DictMap, NameMap, ListMap -from humbledb.errors import NoConnection, MissingConfig, DatabaseMismatch _ = None -COLLECTION_METHODS = set([_ for _ in dir(pymongo.collection.Collection) if not - _.startswith('_') and callable(getattr(pymongo.collection.Collection, _))]) -del _ # This is necessary since _ lingers in the module namespace otherwise +COLLECTION_METHODS = set( + [ + _ + for _ in dir(pymongo.collection.Collection) + if not _.startswith("_") and callable(getattr(pymongo.collection.Collection, _)) + ] +) +del _ # This is necessary since _ lingers in the module namespace otherwise -class Embed(six.text_type): - """ This class is used to map attribute names on embedded subdocuments. +class Embed(str): + """This class is used to map attribute names on embedded subdocuments. 
- Example usage:: + Example usage:: - class MyDoc(Document): - config_database = 'db' - config_collection = 'example' + class MyDoc(Document): + config_database = 'db' + config_collection = 'example' - embed = Embed('e') - embed.val = 'v' - embed.time = 't' + embed = Embed('e') + embed.val = 'v' + embed.time = 't' """ - def __new__(cls, value=''): - if six.PY3: - return super().__new__(cls, value) - else: - return super(Embed, cls).__new__(cls, value) + + def __new__(cls, value=""): + return super().__new__(cls, value) def as_name_map(self, base_name): - """ Return this object mapped onto :class:`~humbledb.maps.NameMap` - objects. """ + """Return this object mapped onto :class:`~humbledb.maps.NameMap` + objects.""" name_map = NameMap(base_name) for name, value in self.__dict__.items(): # Skip most everything - if not isinstance(value, six.string_types): + if not isinstance(value, str): continue # Skip private stuff - if name.startswith('_'): + if name.startswith("_"): continue # Concatonate names if base_name: - cname = base_name + '.' + value + cname = base_name + "." + value # Recursively map if isinstance(value, Embed): @@ -70,16 +72,16 @@ def as_name_map(self, base_name): return name_map def as_reverse_name_map(self, base_name): - """ Return this object mapped onto reverse-lookup - :class:`~humbledb.maps.NameMap` objects. """ + """Return this object mapped onto reverse-lookup + :class:`~humbledb.maps.NameMap` objects.""" name_map = NameMap(base_name) for name, value in self.__dict__.items(): # Skip most everything - if not isinstance(value, six.string_types): + if not isinstance(value, str): continue # Skip private stuff - if name.startswith('_'): + if name.startswith("_"): continue # Recursively map @@ -95,10 +97,11 @@ def as_reverse_name_map(self, base_name): class CollectionAttribute(object): - """ Acts as the collection attribute. Refuses to be read unless the - the executing code is in a :class:`Mongo` context or has already called - :meth:`Mongo.start`. + """Acts as the collection attribute. Refuses to be read unless the + the executing code is in a :class:`Mongo` context or has already called + :meth:`Mongo.start`. """ + def __get__(self, instance, owner): self = instance or owner database = self.config_database @@ -108,20 +111,22 @@ def __get__(self, instance, owner): # Only allow access to the collection in a Mongo context if Mongo.context: db = Mongo.context.database - if db and db.name != database: - raise DatabaseMismatch("This document is configured for " - "database %r, while the connection is using %r") + if db is not None and db.name != database: + raise DatabaseMismatch( + "This document is configured for " + "database %r, while the connection is using %r" + ) return Mongo.context.connection[database][collection] - raise NoConnection("'collection' unavailable without connection " - "context") + raise NoConnection("'collection' unavailable without connection context") class DocumentMeta(type): - """ Metaclass for Documents. 
""" - _ignore_attributes = set(['__test__']) + """Metaclass for Documents.""" + + _ignore_attributes = set(["__test__"]) _collection_methods = COLLECTION_METHODS - _wrapped_methods = set(['find', 'find_one', 'find_and_modify']) - _wrapped_doc_methods = set(['find_one', 'find_and_modify']) + _wrapped_methods = set(["find", "find_one", "find_and_modify"]) + _wrapped_doc_methods = set(["find_one", "find_and_modify"]) _update = None # Helping pylint with identifying class attributes @@ -129,18 +134,36 @@ class DocumentMeta(type): def __new__(mcs, cls_name, bases, cls_dict): # Don't process Document superclass - if cls_name == 'Document' and bases == (dict,): + if cls_name == "Document" and bases == (dict,): return type.__new__(mcs, cls_name, bases, cls_dict) # Attribute names that are configuration settings - config_names = set(['config_database', 'config_collection', - 'config_indexes']) + config_names = set(["config_database", "config_collection", "config_indexes"]) # Attribute names that conflict with the dict base class - bad_names = mcs._collection_methods | set(['clear', 'collection', - 'copy', 'fromkeys', 'get', 'has_key', 'items', 'iteritems', - 'iterkeys', 'itervalues', 'keys', 'pop', 'popitem', 'setdefault', - 'update', 'values', 'viewitems', 'viewkeys', 'viewvalues']) + bad_names = mcs._collection_methods | set( + [ + "clear", + "collection", + "copy", + "fromkeys", + "get", + "has_key", + "items", + "iteritems", + "iterkeys", + "itervalues", + "keys", + "pop", + "popitem", + "setdefault", + "update", + "values", + "viewitems", + "viewkeys", + "viewvalues", + ] + ) # Merge inherited name_maps and saved defaults name_map = NameMap() @@ -148,22 +171,18 @@ def __new__(mcs, cls_name, bases, cls_dict): saved_defaults = {} for base in reversed(bases): if issubclass(base, Document): - name_map.merge(getattr(base, '_name_map', NameMap())) - reverse_name_map.merge(getattr(base, '_reverse_name_map', - NameMap())) - saved_defaults.update(getattr(base, '_saved_defaults', {})) + name_map.merge(getattr(base, "_name_map", NameMap())) + reverse_name_map.merge(getattr(base, "_reverse_name_map", NameMap())) + saved_defaults.update(getattr(base, "_saved_defaults", {})) # Always have an _id attribute - if '_id' not in cls_dict and '_id' not in name_map: - cls_dict['_id'] = '_id' + if "_id" not in cls_dict and "_id" not in name_map: + cls_dict["_id"] = "_id" # Iterate over the names in `cls_dict` looking for attributes whose # values are string literals or `NameMap` subclasses. 
These attributes # will be mapped to document keys where the key is the value - if six.PY3: - cls_keys = list(cls_dict) - else: - cls_keys = cls_dict.keys() + cls_keys = list(cls_dict) for name in cls_keys: # Raise error on bad attribute names if name in bad_names: @@ -172,10 +191,10 @@ def __new__(mcs, cls_name, bases, cls_dict): if name in config_names: continue # Skip most everything - if not isinstance(cls_dict[name], (six.string_types, tuple)): + if not isinstance(cls_dict[name], (str, tuple)): continue # Skip private stuff - if name.startswith('_') and name != '_id': + if name.startswith("_") and name != "_id": continue value = cls_dict.get(name) @@ -189,7 +208,7 @@ def __new__(mcs, cls_name, bases, cls_dict): continue value, default = value # Check that the tuple's first value is a string key - if not isinstance(value, six.string_types): + if not isinstance(value, str): continue # If the default is a callable, it's a saved default value, so # we memoize it for later @@ -216,26 +235,26 @@ def __new__(mcs, cls_name, bases, cls_dict): reverse_name_map[value] = reverse_value # Create _*name_map attributes - cls_dict['_name_map'] = name_map - cls_dict['_reverse_name_map'] = reverse_name_map + cls_dict["_name_map"] = name_map + cls_dict["_reverse_name_map"] = reverse_name_map # Create collection attribute - cls_dict['collection'] = CollectionAttribute() + cls_dict["collection"] = CollectionAttribute() # Create saved default value attribute - cls_dict['_saved_defaults'] = saved_defaults + cls_dict["_saved_defaults"] = saved_defaults # Create the class cls = type.__new__(mcs, cls_name, bases, cls_dict) # Check all the indexes - indexes = getattr(cls, 'config_indexes', None) + indexes = getattr(cls, "config_indexes", None) if indexes is not None: if not isinstance(indexes, list): raise TypeError("'config_indexes' must be a list") - for i in xrange(len(indexes)): + for i in range(len(indexes)): index = indexes[i] - if isinstance(index, six.string_types): + if isinstance(index, str): indexes[i] = Index(index) continue elif isinstance(index, Index): @@ -257,7 +276,7 @@ def __getattr__(cls, name): return value # Check if we have a mapped attribute name - name_map = object.__getattribute__(cls, '_name_map') + name_map = object.__getattribute__(cls, "_name_map") if name in name_map: return name_map[name] @@ -265,31 +284,33 @@ def __getattr__(cls, name): return object.__getattribute__(cls, name) def _wrap(cls, func): - """ Wraps ``func`` to ensure that it has the as_class keyword - argument set to ``cls``. Also guarantees indexes. + """Wraps ``func`` to ensure that it has the as_class keyword + argument set to ``cls``. Also guarantees indexes. - :param function func: Function to wrap. + :param function func: Function to wrap. """ # We have to handle find_and_modify separately because it doesn't take # a convenient as_class keyword argument, which is really too bad. if func.__name__ in cls._wrapped_doc_methods: + @wraps(func) def doc_wrapper(*args, **kwargs): - """ Wrapper function to guarantee object typing and indexes. """ + """Wrapper function to guarantee object typing and indexes.""" cls._ensure_indexes() doc = func(*args, **kwargs) # If doc is not iterable (e.g. None), then this will error if doc: doc = cls(doc) return doc + return doc_wrapper # If we've made it this far, it's not find_and_modify, and we can do a # "normal" wrap. @wraps(func) def cursor_wrapper(*args, **kwargs): - """ Wrapper function to guarantee indexes and object typing. 
""" + """Wrapper function to guarantee indexes and object typing.""" cls._ensure_indexes() # Get the cursor cursor = func(*args, **kwargs) @@ -311,19 +332,35 @@ def _wrap_update(cls, *args, **kwargs): """ _version._clean(kwargs) - return cls.collection.update(*args, **kwargs) - def _get_update(cls): return cls._update or cls._wrap_update - def _set_update(cls, value): cls._update = value - def _del_update(cls): cls._update = None + # If the multi keyworld is set, use update_many + if kwargs.pop("multi", False): + result = cls.collection.update_many(*args, **kwargs) + else: + result = cls.collection.update_one(*args, **kwargs) + + if result.matched_count: + return result.raw_result + + return None + + def _get_update(cls): + return cls._update or cls._wrap_update + + def _set_update(cls, value): + cls._update = value + + def _del_update(cls): + cls._update = None + update = property(_get_update, _set_update, _del_update) def mapped_keys(cls): - """ Return a list of the mapped keys. """ + """Return a list of the mapped keys.""" return cls._reverse_name_map.mapped() def mapped_attributes(cls): - """ Return a list of the mapped attributes. """ + """Return a list of the mapped attributes.""" return cls._name_map.mapped() def save(cls, *args, **kwargs): @@ -341,16 +378,30 @@ def save(cls, *args, **kwargs): """ _version._clean(kwargs) - if args and kwargs.get('manipulate', True): + if args and kwargs.get("manipulate", True): cls._ensure_saved_defaults(args[0]) - return cls.collection.save(*args, **kwargs) + doc = args[0] + if not isinstance(doc, dict): + raise ValueError("Invalid document type: {}".format(type(doc))) + + if "_id" in doc: + return cls.collection.replace_one( + {"_id": doc["_id"]}, + doc, + upsert=True, + ) + else: + result = cls.collection.insert_one(doc) + if result: + return result.inserted_id def insert(cls, *args, **kwargs): """ Override collection insert method to allow saved defaults. - Takes same arguments as :meth:`pymongo.collection.Collection.insert`. + Takes same arguments as :meth:`pymongo.collection.Collection.insert` + before :mod:`pymongo` 4.x. If `manipulate` is ``False`` then the saved defaults will not be inserted. @@ -361,7 +412,7 @@ def insert(cls, *args, **kwargs): """ _version._clean(kwargs) - if args and kwargs.get('manipulate', True): + if args and kwargs.get("manipulate", True): # Insert can take an iterable of documents or a single doc doc_or_docs = args[0] if isinstance(doc_or_docs, dict): @@ -370,41 +421,87 @@ def insert(cls, *args, **kwargs): for doc in doc_or_docs: cls._ensure_saved_defaults(doc) - return cls.collection.insert(*args, **kwargs) + # If we have one doc, use insert_one, otherwise use insert_many + if isinstance(doc_or_docs, dict): + result = cls.collection.insert_one(*args, **kwargs) + if result: + return result.inserted_id + elif isinstance(doc_or_docs, list): + result = cls.collection.insert_many(*args, **kwargs) + if result: + return result.inserted_ids + else: + raise ValueError("Invalid document type: {}".format(type(doc_or_docs))) + + def find_and_modify(cls, query: dict, update: Optional[dict] = None, **kwargs): + """ + Implements a backwards-compatible find_and_modify taking the same arguments as :meth:`pymongo.collection.Collection.find_and_modify` before :mod:`pymongo` 4.x. 
+ """ + if kwargs.pop("new", False): + kwargs["return_document"] = pymongo.ReturnDocument.AFTER + + if not update: + return cls.collection.find_one_and_delete(query, **kwargs) + + # See if the document is using any of the modifier operators + replace = True + for k in update.keys(): + if k.startswith("$"): + replace = False + break + + if replace: + doc = cls.collection.find_one_and_replace(query, update, **kwargs) + else: + doc = cls.collection.find_one_and_update(query, update, **kwargs) + + if doc: + return cls(doc) + return None + + def remove(cls, query: dict, **kwargs): + """ + Implements a backwards-compatible remove taking the same arguments as :meth:`pymongo.collection.Collection.remove` before :mod:`pymongo` 4.x. + """ + multi = kwargs.pop("multi", True) + if multi: + return cls.collection.delete_many(query, **kwargs) + else: + return cls.collection.delete_one(query, **kwargs) def _ensure_saved_defaults(cls, doc): - """ Update `doc` to ensure saved defaults exist before saving. """ + """Update `doc` to ensure saved defaults exist before saving.""" # Shortcut out if we don't have any if not cls._saved_defaults: return # Iterate over the saved defaults and assign them if they don't already # exist - for key, value in six.iteritems(cls._saved_defaults): + for key, value in cls._saved_defaults.items(): if key not in doc: doc[key] = value() -@six.add_metaclass(DocumentMeta) -class Document(dict): - """ This is the base class for a HumbleDB document. It should not be used - directly, but rather configured via subclassing. +class Document(dict, metaclass=DocumentMeta): + """This is the base class for a HumbleDB document. It should not be used + directly, but rather configured via subclassing. - Example subclass:: + Example subclass:: - class BlogPost(Document): - config_database = 'db' - config_collection = 'example' + class BlogPost(Document): + config_database = 'db' + config_collection = 'example' - meta = Embed('m') - meta.tags = 't' - meta.slug = 's' - meta.published = 'p' + meta = Embed('m') + meta.tags = 't' + meta.slug = 's' + meta.published = 'p' - author = 'a' - title = 't' - body = 'b' + author = 'a' + title = 't' + body = 'b' """ + collection = None """ :class:`pymongo.collection.Collection` instance for this document. """ @@ -417,22 +514,22 @@ class BlogPost(Document): def __repr__(self): return "{}({})".format( - self.__class__.__name__, - super(Document, self).__repr__()) + self.__class__.__name__, super(Document, self).__repr__() + ) def for_json(self): - """ Return this document as a dictionary, with short key names mapped - to long names. This method is used by :meth:`pytools.json.as_json`. + """Return this document as a dictionary, with short key names mapped + to long names. This method is used by :meth:`pytools.json.as_json`. """ # Get the reverse mapped keys - reverse_name_map = object.__getattribute__(self, '_reverse_name_map') + reverse_name_map = object.__getattribute__(self, "_reverse_name_map") # Set saved default values if they aren't already # This has to be called on the class itself, not the instance type(self)._ensure_saved_defaults(self) def mapper(doc, submap): - """ Maps `doc` keys with the given `submap` substitution map. 
""" + """Maps `doc` keys with the given `submap` substitution map.""" copy = {} # A bit of trickiness here to get the default values that aren't @@ -440,11 +537,12 @@ def mapper(doc, submap): # `Document` subclass because right now defaults can only exist at # the top level document if isinstance(doc, Document): - name_map = object.__getattribute__(doc, '_name_map') + name_map = object.__getattribute__(doc, "_name_map") # The name map has no knowledge of the attribute key names, so # we have to use the reverse name map to get those - defaults = {reverse_name_map[k]: v for k, v in - six.iteritems(name_map._defaults())} + defaults = { + reverse_name_map[k]: v for k, v in name_map._defaults().items() + } copy.update(defaults) for key, value in doc.items(): @@ -465,9 +563,9 @@ def mapper(doc, submap): return copy def map_list(values, submap): - """ Maps `values` against `submap`. """ + """Maps `values` against `submap`.""" values = values[:] - for i in xrange(len(values)): + for i in range(len(values)): value = values[i] if isinstance(value, dict): # Recursively map items in a dictionary @@ -481,9 +579,9 @@ def map_list(values, submap): def __getattr__(self, name): # Get the mapped attributes - name_map = object.__getattribute__(self, '_name_map') - reverse_name_map = object.__getattribute__(self, '_reverse_name_map') - saved_defaults = object.__getattribute__(self, '_saved_defaults') + name_map = object.__getattribute__(self, "_name_map") + reverse_name_map = object.__getattribute__(self, "_reverse_name_map") + saved_defaults = object.__getattribute__(self, "_saved_defaults") # If the attribute is mapped, map it! if name in name_map: # name_map is a dict key and potentially a NameMap too here @@ -499,12 +597,10 @@ def __getattr__(self, name): # layers for unmapped items # If it's a dict, we need to keep mapping subkeys if isinstance(value, dict): - value = DictMap(value, name_map, self, key, - reverse_name_map) + value = DictMap(value, name_map, self, key, reverse_name_map) # If it's a list, we need to keep mapping subkeys elif isinstance(value, list): - value = ListMap(value, name_map, self, key, - reverse_name_map) + value = ListMap(value, name_map, self, key, reverse_name_map) return value elif isinstance(name_map, NameMap): if key in saved_defaults: @@ -527,7 +623,7 @@ def __getattr__(self, name): def __setattr__(self, name, value): # Get the mapped attributes - name_map = object.__getattribute__(self, '_name_map') + name_map = object.__getattribute__(self, "_name_map") # If it's mapped, let's map it! if name in name_map: key = name_map[name] @@ -542,7 +638,7 @@ def __setattr__(self, name, value): def __delattr__(self, name): # Get the mapped attributes - name_map = object.__getattribute__(self, '_name_map') + name_map = object.__getattribute__(self, "_name_map") # If we have the key, we delete it if name in name_map: key = name_map[name] @@ -555,34 +651,37 @@ def __delattr__(self, name): @classmethod def _ensure_indexes(cls): - """ Guarantees indexes are created once per connection instance. 
""" - ensured = getattr(cls, '_ensured', None) + """Guarantees indexes are created once per connection instance.""" + ensured = getattr(cls, "_ensured", None) if ensured: return if cls.config_indexes: for index in cls.config_indexes: - logging.getLogger(__name__).info("Ensuring index: {}" - .format(index)) + logging.getLogger(__name__).info("Ensuring index: {}".format(index)) if isinstance(index, Index): index.ensure(cls) else: # pragma: no cover + if _version._gte("4.0"): + raise RuntimeError("Pymongo 4.x does not support ensure_index") + # This code is no longer reachable with the new Indexes, # but I don't want to remove it yet - caching_key = 'cache_for' if _version._gte('2.3') else 'ttl' + caching_key = "cache_for" if _version._gte("2.3") else "ttl" kwargs = {caching_key: (60 * 60 * 24)} - cls.collection.ensure_index(getattr(cls, index), - background=True, **kwargs) + cls.collection.ensure_index( + getattr(cls, index), background=True, **kwargs + ) logging.getLogger(__name__).info("Indexing ensured.") cls._ensured = True # Create a reload hook for the first time we run if ensured is None: + @pyconfig.reload_hook def _reload(): - """ Allow index recreation if configuration settings change via - pyconfig. + """Allow index recreation if configuration settings change via + pyconfig. """ cls._ensured = False - diff --git a/humbledb/errors.py b/humbledb/errors.py index 142b96c..690047e 100644 --- a/humbledb/errors.py +++ b/humbledb/errors.py @@ -1,15 +1,16 @@ -""" -""" +""" """ + + class NoConnection(RuntimeError): - """ Raised when a connection is needed. """ + """Raised when a connection is needed.""" class NestedConnection(RuntimeError): - """ Raised when trying to nest the same connection within itself. """ + """Raised when trying to nest the same connection within itself.""" class MissingConfig(RuntimeError): - """ Raised when configuartion is not configured correctly at runtime. """ + """Raised when configuartion is not configured correctly at runtime.""" class DatabaseMismatch(RuntimeError): @@ -21,28 +22,29 @@ class DatabaseMismatch(RuntimeError): def _import_pymongo_errors(): - """ Tries to add all the pymongo exceptions to this module's namespace. """ + """Tries to add all the pymongo exceptions to this module's namespace.""" import pymongo.errors - _pymongo_errors = ['AutoReconnect', - 'BSONError', - 'CertificateError', - 'CollectionInvalid', - 'ConfigurationError', - 'ConnectionFailure', - 'DuplicateKeyError', - 'InvalidBSON', - 'InvalidDocument', - 'InvalidId', - 'InvalidName', - 'InvalidOperation', - 'InvalidStringData', - 'InvalidURI', - 'OperationFailure', - 'PyMongoError', - 'TimeoutError', - 'UnsupportedOption', - ] + _pymongo_errors = [ + "AutoReconnect", + "BSONError", + "CertificateError", + "CollectionInvalid", + "ConfigurationError", + "ConnectionFailure", + "DuplicateKeyError", + "InvalidBSON", + "InvalidDocument", + "InvalidId", + "InvalidName", + "InvalidOperation", + "InvalidStringData", + "InvalidURI", + "OperationFailure", + "PyMongoError", + "TimeoutError", + "UnsupportedOption", + ] for name in _pymongo_errors: try: @@ -50,8 +52,7 @@ def _import_pymongo_errors(): except AttributeError: pass + # Call the import helper and remove it _import_pymongo_errors() del _import_pymongo_errors - - diff --git a/humbledb/helpers.py b/humbledb/helpers.py index 8025a50..7df1c86 100644 --- a/humbledb/helpers.py +++ b/humbledb/helpers.py @@ -5,10 +5,12 @@ This module contains common helpers which make your life easier. 
""" + +import pymongo from pytool.lang import UNSET -from humbledb import Mongo -from humbledb.errors import NoConnection, DatabaseMismatch +from humbledb.errors import DatabaseMismatch, NoConnection +from humbledb.mongo import Mongo def auto_increment(database, collection, _id, field="value", increment=1): @@ -59,6 +61,7 @@ class MyDoc(Document): :type increment: int """ + def auto_incrementer(): """ Return an auto incremented value. @@ -67,13 +70,16 @@ def auto_incrementer(): # Make sure we're executing in a Mongo connection context context = Mongo.context if not context: - raise NoConnection("A connection is required for auto_increment " - "defaults to work correctly.") + raise NoConnection( + "A connection is required for auto_increment " + "defaults to work correctly." + ) - if context.database: + if context.database is not None: if context.database.name != database: - raise DatabaseMismatch("auto_increment database %r does not " - "match connection database %r") + raise DatabaseMismatch( + "auto_increment database %r does not match connection database %r" + ) # If we have a default database it should already be available db = context.database @@ -82,23 +88,29 @@ def auto_incrementer(): db = context.connection[database] # We just use this directly, instead of using a Document helper - doc = db[collection].find_and_modify({'_id': _id}, {'$inc': {field: - increment}}, new=True, upsert=True) + doc = db[collection].find_one_and_update( + {"_id": _id}, + {"$inc": {field: increment}}, + return_document=pymongo.ReturnDocument.AFTER, + upsert=True, + ) # Return the value if not doc: # TBD shakefu: Maybe a more specific error here? - raise RuntimeError("Could not get new auto_increment value for " - "%r.%r : %r" % (database, collection, _id)) + raise RuntimeError( + "Could not get new auto_increment value for " + "%r.%r : %r" % (database, collection, _id) + ) - value = doc.get('value', UNSET) + value = doc.get("value", UNSET) if value is UNSET: # TBD shakefu: Maybe a more specific error here? - raise RuntimeError("Could not get new auto_increment value for " - "%r.%r : %r" % (database, collection, _id)) + raise RuntimeError( + "Could not get new auto_increment value for " + "%r.%r : %r" % (database, collection, _id) + ) return value return auto_incrementer - - diff --git a/humbledb/index.py b/humbledb/index.py index 2b1056f..b73a787 100644 --- a/humbledb/index.py +++ b/humbledb/index.py @@ -1,73 +1,72 @@ -""" -""" -import six +""" """ + import pyconfig -from six.moves import xrange from pytool.lang import UNSET from humbledb import _version class Index(object): - """ This class is used to create more complex indices. Takes the same - arguments and keyword arguments as - :meth:`~pymongo.collection.Collection.ensure_index`. + """This class is used to create more complex indices. Takes the same + arguments and keyword arguments as + :meth:`~pymongo.collection.Collection.ensure_index`. - Example:: + Example:: - class MyDoc(Document): - config_database = 'db' - config_collection = 'example' - config_indexes = [Index('value', sparse=True)] + class MyDoc(Document): + config_database = 'db' + config_collection = 'example' + config_indexes = [Index('value', sparse=True)] - value = 'v' + value = 'v' - .. versionadded:: 2.2 + .. 
versionadded:: 2.2 """ - def __init__(self, index, cache_for=(60 * 60 * 24), background=True, - **kwargs): + + def __init__(self, index, cache_for=(60 * 60 * 24), background=True, **kwargs): self.index = index # Merge kwargs - kwargs['cache_for'] = cache_for - kwargs['background'] = background + kwargs["cache_for"] = cache_for + kwargs["background"] = background - if _version._lt('2.3') and 'cache_for' in kwargs: - kwargs['ttl'] = kwargs.pop('cache_for') + _version._clean_create_index(kwargs) self.kwargs = kwargs def ensure(self, cls): - """ Does an ensure_index call for this index with the given `cls`. + """Does an ensure_index call for this index with the given `cls`. - :param cls: A Document subclass + :param cls: A Document subclass """ # Allow disabling of index creation - if not pyconfig.get('humbledb.ensure_indexes', True): + if not pyconfig.get("humbledb.ensure_indexes", True): return # Map the attribute name to its key name, or just let it ride index = self._resolve_index(cls) - # Make the ensure index call - cls.collection.ensure_index(index, **self.kwargs) + # We could prevent multiple calls here, but we already do it in the + # calling _ensure_indexes method, so if you're calling this multiple + # times, you probably know what you're doing. + cls.collection.create_index(index, **self.kwargs) def _resolve_index(self, cls): - """ Resolves an index to its actual dot notation counterpart, or - returns the index as is. + """Resolves an index to its actual dot notation counterpart, or + returns the index as is. - :param cls: A Document subclass - :param str index: Index to resolve + :param cls: A Document subclass + :param str index: Index to resolve """ # If we have just a string, it's a simple index - if isinstance(self.index, six.string_types): + if isinstance(self.index, str): return self._resolve_name(cls, self.index) # Otherwise it must be an iterable - for i in xrange(len(self.index)): + for i in range(len(self.index)): # Of 2-tuples pair = self.index[i] if len(pair) != 2: @@ -78,19 +77,17 @@ def _resolve_index(self, cls): return self.index def _resolve_name(self, cls, name): - """ Resolve a dot notation index name to its real document keys. """ - attrs = name.split('.') + """Resolve a dot notation index name to its real document keys.""" + attrs = name.split(".") part = cls while attrs: attr = attrs.pop(0) part = getattr(part, attr, UNSET) if part is UNSET: return name - if not isinstance(part, six.string_types): + if not isinstance(part, str): raise TypeError("Invalid key: {!r}".format(part)) return part def __repr__(self): - return "{}({!r}, **{!r})".format(type(self).__name__, self.index, - self.kwargs) - + return "{}({!r}, **{!r})".format(type(self).__name__, self.index, self.kwargs) diff --git a/humbledb/maps.py b/humbledb/maps.py index 0d5828c..be41d9c 100644 --- a/humbledb/maps.py +++ b/humbledb/maps.py @@ -1,25 +1,18 @@ -""" -""" -import six +""" """ from pytool.lang import UNSET from pytool.proxy import DictProxy, ListProxy -class NameMap(six.text_type): - """ This class is used to map attribute names to document keys internally. 
- """ - def __new__(cls, value=''): - if six.PY3: - return super().__new__(cls, value) - else: - return super(NameMap, cls).__new__(cls, value) +class NameMap(str): + """This class is used to map attribute names to document keys internally.""" + + def __new__(cls, value=""): + return super().__new__(cls, value) - def __init__(self, value=''): - self._key = value.split('.')[-1] + def __init__(self, value=""): + self._key = value.split(".")[-1] self._default_value = UNSET - # TODO: Remove this later after Python3 is working - # super(NameMap, self).__init__(value) @property def key(self): @@ -28,7 +21,7 @@ def key(self): return self._key def _default(self, doc, key, reverse_name_map): - """ Return the default value for this name map. """ + """Return the default value for this name map.""" if self._default_value is not UNSET: return self._default_value # Return an empty dict map to allow sub-key assignment @@ -44,22 +37,24 @@ def __contains__(self, key): return key in self.__dict__ def filtered(self): - """ Return self.__dict__ minus any private keys. """ - return {k: v for k, v in self.__dict__.items() if not - k.startswith('_')} + """Return self.__dict__ minus any private keys.""" + return {k: v for k, v in self.__dict__.items() if not k.startswith("_")} def mapped(self): - """ Return the mapped attributes. """ + """Return the mapped attributes.""" return self.filtered().keys() def merge(self, other): - """ Merges another `.NameMap` instance into this one. """ + """Merges another `.NameMap` instance into this one.""" self.__dict__.update(other.filtered()) def _defaults(self): - """ Return a dict of default values. """ - return {n.key: n._default_value for n in self.filtered().values() if - isinstance(n, NameMap) and n._default_value is not UNSET} + """Return a dict of default values.""" + return { + n.key: n._default_value + for n in self.filtered().values() + if isinstance(n, NameMap) and n._default_value is not UNSET + } def empty(self): """ @@ -70,20 +65,19 @@ def empty(self): """ _dict = self.__dict__ - if (len(_dict) == 2 - and '_key' in _dict - and '_default_value' in _dict): + if len(_dict) == 2 and "_key" in _dict and "_default_value" in _dict: return True return False class DictMap(DictProxy): - """ This class is used to map embedded documents to their attribute names. - This class ensures that the original document is kept up to sync with - the embedded document clones via a reference to the `parent`, which at - the highest level is the main document. + """This class is used to map embedded documents to their attribute names. + This class ensures that the original document is kept up to sync with + the embedded document clones via a reference to the `parent`, which at + the highest level is the main document. 
""" + def __init__(self, value, name_map, parent, key, reverse_name_map): self._parent = parent self._key = key @@ -97,7 +91,7 @@ def _parent_mutable(self): def __getattr__(self, name): # Exclude private names from this behavior - if name.startswith('_'): + if name.startswith("_"): return object.__getattribute__(self, name) if name not in self._name_map: @@ -134,7 +128,7 @@ def __getattr__(self, name): def __setattr__(self, name, value): # Exclude private names from this behavior - if name.startswith('_'): + if name.startswith("_"): return object.__setattr__(self, name, value) if name not in self._name_map: @@ -152,7 +146,7 @@ def __setattr__(self, name, value): def __delattr__(self, name): # Exclude private names from this behavior - if name.startswith('_'): + if name.startswith("_"): return object.__delattr__(self, name) # If it's not mapped, let's delete it! @@ -202,7 +196,7 @@ def __delitem__(self, key): super(DictMap, self).__delitem__(key) def for_json(self): - """ Return this suitable for JSON encoding. """ + """Return this suitable for JSON encoding.""" mapped = {} reverse_name_map = self._reverse_name_map # We iterate over the keys contained in this. If a key is in the @@ -228,7 +222,7 @@ def __init__(self, value, name_map, parent, key, reverse_name_map): super(ListMap, self).__init__(value) def new(self): - """ Create a new embedded document in this list. """ + """Create a new embedded document in this list.""" # We start with a new, empty dictionary value = {} # Append it to ourselves @@ -237,19 +231,16 @@ def new(self): if not self._name_map.empty(): # We pass None as the 'key' so that an IndexError would be raised if # the dict map tries to modify the parent - value = DictMap(value, self._name_map, self, None, - self._reverse_name_map) + value = DictMap(value, self._name_map, self, None, self._reverse_name_map) return value def __getitem__(self, index): value = super(ListMap, self).__getitem__(index) # Only create a new DictMap if we actually map into this list if isinstance(value, dict) and not self._name_map.empty(): - value = DictMap(value, self._name_map, self, None, - self._reverse_name_map) + value = DictMap(value, self._name_map, self, None, self._reverse_name_map) return value def for_json(self): - """ Return this suitable for JSON encoding. """ + """Return this suitable for JSON encoding.""" return list(self) - diff --git a/humbledb/mongo.py b/humbledb/mongo.py index 2ae2987..29177d0 100644 --- a/humbledb/mongo.py +++ b/humbledb/mongo.py @@ -1,12 +1,11 @@ -""" -""" +""" """ + import logging import threading -import six -import pymongo import pyconfig -from pytool.lang import classproperty, UNSET +import pymongo +from pytool.lang import UNSET, classproperty from humbledb import _version from humbledb.errors import NestedConnection @@ -17,78 +16,81 @@ ssl = None __all__ = [ - 'Mongo', - ] + "Mongo", +] class MongoMeta(type): - """ Metaclass to allow :class:`Mongo` to be used as a context manager - without having to instantiate it. + """Metaclass to allow :class:`Mongo` to be used as a context manager + without having to instantiate it. """ + _connection = None def __new__(mcs, name, bases, cls_dict): - """ Return the Mongo class. """ + """Return the Mongo class.""" # This ensures that a late-declared class does not inherit an existing # connection object. 
- cls_dict['_connection'] = None + cls_dict["_connection"] = None # Choose the correct connection class - if cls_dict.get('config_connection_cls', UNSET) is UNSET: + if cls_dict.get("config_connection_cls", UNSET) is UNSET: # Are we using a replica? # XXX: Getting the connection type at class creation time rather # than connection instantiation time means that disabling # config_replica (setting to None) at runtime has no effect. I # doubt anyone would ever do this, but you never know. - _replica = cls_dict.get('config_replica', UNSET) + _replica = cls_dict.get("config_replica", UNSET) # Handle attribute descriptors responsibly - if _replica and hasattr(_replica, '__get__'): + if _replica and hasattr(_replica, "__get__"): try: _replica = _replica.__get__(None, None) - except: - raise TypeError("'%s.config_replica' appears to be a " - "descriptor and its value could not be " - "retrieved reliably." % name) - # Handle replica set connections - if _replica: - if _version._lt('2.1'): - raise TypeError("Need pymongo.version >= 2.1 for replica " - "sets.") - elif _version._gte('3.0.0'): + except Exception: + raise TypeError( + "'%s.config_replica' appears to be a " + "descriptor and its value could not be " + "retrieved reliably." % name + ) + if _version._gte("4.0"): + # It's all the same above Pymongo 4.x + conn = pymongo.MongoClient + elif _replica: + # Handle replica set connections + if _version._lt("2.1"): + raise TypeError("Need pymongo.version >= 2.1 for replica sets.") + elif _version._gte("3.0.0"): conn = pymongo.MongoClient - elif _version._gte('2.4'): + elif _version._gte("2.4"): conn = pymongo.MongoReplicaSetClient else: conn = pymongo.ReplicaSetConnection else: # Get the correct regular connection - if _version._gte('2.4'): + if _version._gte("2.4"): conn = pymongo.MongoClient else: conn = pymongo.Connection # Set our connection type - cls_dict['config_connection_cls'] = conn + cls_dict["config_connection_cls"] = conn # Specially handle base class - if name == 'Mongo' and bases == (object,): + if name == "Mongo" and bases == (object,): # Create thread local self - cls_dict['_self'] = threading.local() + cls_dict["_self"] = threading.local() return type.__new__(mcs, name, bases, cls_dict) - if cls_dict.get('config_uri', UNSET) is UNSET: + if cls_dict.get("config_uri", UNSET) is UNSET: # Ensure we have minimum configuration params - if cls_dict.get('config_host', UNSET) is UNSET: + if cls_dict.get("config_host", UNSET) is UNSET: raise TypeError("missing required 'config_host'") - if cls_dict.get('config_port', UNSET) is UNSET: + if cls_dict.get("config_port", UNSET) is UNSET: raise TypeError("missing required 'config_port'") # Validate if pymongo version supports SSL. - if (cls_dict.get('config_ssl', False) is True and - _version._lt('2.1')): - raise TypeError("Need pymongo.version >= 2.1 to use " - "SSL.") + if cls_dict.get("config_ssl", False) is True and _version._lt("2.1"): + raise TypeError("Need pymongo.version >= 2.1 to use SSL.") # Create new class cls = type.__new__(mcs, name, bases, cls_dict) @@ -96,30 +98,34 @@ def __new__(mcs, name, bases, cls_dict): # This reload hook uses a closure to access the class @pyconfig.reload_hook def _reload(): - """ A hook for reloading the connection settings with pyconfig. """ + """A hook for reloading the connection settings with pyconfig.""" cls.reconnect() return cls def start(cls): - """ Public function for manually starting a session/context. Use - carefully! + """Public function for manually starting a session/context. 
Use + carefully! """ if cls in Mongo.contexts: - raise NestedConnection("Do not nest a connection within itself, it " - "may cause undefined behavior.") - if (pyconfig.get('humbledb.allow_explicit_request', True) - and _version._lt('3.0.0')): + raise NestedConnection( + "Do not nest a connection within itself, it " + "may cause undefined behavior." + ) + if pyconfig.get("humbledb.allow_explicit_request", True) and _version._lt( + "3.0.0" + ): cls.connection.start_request() Mongo.contexts.append(cls) def end(cls): - """ Public function for manually closing a session/context. Should be - idempotent. This must always be called after :meth:`Mongo.start` - to ensure the socket is returned to the connection pool. + """Public function for manually closing a session/context. Should be + idempotent. This must always be called after :meth:`Mongo.start` + to ensure the socket is returned to the connection pool. """ - if (pyconfig.get('humbledb.allow_explicit_request', True) - and _version._lt('3.0.0')): + if pyconfig.get("humbledb.allow_explicit_request", True) and _version._lt( + "3.0.0" + ): cls.connection.end_request() try: Mongo.contexts.pop() @@ -127,10 +133,9 @@ def end(cls): pass def reconnect(cls): - """ Replace the current connection with a new connection. """ - logging.getLogger(__name__).info("Reloading '{}'" - .format(cls.__name__)) - if (cls._connection and _version._lt('3.0.0')): + """Replace the current connection with a new connection.""" + logging.getLogger(__name__).info("Reloading '{}'".format(cls.__name__)) + if cls._connection and _version._lt("3.0.0"): cls._connection.disconnect() cls._connection = cls._new_connection() @@ -141,8 +146,7 @@ def __exit__(cls, exc_type, exc_val, exc_tb): cls.end() -@six.add_metaclass(MongoMeta) -class Mongo(object): +class Mongo(object, metaclass=MongoMeta): """ Singleton context manager class for managing a single :class:`pymongo.connection.Connection` instance. It is necessary that @@ -171,12 +175,13 @@ class MyConnection(Mongo): doc = MyDoc.find_one() """ + _self = None config_uri = UNSET """ A MongoDB URI to connect to. """ - config_host = 'localhost' + config_host = "localhost" """ The host name or address to connect to. """ config_port = 27017 @@ -190,20 +195,19 @@ class MyConnection(Mongo): intelligently choose a class based on your replica settings and PyMongo version. """ - config_max_pool_size = pyconfig.setting('humbledb.connection_pool', 300) + config_max_pool_size = pyconfig.setting("humbledb.connection_pool", 300) """ This specifies the max_pool_size of the connection. """ - config_auto_start_request = pyconfig.setting('humbledb.auto_start_request', - True) + config_auto_start_request = pyconfig.setting("humbledb.auto_start_request", True) """ This specifies the auto_start_request option to the connection. """ - config_use_greenlets = pyconfig.setting('humbledb.use_greenlets', False) + config_use_greenlets = pyconfig.setting("humbledb.use_greenlets", False) """ This specifies the use_greenlets option to the connection. """ - config_tz_aware = pyconfig.setting('humbledb.tz_aware', True) + config_tz_aware = pyconfig.setting("humbledb.tz_aware", True) """ This specifies the tz_aware option to the connection. """ - config_write_concern = pyconfig.setting('humbledb.write_concern', 1) + config_write_concern = pyconfig.setting("humbledb.write_concern", 1) """ This specifies the write concern (``w=``) for this connection. This was added so that Pymongo before 2.4 will by default use ``getLastError()``. 
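For reference, the intended usage pattern from the class docstring, as a minimal sketch (host, port, and ``MyDoc`` are placeholders)::

    from humbledb import Mongo

    class MyConnection(Mongo):
        config_host = "localhost"   # placeholder host
        config_port = 27017

    with MyConnection:
        doc = MyDoc.find_one()      # MyDoc: a Document subclass defined elsewhere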
@@ -212,61 +216,54 @@ class MyConnection(Mongo): """ - config_ssl = pyconfig.setting('humbledb.ssl', False) + config_ssl = pyconfig.setting("humbledb.ssl", False) """ Specifies whether or not to use SSL for a connection. .. versionadded: 5.5 """ - config_mongo_client = pyconfig.setting('humbledb.mongo_client', {}) + config_mongo_client = pyconfig.setting("humbledb.mongo_client", {}) """ Allows free-form ``pymongo.MongoClient`` constructor parameters to be passed to this connection to support new features. .. versionadded: 5.6 """ def __new__(cls): - """ This class cannot be instantiated. """ + """This class cannot be instantiated.""" return cls @classmethod def _new_connection(cls): - """ Return a new connection to this class' database. """ + """Return a new connection to this class' database.""" kwargs = cls._connection_info() - kwargs.update({ - 'max_pool_size': cls.config_max_pool_size, - 'auto_start_request': cls.config_auto_start_request, - 'use_greenlets': cls.config_use_greenlets, - 'tz_aware': cls.config_tz_aware, - 'w': cls.config_write_concern, - 'ssl': cls.config_ssl, - }) + kwargs.update( + { + "max_pool_size": cls.config_max_pool_size, + "auto_start_request": cls.config_auto_start_request, + "use_greenlets": cls.config_use_greenlets, + "tz_aware": cls.config_tz_aware, + "w": cls.config_write_concern, + "ssl": cls.config_ssl, + } + ) kwargs.update(cls.config_mongo_client) - if _version._gte('2.1.0') and _version._lt('2.2.0'): - # This causes an error for the 2.1.x versions of Pymongo, so we - # remove it - kwargs.pop('auto_start_request') - kwargs.pop('use_greenlets') - - if _version._gte('3.0.0'): - # Handle removed keywords - kwargs.pop('use_greenlets') - kwargs.pop('auto_start_request') - # Handle changed keywords - kwargs['maxPoolSize'] = kwargs.pop('max_pool_size') - # Handle other 3.0 stuff - if kwargs.get('ssl') and ssl: - kwargs.setdefault('ssl_cert_reqs', ssl.CERT_NONE) + _version._clean_connection_kwargs(kwargs) if cls.config_replica: - kwargs['replicaSet'] = cls.config_replica - logging.getLogger(__name__).info("Creating new MongoDB connection " - "to '{}:{}' replica: {}".format(cls.config_host, - cls.config_port, cls.config_replica)) + kwargs["replicaSet"] = cls.config_replica + logging.getLogger(__name__).info( + "Creating new MongoDB connection to '{}:{}' replica: {}".format( + cls.config_host, cls.config_port, cls.config_replica + ) + ) else: - logging.getLogger(__name__).info("Creating new MongoDB connection " - "to '{}:{}'".format(cls.config_host, cls.config_port)) + logging.getLogger(__name__).info( + "Creating new MongoDB connection to '{}:{}'".format( + cls.config_host, cls.config_port + ) + ) return cls.config_connection_cls(**kwargs) @@ -279,14 +276,14 @@ def _connection_info(cls): """ if cls.config_uri: - return {'host': cls.config_uri} + return {"host": cls.config_uri} - return {'host': cls.config_host, 'port': cls.config_port} + return {"host": cls.config_host, "port": cls.config_port} @classproperty def connection(cls): - """ Return the current connection. If no connection exists, one is - created. + """Return the current connection. If no connection exists, one is + created. """ if not cls._connection: cls._connection = cls._new_connection() @@ -294,15 +291,15 @@ def connection(cls): @classproperty def contexts(cls): - """ Return the current context stack. 
""" - if not hasattr(Mongo._self, 'contexts'): + """Return the current context stack.""" + if not hasattr(Mongo._self, "contexts"): Mongo._self.contexts = [] return Mongo._self.contexts @classproperty def context(cls): - """ Return the current context (a :class:`.Mongo` subclass) if it - exists or ``None``. + """Return the current context (a :class:`.Mongo` subclass) if it + exists or ``None``. """ try: return Mongo.contexts[-1] @@ -319,12 +316,9 @@ def database(cls): .. note:: This requires ``pymongo >= 2.6.0``. """ - if _version._lt('2.6.0'): + if _version._lt("2.6.0"): return None try: return cls.connection.get_default_database() except pymongo.errors.ConfigurationError: return None - - - diff --git a/humbledb/report.py b/humbledb/report.py index 21a53bb..161b356 100644 --- a/humbledb/report.py +++ b/humbledb/report.py @@ -2,19 +2,19 @@ Preaggregated reporting """ -import random + import calendar import datetime +import random from collections import defaultdict -import six import pytool -from six.moves import xrange from pytool.lang import classproperty import humbledb -from humbledb import Document, Embed, Index, _version - +from humbledb import _version +from humbledb.document import Document, Embed +from humbledb.index import Index # Interval and Period constants YEAR = 5 @@ -25,12 +25,12 @@ # Constants used for informative string messages _PERIOD_NAMES = { - YEAR: "YEAR", - MONTH: "MONTH", - DAY: "DAY", - HOUR: "HOUR", - MINUTE: "MINUTE", - } + YEAR: "YEAR", + MONTH: "MONTH", + DAY: "DAY", + HOUR: "HOUR", + MINUTE: "MINUTE", +} class Report(Document): @@ -38,6 +38,7 @@ class Report(Document): A report document. """ + config_period = MONTH """ The period for which this report stores data. There will be one document created per period for each event. For example, if period is @@ -47,7 +48,9 @@ class Report(Document): """ The intervals at which event counts are recorded. The intervals listed here must be less than or equal to the period for this report. """ - config_id_format = "%(event)s@%(year)04d%(month)02d%(day)02d-%(hour)02d:%(minute)02d" + config_id_format = ( + "%(event)s@%(year)04d%(month)02d%(day)02d-%(hour)02d:%(minute)02d" + ) """ The format for the ``_id`` for a report document. If the event is placed first in the string, then the documents will be spread more evenly on sharding, but an index on ``meta.period`` is needed for range queries. @@ -60,19 +63,20 @@ class Report(Document): attempted, from 0.0 to 1.0. Set this to 0 to disable future preallocation. """ - config_indexes = [Index([('meta.period', humbledb.ASC), ('meta.event', - humbledb.ASC)])] + config_indexes = [ + Index([("meta.period", humbledb.ASC), ("meta.event", humbledb.ASC)]) + ] """ Default indexes. """ - meta = Embed('u') - meta.period = 'p' - meta.event = 'e' + meta = Embed("u") + meta.period = "p" + meta.event = "e" - year = 'Y' - month = 'M' - day = 'd' - hour = 'h' - minute = 'm' + year = "Y" + month = "M" + day = "d" + hour = "h" + minute = "m" # Mapping of intervals to their document keys. This is used by # :meth:`_map_interval`. 
This has to be created after class declaration so @@ -95,13 +99,13 @@ def record_id(cls, event, stamp): """ period = cls._period(stamp) info = { - 'event': event, - 'year': period.year, - 'month': period.month, - 'day': period.day, - 'hour': period.hour, - 'minute': period.minute, - } + "event": event, + "year": period.year, + "month": period.month, + "day": period.day, + "hour": period.hour, + "minute": period.minute, + } return cls.config_id_format % info @classmethod @@ -123,13 +127,13 @@ def record(cls, event, stamp=None, safe=False, count=1): :type count: int """ - if not isinstance(count, six.integer_types): - raise ValueError("'count' must be int or long, got %r instead" % - type(count)) + if not isinstance(count, int): + raise ValueError("'count' must be int, got %r instead" % type(count)) if stamp and not isinstance(stamp, (datetime.datetime, datetime.date)): - raise ValueError("'stamp' must be datetime or date, got %r instead" - % type(stamp)) + raise ValueError( + "'stamp' must be datetime or date, got %r instead" % type(stamp) + ) # Get our stamp as UTC time or use the current time stamp = pytool.time.as_utc(stamp) if stamp else pytool.time.utcnow() @@ -138,10 +142,10 @@ def record(cls, event, stamp=None, safe=False, count=1): # Get the update query update = cls._update_query(stamp, count) # Get our query doc - doc = {'_id': cls.record_id(event, stamp)} + doc = {"_id": cls.record_id(event, stamp)} _opts = {} - if _version._lt('3.0.0'): - _opts['safe'] = safe + if _version._lt("3.0.0"): + _opts["safe"] = safe # Update/upsert the document, hooray cls.update(doc, update, upsert=True, **_opts) @@ -181,7 +185,7 @@ def _update_query(cls, stamp, count=1): for interval in cls.config_intervals: update.update(cls._update_clause(interval, stamp, count)) - return {'$inc': update} + return {"$inc": update} @classmethod def _update_clause(cls, interval, stamp, count=1): @@ -208,7 +212,7 @@ def _update_clause(cls, interval, stamp, count=1): # Build a dotted key name like 'y.12.25' intervals = [0, minute, hour, day, month] key = intervals[interval:period] + [cls._map_interval(interval)] - key = '.'.join(str(s) for s in reversed(key)) + key = ".".join(str(s) for s in reversed(key)) return {key: count} @@ -224,13 +228,15 @@ def _map_interval(cls, interval): # Memoize the intervals to the class so we don't rebuild this dict # with every lookup if not cls._intervals: - cls._intervals.update({ - YEAR: cls.year, - MONTH: cls.month, - DAY: cls.day, - HOUR: cls.hour, - MINUTE: cls.minute - }) + cls._intervals.update( + { + YEAR: cls.year, + MONTH: cls.month, + DAY: cls.day, + HOUR: cls.hour, + MINUTE: cls.minute, + } + ) return cls._intervals[interval] @classmethod @@ -276,15 +282,16 @@ def _preallocate(cls, event, stamp): return # Do a fast check if the document exists - if cls.find({cls._id: cls.record_id(event, stamp)}).limit(1).count(): + # if cls.find({cls._id: cls.record_id(event, stamp)}).limit(1).count(): + if cls.find_one({cls._id: cls.record_id(event, stamp)}): return # Get our query and update clauses query, update = cls._preallocate_query(event, stamp) try: _opts = {} - if _version._lt('3.0.0'): - _opts['safe'] = True + if _version._lt("3.0.0"): + _opts["safe"] = True # We always want preallocation to be "safe" in order to avoid race # conditions with the subsequent update cls.update(query, update, upsert=True, **_opts) @@ -318,7 +325,7 @@ def _preallocate_query(cls, event, stamp): period = cls.config_period # Build the base query, which is just a lookup against the id - query = {'_id': 
cls.record_id(event, stamp)} + query = {"_id": cls.record_id(event, stamp)} # Start with an empty update clause update = {} @@ -326,7 +333,7 @@ def _preallocate_query(cls, event, stamp): key = cls._map_interval(interval) # Update the query to exclude documents which already have a value # for each interval key - query[key] = {'$exists': 0} + query[key] = {"$exists": 0} # Update the update clause with the preallocated structures update[key] = cls._preallocate_interval(period, interval, stamp) @@ -334,7 +341,7 @@ def _preallocate_query(cls, event, stamp): update[cls.meta.period] = cls._period(stamp) # Make the update clause a $set - update = {'$set': update} + update = {"$set": update} return query, update @@ -374,8 +381,10 @@ def _preallocate_interval(cls, period, interval, stamp, hint=None): elif interval <= MONTH and period == YEAR: count = 12 + 1 - return [cls._preallocate_interval(period - 1, interval, stamp, r) for r - in xrange(start, count)] + return [ + cls._preallocate_interval(period - 1, interval, stamp, r) + for r in range(start, count) + ] @classmethod def _period(cls, stamp): @@ -443,6 +452,7 @@ class PageViews(Report): print 'home', count.timestamp, count """ + def __init__(self, cls, interval): self.cls = cls self.interval = interval @@ -452,14 +462,16 @@ def __init__(self, cls, interval): # We need to get a document key that works best for the interval we're # looking for - self.query_interval = max([0] + [k for k in self.cls.config_intervals - if k <= self.interval]) + self.query_interval = max( + [0] + [k for k in self.cls.config_intervals if k <= self.interval] + ) # If the query_interval is equal to 0, it means we can't satisfy the # required precision for this query type if not self.query_interval: - raise ValueError("Unable to satisfy precision: %r" % ( - _PERIOD_NAMES[self.interval])) + raise ValueError( + "Unable to satisfy precision: %r" % (_PERIOD_NAMES[self.interval]) + ) self.query_key = self.cls._map_interval(self.query_interval) def __call__(self, event, regex=False, anywhere=False): @@ -513,12 +525,14 @@ def _get_range(self, start, stop): query_key = self.query_key # Get our results - results = self.cls.find(query, {query_key: 1, self.cls.meta.event: 1, - self.cls.meta.period: 1}, sort=[(self.cls.meta.period, 1)]) + results = self.cls.find( + query, + {query_key: 1, self.cls.meta.event: 1, self.cls.meta.period: 1}, + sort=[(self.cls.meta.period, 1)], + ) # Now we have to parse the results for the maximum ease of consumption - results = self._parse_results(results, start, stop, query_key, - query_interval) + results = self._parse_results(results, start, stop, query_key, query_interval) # We need to coerce the results according to whether or not we have # a distinct event, or we're using a regex query @@ -546,16 +560,18 @@ def _range_query(self, start, stop): # The base query looks for any report documents matching the period query = { - period_key: {'$gte': starting_period}, - period_key: {'$lte': ending_period}, - } + period_key: { + "$gte": starting_period, + "$lte": ending_period, + }, + } # If the event is a regex, we make it a regex query against _id event = self.event if event and self.regex: - if not self.anywhere and not event.startswith('^'): - event = '^' + event - query['_id'] = {'$regex': event} + if not self.anywhere and not event.startswith("^"): + event = "^" + event + query["_id"] = {"$regex": event} # Otherwise we just query against the indexed event field elif event and not self.regex: @@ -618,8 +634,7 @@ def _parse_results(self, results, 
start, stop, query_key, query_interval): # Iterate over the parsed counts and timestamps, which will come # according to the doc's interval - for stamp, count in _parse_section(values, key_interval, - doc_period): + for stamp, count in _parse_section(values, key_interval, doc_period): # Ensure we only take values from within the query frame if stamp < start: # If we're before the start, we skip @@ -692,11 +707,10 @@ def _coerce_index(self, index, now, stop=False): # If it's a date, make a datetime out of it if isinstance(index, datetime.date): - return datetime.datetime(*index.timetuple()[:6], - tzinfo=pytool.time.UTC()) + return datetime.datetime(*index.timetuple()[:6], tzinfo=pytool.time.UTC()) # If it's an integer, we have to handle it depending on the interval - if isinstance(index, six.integer_types): + if isinstance(index, int): now = now or pytool.time.utcnow() return self._coerce_int(index, now, stop) @@ -749,8 +763,7 @@ def _coerce_int(self, index, now, stop=False): return _relative_period(MONTH, now, 1) self._check_range(index, 1, end_of_month + 1) - return datetime.datetime(now.year, now.month, index, - tzinfo=now.tzinfo) + return datetime.datetime(now.year, now.month, index, tzinfo=now.tzinfo) if interval == HOUR: if stop and index == 24: @@ -759,8 +772,9 @@ def _coerce_int(self, index, now, stop=False): return _relative_period(DAY, now, 1) self._check_range(index, 0, 24) - return datetime.datetime(now.year, now.month, now.day, index, - tzinfo=now.tzinfo) + return datetime.datetime( + now.year, now.month, now.day, index, tzinfo=now.tzinfo + ) if interval == MINUTE: if stop and index == 60: @@ -768,8 +782,9 @@ def _coerce_int(self, index, now, stop=False): # return the start of the next hour return _relative_period(HOUR, now, 1) self._check_range(index, 0, 60) - return datetime.datetime(now.year, now.month, now.day, now.hour, - index, tzinfo=now.tzinfo) + return datetime.datetime( + now.year, now.month, now.day, now.hour, index, tzinfo=now.tzinfo + ) @staticmethod def _check_range(value, minimum, maximum): @@ -785,8 +800,9 @@ def _check_range(value, minimum, maximum): """ if value < minimum or value > maximum: - raise IndexError("Value %r out of range [%r, %r]" % (value, - minimum, maximum)) + raise IndexError( + "Value %r out of range [%r, %r]" % (value, minimum, maximum) + ) class ReportCount(int): @@ -800,6 +816,7 @@ class ReportCount(int): :type timestamp: datetime.datetime """ + def __new__(cls, value, timestamp): instance = super(ReportCount, cls).__new__(cls, value) instance.timestamp = timestamp @@ -867,11 +884,11 @@ def _relative_period(period, stamp, diff): # Hours is just seconds, right? if period == HOUR: - return _period(period, stamp + datetime.timedelta(seconds=diff*60*60)) + return _period(period, stamp + datetime.timedelta(seconds=diff * 60 * 60)) # Minutes are also seconds if period == MINUTE: - return _period(period, stamp + datetime.timedelta(seconds=diff*60)) + return _period(period, stamp + datetime.timedelta(seconds=diff * 60)) # This algorithm for calculating a relative date is borrowed in part # from relativedelta in the python-dateutil library. 
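The month and year arithmetic referenced in the comment above is unchanged by this patch; as a rough illustration of the technique (not the library's exact code), rolling a timestamp by whole months and flooring to the start of the resulting month looks like::

    import datetime

    def add_months(stamp: datetime.datetime, diff: int) -> datetime.datetime:
        """Illustrative only: start of the month `diff` months away from `stamp`."""
        month = stamp.month - 1 + diff     # zero-based month arithmetic
        year = stamp.year + month // 12    # carry overflow/underflow into the year
        month = month % 12 + 1             # back to 1-12
        return datetime.datetime(year, month, 1, tzinfo=stamp.tzinfo)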
@@ -909,11 +926,13 @@ def _period(period, stamp): return pytool.time.floor_day(stamp) if period == HOUR: seconds = stamp.minute * 60 + stamp.second - return stamp - datetime.timedelta(seconds=seconds, - microseconds=stamp.microsecond) + return stamp - datetime.timedelta( + seconds=seconds, microseconds=stamp.microsecond + ) if period == MINUTE: - return stamp - datetime.timedelta(seconds=stamp.second, - microseconds=stamp.microsecond) + return stamp - datetime.timedelta( + seconds=stamp.second, microseconds=stamp.microsecond + ) def _parse_section(values, interval, stamp): @@ -925,11 +944,11 @@ def _parse_section(values, interval, stamp): """ # If it's a number, we yield it - if isinstance(values, six.integer_types): + if isinstance(values, int): yield stamp, values else: # If it's a list, we iterate over it - for i in xrange(len(values)): + for i in range(len(values)): if interval == MINUTE: stamp = stamp.replace(minute=i) elif interval == HOUR: @@ -947,10 +966,9 @@ def _parse_section(values, interval, stamp): # Get the value we're working with value = values[i] # If it's a number, yield it - if isinstance(value, six.integer_types): + if isinstance(value, int): yield stamp, value continue # If it's a list, recursively process it for vals in _parse_section(value, interval - 1, stamp): yield vals - diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..acfb718 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,38 @@ +[project] +name = "humbledb" +version = "6.0.0" +description = "HumbleDB - MongoDB Object-Document Mapper" +readme = "README.md" +license = "MIT" +authors = [ + {name = "Jacob Alheid", email = "shakefu@gmail.com"}, +] +requires-python = ">=3.10,<4.0" +dependencies = [ + "pymongo >= 2.0.1", + "pyconfig", + "pytool >= 3.4.1", +] + +[dependency-groups] +dev = [ + "coveralls>=4.0.1", + "pytest>=8.4.0", + "pytest-cov>=6.2.1", + "pytest-docker>=3.2.2", +] +docs = [ + "sphinx-rtd-theme>=3.0.0,<4.0.0", + "sphinx-autodoc-typehints>=3.0.0,<4.0.0", + "sphinx>=8.0.0, <9.0.0", +] + +# This is required so setuptools doesn't see other directories as a package +[tool.setuptools] +packages = ["humbledb"] + +[tool.coverage.run] +omit = [ + "humbledb/_version.py", + "test/**", +] diff --git a/script/docs b/script/docs new file mode 100755 index 0000000..4423a03 --- /dev/null +++ b/script/docs @@ -0,0 +1,20 @@ +#!/usr/bin/env bash + +# This script will install the project and all its dependencies into a virtual +# environment and then build the documentation. +# +# It maintains parity with the Read the Docs build process. + +# Use a separate venv from the main development venv +# shellcheck disable=SC2034 +UV_PROJECT_ENVIRONMENT="${UV_PROJECT_ENVIRONMENT:-./docs/.venv/}" + +# Create a virtual environment for the documentation +uv venv "${UV_PROJECT_ENVIRONMENT}" + +# Install the project and all its dependencies into the virtual environment +uv sync --frozen --all-extras --all-groups --no-editable --link-mode=copy +uv pip install --link-mode=copy . 
+
+# Build the documentation, failing on warnings
+uv run python -m sphinx -T -W --keep-going -b html -d ./docs/_build/doctrees -D language=en ./docs ./docs/_output
diff --git a/setup.py b/setup.py
deleted file mode 100644
index 5ccb2bd..0000000
--- a/setup.py
+++ /dev/null
@@ -1,46 +0,0 @@
-from setuptools import setup, find_packages
-
-
-def version():
-    try:
-        import re
-        return re.search("^__version__ = '(.*)'",
-                         open('humbledb/__init__.py').read(), re.M).group(1)
-    except:
-        raise RuntimeError("Could not get version")
-
-
-setup(
-    name='humbledb',
-    version=version(),
-    description="HumbleDB - MongoDB Object-Document Mapper",
-    author="Jacob Alheid",
-    author_email="jake@about.me",
-    packages=find_packages(exclude=['test']),
-    install_requires=[
-        'pymongo >= 2.0.1',
-        'pyconfig',
-        'pytool >= 3.4.1',
-        'six',
-        ],
-    test_suite='nose.collector',
-    tests_require=[
-        'nose',
-        'mock',
-        ],
-    classifiers=[
-        'Development Status :: 5 - Production/Stable',
-        'Intended Audience :: Developers',
-        'License :: OSI Approved :: Apache Software License',
-        'Operating System :: OS Independent',
-        'Programming Language :: Python :: 2.7',
-        'Programming Language :: Python :: 3.3',
-        'Programming Language :: Python :: 3.4',
-        'Programming Language :: Python :: 3.5',
-        'Programming Language :: Python :: 3.6',
-        'Topic :: Software Development :: Libraries',
-        'Topic :: Utilities',
-        'Topic :: Database',
-        ]
-    )
-
diff --git a/test/conftest.py b/test/conftest.py
new file mode 100644
index 0000000..e88e00f
--- /dev/null
+++ b/test/conftest.py
@@ -0,0 +1,155 @@
+import logging
+import re
+from typing import Generator
+
+import pyconfig
+import pymongo
+import pytest
+
+import humbledb
+
+from .util import database_name
+
+
+@pytest.fixture(scope="session")
+def docker_compose_file(pytestconfig):
+    """
+    Override default `docker-compose.yml` path.
+
+    This is a `pytest-docker` fixture.
+    """
+    # TODO(shakefu): Figure out if this override is actually used
+
+    return "docker-compose.yml"
+
+
+def mongodb_ready(host, port):
+    """Return a function that checks if MongoDB is ready."""
+
+    def ping():
+        try:
+            with pymongo.timeout(0.1):
+                client = pymongo.MongoClient(host, port)
+                client.admin.command("ping")
+        except pymongo.errors.ServerSelectionTimeoutError:
+            return False
+        return True
+
+    return ping
+
+
+@pytest.fixture(scope="session")
+def mongodb_service(docker_ip, docker_services):
+    """
+    Ensure that MongoDB service is up and responsive.
+
+    This is a `pytest-docker` fixture.
+    """
+    port = docker_services.port_for("mongodb", 27017)
+    host = docker_ip
+
+    docker_services.wait_until_responsive(
+        timeout=30.0, pause=0.1, check=mongodb_ready(host, port)
+    )
+
+    return host, port
+
+
+@pytest.fixture(scope="session")
+def mongodb_uri(mongodb_service: tuple[str, int]):
+    """
+    Return a MongoDB URI for the test service.
+
+    The host and port default to the docker-compose service, but can be
+    overridden by setting the pyconfig variables `humbledb.test.db.host`
+    and `humbledb.test.db.port`.
+
+    Args:
+        mongodb_service (tuple[str, int]): Host and port of the MongoDB
+            test service.
+
+    Returns:
+        str: A `mongodb://` URI pointing at the test database.
+    """
+    host, port = mongodb_service
+    host = pyconfig.get("humbledb.test.db.host", host)
+    port = pyconfig.get("humbledb.test.db.port", port)
+    uri = "mongodb://{}:{}/{}".format(host, port, database_name())
+    return uri
+
+
+@pytest.fixture()
+def mongodb(mongodb_service: tuple[str, int]) -> Generator[humbledb.Mongo, None, None]:
+    """
+    Yield a `humbledb.Mongo` subclass connected to the test service.
+
+    Any databases created through this connection (except `admin`) are
+    dropped when the fixture is torn down.
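+
+    A test that needs a live connection might use it like this (illustrative
+    only; `MyDoc` stands in for any mapped `Document` subclass):
+
+        def test_find_one_returns_none_when_empty(mongodb):
+            with mongodb:
+                assert MyDoc.find_one({"does-not-exist": 1}) is None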
+ """ + host, port = mongodb_service + + class db(humbledb.Mongo): + config_host = host + config_port = port + + yield db + + # Drop all the dbs created attached to this fixture + names = db.connection.list_database_names() + for name in names: + if name == "admin": + continue + db.connection.drop_database(name) + + +@pytest.fixture(scope="module") +def DBTest( + mongodb_service: tuple[str, int], +) -> Generator[humbledb.Mongo, None, None]: + """ + Return a DBTest class with the MongoDB connection details. + """ + host, port = mongodb_service + + class DBTest(humbledb.Mongo): + config_host = host + config_port = port + + yield DBTest + + # Drop all the dbs created attached to this fixture + names = DBTest.connection.list_database_names() + for name in names: + if name == "admin": + continue + DBTest.connection.drop_database(name) + + +@pytest.fixture() +def enable_sharding(DBTest): + """Enable sharding for `collection`.""" + + def _enable_sharding(collection, key): + conn = DBTest.connection + try: + conn.admin.command("listShards") + except humbledb.errors.OperationFailure as exc: + if re.match(".*no such.*listShards", str(exc)): + logging.getLogger(__name__).info("Sharding not available.") + return False + raise + try: + conn.admin.command("enableSharding", database_name()) + except humbledb.errors.OperationFailure as exc: + if "already" not in str(exc): + raise + try: + conn.admin.command( + "shardCollection", database_name() + "." + collection, key=key + ) + except humbledb.errors.OperationFailure as exc: + if "already" not in str(exc): + raise + logging.getLogger(__name__).info( + "Sharding enabled for %r.%r on %r.", database_name(), collection, key + ) + return True + + return _enable_sharding diff --git a/test/test_humbledb/test_array.py b/test/test_humbledb/test_array.py index 80c4cf5..d292992 100644 --- a/test/test_humbledb/test_array.py +++ b/test/test_humbledb/test_array.py @@ -1,402 +1,399 @@ import random -from six.moves import xrange +import pytest from humbledb import Document from humbledb.array import Array -from test.util import (database_name, DBTest, ok_, eq_, enable_sharding, - SkipTest, raises) +from ..util import database_name -class TestArray(Array): + +class ArrayTest(Array): config_database = database_name() - config_collection = 'arrays' + config_collection = "arrays" config_max_size = 3 config_padding = 100 -def teardown(): - DBTest.connection.drop_database(database_name()) - - def _word(): - """ Return a random "word". 
""" + """Return a random "word".""" return str(random.randint(1, 15000)) def test_document_without_configuration_works_as_mapper(): class Entry(Document): - name = 'n' - display = 'd' + name = "n" + display = "d" entry = Entry() entry.name = "Test" - eq_(entry, {Entry.name: "Test"}) - eq_(entry.for_json(), {u'name': 'Test'}) + assert entry == {Entry.name: "Test"} + assert entry.for_json() == {"name": "Test"} -def test_creates_a_new_page_on_first_insert(): - t = TestArray('new_page', 0) +def test_creates_a_new_page_on_first_insert(DBTest): + t = ArrayTest("new_page", 0) with DBTest: t.append("Test") - eq_(t.pages(), 1) + assert t.pages() == 1 -def test_all_returns_single_insert_ok(): - t = TestArray('single_insert', 0) +def test_all_returns_single_insert_ok(DBTest): + t = ArrayTest("single_insert", 0) v = "Test" with DBTest: - eq_(t.append(v), 1) - eq_(t.all(), [v]) + assert t.append(v) == 1 + assert t.all() == [v] -def test_appends_over_max_size_creates_second_page(): - t = TestArray('appends_second_page', 0) +def test_appends_over_max_size_creates_second_page(DBTest): + t = ArrayTest("appends_second_page", 0) with DBTest: - eq_(t.append(_word()), 1) - eq_(t.append(_word()), 1) - eq_(t.append(_word()), 2) - eq_(t.append(_word()), 2) - eq_(t.pages(), 2) - eq_(len(t.all()), 4) + assert t.append(_word()) == 1 + assert t.append(_word()) == 1 + assert t.append(_word()) == 2 + assert t.append(_word()) == 2 + assert t.pages() == 2 + assert len(t.all()) == 4 -def test_multiple_appends_with_zero_pages_works_ok(): - t = TestArray('zero_pages', 0) +def test_multiple_appends_with_zero_pages_works_ok(DBTest): + t = ArrayTest("zero_pages", 0) with DBTest: - eq_(t.append(_word()), 1) - t = TestArray('zero_pages', 0) + assert t.append(_word()) == 1 + t = ArrayTest("zero_pages", 0) with DBTest: - eq_(t.append(_word()), 1) - eq_(len(t.all()), 2) + assert t.append(_word()) == 1 + assert len(t.all()) == 2 -def test_length_for_single_page_works(): - t = TestArray('length_single', 0) +def test_length_for_single_page_works(DBTest): + t = ArrayTest("length_single", 0) with DBTest: t.append(_word()) - eq_(t.length(), 1) + assert t.length() == 1 t.append(_word()) - eq_(t.length(), 2) + assert t.length() == 2 t.append(_word()) - eq_(t.length(), 3) + assert t.length() == 3 -def test_length_for_multiple_pages_works(): - t = TestArray('length_multi', 0) +def test_length_for_multiple_pages_works(DBTest): + t = ArrayTest("length_multi", 0) with DBTest: - for i in xrange(10): + for i in range(10): t.append(_word()) - eq_(t.length(), 10) - eq_(t.pages(), 4) + assert t.length() == 10 + assert t.pages() == 4 -def test_remove_works_with_single_page(): - t = TestArray('remove', 0) +def test_remove_works_with_single_page(DBTest): + t = ArrayTest("remove", 0) v = "Test" with DBTest: t.append(_word()) t.append(v) t.append(_word()) - eq_(t.length(), 3) + assert t.length() == 3 t.remove(v) - eq_(t.length(), 2) + assert t.length() == 2 -def test_remove_works_with_multiple_pages(): - t = TestArray('remove_multi_page', 0) +def test_remove_works_with_multiple_pages(DBTest): + t = ArrayTest("remove_multi_page", 0) v = "Test" with DBTest: - for i in xrange(5): + for i in range(5): t.append(_word()) t.append(v) - for i in xrange(5): + for i in range(5): t.append(_word()) - eq_(t.length(), 11) - ok_(v in t.all()) + assert t.length() == 11 + assert v in t.all() t.remove(v) - eq_(t.length(), 10) - ok_(v not in t.all()) + assert t.length() == 10 + assert v not in t.all() -def test_remove_works_with_embedded_documents(): - t = 
TestArray('remove_embedded_docs') +def test_remove_works_with_embedded_documents(DBTest): + t = ArrayTest("remove_embedded_docs") with DBTest: - for i in xrange(5): - t.append({'i': i, 'k': i}) - eq_(t.length(), 5) - t.remove({'i': 3}) - eq_(t.length(), 4) + for i in range(5): + t.append({"i": i, "k": i}) + assert t.length() == 5 + t.remove({"i": 3}) + assert t.length() == 4 -def test_remove_works_with_complex_embedded_documents_and_dot_notation(): - t = TestArray('remove_complex_embedded_docs') +def test_remove_works_with_complex_embedded_documents_and_dot_notation(DBTest): + t = ArrayTest("remove_complex_embedded_docs") with DBTest: - for i in xrange(5): - t.append({'foo': 'bar', 'fnord': {'i': i, 'spam': 'eggs'}}) - eq_(t.length(), 5) - ok_(t.remove({'fnord.i': 3})) - eq_(t.length(), 4) + for i in range(5): + t.append({"foo": "bar", "fnord": {"i": i, "spam": "eggs"}}) + assert t.length() == 5 + assert t.remove({"fnord.i": 3}) + assert t.length() == 4 -def test_multiple_removes_maintains_correct_count_with_dupes_on_diff_pages(): - t = TestArray('remove_count') +def test_multiple_removes_maintains_correct_count_with_dupes_on_diff_pages(DBTest): + t = ArrayTest("remove_count") with DBTest: - t.append({'i': 9}) - for i in xrange(3): - t.append({'i': i}) - t.append({'i': 9}) - t.remove({'i': 9}) - pages = list(TestArray.find({'_id': t._id_regex})) + t.append({"i": 9}) + for i in range(3): + t.append({"i": i}) + t.append({"i": 9}) + t.remove({"i": 9}) + pages = list(ArrayTest.find({"_id": t._id_regex})) for page in pages: - eq_(page.size, len(page.entries)) + assert page.size == len(page.entries) -def test_multiple_removes_maintains_correct_count_with_dupes_on_same_page(): - t = TestArray('remove_count_dupes') +def test_multiple_removes_maintains_correct_count_with_dupes_on_same_page(DBTest): + t = ArrayTest("remove_count_dupes") with DBTest: - for i in xrange(3): - t.append({'i': i}) - t.append({'i': 9}) - t.append({'i': 9}) - eq_(t.length(), 5) - t.remove({'i': 9}) - eq_(t.length(), 4) - pages = list(TestArray.find({'_id': t._id_regex})) + for i in range(3): + t.append({"i": i}) + t.append({"i": 9}) + t.append({"i": 9}) + assert t.length() == 5 + t.remove({"i": 9}) + assert t.length() == 4 + pages = list(ArrayTest.find({"_id": t._id_regex})) for page in pages: - eq_(page.size, len(page.entries)) - t.remove({'i': 9}) - eq_(t.length(), 3) - pages = list(TestArray.find({'_id': t._id_regex})) + assert page.size == len(page.entries) + t.remove({"i": 9}) + assert t.length() == 3 + pages = list(ArrayTest.find({"_id": t._id_regex})) for page in pages: - eq_(page.size, len(page.entries)) - t.remove({'i': 9}) - eq_(t.length(), 3) + assert page.size == len(page.entries) + t.remove({"i": 9}) + assert t.length() == 3 -def test_remove_one_more_time_just_for_kicks(): - t = TestArray('never_stop_testing_remove') +def test_remove_one_more_time_just_for_kicks(DBTest): + t = ArrayTest("never_stop_testing_remove") with DBTest: - for i in xrange(10): + for i in range(10): t.append(i) - eq_(t[:], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]) - eq_(t.length(), 10) + assert t[:] == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] + assert t.length() == 10 t.remove(2) - eq_(t[0:], [0, 1, 3, 4, 5, 6, 7, 8, 9]) - eq_(t.length(), 9) + assert t[0:] == [0, 1, 3, 4, 5, 6, 7, 8, 9] + assert t.length() == 9 t.remove(9) - eq_(t[:3], [0, 1, 3, 4, 5, 6, 7, 8]) - eq_(t.length(), 8) + assert t[:3] == [0, 1, 3, 4, 5, 6, 7, 8] + assert t.length() == 8 -def test_sharded_remove_works(): - t = TestArray('test_sharded_remove') - if not 
enable_sharding(TestArray._page.config_collection, {'_id': 1}): - raise SkipTest +def test_sharded_remove_works(DBTest, enable_sharding): + t = ArrayTest("test_sharded_remove") + if not enable_sharding(ArrayTest._page.config_collection, {"_id": 1}): + pytest.skip("Sharding not enabled") with DBTest: for word in "The quick brown fox jumps over the lazy dog.".split(): t.append(word) - eq_(t.length(), 9) - eq_(t.pages(), 4) - eq_(t[3], []) - t.remove('lazy') - eq_(t.length(), 8) - eq_(t[2], ['the', 'dog.']) - t.remove('fox') - eq_(t.length(), 7) - eq_(t[1], ['jumps', 'over']) - - -def test_sharded_remove_works_with_embedded_documents(): - t = TestArray('test_sharded_remove_embedded') - if not enable_sharding(TestArray._page.config_collection, {'_id': 1}): - raise SkipTest + assert t.length() == 9 + assert t.pages() == 4 + assert t[3] == [] + t.remove("lazy") + assert t.length() == 8 + assert t[2] == ["the", "dog."] + t.remove("fox") + assert t.length() == 7 + assert t[1] == ["jumps", "over"] + + +def test_sharded_remove_works_with_embedded_documents(DBTest, enable_sharding): + t = ArrayTest("test_sharded_remove_embedded") + if not enable_sharding(ArrayTest._page.config_collection, {"_id": 1}): + pytest.skip("Sharding not enabled") with DBTest: for word in "The quick brown fox jumps over the lazy dog.".split(): - t.append({'word': word}) - eq_(t.length(), 9) - eq_(t.pages(), 4) - eq_(t[3], []) - t.remove({'word': 'lazy'}) - eq_(t.length(), 8) - eq_(t[2], [{'word': 'the'}, {'word': 'dog.'}]) - t.remove({'word': 'fox'}) - eq_(t.length(), 7) - eq_(t[1], [{'word': 'jumps'}, {'word': 'over'}]) - - -@raises(TypeError) + t.append({"word": word}) + assert t.length() == 9 + assert t.pages() == 4 + assert t[3] == [] + t.remove({"word": "lazy"}) + assert t.length() == 8 + assert t[2] == [{"word": "the"}, {"word": "dog."}] + t.remove({"word": "fox"}) + assert t.length() == 7 + assert t[1] == [{"word": "jumps"}, {"word": "over"}] + + def test_class_errors_if_missing_database(): - class Test(Array): - config_collection = 'c' + with pytest.raises(TypeError): + + class Test(Array): + config_collection = "c" -@raises(TypeError) def test_class_errors_if_missing_collection(): - class Test(Array): - config_database = 'd' + with pytest.raises(TypeError): + + class Test(Array): + config_database = "d" -@raises(RuntimeError) -def test_append_fails_if_page_is_missing(): - t = TestArray('append_fails_with_missing_page', 0) +def test_append_fails_if_page_is_missing(DBTest): + t = ArrayTest("append_fails_with_missing_page", 0) with DBTest: t.append(1) t._page.remove({t._page._id: t._id_regex}) - t.append(1) + with pytest.raises(RuntimeError): + t.append(1) -def test_clear_removes_all_pages(): - t = TestArray('clear', 0) +def test_clear_removes_all_pages(DBTest): + t = ArrayTest("clear", 0) with DBTest: - for i in xrange(10): + for i in range(10): t.append(_word()) - eq_(t.length(), 10) - eq_(t.pages(), 4) + assert t.length() == 10 + assert t.pages() == 4 t.clear() - eq_(t.length(), 0) - eq_(t.pages(), 0) + assert t.length() == 0 + assert t.pages() == 0 -def test_append_works_after_clearing(): - t = TestArray('clear_and_append', 0) +def test_append_works_after_clearing(DBTest): + t = ArrayTest("clear_and_append", 0) with DBTest: - for i in xrange(10): + for i in range(10): t.append(_word()) - eq_(t.length(), 10) - eq_(t.pages(), 4) + assert t.length() == 10 + assert t.pages() == 4 t.clear() - eq_(t.length(), 0) - eq_(t.pages(), 0) + assert t.length() == 0 + assert t.pages() == 0 t.append(1) - eq_(t.length(), 1) - 
eq_(t.pages(), 1) + assert t.length() == 1 + assert t.pages() == 1 -def test_getitem_works_for_single_page(): - t = TestArray('getitem_single', 0) +def test_getitem_works_for_single_page(DBTest): + t = ArrayTest("getitem_single", 0) with DBTest: - for i in xrange(10): + for i in range(10): t.append(i) - eq_(t.pages(), 4) - eq_(t[0], [0, 1, 2]) - eq_(t[1], [3, 4, 5]) - eq_(t[2], [6, 7, 8]) - eq_(t[3], [9]) + assert t.pages() == 4 + assert t[0] == [0, 1, 2] + assert t[1] == [3, 4, 5] + assert t[2] == [6, 7, 8] + assert t[3] == [9] -def test_getitem_works_for_slices(): - t = TestArray('getitem_sliced', 0) +def test_getitem_works_for_slices(DBTest): + t = ArrayTest("getitem_sliced", 0) with DBTest: - for i in xrange(10): + for i in range(10): t.append(i) - eq_(t.pages(), 4) - eq_(t[0:1], [0, 1, 2]) - eq_(t[1:2], [3, 4, 5]) - eq_(t[0:2], [0, 1, 2, 3, 4, 5]) - eq_(t[2:4], [6, 7, 8, 9]) - eq_(t[0:100], list(range(10))) + assert t.pages() == 4 + assert t[0:1] == [0, 1, 2] + assert t[1:2] == [3, 4, 5] + assert t[0:2] == [0, 1, 2, 3, 4, 5] + assert t[2:4] == [6, 7, 8, 9] + assert t[0:100] == list(range(10)) -@raises(TypeError) def test_getitem_does_not_work_for_extended_slices(): - t = TestArray('test', 0) - t[0:1:2] + t = ArrayTest("test", 0) + with pytest.raises(TypeError): + t[0:1:2] -@raises(TypeError) def test_getitem_disallows_non_integers(): - t = TestArray('test', 0) - t['foo'] + t = ArrayTest("test", 0) + with pytest.raises(TypeError): + t["foo"] -@raises(IndexError) -def test_getitem_raises_indexerror_for_out_of_range_when_empty(): - t = TestArray('getitem_out_of_range_empty', 0) +def test_getitem_raises_indexerror_for_out_of_range_when_empty(DBTest): + t = ArrayTest("getitem_out_of_range_empty", 0) with DBTest: - t[0] + with pytest.raises(IndexError): + t[0] -@raises(IndexError) -def test_getitem_raises_indexerror_for_out_of_range(): - t = TestArray('getitem_out_of_range', 0) +def test_getitem_raises_indexerror_for_out_of_range(DBTest): + t = ArrayTest("getitem_out_of_range", 0) with DBTest: - for i in xrange(10): + for i in range(10): t.append(i) - ok_(t[0]) - ok_(t[1]) - ok_(t[2]) - ok_(t[3]) - t[4] + assert t[0] + assert t[1] + assert t[2] + assert t[3] + with pytest.raises(IndexError): + t[4] -def test_find_gives_us_a_working_find(): - t = TestArray('find', 0) +def test_find_gives_us_a_working_find(DBTest): + t = ArrayTest("find", 0) with DBTest: - eq_(list(TestArray.find({'_id': t._id_regex})), []) + assert list(ArrayTest.find({"_id": t._id_regex})) == [] -def test_entries_returns_key_on_class(): - t = TestArray('entries', 0) +def test_entries_returns_key_on_class(DBTest): + t = ArrayTest("entries", 0) with DBTest: - eq_(TestArray.entries, t._page.entries) - eq_(TestArray.entries, TestArray._page.entries) - eq_(TestArray.entries, 'e') + assert ArrayTest.entries == t._page.entries + assert ArrayTest.entries == ArrayTest._page.entries + assert ArrayTest.entries == "e" -def test_size_returns_key_on_class(): - t = TestArray('size', 0) +def test_size_returns_key_on_class(DBTest): + t = ArrayTest("size", 0) with DBTest: - eq_(TestArray.size, t._page.size) - eq_(TestArray.size, TestArray._page.size) - eq_(TestArray.size, 's') + assert ArrayTest.size == t._page.size + assert ArrayTest.size == ArrayTest._page.size + assert ArrayTest.size == "s" -def test_unset_page_count_queries_for_the_page_count(): - t = TestArray('unset_page_count', 0) +def test_unset_page_count_queries_for_the_page_count(DBTest): + t = ArrayTest("unset_page_count", 0) with DBTest: - for i in xrange(6): + for i in 
range(6): t.append(i) - t2 = TestArray('unset_page_count') + t2 = ArrayTest("unset_page_count") t2.append(7) - eq_(t.pages(), t2.page_count) - eq_(t2.page_count, 3) - eq_(t[2], [7]) + assert t.pages() == t2.page_count + assert t2.page_count == 3 + assert t[2] == [7] -def test_all_returns_unmapped_entries(): - t = TestArray('all_unmapped') +def test_all_returns_unmapped_entries(DBTest): + t = ArrayTest("all_unmapped") with DBTest: - for i in xrange(3): + for i in range(3): t.append({str(i): i}) for o in t.all(): - eq_(type(o), dict) + assert isinstance(o, dict) -def test_iteration(): - t = TestArray('iteration') +def test_iteration(DBTest): + t = ArrayTest("iteration") with DBTest: - l = set(xrange(15)) - for i in l: + items = set(range(15)) + for i in items: t.append(i) for page in t: if not page: break - eq_(len(page), 3) + assert len(page) == 3 for e in page: - l.remove(e) - eq_(l, set()) + items.remove(e) + assert items == set() -def test_array_regex_ignores_dots(): - t = TestArray('with.dot') - t2 = TestArray('with_dot') +def test_array_regex_ignores_dots(DBTest): + t = ArrayTest("with.dot") + t2 = ArrayTest("with_dot") with DBTest: t.append(1) t2.append(2) - eq_(t.all(), [1]) - eq_(t2.all(), [2]) - + assert t.all() == [1] + assert t2.all() == [2] diff --git a/test/test_humbledb/test_cursor.py b/test/test_humbledb/test_cursor.py index cd06f35..3122d47 100644 --- a/test/test_humbledb/test_cursor.py +++ b/test/test_humbledb/test_cursor.py @@ -1,76 +1,72 @@ from copy import copy, deepcopy +from unittest import mock -import mock - -from ..util import * from humbledb import Document from humbledb.cursor import Cursor - -def teardown(): - DBTest.connection.drop_database(database_name()) +from ..util import database_name class DocTest(Document): config_database = database_name() - config_collection = 'test' + config_collection = "test" - user_name = 'u' + user_name = "u" -def test_cloned_cursor_still_a_humbledb_cursor(): +def test_cloned_cursor_still_a_humbledb_cursor(DBTest): with DBTest: cursor = DocTest.find() cursor = cursor.clone() - is_instance_(cursor, Cursor) - is_subclass_(cursor._doc_cls, DocTest) + assert isinstance(cursor, Cursor) + assert issubclass(cursor._doc_cls, DocTest) -def test_cloned_cursor_returns_correct_type(): +def test_cloned_cursor_returns_correct_type(DBTest): with DBTest: # Ensure we have a document DocTest.insert({}) # Get the cursor cursor = DocTest.find() - is_instance_(cursor[0], DocTest) + assert isinstance(cursor[0], DocTest) # Check the clone cursor = cursor.clone() - is_instance_(cursor[0], DocTest) + assert isinstance(cursor[0], DocTest) -def test_copy_of_cursor_returns_correct_type(): +def test_copy_of_cursor_returns_correct_type(DBTest): with DBTest: # Ensure we have a document DocTest.insert({}) # Get the cursor cursor = DocTest.find() - is_instance_(cursor[0], DocTest) + assert isinstance(cursor[0], DocTest) # Check the clone cursor = copy(cursor) - is_instance_(cursor[0], DocTest) + assert isinstance(cursor[0], DocTest) -def test_deepcopy_of_cursor_returns_correct_type(): +def test_deepcopy_of_cursor_returns_correct_type(DBTest): with DBTest: # Ensure we have a document DocTest.insert({}) # Get the cursor cursor = DocTest.find() - is_instance_(cursor[0], DocTest) + assert isinstance(cursor[0], DocTest) # Check the clone cursor = deepcopy(cursor) - is_instance_(cursor[0], DocTest) + assert isinstance(cursor[0], DocTest) -def test_if_a_cursor_is_not_returned_properly_we_exit_quickly(): +def 
test_if_a_cursor_is_not_returned_properly_we_exit_quickly(DBTest): with DBTest: - with mock.patch.object(DocTest, 'collection') as coll: - coll.find.__name__ = 'find' + with mock.patch.object(DocTest, "collection") as coll: + coll.find.__name__ = "find" cursor = DocTest.find() - is_(cursor, coll.find.return_value) + assert cursor is coll.find.return_value -def test_cursor_ensures_document_types_when_iterating_explicitly(): +def test_cursor_ensures_document_types_when_iterating_explicitly(DBTest): with DBTest: # Ensure we have a document DocTest.insert({}) @@ -78,15 +74,14 @@ def test_cursor_ensures_document_types_when_iterating_explicitly(): cursor = DocTest.find() cursor = iter(cursor) item = cursor.next() - is_instance_(item, DocTest) + assert isinstance(item, DocTest) -def test_cursor_ensures_document_types_when_iterating_to_list(): +def test_cursor_ensures_document_types_when_iterating_to_list(DBTest): with DBTest: # Ensure we have a document DocTest.insert({}) # Get the cursor cursor = DocTest.find() items = list(cursor) - is_instance_(items[0], DocTest) - + assert isinstance(items[0], DocTest) diff --git a/test/test_humbledb/test_document.py b/test/test_humbledb/test_document.py index b2dd3fe..842d7ea 100644 --- a/test/test_humbledb/test_document.py +++ b/test/test_humbledb/test_document.py @@ -1,225 +1,270 @@ -import six -import mock -import pytool +from unittest import mock + import pyconfig -from six.moves import xrange +import pytest +import pytool import humbledb -from humbledb import Mongo, Document, Embed, _version -from ..util import eq_, ok_, raises, DBTest, database_name +from humbledb import Document, Embed, _version +from ..util import database_name # The safe= keyword doesn't exist in 3.0 -if _version._lt('3.0.0'): - _safe = {'safe': True} +if _version._lt("3.0.0"): + _safe = {"safe": True} else: _safe = {} -def teardown(): - DBTest.connection.drop_database(database_name()) - - def cache_for(val): - # This is a work around for the version changing the cache argument - if _version._lt('2.3'): - return {'ttl': val} - return {'cache_for': val} + """Return the expected cache argument based on the version, used for + asserting against mock calls.""" + if _version._lt("2.3"): + return {"ttl": val} + return {"cache_for": val} class DocTest(Document): config_database = database_name() - config_collection = 'test' + config_collection = "test" - user_name = 'u' + user_name = "u" class EmbedTestDoc(Document): - attr = 'a' - attr2 = 'a2' + attr = "a" + attr2 = "a2" - embed = Embed('e') - embed.attr = 'a' + embed = Embed("e") + embed.attr = "a" - embed.embed = Embed('e') - embed.embed.attr = 'a' + embed.embed = Embed("e") + embed.embed.attr = "a" def test_delete(): n = DocTest() - n.user_name = 'test' - ok_(DocTest.user_name in n) + n.user_name = "test" + assert DocTest.user_name in n del n.user_name - eq_(DocTest.user_name in n, False) + assert DocTest.user_name not in n -@raises(RuntimeError) def test_without_context(): - DocTest.find_one() + with pytest.raises(RuntimeError): + DocTest.find_one() -@raises(TypeError) def test_bad_name(): - class Test(Document): - items = 'i' + with pytest.raises(TypeError): + class Test(Document): + items = "i" -@raises(RuntimeError) -def test_missing_database(): - class Test(Document): - config_collection = 'test' - with DBTest: - Test.collection +def test_missing_database(DBTest): + with pytest.raises(RuntimeError): + class Test(Document): + config_collection = "test" -@raises(RuntimeError) -def test_missing_collection(): - class Test(Document): - 
config_database = database_name() + with DBTest: + Test.collection - with DBTest: - Test.collection +def test_missing_collection(DBTest): + with pytest.raises(RuntimeError): -@raises(AttributeError) -def test_bad_attribute(): - with DBTest: - DocTest.foo + class Test(Document): + config_database = database_name() + + with DBTest: + Test.collection + + +def test_bad_attribute(DBTest): + with pytest.raises(AttributeError): + with DBTest: + DocTest.foo def test_ignore_method(): class Test(Document): config_database = database_name() - config_collection = 'test' + config_collection = "test" def test(self): pass - ok_(callable(Test.test)) + assert callable(Test.test) def test_unmapped_fields(): - n = DocTest(foo='bar') - ok_('foo' in n) - eq_(n['foo'], 'bar') - ok_('foo' in n.for_json()) - eq_(n.for_json()['foo'], 'bar') + n = DocTest(foo="bar") + assert "foo" in n + assert n["foo"] == "bar" + assert "foo" in n.for_json() + assert n.for_json()["foo"] == "bar" def test_instance_dictproxy_attr(): _doc = DocTest() - _doc.user_name = 'value' - eq_(_doc.user_name, 'value') - eq_(DocTest().user_name, {}) + _doc.user_name = "value" + assert _doc.user_name == "value" + assert DocTest().user_name == {} -def test_ensure_indexes_called(): +def test_ensure_indexes_called(DBTest): + if _version._gte("4.0"): + pytest.skip("ensure_index was removed in Pymongo 4.x") + class Test(Document): config_database = database_name() - config_collection = 'test' - config_indexes = ['user_name'] + config_collection = "test" + config_indexes = ["user_name"] - user_name = 'u' + user_name = "u" with DBTest: - with mock.patch.object(Test, '_ensure_indexes') as _ensure: - eq_(Test._ensure_indexes, _ensure) + with mock.patch.object(Test, "_ensure_indexes") as _ensure: + assert Test._ensure_indexes == _ensure Test._ensured = None Test.find_one() _ensure.assert_called_once() -def test_ensure_indexes_calls_ensure_index(): +def test_ensure_indexes_calls_ensure_index_pymongo_4(DBTest): + if _version._lt("4.0"): + pytest.skip("create_index was introduced in Pymongo 4.x") + class Test(Document): config_database = database_name() - config_collection = 'test' - config_indexes = ['user_name'] + config_collection = "test" + config_indexes = ["user_name"] - user_name = 'u' + user_name = "u" with DBTest: - with mock.patch.object(Test, 'collection') as coll: - coll.find_one.__name__ = 'find_one' + with mock.patch.object(Test, "collection") as coll: + coll.find_one.__name__ = "find_one" + Test._ensured = None + Test.find_one() + coll.create_index.assert_called_with(Test.user_name, background=True) + + +def test_ensure_indexes_calls_ensure_index(DBTest): + if _version._gte("4.0"): + pytest.skip("ensure_index was removed in Pymongo 4.x") + + class Test(Document): + config_database = database_name() + config_collection = "test" + config_indexes = ["user_name"] + + user_name = "u" + + with DBTest: + with mock.patch.object(Test, "collection") as coll: + coll.find_one.__name__ = "find_one" Test._ensured = None Test.find_one() coll.ensure_index.assert_called_with( - Test.user_name, - background=True, - **cache_for(60*60*24)) + Test.user_name, background=True, **cache_for(60 * 60 * 24) + ) -def test_ensure_indexes_reload_hook(): +def test_ensure_indexes_calls_create_index_pymongo_4(DBTest): + if _version._lt("4.0"): + pytest.skip("create_index was introduced in Pymongo 4.x") + class Test(Document): config_database = database_name() - config_collection = 'test' - config_indexes = ['user_name'] + config_collection = "test" + config_indexes = 
["user_name"] - user_name = 'u' + user_name = "u" + + with DBTest: + with mock.patch.object(Test, "collection") as coll: + coll.find_one.__name__ = "find_one" + Test._ensured = None + Test.find_one() + coll.create_index.assert_called_with(Test.user_name, background=True) + + +def test_ensure_indexes_reload_hook(DBTest): + class Test(Document): + config_database = database_name() + config_collection = "test" + config_indexes = ["user_name"] + + user_name = "u" with DBTest: Test.find_one() - eq_(Test._ensured, True) + assert Test._ensured is True pyconfig.reload() - eq_(Test._ensured, False) + assert Test._ensured is False -def test_wrap_methods(): +def test_wrap_methods(DBTest): with DBTest: - with mock.patch.object(DocTest, '_wrap') as _wrap: - _wrap.return_value = '_wrapper' - eq_(DocTest.find, _wrap.return_value) + with mock.patch.object(DocTest, "_wrap") as _wrap: + _wrap.return_value = "_wrapper" + assert DocTest.find == _wrap.return_value _wrap.assert_called_once() -def test_wrap_method_behaves_itself(): +def test_wrap_method_behaves_itself(DBTest): with DBTest: - with mock.patch.object(DocTest, 'collection') as coll: - coll.find.__name__ = 'find' + with mock.patch.object(DocTest, "collection") as coll: + coll.find.__name__ = "find" coll.find.return_value = mock.Mock(spec=humbledb.cursor.Cursor) DocTest.find() coll.find.assert_called_with() -def test_update_wrapping(): +def test_update_wrapping(DBTest): with DBTest: - eq_(DocTest._wrap_update, DocTest.update) + assert DocTest._wrap_update == DocTest.update def test_document_repr(): # Coverage all the coverage! - d = {'foo': 'bar'} - eq_(repr(DocTest(d)), "DocTest({})".format(repr(d))) + d = {"foo": "bar"} + assert repr(DocTest(d)) == "DocTest({})".format(repr(d)) def test_for_json(): - eq_(DocTest({'u': 'test_name'}).for_json(), {'user_name': 'test_name'}) + assert DocTest({"u": "test_name"}).for_json() == {"user_name": "test_name"} def test_for_json_list(): - eq_(DocTest({'u': ["foo", ["bar"]]}).for_json(), {'user_name': ["foo", - ["bar"]]}) + assert DocTest({"u": ["foo", ["bar"]]}).for_json() == { + "user_name": ["foo", ["bar"]] + } def test_for_json_embedded_list(): - eq_(EmbedTestDoc({'e': [{'e': [{'a': 1}]}]}).for_json(), {'embed': - [{'embed': [{'attr': 1}]}]}) + assert EmbedTestDoc({"e": [{"e": [{"a": 1}]}]}).for_json() == { + "embed": [{"embed": [{"attr": 1}]}] + } def test_non_mapped_attribute_assignment_works_fine(): d = DocTest() d.foo = "bar" - eq_(d.foo, "bar") + assert d.foo == "bar" -@raises(AttributeError) def test_non_mapped_attribute_deletion_works(): d = DocTest() d.foo = "bar" - eq_(d.foo, "bar") + assert d.foo == "bar" del d.foo - d.foo + with pytest.raises(AttributeError): + d.foo def test_nonstring(): @@ -227,50 +272,50 @@ def test_nonstring(): class _TestNonString(Document): config_database = database_name() - config_collection = 'test' + config_collection = "test" foo = 2 bar = True cls = object instance = _instance - ok = 'OK' + ok = "OK" - eq_(_TestNonString._name_map.filtered(), {'ok': 'OK'}) - eq_(_TestNonString.foo, 2) - eq_(_TestNonString.bar, True) - eq_(_TestNonString.cls, object) - eq_(_TestNonString.instance, _instance) - eq_(_TestNonString.ok, 'OK') - eq_(_TestNonString().foo, 2) - eq_(_TestNonString().bar, True) - eq_(_TestNonString().cls, object) - eq_(_TestNonString().instance, _instance) - eq_(_TestNonString().ok, {}) + assert _TestNonString._name_map.filtered() == {"ok": "OK"} + assert _TestNonString.foo == 2 + assert _TestNonString.bar is True + assert _TestNonString.cls is object + assert 
_TestNonString.instance is _instance + assert _TestNonString.ok == "OK" + assert _TestNonString().foo == 2 + assert _TestNonString().bar is True + assert _TestNonString().cls is object + assert _TestNonString().instance is _instance + assert _TestNonString().ok == {} def test_property_attribute(): class _TestProperty(Document): config_database = database_name() - config_collection = 'test' + config_collection = "test" @property def attr(self): return self - foo = 'bar' + foo = "bar" - eq_(_TestProperty._name_map.filtered(), {'foo': 'bar'}) + assert _TestProperty._name_map.filtered() == {"foo": "bar"} tp = _TestProperty() - eq_(tp.attr, tp) + assert tp.attr is tp -def test_inheritance(): +def test_inheritance(DBTest): class DocTest2(DocTest): pass - eq_(DocTest2.user_name, DocTest.user_name) - eq_(DocTest2.config_database, DocTest.config_database) - eq_(DocTest2.config_collection, DocTest.config_collection) + assert DocTest2.user_name == DocTest.user_name + assert DocTest2.config_database == DocTest.config_database + assert DocTest2.config_collection == DocTest.config_collection # This is to ensure the collection is accessible, e.g. not raising an error with DBTest: @@ -279,247 +324,247 @@ class DocTest2(DocTest): def test_inheritance_combined(): class DocTest2(DocTest): - new_name = 'n' + new_name = "n" - eq_(DocTest2.new_name, 'n') - eq_(DocTest2.user_name, DocTest.user_name) + assert DocTest2.new_name == "n" + assert DocTest2.user_name == DocTest.user_name def test_classproperty_attribute(): class _TestClassProp(Document): config_database = database_name() - config_collection = 'test' + config_collection = "test" @pytool.lang.classproperty def attr(cls): return cls - eq_(_TestClassProp.attr, _TestClassProp) - eq_(_TestClassProp().attr, _TestClassProp) + assert _TestClassProp.attr is _TestClassProp + assert _TestClassProp().attr is _TestClassProp -def test_self_insertion(): +def test_self_insertion(DBTest): t = DocTest() with DBTest: type(t).insert(t) - ok_(t._id) + assert t._id -def test_cls_self_insertion(): +def test_cls_self_insertion(DBTest): with DBTest: - DocTest.insert({'_id': 'tsci', 't': True}) - ok_(DocTest.find_one({'_id': 'tsci'})) + DocTest.insert({"_id": "tsci", "t": True}) + assert DocTest.find_one({"_id": "tsci"}) -@raises(AttributeError) -def test_collection_attributes_not_accessible_from_instance(): +def test_collection_attributes_not_accessible_from_instance(DBTest): t = DocTest() - with DBTest: - t.find + with pytest.raises(AttributeError): + with DBTest: + t.find -def test_collection_accessible_from_instance(): +def test_collection_accessible_from_instance(DBTest): t = DocTest() with DBTest: t.collection def test_attr(): - eq_(EmbedTestDoc.attr, 'a') + assert EmbedTestDoc.attr == "a" def test_embed(): - eq_(EmbedTestDoc.embed, 'e') + assert EmbedTestDoc.embed == "e" def test_embed_attr(): - eq_(EmbedTestDoc.embed.attr, 'e.a') + assert EmbedTestDoc.embed.attr == "e.a" def test_embed_embed(): - eq_(EmbedTestDoc.embed.embed, 'e.e') + assert EmbedTestDoc.embed.embed == "e.e" def test_embed_embed_attr(): - eq_(EmbedTestDoc.embed.embed.attr, 'e.e.a') + assert EmbedTestDoc.embed.embed.attr == "e.e.a" def test_instance_attr(): t = EmbedTestDoc() - t['a'] = 'hello' - eq_(t.attr, 'hello') + t["a"] = "hello" + assert t.attr == "hello" def test_instance_embed_attr(): t = EmbedTestDoc() - t['e'] = {} - t['e']['a'] = 'hello' - eq_(t.embed.attr, 'hello') + t["e"] = {} + t["e"]["a"] = "hello" + assert t.embed.attr == "hello" def test_instance_embed_embed_attr(): t = EmbedTestDoc() - 
t['e'] = {} - t['e']['e'] = {} - t['e']['e']['a'] = 'hello' - eq_(t.embed.embed.attr, 'hello') + t["e"] = {} + t["e"]["e"] = {} + t["e"]["e"]["a"] = "hello" + assert t.embed.embed.attr == "hello" def test_instance_replace_attr(): t = EmbedTestDoc() - t['a'] = 'hello' - t.attr = 'goodbye' - eq_(t['a'], 'goodbye') - eq_(t.attr, 'goodbye') + t["a"] = "hello" + t.attr = "goodbye" + assert t["a"] == "goodbye" + assert t.attr == "goodbye" def test_instance_replace_embed_attr(): t = EmbedTestDoc() - t['e'] = {} - t['e']['a'] = 'hello' - t.embed.attr = 'goodbye' - eq_(t['e']['a'], 'goodbye') - eq_(t.embed.attr, 'goodbye') + t["e"] = {} + t["e"]["a"] = "hello" + t.embed.attr = "goodbye" + assert t["e"]["a"] == "goodbye" + assert t.embed.attr == "goodbye" def test_instance_replace_embed_embed_attr(): t = EmbedTestDoc() - t['e'] = {} - t['e']['e'] = {} - t['e']['e']['a'] = 'hello' - t.embed.embed.attr = 'goodbye' - eq_(t['e']['e']['a'], 'goodbye') - eq_(t.embed.embed.attr, 'goodbye') + t["e"] = {} + t["e"]["e"] = {} + t["e"]["e"]["a"] = "hello" + t.embed.embed.attr = "goodbye" + assert t["e"]["e"]["a"] == "goodbye" + assert t.embed.embed.attr == "goodbye" def test_instance_set_attr(): t = EmbedTestDoc() - t.attr = 'hello' - eq_(t['a'], 'hello') - eq_(t.attr, 'hello') + t.attr = "hello" + assert t["a"] == "hello" + assert t.attr == "hello" def test_instance_set_embed_attr(): t = EmbedTestDoc() - t.embed.attr = 'hello' - eq_(t['e']['a'], 'hello') - eq_(t.embed.attr, 'hello') + t.embed.attr = "hello" + assert t["e"]["a"] == "hello" + assert t.embed.attr == "hello" def test_instance_set_embed_embed_attr(): t = EmbedTestDoc() - t.embed.embed.attr = 'hello' - eq_(t['e']['e']['a'], 'hello') - eq_(t.embed.embed.attr, 'hello') + t.embed.embed.attr = "hello" + assert t["e"]["e"]["a"] == "hello" + assert t.embed.embed.attr == "hello" def test_instance_set_embed_embed_attr2(): t = EmbedTestDoc() - t.embed.embed.attr = 'hello' - eq_(t, {'e': {'e': {'a': 'hello'}}}) + t.embed.embed.attr = "hello" + assert t == {"e": {"e": {"a": "hello"}}} def test_instance_set_embed_embed_attr_side_effects(): t = EmbedTestDoc() - t.embed.embed.attr = 'hello' + t.embed.embed.attr = "hello" t.embed.embed.attr - eq_(t['e']['e']['a'], 'hello') - eq_(t.embed.embed.attr, 'hello') + assert t["e"]["e"]["a"] == "hello" + assert t.embed.embed.attr == "hello" def test_instance_embed_override(): t = EmbedTestDoc() - t.embed = 'hello' - eq_(t, {'e': 'hello'}) + t.embed = "hello" + assert t == {"e": "hello"} def test_instance_embed_embed_override(): t = EmbedTestDoc() - t.embed.embed = 'hello' - eq_(t, {'e': {'e': 'hello'}}) + t.embed.embed = "hello" + assert t == {"e": {"e": "hello"}} def test_delete_attr(): t = EmbedTestDoc() - t.attr = 'hello' - eq_(t, {'a': 'hello'}) + t.attr = "hello" + assert t == {"a": "hello"} del t.attr - eq_(t, {}) + assert t == {} def test_delete_embed_attr(): t = EmbedTestDoc() - t.embed.attr = 'hello' - eq_(t, {'e': {'a': 'hello'}}) + t.embed.attr = "hello" + assert t == {"e": {"a": "hello"}} del t.embed.attr - eq_(t, {}) + assert t == {} def test_delete_embed_embed_attr(): t = EmbedTestDoc() - t.embed.embed.attr = 'hello' - eq_(t, {'e': {'e': {'a': 'hello'}}}) + t.embed.embed.attr = "hello" + assert t == {"e": {"e": {"a": "hello"}}} del t.embed.embed.attr - eq_(t, {}) + assert t == {} def test_delete_partial_key(): t = EmbedTestDoc() - t.embed.embed.attr = 'hello' - t['e']['e']['b'] = 'world' - eq_(t, {'e': {'e': {'a': 'hello', 'b': 'world'}}}) + t.embed.embed.attr = "hello" + t["e"]["e"]["b"] = "world" + assert t 
== {"e": {"e": {"a": "hello", "b": "world"}}} del t.embed.embed.attr - eq_(t, {'e': {'e': {'b': 'world'}}}) + assert t == {"e": {"e": {"b": "world"}}} def test_delete_subdoc(): t = EmbedTestDoc() - t.embed.embed.attr = 'hello' - t['e']['e']['b'] = 'world' - eq_(t, {'e': {'e': {'a': 'hello', 'b': 'world'}}}) + t.embed.embed.attr = "hello" + t["e"]["e"]["b"] = "world" + assert t == {"e": {"e": {"a": "hello", "b": "world"}}} del t.embed - eq_(t, {}) + assert t == {} def test_delete_subsubdoc(): t = EmbedTestDoc() - t.embed.embed.attr = 'hello' - t['e']['e']['b'] = 'world' - eq_(t, {'e': {'e': {'a': 'hello', 'b': 'world'}}}) + t.embed.embed.attr = "hello" + t["e"]["e"]["b"] = "world" + assert t == {"e": {"e": {"a": "hello", "b": "world"}}} del t.embed.embed - eq_(t, {}) + assert t == {} def test_embed_for_json(): t = EmbedTestDoc() - t.embed.embed.attr = 'hello' - eq_(t.for_json(), {'embed': {'embed': {'attr': 'hello'}}}) + t.embed.embed.attr = "hello" + assert t.for_json() == {"embed": {"embed": {"attr": "hello"}}} -def test_embed_retrieval_types(): +def test_embed_retrieval_types(DBTest): class Retriever(EmbedTestDoc): config_database = database_name() - config_collection = 'test' + config_collection = "test" t = Retriever() - t.embed.embed.attr = 'hello' + t.embed.embed.attr = "hello" with DBTest: doc_id = Retriever.insert(t) doc = Retriever.find_one({Retriever._id: doc_id}) - eq_(doc, {'_id': doc_id, 'e': {'e': {'a': 'hello'}}}) - eq_(type(doc), Retriever) - eq_(type(doc['e']), dict) - eq_(type(doc['e']['e']), dict) - eq_(type(doc['e']['e']['a']), six.text_type) + assert doc == {"_id": doc_id, "e": {"e": {"a": "hello"}}} + assert type(doc) is Retriever + assert type(doc["e"]) is dict + assert type(doc["e"]["e"]) is dict + assert type(doc["e"]["e"]["a"]) is str def test_always_id(): class TestId(Document): pass - eq_(TestId._id, '_id') + assert TestId._id == "_id" def test_always_id_subclass(): @@ -529,321 +574,337 @@ class TestId(Document): class TestSub(TestId): pass - eq_(TestSub._id, '_id') + assert TestSub._id == "_id" -def test_find_returns_same_class(): +def test_find_returns_same_class(DBTest): doc = DocTest() - doc.user_name = 'testing find' + doc.user_name = "testing find" with DBTest: DocTest.insert(doc) - ok_(doc._id) + assert doc._id with DBTest: doc = list(DocTest.find({DocTest._id: doc._id})) - ok_(doc) + assert doc doc = doc[0] - eq_(type(doc), DocTest) + assert type(doc) is DocTest -def test_find_one_returns_same_class(): +def test_find_one_returns_same_class(DBTest): doc = DocTest() - doc.user_name = 'testing find_one' + doc.user_name = "testing find_one" with DBTest: DocTest.insert(doc) - ok_(doc._id) + assert doc._id with DBTest: doc = DocTest.find_one({DocTest._id: doc._id}) - ok_(doc) - eq_(doc.user_name, 'testing find_one') - eq_(type(doc), DocTest) + assert doc + assert doc.user_name == "testing find_one" + assert type(doc) is DocTest -def test_find_and_modify_returns_same_class(): +def test_find_and_modify_returns_same_class(DBTest): doc = DocTest() - doc.user_name = 'testing find_and_modify' + doc.user_name = "testing find_and_modify" with DBTest: DocTest.insert(doc) - ok_(doc._id) + assert doc._id with DBTest: - doc = DocTest.find_and_modify({DocTest._id: doc._id}, - {'$set': {DocTest.user_name: 'tested find_and_modify'}}, - new=True) + doc = DocTest.find_and_modify( + {DocTest._id: doc._id}, + {"$set": {DocTest.user_name: "tested find_and_modify"}}, + new=True, + ) - ok_(doc) - eq_(doc.user_name, 'tested find_and_modify') - eq_(type(doc), DocTest) + assert doc + 
assert doc.user_name == "tested find_and_modify" + assert type(doc) is DocTest -def test_find_and_modify_doesnt_error_when_none(): +def test_find_and_modify_doesnt_error_when_none(DBTest): with DBTest: - doc = DocTest.find_and_modify({DocTest._id: 'doesnt_exist'}, - {'$set': {'foo': 1}}) + doc = DocTest.find_and_modify( + {DocTest._id: "doesnt_exist"}, {"$set": {"foo": 1}} + ) - eq_(doc, None) + assert doc is None -def test_list_subdocuments_should_be_regular_dicts(): +def test_list_subdocuments_should_be_regular_dicts(DBTest): class ListTest(DocTest): - vals = 'v' + vals = "v" + # Create a new instance - l = ListTest() - vals = [{'a': {'test': True}, 'b': 2}] + items = ListTest() + vals = [{"a": {"test": True}, "b": 2}] + # Insert the instance - with Mongo: - l_id = ListTest.insert(l) + with DBTest: + l_id = ListTest.insert(items) # Set the list - ListTest.update({ListTest._id: l_id}, {'$set': {ListTest.vals: vals}}) + ListTest.update({ListTest._id: l_id}, {"$set": {ListTest.vals: vals}}) # Re-retrieve the instance to allow pymongo to coerce types - l = list(ListTest.find({ListTest._id: l_id}))[0] + items = list(ListTest.find({ListTest._id: l_id}))[0] l2 = ListTest.find_one({ListTest._id: l_id}) + # Check the type - ok_(not isinstance(l.vals[0], Document), l.vals[0]) - ok_(not isinstance(l2.vals[0], Document), l2.vals[0]) + assert not isinstance(items.vals[0], Document) + assert not isinstance(l2.vals[0], Document) -def test_unpatching_document_update_works_nicely(): +def test_unpatching_document_update_works_nicely(DBTest): with DBTest: original_update = DocTest.update - with mock.patch.object(DocTest, 'update') as update: - update.return_value = 'updated' - value = DocTest.update({DocTest._id: 1}, {'$set': - {DocTest.user_name: 'hello'}}) - eq_(value, 'updated') - eq_(DocTest.update, original_update) + with mock.patch.object(DocTest, "update") as update: + update.return_value = "updated" + value = DocTest.update( + {DocTest._id: 1}, {"$set": {DocTest.user_name: "hello"}} + ) + assert value == "updated" + assert DocTest.update == original_update -def test_unmapped_subdocument_saves_and_retrieves_ok(): +def test_unmapped_subdocument_saves_and_retrieves_ok(DBTest): class Test(DocTest): - val = 'v' + val = "v" t = Test() - eq_(t.val, {}) - t.val['hello'] = 'world' + assert t.val == {} + t.val["hello"] = "world" with DBTest: t_id = Test.insert(t) t = Test.find_one({Test._id: t_id}) - eq_(t.val, {'hello': 'world'}) + assert t.val == {"hello": "world"} -@raises(AttributeError) def test_name_attribute(): - class Test(Document): - pass + with pytest.raises(AttributeError): - Test.name + class Test(Document): + pass + + Test.name -@raises(TypeError) def test_config_indexes_must_be_a_list(): - class Test(Document): - config_database = database_name() - config_collection = 'test' - config_indexes = 'foo' + with pytest.raises(TypeError): + + class Test(Document): + config_database = database_name() + config_collection = "test" + config_indexes = "foo" -def test_exercise_normal_index(): +def test_exercise_normal_index(DBTest): class Test(Document): config_database = database_name() - config_collection = 'potato' - config_indexes = ['user_name'] + config_collection = "potato" + config_indexes = ["user_name"] - user_name = 'u' + user_name = "u" with DBTest: Test.find_one() def test_callable_default_creates_saved_defaults(): - func = lambda: 1 + def func(): + return 1 + class Default(DocTest): - saved = 's', func + saved = "s", func t = Default() - eq_(t._saved_defaults, {'s': func}) + assert 
t._saved_defaults == {"s": func} def test_saved_default_is_returned_on_instance(): - func = lambda: 1 + def func(): + return 1 + class Default(DocTest): - saved = 's', func + saved = "s", func t = Default() - eq_(t.saved, 1) + assert t.saved == 1 def test_saved_default_is_part_of_the_doc_after_access(): - func = lambda: 1 + def func(): + return 1 + class Default(DocTest): - saved = 's', func + saved = "s", func t = Default() - eq_(t.saved, 1) - eq_(t, {Default.saved: 1}) + assert t.saved == 1 + assert t == {Default.saved: 1} def test_saved_default_memoizes_first_value_on_multiple_accesses(): s = [0] + def func(): s[0] += 1 return s[0] class Default(DocTest): - saved = 's', func + saved = "s", func t = Default() - eq_(t.saved, 1) - eq_(t.saved, 1) - eq_(s[0], 1) + assert t.saved == 1 + assert t.saved == 1 + assert s[0] == 1 def test_saved_default_is_inheritable(): class Default(DocTest): - saved = 's', lambda: 1 + saved = "s", lambda: 1 class Sub(Default): - other = 'o', lambda: 2 + other = "o", lambda: 2 class Over(Default): - saved = 's', lambda: 3 + saved = "s", lambda: 3 d = Default() - eq_(d.saved, 1) + assert d.saved == 1 d = Sub() - eq_(d.saved, 1) - eq_(d.other, 2) + assert d.saved == 1 + assert d.other == 2 d = Over() - eq_(d.saved, 3) + assert d.saved == 3 def test_default_is_inheritable(): class Default(DocTest): - val = 's', 1 + val = "s", 1 class Sub(Default): - other = 'o', 2 + other = "o", 2 class Over(Default): - val = 's', 3 + val = "s", 3 d = Default() - eq_(d.val, 1) + assert d.val == 1 d = Sub() - eq_(d.val, 1) - eq_(d.other, 2) + assert d.val == 1 + assert d.other == 2 d = Over() - eq_(d.val, 3) + assert d.val == 3 -def test_saved_default_is_set_on_saving(): +def test_saved_default_is_set_on_saving(DBTest): class Default(DocTest): - saved = 's', lambda: 1 + saved = "s", lambda: 1 d = Default() with DBTest: _id = Default.save(d, **_safe) d = Default.find_one(_id) - d.pop('_id') - eq_(dict(d), {Default.saved: 1}) + d.pop("_id") + assert dict(d) == {Default.saved: 1} - -def test_saved_default_is_set_on_inserting(): +def test_saved_default_is_set_on_inserting(DBTest): class Default(DocTest): - saved = 's', lambda: 1 + saved = "s", lambda: 1 d = Default() with DBTest: _id = Default.insert(d, **_safe) d = Default.find_one(_id) - d.pop('_id') - eq_(dict(d), {Default.saved: 1}) + d.pop("_id") + assert dict(d) == {Default.saved: 1} -def test_saved_default_is_set_on_multiple_inserts(): +def test_saved_default_is_set_on_multiple_inserts(DBTest): class Default(DocTest): - saved = 's', lambda: 1 + saved = "s", lambda: 1 docs = [] - for i in xrange(3): + for i in range(3): d = Default() - d._id = 'test_saved_default_%s' % i + d._id = "test_saved_default_%s" % i docs.append(d) with DBTest: _ids = Default.insert(docs, **_safe) - eq_(len(_ids), 3) - docs = list(Default.find({Default._id: {'$in': _ids}})) + assert len(_ids) == 3 + docs = list(Default.find({Default._id: {"$in": _ids}})) - eq_(len(docs), 3) + assert len(docs) == 3 for doc in docs: - doc.pop('_id') - eq_(dict(doc), {Default.saved: 1}) + doc.pop("_id") + assert dict(doc) == {Default.saved: 1} def test_saved_defaults_are_set_in_json(): class Default(DocTest): - saved = 's', lambda: 1 + saved = "s", lambda: 1 d = Default() - eq_(d.for_json(), {'saved': 1}) + assert d.for_json() == {"saved": 1} def test_defaults_are_set_in_json_but_not_in_doc(): class Default(DocTest): - val = 'v', 1 + val = "v", 1 d = Default() - eq_(d.for_json(), {'val': 1}) - eq_(d.get(Default.val, None), None) + assert d.for_json() == {"val": 1} + assert 
d.get(Default.val, None) is None def test_saved_defaults_with_defaults_as_json(): class Default(DocTest): - saved = 's', lambda: 1 - val = 'v', 2 + saved = "s", lambda: 1 + val = "v", 2 d = Default() - eq_(d.for_json(), {'val': 2, 'saved': 1}) - eq_(d.get(Default.val, None), None) - eq_(d.saved, 1) + assert d.for_json() == {"val": 2, "saved": 1} + assert d.get(Default.val, None) is None + assert d.saved == 1 -@raises(AttributeError) def test_embedded_defaults_are_unmapped(): - class Embedded(DocTest): - val = Embed('v') - val.sub = 's', 1 - val.sub2 = 's' + with pytest.raises(AttributeError): + + class Embedded(DocTest): + val = Embed("v") + val.sub = "s", 1 + val.sub2 = "s" - eq_(Embedded.val.sub, ('s', 1)) - eq_(Embedded.val.sub2, 'v.s') - d = Embedded() - d.val.sub # This will raise AttributeError since val.sub is unmapped + assert Embedded.val.sub == ("s", 1) + assert Embedded.val.sub2 == "v.s" + d = Embedded() + d.val.sub # This will raise AttributeError since val.sub is unmapped def test_only_two_tuples_with_leading_string_are_interpreted_as_defaults(): - v1 = ('a',) - v2 = ('b', 2) - v3 = ('c', 3, 4) + v1 = ("a",) + v2 = ("b", 2) + v3 = ("c", 3, 4) v4 = (1, 2) class TupleTest(DocTest): @@ -852,29 +913,32 @@ class TupleTest(DocTest): attr3 = v3 attr4 = v4 - eq_(TupleTest.attr1, v1) - eq_(TupleTest.attr2, 'b') - eq_(TupleTest.attr3, v3) - eq_(TupleTest.attr4, v4) + assert TupleTest.attr1 == v1 + assert TupleTest.attr2 == "b" + assert TupleTest.attr3 == v3 + assert TupleTest.attr4 == v4 d = TupleTest() - eq_(d.attr1, v1) - eq_(d.attr2, 2) - eq_(d.attr3, v3) - eq_(d.attr4, v4) + assert d.attr1 == v1 + assert d.attr2 == 2 + assert d.attr3 == v3 + assert d.attr4 == v4 -def test_update_with_safe_keyword_doesnt_break_pymongo_3(): +def test_update_with_safe_keyword_doesnt_break_pymongo_3(DBTest): with DBTest: - DocTest.update({'_id': 'update_safe_pymongo_3'}, {'$set': {'ok': - True}}, upsert=True, safe=True) + DocTest.update( + {"_id": "update_safe_pymongo_3"}, + {"$set": {"ok": True}}, + upsert=True, + safe=True, + ) -def test_save_with_safe_keyword_doesnt_break_pymongo_3(): +def test_save_with_safe_keyword_doesnt_break_pymongo_3(DBTest): with DBTest: - DocTest.save({'_id': 'save_safe_pymongo_3'}, safe=True) + DocTest.save({"_id": "save_safe_pymongo_3"}, safe=True) -def test_insert_with_safe_keyword_doesnt_break_pymongo_3(): +def test_insert_with_safe_keyword_doesnt_break_pymongo_3(DBTest): with DBTest: - DocTest.insert({'_id': 'insert_safe_pymongo_3'}, safe=True) - + DocTest.insert({"_id": "insert_safe_pymongo_3"}, safe=True) diff --git a/test/test_humbledb/test_helpers.py b/test/test_humbledb/test_helpers.py index c01847b..209d785 100644 --- a/test/test_humbledb/test_helpers.py +++ b/test/test_humbledb/test_helpers.py @@ -3,144 +3,148 @@ ================= """ + import pyconfig +import pytest -from humbledb import _version -from humbledb import Document, Mongo -from humbledb.helpers import auto_increment +from humbledb import Document, Mongo, _version from humbledb.errors import DatabaseMismatch, NoConnection -from ..util import DBTest, database_name, eq_, ok_, raises, SkipTest +from humbledb.helpers import auto_increment +from ..util import database_name -SIDECAR = 'sidecars' +SIDECAR = "sidecars" # The safe= keyword doesn't exist in 3.0 -if _version._lt('3.0.0'): - _safe = {'safe': True} +if _version._lt("3.0.0"): + _safe = {"safe": True} else: _safe = {} class MyDoc(Document): config_database = database_name() - config_collection = 'test' + config_collection = "test" + + auto = "a", 
auto_increment(database_name(), SIDECAR, "MyDoc") - auto = 'a', auto_increment(database_name(), SIDECAR, 'MyDoc') class MyFloatCounterDoc(Document): config_database = database_name() - config_collection = 'float_test' + config_collection = "float_test" + + auto = "a", auto_increment(database_name(), SIDECAR, "FloatDoc") - auto = 'a', auto_increment(database_name(), SIDECAR, 'FloatDoc') class BigCounterDoc(Document): config_database = database_name() - config_collection = 'big_doc' + config_collection = "big_doc" + + auto = "a", auto_increment(database_name(), SIDECAR, "BigCounterDoc", increment=10) - auto = 'a', auto_increment(database_name(), SIDECAR, - 'BigCounterDoc', increment=10) def setup(): # Set up a float counter in the sidecar collection. import pymongo - if _version._gte('2.4'): - conn = pymongo.MongoClient('127.0.0.1') + + if _version._gte("2.4"): + conn = pymongo.MongoClient("127.0.0.1") else: - conn = pymongo.Connection('127.0.0.1') + conn = pymongo.Connection("127.0.0.1") coll = conn[database_name()][SIDECAR] - coll.insert({'_id': 'FloatDoc', 'value':float(100)}) + coll.insert({"_id": "FloatDoc", "value": float(100)}) + def teardown(): pass -def test_auto_increment_works_as_advertised(): +def test_auto_increment_works_as_advertised(DBTest): doc = MyDoc() with DBTest: MyDoc.save(doc, **_safe) # Counters are expected to be integers. - ok_(isinstance(doc.auto, int)) - eq_(doc.auto, 1) + assert isinstance(doc.auto, int) + assert doc.auto == 1 doc = MyDoc() with DBTest: - eq_(doc.auto, 2) + assert doc.auto == 2 MyDoc.save(doc) - ok_(isinstance(doc.auto, int)) - eq_(doc.auto, 2) + assert isinstance(doc.auto, int) + assert doc.auto == 2 + +def test_auto_increment_initial_float_counter_value_remains_a_float(DBTest): + if _version._gte("4.0"): + pytest.skip("Pymongo 4.x / Python 3.x changed the float consistency behavior") -def test_auto_increment_initial_float_counter_value_remains_a_float(): doc = MyFloatCounterDoc() with DBTest: MyFloatCounterDoc.save(doc, **_safe) # There are instances where the counters can be initialized # as MongoDB Double types. - ok_(isinstance(doc.auto, float)) + assert isinstance(doc.auto, float) - eq_(doc.auto, 101) # Floats can pass as integers - eq_(doc.auto, 101.0) # But are really floats + assert doc.auto == 101 # Floats can pass as integers + assert doc.auto == 101.0 # But are really floats - ok_(str(doc.auto) == "101.0") # And will output as floats. - ok_(str(doc.auto) != "101") # Not as integers. + assert str(doc.auto) == "101.0" # And will output as floats. + assert str(doc.auto) != "101" # Not as integers. 
doc = MyFloatCounterDoc() with DBTest: - eq_(doc.auto, 102.0) + assert doc.auto == 102.0 MyFloatCounterDoc.save(doc, **_safe) - ok_(isinstance(doc.auto, float)) - eq_(doc.auto, 102) - eq_(doc.auto, 102.0) + assert isinstance(doc.auto, float) + assert doc.auto == 102 + assert doc.auto == 102.0 -def test_auto_increment_works_with_user_defined_increment_step(): +def test_auto_increment_works_with_user_defined_increment_step(DBTest): doc = BigCounterDoc() with DBTest: BigCounterDoc.save(doc, **_safe) - eq_(doc.auto, 10) + assert doc.auto == 10 doc = BigCounterDoc() with DBTest: - eq_(doc.auto, 20) + assert doc.auto == 20 BigCounterDoc.save(doc) - eq_(doc.auto, 20) + assert doc.auto == 20 -@raises(DatabaseMismatch) def test_auto_increment_errors_with_wrong_db(): - if _version._lt('2.6.0'): - raise SkipTest + if _version._lt("2.6.0"): + pytest.skip("SkipTest") - host = pyconfig.get('humbledb.test.db.host', 'localhost') - port = pyconfig.get('humbledb.test.db.port', 27017) - uri = 'mongodb://{}:{}/{}'.format(host, port, database_name()) + host = pyconfig.get("humbledb.test.db.host", "localhost") + port = pyconfig.get("humbledb.test.db.port", 27017) + uri = "mongodb://{}:{}/{}".format(host, port, database_name()) class DBuri(Mongo): config_uri = uri class MyDoc2(Document): config_database = database_name() - config_collection = 'test' - - auto = 'a', auto_increment(database_name() + '_is_different', SIDECAR, - 'MyDoc2') + config_collection = "test" + auto = "a", auto_increment(database_name() + "_is_different", SIDECAR, "MyDoc2") doc = MyDoc2() - with DBuri: - doc.auto + with pytest.raises(DatabaseMismatch): + with DBuri: + doc.auto -@raises(NoConnection) def test_autoincrement_requires_connection(): doc = MyDoc() - doc.auto - - + with pytest.raises(NoConnection): + doc.auto diff --git a/test/test_humbledb/test_index.py b/test/test_humbledb/test_index.py index cf86387..3852a2b 100644 --- a/test/test_humbledb/test_index.py +++ b/test/test_humbledb/test_index.py @@ -1,203 +1,377 @@ +from unittest import mock -import mock import pyconfig +import pytest import humbledb -from ..util import * from humbledb import Document, Embed, Index, _version - -def teardown(): - DBTest.connection.drop_database(database_name()) +from ..util import database_name def cache_for(val): # This is a work around for the version changing the cache argument - if _version._lt('2.3'): - return {'ttl': val} - return {'cache_for': val} + if _version._lt("2.3"): + return {"ttl": val} + return {"cache_for": val} class DocTest(Document): config_database = database_name() - config_collection = 'test' + config_collection = "test" + + user_name = "u" - user_name = 'u' +def test_index_basic(DBTest): + if _version._gte("4.0"): + pytest.skip("ensure_index was removed in Pymongo 4.x") -def test_index_basic(): class Test(Document): config_database = database_name() - config_collection = 'test' - config_indexes = [Index('user_name')] + config_collection = "test" + config_indexes = [Index("user_name")] - user_name = 'u' + user_name = "u" with DBTest: - with mock.patch.object(Test, 'collection') as coll: - coll.find_one.__name__ = 'find_one' + with mock.patch.object(Test, "collection") as coll: + coll.find_one.__name__ = "find_one" Test._ensured = None Test.find_one() coll.ensure_index.assert_called_with( - Test.user_name, - background=True, - **cache_for(60*60*24)) + Test.user_name, background=True, **cache_for(60 * 60 * 24) + ) + +def test_index_basic_sparse(DBTest): + if _version._gte("4.0"): + pytest.skip("ensure_index was removed in 
Pymongo 4.x") -def test_index_basic_sparse(): class Test(Document): config_database = database_name() - config_collection = 'test' - config_indexes = [Index('user_name', sparse=True)] + config_collection = "test" + config_indexes = [Index("user_name", sparse=True)] - user_name = 'u' + user_name = "u" with DBTest: - with mock.patch.object(Test, 'collection') as coll: - coll.find_one.__name__ = 'find_one' + with mock.patch.object(Test, "collection") as coll: + coll.find_one.__name__ = "find_one" Test._ensured = None Test.find_one() coll.ensure_index.assert_called_with( - Test.user_name, - background=True, - sparse=True, - **cache_for(60*60*24)) + Test.user_name, background=True, sparse=True, **cache_for(60 * 60 * 24) + ) -def test_index_basic_directional(): +def test_index_basic_directional(DBTest): + if _version._gte("4.0"): + pytest.skip("ensure_index was removed in Pymongo 4.x") + class Test(Document): config_database = database_name() - config_collection = 'test' - config_indexes = [Index([('user_name', humbledb.DESC)])] + config_collection = "test" + config_indexes = [Index([("user_name", humbledb.DESC)])] - user_name = 'u' + user_name = "u" with DBTest: - with mock.patch.object(Test, 'collection') as coll: - coll.find_one.__name__ = 'find_one' + with mock.patch.object(Test, "collection") as coll: + coll.find_one.__name__ = "find_one" Test._ensured = None Test.find_one() coll.ensure_index.assert_called_with( - [(Test.user_name, humbledb.DESC)], - background=True, - **cache_for(60*60*24)) + [(Test.user_name, humbledb.DESC)], + background=True, + **cache_for(60 * 60 * 24), + ) + +def test_index_override_defaults(DBTest): + if _version._gte("4.0"): + pytest.skip("ensure_index was removed in Pymongo 4.x") -def test_index_override_defaults(): class Test(Document): config_database = database_name() - config_collection = 'test' - config_indexes = [Index('user_name', background=False, cache_for=60)] + config_collection = "test" + config_indexes = [Index("user_name", background=False, cache_for=60)] - user_name = 'u' + user_name = "u" with DBTest: - with mock.patch.object(Test, 'collection') as coll: - coll.find_one.__name__ = 'find_one' + with mock.patch.object(Test, "collection") as coll: + coll.find_one.__name__ = "find_one" Test._ensured = None Test.find_one() coll.ensure_index.assert_called_with( - Test.user_name, - background=False, - **cache_for(60)) + Test.user_name, background=False, **cache_for(60) + ) def test_resolve_dotted_index(): class TestResolveIndex(DocTest): - meta = Embed('m') - meta.tag = 't' + meta = Embed("m") + meta.tag = "t" - eq_(Index('meta')._resolve_index(TestResolveIndex), 'm') - eq_(Index('meta.tag')._resolve_index(TestResolveIndex), 'm.t') - eq_(Index('meta.foo')._resolve_index(TestResolveIndex), 'meta.foo') + assert Index("meta")._resolve_index(TestResolveIndex) == "m" + assert Index("meta.tag")._resolve_index(TestResolveIndex) == "m.t" + assert Index("meta.foo")._resolve_index(TestResolveIndex) == "meta.foo" def test_resolve_deep_dotted_index(): class TestResolveIndex(DocTest): - meta = Embed('m') - meta.deep = Embed('d') - meta.deep.deeper = Embed('d') - meta.deep.deeper.deeper_still = Embed('d') - meta.deep.deeper.deeper_still.tag = 't' + meta = Embed("m") + meta.deep = Embed("d") + meta.deep.deeper = Embed("d") + meta.deep.deeper.deeper_still = Embed("d") + meta.deep.deeper.deeper_still.tag = "t" + + assert Index("meta.deep")._resolve_index(TestResolveIndex) == "m.d" + assert Index("meta.deep.deeper")._resolve_index(TestResolveIndex) == "m.d.d" + assert ( + 
Index("meta.deep.deeper.deeper_still")._resolve_index(TestResolveIndex) + == "m.d.d.d" + ) + assert ( + Index("meta.deep.deeper.deeper_still.tag")._resolve_index(TestResolveIndex) + == "m.d.d.d.t" + ) + + +def test_resolve_compound_index(DBTest): + if _version._gte("4.0"): + pytest.skip("ensure_index was removed in Pymongo 4.x") - eq_(Index('meta.deep')._resolve_index(TestResolveIndex), 'm.d') - eq_(Index('meta.deep.deeper')._resolve_index(TestResolveIndex), 'm.d.d') - eq_(Index('meta.deep.deeper.deeper_still')._resolve_index( - TestResolveIndex), 'm.d.d.d') - eq_(Index('meta.deep.deeper.deeper_still.tag')._resolve_index( - TestResolveIndex), 'm.d.d.d.t') - - -def test_resolve_compound_index(): class Test(Document): config_database = database_name() - config_collection = 'test' - config_indexes = [Index([('user_name', humbledb.ASC), ('compound', - humbledb.DESC)])] + config_collection = "test" + config_indexes = [ + Index([("user_name", humbledb.ASC), ("compound", humbledb.DESC)]) + ] - user_name = 'u' - compound = 'c' + user_name = "u" + compound = "c" with DBTest: # This will raise a TypeError - with mock.patch.object(Test, 'collection') as coll: - coll.find_one.__name__ = 'find_one' + with mock.patch.object(Test, "collection") as coll: + coll.find_one.__name__ = "find_one" Test._ensured = None Test.find_one() coll.ensure_index.assert_called_with( - [(Test.user_name, humbledb.ASC), (Test.compound, - humbledb.DESC)], - background=True, - **cache_for(60*60*24)) + [(Test.user_name, humbledb.ASC), (Test.compound, humbledb.DESC)], + background=True, + **cache_for(60 * 60 * 24), + ) + +def test_resolve_non_string_attribute_fails(DBTest): + if _version._gte("4.0"): + pytest.skip("ensure_index was removed in Pymongo 4.x") -@raises(TypeError) -def test_resolve_non_string_attribute_fails(): class Test(Document): config_database = database_name() - config_collection = 'test' - config_indexes = [Index('value')] + config_collection = "test" + config_indexes = [Index("value")] value = True with DBTest: # This will raise a TypeError - with mock.patch.object(Test, 'collection') as coll: - coll.find_one.__name__ = 'find_one' + with mock.patch.object(Test, "collection") as coll: + coll.find_one.__name__ = "find_one" Test._ensured = None - Test.find_one() + with pytest.raises(TypeError): + Test.find_one() coll.ensure_index.assert_not_called() -@raises(TypeError) -def test_badly_formed_index_raises_error(): +def test_badly_formed_index_raises_error(DBTest): + if _version._gte("4.0"): + pytest.skip("ensure_index was removed in Pymongo 4.x") + class Test(Document): config_database = database_name() - config_collection = 'test' - config_indexes = [Index([('value',)])] + config_collection = "test" + config_indexes = [Index([("value",)])] with DBTest: # This will raise a TypeError - with mock.patch.object(Test, 'collection') as coll: - coll.find_one.__name__ = 'find_one' + with mock.patch.object(Test, "collection") as coll: + coll.find_one.__name__ = "find_one" + Test._ensured = None + with pytest.raises(TypeError): + Test.find_one() + assert not coll.ensure_index.called + + +def test_ensure_index_can_be_skipped(DBTest): + if _version._gte("4.0"): + pytest.skip("ensure_index was removed in Pymongo 4.x") + + class Test(Document): + config_database = database_name() + config_collection = "test" + config_indexes = [Index("value")] + value = "v" + + with DBTest: + with mock.patch.object(Test, "collection") as coll: + coll.find_one.__name__ = "find_one" + pyconfig.set("humbledb.ensure_indexes", False) + 
Test.find_one() + pyconfig.set("humbledb.ensure_indexes", True) + assert not coll.ensure_index.called + + +def test_index_basic_pymongo_4(DBTest): + if _version._lt("4.0"): + pytest.skip("create_index was introduced in Pymongo 4.x") + + class Test(Document): + config_database = database_name() + config_collection = "test" + config_indexes = [Index("user_name")] + + user_name = "u" + + with DBTest: + with mock.patch.object(Test, "collection") as coll: + coll.find_one.__name__ = "find_one" + Test._ensured = None + Test.find_one() + coll.create_index.assert_called_with(Test.user_name, background=True) + + +def test_index_basic_sparse_pymongo_4(DBTest): + if _version._lt("4.0"): + pytest.skip("create_index was introduced in Pymongo 4.x") + + class Test(Document): + config_database = database_name() + config_collection = "test" + config_indexes = [Index("user_name", sparse=True)] + + user_name = "u" + + with DBTest: + with mock.patch.object(Test, "collection") as coll: + coll.find_one.__name__ = "find_one" + Test._ensured = None + Test.find_one() + coll.create_index.assert_called_with( + Test.user_name, background=True, sparse=True + ) + + +def test_index_basic_directional_pymongo_4(DBTest): + if _version._lt("4.0"): + pytest.skip("create_index was introduced in Pymongo 4.x") + + class Test(Document): + config_database = database_name() + config_collection = "test" + config_indexes = [Index([("user_name", humbledb.DESC)])] + + user_name = "u" + + with DBTest: + with mock.patch.object(Test, "collection") as coll: + coll.find_one.__name__ = "find_one" + Test._ensured = None + Test.find_one() + coll.create_index.assert_called_with( + [(Test.user_name, humbledb.DESC)], + background=True, + ) + + +def test_index_override_defaults_pymongo_4(DBTest): + if _version._lt("4.0"): + pytest.skip("create_index was introduced in Pymongo 4.x") + + class Test(Document): + config_database = database_name() + config_collection = "test" + config_indexes = [Index("user_name", background=False, cache_for=60)] + + user_name = "u" + + with DBTest: + with mock.patch.object(Test, "collection") as coll: + coll.find_one.__name__ = "find_one" Test._ensured = None Test.find_one() - eq_(coll.ensure_index.called, False) + coll.create_index.assert_called_with(Test.user_name, background=False) -def test_ensure_index_can_be_skipped(): +def test_resolve_compound_index_pymongo_4(DBTest): + if _version._lt("4.0"): + pytest.skip("create_index was introduced in Pymongo 4.x") + class Test(Document): config_database = database_name() - config_collection = 'test' - config_indexes = [Index('value')] - value = 'v' + config_collection = "test" + config_indexes = [ + Index([("user_name", humbledb.ASC), ("compound", humbledb.DESC)]) + ] + + user_name = "u" + compound = "c" with DBTest: - with mock.patch.object(Test, 'collection') as coll: - coll.find_one.__name__ = 'find_one' - pyconfig.set('humbledb.ensure_indexes', False) + # This will raise a TypeError + with mock.patch.object(Test, "collection") as coll: + coll.find_one.__name__ = "find_one" + Test._ensured = None Test.find_one() - pyconfig.set('humbledb.ensure_indexes', True) - eq_(coll.ensure_index.called, False) + coll.create_index.assert_called_with( + [(Test.user_name, humbledb.ASC), (Test.compound, humbledb.DESC)], + background=True, + ) + + +def test_resolve_non_string_attribute_fails_pymongo_4(): + if _version._lt("4.0"): + pytest.skip("create_index was introduced in Pymongo 4.x") + + with pytest.raises(TypeError): + + class Test(Document): + config_database = database_name() + 
config_collection = "test" + config_indexes = [Index("value")] + + value = True + +def test_badly_formed_index_raises_error_pymongo_4(): + if _version._lt("4.0"): + pytest.skip("create_index was introduced in Pymongo 4.x") + with pytest.raises(TypeError): + + class Test(Document): + config_database = database_name() + config_collection = "test" + config_indexes = [Index([("value",)])] + + +def test_create_index_can_be_skipped_pymongo_4(DBTest): + if _version._lt("4.0"): + pytest.skip("create_index was introduced in Pymongo 4.x") + + class Test(Document): + config_database = database_name() + config_collection = "test" + config_indexes = [Index("value")] + value = "v" + + with DBTest: + with mock.patch.object(Test, "collection") as coll: + coll.find_one.__name__ = "find_one" + pyconfig.set("humbledb.ensure_indexes", False) + Test.find_one() + pyconfig.set("humbledb.ensure_indexes", True) + assert not coll.create_index.called diff --git a/test/test_humbledb/test_maps.py b/test/test_humbledb/test_maps.py index 3e36a00..138abcc 100644 --- a/test/test_humbledb/test_maps.py +++ b/test/test_humbledb/test_maps.py @@ -1,204 +1,201 @@ +import pytest import pytool -from six.moves import xrange from humbledb import Document, Embed -from humbledb.maps import ListMap, DictMap -from ..util import eq_, ok_, DBTest, is_instance_, is_, raises, database_name +from humbledb.maps import DictMap, ListMap - -def teardown(): - DBTest.connection.drop_database(database_name()) +from ..util import database_name class MapTest(Document): - em = Embed('e') - em.val = 'v' - val = 'v' + em = Embed("e") + em.val = "v" + val = "v" class DocTest(Document): config_database = database_name() - config_collection = 'doc_test' + config_collection = "doc_test" class ListTest(DocTest): - vals = Embed('l') - vals.one = 'o' - vals.two = 't' + vals = Embed("l") + vals.one = "o" + vals.two = "t" def test_mapped_keys(): class TestMapped(Document): - key1 = '1' - key2 = '2' - key3 = '3' + key1 = "1" + key2 = "2" + key3 = "3" - eq_(sorted(TestMapped.mapped_keys()), ['1', '2', '3']) + assert sorted(TestMapped.mapped_keys()) == ["1", "2", "3"] def test_mapped_attributes(): class TestMapped(Document): - key1 = '1' - key2 = '2' - key3 = '3' + key1 = "1" + key2 = "2" + key3 = "3" - eq_(sorted(TestMapped.mapped_attributes()), ['key1', 'key2', 'key3']) + assert sorted(TestMapped.mapped_attributes()) == ["key1", "key2", "key3"] def test_embed_mapped_keys(): class TestMapped(Document): - key1 = '1' - key2 = '2' - key3 = '3' + key1 = "1" + key2 = "2" + key3 = "3" - embed = Embed('e') + embed = Embed("e") - eq_(sorted(TestMapped.mapped_keys()), ['1', '2', '3', 'e']) + assert sorted(TestMapped.mapped_keys()) == ["1", "2", "3", "e"] def test_embed_mapped_attributes(): class TestMapped(Document): - key1 = '1' - key2 = '2' - key3 = '3' + key1 = "1" + key2 = "2" + key3 = "3" - embed = Embed('e') + embed = Embed("e") - eq_(sorted(TestMapped.mapped_attributes()), ['embed', 'key1', 'key2', - 'key3']) + assert sorted(TestMapped.mapped_attributes()) == ["embed", "key1", "key2", "key3"] def test_embed_non_string_values_are_not_mapped(): class TestMapped(Document): - embed = Embed('e') - embed.good = 'g' + embed = Embed("e") + embed.good = "g" embed.bad = True - eq_(TestMapped.embed.good, 'e.g') - eq_(getattr(TestMapped.embed, 'bad', -1), -1) + assert TestMapped.embed.good == "e.g" + assert getattr(TestMapped.embed, "bad", -1) == -1 def test_embed_private_values_are_not_mapped(): class TestMapped(Document): - embed = Embed('e') - embed.good = 'g' - embed._bad 
= 'b' + embed = Embed("e") + embed.good = "g" + embed._bad = "b" - eq_(TestMapped.embed.good, 'e.g') - eq_(getattr(TestMapped.embed, '_bad', -1), -1) + assert TestMapped.embed.good == "e.g" + assert getattr(TestMapped.embed, "_bad", -1) == -1 -@raises(AttributeError) def test_bad_embedded_mappings_raise_an_attribute_error_on_the_instance(): class Test(Document): - embed = Embed('e') - embed.mapped = 'm' + embed = Embed("e") + embed.mapped = "m" # This will raise an attribute error - Test().embed.not_mapped + with pytest.raises(AttributeError): + Test().embed.not_mapped def test_embedded_key_retrieval_on_instance_is_empty_dict(): m = MapTest() - eq_(MapTest().em.val, {}) - eq_(m.em.val, {}) + assert MapTest().em.val == {} + assert m.em.val == {} def test_missing_key_retrieval_is_empty_dict(): - eq_(MapTest().val, {}) + assert MapTest().val == {} -@raises(AttributeError) def test_unmapped_attribute_assignment_to_dict_map_is_an_error(): t = MapTest() - t.em.foo = 'bar' + with pytest.raises(AttributeError): + t.em.foo = "bar" def test_deleting_an_unmapped_attribute_from_dict_map_works(): t = MapTest() em = t.em - object.__setattr__(em, 'foo', True) + object.__setattr__(em, "foo", True) del em.foo - is_(getattr(em, 'foo', None), None) + assert getattr(em, "foo", None) is None -@raises(AttributeError) def test_deleting_an_unset_mapped_attribute_from_dict_map_is_an_error(): t = MapTest() em = t.em - del em.val + with pytest.raises(AttributeError): + del em.val def test_deleting_a_subkey_when_unset_is_harmless(): t = MapTest() - del t.em['v'] + del t.em["v"] -@raises(KeyError) def test_deleting_a_missing_key_is_an_error(): t = MapTest() t.val = {} - del t.val['k'] + with pytest.raises(KeyError): + del t.val["k"] def test_deleting_the_last_key_removes_an_embedded_doc(): t = MapTest() - t.val = {'a': 1} - eq_(t, {'v': {'a': 1}}) - del t.val['a'] - eq_(t, {}) + t.val = {"a": 1} + assert t == {"v": {"a": 1}} + del t.val["a"] + assert t == {} -def test_lists_are_mapped(): +def test_lists_are_mapped(DBTest): doc = ListTest() - doc.vals = ['hello', 'world'] + doc.vals = ["hello", "world"] with DBTest: ListTest.insert(doc) doc = ListTest.find_one() - is_instance_(doc.vals, ListMap) + assert isinstance(doc.vals, ListMap) def test_embedded_list_as_json_replaces_embedded_doc_field_names(): doc = ListTest() - doc.vals = [{'o': 'hello'}, 'world'] - eq_(doc.for_json(), {'vals': [{'one': 'hello'}, 'world']}) + doc.vals = [{"o": "hello"}, "world"] + assert doc.for_json() == {"vals": [{"one": "hello"}, "world"]} def test_embedded_list_as_json_recursively_sets_field_names(): class Test(DocTest): - vals = Embed('l') - vals.one = 'o' - vals.sub = Embed('s') - vals.sub.two = 't' + vals = Embed("l") + vals.one = "o" + vals.sub = Embed("s") + vals.sub.two = "t" doc = Test() - doc.vals = [{'s': [{'t': 'hello'}], 'o': 1}, {'o': 1}] - eq_(doc.for_json(), {'vals': [{'sub': [{'two': 'hello'}], 'one': 1}, - {'one': 1}]}) + doc.vals = [{"s": [{"t": "hello"}], "o": 1}, {"o": 1}] + assert doc.for_json() == { + "vals": [{"sub": [{"two": "hello"}], "one": 1}, {"one": 1}] + } def test_embedded_list_creation_with_attributes(): class Test(DocTest): - vals = Embed('l') - vals.one = 'o' - vals.two = 't' + vals = Embed("l") + vals.one = "o" + vals.two = "t" doc = Test() doc.vals = [] val = doc.vals.new() val.one = 1 val.two = 2 - eq_(doc.for_json(), {'vals': [{'one': 1, 'two': 2}]}) + assert doc.for_json() == {"vals": [{"one": 1, "two": 2}]} -def test_embedded_list_with_crazy_complex_heirarchy(): +def 
test_embedded_list_with_crazy_complex_heirarchy(DBTest): class Test(DocTest): - s1 = 's1' - l1 = Embed('l1') - l1.s2 = 's2' - l1.l2 = Embed('l2') - l1.l2.s3 = 's3' + s1 = "s1" + l1 = Embed("l1") + l1.s2 = "s2" + l1.l2 = Embed("l2") + l1.l2.s3 = "s3" doc = Test() doc.l1 = [] @@ -209,42 +206,42 @@ class Test(DocTest): item2 = item.l2.new() item2.s3 = 3 - eq_(doc, {'s1': 1, 'l1':[{'s2': 2, 'l2': [{'s3': 3}]}]}) + assert doc == {"s1": 1, "l1": [{"s2": 2, "l2": [{"s3": 3}]}]} with DBTest: doc_id = Test.insert(doc) doc = Test.find_one({Test._id: doc_id}) - doc.pop('_id') - eq_(doc, {'s1': 1, 'l1':[{'s2': 2, 'l2': [{'s3': 3}]}]}) + doc.pop("_id") + assert doc == {"s1": 1, "l1": [{"s2": 2, "l2": [{"s3": 3}]}]} def test_embedded_list_iteration(): class Test(DocTest): - vals = Embed('v') - vals.i = 'i' + vals = Embed("v") + vals.i = "i" doc = Test() doc.vals = [] - for i in xrange(5): + for i in range(5): item = doc.vals.new() item.i = i for item in doc.vals: - is_instance_(item.i, int) + assert isinstance(item.i, int) - for i in xrange(len(doc.vals)): - is_instance_(item.i, int) + for i in range(len(doc.vals)): + assert isinstance(item.i, int) -def test_modified_items_save_ok(): +def test_modified_items_save_ok(DBTest): class Test(DocTest): - vals = Embed('v') - vals.i = 'i' + vals = Embed("v") + vals.i = "i" doc = Test() doc.vals = [] - for i in xrange(5): + for i in range(5): item = doc.vals.new() item.i = i @@ -259,24 +256,24 @@ class Test(DocTest): Test.save(doc) doc = Test.find_one({Test._id: doc_id}) - eq_(len(doc.vals), 5) + assert len(doc.vals) == 5 total = 0 for item in doc.vals: - eq_(item.i, 12) + assert item.i == 12 total += item.i - eq_(total, 60) + assert total == 60 def test_embedded_lists_are_json_serializable(): class BasicList(DocTest): - vals = 'l' + vals = "l" doc = BasicList() - doc.vals = ['a', 'b', 'c'] + doc.vals = ["a", "b", "c"] - eq_(pytool.json.as_json(doc.vals), '["a", "b", "c"]') + assert pytool.json.as_json(doc.vals) == '["a", "b", "c"]' def test_embedded_lists_with_embedded_docs_are_json_serializable(): @@ -287,98 +284,98 @@ def test_embedded_lists_with_embedded_docs_are_json_serializable(): item.two = 2 json = pytool.json.as_json(doc.vals) - ok_(json) - ok_('"one": 1' in json) - ok_('"two": 2' in json) - ok_(json.startswith('[{"')) - ok_(json.endswith('}]')) - eq_(len(json), len('[{"one": 1, "two": 2}]')) + assert json + assert '"one": 1' in json + assert '"two": 2' in json + assert json.startswith('[{"') + assert json.endswith("}]") + assert len(json) == len('[{"one": 1, "two": 2}]') def test_deeply_embedded_documents_are_json_serializable(): class Test(DocTest): - one = Embed('o') - one.two = Embed('t') - one.two.three = Embed('r') - one.two.three.four = 'f' + one = Embed("o") + one.two = Embed("t") + one.two.three = Embed("r") + one.two.three.four = "f" doc = Test() doc.one.two.three.four = 4 - eq_(pytool.json.as_json(doc), '{"one": {"two": {"three": {"four": 4}}}}') - eq_(pytool.json.as_json(doc.one), '{"two": {"three": {"four": 4}}}') - eq_(pytool.json.as_json(doc.one.two), '{"three": {"four": 4}}') - eq_(pytool.json.as_json(doc.one.two.three), '{"four": 4}') + assert pytool.json.as_json(doc) == '{"one": {"two": {"three": {"four": 4}}}}' + assert pytool.json.as_json(doc.one) == '{"two": {"three": {"four": 4}}}' + assert pytool.json.as_json(doc.one.two) == '{"three": {"four": 4}}' + assert pytool.json.as_json(doc.one.two.three) == '{"four": 4}' def test_deeply_embedded_documents_and_lists(): class Test(DocTest): - one = Embed('o') - one.two = Embed('t') - 
one.two.three = Embed('r') - one.two.three.four = 'f' + one = Embed("o") + one.two = Embed("t") + one.two.three = Embed("r") + one.two.three.four = "f" doc = Test() doc.one = [] one_item = doc.one.new() one_item.two.three = [] three_item = one_item.two.three.new() - three_item.four = ['five'] + three_item.four = ["five"] - eq_(pytool.json.as_json(doc), '{"one": [{"two": {"three": [{"four": ' - '["five"]}]}}]}') + assert ( + pytool.json.as_json(doc) + == '{"one": [{"two": {"three": [{"four": ["five"]}]}}]}' + ) def test_unmapped_list_works_with_new(): class List(DocTest): - vals = 'v' + vals = "v" - l = List() - l.vals = [] - v = l.vals.new() - v['1'] = 1 + items = List() + items.vals = [] + v = items.vals.new() + v["1"] = 1 - eq_(l, {'v': [{'1': 1}]}) + assert items == {"v": [{"1": 1}]} def test_mapped_keys_take_default_values_as_tuples(): class Default(DocTest): - key = 'k', 1 + key = "k", 1 t = Default() - eq_(t.key, 1) + assert t.key == 1 def test_mapped_keys_with_default_value_doesnt_save_to_doc(): class Default(DocTest): - key = 'k', 1 + key = "k", 1 t = Default() - eq_(t.key, 1) - eq_(dict(t), {}) + assert t.key == 1 + assert dict(t) == {} def test_saved_default_wraps_lists_appropriately(): class Default(DocTest): - saved = 's', lambda: [1] + saved = "s", lambda: [1] t = Default() - eq_(t.saved, [1]) - is_instance_(t.saved, ListMap) + assert t.saved == [1] + assert isinstance(t.saved, ListMap) def test_saved_default_wraps_dicts_appropriately(): class Default(DocTest): - saved = 's', lambda: {'1': 1} + saved = "s", lambda: {"1": 1} t = Default() - eq_(t.saved, {'1': 1}) - is_instance_(t.saved, DictMap) + assert t.saved == {"1": 1} + assert isinstance(t.saved, DictMap) def test_empty(): - eq_(ListTest._name_map.vals.empty(), False) - eq_(ListTest._name_map.vals.one.empty(), True) - - + assert ListTest._name_map.vals.empty() is False + assert ListTest._name_map.vals.one.empty() is True diff --git a/test/test_humbledb/test_mongo.py b/test/test_humbledb/test_mongo.py index c29db85..21531e5 100644 --- a/test/test_humbledb/test_mongo.py +++ b/test/test_humbledb/test_mongo.py @@ -1,34 +1,35 @@ -import mock -import pyconfig +from unittest import mock from unittest.case import SkipTest -from humbledb import Mongo, Document, _version -from humbledb.errors import DatabaseMismatch, ConnectionFailure -from ..util import database_name, ok_, eq_, DBTest, raises +import pyconfig +import pytest +from humbledb import Document, Mongo, _version +from humbledb.errors import ConnectionFailure, DatabaseMismatch -def teardown(): - DBTest.connection.drop_database(database_name()) +from ..util import database_name -def test_new(): - eq_(DBTest, DBTest()) +def test_new(DBTest): + assert DBTest == DBTest() -@raises(TypeError) def test_missing_config_host(): - class Test(Mongo): - config_port = 27017 + with pytest.raises(TypeError): + + class Test(Mongo): + config_port = 27017 -@raises(TypeError) def test_missing_config_port(): - class Test(Mongo): - config_host = 'localhost' + with pytest.raises(TypeError): + + class Test(Mongo): + config_host = "localhost" -def test_reload(): - with mock.patch.object(DBTest, '_new_connection') as _new_conn: +def test_reload(DBTest): + with mock.patch.object(DBTest, "_new_connection") as _new_conn: pyconfig.reload() _new_conn.assert_called_once() @@ -36,14 +37,14 @@ def test_reload(): pyconfig.reload() -@raises(RuntimeError) -def test_nested_conn(): - with DBTest: +def test_nested_conn(DBTest): + with pytest.raises(RuntimeError): with DBTest: - pass + with DBTest: + pass -def 
test_harmless_end(): +def test_harmless_end(DBTest): # This shouldn't raise any errors DBTest.end() DBTest.start() @@ -52,14 +53,15 @@ def test_harmless_end(): def test_replica_works_for_versions_between_2_1_and_2_4(): - if _version._lt('2.1') or _version._gte('2.4'): + if _version._lt("2.1") or _version._gte("2.4"): raise SkipTest - with mock.patch('pymongo.ReplicaSetConnection') as replica: + with mock.patch("pymongo.ReplicaSetConnection") as replica: + class Replica(Mongo): - config_host = 'localhost' + config_host = "localhost" config_port = 27017 - config_replica = 'test' + config_replica = "test" with Replica: pass @@ -68,17 +70,18 @@ class Replica(Mongo): def test_replica_works_for_versions_after_2_4(): - if _version._lt('2.4'): + if _version._lt("2.4"): raise SkipTest - if _version._gte('3'): + if _version._gte("3"): raise SkipTest - with mock.patch('pymongo.MongoReplicaSetClient') as replica: + with mock.patch("pymongo.MongoReplicaSetClient") as replica: + class Replica(Mongo): - config_host = 'localhost' + config_host = "localhost" config_port = 27017 - config_replica = 'test' + config_replica = "test" with Replica: pass @@ -86,19 +89,20 @@ class Replica(Mongo): replica.assert_called_once() -@raises(TypeError) def test_replica_errors_for_versions_before_2_1(): - if _version._gte('2.1'): + if _version._gte("2.1"): raise SkipTest - class Replica(Mongo): - config_host = 'localhost' - config_port = 27017 - config_replica = 'test' + with pytest.raises(TypeError): + + class Replica(Mongo): + config_host = "localhost" + config_port = 27017 + config_replica = "test" -def test_reconnect(): - with mock.patch.object(DBTest, '_new_connection') as _new_conn: +def test_reconnect(DBTest): + with mock.patch.object(DBTest, "_new_connection") as _new_conn: DBTest.reconnect() _new_conn.assert_called_once() @@ -106,70 +110,69 @@ def test_reconnect(): DBTest.reconnect() -def test_mongo_uri_with_database(): - if _version._lt('2.6.0'): +def test_mongo_uri_with_database(mongodb_uri): + if _version._lt("2.6.0"): raise SkipTest("Needs version 2.6.0 or later") - host = pyconfig.get('humbledb.test.db.host', 'localhost') - port = pyconfig.get('humbledb.test.db.port', 27017) - uri = 'mongodb://{}:{}/{}'.format(host, port, database_name()) - class DBuri(Mongo): - config_uri = uri + config_uri = mongodb_uri with DBuri: - eq_(DBuri.database.name, database_name()) - eq_(Mongo.context.database.name, database_name()) + assert DBuri.database.name == database_name() + assert Mongo.context.database.name == database_name() -@raises(DatabaseMismatch) -def test_mongo_uri_database_with_conflict_raises_error(): - if _version._lt('2.6.0'): +def test_mongo_uri_database_with_conflict_raises_error(mongodb_uri): + if _version._lt("2.6.0"): raise SkipTest("Needs version 2.6.0 or later") - host = pyconfig.get('humbledb.test.db.host', 'localhost') - port = pyconfig.get('humbledb.test.db.port', 27017) - uri = 'mongodb://{}:{}/{}'.format(host, port, database_name()) - class DBuri(Mongo): - config_uri = uri + config_uri = mongodb_uri from humbledb import Document + class TestDoc(Document): - config_database = database_name() + '_is_different' - config_collection = 'test' + config_database = database_name() + "_is_different" + config_collection = "test" with DBuri: - TestDoc.find() + with pytest.raises(DatabaseMismatch): + TestDoc.find() -@raises(TypeError) -def test_mongo_client_with_ssl_before_2_1(): - if _version._gte('2.1'): +def test_mongo_client_with_ssl_before_2_1(mongodb_service): + if _version._gte("2.1"): raise 
SkipTest("Only test this with version 2.1 or earlier.") - class SSLMongo(Mongo): - config_host = 'localhost' - config_port = 27017 - config_ssl = True + host, port = mongodb_service + + with pytest.raises(TypeError): + class SSLMongo(Mongo): + config_host = host + config_port = port + config_ssl = True -def test_mongo_client_with_ssl_after_2_1(): - if _version._lt('2.1'): + +def test_mongo_client_with_ssl_after_2_1(mongodb_service): + if _version._lt("2.1"): raise SkipTest("This test requires version 2.1 or later.") + host, port = mongodb_service + class SSLMongo(Mongo): - config_host = 'localhost' - config_port = 27017 + config_host = host + config_port = port config_ssl = True - config_mongo_client = ({'serverSelectionTimeoutMS': 300} if - _version._gte('3.0') else {}) + config_mongo_client = ( + {"serverSelectionTimeoutMS": 300} if _version._gte("3.0") else {} + ) class SomeDoc(Document): config_database = database_name() - config_collection = 'ssl_collection' + config_collection = "ssl_collection" - name = 'n' + name = "n" try: SomeDoc.insert @@ -178,11 +181,10 @@ class SomeDoc(Document): try: import socket + socket.setdefaulttimeout(3) with SSLMongo: - SomeDoc.insert({SomeDoc.name:'foobar'}) - ok_(SomeDoc.find({SomeDoc.name:'foobar'})) + SomeDoc.insert({SomeDoc.name: "foobar"}) + assert SomeDoc.find({SomeDoc.name: "foobar"}) except ConnectionFailure as err: raise SkipTest("SSL may not be enabled on mongodb server: %r" % err) - - diff --git a/test/test_humbledb/test_report.py b/test/test_humbledb/test_report.py index 3d2c195..997b4a4 100644 --- a/test/test_humbledb/test_report.py +++ b/test/test_humbledb/test_report.py @@ -1,427 +1,424 @@ import calendar import datetime +import pytest import pytool -from nose.tools import raises from humbledb import report -from ..util import DBTest, database_name, eq_, ok_ -from humbledb.report import (Report, YEAR, MONTH, DAY, HOUR, MINUTE) +from humbledb.report import DAY, HOUR, MINUTE, MONTH, YEAR, Report + +from ..util import database_name class Yearly(Report): config_database = database_name() - config_collection = 'report.year' + config_collection = "report.year" config_period = YEAR config_intervals = [YEAR, DAY] class Monthly(Report): config_database = database_name() - config_collection = 'report.month' + config_collection = "report.month" config_period = MONTH config_intervals = [MONTH, HOUR] class Daily(Report): config_database = database_name() - config_collection = 'report.day' + config_collection = "report.day" config_period = DAY config_intervals = [DAY, MINUTE] class Full(Report): config_database = database_name() - config_collection = 'report.day' + config_collection = "report.day" config_period = YEAR config_intervals = [YEAR, MINUTE] class ByHour(Report): config_database = database_name() - config_collection = 'report.by_hour' + config_collection = "report.by_hour" config_period = DAY config_intervals = [DAY, HOUR] -def teardown(): - DBTest.connection.drop_database(database_name()) - - def test_update_clause_creates_dot_notated_clause(): stamp = datetime.datetime(2013, 1, 5, 7, 9, 0, tzinfo=pytool.time.UTC()) - eq_(Yearly._update_clause(YEAR, stamp), {Yearly.year: 1}) - eq_(Yearly._update_clause(MONTH, stamp), {Yearly.month + '.0': 1}) - eq_(Yearly._update_clause(DAY, stamp), {Yearly.day + '.0.4': 1}) - eq_(Yearly._update_clause(HOUR, stamp), {Yearly.hour + '.0.4.7': 1}) - eq_(Yearly._update_clause(MINUTE, stamp), {Yearly.minute + '.0.4.7.9': 1}) + assert Yearly._update_clause(YEAR, stamp) == {Yearly.year: 1} + assert 
Yearly._update_clause(MONTH, stamp) == {Yearly.month + ".0": 1} + assert Yearly._update_clause(DAY, stamp) == {Yearly.day + ".0.4": 1} + assert Yearly._update_clause(HOUR, stamp) == {Yearly.hour + ".0.4.7": 1} + assert Yearly._update_clause(MINUTE, stamp) == {Yearly.minute + ".0.4.7.9": 1} - eq_(Monthly._update_clause(YEAR, stamp), {Monthly.year: 1}) - eq_(Monthly._update_clause(MONTH, stamp), {Monthly.month: 1}) - eq_(Monthly._update_clause(DAY, stamp), {Monthly.day + '.4': 1}) - eq_(Monthly._update_clause(HOUR, stamp), {Monthly.hour + '.4.7': 1}) - eq_(Monthly._update_clause(MINUTE, stamp), {Monthly.minute + '.4.7.9': 1}) + assert Monthly._update_clause(YEAR, stamp) == {Monthly.year: 1} + assert Monthly._update_clause(MONTH, stamp) == {Monthly.month: 1} + assert Monthly._update_clause(DAY, stamp) == {Monthly.day + ".4": 1} + assert Monthly._update_clause(HOUR, stamp) == {Monthly.hour + ".4.7": 1} + assert Monthly._update_clause(MINUTE, stamp) == {Monthly.minute + ".4.7.9": 1} - eq_(Daily._update_clause(YEAR, stamp), {Daily.year: 1}) - eq_(Daily._update_clause(MONTH, stamp), {Daily.month: 1}) - eq_(Daily._update_clause(DAY, stamp), {Daily.day: 1}) - eq_(Daily._update_clause(HOUR, stamp), {Daily.hour + '.7': 1}) - eq_(Daily._update_clause(MINUTE, stamp), {Daily.minute + '.7.9': 1}) + assert Daily._update_clause(YEAR, stamp) == {Daily.year: 1} + assert Daily._update_clause(MONTH, stamp) == {Daily.month: 1} + assert Daily._update_clause(DAY, stamp) == {Daily.day: 1} + assert Daily._update_clause(HOUR, stamp) == {Daily.hour + ".7": 1} + assert Daily._update_clause(MINUTE, stamp) == {Daily.minute + ".7.9": 1} -def test_record_event_yearly(): - event = 'yearly_record_event' +def test_record_event_yearly(DBTest): + event = "yearly_record_event" now = pytool.time.utcnow() with DBTest: Yearly.record(event, now) doc = Yearly.find_one() - eq_(doc.meta.event, event) - eq_(doc.meta.period, Yearly._period(now)) + assert doc.meta.event == event + assert doc.meta.period == Yearly._period(now) - eq_(len(doc.day), 12) + assert len(doc.day) == 12 for month in doc.day: - ok_(len(month) >= 28) + assert len(month) >= 28 - eq_(doc.year, 1) - eq_(doc.day[now.month-1][now.day-1], 1) + assert doc.year == 1 + assert doc.day[now.month - 1][now.day - 1] == 1 with DBTest: Yearly.record(event, now) doc = Yearly.find_one() - eq_(doc.year, 2) - eq_(doc.day[now.month-1][now.day-1], 2) + assert doc.year == 2 + assert doc.day[now.month - 1][now.day - 1] == 2 -def test_record_event_monthly(): - event = 'monthly_record_event' +def test_record_event_monthly(DBTest): + event = "monthly_record_event" now = pytool.time.utcnow() with DBTest: Monthly.record(event, now) doc = Monthly.find_one() - eq_(doc.meta.event, event) - eq_(doc.meta.period, Monthly._period(now)) + assert doc.meta.event == event + assert doc.meta.period == Monthly._period(now) - ok_(len(doc.hour) >= 28) + assert len(doc.hour) >= 28 for day in doc.hour: - eq_(len(day), 24) + assert len(day) == 24 - eq_(doc.month, 1) - eq_(doc.hour[now.day-1][now.hour], 1) + assert doc.month == 1 + assert doc.hour[now.day - 1][now.hour] == 1 with DBTest: Monthly.record(event, now) doc = Monthly.find_one() - eq_(doc.month, 2) - eq_(doc.hour[now.day-1][now.hour], 2) + assert doc.month == 2 + assert doc.hour[now.day - 1][now.hour] == 2 -def test_record_event_daily(): - event = 'daily_record_event' +def test_record_event_daily(DBTest): + event = "daily_record_event" now = pytool.time.utcnow() with DBTest: Daily.record(event, now) doc = Daily.find_one() - eq_(doc.meta.event, event) - 
eq_(doc.meta.period, Daily._period(now)) + assert doc.meta.event == event + assert doc.meta.period == Daily._period(now) - eq_(len(doc.minute), 24) + assert len(doc.minute) == 24 for minute in doc.minute: - eq_(len(minute), 60) + assert len(minute) == 60 - eq_(doc.day, 1) - eq_(doc.minute[now.hour][now.minute], 1) + assert doc.day == 1 + assert doc.minute[now.hour][now.minute] == 1 with DBTest: Daily.record(event, now) doc = Daily.find_one() - eq_(doc.day, 2) - eq_(doc.minute[now.hour][now.minute], 2) + assert doc.day == 2 + assert doc.minute[now.hour][now.minute] == 2 -def test_preallocate_future(): +def test_preallocate_future(DBTest): class PreallocAlways(Report): config_database = database_name() - config_collection = 'prealloc' + config_collection = "prealloc" config_period = MONTH config_intervals = [MONTH, HOUR] config_preallocation = 1 - event = 'prealloc_future' + event = "prealloc_future" now = pytool.time.utcnow() with DBTest: PreallocAlways.record(event, now) - eq_(PreallocAlways.find().count(), 2) + assert PreallocAlways.find().count() == 2 # Ensure we don't preallocate too many PreallocAlways._preallocated[PreallocAlways._period(now)].remove(event) PreallocAlways.record(event) - eq_(PreallocAlways.find().count(), 2) + assert PreallocAlways.find().count() == 2 -def test_report_query_by_hour(): +def test_report_query_by_hour(DBTest): now = pytool.time.utcnow() - event = 'event_test_report_query_by_hour' + event = "event_test_report_query_by_hour" with DBTest: ByHour.record(event, now) - ByHour.record(event, now - datetime.timedelta(seconds=60*60)) + ByHour.record(event, now - datetime.timedelta(seconds=60 * 60)) counts = ByHour.hourly(event)[-3:] - eq_(counts, [0, 1, 1]) + assert counts == [0, 1, 1] -def test_report_query_by_hour_across_edge(): +def test_report_query_by_hour_across_edge(DBTest): stamp = datetime.datetime(2013, 1, 1, tzinfo=pytool.time.UTC()) - stamp2 = stamp - datetime.timedelta(seconds=60*60) - event = 'event_test_report_query_by_hour_edge' + stamp2 = stamp - datetime.timedelta(seconds=60 * 60) + event = "event_test_report_query_by_hour_edge" with DBTest: ByHour.record(event, stamp) ByHour.record(event, stamp2) - stamp += datetime.timedelta(seconds=60*60+1) - stamp2 -= datetime.timedelta(seconds=60*60) + stamp += datetime.timedelta(seconds=60 * 60 + 1) + stamp2 -= datetime.timedelta(seconds=60 * 60) counts = ByHour.hourly(event)[stamp2:stamp] - eq_(counts, [0, 1, 1, 0]) - eq_([c.year for c in counts], [2012, 2012, 2013, 2013]) - eq_([c.month for c in counts], [12, 12, 1, 1]) - eq_([c.hour for c in counts], [22, 23, 0, 1]) - eq_([c.minute for c in counts], [0] * 4) + assert counts == [0, 1, 1, 0] + assert [c.year for c in counts] == [2012, 2012, 2013, 2013] + assert [c.month for c in counts] == [12, 12, 1, 1] + assert [c.hour for c in counts] == [22, 23, 0, 1] + assert [c.minute for c in counts] == [0] * 4 -@raises(ValueError) def test_resolution_error(): - ByHour.per_minute + with pytest.raises(ValueError): + ByHour.per_minute -@raises(TypeError) def test_index_error(): - ByHour.hourly[-1] + with pytest.raises(TypeError): + ByHour.hourly[-1] -@raises(TypeError) def test_extended_slice_error(): - ByHour.hourly[2:3:4] + with pytest.raises(TypeError): + ByHour.hourly[2:3:4] -def test_report_query_monthly_by_yearly(): +def test_report_query_monthly_by_yearly(DBTest): stamp = pytool.time.utcnow() stamp = stamp.replace(hour=1, minute=0, second=0, microsecond=0) - hour = datetime.timedelta(seconds=60*60) - event = 'event_report_query_monthly_by_yearly' + hour = 
datetime.timedelta(seconds=60 * 60) + event = "event_report_query_monthly_by_yearly" with DBTest: Monthly.record(event, stamp) Monthly.record(event, stamp + hour) Monthly.record(event, stamp + hour + hour) counts = Monthly.yearly(event)[-1:] - eq_(counts, [3]) + assert counts == [3] count = counts[0] - eq_(count.timestamp.timetuple()[:1], stamp.timetuple()[:1]) - eq_(count.month, 1) - eq_(count.day, 1) - eq_(count.hour, 0) + assert count.timestamp.timetuple()[:1] == stamp.timetuple()[:1] + assert count.month == 1 + assert count.day == 1 + assert count.hour == 0 -def test_report_query_monthly_by_monthly(): +def test_report_query_monthly_by_monthly(DBTest): stamp = pytool.time.utcnow() stamp = report._relative_period(report.MONTH, stamp, -1) stamp = stamp.replace(hour=1, minute=0, second=0, microsecond=0) - hour = datetime.timedelta(seconds=60*60) - event = 'event_report_query_monthly_by_monthly' + hour = datetime.timedelta(seconds=60 * 60) + event = "event_report_query_monthly_by_monthly" with DBTest: Monthly.record(event, stamp) Monthly.record(event, stamp + hour) Monthly.record(event, stamp + hour + hour) counts = Monthly.monthly(event)[-2:-1] - eq_(counts, [3]) + assert counts == [3] count = counts[0] - eq_(count.timestamp.timetuple()[:2], stamp.timetuple()[:2]) - eq_(count.day, 1) - eq_(count.hour, 0) + assert count.timestamp.timetuple()[:2] == stamp.timetuple()[:2] + assert count.day == 1 + assert count.hour == 0 -def test_report_query_monthly_by_daily(): +def test_report_query_monthly_by_daily(DBTest): stamp = pytool.time.utcnow() stamp -= datetime.timedelta(days=1) stamp = stamp.replace(hour=1, minute=0, second=0, microsecond=0) - hour = datetime.timedelta(seconds=60*60) - event = 'event_report_query_monthly_by_daily' + hour = datetime.timedelta(seconds=60 * 60) + event = "event_report_query_monthly_by_daily" with DBTest: Monthly.record(event, stamp) Monthly.record(event, stamp + hour) Monthly.record(event, stamp + hour + hour) counts = Monthly.daily(event)[-2:-1] - eq_(counts, [3]) + assert counts == [3] count = counts[0] - eq_(count.timestamp.timetuple()[:3], stamp.timetuple()[:3]) - eq_(count.hour, 0) + assert count.timestamp.timetuple()[:3] == stamp.timetuple()[:3] + assert count.hour == 0 -def test_report_query_monthly_by_hourly(): +def test_report_query_monthly_by_hourly(DBTest): stamp = pytool.time.utcnow() stamp = stamp.replace(hour=1, minute=0, second=0, microsecond=0) - hour = datetime.timedelta(seconds=60*60) - event = 'event_report_query_monthly_by_hourly' + hour = datetime.timedelta(seconds=60 * 60) + event = "event_report_query_monthly_by_hourly" with DBTest: Monthly.record(event, stamp) Monthly.record(event, stamp + hour) Monthly.record(event, stamp + hour + hour) - counts = Monthly.hourly(event)[stamp - hour:stamp + hour * 4] - eq_(counts, [0, 1, 1, 1, 0]) + counts = Monthly.hourly(event)[stamp - hour : stamp + hour * 4] + assert counts == [0, 1, 1, 1, 0] for count in counts: - eq_(count.year, stamp.year) - eq_(count.month, stamp.month) - eq_(count.day, stamp.day) - eq_(count.minute, 0) + assert count.year == stamp.year + assert count.month == stamp.month + assert count.day == stamp.day + assert count.minute == 0 - eq_([c.hour for c in counts], [0, 1, 2, 3, 4]) + assert [c.hour for c in counts] == [0, 1, 2, 3, 4] -def test_report_query_yearly_by_monthly(): +def test_report_query_yearly_by_monthly(DBTest): stamp = pytool.time.utcnow() stamp = report._relative_period(YEAR, stamp, -1) stamp = stamp.replace(hour=1, minute=0, second=0, microsecond=0) - hour = 
datetime.timedelta(seconds=60*60) - event = 'event_report_query_yearly_by_monthly' + hour = datetime.timedelta(seconds=60 * 60) + event = "event_report_query_yearly_by_monthly" with DBTest: Yearly.record(event, stamp) Yearly.record(event, stamp + hour) Yearly.record(event, stamp + hour + hour) - counts = Yearly.monthly(event)[stamp - hour:stamp + hour] - eq_(counts, [3]) + counts = Yearly.monthly(event)[stamp - hour : stamp + hour] + assert counts == [3] count = counts[0] - eq_(count.timestamp.timetuple()[:2], stamp.timetuple()[:2]) - eq_(count.day, 1) - eq_(count.hour, 0) + assert count.timestamp.timetuple()[:2] == stamp.timetuple()[:2] + assert count.day == 1 + assert count.hour == 0 -def test_report_query_regex(): +def test_report_query_regex(DBTest): stamp = pytool.time.utcnow() stamp -= datetime.timedelta(days=1) stamp = stamp.replace(hour=1, minute=0, second=0, microsecond=0) - hour = datetime.timedelta(seconds=60*60) + hour = datetime.timedelta(seconds=60 * 60) with DBTest: - Monthly.record('regex_test1', stamp) - Monthly.record('regex_test2', stamp + hour) - counts = Monthly.daily('regex_test', regex=True)[-2:-1] - eq_(len(counts), 2) - eq_(counts['regex_test1'], [1]) - eq_(counts['regex_test1'], [1]) + Monthly.record("regex_test1", stamp) + Monthly.record("regex_test2", stamp + hour) + counts = Monthly.daily("regex_test", regex=True)[-2:-1] + assert len(counts) == 2 + assert counts["regex_test1"] == [1] + assert counts["regex_test1"] == [1] -def test_report_query_end_index(): +def test_report_query_end_index(DBTest): stamp = pytool.time.utcnow() this_year = stamp.year - last_year = stamp.year-1 + last_year = stamp.year - 1 - stamp = datetime.datetime(this_year, 12, 31, 23, 59, 59, - tzinfo=pytool.time.UTC()) + stamp = datetime.datetime(this_year, 12, 31, 23, 59, 59, tzinfo=pytool.time.UTC()) - event = 'event_report_query_end_index' + event = "event_report_query_end_index" with DBTest: Daily.record(event, stamp, safe=True) - eq_(Daily.yearly(event)[last_year+1:this_year+1][-1], 1) - eq_(Daily.monthly(event)[1:13][-1], 1) + assert Daily.yearly(event)[last_year + 1 : this_year + 1][-1] == 1 + assert Daily.monthly(event)[1:13][-1] == 1 stamp = pytool.time.utcnow() stamp = report._relative_period(MONTH, stamp, 1) stamp -= datetime.timedelta(seconds=1) - event = 'event_report_query_end_index_daily' + event = "event_report_query_end_index_daily" with DBTest: Daily.record(event, stamp) _, end_of_month = calendar.monthrange(stamp.year, stamp.month) - eq_(Daily.daily(event)[1:end_of_month+1][-1], 1) + assert Daily.daily(event)[1 : end_of_month + 1][-1] == 1 stamp = pytool.time.utcnow() stamp = stamp.replace(hour=23, minute=59, second=59) - event = 'event_report_query_end_index_daily_day' + event = "event_report_query_end_index_daily_day" with DBTest: Daily.record(event, stamp) - eq_(Daily.hourly(event)[0:24][-1], 1) + assert Daily.hourly(event)[0:24][-1] == 1 stamp = pytool.time.utcnow() stamp = stamp.replace(minute=59, second=59) - event = 'event_query_end_index_per_minute' + event = "event_query_end_index_per_minute" with DBTest: Daily.record(event, stamp) - eq_(Daily.per_minute(event)[0:60][-1], 1) + assert Daily.per_minute(event)[0:60][-1] == 1 -def test_unspecified_start_year_index(): +def test_unspecified_start_year_index(DBTest): stamp = pytool.time.utcnow() this_year = stamp.year - two_years_ago = this_year-2 + two_years_ago = this_year - 2 two_years_ago_stamp = stamp.replace(year=two_years_ago) - diff = 0-(this_year-two_years_ago) - event = 'event_unspecified_start_year_index' + 
diff = 0 - (this_year - two_years_ago) + event = "event_unspecified_start_year_index" with DBTest: ByHour.record(event, two_years_ago_stamp) - eq_(ByHour.yearly(event)[:-1][diff], 1) + assert ByHour.yearly(event)[:-1][diff] == 1 -def test_no_results(): + +def test_no_results(DBTest): with DBTest: - eq_(ByHour.hourly('None')[-1:], []) + assert ByHour.hourly("None")[-1:] == [] -@raises(IndexError) def test_year_index_out_of_range(): - ByHour.yearly[:2038] + with pytest.raises(IndexError): + ByHour.yearly[:2038] -@raises(IndexError) def test_year_index_out_of_range_lower(): - ByHour.yearly[1969:] + with pytest.raises(IndexError): + ByHour.yearly[1969:] -@raises(IndexError) def test_month_index_out_of_range(): - ByHour.monthly[:14] + with pytest.raises(IndexError): + ByHour.monthly[:14] -@raises(IndexError) def test_month_index_out_of_range2(): - ByHour.monthly[0:] + with pytest.raises(IndexError): + ByHour.monthly[0:] -@raises(IndexError) def test_daily_index_out_of_range(): - ByHour.daily[:33] + with pytest.raises(IndexError): + ByHour.daily[:33] -@raises(IndexError) def test_daily_index_out_of_range2(): - ByHour.daily[0:] + with pytest.raises(IndexError): + ByHour.daily[0:] -@raises(IndexError) def test_hour_index_out_of_range(): - ByHour.hourly[:25] + with pytest.raises(IndexError): + ByHour.hourly[:25] -@raises(IndexError) def test_minute_index_out_of_range(): - Daily.per_minute[:61] + with pytest.raises(IndexError): + Daily.per_minute[:61] -@raises(TypeError) def test_bad_index_type(): - Daily.per_minute['foo'] + with pytest.raises(TypeError): + Daily.per_minute["foo"] -@raises(TypeError) def test_bad_index_type_slice(): - Daily.per_minute['foo':] + with pytest.raises(TypeError): + Daily.per_minute["foo":] def test_report_count_addition_maintains_lesser_timestamp(): @@ -430,55 +427,57 @@ def test_report_count_addition_maintains_lesser_timestamp(): a = report.ReportCount(3, stamp) b = report.ReportCount(5, stamp2) c = a + b - eq_(c, 8) - eq_(c.timestamp, stamp) + assert c == 8 + assert c.timestamp == stamp def test_report_count_works_with_integers(): stamp = pytool.time.utcnow() a = report.ReportCount(3, stamp) b = a + 2 - eq_(b, 5) - eq_(b.timestamp, stamp) + assert b == 5 + assert b.timestamp == stamp c = b + 3 - eq_(c, 8) - eq_(c.timestamp, stamp) + assert c == 8 + assert c.timestamp == stamp c += 5 - eq_(c, 13) - eq_(c.timestamp, stamp) + assert c == 13 + assert c.timestamp == stamp -def test_report_query_coerces_date(): +def test_report_query_coerces_date(DBTest): stamp = pytool.time.utcnow() - hour = datetime.timedelta(seconds=60*60) + hour = datetime.timedelta(seconds=60 * 60) - event = 'event_date_coercion' + event = "event_date_coercion" with DBTest: ByHour.record(event, pytool.time.floor_day(stamp) - hour) - eq_(ByHour.daily(event)[-2:-1], [1]) + assert ByHour.daily(event)[-2:-1] == [1] def test_relative_period_MONTH_across_end_of_year_and_beginning(): stamp = datetime.datetime(2013, 1, 1, tzinfo=pytool.time.UTC()) - eq_(report._relative_period(MONTH, stamp, -1), datetime.datetime(2012, 12, - 1, tzinfo=pytool.time.UTC())) + assert report._relative_period(MONTH, stamp, -1) == datetime.datetime( + 2012, 12, 1, tzinfo=pytool.time.UTC() + ) stamp = datetime.datetime(2013, 12, 1, tzinfo=pytool.time.UTC()) - eq_(report._relative_period(MONTH, stamp, 1), datetime.datetime(2014, 1, - 1, tzinfo=pytool.time.UTC())) + assert report._relative_period(MONTH, stamp, 1) == datetime.datetime( + 2014, 1, 1, tzinfo=pytool.time.UTC() + ) -def 
test_monthly_report_queried_daily_returns_correct_length(): +def test_monthly_report_queried_daily_returns_correct_length(DBTest): class Sum(Report): config_database = database_name() - config_collection = 'report.sum' + config_collection = "report.sum" config_period = MONTH config_intervals = [MONTH, HOUR] now = pytool.time.utcnow() earlier = report._relative_period(MONTH, now, -1) - event = 'monthly_as_daily' + event = "monthly_as_daily" with DBTest: Sum.record(event) Sum.record(event, stamp=earlier) @@ -486,67 +485,67 @@ class Sum(Report): days = days.get(event, []) # Check we get the right number of days - eq_(len(days), 65) + assert len(days) == 65 # Check we get the correct total - eq_(sum(days), 2) + assert sum(days) == 2 # Ensure dates come back in correct order date = report._relative_period(DAY, days[0].timestamp, -1) for day in days: - ok_(day.timestamp > date) + assert day.timestamp > date date = day.timestamp -def test_report_queried_with_date_works(): +def test_report_queried_with_date_works(DBTest): now = pytool.time.utcnow() today = now.date() tomorrow = now + datetime.timedelta(days=1) tomorrow = tomorrow.date() - event = 'event_query_with_date' + event = "event_query_with_date" with DBTest: Monthly.record(event) Monthly.record(event) Monthly.record(event) - eq_(sum(Monthly.daily(event)[today:tomorrow]), 3) + assert sum(Monthly.daily(event)[today:tomorrow]) == 3 -def test_record_arbitrary_count(): - event = 'event_arbitrary_count' +def test_record_arbitrary_count(DBTest): + event = "event_arbitrary_count" with DBTest: Monthly.record(event, count=20) - eq_(sum(Monthly.hourly(event)[-1:]), 20) + assert sum(Monthly.hourly(event)[-1:]) == 20 -def test_record_negative_count(): - event = 'event_negative_count' +def test_record_negative_count(DBTest): + event = "event_negative_count" with DBTest: Monthly.record(event, count=-5) - eq_(sum(Monthly.hourly(event)[-1:]), -5) + assert sum(Monthly.hourly(event)[-1:]) == -5 -@raises(ValueError) def test_record_bad_stamp_type_raises_value_error(): - Monthly.record('foo', 20) + with pytest.raises(ValueError): + Monthly.record("foo", 20) -@raises(ValueError) def test_record_bad_count_type_raises_value_error(): - Monthly.record('foo', count='bar') + with pytest.raises(ValueError): + Monthly.record("foo", count="bar") -@raises(ValueError) def test_record_bad_count_type_raises_value_error2(): - Monthly.record('foo', count=2.5) + with pytest.raises(ValueError): + Monthly.record("foo", count=2.5) -def test_recording_and_retrieving_in_september_works(): +def test_recording_and_retrieving_in_september_works(DBTest): with DBTest: - Monthly.record('test_september', datetime.datetime(2013, 9, 1, 12)) + Monthly.record("test_september", datetime.datetime(2013, 9, 1, 12)) with DBTest: - vals = Monthly.hourly('test_september')[datetime.datetime(2013, 8, 1): - datetime.datetime(2013, 10, 10)] - - eq_(sum(vals), 1) + vals = Monthly.hourly("test_september")[ + datetime.datetime(2013, 8, 1) : datetime.datetime(2013, 10, 10) + ] + assert sum(vals) == 1 diff --git a/test/test_humbledb/test_version.py b/test/test_humbledb/test_version.py new file mode 100644 index 0000000..5f0e53e --- /dev/null +++ b/test/test_humbledb/test_version.py @@ -0,0 +1,227 @@ +""" +Tests for humbledb._version module. + +This module tests version checking helpers and kwargs cleaning functionality. 
+""" + +from unittest.mock import patch + +from packaging.version import Version + +from humbledb._version import _clean, _gte, _lt + +# All these patched versions need to be unique so we don't hit the lru_cache on +# _lt and _gte This is a bit of a hack, but it's the best we can do for now. + + +# Test _lt function +@patch("humbledb._version.PYMONGO", Version("7.8.0")) +def test_lt_true_when_pymongo_version_is_lower(): + """Test _lt returns True when pymongo version is lower than target.""" + assert _lt("8.0.0") is True + + +@patch("humbledb._version.PYMONGO", Version("9.2.0")) +def test_lt_false_when_pymongo_version_is_higher(): + """Test _lt returns False when pymongo version is higher than target.""" + assert _lt("9.0.0") is False + + +@patch("humbledb._version.PYMONGO", Version("10.0.0")) +def test_lt_false_when_pymongo_version_is_equal(): + """Test _lt returns False when pymongo version equals target.""" + assert _lt("10.0.0") is False + + +@patch("humbledb._version.PYMONGO", Version("11.0.1")) +def test_lt_with_complex_versions(): + """Test _lt with complex version strings.""" + assert _lt("11.0.2") is True + assert _lt("11.0.0") is False + + +@patch("humbledb._version.PYMONGO", Version("12.0.0rc1")) +def test_lt_with_prerelease_versions(): + """Test _lt with pre-release versions.""" + assert _lt("12.0.0") is True + + +# Test _gte function +@patch("humbledb._version.PYMONGO", Version("13.2.0")) +def test_gte_true_when_pymongo_version_is_higher(): + """Test _gte returns True when pymongo version is higher than target.""" + assert _gte("13.0.0") is True + + +@patch("humbledb._version.PYMONGO", Version("14.0.0")) +def test_gte_true_when_pymongo_version_is_equal(): + """Test _gte returns True when pymongo version equals target.""" + assert _gte("14.0.0") is True + + +@patch("humbledb._version.PYMONGO", Version("2.8.0")) +def test_gte_false_when_pymongo_version_is_lower(): + """Test _gte returns False when pymongo version is lower than target.""" + assert _gte("3.0.9") is False + + +@patch("humbledb._version.PYMONGO", Version("3.0.1")) +def test_gte_with_complex_versions(): + """Test _gte with complex version strings.""" + assert _gte("3.0.0") is True + assert _gte("3.0.1") is True + assert _gte("3.0.2") is False + + +@patch("humbledb._version.PYMONGO", Version("3.0.0rc1")) +def test_gte_with_prerelease_versions(): + """Test _gte with pre-release versions.""" + assert _gte("2.9.0") is True + + +# Test _clean function +@patch("humbledb._version._lt") +def test_clean_returns_early_for_old_pymongo_versions(mock_lt): + """Test _clean returns early when pymongo version is less than 3.0.""" + mock_lt.return_value = True + kwargs = {"safe": False, "other_param": "value"} + original_kwargs = kwargs.copy() + + _clean(kwargs) + + # Should not modify kwargs for old versions + assert kwargs == original_kwargs + mock_lt.assert_called_once_with("3.0") + + +@patch("humbledb._version._lt") +def test_clean_returns_early_when_no_safe_param(mock_lt): + """Test _clean returns early when 'safe' parameter is not present.""" + mock_lt.return_value = False + kwargs = {"other_param": "value", "w": 1} + original_kwargs = kwargs.copy() + + _clean(kwargs) + + # Should not modify kwargs when no 'safe' param + assert kwargs == original_kwargs + + +@patch("humbledb._version._lt") +def test_clean_converts_safe_false_to_w_zero(mock_lt): + """Test _clean converts safe=False to w=0.""" + mock_lt.return_value = False + kwargs = {"safe": False, "other_param": "value"} + + _clean(kwargs) + + expected = {"w": 0, 
"other_param": "value"} + assert kwargs == expected + + +@patch("humbledb._version._lt") +def test_clean_removes_safe_true(mock_lt): + """Test _clean removes safe=True without adding w parameter.""" + mock_lt.return_value = False + kwargs = {"safe": True, "other_param": "value"} + + _clean(kwargs) + + expected = {"other_param": "value"} + assert kwargs == expected + + +@patch("humbledb._version._lt") +def test_clean_removes_safe_none(mock_lt): + """Test _clean removes safe=None without adding w parameter.""" + mock_lt.return_value = False + kwargs = {"safe": None, "other_param": "value"} + + _clean(kwargs) + + expected = {"other_param": "value"} + assert kwargs == expected + + +@patch("humbledb._version._lt") +def test_clean_removes_safe_string(mock_lt): + """Test _clean removes safe with string value without adding w parameter.""" + mock_lt.return_value = False + kwargs = {"safe": "acknowledge", "other_param": "value"} + + _clean(kwargs) + + expected = {"other_param": "value"} + assert kwargs == expected + + +@patch("humbledb._version._lt") +def test_clean_handles_empty_kwargs(mock_lt): + """Test _clean handles empty kwargs dictionary.""" + mock_lt.return_value = False + kwargs = {} + + _clean(kwargs) + + assert kwargs == {} + + +@patch("humbledb._version._lt") +def test_clean_preserves_existing_w_parameter_when_safe_false(mock_lt): + """Test _clean overwrites existing w parameter when safe=False.""" + mock_lt.return_value = False + kwargs = {"safe": False, "w": 2, "other_param": "value"} + + _clean(kwargs) + + expected = {"w": 0, "other_param": "value"} + assert kwargs == expected + + +@patch("humbledb._version._lt") +def test_clean_preserves_existing_w_parameter_when_safe_not_false(mock_lt): + """Test _clean preserves existing w parameter when safe is not False.""" + mock_lt.return_value = False + kwargs = {"safe": True, "w": 2, "other_param": "value"} + + _clean(kwargs) + + expected = {"w": 2, "other_param": "value"} + assert kwargs == expected + + +# Integration and edge case tests +@patch("humbledb._version.get_version") +def test_version_functions_with_actual_version_format(mock_get_version): + """Test version functions with realistic version formats.""" + # Test with actual PyMongo-like version + mock_get_version.return_value = "4.6.1" + + assert _lt("5.0.0") is True + assert _lt("4.0.0") is False + assert _gte("4.0.0") is True + assert _gte("5.0.0") is False + + +@patch("humbledb._version._lt") +def test_clean_function_integration_old_version(mock_lt): + """Test _clean function behavior with old PyMongo version.""" + mock_lt.return_value = True # Simulate old version < 3.0 + + kwargs = {"safe": False, "fsync": True} + _clean(kwargs) + + # Should be unchanged for old versions + assert kwargs == {"safe": False, "fsync": True} + + +@patch("humbledb._version._lt") +def test_clean_function_integration_new_version(mock_lt): + """Test _clean function behavior with new PyMongo version.""" + mock_lt.return_value = False # Simulate new version >= 3.0 + + kwargs = {"safe": False, "fsync": True} + _clean(kwargs) + + # Should transform safe=False to w=0 + assert kwargs == {"w": 0, "fsync": True} diff --git a/test/util.py b/test/util.py index 6084db8..c9417c9 100644 --- a/test/util.py +++ b/test/util.py @@ -1,94 +1,35 @@ -import re -import logging -import pyconfig -import nose.tools from unittest.case import SkipTest -import humbledb -from humbledb import Mongo - +import pyconfig __all__ = [ - 'eq_', - 'ok_', - 'raises', - 'assert_equal', - 'assert_is_instance', - 'is_instance_', - 
'assert_is_subclass', - 'is_subclass_', - 'assert_is', - 'is_', - 'SkipTest', - 'database_name', - 'DBTest', - 'enable_sharding', - ] - - -# Shortcut aliases for nose imports -eq_ = nose.tools.eq_ -ok_ = nose.tools.ok_ -raises = nose.tools.raises -assert_equal = nose.tools.assert_equal -assert_is_instance = nose.tools.assert_is_instance -is_instance_ = assert_is_instance + "assert_is_subclass", + "is_subclass_", + "assert_is", + "is_", + "SkipTest", + "database_name", +] def database_name(): - """ Return the test database name. """ - return pyconfig.get('humbledb.test.db.name', 'nose_humbledb') - - -def enable_sharding(collection, key): - """ Enable sharding for `collection`. """ - conn = DBTest.connection - try: - conn.admin.command('listShards') - except humbledb.errors.OperationFailure as exc: - if re.match('.*no such.*listShards', str(exc)): - logging.getLogger(__name__).info("Sharding not available.") - return False - raise - try: - conn.admin.command('enableSharding', database_name()) - except humbledb.errors.OperationFailure as exc: - if 'already' not in str(exc): - raise - try: - conn.admin.command('shardCollection', database_name() + '.' + - collection, key=key) - except humbledb.errors.OperationFailure as exc: - if 'already' not in str(exc): - raise - logging.getLogger(__name__).info("Sharding enabled for %r.%r on %r.", - database_name(), collection, key) - return True - - -class DBTest(Mongo): - config_host = pyconfig.setting('humbledb.test.db.host', 'localhost') - config_port = pyconfig.setting('humbledb.test.db.port', 27017) - - -# This instantiates the connection and causes nose to crap out if there's no -# database available, which is what we want -with DBTest: - logging.getLogger(__name__).info("Connection successful.") + """Return the test database name.""" + return pyconfig.get("humbledb.test.db.name", "humbledb_test") def assert_is_subclass(obj, cls): - """ Assert an object is a subclas of another. """ - assert issubclass(obj, cls), "{!r} is not a subclass of {!r}".format(obj, - cls) + """Assert an object is a subclass of another.""" + assert issubclass(obj, cls), "{!r} is not a subclass of {!r}".format(obj, cls) + # Shortcut alias is_subclass_ = assert_is_subclass def assert_is(obj1, obj2): - """ Assert an object is identical (same object). 
""" + """Assert an object is identical (same object).""" assert obj1 is obj2, "{!r} is not {!r}".format(obj1, obj2) + # Shortcut alias is_ = assert_is diff --git a/uv.lock b/uv.lock new file mode 100644 index 0000000..a4f5137 --- /dev/null +++ b/uv.lock @@ -0,0 +1,852 @@ +version = 1 +revision = 2 +requires-python = ">=3.10, <4.0" +resolution-markers = [ + "python_full_version >= '3.11'", + "python_full_version < '3.11'", +] + +[[package]] +name = "alabaster" +version = "0.7.16" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c9/3e/13dd8e5ed9094e734ac430b5d0eb4f2bb001708a8b7856cbf8e084e001ba/alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65", size = 23776, upload-time = "2024-01-10T00:56:10.189Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/34/d4e1c02d3bee589efb5dfa17f88ea08bdb3e3eac12bc475462aec52ed223/alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92", size = 13511, upload-time = "2024-01-10T00:56:08.388Z" }, +] + +[[package]] +name = "attrs" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, +] + +[[package]] +name = "babel" +version = "2.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/6b/d52e42361e1aa00709585ecc30b3f9684b3ab62530771402248b1b1d6240/babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d", size = 9951852, upload-time = "2025-02-01T15:17:41.026Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537, upload-time = "2025-02-01T15:17:37.39Z" }, +] + +[[package]] +name = "certifi" +version = "2025.6.15" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/73/f7/f14b46d4bcd21092d7d3ccef689615220d8a08fb25e564b65d20738e672e/certifi-2025.6.15.tar.gz", hash = "sha256:d747aa5a8b9bbbb1bb8c22bb13e22bd1f18e9796defa16bab421f7f7a317323b", size = 158753, upload-time = "2025-06-15T02:45:51.329Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/ae/320161bd181fc06471eed047ecce67b693fd7515b16d495d8932db763426/certifi-2025.6.15-py3-none-any.whl", hash = "sha256:2e0c7ce7cb5d8f8634ca55d2ba7e6ec2689a2fd6537d8dec1296a477a4910057", size = 157650, upload-time = "2025-06-15T02:45:49.977Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = 
"sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367, upload-time = "2025-05-02T08:34:42.01Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/28/9901804da60055b406e1a1c5ba7aac1276fb77f1dde635aabfc7fd84b8ab/charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941", size = 201818, upload-time = "2025-05-02T08:31:46.725Z" }, + { url = "https://files.pythonhosted.org/packages/d9/9b/892a8c8af9110935e5adcbb06d9c6fe741b6bb02608c6513983048ba1a18/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd", size = 144649, upload-time = "2025-05-02T08:31:48.889Z" }, + { url = "https://files.pythonhosted.org/packages/7b/a5/4179abd063ff6414223575e008593861d62abfc22455b5d1a44995b7c101/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6", size = 155045, upload-time = "2025-05-02T08:31:50.757Z" }, + { url = "https://files.pythonhosted.org/packages/3b/95/bc08c7dfeddd26b4be8c8287b9bb055716f31077c8b0ea1cd09553794665/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d", size = 147356, upload-time = "2025-05-02T08:31:52.634Z" }, + { url = "https://files.pythonhosted.org/packages/a8/2d/7a5b635aa65284bf3eab7653e8b4151ab420ecbae918d3e359d1947b4d61/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86", size = 149471, upload-time = "2025-05-02T08:31:56.207Z" }, + { url = "https://files.pythonhosted.org/packages/ae/38/51fc6ac74251fd331a8cfdb7ec57beba8c23fd5493f1050f71c87ef77ed0/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c", size = 151317, upload-time = "2025-05-02T08:31:57.613Z" }, + { url = "https://files.pythonhosted.org/packages/b7/17/edee1e32215ee6e9e46c3e482645b46575a44a2d72c7dfd49e49f60ce6bf/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0", size = 146368, upload-time = "2025-05-02T08:31:59.468Z" }, + { url = "https://files.pythonhosted.org/packages/26/2c/ea3e66f2b5f21fd00b2825c94cafb8c326ea6240cd80a91eb09e4a285830/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef", size = 154491, upload-time = "2025-05-02T08:32:01.219Z" }, + { url = "https://files.pythonhosted.org/packages/52/47/7be7fa972422ad062e909fd62460d45c3ef4c141805b7078dbab15904ff7/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6", size = 157695, upload-time = "2025-05-02T08:32:03.045Z" }, + { url = "https://files.pythonhosted.org/packages/2f/42/9f02c194da282b2b340f28e5fb60762de1151387a36842a92b533685c61e/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366", size = 154849, upload-time = "2025-05-02T08:32:04.651Z" }, + { url = 
"https://files.pythonhosted.org/packages/67/44/89cacd6628f31fb0b63201a618049be4be2a7435a31b55b5eb1c3674547a/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db", size = 150091, upload-time = "2025-05-02T08:32:06.719Z" }, + { url = "https://files.pythonhosted.org/packages/1f/79/4b8da9f712bc079c0f16b6d67b099b0b8d808c2292c937f267d816ec5ecc/charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a", size = 98445, upload-time = "2025-05-02T08:32:08.66Z" }, + { url = "https://files.pythonhosted.org/packages/7d/d7/96970afb4fb66497a40761cdf7bd4f6fca0fc7bafde3a84f836c1f57a926/charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509", size = 105782, upload-time = "2025-05-02T08:32:10.46Z" }, + { url = "https://files.pythonhosted.org/packages/05/85/4c40d00dcc6284a1c1ad5de5e0996b06f39d8232f1031cd23c2f5c07ee86/charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2", size = 198794, upload-time = "2025-05-02T08:32:11.945Z" }, + { url = "https://files.pythonhosted.org/packages/41/d9/7a6c0b9db952598e97e93cbdfcb91bacd89b9b88c7c983250a77c008703c/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645", size = 142846, upload-time = "2025-05-02T08:32:13.946Z" }, + { url = "https://files.pythonhosted.org/packages/66/82/a37989cda2ace7e37f36c1a8ed16c58cf48965a79c2142713244bf945c89/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd", size = 153350, upload-time = "2025-05-02T08:32:15.873Z" }, + { url = "https://files.pythonhosted.org/packages/df/68/a576b31b694d07b53807269d05ec3f6f1093e9545e8607121995ba7a8313/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8", size = 145657, upload-time = "2025-05-02T08:32:17.283Z" }, + { url = "https://files.pythonhosted.org/packages/92/9b/ad67f03d74554bed3aefd56fe836e1623a50780f7c998d00ca128924a499/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f", size = 147260, upload-time = "2025-05-02T08:32:18.807Z" }, + { url = "https://files.pythonhosted.org/packages/a6/e6/8aebae25e328160b20e31a7e9929b1578bbdc7f42e66f46595a432f8539e/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7", size = 149164, upload-time = "2025-05-02T08:32:20.333Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f2/b3c2f07dbcc248805f10e67a0262c93308cfa149a4cd3d1fe01f593e5fd2/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9", size = 144571, upload-time = "2025-05-02T08:32:21.86Z" }, + { url = "https://files.pythonhosted.org/packages/60/5b/c3f3a94bc345bc211622ea59b4bed9ae63c00920e2e8f11824aa5708e8b7/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544", size = 151952, upload-time = "2025-05-02T08:32:23.434Z" }, + { url = "https://files.pythonhosted.org/packages/e2/4d/ff460c8b474122334c2fa394a3f99a04cf11c646da895f81402ae54f5c42/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82", size = 155959, upload-time = "2025-05-02T08:32:24.993Z" }, + { url = "https://files.pythonhosted.org/packages/a2/2b/b964c6a2fda88611a1fe3d4c400d39c66a42d6c169c924818c848f922415/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0", size = 153030, upload-time = "2025-05-02T08:32:26.435Z" }, + { url = "https://files.pythonhosted.org/packages/59/2e/d3b9811db26a5ebf444bc0fa4f4be5aa6d76fc6e1c0fd537b16c14e849b6/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5", size = 148015, upload-time = "2025-05-02T08:32:28.376Z" }, + { url = "https://files.pythonhosted.org/packages/90/07/c5fd7c11eafd561bb51220d600a788f1c8d77c5eef37ee49454cc5c35575/charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a", size = 98106, upload-time = "2025-05-02T08:32:30.281Z" }, + { url = "https://files.pythonhosted.org/packages/a8/05/5e33dbef7e2f773d672b6d79f10ec633d4a71cd96db6673625838a4fd532/charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28", size = 105402, upload-time = "2025-05-02T08:32:32.191Z" }, + { url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936, upload-time = "2025-05-02T08:32:33.712Z" }, + { url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790, upload-time = "2025-05-02T08:32:35.768Z" }, + { url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924, upload-time = "2025-05-02T08:32:37.284Z" }, + { url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626, upload-time = "2025-05-02T08:32:38.803Z" }, + { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567, upload-time = "2025-05-02T08:32:40.251Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957, upload-time = "2025-05-02T08:32:41.705Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408, upload-time = "2025-05-02T08:32:43.709Z" }, + { url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399, upload-time = "2025-05-02T08:32:46.197Z" }, + { url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815, upload-time = "2025-05-02T08:32:48.105Z" }, + { url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537, upload-time = "2025-05-02T08:32:49.719Z" }, + { url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565, upload-time = "2025-05-02T08:32:51.404Z" }, + { url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357, upload-time = "2025-05-02T08:32:53.079Z" }, + { url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776, upload-time = "2025-05-02T08:32:54.573Z" }, + { url = "https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622, upload-time = "2025-05-02T08:32:56.363Z" }, + { url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435, upload-time = "2025-05-02T08:32:58.551Z" }, + { url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653, upload-time = "2025-05-02T08:33:00.342Z" }, + { url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231, upload-time = "2025-05-02T08:33:02.081Z" }, + { url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243, upload-time = "2025-05-02T08:33:04.063Z" }, + { url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442, upload-time = "2025-05-02T08:33:06.418Z" }, + { url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147, upload-time = "2025-05-02T08:33:08.183Z" }, + { url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057, upload-time = "2025-05-02T08:33:09.986Z" }, + { url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454, upload-time = "2025-05-02T08:33:11.814Z" }, + { url = "https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174, upload-time = "2025-05-02T08:33:13.707Z" }, + { url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166, upload-time = "2025-05-02T08:33:15.458Z" }, + { url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", size = 98064, upload-time = "2025-05-02T08:33:17.06Z" }, + { url = "https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", size = 105641, upload-time = "2025-05-02T08:33:18.753Z" }, + { url = 
"https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626, upload-time = "2025-05-02T08:34:40.053Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "coverage" +version = "7.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/e0/98670a80884f64578f0c22cd70c5e81a6e07b08167721c7487b4d70a7ca0/coverage-7.9.1.tar.gz", hash = "sha256:6cf43c78c4282708a28e466316935ec7489a9c487518a77fa68f716c67909cec", size = 813650, upload-time = "2025-06-13T13:02:28.627Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/78/1c1c5ec58f16817c09cbacb39783c3655d54a221b6552f47ff5ac9297603/coverage-7.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cc94d7c5e8423920787c33d811c0be67b7be83c705f001f7180c7b186dcf10ca", size = 212028, upload-time = "2025-06-13T13:00:29.293Z" }, + { url = "https://files.pythonhosted.org/packages/98/db/e91b9076f3a888e3b4ad7972ea3842297a52cc52e73fd1e529856e473510/coverage-7.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:16aa0830d0c08a2c40c264cef801db8bc4fc0e1892782e45bcacbd5889270509", size = 212420, upload-time = "2025-06-13T13:00:34.027Z" }, + { url = "https://files.pythonhosted.org/packages/0e/d0/2b3733412954576b0aea0a16c3b6b8fbe95eb975d8bfa10b07359ead4252/coverage-7.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf95981b126f23db63e9dbe4cf65bd71f9a6305696fa5e2262693bc4e2183f5b", size = 241529, upload-time = "2025-06-13T13:00:35.786Z" }, + { url = "https://files.pythonhosted.org/packages/b3/00/5e2e5ae2e750a872226a68e984d4d3f3563cb01d1afb449a17aa819bc2c4/coverage-7.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f05031cf21699785cd47cb7485f67df619e7bcdae38e0fde40d23d3d0210d3c3", size = 239403, upload-time = "2025-06-13T13:00:37.399Z" }, + { url = "https://files.pythonhosted.org/packages/37/3b/a2c27736035156b0a7c20683afe7df498480c0dfdf503b8c878a21b6d7fb/coverage-7.9.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb4fbcab8764dc072cb651a4bcda4d11fb5658a1d8d68842a862a6610bd8cfa3", size = 240548, upload-time = "2025-06-13T13:00:39.647Z" }, + { url = "https://files.pythonhosted.org/packages/98/f5/13d5fc074c3c0e0dc80422d9535814abf190f1254d7c3451590dc4f8b18c/coverage-7.9.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0f16649a7330ec307942ed27d06ee7e7a38417144620bb3d6e9a18ded8a2d3e5", size = 240459, upload-time = "2025-06-13T13:00:40.934Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/24/24b9676ea06102df824c4a56ffd13dc9da7904478db519efa877d16527d5/coverage-7.9.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:cea0a27a89e6432705fffc178064503508e3c0184b4f061700e771a09de58187", size = 239128, upload-time = "2025-06-13T13:00:42.343Z" }, + { url = "https://files.pythonhosted.org/packages/be/05/242b7a7d491b369ac5fee7908a6e5ba42b3030450f3ad62c645b40c23e0e/coverage-7.9.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e980b53a959fa53b6f05343afbd1e6f44a23ed6c23c4b4c56c6662bbb40c82ce", size = 239402, upload-time = "2025-06-13T13:00:43.634Z" }, + { url = "https://files.pythonhosted.org/packages/73/e0/4de7f87192fa65c9c8fbaeb75507e124f82396b71de1797da5602898be32/coverage-7.9.1-cp310-cp310-win32.whl", hash = "sha256:70760b4c5560be6ca70d11f8988ee6542b003f982b32f83d5ac0b72476607b70", size = 214518, upload-time = "2025-06-13T13:00:45.622Z" }, + { url = "https://files.pythonhosted.org/packages/d5/ab/5e4e2fe458907d2a65fab62c773671cfc5ac704f1e7a9ddd91996f66e3c2/coverage-7.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:a66e8f628b71f78c0e0342003d53b53101ba4e00ea8dabb799d9dba0abbbcebe", size = 215436, upload-time = "2025-06-13T13:00:47.245Z" }, + { url = "https://files.pythonhosted.org/packages/60/34/fa69372a07d0903a78ac103422ad34db72281c9fc625eba94ac1185da66f/coverage-7.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:95c765060e65c692da2d2f51a9499c5e9f5cf5453aeaf1420e3fc847cc060582", size = 212146, upload-time = "2025-06-13T13:00:48.496Z" }, + { url = "https://files.pythonhosted.org/packages/27/f0/da1894915d2767f093f081c42afeba18e760f12fdd7a2f4acbe00564d767/coverage-7.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ba383dc6afd5ec5b7a0d0c23d38895db0e15bcba7fb0fa8901f245267ac30d86", size = 212536, upload-time = "2025-06-13T13:00:51.535Z" }, + { url = "https://files.pythonhosted.org/packages/10/d5/3fc33b06e41e390f88eef111226a24e4504d216ab8e5d1a7089aa5a3c87a/coverage-7.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37ae0383f13cbdcf1e5e7014489b0d71cc0106458878ccde52e8a12ced4298ed", size = 245092, upload-time = "2025-06-13T13:00:52.883Z" }, + { url = "https://files.pythonhosted.org/packages/0a/39/7aa901c14977aba637b78e95800edf77f29f5a380d29768c5b66f258305b/coverage-7.9.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:69aa417a030bf11ec46149636314c24c8d60fadb12fc0ee8f10fda0d918c879d", size = 242806, upload-time = "2025-06-13T13:00:54.571Z" }, + { url = "https://files.pythonhosted.org/packages/43/fc/30e5cfeaf560b1fc1989227adedc11019ce4bb7cce59d65db34fe0c2d963/coverage-7.9.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a4be2a28656afe279b34d4f91c3e26eccf2f85500d4a4ff0b1f8b54bf807338", size = 244610, upload-time = "2025-06-13T13:00:56.932Z" }, + { url = "https://files.pythonhosted.org/packages/bf/15/cca62b13f39650bc87b2b92bb03bce7f0e79dd0bf2c7529e9fc7393e4d60/coverage-7.9.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:382e7ddd5289f140259b610e5f5c58f713d025cb2f66d0eb17e68d0a94278875", size = 244257, upload-time = "2025-06-13T13:00:58.545Z" }, + { url = "https://files.pythonhosted.org/packages/cd/1a/c0f2abe92c29e1464dbd0ff9d56cb6c88ae2b9e21becdb38bea31fcb2f6c/coverage-7.9.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e5532482344186c543c37bfad0ee6069e8ae4fc38d073b8bc836fc8f03c9e250", size = 242309, upload-time = "2025-06-13T13:00:59.836Z" }, + { url = 
"https://files.pythonhosted.org/packages/57/8d/c6fd70848bd9bf88fa90df2af5636589a8126d2170f3aade21ed53f2b67a/coverage-7.9.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a39d18b3f50cc121d0ce3838d32d58bd1d15dab89c910358ebefc3665712256c", size = 242898, upload-time = "2025-06-13T13:01:02.506Z" }, + { url = "https://files.pythonhosted.org/packages/c2/9e/6ca46c7bff4675f09a66fe2797cd1ad6a24f14c9c7c3b3ebe0470a6e30b8/coverage-7.9.1-cp311-cp311-win32.whl", hash = "sha256:dd24bd8d77c98557880def750782df77ab2b6885a18483dc8588792247174b32", size = 214561, upload-time = "2025-06-13T13:01:04.012Z" }, + { url = "https://files.pythonhosted.org/packages/a1/30/166978c6302010742dabcdc425fa0f938fa5a800908e39aff37a7a876a13/coverage-7.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:6b55ad10a35a21b8015eabddc9ba31eb590f54adc9cd39bcf09ff5349fd52125", size = 215493, upload-time = "2025-06-13T13:01:05.702Z" }, + { url = "https://files.pythonhosted.org/packages/60/07/a6d2342cd80a5be9f0eeab115bc5ebb3917b4a64c2953534273cf9bc7ae6/coverage-7.9.1-cp311-cp311-win_arm64.whl", hash = "sha256:6ad935f0016be24c0e97fc8c40c465f9c4b85cbbe6eac48934c0dc4d2568321e", size = 213869, upload-time = "2025-06-13T13:01:09.345Z" }, + { url = "https://files.pythonhosted.org/packages/68/d9/7f66eb0a8f2fce222de7bdc2046ec41cb31fe33fb55a330037833fb88afc/coverage-7.9.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a8de12b4b87c20de895f10567639c0797b621b22897b0af3ce4b4e204a743626", size = 212336, upload-time = "2025-06-13T13:01:10.909Z" }, + { url = "https://files.pythonhosted.org/packages/20/20/e07cb920ef3addf20f052ee3d54906e57407b6aeee3227a9c91eea38a665/coverage-7.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5add197315a054e92cee1b5f686a2bcba60c4c3e66ee3de77ace6c867bdee7cb", size = 212571, upload-time = "2025-06-13T13:01:12.518Z" }, + { url = "https://files.pythonhosted.org/packages/78/f8/96f155de7e9e248ca9c8ff1a40a521d944ba48bec65352da9be2463745bf/coverage-7.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:600a1d4106fe66f41e5d0136dfbc68fe7200a5cbe85610ddf094f8f22e1b0300", size = 246377, upload-time = "2025-06-13T13:01:14.87Z" }, + { url = "https://files.pythonhosted.org/packages/3e/cf/1d783bd05b7bca5c10ded5f946068909372e94615a4416afadfe3f63492d/coverage-7.9.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a876e4c3e5a2a1715a6608906aa5a2e0475b9c0f68343c2ada98110512ab1d8", size = 243394, upload-time = "2025-06-13T13:01:16.23Z" }, + { url = "https://files.pythonhosted.org/packages/02/dd/e7b20afd35b0a1abea09fb3998e1abc9f9bd953bee548f235aebd2b11401/coverage-7.9.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81f34346dd63010453922c8e628a52ea2d2ccd73cb2487f7700ac531b247c8a5", size = 245586, upload-time = "2025-06-13T13:01:17.532Z" }, + { url = "https://files.pythonhosted.org/packages/4e/38/b30b0006fea9d617d1cb8e43b1bc9a96af11eff42b87eb8c716cf4d37469/coverage-7.9.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:888f8eee13f2377ce86d44f338968eedec3291876b0b8a7289247ba52cb984cd", size = 245396, upload-time = "2025-06-13T13:01:19.164Z" }, + { url = "https://files.pythonhosted.org/packages/31/e4/4d8ec1dc826e16791f3daf1b50943e8e7e1eb70e8efa7abb03936ff48418/coverage-7.9.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9969ef1e69b8c8e1e70d591f91bbc37fc9a3621e447525d1602801a24ceda898", size = 243577, upload-time = "2025-06-13T13:01:22.433Z" }, + { url = 
"https://files.pythonhosted.org/packages/25/f4/b0e96c5c38e6e40ef465c4bc7f138863e2909c00e54a331da335faf0d81a/coverage-7.9.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:60c458224331ee3f1a5b472773e4a085cc27a86a0b48205409d364272d67140d", size = 244809, upload-time = "2025-06-13T13:01:24.143Z" }, + { url = "https://files.pythonhosted.org/packages/8a/65/27e0a1fa5e2e5079bdca4521be2f5dabf516f94e29a0defed35ac2382eb2/coverage-7.9.1-cp312-cp312-win32.whl", hash = "sha256:5f646a99a8c2b3ff4c6a6e081f78fad0dde275cd59f8f49dc4eab2e394332e74", size = 214724, upload-time = "2025-06-13T13:01:25.435Z" }, + { url = "https://files.pythonhosted.org/packages/9b/a8/d5b128633fd1a5e0401a4160d02fa15986209a9e47717174f99dc2f7166d/coverage-7.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:30f445f85c353090b83e552dcbbdad3ec84c7967e108c3ae54556ca69955563e", size = 215535, upload-time = "2025-06-13T13:01:27.861Z" }, + { url = "https://files.pythonhosted.org/packages/a3/37/84bba9d2afabc3611f3e4325ee2c6a47cd449b580d4a606b240ce5a6f9bf/coverage-7.9.1-cp312-cp312-win_arm64.whl", hash = "sha256:af41da5dca398d3474129c58cb2b106a5d93bbb196be0d307ac82311ca234342", size = 213904, upload-time = "2025-06-13T13:01:29.202Z" }, + { url = "https://files.pythonhosted.org/packages/d0/a7/a027970c991ca90f24e968999f7d509332daf6b8c3533d68633930aaebac/coverage-7.9.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:31324f18d5969feef7344a932c32428a2d1a3e50b15a6404e97cba1cc9b2c631", size = 212358, upload-time = "2025-06-13T13:01:30.909Z" }, + { url = "https://files.pythonhosted.org/packages/f2/48/6aaed3651ae83b231556750280682528fea8ac7f1232834573472d83e459/coverage-7.9.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0c804506d624e8a20fb3108764c52e0eef664e29d21692afa375e0dd98dc384f", size = 212620, upload-time = "2025-06-13T13:01:32.256Z" }, + { url = "https://files.pythonhosted.org/packages/6c/2a/f4b613f3b44d8b9f144847c89151992b2b6b79cbc506dee89ad0c35f209d/coverage-7.9.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef64c27bc40189f36fcc50c3fb8f16ccda73b6a0b80d9bd6e6ce4cffcd810bbd", size = 245788, upload-time = "2025-06-13T13:01:33.948Z" }, + { url = "https://files.pythonhosted.org/packages/04/d2/de4fdc03af5e4e035ef420ed26a703c6ad3d7a07aff2e959eb84e3b19ca8/coverage-7.9.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4fe2348cc6ec372e25adec0219ee2334a68d2f5222e0cba9c0d613394e12d86", size = 243001, upload-time = "2025-06-13T13:01:35.285Z" }, + { url = "https://files.pythonhosted.org/packages/f5/e8/eed18aa5583b0423ab7f04e34659e51101135c41cd1dcb33ac1d7013a6d6/coverage-7.9.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34ed2186fe52fcc24d4561041979a0dec69adae7bce2ae8d1c49eace13e55c43", size = 244985, upload-time = "2025-06-13T13:01:36.712Z" }, + { url = "https://files.pythonhosted.org/packages/17/f8/ae9e5cce8885728c934eaa58ebfa8281d488ef2afa81c3dbc8ee9e6d80db/coverage-7.9.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:25308bd3d00d5eedd5ae7d4357161f4df743e3c0240fa773ee1b0f75e6c7c0f1", size = 245152, upload-time = "2025-06-13T13:01:39.303Z" }, + { url = "https://files.pythonhosted.org/packages/5a/c8/272c01ae792bb3af9b30fac14d71d63371db227980682836ec388e2c57c0/coverage-7.9.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:73e9439310f65d55a5a1e0564b48e34f5369bee943d72c88378f2d576f5a5751", size = 243123, upload-time = "2025-06-13T13:01:40.727Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/d0/2819a1e3086143c094ab446e3bdf07138527a7b88cb235c488e78150ba7a/coverage-7.9.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:37ab6be0859141b53aa89412a82454b482c81cf750de4f29223d52268a86de67", size = 244506, upload-time = "2025-06-13T13:01:42.184Z" }, + { url = "https://files.pythonhosted.org/packages/8b/4e/9f6117b89152df7b6112f65c7a4ed1f2f5ec8e60c4be8f351d91e7acc848/coverage-7.9.1-cp313-cp313-win32.whl", hash = "sha256:64bdd969456e2d02a8b08aa047a92d269c7ac1f47e0c977675d550c9a0863643", size = 214766, upload-time = "2025-06-13T13:01:44.482Z" }, + { url = "https://files.pythonhosted.org/packages/27/0f/4b59f7c93b52c2c4ce7387c5a4e135e49891bb3b7408dcc98fe44033bbe0/coverage-7.9.1-cp313-cp313-win_amd64.whl", hash = "sha256:be9e3f68ca9edb897c2184ad0eee815c635565dbe7a0e7e814dc1f7cbab92c0a", size = 215568, upload-time = "2025-06-13T13:01:45.772Z" }, + { url = "https://files.pythonhosted.org/packages/09/1e/9679826336f8c67b9c39a359352882b24a8a7aee48d4c9cad08d38d7510f/coverage-7.9.1-cp313-cp313-win_arm64.whl", hash = "sha256:1c503289ffef1d5105d91bbb4d62cbe4b14bec4d13ca225f9c73cde9bb46207d", size = 213939, upload-time = "2025-06-13T13:01:47.087Z" }, + { url = "https://files.pythonhosted.org/packages/bb/5b/5c6b4e7a407359a2e3b27bf9c8a7b658127975def62077d441b93a30dbe8/coverage-7.9.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0b3496922cb5f4215bf5caaef4cf12364a26b0be82e9ed6d050f3352cf2d7ef0", size = 213079, upload-time = "2025-06-13T13:01:48.554Z" }, + { url = "https://files.pythonhosted.org/packages/a2/22/1e2e07279fd2fd97ae26c01cc2186e2258850e9ec125ae87184225662e89/coverage-7.9.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:9565c3ab1c93310569ec0d86b017f128f027cab0b622b7af288696d7ed43a16d", size = 213299, upload-time = "2025-06-13T13:01:49.997Z" }, + { url = "https://files.pythonhosted.org/packages/14/c0/4c5125a4b69d66b8c85986d3321520f628756cf524af810baab0790c7647/coverage-7.9.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2241ad5dbf79ae1d9c08fe52b36d03ca122fb9ac6bca0f34439e99f8327ac89f", size = 256535, upload-time = "2025-06-13T13:01:51.314Z" }, + { url = "https://files.pythonhosted.org/packages/81/8b/e36a04889dda9960be4263e95e777e7b46f1bb4fc32202612c130a20c4da/coverage-7.9.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bb5838701ca68b10ebc0937dbd0eb81974bac54447c55cd58dea5bca8451029", size = 252756, upload-time = "2025-06-13T13:01:54.403Z" }, + { url = "https://files.pythonhosted.org/packages/98/82/be04eff8083a09a4622ecd0e1f31a2c563dbea3ed848069e7b0445043a70/coverage-7.9.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30a25f814591a8c0c5372c11ac8967f669b97444c47fd794926e175c4047ece", size = 254912, upload-time = "2025-06-13T13:01:56.769Z" }, + { url = "https://files.pythonhosted.org/packages/0f/25/c26610a2c7f018508a5ab958e5b3202d900422cf7cdca7670b6b8ca4e8df/coverage-7.9.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2d04b16a6062516df97969f1ae7efd0de9c31eb6ebdceaa0d213b21c0ca1a683", size = 256144, upload-time = "2025-06-13T13:01:58.19Z" }, + { url = "https://files.pythonhosted.org/packages/c5/8b/fb9425c4684066c79e863f1e6e7ecebb49e3a64d9f7f7860ef1688c56f4a/coverage-7.9.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7931b9e249edefb07cd6ae10c702788546341d5fe44db5b6108a25da4dca513f", size = 254257, upload-time = "2025-06-13T13:01:59.645Z" }, + { url = 
"https://files.pythonhosted.org/packages/93/df/27b882f54157fc1131e0e215b0da3b8d608d9b8ef79a045280118a8f98fe/coverage-7.9.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:52e92b01041151bf607ee858e5a56c62d4b70f4dac85b8c8cb7fb8a351ab2c10", size = 255094, upload-time = "2025-06-13T13:02:01.37Z" }, + { url = "https://files.pythonhosted.org/packages/41/5f/cad1c3dbed8b3ee9e16fa832afe365b4e3eeab1fb6edb65ebbf745eabc92/coverage-7.9.1-cp313-cp313t-win32.whl", hash = "sha256:684e2110ed84fd1ca5f40e89aa44adf1729dc85444004111aa01866507adf363", size = 215437, upload-time = "2025-06-13T13:02:02.905Z" }, + { url = "https://files.pythonhosted.org/packages/99/4d/fad293bf081c0e43331ca745ff63673badc20afea2104b431cdd8c278b4c/coverage-7.9.1-cp313-cp313t-win_amd64.whl", hash = "sha256:437c576979e4db840539674e68c84b3cda82bc824dd138d56bead1435f1cb5d7", size = 216605, upload-time = "2025-06-13T13:02:05.638Z" }, + { url = "https://files.pythonhosted.org/packages/1f/56/4ee027d5965fc7fc126d7ec1187529cc30cc7d740846e1ecb5e92d31b224/coverage-7.9.1-cp313-cp313t-win_arm64.whl", hash = "sha256:18a0912944d70aaf5f399e350445738a1a20b50fbea788f640751c2ed9208b6c", size = 214392, upload-time = "2025-06-13T13:02:07.642Z" }, + { url = "https://files.pythonhosted.org/packages/3e/e5/c723545c3fd3204ebde3b4cc4b927dce709d3b6dc577754bb57f63ca4a4a/coverage-7.9.1-pp39.pp310.pp311-none-any.whl", hash = "sha256:db0f04118d1db74db6c9e1cb1898532c7dcc220f1d2718f058601f7c3f499514", size = 204009, upload-time = "2025-06-13T13:02:25.787Z" }, + { url = "https://files.pythonhosted.org/packages/08/b8/7ddd1e8ba9701dea08ce22029917140e6f66a859427406579fd8d0ca7274/coverage-7.9.1-py3-none-any.whl", hash = "sha256:66b974b145aa189516b6bf2d8423e888b742517d37872f6ee4c5be0073bd9a3c", size = 204000, upload-time = "2025-06-13T13:02:27.173Z" }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version <= '3.11'" }, +] + +[[package]] +name = "coveralls" +version = "4.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage", extra = ["toml"] }, + { name = "docopt" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/75/a454fb443eb6a053833f61603a432ffbd7dd6ae53a11159bacfadb9d6219/coveralls-4.0.1.tar.gz", hash = "sha256:7b2a0a2bcef94f295e3cf28dcc55ca40b71c77d1c2446b538e85f0f7bc21aa69", size = 12419, upload-time = "2024-05-15T12:56:14.297Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/63/e5/6708c75e2a4cfca929302d4d9b53b862c6dc65bd75e6933ea3d20016d41d/coveralls-4.0.1-py3-none-any.whl", hash = "sha256:7a6b1fa9848332c7b2221afb20f3df90272ac0167060f41b5fe90429b30b1809", size = 13599, upload-time = "2024-05-15T12:56:12.342Z" }, +] + +[[package]] +name = "dnspython" +version = "2.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197, upload-time = "2024-10-05T20:14:59.362Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632, upload-time = "2024-10-05T20:14:57.687Z" }, +] + +[[package]] +name = "docopt" +version = "0.6.2" +source = { registry = "https://pypi.org/simple" } +sdist = { 
url = "https://files.pythonhosted.org/packages/a2/55/8f8cab2afd404cf578136ef2cc5dfb50baa1761b68c9da1fb1e4eed343c9/docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491", size = 25901, upload-time = "2014-06-16T11:18:57.406Z" } + +[[package]] +name = "docutils" +version = "0.20.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1f/53/a5da4f2c5739cf66290fac1431ee52aff6851c7c8ffd8264f13affd7bcdd/docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b", size = 2058365, upload-time = "2023-05-16T23:39:19.748Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/87/f238c0670b94533ac0353a4e2a1a771a0cc73277b88bff23d3ae35a256c1/docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6", size = 572666, upload-time = "2023-05-16T23:39:15.976Z" }, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, +] + +[[package]] +name = "humbledb" +version = "6.0.0" +source = { virtual = "." 
} +dependencies = [ + { name = "pyconfig" }, + { name = "pymongo" }, + { name = "pytool" }, +] + +[package.dev-dependencies] +dev = [ + { name = "coveralls" }, + { name = "pytest" }, + { name = "pytest-cov" }, + { name = "pytest-docker" }, +] +docs = [ + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "sphinx-autodoc-typehints", version = "3.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "sphinx-autodoc-typehints", version = "3.2.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "sphinx-rtd-theme" }, +] + +[package.metadata] +requires-dist = [ + { name = "pyconfig" }, + { name = "pymongo", specifier = ">=2.0.1" }, + { name = "pytool", specifier = ">=3.4.1" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "coveralls", specifier = ">=4.0.1" }, + { name = "pytest", specifier = ">=8.4.0" }, + { name = "pytest-cov", specifier = ">=6.2.1" }, + { name = "pytest-docker", specifier = ">=3.2.2" }, +] +docs = [ + { name = "sphinx", specifier = ">=8.0.0,<9.0.0" }, + { name = "sphinx-autodoc-typehints", specifier = ">=3.0.0,<4.0.0" }, + { name = "sphinx-rtd-theme", specifier = ">=3.0.0,<4.0.0" }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/84/62473fb57d61e31fef6e36d64a179c8781605429fd927b5dd608c997be31/imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a", size = 1280026, upload-time = "2022-07-01T12:21:05.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/62/85c4c919272577931d407be5ba5d71c20f0b616d31a0befe0ae45bb79abd/imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b", size = 8769, upload-time = "2022-07-01T12:21:02.467Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = 
"2025-03-19T20:10:01.071Z" }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, +] + +[[package]] +name = "markupsafe" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", size = 14357, upload-time = "2024-10-18T15:20:51.44Z" }, + { url = "https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", size = 12393, upload-time = "2024-10-18T15:20:52.426Z" }, + { url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", size = 21732, upload-time = "2024-10-18T15:20:53.578Z" }, + { url = "https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", size = 20866, upload-time = "2024-10-18T15:20:55.06Z" }, + { url = "https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", size = 20964, upload-time = "2024-10-18T15:20:55.906Z" }, + { url = "https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", size = 21977, upload-time = "2024-10-18T15:20:57.189Z" }, + { url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", size = 21366, upload-time = "2024-10-18T15:20:58.235Z" }, + { url = 
"https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", size = 21091, upload-time = "2024-10-18T15:20:59.235Z" }, + { url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50", size = 15065, upload-time = "2024-10-18T15:21:00.307Z" }, + { url = "https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", size = 15514, upload-time = "2024-10-18T15:21:01.122Z" }, + { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353, upload-time = "2024-10-18T15:21:02.187Z" }, + { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392, upload-time = "2024-10-18T15:21:02.941Z" }, + { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984, upload-time = "2024-10-18T15:21:03.953Z" }, + { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120, upload-time = "2024-10-18T15:21:06.495Z" }, + { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032, upload-time = "2024-10-18T15:21:07.295Z" }, + { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057, upload-time = "2024-10-18T15:21:08.073Z" }, + { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359, upload-time = "2024-10-18T15:21:09.318Z" }, + { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306, upload-time = "2024-10-18T15:21:10.185Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094, upload-time = "2024-10-18T15:21:11.005Z" }, + { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521, upload-time = "2024-10-18T15:21:12.911Z" }, + { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" }, + { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" }, + { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" }, + { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" }, + { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" }, + { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" }, + { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" }, + { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" }, + { url = 
"https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" }, + { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" }, + { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" }, + { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" }, + { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" }, + { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" }, + { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" }, + { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" }, + { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" }, + { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload-time = "2024-10-18T15:21:31.207Z" }, + { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" }, + { url = 
"https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" }, + { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" }, + { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" }, + { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" }, + { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" }, + { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" }, + { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" }, + { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" }, + { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size 
= 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pyconfig" +version = "3.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytool" }, + { name = "simplejson" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ba/ed/8dad88ac88034a5428a7432325720637a9e7b8423fb7f9b4447d35ce413b/pyconfig-3.3.0.tar.gz", hash = "sha256:197e5eb6e3bdb6dbd9fd944df7e0330af6b166cb558bd9ca41930f4495110e7e", size = 26392, upload-time = "2025-06-20T06:51:36.33Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/28/3b93b9d53b5900de055063e7958f70424a9e2fcd21cae3fdd85c36d1f6e6/pyconfig-3.3.0-py3-none-any.whl", hash = "sha256:f056047f832defb4a828004298ec67b5d66303537c194c14a600f791047559e1", size = 18058, upload-time = "2025-06-20T06:51:34.974Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581, upload-time = "2025-01-06T17:26:30.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293, upload-time = "2025-01-06T17:26:25.553Z" }, +] + +[[package]] +name = "pymongo" +version = "4.13.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dnspython" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4b/5a/d664298bf54762f0c89b8aa2c276868070e06afb853b4a8837de5741e5f9/pymongo-4.13.2.tar.gz", hash = "sha256:0f64c6469c2362962e6ce97258ae1391abba1566a953a492562d2924b44815c2", size = 2167844, upload-time = "2025-06-16T18:16:30.685Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/df/a8/293dfd3accda06ae94c54e7c15ac5108614d31263708236b4743554ad6ee/pymongo-4.13.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:01065eb1838e3621a30045ab14d1a60ee62e01f65b7cf154e69c5c722ef14d2f", size = 802768, upload-time = "2025-06-16T18:14:39.521Z" }, + { url = "https://files.pythonhosted.org/packages/ce/7f/2cbc897dd2867b9b5f8e9e6587dc4bf23e3777a4ddd712064ed21aea99e0/pymongo-4.13.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9ab0325d436075f5f1901cde95afae811141d162bc42d9a5befb647fda585ae6", size = 
803053, upload-time = "2025-06-16T18:14:43.318Z" }, + { url = "https://files.pythonhosted.org/packages/b6/da/07cdbaf507cccfdac837f612ea276523d2cdd380c5253c86ceae0369f0e2/pymongo-4.13.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdd8041902963c84dc4e27034fa045ac55fabcb2a4ba5b68b880678557573e70", size = 1180427, upload-time = "2025-06-16T18:14:44.841Z" }, + { url = "https://files.pythonhosted.org/packages/2b/5c/5f61269c87e565a6f4016e644e2bd20473b4b5a47c362ad3d57a1428ef33/pymongo-4.13.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b00ab04630aa4af97294e9abdbe0506242396269619c26f5761fd7b2524ef501", size = 1214655, upload-time = "2025-06-16T18:14:46.635Z" }, + { url = "https://files.pythonhosted.org/packages/26/51/757ee06299e2bb61c0ae7b886ca845a78310cf94fc95bbc044bbe7892392/pymongo-4.13.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16440d0da30ba804c6c01ea730405fdbbb476eae760588ea09e6e7d28afc06de", size = 1197586, upload-time = "2025-06-16T18:14:48.129Z" }, + { url = "https://files.pythonhosted.org/packages/5a/a8/9ddf0ad0884046c34c5eb3de9a944c47d37e39989ae782ded2b207462a97/pymongo-4.13.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad9a2d1357aed5d6750deb315f62cb6f5b3c4c03ffb650da559cb09cb29e6fe8", size = 1183599, upload-time = "2025-06-16T18:14:49.576Z" }, + { url = "https://files.pythonhosted.org/packages/7b/57/61b289b440e77524e4b0d6881f6c6f50cf9a55a72b5ba2adaa43d70531e6/pymongo-4.13.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c793223aef21a8c415c840af1ca36c55a05d6fa3297378da35de3fb6661c0174", size = 1162761, upload-time = "2025-06-16T18:14:51.558Z" }, + { url = "https://files.pythonhosted.org/packages/05/22/bd328cedc79768ab03942fd828f0cd1d50a3ae2c3caf3aebad65a644eb75/pymongo-4.13.2-cp310-cp310-win32.whl", hash = "sha256:8ef6ae029a3390565a0510c872624514dde350007275ecd8126b09175aa02cca", size = 790062, upload-time = "2025-06-16T18:14:53.024Z" }, + { url = "https://files.pythonhosted.org/packages/9f/70/2d8bbdac28e869cebb8081a43f8b16c6dd2384f6aef28fcc6ec0693a7042/pymongo-4.13.2-cp310-cp310-win_amd64.whl", hash = "sha256:66f168f8c5b1e2e3d518507cf9f200f0c86ac79e2b2be9e7b6c8fd1e2f7d7824", size = 800198, upload-time = "2025-06-16T18:14:54.481Z" }, + { url = "https://files.pythonhosted.org/packages/94/df/4c4ef17b48c70120f834ba7151860c300924915696c4a57170cb5b09787f/pymongo-4.13.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7af8c56d0a7fcaf966d5292e951f308fb1f8bac080257349e14742725fd7990d", size = 857145, upload-time = "2025-06-16T18:14:56.516Z" }, + { url = "https://files.pythonhosted.org/packages/e7/41/480ca82b3b3320fc70fe699a01df28db15a4ea154c8759ab4a437a74c808/pymongo-4.13.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ad24f5864706f052b05069a6bc59ff875026e28709548131448fe1e40fc5d80f", size = 857437, upload-time = "2025-06-16T18:14:58.572Z" }, + { url = "https://files.pythonhosted.org/packages/50/d4/eb74e98ea980a5e1ec4f06f383ec6c52ab02076802de24268f477ef616d2/pymongo-4.13.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a10069454195d1d2dda98d681b1dbac9a425f4b0fe744aed5230c734021c1cb9", size = 1426516, upload-time = "2025-06-16T18:15:00.589Z" }, + { url = "https://files.pythonhosted.org/packages/aa/fe/c5960c0e6438bd489367261e5ef1a5db01e34349f0dbf7529fb938d3d2ef/pymongo-4.13.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:3e20862b81e3863bcd72334e3577a3107604553b614a8d25ee1bb2caaea4eb90", size = 1477477, upload-time = "2025-06-16T18:15:02.283Z" }, + { url = "https://files.pythonhosted.org/packages/f6/9f/ef4395175fc97876978736c8493d8ffa4d13aa7a4e12269a2cb0d52a1246/pymongo-4.13.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6b4d5794ca408317c985d7acfb346a60f96f85a7c221d512ff0ecb3cce9d6110", size = 1451921, upload-time = "2025-06-16T18:15:04.35Z" }, + { url = "https://files.pythonhosted.org/packages/2a/b9/397cb2a3ec03f880e882102eddcb46c3d516c6cf47a05f44db48067924d9/pymongo-4.13.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c8e0420fb4901006ae7893e76108c2a36a343b4f8922466d51c45e9e2ceb717", size = 1431045, upload-time = "2025-06-16T18:15:06.392Z" }, + { url = "https://files.pythonhosted.org/packages/f5/0d/e150a414e5cb07f2fefca817fa071a6da8d96308469a85a777244c8c4337/pymongo-4.13.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:239b5f83b83008471d54095e145d4c010f534af99e87cc8877fc6827736451a0", size = 1399697, upload-time = "2025-06-16T18:15:08.975Z" }, + { url = "https://files.pythonhosted.org/packages/b8/29/5190eafb994721c30a38a8a62df225c47a9da364ab5c8cffe90aabf6a54e/pymongo-4.13.2-cp311-cp311-win32.whl", hash = "sha256:6bceb524110c32319eb7119422e400dbcafc5b21bcc430d2049a894f69b604e5", size = 836261, upload-time = "2025-06-16T18:15:10.459Z" }, + { url = "https://files.pythonhosted.org/packages/d3/da/30bdcc83b23fc4f2996b39b41b2ff0ff2184230a78617c7b8636aac4d81d/pymongo-4.13.2-cp311-cp311-win_amd64.whl", hash = "sha256:ab87484c97ae837b0a7bbdaa978fa932fbb6acada3f42c3b2bee99121a594715", size = 851451, upload-time = "2025-06-16T18:15:12.181Z" }, + { url = "https://files.pythonhosted.org/packages/03/e0/0e187750e23eed4227282fcf568fdb61f2b53bbcf8cbe3a71dde2a860d12/pymongo-4.13.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ec89516622dfc8b0fdff499612c0bd235aa45eeb176c9e311bcc0af44bf952b6", size = 912004, upload-time = "2025-06-16T18:15:14.299Z" }, + { url = "https://files.pythonhosted.org/packages/57/c2/9b79795382daaf41e5f7379bffdef1880d68160adea352b796d6948cb5be/pymongo-4.13.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f30eab4d4326df54fee54f31f93e532dc2918962f733ee8e115b33e6fe151d92", size = 911698, upload-time = "2025-06-16T18:15:16.334Z" }, + { url = "https://files.pythonhosted.org/packages/6f/e4/f04dc9ed5d1d9dbc539dc2d8758dd359c5373b0e06fcf25418b2c366737c/pymongo-4.13.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cce9428d12ba396ea245fc4c51f20228cead01119fcc959e1c80791ea45f820", size = 1690357, upload-time = "2025-06-16T18:15:18.358Z" }, + { url = "https://files.pythonhosted.org/packages/bb/de/41478a7d527d38f1b98b084f4a78bbb805439a6ebd8689fbbee0a3dfacba/pymongo-4.13.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac9241b727a69c39117c12ac1e52d817ea472260dadc66262c3fdca0bab0709b", size = 1754593, upload-time = "2025-06-16T18:15:20.096Z" }, + { url = "https://files.pythonhosted.org/packages/df/d9/8fa2eb110291e154f4312779b1a5b815090b8b05a59ecb4f4a32427db1df/pymongo-4.13.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3efc4c515b371a9fa1d198b6e03340985bfe1a55ae2d2b599a714934e7bc61ab", size = 1723637, upload-time = "2025-06-16T18:15:22.048Z" }, + { url = 
"https://files.pythonhosted.org/packages/27/7b/9863fa60a4a51ea09f5e3cd6ceb231af804e723671230f2daf3bd1b59c2b/pymongo-4.13.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f57a664aa74610eb7a52fa93f2cf794a1491f4f76098343485dd7da5b3bcff06", size = 1693613, upload-time = "2025-06-16T18:15:24.866Z" }, + { url = "https://files.pythonhosted.org/packages/9b/89/a42efa07820a59089836f409a63c96e7a74e33313e50dc39c554db99ac42/pymongo-4.13.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3dcb0b8cdd499636017a53f63ef64cf9b6bd3fd9355796c5a1d228e4be4a4c94", size = 1652745, upload-time = "2025-06-16T18:15:27.078Z" }, + { url = "https://files.pythonhosted.org/packages/6a/cf/2c77d1acda61d281edd3e3f00d5017d3fac0c29042c769efd3b8018cb469/pymongo-4.13.2-cp312-cp312-win32.whl", hash = "sha256:bf43ae07804d7762b509f68e5ec73450bb8824e960b03b861143ce588b41f467", size = 883232, upload-time = "2025-06-16T18:15:29.169Z" }, + { url = "https://files.pythonhosted.org/packages/d2/4f/727f59156e3798850c3c2901f106804053cb0e057ed1bd9883f5fa5aa8fa/pymongo-4.13.2-cp312-cp312-win_amd64.whl", hash = "sha256:812a473d584bcb02ab819d379cd5e752995026a2bb0d7713e78462b6650d3f3a", size = 903304, upload-time = "2025-06-16T18:15:31.346Z" }, + { url = "https://files.pythonhosted.org/packages/e0/95/b44b8e24b161afe7b244f6d43c09a7a1f93308cad04198de1c14c67b24ce/pymongo-4.13.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d6044ca0eb74d97f7d3415264de86a50a401b7b0b136d30705f022f9163c3124", size = 966232, upload-time = "2025-06-16T18:15:33.057Z" }, + { url = "https://files.pythonhosted.org/packages/6d/fc/d4d59799a52033acb187f7bd1f09bc75bebb9fd12cef4ba2964d235ad3f9/pymongo-4.13.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dd326bcb92d28d28a3e7ef0121602bad78691b6d4d1f44b018a4616122f1ba8b", size = 965935, upload-time = "2025-06-16T18:15:34.826Z" }, + { url = "https://files.pythonhosted.org/packages/07/a8/67502899d89b317ea9952e4769bc193ca15efee561b24b38a86c59edde6f/pymongo-4.13.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfb0c21bdd58e58625c9cd8de13e859630c29c9537944ec0a14574fdf88c2ac4", size = 1954070, upload-time = "2025-06-16T18:15:36.576Z" }, + { url = "https://files.pythonhosted.org/packages/da/3b/0dac5d81d1af1b96b3200da7ccc52fc261a35efb7d2ac493252eb40a2b11/pymongo-4.13.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9c7d345d57f17b1361008aea78a37e8c139631a46aeb185dd2749850883c7ba", size = 2031424, upload-time = "2025-06-16T18:15:38.723Z" }, + { url = "https://files.pythonhosted.org/packages/31/ed/7a5af49a153224ca7e31e9915703e612ad9c45808cc39540e9dd1a2a7537/pymongo-4.13.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8860445a8da1b1545406fab189dc20319aff5ce28e65442b2b4a8f4228a88478", size = 1995339, upload-time = "2025-06-16T18:15:40.474Z" }, + { url = "https://files.pythonhosted.org/packages/f1/e9/9c72eceae8439c4f1bdebc4e6b290bf035e3f050a80eeb74abb5e12ef8e2/pymongo-4.13.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01c184b612f67d5a4c8f864ae7c40b6cc33c0e9bb05e39d08666f8831d120504", size = 1956066, upload-time = "2025-06-16T18:15:42.272Z" }, + { url = "https://files.pythonhosted.org/packages/ac/79/9b019c47923395d5fced03856996465fb9340854b0f5a2ddf16d47e2437c/pymongo-4.13.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:ae2ea8c62d5f3c6529407c12471385d9a05f9fb890ce68d64976340c85cd661b", size = 1905642, upload-time = "2025-06-16T18:15:43.978Z" }, + { url = "https://files.pythonhosted.org/packages/93/2f/ebf56c7fa9298fa2f9716e7b66cf62b29e7fc6e11774f3b87f55d214d466/pymongo-4.13.2-cp313-cp313-win32.whl", hash = "sha256:d13556e91c4a8cb07393b8c8be81e66a11ebc8335a40fa4af02f4d8d3b40c8a1", size = 930184, upload-time = "2025-06-16T18:15:46.899Z" }, + { url = "https://files.pythonhosted.org/packages/76/2f/49c35464cbd5d116d950ff5d24b4b20491aaae115d35d40b945c33b29250/pymongo-4.13.2-cp313-cp313-win_amd64.whl", hash = "sha256:cfc69d7bc4d4d5872fd1e6de25e6a16e2372c7d5556b75c3b8e2204dce73e3fb", size = 955111, upload-time = "2025-06-16T18:15:48.85Z" }, + { url = "https://files.pythonhosted.org/packages/57/56/b17c8b5329b1842b7847cf0fa224ef0a272bf2e5126360f4da8065c855a1/pymongo-4.13.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a457d2ac34c05e9e8a6bb724115b093300bf270f0655fb897df8d8604b2e3700", size = 1022735, upload-time = "2025-06-16T18:15:50.672Z" }, + { url = "https://files.pythonhosted.org/packages/83/e6/66fec65a7919bf5f35be02e131b4dc4bf3152b5e8d78cd04b6d266a44514/pymongo-4.13.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:02f131a6e61559613b1171b53fbe21fed64e71b0cb4858c47fc9bc7c8e0e501c", size = 1022740, upload-time = "2025-06-16T18:15:53.218Z" }, + { url = "https://files.pythonhosted.org/packages/17/92/cda7383df0d5e71dc007f172c1ecae6313d64ea05d82bbba06df7f6b3e49/pymongo-4.13.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c942d1c6334e894271489080404b1a2e3b8bd5de399f2a0c14a77d966be5bc9", size = 2282430, upload-time = "2025-06-16T18:15:55.356Z" }, + { url = "https://files.pythonhosted.org/packages/84/da/285e05eb1d617b30dc7a7a98ebeb264353a8903e0e816a4eec6487c81f18/pymongo-4.13.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:850168d115680ab66a0931a6aa9dd98ed6aa5e9c3b9a6c12128049b9a5721bc5", size = 2369470, upload-time = "2025-06-16T18:15:57.5Z" }, + { url = "https://files.pythonhosted.org/packages/89/c0/c0d5eae236de9ca293497dc58fc1e4872382223c28ec223f76afc701392c/pymongo-4.13.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af7dfff90647ee77c53410f7fe8ca4fe343f8b768f40d2d0f71a5602f7b5a541", size = 2328857, upload-time = "2025-06-16T18:15:59.59Z" }, + { url = "https://files.pythonhosted.org/packages/2b/5a/d8639fba60def128ce9848b99c56c54c8a4d0cd60342054cd576f0bfdf26/pymongo-4.13.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8057f9bc9c94a8fd54ee4f5e5106e445a8f406aff2df74746f21c8791ee2403", size = 2280053, upload-time = "2025-06-16T18:16:02.166Z" }, + { url = "https://files.pythonhosted.org/packages/a1/69/d56f0897cc4932a336820c5d2470ffed50be04c624b07d1ad6ea75aaa975/pymongo-4.13.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:51040e1ba78d6671f8c65b29e2864483451e789ce93b1536de9cc4456ede87fa", size = 2219378, upload-time = "2025-06-16T18:16:04.108Z" }, + { url = "https://files.pythonhosted.org/packages/04/1e/427e7f99801ee318b6331062d682d3816d7e1d6b6013077636bd75d49c87/pymongo-4.13.2-cp313-cp313t-win32.whl", hash = "sha256:7ab86b98a18c8689514a9f8d0ec7d9ad23a949369b31c9a06ce4a45dcbffcc5e", size = 979460, upload-time = "2025-06-16T18:16:06.128Z" }, + { url = "https://files.pythonhosted.org/packages/b5/9c/00301a6df26f0f8d5c5955192892241e803742e7c3da8c2c222efabc0df6/pymongo-4.13.2-cp313-cp313t-win_amd64.whl", hash = 
"sha256:c38168263ed94a250fc5cf9c6d33adea8ab11c9178994da1c3481c2a49d235f8", size = 1011057, upload-time = "2025-06-16T18:16:07.917Z" }, +] + +[[package]] +name = "pytest" +version = "8.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/08/ba/45911d754e8eba3d5a841a5ce61a65a685ff1798421ac054f85aa8747dfb/pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c", size = 1517714, upload-time = "2025-06-18T05:48:06.109Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/29/16/c8a903f4c4dffe7a12843191437d7cd8e32751d5de349d45d3fe69544e87/pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7", size = 365474, upload-time = "2025-06-18T05:48:03.955Z" }, +] + +[[package]] +name = "pytest-cov" +version = "6.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage", extra = ["toml"] }, + { name = "pluggy" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/18/99/668cade231f434aaa59bbfbf49469068d2ddd945000621d3d165d2e7dd7b/pytest_cov-6.2.1.tar.gz", hash = "sha256:25cc6cc0a5358204b8108ecedc51a9b57b34cc6b8c967cc2c01a4e00d8a67da2", size = 69432, upload-time = "2025-06-12T10:47:47.684Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/16/4ea354101abb1287856baa4af2732be351c7bee728065aed451b678153fd/pytest_cov-6.2.1-py3-none-any.whl", hash = "sha256:f5bc4c23f42f1cdd23c70b1dab1bbaef4fc505ba950d53e0081d0730dd7e86d5", size = 24644, upload-time = "2025-06-12T10:47:45.932Z" }, +] + +[[package]] +name = "pytest-docker" +version = "3.2.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/53/59/156a6aa71d79b411e8517996e24e63b58b3972b85dbc23acf7339a9caee8/pytest_docker-3.2.2.tar.gz", hash = "sha256:58ce79f3173209634bfff8ccaed2ce5593463d5272325c912e1b52a53154f452", size = 13469, upload-time = "2025-05-26T12:24:07.46Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/80/6d/f4c400ce2af6170f23a5d8183e431bb2ddf83131dbf0d23712da708de570/pytest_docker-3.2.2-py3-none-any.whl", hash = "sha256:2926033d48a10de611070fce17f6e67b9e81af2d8ccc59debbbf39872b8ebef9", size = 8575, upload-time = "2025-05-26T12:24:06.513Z" }, +] + +[[package]] +name = "pytool" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "simplejson" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/be/13/9884ee0c6aa49774eeccd0558e26c9325bd15384b0ecff2038a29d457458/pytool-6.0.3.tar.gz", hash = "sha256:4cd8ae2aab998c672726194bc09115f2c7b25ad276b2fd6cc37df959f1abcbb2", size = 31060, upload-time = "2025-06-10T20:55:53.705Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/27/90a1da41e5c493f7f6173e8ddb29341d4e004b1d69626076a2cb5781d537/pytool-6.0.3-py3-none-any.whl", hash = "sha256:50c5284fbe8abe4170bd2c4c05301b424d9c469cd4254f7a4e91b8aefb61011a", size = 23607, upload-time = "2025-06-10T20:55:52.274Z" }, +] + +[[package]] +name = "requests" +version = "2.32.4" +source = { 
registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e1/0a/929373653770d8a0d7ea76c37de6e41f11eb07559b103b1c02cafb3f7cf8/requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422", size = 135258, upload-time = "2025-06-09T16:43:07.34Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", size = 64847, upload-time = "2025-06-09T16:43:05.728Z" }, +] + +[[package]] +name = "roman-numerals-py" +version = "3.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/30/76/48fd56d17c5bdbdf65609abbc67288728a98ed4c02919428d4f52d23b24b/roman_numerals_py-3.1.0.tar.gz", hash = "sha256:be4bf804f083a4ce001b5eb7e3c0862479d10f94c936f6c4e5f250aa5ff5bd2d", size = 9017, upload-time = "2025-02-22T07:34:54.333Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/53/97/d2cbbaa10c9b826af0e10fdf836e1bf344d9f0abb873ebc34d1f49642d3f/roman_numerals_py-3.1.0-py3-none-any.whl", hash = "sha256:9da2ad2fb670bcf24e81070ceb3be72f6c11c440d73bd579fbeca1e9f330954c", size = 7742, upload-time = "2025-02-22T07:34:52.422Z" }, +] + +[[package]] +name = "simplejson" +version = "3.20.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/af/92/51b417685abd96b31308b61b9acce7ec50d8e1de8fbc39a7fd4962c60689/simplejson-3.20.1.tar.gz", hash = "sha256:e64139b4ec4f1f24c142ff7dcafe55a22b811a74d86d66560c8815687143037d", size = 85591, upload-time = "2025-02-15T05:18:53.15Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/c4/627214fb418cd4a17fb0230ff0b6c3bb4a85cbb48dd69c85dcc3b85df828/simplejson-3.20.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e580aa65d5f6c3bf41b9b4afe74be5d5ddba9576701c107c772d936ea2b5043a", size = 93790, upload-time = "2025-02-15T05:15:32.954Z" }, + { url = "https://files.pythonhosted.org/packages/15/ca/56a6a2a33cbcf330c4d71af3f827c47e4e0ba791e78f2642f3d1ab02ff31/simplejson-3.20.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4a586ce4f78cec11f22fe55c5bee0f067e803aab9bad3441afe2181693b5ebb5", size = 75707, upload-time = "2025-02-15T05:15:34.954Z" }, + { url = "https://files.pythonhosted.org/packages/a9/c8/3d92b67e03a3b6207d97202669f9454ed700b35ade9bd4428265a078fb6c/simplejson-3.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74a1608f9e6e8c27a4008d70a54270868306d80ed48c9df7872f9f4b8ac87808", size = 75700, upload-time = "2025-02-15T05:15:37.144Z" }, + { url = "https://files.pythonhosted.org/packages/74/30/20001219d6fdca4aaa3974c96dfb6955a766b4e2cc950505a5b51fd050b0/simplejson-3.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03db8cb64154189a92a7786209f24e391644f3a3fa335658be2df2af1960b8d8", size = 138672, upload-time = "2025-02-15T05:15:38.547Z" }, + { url = "https://files.pythonhosted.org/packages/21/47/50157810876c2a7ebbd6e6346ec25eda841fe061fecaa02538a7742a3d2a/simplejson-3.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eea7e2b7d858f6fdfbf0fe3cb846d6bd8a45446865bc09960e51f3d473c2271b", size = 146616, upload-time = "2025-02-15T05:15:39.871Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/60/8c97cdc93096437b0aca2745aca63c880fe2315fd7f6a6ce6edbb344a2ae/simplejson-3.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e66712b17d8425bb7ff8968d4c7c7fd5a2dd7bd63728b28356223c000dd2f91f", size = 134344, upload-time = "2025-02-15T05:15:42.091Z" }, + { url = "https://files.pythonhosted.org/packages/bb/9e/da184f0e9bb3a5d7ffcde713bd41b4fe46cca56b6f24d9bd155fac56805a/simplejson-3.20.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2cc4f6486f9f515b62f5831ff1888886619b84fc837de68f26d919ba7bbdcbc", size = 138017, upload-time = "2025-02-15T05:15:43.542Z" }, + { url = "https://files.pythonhosted.org/packages/31/db/00d1a8d9b036db98f678c8a3c69ed17d2894d1768d7a00576e787ad3e546/simplejson-3.20.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a3c2df555ee4016148fa192e2b9cd9e60bc1d40769366134882685e90aee2a1e", size = 140118, upload-time = "2025-02-15T05:15:45.7Z" }, + { url = "https://files.pythonhosted.org/packages/52/21/57fc47eab8c1c73390b933a5ba9271f08e3e1ec83162c580357f28f5b97c/simplejson-3.20.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:78520f04b7548a5e476b5396c0847e066f1e0a4c0c5e920da1ad65e95f410b11", size = 140314, upload-time = "2025-02-15T05:16:07.949Z" }, + { url = "https://files.pythonhosted.org/packages/ad/cc/7cfd78d1e0fa5e57350b98cfe77353b6dfa13dce21afa4060e1019223852/simplejson-3.20.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f4bd49ecde87b0fe9f55cc971449a32832bca9910821f7072bbfae1155eaa007", size = 148544, upload-time = "2025-02-15T05:16:09.455Z" }, + { url = "https://files.pythonhosted.org/packages/63/26/1c894a1c2bd95dc8be0cf5a2fa73b0d173105b6ca18c90cb981ff10443d0/simplejson-3.20.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7eaae2b88eb5da53caaffdfa50e2e12022553949b88c0df4f9a9663609373f72", size = 141172, upload-time = "2025-02-15T05:16:10.966Z" }, + { url = "https://files.pythonhosted.org/packages/93/27/0717dccc10cd9988dbf1314def52ab32678a95a95328bb37cafacf499400/simplejson-3.20.1-cp310-cp310-win32.whl", hash = "sha256:e836fb88902799eac8debc2b642300748f4860a197fa3d9ea502112b6bb8e142", size = 74181, upload-time = "2025-02-15T05:16:12.361Z" }, + { url = "https://files.pythonhosted.org/packages/5f/af/593f896573f306519332d4287b1ab8b7b888c239bbd5159f7054d7055c2d/simplejson-3.20.1-cp310-cp310-win_amd64.whl", hash = "sha256:b122a19b552b212fc3b5b96fc5ce92333d4a9ac0a800803e1f17ebb16dac4be5", size = 75738, upload-time = "2025-02-15T05:16:14.438Z" }, + { url = "https://files.pythonhosted.org/packages/76/59/74bc90d1c051bc2432c96b34bd4e8036875ab58b4fcbe4d6a5a76985f853/simplejson-3.20.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:325b8c107253d3217e89d7b50c71015b5b31e2433e6c5bf38967b2f80630a8ca", size = 92132, upload-time = "2025-02-15T05:16:15.743Z" }, + { url = "https://files.pythonhosted.org/packages/71/c7/1970916e0c51794fff89f76da2f632aaf0b259b87753c88a8c409623d3e1/simplejson-3.20.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88a7baa8211089b9e58d78fbc1b0b322103f3f3d459ff16f03a36cece0d0fcf0", size = 74956, upload-time = "2025-02-15T05:16:17.062Z" }, + { url = "https://files.pythonhosted.org/packages/c8/0d/98cc5909180463f1d75fac7180de62d4cdb4e82c4fef276b9e591979372c/simplejson-3.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:299b1007b8101d50d95bc0db1bf5c38dc372e85b504cf77f596462083ee77e3f", size = 74772, upload-time = "2025-02-15T05:16:19.204Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/94/a30a5211a90d67725a3e8fcc1c788189f2ae2ed2b96b63ed15d0b7f5d6bb/simplejson-3.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ec618ed65caab48e81e3ed29586236a8e57daef792f1f3bb59504a7e98cd10", size = 143575, upload-time = "2025-02-15T05:16:21.337Z" }, + { url = "https://files.pythonhosted.org/packages/ee/08/cdb6821f1058eb5db46d252de69ff7e6c53f05f1bae6368fe20d5b51d37e/simplejson-3.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2cdead1d3197f0ff43373cf4730213420523ba48697743e135e26f3d179f38", size = 153241, upload-time = "2025-02-15T05:16:22.859Z" }, + { url = "https://files.pythonhosted.org/packages/4c/2d/ca3caeea0bdc5efc5503d5f57a2dfb56804898fb196dfada121323ee0ccb/simplejson-3.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3466d2839fdc83e1af42e07b90bc8ff361c4e8796cd66722a40ba14e458faddd", size = 141500, upload-time = "2025-02-15T05:16:25.068Z" }, + { url = "https://files.pythonhosted.org/packages/e1/33/d3e0779d5c58245e7370c98eb969275af6b7a4a5aec3b97cbf85f09ad328/simplejson-3.20.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d492ed8e92f3a9f9be829205f44b1d0a89af6582f0cf43e0d129fa477b93fe0c", size = 144757, upload-time = "2025-02-15T05:16:28.301Z" }, + { url = "https://files.pythonhosted.org/packages/54/53/2d93128bb55861b2fa36c5944f38da51a0bc6d83e513afc6f7838440dd15/simplejson-3.20.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f924b485537b640dc69434565463fd6fc0c68c65a8c6e01a823dd26c9983cf79", size = 144409, upload-time = "2025-02-15T05:16:29.687Z" }, + { url = "https://files.pythonhosted.org/packages/99/4c/dac310a98f897ad3435b4bdc836d92e78f09e38c5dbf28211ed21dc59fa2/simplejson-3.20.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9e8eacf6a3491bf76ea91a8d46726368a6be0eb94993f60b8583550baae9439e", size = 146082, upload-time = "2025-02-15T05:16:31.064Z" }, + { url = "https://files.pythonhosted.org/packages/ee/22/d7ba958cfed39827335b82656b1c46f89678faecda9a7677b47e87b48ee6/simplejson-3.20.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d34d04bf90b4cea7c22d8b19091633908f14a096caa301b24c2f3d85b5068fb8", size = 154339, upload-time = "2025-02-15T05:16:32.719Z" }, + { url = "https://files.pythonhosted.org/packages/b8/c8/b072b741129406a7086a0799c6f5d13096231bf35fdd87a0cffa789687fc/simplejson-3.20.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:69dd28d4ce38390ea4aaf212902712c0fd1093dc4c1ff67e09687c3c3e15a749", size = 147915, upload-time = "2025-02-15T05:16:34.291Z" }, + { url = "https://files.pythonhosted.org/packages/6c/46/8347e61e9cf3db5342a42f7fd30a81b4f5cf85977f916852d7674a540907/simplejson-3.20.1-cp311-cp311-win32.whl", hash = "sha256:dfe7a9da5fd2a3499436cd350f31539e0a6ded5da6b5b3d422df016444d65e43", size = 73972, upload-time = "2025-02-15T05:16:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/01/85/b52f24859237b4e9d523d5655796d911ba3d46e242eb1959c45b6af5aedd/simplejson-3.20.1-cp311-cp311-win_amd64.whl", hash = "sha256:896a6c04d7861d507d800da7642479c3547060bf97419d9ef73d98ced8258766", size = 75595, upload-time = "2025-02-15T05:16:36.957Z" }, + { url = "https://files.pythonhosted.org/packages/8d/eb/34c16a1ac9ba265d024dc977ad84e1659d931c0a700967c3e59a98ed7514/simplejson-3.20.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f31c4a3a7ab18467ee73a27f3e59158255d1520f3aad74315edde7a940f1be23", size = 
93100, upload-time = "2025-02-15T05:16:38.801Z" }, + { url = "https://files.pythonhosted.org/packages/41/fc/2c2c007d135894971e6814e7c0806936e5bade28f8db4dd7e2a58b50debd/simplejson-3.20.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:884e6183d16b725e113b83a6fc0230152ab6627d4d36cb05c89c2c5bccfa7bc6", size = 75464, upload-time = "2025-02-15T05:16:40.905Z" }, + { url = "https://files.pythonhosted.org/packages/0f/05/2b5ecb33b776c34bb5cace5de5d7669f9b60e3ca13c113037b2ca86edfbd/simplejson-3.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03d7a426e416fe0d3337115f04164cd9427eb4256e843a6b8751cacf70abc832", size = 75112, upload-time = "2025-02-15T05:16:42.246Z" }, + { url = "https://files.pythonhosted.org/packages/fe/36/1f3609a2792f06cd4b71030485f78e91eb09cfd57bebf3116bf2980a8bac/simplejson-3.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:000602141d0bddfcff60ea6a6e97d5e10c9db6b17fd2d6c66199fa481b6214bb", size = 150182, upload-time = "2025-02-15T05:16:43.557Z" }, + { url = "https://files.pythonhosted.org/packages/2f/b0/053fbda38b8b602a77a4f7829def1b4f316cd8deb5440a6d3ee90790d2a4/simplejson-3.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:af8377a8af78226e82e3a4349efdde59ffa421ae88be67e18cef915e4023a595", size = 158363, upload-time = "2025-02-15T05:16:45.748Z" }, + { url = "https://files.pythonhosted.org/packages/d1/4b/2eb84ae867539a80822e92f9be4a7200dffba609275faf99b24141839110/simplejson-3.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15c7de4c88ab2fbcb8781a3b982ef883696736134e20b1210bca43fb42ff1acf", size = 148415, upload-time = "2025-02-15T05:16:47.861Z" }, + { url = "https://files.pythonhosted.org/packages/e0/bd/400b0bd372a5666addf2540c7358bfc3841b9ce5cdbc5cc4ad2f61627ad8/simplejson-3.20.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:455a882ff3f97d810709f7b620007d4e0aca8da71d06fc5c18ba11daf1c4df49", size = 152213, upload-time = "2025-02-15T05:16:49.25Z" }, + { url = "https://files.pythonhosted.org/packages/50/12/143f447bf6a827ee9472693768dc1a5eb96154f8feb140a88ce6973a3cfa/simplejson-3.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:fc0f523ce923e7f38eb67804bc80e0a028c76d7868500aa3f59225574b5d0453", size = 150048, upload-time = "2025-02-15T05:16:51.5Z" }, + { url = "https://files.pythonhosted.org/packages/5e/ea/dd9b3e8e8ed710a66f24a22c16a907c9b539b6f5f45fd8586bd5c231444e/simplejson-3.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76461ec929282dde4a08061071a47281ad939d0202dc4e63cdd135844e162fbc", size = 151668, upload-time = "2025-02-15T05:16:53Z" }, + { url = "https://files.pythonhosted.org/packages/99/af/ee52a8045426a0c5b89d755a5a70cc821815ef3c333b56fbcad33c4435c0/simplejson-3.20.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ab19c2da8c043607bde4d4ef3a6b633e668a7d2e3d56f40a476a74c5ea71949f", size = 158840, upload-time = "2025-02-15T05:16:54.851Z" }, + { url = "https://files.pythonhosted.org/packages/68/db/ab32869acea6b5de7d75fa0dac07a112ded795d41eaa7e66c7813b17be95/simplejson-3.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b2578bedaedf6294415197b267d4ef678fea336dd78ee2a6d2f4b028e9d07be3", size = 154212, upload-time = "2025-02-15T05:16:56.318Z" }, + { url = "https://files.pythonhosted.org/packages/fa/7a/e3132d454977d75a3bf9a6d541d730f76462ebf42a96fea2621498166f41/simplejson-3.20.1-cp312-cp312-win32.whl", hash = 
"sha256:339f407373325a36b7fd744b688ba5bae0666b5d340ec6d98aebc3014bf3d8ea", size = 74101, upload-time = "2025-02-15T05:16:57.746Z" }, + { url = "https://files.pythonhosted.org/packages/bc/5d/4e243e937fa3560107c69f6f7c2eed8589163f5ed14324e864871daa2dd9/simplejson-3.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:627d4486a1ea7edf1f66bb044ace1ce6b4c1698acd1b05353c97ba4864ea2e17", size = 75736, upload-time = "2025-02-15T05:16:59.017Z" }, + { url = "https://files.pythonhosted.org/packages/c4/03/0f453a27877cb5a5fff16a975925f4119102cc8552f52536b9a98ef0431e/simplejson-3.20.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:71e849e7ceb2178344998cbe5ade101f1b329460243c79c27fbfc51c0447a7c3", size = 93109, upload-time = "2025-02-15T05:17:00.377Z" }, + { url = "https://files.pythonhosted.org/packages/74/1f/a729f4026850cabeaff23e134646c3f455e86925d2533463420635ae54de/simplejson-3.20.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b63fdbab29dc3868d6f009a59797cefaba315fd43cd32ddd998ee1da28e50e29", size = 75475, upload-time = "2025-02-15T05:17:02.544Z" }, + { url = "https://files.pythonhosted.org/packages/e2/14/50a2713fee8ff1f8d655b1a14f4a0f1c0c7246768a1b3b3d12964a4ed5aa/simplejson-3.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1190f9a3ce644fd50ec277ac4a98c0517f532cfebdcc4bd975c0979a9f05e1fb", size = 75112, upload-time = "2025-02-15T05:17:03.875Z" }, + { url = "https://files.pythonhosted.org/packages/45/86/ea9835abb646755140e2d482edc9bc1e91997ed19a59fd77ae4c6a0facea/simplejson-3.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1336ba7bcb722ad487cd265701ff0583c0bb6de638364ca947bb84ecc0015d1", size = 150245, upload-time = "2025-02-15T05:17:06.899Z" }, + { url = "https://files.pythonhosted.org/packages/12/b4/53084809faede45da829fe571c65fbda8479d2a5b9c633f46b74124d56f5/simplejson-3.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e975aac6a5acd8b510eba58d5591e10a03e3d16c1cf8a8624ca177491f7230f0", size = 158465, upload-time = "2025-02-15T05:17:08.707Z" }, + { url = "https://files.pythonhosted.org/packages/a9/7d/d56579468d1660b3841e1f21c14490d103e33cf911886b22652d6e9683ec/simplejson-3.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a6dd11ee282937ad749da6f3b8d87952ad585b26e5edfa10da3ae2536c73078", size = 148514, upload-time = "2025-02-15T05:17:11.323Z" }, + { url = "https://files.pythonhosted.org/packages/19/e3/874b1cca3d3897b486d3afdccc475eb3a09815bf1015b01cf7fcb52a55f0/simplejson-3.20.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab980fcc446ab87ea0879edad41a5c28f2d86020014eb035cf5161e8de4474c6", size = 152262, upload-time = "2025-02-15T05:17:13.543Z" }, + { url = "https://files.pythonhosted.org/packages/32/84/f0fdb3625292d945c2bd13a814584603aebdb38cfbe5fe9be6b46fe598c4/simplejson-3.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f5aee2a4cb6b146bd17333ac623610f069f34e8f31d2f4f0c1a2186e50c594f0", size = 150164, upload-time = "2025-02-15T05:17:15.021Z" }, + { url = "https://files.pythonhosted.org/packages/95/51/6d625247224f01eaaeabace9aec75ac5603a42f8ebcce02c486fbda8b428/simplejson-3.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:652d8eecbb9a3b6461b21ec7cf11fd0acbab144e45e600c817ecf18e4580b99e", size = 151795, upload-time = "2025-02-15T05:17:16.542Z" }, + { url = 
"https://files.pythonhosted.org/packages/7f/d9/bb921df6b35be8412f519e58e86d1060fddf3ad401b783e4862e0a74c4c1/simplejson-3.20.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:8c09948f1a486a89251ee3a67c9f8c969b379f6ffff1a6064b41fea3bce0a112", size = 159027, upload-time = "2025-02-15T05:17:18.083Z" }, + { url = "https://files.pythonhosted.org/packages/03/c5/5950605e4ad023a6621cf4c931b29fd3d2a9c1f36be937230bfc83d7271d/simplejson-3.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:cbbd7b215ad4fc6f058b5dd4c26ee5c59f72e031dfda3ac183d7968a99e4ca3a", size = 154380, upload-time = "2025-02-15T05:17:20.334Z" }, + { url = "https://files.pythonhosted.org/packages/66/ad/b74149557c5ec1e4e4d55758bda426f5d2ec0123cd01a53ae63b8de51fa3/simplejson-3.20.1-cp313-cp313-win32.whl", hash = "sha256:ae81e482476eaa088ef9d0120ae5345de924f23962c0c1e20abbdff597631f87", size = 74102, upload-time = "2025-02-15T05:17:22.475Z" }, + { url = "https://files.pythonhosted.org/packages/db/a9/25282fdd24493e1022f30b7f5cdf804255c007218b2bfaa655bd7ad34b2d/simplejson-3.20.1-cp313-cp313-win_amd64.whl", hash = "sha256:1b9fd15853b90aec3b1739f4471efbf1ac05066a2c7041bf8db821bb73cd2ddc", size = 75736, upload-time = "2025-02-15T05:17:24.122Z" }, + { url = "https://files.pythonhosted.org/packages/4b/30/00f02a0a921556dd5a6db1ef2926a1bc7a8bbbfb1c49cfed68a275b8ab2b/simplejson-3.20.1-py3-none-any.whl", hash = "sha256:8a6c1bbac39fa4a79f83cbf1df6ccd8ff7069582a9fd8db1e52cea073bc2c697", size = 57121, upload-time = "2025-02-15T05:18:51.243Z" }, +] + +[[package]] +name = "snowballstemmer" +version = "3.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/75/a7/9810d872919697c9d01295633f5d574fb416d47e535f258272ca1f01f447/snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895", size = 105575, upload-time = "2025-05-09T16:34:51.843Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/78/3565d011c61f5a43488987ee32b6f3f656e7f107ac2782dd57bdd7d91d9a/snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064", size = 103274, upload-time = "2025-05-09T16:34:50.371Z" }, +] + +[[package]] +name = "sphinx" +version = "8.1.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.11'", +] +dependencies = [ + { name = "alabaster", marker = "python_full_version < '3.11'" }, + { name = "babel", marker = "python_full_version < '3.11'" }, + { name = "colorama", marker = "python_full_version < '3.11' and sys_platform == 'win32'" }, + { name = "docutils", marker = "python_full_version < '3.11'" }, + { name = "imagesize", marker = "python_full_version < '3.11'" }, + { name = "jinja2", marker = "python_full_version < '3.11'" }, + { name = "packaging", marker = "python_full_version < '3.11'" }, + { name = "pygments", marker = "python_full_version < '3.11'" }, + { name = "requests", marker = "python_full_version < '3.11'" }, + { name = "snowballstemmer", marker = "python_full_version < '3.11'" }, + { name = "sphinxcontrib-applehelp", marker = "python_full_version < '3.11'" }, + { name = "sphinxcontrib-devhelp", marker = "python_full_version < '3.11'" }, + { name = "sphinxcontrib-htmlhelp", marker = "python_full_version < '3.11'" }, + { name = "sphinxcontrib-jsmath", marker = "python_full_version < '3.11'" }, + { name = "sphinxcontrib-qthelp", marker = "python_full_version < '3.11'" }, + { name = 
"sphinxcontrib-serializinghtml", marker = "python_full_version < '3.11'" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/be0b61178fe2cdcb67e2a92fc9ebb488e3c51c4f74a36a7824c0adf23425/sphinx-8.1.3.tar.gz", hash = "sha256:43c1911eecb0d3e161ad78611bc905d1ad0e523e4ddc202a58a821773dc4c927", size = 8184611, upload-time = "2024-10-13T20:27:13.93Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/60/1ddff83a56d33aaf6f10ec8ce84b4c007d9368b21008876fceda7e7381ef/sphinx-8.1.3-py3-none-any.whl", hash = "sha256:09719015511837b76bf6e03e42eb7595ac8c2e41eeb9c29c5b755c6b677992a2", size = 3487125, upload-time = "2024-10-13T20:27:10.448Z" }, +] + +[[package]] +name = "sphinx" +version = "8.2.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.11'", +] +dependencies = [ + { name = "alabaster", marker = "python_full_version >= '3.11'" }, + { name = "babel", marker = "python_full_version >= '3.11'" }, + { name = "colorama", marker = "python_full_version >= '3.11' and sys_platform == 'win32'" }, + { name = "docutils", marker = "python_full_version >= '3.11'" }, + { name = "imagesize", marker = "python_full_version >= '3.11'" }, + { name = "jinja2", marker = "python_full_version >= '3.11'" }, + { name = "packaging", marker = "python_full_version >= '3.11'" }, + { name = "pygments", marker = "python_full_version >= '3.11'" }, + { name = "requests", marker = "python_full_version >= '3.11'" }, + { name = "roman-numerals-py", marker = "python_full_version >= '3.11'" }, + { name = "snowballstemmer", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-applehelp", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-devhelp", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-htmlhelp", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-jsmath", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-qthelp", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-serializinghtml", marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/ad/4360e50ed56cb483667b8e6dadf2d3fda62359593faabbe749a27c4eaca6/sphinx-8.2.3.tar.gz", hash = "sha256:398ad29dee7f63a75888314e9424d40f52ce5a6a87ae88e7071e80af296ec348", size = 8321876, upload-time = "2025-03-02T22:31:59.658Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/53/136e9eca6e0b9dc0e1962e2c908fbea2e5ac000c2a2fbd9a35797958c48b/sphinx-8.2.3-py3-none-any.whl", hash = "sha256:4405915165f13521d875a8c29c8970800a0141c14cc5416a38feca4ea5d9b9c3", size = 3589741, upload-time = "2025-03-02T22:31:56.836Z" }, +] + +[[package]] +name = "sphinx-autodoc-typehints" +version = "3.0.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.11'", +] +dependencies = [ + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/26/f0/43c6a5ff3e7b08a8c3b32f81b859f1b518ccc31e45f22e2b41ced38be7b9/sphinx_autodoc_typehints-3.0.1.tar.gz", hash = "sha256:b9b40dd15dee54f6f810c924f863f9cf1c54f9f3265c495140ea01be7f44fa55", size = 36282, upload-time = "2025-01-16T18:25:30.958Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/3c/dc/dc46c5c7c566b7ec5e8f860f9c89533bf03c0e6aadc96fb9b337867e4460/sphinx_autodoc_typehints-3.0.1-py3-none-any.whl", hash = "sha256:4b64b676a14b5b79cefb6628a6dc8070e320d4963e8ff640a2f3e9390ae9045a", size = 20245, upload-time = "2025-01-16T18:25:27.394Z" }, +] + +[[package]] +name = "sphinx-autodoc-typehints" +version = "3.2.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.11'", +] +dependencies = [ + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/93/68/a388a9b8f066cd865d9daa65af589d097efbfab9a8c302d2cb2daa43b52e/sphinx_autodoc_typehints-3.2.0.tar.gz", hash = "sha256:107ac98bc8b4837202c88c0736d59d6da44076e65a0d7d7d543a78631f662a9b", size = 36724, upload-time = "2025-04-25T16:53:25.872Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/c7/8aab362e86cbf887e58be749a78d20ad743e1eb2c73c2b13d4761f39a104/sphinx_autodoc_typehints-3.2.0-py3-none-any.whl", hash = "sha256:884b39be23b1d884dcc825d4680c9c6357a476936e3b381a67ae80091984eb49", size = 20563, upload-time = "2025-04-25T16:53:24.492Z" }, +] + +[[package]] +name = "sphinx-rtd-theme" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-jquery" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/44/c97faec644d29a5ceddd3020ae2edffa69e7d00054a8c7a6021e82f20335/sphinx_rtd_theme-3.0.2.tar.gz", hash = "sha256:b7457bc25dda723b20b086a670b9953c859eab60a2a03ee8eb2bb23e176e5f85", size = 7620463, upload-time = "2024-11-13T11:06:04.545Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/85/77/46e3bac77b82b4df5bb5b61f2de98637724f246b4966cfc34bc5895d852a/sphinx_rtd_theme-3.0.2-py2.py3-none-any.whl", hash = "sha256:422ccc750c3a3a311de4ae327e82affdaf59eb695ba4936538552f3b00f4ee13", size = 7655561, upload-time = "2024-11-13T11:06:02.094Z" }, +] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/6e/b837e84a1a704953c62ef8776d45c3e8d759876b4a84fe14eba2859106fe/sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1", size = 20053, upload-time = "2024-07-29T01:09:00.465Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/85/9ebeae2f76e9e77b952f4b274c27238156eae7979c5421fba91a28f4970d/sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5", size = 119300, upload-time = "2024-07-29T01:08:58.99Z" }, +] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/d2/5beee64d3e4e747f316bae86b55943f51e82bb86ecd325883ef65741e7da/sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad", size = 12967, upload-time = "2024-07-29T01:09:23.417Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/35/7a/987e583882f985fe4d7323774889ec58049171828b58c2217e7f79cdf44e/sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2", size = 82530, upload-time = "2024-07-29T01:09:21.945Z" }, +] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/93/983afd9aa001e5201eab16b5a444ed5b9b0a7a010541e0ddfbbfd0b2470c/sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9", size = 22617, upload-time = "2024-07-29T01:09:37.889Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/7b/18a8c0bcec9182c05a0b3ec2a776bba4ead82750a55ff798e8d406dae604/sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8", size = 98705, upload-time = "2024-07-29T01:09:36.407Z" }, +] + +[[package]] +name = "sphinxcontrib-jquery" +version = "4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/de/f3/aa67467e051df70a6330fe7770894b3e4f09436dea6881ae0b4f3d87cad8/sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a", size = 122331, upload-time = "2023-03-14T15:01:01.944Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/85/749bd22d1a68db7291c89e2ebca53f4306c3f205853cf31e9de279034c3c/sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae", size = 121104, upload-time = "2023-03-14T15:01:00.356Z" }, +] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/e8/9ed3830aeed71f17c026a07a5097edcf44b692850ef215b161b8ad875729/sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8", size = 5787, upload-time = "2019-01-21T16:10:16.347Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/42/4c8646762ee83602e3fb3fbe774c2fac12f317deb0b5dbeeedd2d3ba4b77/sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", size = 5071, upload-time = "2019-01-21T16:10:14.333Z" }, +] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/68/bc/9104308fc285eb3e0b31b67688235db556cd5b0ef31d96f30e45f2e51cae/sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab", size = 17165, upload-time = "2024-07-29T01:09:56.435Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/83/859ecdd180cacc13b1f7e857abf8582a64552ea7a061057a6c716e790fce/sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb", size = 88743, upload-time = "2024-07-29T01:09:54.885Z" }, +] + +[[package]] 
+name = "sphinxcontrib-serializinghtml" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3b/44/6716b257b0aa6bfd51a1b31665d1c205fb12cb5ad56de752dfa15657de2f/sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d", size = 16080, upload-time = "2024-07-29T01:10:09.332Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/a7/d2782e4e3f77c8450f727ba74a8f12756d5ba823d81b941f1b04da9d033a/sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331", size = 92072, upload-time = "2024-07-29T01:10:08.203Z" }, +] + +[[package]] +name = "tomli" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, + { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" }, + { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" }, + { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, + { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" }, + { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" }, + { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, + { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" }, + { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" }, + { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" }, + { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" }, + { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, + { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, + { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, + { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, + { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, + { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, + { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, + { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" }, + { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" }, + { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" }, + { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" }, + { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" }, + { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" }, + { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload-time = "2024-11-27T22:38:32.837Z" }, + { url = 
"https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload-time = "2024-11-27T22:38:34.455Z" }, + { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.14.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d1/bc/51647cd02527e87d05cb083ccc402f93e441606ff1f01739a62c8ad09ba5/typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4", size = 107423, upload-time = "2025-06-02T14:52:11.399Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/e0/552843e0d356fbb5256d21449fa957fa4eff3bbc135a74a691ee70c7c5da/typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af", size = 43839, upload-time = "2025-06-02T14:52:10.026Z" }, +] + +[[package]] +name = "urllib3" +version = "2.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, +]