diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 75a0e8ff..c782cd93 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -1,10 +1,39 @@ -Fixes # + + -## One line description for the changelog +**Related Issue:** # +**Type of change:** + +- Bug fix (non-breaking change which fixes an issue) +- New feature (non-breaking change which adds functionality) +- Breaking change (fix or feature that would cause existing functionality to not work as expected) +- This change requires a documentation update +- Other (please describe): -- [ ] Tests pass -- [ ] Appropriate changes to README are included in PR +**Description:** + + +--- + +**Pre-submission checklist:** +- [ ] I have read the [CONTRIBUTING.md](https://github.com/cloudevents/sdk-python/blob/main/CONTRIBUTING.md) file. +- [ ] I have signed off my commits using `git commit --signoff`. +- [ ] I have added tests that prove my fix is effective or that my feature works. +- [ ] I have updated the documentation (`README.md`, `CHANGELOG.md`, etc.) as necessary. +- [ ] I have run `pre-commit` and `tox` and all checks pass. +- [ ] This pull request is ready to be reviewed. 
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 107bf9e7..c6350341 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -3,37 +3,38 @@ name: CI on: [push, pull_request] jobs: - lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - name: Setup Python - uses: actions/setup-python@v5 + - uses: actions/checkout@v5 + - name: Install uv + uses: astral-sh/setup-uv@v7 with: - python-version: '3.11' - cache: 'pip' - cache-dependency-path: 'requirements/*.txt' - - name: Install dev dependencies - run: python -m pip install -r requirements/dev.txt - - name: Run linting - run: python -m tox -e lint + enable-cache: true + cache-dependency-glob: "uv.lock" + - name: Set up Python + run: uv python install 3.12 + - name: Install the project + run: uv sync --all-extras --dev + - name: Lint + run: uv run ruff check --select I test: strategy: matrix: - python: ['3.8', '3.9', '3.10', '3.11'] + python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"] os: [ubuntu-latest, windows-latest, macos-latest] runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v4 - - name: Setup Python - uses: actions/setup-python@v5 + - uses: actions/checkout@v5 + - name: Install uv + uses: astral-sh/setup-uv@v7 with: - python-version: ${{ matrix.python }} - cache: 'pip' - cache-dependency-path: 'requirements/*.txt' - - name: Install dev dependencies - run: python -m pip install -r requirements/dev.txt + enable-cache: true + cache-dependency-glob: "uv.lock" + - name: Set up Python ${{ matrix.python-version }} + run: uv python install ${{ matrix.python-version }} + - name: Install the project + run: uv sync --all-extras --dev - name: Run tests - run: python -m tox -e py # Run tox using the version of Python in `PATH` + run: uv run pytest tests diff --git a/.github/workflows/pypi-release.yml b/.github/workflows/pypi-release.yml index 4cb248bc..1dd1106a 100644 --- a/.github/workflows/pypi-release.yml +++ b/.github/workflows/pypi-release.yml 
@@ -5,46 +5,44 @@ on: push: branches: - main + - v2 - 'tag/v**' jobs: build_dist: name: Build source distribution - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 0 + - name: Install uv + uses: astral-sh/setup-uv@v7 + - name: Set up Python + run: uv python install 3.12 + - name: Install the project + run: uv sync --all-extras --dev - name: Build SDist and wheel - run: pipx run build + run: uv build - - uses: actions/upload-artifact@v4 + - uses: actions/upload-artifact@v5 with: name: artifact path: dist/* - name: Check metadata - run: pipx run twine check dist/* + run: uvx twine check dist/* publish: - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' needs: [ build_dist ] steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 0 - - name: Set up Python - uses: actions/setup-python@v4 + - uses: actions/download-artifact@v5 with: - python-version: "3.11" - cache: 'pip' - - name: Install build dependencies - run: pip install -U setuptools wheel build - - uses: actions/download-artifact@v4 - with: - # unpacks default artifact into dist/ - # if `name: artifact` is omitted, the action will create extra parent dir name: artifact path: dist - name: Publish @@ -52,7 +50,19 @@ jobs: with: user: __token__ password: ${{ secrets.pypi_password }} - - name: Install GitPython and cloudevents for pypi_packaging - run: pip install -U -r requirements/publish.txt - - name: Create Tag - run: python pypi_packaging.py + tag: + runs-on: ubuntu-24.04 + needs: [ publish ] + steps: + - uses: actions/checkout@v5 + with: + fetch-depth: 0 + - name: Extract version + id: version + run: | + echo "version=$(grep -oP '__version__ = \"\K[^\"]+' src/cloudevents/__init__.py)" >> $GITHUB_OUTPUT + - name: Create and push tag + uses: pxpm/github-tag-action@1.0.1 + with: + repo-token: "${{ secrets.GITHUB_TOKEN }}" + tag: ${{ steps.version.outputs.version }} diff 
--git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 75ad2ef1..affd6723 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,27 +1,24 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 + rev: v6.0.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer - id: check-toml - - repo: https://github.com/pycqa/isort - rev: 5.13.2 + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.14.10 hooks: - - id: isort - args: [ "--profile", "black", "--filter-files" ] - - repo: https://github.com/psf/black - rev: 24.4.2 - hooks: - - id: black - language_version: python3.11 + # Run the linter. + - id: ruff + # Run the formatter. + - id: ruff-format - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.10.0 + rev: v1.19.1 hooks: - id: mypy - files: ^(cloudevents/) - exclude: ^(cloudevents/tests/) - types: [ python ] - args: [ ] + files: ^(src/cloudevents/|tests/) + exclude: ^(src/cloudevents/v1/) + types: [python] + args: ["--config-file=pyproject.toml"] additional_dependencies: - - "pydantic~=2.7" + - types-python-dateutil>=2.9.0.20251115 diff --git a/README.md b/README.md index abcf5cbf..7d3e5fb6 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Python SDK for [CloudEvents](https://github.com/cloudevents/spec) +# Python SDK v2 for [CloudEvents](https://github.com/cloudevents/spec) [![PyPI version](https://badge.fury.io/py/cloudevents.svg)](https://badge.fury.io/py/cloudevents) @@ -14,8 +14,8 @@ This SDK current supports the following versions of CloudEvents: ## Python SDK -Package **cloudevents** provides primitives to work with CloudEvents specification: -https://github.com/cloudevents/spec. +Package [**cloudevents**](src/cloudevents) provides primitives to work with +[CloudEvents specification](https://github.com/cloudevents/spec). 
### Installing @@ -33,15 +33,15 @@ Below we will provide samples on how to send cloudevents using the popular ### Binary HTTP CloudEvent ```python -from cloudevents.http import CloudEvent -from cloudevents.conversion import to_binary +from cloudevents_v1.http import CloudEvent +from cloudevents_v1.conversion import to_binary import requests # Create a CloudEvent # - The CloudEvent "id" is generated if omitted. "specversion" defaults to "1.0". attributes = { - "type": "com.example.sampletype1", - "source": "https://example.com/event-producer", + "type": "com.example.sampletype1", + "source": "https://example.com/event-producer", } data = {"message": "Hello World!"} event = CloudEvent(attributes, data) @@ -56,15 +56,15 @@ requests.post("", data=body, headers=headers) ### Structured HTTP CloudEvent ```python -from cloudevents.conversion import to_structured -from cloudevents.http import CloudEvent +from cloudevents_v1.conversion import to_structured +from cloudevents_v1.http import CloudEvent import requests # Create a CloudEvent # - The CloudEvent "id" is generated if omitted. "specversion" defaults to "1.0". 
attributes = { - "type": "com.example.sampletype2", - "source": "https://example.com/event-producer", + "type": "com.example.sampletype2", + "source": "https://example.com/event-producer", } data = {"message": "Hello World!"} event = CloudEvent(attributes, data) @@ -87,7 +87,7 @@ The code below shows how to consume a cloudevent using the popular python web fr ```python from flask import Flask, request -from cloudevents.http import from_http +from cloudevents_v1.http import from_http app = Flask(__name__) @@ -95,20 +95,20 @@ app = Flask(__name__) # create an endpoint at http://localhost:/3000/ @app.route("/", methods=["POST"]) def home(): - # create a CloudEvent - event = from_http(request.headers, request.get_data()) + # create a CloudEvent + event = from_http(request.headers, request.get_data()) - # you can access cloudevent fields as seen below - print( - f"Found {event['id']} from {event['source']} with type " - f"{event['type']} and specversion {event['specversion']}" - ) + # you can access cloudevent fields as seen below + print( + f"Found {event['id']} from {event['source']} with type " + f"{event['type']} and specversion {event['specversion']}" + ) - return "", 204 + return "", 204 if __name__ == "__main__": - app.run(port=3000) + app.run(port=3000) ``` You can find a complete example of turning a CloudEvent into a HTTP request @@ -162,18 +162,13 @@ with one of the project's SDKs, please send an email to ## Maintenance -We use [black][black] and [isort][isort] for autoformatting. We set up a [tox][tox] -environment to reformat the codebase. - -e.g. - -```bash -pip install tox -tox -e reformat -``` +We use [uv][uv] for dependency and package management, [ruff][ruff] and [isort][isort] +for autoformatting and [pre-commit][pre-commit] to automate those with commit +hooks. 
For information on releasing version bumps see [RELEASING.md](RELEASING.md) -[black]: https://black.readthedocs.io/ +[uv]: https://docs.astral.sh/uv/ +[ruff]: https://docs.astral.sh/ruff [isort]: https://pycqa.github.io/isort/ -[tox]: https://tox.wiki/ +[pre-commit]: https://pre-commit.com diff --git a/RELEASING.md b/RELEASING.md index f6ca05b1..39c621e5 100644 --- a/RELEASING.md +++ b/RELEASING.md @@ -4,22 +4,15 @@ This repository is configured to automatically publish the corresponding [PyPI package](https://pypi.org/project/cloudevents/) and GitHub Tag via GitHub Actions. To release a new CloudEvents SDK, contributors should bump `__version__` in -[cloudevents](cloudevents/__init__.py) to reflect the new release version. On merge, the action -will automatically build and release to PyPI using -[this PyPI GitHub Action](https://github.com/pypa/gh-action-pypi-publish). This -action gets called on all pushes to main (such as a version branch being merged -into main), but only releases a new version when the version number has changed. Note, -this action assumes pushes to main are version updates. Consequently, -[pypi-release.yml](.github/workflows/pypi-release.yml) will fail if you attempt to -push to main without updating `__version__` in -[cloudevents](cloudevents/__init__.py) so don't forget to do so. +`src/cloudevents/__init__.py` to reflect the new release version. On merge, the action +will automatically build and release to PyPI. This action gets called on all pushes to main +(such as a version branch being merged into main), but only releases a new version when the +version number has changed. Note, this action assumes pushes to main are version updates. +Consequently, the release workflow will fail if you attempt to push to main without updating +`__version__` in `src/cloudevents/__init__.py` so don't forget to do so. 
-After a version update is merged, the script [pypi_packaging.py](pypi_packaging.py) -will create a GitHub tag for the new cloudevents version using `__version__`. -The script fails if `__version__` and the local pypi version for -cloudevents are out of sync. For this reason, [pypi-release.yml](.github/workflows/pypi-release.yml) -first must upload the new cloudevents pypi package, and then download the recently updated pypi -cloudevents package for [pypi_packaging.py](pypi_packaging.py) not to fail. +After a version update is merged, a GitHub tag for the new cloudevents version is created +using `__version__`. View the GitHub workflow [pypi-release.yml](.github/workflows/pypi-release.yml) for more information. diff --git a/mypy.ini b/mypy.ini deleted file mode 100644 index d8fb9cc0..00000000 --- a/mypy.ini +++ /dev/null @@ -1,16 +0,0 @@ -[mypy] -plugins = pydantic.mypy -python_version = 3.8 - -pretty = True -show_error_context = True -follow_imports_for_stubs = True -# subset of mypy --strict -# https://mypy.readthedocs.io/en/stable/config_file.html -check_untyped_defs = True -disallow_incomplete_defs = True -warn_return_any = True -strict_equality = True - -[mypy-deprecation.*] -ignore_missing_imports = True diff --git a/pypi_packaging.py b/pypi_packaging.py deleted file mode 100644 index c81986d5..00000000 --- a/pypi_packaging.py +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright 2018-Present The CloudEvents Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import os - -import pkg_resources - -from setup import pypi_config - - -def createTag(): - from git import Repo - - # Get local pypi cloudevents version - published_pypi_version = pkg_resources.get_distribution( - pypi_config["package_name"] - ).version - - # Ensure pypi and local package versions match - if pypi_config["version_target"] == published_pypi_version: - # Create local git tag - repo = Repo(os.getcwd()) - repo.create_tag(pypi_config["version_target"]) - - # Push git tag to remote main - origin = repo.remote() - origin.push(pypi_config["version_target"]) - - else: - # PyPI publish likely failed - print( - f"Expected {pypi_config['package_name']}=={pypi_config['version_target']} " - f"but found {pypi_config['package_name']}=={published_pypi_version}" - ) - exit(1) - - -if __name__ == "__main__": - createTag() diff --git a/pyproject.toml b/pyproject.toml index 8727d44f..edfc9e06 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,19 +1,149 @@ -[tool.black] +[project] +name = "cloudevents" +dynamic = ["version"] +description = "CloudEvents Python SDK" +authors = [ + { name = "The Cloud Events Contributors", email = "cncfcloudevents@gmail.com" } +] +readme = "README.md" +requires-python = ">= 3.10" +license = "Apache-2.0" +classifiers = [ + "Intended Audience :: Information Technology", + "Intended Audience :: System Administrators", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache 
Software License", + "Development Status :: 5 - Production/Stable", + "Operating System :: OS Independent", + "Natural Language :: English", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", + "Typing :: Typed", +] +keywords = [ + "CloudEvents", + "Eventing", + "Serverless", +] +dependencies = [ + "python-dateutil>=2.8.2", +] + +[project.urls] +"Source code" = "https://github.com/cloudevents/sdk-python" +"Documentation" = "https://cloudevents.io" +"Home page" = "https://cloudevents.io" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[dependency-groups] +dev = [ + "ruff>=0.14.10", + "pytest>=9.0.2", + "mypy>=1.19.1", + "isort>=7.0.0", + "flake8>=7.3.0", + "pep8-naming>=0.15.1", + "flake8-print>=5.0.0", + "pre-commit>=4.5.1", + "pytest-cov>=7.0.0", + "types-python-dateutil>=2.9.0.20251115", +] + +[tool.uv.pip] +universal = true +generate-hashes = true + +[tool.hatch.version] +path = "src/cloudevents/__init__.py" + +[tool.hatch.metadata] +allow-direct-references = true + +[tool.hatch.build.targets.wheel.force-include] +"CHANGELOG.md" = "CHANGELOG.md" +"MAINTAINERS.md" = "MAINTAINERS.md" +"README.md" = "README.md" + +[tool.hatch.build.targets.sdist] +packages = ["src/cloudevents"] + +[tool.hatch.build.targets.sdist.force-include] +"CHANGELOG.md" = "CHANGELOG.md" +"MAINTAINERS.md" = "MAINTAINERS.md" + +[tool.ruff] line-length = 88 -include = '\.pyi?$' -exclude = ''' -/( - \.git - | \.hg - | \.mypy_cache - | \.tox - | \.venv - | _build - | buck-out - | build - | dist -)/ -''' - -[tool.isort] -profile = "black" +target-version = "py310" + +exclude = [ + ".bzr", + ".direnv", + ".eggs", + ".git", + ".git-rewrite", + ".hg", + ".mypy_cache", + ".nox", 
+ ".pants.d", + ".pyenv", + ".pytest_cache", + ".pytype", + ".ruff_cache", + ".svn", + ".tox", + ".venv", + ".vscode", + "__pypackages__", + "_build", + "buck-out", + "build", + "dist", + "site-packages", + "venv", +] + +[tool.ruff.lint] +ignore = ["E731"] +extend-ignore = ["E203"] +select = [ + "I", # isort - import sorting + "F401", # unused imports +] + + +[tool.pytest.ini_options] +testpaths = [ + "tests", +] + +[tool.mypy] +python_version = "3.10" + +ignore_missing_imports = true +namespace_packages = true +explicit_package_bases = true +scripts_are_modules = true +pretty = true +show_error_context = true +follow_imports_for_stubs = true +warn_redundant_casts = true +warn_unused_ignores = true +# subset of mypy --strict +# https://mypy.readthedocs.io/en/stable/config_file.html +check_untyped_defs = true +disallow_incomplete_defs = true +warn_return_any = true +strict_equality = true +disallow_untyped_defs = true +exclude = [ + "src/cloudevents/v1", +] diff --git a/requirements/dev.txt b/requirements/dev.txt deleted file mode 100644 index 63872949..00000000 --- a/requirements/dev.txt +++ /dev/null @@ -1,7 +0,0 @@ -black -isort -flake8 -pep8-naming -flake8-print -tox -pre-commit diff --git a/requirements/publish.txt b/requirements/publish.txt deleted file mode 100644 index a296666f..00000000 --- a/requirements/publish.txt +++ /dev/null @@ -1,2 +0,0 @@ -GitPython -cloudevents diff --git a/requirements/test.txt b/requirements/test.txt deleted file mode 100644 index 3e32e4a8..00000000 --- a/requirements/test.txt +++ /dev/null @@ -1,13 +0,0 @@ -flake8 -pep8-naming -flake8-print -pytest -pytest-cov -# web app tests -sanic -sanic-testing -aiohttp -Pillow -requests -flask -pydantic>=2.0.0,<3.0 diff --git a/samples/http-image-cloudevents/client.py b/samples/http-image-cloudevents/client.py index 021c1f56..a61303f1 100644 --- a/samples/http-image-cloudevents/client.py +++ b/samples/http-image-cloudevents/client.py @@ -15,9 +15,8 @@ import sys import requests - -from 
cloudevents.conversion import to_binary, to_structured -from cloudevents.http import CloudEvent +from cloudevents_v1.conversion import to_binary, to_structured +from cloudevents_v1.http import CloudEvent resp = requests.get( "https://raw.githubusercontent.com/cncf/artwork/master/projects/cloudevents/horizontal/color/cloudevents-horizontal-color.png" # noqa diff --git a/samples/http-image-cloudevents/image_sample_server.py b/samples/http-image-cloudevents/image_sample_server.py index da303025..ead3e596 100644 --- a/samples/http-image-cloudevents/image_sample_server.py +++ b/samples/http-image-cloudevents/image_sample_server.py @@ -14,11 +14,10 @@ import io +from cloudevents_v1.http import from_http from flask import Flask, request from PIL import Image -from cloudevents.http import from_http - app = Flask(__name__) diff --git a/samples/http-image-cloudevents/image_sample_test.py b/samples/http-image-cloudevents/image_sample_test.py index 5fe6ec9d..33895c69 100644 --- a/samples/http-image-cloudevents/image_sample_test.py +++ b/samples/http-image-cloudevents/image_sample_test.py @@ -18,12 +18,11 @@ import pytest from client import image_bytes +from cloudevents_v1.conversion import to_binary, to_structured +from cloudevents_v1.http import CloudEvent, from_http from image_sample_server import app from PIL import Image -from cloudevents.conversion import to_binary, to_structured -from cloudevents.http import CloudEvent, from_http - image_fileobj = io.BytesIO(image_bytes) image_expected_shape = (1880, 363) diff --git a/samples/http-json-cloudevents/client.py b/samples/http-json-cloudevents/client.py index 5ecc3793..f68f27b3 100644 --- a/samples/http-json-cloudevents/client.py +++ b/samples/http-json-cloudevents/client.py @@ -15,9 +15,8 @@ import sys import requests - -from cloudevents.conversion import to_binary, to_structured -from cloudevents.http import CloudEvent +from cloudevents_v1.conversion import to_binary, to_structured +from cloudevents_v1.http import 
CloudEvent def send_binary_cloud_event(url): diff --git a/samples/http-json-cloudevents/json_sample_server.py b/samples/http-json-cloudevents/json_sample_server.py index c3a399ee..9fa1a6d1 100644 --- a/samples/http-json-cloudevents/json_sample_server.py +++ b/samples/http-json-cloudevents/json_sample_server.py @@ -12,10 +12,9 @@ # License for the specific language governing permissions and limitations # under the License. +from cloudevents_v1.http import from_http from flask import Flask, request -from cloudevents.http import from_http - app = Flask(__name__) diff --git a/samples/http-json-cloudevents/json_sample_test.py b/samples/http-json-cloudevents/json_sample_test.py index 1d92874d..612aade0 100644 --- a/samples/http-json-cloudevents/json_sample_test.py +++ b/samples/http-json-cloudevents/json_sample_test.py @@ -13,11 +13,10 @@ # under the License. import pytest +from cloudevents_v1.conversion import to_binary, to_structured +from cloudevents_v1.http import CloudEvent from json_sample_server import app -from cloudevents.conversion import to_binary, to_structured -from cloudevents.http import CloudEvent - @pytest.fixture def client(): diff --git a/setup.py b/setup.py deleted file mode 100644 index a4e4befc..00000000 --- a/setup.py +++ /dev/null @@ -1,83 +0,0 @@ -# Copyright 2018-Present The CloudEvents Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import codecs -import os -import pathlib - -from setuptools import find_packages, setup - - -def read(rel_path): - here = os.path.abspath(os.path.dirname(__file__)) - with codecs.open(os.path.join(here, rel_path), "r") as fp: - return fp.read() - - -def get_version(rel_path): - for line in read(rel_path).splitlines(): - if line.startswith("__version__"): - delim = '"' if '"' in line else "'" - return line.split(delim)[1] - else: - raise RuntimeError("Unable to find version string.") - - -# FORMAT: 1.x.x -pypi_config = { - "version_target": get_version("cloudevents/__init__.py"), - "package_name": "cloudevents", -} - -here = pathlib.Path(__file__).parent.resolve() -long_description = (here / "README.md").read_text(encoding="utf-8") - -if __name__ == "__main__": - setup( - name=pypi_config["package_name"], - summary="CloudEvents Python SDK", - long_description_content_type="text/markdown", - long_description=long_description, - description="CloudEvents Python SDK", - url="https://github.com/cloudevents/sdk-python", - author="The Cloud Events Contributors", - author_email="cncfcloudevents@gmail.com", - home_page="https://cloudevents.io", - classifiers=[ - "Intended Audience :: Information Technology", - "Intended Audience :: System Administrators", - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Development Status :: 5 - Production/Stable", - "Operating System :: OS Independent", - "Natural Language :: English", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Typing :: Typed", - ], - keywords="CloudEvents Eventing Serverless", - license="https://www.apache.org/licenses/LICENSE-2.0", - license_file="LICENSE", - packages=find_packages(exclude=["cloudevents.tests"]), - 
include_package_data=True, - version=pypi_config["version_target"], - install_requires=["deprecation>=2.0,<3.0"], - extras_require={"pydantic": "pydantic>=1.0.0,<3.0"}, - zip_safe=True, - ) diff --git a/src/cloudevents/__init__.py b/src/cloudevents/__init__.py new file mode 100644 index 00000000..9ef80432 --- /dev/null +++ b/src/cloudevents/__init__.py @@ -0,0 +1,15 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +__version__ = "2.0.0-alpha1" diff --git a/src/cloudevents/core/__init__.py b/src/cloudevents/core/__init__.py new file mode 100644 index 00000000..e01d2a11 --- /dev/null +++ b/src/cloudevents/core/__init__.py @@ -0,0 +1,15 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +"""This package contains the core functionality of the CloudEvents spec.""" diff --git a/src/cloudevents/core/base.py b/src/cloudevents/core/base.py new file mode 100644 index 00000000..747f5a6a --- /dev/null +++ b/src/cloudevents/core/base.py @@ -0,0 +1,145 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from datetime import datetime +from typing import Any, Callable, Protocol + +EventFactory = Callable[ + [dict[str, Any], dict[str, Any] | str | bytes | None], "BaseCloudEvent" +] +""" +Type alias for a callable that creates a BaseCloudEvent from attributes and data. + +Args: + attributes: The CloudEvent attributes (required fields like id, source, type, etc.) + data: The CloudEvent data payload (optional) + +Returns: + A BaseCloudEvent instance +""" + + +class BaseCloudEvent(Protocol): + """ + The CloudEvent Python wrapper contract exposing generically-available + properties and APIs. + + Implementations might handle fields and have other APIs exposed but are + obliged to follow this contract. + """ + + def __init__( + self, + attributes: dict[str, Any], + data: dict[str, Any] | str | bytes | None = None, + ) -> None: + """ + Create a new CloudEvent instance. + + :param attributes: The attributes of the CloudEvent instance. + :param data: The payload of the CloudEvent instance. + + :raises ValueError: If any of the required attributes are missing or have invalid values. 
+ :raises TypeError: If any of the attributes have invalid types. + """ + ... + + def get_id(self) -> str: + """ + Retrieve the ID of the event. + + :return: The ID of the event. + """ + ... + + def get_source(self) -> str: + """ + Retrieve the source of the event. + + :return: The source of the event. + """ + ... + + def get_type(self) -> str: + """ + Retrieve the type of the event. + + :return: The type of the event. + """ + ... + + def get_specversion(self) -> str: + """ + Retrieve the specversion of the event. + + :return: The specversion of the event. + """ + ... + + def get_datacontenttype(self) -> str | None: + """ + Retrieve the datacontenttype of the event. + + :return: The datacontenttype of the event. + """ + ... + + def get_dataschema(self) -> str | None: + """ + Retrieve the dataschema of the event. + + :return: The dataschema of the event. + """ + ... + + def get_subject(self) -> str | None: + """ + Retrieve the subject of the event. + + :return: The subject of the event. + """ + ... + + def get_time(self) -> datetime | None: + """ + Retrieve the time of the event. + + :return: The time of the event. + """ + ... + + def get_extension(self, extension_name: str) -> Any: + """ + Retrieve an extension attribute of the event. + + :param extension_name: The name of the extension attribute. + :return: The value of the extension attribute. + """ + ... + + def get_data(self) -> dict[str, Any] | str | bytes | None: + """ + Retrieve data of the event. + + :return: The data of the event. + """ + ... + + def get_attributes(self) -> dict[str, Any]: + """ + Retrieve all attributes of the event. + + :return: The attributes of the event. + """ + ... 
diff --git a/src/cloudevents/core/bindings/__init__.py b/src/cloudevents/core/bindings/__init__.py new file mode 100644 index 00000000..2379308a --- /dev/null +++ b/src/cloudevents/core/bindings/__init__.py @@ -0,0 +1,20 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +""" +CloudEvents protocol bindings. + +This package provides protocol-specific bindings for CloudEvents, including HTTP and Kafka. +Each binding module provides functions to convert CloudEvents to/from protocol-specific messages. +""" diff --git a/src/cloudevents/core/bindings/amqp.py b/src/cloudevents/core/bindings/amqp.py new file mode 100644 index 00000000..7791c888 --- /dev/null +++ b/src/cloudevents/core/bindings/amqp.py @@ -0,0 +1,462 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+
+from dataclasses import dataclass
+from datetime import datetime, timezone
+from typing import Any, Final
+
+from dateutil.parser import isoparse
+
+from cloudevents.core.base import BaseCloudEvent, EventFactory
+from cloudevents.core.bindings.common import get_event_factory_for_version
+from cloudevents.core.formats.base import Format
+from cloudevents.core.formats.json import JSONFormat
+from cloudevents.core.spec import SPECVERSION_V1_0
+from cloudevents.core.v1.event import CloudEvent
+
+# AMQP CloudEvents spec allows both cloudEvents_ and cloudEvents: prefixes
+# The underscore variant is preferred for JMS 2.0 compatibility
+CE_PREFIX_UNDERSCORE: Final[str] = "cloudEvents_"
+CE_PREFIX_COLON: Final[str] = "cloudEvents:"
+CONTENT_TYPE_PROPERTY: Final[str] = "content-type"
+
+
+@dataclass(frozen=True)
+class AMQPMessage:
+    """
+    Represents an AMQP 1.0 message containing CloudEvent data.
+
+    This dataclass encapsulates AMQP message properties, application properties,
+    and application data for transmitting CloudEvents over AMQP. It is immutable
+    to prevent accidental modifications and works with any AMQP 1.0 library
+    (e.g., qpid-proton, azure-servicebus).
+
+    Attributes:
+        properties: AMQP message properties as a dictionary
+        application_properties: AMQP application properties as a dictionary
+        application_data: AMQP application data section as bytes
+    """
+
+    properties: dict[str, Any]
+    application_properties: dict[str, Any]
+    application_data: bytes
+
+
+def _encode_amqp_value(value: Any) -> Any:
+    """
+    Encode a CloudEvent attribute value for AMQP application properties.
+
+    Handles special encoding for datetime objects to AMQP timestamp type
+    (milliseconds since Unix epoch as int). Per AMQP 1.0 CloudEvents spec,
+    senders SHOULD use native AMQP types when efficient.
+ + :param value: The attribute value to encode + :return: Encoded value (int for datetime timestamp, original type otherwise) + """ + if isinstance(value, datetime): + # AMQP 1.0 timestamp: milliseconds since Unix epoch (UTC) + timestamp_ms = int(value.timestamp() * 1000) + return timestamp_ms + + return value + + +def _decode_amqp_value(attr_name: str, value: Any) -> Any: + """ + Decode a CloudEvent attribute value from AMQP application properties. + + Handles special parsing for the 'time' attribute. Per AMQP 1.0 CloudEvents spec, + receivers MUST accept both native AMQP timestamp (int milliseconds since epoch) + and canonical string form (ISO 8601). + + :param attr_name: The name of the CloudEvent attribute + :param value: The AMQP property value + :return: Decoded value (datetime for 'time' attribute, original type otherwise) + """ + if attr_name == "time": + if isinstance(value, int): + # AMQP timestamp: milliseconds since Unix epoch + return datetime.fromtimestamp(value / 1000.0, tz=timezone.utc) + if isinstance(value, str): + # ISO 8601 string (canonical form, also accepted per spec) + return isoparse(value) + + return value + + +def to_binary(event: BaseCloudEvent, event_format: Format) -> AMQPMessage: + """ + Convert a CloudEvent to AMQP binary content mode. + + In binary mode, CloudEvent attributes are mapped to AMQP application properties + with the 'cloudEvents_' prefix, except for 'datacontenttype' which maps to the + AMQP 'content-type' property. The event data is placed directly in the AMQP + application-data section. Datetime values are encoded as AMQP timestamp type + (milliseconds since Unix epoch), while boolean and integer values are preserved + as native types. + + Note: Per AMQP CloudEvents spec, attributes may use 'cloudEvents_' or 'cloudEvents:' + prefix. This implementation uses 'cloudEvents_' for JMS 2.0 compatibility. 
+ + Example: + >>> from cloudevents.core.v1.event import CloudEvent + >>> from cloudevents.core.formats.json import JSONFormat + >>> + >>> event = CloudEvent( + ... attributes={"type": "com.example.test", "source": "/test"}, + ... data={"message": "Hello"} + ... ) + >>> message = to_binary(event, JSONFormat()) + >>> # message.application_properties = {"cloudEvents_type": "com.example.test", ...} + >>> # message.properties = {"content-type": "application/json"} + >>> # message.application_data = b'{"message": "Hello"}' + + :param event: The CloudEvent to convert + :param event_format: Format implementation for data serialization + :return: AMQPMessage with CloudEvent attributes as application properties + """ + properties: dict[str, Any] = {} + application_properties: dict[str, Any] = {} + attributes = event.get_attributes() + + for attr_name, attr_value in attributes.items(): + if attr_name == "datacontenttype": + properties[CONTENT_TYPE_PROPERTY] = str(attr_value) + else: + property_name = f"{CE_PREFIX_UNDERSCORE}{attr_name}" + # Encode datetime to AMQP timestamp (milliseconds since epoch) + # Other types (bool, int, str, bytes) use native AMQP types + application_properties[property_name] = _encode_amqp_value(attr_value) + + data = event.get_data() + datacontenttype = attributes.get("datacontenttype") + application_data = event_format.write_data(data, datacontenttype) + + return AMQPMessage( + properties=properties, + application_properties=application_properties, + application_data=application_data, + ) + + +def from_binary( + message: AMQPMessage, + event_format: Format, + event_factory: EventFactory | None = None, +) -> BaseCloudEvent: + """ + Parse an AMQP binary content mode message to a CloudEvent. + + Auto-detects the CloudEvents version from the application properties + and uses the appropriate event factory if not explicitly provided. 
+ + Extracts CloudEvent attributes from AMQP application properties with either + 'cloudEvents_' or 'cloudEvents:' prefix (per AMQP CloudEvents spec), and treats + the AMQP 'content-type' property as the 'datacontenttype' attribute. The + application-data section is parsed as event data according to the content type. + The 'time' attribute accepts both AMQP timestamp (int milliseconds) and ISO 8601 + string, while other native AMQP types (boolean, integer) are preserved. + + Example: + >>> from cloudevents.core.v1.event import CloudEvent + >>> from cloudevents.core.formats.json import JSONFormat + >>> + >>> message = AMQPMessage( + ... properties={"content-type": "application/json"}, + ... application_properties={ + ... "cloudEvents_type": "com.example.test", + ... "cloudEvents_source": "/test", + ... "cloudEvents_id": "123", + ... "cloudEvents_specversion": "1.0" + ... }, + ... application_data=b'{"message": "Hello"}' + ... ) + >>> event = from_binary(message, JSONFormat(), CloudEvent) + + :param message: AMQPMessage to parse + :param event_format: Format implementation for data deserialization + :param event_factory: Factory function to create CloudEvent instances + :return: CloudEvent instance + """ + attributes: dict[str, Any] = {} + + for prop_name, prop_value in message.application_properties.items(): + # Check for both cloudEvents_ and cloudEvents: prefixes + attr_name = None + + if prop_name.startswith(CE_PREFIX_UNDERSCORE): + attr_name = prop_name[len(CE_PREFIX_UNDERSCORE) :] + elif prop_name.startswith(CE_PREFIX_COLON): + attr_name = prop_name[len(CE_PREFIX_COLON) :] + + if attr_name: + # Decode timestamp (int or ISO 8601 string) to datetime, preserve other native types + attributes[attr_name] = _decode_amqp_value(attr_name, prop_value) + + if CONTENT_TYPE_PROPERTY in message.properties: + attributes["datacontenttype"] = message.properties[CONTENT_TYPE_PROPERTY] + + # Auto-detect version if factory not provided + if event_factory is None: + specversion = 
attributes.get("specversion", SPECVERSION_V1_0) + event_factory = get_event_factory_for_version(specversion) + + datacontenttype = attributes.get("datacontenttype") + data = event_format.read_data(message.application_data, datacontenttype) + + return event_factory(attributes, data) + + +def to_structured(event: BaseCloudEvent, event_format: Format) -> AMQPMessage: + """ + Convert a CloudEvent to AMQP structured content mode. + + In structured mode, the entire CloudEvent (attributes and data) is serialized + into the AMQP application-data section using the specified format. The + content-type property is set to the format's media type (e.g., + "application/cloudevents+json"). + + Example: + >>> from cloudevents.core.v1.event import CloudEvent + >>> from cloudevents.core.formats.json import JSONFormat + >>> + >>> event = CloudEvent( + ... attributes={"type": "com.example.test", "source": "/test"}, + ... data={"message": "Hello"} + ... ) + >>> message = to_structured(event, JSONFormat()) + >>> # message.properties = {"content-type": "application/cloudevents+json"} + >>> # message.application_data = b'{"type": "com.example.test", ...}' + + :param event: The CloudEvent to convert + :param event_format: Format implementation for serialization + :return: AMQPMessage with structured content in application-data + """ + content_type = event_format.get_content_type() + + properties = {CONTENT_TYPE_PROPERTY: content_type} + application_properties: dict[str, Any] = {} + + application_data = event_format.write(event) + + return AMQPMessage( + properties=properties, + application_properties=application_properties, + application_data=application_data, + ) + + +def from_structured( + message: AMQPMessage, + event_format: Format, + event_factory: EventFactory | None = None, +) -> BaseCloudEvent: + """ + Parse an AMQP structured content mode message to a CloudEvent. + + Deserializes the CloudEvent from the AMQP application-data section using the + specified format. 
Any cloudEvents_-prefixed application properties are ignored + as the application-data contains all event metadata. + + If event_factory is not provided, version detection is delegated to the format + implementation, which will auto-detect based on the 'specversion' field. + + Example: + >>> from cloudevents.core.v1.event import CloudEvent + >>> from cloudevents.core.formats.json import JSONFormat + >>> + >>> # Explicit factory + >>> message = AMQPMessage( + ... properties={"content-type": "application/cloudevents+json"}, + ... application_properties={}, + ... application_data=b'{"type": "com.example.test", "source": "/test", ...}' + ... ) + >>> event = from_structured(message, JSONFormat(), CloudEvent) + >>> + >>> # Auto-detect version + >>> event = from_structured(message, JSONFormat()) + + :param message: AMQPMessage to parse + :param event_format: Format implementation for deserialization + :param event_factory: Factory function to create CloudEvent instances. + If None, the format will auto-detect the version. + :return: CloudEvent instance + """ + # Delegate version detection to format layer + return event_format.read(event_factory, message.application_data) + + +def from_amqp( + message: AMQPMessage, + event_format: Format, + event_factory: EventFactory | None = None, +) -> BaseCloudEvent: + """ + Parse an AMQP message to a CloudEvent with automatic mode detection. + + Auto-detects CloudEvents version and uses appropriate event factory if not provided. + + Automatically detects whether the message uses binary or structured content mode: + - If content-type starts with "application/cloudevents" → structured mode + - Otherwise → binary mode + + This function provides a convenient way to handle both content modes without + requiring the caller to determine the mode beforehand. 
+ + Example: + >>> from cloudevents.core.v1.event import CloudEvent + >>> from cloudevents.core.formats.json import JSONFormat + >>> + >>> # Works with binary mode + >>> binary_msg = AMQPMessage( + ... properties={"content-type": "application/json"}, + ... application_properties={"cloudEvents_type": "com.example.test", ...}, + ... application_data=b'...' + ... ) + >>> event1 = from_amqp(binary_msg, JSONFormat(), CloudEvent) + >>> + >>> # Also works with structured mode + >>> structured_msg = AMQPMessage( + ... properties={"content-type": "application/cloudevents+json"}, + ... application_properties={}, + ... application_data=b'{"type": "com.example.test", ...}' + ... ) + >>> event2 = from_amqp(structured_msg, JSONFormat(), CloudEvent) + + :param message: AMQPMessage to parse + :param event_format: Format implementation for deserialization + :param event_factory: Factory function to create CloudEvent instances (auto-detected if None) + :return: CloudEvent instance + """ + content_type = message.properties.get(CONTENT_TYPE_PROPERTY, "") + + if isinstance(content_type, str) and content_type.lower().startswith( + "application/cloudevents" + ): + return from_structured(message, event_format, event_factory) + + return from_binary(message, event_format, event_factory) + + +def to_binary_event( + event: BaseCloudEvent, + event_format: Format | None = None, +) -> AMQPMessage: + """ + Convenience wrapper for to_binary with JSON format as default. + + Example: + >>> from cloudevents.core.v1.event import CloudEvent + >>> from cloudevents.core.bindings import amqp + >>> + >>> event = CloudEvent( + ... attributes={"type": "com.example.test", "source": "/test"}, + ... data={"message": "Hello"} + ... 
) + >>> message = amqp.to_binary_event(event) + + :param event: The CloudEvent to convert + :param event_format: Format implementation (defaults to JSONFormat) + :return: AMQPMessage with CloudEvent attributes as application properties + """ + if event_format is None: + event_format = JSONFormat() + return to_binary(event, event_format) + + +def from_binary_event( + message: AMQPMessage, + event_format: Format | None = None, +) -> CloudEvent: + """ + Convenience wrapper for from_binary with JSON format and CloudEvent as defaults. + + Example: + >>> from cloudevents.core.bindings import amqp + >>> event = amqp.from_binary_event(message) + + :param message: AMQPMessage to parse + :param event_format: Format implementation (defaults to JSONFormat) + :return: CloudEvent instance + """ + if event_format is None: + event_format = JSONFormat() + return from_binary(message, event_format, CloudEvent) + + +def to_structured_event( + event: BaseCloudEvent, + event_format: Format | None = None, +) -> AMQPMessage: + """ + Convenience wrapper for to_structured with JSON format as default. + + Example: + >>> from cloudevents.core.v1.event import CloudEvent + >>> from cloudevents.core.bindings import amqp + >>> + >>> event = CloudEvent( + ... attributes={"type": "com.example.test", "source": "/test"}, + ... data={"message": "Hello"} + ... ) + >>> message = amqp.to_structured_event(event) + + :param event: The CloudEvent to convert + :param event_format: Format implementation (defaults to JSONFormat) + :return: AMQPMessage with structured content in application-data + """ + if event_format is None: + event_format = JSONFormat() + return to_structured(event, event_format) + + +def from_structured_event( + message: AMQPMessage, + event_format: Format | None = None, +) -> CloudEvent: + """ + Convenience wrapper for from_structured with JSON format and CloudEvent as defaults. 
+ + Example: + >>> from cloudevents.core.bindings import amqp + >>> event = amqp.from_structured_event(message) + + :param message: AMQPMessage to parse + :param event_format: Format implementation (defaults to JSONFormat) + :return: CloudEvent instance + """ + if event_format is None: + event_format = JSONFormat() + return from_structured(message, event_format, CloudEvent) + + +def from_amqp_event( + message: AMQPMessage, + event_format: Format | None = None, +) -> CloudEvent: + """ + Convenience wrapper for from_amqp with JSON format and CloudEvent as defaults. + Auto-detects binary or structured mode. + + Example: + >>> from cloudevents.core.bindings import amqp + >>> event = amqp.from_amqp_event(message) + + :param message: AMQPMessage to parse + :param event_format: Format implementation (defaults to JSONFormat) + :return: CloudEvent instance + """ + if event_format is None: + event_format = JSONFormat() + return from_amqp(message, event_format, CloudEvent) diff --git a/src/cloudevents/core/bindings/common.py b/src/cloudevents/core/bindings/common.py new file mode 100644 index 00000000..05475ac3 --- /dev/null +++ b/src/cloudevents/core/bindings/common.py @@ -0,0 +1,89 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +""" +Common utilities for CloudEvents protocol bindings. + +This module provides shared functionality for protocol bindings (HTTP, Kafka, etc.) 
+to handle CloudEvent attribute encoding and decoding per the CloudEvents specification. +""" + +from datetime import datetime +from typing import Any, Final +from urllib.parse import quote, unquote + +from dateutil.parser import isoparse + +from cloudevents.core.base import EventFactory +from cloudevents.core.spec import SPECVERSION_V0_3 +from cloudevents.core.v03.event import CloudEvent as CloudEventV03 +from cloudevents.core.v1.event import CloudEvent + +TIME_ATTR: Final[str] = "time" +CONTENT_TYPE_HEADER: Final[str] = "content-type" +DATACONTENTTYPE_ATTR: Final[str] = "datacontenttype" + + +def encode_header_value(value: Any) -> str: + """ + Encode a CloudEvent attribute value for use in a protocol header. + + Handles special encoding for datetime objects (ISO 8601 with 'Z' suffix for UTC) + and applies percent-encoding for non-ASCII and special characters per RFC 3986. + + :param value: The attribute value to encode + :return: Percent-encoded string suitable for protocol headers + """ + if isinstance(value, datetime): + str_value = value.isoformat() + if str_value.endswith("+00:00"): + str_value = str_value[:-6] + "Z" + return quote(str_value, safe="") + + return quote(str(value), safe="") + + +def decode_header_value(attr_name: str, value: str) -> Any: + """ + Decode a CloudEvent attribute value from a protocol header. + + Applies percent-decoding and special parsing for the 'time' attribute + (converts to datetime object using RFC 3339 parsing). + + :param attr_name: The name of the CloudEvent attribute + :param value: The percent-encoded header value + :return: Decoded value (datetime for 'time' attribute, string otherwise) + """ + decoded = unquote(value) + + if attr_name == TIME_ATTR: + return isoparse(decoded) + + return decoded + + +def get_event_factory_for_version(specversion: str) -> EventFactory: + """ + Get the appropriate event factory based on the CloudEvents specification version. 
+ + This function returns the CloudEvent class implementation for the specified + version. Used by protocol bindings for automatic version detection. + + :param specversion: The CloudEvents specification version (e.g., "0.3" or "1.0") + :return: EventFactory for the specified version (defaults to v1.0 for unknown versions) + """ + if specversion == SPECVERSION_V0_3: + return CloudEventV03 + # Default to v1.0 for unknown versions + return CloudEvent diff --git a/src/cloudevents/core/bindings/http.py b/src/cloudevents/core/bindings/http.py new file mode 100644 index 00000000..86e013b9 --- /dev/null +++ b/src/cloudevents/core/bindings/http.py @@ -0,0 +1,379 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from dataclasses import dataclass +from typing import Any, Final + +from cloudevents.core.base import BaseCloudEvent, EventFactory +from cloudevents.core.bindings.common import ( + CONTENT_TYPE_HEADER, + DATACONTENTTYPE_ATTR, + decode_header_value, + encode_header_value, + get_event_factory_for_version, +) +from cloudevents.core.formats.base import Format +from cloudevents.core.formats.json import JSONFormat +from cloudevents.core.spec import SPECVERSION_V1_0 + +CE_PREFIX: Final[str] = "ce-" + + +@dataclass(frozen=True) +class HTTPMessage: + """ + Represents an HTTP message (request or response) containing CloudEvent data. + + This dataclass encapsulates HTTP headers and body for transmitting CloudEvents + over HTTP. 
It is immutable to prevent accidental modifications and works with + any HTTP framework or library. + + Attributes: + headers: HTTP headers as a dictionary with string keys and values + body: HTTP body as bytes + """ + + headers: dict[str, str] + body: bytes + + +def to_binary(event: BaseCloudEvent, event_format: Format) -> HTTPMessage: + """ + Convert a CloudEvent to HTTP binary content mode. + + In binary mode, CloudEvent attributes are mapped to HTTP headers with the 'ce-' prefix, + except for 'datacontenttype' which maps to the 'Content-Type' header. The event data + is placed directly in the HTTP body. + + Example: + >>> from cloudevents.core.v1.event import CloudEvent + >>> from cloudevents.core.formats.json import JSONFormat + >>> + >>> event = CloudEvent( + ... attributes={"type": "com.example.test", "source": "/test"}, + ... data={"message": "Hello"} + ... ) + >>> message = to_binary(event, JSONFormat()) + >>> # message.headers = {"ce-type": "com.example.test", "ce-source": "/test", ...} + >>> # message.body = b'{"message": "Hello"}' + + :param event: The CloudEvent to convert + :param event_format: Format implementation for data serialization + :return: HTTPMessage with ce-prefixed headers and event data as body + """ + headers: dict[str, str] = {} + attributes = event.get_attributes() + + for attr_name, attr_value in attributes.items(): + if attr_value is None: + continue + + if attr_name == DATACONTENTTYPE_ATTR: + headers[CONTENT_TYPE_HEADER] = str(attr_value) + else: + header_name = f"{CE_PREFIX}{attr_name}" + headers[header_name] = encode_header_value(attr_value) + + data = event.get_data() + datacontenttype = attributes.get(DATACONTENTTYPE_ATTR) + body = event_format.write_data(data, datacontenttype) + + return HTTPMessage(headers=headers, body=body) + + +def from_binary( + message: HTTPMessage, + event_format: Format, + event_factory: EventFactory | None = None, +) -> BaseCloudEvent: + """ + Parse an HTTP binary content mode message to a CloudEvent. 
+ + Auto-detects the CloudEvents version from the 'ce-specversion' header + and uses the appropriate event factory if not explicitly provided. + + Extracts CloudEvent attributes from ce-prefixed HTTP headers and treats the + 'Content-Type' header as the 'datacontenttype' attribute. The HTTP body is + parsed as event data according to the content type. + + Example: + >>> from cloudevents.core.v1.event import CloudEvent + >>> from cloudevents.core.formats.json import JSONFormat + >>> + >>> message = HTTPMessage( + ... headers={"ce-type": "com.example.test", "ce-source": "/test", + ... "ce-id": "123", "ce-specversion": "1.0"}, + ... body=b'{"message": "Hello"}' + ... ) + >>> event = from_binary(message, JSONFormat(), CloudEvent) + + :param message: HTTPMessage to parse + :param event_format: Format implementation for data deserialization + :param event_factory: Factory function to create CloudEvent instances (auto-detected if None) + :return: CloudEvent instance + """ + attributes: dict[str, Any] = {} + + for header_name, header_value in message.headers.items(): + normalized_name = header_name.lower() + + if normalized_name.startswith(CE_PREFIX): + attr_name = normalized_name[len(CE_PREFIX) :] + attributes[attr_name] = decode_header_value(attr_name, header_value) + elif normalized_name == CONTENT_TYPE_HEADER: + attributes[DATACONTENTTYPE_ATTR] = header_value + + # Auto-detect version if factory not provided + if event_factory is None: + specversion = attributes.get("specversion", SPECVERSION_V1_0) + event_factory = get_event_factory_for_version(specversion) + + datacontenttype = attributes.get(DATACONTENTTYPE_ATTR) + data = event_format.read_data(message.body, datacontenttype) + + return event_factory(attributes, data) + + +def to_structured(event: BaseCloudEvent, event_format: Format) -> HTTPMessage: + """ + Convert a CloudEvent to HTTP structured content mode. 
+ + In structured mode, the entire CloudEvent (attributes and data) is serialized + into the HTTP body using the specified format. The Content-Type header is set + to the format's media type. + + Example: + >>> from cloudevents.core.v1.event import CloudEvent + >>> from cloudevents.core.formats.json import JSONFormat + >>> + >>> event = CloudEvent( + ... attributes={"type": "com.example.test", "source": "/test"}, + ... data={"message": "Hello"} + ... ) + >>> message = to_structured(event, JSONFormat()) + >>> # message.headers = {"content-type": "application/cloudevents+json"} + >>> # message.body = b'{"type": "com.example.test", "source": "/test", ...}' + + :param event: The CloudEvent to convert + :param event_format: Format implementation for serialization + :return: HTTPMessage with structured content in body + """ + content_type = event_format.get_content_type() + + headers = {CONTENT_TYPE_HEADER: content_type} + + body = event_format.write(event) + + return HTTPMessage(headers=headers, body=body) + + +def from_structured( + message: HTTPMessage, + event_format: Format, + event_factory: EventFactory | None = None, +) -> BaseCloudEvent: + """ + Parse an HTTP structured content mode message to a CloudEvent. + + Deserializes the CloudEvent from the HTTP body using the specified format. + Any ce-prefixed headers are ignored as the body contains all event metadata. + + If event_factory is not provided, version detection is delegated to the format + implementation, which will auto-detect based on the 'specversion' field. + + Example: + >>> from cloudevents.core.v1.event import CloudEvent + >>> from cloudevents.core.formats.json import JSONFormat + >>> + >>> # Explicit factory (recommended for performance) + >>> message = HTTPMessage( + ... headers={"content-type": "application/cloudevents+json"}, + ... body=b'{"type": "com.example.test", "source": "/test", ...}' + ... 
) + >>> event = from_structured(message, JSONFormat(), CloudEvent) + >>> + >>> # Auto-detect version (convenient) + >>> event = from_structured(message, JSONFormat()) + + :param message: HTTPMessage to parse + :param event_format: Format implementation for deserialization + :param event_factory: Factory function to create CloudEvent instances. + If None, the format will auto-detect the version. + :return: CloudEvent instance + """ + # Delegate version detection to format layer + return event_format.read(event_factory, message.body) + + +def from_http( + message: HTTPMessage, + event_format: Format, + event_factory: EventFactory | None = None, +) -> BaseCloudEvent: + """ + Parse an HTTP message to a CloudEvent with automatic mode detection. + + Auto-detects CloudEvents version and uses appropriate event factory if not provided. + + Automatically detects whether the message uses binary or structured content mode: + - If any ce- prefixed headers are present → binary mode + - Otherwise → structured mode + + This function provides a convenient way to handle both content modes without + requiring the caller to determine the mode beforehand. + + Example: + >>> from cloudevents.core.v1.event import CloudEvent + >>> from cloudevents.core.formats.json import JSONFormat + >>> + >>> # Works with binary mode + >>> binary_msg = HTTPMessage( + ... headers={"ce-type": "com.example.test", ...}, + ... body=b'...' + ... ) + >>> event1 = from_http(binary_msg, JSONFormat(), CloudEvent) + >>> + >>> # Also works with structured mode + >>> structured_msg = HTTPMessage( + ... headers={"content-type": "application/cloudevents+json"}, + ... body=b'{"type": "com.example.test", ...}' + ... 
) + >>> event2 = from_http(structured_msg, JSONFormat(), CloudEvent) + + :param message: HTTPMessage to parse + :param event_format: Format implementation for deserialization + :param event_factory: Factory function to create CloudEvent instances (auto-detected if None) + :return: CloudEvent instance + """ + if any(key.lower().startswith(CE_PREFIX) for key in message.headers.keys()): + return from_binary(message, event_format, event_factory) + + return from_structured(message, event_format, event_factory) + + +def to_binary_event( + event: BaseCloudEvent, + event_format: Format | None = None, +) -> HTTPMessage: + """ + Convenience wrapper for to_binary with JSON format as default. + + Example: + >>> from cloudevents.core.v1.event import CloudEvent + >>> from cloudevents.core.bindings import http + >>> + >>> event = CloudEvent( + ... attributes={"type": "com.example.test", "source": "/test"}, + ... data={"message": "Hello"} + ... ) + >>> message = http.to_binary_event(event) + + :param event: The CloudEvent to convert + :param event_format: Format implementation (defaults to JSONFormat) + :return: HTTPMessage with ce-prefixed headers + """ + if event_format is None: + event_format = JSONFormat() + return to_binary(event, event_format) + + +def from_binary_event( + message: HTTPMessage, + event_format: Format | None = None, +) -> BaseCloudEvent: + """ + Convenience wrapper for from_binary with JSON format and auto-detection. + + Auto-detects CloudEvents version (v0.3 or v1.0) from headers. 
+ + Example: + >>> from cloudevents.core.bindings import http + >>> event = http.from_binary_event(message) + + :param message: HTTPMessage to parse + :param event_format: Format implementation (defaults to JSONFormat) + :return: CloudEvent instance (v0.3 or v1.0 based on specversion) + """ + if event_format is None: + event_format = JSONFormat() + return from_binary(message, event_format, None) + + +def to_structured_event( + event: BaseCloudEvent, + event_format: Format | None = None, +) -> HTTPMessage: + """ + Convenience wrapper for to_structured with JSON format as default. + + Example: + >>> from cloudevents.core.v1.event import CloudEvent + >>> from cloudevents.core.bindings import http + >>> + >>> event = CloudEvent( + ... attributes={"type": "com.example.test", "source": "/test"}, + ... data={"message": "Hello"} + ... ) + >>> message = http.to_structured_event(event) + + :param event: The CloudEvent to convert + :param event_format: Format implementation (defaults to JSONFormat) + :return: HTTPMessage with structured content + """ + if event_format is None: + event_format = JSONFormat() + return to_structured(event, event_format) + + +def from_structured_event( + message: HTTPMessage, + event_format: Format | None = None, +) -> BaseCloudEvent: + """ + Convenience wrapper for from_structured with JSON format and auto-detection. + + Auto-detects CloudEvents version (v0.3 or v1.0) from body. 
+ + Example: + >>> from cloudevents.core.bindings import http + >>> event = http.from_structured_event(message) + + :param message: HTTPMessage to parse + :param event_format: Format implementation (defaults to JSONFormat) + :return: CloudEvent instance (v0.3 or v1.0 based on specversion) + """ + if event_format is None: + event_format = JSONFormat() + return from_structured(message, event_format, None) + + +def from_http_event( + message: HTTPMessage, + event_format: Format | None = None, +) -> BaseCloudEvent: + """ + Convenience wrapper for from_http with JSON format and auto-detection. + Auto-detects binary or structured mode, and CloudEvents version. + + Example: + >>> from cloudevents.core.bindings import http + >>> event = http.from_http_event(message) + + :param message: HTTPMessage to parse + :param event_format: Format implementation (defaults to JSONFormat) + :return: CloudEvent instance (v0.3 or v1.0 based on specversion) + """ + if event_format is None: + event_format = JSONFormat() + return from_http(message, event_format, None) diff --git a/src/cloudevents/core/bindings/kafka.py b/src/cloudevents/core/bindings/kafka.py new file mode 100644 index 00000000..1cb5b480 --- /dev/null +++ b/src/cloudevents/core/bindings/kafka.py @@ -0,0 +1,460 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from dataclasses import dataclass +from typing import Any, Callable, Final + +from cloudevents.core.base import BaseCloudEvent, EventFactory +from cloudevents.core.bindings.common import ( + CONTENT_TYPE_HEADER, + DATACONTENTTYPE_ATTR, + decode_header_value, + encode_header_value, + get_event_factory_for_version, +) +from cloudevents.core.formats.base import Format +from cloudevents.core.formats.json import JSONFormat +from cloudevents.core.spec import SPECVERSION_V1_0 + +CE_PREFIX: Final[str] = "ce_" +PARTITIONKEY_ATTR: Final[str] = "partitionkey" + +KeyMapper = Callable[[BaseCloudEvent], str | bytes | None] + + +@dataclass(frozen=True) +class KafkaMessage: + """ + Represents a Kafka message containing CloudEvent data. + + This dataclass encapsulates Kafka message components for transmitting CloudEvents + over Kafka. It is immutable to prevent accidental modifications and works with + any Kafka client library (kafka-python, confluent-kafka, etc.). + + Attributes: + headers: Kafka message headers as bytes (per Kafka protocol requirement) + key: Optional Kafka message key for partitioning + value: Kafka message value/payload as bytes + """ + + headers: dict[str, bytes] + key: str | bytes | None + value: bytes + + +def _default_key_mapper(event: BaseCloudEvent) -> str | bytes | None: + """ + Default key mapper that extracts the partitionkey extension attribute. + + :param event: The CloudEvent to extract key from + :return: The partitionkey extension attribute value, or None if not present + """ + value = event.get_extension(PARTITIONKEY_ATTR) + # Type narrowing: get_extension returns Any, but we know partitionkey should be str/bytes/None + return value if value is None or isinstance(value, (str, bytes)) else str(value) + + +def to_binary( + event: BaseCloudEvent, + event_format: Format, + key_mapper: KeyMapper | None = None, +) -> KafkaMessage: + """ + Convert a CloudEvent to Kafka binary content mode. 
+ + In binary mode, CloudEvent attributes are mapped to Kafka headers with the 'ce_' prefix, + except for 'datacontenttype' which maps to the 'content-type' header. The event data + is placed in the Kafka message value. The message key is derived from the partitionkey + extension attribute or a custom key_mapper function. + + Example: + >>> from cloudevents.core.v1.event import CloudEvent + >>> from cloudevents.core.formats.json import JSONFormat + >>> + >>> event = CloudEvent( + ... attributes={"type": "com.example.test", "source": "/test"}, + ... data={"message": "Hello"} + ... ) + >>> message = to_binary(event, JSONFormat()) + >>> # message.headers = {"ce_type": b"com.example.test", "ce_source": b"/test", ...} + >>> # message.value = b'{"message": "Hello"}' + >>> # message.key = None + + :param event: The CloudEvent to convert + :param event_format: Format implementation for data serialization + :param key_mapper: Optional function to extract message key from event (defaults to partitionkey attribute) + :return: KafkaMessage with ce_-prefixed headers and event data as value + """ + headers: dict[str, bytes] = {} + attributes = event.get_attributes() + + # Apply key mapper + if key_mapper is None: + key_mapper = _default_key_mapper + message_key = key_mapper(event) + + for attr_name, attr_value in attributes.items(): + if attr_value is None: + continue + + # Skip partitionkey - it goes in the message key, not headers + if attr_name == PARTITIONKEY_ATTR: + continue + + if attr_name == DATACONTENTTYPE_ATTR: + headers[CONTENT_TYPE_HEADER] = str(attr_value).encode("utf-8") + else: + header_name = f"{CE_PREFIX}{attr_name}" + headers[header_name] = encode_header_value(attr_value).encode("utf-8") + + data = event.get_data() + datacontenttype = attributes.get(DATACONTENTTYPE_ATTR) + value = event_format.write_data(data, datacontenttype) + + return KafkaMessage(headers=headers, key=message_key, value=value) + + +def from_binary( + message: KafkaMessage, + event_format: 
Format, + event_factory: EventFactory | None = None, +) -> BaseCloudEvent: + """ + Parse a Kafka binary content mode message to a CloudEvent. + + Auto-detects the CloudEvents version from the 'ce_specversion' header + and uses the appropriate event factory if not explicitly provided. + + Extracts CloudEvent attributes from ce_-prefixed Kafka headers and treats the + 'content-type' header as the 'datacontenttype' attribute. The Kafka message value + is parsed as event data according to the content type. If the message has a key, + it is added as the 'partitionkey' extension attribute. + + Example: + >>> from cloudevents.core.v1.event import CloudEvent + >>> from cloudevents.core.formats.json import JSONFormat + >>> + >>> message = KafkaMessage( + ... headers={"ce_type": b"com.example.test", "ce_source": b"/test", + ... "ce_id": b"123", "ce_specversion": b"1.0"}, + ... key=b"partition-key-123", + ... value=b'{"message": "Hello"}' + ... ) + >>> event = from_binary(message, JSONFormat(), CloudEvent) + + :param message: KafkaMessage to parse + :param event_format: Format implementation for data deserialization + :param event_factory: Factory function to create CloudEvent instances + :return: CloudEvent instance + """ + attributes: dict[str, Any] = {} + + for header_name, header_value_bytes in message.headers.items(): + header_value = header_value_bytes.decode("utf-8") + + normalized_name = header_name.lower() + + if normalized_name.startswith(CE_PREFIX): + attr_name = normalized_name[len(CE_PREFIX) :] + attributes[attr_name] = decode_header_value(attr_name, header_value) + elif normalized_name == CONTENT_TYPE_HEADER: + attributes[DATACONTENTTYPE_ATTR] = header_value + + # If message has a key, add it as partitionkey extension attribute + if message.key is not None: + key_value = ( + message.key.decode("utf-8") + if isinstance(message.key, bytes) + else message.key + ) + attributes[PARTITIONKEY_ATTR] = key_value + + # Auto-detect version if factory not provided + if 
event_factory is None: + specversion = attributes.get("specversion", SPECVERSION_V1_0) + event_factory = get_event_factory_for_version(specversion) + + datacontenttype = attributes.get(DATACONTENTTYPE_ATTR) + data = event_format.read_data(message.value, datacontenttype) + + return event_factory(attributes, data) + + +def to_structured( + event: BaseCloudEvent, + event_format: Format, + key_mapper: KeyMapper | None = None, +) -> KafkaMessage: + """ + Convert a CloudEvent to Kafka structured content mode. + + In structured mode, the entire CloudEvent (attributes and data) is serialized + into the Kafka message value using the specified format. The content-type header + is set to the format's media type. The message key is derived from the partitionkey + extension attribute or a custom key_mapper function. + + Example: + >>> from cloudevents.core.v1.event import CloudEvent + >>> from cloudevents.core.formats.json import JSONFormat + >>> + >>> event = CloudEvent( + ... attributes={"type": "com.example.test", "source": "/test"}, + ... data={"message": "Hello"} + ... 
) + >>> message = to_structured(event, JSONFormat()) + >>> # message.headers = {"content-type": b"application/cloudevents+json"} + >>> # message.value = b'{"type": "com.example.test", "source": "/test", ...}' + + :param event: The CloudEvent to convert + :param event_format: Format implementation for serialization + :param key_mapper: Optional function to extract message key from event (defaults to partitionkey attribute) + :return: KafkaMessage with structured content in value + """ + content_type = event_format.get_content_type() + + headers = {CONTENT_TYPE_HEADER: content_type.encode("utf-8")} + + value = event_format.write(event) + + if key_mapper is None: + key_mapper = _default_key_mapper + message_key = key_mapper(event) + + return KafkaMessage(headers=headers, key=message_key, value=value) + + +def from_structured( + message: KafkaMessage, + event_format: Format, + event_factory: EventFactory | None = None, +) -> BaseCloudEvent: + """ + Parse a Kafka structured content mode message to a CloudEvent. + + Deserializes the CloudEvent from the Kafka message value using the specified format. + Any ce_-prefixed headers are ignored as the value contains all event metadata. + If the message has a key, it is added as the 'partitionkey' extension attribute. + + If event_factory is not provided, version detection is delegated to the format + implementation, which will auto-detect based on the 'specversion' field. + + Example: + >>> from cloudevents.core.v1.event import CloudEvent + >>> from cloudevents.core.formats.json import JSONFormat + >>> + >>> # Explicit factory + >>> message = KafkaMessage( + ... headers={"content-type": b"application/cloudevents+json"}, + ... key=b"partition-key-123", + ... value=b'{"type": "com.example.test", "source": "/test", ...}' + ... 
) + >>> event = from_structured(message, JSONFormat(), CloudEvent) + >>> + >>> # Auto-detect version + >>> event = from_structured(message, JSONFormat()) + + :param message: KafkaMessage to parse + :param event_format: Format implementation for deserialization + :param event_factory: Factory function to create CloudEvent instances. + If None, the format will auto-detect the version. + :return: CloudEvent instance + """ + # Delegate version detection to format layer + event = event_format.read(event_factory, message.value) + + # If message has a key, we need to add it as partitionkey extension attribute + # Since the event is already created, we need to reconstruct it with the additional attribute + if message.key is not None: + key_value = ( + message.key.decode("utf-8") + if isinstance(message.key, bytes) + else message.key + ) + attributes = event.get_attributes() + attributes[PARTITIONKEY_ATTR] = key_value + data = event.get_data() + + event = type(event)(attributes, data) + + return event + + +def from_kafka( + message: KafkaMessage, + event_format: Format, + event_factory: EventFactory | None = None, +) -> BaseCloudEvent: + """ + Parse a Kafka message to a CloudEvent with automatic mode detection. + + Auto-detects CloudEvents version and uses appropriate event factory if not provided. + + Automatically detects whether the message uses binary or structured content mode: + - If any ce_ prefixed headers are present → binary mode + - Otherwise → structured mode + + This function provides a convenient way to handle both content modes without + requiring the caller to determine the mode beforehand. + + Example: + >>> from cloudevents.core.v1.event import CloudEvent + >>> from cloudevents.core.formats.json import JSONFormat + >>> + >>> # Works with binary mode + >>> binary_msg = KafkaMessage( + ... headers={"ce_type": b"com.example.test", ...}, + ... key=None, + ... value=b'...' + ... 
) + >>> event1 = from_kafka(binary_msg, JSONFormat(), CloudEvent) + >>> + >>> # Also works with structured mode + >>> structured_msg = KafkaMessage( + ... headers={"content-type": b"application/cloudevents+json"}, + ... key=None, + ... value=b'{"type": "com.example.test", ...}' + ... ) + >>> event2 = from_kafka(structured_msg, JSONFormat(), CloudEvent) + + :param message: KafkaMessage to parse + :param event_format: Format implementation for deserialization + :param event_factory: Factory function to create CloudEvent instances (auto-detected if None) + :return: CloudEvent instance + """ + for header_name in message.headers.keys(): + if header_name.lower().startswith(CE_PREFIX): + return from_binary(message, event_format, event_factory) + + return from_structured(message, event_format, event_factory) + + +def to_binary_event( + event: BaseCloudEvent, + event_format: Format | None = None, + key_mapper: KeyMapper | None = None, +) -> KafkaMessage: + """ + Convenience wrapper for to_binary with JSON format and CloudEvent as defaults. + + Example: + >>> from cloudevents.core.v1.event import CloudEvent + >>> from cloudevents.core.bindings import kafka + >>> + >>> event = CloudEvent( + ... attributes={"type": "com.example.test", "source": "/test"}, + ... data={"message": "Hello"} + ... ) + >>> message = kafka.to_binary_event(event) + + :param event: The CloudEvent to convert + :param event_format: Format implementation (defaults to JSONFormat) + :param key_mapper: Optional function to extract message key from event + :return: KafkaMessage with ce_-prefixed headers + """ + if event_format is None: + event_format = JSONFormat() + return to_binary(event, event_format, key_mapper) + + +def from_binary_event( + message: KafkaMessage, + event_format: Format | None = None, +) -> BaseCloudEvent: + """ + Convenience wrapper for from_binary with JSON format and auto-detection. + + Auto-detects CloudEvents version (v0.3 or v1.0) from headers. 
+ + Example: + >>> from cloudevents.core.bindings import kafka + >>> event = kafka.from_binary_event(message) + + :param message: KafkaMessage to parse + :param event_format: Format implementation (defaults to JSONFormat) + :return: CloudEvent instance (v0.3 or v1.0 based on specversion) + """ + if event_format is None: + event_format = JSONFormat() + return from_binary(message, event_format, None) + + +def to_structured_event( + event: BaseCloudEvent, + event_format: Format | None = None, + key_mapper: KeyMapper | None = None, +) -> KafkaMessage: + """ + Convenience wrapper for to_structured with JSON format as default. + + Example: + >>> from cloudevents.core.v1.event import CloudEvent + >>> from cloudevents.core.bindings import kafka + >>> + >>> event = CloudEvent( + ... attributes={"type": "com.example.test", "source": "/test"}, + ... data={"message": "Hello"} + ... ) + >>> message = kafka.to_structured_event(event) + + :param event: The CloudEvent to convert + :param event_format: Format implementation (defaults to JSONFormat) + :param key_mapper: Optional function to extract message key from event + :return: KafkaMessage with structured content + """ + if event_format is None: + event_format = JSONFormat() + return to_structured(event, event_format, key_mapper) + + +def from_structured_event( + message: KafkaMessage, + event_format: Format | None = None, +) -> BaseCloudEvent: + """ + Convenience wrapper for from_structured with JSON format and auto-detection. + + Auto-detects CloudEvents version (v0.3 or v1.0) from message body. 
+ + Example: + >>> from cloudevents.core.bindings import kafka + >>> event = kafka.from_structured_event(message) + + :param message: KafkaMessage to parse + :param event_format: Format implementation (defaults to JSONFormat) + :return: CloudEvent instance (v0.3 or v1.0 based on specversion) + """ + if event_format is None: + event_format = JSONFormat() + return from_structured(message, event_format, None) + + +def from_kafka_event( + message: KafkaMessage, + event_format: Format | None = None, +) -> BaseCloudEvent: + """ + Convenience wrapper for from_kafka with JSON format and auto-detection. + Auto-detects binary or structured mode, and CloudEvents version. + + Example: + >>> from cloudevents.core.bindings import kafka + >>> event = kafka.from_kafka_event(message) + + :param message: KafkaMessage to parse + :param event_format: Format implementation (defaults to JSONFormat) + :return: CloudEvent instance (v0.3 or v1.0 based on specversion) + """ + if event_format is None: + event_format = JSONFormat() + return from_kafka(message, event_format, None) diff --git a/src/cloudevents/core/exceptions.py b/src/cloudevents/core/exceptions.py new file mode 100644 index 00000000..c4a186c4 --- /dev/null +++ b/src/cloudevents/core/exceptions.py @@ -0,0 +1,81 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +""" +Common exceptions for CloudEvents (version-agnostic). 
+
+"""
+
+
+class BaseCloudEventException(Exception):
+    """A CloudEvent generic exception."""
+
+
+class CloudEventValidationError(BaseCloudEventException):
+    """
+    Holds validation errors aggregated during a CloudEvent creation.
+    """
+
+    def __init__(self, errors: dict[str, list[BaseCloudEventException]]) -> None:
+        """
+        :param errors: The errors gathered during the CloudEvent creation where key
+        is the name of the attribute and value is a list of errors related to that attribute.
+        """
+        super().__init__("Failed to create CloudEvent due to the validation errors")
+        self.errors: dict[str, list[BaseCloudEventException]] = errors
+
+    def __str__(self) -> str:
+        error_messages: list[str] = [
+            f"{key}: {', '.join(str(e) for e in value)}"
+            for key, value in self.errors.items()
+        ]
+        return f"{super().__str__()}: {', '.join(error_messages)}"
+
+
+class MissingRequiredAttributeError(BaseCloudEventException, ValueError):
+    """
+    Raised for attributes that are required to be present by the specification.
+    """
+
+    def __init__(self, attribute_name: str) -> None:
+        self.attribute_name: str = attribute_name
+        super().__init__(f"Missing required attribute: '{attribute_name}'")
+
+
+class CustomExtensionAttributeError(BaseCloudEventException, ValueError):
+    """
+    Raised when a custom extension attribute violates naming conventions.
+    """
+
+    def __init__(self, attribute_name: str, msg: str) -> None:
+        self.attribute_name: str = attribute_name
+        super().__init__(msg)
+
+
+class InvalidAttributeTypeError(BaseCloudEventException, TypeError):
+    """
+    Raised when an attribute has an unsupported type.
+    """
+
+    def __init__(self, attribute_name: str, expected_type: type) -> None:
+        self.attribute_name: str = attribute_name
+        super().__init__(f"Attribute '{attribute_name}' must be a {expected_type}")
+
+
+class InvalidAttributeValueError(BaseCloudEventException, ValueError):
+    """
+    Raised when an attribute has an invalid value.
+ """ + + def __init__(self, attribute_name: str, msg: str) -> None: + self.attribute_name: str = attribute_name + super().__init__(msg) diff --git a/cloudevents/sdk/__init__.py b/src/cloudevents/core/formats/__init__.py similarity index 100% rename from cloudevents/sdk/__init__.py rename to src/cloudevents/core/formats/__init__.py diff --git a/src/cloudevents/core/formats/base.py b/src/cloudevents/core/formats/base.py new file mode 100644 index 00000000..ae2d9d0a --- /dev/null +++ b/src/cloudevents/core/formats/base.py @@ -0,0 +1,90 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from typing import Any, Protocol + +from cloudevents.core.base import BaseCloudEvent, EventFactory + + +class Format(Protocol): + """ + Protocol defining the contract for CloudEvent format implementations. + + Format implementations are responsible for serializing and deserializing CloudEvents + to and from specific wire formats (e.g., JSON, Avro, Protobuf). Each format must + implement both read and write operations to convert between CloudEvent objects and + their byte representations according to the CloudEvents specification. + """ + + def read( + self, + event_factory: EventFactory | None, + data: str | bytes, + ) -> BaseCloudEvent: + """ + Deserialize a CloudEvent from its wire format representation. + + :param event_factory: A factory function that creates CloudEvent instances from + attributes and data. 
The factory should accept a dictionary of attributes and + optional event data (dict, str, or bytes). + If None, the format implementation should auto-detect the version from the data. + :param data: The serialized CloudEvent data as a string or bytes. + :return: A CloudEvent instance constructed from the deserialized data. + :raises ValueError: If the data cannot be parsed or is invalid according to the format. + """ + ... + + def write(self, event: BaseCloudEvent) -> bytes: + """ + Serialize a CloudEvent to its wire format representation. + + :param event: The CloudEvent instance to serialize. + :return: The CloudEvent serialized as bytes in the format's wire representation. + :raises ValueError: If the event cannot be serialized according to the format. + """ + ... + + def write_data( + self, + data: dict[str, Any] | str | bytes | None, + datacontenttype: str | None, + ) -> bytes: + """ + Serialize just the data payload for protocol bindings (e.g., HTTP binary mode). + + :param data: Event data to serialize (dict, str, bytes, or None) + :param datacontenttype: Content type of the data + :return: Serialized data as bytes + """ + ... + + def read_data( + self, body: bytes, datacontenttype: str | None + ) -> dict[str, Any] | str | bytes | None: + """ + Deserialize data payload from protocol bindings (e.g., HTTP binary mode). + + :param body: HTTP body as bytes + :param datacontenttype: Content type of the data + :return: Deserialized data (dict for JSON, str for text, bytes for binary) + """ + ... + + def get_content_type(self) -> str: + """ + Get the Content-Type header value for structured mode. + + :return: Content type string for CloudEvents structured content mode + """ + ... 
diff --git a/src/cloudevents/core/formats/json.py b/src/cloudevents/core/formats/json.py new file mode 100644 index 00000000..9ac0e44a --- /dev/null +++ b/src/cloudevents/core/formats/json.py @@ -0,0 +1,223 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import base64 +import re +from datetime import datetime +from json import JSONEncoder, dumps, loads +from typing import Any, Final, Pattern + +from dateutil.parser import isoparse + +from cloudevents.core.base import BaseCloudEvent, EventFactory +from cloudevents.core.formats.base import Format +from cloudevents.core.spec import SPECVERSION_V0_3, SPECVERSION_V1_0 + + +class _JSONEncoderWithDatetime(JSONEncoder): + """ + Custom JSON encoder to handle datetime objects in the format required by the CloudEvents spec. 
+ """ + + def default(self, obj: Any) -> Any: + if isinstance(obj, datetime): + dt = obj.isoformat() + # 'Z' denotes a UTC offset of 00:00 see + # https://www.rfc-editor.org/rfc/rfc3339#section-2 + if dt.endswith("+00:00"): + dt = dt.removesuffix("+00:00") + "Z" + return dt + + return super().default(obj) + + +class JSONFormat(Format): + CONTENT_TYPE: Final[str] = "application/cloudevents+json" + JSON_CONTENT_TYPE_PATTERN: Pattern[str] = re.compile( + r"^(application|text)/([a-zA-Z0-9\-\.]+\+)?json(;.*)?$" + ) + + def read( + self, + event_factory: EventFactory | None, + data: str | bytes, + ) -> BaseCloudEvent: + """ + Read a CloudEvent from a JSON formatted byte string. + + Supports both v0.3 and v1.0 CloudEvents: + - v0.3: Uses 'datacontentencoding' attribute with 'data' field + - v1.0: Uses 'data_base64' field (no datacontentencoding) + + :param event_factory: A factory function to create CloudEvent instances. + If None, automatically detects version from 'specversion' field. + :param data: The JSON formatted byte array. + :return: The CloudEvent instance. 
+ """ + decoded_data: str + if isinstance(data, bytes): + decoded_data = data.decode("utf-8") + else: + decoded_data = data + + event_attributes = loads(decoded_data) + + # Auto-detect version if factory not provided + if event_factory is None: + from cloudevents.core.bindings.common import get_event_factory_for_version + + specversion = event_attributes.get("specversion", SPECVERSION_V1_0) + event_factory = get_event_factory_for_version(specversion) + + if "time" in event_attributes: + event_attributes["time"] = isoparse(event_attributes["time"]) + + # Handle data field based on version + specversion = event_attributes.get("specversion", SPECVERSION_V1_0) + event_data: dict[str, Any] | str | bytes | None = event_attributes.pop( + "data", None + ) + + # v0.3: Check for datacontentencoding attribute + if ( + specversion == SPECVERSION_V0_3 + and "datacontentencoding" in event_attributes + ): + encoding = event_attributes.get("datacontentencoding", "").lower() + if encoding == "base64" and isinstance(event_data, str): + # Decode base64 encoded data in v0.3 + event_data = base64.b64decode(event_data) + + # v1.0: Check for data_base64 field (when data is None) + if event_data is None: + event_data_base64 = event_attributes.pop("data_base64", None) + if event_data_base64 is not None: + event_data = base64.b64decode(event_data_base64) + + return event_factory(event_attributes, event_data) + + def write(self, event: BaseCloudEvent) -> bytes: + """ + Write a CloudEvent to a JSON formatted byte string. + + Supports both v0.3 and v1.0 CloudEvents: + - v0.3: Uses 'datacontentencoding: base64' with base64-encoded 'data' field + - v1.0: Uses 'data_base64' field (no datacontentencoding) + + :param event: The CloudEvent to write. + :return: The CloudEvent as a JSON formatted byte array. 
+ """ + event_data = event.get_data() + event_dict: dict[str, Any] = dict(event.get_attributes()) + specversion = event_dict.get("specversion", SPECVERSION_V1_0) + + if event_data is not None: + if isinstance(event_data, (bytes, bytearray)): + # Handle binary data based on version + if specversion == SPECVERSION_V0_3: + # v0.3: Use datacontentencoding with base64-encoded data field + event_dict["datacontentencoding"] = "base64" + event_dict["data"] = base64.b64encode(event_data).decode("utf-8") + else: + # v1.0: Use data_base64 field + event_dict["data_base64"] = base64.b64encode(event_data).decode( + "utf-8" + ) + else: + datacontenttype = event_dict.get("datacontenttype", "application/json") + if re.match(JSONFormat.JSON_CONTENT_TYPE_PATTERN, datacontenttype): + event_dict["data"] = event_data + else: + event_dict["data"] = str(event_data) + + return dumps(event_dict, cls=_JSONEncoderWithDatetime).encode("utf-8") + + def write_data( + self, + data: dict[str, Any] | str | bytes | None, + datacontenttype: str | None, + ) -> bytes: + """ + Serialize just the data payload for HTTP binary mode. + + This method is used by HTTP binary content mode to serialize only the event + data (not the attributes) into the HTTP body. 
+ + :param data: Event data to serialize (dict, str, bytes, or None) + :param datacontenttype: Content type of the data + :return: Serialized data as bytes + """ + if data is None: + return b"" + + # If data is already bytes, return as-is + if isinstance(data, (bytes, bytearray)): + return bytes(data) + + # If data is a string, encode as UTF-8 + if isinstance(data, str): + return data.encode("utf-8") + + # If data is a dict and content type is JSON, serialize as JSON + if isinstance(data, dict): + if datacontenttype and re.match( + JSONFormat.JSON_CONTENT_TYPE_PATTERN, datacontenttype + ): + return dumps(data, cls=_JSONEncoderWithDatetime).encode("utf-8") + + # Default: convert to string and encode + return str(data).encode("utf-8") + + def read_data( + self, body: bytes, datacontenttype: str | None + ) -> dict[str, Any] | str | bytes | None: + """ + Deserialize data payload from HTTP binary mode body. + + This method is used by HTTP binary content mode to deserialize the HTTP body + into event data based on the content type. + + :param body: HTTP body as bytes + :param datacontenttype: Content type of the data + :return: Deserialized data (dict for JSON, str for text, bytes for binary) + """ + if not body or len(body) == 0: + return None + + # If content type indicates JSON, try to parse as JSON + if datacontenttype and re.match( + JSONFormat.JSON_CONTENT_TYPE_PATTERN, datacontenttype + ): + try: + decoded = body.decode("utf-8") + parsed: dict[str, Any] = loads(decoded) + return parsed + except (ValueError, UnicodeDecodeError): + # If JSON parsing fails, fall through to other handling + pass + + # Try to decode as UTF-8 string + try: + return body.decode("utf-8") + except UnicodeDecodeError: + # If UTF-8 decoding fails, return as bytes + return body + + def get_content_type(self) -> str: + """ + Get the Content-Type header value for structured mode. 
+ + :return: Content type string for CloudEvents structured content mode + """ + return self.CONTENT_TYPE diff --git a/src/cloudevents/core/spec.py b/src/cloudevents/core/spec.py new file mode 100644 index 00000000..e3858189 --- /dev/null +++ b/src/cloudevents/core/spec.py @@ -0,0 +1,18 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +from typing import Literal + +SpecVersion = Literal["1.0", "0.3"] +SPECVERSION_V1_0 = "1.0" +SPECVERSION_V0_3 = "0.3" diff --git a/src/cloudevents/core/v03/__init__.py b/src/cloudevents/core/v03/__init__.py new file mode 100644 index 00000000..67b5e010 --- /dev/null +++ b/src/cloudevents/core/v03/__init__.py @@ -0,0 +1,15 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +"""CloudEvents v0.3 implementation module.""" diff --git a/src/cloudevents/core/v03/event.py b/src/cloudevents/core/v03/event.py new file mode 100644 index 00000000..b0b6094e --- /dev/null +++ b/src/cloudevents/core/v03/event.py @@ -0,0 +1,319 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import re +from collections import defaultdict +from datetime import datetime +from typing import Any, Final + +from cloudevents.core.base import BaseCloudEvent +from cloudevents.core.exceptions import ( + BaseCloudEventException, + CloudEventValidationError, + CustomExtensionAttributeError, + InvalidAttributeTypeError, + InvalidAttributeValueError, + MissingRequiredAttributeError, +) +from cloudevents.core.spec import SPECVERSION_V0_3 + +REQUIRED_ATTRIBUTES: Final[list[str]] = ["id", "source", "type", "specversion"] +OPTIONAL_ATTRIBUTES: Final[list[str]] = [ + "datacontenttype", + "datacontentencoding", + "schemaurl", + "subject", + "time", +] + + +class CloudEvent(BaseCloudEvent): + """ + CloudEvents v0.3 implementation. + + This class represents a CloudEvent conforming to the v0.3 specification. + See https://github.com/cloudevents/spec/blob/v0.3/spec.md for details. + """ + + def __init__( + self, + attributes: dict[str, Any], + data: dict[str, Any] | str | bytes | None = None, + ) -> None: + """ + Create a new CloudEvent v0.3 instance. + + :param attributes: The attributes of the CloudEvent instance. 
+ :param data: The payload of the CloudEvent instance. + + :raises CloudEventValidationError: If any of the required attributes are missing or have invalid values. + """ + self._validate_attribute(attributes=attributes) + self._attributes: dict[str, Any] = attributes + self._data: dict[str, Any] | str | bytes | None = data + + @staticmethod + def _validate_attribute(attributes: dict[str, Any]) -> None: + """ + Validates the attributes of the CloudEvent as per the CloudEvents v0.3 specification. + + See https://github.com/cloudevents/spec/blob/v0.3/spec.md#required-attributes + """ + errors: dict[str, list[BaseCloudEventException]] = defaultdict(list) + errors.update(CloudEvent._validate_required_attributes(attributes=attributes)) + errors.update(CloudEvent._validate_optional_attributes(attributes=attributes)) + errors.update(CloudEvent._validate_extension_attributes(attributes=attributes)) + if errors: + raise CloudEventValidationError(errors=errors) + + @staticmethod + def _validate_required_attributes( + attributes: dict[str, Any], + ) -> dict[str, list[BaseCloudEventException]]: + """ + Validates the types of the required attributes. + + :param attributes: The attributes of the CloudEvent instance. + :return: A dictionary of validation error messages. 
+ """ + errors: dict[str, list[BaseCloudEventException]] = defaultdict(list) + + if "id" not in attributes: + errors["id"].append(MissingRequiredAttributeError(attribute_name="id")) + if attributes.get("id") is None: + errors["id"].append( + InvalidAttributeValueError( + attribute_name="id", msg="Attribute 'id' must not be None" + ) + ) + if not isinstance(attributes.get("id"), str): + errors["id"].append( + InvalidAttributeTypeError(attribute_name="id", expected_type=str) + ) + + if "source" not in attributes: + errors["source"].append( + MissingRequiredAttributeError(attribute_name="source") + ) + if not isinstance(attributes.get("source"), str): + errors["source"].append( + InvalidAttributeTypeError(attribute_name="source", expected_type=str) + ) + + if "type" not in attributes: + errors["type"].append(MissingRequiredAttributeError(attribute_name="type")) + if not isinstance(attributes.get("type"), str): + errors["type"].append( + InvalidAttributeTypeError(attribute_name="type", expected_type=str) + ) + + if "specversion" not in attributes: + errors["specversion"].append( + MissingRequiredAttributeError(attribute_name="specversion") + ) + if not isinstance(attributes.get("specversion"), str): + errors["specversion"].append( + InvalidAttributeTypeError( + attribute_name="specversion", expected_type=str + ) + ) + if attributes.get("specversion") != SPECVERSION_V0_3: + errors["specversion"].append( + InvalidAttributeValueError( + attribute_name="specversion", + msg=f"Attribute 'specversion' must be '{SPECVERSION_V0_3}'", + ) + ) + return errors + + @staticmethod + def _validate_optional_attributes( + attributes: dict[str, Any], + ) -> dict[str, list[BaseCloudEventException]]: + """ + Validates the types and values of the optional attributes. + + :param attributes: The attributes of the CloudEvent instance. + :return: A dictionary of validation error messages. 
+ """ + errors: dict[str, list[BaseCloudEventException]] = defaultdict(list) + + if "time" in attributes: + if not isinstance(attributes["time"], datetime): + errors["time"].append( + InvalidAttributeTypeError( + attribute_name="time", expected_type=datetime + ) + ) + if hasattr(attributes["time"], "tzinfo") and not attributes["time"].tzinfo: + errors["time"].append( + InvalidAttributeValueError( + attribute_name="time", + msg="Attribute 'time' must be timezone aware", + ) + ) + if "subject" in attributes: + if not isinstance(attributes["subject"], str): + errors["subject"].append( + InvalidAttributeTypeError( + attribute_name="subject", expected_type=str + ) + ) + if not attributes["subject"]: + errors["subject"].append( + InvalidAttributeValueError( + attribute_name="subject", + msg="Attribute 'subject' must not be empty", + ) + ) + if "datacontenttype" in attributes: + if not isinstance(attributes["datacontenttype"], str): + errors["datacontenttype"].append( + InvalidAttributeTypeError( + attribute_name="datacontenttype", expected_type=str + ) + ) + if not attributes["datacontenttype"]: + errors["datacontenttype"].append( + InvalidAttributeValueError( + attribute_name="datacontenttype", + msg="Attribute 'datacontenttype' must not be empty", + ) + ) + if "datacontentencoding" in attributes: + if not isinstance(attributes["datacontentencoding"], str): + errors["datacontentencoding"].append( + InvalidAttributeTypeError( + attribute_name="datacontentencoding", expected_type=str + ) + ) + if not attributes["datacontentencoding"]: + errors["datacontentencoding"].append( + InvalidAttributeValueError( + attribute_name="datacontentencoding", + msg="Attribute 'datacontentencoding' must not be empty", + ) + ) + if "schemaurl" in attributes: + if not isinstance(attributes["schemaurl"], str): + errors["schemaurl"].append( + InvalidAttributeTypeError( + attribute_name="schemaurl", expected_type=str + ) + ) + if not attributes["schemaurl"]: + errors["schemaurl"].append( + 
InvalidAttributeValueError( + attribute_name="schemaurl", + msg="Attribute 'schemaurl' must not be empty", + ) + ) + return errors + + @staticmethod + def _validate_extension_attributes( + attributes: dict[str, Any], + ) -> dict[str, list[BaseCloudEventException]]: + """ + Validates the extension attributes. + + :param attributes: The attributes of the CloudEvent instance. + :return: A dictionary of validation error messages. + """ + errors: dict[str, list[BaseCloudEventException]] = defaultdict(list) + extension_attributes = [ + key + for key in attributes.keys() + if key not in REQUIRED_ATTRIBUTES and key not in OPTIONAL_ATTRIBUTES + ] + for extension_attribute in extension_attributes: + if extension_attribute == "data": + errors[extension_attribute].append( + CustomExtensionAttributeError( + attribute_name=extension_attribute, + msg="Extension attribute 'data' is reserved and must not be used", + ) + ) + if not (1 <= len(extension_attribute) <= 20): + errors[extension_attribute].append( + CustomExtensionAttributeError( + attribute_name=extension_attribute, + msg=f"Extension attribute '{extension_attribute}' should be between 1 and 20 characters long", + ) + ) + if not re.match(r"^[a-z0-9]+$", extension_attribute): + errors[extension_attribute].append( + CustomExtensionAttributeError( + attribute_name=extension_attribute, + msg=f"Extension attribute '{extension_attribute}' should only contain lowercase letters and numbers", + ) + ) + return errors + + def get_id(self) -> str: + return self._attributes["id"] # type: ignore + + def get_source(self) -> str: + return self._attributes["source"] # type: ignore + + def get_type(self) -> str: + return self._attributes["type"] # type: ignore + + def get_specversion(self) -> str: + return self._attributes["specversion"] # type: ignore + + def get_datacontenttype(self) -> str | None: + return self._attributes.get("datacontenttype") + + def get_dataschema(self) -> str | None: + """ + Get the dataschema attribute. 
+ + Note: In v0.3, this is called 'schemaurl'. This method provides + compatibility with the BaseCloudEvent interface. + """ + return self._attributes.get("schemaurl") + + def get_subject(self) -> str | None: + return self._attributes.get("subject") + + def get_time(self) -> datetime | None: + return self._attributes.get("time") + + def get_extension(self, extension_name: str) -> Any: + return self._attributes.get(extension_name) + + def get_data(self) -> dict[str, Any] | str | bytes | None: + return self._data + + def get_attributes(self) -> dict[str, Any]: + return self._attributes + + # v0.3 specific methods + + def get_datacontentencoding(self) -> str | None: + """ + Get the datacontentencoding attribute (v0.3 only). + + This attribute was removed in v1.0. + """ + return self._attributes.get("datacontentencoding") + + def get_schemaurl(self) -> str | None: + """ + Get the schemaurl attribute (v0.3 only). + + This attribute was renamed to 'dataschema' in v1.0. + """ + return self._attributes.get("schemaurl") diff --git a/src/cloudevents/core/v1/__init__.py b/src/cloudevents/core/v1/__init__.py new file mode 100644 index 00000000..896dfe12 --- /dev/null +++ b/src/cloudevents/core/v1/__init__.py @@ -0,0 +1,17 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +""" +CloudEvent implementation for v1.0 +""" diff --git a/src/cloudevents/core/v1/event.py b/src/cloudevents/core/v1/event.py new file mode 100644 index 00000000..1de0b674 --- /dev/null +++ b/src/cloudevents/core/v1/event.py @@ -0,0 +1,265 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import re +from collections import defaultdict +from datetime import datetime +from typing import Any, Final + +from cloudevents.core.base import BaseCloudEvent +from cloudevents.core.exceptions import ( + BaseCloudEventException, + CloudEventValidationError, + CustomExtensionAttributeError, + InvalidAttributeTypeError, + InvalidAttributeValueError, + MissingRequiredAttributeError, +) +from cloudevents.core.spec import SPECVERSION_V1_0 + +REQUIRED_ATTRIBUTES: Final[list[str]] = ["id", "source", "type", "specversion"] +OPTIONAL_ATTRIBUTES: Final[list[str]] = [ + "datacontenttype", + "dataschema", + "subject", + "time", +] + + +class CloudEvent(BaseCloudEvent): + def __init__( + self, + attributes: dict[str, Any], + data: dict[str, Any] | str | bytes | None = None, + ) -> None: + self._validate_attribute(attributes=attributes) + self._attributes: dict[str, Any] = attributes + self._data: dict[str, Any] | str | bytes | None = data + + @staticmethod + def _validate_attribute(attributes: dict[str, Any]) -> None: + """ + Validates the attributes of the CloudEvent as per the CloudEvents specification. 
+ + See https://github.com/cloudevents/spec/blob/main/cloudevents/spec.md#required-attributes + """ + errors: dict[str, list[BaseCloudEventException]] = defaultdict(list) + errors.update(CloudEvent._validate_required_attributes(attributes=attributes)) + errors.update(CloudEvent._validate_optional_attributes(attributes=attributes)) + errors.update(CloudEvent._validate_extension_attributes(attributes=attributes)) + if errors: + raise CloudEventValidationError(errors=errors) + + @staticmethod + def _validate_required_attributes( + attributes: dict[str, Any], + ) -> dict[str, list[BaseCloudEventException]]: + """ + Validates the types of the required attributes. + + :param attributes: The attributes of the CloudEvent instance. + :return: A dictionary of validation error messages. + """ + errors: dict[str, list[BaseCloudEventException]] = defaultdict(list) + + if "id" not in attributes: + errors["id"].append(MissingRequiredAttributeError(attribute_name="id")) + if attributes.get("id") is None: + errors["id"].append( + InvalidAttributeValueError( + attribute_name="id", msg="Attribute 'id' must not be None" + ) + ) + if not isinstance(attributes.get("id"), str): + errors["id"].append( + InvalidAttributeTypeError(attribute_name="id", expected_type=str) + ) + + if "source" not in attributes: + errors["source"].append( + MissingRequiredAttributeError(attribute_name="source") + ) + if not isinstance(attributes.get("source"), str): + errors["source"].append( + InvalidAttributeTypeError(attribute_name="source", expected_type=str) + ) + + if "type" not in attributes: + errors["type"].append(MissingRequiredAttributeError(attribute_name="type")) + if not isinstance(attributes.get("type"), str): + errors["type"].append( + InvalidAttributeTypeError(attribute_name="type", expected_type=str) + ) + + if "specversion" not in attributes: + errors["specversion"].append( + MissingRequiredAttributeError(attribute_name="specversion") + ) + if not isinstance(attributes.get("specversion"), 
str): + errors["specversion"].append( + InvalidAttributeTypeError( + attribute_name="specversion", expected_type=str + ) + ) + if attributes.get("specversion") != SPECVERSION_V1_0: + errors["specversion"].append( + InvalidAttributeValueError( + attribute_name="specversion", + msg=f"Attribute 'specversion' must be '{SPECVERSION_V1_0}'", + ) + ) + return errors + + @staticmethod + def _validate_optional_attributes( + attributes: dict[str, Any], + ) -> dict[str, list[BaseCloudEventException]]: + """ + Validates the types and values of the optional attributes. + + :param attributes: The attributes of the CloudEvent instance. + :return: A dictionary of validation error messages. + """ + errors: dict[str, list[BaseCloudEventException]] = defaultdict(list) + + if "time" in attributes: + if not isinstance(attributes["time"], datetime): + errors["time"].append( + InvalidAttributeTypeError( + attribute_name="time", expected_type=datetime + ) + ) + if hasattr(attributes["time"], "tzinfo") and not attributes["time"].tzinfo: + errors["time"].append( + InvalidAttributeValueError( + attribute_name="time", + msg="Attribute 'time' must be timezone aware", + ) + ) + if "subject" in attributes: + if not isinstance(attributes["subject"], str): + errors["subject"].append( + InvalidAttributeTypeError( + attribute_name="subject", expected_type=str + ) + ) + if not attributes["subject"]: + errors["subject"].append( + InvalidAttributeValueError( + attribute_name="subject", + msg="Attribute 'subject' must not be empty", + ) + ) + if "datacontenttype" in attributes: + if not isinstance(attributes["datacontenttype"], str): + errors["datacontenttype"].append( + InvalidAttributeTypeError( + attribute_name="datacontenttype", expected_type=str + ) + ) + if not attributes["datacontenttype"]: + errors["datacontenttype"].append( + InvalidAttributeValueError( + attribute_name="datacontenttype", + msg="Attribute 'datacontenttype' must not be empty", + ) + ) + if "dataschema" in attributes: + if not 
isinstance(attributes["dataschema"], str): + errors["dataschema"].append( + InvalidAttributeTypeError( + attribute_name="dataschema", expected_type=str + ) + ) + if not attributes["dataschema"]: + errors["dataschema"].append( + InvalidAttributeValueError( + attribute_name="dataschema", + msg="Attribute 'dataschema' must not be empty", + ) + ) + return errors + + @staticmethod + def _validate_extension_attributes( + attributes: dict[str, Any], + ) -> dict[str, list[BaseCloudEventException]]: + """ + Validates the extension attributes. + + :param attributes: The attributes of the CloudEvent instance. + :return: A dictionary of validation error messages. + """ + errors: dict[str, list[BaseCloudEventException]] = defaultdict(list) + extension_attributes = [ + key + for key in attributes.keys() + if key not in REQUIRED_ATTRIBUTES and key not in OPTIONAL_ATTRIBUTES + ] + for extension_attribute in extension_attributes: + if extension_attribute == "data": + errors[extension_attribute].append( + CustomExtensionAttributeError( + attribute_name=extension_attribute, + msg="Extension attribute 'data' is reserved and must not be used", + ) + ) + if not (1 <= len(extension_attribute) <= 20): + errors[extension_attribute].append( + CustomExtensionAttributeError( + attribute_name=extension_attribute, + msg=f"Extension attribute '{extension_attribute}' should be between 1 and 20 characters long", + ) + ) + if not re.match(r"^[a-z0-9]+$", extension_attribute): + errors[extension_attribute].append( + CustomExtensionAttributeError( + attribute_name=extension_attribute, + msg=f"Extension attribute '{extension_attribute}' should only contain lowercase letters and numbers", + ) + ) + return errors + + def get_id(self) -> str: + return self._attributes["id"] # type: ignore + + def get_source(self) -> str: + return self._attributes["source"] # type: ignore + + def get_type(self) -> str: + return self._attributes["type"] # type: ignore + + def get_specversion(self) -> str: + return 
self._attributes["specversion"] # type: ignore + + def get_datacontenttype(self) -> str | None: + return self._attributes.get("datacontenttype") + + def get_dataschema(self) -> str | None: + return self._attributes.get("dataschema") + + def get_subject(self) -> str | None: + return self._attributes.get("subject") + + def get_time(self) -> datetime | None: + return self._attributes.get("time") + + def get_extension(self, extension_name: str) -> Any: + return self._attributes.get(extension_name) + + def get_data(self) -> dict[str, Any] | str | bytes | None: + return self._data + + def get_attributes(self) -> dict[str, Any]: + return self._attributes diff --git a/cloudevents/py.typed b/src/cloudevents/py.typed similarity index 100% rename from cloudevents/py.typed rename to src/cloudevents/py.typed diff --git a/cloudevents/__init__.py b/src/cloudevents/v1/__init__.py similarity index 100% rename from cloudevents/__init__.py rename to src/cloudevents/v1/__init__.py diff --git a/cloudevents/abstract/__init__.py b/src/cloudevents/v1/abstract/__init__.py similarity index 90% rename from cloudevents/abstract/__init__.py rename to src/cloudevents/v1/abstract/__init__.py index 4000c8a7..13ebec5b 100644 --- a/cloudevents/abstract/__init__.py +++ b/src/cloudevents/v1/abstract/__init__.py @@ -12,6 +12,6 @@ # License for the specific language governing permissions and limitations # under the License. 
-from cloudevents.abstract.event import AnyCloudEvent, CloudEvent +from cloudevents_v1.abstract.event import AnyCloudEvent, CloudEvent __all__ = ["AnyCloudEvent", "CloudEvent"] diff --git a/cloudevents/abstract/event.py b/src/cloudevents/v1/abstract/event.py similarity index 100% rename from cloudevents/abstract/event.py rename to src/cloudevents/v1/abstract/event.py diff --git a/cloudevents/conversion.py b/src/cloudevents/v1/conversion.py similarity index 97% rename from cloudevents/conversion.py rename to src/cloudevents/v1/conversion.py index c73e3ed0..3d9899a2 100644 --- a/cloudevents/conversion.py +++ b/src/cloudevents/v1/conversion.py @@ -16,11 +16,11 @@ import json import typing -from cloudevents import exceptions as cloud_exceptions -from cloudevents.abstract import AnyCloudEvent -from cloudevents.sdk import converters, marshaller, types -from cloudevents.sdk.converters import is_binary -from cloudevents.sdk.event import v1, v03 +from cloudevents_v1 import exceptions as cloud_exceptions +from cloudevents_v1.abstract import AnyCloudEvent +from cloudevents_v1.sdk import converters, marshaller, types +from cloudevents_v1.sdk.converters import is_binary +from cloudevents_v1.sdk.event import v03, v1 def _best_effort_serialize_to_json( # type: ignore[no-untyped-def] diff --git a/cloudevents/exceptions.py b/src/cloudevents/v1/exceptions.py similarity index 100% rename from cloudevents/exceptions.py rename to src/cloudevents/v1/exceptions.py diff --git a/cloudevents/http/__init__.py b/src/cloudevents/v1/http/__init__.py similarity index 73% rename from cloudevents/http/__init__.py rename to src/cloudevents/v1/http/__init__.py index 6e75636e..86f6030a 100644 --- a/cloudevents/http/__init__.py +++ b/src/cloudevents/v1/http/__init__.py @@ -13,16 +13,16 @@ # under the License. 
-from cloudevents.http.conversion import from_dict, from_http, from_json -from cloudevents.http.event import CloudEvent -from cloudevents.http.event_type import is_binary, is_structured # deprecated -from cloudevents.http.http_methods import ( # deprecated +from cloudevents_v1.http.conversion import from_dict, from_http, from_json +from cloudevents_v1.http.event import CloudEvent +from cloudevents_v1.http.event_type import is_binary, is_structured # deprecated +from cloudevents_v1.http.http_methods import ( # deprecated to_binary, to_binary_http, to_structured, to_structured_http, ) -from cloudevents.http.json_methods import to_json # deprecated +from cloudevents_v1.http.json_methods import to_json # deprecated __all__ = [ "to_binary", diff --git a/cloudevents/http/conversion.py b/src/cloudevents/v1/http/conversion.py similarity index 88% rename from cloudevents/http/conversion.py rename to src/cloudevents/v1/http/conversion.py index a7da926b..050eb25c 100644 --- a/cloudevents/http/conversion.py +++ b/src/cloudevents/v1/http/conversion.py @@ -14,11 +14,11 @@ import typing -from cloudevents.conversion import from_dict as _abstract_from_dict -from cloudevents.conversion import from_http as _abstract_from_http -from cloudevents.conversion import from_json as _abstract_from_json -from cloudevents.http.event import CloudEvent -from cloudevents.sdk import types +from cloudevents_v1.conversion import from_dict as _abstract_from_dict +from cloudevents_v1.conversion import from_http as _abstract_from_http +from cloudevents_v1.conversion import from_json as _abstract_from_json +from cloudevents_v1.http.event import CloudEvent +from cloudevents_v1.sdk import types def from_json( diff --git a/cloudevents/http/event.py b/src/cloudevents/v1/http/event.py similarity index 96% rename from cloudevents/http/event.py rename to src/cloudevents/v1/http/event.py index c7a066d6..69f38110 100644 --- a/cloudevents/http/event.py +++ b/src/cloudevents/v1/http/event.py @@ -16,9 +16,9 @@ 
import typing import uuid -import cloudevents.exceptions as cloud_exceptions -from cloudevents import abstract -from cloudevents.sdk.event import v1, v03 +import cloudevents_v1.exceptions as cloud_exceptions +from cloudevents_v1 import abstract +from cloudevents_v1.sdk.event import v03, v1 _required_by_version = { "1.0": v1.Event._ce_required_fields, diff --git a/cloudevents/http/event_type.py b/src/cloudevents/v1/http/event_type.py similarity index 88% rename from cloudevents/http/event_type.py rename to src/cloudevents/v1/http/event_type.py index 52259e1e..38fceffb 100644 --- a/cloudevents/http/event_type.py +++ b/src/cloudevents/v1/http/event_type.py @@ -13,11 +13,10 @@ # under the License. import typing +from cloudevents_v1.sdk.converters import is_binary as _moved_is_binary +from cloudevents_v1.sdk.converters import is_structured as _moved_is_structured from deprecation import deprecated -from cloudevents.sdk.converters import is_binary as _moved_is_binary -from cloudevents.sdk.converters import is_structured as _moved_is_structured - # THIS MODULE IS DEPRECATED, YOU SHOULD NOT ADD NEW FUNCTIONALLY HERE diff --git a/cloudevents/http/http_methods.py b/src/cloudevents/v1/http/http_methods.py similarity index 86% rename from cloudevents/http/http_methods.py rename to src/cloudevents/v1/http/http_methods.py index 091c51b5..fe5cd42b 100644 --- a/cloudevents/http/http_methods.py +++ b/src/cloudevents/v1/http/http_methods.py @@ -14,15 +14,14 @@ import typing +from cloudevents_v1.abstract import AnyCloudEvent +from cloudevents_v1.conversion import to_binary as _moved_to_binary +from cloudevents_v1.conversion import to_structured as _moved_to_structured +from cloudevents_v1.http.conversion import from_http as _moved_from_http +from cloudevents_v1.http.event import CloudEvent +from cloudevents_v1.sdk import types from deprecation import deprecated -from cloudevents.abstract import AnyCloudEvent -from cloudevents.conversion import to_binary as _moved_to_binary -from 
cloudevents.conversion import to_structured as _moved_to_structured -from cloudevents.http.conversion import from_http as _moved_from_http -from cloudevents.http.event import CloudEvent -from cloudevents.sdk import types - # THIS MODULE IS DEPRECATED, YOU SHOULD NOT ADD NEW FUNCTIONALLY HERE diff --git a/cloudevents/http/json_methods.py b/src/cloudevents/v1/http/json_methods.py similarity index 83% rename from cloudevents/http/json_methods.py rename to src/cloudevents/v1/http/json_methods.py index 58e322c7..28a9873f 100644 --- a/cloudevents/http/json_methods.py +++ b/src/cloudevents/v1/http/json_methods.py @@ -14,14 +14,13 @@ import typing +from cloudevents_v1.abstract import AnyCloudEvent +from cloudevents_v1.conversion import to_json as _moved_to_json +from cloudevents_v1.http import CloudEvent +from cloudevents_v1.http.conversion import from_json as _moved_from_json +from cloudevents_v1.sdk import types from deprecation import deprecated -from cloudevents.abstract import AnyCloudEvent -from cloudevents.conversion import to_json as _moved_to_json -from cloudevents.http import CloudEvent -from cloudevents.http.conversion import from_json as _moved_from_json -from cloudevents.sdk import types - # THIS MODULE IS DEPRECATED, YOU SHOULD NOT ADD NEW FUNCTIONALLY HERE diff --git a/cloudevents/http/util.py b/src/cloudevents/v1/http/util.py similarity index 96% rename from cloudevents/http/util.py rename to src/cloudevents/v1/http/util.py index f44395e6..8158fb66 100644 --- a/cloudevents/http/util.py +++ b/src/cloudevents/v1/http/util.py @@ -13,11 +13,10 @@ # under the License. 
import typing -from deprecation import deprecated - -from cloudevents.conversion import ( +from cloudevents_v1.conversion import ( _best_effort_serialize_to_json as _moved_default_marshaller, ) +from deprecation import deprecated # THIS MODULE IS DEPRECATED, YOU SHOULD NOT ADD NEW FUNCTIONALLY HERE diff --git a/cloudevents/kafka/__init__.py b/src/cloudevents/v1/kafka/__init__.py similarity index 94% rename from cloudevents/kafka/__init__.py rename to src/cloudevents/v1/kafka/__init__.py index fbe1dfb0..b5648c30 100644 --- a/cloudevents/kafka/__init__.py +++ b/src/cloudevents/v1/kafka/__init__.py @@ -12,7 +12,7 @@ # License for the specific language governing permissions and limitations # under the License. -from cloudevents.kafka.conversion import ( +from cloudevents_v1.kafka.conversion import ( KafkaMessage, KeyMapper, from_binary, diff --git a/cloudevents/kafka/conversion.py b/src/cloudevents/v1/kafka/conversion.py similarity index 95% rename from cloudevents/kafka/conversion.py rename to src/cloudevents/v1/kafka/conversion.py index 97c355f2..3c0d3c7e 100644 --- a/cloudevents/kafka/conversion.py +++ b/src/cloudevents/v1/kafka/conversion.py @@ -15,11 +15,11 @@ import json import typing -from cloudevents import exceptions as cloud_exceptions -from cloudevents import http -from cloudevents.abstract import AnyCloudEvent -from cloudevents.kafka.exceptions import KeyMapperError -from cloudevents.sdk import types +from cloudevents_v1 import exceptions as cloud_exceptions +from cloudevents_v1 import http +from cloudevents_v1.abstract import AnyCloudEvent +from cloudevents_v1.kafka.exceptions import KeyMapperError +from cloudevents_v1.sdk import types DEFAULT_MARSHALLER: types.MarshallerType = json.dumps DEFAULT_UNMARSHALLER: types.MarshallerType = json.loads @@ -228,7 +228,7 @@ def from_structured( structure = envelope_unmarshaller(message.value) except Exception as e: raise cloud_exceptions.DataUnmarshallerError( - "Failed to unmarshall message with error: " 
f"{type(e).__name__}('{e}')" + f"Failed to unmarshall message with error: {type(e).__name__}('{e}')" ) attributes: typing.Dict[str, typing.Any] = {} @@ -247,7 +247,7 @@ def from_structured( decoded_value = value except Exception as e: raise cloud_exceptions.DataUnmarshallerError( - "Failed to unmarshall data with error: " f"{type(e).__name__}('{e}')" + f"Failed to unmarshall data with error: {type(e).__name__}('{e}')" ) if name == "data": data = decoded_value diff --git a/cloudevents/kafka/exceptions.py b/src/cloudevents/v1/kafka/exceptions.py similarity index 92% rename from cloudevents/kafka/exceptions.py rename to src/cloudevents/v1/kafka/exceptions.py index 6459f0a2..352e9d24 100644 --- a/cloudevents/kafka/exceptions.py +++ b/src/cloudevents/v1/kafka/exceptions.py @@ -11,7 +11,7 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. -from cloudevents import exceptions as cloud_exceptions +from cloudevents_v1 import exceptions as cloud_exceptions class KeyMapperError(cloud_exceptions.GenericException): diff --git a/src/cloudevents/v1/py.typed b/src/cloudevents/v1/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/cloudevents/pydantic/__init__.py b/src/cloudevents/v1/pydantic/__init__.py similarity index 81% rename from cloudevents/pydantic/__init__.py rename to src/cloudevents/v1/pydantic/__init__.py index f8556ca1..4d22fe41 100644 --- a/cloudevents/pydantic/__init__.py +++ b/src/cloudevents/v1/pydantic/__init__.py @@ -14,24 +14,29 @@ from typing import TYPE_CHECKING -from cloudevents.exceptions import PydanticFeatureNotInstalled +from cloudevents_v1.exceptions import PydanticFeatureNotInstalled try: if TYPE_CHECKING: - from cloudevents.pydantic.v2 import CloudEvent, from_dict, from_http, from_json + from cloudevents_v1.pydantic.v2 import ( + CloudEvent, + from_dict, + from_http, + from_json, + ) else: from pydantic import 
VERSION as PYDANTIC_VERSION pydantic_major_version = PYDANTIC_VERSION.split(".")[0] if pydantic_major_version == "1": - from cloudevents.pydantic.v1 import ( + from cloudevents_v1.pydantic.v1 import ( CloudEvent, from_dict, from_http, from_json, ) else: - from cloudevents.pydantic.v2 import ( + from cloudevents_v1.pydantic.v2 import ( CloudEvent, from_dict, from_http, diff --git a/cloudevents/pydantic/fields_docs.py b/src/cloudevents/v1/pydantic/fields_docs.py similarity index 99% rename from cloudevents/pydantic/fields_docs.py rename to src/cloudevents/v1/pydantic/fields_docs.py index 00ed0bd3..0abeaf15 100644 --- a/cloudevents/pydantic/fields_docs.py +++ b/src/cloudevents/v1/pydantic/fields_docs.py @@ -12,7 +12,7 @@ # License for the specific language governing permissions and limitations # under the License. -from cloudevents.sdk.event import attribute +from cloudevents_v1.sdk.event import attribute FIELD_DESCRIPTIONS = { "data": { diff --git a/cloudevents/pydantic/v2/__init__.py b/src/cloudevents/v1/pydantic/v1/__init__.py similarity index 83% rename from cloudevents/pydantic/v2/__init__.py rename to src/cloudevents/v1/pydantic/v1/__init__.py index 55d2a7fd..3b0e435c 100644 --- a/cloudevents/pydantic/v2/__init__.py +++ b/src/cloudevents/v1/pydantic/v1/__init__.py @@ -12,7 +12,7 @@ # License for the specific language governing permissions and limitations # under the License. 
-from cloudevents.pydantic.v2.conversion import from_dict, from_http, from_json -from cloudevents.pydantic.v2.event import CloudEvent +from cloudevents_v1.pydantic.v1.conversion import from_dict, from_http, from_json +from cloudevents_v1.pydantic.v1.event import CloudEvent __all__ = ["CloudEvent", "from_json", "from_dict", "from_http"] diff --git a/cloudevents/pydantic/v1/conversion.py b/src/cloudevents/v1/pydantic/v1/conversion.py similarity index 88% rename from cloudevents/pydantic/v1/conversion.py rename to src/cloudevents/v1/pydantic/v1/conversion.py index dcf0b7db..efd7a7f4 100644 --- a/cloudevents/pydantic/v1/conversion.py +++ b/src/cloudevents/v1/pydantic/v1/conversion.py @@ -13,11 +13,11 @@ # under the License. import typing -from cloudevents.conversion import from_dict as _abstract_from_dict -from cloudevents.conversion import from_http as _abstract_from_http -from cloudevents.conversion import from_json as _abstract_from_json -from cloudevents.pydantic.v1.event import CloudEvent -from cloudevents.sdk import types +from cloudevents_v1.conversion import from_dict as _abstract_from_dict +from cloudevents_v1.conversion import from_http as _abstract_from_http +from cloudevents_v1.conversion import from_json as _abstract_from_json +from cloudevents_v1.pydantic.v1.event import CloudEvent +from cloudevents_v1.sdk import types def from_http( diff --git a/cloudevents/pydantic/v1/event.py b/src/cloudevents/v1/pydantic/v1/event.py similarity index 96% rename from cloudevents/pydantic/v1/event.py rename to src/cloudevents/v1/pydantic/v1/event.py index d18736a4..999828b3 100644 --- a/cloudevents/pydantic/v1/event.py +++ b/src/cloudevents/v1/pydantic/v1/event.py @@ -15,8 +15,8 @@ import json import typing -from cloudevents.exceptions import PydanticFeatureNotInstalled -from cloudevents.pydantic.fields_docs import FIELD_DESCRIPTIONS +from cloudevents_v1.exceptions import PydanticFeatureNotInstalled +from cloudevents_v1.pydantic.fields_docs import FIELD_DESCRIPTIONS try: 
from pydantic import VERSION as PYDANTIC_VERSION @@ -32,9 +32,9 @@ "Install it using pip install cloudevents[pydantic]" ) -from cloudevents import abstract, conversion, http -from cloudevents.exceptions import IncompatibleArgumentsError -from cloudevents.sdk.event import attribute +from cloudevents_v1 import abstract, conversion, http +from cloudevents_v1.exceptions import IncompatibleArgumentsError +from cloudevents_v1.sdk.event import attribute def _ce_json_dumps( # type: ignore[no-untyped-def] @@ -71,7 +71,9 @@ def _ce_json_dumps( # type: ignore[no-untyped-def] def _ce_json_loads( # type: ignore[no-untyped-def] - data: typing.AnyStr, *args, **kwargs # noqa + data: typing.AnyStr, + *args, + **kwargs, # noqa ) -> typing.Dict[typing.Any, typing.Any]: """Performs Pydantic-specific deserialization of the event. diff --git a/cloudevents/pydantic/v1/__init__.py b/src/cloudevents/v1/pydantic/v2/__init__.py similarity index 83% rename from cloudevents/pydantic/v1/__init__.py rename to src/cloudevents/v1/pydantic/v2/__init__.py index e17151a4..0bda7d88 100644 --- a/cloudevents/pydantic/v1/__init__.py +++ b/src/cloudevents/v1/pydantic/v2/__init__.py @@ -12,7 +12,7 @@ # License for the specific language governing permissions and limitations # under the License. 
-from cloudevents.pydantic.v1.conversion import from_dict, from_http, from_json -from cloudevents.pydantic.v1.event import CloudEvent +from cloudevents_v1.pydantic.v2.conversion import from_dict, from_http, from_json +from cloudevents_v1.pydantic.v2.event import CloudEvent __all__ = ["CloudEvent", "from_json", "from_dict", "from_http"] diff --git a/cloudevents/pydantic/v2/conversion.py b/src/cloudevents/v1/pydantic/v2/conversion.py similarity index 88% rename from cloudevents/pydantic/v2/conversion.py rename to src/cloudevents/v1/pydantic/v2/conversion.py index 65108544..a164091b 100644 --- a/cloudevents/pydantic/v2/conversion.py +++ b/src/cloudevents/v1/pydantic/v2/conversion.py @@ -14,11 +14,11 @@ import typing -from cloudevents.conversion import from_dict as _abstract_from_dict -from cloudevents.conversion import from_http as _abstract_from_http -from cloudevents.conversion import from_json as _abstract_from_json -from cloudevents.pydantic.v2.event import CloudEvent -from cloudevents.sdk import types +from cloudevents_v1.conversion import from_dict as _abstract_from_dict +from cloudevents_v1.conversion import from_http as _abstract_from_http +from cloudevents_v1.conversion import from_json as _abstract_from_json +from cloudevents_v1.pydantic.v2.event import CloudEvent +from cloudevents_v1.sdk import types def from_http( diff --git a/cloudevents/pydantic/v2/event.py b/src/cloudevents/v1/pydantic/v2/event.py similarity index 96% rename from cloudevents/pydantic/v2/event.py rename to src/cloudevents/v1/pydantic/v2/event.py index 643794c1..26c2fcb9 100644 --- a/cloudevents/pydantic/v2/event.py +++ b/src/cloudevents/v1/pydantic/v2/event.py @@ -17,11 +17,10 @@ import typing from typing import Any +from cloudevents_v1.exceptions import PydanticFeatureNotInstalled +from cloudevents_v1.pydantic.fields_docs import FIELD_DESCRIPTIONS from pydantic.deprecated import parse as _deprecated_parse -from cloudevents.exceptions import PydanticFeatureNotInstalled -from 
cloudevents.pydantic.fields_docs import FIELD_DESCRIPTIONS - try: from pydantic import BaseModel, ConfigDict, Field, model_serializer except ImportError: # pragma: no cover # hard to test @@ -30,9 +29,9 @@ "Install it using pip install cloudevents[pydantic]" ) -from cloudevents import abstract, conversion -from cloudevents.exceptions import IncompatibleArgumentsError -from cloudevents.sdk.event import attribute +from cloudevents_v1 import abstract, conversion +from cloudevents_v1.exceptions import IncompatibleArgumentsError +from cloudevents_v1.sdk.event import attribute class CloudEvent(abstract.CloudEvent, BaseModel): # type: ignore diff --git a/cloudevents/sdk/event/__init__.py b/src/cloudevents/v1/sdk/__init__.py similarity index 100% rename from cloudevents/sdk/event/__init__.py rename to src/cloudevents/v1/sdk/__init__.py diff --git a/cloudevents/sdk/converters/__init__.py b/src/cloudevents/v1/sdk/converters/__init__.py similarity index 82% rename from cloudevents/sdk/converters/__init__.py rename to src/cloudevents/v1/sdk/converters/__init__.py index cd8df680..c70f1464 100644 --- a/cloudevents/sdk/converters/__init__.py +++ b/src/cloudevents/v1/sdk/converters/__init__.py @@ -12,9 +12,9 @@ # License for the specific language governing permissions and limitations # under the License. 
-from cloudevents.sdk.converters import binary, structured -from cloudevents.sdk.converters.binary import is_binary -from cloudevents.sdk.converters.structured import is_structured +from cloudevents_v1.sdk.converters import binary, structured +from cloudevents_v1.sdk.converters.binary import is_binary +from cloudevents_v1.sdk.converters.structured import is_structured TypeBinary: str = binary.BinaryHTTPCloudEventConverter.TYPE TypeStructured: str = structured.JSONHTTPCloudEventConverter.TYPE diff --git a/cloudevents/sdk/converters/base.py b/src/cloudevents/v1/sdk/converters/base.py similarity index 97% rename from cloudevents/sdk/converters/base.py rename to src/cloudevents/v1/sdk/converters/base.py index 43edf5d2..c0b0b3fb 100644 --- a/cloudevents/sdk/converters/base.py +++ b/src/cloudevents/v1/sdk/converters/base.py @@ -14,7 +14,7 @@ import typing -from cloudevents.sdk.event import base +from cloudevents_v1.sdk.event import base class Converter(object): diff --git a/cloudevents/sdk/converters/binary.py b/src/cloudevents/v1/sdk/converters/binary.py similarity index 90% rename from cloudevents/sdk/converters/binary.py rename to src/cloudevents/v1/sdk/converters/binary.py index c5fcbf54..a06d92ce 100644 --- a/cloudevents/sdk/converters/binary.py +++ b/src/cloudevents/v1/sdk/converters/binary.py @@ -14,11 +14,11 @@ import typing -from cloudevents.sdk import exceptions, types -from cloudevents.sdk.converters import base -from cloudevents.sdk.converters.util import has_binary_headers -from cloudevents.sdk.event import base as event_base -from cloudevents.sdk.event import v1, v03 +from cloudevents_v1.sdk import exceptions, types +from cloudevents_v1.sdk.converters import base +from cloudevents_v1.sdk.converters.util import has_binary_headers +from cloudevents_v1.sdk.event import base as event_base +from cloudevents_v1.sdk.event import v03, v1 class BinaryHTTPCloudEventConverter(base.Converter): diff --git a/cloudevents/sdk/converters/structured.py 
b/src/cloudevents/v1/sdk/converters/structured.py similarity index 92% rename from cloudevents/sdk/converters/structured.py rename to src/cloudevents/v1/sdk/converters/structured.py index 24eda895..b5e090ef 100644 --- a/cloudevents/sdk/converters/structured.py +++ b/src/cloudevents/v1/sdk/converters/structured.py @@ -14,10 +14,10 @@ import typing -from cloudevents.sdk import types -from cloudevents.sdk.converters import base -from cloudevents.sdk.converters.util import has_binary_headers -from cloudevents.sdk.event import base as event_base +from cloudevents_v1.sdk import types +from cloudevents_v1.sdk.converters import base +from cloudevents_v1.sdk.converters.util import has_binary_headers +from cloudevents_v1.sdk.event import base as event_base # TODO: Singleton? diff --git a/cloudevents/sdk/converters/util.py b/src/cloudevents/v1/sdk/converters/util.py similarity index 100% rename from cloudevents/sdk/converters/util.py rename to src/cloudevents/v1/sdk/converters/util.py diff --git a/cloudevents/tests/__init__.py b/src/cloudevents/v1/sdk/event/__init__.py similarity index 100% rename from cloudevents/tests/__init__.py rename to src/cloudevents/v1/sdk/event/__init__.py diff --git a/cloudevents/sdk/event/attribute.py b/src/cloudevents/v1/sdk/event/attribute.py similarity index 100% rename from cloudevents/sdk/event/attribute.py rename to src/cloudevents/v1/sdk/event/attribute.py diff --git a/cloudevents/sdk/event/base.py b/src/cloudevents/v1/sdk/event/base.py similarity index 98% rename from cloudevents/sdk/event/base.py rename to src/cloudevents/v1/sdk/event/base.py index 53e05d35..d200fa9a 100644 --- a/cloudevents/sdk/event/base.py +++ b/src/cloudevents/v1/sdk/event/base.py @@ -17,8 +17,8 @@ import typing from typing import Set -import cloudevents.exceptions as cloud_exceptions -from cloudevents.sdk import types +import cloudevents_v1.exceptions as cloud_exceptions +from cloudevents_v1.sdk import types # TODO(slinkydeveloper) is this really needed? 
@@ -245,8 +245,7 @@ def UnmarshalJSON( decoded_value = value except Exception as e: raise cloud_exceptions.DataUnmarshallerError( - "Failed to unmarshall data with error: " - f"{type(e).__name__}('{e}')" + f"Failed to unmarshall data with error: {type(e).__name__}('{e}')" ) self.Set(name, decoded_value) diff --git a/cloudevents/sdk/event/opt.py b/src/cloudevents/v1/sdk/event/opt.py similarity index 100% rename from cloudevents/sdk/event/opt.py rename to src/cloudevents/v1/sdk/event/opt.py diff --git a/cloudevents/sdk/event/v03.py b/src/cloudevents/v1/sdk/event/v03.py similarity index 99% rename from cloudevents/sdk/event/v03.py rename to src/cloudevents/v1/sdk/event/v03.py index 6d69d2ab..d686b536 100644 --- a/cloudevents/sdk/event/v03.py +++ b/src/cloudevents/v1/sdk/event/v03.py @@ -13,7 +13,7 @@ # under the License. import typing -from cloudevents.sdk.event import base, opt +from cloudevents_v1.sdk.event import base, opt class Event(base.BaseEvent): diff --git a/cloudevents/sdk/event/v1.py b/src/cloudevents/v1/sdk/event/v1.py similarity index 98% rename from cloudevents/sdk/event/v1.py rename to src/cloudevents/v1/sdk/event/v1.py index 18d1f3af..dfa470d1 100644 --- a/cloudevents/sdk/event/v1.py +++ b/src/cloudevents/v1/sdk/event/v1.py @@ -13,7 +13,7 @@ # under the License. 
import typing -from cloudevents.sdk.event import base, opt +from cloudevents_v1.sdk.event import base, opt class Event(base.BaseEvent): diff --git a/cloudevents/sdk/exceptions.py b/src/cloudevents/v1/sdk/exceptions.py similarity index 91% rename from cloudevents/sdk/exceptions.py rename to src/cloudevents/v1/sdk/exceptions.py index 878bc704..eb9e250d 100644 --- a/cloudevents/sdk/exceptions.py +++ b/src/cloudevents/v1/sdk/exceptions.py @@ -36,6 +36,7 @@ def __init__(self, converter_type): class UnsupportedEventConverter(Exception): def __init__(self, content_type): super().__init__( - "Unable to identify valid event converter " - "for content-type: '{0}'".format(content_type) + "Unable to identify valid event converter for content-type: '{0}'".format( + content_type + ) ) diff --git a/cloudevents/sdk/marshaller.py b/src/cloudevents/v1/sdk/marshaller.py similarity index 96% rename from cloudevents/sdk/marshaller.py rename to src/cloudevents/v1/sdk/marshaller.py index dfd18965..b650c26a 100644 --- a/cloudevents/sdk/marshaller.py +++ b/src/cloudevents/v1/sdk/marshaller.py @@ -15,9 +15,9 @@ import json import typing -from cloudevents.sdk import exceptions, types -from cloudevents.sdk.converters import base, binary, structured -from cloudevents.sdk.event import base as event_base +from cloudevents_v1.sdk import exceptions, types +from cloudevents_v1.sdk.converters import base, binary, structured +from cloudevents_v1.sdk.event import base as event_base class HTTPMarshaller(object): diff --git a/cloudevents/sdk/types.py b/src/cloudevents/v1/sdk/types.py similarity index 100% rename from cloudevents/sdk/types.py rename to src/cloudevents/v1/sdk/types.py diff --git a/src/cloudevents/v1/tests/__init__.py b/src/cloudevents/v1/tests/__init__.py new file mode 100644 index 00000000..8043675e --- /dev/null +++ b/src/cloudevents/v1/tests/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. diff --git a/cloudevents/tests/data.py b/src/cloudevents/v1/tests/data.py similarity index 97% rename from cloudevents/tests/data.py rename to src/cloudevents/v1/tests/data.py index f5b0ea33..a7866c94 100644 --- a/cloudevents/tests/data.py +++ b/src/cloudevents/v1/tests/data.py @@ -12,7 +12,7 @@ # License for the specific language governing permissions and limitations # under the License. -from cloudevents.sdk.event import v1, v03 +from cloudevents_v1.sdk.event import v03, v1 content_type = "application/json" ce_type = "word.found.exclamation" diff --git a/cloudevents/tests/test_backwards_compatability.py b/src/cloudevents/v1/tests/test_backwards_compatability.py similarity index 62% rename from cloudevents/tests/test_backwards_compatability.py rename to src/cloudevents/v1/tests/test_backwards_compatability.py index 0a20f4cf..157b0dfe 100644 --- a/cloudevents/tests/test_backwards_compatability.py +++ b/src/cloudevents/v1/tests/test_backwards_compatability.py @@ -12,9 +12,8 @@ # License for the specific language governing permissions and limitations # under the License. 
import pytest - -from cloudevents.conversion import _best_effort_serialize_to_json -from cloudevents.http import CloudEvent +from cloudevents_v1.conversion import _best_effort_serialize_to_json +from cloudevents_v1.http import CloudEvent @pytest.fixture() @@ -23,10 +22,10 @@ def dummy_event(): def test_json_methods(dummy_event): - from cloudevents.conversion import to_json - from cloudevents.http.conversion import from_json - from cloudevents.http.json_methods import from_json as deprecated_from_json - from cloudevents.http.json_methods import to_json as deprecated_to_json + from cloudevents_v1.conversion import to_json + from cloudevents_v1.http.conversion import from_json + from cloudevents_v1.http.json_methods import from_json as deprecated_from_json + from cloudevents_v1.http.json_methods import to_json as deprecated_to_json assert from_json(to_json(dummy_event)) == deprecated_from_json( deprecated_to_json(dummy_event) @@ -34,10 +33,12 @@ def test_json_methods(dummy_event): def test_http_methods(dummy_event): - from cloudevents.http import from_http, to_binary, to_structured - from cloudevents.http.http_methods import from_http as deprecated_from_http - from cloudevents.http.http_methods import to_binary as deprecated_to_binary - from cloudevents.http.http_methods import to_structured as deprecated_to_structured + from cloudevents_v1.http import from_http, to_binary, to_structured + from cloudevents_v1.http.http_methods import from_http as deprecated_from_http + from cloudevents_v1.http.http_methods import to_binary as deprecated_to_binary + from cloudevents_v1.http.http_methods import ( + to_structured as deprecated_to_structured, + ) assert from_http(*to_binary(dummy_event)) == deprecated_from_http( *deprecated_to_binary(dummy_event) @@ -48,17 +49,17 @@ def test_http_methods(dummy_event): def test_util(): - from cloudevents.http.util import default_marshaller # noqa + from cloudevents_v1.http.util import default_marshaller # noqa assert 
_best_effort_serialize_to_json(None) == default_marshaller(None) def test_event_type(): - from cloudevents.http.event_type import is_binary, is_structured # noqa + from cloudevents_v1.http.event_type import is_binary, is_structured # noqa def test_http_module_imports(): - from cloudevents.http import ( # noqa + from cloudevents_v1.http import ( # noqa CloudEvent, from_dict, from_http, diff --git a/cloudevents/tests/test_base_events.py b/src/cloudevents/v1/tests/test_base_events.py similarity index 92% rename from cloudevents/tests/test_base_events.py rename to src/cloudevents/v1/tests/test_base_events.py index 8eb83d44..04db6470 100644 --- a/cloudevents/tests/test_base_events.py +++ b/src/cloudevents/v1/tests/test_base_events.py @@ -12,10 +12,9 @@ # License for the specific language governing permissions and limitations # under the License. +import cloudevents_v1.exceptions as cloud_exceptions import pytest - -import cloudevents.exceptions as cloud_exceptions -from cloudevents.sdk.event import v1, v03 +from cloudevents_v1.sdk.event import v03, v1 @pytest.mark.parametrize("event_class", [v1.Event, v03.Event]) diff --git a/cloudevents/tests/test_converters.py b/src/cloudevents/v1/tests/test_converters.py similarity index 93% rename from cloudevents/tests/test_converters.py rename to src/cloudevents/v1/tests/test_converters.py index b91d6b39..f9940409 100644 --- a/cloudevents/tests/test_converters.py +++ b/src/cloudevents/v1/tests/test_converters.py @@ -13,9 +13,8 @@ # under the License. 
import pytest - -from cloudevents.sdk import exceptions -from cloudevents.sdk.converters import base, binary +from cloudevents_v1.sdk import exceptions +from cloudevents_v1.sdk.converters import base, binary def test_binary_converter_raise_unsupported(): diff --git a/cloudevents/tests/test_data_encaps_refs.py b/src/cloudevents/v1/tests/test_data_encaps_refs.py similarity index 96% rename from cloudevents/tests/test_data_encaps_refs.py rename to src/cloudevents/v1/tests/test_data_encaps_refs.py index 02405a93..cf923a9c 100644 --- a/cloudevents/tests/test_data_encaps_refs.py +++ b/src/cloudevents/v1/tests/test_data_encaps_refs.py @@ -16,10 +16,9 @@ from uuid import uuid4 import pytest - -from cloudevents.sdk import converters, marshaller -from cloudevents.sdk.event import v1, v03 -from cloudevents.tests import data +from cloudevents_v1.sdk import converters, marshaller +from cloudevents_v1.sdk.event import v03, v1 +from cloudevents_v1.tests import data @pytest.mark.parametrize("event_class", [v03.Event, v1.Event]) diff --git a/cloudevents/tests/test_deprecated_functions.py b/src/cloudevents/v1/tests/test_deprecated_functions.py similarity index 97% rename from cloudevents/tests/test_deprecated_functions.py rename to src/cloudevents/v1/tests/test_deprecated_functions.py index a99f6247..eec0f527 100644 --- a/cloudevents/tests/test_deprecated_functions.py +++ b/src/cloudevents/v1/tests/test_deprecated_functions.py @@ -13,8 +13,7 @@ # under the License. 
import pytest - -from cloudevents.http import ( +from cloudevents_v1.http import ( CloudEvent, to_binary, to_binary_http, diff --git a/cloudevents/tests/test_event_extensions.py b/src/cloudevents/v1/tests/test_event_extensions.py similarity index 97% rename from cloudevents/tests/test_event_extensions.py rename to src/cloudevents/v1/tests/test_event_extensions.py index eea8edfa..87424090 100644 --- a/cloudevents/tests/test_event_extensions.py +++ b/src/cloudevents/v1/tests/test_event_extensions.py @@ -15,8 +15,7 @@ import json import pytest - -from cloudevents.http import CloudEvent, from_http, to_binary, to_structured +from cloudevents_v1.http import CloudEvent, from_http, to_binary, to_structured test_data = json.dumps({"data-key": "val"}) test_attributes = { diff --git a/cloudevents/tests/test_event_from_request_converter.py b/src/cloudevents/v1/tests/test_event_from_request_converter.py similarity index 93% rename from cloudevents/tests/test_event_from_request_converter.py rename to src/cloudevents/v1/tests/test_event_from_request_converter.py index 901284bb..2f98a640 100644 --- a/cloudevents/tests/test_event_from_request_converter.py +++ b/src/cloudevents/v1/tests/test_event_from_request_converter.py @@ -15,11 +15,10 @@ import json import pytest - -from cloudevents.sdk import marshaller -from cloudevents.sdk.converters import binary, structured -from cloudevents.sdk.event import v1, v03 -from cloudevents.tests import data +from cloudevents_v1.sdk import marshaller +from cloudevents_v1.sdk.converters import binary, structured +from cloudevents_v1.sdk.event import v03, v1 +from cloudevents_v1.tests import data @pytest.mark.parametrize("event_class", [v03.Event, v1.Event]) diff --git a/cloudevents/tests/test_event_pipeline.py b/src/cloudevents/v1/tests/test_event_pipeline.py similarity index 94% rename from cloudevents/tests/test_event_pipeline.py rename to src/cloudevents/v1/tests/test_event_pipeline.py index efc79749..fdb547d5 100644 --- 
a/cloudevents/tests/test_event_pipeline.py +++ b/src/cloudevents/v1/tests/test_event_pipeline.py @@ -15,11 +15,10 @@ import json import pytest - -from cloudevents.sdk import converters, marshaller -from cloudevents.sdk.converters import structured -from cloudevents.sdk.event import v1, v03 -from cloudevents.tests import data +from cloudevents_v1.sdk import converters, marshaller +from cloudevents_v1.sdk.converters import structured +from cloudevents_v1.sdk.event import v03, v1 +from cloudevents_v1.tests import data @pytest.mark.parametrize("event_class", [v03.Event, v1.Event]) diff --git a/cloudevents/tests/test_event_to_request_converter.py b/src/cloudevents/v1/tests/test_event_to_request_converter.py similarity index 93% rename from cloudevents/tests/test_event_to_request_converter.py rename to src/cloudevents/v1/tests/test_event_to_request_converter.py index fd25be5a..c7fb7022 100644 --- a/cloudevents/tests/test_event_to_request_converter.py +++ b/src/cloudevents/v1/tests/test_event_to_request_converter.py @@ -15,10 +15,9 @@ import json import pytest - -from cloudevents.sdk import converters, marshaller -from cloudevents.sdk.event import v1, v03 -from cloudevents.tests import data +from cloudevents_v1.sdk import converters, marshaller +from cloudevents_v1.sdk.event import v03, v1 +from cloudevents_v1.tests import data @pytest.mark.parametrize("event_class", [v03.Event, v1.Event]) diff --git a/cloudevents/tests/test_http_cloudevent.py b/src/cloudevents/v1/tests/test_http_cloudevent.py similarity index 97% rename from cloudevents/tests/test_http_cloudevent.py rename to src/cloudevents/v1/tests/test_http_cloudevent.py index 6ad1537f..0c68c15c 100644 --- a/cloudevents/tests/test_http_cloudevent.py +++ b/src/cloudevents/v1/tests/test_http_cloudevent.py @@ -12,11 +12,10 @@ # License for the specific language governing permissions and limitations # under the License. 
+import cloudevents_v1.exceptions as cloud_exceptions import pytest - -import cloudevents.exceptions as cloud_exceptions -from cloudevents.conversion import _json_or_string -from cloudevents.http import CloudEvent +from cloudevents_v1.conversion import _json_or_string +from cloudevents_v1.http import CloudEvent @pytest.fixture(params=["0.3", "1.0"]) diff --git a/cloudevents/tests/test_http_conversions.py b/src/cloudevents/v1/tests/test_http_conversions.py similarity index 96% rename from cloudevents/tests/test_http_conversions.py rename to src/cloudevents/v1/tests/test_http_conversions.py index 3b9c6717..db582a21 100644 --- a/cloudevents/tests/test_http_conversions.py +++ b/src/cloudevents/v1/tests/test_http_conversions.py @@ -17,10 +17,9 @@ import json import pytest - -from cloudevents.conversion import to_dict, to_json -from cloudevents.http import CloudEvent, from_dict, from_json -from cloudevents.sdk.event.attribute import SpecVersion +from cloudevents_v1.conversion import to_dict, to_json +from cloudevents_v1.http import CloudEvent, from_dict, from_json +from cloudevents_v1.sdk.event.attribute import SpecVersion test_data = json.dumps({"data-key": "val"}) test_attributes = { diff --git a/cloudevents/tests/test_http_events.py b/src/cloudevents/v1/tests/test_http_events.py similarity index 97% rename from cloudevents/tests/test_http_events.py rename to src/cloudevents/v1/tests/test_http_events.py index b21c3729..6956df2a 100644 --- a/cloudevents/tests/test_http_events.py +++ b/src/cloudevents/v1/tests/test_http_events.py @@ -17,17 +17,16 @@ import json import typing +import cloudevents_v1.exceptions as cloud_exceptions import pytest +from cloudevents_v1.http import CloudEvent, from_http, to_binary, to_structured +from cloudevents_v1.http.event_type import is_binary as deprecated_is_binary +from cloudevents_v1.http.event_type import is_structured as deprecated_is_structured +from cloudevents_v1.sdk import converters +from cloudevents_v1.sdk.converters.binary 
import is_binary +from cloudevents_v1.sdk.converters.structured import is_structured from sanic import Sanic, response -import cloudevents.exceptions as cloud_exceptions -from cloudevents.http import CloudEvent, from_http, to_binary, to_structured -from cloudevents.http.event_type import is_binary as deprecated_is_binary -from cloudevents.http.event_type import is_structured as deprecated_is_structured -from cloudevents.sdk import converters -from cloudevents.sdk.converters.binary import is_binary -from cloudevents.sdk.converters.structured import is_structured - invalid_test_headers = [ { "ce-source": "", diff --git a/cloudevents/tests/test_kafka_conversions.py b/src/cloudevents/v1/tests/test_kafka_conversions.py similarity index 98% rename from cloudevents/tests/test_kafka_conversions.py rename to src/cloudevents/v1/tests/test_kafka_conversions.py index 5580773a..661aebbc 100644 --- a/cloudevents/tests/test_kafka_conversions.py +++ b/src/cloudevents/v1/tests/test_kafka_conversions.py @@ -17,18 +17,17 @@ import json import pytest - -from cloudevents import exceptions as cloud_exceptions -from cloudevents.http import CloudEvent -from cloudevents.kafka.conversion import ( +from cloudevents_v1 import exceptions as cloud_exceptions +from cloudevents_v1.http import CloudEvent +from cloudevents_v1.kafka.conversion import ( KafkaMessage, from_binary, from_structured, to_binary, to_structured, ) -from cloudevents.kafka.exceptions import KeyMapperError -from cloudevents.sdk import types +from cloudevents_v1.kafka.exceptions import KeyMapperError +from cloudevents_v1.sdk import types def simple_serialize(data: dict) -> bytes: diff --git a/cloudevents/tests/test_marshaller.py b/src/cloudevents/v1/tests/test_marshaller.py similarity index 93% rename from cloudevents/tests/test_marshaller.py rename to src/cloudevents/v1/tests/test_marshaller.py index 90609891..d3ba81a7 100644 --- a/cloudevents/tests/test_marshaller.py +++ b/src/cloudevents/v1/tests/test_marshaller.py @@ -14,13 
+14,12 @@ import json +import cloudevents_v1.exceptions as cloud_exceptions import pytest - -import cloudevents.exceptions as cloud_exceptions -from cloudevents.http import CloudEvent, from_http, to_binary, to_structured -from cloudevents.sdk import exceptions, marshaller -from cloudevents.sdk.converters import binary, structured -from cloudevents.sdk.event import v1 +from cloudevents_v1.http import CloudEvent, from_http, to_binary, to_structured +from cloudevents_v1.sdk import exceptions, marshaller +from cloudevents_v1.sdk.converters import binary, structured +from cloudevents_v1.sdk.event import v1 @pytest.fixture diff --git a/cloudevents/tests/test_options.py b/src/cloudevents/v1/tests/test_options.py similarity index 95% rename from cloudevents/tests/test_options.py rename to src/cloudevents/v1/tests/test_options.py index aba812b9..86b9ef4c 100644 --- a/cloudevents/tests/test_options.py +++ b/src/cloudevents/v1/tests/test_options.py @@ -13,8 +13,7 @@ # under the License. import pytest - -from cloudevents.sdk.event.opt import Option +from cloudevents_v1.sdk.event.opt import Option def test_set_raise_error(): diff --git a/cloudevents/tests/test_pydantic_cloudevent.py b/src/cloudevents/v1/tests/test_pydantic_cloudevent.py similarity index 97% rename from cloudevents/tests/test_pydantic_cloudevent.py rename to src/cloudevents/v1/tests/test_pydantic_cloudevent.py index 87ac5507..4a2762a5 100644 --- a/cloudevents/tests/test_pydantic_cloudevent.py +++ b/src/cloudevents/v1/tests/test_pydantic_cloudevent.py @@ -15,15 +15,14 @@ from json import loads import pytest +from cloudevents_v1.conversion import _json_or_string +from cloudevents_v1.exceptions import IncompatibleArgumentsError +from cloudevents_v1.pydantic.v1.event import CloudEvent as PydanticV1CloudEvent +from cloudevents_v1.pydantic.v2.event import CloudEvent as PydanticV2CloudEvent +from cloudevents_v1.sdk.event.attribute import SpecVersion from pydantic import ValidationError as PydanticV2ValidationError from 
pydantic.v1 import ValidationError as PydanticV1ValidationError -from cloudevents.conversion import _json_or_string -from cloudevents.exceptions import IncompatibleArgumentsError -from cloudevents.pydantic.v1.event import CloudEvent as PydanticV1CloudEvent -from cloudevents.pydantic.v2.event import CloudEvent as PydanticV2CloudEvent -from cloudevents.sdk.event.attribute import SpecVersion - _DUMMY_SOURCE = "dummy:source" _DUMMY_TYPE = "tests.cloudevents.override" _DUMMY_TIME = "2022-07-16T11:20:34.284130+00:00" diff --git a/cloudevents/tests/test_pydantic_conversions.py b/src/cloudevents/v1/tests/test_pydantic_conversions.py similarity index 90% rename from cloudevents/tests/test_pydantic_conversions.py rename to src/cloudevents/v1/tests/test_pydantic_conversions.py index 801b76bd..abf5cf6e 100644 --- a/cloudevents/tests/test_pydantic_conversions.py +++ b/src/cloudevents/v1/tests/test_pydantic_conversions.py @@ -17,18 +17,17 @@ import json import pytest +from cloudevents_v1.conversion import to_json +from cloudevents_v1.pydantic.v1.conversion import from_dict as pydantic_v1_from_dict +from cloudevents_v1.pydantic.v1.conversion import from_json as pydantic_v1_from_json +from cloudevents_v1.pydantic.v1.event import CloudEvent as PydanticV1CloudEvent +from cloudevents_v1.pydantic.v2.conversion import from_dict as pydantic_v2_from_dict +from cloudevents_v1.pydantic.v2.conversion import from_json as pydantic_v2_from_json +from cloudevents_v1.pydantic.v2.event import CloudEvent as PydanticV2CloudEvent +from cloudevents_v1.sdk.event.attribute import SpecVersion from pydantic import ValidationError as PydanticV2ValidationError from pydantic.v1 import ValidationError as PydanticV1ValidationError -from cloudevents.conversion import to_json -from cloudevents.pydantic.v1.conversion import from_dict as pydantic_v1_from_dict -from cloudevents.pydantic.v1.conversion import from_json as pydantic_v1_from_json -from cloudevents.pydantic.v1.event import CloudEvent as 
PydanticV1CloudEvent -from cloudevents.pydantic.v2.conversion import from_dict as pydantic_v2_from_dict -from cloudevents.pydantic.v2.conversion import from_json as pydantic_v2_from_json -from cloudevents.pydantic.v2.event import CloudEvent as PydanticV2CloudEvent -from cloudevents.sdk.event.attribute import SpecVersion - test_data = json.dumps({"data-key": "val"}) test_attributes = { "type": "com.example.string", diff --git a/cloudevents/tests/test_pydantic_events.py b/src/cloudevents/v1/tests/test_pydantic_events.py similarity index 96% rename from cloudevents/tests/test_pydantic_events.py rename to src/cloudevents/v1/tests/test_pydantic_events.py index 3e536f05..3b1921cf 100644 --- a/cloudevents/tests/test_pydantic_events.py +++ b/src/cloudevents/v1/tests/test_pydantic_events.py @@ -17,21 +17,20 @@ import json import typing +import cloudevents_v1.exceptions as cloud_exceptions import pytest +from cloudevents_v1.conversion import to_binary, to_structured +from cloudevents_v1.pydantic.v1.conversion import from_http as pydantic_v1_from_http +from cloudevents_v1.pydantic.v1.event import CloudEvent as PydanticV1CloudEvent +from cloudevents_v1.pydantic.v2.conversion import from_http as pydantic_v2_from_http +from cloudevents_v1.pydantic.v2.event import CloudEvent as PydanticV2CloudEvent +from cloudevents_v1.sdk import converters +from cloudevents_v1.sdk.converters.binary import is_binary +from cloudevents_v1.sdk.converters.structured import is_structured from pydantic import ValidationError as PydanticV2ValidationError from pydantic.v1 import ValidationError as PydanticV1ValidationError from sanic import Sanic, response -import cloudevents.exceptions as cloud_exceptions -from cloudevents.conversion import to_binary, to_structured -from cloudevents.pydantic.v1.conversion import from_http as pydantic_v1_from_http -from cloudevents.pydantic.v1.event import CloudEvent as PydanticV1CloudEvent -from cloudevents.pydantic.v2.conversion import from_http as 
pydantic_v2_from_http -from cloudevents.pydantic.v2.event import CloudEvent as PydanticV2CloudEvent -from cloudevents.sdk import converters -from cloudevents.sdk.converters.binary import is_binary -from cloudevents.sdk.converters.structured import is_structured - invalid_test_headers = [ { "ce-source": "", diff --git a/cloudevents/tests/test_v03_event.py b/src/cloudevents/v1/tests/test_v03_event.py similarity index 97% rename from cloudevents/tests/test_v03_event.py rename to src/cloudevents/v1/tests/test_v03_event.py index a4755318..a66ebe20 100644 --- a/cloudevents/tests/test_v03_event.py +++ b/src/cloudevents/v1/tests/test_v03_event.py @@ -12,7 +12,7 @@ # License for the specific language governing permissions and limitations # under the License. -from cloudevents.sdk.event import v03 +from cloudevents_v1.sdk.event import v03 def test_v03_time_property(): diff --git a/cloudevents/tests/test_v1_event.py b/src/cloudevents/v1/tests/test_v1_event.py similarity index 97% rename from cloudevents/tests/test_v1_event.py rename to src/cloudevents/v1/tests/test_v1_event.py index de900b0a..e72ecf47 100644 --- a/cloudevents/tests/test_v1_event.py +++ b/src/cloudevents/v1/tests/test_v1_event.py @@ -12,7 +12,7 @@ # License for the specific language governing permissions and limitations # under the License. -from cloudevents.sdk.event import v1 +from cloudevents_v1.sdk.event import v1 def test_v1_time_property(): diff --git a/cloudevents/tests/test_with_sanic.py b/src/cloudevents/v1/tests/test_with_sanic.py similarity index 93% rename from cloudevents/tests/test_with_sanic.py rename to src/cloudevents/v1/tests/test_with_sanic.py index 026f55b7..2f6d788e 100644 --- a/cloudevents/tests/test_with_sanic.py +++ b/src/cloudevents/v1/tests/test_with_sanic.py @@ -12,12 +12,11 @@ # License for the specific language governing permissions and limitations # under the License. 
+from cloudevents_v1.sdk import converters, marshaller +from cloudevents_v1.sdk.event import v1 +from cloudevents_v1.tests import data as test_data from sanic import Sanic, response -from cloudevents.sdk import converters, marshaller -from cloudevents.sdk.event import v1 -from cloudevents.tests import data as test_data - m = marshaller.NewDefaultHTTPMarshaller() app = Sanic("test_with_sanic") diff --git a/tests/test_cloudevents/__init__.py b/tests/test_cloudevents/__init__.py new file mode 100644 index 00000000..8043675e --- /dev/null +++ b/tests/test_cloudevents/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. diff --git a/tests/test_cloudevents/test_cloudevents_version.py b/tests/test_cloudevents/test_cloudevents_version.py new file mode 100644 index 00000000..d895c5f5 --- /dev/null +++ b/tests/test_cloudevents/test_cloudevents_version.py @@ -0,0 +1,19 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. + +from cloudevents import __version__ + + +def test_cloudevents_version() -> None: + assert __version__ is not None diff --git a/tests/test_core/__init__.py b/tests/test_core/__init__.py new file mode 100644 index 00000000..8043675e --- /dev/null +++ b/tests/test_core/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. diff --git a/tests/test_core/test_bindings/__init__.py b/tests/test_core/test_bindings/__init__.py new file mode 100644 index 00000000..8043675e --- /dev/null +++ b/tests/test_core/test_bindings/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
diff --git a/tests/test_core/test_bindings/test_amqp.py b/tests/test_core/test_bindings/test_amqp.py new file mode 100644 index 00000000..d3a704b5 --- /dev/null +++ b/tests/test_core/test_bindings/test_amqp.py @@ -0,0 +1,876 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from datetime import datetime, timezone +from typing import Any + +import pytest + +from cloudevents.core.bindings.amqp import ( + AMQPMessage, + from_amqp, + from_binary, + from_structured, + to_binary, + to_structured, +) +from cloudevents.core.formats.json import JSONFormat +from cloudevents.core.v1.event import CloudEvent + + +@pytest.fixture +def minimal_attributes() -> dict[str, str]: + """Minimal valid CloudEvent attributes""" + return { + "type": "com.example.test", + "source": "/test", + "id": "test-id-123", + "specversion": "1.0", + } + + +def create_event( + extra_attrs: dict[str, Any] | None = None, + data: dict[str, Any] | str | bytes | None = None, +) -> CloudEvent: + """Helper to create CloudEvent with valid required attributes""" + attrs: dict[str, Any] = { + "type": "com.example.test", + "source": "/test", + "id": "test-id-123", + "specversion": "1.0", + } + if extra_attrs: + attrs.update(extra_attrs) + return CloudEvent(attributes=attrs, data=data) + + +def test_amqp_message_creation() -> None: + """Test basic AMQPMessage creation""" + message = AMQPMessage( + properties={"content-type": "application/json"}, + 
application_properties={"cloudEvents_type": "test"}, + application_data=b"test", + ) + assert message.properties == {"content-type": "application/json"} + assert message.application_properties == {"cloudEvents_type": "test"} + assert message.application_data == b"test" + + +def test_amqp_message_immutable() -> None: + """Test that AMQPMessage is immutable (frozen dataclass)""" + message = AMQPMessage( + properties={"test": "value"}, + application_properties={}, + application_data=b"data", + ) + + with pytest.raises(Exception): # FrozenInstanceError + message.properties = {"new": "dict"} + + with pytest.raises(Exception): # FrozenInstanceError + message.application_properties = {"new": "dict"} + + with pytest.raises(Exception): # FrozenInstanceError + message.application_data = b"new data" + + +def test_amqp_message_with_empty_properties() -> None: + """Test AMQPMessage with empty properties""" + message = AMQPMessage( + properties={}, application_properties={}, application_data=b"test" + ) + assert message.properties == {} + assert message.application_properties == {} + assert message.application_data == b"test" + + +def test_amqp_message_with_empty_application_data() -> None: + """Test AMQPMessage with empty application data""" + message = AMQPMessage( + properties={"test": "value"}, application_properties={}, application_data=b"" + ) + assert message.properties == {"test": "value"} + assert message.application_data == b"" + + +def test_to_binary_required_attributes() -> None: + """Test to_binary with only required attributes""" + event = create_event() + message = to_binary(event, JSONFormat()) + + assert "cloudEvents_type" in message.application_properties + assert message.application_properties["cloudEvents_type"] == "com.example.test" + assert message.application_properties["cloudEvents_source"] == "/test" + assert message.application_properties["cloudEvents_id"] == "test-id-123" + assert message.application_properties["cloudEvents_specversion"] == "1.0" + + 
+def test_to_binary_with_optional_attributes() -> None: + """Test to_binary with optional attributes""" + event = create_event( + extra_attrs={ + "subject": "test-subject", + "dataschema": "https://example.com/schema", + } + ) + message = to_binary(event, JSONFormat()) + + assert message.application_properties["cloudEvents_subject"] == "test-subject" + assert ( + message.application_properties["cloudEvents_dataschema"] + == "https://example.com/schema" + ) + + +def test_to_binary_with_extensions() -> None: + """Test to_binary with custom extension attributes""" + event = create_event(extra_attrs={"customext": "custom-value"}) + message = to_binary(event, JSONFormat()) + + assert message.application_properties["cloudEvents_customext"] == "custom-value" + + +def test_to_binary_datetime_as_timestamp() -> None: + """Test to_binary converts datetime to AMQP timestamp (milliseconds since epoch)""" + dt = datetime(2023, 1, 15, 10, 30, 45, tzinfo=timezone.utc) + event = create_event(extra_attrs={"time": dt}) + message = to_binary(event, JSONFormat()) + + # Should be serialized as AMQP timestamp (milliseconds since epoch) + expected_timestamp = int(dt.timestamp() * 1000) # 1673781045000 + assert message.application_properties["cloudEvents_time"] == expected_timestamp + assert isinstance(message.application_properties["cloudEvents_time"], int) + + +def test_to_binary_boolean_as_boolean() -> None: + """Test to_binary preserves boolean type (not converted to string)""" + event = create_event(extra_attrs={"boolext": True}) + message = to_binary(event, JSONFormat()) + + # Should be native boolean, not string "true" or "True" + assert message.application_properties["cloudEvents_boolext"] is True + assert isinstance(message.application_properties["cloudEvents_boolext"], bool) + + +def test_to_binary_integer_as_long() -> None: + """Test to_binary preserves integer type (not converted to string)""" + event = create_event(extra_attrs={"intext": 42}) + message = to_binary(event, 
JSONFormat()) + + # Should be native int/long, not string "42" + assert message.application_properties["cloudEvents_intext"] == 42 + assert isinstance(message.application_properties["cloudEvents_intext"], int) + + +def test_to_binary_datacontenttype_mapping() -> None: + """Test datacontenttype maps to AMQP content-type property""" + event = create_event( + extra_attrs={"datacontenttype": "application/json"}, data={"key": "value"} + ) + message = to_binary(event, JSONFormat()) + + # datacontenttype should go to properties, not application_properties + assert message.properties["content-type"] == "application/json" + assert "cloudEvents_datacontenttype" not in message.application_properties + + +def test_to_binary_with_json_data() -> None: + """Test to_binary with JSON dict data""" + event = create_event( + extra_attrs={"datacontenttype": "application/json"}, + data={"message": "Hello", "count": 42}, + ) + message = to_binary(event, JSONFormat()) + + # JSON serialization may vary in formatting, so check it can be parsed back + import json + + parsed = json.loads(message.application_data) + assert parsed == {"message": "Hello", "count": 42} + + +def test_to_binary_with_string_data() -> None: + """Test to_binary with string data""" + event = create_event(data="Hello World") + message = to_binary(event, JSONFormat()) + + # String data should be serialized + assert b"Hello World" in message.application_data + + +def test_to_binary_with_bytes_data() -> None: + """Test to_binary with bytes data""" + binary_data = b"\x00\x01\x02\x03" + event = create_event(data=binary_data) + message = to_binary(event, JSONFormat()) + + # Bytes should be preserved in application_data + assert len(message.application_data) > 0 + + +def test_to_binary_with_none_data() -> None: + """Test to_binary with None data""" + event = create_event(data=None) + message = to_binary(event, JSONFormat()) + + # None data should result in empty or null serialization + assert message.application_data is not 
None # Should be bytes + + +def test_from_binary_required_attributes() -> None: + """Test from_binary extracts required attributes""" + message = AMQPMessage( + properties={}, + application_properties={ + "cloudEvents_type": "com.example.test", + "cloudEvents_source": "/test", + "cloudEvents_id": "123", + "cloudEvents_specversion": "1.0", + }, + application_data=b"{}", + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + assert event.get_type() == "com.example.test" + assert event.get_source() == "/test" + assert event.get_id() == "123" + assert event.get_specversion() == "1.0" + + +def test_from_binary_with_timestamp_property() -> None: + """Test from_binary parses AMQP timestamp (int milliseconds) to datetime""" + dt = datetime(2023, 1, 15, 10, 30, 45, tzinfo=timezone.utc) + timestamp_ms = int(dt.timestamp() * 1000) # 1673781045000 + + message = AMQPMessage( + properties={}, + application_properties={ + "cloudEvents_type": "test", + "cloudEvents_source": "/test", + "cloudEvents_id": "123", + "cloudEvents_specversion": "1.0", + "cloudEvents_time": timestamp_ms, # AMQP timestamp as int + }, + application_data=b"{}", + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + assert event.get_time() == dt + assert isinstance(event.get_time(), datetime) + + +def test_from_binary_with_timestamp_string() -> None: + """Test from_binary also accepts ISO 8601 string (canonical form per spec)""" + dt = datetime(2023, 1, 15, 10, 30, 45, tzinfo=timezone.utc) + + message = AMQPMessage( + properties={}, + application_properties={ + "cloudEvents_type": "test", + "cloudEvents_source": "/test", + "cloudEvents_id": "123", + "cloudEvents_specversion": "1.0", + "cloudEvents_time": "2023-01-15T10:30:45Z", # ISO 8601 string (also valid) + }, + application_data=b"{}", + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + assert event.get_time() == dt + assert isinstance(event.get_time(), datetime) + + +def test_from_binary_with_boolean_property() -> None: + 
"""Test from_binary preserves boolean type""" + message = AMQPMessage( + properties={}, + application_properties={ + "cloudEvents_type": "test", + "cloudEvents_source": "/test", + "cloudEvents_id": "123", + "cloudEvents_specversion": "1.0", + "cloudEvents_boolext": True, + }, + application_data=b"{}", + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + assert event.get_extension("boolext") is True + assert isinstance(event.get_extension("boolext"), bool) + + +def test_from_binary_with_long_property() -> None: + """Test from_binary preserves integer/long type""" + message = AMQPMessage( + properties={}, + application_properties={ + "cloudEvents_type": "test", + "cloudEvents_source": "/test", + "cloudEvents_id": "123", + "cloudEvents_specversion": "1.0", + "cloudEvents_intext": 42, + }, + application_data=b"{}", + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + assert event.get_extension("intext") == 42 + assert isinstance(event.get_extension("intext"), int) + + +def test_from_binary_with_json_data() -> None: + """Test from_binary with JSON data""" + message = AMQPMessage( + properties={"content-type": "application/json"}, + application_properties={ + "cloudEvents_type": "test", + "cloudEvents_source": "/test", + "cloudEvents_id": "123", + "cloudEvents_specversion": "1.0", + }, + application_data=b'{"message": "Hello"}', + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + assert event.get_data() == {"message": "Hello"} + assert event.get_datacontenttype() == "application/json" + + +def test_from_binary_with_text_data() -> None: + """Test from_binary with text data""" + message = AMQPMessage( + properties={"content-type": "text/plain"}, + application_properties={ + "cloudEvents_type": "test", + "cloudEvents_source": "/test", + "cloudEvents_id": "123", + "cloudEvents_specversion": "1.0", + }, + application_data=b"Hello World", + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + # JSONFormat will decode as UTF-8 
string for non-JSON content types + assert event.get_data() == "Hello World" + + +def test_from_binary_with_bytes_data() -> None: + """Test from_binary with binary data""" + binary_data = b"\x00\x01\x02\x03" + message = AMQPMessage( + properties={"content-type": "application/octet-stream"}, + application_properties={ + "cloudEvents_type": "test", + "cloudEvents_source": "/test", + "cloudEvents_id": "123", + "cloudEvents_specversion": "1.0", + }, + application_data=binary_data, + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + # Binary data should be preserved + assert isinstance(event.get_data(), (bytes, str)) + + +def test_binary_round_trip() -> None: + """Test binary mode round-trip preserves event data""" + original = create_event( + extra_attrs={"subject": "test-subject", "datacontenttype": "application/json"}, + data={"message": "Hello", "count": 42}, + ) + + message = to_binary(original, JSONFormat()) + recovered = from_binary(message, JSONFormat(), CloudEvent) + + assert recovered.get_type() == original.get_type() + assert recovered.get_source() == original.get_source() + assert recovered.get_id() == original.get_id() + assert recovered.get_specversion() == original.get_specversion() + assert recovered.get_subject() == original.get_subject() + assert recovered.get_data() == original.get_data() + + +def test_binary_preserves_types() -> None: + """Test binary mode preserves native types (bool, int, datetime)""" + dt = datetime(2023, 1, 15, 10, 30, 45, tzinfo=timezone.utc) + original = create_event( + extra_attrs={"time": dt, "boolext": True, "intext": 42, "strext": "value"} + ) + + message = to_binary(original, JSONFormat()) + recovered = from_binary(message, JSONFormat(), CloudEvent) + + # Types should be preserved + assert recovered.get_time() == dt + assert isinstance(recovered.get_time(), datetime) + assert recovered.get_extension("boolext") is True + assert isinstance(recovered.get_extension("boolext"), bool) + assert 
recovered.get_extension("intext") == 42 + assert isinstance(recovered.get_extension("intext"), int) + assert recovered.get_extension("strext") == "value" + + +def test_structured_round_trip() -> None: + """Test structured mode round-trip preserves event data""" + original = create_event( + extra_attrs={"subject": "test-subject", "datacontenttype": "application/json"}, + data={"message": "Hello", "count": 42}, + ) + + message = to_structured(original, JSONFormat()) + recovered = from_structured(message, JSONFormat(), CloudEvent) + + assert recovered.get_type() == original.get_type() + assert recovered.get_source() == original.get_source() + assert recovered.get_id() == original.get_id() + assert recovered.get_specversion() == original.get_specversion() + assert recovered.get_subject() == original.get_subject() + assert recovered.get_data() == original.get_data() + + +def test_to_structured_basic_event() -> None: + """Test to_structured with basic event""" + event = create_event(data={"message": "Hello"}) + message = to_structured(event, JSONFormat()) + + # Should have content-type in properties + assert message.properties["content-type"] == "application/cloudevents+json" + + # application_data should contain the complete event + assert b"com.example.test" in message.application_data + assert b"message" in message.application_data + + +def test_to_structured_content_type_header() -> None: + """Test to_structured sets correct content-type""" + event = create_event() + message = to_structured(event, JSONFormat()) + + assert "content-type" in message.properties + assert message.properties["content-type"] == "application/cloudevents+json" + + +def test_to_structured_with_all_attributes() -> None: + """Test to_structured includes all attributes in serialized form""" + dt = datetime(2023, 1, 15, 10, 30, 45, tzinfo=timezone.utc) + event = create_event( + extra_attrs={ + "time": dt, + "subject": "test-subject", + "dataschema": "https://example.com/schema", + "customext": 
"custom-value", + }, + data={"message": "Hello"}, + ) + message = to_structured(event, JSONFormat()) + + # All attributes should be in the serialized data + assert b"test-subject" in message.application_data + assert b"customext" in message.application_data + + +def test_from_structured_basic_event() -> None: + """Test from_structured parses complete event""" + message = AMQPMessage( + properties={"content-type": "application/cloudevents+json"}, + application_properties={}, + application_data=b'{"type": "com.example.test", "source": "/test", ' + b'"id": "123", "specversion": "1.0", "data": {"message": "Hello"}}', + ) + event = from_structured(message, JSONFormat(), CloudEvent) + + assert event.get_type() == "com.example.test" + assert event.get_source() == "/test" + assert event.get_id() == "123" + assert event.get_data() == {"message": "Hello"} + + +def test_from_amqp_detects_binary_mode() -> None: + """Test from_amqp detects binary mode""" + message = AMQPMessage( + properties={"content-type": "application/json"}, + application_properties={ + "cloudEvents_type": "test", + "cloudEvents_source": "/test", + "cloudEvents_id": "123", + "cloudEvents_specversion": "1.0", + }, + application_data=b'{"message": "Hello"}', + ) + event = from_amqp(message, JSONFormat(), CloudEvent) + + assert event.get_type() == "test" + assert event.get_data() == {"message": "Hello"} + + +def test_from_amqp_detects_structured_mode() -> None: + """Test from_amqp detects structured mode""" + message = AMQPMessage( + properties={"content-type": "application/cloudevents+json"}, + application_properties={}, + application_data=b'{"type": "com.example.test", "source": "/test", ' + b'"id": "123", "specversion": "1.0"}', + ) + event = from_amqp(message, JSONFormat(), CloudEvent) + + assert event.get_type() == "com.example.test" + assert event.get_source() == "/test" + + +def test_from_amqp_case_insensitive_detection() -> None: + """Test from_amqp detection is case-insensitive""" + # Uppercase 
CLOUDEVENTS + message = AMQPMessage( + properties={"content-type": "application/CLOUDEVENTS+json"}, + application_properties={}, + application_data=b'{"type": "com.example.test", "source": "/test", ' + b'"id": "123", "specversion": "1.0"}', + ) + event = from_amqp(message, JSONFormat(), CloudEvent) + + assert event.get_type() == "com.example.test" + + +def test_from_amqp_defaults_to_binary_when_no_content_type() -> None: + """Test from_amqp defaults to binary mode when content-type is missing""" + message = AMQPMessage( + properties={}, # No content-type + application_properties={ + "cloudEvents_type": "test", + "cloudEvents_source": "/test", + "cloudEvents_id": "123", + "cloudEvents_specversion": "1.0", + }, + application_data=b"{}", + ) + event = from_amqp(message, JSONFormat(), CloudEvent) + + # Should successfully parse as binary mode + assert event.get_type() == "test" + + +def test_unicode_in_attributes() -> None: + """Test handling of unicode characters in attributes""" + event = create_event(extra_attrs={"subject": "测试-subject-🌍"}) + message = to_binary(event, JSONFormat()) + recovered = from_binary(message, JSONFormat(), CloudEvent) + + assert recovered.get_subject() == "测试-subject-🌍" + + +def test_unicode_in_data() -> None: + """Test handling of unicode characters in data""" + event = create_event(data={"message": "Hello 世界 🌍"}) + message = to_binary(event, JSONFormat()) + recovered = from_binary(message, JSONFormat(), CloudEvent) + + # Data should be preserved, whether as dict or string representation + data = recovered.get_data() + if isinstance(data, dict): + assert data == {"message": "Hello 世界 🌍"} + else: + assert "Hello 世界 🌍" in str(data) + + +def test_datetime_utc_handling() -> None: + """Test datetime with UTC timezone""" + dt_utc = datetime(2023, 1, 15, 10, 30, 45, tzinfo=timezone.utc) + event = create_event(extra_attrs={"time": dt_utc}) + message = to_binary(event, JSONFormat()) + recovered = from_binary(message, JSONFormat(), CloudEvent) + + 
assert recovered.get_time() == dt_utc + + +def test_datetime_non_utc_handling() -> None: + """Test datetime with non-UTC timezone""" + from datetime import timedelta + + # Create a custom timezone (UTC+5) + custom_tz = timezone(timedelta(hours=5)) + dt_custom = datetime(2023, 1, 15, 10, 30, 45, tzinfo=custom_tz) + + event = create_event(extra_attrs={"time": dt_custom}) + message = to_binary(event, JSONFormat()) + recovered = from_binary(message, JSONFormat(), CloudEvent) + + # Datetime should be preserved + assert recovered.get_time() == dt_custom + + +def test_empty_application_properties() -> None: + """Test message with no application properties (structured mode)""" + message = AMQPMessage( + properties={"content-type": "application/cloudevents+json"}, + application_properties={}, + application_data=b'{"type": "test", "source": "/test", "id": "123", ' + b'"specversion": "1.0"}', + ) + event = from_structured(message, JSONFormat(), CloudEvent) + + assert event.get_type() == "test" + + +def test_to_binary_with_multiple_extensions() -> None: + """Test to_binary with multiple custom extensions""" + event = create_event( + extra_attrs={ + "ext1": "value1", + "ext2": "value2", + "ext3": 123, + "ext4": True, + } + ) + message = to_binary(event, JSONFormat()) + + assert message.application_properties["cloudEvents_ext1"] == "value1" + assert message.application_properties["cloudEvents_ext2"] == "value2" + assert message.application_properties["cloudEvents_ext3"] == 123 + assert message.application_properties["cloudEvents_ext4"] is True + + +def test_from_binary_ignores_non_cloudevents_properties() -> None: + """Test from_binary only extracts cloudEvents_ prefixed properties""" + message = AMQPMessage( + properties={}, + application_properties={ + "cloudEvents_type": "test", + "cloudEvents_source": "/test", + "cloudEvents_id": "123", + "cloudEvents_specversion": "1.0", + "custom_property": "should-be-ignored", # No cloudEvents_ prefix + "another_prop": "also-ignored", + 
}, + application_data=b"{}", + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + # Only cloudEvents_ prefixed properties should be extracted + assert event.get_type() == "test" + # Non-prefixed properties should not become extensions + # get_extension returns None for missing extensions + assert event.get_extension("custom_property") is None + assert event.get_extension("another_prop") is None + + +def test_from_binary_with_colon_prefix() -> None: + """Test from_binary accepts cloudEvents: prefix per AMQP spec""" + message = AMQPMessage( + properties={"content-type": "application/json"}, + application_properties={ + "cloudEvents:type": "com.example.test", + "cloudEvents:source": "/test", + "cloudEvents:id": "test-123", + "cloudEvents:specversion": "1.0", + }, + application_data=b'{"message": "Hello"}', + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + assert event.get_type() == "com.example.test" + assert event.get_source() == "/test" + assert event.get_id() == "test-123" + assert event.get_specversion() == "1.0" + assert event.get_data() == {"message": "Hello"} + + +def test_from_binary_colon_prefix_with_extensions() -> None: + """Test from_binary with cloudEvents: prefix handles extensions""" + message = AMQPMessage( + properties={}, + application_properties={ + "cloudEvents:type": "test", + "cloudEvents:source": "/test", + "cloudEvents:id": "123", + "cloudEvents:specversion": "1.0", + "cloudEvents:customext": "custom-value", + "cloudEvents:boolext": True, + "cloudEvents:intext": 42, + }, + application_data=b"{}", + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + assert event.get_extension("customext") == "custom-value" + assert event.get_extension("boolext") is True + assert event.get_extension("intext") == 42 + + +def test_from_binary_colon_prefix_with_datetime() -> None: + """Test from_binary with cloudEvents: prefix handles datetime""" + dt = datetime(2023, 1, 15, 10, 30, 45, tzinfo=timezone.utc) + timestamp_ms = 
int(dt.timestamp() * 1000) + + message = AMQPMessage( + properties={}, + application_properties={ + "cloudEvents:type": "test", + "cloudEvents:source": "/test", + "cloudEvents:id": "123", + "cloudEvents:specversion": "1.0", + "cloudEvents:time": timestamp_ms, # AMQP timestamp + }, + application_data=b"{}", + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + assert event.get_time() == dt + + +def test_from_binary_colon_prefix_round_trip() -> None: + """Test round-trip with cloudEvents: prefix (manual construction)""" + # Create event with underscore prefix + original_event = create_event( + extra_attrs={"customext": "value", "datacontenttype": "application/json"}, + data={"message": "test"}, + ) + message_underscore = to_binary(original_event, JSONFormat()) + + # Manually construct message with colon prefix (simulate receiving from another system) + message_colon = AMQPMessage( + properties=message_underscore.properties, + application_properties={ + # Convert underscore to colon prefix + key.replace("cloudEvents_", "cloudEvents:"): value + for key, value in message_underscore.application_properties.items() + }, + application_data=message_underscore.application_data, + ) + + # Should parse correctly + recovered = from_binary(message_colon, JSONFormat(), CloudEvent) + + assert recovered.get_type() == original_event.get_type() + assert recovered.get_source() == original_event.get_source() + assert recovered.get_extension("customext") == "value" + assert recovered.get_data() == {"message": "test"} + + +def test_from_binary_mixed_prefixes_accepted() -> None: + """Test from_binary accepts mixed cloudEvents_ and cloudEvents: prefixes""" + message = AMQPMessage( + properties={}, + application_properties={ + "cloudEvents_type": "test", # Underscore + "cloudEvents:source": "/test", # Colon - mixed is OK + "cloudEvents_id": "123", + "cloudEvents_specversion": "1.0", + }, + application_data=b"{}", + ) + + event = from_binary(message, JSONFormat(), CloudEvent) + + # 
Should extract all attributes regardless of prefix + assert event.get_type() == "test" + assert event.get_source() == "/test" + assert event.get_id() == "123" + assert event.get_specversion() == "1.0" + + +def test_from_amqp_with_colon_prefix_binary_mode() -> None: + """Test from_amqp detects binary mode with cloudEvents: prefix""" + message = AMQPMessage( + properties={"content-type": "application/json"}, + application_properties={ + "cloudEvents:type": "test", + "cloudEvents:source": "/test", + "cloudEvents:id": "123", + "cloudEvents:specversion": "1.0", + }, + application_data=b'{"data": "value"}', + ) + + event = from_amqp(message, JSONFormat(), CloudEvent) + + assert event.get_type() == "test" + assert event.get_source() == "/test" + assert event.get_data() == {"data": "value"} + + +def test_from_amqp_mixed_prefixes_accepted() -> None: + """Test from_amqp accepts mixed prefixes""" + message = AMQPMessage( + properties={"content-type": "application/json"}, + application_properties={ + "cloudEvents_type": "test", + "cloudEvents:source": "/test", # Mixed is OK + "cloudEvents_id": "123", + "cloudEvents_specversion": "1.0", + }, + application_data=b"{}", + ) + + event = from_amqp(message, JSONFormat(), CloudEvent) + + assert event.get_type() == "test" + assert event.get_source() == "/test" + + +def test_from_binary_all_underscore_prefix_valid() -> None: + """Test from_binary accepts all cloudEvents_ prefix (baseline)""" + message = AMQPMessage( + properties={}, + application_properties={ + "cloudEvents_type": "test", + "cloudEvents_source": "/test", + "cloudEvents_id": "123", + "cloudEvents_specversion": "1.0", + }, + application_data=b"{}", + ) + + event = from_binary(message, JSONFormat(), CloudEvent) + assert event.get_type() == "test" + + +def test_from_binary_all_colon_prefix_valid() -> None: + """Test from_binary accepts all cloudEvents: prefix""" + message = AMQPMessage( + properties={}, + application_properties={ + "cloudEvents:type": "test", + 
"cloudEvents:source": "/test", + "cloudEvents:id": "123", + "cloudEvents:specversion": "1.0", + }, + application_data=b"{}", + ) + + event = from_binary(message, JSONFormat(), CloudEvent) + assert event.get_type() == "test" + + +def test_from_binary_colon_prefix_ignores_non_ce_properties() -> None: + """Test from_binary with colon prefix ignores non-CloudEvents properties""" + message = AMQPMessage( + properties={}, + application_properties={ + "cloudEvents:type": "test", + "cloudEvents:source": "/test", + "cloudEvents:id": "123", + "cloudEvents:specversion": "1.0", + "customProperty": "ignored", # No prefix + "anotherProp": 123, + }, + application_data=b"{}", + ) + + event = from_binary(message, JSONFormat(), CloudEvent) + + assert event.get_type() == "test" + assert event.get_extension("customProperty") is None + assert event.get_extension("anotherProp") is None diff --git a/tests/test_core/test_bindings/test_http.py b/tests/test_core/test_bindings/test_http.py new file mode 100644 index 00000000..cb5b5600 --- /dev/null +++ b/tests/test_core/test_bindings/test_http.py @@ -0,0 +1,1125 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from datetime import datetime, timezone +from typing import Any + +import pytest + +from cloudevents.core.bindings.http import ( + HTTPMessage, + from_binary, + from_binary_event, + from_http, + from_http_event, + from_structured, + from_structured_event, + to_binary, + to_binary_event, + to_structured, + to_structured_event, +) +from cloudevents.core.formats.json import JSONFormat +from cloudevents.core.v1.event import CloudEvent + + +@pytest.fixture +def minimal_attributes() -> dict[str, str]: + """Minimal valid CloudEvent attributes""" + return { + "type": "com.example.test", + "source": "/test", + "id": "test-id-123", + "specversion": "1.0", + } + + +def create_event( + extra_attrs: dict[str, Any] | None = None, + data: dict[str, Any] | str | bytes | None = None, +) -> CloudEvent: + """Helper to create CloudEvent with valid required attributes""" + attrs: dict[str, Any] = { + "type": "com.example.test", + "source": "/test", + "id": "test-id-123", + "specversion": "1.0", + } + if extra_attrs: + attrs.update(extra_attrs) + return CloudEvent(attributes=attrs, data=data) + + +def test_http_message_creation() -> None: + """Test basic HTTPMessage creation""" + message = HTTPMessage(headers={"content-type": "application/json"}, body=b"test") + assert message.headers == {"content-type": "application/json"} + assert message.body == b"test" + + +def test_http_message_immutable() -> None: + """Test that HTTPMessage is immutable (frozen dataclass)""" + message = HTTPMessage(headers={"test": "value"}, body=b"data") + + with pytest.raises(Exception): # FrozenInstanceError + message.headers = {"new": "dict"} + + with pytest.raises(Exception): # FrozenInstanceError + message.body = b"new data" + + +def test_http_message_with_empty_headers() -> None: + """Test HTTPMessage with empty headers""" + message = HTTPMessage(headers={}, body=b"test") + assert message.headers == {} + assert message.body == b"test" + + +def test_http_message_with_empty_body() -> None: + """Test 
HTTPMessage with empty body""" + message = HTTPMessage(headers={"test": "value"}, body=b"") + assert message.headers == {"test": "value"} + assert message.body == b"" + + +def test_http_message_equality() -> None: + """Test HTTPMessage equality comparison""" + msg1 = HTTPMessage(headers={"test": "value"}, body=b"data") + msg2 = HTTPMessage(headers={"test": "value"}, body=b"data") + msg3 = HTTPMessage(headers={"other": "value"}, body=b"data") + + assert msg1 == msg2 + assert msg1 != msg3 + + +def test_to_binary_returns_http_message() -> None: + """Test that to_binary returns an HTTPMessage instance""" + event = create_event() + message = to_binary(event, JSONFormat()) + assert isinstance(message, HTTPMessage) + + +def test_to_binary_required_attributes() -> None: + """Test to_binary with only required attributes""" + event = create_event() + message = to_binary(event, JSONFormat()) + + assert "ce-type" in message.headers + assert message.headers["ce-type"] == "com.example.test" + assert "ce-source" in message.headers + assert message.headers["ce-source"] == "%2Ftest" # Forward slash is percent-encoded + assert "ce-id" in message.headers + assert message.headers["ce-id"] == "test-id-123" + assert "ce-specversion" in message.headers + assert message.headers["ce-specversion"] == "1.0" + + +def test_to_binary_with_optional_attributes() -> None: + """Test to_binary with optional attributes""" + event = create_event( + {"subject": "test-subject", "dataschema": "https://example.com/schema"}, + data=None, + ) + message = to_binary(event, JSONFormat()) + + assert message.headers["ce-subject"] == "test-subject" + # All special characters including : and / are percent-encoded + assert message.headers["ce-dataschema"] == "https%3A%2F%2Fexample.com%2Fschema" + + +def test_to_binary_with_extensions() -> None: + """Test to_binary with extension attributes""" + event = create_event( + {"customext": "custom-value", "anotherext": "another-value"}, + data=None, + ) + message = 
to_binary(event, JSONFormat()) + + assert message.headers["ce-customext"] == "custom-value" + assert message.headers["ce-anotherext"] == "another-value" + + +def test_to_binary_with_json_data() -> None: + """Test to_binary with dict (JSON) data""" + event = create_event( + {"datacontenttype": "application/json"}, + data={"message": "Hello", "count": 42}, + ) + message = to_binary(event, JSONFormat()) + + assert message.body == b'{"message": "Hello", "count": 42}' + assert message.headers["content-type"] == "application/json" + + +def test_to_binary_with_string_data() -> None: + """Test to_binary with string data""" + event = create_event( + {"datacontenttype": "text/plain"}, + data="Hello World", + ) + message = to_binary(event, JSONFormat()) + + assert message.body == b"Hello World" + assert message.headers["content-type"] == "text/plain" + + +def test_to_binary_with_bytes_data() -> None: + """Test to_binary with bytes data""" + event = create_event( + {"datacontenttype": "application/octet-stream"}, + data=b"\x00\x01\x02\x03", + ) + message = to_binary(event, JSONFormat()) + + assert message.body == b"\x00\x01\x02\x03" + assert message.headers["content-type"] == "application/octet-stream" + + +def test_to_binary_with_none_data() -> None: + """Test to_binary with None data""" + event = create_event() + message = to_binary(event, JSONFormat()) + + assert message.body == b"" + + +def test_to_binary_datetime_encoding() -> None: + """Test to_binary with datetime (time attribute)""" + dt = datetime(2023, 1, 15, 10, 30, 45, tzinfo=timezone.utc) + event = create_event( + {"time": dt}, + data=None, + ) + message = to_binary(event, JSONFormat()) + + # Should encode with 'Z' suffix for UTC + assert "ce-time" in message.headers + assert "2023-01-15T10%3A30%3A45Z" in message.headers["ce-time"] + + +def test_to_binary_special_characters() -> None: + """Test to_binary with special characters in attributes""" + event = create_event( + {"subject": "Hello World!"}, + data=None, + 
) + message = to_binary(event, JSONFormat()) + + # Should be percent-encoded + assert "ce-subject" in message.headers + # Space becomes %20, ! becomes %21 + assert "Hello%20World%21" == message.headers["ce-subject"] + + +def test_to_binary_datacontenttype_mapping() -> None: + """Test that datacontenttype maps to Content-Type header""" + event = create_event( + {"datacontenttype": "application/xml"}, + data=None, + ) + message = to_binary(event, JSONFormat()) + + assert "content-type" in message.headers + assert message.headers["content-type"] == "application/xml" + + +def test_to_binary_no_ce_prefix_on_content_type() -> None: + """Test that Content-Type header does not have ce- prefix""" + event = create_event( + {"datacontenttype": "application/json"}, + data={"test": "data"}, + ) + message = to_binary(event, JSONFormat()) + + assert "content-type" in message.headers + assert "ce-datacontenttype" not in message.headers + + +def test_to_binary_header_encoding() -> None: + """Test percent encoding in headers""" + event = create_event( + {"subject": "test with spaces and special: chars"}, + data=None, + ) + message = to_binary(event, JSONFormat()) + + # Should be percent-encoded + encoded_subject = message.headers["ce-subject"] + assert " " not in encoded_subject # Spaces should be encoded + assert "%20" in encoded_subject # Encoded space + assert "%3A" in encoded_subject # Encoded colon + + +def test_from_binary_accepts_http_message() -> None: + """Test that from_binary accepts HTTPMessage parameter""" + message = HTTPMessage( + headers={ + "ce-type": "com.example.test", + "ce-source": "/test", + "ce-id": "test-id", + "ce-specversion": "1.0", + }, + body=b"", + ) + event = from_binary(message, JSONFormat(), CloudEvent) + assert event.get_type() == "com.example.test" + + +def test_from_binary_required_attributes() -> None: + """Test from_binary parsing required attributes""" + message = HTTPMessage( + headers={ + "ce-type": "com.example.test", + "ce-source": "/test", 
+ "ce-id": "test-123", + "ce-specversion": "1.0", + }, + body=b"", + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + assert event.get_type() == "com.example.test" + assert event.get_source() == "/test" + assert event.get_id() == "test-123" + assert event.get_specversion() == "1.0" + + +def test_from_binary_with_optional_attributes() -> None: + """Test from_binary with optional attributes""" + message = HTTPMessage( + headers={ + "ce-type": "com.example.test", + "ce-source": "/test", + "ce-id": "test-123", + "ce-specversion": "1.0", + "ce-subject": "test-subject", + "ce-dataschema": "https://example.com/schema", + }, + body=b"", + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + assert event.get_subject() == "test-subject" + assert event.get_dataschema() == "https://example.com/schema" + + +def test_from_binary_with_extensions() -> None: + """Test from_binary with extension attributes""" + message = HTTPMessage( + headers={ + "ce-type": "com.example.test", + "ce-source": "/test", + "ce-id": "test-123", + "ce-specversion": "1.0", + "ce-customext": "custom-value", + }, + body=b"", + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + attributes = event.get_attributes() + assert attributes["customext"] == "custom-value" + + +def test_from_binary_with_json_data() -> None: + """Test from_binary with JSON body""" + message = HTTPMessage( + headers={ + "ce-type": "com.example.test", + "ce-source": "/test", + "ce-id": "test-123", + "ce-specversion": "1.0", + "content-type": "application/json", + }, + body=b'{"message": "Hello", "count": 42}', + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + data = event.get_data() + assert isinstance(data, dict) + assert data["message"] == "Hello" + assert data["count"] == 42 + + +def test_from_binary_with_text_data() -> None: + """Test from_binary with text body""" + message = HTTPMessage( + headers={ + "ce-type": "com.example.test", + "ce-source": "/test", + "ce-id": "test-123", + 
"ce-specversion": "1.0", + "content-type": "text/plain", + }, + body=b"Hello World", + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + data = event.get_data() + assert data == "Hello World" + + +def test_from_binary_with_bytes_data() -> None: + """Test from_binary with binary body""" + # Use bytes that are NOT valid UTF-8 to test binary handling + message = HTTPMessage( + headers={ + "ce-type": "com.example.test", + "ce-source": "/test", + "ce-id": "test-123", + "ce-specversion": "1.0", + "content-type": "application/octet-stream", + }, + body=b"\xff\xfe\xfd\xfc", # Invalid UTF-8 bytes + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + data = event.get_data() + # For non-UTF8 data, should remain as bytes + assert isinstance(data, bytes) + assert data == b"\xff\xfe\xfd\xfc" + + +def test_from_binary_datetime_parsing() -> None: + """Test from_binary parsing time attribute""" + message = HTTPMessage( + headers={ + "ce-type": "com.example.test", + "ce-source": "/test", + "ce-id": "test-123", + "ce-specversion": "1.0", + "ce-time": "2023-01-15T10%3A30%3A45Z", + }, + body=b"", + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + time = event.get_time() + assert isinstance(time, datetime) + assert time.year == 2023 + assert time.month == 1 + assert time.day == 15 + + +def test_from_binary_header_decoding() -> None: + """Test percent decoding of headers""" + message = HTTPMessage( + headers={ + "ce-type": "com.example.test", + "ce-source": "/test", + "ce-id": "test-123", + "ce-specversion": "1.0", + "ce-subject": "Hello%20World%21", + }, + body=b"", + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + # Should be percent-decoded + assert event.get_subject() == "Hello World!" 
+ + +def test_from_binary_case_insensitive_headers() -> None: + """Test that header parsing is case-insensitive""" + message = HTTPMessage( + headers={ + "CE-Type": "com.example.test", + "Ce-Source": "/test", + "ce-ID": "test-123", + "CE-SPECVERSION": "1.0", + "Content-Type": "application/json", + }, + body=b'{"test": "data"}', + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + assert event.get_type() == "com.example.test" + assert event.get_source() == "/test" + + +def test_from_binary_content_type_as_datacontenttype() -> None: + """Test that Content-Type header becomes datacontenttype attribute""" + message = HTTPMessage( + headers={ + "ce-type": "com.example.test", + "ce-source": "/test", + "ce-id": "test-123", + "ce-specversion": "1.0", + "content-type": "application/xml", + }, + body=b"data", + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + assert event.get_datacontenttype() == "application/xml" + + +def test_from_binary_round_trip() -> None: + """Test that to_binary followed by from_binary preserves the event""" + original = create_event( + {"subject": "round-trip", "datacontenttype": "application/json"}, + data={"message": "Hello", "value": 123}, + ) + + # Convert to binary + message = to_binary(original, JSONFormat()) + + # Parse back + parsed = from_binary(message, JSONFormat(), CloudEvent) + + # Verify attributes + assert parsed.get_type() == original.get_type() + assert parsed.get_source() == original.get_source() + assert parsed.get_subject() == original.get_subject() + assert parsed.get_datacontenttype() == original.get_datacontenttype() + + # Verify data + assert parsed.get_data() == original.get_data() + + +def test_to_structured_returns_http_message() -> None: + """Test that to_structured returns an HTTPMessage instance""" + event = create_event() + message = to_structured(event, JSONFormat()) + assert isinstance(message, HTTPMessage) + + +def test_to_structured_basic_event() -> None: + """Test to_structured with basic 
event""" + event = create_event() + message = to_structured(event, JSONFormat()) + + # Should have JSON CloudEvents content type + assert message.headers["content-type"] == "application/cloudevents+json" + + # Body should contain serialized event + assert b'"type"' in message.body + assert b'"source"' in message.body + assert b"com.example.test" in message.body + + +def test_to_structured_content_type_header() -> None: + """Test that to_structured sets correct Content-Type header""" + event = create_event() + message = to_structured(event, JSONFormat()) + + assert "content-type" in message.headers + assert message.headers["content-type"] == "application/cloudevents+json" + + +def test_to_structured_with_all_attributes() -> None: + """Test to_structured with all attributes""" + event = create_event( + { + "subject": "test-subject", + "datacontenttype": "application/json", + "dataschema": "https://example.com/schema", + "customext": "custom-value", + }, + data={"message": "Hello"}, + ) + message = to_structured(event, JSONFormat()) + + # All attributes should be in the body + assert b'"type"' in message.body + assert b'"source"' in message.body + assert b'"subject"' in message.body + assert b'"datacontenttype"' in message.body + assert b'"dataschema"' in message.body + assert b'"customext"' in message.body + assert b'"data"' in message.body + + +def test_to_structured_with_binary_data() -> None: + """Test to_structured with binary data""" + event = create_event( + data=b"\x00\x01\x02\x03", + ) + message = to_structured(event, JSONFormat()) + + # Binary data should be base64 encoded in JSON + assert b'"data_base64"' in message.body + assert b'"data"' not in message.body # Should not have 'data' field + + +def test_from_structured_accepts_http_message() -> None: + """Test that from_structured accepts HTTPMessage parameter""" + message = HTTPMessage( + headers={"content-type": "application/cloudevents+json"}, + body=b'{"type": "com.example.test", "source": "/test", 
"id": "123", "specversion": "1.0"}', + ) + event = from_structured(message, JSONFormat(), CloudEvent) + assert event.get_type() == "com.example.test" + + +def test_from_structured_basic_event() -> None: + """Test from_structured with basic event""" + message = HTTPMessage( + headers={"content-type": "application/cloudevents+json"}, + body=b'{"type": "com.example.test", "source": "/test", "id": "123", "specversion": "1.0"}', + ) + event = from_structured(message, JSONFormat(), CloudEvent) + + assert event.get_type() == "com.example.test" + assert event.get_source() == "/test" + assert event.get_id() == "123" + assert event.get_specversion() == "1.0" + + +def test_from_structured_round_trip() -> None: + """Test that to_structured followed by from_structured preserves the event""" + original = create_event( + { + "subject": "round-trip", + "datacontenttype": "application/json", + "customext": "custom-value", + }, + data={"message": "Hello", "value": 123}, + ) + + # Convert to structured + message = to_structured(original, JSONFormat()) + + # Parse back + parsed = from_structured(message, JSONFormat(), CloudEvent) + + # Verify attributes + assert parsed.get_type() == original.get_type() + assert parsed.get_source() == original.get_source() + assert parsed.get_subject() == original.get_subject() + assert parsed.get_datacontenttype() == original.get_datacontenttype() + + # Verify data + assert parsed.get_data() == original.get_data() + + +def test_from_http_accepts_http_message() -> None: + """Test that from_http accepts HTTPMessage parameter""" + message = HTTPMessage( + headers={ + "ce-type": "com.example.test", + "ce-source": "/test", + "ce-id": "123", + "ce-specversion": "1.0", + }, + body=b"", + ) + event = from_http(message, JSONFormat(), CloudEvent) + assert event.get_type() == "com.example.test" + + +def test_from_http_detects_binary_mode() -> None: + """Test that from_http detects binary mode from ce- headers""" + message = HTTPMessage( + headers={ + "ce-type": 
"com.example.test", + "ce-source": "/test", + "ce-id": "123", + "ce-specversion": "1.0", + }, + body=b"test data", + ) + event = from_http(message, JSONFormat(), CloudEvent) + + assert event.get_type() == "com.example.test" + assert event.get_source() == "/test" + + +def test_from_http_detects_structured_mode() -> None: + """Test that from_http detects structured mode when no ce- headers""" + message = HTTPMessage( + headers={"content-type": "application/cloudevents+json"}, + body=b'{"type": "com.example.test", "source": "/test", "id": "123", "specversion": "1.0"}', + ) + event = from_http(message, JSONFormat(), CloudEvent) + + assert event.get_type() == "com.example.test" + assert event.get_source() == "/test" + + +def test_from_http_binary_mode_with_content_type() -> None: + """Test from_http with binary mode and Content-Type""" + message = HTTPMessage( + headers={ + "ce-type": "com.example.test", + "ce-source": "/test", + "ce-id": "123", + "ce-specversion": "1.0", + "content-type": "application/json", + }, + body=b'{"message": "Hello"}', + ) + event = from_http(message, JSONFormat(), CloudEvent) + + # Should detect binary mode due to ce- headers + data = event.get_data() + assert isinstance(data, dict) + assert data["message"] == "Hello" + + +def test_from_http_structured_mode_json() -> None: + """Test from_http with structured JSON event""" + message = HTTPMessage( + headers={"content-type": "application/cloudevents+json"}, + body=b'{"type": "com.example.test", "source": "/test", "id": "123", "specversion": "1.0", "data": {"msg": "Hi"}}', + ) + event = from_http(message, JSONFormat(), CloudEvent) + + assert event.get_type() == "com.example.test" + data = event.get_data() + assert isinstance(data, dict) + assert data["msg"] == "Hi" + + +def test_from_http_defaults_to_structured() -> None: + """Test that from_http defaults to structured mode when ambiguous""" + message = HTTPMessage( + headers={"content-type": "application/json"}, + body=b'{"type": 
"com.example.test", "source": "/test", "id": "123", "specversion": "1.0"}', + ) + event = from_http(message, JSONFormat(), CloudEvent) + + # Should parse as structured mode + assert event.get_type() == "com.example.test" + + +def test_from_http_case_insensitive_detection() -> None: + """Test that from_http detection is case-insensitive""" + message = HTTPMessage( + headers={ + "CE-Type": "com.example.test", + "CE-Source": "/test", + "CE-ID": "123", + "CE-SPECVERSION": "1.0", + }, + body=b"", + ) + event = from_http(message, JSONFormat(), CloudEvent) + + # Should detect binary mode despite mixed case + assert event.get_type() == "com.example.test" + + +def test_from_http_mixed_headers() -> None: + """Test from_http when both ce- headers and structured content are present""" + message = HTTPMessage( + headers={ + "ce-type": "com.example.binary", + "ce-source": "/binary", + "ce-id": "123", + "ce-specversion": "1.0", + "content-type": "application/cloudevents+json", + }, + body=b'{"type": "com.example.structured", "source": "/structured", "id": "456", "specversion": "1.0"}', + ) + event = from_http(message, JSONFormat(), CloudEvent) + + # Binary mode should take precedence (ce- headers present) + assert event.get_type() == "com.example.binary" + assert event.get_source() == "/binary" + + +def test_percent_encoding_special_chars() -> None: + """Test percent encoding of special characters""" + event = create_event( + {"subject": 'Hello World! 
"quotes" & special'}, + data=None, + ) + message = to_binary(event, JSONFormat()) + + # All special chars should be encoded + encoded = message.headers["ce-subject"] + assert " " not in encoded + assert '"' not in encoded + assert "&" not in encoded + + +def test_percent_encoding_unicode() -> None: + """Test percent encoding of unicode characters""" + event = create_event( + {"subject": "Hello 世界 🌍"}, + data=None, + ) + message = to_binary(event, JSONFormat()) + + # Unicode should be percent-encoded + encoded = message.headers["ce-subject"] + assert "世界" not in encoded + assert "🌍" not in encoded + assert "%" in encoded # Should have percent-encoded bytes + + +def test_percent_decoding_round_trip() -> None: + """Test that percent encoding/decoding is reversible""" + original_subject = 'Test: "quotes", spaces & unicode 世界' + event = create_event( + {"subject": original_subject}, + data=None, + ) + + # Encode + message = to_binary(event, JSONFormat()) + + # Decode + parsed = from_binary(message, JSONFormat(), CloudEvent) + + # Should match original + assert parsed.get_subject() == original_subject + + +def test_datetime_encoding_utc() -> None: + """Test datetime encoding for UTC timezone""" + dt_utc = datetime(2023, 6, 15, 14, 30, 45, tzinfo=timezone.utc) + event = create_event( + {"time": dt_utc}, + data=None, + ) + message = to_binary(event, JSONFormat()) + + # Should use 'Z' suffix for UTC + time_header = message.headers["ce-time"] + assert "Z" in time_header or "%5A" in time_header # Z or encoded Z + + +def test_datetime_encoding_non_utc() -> None: + """Test datetime encoding for non-UTC timezone""" + from datetime import timedelta + + # Create timezone +05:30 (IST) + dt_ist = datetime( + 2023, 6, 15, 14, 30, 45, tzinfo=timezone(timedelta(hours=5, minutes=30)) + ) + event = create_event( + {"time": dt_ist}, + data=None, + ) + message = to_binary(event, JSONFormat()) + + # Should preserve timezone offset + time_header = message.headers["ce-time"] + # Will be 
percent-encoded but should contain timezone info + assert "ce-time" in message.headers + + +def test_datetime_parsing_rfc3339() -> None: + """Test parsing various RFC 3339 datetime formats""" + test_cases = [ + "2023-01-15T10:30:45Z", + "2023-01-15T10%3A30%3A45Z", + "2023-01-15T10:30:45.123Z", + "2023-01-15T10:30:45%2B00:00", + ] + + for time_str in test_cases: + message = HTTPMessage( + headers={ + "ce-type": "com.example.test", + "ce-source": "/test", + "ce-id": "123", + "ce-specversion": "1.0", + "ce-time": time_str, + }, + body=b"", + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + # Should successfully parse to datetime + time = event.get_time() + assert isinstance(time, datetime) + + +def test_http_binary_with_json_format() -> None: + """Test complete binary mode flow with JSON format""" + # Create event + event = create_event( + { + "type": "com.example.order.created", + "source": "/orders/service", + "subject": "order-123", + "datacontenttype": "application/json", + }, + data={"orderId": "123", "amount": 99.99, "status": "pending"}, + ) + + # Convert to HTTP binary mode + message = to_binary(event, JSONFormat()) + + # Verify headers + assert message.headers["ce-type"] == "com.example.order.created" + assert message.headers["content-type"] == "application/json" + + # Verify body + assert b'"orderId"' in message.body + assert b'"123"' in message.body + + # Parse back + parsed = from_binary(message, JSONFormat(), CloudEvent) + + # Verify round-trip + assert parsed.get_type() == event.get_type() + assert parsed.get_source() == event.get_source() + parsed_data = parsed.get_data() + assert isinstance(parsed_data, dict) + assert parsed_data["orderId"] == "123" + + +def test_http_structured_with_json_format() -> None: + """Test complete structured mode flow with JSON format""" + # Create event + event = create_event( + { + "type": "com.example.user.registered", + "source": "/users/service", + "datacontenttype": "application/json", + }, + 
data={"userId": "user-456", "email": "test@example.com"}, + ) + + # Convert to HTTP structured mode + message = to_structured(event, JSONFormat()) + + # Verify content type + assert message.headers["content-type"] == "application/cloudevents+json" + + # Verify body contains everything + assert b'"type"' in message.body + assert b'"source"' in message.body + assert b'"data"' in message.body + assert b'"userId"' in message.body + + # Parse back + parsed = from_structured(message, JSONFormat(), CloudEvent) + + # Verify round-trip + assert parsed.get_type() == event.get_type() + assert parsed.get_source() == event.get_source() + parsed_data = parsed.get_data() + assert isinstance(parsed_data, dict) + assert parsed_data["userId"] == "user-456" + + +def test_custom_event_factory() -> None: + """Test using custom event factory function""" + + def custom_factory( + attributes: dict[str, Any], data: dict[str, Any] | str | bytes | None + ) -> CloudEvent: + # Custom factory that adds a prefix to the type + attributes["type"] = f"custom.{attributes.get('type', 'unknown')}" + return CloudEvent(attributes, data) + + message = HTTPMessage( + headers={ + "ce-type": "test.event", + "ce-source": "/test", + "ce-id": "123", + "ce-specversion": "1.0", + }, + body=b"", + ) + + event = from_binary(message, JSONFormat(), custom_factory) + + # Should use custom factory + assert event.get_type() == "custom.test.event" + + +def test_real_world_scenario() -> None: + """Test a realistic end-to-end scenario""" + # Simulate a webhook notification + original_event = create_event( + { + "type": "com.github.push", + "source": "https://github.com/myorg/myrepo", + "subject": "refs/heads/main", + "datacontenttype": "application/json", + }, + data={ + "ref": "refs/heads/main", + "commits": [ + {"id": "abc123", "message": "Fix bug"}, + {"id": "def456", "message": "Add feature"}, + ], + }, + ) + + # Send as HTTP binary mode + http_message = to_binary(original_event, JSONFormat()) + + # Simulate network 
transmission (receiver side) + # Receiver auto-detects mode and parses + received_event = from_http(http_message, JSONFormat(), CloudEvent) + + # Verify data integrity + assert received_event.get_type() == "com.github.push" + assert received_event.get_source() == "https://github.com/myorg/myrepo" + assert received_event.get_subject() == "refs/heads/main" + + data = received_event.get_data() + assert isinstance(data, dict) + assert data["ref"] == "refs/heads/main" + assert len(data["commits"]) == 2 + assert data["commits"][0]["message"] == "Fix bug" + + +def test_to_binary_with_defaults() -> None: + """Test to_binary_event convenience wrapper using default JSONFormat""" + event = create_event( + extra_attrs={"datacontenttype": "application/json"}, + data={"message": "Hello"}, + ) + + message = to_binary_event(event) + + assert "ce-type" in message.headers + assert message.headers["ce-type"] == "com.example.test" + assert b'"message"' in message.body + assert b'"Hello"' in message.body + + +def test_to_structured_with_defaults() -> None: + """Test to_structured_event convenience wrapper using default JSONFormat""" + event = create_event(data={"message": "Hello"}) + + message = to_structured_event(event) + + assert "content-type" in message.headers + assert message.headers["content-type"] == "application/cloudevents+json" + assert b'"type"' in message.body + assert b'"com.example.test"' in message.body + assert b'"data"' in message.body + + +def test_from_binary_with_defaults() -> None: + """Test from_binary_event convenience wrapper using default JSONFormat and CloudEvent factory""" + message = HTTPMessage( + headers={ + "ce-type": "com.example.test", + "ce-source": "/test", + "ce-id": "123", + "ce-specversion": "1.0", + "content-type": "application/json", + }, + body=b'{"message": "Hello"}', + ) + + event = from_binary_event(message) + + assert isinstance(event, CloudEvent) + assert event.get_type() == "com.example.test" + assert event.get_source() == "/test" + 
assert event.get_id() == "123" + assert event.get_data() == {"message": "Hello"} + + +def test_from_structured_with_defaults() -> None: + """Test from_structured_event convenience wrapper using default JSONFormat and CloudEvent factory""" + message = HTTPMessage( + headers={"content-type": "application/cloudevents+json"}, + body=b'{"type": "com.example.test", "source": "/test", "id": "123", "specversion": "1.0", "data": {"message": "Hello"}}', + ) + + event = from_structured_event(message) + + assert isinstance(event, CloudEvent) + assert event.get_type() == "com.example.test" + assert event.get_source() == "/test" + assert event.get_id() == "123" + assert event.get_data() == {"message": "Hello"} + + +def test_from_http_with_defaults_binary() -> None: + """Test from_http_event convenience wrapper with auto-detection (binary mode)""" + message = HTTPMessage( + headers={ + "ce-type": "com.example.test", + "ce-source": "/test", + "ce-id": "123", + "ce-specversion": "1.0", + }, + body=b'{"message": "Hello"}', + ) + + event = from_http_event(message) + + assert isinstance(event, CloudEvent) + assert event.get_type() == "com.example.test" + assert event.get_source() == "/test" + + +def test_from_http_with_defaults_structured() -> None: + """Test from_http_event convenience wrapper with auto-detection (structured mode)""" + message = HTTPMessage( + headers={"content-type": "application/cloudevents+json"}, + body=b'{"type": "com.example.test", "source": "/test", "id": "123", "specversion": "1.0"}', + ) + + # Call wrapper function (should use defaults and detect structured mode) + event = from_http_event(message) + + assert isinstance(event, CloudEvent) + assert event.get_type() == "com.example.test" + assert event.get_source() == "/test" + + +def test_convenience_roundtrip_binary() -> None: + """Test complete roundtrip using convenience wrapper functions with binary mode""" + original_event = create_event( + extra_attrs={"datacontenttype": "application/json"}, + 
data={"message": "Roundtrip test"}, + ) + + # Convert to message using wrapper + message = to_binary_event(original_event) + + # Convert back using wrapper + recovered_event = from_binary_event(message) + + assert recovered_event.get_type() == original_event.get_type() + assert recovered_event.get_source() == original_event.get_source() + assert recovered_event.get_id() == original_event.get_id() + assert recovered_event.get_data() == original_event.get_data() + + +def test_convenience_roundtrip_structured() -> None: + """Test complete roundtrip using convenience wrapper functions with structured mode""" + original_event = create_event( + extra_attrs={"datacontenttype": "application/json"}, + data={"message": "Roundtrip test"}, + ) + + # Convert to message using wrapper + message = to_structured_event(original_event) + + # Convert back using wrapper + recovered_event = from_structured_event(message) + + assert recovered_event.get_type() == original_event.get_type() + assert recovered_event.get_source() == original_event.get_source() + assert recovered_event.get_id() == original_event.get_id() + assert recovered_event.get_data() == original_event.get_data() + + +def test_convenience_with_explicit_format_override() -> None: + """Test that wrapper functions can override format (still flexible)""" + event = create_event( + extra_attrs={"datacontenttype": "application/json"}, + data={"message": "Hello"}, + ) + + message = to_binary_event(event, JSONFormat()) + recovered = from_binary_event(message, JSONFormat()) + + assert recovered.get_type() == event.get_type() + assert recovered.get_data() == event.get_data() diff --git a/tests/test_core/test_bindings/test_kafka.py b/tests/test_core/test_bindings/test_kafka.py new file mode 100644 index 00000000..3d33a290 --- /dev/null +++ b/tests/test_core/test_bindings/test_kafka.py @@ -0,0 +1,817 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# 
not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from datetime import datetime, timezone +from typing import Any + +import pytest + +from cloudevents.core.base import BaseCloudEvent +from cloudevents.core.bindings.kafka import ( + KafkaMessage, + from_binary, + from_binary_event, + from_kafka, + from_kafka_event, + from_structured, + from_structured_event, + to_binary, + to_binary_event, + to_structured, + to_structured_event, +) +from cloudevents.core.formats.json import JSONFormat +from cloudevents.core.v1.event import CloudEvent + + +@pytest.fixture +def minimal_attributes() -> dict[str, str]: + """Minimal valid CloudEvent attributes""" + return { + "type": "com.example.test", + "source": "/test", + "id": "test-id-123", + "specversion": "1.0", + } + + +def create_event( + extra_attrs: dict[str, Any] | None = None, + data: dict[str, Any] | str | bytes | None = None, +) -> CloudEvent: + """Helper to create CloudEvent with valid required attributes""" + attrs: dict[str, Any] = { + "type": "com.example.test", + "source": "/test", + "id": "test-id-123", + "specversion": "1.0", + } + if extra_attrs: + attrs.update(extra_attrs) + return CloudEvent(attributes=attrs, data=data) + + +def test_kafka_message_creation() -> None: + """Test basic KafkaMessage creation""" + message = KafkaMessage( + headers={"content-type": b"application/json"}, + key=b"test-key", + value=b"test", + ) + assert message.headers == {"content-type": b"application/json"} + assert message.key == b"test-key" + assert message.value == b"test" + + +def test_kafka_message_immutable() 
-> None: + """Test that KafkaMessage is immutable (frozen dataclass)""" + message = KafkaMessage(headers={"test": b"value"}, key=None, value=b"data") + + with pytest.raises(Exception): # FrozenInstanceError + message.headers = {b"new": b"dict"} + + with pytest.raises(Exception): # FrozenInstanceError + message.value = b"new data" + + +def test_to_binary_required_attributes() -> None: + """Test to_binary with only required attributes""" + event = create_event() + message = to_binary(event, JSONFormat()) + + assert "ce_type" in message.headers + assert message.headers["ce_type"] == b"com.example.test" + assert "ce_source" in message.headers + assert ( + message.headers["ce_source"] == b"%2Ftest" + ) # Forward slash is percent-encoded + assert "ce_id" in message.headers + assert message.headers["ce_id"] == b"test-id-123" + assert "ce_specversion" in message.headers + assert message.headers["ce_specversion"] == b"1.0" + + +def test_to_binary_with_optional_attributes() -> None: + """Test to_binary with optional attributes""" + event = create_event( + {"subject": "test-subject", "dataschema": "https://example.com/schema"}, + data=None, + ) + message = to_binary(event, JSONFormat()) + + assert message.headers["ce_subject"] == b"test-subject" + # All special characters including : and / are percent-encoded + assert message.headers["ce_dataschema"] == b"https%3A%2F%2Fexample.com%2Fschema" + + +def test_to_binary_with_extensions() -> None: + """Test to_binary with extension attributes""" + event = create_event( + {"customext": "custom-value", "anotherext": "another-value"}, + data=None, + ) + message = to_binary(event, JSONFormat()) + + assert message.headers["ce_customext"] == b"custom-value" + assert message.headers["ce_anotherext"] == b"another-value" + + +def test_to_binary_with_json_data() -> None: + """Test to_binary with dict (JSON) data and datacontenttype""" + event = create_event( + {"datacontenttype": "application/json"}, data={"message": "Hello", "count": 42} + ) 
+ message = to_binary(event, JSONFormat()) + + # With application/json datacontenttype, data should be serialized as JSON + assert b'"message"' in message.value + assert b'"Hello"' in message.value + assert message.value != b"" + + +def test_to_binary_with_string_data() -> None: + """Test to_binary with string data""" + event = create_event(data="Hello World") + message = to_binary(event, JSONFormat()) + + assert message.value == b"Hello World" + + +def test_to_binary_with_bytes_data() -> None: + """Test to_binary with bytes data""" + event = create_event(data=b"\x00\x01\x02\x03") + message = to_binary(event, JSONFormat()) + + assert message.value == b"\x00\x01\x02\x03" + + +def test_to_binary_with_none_data() -> None: + """Test to_binary with None data""" + event = create_event(data=None) + message = to_binary(event, JSONFormat()) + + assert message.value == b"" + + +def test_to_binary_datetime_encoding() -> None: + """Test to_binary with datetime attribute""" + test_time = datetime(2023, 1, 15, 10, 30, 45, tzinfo=timezone.utc) + event = create_event({"time": test_time}) + message = to_binary(event, JSONFormat()) + + assert "ce_time" in message.headers + # Should be ISO 8601 with Z suffix, percent-encoded + assert b"2023-01-15T10%3A30%3A45Z" in message.headers["ce_time"] + + +def test_to_binary_special_characters() -> None: + """Test to_binary with special characters in attributes""" + event = create_event({"subject": 'Hello World! 
"quotes" & special'}) + message = to_binary(event, JSONFormat()) + + assert "ce_subject" in message.headers + assert b"%" in message.headers["ce_subject"] # Percent encoding present + + +def test_to_binary_datacontenttype_mapping() -> None: + """Test that datacontenttype maps to content-type header""" + event = create_event({"datacontenttype": "application/json"}, data={"test": "data"}) + message = to_binary(event, JSONFormat()) + + assert "content-type" in message.headers + assert message.headers["content-type"] == b"application/json" + assert "ce_datacontenttype" not in message.headers + + +def test_to_binary_partitionkey_in_key() -> None: + """Test that partitionkey extension attribute becomes message key""" + event = create_event({"partitionkey": "user-123"}) + message = to_binary(event, JSONFormat()) + + assert message.key == "user-123" + assert "ce_partitionkey" not in message.headers + + +def test_to_binary_custom_key_mapper() -> None: + """Test to_binary with custom key mapper""" + + def custom_mapper(event: BaseCloudEvent) -> str: + return f"custom-{event.get_type()}" + + event = create_event() + message = to_binary(event, JSONFormat(), key_mapper=custom_mapper) + + assert message.key == "custom-com.example.test" + + +def test_to_binary_no_partitionkey() -> None: + """Test to_binary without partitionkey returns None key""" + event = create_event() + message = to_binary(event, JSONFormat()) + + assert message.key is None + + +def test_from_binary_required_attributes() -> None: + """Test from_binary extracts required attributes""" + message = KafkaMessage( + headers={ + "ce_type": b"com.example.test", + "ce_source": b"%2Ftest", + "ce_id": b"test-123", + "ce_specversion": b"1.0", + }, + key=None, + value=b"", + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + assert event.get_type() == "com.example.test" + assert event.get_source() == "/test" # Percent-decoded + assert event.get_id() == "test-123" + assert event.get_specversion() == "1.0" + + 
+def test_from_binary_with_optional_attributes() -> None: + """Test from_binary with optional attributes""" + message = KafkaMessage( + headers={ + "ce_type": b"com.example.test", + "ce_source": b"/test", + "ce_id": b"123", + "ce_specversion": b"1.0", + "ce_subject": b"test-subject", + "ce_dataschema": b"https%3A%2F%2Fexample.com%2Fschema", + }, + key=None, + value=b"", + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + assert event.get_subject() == "test-subject" + assert event.get_dataschema() == "https://example.com/schema" # Percent-decoded + + +def test_from_binary_with_extensions() -> None: + """Test from_binary with extension attributes""" + message = KafkaMessage( + headers={ + "ce_type": b"com.example.test", + "ce_source": b"/test", + "ce_id": b"123", + "ce_specversion": b"1.0", + "ce_customext": b"custom-value", + }, + key=None, + value=b"", + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + assert event.get_extension("customext") == "custom-value" + + +def test_from_binary_with_json_data() -> None: + """Test from_binary with JSON data""" + message = KafkaMessage( + headers={ + "ce_type": b"com.example.test", + "ce_source": b"/test", + "ce_id": b"123", + "ce_specversion": b"1.0", + "content-type": b"application/json", + }, + key=None, + value=b'{"message": "Hello", "count": 42}', + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + data = event.get_data() + assert isinstance(data, dict) + assert data["message"] == "Hello" + assert data["count"] == 42 + + +def test_from_binary_datetime_parsing() -> None: + """Test from_binary parses datetime correctly""" + message = KafkaMessage( + headers={ + "ce_type": b"com.example.test", + "ce_source": b"/test", + "ce_id": b"123", + "ce_specversion": b"1.0", + "ce_time": b"2023-01-15T10%3A30%3A45Z", + }, + key=None, + value=b"", + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + time = event.get_time() + assert isinstance(time, datetime) + assert time.year == 2023 + 
assert time.month == 1 + assert time.day == 15 + + +def test_from_binary_case_insensitive_headers() -> None: + """Test from_binary handles case-insensitive headers""" + message = KafkaMessage( + headers={ + "CE_TYPE": b"com.example.test", + "CE_SOURCE": b"/test", + "ce_id": b"123", + "Ce_Specversion": b"1.0", + }, + key=None, + value=b"", + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + assert event.get_type() == "com.example.test" + assert event.get_source() == "/test" + + +def test_from_binary_content_type_as_datacontenttype() -> None: + """Test that content-type header becomes datacontenttype attribute""" + message = KafkaMessage( + headers={ + "ce_type": b"com.example.test", + "ce_source": b"/test", + "ce_id": b"123", + "ce_specversion": b"1.0", + "content-type": b"application/json", + }, + key=None, + value=b'{"test": "data"}', + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + assert event.get_datacontenttype() == "application/json" + + +def test_from_binary_key_to_partitionkey() -> None: + """Test that message key becomes partitionkey extension attribute""" + message = KafkaMessage( + headers={ + "ce_type": b"com.example.test", + "ce_source": b"/test", + "ce_id": b"123", + "ce_specversion": b"1.0", + }, + key=b"user-123", + value=b"", + ) + event = from_binary(message, JSONFormat(), CloudEvent) + + assert event.get_extension("partitionkey") == "user-123" + + +def test_from_binary_round_trip() -> None: + """Test round-trip conversion preserves all data""" + original = create_event( + { + "time": datetime(2023, 1, 15, 10, 30, 45, tzinfo=timezone.utc), + "subject": "test-subject", + "partitionkey": "user-456", + }, + data={"message": "Hello", "count": 42}, + ) + + message = to_binary(original, JSONFormat()) + recovered = from_binary(message, JSONFormat(), CloudEvent) + + assert recovered.get_type() == original.get_type() + assert recovered.get_source() == original.get_source() + assert recovered.get_id() == original.get_id() + 
assert recovered.get_subject() == original.get_subject() + assert recovered.get_extension("partitionkey") == "user-456" + + +def test_to_structured_basic_event() -> None: + """Test to_structured with basic event""" + event = create_event(data={"message": "Hello"}) + message = to_structured(event, JSONFormat()) + + assert "content-type" in message.headers + assert message.headers["content-type"] == b"application/cloudevents+json" + assert b"type" in message.value + assert b"source" in message.value + + +def test_to_structured_with_all_attributes() -> None: + """Test to_structured with all optional attributes""" + event = create_event( + { + "time": datetime(2023, 1, 15, 10, 30, 45, tzinfo=timezone.utc), + "subject": "test-subject", + "datacontenttype": "application/json", + "dataschema": "https://example.com/schema", + }, + data={"message": "Hello"}, + ) + message = to_structured(event, JSONFormat()) + + assert b"time" in message.value + assert b"subject" in message.value + assert b"datacontenttype" in message.value + + +def test_to_structured_partitionkey_in_key() -> None: + """Test that partitionkey becomes message key in structured mode""" + event = create_event({"partitionkey": "user-789"}) + message = to_structured(event, JSONFormat()) + + assert message.key == "user-789" + + +def test_to_structured_custom_key_mapper() -> None: + """Test to_structured with custom key mapper""" + + def custom_mapper(event: BaseCloudEvent) -> str: + return f"type-{event.get_type().split('.')[-1]}" + + event = create_event() + message = to_structured(event, JSONFormat(), key_mapper=custom_mapper) + + assert message.key == "type-test" + + +def test_to_structured_with_binary_data() -> None: + """Test to_structured with binary data (should be base64 encoded)""" + event = create_event(data=b"\x00\x01\x02\x03") + message = to_structured(event, JSONFormat()) + + # Binary data should be base64 encoded in structured mode + assert b"data_base64" in message.value + + +def 
test_from_structured_basic_event() -> None: + """Test from_structured with basic event""" + message = KafkaMessage( + headers={"content-type": b"application/cloudevents+json"}, + key=None, + value=b'{"type":"com.example.test","source":"/test","id":"123","specversion":"1.0","data":{"message":"Hello"}}', + ) + event = from_structured(message, JSONFormat(), CloudEvent) + + assert event.get_type() == "com.example.test" + assert event.get_source() == "/test" + assert event.get_data() == {"message": "Hello"} + + +def test_from_structured_key_to_partitionkey() -> None: + """Test that message key becomes partitionkey in structured mode""" + message = KafkaMessage( + headers={"content-type": b"application/cloudevents+json"}, + key=b"user-999", + value=b'{"type":"com.example.test","source":"/test","id":"123","specversion":"1.0"}', + ) + event = from_structured(message, JSONFormat(), CloudEvent) + + assert event.get_extension("partitionkey") == "user-999" + + +def test_from_structured_round_trip() -> None: + """Test structured mode round-trip""" + original = create_event( + { + "time": datetime(2023, 1, 15, 10, 30, 45, tzinfo=timezone.utc), + "subject": "test-subject", + "partitionkey": "key-123", + }, + data={"message": "Hello", "count": 42}, + ) + + message = to_structured(original, JSONFormat()) + recovered = from_structured(message, JSONFormat(), CloudEvent) + + assert recovered.get_type() == original.get_type() + assert recovered.get_source() == original.get_source() + assert recovered.get_extension("partitionkey") == "key-123" + + +def test_from_kafka_detects_binary_mode() -> None: + """Test from_kafka detects binary mode (ce_ headers present)""" + message = KafkaMessage( + headers={ + "ce_type": b"com.example.test", + "ce_source": b"/test", + "ce_id": b"123", + "ce_specversion": b"1.0", + }, + key=None, + value=b'{"message": "Hello"}', + ) + event = from_kafka(message, JSONFormat(), CloudEvent) + + assert event.get_type() == "com.example.test" + + +def 
test_from_kafka_detects_structured_mode() -> None: + """Test from_kafka detects structured mode (no ce_ headers)""" + message = KafkaMessage( + headers={"content-type": b"application/cloudevents+json"}, + key=None, + value=b'{"type":"com.example.test","source":"/test","id":"123","specversion":"1.0"}', + ) + event = from_kafka(message, JSONFormat(), CloudEvent) + + assert event.get_type() == "com.example.test" + + +def test_from_kafka_case_insensitive_detection() -> None: + """Test from_kafka detection is case-insensitive""" + message = KafkaMessage( + headers={ + "CE_TYPE": b"com.example.test", + "CE_SOURCE": b"/test", + "ce_id": b"123", + "ce_specversion": b"1.0", + }, + key=None, + value=b"", + ) + event = from_kafka(message, JSONFormat(), CloudEvent) + + assert event.get_type() == "com.example.test" + + +def test_from_kafka_binary_with_partitionkey() -> None: + """Test from_kafka binary mode with partition key""" + message = KafkaMessage( + headers={ + "ce_type": b"com.example.test", + "ce_source": b"/test", + "ce_id": b"123", + "ce_specversion": b"1.0", + }, + key=b"user-555", + value=b"", + ) + event = from_kafka(message, JSONFormat(), CloudEvent) + + assert event.get_extension("partitionkey") == "user-555" + + +def test_from_kafka_structured_with_partitionkey() -> None: + """Test from_kafka structured mode with partition key""" + message = KafkaMessage( + headers={"content-type": b"application/cloudevents+json"}, + key=b"user-666", + value=b'{"type":"com.example.test","source":"/test","id":"123","specversion":"1.0"}', + ) + event = from_kafka(message, JSONFormat(), CloudEvent) + + assert event.get_extension("partitionkey") == "user-666" + + +def test_empty_headers() -> None: + """Test handling of empty headers in structured mode""" + message = KafkaMessage( + headers={}, + key=None, + value=b'{"type":"com.example.test","source":"/test","id":"123","specversion":"1.0"}', + ) + # Should default to structured mode + event = from_kafka(message, JSONFormat(), 
CloudEvent) + assert event.get_type() == "com.example.test" + + +def test_unicode_in_attributes() -> None: + """Test handling of unicode characters in attributes""" + event = create_event({"subject": "Hello 世界 🌍"}) + message = to_binary(event, JSONFormat()) + recovered = from_binary(message, JSONFormat(), CloudEvent) + + assert recovered.get_subject() == "Hello 世界 🌍" + + +def test_unicode_in_data() -> None: + """Test handling of unicode characters in data""" + event = create_event( + {"datacontenttype": "application/json"}, data={"message": "Hello 世界 🌍"} + ) + message = to_binary(event, JSONFormat()) + recovered = from_binary(message, JSONFormat(), CloudEvent) + + assert isinstance(recovered.get_data(), dict) + assert recovered.get_data()["message"] == "Hello 世界 🌍" + + +def test_string_key_vs_bytes_key() -> None: + """Test that both string and bytes keys work""" + # String key + event1 = create_event({"partitionkey": "string-key"}) + msg1 = to_binary(event1, JSONFormat()) + assert msg1.key == "string-key" + + # Bytes key through custom mapper + def bytes_mapper(event: BaseCloudEvent) -> bytes: + return b"bytes-key" + + event2 = create_event() + msg2 = to_binary(event2, JSONFormat(), key_mapper=bytes_mapper) + assert msg2.key == b"bytes-key" + + +def test_to_binary_with_defaults() -> None: + """Test to_binary_event convenience wrapper using default JSONFormat""" + event = create_event( + extra_attrs={"datacontenttype": "application/json"}, + data={"message": "Hello"}, + ) + + message = to_binary_event(event) + + assert "ce_type" in message.headers + assert message.headers["ce_type"] == b"com.example.test" + assert b'"message"' in message.value + assert b'"Hello"' in message.value + + +def test_to_structured_with_defaults() -> None: + """Test to_structured_event convenience wrapper using default JSONFormat""" + event = create_event(data={"message": "Hello"}) + + message = to_structured_event(event) + + assert "content-type" in message.headers + assert 
message.headers["content-type"] == b"application/cloudevents+json" + assert b'"type"' in message.value + assert b'"com.example.test"' in message.value + assert b'"data"' in message.value + + +def test_from_binary_with_defaults() -> None: + """Test from_binary_event convenience wrapper using default JSONFormat and CloudEvent factory""" + message = KafkaMessage( + headers={ + "ce_type": b"com.example.test", + "ce_source": b"%2Ftest", + "ce_id": b"123", + "ce_specversion": b"1.0", + "content-type": b"application/json", + }, + key=None, + value=b'{"message": "Hello"}', + ) + + # Call wrapper function (should use defaults) + event = from_binary_event(message) + + assert isinstance(event, CloudEvent) + assert event.get_type() == "com.example.test" + assert event.get_source() == "/test" + assert event.get_id() == "123" + assert event.get_data() == {"message": "Hello"} + + +def test_from_structured_with_defaults() -> None: + """Test from_structured_event convenience wrapper using default JSONFormat and CloudEvent factory""" + message = KafkaMessage( + headers={"content-type": b"application/cloudevents+json"}, + key=None, + value=b'{"type": "com.example.test", "source": "/test", "id": "123", "specversion": "1.0", "data": {"message": "Hello"}}', + ) + + # Call wrapper function (should use defaults) + event = from_structured_event(message) + + assert isinstance(event, CloudEvent) + assert event.get_type() == "com.example.test" + assert event.get_source() == "/test" + assert event.get_id() == "123" + assert event.get_data() == {"message": "Hello"} + + +def test_from_kafka_with_defaults_binary() -> None: + """Test from_kafka_event convenience wrapper with auto-detection (binary mode)""" + message = KafkaMessage( + headers={ + "ce_type": b"com.example.test", + "ce_source": b"%2Ftest", + "ce_id": b"123", + "ce_specversion": b"1.0", + }, + key=None, + value=b'{"message": "Hello"}', + ) + + # Call wrapper function (should use defaults and detect binary mode) + event = 
from_kafka_event(message) + + assert isinstance(event, CloudEvent) + assert event.get_type() == "com.example.test" + assert event.get_source() == "/test" + + +def test_from_kafka_with_defaults_structured() -> None: + """Test from_kafka_event convenience wrapper with auto-detection (structured mode)""" + message = KafkaMessage( + headers={"content-type": b"application/cloudevents+json"}, + key=None, + value=b'{"type": "com.example.test", "source": "/test", "id": "123", "specversion": "1.0"}', + ) + + # Call wrapper function (should use defaults and detect structured mode) + event = from_kafka_event(message) + + assert isinstance(event, CloudEvent) + assert event.get_type() == "com.example.test" + assert event.get_source() == "/test" + + +def test_convenience_roundtrip_binary() -> None: + """Test complete roundtrip using convenience wrapper functions with binary mode""" + original_event = create_event( + extra_attrs={"datacontenttype": "application/json"}, + data={"message": "Roundtrip test"}, + ) + + # Convert to message using wrapper + message = to_binary_event(original_event) + + # Convert back using wrapper + recovered_event = from_binary_event(message) + + assert recovered_event.get_type() == original_event.get_type() + assert recovered_event.get_source() == original_event.get_source() + assert recovered_event.get_id() == original_event.get_id() + assert recovered_event.get_data() == original_event.get_data() + + +def test_convenience_roundtrip_structured() -> None: + """Test complete roundtrip using convenience wrapper functions with structured mode""" + original_event = create_event( + extra_attrs={"datacontenttype": "application/json"}, + data={"message": "Roundtrip test"}, + ) + + # Convert to message using wrapper + message = to_structured_event(original_event) + + # Convert back using wrapper + recovered_event = from_structured_event(message) + + assert recovered_event.get_type() == original_event.get_type() + assert recovered_event.get_source() == 
original_event.get_source() + assert recovered_event.get_id() == original_event.get_id() + assert recovered_event.get_data() == original_event.get_data() + + +def test_convenience_with_explicit_format_override() -> None: + """Test that wrapper functions can override format (still flexible)""" + event = create_event( + extra_attrs={"datacontenttype": "application/json"}, + data={"message": "Hello"}, + ) + + # Explicitly pass JSONFormat to wrapper function + message = to_binary_event(event, JSONFormat()) + recovered = from_binary_event(message, JSONFormat()) + + assert recovered.get_type() == event.get_type() + assert recovered.get_data() == event.get_data() + + +def test_from_structured_with_key_auto_detect_v1() -> None: + """Test that auto-detection works when message has key (v1.0)""" + message = KafkaMessage( + headers={"content-type": b"application/cloudevents+json"}, + key=b"partition-key-123", + value=b'{"specversion":"1.0","type":"com.example.test","source":"/test","id":"123"}', + ) + + # Auto-detect version (factory=None) + event = from_structured(message, JSONFormat()) + + assert event.get_type() == "com.example.test" + assert event.get_extension("partitionkey") == "partition-key-123" + assert event.get_attributes()["specversion"] == "1.0" + + +def test_from_structured_with_key_auto_detect_v03() -> None: + """Test that auto-detection works when message has key (v0.3)""" + message = KafkaMessage( + headers={"content-type": b"application/cloudevents+json"}, + key=b"partition-key-456", + value=b'{"specversion":"0.3","type":"com.example.test","source":"/test","id":"456"}', + ) + + # Auto-detect version (factory=None) + event = from_structured(message, JSONFormat()) + + assert event.get_type() == "com.example.test" + assert event.get_extension("partitionkey") == "partition-key-456" + assert event.get_attributes()["specversion"] == "0.3" diff --git a/tests/test_core/test_format/__init__.py b/tests/test_core/test_format/__init__.py new file mode 100644 index 
00000000..8043675e --- /dev/null +++ b/tests/test_core/test_format/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. diff --git a/tests/test_core/test_format/test_json.py b/tests/test_core/test_format/test_json.py new file mode 100644 index 00000000..12f75435 --- /dev/null +++ b/tests/test_core/test_format/test_json.py @@ -0,0 +1,325 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ + +from datetime import datetime, timezone + +from cloudevents.core.formats.json import JSONFormat +from cloudevents.core.v1.event import CloudEvent + + +def test_write_cloud_event_to_json_with_attributes_only() -> None: + attributes = { + "id": "123", + "source": "source", + "type": "type", + "specversion": "1.0", + "time": datetime(2023, 10, 25, 17, 9, 19, 736166, tzinfo=timezone.utc), + "datacontenttype": "application/json", + "dataschema": "http://example.com/schema", + "subject": "test_subject", + } + event = CloudEvent(attributes=attributes, data=None) + formatter = JSONFormat() + result = formatter.write(event) + + assert ( + result + == '{"id": "123", "source": "source", "type": "type", "specversion": "1.0", "time": "2023-10-25T17:09:19.736166Z", "datacontenttype": "application/json", "dataschema": "http://example.com/schema", "subject": "test_subject"}'.encode( + "utf-8" + ) + ) + + +def test_write_cloud_event_to_json_with_data_as_json() -> None: + attributes = { + "id": "123", + "source": "source", + "type": "type", + "specversion": "1.0", + "time": datetime(2023, 10, 25, 17, 9, 19, 736166, tzinfo=timezone.utc), + "datacontenttype": "application/json", + "dataschema": "http://example.com/schema", + "subject": "test_subject", + } + event = CloudEvent(attributes=attributes, data={"key": "value"}) + formatter = JSONFormat() + result = formatter.write(event) + + assert ( + result + == '{"id": "123", "source": "source", "type": "type", "specversion": "1.0", "time": "2023-10-25T17:09:19.736166Z", "datacontenttype": "application/json", "dataschema": "http://example.com/schema", "subject": "test_subject", "data": {"key": "value"}}'.encode( + "utf-8" + ) + ) + + +def test_write_cloud_event_to_json_with_data_as_bytes() -> None: + attributes = { + "id": "123", + "source": "source", + "type": "type", + "specversion": "1.0", + "time": datetime(2023, 10, 25, 17, 9, 19, 736166, tzinfo=timezone.utc), + "datacontenttype": "application/json", + "dataschema": 
"http://example.com/schema", + "subject": "test_subject", + } + event = CloudEvent(attributes=attributes, data=b"test") + formatter = JSONFormat() + result = formatter.write(event) + + assert ( + result + == '{"id": "123", "source": "source", "type": "type", "specversion": "1.0", "time": "2023-10-25T17:09:19.736166Z", "datacontenttype": "application/json", "dataschema": "http://example.com/schema", "subject": "test_subject", "data_base64": "dGVzdA=="}'.encode( + "utf-8" + ) + ) + + +def test_write_cloud_event_to_json_with_data_as_str_and_content_type_not_json() -> None: + attributes = { + "id": "123", + "source": "source", + "type": "type", + "specversion": "1.0", + "time": datetime(2023, 10, 25, 17, 9, 19, 736166, tzinfo=timezone.utc), + "datacontenttype": "text/plain", + "dataschema": "http://example.com/schema", + "subject": "test_subject", + } + event = CloudEvent(attributes=attributes, data="test") + formatter = JSONFormat() + result = formatter.write(event) + + assert ( + result + == '{"id": "123", "source": "source", "type": "type", "specversion": "1.0", "time": "2023-10-25T17:09:19.736166Z", "datacontenttype": "text/plain", "dataschema": "http://example.com/schema", "subject": "test_subject", "data": "test"}'.encode( + "utf-8" + ) + ) + + +def test_write_cloud_event_to_json_with_no_content_type_set_and_data_as_str() -> None: + attributes = { + "id": "123", + "source": "source", + "type": "type", + "specversion": "1.0", + "time": datetime(2023, 10, 25, 17, 9, 19, 736166, tzinfo=timezone.utc), + "dataschema": "http://example.com/schema", + "subject": "test_subject", + } + event = CloudEvent(attributes=attributes, data="I'm just a string") + formatter = JSONFormat() + result = formatter.write(event) + + assert ( + result + == '{"id": "123", "source": "source", "type": "type", "specversion": "1.0", "time": "2023-10-25T17:09:19.736166Z", "dataschema": "http://example.com/schema", "subject": "test_subject", "data": "I\'m just a string"}'.encode( + "utf-8" + ) + ) 
+ + +def test_write_cloud_event_to_json_with_no_content_type_set_and_data_as_json() -> None: + attributes = { + "id": "123", + "source": "source", + "type": "type", + "specversion": "1.0", + "time": datetime(2023, 10, 25, 17, 9, 19, 736166, tzinfo=timezone.utc), + "dataschema": "http://example.com/schema", + "subject": "test_subject", + } + event = CloudEvent(attributes=attributes, data={"key": "value"}) + formatter = JSONFormat() + result = formatter.write(event) + + assert ( + result + == '{"id": "123", "source": "source", "type": "type", "specversion": "1.0", "time": "2023-10-25T17:09:19.736166Z", "dataschema": "http://example.com/schema", "subject": "test_subject", "data": {"key": "value"}}'.encode( + "utf-8" + ) + ) + + +def test_read_cloud_event_from_json_with_attributes_only() -> None: + data = '{"id": "123", "source": "source", "type": "type", "specversion": "1.0", "time": "2023-10-25T17:09:19.736166Z", "datacontenttype": "application/json", "dataschema": "http://example.com/schema", "subject": "test_subject"}'.encode( + "utf-8" + ) + formatter = JSONFormat() + result = formatter.read(CloudEvent, data) + + assert result.get_id() == "123" + assert result.get_source() == "source" + assert result.get_type() == "type" + assert result.get_specversion() == "1.0" + assert result.get_time() == datetime( + 2023, 10, 25, 17, 9, 19, 736166, tzinfo=timezone.utc + ) + assert result.get_datacontenttype() == "application/json" + assert result.get_dataschema() == "http://example.com/schema" + assert result.get_subject() == "test_subject" + assert result.get_data() is None + + +def test_read_cloud_event_from_json_with_bytes_as_data() -> None: + data = '{"id": "123", "source": "source", "type": "type", "specversion": "1.0", "time": "2023-10-25T17:09:19.736166Z", "datacontenttype": "application/json", "dataschema": "http://example.com/schema", "subject": "test_subject", "data_base64": "dGVzdA=="}'.encode( + "utf-8" + ) + formatter = JSONFormat() + result = 
formatter.read(CloudEvent, data) + + assert result.get_id() == "123" + assert result.get_source() == "source" + assert result.get_type() == "type" + assert result.get_specversion() == "1.0" + assert result.get_time() == datetime( + 2023, 10, 25, 17, 9, 19, 736166, tzinfo=timezone.utc + ) + assert result.get_datacontenttype() == "application/json" + assert result.get_dataschema() == "http://example.com/schema" + assert result.get_subject() == "test_subject" + assert result.get_data() == b"test" + + +def test_read_cloud_event_from_json_with_json_as_data() -> None: + data = '{"id": "123", "source": "source", "type": "type", "specversion": "1.0", "time": "2023-10-25T17:09:19.736166Z", "datacontenttype": "application/json", "dataschema": "http://example.com/schema", "subject": "test_subject", "data": {"key": "value"}}'.encode( + "utf-8" + ) + formatter = JSONFormat() + result = formatter.read(CloudEvent, data) + + assert result.get_id() == "123" + assert result.get_source() == "source" + assert result.get_type() == "type" + assert result.get_specversion() == "1.0" + assert result.get_time() == datetime( + 2023, 10, 25, 17, 9, 19, 736166, tzinfo=timezone.utc + ) + assert result.get_datacontenttype() == "application/json" + assert result.get_dataschema() == "http://example.com/schema" + assert result.get_subject() == "test_subject" + assert result.get_data() == {"key": "value"} + + +def test_write_cloud_event_with_extension_attributes() -> None: + attributes = { + "id": "123", + "source": "source", + "type": "type", + "specversion": "1.0", + "customext1": "value1", + "customext2": 123, + } + event = CloudEvent(attributes=attributes, data=None) + formatter = JSONFormat() + result = formatter.write(event) + + assert b'"customext1": "value1"' in result + assert b'"customext2": 123' in result + + +def test_read_cloud_event_with_extension_attributes() -> None: + data = '{"id": "123", "source": "source", "type": "type", "specversion": "1.0", "customext1": "value1", 
"customext2": 123}'.encode( + "utf-8" + ) + formatter = JSONFormat() + result = formatter.read(CloudEvent, data) + + assert result.get_extension("customext1") == "value1" + assert result.get_extension("customext2") == 123 + + +def test_write_cloud_event_with_different_json_content_types() -> None: + test_cases = [ + ("application/vnd.api+json", {"key": "value"}), + ("text/json", {"key": "value"}), + ("application/json; charset=utf-8", {"key": "value"}), + ] + + for content_type, data in test_cases: + attributes = { + "id": "123", + "source": "source", + "type": "type", + "specversion": "1.0", + "datacontenttype": content_type, + } + event = CloudEvent(attributes=attributes, data=data) + formatter = JSONFormat() + result = formatter.write(event) + + assert b'"data": {"key": "value"}' in result + + +def test_read_cloud_event_with_string_data() -> None: + data = '{"id": "123", "source": "source", "type": "type", "specversion": "1.0", "data": "plain string data"}'.encode( + "utf-8" + ) + formatter = JSONFormat() + result = formatter.read(CloudEvent, data) + + assert result.get_data() == "plain string data" + + +def test_write_cloud_event_with_utc_timezone_z_suffix() -> None: + attributes = { + "id": "123", + "source": "source", + "type": "type", + "specversion": "1.0", + "time": datetime(2023, 10, 25, 17, 9, 19, 736166, tzinfo=timezone.utc), + } + event = CloudEvent(attributes=attributes, data=None) + formatter = JSONFormat() + result = formatter.write(event) + + assert b'"time": "2023-10-25T17:09:19.736166Z"' in result + + +def test_write_cloud_event_with_unicode_data() -> None: + attributes = { + "id": "123", + "source": "source", + "type": "type", + "specversion": "1.0", + } + event = CloudEvent(attributes=attributes, data="Hello 世界 🌍") + formatter = JSONFormat() + result = formatter.write(event) + + decoded = result.decode("utf-8") + assert '"data": "Hello' in decoded + assert "Hello" in decoded + + +def test_read_cloud_event_with_unicode_data() -> None: + data = 
'{"id": "123", "source": "source", "type": "type", "specversion": "1.0", "data": "Hello 世界 🌍"}'.encode( + "utf-8" + ) + formatter = JSONFormat() + result = formatter.read(CloudEvent, data) + + assert result.get_data() == "Hello 世界 🌍" + + +def test_read_cloud_event_from_string_input() -> None: + data = '{"id": "123", "source": "source", "type": "type", "specversion": "1.0"}' + formatter = JSONFormat() + result = formatter.read(CloudEvent, data) + + assert result.get_id() == "123" + assert result.get_source() == "source" diff --git a/tests/test_core/test_v03/__init__.py b/tests/test_core/test_v03/__init__.py new file mode 100644 index 00000000..09b419aa --- /dev/null +++ b/tests/test_core/test_v03/__init__.py @@ -0,0 +1,15 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Tests for CloudEvents v0.3 implementation.""" diff --git a/tests/test_core/test_v03/test_event.py b/tests/test_core/test_v03/test_event.py new file mode 100644 index 00000000..aec260bf --- /dev/null +++ b/tests/test_core/test_v03/test_event.py @@ -0,0 +1,438 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from datetime import datetime, timezone +from typing import Any + +import pytest + +from cloudevents.core.exceptions import ( + CloudEventValidationError, + CustomExtensionAttributeError, + InvalidAttributeTypeError, + InvalidAttributeValueError, + MissingRequiredAttributeError, +) +from cloudevents.core.v03.event import CloudEvent + + +def test_missing_required_attributes() -> None: + with pytest.raises(CloudEventValidationError) as e: + CloudEvent({}) + + expected_errors = { + "id": [ + str(MissingRequiredAttributeError("id")), + str(InvalidAttributeValueError("id", "Attribute 'id' must not be None")), + str(InvalidAttributeTypeError("id", str)), + ], + "source": [ + str(MissingRequiredAttributeError("source")), + str(InvalidAttributeTypeError("source", str)), + ], + "type": [ + str(MissingRequiredAttributeError("type")), + str(InvalidAttributeTypeError("type", str)), + ], + "specversion": [ + str(MissingRequiredAttributeError("specversion")), + str(InvalidAttributeTypeError("specversion", str)), + str( + InvalidAttributeValueError( + "specversion", "Attribute 'specversion' must be '0.3'" + ) + ), + ], + } + + actual_errors = { + key: [str(e) for e in value] for key, value in e.value.errors.items() + } + assert actual_errors == expected_errors + + +def test_invalid_specversion() -> None: + """Test that v0.3 CloudEvent rejects non-0.3 specversion""" + with pytest.raises(CloudEventValidationError) as e: + CloudEvent( + { + "id": "1", + "source": "/", + "type": "test", + "specversion": "1.0", # Wrong version! 
+ } + ) + + assert "specversion" in e.value.errors + assert any("must be '0.3'" in str(err) for err in e.value.errors["specversion"]) + + +@pytest.mark.parametrize( + "time,expected_error", + [ + ( + "2023-10-25T17:09:19.736166Z", + {"time": [str(InvalidAttributeTypeError("time", datetime))]}, + ), + ( + datetime(2023, 10, 25, 17, 9, 19, 736166), + { + "time": [ + str( + InvalidAttributeValueError( + "time", "Attribute 'time' must be timezone aware" + ) + ) + ] + }, + ), + ( + 1, + {"time": [str(InvalidAttributeTypeError("time", datetime))]}, + ), + ], +) +def test_time_validation(time: Any, expected_error: dict) -> None: + with pytest.raises(CloudEventValidationError) as e: + CloudEvent( + { + "id": "1", + "source": "/", + "type": "test", + "specversion": "0.3", + "time": time, + } + ) + actual_errors = { + key: [str(e) for e in value] for key, value in e.value.errors.items() + } + assert actual_errors == expected_error + + +@pytest.mark.parametrize( + "subject,expected_error", + [ + ( + 1234, + {"subject": [str(InvalidAttributeTypeError("subject", str))]}, + ), + ( + "", + { + "subject": [ + str( + InvalidAttributeValueError( + "subject", "Attribute 'subject' must not be empty" + ) + ) + ] + }, + ), + ], +) +def test_subject_validation(subject: Any, expected_error: dict) -> None: + with pytest.raises(CloudEventValidationError) as e: + CloudEvent( + { + "id": "1", + "source": "/", + "type": "test", + "specversion": "0.3", + "subject": subject, + } + ) + + actual_errors = { + key: [str(e) for e in value] for key, value in e.value.errors.items() + } + assert actual_errors == expected_error + + +@pytest.mark.parametrize( + "datacontenttype,expected_error", + [ + ( + 1234, + { + "datacontenttype": [ + str(InvalidAttributeTypeError("datacontenttype", str)) + ] + }, + ), + ( + "", + { + "datacontenttype": [ + str( + InvalidAttributeValueError( + "datacontenttype", + "Attribute 'datacontenttype' must not be empty", + ) + ) + ] + }, + ), + ], +) +def 
test_datacontenttype_validation(datacontenttype: Any, expected_error: dict) -> None: + with pytest.raises(CloudEventValidationError) as e: + CloudEvent( + { + "id": "1", + "source": "/", + "type": "test", + "specversion": "0.3", + "datacontenttype": datacontenttype, + } + ) + + actual_errors = { + key: [str(e) for e in value] for key, value in e.value.errors.items() + } + assert actual_errors == expected_error + + +@pytest.mark.parametrize( + "datacontentencoding,expected_error", + [ + ( + 1234, + { + "datacontentencoding": [ + str(InvalidAttributeTypeError("datacontentencoding", str)) + ] + }, + ), + ( + "", + { + "datacontentencoding": [ + str( + InvalidAttributeValueError( + "datacontentencoding", + "Attribute 'datacontentencoding' must not be empty", + ) + ) + ] + }, + ), + ], +) +def test_datacontentencoding_validation( + datacontentencoding: Any, expected_error: dict +) -> None: + """Test v0.3 specific datacontentencoding attribute validation""" + with pytest.raises(CloudEventValidationError) as e: + CloudEvent( + { + "id": "1", + "source": "/", + "type": "test", + "specversion": "0.3", + "datacontentencoding": datacontentencoding, + } + ) + + actual_errors = { + key: [str(e) for e in value] for key, value in e.value.errors.items() + } + assert actual_errors == expected_error + + +@pytest.mark.parametrize( + "schemaurl,expected_error", + [ + ( + 1234, + {"schemaurl": [str(InvalidAttributeTypeError("schemaurl", str))]}, + ), + ( + "", + { + "schemaurl": [ + str( + InvalidAttributeValueError( + "schemaurl", "Attribute 'schemaurl' must not be empty" + ) + ) + ] + }, + ), + ], +) +def test_schemaurl_validation(schemaurl: Any, expected_error: dict) -> None: + """Test v0.3 specific schemaurl attribute validation""" + with pytest.raises(CloudEventValidationError) as e: + CloudEvent( + { + "id": "1", + "source": "/", + "type": "test", + "specversion": "0.3", + "schemaurl": schemaurl, + } + ) + + actual_errors = { + key: [str(e) for e in value] for key, value in 
e.value.errors.items() + } + assert actual_errors == expected_error + + +@pytest.mark.parametrize( + "extension_name,expected_error", + [ + ( + "", + { + "": [ + str( + CustomExtensionAttributeError( + "", + "Extension attribute '' should be between 1 and 20 characters long", + ) + ), + str( + CustomExtensionAttributeError( + "", + "Extension attribute '' should only contain lowercase letters and numbers", + ) + ), + ] + }, + ), + ( + "thisisaverylongextension", + { + "thisisaverylongextension": [ + str( + CustomExtensionAttributeError( + "thisisaverylongextension", + "Extension attribute 'thisisaverylongextension' should be between 1 and 20 characters long", + ) + ) + ] + }, + ), + ( + "data", + { + "data": [ + str( + CustomExtensionAttributeError( + "data", + "Extension attribute 'data' is reserved and must not be used", + ) + ) + ] + }, + ), + ], +) +def test_custom_extension(extension_name: str, expected_error: dict) -> None: + with pytest.raises(CloudEventValidationError) as e: + CloudEvent( + { + "id": "1", + "source": "/", + "type": "test", + "specversion": "0.3", + extension_name: "value", + } + ) + + actual_errors = { + key: [str(e) for e in value] for key, value in e.value.errors.items() + } + assert actual_errors == expected_error + + +def test_cloud_event_v03_constructor() -> None: + """Test creating a v0.3 CloudEvent with all attributes""" + id = "1" + source = "/source" + type = "com.test.type" + specversion = "0.3" + datacontenttype = "application/json" + datacontentencoding = "base64" + schemaurl = "http://example.com/schema.json" + subject = "test_subject" + time = datetime.now(tz=timezone.utc) + data = {"key": "value"} + customextension = "customExtension" + + event = CloudEvent( + attributes={ + "id": id, + "source": source, + "type": type, + "specversion": specversion, + "datacontenttype": datacontenttype, + "datacontentencoding": datacontentencoding, + "schemaurl": schemaurl, + "subject": subject, + "time": time, + "customextension": 
customextension, + }, + data=data, + ) + + assert event.get_id() == id + assert event.get_source() == source + assert event.get_type() == type + assert event.get_specversion() == specversion + assert event.get_datacontenttype() == datacontenttype + assert event.get_datacontentencoding() == datacontentencoding + assert event.get_schemaurl() == schemaurl + assert event.get_subject() == subject + assert event.get_time() == time + assert event.get_extension("customextension") == customextension + assert event.get_data() == data + + +def test_get_dataschema_returns_schemaurl() -> None: + """Test that get_dataschema() returns schemaurl for v0.3 compatibility""" + event = CloudEvent( + attributes={ + "id": "1", + "source": "/source", + "type": "com.test.type", + "specversion": "0.3", + "schemaurl": "http://example.com/schema.json", + } + ) + + # get_dataschema should return the schemaurl value for compatibility + assert event.get_dataschema() == "http://example.com/schema.json" + assert event.get_schemaurl() == "http://example.com/schema.json" + + +def test_v03_minimal_event() -> None: + """Test creating a minimal v0.3 CloudEvent""" + event = CloudEvent( + attributes={ + "id": "test-123", + "source": "https://example.com/source", + "type": "com.example.test", + "specversion": "0.3", + } + ) + + assert event.get_id() == "test-123" + assert event.get_source() == "https://example.com/source" + assert event.get_type() == "com.example.test" + assert event.get_specversion() == "0.3" + assert event.get_data() is None + assert event.get_datacontentencoding() is None + assert event.get_schemaurl() is None diff --git a/tests/test_core/test_v03/test_http_bindings.py b/tests/test_core/test_v03/test_http_bindings.py new file mode 100644 index 00000000..13bcd592 --- /dev/null +++ b/tests/test_core/test_v03/test_http_bindings.py @@ -0,0 +1,511 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use 
this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from datetime import datetime, timezone + +from cloudevents.core.bindings.http import ( + HTTPMessage, + from_binary, + from_binary_event, + from_http, + from_http_event, + from_structured, + from_structured_event, + to_binary, + to_structured, +) +from cloudevents.core.formats.json import JSONFormat +from cloudevents.core.v03.event import CloudEvent + + +def test_v03_to_binary_minimal() -> None: + """Test converting minimal v0.3 event to HTTP binary mode""" + event = CloudEvent( + attributes={ + "specversion": "0.3", + "type": "com.example.test", + "source": "/test", + "id": "test-123", + } + ) + + message = to_binary(event, JSONFormat()) + + assert "ce-specversion" in message.headers + assert message.headers["ce-specversion"] == "0.3" + assert "ce-type" in message.headers + assert "ce-source" in message.headers + assert "ce-id" in message.headers + + +def test_v03_to_binary_with_schemaurl() -> None: + """Test converting v0.3 event with schemaurl to HTTP binary mode""" + event = CloudEvent( + attributes={ + "specversion": "0.3", + "type": "com.example.test", + "source": "/test", + "id": "test-123", + "schemaurl": "https://example.com/schema.json", + } + ) + + message = to_binary(event, JSONFormat()) + + assert "ce-schemaurl" in message.headers + # URL should be percent-encoded + assert "https" in message.headers["ce-schemaurl"] + + +def test_v03_to_binary_with_datacontentencoding() -> None: + """Test converting v0.3 event with datacontentencoding to HTTP binary mode""" + event = CloudEvent( + attributes={ 
+ "specversion": "0.3", + "type": "com.example.test", + "source": "/test", + "id": "test-123", + "datacontentencoding": "base64", + } + ) + + message = to_binary(event, JSONFormat()) + + assert "ce-datacontentencoding" in message.headers + assert message.headers["ce-datacontentencoding"] == "base64" + + +def test_v03_from_binary_minimal() -> None: + """Test parsing minimal v0.3 binary HTTP message""" + message = HTTPMessage( + headers={ + "ce-specversion": "0.3", + "ce-type": "com.example.test", + "ce-source": "/test", + "ce-id": "test-123", + }, + body=b"", + ) + + event = from_binary(message, JSONFormat(), CloudEvent) + + assert event.get_specversion() == "0.3" + assert event.get_type() == "com.example.test" + assert event.get_source() == "/test" + assert event.get_id() == "test-123" + + +def test_v03_from_binary_with_schemaurl() -> None: + """Test parsing v0.3 binary HTTP message with schemaurl""" + message = HTTPMessage( + headers={ + "ce-specversion": "0.3", + "ce-type": "com.example.test", + "ce-source": "/test", + "ce-id": "test-123", + "ce-schemaurl": "https://example.com/schema.json", + }, + body=b"", + ) + + event = from_binary(message, JSONFormat(), CloudEvent) + + assert event.get_schemaurl() == "https://example.com/schema.json" + + +def test_v03_from_binary_with_datacontentencoding() -> None: + """Test parsing v0.3 binary HTTP message with datacontentencoding""" + message = HTTPMessage( + headers={ + "ce-specversion": "0.3", + "ce-type": "com.example.test", + "ce-source": "/test", + "ce-id": "test-123", + "ce-datacontentencoding": "base64", + }, + body=b"", + ) + + event = from_binary(message, JSONFormat(), CloudEvent) + + assert event.get_datacontentencoding() == "base64" + + +def test_v03_binary_round_trip() -> None: + """Test v0.3 binary mode round-trip""" + original = CloudEvent( + attributes={ + "specversion": "0.3", + "type": "com.example.test", + "source": "/test", + "id": "test-123", + "subject": "test-subject", + "schemaurl": 
"https://example.com/schema.json", + "datacontenttype": "application/json", + }, + data={"message": "Hello", "count": 42}, + ) + + # Convert to binary + message = to_binary(original, JSONFormat()) + + # Parse back + parsed = from_binary(message, JSONFormat(), CloudEvent) + + assert parsed.get_specversion() == original.get_specversion() + assert parsed.get_type() == original.get_type() + assert parsed.get_source() == original.get_source() + assert parsed.get_id() == original.get_id() + assert parsed.get_subject() == original.get_subject() + assert parsed.get_schemaurl() == original.get_schemaurl() + assert parsed.get_datacontenttype() == original.get_datacontenttype() + assert parsed.get_data() == original.get_data() + + +def test_v03_to_structured_minimal() -> None: + """Test converting minimal v0.3 event to HTTP structured mode""" + event = CloudEvent( + attributes={ + "specversion": "0.3", + "type": "com.example.test", + "source": "/test", + "id": "test-123", + } + ) + + message = to_structured(event, JSONFormat()) + + assert message.headers["content-type"] == "application/cloudevents+json" + assert b'"specversion": "0.3"' in message.body + assert b'"type": "com.example.test"' in message.body + + +def test_v03_to_structured_with_schemaurl() -> None: + """Test converting v0.3 event with schemaurl to structured mode""" + event = CloudEvent( + attributes={ + "specversion": "0.3", + "type": "com.example.test", + "source": "/test", + "id": "test-123", + "schemaurl": "https://example.com/schema.json", + } + ) + + message = to_structured(event, JSONFormat()) + + assert b'"schemaurl": "https://example.com/schema.json"' in message.body + + +def test_v03_from_structured_minimal() -> None: + """Test parsing minimal v0.3 structured HTTP message""" + message = HTTPMessage( + headers={"content-type": "application/cloudevents+json"}, + body=b'{"specversion": "0.3", "type": "com.example.test", "source": "/test", "id": "test-123"}', + ) + + event = from_structured(message, 
JSONFormat(), CloudEvent) + + assert event.get_specversion() == "0.3" + assert event.get_type() == "com.example.test" + assert event.get_source() == "/test" + assert event.get_id() == "test-123" + + +def test_v03_from_structured_with_schemaurl() -> None: + """Test parsing v0.3 structured HTTP message with schemaurl""" + message = HTTPMessage( + headers={"content-type": "application/cloudevents+json"}, + body=b'{"specversion": "0.3", "type": "com.example.test", "source": "/test", "id": "test-123", "schemaurl": "https://example.com/schema.json"}', + ) + + event = from_structured(message, JSONFormat(), CloudEvent) + + assert event.get_schemaurl() == "https://example.com/schema.json" + + +def test_v03_structured_round_trip() -> None: + """Test v0.3 structured mode round-trip""" + original = CloudEvent( + attributes={ + "specversion": "0.3", + "type": "com.example.test", + "source": "/test", + "id": "test-123", + "subject": "test-subject", + "schemaurl": "https://example.com/schema.json", + "datacontenttype": "application/json", + }, + data={"message": "Hello", "count": 42}, + ) + + # Convert to structured + message = to_structured(original, JSONFormat()) + + # Parse back + parsed = from_structured(message, JSONFormat(), CloudEvent) + + assert parsed.get_specversion() == original.get_specversion() + assert parsed.get_type() == original.get_type() + assert parsed.get_source() == original.get_source() + assert parsed.get_id() == original.get_id() + assert parsed.get_subject() == original.get_subject() + assert parsed.get_schemaurl() == original.get_schemaurl() + assert parsed.get_datacontenttype() == original.get_datacontenttype() + assert parsed.get_data() == original.get_data() + + +def test_v03_from_http_auto_detects_binary() -> None: + """Test that from_http auto-detects v0.3 binary mode""" + message = HTTPMessage( + headers={ + "ce-specversion": "0.3", + "ce-type": "com.example.test", + "ce-source": "/test", + "ce-id": "test-123", + }, + body=b"", + ) + + event = 
from_http(message, JSONFormat(), CloudEvent) + + assert event.get_specversion() == "0.3" + assert event.get_type() == "com.example.test" + + +def test_v03_from_http_auto_detects_structured() -> None: + """Test that from_http auto-detects v0.3 structured mode""" + message = HTTPMessage( + headers={"content-type": "application/cloudevents+json"}, + body=b'{"specversion": "0.3", "type": "com.example.test", "source": "/test", "id": "test-123"}', + ) + + event = from_http(message, JSONFormat(), CloudEvent) + + assert event.get_specversion() == "0.3" + assert event.get_type() == "com.example.test" + + +def test_v03_auto_detect_version_from_binary_headers() -> None: + """Test auto-detection of v0.3 from binary mode headers""" + message = HTTPMessage( + headers={ + "ce-specversion": "0.3", + "ce-type": "com.example.test", + "ce-source": "/test", + "ce-id": "test-123", + }, + body=b"", + ) + + # Don't provide event_factory, let it auto-detect + event = from_binary(message, JSONFormat()) + + assert isinstance(event, CloudEvent) + assert event.get_specversion() == "0.3" + + +def test_v03_auto_detect_version_from_structured_body() -> None: + """Test auto-detection of v0.3 from structured mode body""" + message = HTTPMessage( + headers={"content-type": "application/cloudevents+json"}, + body=b'{"specversion": "0.3", "type": "com.example.test", "source": "/test", "id": "test-123"}', + ) + + # Don't provide event_factory, let it auto-detect + event = from_structured(message, JSONFormat()) + + assert isinstance(event, CloudEvent) + assert event.get_specversion() == "0.3" + + +def test_v03_from_http_auto_detect_version_binary() -> None: + """Test from_http auto-detects v0.3 with no explicit factory""" + message = HTTPMessage( + headers={ + "ce-specversion": "0.3", + "ce-type": "com.example.test", + "ce-source": "/test", + "ce-id": "test-123", + }, + body=b"", + ) + + # Auto-detect both mode and version + event = from_http(message, JSONFormat()) + + assert isinstance(event, 
CloudEvent) + assert event.get_specversion() == "0.3" + + +def test_v03_from_http_auto_detect_version_structured() -> None: + """Test from_http auto-detects v0.3 structured with no explicit factory""" + message = HTTPMessage( + headers={"content-type": "application/cloudevents+json"}, + body=b'{"specversion": "0.3", "type": "com.example.test", "source": "/test", "id": "test-123"}', + ) + + # Auto-detect both mode and version + event = from_http(message, JSONFormat()) + + assert isinstance(event, CloudEvent) + assert event.get_specversion() == "0.3" + + +def test_v03_convenience_wrappers_binary() -> None: + """Test convenience wrapper functions with v0.3 binary mode""" + message = HTTPMessage( + headers={ + "ce-specversion": "0.3", + "ce-type": "com.example.test", + "ce-source": "/test", + "ce-id": "test-123", + }, + body=b"", + ) + + event = from_binary_event(message) + + assert isinstance(event, CloudEvent) + assert event.get_specversion() == "0.3" + + +def test_v03_convenience_wrappers_structured() -> None: + """Test convenience wrapper functions with v0.3 structured mode""" + message = HTTPMessage( + headers={"content-type": "application/cloudevents+json"}, + body=b'{"specversion": "0.3", "type": "com.example.test", "source": "/test", "id": "test-123"}', + ) + + event = from_structured_event(message) + + assert isinstance(event, CloudEvent) + assert event.get_specversion() == "0.3" + + +def test_v03_convenience_wrappers_from_http() -> None: + """Test from_http_event convenience wrapper with v0.3""" + # Binary mode + binary_message = HTTPMessage( + headers={ + "ce-specversion": "0.3", + "ce-type": "com.example.test", + "ce-source": "/test", + "ce-id": "test-123", + }, + body=b"", + ) + + event1 = from_http_event(binary_message) + assert event1.get_specversion() == "0.3" + + # Structured mode + structured_message = HTTPMessage( + headers={"content-type": "application/cloudevents+json"}, + body=b'{"specversion": "0.3", "type": "com.example.test", "source": "/test", 
"id": "test-123"}', + ) + + event2 = from_http_event(structured_message) + assert event2.get_specversion() == "0.3" + + +def test_v03_binary_with_time() -> None: + """Test v0.3 binary mode with time attribute""" + dt = datetime(2023, 6, 15, 14, 30, 45, tzinfo=timezone.utc) + + event = CloudEvent( + attributes={ + "specversion": "0.3", + "type": "com.example.test", + "source": "/test", + "id": "test-123", + "time": dt, + } + ) + + message = to_binary(event, JSONFormat()) + parsed = from_binary(message, JSONFormat(), CloudEvent) + + assert parsed.get_time() is not None + assert parsed.get_time().year == 2023 + + +def test_v03_complete_binary_event() -> None: + """Test v0.3 complete event with all attributes in binary mode""" + dt = datetime(2023, 6, 15, 14, 30, 45, tzinfo=timezone.utc) + + event = CloudEvent( + attributes={ + "specversion": "0.3", + "type": "com.example.test", + "source": "/test", + "id": "test-123", + "time": dt, + "subject": "test-subject", + "datacontenttype": "application/json", + "datacontentencoding": "base64", + "schemaurl": "https://example.com/schema.json", + "customext": "custom-value", + }, + data={"message": "Hello World!"}, + ) + + message = to_binary(event, JSONFormat()) + parsed = from_binary(message, JSONFormat()) # Auto-detect + + assert isinstance(parsed, CloudEvent) + assert parsed.get_specversion() == "0.3" + assert parsed.get_type() == "com.example.test" + assert parsed.get_source() == "/test" + assert parsed.get_id() == "test-123" + assert parsed.get_subject() == "test-subject" + assert parsed.get_datacontenttype() == "application/json" + assert parsed.get_datacontentencoding() == "base64" + assert parsed.get_schemaurl() == "https://example.com/schema.json" + assert parsed.get_extension("customext") == "custom-value" + assert parsed.get_data() == {"message": "Hello World!"} + + +def test_v03_complete_structured_event() -> None: + """Test v0.3 complete event with all attributes in structured mode""" + dt = datetime(2023, 6, 15, 
14, 30, 45, tzinfo=timezone.utc) + + event = CloudEvent( + attributes={ + "specversion": "0.3", + "type": "com.example.test", + "source": "/test", + "id": "test-123", + "time": dt, + "subject": "test-subject", + "datacontenttype": "application/json", + "schemaurl": "https://example.com/schema.json", + "customext": "custom-value", + }, + data={"message": "Hello World!"}, + ) + + message = to_structured(event, JSONFormat()) + parsed = from_structured(message, JSONFormat()) # Auto-detect + + assert isinstance(parsed, CloudEvent) + assert parsed.get_specversion() == "0.3" + assert parsed.get_type() == "com.example.test" + assert parsed.get_source() == "/test" + assert parsed.get_id() == "test-123" + assert parsed.get_subject() == "test-subject" + assert parsed.get_datacontenttype() == "application/json" + assert parsed.get_schemaurl() == "https://example.com/schema.json" + assert parsed.get_extension("customext") == "custom-value" + assert parsed.get_data() == {"message": "Hello World!"} diff --git a/tests/test_core/test_v03/test_json_format.py b/tests/test_core/test_v03/test_json_format.py new file mode 100644 index 00000000..f863500a --- /dev/null +++ b/tests/test_core/test_v03/test_json_format.py @@ -0,0 +1,324 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import base64 +from datetime import datetime, timezone + +from cloudevents.core.formats.json import JSONFormat +from cloudevents.core.v03.event import CloudEvent + + +def test_v03_json_read_minimal() -> None: + """Test reading a minimal v0.3 CloudEvent from JSON""" + json_data = b"""{ + "specversion": "0.3", + "type": "com.example.test", + "source": "/test", + "id": "test-123" + }""" + + format = JSONFormat() + event = format.read(CloudEvent, json_data) + + assert event.get_specversion() == "0.3" + assert event.get_type() == "com.example.test" + assert event.get_source() == "/test" + assert event.get_id() == "test-123" + assert event.get_data() is None + + +def test_v03_json_write_minimal() -> None: + """Test writing a minimal v0.3 CloudEvent to JSON""" + event = CloudEvent( + attributes={ + "specversion": "0.3", + "type": "com.example.test", + "source": "/test", + "id": "test-123", + } + ) + + format = JSONFormat() + json_bytes = format.write(event) + json_str = json_bytes.decode("utf-8") + + assert '"specversion": "0.3"' in json_str + assert '"type": "com.example.test"' in json_str + assert '"source": "/test"' in json_str + assert '"id": "test-123"' in json_str + + +def test_v03_json_with_schemaurl() -> None: + """Test v0.3 schemaurl attribute in JSON""" + json_data = b"""{ + "specversion": "0.3", + "type": "com.example.test", + "source": "/test", + "id": "test-123", + "schemaurl": "https://example.com/schema.json" + }""" + + format = JSONFormat() + event = format.read(CloudEvent, json_data) + + assert event.get_schemaurl() == "https://example.com/schema.json" + assert event.get_dataschema() == "https://example.com/schema.json" + + +def test_v03_json_write_with_schemaurl() -> None: + """Test writing v0.3 event with schemaurl to JSON""" + event = CloudEvent( + attributes={ + "specversion": "0.3", + "type": "com.example.test", + "source": "/test", + "id": "test-123", + "schemaurl": "https://example.com/schema.json", + } + ) + + format = JSONFormat() + json_bytes 
= format.write(event) + json_str = json_bytes.decode("utf-8") + + assert '"schemaurl": "https://example.com/schema.json"' in json_str + + +def test_v03_json_with_datacontentencoding_base64() -> None: + """Test v0.3 datacontentencoding with base64 encoded data""" + # In v0.3, when datacontentencoding is base64, the data field contains base64 string + original_data = b"Hello World!" + base64_data = base64.b64encode(original_data).decode("utf-8") + + json_data = f'''{{ + "specversion": "0.3", + "type": "com.example.test", + "source": "/test", + "id": "test-123", + "datacontentencoding": "base64", + "data": "{base64_data}" + }}'''.encode("utf-8") + + format = JSONFormat() + event = format.read(CloudEvent, json_data) + + assert event.get_datacontentencoding() == "base64" + assert event.get_data() == original_data # Should be decoded + + +def test_v03_json_write_binary_data_with_base64() -> None: + """Test writing v0.3 event with binary data (uses datacontentencoding)""" + binary_data = b"Hello World!" 
+ + event = CloudEvent( + attributes={ + "specversion": "0.3", + "type": "com.example.test", + "source": "/test", + "id": "test-123", + }, + data=binary_data, + ) + + format = JSONFormat() + json_bytes = format.write(event) + json_str = json_bytes.decode("utf-8") + + # v0.3 should use datacontentencoding with base64-encoded data field + assert '"datacontentencoding": "base64"' in json_str + assert '"data"' in json_str + assert '"data_base64"' not in json_str # v1.0 field should not be present + + # Verify we can read it back + event_read = format.read(CloudEvent, json_bytes) + assert event_read.get_data() == binary_data + + +def test_v03_json_round_trip_with_binary_data() -> None: + """Test complete round-trip of v0.3 event with binary data""" + original_data = b"\x00\x01\x02\x03\x04\x05" + + event = CloudEvent( + attributes={ + "specversion": "0.3", + "type": "com.example.test", + "source": "/test", + "id": "test-123", + "datacontenttype": "application/octet-stream", + }, + data=original_data, + ) + + format = JSONFormat() + + # Write to JSON + json_bytes = format.write(event) + + # Read back + event_read = format.read(CloudEvent, json_bytes) + + assert event_read.get_specversion() == "0.3" + assert event_read.get_data() == original_data + assert event_read.get_datacontentencoding() == "base64" + + +def test_v03_json_with_dict_data() -> None: + """Test v0.3 event with JSON dict data""" + json_data = b"""{ + "specversion": "0.3", + "type": "com.example.test", + "source": "/test", + "id": "test-123", + "datacontenttype": "application/json", + "data": {"message": "Hello", "count": 42} + }""" + + format = JSONFormat() + event = format.read(CloudEvent, json_data) + + data = event.get_data() + assert isinstance(data, dict) + assert data["message"] == "Hello" + assert data["count"] == 42 + + +def test_v03_json_write_with_dict_data() -> None: + """Test writing v0.3 event with dict data""" + event = CloudEvent( + attributes={ + "specversion": "0.3", + "type": 
"com.example.test", + "source": "/test", + "id": "test-123", + "datacontenttype": "application/json", + }, + data={"message": "Hello", "count": 42}, + ) + + format = JSONFormat() + json_bytes = format.write(event) + json_str = json_bytes.decode("utf-8") + + assert ( + '"data": {"message": "Hello", "count": 42}' in json_str + or '"data": {"count": 42, "message": "Hello"}' in json_str + ) + + +def test_v03_json_with_time() -> None: + """Test v0.3 event with time attribute""" + json_data = b"""{ + "specversion": "0.3", + "type": "com.example.test", + "source": "/test", + "id": "test-123", + "time": "2023-06-15T14:30:45Z" + }""" + + format = JSONFormat() + event = format.read(CloudEvent, json_data) + + time = event.get_time() + assert isinstance(time, datetime) + assert time.year == 2023 + assert time.month == 6 + assert time.day == 15 + + +def test_v03_json_write_with_time() -> None: + """Test writing v0.3 event with time""" + dt = datetime(2023, 6, 15, 14, 30, 45, tzinfo=timezone.utc) + + event = CloudEvent( + attributes={ + "specversion": "0.3", + "type": "com.example.test", + "source": "/test", + "id": "test-123", + "time": dt, + } + ) + + format = JSONFormat() + json_bytes = format.write(event) + json_str = json_bytes.decode("utf-8") + + assert '"time": "2023-06-15T14:30:45Z"' in json_str + + +def test_v03_json_complete_event() -> None: + """Test v0.3 event with all optional attributes""" + json_data = b"""{ + "specversion": "0.3", + "type": "com.example.test", + "source": "/test", + "id": "test-123", + "time": "2023-06-15T14:30:45Z", + "subject": "test-subject", + "datacontenttype": "application/json", + "schemaurl": "https://example.com/schema.json", + "customext": "custom-value", + "data": {"message": "Hello"} + }""" + + format = JSONFormat() + event = format.read(CloudEvent, json_data) + + assert event.get_specversion() == "0.3" + assert event.get_type() == "com.example.test" + assert event.get_source() == "/test" + assert event.get_id() == "test-123" + assert 
event.get_subject() == "test-subject" + assert event.get_datacontenttype() == "application/json" + assert event.get_schemaurl() == "https://example.com/schema.json" + assert event.get_extension("customext") == "custom-value" + assert event.get_data() == {"message": "Hello"} + + +def test_v03_json_round_trip_complete() -> None: + """Test complete round-trip of v0.3 event with all attributes""" + dt = datetime(2023, 6, 15, 14, 30, 45, tzinfo=timezone.utc) + + event = CloudEvent( + attributes={ + "specversion": "0.3", + "type": "com.example.test", + "source": "/test", + "id": "test-123", + "time": dt, + "subject": "test-subject", + "datacontenttype": "application/json", + "schemaurl": "https://example.com/schema.json", + "customext": "custom-value", + }, + data={"message": "Hello", "count": 42}, + ) + + format = JSONFormat() + + # Write to JSON + json_bytes = format.write(event) + + # Read back + event_read = format.read(CloudEvent, json_bytes) + + assert event_read.get_specversion() == event.get_specversion() + assert event_read.get_type() == event.get_type() + assert event_read.get_source() == event.get_source() + assert event_read.get_id() == event.get_id() + assert event_read.get_subject() == event.get_subject() + assert event_read.get_datacontenttype() == event.get_datacontenttype() + assert event_read.get_schemaurl() == event.get_schemaurl() + assert event_read.get_extension("customext") == event.get_extension("customext") + assert event_read.get_data() == event.get_data() diff --git a/tests/test_core/test_v1/__init__.py b/tests/test_core/test_v1/__init__.py new file mode 100644 index 00000000..8043675e --- /dev/null +++ b/tests/test_core/test_v1/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. diff --git a/tests/test_core/test_v1/test_event.py b/tests/test_core/test_v1/test_event.py new file mode 100644 index 00000000..167db109 --- /dev/null +++ b/tests/test_core/test_v1/test_event.py @@ -0,0 +1,333 @@ +# Copyright 2018-Present The CloudEvents Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from datetime import datetime, timezone +from typing import Any + +import pytest + +from cloudevents.core.exceptions import ( + CloudEventValidationError, + CustomExtensionAttributeError, + InvalidAttributeTypeError, + InvalidAttributeValueError, + MissingRequiredAttributeError, +) +from cloudevents.core.v1.event import CloudEvent + + +def test_missing_required_attributes() -> None: + with pytest.raises(CloudEventValidationError) as e: + CloudEvent({}) + + expected_errors = { + "id": [ + str(MissingRequiredAttributeError("id")), + str(InvalidAttributeValueError("id", "Attribute 'id' must not be None")), + str(InvalidAttributeTypeError("id", str)), + ], + "source": [ + str(MissingRequiredAttributeError("source")), + str(InvalidAttributeTypeError("source", str)), + ], + "type": [ + str(MissingRequiredAttributeError("type")), + str(InvalidAttributeTypeError("type", str)), + ], + "specversion": [ + str(MissingRequiredAttributeError("specversion")), + str(InvalidAttributeTypeError("specversion", str)), + str( + InvalidAttributeValueError( + "specversion", "Attribute 'specversion' must be '1.0'" + ) + ), + ], + } + + actual_errors = { + key: [str(e) for e in value] for key, value in e.value.errors.items() + } + assert actual_errors == expected_errors + + +@pytest.mark.parametrize( + "time,expected_error", + [ + ( + "2023-10-25T17:09:19.736166Z", + {"time": [str(InvalidAttributeTypeError("time", datetime))]}, + ), + ( + datetime(2023, 10, 25, 17, 9, 19, 736166), + { + "time": [ + str( + InvalidAttributeValueError( + "time", "Attribute 'time' must be timezone aware" + ) + ) + ] + }, + ), + ( + 1, + {"time": [str(InvalidAttributeTypeError("time", datetime))]}, + ), + ], +) +def test_time_validation(time: Any, expected_error: dict) -> None: + with pytest.raises(CloudEventValidationError) as e: + CloudEvent( + { + "id": "1", + "source": "/", + "type": "test", + "specversion": "1.0", + "time": time, + } + ) + actual_errors = { + key: [str(e) for e in value] for key, value 
in e.value.errors.items() + } + assert actual_errors == expected_error + + +@pytest.mark.parametrize( + "subject,expected_error", + [ + ( + 1234, + {"subject": [str(InvalidAttributeTypeError("subject", str))]}, + ), + ( + "", + { + "subject": [ + str( + InvalidAttributeValueError( + "subject", "Attribute 'subject' must not be empty" + ) + ) + ] + }, + ), + ], +) +def test_subject_validation(subject: Any, expected_error: dict) -> None: + with pytest.raises(CloudEventValidationError) as e: + CloudEvent( + { + "id": "1", + "source": "/", + "type": "test", + "specversion": "1.0", + "subject": subject, + } + ) + + actual_errors = { + key: [str(e) for e in value] for key, value in e.value.errors.items() + } + assert actual_errors == expected_error + + +@pytest.mark.parametrize( + "datacontenttype,expected_error", + [ + ( + 1234, + { + "datacontenttype": [ + str(InvalidAttributeTypeError("datacontenttype", str)) + ] + }, + ), + ( + "", + { + "datacontenttype": [ + str( + InvalidAttributeValueError( + "datacontenttype", + "Attribute 'datacontenttype' must not be empty", + ) + ) + ] + }, + ), + ], +) +def test_datacontenttype_validation(datacontenttype: Any, expected_error: dict) -> None: + with pytest.raises(CloudEventValidationError) as e: + CloudEvent( + { + "id": "1", + "source": "/", + "type": "test", + "specversion": "1.0", + "datacontenttype": datacontenttype, + } + ) + + actual_errors = { + key: [str(e) for e in value] for key, value in e.value.errors.items() + } + assert actual_errors == expected_error + + +@pytest.mark.parametrize( + "dataschema,expected_error", + [ + ( + 1234, + {"dataschema": [str(InvalidAttributeTypeError("dataschema", str))]}, + ), + ( + "", + { + "dataschema": [ + str( + InvalidAttributeValueError( + "dataschema", "Attribute 'dataschema' must not be empty" + ) + ) + ] + }, + ), + ], +) +def test_dataschema_validation(dataschema: Any, expected_error: dict) -> None: + with pytest.raises(CloudEventValidationError) as e: + CloudEvent( + { + "id": 
"1", + "source": "/", + "type": "test", + "specversion": "1.0", + "dataschema": dataschema, + } + ) + + actual_errors = { + key: [str(e) for e in value] for key, value in e.value.errors.items() + } + assert actual_errors == expected_error + + +@pytest.mark.parametrize( + "extension_name,expected_error", + [ + ( + "", + { + "": [ + str( + CustomExtensionAttributeError( + "", + "Extension attribute '' should be between 1 and 20 characters long", + ) + ), + str( + CustomExtensionAttributeError( + "", + "Extension attribute '' should only contain lowercase letters and numbers", + ) + ), + ] + }, + ), + ( + "thisisaverylongextension", + { + "thisisaverylongextension": [ + str( + CustomExtensionAttributeError( + "thisisaverylongextension", + "Extension attribute 'thisisaverylongextension' should be between 1 and 20 characters long", + ) + ) + ] + }, + ), + ( + "data", + { + "data": [ + str( + CustomExtensionAttributeError( + "data", + "Extension attribute 'data' is reserved and must not be used", + ) + ) + ] + }, + ), + ], +) +def test_custom_extension(extension_name: str, expected_error: dict) -> None: + with pytest.raises(CloudEventValidationError) as e: + CloudEvent( + { + "id": "1", + "source": "/", + "type": "test", + "specversion": "1.0", + extension_name: "value", + } + ) + + actual_errors = { + key: [str(e) for e in value] for key, value in e.value.errors.items() + } + assert actual_errors == expected_error + + +def test_cloud_event_constructor() -> None: + id = "1" + source = "/source" + type = "com.test.type" + specversion = "1.0" + datacontenttype = "application/json" + dataschema = "http://example.com/schema" + subject = "test_subject" + time = datetime.now(tz=timezone.utc) + data = {"key": "value"} + customextension = "customExtension" + + event = CloudEvent( + attributes={ + "id": id, + "source": source, + "type": type, + "specversion": specversion, + "datacontenttype": datacontenttype, + "dataschema": dataschema, + "subject": subject, + "time": time, + 
"customextension": customextension, + }, + data=data, + ) + + assert event.get_id() == id + assert event.get_source() == source + assert event.get_type() == type + assert event.get_specversion() == specversion + assert event.get_datacontenttype() == datacontenttype + assert event.get_dataschema() == dataschema + assert event.get_subject() == subject + assert event.get_time() == time + assert event.get_extension("customextension") == customextension + assert event.get_data() == data diff --git a/uv.lock b/uv.lock new file mode 100644 index 00000000..4c896628 --- /dev/null +++ b/uv.lock @@ -0,0 +1,645 @@ +version = 1 +revision = 3 +requires-python = ">=3.10" + +[[package]] +name = "cfgv" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114, upload-time = "2023-08-12T20:38:17.776Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249, upload-time = "2023-08-12T20:38:16.269Z" }, +] + +[[package]] +name = "cloudevents" +source = { editable = "." 
} +dependencies = [ + { name = "python-dateutil" }, +] + +[package.dev-dependencies] +dev = [ + { name = "flake8" }, + { name = "flake8-print" }, + { name = "isort" }, + { name = "mypy" }, + { name = "pep8-naming" }, + { name = "pre-commit" }, + { name = "pytest" }, + { name = "pytest-cov" }, + { name = "ruff" }, + { name = "types-python-dateutil" }, +] + +[package.metadata] +requires-dist = [{ name = "python-dateutil", specifier = ">=2.8.2" }] + +[package.metadata.requires-dev] +dev = [ + { name = "flake8", specifier = ">=7.3.0" }, + { name = "flake8-print", specifier = ">=5.0.0" }, + { name = "isort", specifier = ">=7.0.0" }, + { name = "mypy", specifier = ">=1.19.1" }, + { name = "pep8-naming", specifier = ">=0.15.1" }, + { name = "pre-commit", specifier = ">=4.5.1" }, + { name = "pytest", specifier = ">=9.0.2" }, + { name = "pytest-cov", specifier = ">=7.0.0" }, + { name = "ruff", specifier = ">=0.14.10" }, + { name = "types-python-dateutil", specifier = ">=2.9.0.20251115" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "coverage" +version = "7.13.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b6/45/2c665ca77ec32ad67e25c77daf1cee28ee4558f3bc571cdbaf88a00b9f23/coverage-7.13.0.tar.gz", hash = 
"sha256:a394aa27f2d7ff9bc04cf703817773a59ad6dfbd577032e690f961d2460ee936", size = 820905, upload-time = "2025-12-08T13:14:38.055Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/db/08/bdd7ccca14096f7eb01412b87ac11e5d16e4cb54b6e328afc9dee8bdaec1/coverage-7.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:02d9fb9eccd48f6843c98a37bd6817462f130b86da8660461e8f5e54d4c06070", size = 217979, upload-time = "2025-12-08T13:12:14.505Z" }, + { url = "https://files.pythonhosted.org/packages/fa/f0/d1302e3416298a28b5663ae1117546a745d9d19fde7e28402b2c5c3e2109/coverage-7.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:367449cf07d33dc216c083f2036bb7d976c6e4903ab31be400ad74ad9f85ce98", size = 218496, upload-time = "2025-12-08T13:12:16.237Z" }, + { url = "https://files.pythonhosted.org/packages/07/26/d36c354c8b2a320819afcea6bffe72839efd004b98d1d166b90801d49d57/coverage-7.13.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cdb3c9f8fef0a954c632f64328a3935988d33a6604ce4bf67ec3e39670f12ae5", size = 245237, upload-time = "2025-12-08T13:12:17.858Z" }, + { url = "https://files.pythonhosted.org/packages/91/52/be5e85631e0eec547873d8b08dd67a5f6b111ecfe89a86e40b89b0c1c61c/coverage-7.13.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d10fd186aac2316f9bbb46ef91977f9d394ded67050ad6d84d94ed6ea2e8e54e", size = 247061, upload-time = "2025-12-08T13:12:19.132Z" }, + { url = "https://files.pythonhosted.org/packages/0f/45/a5e8fa0caf05fbd8fa0402470377bff09cc1f026d21c05c71e01295e55ab/coverage-7.13.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f88ae3e69df2ab62fb0bc5219a597cb890ba5c438190ffa87490b315190bb33", size = 248928, upload-time = "2025-12-08T13:12:20.702Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/42/ffb5069b6fd1b95fae482e02f3fecf380d437dd5a39bae09f16d2e2e7e01/coverage-7.13.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c4be718e51e86f553bcf515305a158a1cd180d23b72f07ae76d6017c3cc5d791", size = 245931, upload-time = "2025-12-08T13:12:22.243Z" }, + { url = "https://files.pythonhosted.org/packages/95/6e/73e809b882c2858f13e55c0c36e94e09ce07e6165d5644588f9517efe333/coverage-7.13.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a00d3a393207ae12f7c49bb1c113190883b500f48979abb118d8b72b8c95c032", size = 246968, upload-time = "2025-12-08T13:12:23.52Z" }, + { url = "https://files.pythonhosted.org/packages/87/08/64ebd9e64b6adb8b4a4662133d706fbaccecab972e0b3ccc23f64e2678ad/coverage-7.13.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a7b1cd820e1b6116f92c6128f1188e7afe421c7e1b35fa9836b11444e53ebd9", size = 244972, upload-time = "2025-12-08T13:12:24.781Z" }, + { url = "https://files.pythonhosted.org/packages/12/97/f4d27c6fe0cb375a5eced4aabcaef22de74766fb80a3d5d2015139e54b22/coverage-7.13.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:37eee4e552a65866f15dedd917d5e5f3d59805994260720821e2c1b51ac3248f", size = 245241, upload-time = "2025-12-08T13:12:28.041Z" }, + { url = "https://files.pythonhosted.org/packages/0c/94/42f8ae7f633bf4c118bf1038d80472f9dade88961a466f290b81250f7ab7/coverage-7.13.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:62d7c4f13102148c78d7353c6052af6d899a7f6df66a32bddcc0c0eb7c5326f8", size = 245847, upload-time = "2025-12-08T13:12:29.337Z" }, + { url = "https://files.pythonhosted.org/packages/a8/2f/6369ca22b6b6d933f4f4d27765d313d8914cc4cce84f82a16436b1a233db/coverage-7.13.0-cp310-cp310-win32.whl", hash = "sha256:24e4e56304fdb56f96f80eabf840eab043b3afea9348b88be680ec5986780a0f", size = 220573, upload-time = "2025-12-08T13:12:30.905Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/dc/a6a741e519acceaeccc70a7f4cfe5d030efc4b222595f0677e101af6f1f3/coverage-7.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:74c136e4093627cf04b26a35dab8cbfc9b37c647f0502fc313376e11726ba303", size = 221509, upload-time = "2025-12-08T13:12:32.09Z" }, + { url = "https://files.pythonhosted.org/packages/f1/dc/888bf90d8b1c3d0b4020a40e52b9f80957d75785931ec66c7dfaccc11c7d/coverage-7.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0dfa3855031070058add1a59fdfda0192fd3e8f97e7c81de0596c145dea51820", size = 218104, upload-time = "2025-12-08T13:12:33.333Z" }, + { url = "https://files.pythonhosted.org/packages/8d/ea/069d51372ad9c380214e86717e40d1a743713a2af191cfba30a0911b0a4a/coverage-7.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4fdb6f54f38e334db97f72fa0c701e66d8479af0bc3f9bfb5b90f1c30f54500f", size = 218606, upload-time = "2025-12-08T13:12:34.498Z" }, + { url = "https://files.pythonhosted.org/packages/68/09/77b1c3a66c2aa91141b6c4471af98e5b1ed9b9e6d17255da5eb7992299e3/coverage-7.13.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7e442c013447d1d8d195be62852270b78b6e255b79b8675bad8479641e21fd96", size = 248999, upload-time = "2025-12-08T13:12:36.02Z" }, + { url = "https://files.pythonhosted.org/packages/0a/32/2e2f96e9d5691eaf1181d9040f850b8b7ce165ea10810fd8e2afa534cef7/coverage-7.13.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1ed5630d946859de835a85e9a43b721123a8a44ec26e2830b296d478c7fd4259", size = 250925, upload-time = "2025-12-08T13:12:37.221Z" }, + { url = "https://files.pythonhosted.org/packages/7b/45/b88ddac1d7978859b9a39a8a50ab323186148f1d64bc068f86fc77706321/coverage-7.13.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f15a931a668e58087bc39d05d2b4bf4b14ff2875b49c994bbdb1c2217a8daeb", size = 253032, upload-time = "2025-12-08T13:12:38.763Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/cb/e15513f94c69d4820a34b6bf3d2b1f9f8755fa6021be97c7065442d7d653/coverage-7.13.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:30a3a201a127ea57f7e14ba43c93c9c4be8b7d17a26e03bb49e6966d019eede9", size = 249134, upload-time = "2025-12-08T13:12:40.382Z" }, + { url = "https://files.pythonhosted.org/packages/09/61/d960ff7dc9e902af3310ce632a875aaa7860f36d2bc8fc8b37ee7c1b82a5/coverage-7.13.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7a485ff48fbd231efa32d58f479befce52dcb6bfb2a88bb7bf9a0b89b1bc8030", size = 250731, upload-time = "2025-12-08T13:12:41.992Z" }, + { url = "https://files.pythonhosted.org/packages/98/34/c7c72821794afc7c7c2da1db8f00c2c98353078aa7fb6b5ff36aac834b52/coverage-7.13.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:22486cdafba4f9e471c816a2a5745337742a617fef68e890d8baf9f3036d7833", size = 248795, upload-time = "2025-12-08T13:12:43.331Z" }, + { url = "https://files.pythonhosted.org/packages/0a/5b/e0f07107987a43b2def9aa041c614ddb38064cbf294a71ef8c67d43a0cdd/coverage-7.13.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:263c3dbccc78e2e331e59e90115941b5f53e85cfcc6b3b2fbff1fd4e3d2c6ea8", size = 248514, upload-time = "2025-12-08T13:12:44.546Z" }, + { url = "https://files.pythonhosted.org/packages/71/c2/c949c5d3b5e9fc6dd79e1b73cdb86a59ef14f3709b1d72bf7668ae12e000/coverage-7.13.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e5330fa0cc1f5c3c4c3bb8e101b742025933e7848989370a1d4c8c5e401ea753", size = 249424, upload-time = "2025-12-08T13:12:45.759Z" }, + { url = "https://files.pythonhosted.org/packages/11/f1/bbc009abd6537cec0dffb2cc08c17a7f03de74c970e6302db4342a6e05af/coverage-7.13.0-cp311-cp311-win32.whl", hash = "sha256:0f4872f5d6c54419c94c25dd6ae1d015deeb337d06e448cd890a1e89a8ee7f3b", size = 220597, upload-time = "2025-12-08T13:12:47.378Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/f6/d9977f2fb51c10fbaed0718ce3d0a8541185290b981f73b1d27276c12d91/coverage-7.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51a202e0f80f241ccb68e3e26e19ab5b3bf0f813314f2c967642f13ebcf1ddfe", size = 221536, upload-time = "2025-12-08T13:12:48.7Z" }, + { url = "https://files.pythonhosted.org/packages/be/ad/3fcf43fd96fb43e337a3073dea63ff148dcc5c41ba7a14d4c7d34efb2216/coverage-7.13.0-cp311-cp311-win_arm64.whl", hash = "sha256:d2a9d7f1c11487b1c69367ab3ac2d81b9b3721f097aa409a3191c3e90f8f3dd7", size = 220206, upload-time = "2025-12-08T13:12:50.365Z" }, + { url = "https://files.pythonhosted.org/packages/9b/f1/2619559f17f31ba00fc40908efd1fbf1d0a5536eb75dc8341e7d660a08de/coverage-7.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0b3d67d31383c4c68e19a88e28fc4c2e29517580f1b0ebec4a069d502ce1e0bf", size = 218274, upload-time = "2025-12-08T13:12:52.095Z" }, + { url = "https://files.pythonhosted.org/packages/2b/11/30d71ae5d6e949ff93b2a79a2c1b4822e00423116c5c6edfaeef37301396/coverage-7.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:581f086833d24a22c89ae0fe2142cfaa1c92c930adf637ddf122d55083fb5a0f", size = 218638, upload-time = "2025-12-08T13:12:53.418Z" }, + { url = "https://files.pythonhosted.org/packages/79/c2/fce80fc6ded8d77e53207489d6065d0fed75db8951457f9213776615e0f5/coverage-7.13.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0a3a30f0e257df382f5f9534d4ce3d4cf06eafaf5192beb1a7bd066cb10e78fb", size = 250129, upload-time = "2025-12-08T13:12:54.744Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b6/51b5d1eb6fcbb9a1d5d6984e26cbe09018475c2922d554fd724dd0f056ee/coverage-7.13.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:583221913fbc8f53b88c42e8dbb8fca1d0f2e597cb190ce45916662b8b9d9621", size = 252885, upload-time = "2025-12-08T13:12:56.401Z" }, + { url = 
"https://files.pythonhosted.org/packages/0d/f8/972a5affea41de798691ab15d023d3530f9f56a72e12e243f35031846ff7/coverage-7.13.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f5d9bd30756fff3e7216491a0d6d520c448d5124d3d8e8f56446d6412499e74", size = 253974, upload-time = "2025-12-08T13:12:57.718Z" }, + { url = "https://files.pythonhosted.org/packages/8a/56/116513aee860b2c7968aa3506b0f59b22a959261d1dbf3aea7b4450a7520/coverage-7.13.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a23e5a1f8b982d56fa64f8e442e037f6ce29322f1f9e6c2344cd9e9f4407ee57", size = 250538, upload-time = "2025-12-08T13:12:59.254Z" }, + { url = "https://files.pythonhosted.org/packages/d6/75/074476d64248fbadf16dfafbf93fdcede389ec821f74ca858d7c87d2a98c/coverage-7.13.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9b01c22bc74a7fb44066aaf765224c0d933ddf1f5047d6cdfe4795504a4493f8", size = 251912, upload-time = "2025-12-08T13:13:00.604Z" }, + { url = "https://files.pythonhosted.org/packages/f2/d2/aa4f8acd1f7c06024705c12609d8698c51b27e4d635d717cd1934c9668e2/coverage-7.13.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:898cce66d0836973f48dda4e3514d863d70142bdf6dfab932b9b6a90ea5b222d", size = 250054, upload-time = "2025-12-08T13:13:01.892Z" }, + { url = "https://files.pythonhosted.org/packages/19/98/8df9e1af6a493b03694a1e8070e024e7d2cdc77adedc225a35e616d505de/coverage-7.13.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:3ab483ea0e251b5790c2aac03acde31bff0c736bf8a86829b89382b407cd1c3b", size = 249619, upload-time = "2025-12-08T13:13:03.236Z" }, + { url = "https://files.pythonhosted.org/packages/d8/71/f8679231f3353018ca66ef647fa6fe7b77e6bff7845be54ab84f86233363/coverage-7.13.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1d84e91521c5e4cb6602fe11ece3e1de03b2760e14ae4fcf1a4b56fa3c801fcd", size = 251496, upload-time = "2025-12-08T13:13:04.511Z" }, + { url = 
"https://files.pythonhosted.org/packages/04/86/9cb406388034eaf3c606c22094edbbb82eea1fa9d20c0e9efadff20d0733/coverage-7.13.0-cp312-cp312-win32.whl", hash = "sha256:193c3887285eec1dbdb3f2bd7fbc351d570ca9c02ca756c3afbc71b3c98af6ef", size = 220808, upload-time = "2025-12-08T13:13:06.422Z" }, + { url = "https://files.pythonhosted.org/packages/1c/59/af483673df6455795daf5f447c2f81a3d2fcfc893a22b8ace983791f6f34/coverage-7.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:4f3e223b2b2db5e0db0c2b97286aba0036ca000f06aca9b12112eaa9af3d92ae", size = 221616, upload-time = "2025-12-08T13:13:07.95Z" }, + { url = "https://files.pythonhosted.org/packages/64/b0/959d582572b30a6830398c60dd419c1965ca4b5fb38ac6b7093a0d50ca8d/coverage-7.13.0-cp312-cp312-win_arm64.whl", hash = "sha256:086cede306d96202e15a4b77ace8472e39d9f4e5f9fd92dd4fecdfb2313b2080", size = 220261, upload-time = "2025-12-08T13:13:09.581Z" }, + { url = "https://files.pythonhosted.org/packages/7c/cc/bce226595eb3bf7d13ccffe154c3c487a22222d87ff018525ab4dd2e9542/coverage-7.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:28ee1c96109974af104028a8ef57cec21447d42d0e937c0275329272e370ebcf", size = 218297, upload-time = "2025-12-08T13:13:10.977Z" }, + { url = "https://files.pythonhosted.org/packages/3b/9f/73c4d34600aae03447dff3d7ad1d0ac649856bfb87d1ca7d681cfc913f9e/coverage-7.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d1e97353dcc5587b85986cda4ff3ec98081d7e84dd95e8b2a6d59820f0545f8a", size = 218673, upload-time = "2025-12-08T13:13:12.562Z" }, + { url = "https://files.pythonhosted.org/packages/63/ab/8fa097db361a1e8586535ae5073559e6229596b3489ec3ef2f5b38df8cb2/coverage-7.13.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:99acd4dfdfeb58e1937629eb1ab6ab0899b131f183ee5f23e0b5da5cba2fec74", size = 249652, upload-time = "2025-12-08T13:13:13.909Z" }, + { url = 
"https://files.pythonhosted.org/packages/90/3a/9bfd4de2ff191feb37ef9465855ca56a6f2f30a3bca172e474130731ac3d/coverage-7.13.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ff45e0cd8451e293b63ced93161e189780baf444119391b3e7d25315060368a6", size = 252251, upload-time = "2025-12-08T13:13:15.553Z" }, + { url = "https://files.pythonhosted.org/packages/df/61/b5d8105f016e1b5874af0d7c67542da780ccd4a5f2244a433d3e20ceb1ad/coverage-7.13.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f4f72a85316d8e13234cafe0a9f81b40418ad7a082792fa4165bd7d45d96066b", size = 253492, upload-time = "2025-12-08T13:13:16.849Z" }, + { url = "https://files.pythonhosted.org/packages/f3/b8/0fad449981803cc47a4694768b99823fb23632150743f9c83af329bb6090/coverage-7.13.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:11c21557d0e0a5a38632cbbaca5f008723b26a89d70db6315523df6df77d6232", size = 249850, upload-time = "2025-12-08T13:13:18.142Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e9/8d68337c3125014d918cf4327d5257553a710a2995a6a6de2ac77e5aa429/coverage-7.13.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:76541dc8d53715fb4f7a3a06b34b0dc6846e3c69bc6204c55653a85dd6220971", size = 251633, upload-time = "2025-12-08T13:13:19.56Z" }, + { url = "https://files.pythonhosted.org/packages/55/14/d4112ab26b3a1bc4b3c1295d8452dcf399ed25be4cf649002fb3e64b2d93/coverage-7.13.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6e9e451dee940a86789134b6b0ffbe31c454ade3b849bb8a9d2cca2541a8e91d", size = 249586, upload-time = "2025-12-08T13:13:20.883Z" }, + { url = "https://files.pythonhosted.org/packages/2c/a9/22b0000186db663b0d82f86c2f1028099ae9ac202491685051e2a11a5218/coverage-7.13.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:5c67dace46f361125e6b9cace8fe0b729ed8479f47e70c89b838d319375c8137", size = 249412, upload-time = "2025-12-08T13:13:22.22Z" }, + { url = 
"https://files.pythonhosted.org/packages/a1/2e/42d8e0d9e7527fba439acdc6ed24a2b97613b1dc85849b1dd935c2cffef0/coverage-7.13.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f59883c643cb19630500f57016f76cfdcd6845ca8c5b5ea1f6e17f74c8e5f511", size = 251191, upload-time = "2025-12-08T13:13:23.899Z" }, + { url = "https://files.pythonhosted.org/packages/a4/af/8c7af92b1377fd8860536aadd58745119252aaaa71a5213e5a8e8007a9f5/coverage-7.13.0-cp313-cp313-win32.whl", hash = "sha256:58632b187be6f0be500f553be41e277712baa278147ecb7559983c6d9faf7ae1", size = 220829, upload-time = "2025-12-08T13:13:25.182Z" }, + { url = "https://files.pythonhosted.org/packages/58/f9/725e8bf16f343d33cbe076c75dc8370262e194ff10072c0608b8e5cf33a3/coverage-7.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:73419b89f812f498aca53f757dd834919b48ce4799f9d5cad33ca0ae442bdb1a", size = 221640, upload-time = "2025-12-08T13:13:26.836Z" }, + { url = "https://files.pythonhosted.org/packages/8a/ff/e98311000aa6933cc79274e2b6b94a2fe0fe3434fca778eba82003675496/coverage-7.13.0-cp313-cp313-win_arm64.whl", hash = "sha256:eb76670874fdd6091eedcc856128ee48c41a9bbbb9c3f1c7c3cf169290e3ffd6", size = 220269, upload-time = "2025-12-08T13:13:28.116Z" }, + { url = "https://files.pythonhosted.org/packages/cf/cf/bbaa2e1275b300343ea865f7d424cc0a2e2a1df6925a070b2b2d5d765330/coverage-7.13.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6e63ccc6e0ad8986386461c3c4b737540f20426e7ec932f42e030320896c311a", size = 218990, upload-time = "2025-12-08T13:13:29.463Z" }, + { url = "https://files.pythonhosted.org/packages/21/1d/82f0b3323b3d149d7672e7744c116e9c170f4957e0c42572f0366dbb4477/coverage-7.13.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:494f5459ffa1bd45e18558cd98710c36c0b8fbfa82a5eabcbe671d80ecffbfe8", size = 219340, upload-time = "2025-12-08T13:13:31.524Z" }, + { url = 
"https://files.pythonhosted.org/packages/fb/e3/fe3fd4702a3832a255f4d43013eacb0ef5fc155a5960ea9269d8696db28b/coverage-7.13.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:06cac81bf10f74034e055e903f5f946e3e26fc51c09fc9f584e4a1605d977053", size = 260638, upload-time = "2025-12-08T13:13:32.965Z" }, + { url = "https://files.pythonhosted.org/packages/ad/01/63186cb000307f2b4da463f72af9b85d380236965574c78e7e27680a2593/coverage-7.13.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f2ffc92b46ed6e6760f1d47a71e56b5664781bc68986dbd1836b2b70c0ce2071", size = 262705, upload-time = "2025-12-08T13:13:34.378Z" }, + { url = "https://files.pythonhosted.org/packages/7c/a1/c0dacef0cc865f2455d59eed3548573ce47ed603205ffd0735d1d78b5906/coverage-7.13.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0602f701057c6823e5db1b74530ce85f17c3c5be5c85fc042ac939cbd909426e", size = 265125, upload-time = "2025-12-08T13:13:35.73Z" }, + { url = "https://files.pythonhosted.org/packages/ef/92/82b99223628b61300bd382c205795533bed021505eab6dd86e11fb5d7925/coverage-7.13.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:25dc33618d45456ccb1d37bce44bc78cf269909aa14c4db2e03d63146a8a1493", size = 259844, upload-time = "2025-12-08T13:13:37.69Z" }, + { url = "https://files.pythonhosted.org/packages/cf/2c/89b0291ae4e6cd59ef042708e1c438e2290f8c31959a20055d8768349ee2/coverage-7.13.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:71936a8b3b977ddd0b694c28c6a34f4fff2e9dd201969a4ff5d5fc7742d614b0", size = 262700, upload-time = "2025-12-08T13:13:39.525Z" }, + { url = "https://files.pythonhosted.org/packages/bf/f9/a5f992efae1996245e796bae34ceb942b05db275e4b34222a9a40b9fbd3b/coverage-7.13.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:936bc20503ce24770c71938d1369461f0c5320830800933bc3956e2a4ded930e", size = 260321, upload-time = 
"2025-12-08T13:13:41.172Z" }, + { url = "https://files.pythonhosted.org/packages/4c/89/a29f5d98c64fedbe32e2ac3c227fbf78edc01cc7572eee17d61024d89889/coverage-7.13.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:af0a583efaacc52ae2521f8d7910aff65cdb093091d76291ac5820d5e947fc1c", size = 259222, upload-time = "2025-12-08T13:13:43.282Z" }, + { url = "https://files.pythonhosted.org/packages/b3/c3/940fe447aae302a6701ee51e53af7e08b86ff6eed7631e5740c157ee22b9/coverage-7.13.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f1c23e24a7000da892a312fb17e33c5f94f8b001de44b7cf8ba2e36fbd15859e", size = 261411, upload-time = "2025-12-08T13:13:44.72Z" }, + { url = "https://files.pythonhosted.org/packages/eb/31/12a4aec689cb942a89129587860ed4d0fd522d5fda81237147fde554b8ae/coverage-7.13.0-cp313-cp313t-win32.whl", hash = "sha256:5f8a0297355e652001015e93be345ee54393e45dc3050af4a0475c5a2b767d46", size = 221505, upload-time = "2025-12-08T13:13:46.332Z" }, + { url = "https://files.pythonhosted.org/packages/65/8c/3b5fe3259d863572d2b0827642c50c3855d26b3aefe80bdc9eba1f0af3b0/coverage-7.13.0-cp313-cp313t-win_amd64.whl", hash = "sha256:6abb3a4c52f05e08460bd9acf04fec027f8718ecaa0d09c40ffbc3fbd70ecc39", size = 222569, upload-time = "2025-12-08T13:13:47.79Z" }, + { url = "https://files.pythonhosted.org/packages/b0/39/f71fa8316a96ac72fc3908839df651e8eccee650001a17f2c78cdb355624/coverage-7.13.0-cp313-cp313t-win_arm64.whl", hash = "sha256:3ad968d1e3aa6ce5be295ab5fe3ae1bf5bb4769d0f98a80a0252d543a2ef2e9e", size = 220841, upload-time = "2025-12-08T13:13:49.243Z" }, + { url = "https://files.pythonhosted.org/packages/f8/4b/9b54bedda55421449811dcd5263a2798a63f48896c24dfb92b0f1b0845bd/coverage-7.13.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:453b7ec753cf5e4356e14fe858064e5520c460d3bbbcb9c35e55c0d21155c256", size = 218343, upload-time = "2025-12-08T13:13:50.811Z" }, + { url = 
"https://files.pythonhosted.org/packages/59/df/c3a1f34d4bba2e592c8979f924da4d3d4598b0df2392fbddb7761258e3dc/coverage-7.13.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:af827b7cbb303e1befa6c4f94fd2bf72f108089cfa0f8abab8f4ca553cf5ca5a", size = 218672, upload-time = "2025-12-08T13:13:52.284Z" }, + { url = "https://files.pythonhosted.org/packages/07/62/eec0659e47857698645ff4e6ad02e30186eb8afd65214fd43f02a76537cb/coverage-7.13.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9987a9e4f8197a1000280f7cc089e3ea2c8b3c0a64d750537809879a7b4ceaf9", size = 249715, upload-time = "2025-12-08T13:13:53.791Z" }, + { url = "https://files.pythonhosted.org/packages/23/2d/3c7ff8b2e0e634c1f58d095f071f52ed3c23ff25be524b0ccae8b71f99f8/coverage-7.13.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3188936845cd0cb114fa6a51842a304cdbac2958145d03be2377ec41eb285d19", size = 252225, upload-time = "2025-12-08T13:13:55.274Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ac/fb03b469d20e9c9a81093575003f959cf91a4a517b783aab090e4538764b/coverage-7.13.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a2bdb3babb74079f021696cb46b8bb5f5661165c385d3a238712b031a12355be", size = 253559, upload-time = "2025-12-08T13:13:57.161Z" }, + { url = "https://files.pythonhosted.org/packages/29/62/14afa9e792383c66cc0a3b872a06ded6e4ed1079c7d35de274f11d27064e/coverage-7.13.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7464663eaca6adba4175f6c19354feea61ebbdd735563a03d1e472c7072d27bb", size = 249724, upload-time = "2025-12-08T13:13:58.692Z" }, + { url = "https://files.pythonhosted.org/packages/31/b7/333f3dab2939070613696ab3ee91738950f0467778c6e5a5052e840646b7/coverage-7.13.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8069e831f205d2ff1f3d355e82f511eb7c5522d7d413f5db5756b772ec8697f8", size = 251582, upload-time = 
"2025-12-08T13:14:00.642Z" }, + { url = "https://files.pythonhosted.org/packages/81/cb/69162bda9381f39b2287265d7e29ee770f7c27c19f470164350a38318764/coverage-7.13.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:6fb2d5d272341565f08e962cce14cdf843a08ac43bd621783527adb06b089c4b", size = 249538, upload-time = "2025-12-08T13:14:02.556Z" }, + { url = "https://files.pythonhosted.org/packages/e0/76/350387b56a30f4970abe32b90b2a434f87d29f8b7d4ae40d2e8a85aacfb3/coverage-7.13.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:5e70f92ef89bac1ac8a99b3324923b4749f008fdbd7aa9cb35e01d7a284a04f9", size = 249349, upload-time = "2025-12-08T13:14:04.015Z" }, + { url = "https://files.pythonhosted.org/packages/86/0d/7f6c42b8d59f4c7e43ea3059f573c0dcfed98ba46eb43c68c69e52ae095c/coverage-7.13.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4b5de7d4583e60d5fd246dd57fcd3a8aa23c6e118a8c72b38adf666ba8e7e927", size = 251011, upload-time = "2025-12-08T13:14:05.505Z" }, + { url = "https://files.pythonhosted.org/packages/d7/f1/4bb2dff379721bb0b5c649d5c5eaf438462cad824acf32eb1b7ca0c7078e/coverage-7.13.0-cp314-cp314-win32.whl", hash = "sha256:a6c6e16b663be828a8f0b6c5027d36471d4a9f90d28444aa4ced4d48d7d6ae8f", size = 221091, upload-time = "2025-12-08T13:14:07.127Z" }, + { url = "https://files.pythonhosted.org/packages/ba/44/c239da52f373ce379c194b0ee3bcc121020e397242b85f99e0afc8615066/coverage-7.13.0-cp314-cp314-win_amd64.whl", hash = "sha256:0900872f2fdb3ee5646b557918d02279dc3af3dfb39029ac4e945458b13f73bc", size = 221904, upload-time = "2025-12-08T13:14:08.542Z" }, + { url = "https://files.pythonhosted.org/packages/89/1f/b9f04016d2a29c2e4a0307baefefad1a4ec5724946a2b3e482690486cade/coverage-7.13.0-cp314-cp314-win_arm64.whl", hash = "sha256:3a10260e6a152e5f03f26db4a407c4c62d3830b9af9b7c0450b183615f05d43b", size = 220480, upload-time = "2025-12-08T13:14:10.958Z" }, + { url = 
"https://files.pythonhosted.org/packages/16/d4/364a1439766c8e8647860584171c36010ca3226e6e45b1753b1b249c5161/coverage-7.13.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:9097818b6cc1cfb5f174e3263eba4a62a17683bcfe5c4b5d07f4c97fa51fbf28", size = 219074, upload-time = "2025-12-08T13:14:13.345Z" }, + { url = "https://files.pythonhosted.org/packages/ce/f4/71ba8be63351e099911051b2089662c03d5671437a0ec2171823c8e03bec/coverage-7.13.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0018f73dfb4301a89292c73be6ba5f58722ff79f51593352759c1790ded1cabe", size = 219342, upload-time = "2025-12-08T13:14:15.02Z" }, + { url = "https://files.pythonhosted.org/packages/5e/25/127d8ed03d7711a387d96f132589057213e3aef7475afdaa303412463f22/coverage-7.13.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:166ad2a22ee770f5656e1257703139d3533b4a0b6909af67c6b4a3adc1c98657", size = 260713, upload-time = "2025-12-08T13:14:16.907Z" }, + { url = "https://files.pythonhosted.org/packages/fd/db/559fbb6def07d25b2243663b46ba9eb5a3c6586c0c6f4e62980a68f0ee1c/coverage-7.13.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f6aaef16d65d1787280943f1c8718dc32e9cf141014e4634d64446702d26e0ff", size = 262825, upload-time = "2025-12-08T13:14:18.68Z" }, + { url = "https://files.pythonhosted.org/packages/37/99/6ee5bf7eff884766edb43bd8736b5e1c5144d0fe47498c3779326fe75a35/coverage-7.13.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e999e2dcc094002d6e2c7bbc1fb85b58ba4f465a760a8014d97619330cdbbbf3", size = 265233, upload-time = "2025-12-08T13:14:20.55Z" }, + { url = "https://files.pythonhosted.org/packages/d8/90/92f18fe0356ea69e1f98f688ed80cec39f44e9f09a1f26a1bbf017cc67f2/coverage-7.13.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:00c3d22cf6fb1cf3bf662aaaa4e563be8243a5ed2630339069799835a9cc7f9b", size = 259779, upload-time = 
"2025-12-08T13:14:22.367Z" }, + { url = "https://files.pythonhosted.org/packages/90/5d/b312a8b45b37a42ea7d27d7d3ff98ade3a6c892dd48d1d503e773503373f/coverage-7.13.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22ccfe8d9bb0d6134892cbe1262493a8c70d736b9df930f3f3afae0fe3ac924d", size = 262700, upload-time = "2025-12-08T13:14:24.309Z" }, + { url = "https://files.pythonhosted.org/packages/63/f8/b1d0de5c39351eb71c366f872376d09386640840a2e09b0d03973d791e20/coverage-7.13.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:9372dff5ea15930fea0445eaf37bbbafbc771a49e70c0aeed8b4e2c2614cc00e", size = 260302, upload-time = "2025-12-08T13:14:26.068Z" }, + { url = "https://files.pythonhosted.org/packages/aa/7c/d42f4435bc40c55558b3109a39e2d456cddcec37434f62a1f1230991667a/coverage-7.13.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:69ac2c492918c2461bc6ace42d0479638e60719f2a4ef3f0815fa2df88e9f940", size = 259136, upload-time = "2025-12-08T13:14:27.604Z" }, + { url = "https://files.pythonhosted.org/packages/b8/d3/23413241dc04d47cfe19b9a65b32a2edd67ecd0b817400c2843ebc58c847/coverage-7.13.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:739c6c051a7540608d097b8e13c76cfa85263ced467168dc6b477bae3df7d0e2", size = 261467, upload-time = "2025-12-08T13:14:29.09Z" }, + { url = "https://files.pythonhosted.org/packages/13/e6/6e063174500eee216b96272c0d1847bf215926786f85c2bd024cf4d02d2f/coverage-7.13.0-cp314-cp314t-win32.whl", hash = "sha256:fe81055d8c6c9de76d60c94ddea73c290b416e061d40d542b24a5871bad498b7", size = 221875, upload-time = "2025-12-08T13:14:31.106Z" }, + { url = "https://files.pythonhosted.org/packages/3b/46/f4fb293e4cbe3620e3ac2a3e8fd566ed33affb5861a9b20e3dd6c1896cbc/coverage-7.13.0-cp314-cp314t-win_amd64.whl", hash = "sha256:445badb539005283825959ac9fa4a28f712c214b65af3a2c464f1adc90f5fcbc", size = 222982, upload-time = "2025-12-08T13:14:33.1Z" }, + { url = 
"https://files.pythonhosted.org/packages/68/62/5b3b9018215ed9733fbd1ae3b2ed75c5de62c3b55377a52cae732e1b7805/coverage-7.13.0-cp314-cp314t-win_arm64.whl", hash = "sha256:de7f6748b890708578fc4b7bb967d810aeb6fcc9bff4bb77dbca77dab2f9df6a", size = 221016, upload-time = "2025-12-08T13:14:34.601Z" }, + { url = "https://files.pythonhosted.org/packages/8d/4c/1968f32fb9a2604645827e11ff84a31e59d532e01995f904723b4f5328b3/coverage-7.13.0-py3-none-any.whl", hash = "sha256:850d2998f380b1e266459ca5b47bc9e7daf9af1d070f66317972f382d46f1904", size = 210068, upload-time = "2025-12-08T13:14:36.236Z" }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version <= '3.11'" }, +] + +[[package]] +name = "distlib" +version = "0.3.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c4/91/e2df406fb4efacdf46871c25cde65d3c6ee5e173b7e5a4547a47bae91920/distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64", size = 609931, upload-time = "2023-12-12T07:14:03.091Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8e/41/9307e4f5f9976bc8b7fea0b66367734e8faf3ec84bc0d412d8cfabbb66cd/distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784", size = 468850, upload-time = "2023-12-12T07:13:59.966Z" }, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/09/35/2495c4ac46b980e4ca1f6ad6db102322ef3ad2410b79fdde159a4b0f3b92/exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc", size = 28883, upload-time = "2024-07-12T22:26:00.161Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl", hash = 
"sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", size = 16453, upload-time = "2024-07-12T22:25:58.476Z" }, +] + +[[package]] +name = "filelock" +version = "3.16.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/db/3ef5bb276dae18d6ec2124224403d1d67bccdbefc17af4cc8f553e341ab1/filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435", size = 18037, upload-time = "2024-09-17T19:02:01.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/f8/feced7779d755758a52d1f6635d990b8d98dc0a29fa568bbe0625f18fdf3/filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0", size = 16163, upload-time = "2024-09-17T19:02:00.268Z" }, +] + +[[package]] +name = "flake8" +version = "7.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mccabe" }, + { name = "pycodestyle" }, + { name = "pyflakes" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9b/af/fbfe3c4b5a657d79e5c47a2827a362f9e1b763336a52f926126aa6dc7123/flake8-7.3.0.tar.gz", hash = "sha256:fe044858146b9fc69b551a4b490d69cf960fcb78ad1edcb84e7fbb1b4a8e3872", size = 48326, upload-time = "2025-06-20T19:31:35.838Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9f/56/13ab06b4f93ca7cac71078fbe37fcea175d3216f31f85c3168a6bbd0bb9a/flake8-7.3.0-py2.py3-none-any.whl", hash = "sha256:b9696257b9ce8beb888cdbe31cf885c90d31928fe202be0889a7cdafad32f01e", size = 57922, upload-time = "2025-06-20T19:31:34.425Z" }, +] + +[[package]] +name = "flake8-print" +version = "5.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "flake8" }, + { name = "pycodestyle" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2b/a6/770c5832a6b563e023def7d81925d1b9f3079ebc805e48be0a5ee206f716/flake8-print-5.0.0.tar.gz", hash = 
"sha256:76915a2a389cc1c0879636c219eb909c38501d3a43cc8dae542081c9ba48bdf9", size = 5166, upload-time = "2022-04-30T16:19:22.711Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/74/2c/aa2ffda404b5d9c89dad8bcc4e0f4af673ab2de67e96997d13f04ad68b5b/flake8_print-5.0.0-py3-none-any.whl", hash = "sha256:84a1a6ea10d7056b804221ac5e62b1cee1aefc897ce16f2e5c42d3046068f5d8", size = 5687, upload-time = "2022-04-30T16:19:24.307Z" }, +] + +[[package]] +name = "identify" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/29/bb/25024dbcc93516c492b75919e76f389bac754a3e4248682fba32b250c880/identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98", size = 99097, upload-time = "2024-09-14T23:50:32.513Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7d/0c/4ef72754c050979fdcc06c744715ae70ea37e734816bb6514f79df77a42f/identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0", size = 98972, upload-time = "2024-09-14T23:50:30.747Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646, upload-time = "2023-01-07T11:08:11.254Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892, upload-time = "2023-01-07T11:08:09.864Z" }, +] + +[[package]] +name = "isort" +version = "7.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/63/53/4f3c058e3bace40282876f9b553343376ee687f3c35a525dc79dbd450f88/isort-7.0.0.tar.gz", hash = "sha256:5513527951aadb3ac4292a41a16cbc50dd1642432f5e8c20057d414bdafb4187", size = 805049, upload-time = "2025-10-11T13:30:59.107Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/ed/e3705d6d02b4f7aea715a353c8ce193efd0b5db13e204df895d38734c244/isort-7.0.0-py3-none-any.whl", hash = "sha256:1bcabac8bc3c36c7fb7b98a76c8abb18e0f841a3ba81decac7691008592499c1", size = 94672, upload-time = "2025-10-11T13:30:57.665Z" }, +] + +[[package]] +name = "librt" +version = "0.7.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/93/e4/b59bdf1197fdf9888452ea4d2048cdad61aef85eb83e99dc52551d7fdc04/librt-0.7.4.tar.gz", hash = "sha256:3871af56c59864d5fd21d1ac001eb2fb3b140d52ba0454720f2e4a19812404ba", size = 145862, upload-time = "2025-12-15T16:52:43.862Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/06/1e/3e61dff6c07a3b400fe907d3164b92b3b3023ef86eac1ee236869dc276f7/librt-0.7.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dc300cb5a5a01947b1ee8099233156fdccd5001739e5f596ecfbc0dab07b5a3b", size = 54708, upload-time = "2025-12-15T16:51:03.752Z" }, + { url = "https://files.pythonhosted.org/packages/87/98/ab2428b0a80d0fd67decaeea84a5ec920e3dd4d95ecfd074c71f51bd7315/librt-0.7.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee8d3323d921e0f6919918a97f9b5445a7dfe647270b2629ec1008aa676c0bc0", size = 56656, upload-time = "2025-12-15T16:51:05.038Z" }, + { url = "https://files.pythonhosted.org/packages/c1/ce/de1fad3a16e4fb5b6605bd6cbe6d0e5207cc8eca58993835749a1da0812b/librt-0.7.4-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:95cb80854a355b284c55f79674f6187cc9574df4dc362524e0cce98c89ee8331", size = 161024, upload-time = "2025-12-15T16:51:06.31Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/00/ddfcdc1147dd7fb68321d7b064b12f0b9101d85f466a46006f86096fde8d/librt-0.7.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ca1caedf8331d8ad6027f93b52d68ed8f8009f5c420c246a46fe9d3be06be0f", size = 169529, upload-time = "2025-12-15T16:51:07.907Z" }, + { url = "https://files.pythonhosted.org/packages/dd/b3/915702c7077df2483b015030d1979404474f490fe9a071e9576f7b26fef6/librt-0.7.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c2a6f1236151e6fe1da289351b5b5bce49651c91554ecc7b70a947bced6fe212", size = 183270, upload-time = "2025-12-15T16:51:09.164Z" }, + { url = "https://files.pythonhosted.org/packages/45/19/ab2f217e8ec509fca4ea9e2e5022b9f72c1a7b7195f5a5770d299df807ea/librt-0.7.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7766b57aeebaf3f1dac14fdd4a75c9a61f2ed56d8ebeefe4189db1cb9d2a3783", size = 179038, upload-time = "2025-12-15T16:51:10.538Z" }, + { url = "https://files.pythonhosted.org/packages/10/1c/d40851d187662cf50312ebbc0b277c7478dd78dbaaf5ee94056f1d7f2f83/librt-0.7.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1c4c89fb01157dd0a3bfe9e75cd6253b0a1678922befcd664eca0772a4c6c979", size = 173502, upload-time = "2025-12-15T16:51:11.888Z" }, + { url = "https://files.pythonhosted.org/packages/07/52/d5880835c772b22c38db18660420fa6901fd9e9a433b65f0ba9b0f4da764/librt-0.7.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f7fa8beef580091c02b4fd26542de046b2abfe0aaefa02e8bcf68acb7618f2b3", size = 193570, upload-time = "2025-12-15T16:51:13.168Z" }, + { url = "https://files.pythonhosted.org/packages/f1/35/22d3c424b82f86ce019c0addadf001d459dfac8036aecc07fadc5c541053/librt-0.7.4-cp310-cp310-win32.whl", hash = "sha256:543c42fa242faae0466fe72d297976f3c710a357a219b1efde3a0539a68a6997", size = 42596, upload-time = "2025-12-15T16:51:14.422Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/b1/e7c316ac5fe60ac1fdfe515198087205220803c4cf923ee63e1cb8380b17/librt-0.7.4-cp310-cp310-win_amd64.whl", hash = "sha256:25cc40d8eb63f0a7ea4c8f49f524989b9df901969cb860a2bc0e4bad4b8cb8a8", size = 48972, upload-time = "2025-12-15T16:51:15.516Z" }, + { url = "https://files.pythonhosted.org/packages/84/64/44089b12d8b4714a7f0e2f33fb19285ba87702d4be0829f20b36ebeeee07/librt-0.7.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3485b9bb7dfa66167d5500ffdafdc35415b45f0da06c75eb7df131f3357b174a", size = 54709, upload-time = "2025-12-15T16:51:16.699Z" }, + { url = "https://files.pythonhosted.org/packages/26/ef/6fa39fb5f37002f7d25e0da4f24d41b457582beea9369eeb7e9e73db5508/librt-0.7.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:188b4b1a770f7f95ea035d5bbb9d7367248fc9d12321deef78a269ebf46a5729", size = 56663, upload-time = "2025-12-15T16:51:17.856Z" }, + { url = "https://files.pythonhosted.org/packages/9d/e4/cbaca170a13bee2469c90df9e47108610b4422c453aea1aec1779ac36c24/librt-0.7.4-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1b668b1c840183e4e38ed5a99f62fac44c3a3eef16870f7f17cfdfb8b47550ed", size = 161703, upload-time = "2025-12-15T16:51:19.421Z" }, + { url = "https://files.pythonhosted.org/packages/d0/32/0b2296f9cc7e693ab0d0835e355863512e5eac90450c412777bd699c76ae/librt-0.7.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0e8f864b521f6cfedb314d171630f827efee08f5c3462bcbc2244ab8e1768cd6", size = 171027, upload-time = "2025-12-15T16:51:20.721Z" }, + { url = "https://files.pythonhosted.org/packages/d8/33/c70b6d40f7342716e5f1353c8da92d9e32708a18cbfa44897a93ec2bf879/librt-0.7.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4df7c9def4fc619a9c2ab402d73a0c5b53899abe090e0100323b13ccb5a3dd82", size = 184700, upload-time = "2025-12-15T16:51:22.272Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/c8/555c405155da210e4c4113a879d378f54f850dbc7b794e847750a8fadd43/librt-0.7.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f79bc3595b6ed159a1bf0cdc70ed6ebec393a874565cab7088a219cca14da727", size = 180719, upload-time = "2025-12-15T16:51:23.561Z" }, + { url = "https://files.pythonhosted.org/packages/6b/88/34dc1f1461c5613d1b73f0ecafc5316cc50adcc1b334435985b752ed53e5/librt-0.7.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:77772a4b8b5f77d47d883846928c36d730b6e612a6388c74cba33ad9eb149c11", size = 174535, upload-time = "2025-12-15T16:51:25.031Z" }, + { url = "https://files.pythonhosted.org/packages/b6/5a/f3fafe80a221626bcedfa9fe5abbf5f04070989d44782f579b2d5920d6d0/librt-0.7.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:064a286e6ab0b4c900e228ab4fa9cb3811b4b83d3e0cc5cd816b2d0f548cb61c", size = 195236, upload-time = "2025-12-15T16:51:26.328Z" }, + { url = "https://files.pythonhosted.org/packages/d8/77/5c048d471ce17f4c3a6e08419be19add4d291e2f7067b877437d482622ac/librt-0.7.4-cp311-cp311-win32.whl", hash = "sha256:42da201c47c77b6cc91fc17e0e2b330154428d35d6024f3278aa2683e7e2daf2", size = 42930, upload-time = "2025-12-15T16:51:27.853Z" }, + { url = "https://files.pythonhosted.org/packages/fb/3b/514a86305a12c3d9eac03e424b07cd312c7343a9f8a52719aa079590a552/librt-0.7.4-cp311-cp311-win_amd64.whl", hash = "sha256:d31acb5886c16ae1711741f22504195af46edec8315fe69b77e477682a87a83e", size = 49240, upload-time = "2025-12-15T16:51:29.037Z" }, + { url = "https://files.pythonhosted.org/packages/ba/01/3b7b1914f565926b780a734fac6e9a4d2c7aefe41f4e89357d73697a9457/librt-0.7.4-cp311-cp311-win_arm64.whl", hash = "sha256:114722f35093da080a333b3834fff04ef43147577ed99dd4db574b03a5f7d170", size = 42613, upload-time = "2025-12-15T16:51:30.194Z" }, + { url = "https://files.pythonhosted.org/packages/f3/e7/b805d868d21f425b7e76a0ea71a2700290f2266a4f3c8357fcf73efc36aa/librt-0.7.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:7dd3b5c37e0fb6666c27cf4e2c88ae43da904f2155c4cfc1e5a2fdce3b9fcf92", size = 55688, upload-time = "2025-12-15T16:51:31.571Z" }, + { url = "https://files.pythonhosted.org/packages/59/5e/69a2b02e62a14cfd5bfd9f1e9adea294d5bcfeea219c7555730e5d068ee4/librt-0.7.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9c5de1928c486201b23ed0cc4ac92e6e07be5cd7f3abc57c88a9cf4f0f32108", size = 57141, upload-time = "2025-12-15T16:51:32.714Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6b/05dba608aae1272b8ea5ff8ef12c47a4a099a04d1e00e28a94687261d403/librt-0.7.4-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:078ae52ffb3f036396cc4aed558e5b61faedd504a3c1f62b8ae34bf95ae39d94", size = 165322, upload-time = "2025-12-15T16:51:33.986Z" }, + { url = "https://files.pythonhosted.org/packages/8f/bc/199533d3fc04a4cda8d7776ee0d79955ab0c64c79ca079366fbc2617e680/librt-0.7.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce58420e25097b2fc201aef9b9f6d65df1eb8438e51154e1a7feb8847e4a55ab", size = 174216, upload-time = "2025-12-15T16:51:35.384Z" }, + { url = "https://files.pythonhosted.org/packages/62/ec/09239b912a45a8ed117cb4a6616d9ff508f5d3131bd84329bf2f8d6564f1/librt-0.7.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b719c8730c02a606dc0e8413287e8e94ac2d32a51153b300baf1f62347858fba", size = 189005, upload-time = "2025-12-15T16:51:36.687Z" }, + { url = "https://files.pythonhosted.org/packages/46/2e/e188313d54c02f5b0580dd31476bb4b0177514ff8d2be9f58d4a6dc3a7ba/librt-0.7.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3749ef74c170809e6dee68addec9d2458700a8de703de081c888e92a8b015cf9", size = 183960, upload-time = "2025-12-15T16:51:37.977Z" }, + { url = "https://files.pythonhosted.org/packages/eb/84/f1d568d254518463d879161d3737b784137d236075215e56c7c9be191cee/librt-0.7.4-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:b35c63f557653c05b5b1b6559a074dbabe0afee28ee2a05b6c9ba21ad0d16a74", size = 177609, upload-time = "2025-12-15T16:51:40.584Z" }, + { url = "https://files.pythonhosted.org/packages/5d/43/060bbc1c002f0d757c33a1afe6bf6a565f947a04841139508fc7cef6c08b/librt-0.7.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1ef704e01cb6ad39ad7af668d51677557ca7e5d377663286f0ee1b6b27c28e5f", size = 199269, upload-time = "2025-12-15T16:51:41.879Z" }, + { url = "https://files.pythonhosted.org/packages/ff/7f/708f8f02d8012ee9f366c07ea6a92882f48bd06cc1ff16a35e13d0fbfb08/librt-0.7.4-cp312-cp312-win32.whl", hash = "sha256:c66c2b245926ec15188aead25d395091cb5c9df008d3b3207268cd65557d6286", size = 43186, upload-time = "2025-12-15T16:51:43.149Z" }, + { url = "https://files.pythonhosted.org/packages/f1/a5/4e051b061c8b2509be31b2c7ad4682090502c0a8b6406edcf8c6b4fe1ef7/librt-0.7.4-cp312-cp312-win_amd64.whl", hash = "sha256:71a56f4671f7ff723451f26a6131754d7c1809e04e22ebfbac1db8c9e6767a20", size = 49455, upload-time = "2025-12-15T16:51:44.336Z" }, + { url = "https://files.pythonhosted.org/packages/d0/d2/90d84e9f919224a3c1f393af1636d8638f54925fdc6cd5ee47f1548461e5/librt-0.7.4-cp312-cp312-win_arm64.whl", hash = "sha256:419eea245e7ec0fe664eb7e85e7ff97dcdb2513ca4f6b45a8ec4a3346904f95a", size = 42828, upload-time = "2025-12-15T16:51:45.498Z" }, + { url = "https://files.pythonhosted.org/packages/fe/4d/46a53ccfbb39fd0b493fd4496eb76f3ebc15bb3e45d8c2e695a27587edf5/librt-0.7.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d44a1b1ba44cbd2fc3cb77992bef6d6fdb1028849824e1dd5e4d746e1f7f7f0b", size = 55745, upload-time = "2025-12-15T16:51:46.636Z" }, + { url = "https://files.pythonhosted.org/packages/7f/2b/3ac7f5212b1828bf4f979cf87f547db948d3e28421d7a430d4db23346ce4/librt-0.7.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c9cab4b3de1f55e6c30a84c8cee20e4d3b2476f4d547256694a1b0163da4fe32", size = 57166, upload-time = "2025-12-15T16:51:48.219Z" }, + { url = 
"https://files.pythonhosted.org/packages/e8/99/6523509097cbe25f363795f0c0d1c6a3746e30c2994e25b5aefdab119b21/librt-0.7.4-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:2857c875f1edd1feef3c371fbf830a61b632fb4d1e57160bb1e6a3206e6abe67", size = 165833, upload-time = "2025-12-15T16:51:49.443Z" }, + { url = "https://files.pythonhosted.org/packages/fe/35/323611e59f8fe032649b4fb7e77f746f96eb7588fcbb31af26bae9630571/librt-0.7.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b370a77be0a16e1ad0270822c12c21462dc40496e891d3b0caf1617c8cc57e20", size = 174818, upload-time = "2025-12-15T16:51:51.015Z" }, + { url = "https://files.pythonhosted.org/packages/41/e6/40fb2bb21616c6e06b6a64022802228066e9a31618f493e03f6b9661548a/librt-0.7.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d05acd46b9a52087bfc50c59dfdf96a2c480a601e8898a44821c7fd676598f74", size = 189607, upload-time = "2025-12-15T16:51:52.671Z" }, + { url = "https://files.pythonhosted.org/packages/32/48/1b47c7d5d28b775941e739ed2bfe564b091c49201b9503514d69e4ed96d7/librt-0.7.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:70969229cb23d9c1a80e14225838d56e464dc71fa34c8342c954fc50e7516dee", size = 184585, upload-time = "2025-12-15T16:51:54.027Z" }, + { url = "https://files.pythonhosted.org/packages/75/a6/ee135dfb5d3b54d5d9001dbe483806229c6beac3ee2ba1092582b7efeb1b/librt-0.7.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4450c354b89dbb266730893862dbff06006c9ed5b06b6016d529b2bf644fc681", size = 178249, upload-time = "2025-12-15T16:51:55.248Z" }, + { url = "https://files.pythonhosted.org/packages/04/87/d5b84ec997338be26af982bcd6679be0c1db9a32faadab1cf4bb24f9e992/librt-0.7.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:adefe0d48ad35b90b6f361f6ff5a1bd95af80c17d18619c093c60a20e7a5b60c", size = 199851, upload-time = "2025-12-15T16:51:56.933Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/63/ba1333bf48306fe398e3392a7427ce527f81b0b79d0d91618c4610ce9d15/librt-0.7.4-cp313-cp313-win32.whl", hash = "sha256:21ea710e96c1e050635700695095962a22ea420d4b3755a25e4909f2172b4ff2", size = 43249, upload-time = "2025-12-15T16:51:58.498Z" }, + { url = "https://files.pythonhosted.org/packages/f9/8a/de2c6df06cdfa9308c080e6b060fe192790b6a48a47320b215e860f0e98c/librt-0.7.4-cp313-cp313-win_amd64.whl", hash = "sha256:772e18696cf5a64afee908662fbcb1f907460ddc851336ee3a848ef7684c8e1e", size = 49417, upload-time = "2025-12-15T16:51:59.618Z" }, + { url = "https://files.pythonhosted.org/packages/31/66/8ee0949efc389691381ed686185e43536c20e7ad880c122dd1f31e65c658/librt-0.7.4-cp313-cp313-win_arm64.whl", hash = "sha256:52e34c6af84e12921748c8354aa6acf1912ca98ba60cdaa6920e34793f1a0788", size = 42824, upload-time = "2025-12-15T16:52:00.784Z" }, + { url = "https://files.pythonhosted.org/packages/74/81/6921e65c8708eb6636bbf383aa77e6c7dad33a598ed3b50c313306a2da9d/librt-0.7.4-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:4f1ee004942eaaed6e06c087d93ebc1c67e9a293e5f6b9b5da558df6bf23dc5d", size = 55191, upload-time = "2025-12-15T16:52:01.97Z" }, + { url = "https://files.pythonhosted.org/packages/0d/d6/3eb864af8a8de8b39cc8dd2e9ded1823979a27795d72c4eea0afa8c26c9f/librt-0.7.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:d854c6dc0f689bad7ed452d2a3ecff58029d80612d336a45b62c35e917f42d23", size = 56898, upload-time = "2025-12-15T16:52:03.356Z" }, + { url = "https://files.pythonhosted.org/packages/49/bc/b1d4c0711fdf79646225d576faee8747b8528a6ec1ceb6accfd89ade7102/librt-0.7.4-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a4f7339d9e445280f23d63dea842c0c77379c4a47471c538fc8feedab9d8d063", size = 163725, upload-time = "2025-12-15T16:52:04.572Z" }, + { url = 
"https://files.pythonhosted.org/packages/2c/08/61c41cd8f0a6a41fc99ea78a2205b88187e45ba9800792410ed62f033584/librt-0.7.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39003fc73f925e684f8521b2dbf34f61a5deb8a20a15dcf53e0d823190ce8848", size = 172469, upload-time = "2025-12-15T16:52:05.863Z" }, + { url = "https://files.pythonhosted.org/packages/8b/c7/4ee18b4d57f01444230bc18cf59103aeab8f8c0f45e84e0e540094df1df1/librt-0.7.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6bb15ee29d95875ad697d449fe6071b67f730f15a6961913a2b0205015ca0843", size = 186804, upload-time = "2025-12-15T16:52:07.192Z" }, + { url = "https://files.pythonhosted.org/packages/a1/af/009e8ba3fbf830c936842da048eda1b34b99329f402e49d88fafff6525d1/librt-0.7.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:02a69369862099e37d00765583052a99d6a68af7e19b887e1b78fee0146b755a", size = 181807, upload-time = "2025-12-15T16:52:08.554Z" }, + { url = "https://files.pythonhosted.org/packages/85/26/51ae25f813656a8b117c27a974f25e8c1e90abcd5a791ac685bf5b489a1b/librt-0.7.4-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ec72342cc4d62f38b25a94e28b9efefce41839aecdecf5e9627473ed04b7be16", size = 175595, upload-time = "2025-12-15T16:52:10.186Z" }, + { url = "https://files.pythonhosted.org/packages/48/93/36d6c71f830305f88996b15c8e017aa8d1e03e2e947b40b55bbf1a34cf24/librt-0.7.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:776dbb9bfa0fc5ce64234b446995d8d9f04badf64f544ca036bd6cff6f0732ce", size = 196504, upload-time = "2025-12-15T16:52:11.472Z" }, + { url = "https://files.pythonhosted.org/packages/08/11/8299e70862bb9d704735bf132c6be09c17b00fbc7cda0429a9df222fdc1b/librt-0.7.4-cp314-cp314-win32.whl", hash = "sha256:0f8cac84196d0ffcadf8469d9ded4d4e3a8b1c666095c2a291e22bf58e1e8a9f", size = 39738, upload-time = "2025-12-15T16:52:12.962Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/d5/656b0126e4e0f8e2725cd2d2a1ec40f71f37f6f03f135a26b663c0e1a737/librt-0.7.4-cp314-cp314-win_amd64.whl", hash = "sha256:037f5cb6fe5abe23f1dc058054d50e9699fcc90d0677eee4e4f74a8677636a1a", size = 45976, upload-time = "2025-12-15T16:52:14.441Z" }, + { url = "https://files.pythonhosted.org/packages/60/86/465ff07b75c1067da8fa7f02913c4ead096ef106cfac97a977f763783bfb/librt-0.7.4-cp314-cp314-win_arm64.whl", hash = "sha256:a5deebb53d7a4d7e2e758a96befcd8edaaca0633ae71857995a0f16033289e44", size = 39073, upload-time = "2025-12-15T16:52:15.621Z" }, + { url = "https://files.pythonhosted.org/packages/b3/a0/24941f85960774a80d4b3c2aec651d7d980466da8101cae89e8b032a3e21/librt-0.7.4-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:b4c25312c7f4e6ab35ab16211bdf819e6e4eddcba3b2ea632fb51c9a2a97e105", size = 57369, upload-time = "2025-12-15T16:52:16.782Z" }, + { url = "https://files.pythonhosted.org/packages/77/a0/ddb259cae86ab415786c1547d0fe1b40f04a7b089f564fd5c0242a3fafb2/librt-0.7.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:618b7459bb392bdf373f2327e477597fff8f9e6a1878fffc1b711c013d1b0da4", size = 59230, upload-time = "2025-12-15T16:52:18.259Z" }, + { url = "https://files.pythonhosted.org/packages/31/11/77823cb530ab8a0c6fac848ac65b745be446f6f301753b8990e8809080c9/librt-0.7.4-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1437c3f72a30c7047f16fd3e972ea58b90172c3c6ca309645c1c68984f05526a", size = 183869, upload-time = "2025-12-15T16:52:19.457Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ce/157db3614cf3034b3f702ae5ba4fefda4686f11eea4b7b96542324a7a0e7/librt-0.7.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c96cb76f055b33308f6858b9b594618f1b46e147a4d03a4d7f0c449e304b9b95", size = 194606, upload-time = "2025-12-15T16:52:20.795Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/ef/6ec4c7e3d6490f69a4fd2803516fa5334a848a4173eac26d8ee6507bff6e/librt-0.7.4-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:28f990e6821204f516d09dc39966ef8b84556ffd648d5926c9a3f681e8de8906", size = 206776, upload-time = "2025-12-15T16:52:22.229Z" }, + { url = "https://files.pythonhosted.org/packages/ad/22/750b37bf549f60a4782ab80e9d1e9c44981374ab79a7ea68670159905918/librt-0.7.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:bc4aebecc79781a1b77d7d4e7d9fe080385a439e198d993b557b60f9117addaf", size = 203205, upload-time = "2025-12-15T16:52:23.603Z" }, + { url = "https://files.pythonhosted.org/packages/7a/87/2e8a0f584412a93df5faad46c5fa0a6825fdb5eba2ce482074b114877f44/librt-0.7.4-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:022cc673e69283a42621dd453e2407cf1647e77f8bd857d7ad7499901e62376f", size = 196696, upload-time = "2025-12-15T16:52:24.951Z" }, + { url = "https://files.pythonhosted.org/packages/e5/ca/7bf78fa950e43b564b7de52ceeb477fb211a11f5733227efa1591d05a307/librt-0.7.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:2b3ca211ae8ea540569e9c513da052699b7b06928dcda61247cb4f318122bdb5", size = 217191, upload-time = "2025-12-15T16:52:26.194Z" }, + { url = "https://files.pythonhosted.org/packages/d6/49/3732b0e8424ae35ad5c3166d9dd5bcdae43ce98775e0867a716ff5868064/librt-0.7.4-cp314-cp314t-win32.whl", hash = "sha256:8a461f6456981d8c8e971ff5a55f2e34f4e60871e665d2f5fde23ee74dea4eeb", size = 40276, upload-time = "2025-12-15T16:52:27.54Z" }, + { url = "https://files.pythonhosted.org/packages/35/d6/d8823e01bd069934525fddb343189c008b39828a429b473fb20d67d5cd36/librt-0.7.4-cp314-cp314t-win_amd64.whl", hash = "sha256:721a7b125a817d60bf4924e1eec2a7867bfcf64cfc333045de1df7a0629e4481", size = 46772, upload-time = "2025-12-15T16:52:28.653Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/e9/a0aa60f5322814dd084a89614e9e31139702e342f8459ad8af1984a18168/librt-0.7.4-cp314-cp314t-win_arm64.whl", hash = "sha256:76b2ba71265c0102d11458879b4d53ccd0b32b0164d14deb8d2b598a018e502f", size = 39724, upload-time = "2025-12-15T16:52:29.836Z" }, +] + +[[package]] +name = "mccabe" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/ff/0ffefdcac38932a54d2b5eed4e0ba8a408f215002cd178ad1df0f2806ff8/mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", size = 9658, upload-time = "2022-01-24T01:14:51.113Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/1a/1f68f9ba0c207934b35b86a8ca3aad8395a3d6dd7921c0686e23853ff5a9/mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e", size = 7350, upload-time = "2022-01-24T01:14:49.62Z" }, +] + +[[package]] +name = "mypy" +version = "1.19.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "librt", marker = "platform_python_implementation != 'PyPy'" }, + { name = "mypy-extensions" }, + { name = "pathspec" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f5/db/4efed9504bc01309ab9c2da7e352cc223569f05478012b5d9ece38fd44d2/mypy-1.19.1.tar.gz", hash = "sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba", size = 3582404, upload-time = "2025-12-15T05:03:48.42Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2f/63/e499890d8e39b1ff2df4c0c6ce5d371b6844ee22b8250687a99fd2f657a8/mypy-1.19.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f05aa3d375b385734388e844bc01733bd33c644ab48e9684faa54e5389775ec", size = 13101333, upload-time = "2025-12-15T05:03:03.28Z" }, + { url = 
"https://files.pythonhosted.org/packages/72/4b/095626fc136fba96effc4fd4a82b41d688ab92124f8c4f7564bffe5cf1b0/mypy-1.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:022ea7279374af1a5d78dfcab853fe6a536eebfda4b59deab53cd21f6cd9f00b", size = 12164102, upload-time = "2025-12-15T05:02:33.611Z" }, + { url = "https://files.pythonhosted.org/packages/0c/5b/952928dd081bf88a83a5ccd49aaecfcd18fd0d2710c7ff07b8fb6f7032b9/mypy-1.19.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee4c11e460685c3e0c64a4c5de82ae143622410950d6be863303a1c4ba0e36d6", size = 12765799, upload-time = "2025-12-15T05:03:28.44Z" }, + { url = "https://files.pythonhosted.org/packages/2a/0d/93c2e4a287f74ef11a66fb6d49c7a9f05e47b0a4399040e6719b57f500d2/mypy-1.19.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de759aafbae8763283b2ee5869c7255391fbc4de3ff171f8f030b5ec48381b74", size = 13522149, upload-time = "2025-12-15T05:02:36.011Z" }, + { url = "https://files.pythonhosted.org/packages/7b/0e/33a294b56aaad2b338d203e3a1d8b453637ac36cb278b45005e0901cf148/mypy-1.19.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ab43590f9cd5108f41aacf9fca31841142c786827a74ab7cc8a2eacb634e09a1", size = 13810105, upload-time = "2025-12-15T05:02:40.327Z" }, + { url = "https://files.pythonhosted.org/packages/0e/fd/3e82603a0cb66b67c5e7abababce6bf1a929ddf67bf445e652684af5c5a0/mypy-1.19.1-cp310-cp310-win_amd64.whl", hash = "sha256:2899753e2f61e571b3971747e302d5f420c3fd09650e1951e99f823bc3089dac", size = 10057200, upload-time = "2025-12-15T05:02:51.012Z" }, + { url = "https://files.pythonhosted.org/packages/ef/47/6b3ebabd5474d9cdc170d1342fbf9dddc1b0ec13ec90bf9004ee6f391c31/mypy-1.19.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d8dfc6ab58ca7dda47d9237349157500468e404b17213d44fc1cb77bce532288", size = 13028539, upload-time = "2025-12-15T05:03:44.129Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/a6/ac7c7a88a3c9c54334f53a941b765e6ec6c4ebd65d3fe8cdcfbe0d0fd7db/mypy-1.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e3f276d8493c3c97930e354b2595a44a21348b320d859fb4a2b9f66da9ed27ab", size = 12083163, upload-time = "2025-12-15T05:03:37.679Z" }, + { url = "https://files.pythonhosted.org/packages/67/af/3afa9cf880aa4a2c803798ac24f1d11ef72a0c8079689fac5cfd815e2830/mypy-1.19.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2abb24cf3f17864770d18d673c85235ba52456b36a06b6afc1e07c1fdcd3d0e6", size = 12687629, upload-time = "2025-12-15T05:02:31.526Z" }, + { url = "https://files.pythonhosted.org/packages/2d/46/20f8a7114a56484ab268b0ab372461cb3a8f7deed31ea96b83a4e4cfcfca/mypy-1.19.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a009ffa5a621762d0c926a078c2d639104becab69e79538a494bcccb62cc0331", size = 13436933, upload-time = "2025-12-15T05:03:15.606Z" }, + { url = "https://files.pythonhosted.org/packages/5b/f8/33b291ea85050a21f15da910002460f1f445f8007adb29230f0adea279cb/mypy-1.19.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f7cee03c9a2e2ee26ec07479f38ea9c884e301d42c6d43a19d20fb014e3ba925", size = 13661754, upload-time = "2025-12-15T05:02:26.731Z" }, + { url = "https://files.pythonhosted.org/packages/fd/a3/47cbd4e85bec4335a9cd80cf67dbc02be21b5d4c9c23ad6b95d6c5196bac/mypy-1.19.1-cp311-cp311-win_amd64.whl", hash = "sha256:4b84a7a18f41e167f7995200a1d07a4a6810e89d29859df936f1c3923d263042", size = 10055772, upload-time = "2025-12-15T05:03:26.179Z" }, + { url = "https://files.pythonhosted.org/packages/06/8a/19bfae96f6615aa8a0604915512e0289b1fad33d5909bf7244f02935d33a/mypy-1.19.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a8174a03289288c1f6c46d55cef02379b478bfbc8e358e02047487cad44c6ca1", size = 13206053, upload-time = "2025-12-15T05:03:46.622Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/34/3e63879ab041602154ba2a9f99817bb0c85c4df19a23a1443c8986e4d565/mypy-1.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ffcebe56eb09ff0c0885e750036a095e23793ba6c2e894e7e63f6d89ad51f22e", size = 12219134, upload-time = "2025-12-15T05:03:24.367Z" }, + { url = "https://files.pythonhosted.org/packages/89/cc/2db6f0e95366b630364e09845672dbee0cbf0bbe753a204b29a944967cd9/mypy-1.19.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b64d987153888790bcdb03a6473d321820597ab8dd9243b27a92153c4fa50fd2", size = 12731616, upload-time = "2025-12-15T05:02:44.725Z" }, + { url = "https://files.pythonhosted.org/packages/00/be/dd56c1fd4807bc1eba1cf18b2a850d0de7bacb55e158755eb79f77c41f8e/mypy-1.19.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c35d298c2c4bba75feb2195655dfea8124d855dfd7343bf8b8c055421eaf0cf8", size = 13620847, upload-time = "2025-12-15T05:03:39.633Z" }, + { url = "https://files.pythonhosted.org/packages/6d/42/332951aae42b79329f743bf1da088cd75d8d4d9acc18fbcbd84f26c1af4e/mypy-1.19.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:34c81968774648ab5ac09c29a375fdede03ba253f8f8287847bd480782f73a6a", size = 13834976, upload-time = "2025-12-15T05:03:08.786Z" }, + { url = "https://files.pythonhosted.org/packages/6f/63/e7493e5f90e1e085c562bb06e2eb32cae27c5057b9653348d38b47daaecc/mypy-1.19.1-cp312-cp312-win_amd64.whl", hash = "sha256:b10e7c2cd7870ba4ad9b2d8a6102eb5ffc1f16ca35e3de6bfa390c1113029d13", size = 10118104, upload-time = "2025-12-15T05:03:10.834Z" }, + { url = "https://files.pythonhosted.org/packages/de/9f/a6abae693f7a0c697dbb435aac52e958dc8da44e92e08ba88d2e42326176/mypy-1.19.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e3157c7594ff2ef1634ee058aafc56a82db665c9438fd41b390f3bde1ab12250", size = 13201927, upload-time = "2025-12-15T05:02:29.138Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/a4/45c35ccf6e1c65afc23a069f50e2c66f46bd3798cbe0d680c12d12935caa/mypy-1.19.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdb12f69bcc02700c2b47e070238f42cb87f18c0bc1fc4cdb4fb2bc5fd7a3b8b", size = 12206730, upload-time = "2025-12-15T05:03:01.325Z" }, + { url = "https://files.pythonhosted.org/packages/05/bb/cdcf89678e26b187650512620eec8368fded4cfd99cfcb431e4cdfd19dec/mypy-1.19.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f859fb09d9583a985be9a493d5cfc5515b56b08f7447759a0c5deaf68d80506e", size = 12724581, upload-time = "2025-12-15T05:03:20.087Z" }, + { url = "https://files.pythonhosted.org/packages/d1/32/dd260d52babf67bad8e6770f8e1102021877ce0edea106e72df5626bb0ec/mypy-1.19.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9a6538e0415310aad77cb94004ca6482330fece18036b5f360b62c45814c4ef", size = 13616252, upload-time = "2025-12-15T05:02:49.036Z" }, + { url = "https://files.pythonhosted.org/packages/71/d0/5e60a9d2e3bd48432ae2b454b7ef2b62a960ab51292b1eda2a95edd78198/mypy-1.19.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:da4869fc5e7f62a88f3fe0b5c919d1d9f7ea3cef92d3689de2823fd27e40aa75", size = 13840848, upload-time = "2025-12-15T05:02:55.95Z" }, + { url = "https://files.pythonhosted.org/packages/98/76/d32051fa65ecf6cc8c6610956473abdc9b4c43301107476ac03559507843/mypy-1.19.1-cp313-cp313-win_amd64.whl", hash = "sha256:016f2246209095e8eda7538944daa1d60e1e8134d98983b9fc1e92c1fc0cb8dd", size = 10135510, upload-time = "2025-12-15T05:02:58.438Z" }, + { url = "https://files.pythonhosted.org/packages/de/eb/b83e75f4c820c4247a58580ef86fcd35165028f191e7e1ba57128c52782d/mypy-1.19.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:06e6170bd5836770e8104c8fdd58e5e725cfeb309f0a6c681a811f557e97eac1", size = 13199744, upload-time = "2025-12-15T05:03:30.823Z" }, + { url = 
"https://files.pythonhosted.org/packages/94/28/52785ab7bfa165f87fcbb61547a93f98bb20e7f82f90f165a1f69bce7b3d/mypy-1.19.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:804bd67b8054a85447c8954215a906d6eff9cabeabe493fb6334b24f4bfff718", size = 12215815, upload-time = "2025-12-15T05:02:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/0a/c6/bdd60774a0dbfb05122e3e925f2e9e846c009e479dcec4821dad881f5b52/mypy-1.19.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:21761006a7f497cb0d4de3d8ef4ca70532256688b0523eee02baf9eec895e27b", size = 12740047, upload-time = "2025-12-15T05:03:33.168Z" }, + { url = "https://files.pythonhosted.org/packages/32/2a/66ba933fe6c76bd40d1fe916a83f04fed253152f451a877520b3c4a5e41e/mypy-1.19.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:28902ee51f12e0f19e1e16fbe2f8f06b6637f482c459dd393efddd0ec7f82045", size = 13601998, upload-time = "2025-12-15T05:03:13.056Z" }, + { url = "https://files.pythonhosted.org/packages/e3/da/5055c63e377c5c2418760411fd6a63ee2b96cf95397259038756c042574f/mypy-1.19.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:481daf36a4c443332e2ae9c137dfee878fcea781a2e3f895d54bd3002a900957", size = 13807476, upload-time = "2025-12-15T05:03:17.977Z" }, + { url = "https://files.pythonhosted.org/packages/cd/09/4ebd873390a063176f06b0dbf1f7783dd87bd120eae7727fa4ae4179b685/mypy-1.19.1-cp314-cp314-win_amd64.whl", hash = "sha256:8bb5c6f6d043655e055be9b542aa5f3bdd30e4f3589163e85f93f3640060509f", size = 10281872, upload-time = "2025-12-15T05:03:05.549Z" }, + { url = "https://files.pythonhosted.org/packages/8d/f4/4ce9a05ce5ded1de3ec1c1d96cf9f9504a04e54ce0ed55cfa38619a32b8d/mypy-1.19.1-py3-none-any.whl", hash = "sha256:f1235f5ea01b7db5468d53ece6aaddf1ad0b88d9e7462b86ef96fe04995d7247", size = 2471239, upload-time = "2025-12-15T05:03:07.248Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +source = { 
registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433, upload-time = "2023-02-04T12:11:27.157Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695, upload-time = "2023-02-04T12:11:25.002Z" }, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, +] + +[[package]] +name = "packaging" +version = "24.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/51/65/50db4dda066951078f0a96cf12f4b9ada6e4b811516bf0262c0f4f7064d4/packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002", size = 148788, upload-time = "2024-06-09T23:19:24.956Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/aa/cc0199a5f0ad350994d660967a8efb233fe0416e4639146c089643407ce6/packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124", size = 53985, 
upload-time = "2024-06-09T23:19:21.909Z" }, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, +] + +[[package]] +name = "pep8-naming" +version = "0.15.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "flake8" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8d/59/c32862134635ba231d45f1711035550dc38246396c27269a4cde4bfe18d2/pep8_naming-0.15.1.tar.gz", hash = "sha256:f6f4a499aba2deeda93c1f26ccc02f3da32b035c8b2db9696b730ef2c9639d29", size = 17640, upload-time = "2025-05-05T20:43:12.555Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a6/78/25281540f1121acaa78926f599a17ce102b8971bc20b096fa7fb6b5b59c1/pep8_naming-0.15.1-py3-none-any.whl", hash = "sha256:eb63925e7fd9e028c7f7ee7b1e413ec03d1ee5de0e627012102ee0222c273c86", size = 9561, upload-time = "2025-05-05T20:43:11.626Z" }, +] + +[[package]] +name = "platformdirs" +version = "4.3.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/13/fc/128cc9cb8f03208bdbf93d3aa862e16d376844a14f9a0ce5cf4507372de4/platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907", size = 21302, upload-time = "2024-09-17T19:06:50.688Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/3c/a6/bc1012356d8ece4d66dd75c4b9fc6c1f6650ddd5991e421177d9f8f671be/platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb", size = 18439, upload-time = "2024-09-17T19:06:49.212Z" }, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955, upload-time = "2024-04-20T21:34:42.531Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556, upload-time = "2024-04-20T21:34:40.434Z" }, +] + +[[package]] +name = "pre-commit" +version = "4.5.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cfgv" }, + { name = "identify" }, + { name = "nodeenv" }, + { name = "pyyaml" }, + { name = "virtualenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/40/f1/6d86a29246dfd2e9b6237f0b5823717f60cad94d47ddc26afa916d21f525/pre_commit-4.5.1.tar.gz", hash = "sha256:eb545fcff725875197837263e977ea257a402056661f09dae08e4b149b030a61", size = 198232, upload-time = "2025-12-16T21:14:33.552Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/19/fd3ef348460c80af7bb4669ea7926651d1f95c23ff2df18b9d24bab4f3fa/pre_commit-4.5.1-py2.py3-none-any.whl", hash = "sha256:3b3afd891e97337708c1674210f8eba659b52a38ea5f822ff142d10786221f77", size = 226437, upload-time = "2025-12-16T21:14:32.409Z" }, +] + +[[package]] +name = "pycodestyle" +version = "2.14.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/11/e0/abfd2a0d2efe47670df87f3e3a0e2edda42f055053c85361f19c0e2c1ca8/pycodestyle-2.14.0.tar.gz", hash = "sha256:c4b5b517d278089ff9d0abdec919cd97262a3367449ea1c8b49b91529167b783", size = 39472, upload-time = "2025-06-20T18:49:48.75Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/27/a58ddaf8c588a3ef080db9d0b7e0b97215cee3a45df74f3a94dbbf5c893a/pycodestyle-2.14.0-py2.py3-none-any.whl", hash = "sha256:dd6bf7cb4ee77f8e016f9c8e74a35ddd9f67e1d5fd4184d86c3b98e07099f42d", size = 31594, upload-time = "2025-06-20T18:49:47.491Z" }, +] + +[[package]] +name = "pyflakes" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/45/dc/fd034dc20b4b264b3d015808458391acbf9df40b1e54750ef175d39180b1/pyflakes-3.4.0.tar.gz", hash = "sha256:b24f96fafb7d2ab0ec5075b7350b3d2d2218eab42003821c06344973d3ea2f58", size = 64669, upload-time = "2025-06-20T18:45:27.834Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/2f/81d580a0fb83baeb066698975cb14a618bdbed7720678566f1b046a95fe8/pyflakes-3.4.0-py2.py3-none-any.whl", hash = "sha256:f742a7dbd0d9cb9ea41e9a24a918996e8170c799fa528688d40dd582c8265f4f", size = 63551, upload-time = "2025-06-20T18:45:26.937Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + 
+[[package]] +name = "pytest" +version = "9.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, +] + +[[package]] +name = "pytest-cov" +version = "7.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage", extra = ["toml"] }, + { name = "pluggy" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url 
= "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199, upload-time = "2024-08-06T20:31:40.178Z" }, + { url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758, upload-time = "2024-08-06T20:31:42.173Z" }, + { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463, upload-time = "2024-08-06T20:31:44.263Z" }, + { url = 
"https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280, upload-time = "2024-08-06T20:31:50.199Z" }, + { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239, upload-time = "2024-08-06T20:31:52.292Z" }, + { url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802, upload-time = "2024-08-06T20:31:53.836Z" }, + { url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527, upload-time = "2024-08-06T20:31:55.565Z" }, + { url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052, upload-time = "2024-08-06T20:31:56.914Z" }, + { url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774, upload-time = "2024-08-06T20:31:58.304Z" }, + { url = 
"https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612, upload-time = "2024-08-06T20:32:03.408Z" }, + { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040, upload-time = "2024-08-06T20:32:04.926Z" }, + { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829, upload-time = "2024-08-06T20:32:06.459Z" }, + { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167, upload-time = "2024-08-06T20:32:08.338Z" }, + { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952, upload-time = "2024-08-06T20:32:14.124Z" }, + { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301, upload-time = "2024-08-06T20:32:16.17Z" }, + { url = 
"https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638, upload-time = "2024-08-06T20:32:18.555Z" }, + { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850, upload-time = "2024-08-06T20:32:19.889Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980, upload-time = "2024-08-06T20:32:21.273Z" }, + { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, + { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, + { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" }, + { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" }, + { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" }, + { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" }, + { url = 
"https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" }, + { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" }, + { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" }, + { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" }, + { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" }, + { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" }, + { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, +] + +[[package]] +name = "ruff" +version = "0.14.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/57/08/52232a877978dd8f9cf2aeddce3e611b40a63287dfca29b6b8da791f5e8d/ruff-0.14.10.tar.gz", hash = "sha256:9a2e830f075d1a42cd28420d7809ace390832a490ed0966fe373ba288e77aaf4", size = 5859763, upload-time = "2025-12-18T19:28:57.98Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/60/01/933704d69f3f05ee16ef11406b78881733c186fe14b6a46b05cfcaf6d3b2/ruff-0.14.10-py3-none-linux_armv6l.whl", hash = "sha256:7a3ce585f2ade3e1f29ec1b92df13e3da262178df8c8bdf876f48fa0e8316c49", size = 13527080, upload-time = "2025-12-18T19:29:25.642Z" }, + { url = "https://files.pythonhosted.org/packages/df/58/a0349197a7dfa603ffb7f5b0470391efa79ddc327c1e29c4851e85b09cc5/ruff-0.14.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:674f9be9372907f7257c51f1d4fc902cb7cf014b9980152b802794317941f08f", size = 13797320, upload-time = "2025-12-18T19:29:02.571Z" }, + { url = 
"https://files.pythonhosted.org/packages/7b/82/36be59f00a6082e38c23536df4e71cdbc6af8d7c707eade97fcad5c98235/ruff-0.14.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d85713d522348837ef9df8efca33ccb8bd6fcfc86a2cde3ccb4bc9d28a18003d", size = 12918434, upload-time = "2025-12-18T19:28:51.202Z" }, + { url = "https://files.pythonhosted.org/packages/a6/00/45c62a7f7e34da92a25804f813ebe05c88aa9e0c25e5cb5a7d23dd7450e3/ruff-0.14.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6987ebe0501ae4f4308d7d24e2d0fe3d7a98430f5adfd0f1fead050a740a3a77", size = 13371961, upload-time = "2025-12-18T19:29:04.991Z" }, + { url = "https://files.pythonhosted.org/packages/40/31/a5906d60f0405f7e57045a70f2d57084a93ca7425f22e1d66904769d1628/ruff-0.14.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16a01dfb7b9e4eee556fbfd5392806b1b8550c9b4a9f6acd3dbe6812b193c70a", size = 13275629, upload-time = "2025-12-18T19:29:21.381Z" }, + { url = "https://files.pythonhosted.org/packages/3e/60/61c0087df21894cf9d928dc04bcd4fb10e8b2e8dca7b1a276ba2155b2002/ruff-0.14.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7165d31a925b7a294465fa81be8c12a0e9b60fb02bf177e79067c867e71f8b1f", size = 14029234, upload-time = "2025-12-18T19:29:00.132Z" }, + { url = "https://files.pythonhosted.org/packages/44/84/77d911bee3b92348b6e5dab5a0c898d87084ea03ac5dc708f46d88407def/ruff-0.14.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c561695675b972effb0c0a45db233f2c816ff3da8dcfbe7dfc7eed625f218935", size = 15449890, upload-time = "2025-12-18T19:28:53.573Z" }, + { url = "https://files.pythonhosted.org/packages/e9/36/480206eaefa24a7ec321582dda580443a8f0671fdbf6b1c80e9c3e93a16a/ruff-0.14.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4bb98fcbbc61725968893682fd4df8966a34611239c9fd07a1f6a07e7103d08e", size = 15123172, upload-time = "2025-12-18T19:29:23.453Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/38/68e414156015ba80cef5473d57919d27dfb62ec804b96180bafdeaf0e090/ruff-0.14.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f24b47993a9d8cb858429e97bdf8544c78029f09b520af615c1d261bf827001d", size = 14460260, upload-time = "2025-12-18T19:29:27.808Z" }, + { url = "https://files.pythonhosted.org/packages/b3/19/9e050c0dca8aba824d67cc0db69fb459c28d8cd3f6855b1405b3f29cc91d/ruff-0.14.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59aabd2e2c4fd614d2862e7939c34a532c04f1084476d6833dddef4afab87e9f", size = 14229978, upload-time = "2025-12-18T19:29:11.32Z" }, + { url = "https://files.pythonhosted.org/packages/51/eb/e8dd1dd6e05b9e695aa9dd420f4577debdd0f87a5ff2fedda33c09e9be8c/ruff-0.14.10-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:213db2b2e44be8625002dbea33bb9c60c66ea2c07c084a00d55732689d697a7f", size = 14338036, upload-time = "2025-12-18T19:29:09.184Z" }, + { url = "https://files.pythonhosted.org/packages/6a/12/f3e3a505db7c19303b70af370d137795fcfec136d670d5de5391e295c134/ruff-0.14.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b914c40ab64865a17a9a5b67911d14df72346a634527240039eb3bd650e5979d", size = 13264051, upload-time = "2025-12-18T19:29:13.431Z" }, + { url = "https://files.pythonhosted.org/packages/08/64/8c3a47eaccfef8ac20e0484e68e0772013eb85802f8a9f7603ca751eb166/ruff-0.14.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1484983559f026788e3a5c07c81ef7d1e97c1c78ed03041a18f75df104c45405", size = 13283998, upload-time = "2025-12-18T19:29:06.994Z" }, + { url = "https://files.pythonhosted.org/packages/12/84/534a5506f4074e5cc0529e5cd96cfc01bb480e460c7edf5af70d2bcae55e/ruff-0.14.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c70427132db492d25f982fffc8d6c7535cc2fd2c83fc8888f05caaa248521e60", size = 13601891, upload-time = "2025-12-18T19:28:55.811Z" }, + { url = 
"https://files.pythonhosted.org/packages/0d/1e/14c916087d8598917dbad9b2921d340f7884824ad6e9c55de948a93b106d/ruff-0.14.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5bcf45b681e9f1ee6445d317ce1fa9d6cba9a6049542d1c3d5b5958986be8830", size = 14336660, upload-time = "2025-12-18T19:29:16.531Z" }, + { url = "https://files.pythonhosted.org/packages/f2/1c/d7b67ab43f30013b47c12b42d1acd354c195351a3f7a1d67f59e54227ede/ruff-0.14.10-py3-none-win32.whl", hash = "sha256:104c49fc7ab73f3f3a758039adea978869a918f31b73280db175b43a2d9b51d6", size = 13196187, upload-time = "2025-12-18T19:29:19.006Z" }, + { url = "https://files.pythonhosted.org/packages/fb/9c/896c862e13886fae2af961bef3e6312db9ebc6adc2b156fe95e615dee8c1/ruff-0.14.10-py3-none-win_amd64.whl", hash = "sha256:466297bd73638c6bdf06485683e812db1c00c7ac96d4ddd0294a338c62fdc154", size = 14661283, upload-time = "2025-12-18T19:29:30.16Z" }, + { url = "https://files.pythonhosted.org/packages/74/31/b0e29d572670dca3674eeee78e418f20bdf97fa8aa9ea71380885e175ca0/ruff-0.14.10-py3-none-win_arm64.whl", hash = "sha256:e51d046cf6dda98a4633b8a8a771451107413b0f07183b2bef03f075599e44e6", size = 13729839, upload-time = "2025-12-18T19:28:48.636Z" }, +] + +[[package]] +name = "six" +version = "1.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/71/39/171f1c67cd00715f190ba0b100d606d440a28c93c7714febeca8b79af85e/six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", size = 34041, upload-time = "2021-05-05T14:18:18.379Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/5a/e7c31adbe875f2abbb91bd84cf2dc52d792b5a01506781dbcf25c91daf11/six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254", size = 11053, upload-time = "2021-05-05T14:18:17.237Z" }, +] + +[[package]] +name = "tomli" +version = "2.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { 
url = "https://files.pythonhosted.org/packages/c0/3f/d7af728f075fb08564c5949a9c95e44352e23dee646869fa104a3b2060a3/tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f", size = 15164, upload-time = "2022-02-08T10:54:04.006Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", size = 12757, upload-time = "2022-02-08T10:54:02.017Z" }, +] + +[[package]] +name = "types-python-dateutil" +version = "2.9.0.20251115" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6a/36/06d01fb52c0d57e9ad0c237654990920fa41195e4b3d640830dabf9eeb2f/types_python_dateutil-2.9.0.20251115.tar.gz", hash = "sha256:8a47f2c3920f52a994056b8786309b43143faa5a64d4cbb2722d6addabdf1a58", size = 16363, upload-time = "2025-11-15T03:00:13.717Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/0b/56961d3ba517ed0df9b3a27bfda6514f3d01b28d499d1bce9068cfe4edd1/types_python_dateutil-2.9.0.20251115-py3-none-any.whl", hash = "sha256:9cf9c1c582019753b8639a081deefd7e044b9fa36bd8217f565c6c4e36ee0624", size = 18251, upload-time = "2025-11-15T03:00:12.317Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321, upload-time = "2024-06-07T18:52:15.995Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = 
"sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438, upload-time = "2024-06-07T18:52:13.582Z" }, +] + +[[package]] +name = "virtualenv" +version = "20.26.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "distlib" }, + { name = "filelock" }, + { name = "platformdirs" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3f/40/abc5a766da6b0b2457f819feab8e9203cbeae29327bd241359f866a3da9d/virtualenv-20.26.6.tar.gz", hash = "sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48", size = 9372482, upload-time = "2024-09-27T16:28:57.502Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/59/90/57b8ac0c8a231545adc7698c64c5a36fa7cd8e376c691b9bde877269f2eb/virtualenv-20.26.6-py3-none-any.whl", hash = "sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2", size = 5999862, upload-time = "2024-09-27T16:28:54.798Z" }, +]