diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile new file mode 100644 index 0000000..ff261ba --- /dev/null +++ b/.devcontainer/Dockerfile @@ -0,0 +1,9 @@ +ARG VARIANT="3.9" +FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT} + +USER vscode + +RUN curl -sSf https://rye.astral.sh/get | RYE_VERSION="0.44.0" RYE_INSTALL_OPTION="--yes" bash +ENV PATH=/home/vscode/.rye/shims:$PATH + +RUN echo "[[ -d .venv ]] && source .venv/bin/activate || export PATH=\$PATH" >> /home/vscode/.bashrc diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 0000000..c17fdc1 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,43 @@ +// For format details, see https://aka.ms/devcontainer.json. For config options, see the +// README at: https://github.com/devcontainers/templates/tree/main/src/debian +{ + "name": "Debian", + "build": { + "dockerfile": "Dockerfile", + "context": ".." + }, + + "postStartCommand": "rye sync --all-features", + + "customizations": { + "vscode": { + "extensions": [ + "ms-python.python" + ], + "settings": { + "terminal.integrated.shell.linux": "/bin/bash", + "python.pythonPath": ".venv/bin/python", + "python.defaultInterpreterPath": ".venv/bin/python", + "python.typeChecking": "basic", + "terminal.integrated.env.linux": { + "PATH": "/home/vscode/.rye/shims:${env:PATH}" + } + } + } + }, + "features": { + "ghcr.io/devcontainers/features/node:1": {} + } + + // Features to add to the dev container. More info: https://containers.dev/features. + // "features": {}, + + // Use 'forwardPorts' to make a list of ports inside the container available locally. + // "forwardPorts": [], + + // Configure tool-specific properties. + // "customizations": {}, + + // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root. 
+ // "remoteUser": "root" +} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..795bdad --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,98 @@ +name: CI +on: + push: + branches-ignore: + - 'generated' + - 'codegen/**' + - 'integrated/**' + - 'stl-preview-head/**' + - 'stl-preview-base/**' + pull_request: + branches-ignore: + - 'stl-preview-head/**' + - 'stl-preview-base/**' + +jobs: + lint: + timeout-minutes: 10 + name: lint + runs-on: ${{ github.repository == 'stainless-sdks/kernel-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} + if: github.event_name == 'push' || github.event.pull_request.head.repo.fork + steps: + - uses: actions/checkout@v4 + + - name: Install Rye + run: | + curl -sSf https://rye.astral.sh/get | bash + echo "$HOME/.rye/shims" >> $GITHUB_PATH + env: + RYE_VERSION: '0.44.0' + RYE_INSTALL_OPTION: '--yes' + + - name: Install dependencies + run: rye sync --all-features + + - name: Run lints + run: ./scripts/lint + + build: + if: github.event_name == 'push' || github.event.pull_request.head.repo.fork + timeout-minutes: 10 + name: build + permissions: + contents: read + id-token: write + runs-on: ${{ github.repository == 'stainless-sdks/kernel-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} + steps: + - uses: actions/checkout@v4 + + - name: Install Rye + run: | + curl -sSf https://rye.astral.sh/get | bash + echo "$HOME/.rye/shims" >> $GITHUB_PATH + env: + RYE_VERSION: '0.44.0' + RYE_INSTALL_OPTION: '--yes' + + - name: Install dependencies + run: rye sync --all-features + + - name: Run build + run: rye build + + - name: Get GitHub OIDC Token + if: github.repository == 'stainless-sdks/kernel-python' + id: github-oidc + uses: actions/github-script@v6 + with: + script: core.setOutput('github_token', await core.getIDToken()); + + - name: Upload tarball + if: github.repository == 'stainless-sdks/kernel-python' + env: + URL: https://pkg.stainless.com/s + AUTH: ${{ steps.github-oidc.outputs.github_token }} + SHA: ${{ github.sha }} + run: ./scripts/utils/upload-artifact.sh + + test: + timeout-minutes: 10 + name: test + runs-on: ${{ github.repository == 'stainless-sdks/kernel-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} + if: github.event_name == 'push' || github.event.pull_request.head.repo.fork + steps: + - uses: actions/checkout@v4 + + - name: Install Rye + run: | + curl -sSf https://rye.astral.sh/get | bash + echo "$HOME/.rye/shims" >> $GITHUB_PATH + env: + RYE_VERSION: '0.44.0' + RYE_INSTALL_OPTION: '--yes' + + - name: Bootstrap + run: ./scripts/bootstrap + + - name: Run tests + run: ./scripts/test diff --git a/.github/workflows/publish-pypi.yml b/.github/workflows/publish-pypi.yml new file mode 100644 index 0000000..994e625 --- /dev/null +++ b/.github/workflows/publish-pypi.yml @@ -0,0 +1,31 @@ +# This workflow is triggered when a GitHub release is created. +# It can also be run manually to re-publish to PyPI in case it failed for some reason. 
+# You can run this workflow by navigating to https://www.github.com/kernel/kernel-python-sdk/actions/workflows/publish-pypi.yml +name: Publish PyPI +on: + workflow_dispatch: + + release: + types: [published] + +jobs: + publish: + name: publish + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - name: Install Rye + run: | + curl -sSf https://rye.astral.sh/get | bash + echo "$HOME/.rye/shims" >> $GITHUB_PATH + env: + RYE_VERSION: '0.44.0' + RYE_INSTALL_OPTION: '--yes' + + - name: Publish to PyPI + run: | + bash ./bin/publish-pypi + env: + PYPI_TOKEN: ${{ secrets.KERNEL_PYPI_TOKEN || secrets.PYPI_TOKEN }} diff --git a/.github/workflows/release-doctor.yml b/.github/workflows/release-doctor.yml new file mode 100644 index 0000000..ba1be2c --- /dev/null +++ b/.github/workflows/release-doctor.yml @@ -0,0 +1,21 @@ +name: Release Doctor +on: + pull_request: + branches: + - main + workflow_dispatch: + +jobs: + release_doctor: + name: release doctor + runs-on: ubuntu-latest + if: github.repository == 'kernel/kernel-python-sdk' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch' || startsWith(github.head_ref, 'release-please') || github.head_ref == 'next') + + steps: + - uses: actions/checkout@v4 + + - name: Check release environment + run: | + bash ./bin/check-release-environment + env: + PYPI_TOKEN: ${{ secrets.KERNEL_PYPI_TOKEN || secrets.PYPI_TOKEN }} diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..95ceb18 --- /dev/null +++ b/.gitignore @@ -0,0 +1,15 @@ +.prism.log +_dev + +__pycache__ +.mypy_cache + +dist + +.venv +.idea + +.env +.envrc +codegen.log +Brewfile.lock.json diff --git a/.python-version b/.python-version new file mode 100644 index 0000000..43077b2 --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.9.18 diff --git a/.release-please-manifest.json b/.release-please-manifest.json new file mode 100644 index 0000000..d2d60a3 --- /dev/null +++ b/.release-please-manifest.json @@ -0,0 +1,3 @@ +{ + ".": "0.24.0" +} \ No newline at end of file diff --git a/.stats.yml b/.stats.yml new file mode 100644 index 0000000..9ab4346 --- /dev/null +++ b/.stats.yml @@ -0,0 +1,4 @@ +configured_endpoints: 89 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/kernel%2Fkernel-8d66dbedea5b240936b338809f272568ca84a452fc13dbda835479f2ec068b41.yml +openapi_spec_hash: 7c499bfce2e996f1fff5e7791cea390e +config_hash: 2ee8c7057fa9b05cd0dabd23247c40ec diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..5b01030 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "python.analysis.importFormat": "relative", +} diff --git a/Brewfile b/Brewfile new file mode 100644 index 0000000..492ca37 --- /dev/null +++ b/Brewfile @@ -0,0 +1,2 @@ +brew "rye" + diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..9cb624f --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,128 @@ +## Setting up the environment + +### With Rye + +We use [Rye](https://rye.astral.sh/) to manage dependencies because it will automatically provision a Python environment with the expected Python version. 
To set it up, run: + +```sh +$ ./scripts/bootstrap +``` + +Or [install Rye manually](https://rye.astral.sh/guide/installation/) and run: + +```sh +$ rye sync --all-features +``` + +You can then run scripts using `rye run python script.py` or by activating the virtual environment: + +```sh +# Activate the virtual environment - https://docs.python.org/3/library/venv.html#how-venvs-work +$ source .venv/bin/activate + +# now you can omit the `rye run` prefix +$ python script.py +``` + +### Without Rye + +Alternatively if you don't want to install `Rye`, you can stick with the standard `pip` setup by ensuring you have the Python version specified in `.python-version`, create a virtual environment however you desire and then install dependencies using this command: + +```sh +$ pip install -r requirements-dev.lock +``` + +## Modifying/Adding code + +Most of the SDK is generated code. Modifications to code will be persisted between generations, but may +result in merge conflicts between manual patches and changes from the generator. The generator will never +modify the contents of the `src/kernel/lib/` and `examples/` directories. + +## Adding and running examples + +All files in the `examples/` directory are not modified by the generator and can be freely edited or added to. + +```py +# add an example to examples/.py + +#!/usr/bin/env -S rye run python +… +``` + +```sh +$ chmod +x examples/.py +# run the example against your api +$ ./examples/.py +``` + +## Using the repository from source + +If you’d like to use the repository from source, you can either install from git or link to a cloned repository: + +To install via git: + +```sh +$ pip install git+ssh://git@github.com/kernel/kernel-python-sdk.git +``` + +Alternatively, you can build from source and install the wheel file: + +Building this package will create two files in the `dist/` directory, a `.tar.gz` containing the source files and a `.whl` that can be used to install the package efficiently. + +To create a distributable version of the library, all you have to do is run this command: + +```sh +$ rye build +# or +$ python -m build +``` + +Then to install: + +```sh +$ pip install ./path-to-wheel-file.whl +``` + +## Running tests + +Most tests require you to [set up a mock server](https://github.com/stoplightio/prism) against the OpenAPI spec to run the tests. + +```sh +# you will need npm installed +$ npx prism mock path/to/your/openapi.yml +``` + +```sh +$ ./scripts/test +``` + +## Linting and formatting + +This repository uses [ruff](https://github.com/astral-sh/ruff) and +[black](https://github.com/psf/black) to format the code in the repository. + +To lint: + +```sh +$ ./scripts/lint +``` + +To format and fix all ruff issues automatically: + +```sh +$ ./scripts/format +``` + +## Publishing and releases + +Changes made to this repository via the automated release PR pipeline should publish to PyPI automatically. If +the changes aren't made through the automated pipeline, you may want to make releases manually. + +### Publish with a GitHub workflow + +You can release to package managers by using [the `Publish PyPI` GitHub action](https://www.github.com/kernel/kernel-python-sdk/actions/workflows/publish-pypi.yml). This requires a setup organization or repository secret to be set up. + +### Publish manually + +If you need to manually release a package, you can run the `bin/publish-pypi` script with a `PYPI_TOKEN` set on +the environment. 
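For example, a minimal sketch of a manual release (the token value below is a placeholder; substitute your real PyPI token):

```sh
# build and upload the current version; the script reads PYPI_TOKEN from the environment
$ PYPI_TOKEN="pypi-..." bash ./bin/publish-pypi
```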
diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..3b7d20d --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2026 Kernel + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.md b/README.md index 6866e5b..d3e4341 100644 --- a/README.md +++ b/README.md @@ -1 +1,493 @@ -# kernel-python \ No newline at end of file +# Kernel Python API library + + +[![PyPI version](https://img.shields.io/pypi/v/kernel.svg?label=pypi%20(stable))](https://pypi.org/project/kernel/) + +The Kernel Python library provides convenient access to the Kernel REST API from any Python 3.9+ +application. The library includes type definitions for all request params and response fields, +and offers both synchronous and asynchronous clients powered by [httpx](https://github.com/encode/httpx). + +It is generated with [Stainless](https://www.stainless.com/). + +## Documentation + +The REST API documentation can be found on [docs.onkernel.com](https://docs.onkernel.com). The full API of this library can be found in [api.md](api.md). + +## Installation + +```sh +# install from PyPI +pip install kernel +``` + +## Usage + +The full API of this library can be found in [api.md](api.md). + +```python +import os +from kernel import Kernel + +client = Kernel( + api_key=os.environ.get("KERNEL_API_KEY"), # This is the default and can be omitted + # defaults to "production". + environment="development", +) + +browser = client.browsers.create( + stealth=True, +) +print(browser.session_id) +``` + +While you can provide an `api_key` keyword argument, +we recommend using [python-dotenv](https://pypi.org/project/python-dotenv/) +to add `KERNEL_API_KEY="My API Key"` to your `.env` file +so that your API Key is not stored in source control. + +## Async usage + +Simply import `AsyncKernel` instead of `Kernel` and use `await` with each API call: + +```python +import os +import asyncio +from kernel import AsyncKernel + +client = AsyncKernel( + api_key=os.environ.get("KERNEL_API_KEY"), # This is the default and can be omitted + # defaults to "production". + environment="development", +) + + +async def main() -> None: + browser = await client.browsers.create( + stealth=True, + ) + print(browser.session_id) + + +asyncio.run(main()) +``` + +Functionality between the synchronous and asynchronous clients is otherwise identical. + +### With aiohttp + +By default, the async client uses `httpx` for HTTP requests. However, for improved concurrency performance you may also use `aiohttp` as the HTTP backend. + +You can enable this by installing `aiohttp`: + +```sh +# install from PyPI +pip install kernel[aiohttp] +``` + +Then you can enable it by instantiating the client with `http_client=DefaultAioHttpClient()`: + +```python +import os +import asyncio +from kernel import DefaultAioHttpClient +from kernel import AsyncKernel + + +async def main() -> None: + async with AsyncKernel( + api_key=os.environ.get("KERNEL_API_KEY"), # This is the default and can be omitted + http_client=DefaultAioHttpClient(), + ) as client: + browser = await client.browsers.create( + stealth=True, + ) + print(browser.session_id) + + +asyncio.run(main()) +``` + +## Using types + +Nested request parameters are [TypedDicts](https://docs.python.org/3/library/typing.html#typing.TypedDict). 
Responses are [Pydantic models](https://docs.pydantic.dev) which also provide helper methods for things like: + +- Serializing back into JSON, `model.to_json()` +- Converting to a dictionary, `model.to_dict()` + +Typed requests and responses provide autocomplete and documentation within your editor. If you would like to see type errors in VS Code to help catch bugs earlier, set `python.analysis.typeCheckingMode` to `basic`. + +## Pagination + +List methods in the Kernel API are paginated. + +This library provides auto-paginating iterators with each list response, so you do not have to request successive pages manually: + +```python +from kernel import Kernel + +client = Kernel() + +all_deployments = [] +# Automatically fetches more pages as needed. +for deployment in client.deployments.list( + app_name="YOUR_APP", + limit=2, +): + # Do something with deployment here + all_deployments.append(deployment) +print(all_deployments) +``` + +Or, asynchronously: + +```python +import asyncio +from kernel import AsyncKernel + +client = AsyncKernel() + + +async def main() -> None: + all_deployments = [] + # Iterate through items across all pages, issuing requests as needed. + async for deployment in client.deployments.list( + app_name="YOUR_APP", + limit=2, + ): + all_deployments.append(deployment) + print(all_deployments) + + +asyncio.run(main()) +``` + +Alternatively, you can use the `.has_next_page()`, `.next_page_info()`, or `.get_next_page()` methods for more granular control working with pages: + +```python +first_page = await client.deployments.list( + app_name="YOUR_APP", + limit=2, +) +if first_page.has_next_page(): + print(f"will fetch next page using these details: {first_page.next_page_info()}") + next_page = await first_page.get_next_page() + print(f"number of items we just fetched: {len(next_page.items)}") + +# Remove `await` for non-async usage. +``` + +Or just work directly with the returned data: + +```python +first_page = await client.deployments.list( + app_name="YOUR_APP", + limit=2, +) + +print( + f"the current start offset for this page: {first_page.next_offset}" +) # => "the current start offset for this page: 1" +for deployment in first_page.items: + print(deployment.id) + +# Remove `await` for non-async usage. +``` + +## Nested params + +Nested parameters are dictionaries, typed using `TypedDict`, for example: + +```python +from kernel import Kernel + +client = Kernel() + +browser = client.browsers.create( + persistence={"id": "my-awesome-browser-for-user-1234"}, +) +print(browser.persistence) +``` + +## File uploads + +Request parameters that correspond to file uploads can be passed as `bytes`, or a [`PathLike`](https://docs.python.org/3/library/os.html#os.PathLike) instance or a tuple of `(filename, contents, media type)`. + +```python +from pathlib import Path +from kernel import Kernel + +client = Kernel() + +client.deployments.create( + file=Path("/path/to/file"), +) +``` + +The async client uses the exact same interface. If you pass a [`PathLike`](https://docs.python.org/3/library/os.html#os.PathLike) instance, the file contents will be read asynchronously automatically. + +## Handling errors + +When the library is unable to connect to the API (for example, due to network connection problems or a timeout), a subclass of `kernel.APIConnectionError` is raised. + +When the API returns a non-success status code (that is, 4xx or 5xx +response), a subclass of `kernel.APIStatusError` is raised, containing `status_code` and `response` properties. 
+ +All errors inherit from `kernel.APIError`. + +```python +import kernel +from kernel import Kernel + +client = Kernel() + +try: + client.browsers.create( + stealth=True, + ) +except kernel.APIConnectionError as e: + print("The server could not be reached") + print(e.__cause__) # an underlying Exception, likely raised within httpx. +except kernel.RateLimitError as e: + print("A 429 status code was received; we should back off a bit.") +except kernel.APIStatusError as e: + print("Another non-200-range status code was received") + print(e.status_code) + print(e.response) +``` + +Error codes are as follows: + +| Status Code | Error Type | +| ----------- | -------------------------- | +| 400 | `BadRequestError` | +| 401 | `AuthenticationError` | +| 403 | `PermissionDeniedError` | +| 404 | `NotFoundError` | +| 422 | `UnprocessableEntityError` | +| 429 | `RateLimitError` | +| >=500 | `InternalServerError` | +| N/A | `APIConnectionError` | + +### Retries + +Certain errors are automatically retried 2 times by default, with a short exponential backoff. +Connection errors (for example, due to a network connectivity problem), 408 Request Timeout, 409 Conflict, +429 Rate Limit, and >=500 Internal errors are all retried by default. + +You can use the `max_retries` option to configure or disable retry settings: + +```python +from kernel import Kernel + +# Configure the default for all requests: +client = Kernel( + # default is 2 + max_retries=0, +) + +# Or, configure per-request: +client.with_options(max_retries=5).browsers.create( + stealth=True, +) +``` + +### Timeouts + +By default requests time out after 1 minute. You can configure this with a `timeout` option, +which accepts a float or an [`httpx.Timeout`](https://www.python-httpx.org/advanced/timeouts/#fine-tuning-the-configuration) object: + +```python +from kernel import Kernel + +# Configure the default for all requests: +client = Kernel( + # 20 seconds (default is 1 minute) + timeout=20.0, +) + +# More granular control: +client = Kernel( + timeout=httpx.Timeout(60.0, read=5.0, write=10.0, connect=2.0), +) + +# Override per-request: +client.with_options(timeout=5.0).browsers.create( + stealth=True, +) +``` + +On timeout, an `APITimeoutError` is thrown. + +Note that requests that time out are [retried twice by default](#retries). + +## Advanced + +### Logging + +We use the standard library [`logging`](https://docs.python.org/3/library/logging.html) module. + +You can enable logging by setting the environment variable `KERNEL_LOG` to `info`. + +```shell +$ export KERNEL_LOG=info +``` + +Or to `debug` for more verbose logging. + +### How to tell whether `None` means `null` or missing + +In an API response, a field may be explicitly `null`, or missing entirely; in either case, its value is `None` in this library. You can differentiate the two cases with `.model_fields_set`: + +```py +if response.my_field is None: + if 'my_field' not in response.model_fields_set: + print('Got json like {}, without a "my_field" key present at all.') + else: + print('Got json like {"my_field": null}.') +``` + +### Accessing raw response data (e.g. 
headers) + +The "raw" Response object can be accessed by prefixing `.with_raw_response.` to any HTTP method call, e.g., + +```py +from kernel import Kernel + +client = Kernel() +response = client.browsers.with_raw_response.create( + stealth=True, +) +print(response.headers.get('X-My-Header')) + +browser = response.parse() # get the object that `browsers.create()` would have returned +print(browser.session_id) +``` + +These methods return an [`APIResponse`](https://github.com/kernel/kernel-python-sdk/tree/main/src/kernel/_response.py) object. + +The async client returns an [`AsyncAPIResponse`](https://github.com/kernel/kernel-python-sdk/tree/main/src/kernel/_response.py) with the same structure, the only difference being `await`able methods for reading the response content. + +#### `.with_streaming_response` + +The above interface eagerly reads the full response body when you make the request, which may not always be what you want. + +To stream the response body, use `.with_streaming_response` instead, which requires a context manager and only reads the response body once you call `.read()`, `.text()`, `.json()`, `.iter_bytes()`, `.iter_text()`, `.iter_lines()` or `.parse()`. In the async client, these are async methods. + +```python +with client.browsers.with_streaming_response.create( + stealth=True, +) as response: + print(response.headers.get("X-My-Header")) + + for line in response.iter_lines(): + print(line) +``` + +The context manager is required so that the response will reliably be closed. + +### Making custom/undocumented requests + +This library is typed for convenient access to the documented API. + +If you need to access undocumented endpoints, params, or response properties, the library can still be used. + +#### Undocumented endpoints + +To make requests to undocumented endpoints, you can make requests using `client.get`, `client.post`, and other +http verbs. Options on the client will be respected (such as retries) when making this request. + +```py +import httpx + +response = client.post( + "/foo", + cast_to=httpx.Response, + body={"my_param": True}, +) + +print(response.headers.get("x-foo")) +``` + +#### Undocumented request params + +If you want to explicitly send an extra param, you can do so with the `extra_query`, `extra_body`, and `extra_headers` request +options. + +#### Undocumented response properties + +To access undocumented response properties, you can access the extra fields like `response.unknown_prop`. You +can also get all the extra fields on the Pydantic model as a dict with +[`response.model_extra`](https://docs.pydantic.dev/latest/api/base_model/#pydantic.BaseModel.model_extra). 
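For example, here is a minimal sketch combining the two: it uses the `extra_headers`/`extra_body` request options described above together with `model_extra` on the response, while the particular header and body names are purely illustrative and not part of the documented API.

```python
from kernel import Kernel

client = Kernel()

browser = client.browsers.create(
    stealth=True,
    # Undocumented values are merged into the outgoing request;
    # these particular names are illustrative only.
    extra_headers={"X-My-Header": "value"},
    extra_body={"my_param": True},
)

# Extra, undocumented fields returned by the API stay accessible on the model.
print(browser.model_extra)
```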
+ +### Configuring the HTTP client + +You can directly override the [httpx client](https://www.python-httpx.org/api/#client) to customize it for your use case, including: + +- Support for [proxies](https://www.python-httpx.org/advanced/proxies/) +- Custom [transports](https://www.python-httpx.org/advanced/transports/) +- Additional [advanced](https://www.python-httpx.org/advanced/clients/) functionality + +```python +import httpx +from kernel import Kernel, DefaultHttpxClient + +client = Kernel( + # Or use the `KERNEL_BASE_URL` env var + base_url="http://my.test.server.example.com:8083", + http_client=DefaultHttpxClient( + proxy="http://my.test.proxy.example.com", + transport=httpx.HTTPTransport(local_address="0.0.0.0"), + ), +) +``` + +You can also customize the client on a per-request basis by using `with_options()`: + +```python +client.with_options(http_client=DefaultHttpxClient(...)) +``` + +### Managing HTTP resources + +By default the library closes underlying HTTP connections whenever the client is [garbage collected](https://docs.python.org/3/reference/datamodel.html#object.__del__). You can manually close the client using the `.close()` method if desired, or with a context manager that closes when exiting. + +```py +from kernel import Kernel + +with Kernel() as client: + # make requests here + ... + +# HTTP client is now closed +``` + +## Versioning + +This package generally follows [SemVer](https://semver.org/spec/v2.0.0.html) conventions, though certain backwards-incompatible changes may be released as minor versions: + +1. Changes that only affect static types, without breaking runtime behavior. +2. Changes to library internals which are technically public but not intended or documented for external use. _(Please open a GitHub issue to let us know if you are relying on such internals.)_ +3. Changes that we do not expect to impact the vast majority of users in practice. + +We take backwards-compatibility seriously and work hard to ensure you can rely on a smooth upgrade experience. + +We are keen for your feedback; please open an [issue](https://www.github.com/kernel/kernel-python-sdk/issues) with questions, bugs, or suggestions. + +### Determining the installed version + +If you've upgraded to the latest version but aren't seeing any new features you were expecting then your python environment is likely still using an older version. + +You can determine the version that is being used at runtime with: + +```py +import kernel +print(kernel.__version__) +``` + +## Requirements + +Python 3.9 or higher. + +## Contributing + +See [the contributing documentation](./CONTRIBUTING.md). diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000..0c6c32d --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,23 @@ +# Security Policy + +## Reporting Security Issues + +This SDK is generated by [Stainless Software Inc](http://stainless.com). Stainless takes security seriously, and encourages you to report any security vulnerability promptly so that appropriate action can be taken. + +To report a security issue, please contact the Stainless team at security@stainless.com. + +## Responsible Disclosure + +We appreciate the efforts of security researchers and individuals who help us maintain the security of +SDKs we generate. If you believe you have found a security vulnerability, please adhere to responsible +disclosure practices by allowing us a reasonable amount of time to investigate and address the issue +before making any information public. 
+ +## Reporting Non-SDK Related Security Issues + +If you encounter security issues that are not directly related to SDKs but pertain to the services +or products provided by Kernel, please follow the respective company's security reporting guidelines. + +--- + +Thank you for helping us keep the SDKs and systems they interact with secure. diff --git a/api.md b/api.md new file mode 100644 index 0000000..db5f2df --- /dev/null +++ b/api.md @@ -0,0 +1,342 @@ +# Shared Types + +```python +from kernel.types import ( + AppAction, + BrowserExtension, + BrowserProfile, + BrowserViewport, + ErrorDetail, + ErrorEvent, + ErrorModel, + HeartbeatEvent, + LogEvent, +) +``` + +# Deployments + +Types: + +```python +from kernel.types import ( + DeploymentStateEvent, + DeploymentCreateResponse, + DeploymentRetrieveResponse, + DeploymentListResponse, + DeploymentFollowResponse, +) +``` + +Methods: + +- client.deployments.create(\*\*params) -> DeploymentCreateResponse +- client.deployments.retrieve(id) -> DeploymentRetrieveResponse +- client.deployments.list(\*\*params) -> SyncOffsetPagination[DeploymentListResponse] +- client.deployments.follow(id, \*\*params) -> DeploymentFollowResponse + +# Apps + +Types: + +```python +from kernel.types import AppListResponse +``` + +Methods: + +- client.apps.list(\*\*params) -> SyncOffsetPagination[AppListResponse] + +# Invocations + +Types: + +```python +from kernel.types import ( + InvocationStateEvent, + InvocationCreateResponse, + InvocationRetrieveResponse, + InvocationUpdateResponse, + InvocationListResponse, + InvocationFollowResponse, +) +``` + +Methods: + +- client.invocations.create(\*\*params) -> InvocationCreateResponse +- client.invocations.retrieve(id) -> InvocationRetrieveResponse +- client.invocations.update(id, \*\*params) -> InvocationUpdateResponse +- client.invocations.list(\*\*params) -> SyncOffsetPagination[InvocationListResponse] +- client.invocations.delete_browsers(id) -> None +- client.invocations.follow(id, \*\*params) -> InvocationFollowResponse + +# Browsers + +Types: + +```python +from kernel.types import ( + BrowserPersistence, + Profile, + BrowserCreateResponse, + BrowserRetrieveResponse, + BrowserListResponse, +) +``` + +Methods: + +- client.browsers.create(\*\*params) -> BrowserCreateResponse +- client.browsers.retrieve(id) -> BrowserRetrieveResponse +- client.browsers.list(\*\*params) -> SyncOffsetPagination[BrowserListResponse] +- client.browsers.delete(\*\*params) -> None +- client.browsers.delete_by_id(id) -> None +- client.browsers.load_extensions(id, \*\*params) -> None + +## Replays + +Types: + +```python +from kernel.types.browsers import ReplayListResponse, ReplayStartResponse +``` + +Methods: + +- client.browsers.replays.list(id) -> ReplayListResponse +- client.browsers.replays.download(replay_id, \*, id) -> BinaryAPIResponse +- client.browsers.replays.start(id, \*\*params) -> ReplayStartResponse +- client.browsers.replays.stop(replay_id, \*, id) -> None + +## Fs + +Types: + +```python +from kernel.types.browsers import FFileInfoResponse, FListFilesResponse +``` + +Methods: + +- client.browsers.fs.create_directory(id, \*\*params) -> None +- client.browsers.fs.delete_directory(id, \*\*params) -> None +- client.browsers.fs.delete_file(id, \*\*params) -> None +- client.browsers.fs.download_dir_zip(id, \*\*params) -> BinaryAPIResponse +- client.browsers.fs.file_info(id, \*\*params) -> FFileInfoResponse +- client.browsers.fs.list_files(id, \*\*params) -> FListFilesResponse +- client.browsers.fs.move(id, \*\*params) -> None +- 
client.browsers.fs.read_file(id, \*\*params) -> BinaryAPIResponse +- client.browsers.fs.set_file_permissions(id, \*\*params) -> None +- client.browsers.fs.upload(id, \*\*params) -> None +- client.browsers.fs.upload_zip(id, \*\*params) -> None +- client.browsers.fs.write_file(id, contents, \*\*params) -> None + +### Watch + +Types: + +```python +from kernel.types.browsers.fs import WatchEventsResponse, WatchStartResponse +``` + +Methods: + +- client.browsers.fs.watch.events(watch_id, \*, id) -> WatchEventsResponse +- client.browsers.fs.watch.start(id, \*\*params) -> WatchStartResponse +- client.browsers.fs.watch.stop(watch_id, \*, id) -> None + +## Process + +Types: + +```python +from kernel.types.browsers import ( + ProcessExecResponse, + ProcessKillResponse, + ProcessSpawnResponse, + ProcessStatusResponse, + ProcessStdinResponse, + ProcessStdoutStreamResponse, +) +``` + +Methods: + +- client.browsers.process.exec(id, \*\*params) -> ProcessExecResponse +- client.browsers.process.kill(process_id, \*, id, \*\*params) -> ProcessKillResponse +- client.browsers.process.spawn(id, \*\*params) -> ProcessSpawnResponse +- client.browsers.process.status(process_id, \*, id) -> ProcessStatusResponse +- client.browsers.process.stdin(process_id, \*, id, \*\*params) -> ProcessStdinResponse +- client.browsers.process.stdout_stream(process_id, \*, id) -> ProcessStdoutStreamResponse + +## Logs + +Methods: + +- client.browsers.logs.stream(id, \*\*params) -> LogEvent + +## Computer + +Types: + +```python +from kernel.types.browsers import ComputerSetCursorVisibilityResponse +``` + +Methods: + +- client.browsers.computer.capture_screenshot(id, \*\*params) -> BinaryAPIResponse +- client.browsers.computer.click_mouse(id, \*\*params) -> None +- client.browsers.computer.drag_mouse(id, \*\*params) -> None +- client.browsers.computer.move_mouse(id, \*\*params) -> None +- client.browsers.computer.press_key(id, \*\*params) -> None +- client.browsers.computer.scroll(id, \*\*params) -> None +- client.browsers.computer.set_cursor_visibility(id, \*\*params) -> ComputerSetCursorVisibilityResponse +- client.browsers.computer.type_text(id, \*\*params) -> None + +## Playwright + +Types: + +```python +from kernel.types.browsers import PlaywrightExecuteResponse +``` + +Methods: + +- client.browsers.playwright.execute(id, \*\*params) -> PlaywrightExecuteResponse + +# Profiles + +Types: + +```python +from kernel.types import ProfileListResponse +``` + +Methods: + +- client.profiles.create(\*\*params) -> Profile +- client.profiles.retrieve(id_or_name) -> Profile +- client.profiles.list() -> ProfileListResponse +- client.profiles.delete(id_or_name) -> None +- client.profiles.download(id_or_name) -> BinaryAPIResponse + +# Proxies + +Types: + +```python +from kernel.types import ( + ProxyCreateResponse, + ProxyRetrieveResponse, + ProxyListResponse, + ProxyCheckResponse, +) +``` + +Methods: + +- client.proxies.create(\*\*params) -> ProxyCreateResponse +- client.proxies.retrieve(id) -> ProxyRetrieveResponse +- client.proxies.list() -> ProxyListResponse +- client.proxies.delete(id) -> None +- client.proxies.check(id) -> ProxyCheckResponse + +# Extensions + +Types: + +```python +from kernel.types import ExtensionListResponse, ExtensionUploadResponse +``` + +Methods: + +- client.extensions.list() -> ExtensionListResponse +- client.extensions.delete(id_or_name) -> None +- client.extensions.download(id_or_name) -> BinaryAPIResponse +- client.extensions.download_from_chrome_store(\*\*params) -> BinaryAPIResponse +- 
client.extensions.upload(\*\*params) -> ExtensionUploadResponse + +# BrowserPools + +Types: + +```python +from kernel.types import BrowserPool, BrowserPoolListResponse, BrowserPoolAcquireResponse +``` + +Methods: + +- client.browser_pools.create(\*\*params) -> BrowserPool +- client.browser_pools.retrieve(id_or_name) -> BrowserPool +- client.browser_pools.update(id_or_name, \*\*params) -> BrowserPool +- client.browser_pools.list() -> BrowserPoolListResponse +- client.browser_pools.delete(id_or_name, \*\*params) -> None +- client.browser_pools.acquire(id_or_name, \*\*params) -> BrowserPoolAcquireResponse +- client.browser_pools.flush(id_or_name) -> None +- client.browser_pools.release(id_or_name, \*\*params) -> None + +# Agents + +## Auth + +Types: + +```python +from kernel.types.agents import ( + AgentAuthInvocationResponse, + AgentAuthSubmitResponse, + AuthAgent, + AuthAgentCreateRequest, + AuthAgentInvocationCreateRequest, + AuthAgentInvocationCreateResponse, + DiscoveredField, +) +``` + +Methods: + +- client.agents.auth.create(\*\*params) -> AuthAgent +- client.agents.auth.retrieve(id) -> AuthAgent +- client.agents.auth.list(\*\*params) -> SyncOffsetPagination[AuthAgent] +- client.agents.auth.delete(id) -> None + +### Invocations + +Types: + +```python +from kernel.types.agents.auth import InvocationExchangeResponse +``` + +Methods: + +- client.agents.auth.invocations.create(\*\*params) -> AuthAgentInvocationCreateResponse +- client.agents.auth.invocations.retrieve(invocation_id) -> AgentAuthInvocationResponse +- client.agents.auth.invocations.exchange(invocation_id, \*\*params) -> InvocationExchangeResponse +- client.agents.auth.invocations.submit(invocation_id, \*\*params) -> AgentAuthSubmitResponse + +# Credentials + +Types: + +```python +from kernel.types import ( + CreateCredentialRequest, + Credential, + UpdateCredentialRequest, + CredentialTotpCodeResponse, +) +``` + +Methods: + +- client.credentials.create(\*\*params) -> Credential +- client.credentials.retrieve(id_or_name) -> Credential +- client.credentials.update(id_or_name, \*\*params) -> Credential +- client.credentials.list(\*\*params) -> SyncOffsetPagination[Credential] +- client.credentials.delete(id_or_name) -> None +- client.credentials.totp_code(id_or_name) -> CredentialTotpCodeResponse diff --git a/bin/check-release-environment b/bin/check-release-environment new file mode 100644 index 0000000..b845b0f --- /dev/null +++ b/bin/check-release-environment @@ -0,0 +1,21 @@ +#!/usr/bin/env bash + +errors=() + +if [ -z "${PYPI_TOKEN}" ]; then + errors+=("The PYPI_TOKEN secret has not been set. Please set it in either this repository's secrets or your organization secrets.") +fi + +lenErrors=${#errors[@]} + +if [[ lenErrors -gt 0 ]]; then + echo -e "Found the following errors in the release environment:\n" + + for error in "${errors[@]}"; do + echo -e "- $error\n" + done + + exit 1 +fi + +echo "The environment is ready to push releases!" diff --git a/bin/publish-pypi b/bin/publish-pypi new file mode 100644 index 0000000..826054e --- /dev/null +++ b/bin/publish-pypi @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +set -eux +mkdir -p dist +rye build --clean +rye publish --yes --token=$PYPI_TOKEN diff --git a/examples/.keep b/examples/.keep new file mode 100644 index 0000000..d8c73e9 --- /dev/null +++ b/examples/.keep @@ -0,0 +1,4 @@ +File generated from our OpenAPI spec by Stainless. + +This directory can be used to store example files demonstrating usage of this SDK. 
+It is ignored by Stainless code generation and its content (other than this keep file) won't be touched. \ No newline at end of file diff --git a/noxfile.py b/noxfile.py new file mode 100644 index 0000000..53bca7f --- /dev/null +++ b/noxfile.py @@ -0,0 +1,9 @@ +import nox + + +@nox.session(reuse_venv=True, name="test-pydantic-v1") +def test_pydantic_v1(session: nox.Session) -> None: + session.install("-r", "requirements-dev.lock") + session.install("pydantic<2") + + session.run("pytest", "--showlocals", "--ignore=tests/functional", *session.posargs) diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..22aadee --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,269 @@ +[project] +name = "kernel" +version = "0.24.0" +description = "The official Python library for the kernel API" +dynamic = ["readme"] +license = "Apache-2.0" +authors = [ +{ name = "Kernel", email = "" }, +] + +dependencies = [ + "httpx>=0.23.0, <1", + "pydantic>=1.9.0, <3", + "typing-extensions>=4.10, <5", + "anyio>=3.5.0, <5", + "distro>=1.7.0, <2", + "sniffio", +] + +requires-python = ">= 3.9" +classifiers = [ + "Typing :: Typed", + "Intended Audience :: Developers", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", + "Operating System :: OS Independent", + "Operating System :: POSIX", + "Operating System :: MacOS", + "Operating System :: POSIX :: Linux", + "Operating System :: Microsoft :: Windows", + "Topic :: Software Development :: Libraries :: Python Modules", + "License :: OSI Approved :: Apache Software License" +] + +[project.urls] +Homepage = "https://github.com/kernel/kernel-python-sdk" +Repository = "https://github.com/kernel/kernel-python-sdk" + +[project.optional-dependencies] +aiohttp = ["aiohttp", "httpx_aiohttp>=0.1.9"] + +[tool.rye] +managed = true +# version pins are in requirements-dev.lock +dev-dependencies = [ + "pyright==1.1.399", + "mypy==1.17", + "respx", + "pytest", + "pytest-asyncio", + "ruff", + "time-machine", + "nox", + "dirty-equals>=0.6.0", + "importlib-metadata>=6.7.0", + "rich>=13.7.1", + "pytest-xdist>=3.6.1", +] + +[tool.rye.scripts] +format = { chain = [ + "format:ruff", + "format:docs", + "fix:ruff", + # run formatting again to fix any inconsistencies when imports are stripped + "format:ruff", +]} +"format:docs" = "python scripts/utils/ruffen-docs.py README.md api.md" +"format:ruff" = "ruff format" + +"lint" = { chain = [ + "check:ruff", + "typecheck", + "check:importable", +]} +"check:ruff" = "ruff check ." +"fix:ruff" = "ruff check --fix ." + +"check:importable" = "python -c 'import kernel'" + +typecheck = { chain = [ + "typecheck:pyright", + "typecheck:mypy" +]} +"typecheck:pyright" = "pyright" +"typecheck:verify-types" = "pyright --verifytypes kernel --ignoreexternal" +"typecheck:mypy" = "mypy ." 
+ +[build-system] +requires = ["hatchling==1.26.3", "hatch-fancy-pypi-readme"] +build-backend = "hatchling.build" + +[tool.hatch.build] +include = [ + "src/*" +] + +[tool.hatch.build.targets.wheel] +packages = ["src/kernel"] + +[tool.hatch.build.targets.sdist] +# Basically everything except hidden files/directories (such as .github, .devcontainers, .python-version, etc) +include = [ + "/*.toml", + "/*.json", + "/*.lock", + "/*.md", + "/mypy.ini", + "/noxfile.py", + "bin/*", + "examples/*", + "src/*", + "tests/*", +] + +[tool.hatch.metadata.hooks.fancy-pypi-readme] +content-type = "text/markdown" + +[[tool.hatch.metadata.hooks.fancy-pypi-readme.fragments]] +path = "README.md" + +[[tool.hatch.metadata.hooks.fancy-pypi-readme.substitutions]] +# replace relative links with absolute links +pattern = '\[(.+?)\]\(((?!https?://)\S+?)\)' +replacement = '[\1](https://github.com/kernel/kernel-python-sdk/tree/main/\g<2>)' + +[tool.pytest.ini_options] +testpaths = ["tests"] +addopts = "--tb=short -n auto" +xfail_strict = true +asyncio_mode = "auto" +asyncio_default_fixture_loop_scope = "session" +filterwarnings = [ + "error" +] + +[tool.pyright] +# this enables practically every flag given by pyright. +# there are a couple of flags that are still disabled by +# default in strict mode as they are experimental and niche. +typeCheckingMode = "strict" +pythonVersion = "3.9" + +exclude = [ + "_dev", + ".venv", + ".nox", + ".git", +] + +reportImplicitOverride = true +reportOverlappingOverload = false + +reportImportCycles = false +reportPrivateUsage = false + +[tool.mypy] +pretty = true +show_error_codes = true + +# Exclude _files.py because mypy isn't smart enough to apply +# the correct type narrowing and as this is an internal module +# it's fine to just use Pyright. +# +# We also exclude our `tests` as mypy doesn't always infer +# types correctly and Pyright will still catch any type errors. +exclude = ['src/kernel/_files.py', '_dev/.*.py', 'tests/.*'] + +strict_equality = true +implicit_reexport = true +check_untyped_defs = true +no_implicit_optional = true + +warn_return_any = true +warn_unreachable = true +warn_unused_configs = true + +# Turn these options off as it could cause conflicts +# with the Pyright options. +warn_unused_ignores = false +warn_redundant_casts = false + +disallow_any_generics = true +disallow_untyped_defs = true +disallow_untyped_calls = true +disallow_subclassing_any = true +disallow_incomplete_defs = true +disallow_untyped_decorators = true +cache_fine_grained = true + +# By default, mypy reports an error if you assign a value to the result +# of a function call that doesn't return anything. We do this in our test +# cases: +# ``` +# result = ... +# assert result is None +# ``` +# Changing this codegen to make mypy happy would increase complexity +# and would not be worth it. 
+disable_error_code = "func-returns-value,overload-cannot-match" + +# https://github.com/python/mypy/issues/12162 +[[tool.mypy.overrides]] +module = "black.files.*" +ignore_errors = true +ignore_missing_imports = true + + +[tool.ruff] +line-length = 120 +output-format = "grouped" +target-version = "py38" + +[tool.ruff.format] +docstring-code-format = true + +[tool.ruff.lint] +select = [ + # isort + "I", + # bugbear rules + "B", + # remove unused imports + "F401", + # check for missing future annotations + "FA102", + # bare except statements + "E722", + # unused arguments + "ARG", + # print statements + "T201", + "T203", + # misuse of typing.TYPE_CHECKING + "TC004", + # import rules + "TID251", +] +ignore = [ + # mutable defaults + "B006", +] +unfixable = [ + # disable auto fix for print statements + "T201", + "T203", +] + +extend-safe-fixes = ["FA102"] + +[tool.ruff.lint.flake8-tidy-imports.banned-api] +"functools.lru_cache".msg = "This function does not retain type information for the wrapped function's arguments; The `lru_cache` function from `_utils` should be used instead" + +[tool.ruff.lint.isort] +length-sort = true +length-sort-straight = true +combine-as-imports = true +extra-standard-library = ["typing_extensions"] +known-first-party = ["kernel", "tests"] + +[tool.ruff.lint.per-file-ignores] +"bin/**.py" = ["T201", "T203"] +"scripts/**.py" = ["T201", "T203"] +"tests/**.py" = ["T201", "T203"] +"examples/**.py" = ["T201", "T203"] diff --git a/release-please-config.json b/release-please-config.json new file mode 100644 index 0000000..942ec08 --- /dev/null +++ b/release-please-config.json @@ -0,0 +1,66 @@ +{ + "packages": { + ".": {} + }, + "$schema": "https://raw.githubusercontent.com/stainless-api/release-please/main/schemas/config.json", + "include-v-in-tag": true, + "include-component-in-tag": false, + "versioning": "prerelease", + "prerelease": true, + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": false, + "pull-request-header": "Automated Release PR", + "pull-request-title-pattern": "release: ${version}", + "changelog-sections": [ + { + "type": "feat", + "section": "Features" + }, + { + "type": "fix", + "section": "Bug Fixes" + }, + { + "type": "perf", + "section": "Performance Improvements" + }, + { + "type": "revert", + "section": "Reverts" + }, + { + "type": "chore", + "section": "Chores" + }, + { + "type": "docs", + "section": "Documentation" + }, + { + "type": "style", + "section": "Styles" + }, + { + "type": "refactor", + "section": "Refactors" + }, + { + "type": "test", + "section": "Tests", + "hidden": true + }, + { + "type": "build", + "section": "Build System" + }, + { + "type": "ci", + "section": "Continuous Integration", + "hidden": true + } + ], + "release-type": "python", + "extra-files": [ + "src/kernel/_version.py" + ] +} \ No newline at end of file diff --git a/requirements-dev.lock b/requirements-dev.lock new file mode 100644 index 0000000..7643dfb --- /dev/null +++ b/requirements-dev.lock @@ -0,0 +1,149 @@ +# generated by rye +# use `rye lock` or `rye sync` to update this lockfile +# +# last locked with the following flags: +# pre: false +# features: [] +# all-features: true +# with-sources: false +# generate-hashes: false +# universal: false + +-e file:. 
+aiohappyeyeballs==2.6.1 + # via aiohttp +aiohttp==3.13.2 + # via httpx-aiohttp + # via kernel +aiosignal==1.4.0 + # via aiohttp +annotated-types==0.7.0 + # via pydantic +anyio==4.12.0 + # via httpx + # via kernel +argcomplete==3.6.3 + # via nox +async-timeout==5.0.1 + # via aiohttp +attrs==25.4.0 + # via aiohttp + # via nox +backports-asyncio-runner==1.2.0 + # via pytest-asyncio +certifi==2025.11.12 + # via httpcore + # via httpx +colorlog==6.10.1 + # via nox +dependency-groups==1.3.1 + # via nox +dirty-equals==0.11 +distlib==0.4.0 + # via virtualenv +distro==1.9.0 + # via kernel +exceptiongroup==1.3.1 + # via anyio + # via pytest +execnet==2.1.2 + # via pytest-xdist +filelock==3.19.1 + # via virtualenv +frozenlist==1.8.0 + # via aiohttp + # via aiosignal +h11==0.16.0 + # via httpcore +httpcore==1.0.9 + # via httpx +httpx==0.28.1 + # via httpx-aiohttp + # via kernel + # via respx +httpx-aiohttp==0.1.9 + # via kernel +humanize==4.13.0 + # via nox +idna==3.11 + # via anyio + # via httpx + # via yarl +importlib-metadata==8.7.0 +iniconfig==2.1.0 + # via pytest +markdown-it-py==3.0.0 + # via rich +mdurl==0.1.2 + # via markdown-it-py +multidict==6.7.0 + # via aiohttp + # via yarl +mypy==1.17.0 +mypy-extensions==1.1.0 + # via mypy +nodeenv==1.9.1 + # via pyright +nox==2025.11.12 +packaging==25.0 + # via dependency-groups + # via nox + # via pytest +pathspec==0.12.1 + # via mypy +platformdirs==4.4.0 + # via virtualenv +pluggy==1.6.0 + # via pytest +propcache==0.4.1 + # via aiohttp + # via yarl +pydantic==2.12.5 + # via kernel +pydantic-core==2.41.5 + # via pydantic +pygments==2.19.2 + # via pytest + # via rich +pyright==1.1.399 +pytest==8.4.2 + # via pytest-asyncio + # via pytest-xdist +pytest-asyncio==1.2.0 +pytest-xdist==3.8.0 +python-dateutil==2.9.0.post0 + # via time-machine +respx==0.22.0 +rich==14.2.0 +ruff==0.14.7 +six==1.17.0 + # via python-dateutil +sniffio==1.3.1 + # via kernel +time-machine==2.19.0 +tomli==2.3.0 + # via dependency-groups + # via mypy + # via nox + # via pytest +typing-extensions==4.15.0 + # via aiosignal + # via anyio + # via exceptiongroup + # via kernel + # via multidict + # via mypy + # via pydantic + # via pydantic-core + # via pyright + # via pytest-asyncio + # via typing-inspection + # via virtualenv +typing-inspection==0.4.2 + # via pydantic +virtualenv==20.35.4 + # via nox +yarl==1.22.0 + # via aiohttp +zipp==3.23.0 + # via importlib-metadata diff --git a/requirements.lock b/requirements.lock new file mode 100644 index 0000000..bbfe2b3 --- /dev/null +++ b/requirements.lock @@ -0,0 +1,76 @@ +# generated by rye +# use `rye lock` or `rye sync` to update this lockfile +# +# last locked with the following flags: +# pre: false +# features: [] +# all-features: true +# with-sources: false +# generate-hashes: false +# universal: false + +-e file:. 
+aiohappyeyeballs==2.6.1 + # via aiohttp +aiohttp==3.13.2 + # via httpx-aiohttp + # via kernel +aiosignal==1.4.0 + # via aiohttp +annotated-types==0.7.0 + # via pydantic +anyio==4.12.0 + # via httpx + # via kernel +async-timeout==5.0.1 + # via aiohttp +attrs==25.4.0 + # via aiohttp +certifi==2025.11.12 + # via httpcore + # via httpx +distro==1.9.0 + # via kernel +exceptiongroup==1.3.1 + # via anyio +frozenlist==1.8.0 + # via aiohttp + # via aiosignal +h11==0.16.0 + # via httpcore +httpcore==1.0.9 + # via httpx +httpx==0.28.1 + # via httpx-aiohttp + # via kernel +httpx-aiohttp==0.1.9 + # via kernel +idna==3.11 + # via anyio + # via httpx + # via yarl +multidict==6.7.0 + # via aiohttp + # via yarl +propcache==0.4.1 + # via aiohttp + # via yarl +pydantic==2.12.5 + # via kernel +pydantic-core==2.41.5 + # via pydantic +sniffio==1.3.1 + # via kernel +typing-extensions==4.15.0 + # via aiosignal + # via anyio + # via exceptiongroup + # via kernel + # via multidict + # via pydantic + # via pydantic-core + # via typing-inspection +typing-inspection==0.4.2 + # via pydantic +yarl==1.22.0 + # via aiohttp diff --git a/scripts/bootstrap b/scripts/bootstrap new file mode 100755 index 0000000..b430fee --- /dev/null +++ b/scripts/bootstrap @@ -0,0 +1,27 @@ +#!/usr/bin/env bash + +set -e + +cd "$(dirname "$0")/.." + +if [ -f "Brewfile" ] && [ "$(uname -s)" = "Darwin" ] && [ "$SKIP_BREW" != "1" ] && [ -t 0 ]; then + brew bundle check >/dev/null 2>&1 || { + echo -n "==> Install Homebrew dependencies? (y/N): " + read -r response + case "$response" in + [yY][eE][sS]|[yY]) + brew bundle + ;; + *) + ;; + esac + echo + } +fi + +echo "==> Installing Python dependencies…" + +# experimental uv support makes installations significantly faster +rye config --set-bool behavior.use-uv=true + +rye sync --all-features diff --git a/scripts/format b/scripts/format new file mode 100755 index 0000000..667ec2d --- /dev/null +++ b/scripts/format @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +set -e + +cd "$(dirname "$0")/.." + +echo "==> Running formatters" +rye run format diff --git a/scripts/lint b/scripts/lint new file mode 100755 index 0000000..7675e60 --- /dev/null +++ b/scripts/lint @@ -0,0 +1,16 @@ +#!/usr/bin/env bash + +set -e + +cd "$(dirname "$0")/.." + +if [ "$1" = "--fix" ]; then + echo "==> Running lints with --fix" + rye run fix:ruff +else + echo "==> Running lints" + rye run lint +fi + +echo "==> Making sure it imports" +rye run python -c 'import kernel' diff --git a/scripts/mock b/scripts/mock new file mode 100755 index 0000000..0b28f6e --- /dev/null +++ b/scripts/mock @@ -0,0 +1,41 @@ +#!/usr/bin/env bash + +set -e + +cd "$(dirname "$0")/.." + +if [[ -n "$1" && "$1" != '--'* ]]; then + URL="$1" + shift +else + URL="$(grep 'openapi_spec_url' .stats.yml | cut -d' ' -f2)" +fi + +# Check if the URL is empty +if [ -z "$URL" ]; then + echo "Error: No OpenAPI spec path/url provided or found in .stats.yml" + exit 1 +fi + +echo "==> Starting mock server with URL ${URL}" + +# Run prism mock on the given spec +if [ "$1" == "--daemon" ]; then + npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock "$URL" &> .prism.log & + + # Wait for server to come online + echo -n "Waiting for server" + while ! grep -q "✖ fatal\|Prism is listening" ".prism.log" ; do + echo -n "." 
+ sleep 0.1
+ done
+
+ if grep -q "✖ fatal" ".prism.log"; then
+ cat .prism.log
+ exit 1
+ fi
+
+ echo
+else
+ npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock "$URL"
+fi
diff --git a/scripts/test b/scripts/test
new file mode 100755
index 0000000..dbeda2d
--- /dev/null
+++ b/scripts/test
@@ -0,0 +1,61 @@
+#!/usr/bin/env bash
+
+set -e
+
+cd "$(dirname "$0")/.."
+
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+YELLOW='\033[0;33m'
+NC='\033[0m' # No Color
+
+function prism_is_running() {
+ curl --silent "http://localhost:4010" >/dev/null 2>&1
+}
+
+kill_server_on_port() {
+ pids=$(lsof -t -i tcp:"$1" || echo "")
+ if [ "$pids" != "" ]; then
+ kill "$pids"
+ echo "Stopped $pids."
+ fi
+}
+
+function is_overriding_api_base_url() {
+ [ -n "$TEST_API_BASE_URL" ]
+}
+
+if ! is_overriding_api_base_url && ! prism_is_running ; then
+ # When we exit this script, make sure to kill the background mock server process
+ trap 'kill_server_on_port 4010' EXIT
+
+ # Start the dev server
+ ./scripts/mock --daemon
+fi
+
+if is_overriding_api_base_url ; then
+ echo -e "${GREEN}✔ Running tests against ${TEST_API_BASE_URL}${NC}"
+ echo
+elif ! prism_is_running ; then
+ echo -e "${RED}ERROR:${NC} The test suite will not run without a mock Prism server"
+ echo -e "running against your OpenAPI spec."
+ echo
+ echo -e "To run the server, pass in the path or url of your OpenAPI"
+ echo -e "spec to the prism command:"
+ echo
+ echo -e " \$ ${YELLOW}npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock path/to/your.openapi.yml${NC}"
+ echo
+
+ exit 1
+else
+ echo -e "${GREEN}✔ Mock prism server is running with your OpenAPI spec${NC}"
+ echo
+fi
+
+export DEFER_PYDANTIC_BUILD=false
+
+echo "==> Running tests"
+rye run pytest "$@"
+
+echo "==> Running Pydantic v1 tests"
+rye run nox -s test-pydantic-v1 -- "$@"
diff --git a/scripts/utils/ruffen-docs.py b/scripts/utils/ruffen-docs.py
new file mode 100644
index 0000000..0cf2bd2
--- /dev/null
+++ b/scripts/utils/ruffen-docs.py
@@ -0,0 +1,167 @@
+# fork of https://github.com/asottile/blacken-docs adapted for ruff
+from __future__ import annotations
+
+import re
+import sys
+import argparse
+import textwrap
+import contextlib
+import subprocess
+from typing import Match, Optional, Sequence, Generator, NamedTuple, cast
+
+MD_RE = re.compile(
+ r"(?P<before>^(?P<indent> *)```\s*python\n)" r"(?P<code>.*?)" r"(?P<after>^(?P=indent)```\s*$)",
+ re.DOTALL | re.MULTILINE,
+)
+MD_PYCON_RE = re.compile(
+ r"(?P<before>^(?P<indent> *)```\s*pycon\n)" r"(?P<code>.*?)" r"(?P<after>^(?P=indent)```.*$)",
+ re.DOTALL | re.MULTILINE,
+)
+PYCON_PREFIX = ">>> "
+PYCON_CONTINUATION_PREFIX = "..."
+PYCON_CONTINUATION_RE = re.compile( + rf"^{re.escape(PYCON_CONTINUATION_PREFIX)}( |$)", +) +DEFAULT_LINE_LENGTH = 100 + + +class CodeBlockError(NamedTuple): + offset: int + exc: Exception + + +def format_str( + src: str, +) -> tuple[str, Sequence[CodeBlockError]]: + errors: list[CodeBlockError] = [] + + @contextlib.contextmanager + def _collect_error(match: Match[str]) -> Generator[None, None, None]: + try: + yield + except Exception as e: + errors.append(CodeBlockError(match.start(), e)) + + def _md_match(match: Match[str]) -> str: + code = textwrap.dedent(match["code"]) + with _collect_error(match): + code = format_code_block(code) + code = textwrap.indent(code, match["indent"]) + return f"{match['before']}{code}{match['after']}" + + def _pycon_match(match: Match[str]) -> str: + code = "" + fragment = cast(Optional[str], None) + + def finish_fragment() -> None: + nonlocal code + nonlocal fragment + + if fragment is not None: + with _collect_error(match): + fragment = format_code_block(fragment) + fragment_lines = fragment.splitlines() + code += f"{PYCON_PREFIX}{fragment_lines[0]}\n" + for line in fragment_lines[1:]: + # Skip blank lines to handle Black adding a blank above + # functions within blocks. A blank line would end the REPL + # continuation prompt. + # + # >>> if True: + # ... def f(): + # ... pass + # ... + if line: + code += f"{PYCON_CONTINUATION_PREFIX} {line}\n" + if fragment_lines[-1].startswith(" "): + code += f"{PYCON_CONTINUATION_PREFIX}\n" + fragment = None + + indentation = None + for line in match["code"].splitlines(): + orig_line, line = line, line.lstrip() + if indentation is None and line: + indentation = len(orig_line) - len(line) + continuation_match = PYCON_CONTINUATION_RE.match(line) + if continuation_match and fragment is not None: + fragment += line[continuation_match.end() :] + "\n" + else: + finish_fragment() + if line.startswith(PYCON_PREFIX): + fragment = line[len(PYCON_PREFIX) :] + "\n" + else: + code += orig_line[indentation:] + "\n" + finish_fragment() + return code + + def _md_pycon_match(match: Match[str]) -> str: + code = _pycon_match(match) + code = textwrap.indent(code, match["indent"]) + return f"{match['before']}{code}{match['after']}" + + src = MD_RE.sub(_md_match, src) + src = MD_PYCON_RE.sub(_md_pycon_match, src) + return src, errors + + +def format_code_block(code: str) -> str: + return subprocess.check_output( + [ + sys.executable, + "-m", + "ruff", + "format", + "--stdin-filename=script.py", + f"--line-length={DEFAULT_LINE_LENGTH}", + ], + encoding="utf-8", + input=code, + ) + + +def format_file( + filename: str, + skip_errors: bool, +) -> int: + with open(filename, encoding="UTF-8") as f: + contents = f.read() + new_contents, errors = format_str(contents) + for error in errors: + lineno = contents[: error.offset].count("\n") + 1 + print(f"{filename}:{lineno}: code block parse error {error.exc}") + if errors and not skip_errors: + return 1 + if contents != new_contents: + print(f"{filename}: Rewriting...") + with open(filename, "w", encoding="UTF-8") as f: + f.write(new_contents) + return 0 + else: + return 0 + + +def main(argv: Sequence[str] | None = None) -> int: + parser = argparse.ArgumentParser() + parser.add_argument( + "-l", + "--line-length", + type=int, + default=DEFAULT_LINE_LENGTH, + ) + parser.add_argument( + "-S", + "--skip-string-normalization", + action="store_true", + ) + parser.add_argument("-E", "--skip-errors", action="store_true") + parser.add_argument("filenames", nargs="*") + args = parser.parse_args(argv) + + retv = 
0 + for filename in args.filenames: + retv |= format_file(filename, skip_errors=args.skip_errors) + return retv + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/scripts/utils/upload-artifact.sh b/scripts/utils/upload-artifact.sh new file mode 100755 index 0000000..14b2cc8 --- /dev/null +++ b/scripts/utils/upload-artifact.sh @@ -0,0 +1,27 @@ +#!/usr/bin/env bash +set -exuo pipefail + +FILENAME=$(basename dist/*.whl) + +RESPONSE=$(curl -X POST "$URL?filename=$FILENAME" \ + -H "Authorization: Bearer $AUTH" \ + -H "Content-Type: application/json") + +SIGNED_URL=$(echo "$RESPONSE" | jq -r '.url') + +if [[ "$SIGNED_URL" == "null" ]]; then + echo -e "\033[31mFailed to get signed URL.\033[0m" + exit 1 +fi + +UPLOAD_RESPONSE=$(curl -v -X PUT \ + -H "Content-Type: binary/octet-stream" \ + --data-binary "@dist/$FILENAME" "$SIGNED_URL" 2>&1) + +if echo "$UPLOAD_RESPONSE" | grep -q "HTTP/[0-9.]* 200"; then + echo -e "\033[32mUploaded build to Stainless storage.\033[0m" + echo -e "\033[32mInstallation: pip install 'https://pkg.stainless.com/s/kernel-python/$SHA/$FILENAME'\033[0m" +else + echo -e "\033[31mFailed to upload artifact.\033[0m" + exit 1 +fi diff --git a/src/kernel/__init__.py b/src/kernel/__init__.py new file mode 100644 index 0000000..d1fdcc0 --- /dev/null +++ b/src/kernel/__init__.py @@ -0,0 +1,104 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import typing as _t + +from . import types +from ._types import NOT_GIVEN, Omit, NoneType, NotGiven, Transport, ProxiesTypes, omit, not_given +from ._utils import file_from_path +from ._client import ( + ENVIRONMENTS, + Client, + Kernel, + Stream, + Timeout, + Transport, + AsyncClient, + AsyncKernel, + AsyncStream, + RequestOptions, +) +from ._models import BaseModel +from ._version import __title__, __version__ +from ._response import APIResponse as APIResponse, AsyncAPIResponse as AsyncAPIResponse +from ._constants import DEFAULT_TIMEOUT, DEFAULT_MAX_RETRIES, DEFAULT_CONNECTION_LIMITS +from ._exceptions import ( + APIError, + KernelError, + ConflictError, + NotFoundError, + APIStatusError, + RateLimitError, + APITimeoutError, + BadRequestError, + APIConnectionError, + AuthenticationError, + InternalServerError, + PermissionDeniedError, + UnprocessableEntityError, + APIResponseValidationError, +) +from ._base_client import DefaultHttpxClient, DefaultAioHttpClient, DefaultAsyncHttpxClient +from ._utils._logs import setup_logging as _setup_logging + +__all__ = [ + "types", + "__version__", + "__title__", + "NoneType", + "Transport", + "ProxiesTypes", + "NotGiven", + "NOT_GIVEN", + "not_given", + "Omit", + "omit", + "KernelError", + "APIError", + "APIStatusError", + "APITimeoutError", + "APIConnectionError", + "APIResponseValidationError", + "BadRequestError", + "AuthenticationError", + "PermissionDeniedError", + "NotFoundError", + "ConflictError", + "UnprocessableEntityError", + "RateLimitError", + "InternalServerError", + "Timeout", + "RequestOptions", + "Client", + "AsyncClient", + "Stream", + "AsyncStream", + "Kernel", + "AsyncKernel", + "ENVIRONMENTS", + "file_from_path", + "BaseModel", + "DEFAULT_TIMEOUT", + "DEFAULT_MAX_RETRIES", + "DEFAULT_CONNECTION_LIMITS", + "DefaultHttpxClient", + "DefaultAsyncHttpxClient", + "DefaultAioHttpClient", +] + +if not _t.TYPE_CHECKING: + from ._utils._resources_proxy import resources as resources + +_setup_logging() + +# Update the __module__ attribute for exported symbols so that +# error messages point to this module instead of the module +# 
it was originally defined in, e.g. +# kernel._exceptions.NotFoundError -> kernel.NotFoundError +__locals = locals() +for __name in __all__: + if not __name.startswith("__"): + try: + __locals[__name].__module__ = "kernel" + except (TypeError, AttributeError): + # Some of our exported symbols are builtins which we can't set attributes for. + pass diff --git a/src/kernel/_base_client.py b/src/kernel/_base_client.py new file mode 100644 index 0000000..787be54 --- /dev/null +++ b/src/kernel/_base_client.py @@ -0,0 +1,2001 @@ +from __future__ import annotations + +import sys +import json +import time +import uuid +import email +import asyncio +import inspect +import logging +import platform +import email.utils +from types import TracebackType +from random import random +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Type, + Union, + Generic, + Mapping, + TypeVar, + Iterable, + Iterator, + Optional, + Generator, + AsyncIterator, + cast, + overload, +) +from typing_extensions import Literal, override, get_origin + +import anyio +import httpx +import distro +import pydantic +from httpx import URL +from pydantic import PrivateAttr + +from . import _exceptions +from ._qs import Querystring +from ._files import to_httpx_files, async_to_httpx_files +from ._types import ( + Body, + Omit, + Query, + Headers, + Timeout, + NotGiven, + ResponseT, + AnyMapping, + PostParser, + RequestFiles, + HttpxSendArgs, + RequestOptions, + HttpxRequestFiles, + ModelBuilderProtocol, + not_given, +) +from ._utils import is_dict, is_list, asyncify, is_given, lru_cache, is_mapping +from ._compat import PYDANTIC_V1, model_copy, model_dump +from ._models import GenericModel, FinalRequestOptions, validate_type, construct_type +from ._response import ( + APIResponse, + BaseAPIResponse, + AsyncAPIResponse, + extract_response_type, +) +from ._constants import ( + DEFAULT_TIMEOUT, + MAX_RETRY_DELAY, + DEFAULT_MAX_RETRIES, + INITIAL_RETRY_DELAY, + RAW_RESPONSE_HEADER, + OVERRIDE_CAST_TO_HEADER, + DEFAULT_CONNECTION_LIMITS, +) +from ._streaming import Stream, SSEDecoder, AsyncStream, SSEBytesDecoder +from ._exceptions import ( + APIStatusError, + APITimeoutError, + APIConnectionError, + APIResponseValidationError, +) + +log: logging.Logger = logging.getLogger(__name__) + +# TODO: make base page type vars covariant +SyncPageT = TypeVar("SyncPageT", bound="BaseSyncPage[Any]") +AsyncPageT = TypeVar("AsyncPageT", bound="BaseAsyncPage[Any]") + + +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) + +_StreamT = TypeVar("_StreamT", bound=Stream[Any]) +_AsyncStreamT = TypeVar("_AsyncStreamT", bound=AsyncStream[Any]) + +if TYPE_CHECKING: + from httpx._config import ( + DEFAULT_TIMEOUT_CONFIG, # pyright: ignore[reportPrivateImportUsage] + ) + + HTTPX_DEFAULT_TIMEOUT = DEFAULT_TIMEOUT_CONFIG +else: + try: + from httpx._config import DEFAULT_TIMEOUT_CONFIG as HTTPX_DEFAULT_TIMEOUT + except ImportError: + # taken from https://github.com/encode/httpx/blob/3ba5fe0d7ac70222590e759c31442b1cab263791/httpx/_config.py#L366 + HTTPX_DEFAULT_TIMEOUT = Timeout(5.0) + + +class PageInfo: + """Stores the necessary information to build the request to retrieve the next page. + + Either `url` or `params` must be set. + """ + + url: URL | NotGiven + params: Query | NotGiven + json: Body | NotGiven + + @overload + def __init__( + self, + *, + url: URL, + ) -> None: ... + + @overload + def __init__( + self, + *, + params: Query, + ) -> None: ... + + @overload + def __init__( + self, + *, + json: Body, + ) -> None: ... 
+ + def __init__( + self, + *, + url: URL | NotGiven = not_given, + json: Body | NotGiven = not_given, + params: Query | NotGiven = not_given, + ) -> None: + self.url = url + self.json = json + self.params = params + + @override + def __repr__(self) -> str: + if self.url: + return f"{self.__class__.__name__}(url={self.url})" + if self.json: + return f"{self.__class__.__name__}(json={self.json})" + return f"{self.__class__.__name__}(params={self.params})" + + +class BasePage(GenericModel, Generic[_T]): + """ + Defines the core interface for pagination. + + Type Args: + ModelT: The pydantic model that represents an item in the response. + + Methods: + has_next_page(): Check if there is another page available + next_page_info(): Get the necessary information to make a request for the next page + """ + + _options: FinalRequestOptions = PrivateAttr() + _model: Type[_T] = PrivateAttr() + + def has_next_page(self) -> bool: + items = self._get_page_items() + if not items: + return False + return self.next_page_info() is not None + + def next_page_info(self) -> Optional[PageInfo]: ... + + def _get_page_items(self) -> Iterable[_T]: # type: ignore[empty-body] + ... + + def _params_from_url(self, url: URL) -> httpx.QueryParams: + # TODO: do we have to preprocess params here? + return httpx.QueryParams(cast(Any, self._options.params)).merge(url.params) + + def _info_to_options(self, info: PageInfo) -> FinalRequestOptions: + options = model_copy(self._options) + options._strip_raw_response_header() + + if not isinstance(info.params, NotGiven): + options.params = {**options.params, **info.params} + return options + + if not isinstance(info.url, NotGiven): + params = self._params_from_url(info.url) + url = info.url.copy_with(params=params) + options.params = dict(url.params) + options.url = str(url) + return options + + if not isinstance(info.json, NotGiven): + if not is_mapping(info.json): + raise TypeError("Pagination is only supported with mappings") + + if not options.json_data: + options.json_data = {**info.json} + else: + if not is_mapping(options.json_data): + raise TypeError("Pagination is only supported with mappings") + + options.json_data = {**options.json_data, **info.json} + return options + + raise ValueError("Unexpected PageInfo state") + + +class BaseSyncPage(BasePage[_T], Generic[_T]): + _client: SyncAPIClient = pydantic.PrivateAttr() + + def _set_private_attributes( + self, + client: SyncAPIClient, + model: Type[_T], + options: FinalRequestOptions, + ) -> None: + if (not PYDANTIC_V1) and getattr(self, "__pydantic_private__", None) is None: + self.__pydantic_private__ = {} + + self._model = model + self._client = client + self._options = options + + # Pydantic uses a custom `__iter__` method to support casting BaseModels + # to dictionaries. e.g. dict(model). + # As we want to support `for item in page`, this is inherently incompatible + # with the default pydantic behaviour. It is not possible to support both + # use cases at once. Fortunately, this is not a big deal as all other pydantic + # methods should continue to work as expected as there is an alternative method + # to cast a model to a dictionary, model.dict(), which is used internally + # by pydantic. 
+ def __iter__(self) -> Iterator[_T]: # type: ignore + for page in self.iter_pages(): + for item in page._get_page_items(): + yield item + + def iter_pages(self: SyncPageT) -> Iterator[SyncPageT]: + page = self + while True: + yield page + if page.has_next_page(): + page = page.get_next_page() + else: + return + + def get_next_page(self: SyncPageT) -> SyncPageT: + info = self.next_page_info() + if not info: + raise RuntimeError( + "No next page expected; please check `.has_next_page()` before calling `.get_next_page()`." + ) + + options = self._info_to_options(info) + return self._client._request_api_list(self._model, page=self.__class__, options=options) + + +class AsyncPaginator(Generic[_T, AsyncPageT]): + def __init__( + self, + client: AsyncAPIClient, + options: FinalRequestOptions, + page_cls: Type[AsyncPageT], + model: Type[_T], + ) -> None: + self._model = model + self._client = client + self._options = options + self._page_cls = page_cls + + def __await__(self) -> Generator[Any, None, AsyncPageT]: + return self._get_page().__await__() + + async def _get_page(self) -> AsyncPageT: + def _parser(resp: AsyncPageT) -> AsyncPageT: + resp._set_private_attributes( + model=self._model, + options=self._options, + client=self._client, + ) + return resp + + self._options.post_parser = _parser + + return await self._client.request(self._page_cls, self._options) + + async def __aiter__(self) -> AsyncIterator[_T]: + # https://github.com/microsoft/pyright/issues/3464 + page = cast( + AsyncPageT, + await self, # type: ignore + ) + async for item in page: + yield item + + +class BaseAsyncPage(BasePage[_T], Generic[_T]): + _client: AsyncAPIClient = pydantic.PrivateAttr() + + def _set_private_attributes( + self, + model: Type[_T], + client: AsyncAPIClient, + options: FinalRequestOptions, + ) -> None: + if (not PYDANTIC_V1) and getattr(self, "__pydantic_private__", None) is None: + self.__pydantic_private__ = {} + + self._model = model + self._client = client + self._options = options + + async def __aiter__(self) -> AsyncIterator[_T]: + async for page in self.iter_pages(): + for item in page._get_page_items(): + yield item + + async def iter_pages(self: AsyncPageT) -> AsyncIterator[AsyncPageT]: + page = self + while True: + yield page + if page.has_next_page(): + page = await page.get_next_page() + else: + return + + async def get_next_page(self: AsyncPageT) -> AsyncPageT: + info = self.next_page_info() + if not info: + raise RuntimeError( + "No next page expected; please check `.has_next_page()` before calling `.get_next_page()`." 
+ ) + + options = self._info_to_options(info) + return await self._client._request_api_list(self._model, page=self.__class__, options=options) + + +_HttpxClientT = TypeVar("_HttpxClientT", bound=Union[httpx.Client, httpx.AsyncClient]) +_DefaultStreamT = TypeVar("_DefaultStreamT", bound=Union[Stream[Any], AsyncStream[Any]]) + + +class BaseClient(Generic[_HttpxClientT, _DefaultStreamT]): + _client: _HttpxClientT + _version: str + _base_url: URL + max_retries: int + timeout: Union[float, Timeout, None] + _strict_response_validation: bool + _idempotency_header: str | None + _default_stream_cls: type[_DefaultStreamT] | None = None + + def __init__( + self, + *, + version: str, + base_url: str | URL, + _strict_response_validation: bool, + max_retries: int = DEFAULT_MAX_RETRIES, + timeout: float | Timeout | None = DEFAULT_TIMEOUT, + custom_headers: Mapping[str, str] | None = None, + custom_query: Mapping[str, object] | None = None, + ) -> None: + self._version = version + self._base_url = self._enforce_trailing_slash(URL(base_url)) + self.max_retries = max_retries + self.timeout = timeout + self._custom_headers = custom_headers or {} + self._custom_query = custom_query or {} + self._strict_response_validation = _strict_response_validation + self._idempotency_header = None + self._platform: Platform | None = None + + if max_retries is None: # pyright: ignore[reportUnnecessaryComparison] + raise TypeError( + "max_retries cannot be None. If you want to disable retries, pass `0`; if you want unlimited retries, pass `math.inf` or a very high number; if you want the default behavior, pass `kernel.DEFAULT_MAX_RETRIES`" + ) + + def _enforce_trailing_slash(self, url: URL) -> URL: + if url.raw_path.endswith(b"/"): + return url + return url.copy_with(raw_path=url.raw_path + b"/") + + def _make_status_error_from_response( + self, + response: httpx.Response, + ) -> APIStatusError: + if response.is_closed and not response.is_stream_consumed: + # We can't read the response body as it has been closed + # before it was read. This can happen if an event hook + # raises a status error. + body = None + err_msg = f"Error code: {response.status_code}" + else: + err_text = response.text.strip() + body = err_text + + try: + body = json.loads(err_text) + err_msg = f"Error code: {response.status_code} - {body}" + except Exception: + err_msg = err_text or f"Error code: {response.status_code}" + + return self._make_status_error(err_msg, body=body, response=response) + + def _make_status_error( + self, + err_msg: str, + *, + body: object, + response: httpx.Response, + ) -> _exceptions.APIStatusError: + raise NotImplementedError() + + def _build_headers(self, options: FinalRequestOptions, *, retries_taken: int = 0) -> httpx.Headers: + custom_headers = options.headers or {} + headers_dict = _merge_mappings(self.default_headers, custom_headers) + self._validate_headers(headers_dict, custom_headers) + + # headers are case-insensitive while dictionaries are not. + headers = httpx.Headers(headers_dict) + + idempotency_header = self._idempotency_header + if idempotency_header and options.idempotency_key and idempotency_header not in headers: + headers[idempotency_header] = options.idempotency_key + + # Don't set these headers if they were already set or removed by the caller. We check + # `custom_headers`, which can contain `Omit()`, instead of `headers` to account for the removal case. 
+ lower_custom_headers = [header.lower() for header in custom_headers] + if "x-stainless-retry-count" not in lower_custom_headers: + headers["x-stainless-retry-count"] = str(retries_taken) + if "x-stainless-read-timeout" not in lower_custom_headers: + timeout = self.timeout if isinstance(options.timeout, NotGiven) else options.timeout + if isinstance(timeout, Timeout): + timeout = timeout.read + if timeout is not None: + headers["x-stainless-read-timeout"] = str(timeout) + + return headers + + def _prepare_url(self, url: str) -> URL: + """ + Merge a URL argument together with any 'base_url' on the client, + to create the URL used for the outgoing request. + """ + # Copied from httpx's `_merge_url` method. + merge_url = URL(url) + if merge_url.is_relative_url: + merge_raw_path = self.base_url.raw_path + merge_url.raw_path.lstrip(b"/") + return self.base_url.copy_with(raw_path=merge_raw_path) + + return merge_url + + def _make_sse_decoder(self) -> SSEDecoder | SSEBytesDecoder: + return SSEDecoder() + + def _build_request( + self, + options: FinalRequestOptions, + *, + retries_taken: int = 0, + ) -> httpx.Request: + if log.isEnabledFor(logging.DEBUG): + log.debug("Request options: %s", model_dump(options, exclude_unset=True)) + + kwargs: dict[str, Any] = {} + + json_data = options.json_data + if options.extra_json is not None: + if json_data is None: + json_data = cast(Body, options.extra_json) + elif is_mapping(json_data): + json_data = _merge_mappings(json_data, options.extra_json) + else: + raise RuntimeError(f"Unexpected JSON data type, {type(json_data)}, cannot merge with `extra_body`") + + headers = self._build_headers(options, retries_taken=retries_taken) + params = _merge_mappings(self.default_query, options.params) + content_type = headers.get("Content-Type") + files = options.files + + # If the given Content-Type header is multipart/form-data then it + # has to be removed so that httpx can generate the header with + # additional information for us as it has to be in this form + # for the server to be able to correctly parse the request: + # multipart/form-data; boundary=---abc-- + if content_type is not None and content_type.startswith("multipart/form-data"): + if "boundary" not in content_type: + # only remove the header if the boundary hasn't been explicitly set + # as the caller doesn't want httpx to come up with their own boundary + headers.pop("Content-Type") + + # As we are now sending multipart/form-data instead of application/json + # we need to tell httpx to use it, https://www.python-httpx.org/advanced/clients/#multipart-file-encoding + if json_data: + if not is_dict(json_data): + raise TypeError( + f"Expected query input to be a dictionary for multipart requests but got {type(json_data)} instead." + ) + kwargs["data"] = self._serialize_multipartform(json_data) + + # httpx determines whether or not to send a "multipart/form-data" + # request based on the truthiness of the "files" argument. + # This gets around that issue by generating a dict value that + # evaluates to true. 
+ # + # https://github.com/encode/httpx/discussions/2399#discussioncomment-3814186 + if not files: + files = cast(HttpxRequestFiles, ForceMultipartDict()) + + prepared_url = self._prepare_url(options.url) + if "_" in prepared_url.host: + # work around https://github.com/encode/httpx/discussions/2880 + kwargs["extensions"] = {"sni_hostname": prepared_url.host.replace("_", "-")} + + is_body_allowed = options.method.lower() != "get" + + if is_body_allowed: + if isinstance(json_data, bytes): + kwargs["content"] = json_data + else: + kwargs["json"] = json_data if is_given(json_data) else None + kwargs["files"] = files + else: + headers.pop("Content-Type", None) + kwargs.pop("data", None) + + # TODO: report this error to httpx + return self._client.build_request( # pyright: ignore[reportUnknownMemberType] + headers=headers, + timeout=self.timeout if isinstance(options.timeout, NotGiven) else options.timeout, + method=options.method, + url=prepared_url, + # the `Query` type that we use is incompatible with qs' + # `Params` type as it needs to be typed as `Mapping[str, object]` + # so that passing a `TypedDict` doesn't cause an error. + # https://github.com/microsoft/pyright/issues/3526#event-6715453066 + params=self.qs.stringify(cast(Mapping[str, Any], params)) if params else None, + **kwargs, + ) + + def _serialize_multipartform(self, data: Mapping[object, object]) -> dict[str, object]: + items = self.qs.stringify_items( + # TODO: type ignore is required as stringify_items is well typed but we can't be + # well typed without heavy validation. + data, # type: ignore + array_format="brackets", + ) + serialized: dict[str, object] = {} + for key, value in items: + existing = serialized.get(key) + + if not existing: + serialized[key] = value + continue + + # If a value has already been set for this key then that + # means we're sending data like `array[]=[1, 2, 3]` and we + # need to tell httpx that we want to send multiple values with + # the same key which is done by using a list or a tuple. + # + # Note: 2d arrays should never result in the same key at both + # levels so it's safe to assume that if the value is a list, + # it was because we changed it to be a list. 
+ if is_list(existing): + existing.append(value) + else: + serialized[key] = [existing, value] + + return serialized + + def _maybe_override_cast_to(self, cast_to: type[ResponseT], options: FinalRequestOptions) -> type[ResponseT]: + if not is_given(options.headers): + return cast_to + + # make a copy of the headers so we don't mutate user-input + headers = dict(options.headers) + + # we internally support defining a temporary header to override the + # default `cast_to` type for use with `.with_raw_response` and `.with_streaming_response` + # see _response.py for implementation details + override_cast_to = headers.pop(OVERRIDE_CAST_TO_HEADER, not_given) + if is_given(override_cast_to): + options.headers = headers + return cast(Type[ResponseT], override_cast_to) + + return cast_to + + def _should_stream_response_body(self, request: httpx.Request) -> bool: + return request.headers.get(RAW_RESPONSE_HEADER) == "stream" # type: ignore[no-any-return] + + def _process_response_data( + self, + *, + data: object, + cast_to: type[ResponseT], + response: httpx.Response, + ) -> ResponseT: + if data is None: + return cast(ResponseT, None) + + if cast_to is object: + return cast(ResponseT, data) + + try: + if inspect.isclass(cast_to) and issubclass(cast_to, ModelBuilderProtocol): + return cast(ResponseT, cast_to.build(response=response, data=data)) + + if self._strict_response_validation: + return cast(ResponseT, validate_type(type_=cast_to, value=data)) + + return cast(ResponseT, construct_type(type_=cast_to, value=data)) + except pydantic.ValidationError as err: + raise APIResponseValidationError(response=response, body=data) from err + + @property + def qs(self) -> Querystring: + return Querystring() + + @property + def custom_auth(self) -> httpx.Auth | None: + return None + + @property + def auth_headers(self) -> dict[str, str]: + return {} + + @property + def default_headers(self) -> dict[str, str | Omit]: + return { + "Accept": "application/json", + "Content-Type": "application/json", + "User-Agent": self.user_agent, + **self.platform_headers(), + **self.auth_headers, + **self._custom_headers, + } + + @property + def default_query(self) -> dict[str, object]: + return { + **self._custom_query, + } + + def _validate_headers( + self, + headers: Headers, # noqa: ARG002 + custom_headers: Headers, # noqa: ARG002 + ) -> None: + """Validate the given default headers and custom headers. + + Does nothing by default. + """ + return + + @property + def user_agent(self) -> str: + return f"{self.__class__.__name__}/Python {self._version}" + + @property + def base_url(self) -> URL: + return self._base_url + + @base_url.setter + def base_url(self, url: URL | str) -> None: + self._base_url = self._enforce_trailing_slash(url if isinstance(url, URL) else URL(url)) + + def platform_headers(self) -> Dict[str, str]: + # the actual implementation is in a separate `lru_cache` decorated + # function because adding `lru_cache` to methods will leak memory + # https://github.com/python/cpython/issues/88476 + return platform_headers(self._version, platform=self._platform) + + def _parse_retry_after_header(self, response_headers: Optional[httpx.Headers] = None) -> float | None: + """Returns a float of the number of seconds (not milliseconds) to wait after retrying, or None if unspecified. 
+ + About the Retry-After header: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After + See also https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After#syntax + """ + if response_headers is None: + return None + + # First, try the non-standard `retry-after-ms` header for milliseconds, + # which is more precise than integer-seconds `retry-after` + try: + retry_ms_header = response_headers.get("retry-after-ms", None) + return float(retry_ms_header) / 1000 + except (TypeError, ValueError): + pass + + # Next, try parsing `retry-after` header as seconds (allowing nonstandard floats). + retry_header = response_headers.get("retry-after") + try: + # note: the spec indicates that this should only ever be an integer + # but if someone sends a float there's no reason for us to not respect it + return float(retry_header) + except (TypeError, ValueError): + pass + + # Last, try parsing `retry-after` as a date. + retry_date_tuple = email.utils.parsedate_tz(retry_header) + if retry_date_tuple is None: + return None + + retry_date = email.utils.mktime_tz(retry_date_tuple) + return float(retry_date - time.time()) + + def _calculate_retry_timeout( + self, + remaining_retries: int, + options: FinalRequestOptions, + response_headers: Optional[httpx.Headers] = None, + ) -> float: + max_retries = options.get_max_retries(self.max_retries) + + # If the API asks us to wait a certain amount of time (and it's a reasonable amount), just do what it says. + retry_after = self._parse_retry_after_header(response_headers) + if retry_after is not None and 0 < retry_after <= 60: + return retry_after + + # Also cap retry count to 1000 to avoid any potential overflows with `pow` + nb_retries = min(max_retries - remaining_retries, 1000) + + # Apply exponential backoff, but not more than the max. + sleep_seconds = min(INITIAL_RETRY_DELAY * pow(2.0, nb_retries), MAX_RETRY_DELAY) + + # Apply some jitter, plus-or-minus half a second. + jitter = 1 - 0.25 * random() + timeout = sleep_seconds * jitter + return timeout if timeout >= 0 else 0 + + def _should_retry(self, response: httpx.Response) -> bool: + # Note: this is not a standard header + should_retry_header = response.headers.get("x-should-retry") + + # If the server explicitly says whether or not to retry, obey. + if should_retry_header == "true": + log.debug("Retrying as header `x-should-retry` is set to `true`") + return True + if should_retry_header == "false": + log.debug("Not retrying as header `x-should-retry` is set to `false`") + return False + + # Retry on request timeouts. + if response.status_code == 408: + log.debug("Retrying due to status code %i", response.status_code) + return True + + # Retry on lock timeouts. + if response.status_code == 409: + log.debug("Retrying due to status code %i", response.status_code) + return True + + # Retry on rate limits. + if response.status_code == 429: + log.debug("Retrying due to status code %i", response.status_code) + return True + + # Retry internal errors. 
+ if response.status_code >= 500: + log.debug("Retrying due to status code %i", response.status_code) + return True + + log.debug("Not retrying") + return False + + def _idempotency_key(self) -> str: + return f"stainless-python-retry-{uuid.uuid4()}" + + +class _DefaultHttpxClient(httpx.Client): + def __init__(self, **kwargs: Any) -> None: + kwargs.setdefault("timeout", DEFAULT_TIMEOUT) + kwargs.setdefault("limits", DEFAULT_CONNECTION_LIMITS) + kwargs.setdefault("follow_redirects", True) + super().__init__(**kwargs) + + +if TYPE_CHECKING: + DefaultHttpxClient = httpx.Client + """An alias to `httpx.Client` that provides the same defaults that this SDK + uses internally. + + This is useful because overriding the `http_client` with your own instance of + `httpx.Client` will result in httpx's defaults being used, not ours. + """ +else: + DefaultHttpxClient = _DefaultHttpxClient + + +class SyncHttpxClientWrapper(DefaultHttpxClient): + def __del__(self) -> None: + if self.is_closed: + return + + try: + self.close() + except Exception: + pass + + +class SyncAPIClient(BaseClient[httpx.Client, Stream[Any]]): + _client: httpx.Client + _default_stream_cls: type[Stream[Any]] | None = None + + def __init__( + self, + *, + version: str, + base_url: str | URL, + max_retries: int = DEFAULT_MAX_RETRIES, + timeout: float | Timeout | None | NotGiven = not_given, + http_client: httpx.Client | None = None, + custom_headers: Mapping[str, str] | None = None, + custom_query: Mapping[str, object] | None = None, + _strict_response_validation: bool, + ) -> None: + if not is_given(timeout): + # if the user passed in a custom http client with a non-default + # timeout set then we use that timeout. + # + # note: there is an edge case here where the user passes in a client + # where they've explicitly set the timeout to match the default timeout + # as this check is structural, meaning that we'll think they didn't + # pass in a timeout and will ignore it + if http_client and http_client.timeout != HTTPX_DEFAULT_TIMEOUT: + timeout = http_client.timeout + else: + timeout = DEFAULT_TIMEOUT + + if http_client is not None and not isinstance(http_client, httpx.Client): # pyright: ignore[reportUnnecessaryIsInstance] + raise TypeError( + f"Invalid `http_client` argument; Expected an instance of `httpx.Client` but got {type(http_client)}" + ) + + super().__init__( + version=version, + # cast to a valid type because mypy doesn't understand our type narrowing + timeout=cast(Timeout, timeout), + base_url=base_url, + max_retries=max_retries, + custom_query=custom_query, + custom_headers=custom_headers, + _strict_response_validation=_strict_response_validation, + ) + self._client = http_client or SyncHttpxClientWrapper( + base_url=base_url, + # cast to a valid type because mypy doesn't understand our type narrowing + timeout=cast(Timeout, timeout), + ) + + def is_closed(self) -> bool: + return self._client.is_closed + + def close(self) -> None: + """Close the underlying HTTPX client. + + The client will *not* be usable after this. 
+ """ + # If an error is thrown while constructing a client, self._client + # may not be present + if hasattr(self, "_client"): + self._client.close() + + def __enter__(self: _T) -> _T: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.close() + + def _prepare_options( + self, + options: FinalRequestOptions, # noqa: ARG002 + ) -> FinalRequestOptions: + """Hook for mutating the given options""" + return options + + def _prepare_request( + self, + request: httpx.Request, # noqa: ARG002 + ) -> None: + """This method is used as a callback for mutating the `Request` object + after it has been constructed. + This is useful for cases where you want to add certain headers based off of + the request properties, e.g. `url`, `method` etc. + """ + return None + + @overload + def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: Literal[True], + stream_cls: Type[_StreamT], + ) -> _StreamT: ... + + @overload + def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: Literal[False] = False, + ) -> ResponseT: ... + + @overload + def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: bool = False, + stream_cls: Type[_StreamT] | None = None, + ) -> ResponseT | _StreamT: ... + + def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: bool = False, + stream_cls: type[_StreamT] | None = None, + ) -> ResponseT | _StreamT: + cast_to = self._maybe_override_cast_to(cast_to, options) + + # create a copy of the options we were given so that if the + # options are mutated later & we then retry, the retries are + # given the original options + input_options = model_copy(options) + if input_options.idempotency_key is None and input_options.method.lower() != "get": + # ensure the idempotency key is reused between requests + input_options.idempotency_key = self._idempotency_key() + + response: httpx.Response | None = None + max_retries = input_options.get_max_retries(self.max_retries) + + retries_taken = 0 + for retries_taken in range(max_retries + 1): + options = model_copy(input_options) + options = self._prepare_options(options) + + remaining_retries = max_retries - retries_taken + request = self._build_request(options, retries_taken=retries_taken) + self._prepare_request(request) + + kwargs: HttpxSendArgs = {} + if self.custom_auth is not None: + kwargs["auth"] = self.custom_auth + + if options.follow_redirects is not None: + kwargs["follow_redirects"] = options.follow_redirects + + log.debug("Sending HTTP Request: %s %s", request.method, request.url) + + response = None + try: + response = self._client.send( + request, + stream=stream or self._should_stream_response_body(request=request), + **kwargs, + ) + except httpx.TimeoutException as err: + log.debug("Encountered httpx.TimeoutException", exc_info=True) + + if remaining_retries > 0: + self._sleep_for_retry( + retries_taken=retries_taken, + max_retries=max_retries, + options=input_options, + response=None, + ) + continue + + log.debug("Raising timeout error") + raise APITimeoutError(request=request) from err + except Exception as err: + log.debug("Encountered Exception", exc_info=True) + + if remaining_retries > 0: + self._sleep_for_retry( + retries_taken=retries_taken, + max_retries=max_retries, + options=input_options, + response=None, + ) + continue + + log.debug("Raising connection error") + 
raise APIConnectionError(request=request) from err + + log.debug( + 'HTTP Response: %s %s "%i %s" %s', + request.method, + request.url, + response.status_code, + response.reason_phrase, + response.headers, + ) + + try: + response.raise_for_status() + except httpx.HTTPStatusError as err: # thrown on 4xx and 5xx status code + log.debug("Encountered httpx.HTTPStatusError", exc_info=True) + + if remaining_retries > 0 and self._should_retry(err.response): + err.response.close() + self._sleep_for_retry( + retries_taken=retries_taken, + max_retries=max_retries, + options=input_options, + response=response, + ) + continue + + # If the response is streamed then we need to explicitly read the response + # to completion before attempting to access the response text. + if not err.response.is_closed: + err.response.read() + + log.debug("Re-raising status error") + raise self._make_status_error_from_response(err.response) from None + + break + + assert response is not None, "could not resolve response (should never happen)" + return self._process_response( + cast_to=cast_to, + options=options, + response=response, + stream=stream, + stream_cls=stream_cls, + retries_taken=retries_taken, + ) + + def _sleep_for_retry( + self, *, retries_taken: int, max_retries: int, options: FinalRequestOptions, response: httpx.Response | None + ) -> None: + remaining_retries = max_retries - retries_taken + if remaining_retries == 1: + log.debug("1 retry left") + else: + log.debug("%i retries left", remaining_retries) + + timeout = self._calculate_retry_timeout(remaining_retries, options, response.headers if response else None) + log.info("Retrying request to %s in %f seconds", options.url, timeout) + + time.sleep(timeout) + + def _process_response( + self, + *, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + response: httpx.Response, + stream: bool, + stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None, + retries_taken: int = 0, + ) -> ResponseT: + origin = get_origin(cast_to) or cast_to + + if ( + inspect.isclass(origin) + and issubclass(origin, BaseAPIResponse) + # we only want to actually return the custom BaseAPIResponse class if we're + # returning the raw response, or if we're not streaming SSE, as if we're streaming + # SSE then `cast_to` doesn't actively reflect the type we need to parse into + and (not stream or bool(response.request.headers.get(RAW_RESPONSE_HEADER))) + ): + if not issubclass(origin, APIResponse): + raise TypeError(f"API Response types must subclass {APIResponse}; Received {origin}") + + response_cls = cast("type[BaseAPIResponse[Any]]", cast_to) + return cast( + ResponseT, + response_cls( + raw=response, + client=self, + cast_to=extract_response_type(response_cls), + stream=stream, + stream_cls=stream_cls, + options=options, + retries_taken=retries_taken, + ), + ) + + if cast_to == httpx.Response: + return cast(ResponseT, response) + + api_response = APIResponse( + raw=response, + client=self, + cast_to=cast("type[ResponseT]", cast_to), # pyright: ignore[reportUnnecessaryCast] + stream=stream, + stream_cls=stream_cls, + options=options, + retries_taken=retries_taken, + ) + if bool(response.request.headers.get(RAW_RESPONSE_HEADER)): + return cast(ResponseT, api_response) + + return api_response.parse() + + def _request_api_list( + self, + model: Type[object], + page: Type[SyncPageT], + options: FinalRequestOptions, + ) -> SyncPageT: + def _parser(resp: SyncPageT) -> SyncPageT: + resp._set_private_attributes( + client=self, + model=model, + options=options, + ) + return resp 
+ + options.post_parser = _parser + + return self.request(page, options, stream=False) + + @overload + def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: Literal[False] = False, + ) -> ResponseT: ... + + @overload + def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: Literal[True], + stream_cls: type[_StreamT], + ) -> _StreamT: ... + + @overload + def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: bool, + stream_cls: type[_StreamT] | None = None, + ) -> ResponseT | _StreamT: ... + + def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: bool = False, + stream_cls: type[_StreamT] | None = None, + ) -> ResponseT | _StreamT: + opts = FinalRequestOptions.construct(method="get", url=path, **options) + # cast is required because mypy complains about returning Any even though + # it understands the type variables + return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls)) + + @overload + def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + files: RequestFiles | None = None, + stream: Literal[False] = False, + ) -> ResponseT: ... + + @overload + def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + files: RequestFiles | None = None, + stream: Literal[True], + stream_cls: type[_StreamT], + ) -> _StreamT: ... + + @overload + def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + files: RequestFiles | None = None, + stream: bool, + stream_cls: type[_StreamT] | None = None, + ) -> ResponseT | _StreamT: ... 
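# A minimal sketch of the backoff arithmetic used by _calculate_retry_timeout above.
# INITIAL_RETRY_DELAY and MAX_RETRY_DELAY are defined in _constants.py and are not shown
# in this patch, so the values below are illustrative assumptions only.
import random

def sketch_retry_timeout(nb_retries: int) -> float:
    INITIAL_RETRY_DELAY = 0.5  # assumed value, for illustration
    MAX_RETRY_DELAY = 8.0  # assumed value, for illustration
    # exponential backoff, capped at the maximum delay
    sleep_seconds = min(INITIAL_RETRY_DELAY * 2.0**nb_retries, MAX_RETRY_DELAY)
    # multiplicative jitter: shrinks the computed delay by up to 25%
    jitter = 1 - 0.25 * random.random()
    return max(sleep_seconds * jitter, 0)

# With these assumed constants the first retry sleeps roughly 0.38-0.5s, the third roughly
# 1.5-2.0s, and the delay stops growing once it reaches MAX_RETRY_DELAY.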
+ + def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + files: RequestFiles | None = None, + stream: bool = False, + stream_cls: type[_StreamT] | None = None, + ) -> ResponseT | _StreamT: + opts = FinalRequestOptions.construct( + method="post", url=path, json_data=body, files=to_httpx_files(files), **options + ) + return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls)) + + def patch( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + files: RequestFiles | None = None, + options: RequestOptions = {}, + ) -> ResponseT: + opts = FinalRequestOptions.construct( + method="patch", url=path, json_data=body, files=to_httpx_files(files), **options + ) + return self.request(cast_to, opts) + + def put( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + files: RequestFiles | None = None, + options: RequestOptions = {}, + ) -> ResponseT: + opts = FinalRequestOptions.construct( + method="put", url=path, json_data=body, files=to_httpx_files(files), **options + ) + return self.request(cast_to, opts) + + def delete( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + ) -> ResponseT: + opts = FinalRequestOptions.construct(method="delete", url=path, json_data=body, **options) + return self.request(cast_to, opts) + + def get_api_list( + self, + path: str, + *, + model: Type[object], + page: Type[SyncPageT], + body: Body | None = None, + options: RequestOptions = {}, + method: str = "get", + ) -> SyncPageT: + opts = FinalRequestOptions.construct(method=method, url=path, json_data=body, **options) + return self._request_api_list(model, page, opts) + + +class _DefaultAsyncHttpxClient(httpx.AsyncClient): + def __init__(self, **kwargs: Any) -> None: + kwargs.setdefault("timeout", DEFAULT_TIMEOUT) + kwargs.setdefault("limits", DEFAULT_CONNECTION_LIMITS) + kwargs.setdefault("follow_redirects", True) + super().__init__(**kwargs) + + +try: + import httpx_aiohttp +except ImportError: + + class _DefaultAioHttpClient(httpx.AsyncClient): + def __init__(self, **_kwargs: Any) -> None: + raise RuntimeError("To use the aiohttp client you must have installed the package with the `aiohttp` extra") +else: + + class _DefaultAioHttpClient(httpx_aiohttp.HttpxAiohttpClient): # type: ignore + def __init__(self, **kwargs: Any) -> None: + kwargs.setdefault("timeout", DEFAULT_TIMEOUT) + kwargs.setdefault("limits", DEFAULT_CONNECTION_LIMITS) + kwargs.setdefault("follow_redirects", True) + + super().__init__(**kwargs) + + +if TYPE_CHECKING: + DefaultAsyncHttpxClient = httpx.AsyncClient + """An alias to `httpx.AsyncClient` that provides the same defaults that this SDK + uses internally. + + This is useful because overriding the `http_client` with your own instance of + `httpx.AsyncClient` will result in httpx's defaults being used, not ours. 
+ """ + + DefaultAioHttpClient = httpx.AsyncClient + """An alias to `httpx.AsyncClient` that changes the default HTTP transport to `aiohttp`.""" +else: + DefaultAsyncHttpxClient = _DefaultAsyncHttpxClient + DefaultAioHttpClient = _DefaultAioHttpClient + + +class AsyncHttpxClientWrapper(DefaultAsyncHttpxClient): + def __del__(self) -> None: + if self.is_closed: + return + + try: + # TODO(someday): support non asyncio runtimes here + asyncio.get_running_loop().create_task(self.aclose()) + except Exception: + pass + + +class AsyncAPIClient(BaseClient[httpx.AsyncClient, AsyncStream[Any]]): + _client: httpx.AsyncClient + _default_stream_cls: type[AsyncStream[Any]] | None = None + + def __init__( + self, + *, + version: str, + base_url: str | URL, + _strict_response_validation: bool, + max_retries: int = DEFAULT_MAX_RETRIES, + timeout: float | Timeout | None | NotGiven = not_given, + http_client: httpx.AsyncClient | None = None, + custom_headers: Mapping[str, str] | None = None, + custom_query: Mapping[str, object] | None = None, + ) -> None: + if not is_given(timeout): + # if the user passed in a custom http client with a non-default + # timeout set then we use that timeout. + # + # note: there is an edge case here where the user passes in a client + # where they've explicitly set the timeout to match the default timeout + # as this check is structural, meaning that we'll think they didn't + # pass in a timeout and will ignore it + if http_client and http_client.timeout != HTTPX_DEFAULT_TIMEOUT: + timeout = http_client.timeout + else: + timeout = DEFAULT_TIMEOUT + + if http_client is not None and not isinstance(http_client, httpx.AsyncClient): # pyright: ignore[reportUnnecessaryIsInstance] + raise TypeError( + f"Invalid `http_client` argument; Expected an instance of `httpx.AsyncClient` but got {type(http_client)}" + ) + + super().__init__( + version=version, + base_url=base_url, + # cast to a valid type because mypy doesn't understand our type narrowing + timeout=cast(Timeout, timeout), + max_retries=max_retries, + custom_query=custom_query, + custom_headers=custom_headers, + _strict_response_validation=_strict_response_validation, + ) + self._client = http_client or AsyncHttpxClientWrapper( + base_url=base_url, + # cast to a valid type because mypy doesn't understand our type narrowing + timeout=cast(Timeout, timeout), + ) + + def is_closed(self) -> bool: + return self._client.is_closed + + async def close(self) -> None: + """Close the underlying HTTPX client. + + The client will *not* be usable after this. + """ + await self._client.aclose() + + async def __aenter__(self: _T) -> _T: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + await self.close() + + async def _prepare_options( + self, + options: FinalRequestOptions, # noqa: ARG002 + ) -> FinalRequestOptions: + """Hook for mutating the given options""" + return options + + async def _prepare_request( + self, + request: httpx.Request, # noqa: ARG002 + ) -> None: + """This method is used as a callback for mutating the `Request` object + after it has been constructed. + This is useful for cases where you want to add certain headers based off of + the request properties, e.g. `url`, `method` etc. + """ + return None + + @overload + async def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: Literal[False] = False, + ) -> ResponseT: ... 
+ + @overload + async def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: Literal[True], + stream_cls: type[_AsyncStreamT], + ) -> _AsyncStreamT: ... + + @overload + async def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: bool, + stream_cls: type[_AsyncStreamT] | None = None, + ) -> ResponseT | _AsyncStreamT: ... + + async def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: bool = False, + stream_cls: type[_AsyncStreamT] | None = None, + ) -> ResponseT | _AsyncStreamT: + if self._platform is None: + # `get_platform` can make blocking IO calls so we + # execute it earlier while we are in an async context + self._platform = await asyncify(get_platform)() + + cast_to = self._maybe_override_cast_to(cast_to, options) + + # create a copy of the options we were given so that if the + # options are mutated later & we then retry, the retries are + # given the original options + input_options = model_copy(options) + if input_options.idempotency_key is None and input_options.method.lower() != "get": + # ensure the idempotency key is reused between requests + input_options.idempotency_key = self._idempotency_key() + + response: httpx.Response | None = None + max_retries = input_options.get_max_retries(self.max_retries) + + retries_taken = 0 + for retries_taken in range(max_retries + 1): + options = model_copy(input_options) + options = await self._prepare_options(options) + + remaining_retries = max_retries - retries_taken + request = self._build_request(options, retries_taken=retries_taken) + await self._prepare_request(request) + + kwargs: HttpxSendArgs = {} + if self.custom_auth is not None: + kwargs["auth"] = self.custom_auth + + if options.follow_redirects is not None: + kwargs["follow_redirects"] = options.follow_redirects + + log.debug("Sending HTTP Request: %s %s", request.method, request.url) + + response = None + try: + response = await self._client.send( + request, + stream=stream or self._should_stream_response_body(request=request), + **kwargs, + ) + except httpx.TimeoutException as err: + log.debug("Encountered httpx.TimeoutException", exc_info=True) + + if remaining_retries > 0: + await self._sleep_for_retry( + retries_taken=retries_taken, + max_retries=max_retries, + options=input_options, + response=None, + ) + continue + + log.debug("Raising timeout error") + raise APITimeoutError(request=request) from err + except Exception as err: + log.debug("Encountered Exception", exc_info=True) + + if remaining_retries > 0: + await self._sleep_for_retry( + retries_taken=retries_taken, + max_retries=max_retries, + options=input_options, + response=None, + ) + continue + + log.debug("Raising connection error") + raise APIConnectionError(request=request) from err + + log.debug( + 'HTTP Response: %s %s "%i %s" %s', + request.method, + request.url, + response.status_code, + response.reason_phrase, + response.headers, + ) + + try: + response.raise_for_status() + except httpx.HTTPStatusError as err: # thrown on 4xx and 5xx status code + log.debug("Encountered httpx.HTTPStatusError", exc_info=True) + + if remaining_retries > 0 and self._should_retry(err.response): + await err.response.aclose() + await self._sleep_for_retry( + retries_taken=retries_taken, + max_retries=max_retries, + options=input_options, + response=response, + ) + continue + + # If the response is streamed then we need to explicitly read the response + # to completion before attempting to access the 
response text. + if not err.response.is_closed: + await err.response.aread() + + log.debug("Re-raising status error") + raise self._make_status_error_from_response(err.response) from None + + break + + assert response is not None, "could not resolve response (should never happen)" + return await self._process_response( + cast_to=cast_to, + options=options, + response=response, + stream=stream, + stream_cls=stream_cls, + retries_taken=retries_taken, + ) + + async def _sleep_for_retry( + self, *, retries_taken: int, max_retries: int, options: FinalRequestOptions, response: httpx.Response | None + ) -> None: + remaining_retries = max_retries - retries_taken + if remaining_retries == 1: + log.debug("1 retry left") + else: + log.debug("%i retries left", remaining_retries) + + timeout = self._calculate_retry_timeout(remaining_retries, options, response.headers if response else None) + log.info("Retrying request to %s in %f seconds", options.url, timeout) + + await anyio.sleep(timeout) + + async def _process_response( + self, + *, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + response: httpx.Response, + stream: bool, + stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None, + retries_taken: int = 0, + ) -> ResponseT: + origin = get_origin(cast_to) or cast_to + + if ( + inspect.isclass(origin) + and issubclass(origin, BaseAPIResponse) + # we only want to actually return the custom BaseAPIResponse class if we're + # returning the raw response, or if we're not streaming SSE, as if we're streaming + # SSE then `cast_to` doesn't actively reflect the type we need to parse into + and (not stream or bool(response.request.headers.get(RAW_RESPONSE_HEADER))) + ): + if not issubclass(origin, AsyncAPIResponse): + raise TypeError(f"API Response types must subclass {AsyncAPIResponse}; Received {origin}") + + response_cls = cast("type[BaseAPIResponse[Any]]", cast_to) + return cast( + "ResponseT", + response_cls( + raw=response, + client=self, + cast_to=extract_response_type(response_cls), + stream=stream, + stream_cls=stream_cls, + options=options, + retries_taken=retries_taken, + ), + ) + + if cast_to == httpx.Response: + return cast(ResponseT, response) + + api_response = AsyncAPIResponse( + raw=response, + client=self, + cast_to=cast("type[ResponseT]", cast_to), # pyright: ignore[reportUnnecessaryCast] + stream=stream, + stream_cls=stream_cls, + options=options, + retries_taken=retries_taken, + ) + if bool(response.request.headers.get(RAW_RESPONSE_HEADER)): + return cast(ResponseT, api_response) + + return await api_response.parse() + + def _request_api_list( + self, + model: Type[_T], + page: Type[AsyncPageT], + options: FinalRequestOptions, + ) -> AsyncPaginator[_T, AsyncPageT]: + return AsyncPaginator(client=self, options=options, page_cls=page, model=model) + + @overload + async def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: Literal[False] = False, + ) -> ResponseT: ... + + @overload + async def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: Literal[True], + stream_cls: type[_AsyncStreamT], + ) -> _AsyncStreamT: ... + + @overload + async def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: bool, + stream_cls: type[_AsyncStreamT] | None = None, + ) -> ResponseT | _AsyncStreamT: ... 
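The retry loop above only re-issues a request while retries remain, and for HTTP errors only when `_should_retry` accepts the status code; the pause between attempts comes from `_calculate_retry_timeout`, which is handed the failed response's headers when one exists. From the caller's point of view the knob is simply `max_retries`: it can be set once on the client or overridden per call via `with_options`, the alias for `copy` defined further down in this diff. A rough sketch, with the resource method left as a placeholder and KERNEL_API_KEY assumed to be set:

from kernel import AsyncKernel  # assumed package-root export

client = AsyncKernel(max_retries=0)            # disable automatic retries entirely
patient = client.with_options(max_retries=5)   # copy of the client allowing up to 5 retries
# await patient.<resource>.<method>(...)       # placeholder: real method names appear elsewhere in this diff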
+ + async def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: bool = False, + stream_cls: type[_AsyncStreamT] | None = None, + ) -> ResponseT | _AsyncStreamT: + opts = FinalRequestOptions.construct(method="get", url=path, **options) + return await self.request(cast_to, opts, stream=stream, stream_cls=stream_cls) + + @overload + async def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + files: RequestFiles | None = None, + options: RequestOptions = {}, + stream: Literal[False] = False, + ) -> ResponseT: ... + + @overload + async def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + files: RequestFiles | None = None, + options: RequestOptions = {}, + stream: Literal[True], + stream_cls: type[_AsyncStreamT], + ) -> _AsyncStreamT: ... + + @overload + async def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + files: RequestFiles | None = None, + options: RequestOptions = {}, + stream: bool, + stream_cls: type[_AsyncStreamT] | None = None, + ) -> ResponseT | _AsyncStreamT: ... + + async def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + files: RequestFiles | None = None, + options: RequestOptions = {}, + stream: bool = False, + stream_cls: type[_AsyncStreamT] | None = None, + ) -> ResponseT | _AsyncStreamT: + opts = FinalRequestOptions.construct( + method="post", url=path, json_data=body, files=await async_to_httpx_files(files), **options + ) + return await self.request(cast_to, opts, stream=stream, stream_cls=stream_cls) + + async def patch( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + files: RequestFiles | None = None, + options: RequestOptions = {}, + ) -> ResponseT: + opts = FinalRequestOptions.construct( + method="patch", url=path, json_data=body, files=await async_to_httpx_files(files), **options + ) + return await self.request(cast_to, opts) + + async def put( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + files: RequestFiles | None = None, + options: RequestOptions = {}, + ) -> ResponseT: + opts = FinalRequestOptions.construct( + method="put", url=path, json_data=body, files=await async_to_httpx_files(files), **options + ) + return await self.request(cast_to, opts) + + async def delete( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + ) -> ResponseT: + opts = FinalRequestOptions.construct(method="delete", url=path, json_data=body, **options) + return await self.request(cast_to, opts) + + def get_api_list( + self, + path: str, + *, + model: Type[_T], + page: Type[AsyncPageT], + body: Body | None = None, + options: RequestOptions = {}, + method: str = "get", + ) -> AsyncPaginator[_T, AsyncPageT]: + opts = FinalRequestOptions.construct(method=method, url=path, json_data=body, **options) + return self._request_api_list(model, page, opts) + + +def make_request_options( + *, + query: Query | None = None, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + idempotency_key: str | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + post_parser: PostParser | NotGiven = not_given, +) -> RequestOptions: + """Create a dict of type RequestOptions without keys of NotGiven values.""" + options: RequestOptions = {} + if extra_headers is not None: + 
options["headers"] = extra_headers + + if extra_body is not None: + options["extra_json"] = cast(AnyMapping, extra_body) + + if query is not None: + options["params"] = query + + if extra_query is not None: + options["params"] = {**options.get("params", {}), **extra_query} + + if not isinstance(timeout, NotGiven): + options["timeout"] = timeout + + if idempotency_key is not None: + options["idempotency_key"] = idempotency_key + + if is_given(post_parser): + # internal + options["post_parser"] = post_parser # type: ignore + + return options + + +class ForceMultipartDict(Dict[str, None]): + def __bool__(self) -> bool: + return True + + +class OtherPlatform: + def __init__(self, name: str) -> None: + self.name = name + + @override + def __str__(self) -> str: + return f"Other:{self.name}" + + +Platform = Union[ + OtherPlatform, + Literal[ + "MacOS", + "Linux", + "Windows", + "FreeBSD", + "OpenBSD", + "iOS", + "Android", + "Unknown", + ], +] + + +def get_platform() -> Platform: + try: + system = platform.system().lower() + platform_name = platform.platform().lower() + except Exception: + return "Unknown" + + if "iphone" in platform_name or "ipad" in platform_name: + # Tested using Python3IDE on an iPhone 11 and Pythonista on an iPad 7 + # system is Darwin and platform_name is a string like: + # - Darwin-21.6.0-iPhone12,1-64bit + # - Darwin-21.6.0-iPad7,11-64bit + return "iOS" + + if system == "darwin": + return "MacOS" + + if system == "windows": + return "Windows" + + if "android" in platform_name: + # Tested using Pydroid 3 + # system is Linux and platform_name is a string like 'Linux-5.10.81-android12-9-00001-geba40aecb3b7-ab8534902-aarch64-with-libc' + return "Android" + + if system == "linux": + # https://distro.readthedocs.io/en/latest/#distro.id + distro_id = distro.id() + if distro_id == "freebsd": + return "FreeBSD" + + if distro_id == "openbsd": + return "OpenBSD" + + return "Linux" + + if platform_name: + return OtherPlatform(platform_name) + + return "Unknown" + + +@lru_cache(maxsize=None) +def platform_headers(version: str, *, platform: Platform | None) -> Dict[str, str]: + return { + "X-Stainless-Lang": "python", + "X-Stainless-Package-Version": version, + "X-Stainless-OS": str(platform or get_platform()), + "X-Stainless-Arch": str(get_architecture()), + "X-Stainless-Runtime": get_python_runtime(), + "X-Stainless-Runtime-Version": get_python_version(), + } + + +class OtherArch: + def __init__(self, name: str) -> None: + self.name = name + + @override + def __str__(self) -> str: + return f"other:{self.name}" + + +Arch = Union[OtherArch, Literal["x32", "x64", "arm", "arm64", "unknown"]] + + +def get_python_runtime() -> str: + try: + return platform.python_implementation() + except Exception: + return "unknown" + + +def get_python_version() -> str: + try: + return platform.python_version() + except Exception: + return "unknown" + + +def get_architecture() -> Arch: + try: + machine = platform.machine().lower() + except Exception: + return "unknown" + + if machine in ("arm64", "aarch64"): + return "arm64" + + # TODO: untested + if machine == "arm": + return "arm" + + if machine == "x86_64": + return "x64" + + # TODO: untested + if sys.maxsize <= 2**32: + return "x32" + + if machine: + return OtherArch(machine) + + return "unknown" + + +def _merge_mappings( + obj1: Mapping[_T_co, Union[_T, Omit]], + obj2: Mapping[_T_co, Union[_T, Omit]], +) -> Dict[_T_co, _T]: + """Merge two mappings of the same type, removing any values that are instances of `Omit`. 
+ + In cases with duplicate keys the second mapping takes precedence. + """ + merged = {**obj1, **obj2} + return {key: value for key, value in merged.items() if not isinstance(value, Omit)} diff --git a/src/kernel/_client.py b/src/kernel/_client.py new file mode 100644 index 0000000..166ecdb --- /dev/null +++ b/src/kernel/_client.py @@ -0,0 +1,853 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import TYPE_CHECKING, Any, Dict, Mapping, cast +from typing_extensions import Self, Literal, override + +import httpx + +from . import _exceptions +from ._qs import Querystring +from ._types import ( + Omit, + Timeout, + NotGiven, + Transport, + ProxiesTypes, + RequestOptions, + not_given, +) +from ._utils import is_given, get_async_library +from ._compat import cached_property +from ._version import __version__ +from ._streaming import Stream as Stream, AsyncStream as AsyncStream +from ._exceptions import KernelError, APIStatusError +from ._base_client import ( + DEFAULT_MAX_RETRIES, + SyncAPIClient, + AsyncAPIClient, +) + +if TYPE_CHECKING: + from .resources import ( + apps, + agents, + proxies, + browsers, + profiles, + extensions, + credentials, + deployments, + invocations, + browser_pools, + ) + from .resources.apps import AppsResource, AsyncAppsResource + from .resources.proxies import ProxiesResource, AsyncProxiesResource + from .resources.profiles import ProfilesResource, AsyncProfilesResource + from .resources.extensions import ExtensionsResource, AsyncExtensionsResource + from .resources.credentials import CredentialsResource, AsyncCredentialsResource + from .resources.deployments import DeploymentsResource, AsyncDeploymentsResource + from .resources.invocations import InvocationsResource, AsyncInvocationsResource + from .resources.agents.agents import AgentsResource, AsyncAgentsResource + from .resources.browser_pools import BrowserPoolsResource, AsyncBrowserPoolsResource + from .resources.browsers.browsers import BrowsersResource, AsyncBrowsersResource + +__all__ = [ + "ENVIRONMENTS", + "Timeout", + "Transport", + "ProxiesTypes", + "RequestOptions", + "Kernel", + "AsyncKernel", + "Client", + "AsyncClient", +] + +ENVIRONMENTS: Dict[str, str] = { + "production": "https://api.onkernel.com/", + "development": "https://localhost:3001/", +} + + +class Kernel(SyncAPIClient): + # client options + api_key: str + + _environment: Literal["production", "development"] | NotGiven + + def __init__( + self, + *, + api_key: str | None = None, + environment: Literal["production", "development"] | NotGiven = not_given, + base_url: str | httpx.URL | None | NotGiven = not_given, + timeout: float | Timeout | None | NotGiven = not_given, + max_retries: int = DEFAULT_MAX_RETRIES, + default_headers: Mapping[str, str] | None = None, + default_query: Mapping[str, object] | None = None, + # Configure a custom httpx client. + # We provide a `DefaultHttpxClient` class that you can pass to retain the default values we use for `limits`, `timeout` & `follow_redirects`. + # See the [httpx documentation](https://www.python-httpx.org/api/#client) for more details. + http_client: httpx.Client | None = None, + # Enable or disable schema validation for data returned by the API. + # When enabled an error APIResponseValidationError is raised + # if the API responds with invalid data for the expected schema. + # + # This parameter may be removed or changed in the future. 
+ # If you rely on this feature, please open a GitHub issue + # outlining your use-case to help us decide if it should be + # part of our public interface in the future. + _strict_response_validation: bool = False, + ) -> None: + """Construct a new synchronous Kernel client instance. + + This automatically infers the `api_key` argument from the `KERNEL_API_KEY` environment variable if it is not provided. + """ + if api_key is None: + api_key = os.environ.get("KERNEL_API_KEY") + if api_key is None: + raise KernelError( + "The api_key client option must be set either by passing api_key to the client or by setting the KERNEL_API_KEY environment variable" + ) + self.api_key = api_key + + self._environment = environment + + base_url_env = os.environ.get("KERNEL_BASE_URL") + if is_given(base_url) and base_url is not None: + # cast required because mypy doesn't understand the type narrowing + base_url = cast("str | httpx.URL", base_url) # pyright: ignore[reportUnnecessaryCast] + elif is_given(environment): + if base_url_env and base_url is not None: + raise ValueError( + "Ambiguous URL; The `KERNEL_BASE_URL` env var and the `environment` argument are given. If you want to use the environment, you must pass base_url=None", + ) + + try: + base_url = ENVIRONMENTS[environment] + except KeyError as exc: + raise ValueError(f"Unknown environment: {environment}") from exc + elif base_url_env is not None: + base_url = base_url_env + else: + self._environment = environment = "production" + + try: + base_url = ENVIRONMENTS[environment] + except KeyError as exc: + raise ValueError(f"Unknown environment: {environment}") from exc + + super().__init__( + version=__version__, + base_url=base_url, + max_retries=max_retries, + timeout=timeout, + http_client=http_client, + custom_headers=default_headers, + custom_query=default_query, + _strict_response_validation=_strict_response_validation, + ) + + @cached_property + def deployments(self) -> DeploymentsResource: + from .resources.deployments import DeploymentsResource + + return DeploymentsResource(self) + + @cached_property + def apps(self) -> AppsResource: + from .resources.apps import AppsResource + + return AppsResource(self) + + @cached_property + def invocations(self) -> InvocationsResource: + from .resources.invocations import InvocationsResource + + return InvocationsResource(self) + + @cached_property + def browsers(self) -> BrowsersResource: + from .resources.browsers import BrowsersResource + + return BrowsersResource(self) + + @cached_property + def profiles(self) -> ProfilesResource: + from .resources.profiles import ProfilesResource + + return ProfilesResource(self) + + @cached_property + def proxies(self) -> ProxiesResource: + from .resources.proxies import ProxiesResource + + return ProxiesResource(self) + + @cached_property + def extensions(self) -> ExtensionsResource: + from .resources.extensions import ExtensionsResource + + return ExtensionsResource(self) + + @cached_property + def browser_pools(self) -> BrowserPoolsResource: + from .resources.browser_pools import BrowserPoolsResource + + return BrowserPoolsResource(self) + + @cached_property + def agents(self) -> AgentsResource: + from .resources.agents import AgentsResource + + return AgentsResource(self) + + @cached_property + def credentials(self) -> CredentialsResource: + from .resources.credentials import CredentialsResource + + return CredentialsResource(self) + + @cached_property + def with_raw_response(self) -> KernelWithRawResponse: + return KernelWithRawResponse(self) + + 
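The constructor above resolves credentials and the base URL in a fixed order: an explicit `base_url` argument wins outright; otherwise a named `environment` is looked up in `ENVIRONMENTS` (combining it with a set `KERNEL_BASE_URL` variable is rejected as ambiguous unless `base_url=None` is passed); otherwise `KERNEL_BASE_URL` is used; otherwise the client falls back to the `production` URL. The API key is read from `KERNEL_API_KEY` when not passed explicitly. A short sketch of the resulting call patterns, assuming `Kernel` is re-exported from the package root; the key and URLs are placeholders:

from kernel import Kernel  # assumed package-root export

# Explicit key, default "production" base URL (https://api.onkernel.com/).
client = Kernel(api_key="my-kernel-api-key")

# Key read from KERNEL_API_KEY; base URL taken from the named environment.
dev_client = Kernel(environment="development")

# An explicit base_url takes precedence over both `environment` and KERNEL_BASE_URL.
local_client = Kernel(api_key="my-kernel-api-key", base_url="http://localhost:3001")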
@cached_property + def with_streaming_response(self) -> KernelWithStreamedResponse: + return KernelWithStreamedResponse(self) + + @property + @override + def qs(self) -> Querystring: + return Querystring(array_format="comma") + + @property + @override + def auth_headers(self) -> dict[str, str]: + api_key = self.api_key + return {"Authorization": f"Bearer {api_key}"} + + @property + @override + def default_headers(self) -> dict[str, str | Omit]: + return { + **super().default_headers, + "X-Stainless-Async": "false", + **self._custom_headers, + } + + def copy( + self, + *, + api_key: str | None = None, + environment: Literal["production", "development"] | None = None, + base_url: str | httpx.URL | None = None, + timeout: float | Timeout | None | NotGiven = not_given, + http_client: httpx.Client | None = None, + max_retries: int | NotGiven = not_given, + default_headers: Mapping[str, str] | None = None, + set_default_headers: Mapping[str, str] | None = None, + default_query: Mapping[str, object] | None = None, + set_default_query: Mapping[str, object] | None = None, + _extra_kwargs: Mapping[str, Any] = {}, + ) -> Self: + """ + Create a new client instance re-using the same options given to the current client with optional overriding. + """ + if default_headers is not None and set_default_headers is not None: + raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive") + + if default_query is not None and set_default_query is not None: + raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive") + + headers = self._custom_headers + if default_headers is not None: + headers = {**headers, **default_headers} + elif set_default_headers is not None: + headers = set_default_headers + + params = self._custom_query + if default_query is not None: + params = {**params, **default_query} + elif set_default_query is not None: + params = set_default_query + + http_client = http_client or self._client + return self.__class__( + api_key=api_key or self.api_key, + base_url=base_url or self.base_url, + environment=environment or self._environment, + timeout=self.timeout if isinstance(timeout, NotGiven) else timeout, + http_client=http_client, + max_retries=max_retries if is_given(max_retries) else self.max_retries, + default_headers=headers, + default_query=params, + **_extra_kwargs, + ) + + # Alias for `copy` for nicer inline usage, e.g. + # client.with_options(timeout=10).foo.create(...) 
+ with_options = copy + + @override + def _make_status_error( + self, + err_msg: str, + *, + body: object, + response: httpx.Response, + ) -> APIStatusError: + if response.status_code == 400: + return _exceptions.BadRequestError(err_msg, response=response, body=body) + + if response.status_code == 401: + return _exceptions.AuthenticationError(err_msg, response=response, body=body) + + if response.status_code == 403: + return _exceptions.PermissionDeniedError(err_msg, response=response, body=body) + + if response.status_code == 404: + return _exceptions.NotFoundError(err_msg, response=response, body=body) + + if response.status_code == 409: + return _exceptions.ConflictError(err_msg, response=response, body=body) + + if response.status_code == 422: + return _exceptions.UnprocessableEntityError(err_msg, response=response, body=body) + + if response.status_code == 429: + return _exceptions.RateLimitError(err_msg, response=response, body=body) + + if response.status_code >= 500: + return _exceptions.InternalServerError(err_msg, response=response, body=body) + return APIStatusError(err_msg, response=response, body=body) + + +class AsyncKernel(AsyncAPIClient): + # client options + api_key: str + + _environment: Literal["production", "development"] | NotGiven + + def __init__( + self, + *, + api_key: str | None = None, + environment: Literal["production", "development"] | NotGiven = not_given, + base_url: str | httpx.URL | None | NotGiven = not_given, + timeout: float | Timeout | None | NotGiven = not_given, + max_retries: int = DEFAULT_MAX_RETRIES, + default_headers: Mapping[str, str] | None = None, + default_query: Mapping[str, object] | None = None, + # Configure a custom httpx client. + # We provide a `DefaultAsyncHttpxClient` class that you can pass to retain the default values we use for `limits`, `timeout` & `follow_redirects`. + # See the [httpx documentation](https://www.python-httpx.org/api/#asyncclient) for more details. + http_client: httpx.AsyncClient | None = None, + # Enable or disable schema validation for data returned by the API. + # When enabled an error APIResponseValidationError is raised + # if the API responds with invalid data for the expected schema. + # + # This parameter may be removed or changed in the future. + # If you rely on this feature, please open a GitHub issue + # outlining your use-case to help us decide if it should be + # part of our public interface in the future. + _strict_response_validation: bool = False, + ) -> None: + """Construct a new async AsyncKernel client instance. + + This automatically infers the `api_key` argument from the `KERNEL_API_KEY` environment variable if it is not provided. + """ + if api_key is None: + api_key = os.environ.get("KERNEL_API_KEY") + if api_key is None: + raise KernelError( + "The api_key client option must be set either by passing api_key to the client or by setting the KERNEL_API_KEY environment variable" + ) + self.api_key = api_key + + self._environment = environment + + base_url_env = os.environ.get("KERNEL_BASE_URL") + if is_given(base_url) and base_url is not None: + # cast required because mypy doesn't understand the type narrowing + base_url = cast("str | httpx.URL", base_url) # pyright: ignore[reportUnnecessaryCast] + elif is_given(environment): + if base_url_env and base_url is not None: + raise ValueError( + "Ambiguous URL; The `KERNEL_BASE_URL` env var and the `environment` argument are given. 
If you want to use the environment, you must pass base_url=None", + ) + + try: + base_url = ENVIRONMENTS[environment] + except KeyError as exc: + raise ValueError(f"Unknown environment: {environment}") from exc + elif base_url_env is not None: + base_url = base_url_env + else: + self._environment = environment = "production" + + try: + base_url = ENVIRONMENTS[environment] + except KeyError as exc: + raise ValueError(f"Unknown environment: {environment}") from exc + + super().__init__( + version=__version__, + base_url=base_url, + max_retries=max_retries, + timeout=timeout, + http_client=http_client, + custom_headers=default_headers, + custom_query=default_query, + _strict_response_validation=_strict_response_validation, + ) + + @cached_property + def deployments(self) -> AsyncDeploymentsResource: + from .resources.deployments import AsyncDeploymentsResource + + return AsyncDeploymentsResource(self) + + @cached_property + def apps(self) -> AsyncAppsResource: + from .resources.apps import AsyncAppsResource + + return AsyncAppsResource(self) + + @cached_property + def invocations(self) -> AsyncInvocationsResource: + from .resources.invocations import AsyncInvocationsResource + + return AsyncInvocationsResource(self) + + @cached_property + def browsers(self) -> AsyncBrowsersResource: + from .resources.browsers import AsyncBrowsersResource + + return AsyncBrowsersResource(self) + + @cached_property + def profiles(self) -> AsyncProfilesResource: + from .resources.profiles import AsyncProfilesResource + + return AsyncProfilesResource(self) + + @cached_property + def proxies(self) -> AsyncProxiesResource: + from .resources.proxies import AsyncProxiesResource + + return AsyncProxiesResource(self) + + @cached_property + def extensions(self) -> AsyncExtensionsResource: + from .resources.extensions import AsyncExtensionsResource + + return AsyncExtensionsResource(self) + + @cached_property + def browser_pools(self) -> AsyncBrowserPoolsResource: + from .resources.browser_pools import AsyncBrowserPoolsResource + + return AsyncBrowserPoolsResource(self) + + @cached_property + def agents(self) -> AsyncAgentsResource: + from .resources.agents import AsyncAgentsResource + + return AsyncAgentsResource(self) + + @cached_property + def credentials(self) -> AsyncCredentialsResource: + from .resources.credentials import AsyncCredentialsResource + + return AsyncCredentialsResource(self) + + @cached_property + def with_raw_response(self) -> AsyncKernelWithRawResponse: + return AsyncKernelWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncKernelWithStreamedResponse: + return AsyncKernelWithStreamedResponse(self) + + @property + @override + def qs(self) -> Querystring: + return Querystring(array_format="comma") + + @property + @override + def auth_headers(self) -> dict[str, str]: + api_key = self.api_key + return {"Authorization": f"Bearer {api_key}"} + + @property + @override + def default_headers(self) -> dict[str, str | Omit]: + return { + **super().default_headers, + "X-Stainless-Async": f"async:{get_async_library()}", + **self._custom_headers, + } + + def copy( + self, + *, + api_key: str | None = None, + environment: Literal["production", "development"] | None = None, + base_url: str | httpx.URL | None = None, + timeout: float | Timeout | None | NotGiven = not_given, + http_client: httpx.AsyncClient | None = None, + max_retries: int | NotGiven = not_given, + default_headers: Mapping[str, str] | None = None, + set_default_headers: Mapping[str, str] | None = None, + 
default_query: Mapping[str, object] | None = None, + set_default_query: Mapping[str, object] | None = None, + _extra_kwargs: Mapping[str, Any] = {}, + ) -> Self: + """ + Create a new client instance re-using the same options given to the current client with optional overriding. + """ + if default_headers is not None and set_default_headers is not None: + raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive") + + if default_query is not None and set_default_query is not None: + raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive") + + headers = self._custom_headers + if default_headers is not None: + headers = {**headers, **default_headers} + elif set_default_headers is not None: + headers = set_default_headers + + params = self._custom_query + if default_query is not None: + params = {**params, **default_query} + elif set_default_query is not None: + params = set_default_query + + http_client = http_client or self._client + return self.__class__( + api_key=api_key or self.api_key, + base_url=base_url or self.base_url, + environment=environment or self._environment, + timeout=self.timeout if isinstance(timeout, NotGiven) else timeout, + http_client=http_client, + max_retries=max_retries if is_given(max_retries) else self.max_retries, + default_headers=headers, + default_query=params, + **_extra_kwargs, + ) + + # Alias for `copy` for nicer inline usage, e.g. + # client.with_options(timeout=10).foo.create(...) + with_options = copy + + @override + def _make_status_error( + self, + err_msg: str, + *, + body: object, + response: httpx.Response, + ) -> APIStatusError: + if response.status_code == 400: + return _exceptions.BadRequestError(err_msg, response=response, body=body) + + if response.status_code == 401: + return _exceptions.AuthenticationError(err_msg, response=response, body=body) + + if response.status_code == 403: + return _exceptions.PermissionDeniedError(err_msg, response=response, body=body) + + if response.status_code == 404: + return _exceptions.NotFoundError(err_msg, response=response, body=body) + + if response.status_code == 409: + return _exceptions.ConflictError(err_msg, response=response, body=body) + + if response.status_code == 422: + return _exceptions.UnprocessableEntityError(err_msg, response=response, body=body) + + if response.status_code == 429: + return _exceptions.RateLimitError(err_msg, response=response, body=body) + + if response.status_code >= 500: + return _exceptions.InternalServerError(err_msg, response=response, body=body) + return APIStatusError(err_msg, response=response, body=body) + + +class KernelWithRawResponse: + _client: Kernel + + def __init__(self, client: Kernel) -> None: + self._client = client + + @cached_property + def deployments(self) -> deployments.DeploymentsResourceWithRawResponse: + from .resources.deployments import DeploymentsResourceWithRawResponse + + return DeploymentsResourceWithRawResponse(self._client.deployments) + + @cached_property + def apps(self) -> apps.AppsResourceWithRawResponse: + from .resources.apps import AppsResourceWithRawResponse + + return AppsResourceWithRawResponse(self._client.apps) + + @cached_property + def invocations(self) -> invocations.InvocationsResourceWithRawResponse: + from .resources.invocations import InvocationsResourceWithRawResponse + + return InvocationsResourceWithRawResponse(self._client.invocations) + + @cached_property + def browsers(self) -> browsers.BrowsersResourceWithRawResponse: + from 
.resources.browsers import BrowsersResourceWithRawResponse + + return BrowsersResourceWithRawResponse(self._client.browsers) + + @cached_property + def profiles(self) -> profiles.ProfilesResourceWithRawResponse: + from .resources.profiles import ProfilesResourceWithRawResponse + + return ProfilesResourceWithRawResponse(self._client.profiles) + + @cached_property + def proxies(self) -> proxies.ProxiesResourceWithRawResponse: + from .resources.proxies import ProxiesResourceWithRawResponse + + return ProxiesResourceWithRawResponse(self._client.proxies) + + @cached_property + def extensions(self) -> extensions.ExtensionsResourceWithRawResponse: + from .resources.extensions import ExtensionsResourceWithRawResponse + + return ExtensionsResourceWithRawResponse(self._client.extensions) + + @cached_property + def browser_pools(self) -> browser_pools.BrowserPoolsResourceWithRawResponse: + from .resources.browser_pools import BrowserPoolsResourceWithRawResponse + + return BrowserPoolsResourceWithRawResponse(self._client.browser_pools) + + @cached_property + def agents(self) -> agents.AgentsResourceWithRawResponse: + from .resources.agents import AgentsResourceWithRawResponse + + return AgentsResourceWithRawResponse(self._client.agents) + + @cached_property + def credentials(self) -> credentials.CredentialsResourceWithRawResponse: + from .resources.credentials import CredentialsResourceWithRawResponse + + return CredentialsResourceWithRawResponse(self._client.credentials) + + +class AsyncKernelWithRawResponse: + _client: AsyncKernel + + def __init__(self, client: AsyncKernel) -> None: + self._client = client + + @cached_property + def deployments(self) -> deployments.AsyncDeploymentsResourceWithRawResponse: + from .resources.deployments import AsyncDeploymentsResourceWithRawResponse + + return AsyncDeploymentsResourceWithRawResponse(self._client.deployments) + + @cached_property + def apps(self) -> apps.AsyncAppsResourceWithRawResponse: + from .resources.apps import AsyncAppsResourceWithRawResponse + + return AsyncAppsResourceWithRawResponse(self._client.apps) + + @cached_property + def invocations(self) -> invocations.AsyncInvocationsResourceWithRawResponse: + from .resources.invocations import AsyncInvocationsResourceWithRawResponse + + return AsyncInvocationsResourceWithRawResponse(self._client.invocations) + + @cached_property + def browsers(self) -> browsers.AsyncBrowsersResourceWithRawResponse: + from .resources.browsers import AsyncBrowsersResourceWithRawResponse + + return AsyncBrowsersResourceWithRawResponse(self._client.browsers) + + @cached_property + def profiles(self) -> profiles.AsyncProfilesResourceWithRawResponse: + from .resources.profiles import AsyncProfilesResourceWithRawResponse + + return AsyncProfilesResourceWithRawResponse(self._client.profiles) + + @cached_property + def proxies(self) -> proxies.AsyncProxiesResourceWithRawResponse: + from .resources.proxies import AsyncProxiesResourceWithRawResponse + + return AsyncProxiesResourceWithRawResponse(self._client.proxies) + + @cached_property + def extensions(self) -> extensions.AsyncExtensionsResourceWithRawResponse: + from .resources.extensions import AsyncExtensionsResourceWithRawResponse + + return AsyncExtensionsResourceWithRawResponse(self._client.extensions) + + @cached_property + def browser_pools(self) -> browser_pools.AsyncBrowserPoolsResourceWithRawResponse: + from .resources.browser_pools import AsyncBrowserPoolsResourceWithRawResponse + + return AsyncBrowserPoolsResourceWithRawResponse(self._client.browser_pools) + + 
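`_make_status_error` on both clients above converts 4xx/5xx responses into the typed exceptions from `kernel._exceptions` (404 -> `NotFoundError`, 429 -> `RateLimitError`, 5xx -> `InternalServerError`, anything unmapped -> `APIStatusError`), while transport failures surface as `APIConnectionError` or its `APITimeoutError` subclass. A hedged sketch of how calling code might branch on them, assuming the exception classes are re-exported from the package root as is conventional (otherwise they live in `kernel._exceptions`); the resource call itself is a placeholder:

import kernel
from kernel import Kernel  # assumed package-root exports

client = Kernel()  # api_key read from KERNEL_API_KEY

try:
    ...  # e.g. a client.<resource>.<method>(...) call; method names are defined elsewhere in this diff
except kernel.NotFoundError as exc:
    print("no such resource:", exc.status_code)                # 404
except kernel.RateLimitError:
    print("rate limited; consider backing off and retrying")   # 429
except kernel.APIStatusError as exc:
    print("API error", exc.status_code, exc.response.text)     # any other 4xx/5xx
except kernel.APIConnectionError:
    print("could not reach the API")                            # connection errors and timeouts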
@cached_property + def agents(self) -> agents.AsyncAgentsResourceWithRawResponse: + from .resources.agents import AsyncAgentsResourceWithRawResponse + + return AsyncAgentsResourceWithRawResponse(self._client.agents) + + @cached_property + def credentials(self) -> credentials.AsyncCredentialsResourceWithRawResponse: + from .resources.credentials import AsyncCredentialsResourceWithRawResponse + + return AsyncCredentialsResourceWithRawResponse(self._client.credentials) + + +class KernelWithStreamedResponse: + _client: Kernel + + def __init__(self, client: Kernel) -> None: + self._client = client + + @cached_property + def deployments(self) -> deployments.DeploymentsResourceWithStreamingResponse: + from .resources.deployments import DeploymentsResourceWithStreamingResponse + + return DeploymentsResourceWithStreamingResponse(self._client.deployments) + + @cached_property + def apps(self) -> apps.AppsResourceWithStreamingResponse: + from .resources.apps import AppsResourceWithStreamingResponse + + return AppsResourceWithStreamingResponse(self._client.apps) + + @cached_property + def invocations(self) -> invocations.InvocationsResourceWithStreamingResponse: + from .resources.invocations import InvocationsResourceWithStreamingResponse + + return InvocationsResourceWithStreamingResponse(self._client.invocations) + + @cached_property + def browsers(self) -> browsers.BrowsersResourceWithStreamingResponse: + from .resources.browsers import BrowsersResourceWithStreamingResponse + + return BrowsersResourceWithStreamingResponse(self._client.browsers) + + @cached_property + def profiles(self) -> profiles.ProfilesResourceWithStreamingResponse: + from .resources.profiles import ProfilesResourceWithStreamingResponse + + return ProfilesResourceWithStreamingResponse(self._client.profiles) + + @cached_property + def proxies(self) -> proxies.ProxiesResourceWithStreamingResponse: + from .resources.proxies import ProxiesResourceWithStreamingResponse + + return ProxiesResourceWithStreamingResponse(self._client.proxies) + + @cached_property + def extensions(self) -> extensions.ExtensionsResourceWithStreamingResponse: + from .resources.extensions import ExtensionsResourceWithStreamingResponse + + return ExtensionsResourceWithStreamingResponse(self._client.extensions) + + @cached_property + def browser_pools(self) -> browser_pools.BrowserPoolsResourceWithStreamingResponse: + from .resources.browser_pools import BrowserPoolsResourceWithStreamingResponse + + return BrowserPoolsResourceWithStreamingResponse(self._client.browser_pools) + + @cached_property + def agents(self) -> agents.AgentsResourceWithStreamingResponse: + from .resources.agents import AgentsResourceWithStreamingResponse + + return AgentsResourceWithStreamingResponse(self._client.agents) + + @cached_property + def credentials(self) -> credentials.CredentialsResourceWithStreamingResponse: + from .resources.credentials import CredentialsResourceWithStreamingResponse + + return CredentialsResourceWithStreamingResponse(self._client.credentials) + + +class AsyncKernelWithStreamedResponse: + _client: AsyncKernel + + def __init__(self, client: AsyncKernel) -> None: + self._client = client + + @cached_property + def deployments(self) -> deployments.AsyncDeploymentsResourceWithStreamingResponse: + from .resources.deployments import AsyncDeploymentsResourceWithStreamingResponse + + return AsyncDeploymentsResourceWithStreamingResponse(self._client.deployments) + + @cached_property + def apps(self) -> apps.AsyncAppsResourceWithStreamingResponse: + from 
.resources.apps import AsyncAppsResourceWithStreamingResponse + + return AsyncAppsResourceWithStreamingResponse(self._client.apps) + + @cached_property + def invocations(self) -> invocations.AsyncInvocationsResourceWithStreamingResponse: + from .resources.invocations import AsyncInvocationsResourceWithStreamingResponse + + return AsyncInvocationsResourceWithStreamingResponse(self._client.invocations) + + @cached_property + def browsers(self) -> browsers.AsyncBrowsersResourceWithStreamingResponse: + from .resources.browsers import AsyncBrowsersResourceWithStreamingResponse + + return AsyncBrowsersResourceWithStreamingResponse(self._client.browsers) + + @cached_property + def profiles(self) -> profiles.AsyncProfilesResourceWithStreamingResponse: + from .resources.profiles import AsyncProfilesResourceWithStreamingResponse + + return AsyncProfilesResourceWithStreamingResponse(self._client.profiles) + + @cached_property + def proxies(self) -> proxies.AsyncProxiesResourceWithStreamingResponse: + from .resources.proxies import AsyncProxiesResourceWithStreamingResponse + + return AsyncProxiesResourceWithStreamingResponse(self._client.proxies) + + @cached_property + def extensions(self) -> extensions.AsyncExtensionsResourceWithStreamingResponse: + from .resources.extensions import AsyncExtensionsResourceWithStreamingResponse + + return AsyncExtensionsResourceWithStreamingResponse(self._client.extensions) + + @cached_property + def browser_pools(self) -> browser_pools.AsyncBrowserPoolsResourceWithStreamingResponse: + from .resources.browser_pools import AsyncBrowserPoolsResourceWithStreamingResponse + + return AsyncBrowserPoolsResourceWithStreamingResponse(self._client.browser_pools) + + @cached_property + def agents(self) -> agents.AsyncAgentsResourceWithStreamingResponse: + from .resources.agents import AsyncAgentsResourceWithStreamingResponse + + return AsyncAgentsResourceWithStreamingResponse(self._client.agents) + + @cached_property + def credentials(self) -> credentials.AsyncCredentialsResourceWithStreamingResponse: + from .resources.credentials import AsyncCredentialsResourceWithStreamingResponse + + return AsyncCredentialsResourceWithStreamingResponse(self._client.credentials) + + +Client = Kernel + +AsyncClient = AsyncKernel diff --git a/src/kernel/_compat.py b/src/kernel/_compat.py new file mode 100644 index 0000000..bdef67f --- /dev/null +++ b/src/kernel/_compat.py @@ -0,0 +1,219 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Union, Generic, TypeVar, Callable, cast, overload +from datetime import date, datetime +from typing_extensions import Self, Literal + +import pydantic +from pydantic.fields import FieldInfo + +from ._types import IncEx, StrBytesIntFloat + +_T = TypeVar("_T") +_ModelT = TypeVar("_ModelT", bound=pydantic.BaseModel) + +# --------------- Pydantic v2, v3 compatibility --------------- + +# Pyright incorrectly reports some of our functions as overriding a method when they don't +# pyright: reportIncompatibleMethodOverride=false + +PYDANTIC_V1 = pydantic.VERSION.startswith("1.") + +if TYPE_CHECKING: + + def parse_date(value: date | StrBytesIntFloat) -> date: # noqa: ARG001 + ... + + def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime: # noqa: ARG001 + ... + + def get_args(t: type[Any]) -> tuple[Any, ...]: # noqa: ARG001 + ... + + def is_union(tp: type[Any] | None) -> bool: # noqa: ARG001 + ... + + def get_origin(t: type[Any]) -> type[Any] | None: # noqa: ARG001 + ... 
+ + def is_literal_type(type_: type[Any]) -> bool: # noqa: ARG001 + ... + + def is_typeddict(type_: type[Any]) -> bool: # noqa: ARG001 + ... + +else: + # v1 re-exports + if PYDANTIC_V1: + from pydantic.typing import ( + get_args as get_args, + is_union as is_union, + get_origin as get_origin, + is_typeddict as is_typeddict, + is_literal_type as is_literal_type, + ) + from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime + else: + from ._utils import ( + get_args as get_args, + is_union as is_union, + get_origin as get_origin, + parse_date as parse_date, + is_typeddict as is_typeddict, + parse_datetime as parse_datetime, + is_literal_type as is_literal_type, + ) + + +# refactored config +if TYPE_CHECKING: + from pydantic import ConfigDict as ConfigDict +else: + if PYDANTIC_V1: + # TODO: provide an error message here? + ConfigDict = None + else: + from pydantic import ConfigDict as ConfigDict + + +# renamed methods / properties +def parse_obj(model: type[_ModelT], value: object) -> _ModelT: + if PYDANTIC_V1: + return cast(_ModelT, model.parse_obj(value)) # pyright: ignore[reportDeprecated, reportUnnecessaryCast] + else: + return model.model_validate(value) + + +def field_is_required(field: FieldInfo) -> bool: + if PYDANTIC_V1: + return field.required # type: ignore + return field.is_required() + + +def field_get_default(field: FieldInfo) -> Any: + value = field.get_default() + if PYDANTIC_V1: + return value + from pydantic_core import PydanticUndefined + + if value == PydanticUndefined: + return None + return value + + +def field_outer_type(field: FieldInfo) -> Any: + if PYDANTIC_V1: + return field.outer_type_ # type: ignore + return field.annotation + + +def get_model_config(model: type[pydantic.BaseModel]) -> Any: + if PYDANTIC_V1: + return model.__config__ # type: ignore + return model.model_config + + +def get_model_fields(model: type[pydantic.BaseModel]) -> dict[str, FieldInfo]: + if PYDANTIC_V1: + return model.__fields__ # type: ignore + return model.model_fields + + +def model_copy(model: _ModelT, *, deep: bool = False) -> _ModelT: + if PYDANTIC_V1: + return model.copy(deep=deep) # type: ignore + return model.model_copy(deep=deep) + + +def model_json(model: pydantic.BaseModel, *, indent: int | None = None) -> str: + if PYDANTIC_V1: + return model.json(indent=indent) # type: ignore + return model.model_dump_json(indent=indent) + + +def model_dump( + model: pydantic.BaseModel, + *, + exclude: IncEx | None = None, + exclude_unset: bool = False, + exclude_defaults: bool = False, + warnings: bool = True, + mode: Literal["json", "python"] = "python", +) -> dict[str, Any]: + if (not PYDANTIC_V1) or hasattr(model, "model_dump"): + return model.model_dump( + mode=mode, + exclude=exclude, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + # warnings are not supported in Pydantic v1 + warnings=True if PYDANTIC_V1 else warnings, + ) + return cast( + "dict[str, Any]", + model.dict( # pyright: ignore[reportDeprecated, reportUnnecessaryCast] + exclude=exclude, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + ), + ) + + +def model_parse(model: type[_ModelT], data: Any) -> _ModelT: + if PYDANTIC_V1: + return model.parse_obj(data) # pyright: ignore[reportDeprecated] + return model.model_validate(data) + + +# generic models +if TYPE_CHECKING: + + class GenericModel(pydantic.BaseModel): ... + +else: + if PYDANTIC_V1: + import pydantic.generics + + class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ... 
+ else: + # there no longer needs to be a distinction in v2 but + # we still have to create our own subclass to avoid + # inconsistent MRO ordering errors + class GenericModel(pydantic.BaseModel): ... + + +# cached properties +if TYPE_CHECKING: + cached_property = property + + # we define a separate type (copied from typeshed) + # that represents that `cached_property` is `set`able + # at runtime, which differs from `@property`. + # + # this is a separate type as editors likely special case + # `@property` and we don't want to cause issues just to have + # more helpful internal types. + + class typed_cached_property(Generic[_T]): + func: Callable[[Any], _T] + attrname: str | None + + def __init__(self, func: Callable[[Any], _T]) -> None: ... + + @overload + def __get__(self, instance: None, owner: type[Any] | None = None) -> Self: ... + + @overload + def __get__(self, instance: object, owner: type[Any] | None = None) -> _T: ... + + def __get__(self, instance: object, owner: type[Any] | None = None) -> _T | Self: + raise NotImplementedError() + + def __set_name__(self, owner: type[Any], name: str) -> None: ... + + # __set__ is not defined at runtime, but @cached_property is designed to be settable + def __set__(self, instance: object, value: _T) -> None: ... +else: + from functools import cached_property as cached_property + + typed_cached_property = cached_property diff --git a/src/kernel/_constants.py b/src/kernel/_constants.py new file mode 100644 index 0000000..6ddf2c7 --- /dev/null +++ b/src/kernel/_constants.py @@ -0,0 +1,14 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import httpx + +RAW_RESPONSE_HEADER = "X-Stainless-Raw-Response" +OVERRIDE_CAST_TO_HEADER = "____stainless_override_cast_to" + +# default timeout is 1 minute +DEFAULT_TIMEOUT = httpx.Timeout(timeout=60, connect=5.0) +DEFAULT_MAX_RETRIES = 2 +DEFAULT_CONNECTION_LIMITS = httpx.Limits(max_connections=100, max_keepalive_connections=20) + +INITIAL_RETRY_DELAY = 0.5 +MAX_RETRY_DELAY = 8.0 diff --git a/src/kernel/_exceptions.py b/src/kernel/_exceptions.py new file mode 100644 index 0000000..53cd14c --- /dev/null +++ b/src/kernel/_exceptions.py @@ -0,0 +1,108 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal + +import httpx + +__all__ = [ + "BadRequestError", + "AuthenticationError", + "PermissionDeniedError", + "NotFoundError", + "ConflictError", + "UnprocessableEntityError", + "RateLimitError", + "InternalServerError", +] + + +class KernelError(Exception): + pass + + +class APIError(KernelError): + message: str + request: httpx.Request + + body: object | None + """The API response body. + + If the API responded with a valid JSON structure then this property will be the + decoded result. + + If it isn't a valid JSON structure then this will be the raw response. + + If there was no response associated with this error then it will be `None`. 
+ """ + + def __init__(self, message: str, request: httpx.Request, *, body: object | None) -> None: # noqa: ARG002 + super().__init__(message) + self.request = request + self.message = message + self.body = body + + +class APIResponseValidationError(APIError): + response: httpx.Response + status_code: int + + def __init__(self, response: httpx.Response, body: object | None, *, message: str | None = None) -> None: + super().__init__(message or "Data returned by API invalid for expected schema.", response.request, body=body) + self.response = response + self.status_code = response.status_code + + +class APIStatusError(APIError): + """Raised when an API response has a status code of 4xx or 5xx.""" + + response: httpx.Response + status_code: int + + def __init__(self, message: str, *, response: httpx.Response, body: object | None) -> None: + super().__init__(message, response.request, body=body) + self.response = response + self.status_code = response.status_code + + +class APIConnectionError(APIError): + def __init__(self, *, message: str = "Connection error.", request: httpx.Request) -> None: + super().__init__(message, request, body=None) + + +class APITimeoutError(APIConnectionError): + def __init__(self, request: httpx.Request) -> None: + super().__init__(message="Request timed out.", request=request) + + +class BadRequestError(APIStatusError): + status_code: Literal[400] = 400 # pyright: ignore[reportIncompatibleVariableOverride] + + +class AuthenticationError(APIStatusError): + status_code: Literal[401] = 401 # pyright: ignore[reportIncompatibleVariableOverride] + + +class PermissionDeniedError(APIStatusError): + status_code: Literal[403] = 403 # pyright: ignore[reportIncompatibleVariableOverride] + + +class NotFoundError(APIStatusError): + status_code: Literal[404] = 404 # pyright: ignore[reportIncompatibleVariableOverride] + + +class ConflictError(APIStatusError): + status_code: Literal[409] = 409 # pyright: ignore[reportIncompatibleVariableOverride] + + +class UnprocessableEntityError(APIStatusError): + status_code: Literal[422] = 422 # pyright: ignore[reportIncompatibleVariableOverride] + + +class RateLimitError(APIStatusError): + status_code: Literal[429] = 429 # pyright: ignore[reportIncompatibleVariableOverride] + + +class InternalServerError(APIStatusError): + pass diff --git a/src/kernel/_files.py b/src/kernel/_files.py new file mode 100644 index 0000000..bbef8bf --- /dev/null +++ b/src/kernel/_files.py @@ -0,0 +1,123 @@ +from __future__ import annotations + +import io +import os +import pathlib +from typing import overload +from typing_extensions import TypeGuard + +import anyio + +from ._types import ( + FileTypes, + FileContent, + RequestFiles, + HttpxFileTypes, + Base64FileInput, + HttpxFileContent, + HttpxRequestFiles, +) +from ._utils import is_tuple_t, is_mapping_t, is_sequence_t + + +def is_base64_file_input(obj: object) -> TypeGuard[Base64FileInput]: + return isinstance(obj, io.IOBase) or isinstance(obj, os.PathLike) + + +def is_file_content(obj: object) -> TypeGuard[FileContent]: + return ( + isinstance(obj, bytes) or isinstance(obj, tuple) or isinstance(obj, io.IOBase) or isinstance(obj, os.PathLike) + ) + + +def assert_is_file_content(obj: object, *, key: str | None = None) -> None: + if not is_file_content(obj): + prefix = f"Expected entry at `{key}`" if key is not None else f"Expected file input `{obj!r}`" + raise RuntimeError( + f"{prefix} to be bytes, an io.IOBase instance, PathLike or a tuple but received {type(obj)} instead. 
See https://github.com/kernel/kernel-python-sdk/tree/main#file-uploads" + ) from None + + +@overload +def to_httpx_files(files: None) -> None: ... + + +@overload +def to_httpx_files(files: RequestFiles) -> HttpxRequestFiles: ... + + +def to_httpx_files(files: RequestFiles | None) -> HttpxRequestFiles | None: + if files is None: + return None + + if is_mapping_t(files): + files = {key: _transform_file(file) for key, file in files.items()} + elif is_sequence_t(files): + files = [(key, _transform_file(file)) for key, file in files] + else: + raise TypeError(f"Unexpected file type input {type(files)}, expected mapping or sequence") + + return files + + +def _transform_file(file: FileTypes) -> HttpxFileTypes: + if is_file_content(file): + if isinstance(file, os.PathLike): + path = pathlib.Path(file) + return (path.name, path.read_bytes()) + + return file + + if is_tuple_t(file): + return (file[0], read_file_content(file[1]), *file[2:]) + + raise TypeError(f"Expected file types input to be a FileContent type or to be a tuple") + + +def read_file_content(file: FileContent) -> HttpxFileContent: + if isinstance(file, os.PathLike): + return pathlib.Path(file).read_bytes() + return file + + +@overload +async def async_to_httpx_files(files: None) -> None: ... + + +@overload +async def async_to_httpx_files(files: RequestFiles) -> HttpxRequestFiles: ... + + +async def async_to_httpx_files(files: RequestFiles | None) -> HttpxRequestFiles | None: + if files is None: + return None + + if is_mapping_t(files): + files = {key: await _async_transform_file(file) for key, file in files.items()} + elif is_sequence_t(files): + files = [(key, await _async_transform_file(file)) for key, file in files] + else: + raise TypeError(f"Unexpected file type input {type(files)}, expected mapping or sequence") + + return files + + +async def _async_transform_file(file: FileTypes) -> HttpxFileTypes: + if is_file_content(file): + if isinstance(file, os.PathLike): + path = anyio.Path(file) + return (path.name, await path.read_bytes()) + + return file + + if is_tuple_t(file): + return (file[0], await async_read_file_content(file[1]), *file[2:]) + + raise TypeError(f"Expected file types input to be a FileContent type or to be a tuple") + + +async def async_read_file_content(file: FileContent) -> HttpxFileContent: + if isinstance(file, os.PathLike): + return await anyio.Path(file).read_bytes() + + return file diff --git a/src/kernel/_models.py b/src/kernel/_models.py new file mode 100644 index 0000000..ca9500b --- /dev/null +++ b/src/kernel/_models.py @@ -0,0 +1,857 @@ +from __future__ import annotations + +import os +import inspect +import weakref +from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, Optional, cast +from datetime import date, datetime +from typing_extensions import ( + List, + Unpack, + Literal, + ClassVar, + Protocol, + Required, + ParamSpec, + TypedDict, + TypeGuard, + final, + override, + runtime_checkable, +) + +import pydantic +from pydantic.fields import FieldInfo + +from ._types import ( + Body, + IncEx, + Query, + ModelT, + Headers, + Timeout, + NotGiven, + AnyMapping, + HttpxRequestFiles, +) +from ._utils import ( + PropertyInfo, + is_list, + is_given, + json_safe, + lru_cache, + is_mapping, + parse_date, + coerce_boolean, + parse_datetime, + strip_not_given, + extract_type_arg, + is_annotated_type, + is_type_alias_type, + strip_annotated_type, +) +from ._compat import ( + PYDANTIC_V1, + ConfigDict, + GenericModel as BaseGenericModel, + get_args, + is_union, + parse_obj, + 
get_origin, + is_literal_type, + get_model_config, + get_model_fields, + field_get_default, +) +from ._constants import RAW_RESPONSE_HEADER + +if TYPE_CHECKING: + from pydantic_core.core_schema import ModelField, ModelSchema, LiteralSchema, ModelFieldsSchema + +__all__ = ["BaseModel", "GenericModel"] + +_T = TypeVar("_T") +_BaseModelT = TypeVar("_BaseModelT", bound="BaseModel") + +P = ParamSpec("P") + + +@runtime_checkable +class _ConfigProtocol(Protocol): + allow_population_by_field_name: bool + + +class BaseModel(pydantic.BaseModel): + if PYDANTIC_V1: + + @property + @override + def model_fields_set(self) -> set[str]: + # a forwards-compat shim for pydantic v2 + return self.__fields_set__ # type: ignore + + class Config(pydantic.BaseConfig): # pyright: ignore[reportDeprecated] + extra: Any = pydantic.Extra.allow # type: ignore + else: + model_config: ClassVar[ConfigDict] = ConfigDict( + extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true")) + ) + + def to_dict( + self, + *, + mode: Literal["json", "python"] = "python", + use_api_names: bool = True, + exclude_unset: bool = True, + exclude_defaults: bool = False, + exclude_none: bool = False, + warnings: bool = True, + ) -> dict[str, object]: + """Recursively generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + + By default, fields that were not set by the API will not be included, + and keys will match the API response, *not* the property names from the model. + + For example, if the API responds with `"fooBar": true` but we've defined a `foo_bar: bool` property, + the output will use the `"fooBar"` key (unless `use_api_names=False` is passed). + + Args: + mode: + If mode is 'json', the dictionary will only contain JSON serializable types. e.g. `datetime` will be turned into a string, `"2024-3-22T18:11:19.117000Z"`. + If mode is 'python', the dictionary may contain any Python objects. e.g. `datetime(2024, 3, 22)` + + use_api_names: Whether to use the key that the API responded with or the property name. Defaults to `True`. + exclude_unset: Whether to exclude fields that have not been explicitly set. + exclude_defaults: Whether to exclude fields that are set to their default value from the output. + exclude_none: Whether to exclude fields that have a value of `None` from the output. + warnings: Whether to log warnings when invalid fields are encountered. This is only supported in Pydantic v2. + """ + return self.model_dump( + mode=mode, + by_alias=use_api_names, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + warnings=warnings, + ) + + def to_json( + self, + *, + indent: int | None = 2, + use_api_names: bool = True, + exclude_unset: bool = True, + exclude_defaults: bool = False, + exclude_none: bool = False, + warnings: bool = True, + ) -> str: + """Generates a JSON string representing this model as it would be received from or sent to the API (but with indentation). + + By default, fields that were not set by the API will not be included, + and keys will match the API response, *not* the property names from the model. + + For example, if the API responds with `"fooBar": true` but we've defined a `foo_bar: bool` property, + the output will use the `"fooBar"` key (unless `use_api_names=False` is passed). + + Args: + indent: Indentation to use in the JSON output. If `None` is passed, the output will be compact. 
Defaults to `2` + use_api_names: Whether to use the key that the API responded with or the property name. Defaults to `True`. + exclude_unset: Whether to exclude fields that have not been explicitly set. + exclude_defaults: Whether to exclude fields that have the default value. + exclude_none: Whether to exclude fields that have a value of `None`. + warnings: Whether to show any warnings that occurred during serialization. This is only supported in Pydantic v2. + """ + return self.model_dump_json( + indent=indent, + by_alias=use_api_names, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + warnings=warnings, + ) + + @override + def __str__(self) -> str: + # mypy complains about an invalid self arg + return f"{self.__repr_name__()}({self.__repr_str__(', ')})" # type: ignore[misc] + + # Override the 'construct' method in a way that supports recursive parsing without validation. + # Based on https://github.com/samuelcolvin/pydantic/issues/1168#issuecomment-817742836. + @classmethod + @override + def construct( # pyright: ignore[reportIncompatibleMethodOverride] + __cls: Type[ModelT], + _fields_set: set[str] | None = None, + **values: object, + ) -> ModelT: + m = __cls.__new__(__cls) + fields_values: dict[str, object] = {} + + config = get_model_config(__cls) + populate_by_name = ( + config.allow_population_by_field_name + if isinstance(config, _ConfigProtocol) + else config.get("populate_by_name") + ) + + if _fields_set is None: + _fields_set = set() + + model_fields = get_model_fields(__cls) + for name, field in model_fields.items(): + key = field.alias + if key is None or (key not in values and populate_by_name): + key = name + + if key in values: + fields_values[name] = _construct_field(value=values[key], field=field, key=key) + _fields_set.add(name) + else: + fields_values[name] = field_get_default(field) + + extra_field_type = _get_extra_fields_type(__cls) + + _extra = {} + for key, value in values.items(): + if key not in model_fields: + parsed = construct_type(value=value, type_=extra_field_type) if extra_field_type is not None else value + + if PYDANTIC_V1: + _fields_set.add(key) + fields_values[key] = parsed + else: + _extra[key] = parsed + + object.__setattr__(m, "__dict__", fields_values) + + if PYDANTIC_V1: + # init_private_attributes() does not exist in v2 + m._init_private_attributes() # type: ignore + + # copied from Pydantic v1's `construct()` method + object.__setattr__(m, "__fields_set__", _fields_set) + else: + # these properties are copied from Pydantic's `model_construct()` method + object.__setattr__(m, "__pydantic_private__", None) + object.__setattr__(m, "__pydantic_extra__", _extra) + object.__setattr__(m, "__pydantic_fields_set__", _fields_set) + + return m + + if not TYPE_CHECKING: + # type checkers incorrectly complain about this assignment + # because the type signatures are technically different + # although not in practice + model_construct = construct + + if PYDANTIC_V1: + # we define aliases for some of the new pydantic v2 methods so + # that we can just document these methods without having to specify + # a specific pydantic version as some users may not know which + # pydantic version they are currently using + + @override + def model_dump( + self, + *, + mode: Literal["json", "python"] | str = "python", + include: IncEx | None = None, + exclude: IncEx | None = None, + context: Any | None = None, + by_alias: bool | None = None, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: 
bool = False, + exclude_computed_fields: bool = False, + round_trip: bool = False, + warnings: bool | Literal["none", "warn", "error"] = True, + fallback: Callable[[Any], Any] | None = None, + serialize_as_any: bool = False, + ) -> dict[str, Any]: + """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump + + Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + + Args: + mode: The mode in which `to_python` should run. + If mode is 'json', the output will only contain JSON serializable types. + If mode is 'python', the output may contain non-JSON-serializable Python objects. + include: A set of fields to include in the output. + exclude: A set of fields to exclude from the output. + context: Additional context to pass to the serializer. + by_alias: Whether to use the field's alias in the dictionary key if defined. + exclude_unset: Whether to exclude fields that have not been explicitly set. + exclude_defaults: Whether to exclude fields that are set to their default value. + exclude_none: Whether to exclude fields that have a value of `None`. + exclude_computed_fields: Whether to exclude computed fields. + While this can be useful for round-tripping, it is usually recommended to use the dedicated + `round_trip` parameter instead. + round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T]. + warnings: How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, + "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. + fallback: A function to call when an unknown value is encountered. If not provided, + a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised. + serialize_as_any: Whether to serialize fields with duck-typing serialization behavior. + + Returns: + A dictionary representation of the model. 
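For illustration, a minimal sketch of the alias and `mode` behaviour described above (the `Widget` model and its `fooBar` alias are hypothetical, not part of this SDK):

```py
import pydantic

from kernel import BaseModel


class Widget(BaseModel):
    foo_bar: bool = pydantic.Field(alias="fooBar")


widget = Widget.construct(fooBar=True)
widget.model_dump()               # {'foo_bar': True}
widget.model_dump(by_alias=True)  # {'fooBar': True}
widget.model_dump(mode="json")    # values are coerced to JSON-serialisable types
```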
+ """ + if mode not in {"json", "python"}: + raise ValueError("mode must be either 'json' or 'python'") + if round_trip != False: + raise ValueError("round_trip is only supported in Pydantic v2") + if warnings != True: + raise ValueError("warnings is only supported in Pydantic v2") + if context is not None: + raise ValueError("context is only supported in Pydantic v2") + if serialize_as_any != False: + raise ValueError("serialize_as_any is only supported in Pydantic v2") + if fallback is not None: + raise ValueError("fallback is only supported in Pydantic v2") + if exclude_computed_fields != False: + raise ValueError("exclude_computed_fields is only supported in Pydantic v2") + dumped = super().dict( # pyright: ignore[reportDeprecated] + include=include, + exclude=exclude, + by_alias=by_alias if by_alias is not None else False, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + + return cast("dict[str, Any]", json_safe(dumped)) if mode == "json" else dumped + + @override + def model_dump_json( + self, + *, + indent: int | None = None, + ensure_ascii: bool = False, + include: IncEx | None = None, + exclude: IncEx | None = None, + context: Any | None = None, + by_alias: bool | None = None, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + exclude_computed_fields: bool = False, + round_trip: bool = False, + warnings: bool | Literal["none", "warn", "error"] = True, + fallback: Callable[[Any], Any] | None = None, + serialize_as_any: bool = False, + ) -> str: + """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump_json + + Generates a JSON representation of the model using Pydantic's `to_json` method. + + Args: + indent: Indentation to use in the JSON output. If None is passed, the output will be compact. + include: Field(s) to include in the JSON output. Can take either a string or set of strings. + exclude: Field(s) to exclude from the JSON output. Can take either a string or set of strings. + by_alias: Whether to serialize using field aliases. + exclude_unset: Whether to exclude fields that have not been explicitly set. + exclude_defaults: Whether to exclude fields that have the default value. + exclude_none: Whether to exclude fields that have a value of `None`. + round_trip: Whether to use serialization/deserialization between JSON and class instance. + warnings: Whether to show any warnings that occurred during serialization. + + Returns: + A JSON string representation of the model. 
+ """ + if round_trip != False: + raise ValueError("round_trip is only supported in Pydantic v2") + if warnings != True: + raise ValueError("warnings is only supported in Pydantic v2") + if context is not None: + raise ValueError("context is only supported in Pydantic v2") + if serialize_as_any != False: + raise ValueError("serialize_as_any is only supported in Pydantic v2") + if fallback is not None: + raise ValueError("fallback is only supported in Pydantic v2") + if ensure_ascii != False: + raise ValueError("ensure_ascii is only supported in Pydantic v2") + if exclude_computed_fields != False: + raise ValueError("exclude_computed_fields is only supported in Pydantic v2") + return super().json( # type: ignore[reportDeprecated] + indent=indent, + include=include, + exclude=exclude, + by_alias=by_alias if by_alias is not None else False, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + + +def _construct_field(value: object, field: FieldInfo, key: str) -> object: + if value is None: + return field_get_default(field) + + if PYDANTIC_V1: + type_ = cast(type, field.outer_type_) # type: ignore + else: + type_ = field.annotation # type: ignore + + if type_ is None: + raise RuntimeError(f"Unexpected field type is None for {key}") + + return construct_type(value=value, type_=type_, metadata=getattr(field, "metadata", None)) + + +def _get_extra_fields_type(cls: type[pydantic.BaseModel]) -> type | None: + if PYDANTIC_V1: + # TODO + return None + + schema = cls.__pydantic_core_schema__ + if schema["type"] == "model": + fields = schema["schema"] + if fields["type"] == "model-fields": + extras = fields.get("extras_schema") + if extras and "cls" in extras: + # mypy can't narrow the type + return extras["cls"] # type: ignore[no-any-return] + + return None + + +def is_basemodel(type_: type) -> bool: + """Returns whether or not the given type is either a `BaseModel` or a union of `BaseModel`""" + if is_union(type_): + for variant in get_args(type_): + if is_basemodel(variant): + return True + + return False + + return is_basemodel_type(type_) + + +def is_basemodel_type(type_: type) -> TypeGuard[type[BaseModel] | type[GenericModel]]: + origin = get_origin(type_) or type_ + if not inspect.isclass(origin): + return False + return issubclass(origin, BaseModel) or issubclass(origin, GenericModel) + + +def build( + base_model_cls: Callable[P, _BaseModelT], + *args: P.args, + **kwargs: P.kwargs, +) -> _BaseModelT: + """Construct a BaseModel class without validation. + + This is useful for cases where you need to instantiate a `BaseModel` + from an API response as this provides type-safe params which isn't supported + by helpers like `construct_type()`. + + ```py + build(MyModel, my_field_a="foo", my_field_b=123) + ``` + """ + if args: + raise TypeError( + "Received positional arguments which are not supported; Keyword arguments must be used instead", + ) + + return cast(_BaseModelT, construct_type(type_=base_model_cls, value=kwargs)) + + +def construct_type_unchecked(*, value: object, type_: type[_T]) -> _T: + """Loose coercion to the expected type with construction of nested values. + + Note: the returned value from this function is not guaranteed to match the + given type. + """ + return cast(_T, construct_type(value=value, type_=type_)) + + +def construct_type(*, value: object, type_: object, metadata: Optional[List[Any]] = None) -> object: + """Loose coercion to the expected type with construction of nested values. 
+ + If the given value does not match the expected type then it is returned as-is. + """ + + # store a reference to the original type we were given before we extract any inner + # types so that we can properly resolve forward references in `TypeAliasType` annotations + original_type = None + + # we allow `object` as the input type because otherwise, passing things like + # `Literal['value']` will be reported as a type error by type checkers + type_ = cast("type[object]", type_) + if is_type_alias_type(type_): + original_type = type_ # type: ignore[unreachable] + type_ = type_.__value__ # type: ignore[unreachable] + + # unwrap `Annotated[T, ...]` -> `T` + if metadata is not None and len(metadata) > 0: + meta: tuple[Any, ...] = tuple(metadata) + elif is_annotated_type(type_): + meta = get_args(type_)[1:] + type_ = extract_type_arg(type_, 0) + else: + meta = tuple() + + # we need to use the origin class for any types that are subscripted generics + # e.g. Dict[str, object] + origin = get_origin(type_) or type_ + args = get_args(type_) + + if is_union(origin): + try: + return validate_type(type_=cast("type[object]", original_type or type_), value=value) + except Exception: + pass + + # if the type is a discriminated union then we want to construct the right variant + # in the union, even if the data doesn't match exactly, otherwise we'd break code + # that relies on the constructed class types, e.g. + # + # class FooType: + # kind: Literal['foo'] + # value: str + # + # class BarType: + # kind: Literal['bar'] + # value: int + # + # without this block, if the data we get is something like `{'kind': 'bar', 'value': 'foo'}` then + # we'd end up constructing `FooType` when it should be `BarType`. + discriminator = _build_discriminated_union_meta(union=type_, meta_annotations=meta) + if discriminator and is_mapping(value): + variant_value = value.get(discriminator.field_alias_from or discriminator.field_name) + if variant_value and isinstance(variant_value, str): + variant_type = discriminator.mapping.get(variant_value) + if variant_type: + return construct_type(type_=variant_type, value=value) + + # if the data is not valid, use the first variant that doesn't fail while deserializing + for variant in args: + try: + return construct_type(value=value, type_=variant) + except Exception: + continue + + raise RuntimeError(f"Could not convert data into a valid instance of {type_}") + + if origin == dict: + if not is_mapping(value): + return value + + _, items_type = get_args(type_) # Dict[_, items_type] + return {key: construct_type(value=item, type_=items_type) for key, item in value.items()} + + if ( + not is_literal_type(type_) + and inspect.isclass(origin) + and (issubclass(origin, BaseModel) or issubclass(origin, GenericModel)) + ): + if is_list(value): + return [cast(Any, type_).construct(**entry) if is_mapping(entry) else entry for entry in value] + + if is_mapping(value): + if issubclass(type_, BaseModel): + return type_.construct(**value) # type: ignore[arg-type] + + return cast(Any, type_).construct(**value) + + if origin == list: + if not is_list(value): + return value + + inner_type = args[0] # List[inner_type] + return [construct_type(value=entry, type_=inner_type) for entry in value] + + if origin == float: + if isinstance(value, int): + coerced = float(value) + if coerced != value: + return value + return coerced + + return value + + if type_ == datetime: + try: + return parse_datetime(value) # type: ignore + except Exception: + return value + + if type_ == date: + try: + return 
parse_date(value) # type: ignore + except Exception: + return value + + return value + + +@runtime_checkable +class CachedDiscriminatorType(Protocol): + __discriminator__: DiscriminatorDetails + + +DISCRIMINATOR_CACHE: weakref.WeakKeyDictionary[type, DiscriminatorDetails] = weakref.WeakKeyDictionary() + + +class DiscriminatorDetails: + field_name: str + """The name of the discriminator field in the variant class, e.g. + + ```py + class Foo(BaseModel): + type: Literal['foo'] + ``` + + Will result in field_name='type' + """ + + field_alias_from: str | None + """The name of the discriminator field in the API response, e.g. + + ```py + class Foo(BaseModel): + type: Literal['foo'] = Field(alias='type_from_api') + ``` + + Will result in field_alias_from='type_from_api' + """ + + mapping: dict[str, type] + """Mapping of discriminator value to variant type, e.g. + + {'foo': FooVariant, 'bar': BarVariant} + """ + + def __init__( + self, + *, + mapping: dict[str, type], + discriminator_field: str, + discriminator_alias: str | None, + ) -> None: + self.mapping = mapping + self.field_name = discriminator_field + self.field_alias_from = discriminator_alias + + +def _build_discriminated_union_meta(*, union: type, meta_annotations: tuple[Any, ...]) -> DiscriminatorDetails | None: + cached = DISCRIMINATOR_CACHE.get(union) + if cached is not None: + return cached + + discriminator_field_name: str | None = None + + for annotation in meta_annotations: + if isinstance(annotation, PropertyInfo) and annotation.discriminator is not None: + discriminator_field_name = annotation.discriminator + break + + if not discriminator_field_name: + return None + + mapping: dict[str, type] = {} + discriminator_alias: str | None = None + + for variant in get_args(union): + variant = strip_annotated_type(variant) + if is_basemodel_type(variant): + if PYDANTIC_V1: + field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name) # pyright: ignore[reportDeprecated, reportUnnecessaryCast] + if not field_info: + continue + + # Note: if one variant defines an alias then they all should + discriminator_alias = field_info.alias + + if (annotation := getattr(field_info, "annotation", None)) and is_literal_type(annotation): + for entry in get_args(annotation): + if isinstance(entry, str): + mapping[entry] = variant + else: + field = _extract_field_schema_pv2(variant, discriminator_field_name) + if not field: + continue + + # Note: if one variant defines an alias then they all should + discriminator_alias = field.get("serialization_alias") + + field_schema = field["schema"] + + if field_schema["type"] == "literal": + for entry in cast("LiteralSchema", field_schema)["expected"]: + if isinstance(entry, str): + mapping[entry] = variant + + if not mapping: + return None + + details = DiscriminatorDetails( + mapping=mapping, + discriminator_field=discriminator_field_name, + discriminator_alias=discriminator_alias, + ) + DISCRIMINATOR_CACHE.setdefault(union, details) + return details + + +def _extract_field_schema_pv2(model: type[BaseModel], field_name: str) -> ModelField | None: + schema = model.__pydantic_core_schema__ + if schema["type"] == "definitions": + schema = schema["schema"] + + if schema["type"] != "model": + return None + + schema = cast("ModelSchema", schema) + fields_schema = schema["schema"] + if fields_schema["type"] != "model-fields": + return None + + fields_schema = cast("ModelFieldsSchema", fields_schema) + field = fields_schema["fields"].get(field_name) + if not field: + return None + + return 
cast("ModelField", field) # pyright: ignore[reportUnnecessaryCast] + + +def validate_type(*, type_: type[_T], value: object) -> _T: + """Strict validation that the given value matches the expected type""" + if inspect.isclass(type_) and issubclass(type_, pydantic.BaseModel): + return cast(_T, parse_obj(type_, value)) + + return cast(_T, _validate_non_model_type(type_=type_, value=value)) + + +def set_pydantic_config(typ: Any, config: pydantic.ConfigDict) -> None: + """Add a pydantic config for the given type. + + Note: this is a no-op on Pydantic v1. + """ + setattr(typ, "__pydantic_config__", config) # noqa: B010 + + +# our use of subclassing here causes weirdness for type checkers, +# so we just pretend that we don't subclass +if TYPE_CHECKING: + GenericModel = BaseModel +else: + + class GenericModel(BaseGenericModel, BaseModel): + pass + + +if not PYDANTIC_V1: + from pydantic import TypeAdapter as _TypeAdapter + + _CachedTypeAdapter = cast("TypeAdapter[object]", lru_cache(maxsize=None)(_TypeAdapter)) + + if TYPE_CHECKING: + from pydantic import TypeAdapter + else: + TypeAdapter = _CachedTypeAdapter + + def _validate_non_model_type(*, type_: type[_T], value: object) -> _T: + return TypeAdapter(type_).validate_python(value) + +elif not TYPE_CHECKING: # TODO: condition is weird + + class RootModel(GenericModel, Generic[_T]): + """Used as a placeholder to easily convert runtime types to a Pydantic format + to provide validation. + + For example: + ```py + validated = RootModel[int](__root__="5").__root__ + # validated: 5 + ``` + """ + + __root__: _T + + def _validate_non_model_type(*, type_: type[_T], value: object) -> _T: + model = _create_pydantic_model(type_).validate(value) + return cast(_T, model.__root__) + + def _create_pydantic_model(type_: _T) -> Type[RootModel[_T]]: + return RootModel[type_] # type: ignore + + +class FinalRequestOptionsInput(TypedDict, total=False): + method: Required[str] + url: Required[str] + params: Query + headers: Headers + max_retries: int + timeout: float | Timeout | None + files: HttpxRequestFiles | None + idempotency_key: str + json_data: Body + extra_json: AnyMapping + follow_redirects: bool + + +@final +class FinalRequestOptions(pydantic.BaseModel): + method: str + url: str + params: Query = {} + headers: Union[Headers, NotGiven] = NotGiven() + max_retries: Union[int, NotGiven] = NotGiven() + timeout: Union[float, Timeout, None, NotGiven] = NotGiven() + files: Union[HttpxRequestFiles, None] = None + idempotency_key: Union[str, None] = None + post_parser: Union[Callable[[Any], Any], NotGiven] = NotGiven() + follow_redirects: Union[bool, None] = None + + # It should be noted that we cannot use `json` here as that would override + # a BaseModel method in an incompatible fashion. + json_data: Union[Body, None] = None + extra_json: Union[AnyMapping, None] = None + + if PYDANTIC_V1: + + class Config(pydantic.BaseConfig): # pyright: ignore[reportDeprecated] + arbitrary_types_allowed: bool = True + else: + model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True) + + def get_max_retries(self, max_retries: int) -> int: + if isinstance(self.max_retries, NotGiven): + return max_retries + return self.max_retries + + def _strip_raw_response_header(self) -> None: + if not is_given(self.headers): + return + + if self.headers.get(RAW_RESPONSE_HEADER): + self.headers = {**self.headers} + self.headers.pop(RAW_RESPONSE_HEADER) + + # override the `construct` method so that we can run custom transformations. 
+ # this is necessary as we don't want to do any actual runtime type checking + # (which means we can't use validators) but we do want to ensure that `NotGiven` + # values are not present + # + # type ignore required because we're adding explicit types to `**values` + @classmethod + def construct( # type: ignore + cls, + _fields_set: set[str] | None = None, + **values: Unpack[FinalRequestOptionsInput], + ) -> FinalRequestOptions: + kwargs: dict[str, Any] = { + # we unconditionally call `strip_not_given` on any value + # as it will just ignore any non-mapping types + key: strip_not_given(value) + for key, value in values.items() + } + if PYDANTIC_V1: + return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs)) # pyright: ignore[reportDeprecated] + return super().model_construct(_fields_set, **kwargs) + + if not TYPE_CHECKING: + # type checkers incorrectly complain about this assignment + model_construct = construct diff --git a/src/kernel/_qs.py b/src/kernel/_qs.py new file mode 100644 index 0000000..ada6fd3 --- /dev/null +++ b/src/kernel/_qs.py @@ -0,0 +1,150 @@ +from __future__ import annotations + +from typing import Any, List, Tuple, Union, Mapping, TypeVar +from urllib.parse import parse_qs, urlencode +from typing_extensions import Literal, get_args + +from ._types import NotGiven, not_given +from ._utils import flatten + +_T = TypeVar("_T") + + +ArrayFormat = Literal["comma", "repeat", "indices", "brackets"] +NestedFormat = Literal["dots", "brackets"] + +PrimitiveData = Union[str, int, float, bool, None] +# this should be Data = Union[PrimitiveData, "List[Data]", "Tuple[Data]", "Mapping[str, Data]"] +# https://github.com/microsoft/pyright/issues/3555 +Data = Union[PrimitiveData, List[Any], Tuple[Any], "Mapping[str, Any]"] +Params = Mapping[str, Data] + + +class Querystring: + array_format: ArrayFormat + nested_format: NestedFormat + + def __init__( + self, + *, + array_format: ArrayFormat = "repeat", + nested_format: NestedFormat = "brackets", + ) -> None: + self.array_format = array_format + self.nested_format = nested_format + + def parse(self, query: str) -> Mapping[str, object]: + # Note: custom format syntax is not supported yet + return parse_qs(query) + + def stringify( + self, + params: Params, + *, + array_format: ArrayFormat | NotGiven = not_given, + nested_format: NestedFormat | NotGiven = not_given, + ) -> str: + return urlencode( + self.stringify_items( + params, + array_format=array_format, + nested_format=nested_format, + ) + ) + + def stringify_items( + self, + params: Params, + *, + array_format: ArrayFormat | NotGiven = not_given, + nested_format: NestedFormat | NotGiven = not_given, + ) -> list[tuple[str, str]]: + opts = Options( + qs=self, + array_format=array_format, + nested_format=nested_format, + ) + return flatten([self._stringify_item(key, value, opts) for key, value in params.items()]) + + def _stringify_item( + self, + key: str, + value: Data, + opts: Options, + ) -> list[tuple[str, str]]: + if isinstance(value, Mapping): + items: list[tuple[str, str]] = [] + nested_format = opts.nested_format + for subkey, subvalue in value.items(): + items.extend( + self._stringify_item( + # TODO: error if unknown format + f"{key}.{subkey}" if nested_format == "dots" else f"{key}[{subkey}]", + subvalue, + opts, + ) + ) + return items + + if isinstance(value, (list, tuple)): + array_format = opts.array_format + if array_format == "comma": + return [ + ( + key, + ",".join(self._primitive_value_to_str(item) for item in value if item is not None), + ), + ] + 
elif array_format == "repeat": + items = [] + for item in value: + items.extend(self._stringify_item(key, item, opts)) + return items + elif array_format == "indices": + raise NotImplementedError("The array indices format is not supported yet") + elif array_format == "brackets": + items = [] + key = key + "[]" + for item in value: + items.extend(self._stringify_item(key, item, opts)) + return items + else: + raise NotImplementedError( + f"Unknown array_format value: {array_format}, choose from {', '.join(get_args(ArrayFormat))}" + ) + + serialised = self._primitive_value_to_str(value) + if not serialised: + return [] + return [(key, serialised)] + + def _primitive_value_to_str(self, value: PrimitiveData) -> str: + # copied from httpx + if value is True: + return "true" + elif value is False: + return "false" + elif value is None: + return "" + return str(value) + + +_qs = Querystring() +parse = _qs.parse +stringify = _qs.stringify +stringify_items = _qs.stringify_items + + +class Options: + array_format: ArrayFormat + nested_format: NestedFormat + + def __init__( + self, + qs: Querystring = _qs, + *, + array_format: ArrayFormat | NotGiven = not_given, + nested_format: NestedFormat | NotGiven = not_given, + ) -> None: + self.array_format = qs.array_format if isinstance(array_format, NotGiven) else array_format + self.nested_format = qs.nested_format if isinstance(nested_format, NotGiven) else nested_format diff --git a/src/kernel/_resource.py b/src/kernel/_resource.py new file mode 100644 index 0000000..eb51ab5 --- /dev/null +++ b/src/kernel/_resource.py @@ -0,0 +1,43 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import time +from typing import TYPE_CHECKING + +import anyio + +if TYPE_CHECKING: + from ._client import Kernel, AsyncKernel + + +class SyncAPIResource: + _client: Kernel + + def __init__(self, client: Kernel) -> None: + self._client = client + self._get = client.get + self._post = client.post + self._patch = client.patch + self._put = client.put + self._delete = client.delete + self._get_api_list = client.get_api_list + + def _sleep(self, seconds: float) -> None: + time.sleep(seconds) + + +class AsyncAPIResource: + _client: AsyncKernel + + def __init__(self, client: AsyncKernel) -> None: + self._client = client + self._get = client.get + self._post = client.post + self._patch = client.patch + self._put = client.put + self._delete = client.delete + self._get_api_list = client.get_api_list + + async def _sleep(self, seconds: float) -> None: + await anyio.sleep(seconds) diff --git a/src/kernel/_response.py b/src/kernel/_response.py new file mode 100644 index 0000000..89c72c3 --- /dev/null +++ b/src/kernel/_response.py @@ -0,0 +1,830 @@ +from __future__ import annotations + +import os +import inspect +import logging +import datetime +import functools +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + Union, + Generic, + TypeVar, + Callable, + Iterator, + AsyncIterator, + cast, + overload, +) +from typing_extensions import Awaitable, ParamSpec, override, get_origin + +import anyio +import httpx +import pydantic + +from ._types import NoneType +from ._utils import is_given, extract_type_arg, is_annotated_type, is_type_alias_type, extract_type_var_from_base +from ._models import BaseModel, is_basemodel +from ._constants import RAW_RESPONSE_HEADER, OVERRIDE_CAST_TO_HEADER +from ._streaming import Stream, AsyncStream, is_stream_class_type, extract_stream_chunk_type +from 
._exceptions import KernelError, APIResponseValidationError + +if TYPE_CHECKING: + from ._models import FinalRequestOptions + from ._base_client import BaseClient + + +P = ParamSpec("P") +R = TypeVar("R") +_T = TypeVar("_T") +_APIResponseT = TypeVar("_APIResponseT", bound="APIResponse[Any]") +_AsyncAPIResponseT = TypeVar("_AsyncAPIResponseT", bound="AsyncAPIResponse[Any]") + +log: logging.Logger = logging.getLogger(__name__) + + +class BaseAPIResponse(Generic[R]): + _cast_to: type[R] + _client: BaseClient[Any, Any] + _parsed_by_type: dict[type[Any], Any] + _is_sse_stream: bool + _stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None + _options: FinalRequestOptions + + http_response: httpx.Response + + retries_taken: int + """The number of retries made. If no retries happened this will be `0`""" + + def __init__( + self, + *, + raw: httpx.Response, + cast_to: type[R], + client: BaseClient[Any, Any], + stream: bool, + stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None, + options: FinalRequestOptions, + retries_taken: int = 0, + ) -> None: + self._cast_to = cast_to + self._client = client + self._parsed_by_type = {} + self._is_sse_stream = stream + self._stream_cls = stream_cls + self._options = options + self.http_response = raw + self.retries_taken = retries_taken + + @property + def headers(self) -> httpx.Headers: + return self.http_response.headers + + @property + def http_request(self) -> httpx.Request: + """Returns the httpx Request instance associated with the current response.""" + return self.http_response.request + + @property + def status_code(self) -> int: + return self.http_response.status_code + + @property + def url(self) -> httpx.URL: + """Returns the URL for which the request was made.""" + return self.http_response.url + + @property + def method(self) -> str: + return self.http_request.method + + @property + def http_version(self) -> str: + return self.http_response.http_version + + @property + def elapsed(self) -> datetime.timedelta: + """The time taken for the complete request/response cycle to complete.""" + return self.http_response.elapsed + + @property + def is_closed(self) -> bool: + """Whether or not the response body has been closed. + + If this is False then there is response data that has not been read yet. + You must either fully consume the response body or call `.close()` + before discarding the response to prevent resource leaks. + """ + return self.http_response.is_closed + + @override + def __repr__(self) -> str: + return ( + f"<{self.__class__.__name__} [{self.status_code} {self.http_response.reason_phrase}] type={self._cast_to}>" + ) + + def _parse(self, *, to: type[_T] | None = None) -> R | _T: + cast_to = to if to is not None else self._cast_to + + # unwrap `TypeAlias('Name', T)` -> `T` + if is_type_alias_type(cast_to): + cast_to = cast_to.__value__ # type: ignore[unreachable] + + # unwrap `Annotated[T, ...]` -> `T` + if cast_to and is_annotated_type(cast_to): + cast_to = extract_type_arg(cast_to, 0) + + origin = get_origin(cast_to) or cast_to + + if self._is_sse_stream: + if to: + if not is_stream_class_type(to): + raise TypeError(f"Expected custom parse type to be a subclass of {Stream} or {AsyncStream}") + + return cast( + _T, + to( + cast_to=extract_stream_chunk_type( + to, + failure_message="Expected custom stream type to be passed with a type argument, e.g. 
Stream[ChunkType]", + ), + response=self.http_response, + client=cast(Any, self._client), + ), + ) + + if self._stream_cls: + return cast( + R, + self._stream_cls( + cast_to=extract_stream_chunk_type(self._stream_cls), + response=self.http_response, + client=cast(Any, self._client), + ), + ) + + stream_cls = cast("type[Stream[Any]] | type[AsyncStream[Any]] | None", self._client._default_stream_cls) + if stream_cls is None: + raise MissingStreamClassError() + + return cast( + R, + stream_cls( + cast_to=cast_to, + response=self.http_response, + client=cast(Any, self._client), + ), + ) + + if cast_to is NoneType: + return cast(R, None) + + response = self.http_response + if cast_to == str: + return cast(R, response.text) + + if cast_to == bytes: + return cast(R, response.content) + + if cast_to == int: + return cast(R, int(response.text)) + + if cast_to == float: + return cast(R, float(response.text)) + + if cast_to == bool: + return cast(R, response.text.lower() == "true") + + if origin == APIResponse: + raise RuntimeError("Unexpected state - cast_to is `APIResponse`") + + if inspect.isclass(origin) and issubclass(origin, httpx.Response): + # Because of the invariance of our ResponseT TypeVar, users can subclass httpx.Response + # and pass that class to our request functions. We cannot change the variance to be either + # covariant or contravariant as that makes our usage of ResponseT illegal. We could construct + # the response class ourselves but that is something that should be supported directly in httpx + # as it would be easy to incorrectly construct the Response object due to the multitude of arguments. + if cast_to != httpx.Response: + raise ValueError(f"Subclasses of httpx.Response cannot be passed to `cast_to`") + return cast(R, response) + + if ( + inspect.isclass( + origin # pyright: ignore[reportUnknownArgumentType] + ) + and not issubclass(origin, BaseModel) + and issubclass(origin, pydantic.BaseModel) + ): + raise TypeError("Pydantic models must subclass our base model type, e.g. `from kernel import BaseModel`") + + if ( + cast_to is not object + and not origin is list + and not origin is dict + and not origin is Union + and not issubclass(origin, BaseModel) + ): + raise RuntimeError( + f"Unsupported type, expected {cast_to} to be a subclass of {BaseModel}, {dict}, {list}, {Union}, {NoneType}, {str} or {httpx.Response}." + ) + + # split is required to handle cases where additional information is included + # in the response, e.g. application/json; charset=utf-8 + content_type, *_ = response.headers.get("content-type", "*").split(";") + if not content_type.endswith("json"): + if is_basemodel(cast_to): + try: + data = response.json() + except Exception as exc: + log.debug("Could not read JSON from response data due to %s - %s", type(exc), exc) + else: + return self._client._process_response_data( + data=data, + cast_to=cast_to, # type: ignore + response=response, + ) + + if self._client._strict_response_validation: + raise APIResponseValidationError( + response=response, + message=f"Expected Content-Type response header to be `application/json` but received `{content_type}` instead.", + body=response.text, + ) + + # If the API responds with content that isn't JSON then we just return + # the (decoded) text without performing any parsing so that you can still + # handle the response however you need to. 
+ return response.text # type: ignore + + data = response.json() + + return self._client._process_response_data( + data=data, + cast_to=cast_to, # type: ignore + response=response, + ) + + +class APIResponse(BaseAPIResponse[R]): + @overload + def parse(self, *, to: type[_T]) -> _T: ... + + @overload + def parse(self) -> R: ... + + def parse(self, *, to: type[_T] | None = None) -> R | _T: + """Returns the rich python representation of this response's data. + + For lower-level control, see `.read()`, `.json()`, `.iter_bytes()`. + + You can customise the type that the response is parsed into through + the `to` argument, e.g. + + ```py + from kernel import BaseModel + + + class MyModel(BaseModel): + foo: str + + + obj = response.parse(to=MyModel) + print(obj.foo) + ``` + + We support parsing: + - `BaseModel` + - `dict` + - `list` + - `Union` + - `str` + - `int` + - `float` + - `httpx.Response` + """ + cache_key = to if to is not None else self._cast_to + cached = self._parsed_by_type.get(cache_key) + if cached is not None: + return cached # type: ignore[no-any-return] + + if not self._is_sse_stream: + self.read() + + parsed = self._parse(to=to) + if is_given(self._options.post_parser): + parsed = self._options.post_parser(parsed) + + self._parsed_by_type[cache_key] = parsed + return parsed + + def read(self) -> bytes: + """Read and return the binary response content.""" + try: + return self.http_response.read() + except httpx.StreamConsumed as exc: + # The default error raised by httpx isn't very + # helpful in our case so we re-raise it with + # a different error message. + raise StreamAlreadyConsumed() from exc + + def text(self) -> str: + """Read and decode the response content into a string.""" + self.read() + return self.http_response.text + + def json(self) -> object: + """Read and decode the JSON response content.""" + self.read() + return self.http_response.json() + + def close(self) -> None: + """Close the response and release the connection. + + Automatically called if the response body is read to completion. + """ + self.http_response.close() + + def iter_bytes(self, chunk_size: int | None = None) -> Iterator[bytes]: + """ + A byte-iterator over the decoded response content. + + This automatically handles gzip, deflate and brotli encoded responses. + """ + for chunk in self.http_response.iter_bytes(chunk_size): + yield chunk + + def iter_text(self, chunk_size: int | None = None) -> Iterator[str]: + """A str-iterator over the decoded response content + that handles both gzip, deflate, etc but also detects the content's + string encoding. + """ + for chunk in self.http_response.iter_text(chunk_size): + yield chunk + + def iter_lines(self) -> Iterator[str]: + """Like `iter_text()` but will only yield chunks for each line""" + for chunk in self.http_response.iter_lines(): + yield chunk + + +class AsyncAPIResponse(BaseAPIResponse[R]): + @overload + async def parse(self, *, to: type[_T]) -> _T: ... + + @overload + async def parse(self) -> R: ... + + async def parse(self, *, to: type[_T] | None = None) -> R | _T: + """Returns the rich python representation of this response's data. + + For lower-level control, see `.read()`, `.json()`, `.iter_bytes()`. + + You can customise the type that the response is parsed into through + the `to` argument, e.g. 
+ + ```py + from kernel import BaseModel + + + class MyModel(BaseModel): + foo: str + + + obj = response.parse(to=MyModel) + print(obj.foo) + ``` + + We support parsing: + - `BaseModel` + - `dict` + - `list` + - `Union` + - `str` + - `httpx.Response` + """ + cache_key = to if to is not None else self._cast_to + cached = self._parsed_by_type.get(cache_key) + if cached is not None: + return cached # type: ignore[no-any-return] + + if not self._is_sse_stream: + await self.read() + + parsed = self._parse(to=to) + if is_given(self._options.post_parser): + parsed = self._options.post_parser(parsed) + + self._parsed_by_type[cache_key] = parsed + return parsed + + async def read(self) -> bytes: + """Read and return the binary response content.""" + try: + return await self.http_response.aread() + except httpx.StreamConsumed as exc: + # the default error raised by httpx isn't very + # helpful in our case so we re-raise it with + # a different error message + raise StreamAlreadyConsumed() from exc + + async def text(self) -> str: + """Read and decode the response content into a string.""" + await self.read() + return self.http_response.text + + async def json(self) -> object: + """Read and decode the JSON response content.""" + await self.read() + return self.http_response.json() + + async def close(self) -> None: + """Close the response and release the connection. + + Automatically called if the response body is read to completion. + """ + await self.http_response.aclose() + + async def iter_bytes(self, chunk_size: int | None = None) -> AsyncIterator[bytes]: + """ + A byte-iterator over the decoded response content. + + This automatically handles gzip, deflate and brotli encoded responses. + """ + async for chunk in self.http_response.aiter_bytes(chunk_size): + yield chunk + + async def iter_text(self, chunk_size: int | None = None) -> AsyncIterator[str]: + """A str-iterator over the decoded response content + that handles both gzip, deflate, etc but also detects the content's + string encoding. + """ + async for chunk in self.http_response.aiter_text(chunk_size): + yield chunk + + async def iter_lines(self) -> AsyncIterator[str]: + """Like `iter_text()` but will only yield chunks for each line""" + async for chunk in self.http_response.aiter_lines(): + yield chunk + + +class BinaryAPIResponse(APIResponse[bytes]): + """Subclass of APIResponse providing helpers for dealing with binary data. + + Note: If you want to stream the response data instead of eagerly reading it + all at once then you should use `.with_streaming_response` when making + the API request, e.g. `.with_streaming_response.get_binary_response()` + """ + + def write_to_file( + self, + file: str | os.PathLike[str], + ) -> None: + """Write the output to the given file. + + Accepts a filename or any path-like object, e.g. pathlib.Path + + Note: if you want to stream the data to the file instead of writing + all at once then you should use `.with_streaming_response` when making + the API request, e.g. `.with_streaming_response.get_binary_response()` + """ + with open(file, mode="wb") as f: + for data in self.iter_bytes(): + f.write(data) + + +class AsyncBinaryAPIResponse(AsyncAPIResponse[bytes]): + """Subclass of APIResponse providing helpers for dealing with binary data. + + Note: If you want to stream the response data instead of eagerly reading it + all at once then you should use `.with_streaming_response` when making + the API request, e.g. 
`.with_streaming_response.get_binary_response()` + """ + + async def write_to_file( + self, + file: str | os.PathLike[str], + ) -> None: + """Write the output to the given file. + + Accepts a filename or any path-like object, e.g. pathlib.Path + + Note: if you want to stream the data to the file instead of writing + all at once then you should use `.with_streaming_response` when making + the API request, e.g. `.with_streaming_response.get_binary_response()` + """ + path = anyio.Path(file) + async with await path.open(mode="wb") as f: + async for data in self.iter_bytes(): + await f.write(data) + + +class StreamedBinaryAPIResponse(APIResponse[bytes]): + def stream_to_file( + self, + file: str | os.PathLike[str], + *, + chunk_size: int | None = None, + ) -> None: + """Streams the output to the given file. + + Accepts a filename or any path-like object, e.g. pathlib.Path + """ + with open(file, mode="wb") as f: + for data in self.iter_bytes(chunk_size): + f.write(data) + + +class AsyncStreamedBinaryAPIResponse(AsyncAPIResponse[bytes]): + async def stream_to_file( + self, + file: str | os.PathLike[str], + *, + chunk_size: int | None = None, + ) -> None: + """Streams the output to the given file. + + Accepts a filename or any path-like object, e.g. pathlib.Path + """ + path = anyio.Path(file) + async with await path.open(mode="wb") as f: + async for data in self.iter_bytes(chunk_size): + await f.write(data) + + +class MissingStreamClassError(TypeError): + def __init__(self) -> None: + super().__init__( + "The `stream` argument was set to `True` but the `stream_cls` argument was not given. See `kernel._streaming` for reference", + ) + + +class StreamAlreadyConsumed(KernelError): + """ + Attempted to read or stream content, but the content has already + been streamed. + + This can happen if you use a method like `.iter_lines()` and then attempt + to read th entire response body afterwards, e.g. + + ```py + response = await client.post(...) + async for line in response.iter_lines(): + ... # do something with `line` + + content = await response.read() + # ^ error + ``` + + If you want this behaviour you'll need to either manually accumulate the response + content or call `await response.read()` before iterating over the stream. + """ + + def __init__(self) -> None: + message = ( + "Attempted to read or stream some content, but the content has " + "already been streamed. " + "This could be due to attempting to stream the response " + "content more than once." + "\n\n" + "You can fix this by manually accumulating the response content while streaming " + "or by calling `.read()` before starting to stream." 
+ ) + super().__init__(message) + + +class ResponseContextManager(Generic[_APIResponseT]): + """Context manager for ensuring that a request is not made + until it is entered and that the response will always be closed + when the context manager exits + """ + + def __init__(self, request_func: Callable[[], _APIResponseT]) -> None: + self._request_func = request_func + self.__response: _APIResponseT | None = None + + def __enter__(self) -> _APIResponseT: + self.__response = self._request_func() + return self.__response + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + if self.__response is not None: + self.__response.close() + + +class AsyncResponseContextManager(Generic[_AsyncAPIResponseT]): + """Context manager for ensuring that a request is not made + until it is entered and that the response will always be closed + when the context manager exits + """ + + def __init__(self, api_request: Awaitable[_AsyncAPIResponseT]) -> None: + self._api_request = api_request + self.__response: _AsyncAPIResponseT | None = None + + async def __aenter__(self) -> _AsyncAPIResponseT: + self.__response = await self._api_request + return self.__response + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + if self.__response is not None: + await self.__response.close() + + +def to_streamed_response_wrapper(func: Callable[P, R]) -> Callable[P, ResponseContextManager[APIResponse[R]]]: + """Higher order function that takes one of our bound API methods and wraps it + to support streaming and returning the raw `APIResponse` object directly. + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> ResponseContextManager[APIResponse[R]]: + extra_headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "stream" + + kwargs["extra_headers"] = extra_headers + + make_request = functools.partial(func, *args, **kwargs) + + return ResponseContextManager(cast(Callable[[], APIResponse[R]], make_request)) + + return wrapped + + +def async_to_streamed_response_wrapper( + func: Callable[P, Awaitable[R]], +) -> Callable[P, AsyncResponseContextManager[AsyncAPIResponse[R]]]: + """Higher order function that takes one of our bound API methods and wraps it + to support streaming and returning the raw `APIResponse` object directly. + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> AsyncResponseContextManager[AsyncAPIResponse[R]]: + extra_headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "stream" + + kwargs["extra_headers"] = extra_headers + + make_request = func(*args, **kwargs) + + return AsyncResponseContextManager(cast(Awaitable[AsyncAPIResponse[R]], make_request)) + + return wrapped + + +def to_custom_streamed_response_wrapper( + func: Callable[P, object], + response_cls: type[_APIResponseT], +) -> Callable[P, ResponseContextManager[_APIResponseT]]: + """Higher order function that takes one of our bound API methods and an `APIResponse` class + and wraps the method to support streaming and returning the given response class directly. + + Note: the given `response_cls` *must* be concrete, e.g. 
`class BinaryAPIResponse(APIResponse[bytes])` + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> ResponseContextManager[_APIResponseT]: + extra_headers: dict[str, Any] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "stream" + extra_headers[OVERRIDE_CAST_TO_HEADER] = response_cls + + kwargs["extra_headers"] = extra_headers + + make_request = functools.partial(func, *args, **kwargs) + + return ResponseContextManager(cast(Callable[[], _APIResponseT], make_request)) + + return wrapped + + +def async_to_custom_streamed_response_wrapper( + func: Callable[P, Awaitable[object]], + response_cls: type[_AsyncAPIResponseT], +) -> Callable[P, AsyncResponseContextManager[_AsyncAPIResponseT]]: + """Higher order function that takes one of our bound API methods and an `APIResponse` class + and wraps the method to support streaming and returning the given response class directly. + + Note: the given `response_cls` *must* be concrete, e.g. `class BinaryAPIResponse(APIResponse[bytes])` + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> AsyncResponseContextManager[_AsyncAPIResponseT]: + extra_headers: dict[str, Any] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "stream" + extra_headers[OVERRIDE_CAST_TO_HEADER] = response_cls + + kwargs["extra_headers"] = extra_headers + + make_request = func(*args, **kwargs) + + return AsyncResponseContextManager(cast(Awaitable[_AsyncAPIResponseT], make_request)) + + return wrapped + + +def to_raw_response_wrapper(func: Callable[P, R]) -> Callable[P, APIResponse[R]]: + """Higher order function that takes one of our bound API methods and wraps it + to support returning the raw `APIResponse` object directly. + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> APIResponse[R]: + extra_headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "raw" + + kwargs["extra_headers"] = extra_headers + + return cast(APIResponse[R], func(*args, **kwargs)) + + return wrapped + + +def async_to_raw_response_wrapper(func: Callable[P, Awaitable[R]]) -> Callable[P, Awaitable[AsyncAPIResponse[R]]]: + """Higher order function that takes one of our bound API methods and wraps it + to support returning the raw `APIResponse` object directly. + """ + + @functools.wraps(func) + async def wrapped(*args: P.args, **kwargs: P.kwargs) -> AsyncAPIResponse[R]: + extra_headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "raw" + + kwargs["extra_headers"] = extra_headers + + return cast(AsyncAPIResponse[R], await func(*args, **kwargs)) + + return wrapped + + +def to_custom_raw_response_wrapper( + func: Callable[P, object], + response_cls: type[_APIResponseT], +) -> Callable[P, _APIResponseT]: + """Higher order function that takes one of our bound API methods and an `APIResponse` class + and wraps the method to support returning the given response class directly. + + Note: the given `response_cls` *must* be concrete, e.g. 
`class BinaryAPIResponse(APIResponse[bytes])` + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> _APIResponseT: + extra_headers: dict[str, Any] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "raw" + extra_headers[OVERRIDE_CAST_TO_HEADER] = response_cls + + kwargs["extra_headers"] = extra_headers + + return cast(_APIResponseT, func(*args, **kwargs)) + + return wrapped + + +def async_to_custom_raw_response_wrapper( + func: Callable[P, Awaitable[object]], + response_cls: type[_AsyncAPIResponseT], +) -> Callable[P, Awaitable[_AsyncAPIResponseT]]: + """Higher order function that takes one of our bound API methods and an `APIResponse` class + and wraps the method to support returning the given response class directly. + + Note: the given `response_cls` *must* be concrete, e.g. `class BinaryAPIResponse(APIResponse[bytes])` + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> Awaitable[_AsyncAPIResponseT]: + extra_headers: dict[str, Any] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "raw" + extra_headers[OVERRIDE_CAST_TO_HEADER] = response_cls + + kwargs["extra_headers"] = extra_headers + + return cast(Awaitable[_AsyncAPIResponseT], func(*args, **kwargs)) + + return wrapped + + +def extract_response_type(typ: type[BaseAPIResponse[Any]]) -> type: + """Given a type like `APIResponse[T]`, returns the generic type variable `T`. + + This also handles the case where a concrete subclass is given, e.g. + ```py + class MyResponse(APIResponse[bytes]): + ... + + extract_response_type(MyResponse) -> bytes + ``` + """ + return extract_type_var_from_base( + typ, + generic_bases=cast("tuple[type, ...]", (BaseAPIResponse, APIResponse, AsyncAPIResponse)), + index=0, + ) diff --git a/src/kernel/_streaming.py b/src/kernel/_streaming.py new file mode 100644 index 0000000..369a3f6 --- /dev/null +++ b/src/kernel/_streaming.py @@ -0,0 +1,333 @@ +# Note: initially copied from https://github.com/florimondmanca/httpx-sse/blob/master/src/httpx_sse/_decoders.py +from __future__ import annotations + +import json +import inspect +from types import TracebackType +from typing import TYPE_CHECKING, Any, Generic, TypeVar, Iterator, AsyncIterator, cast +from typing_extensions import Self, Protocol, TypeGuard, override, get_origin, runtime_checkable + +import httpx + +from ._utils import extract_type_var_from_base + +if TYPE_CHECKING: + from ._client import Kernel, AsyncKernel + + +_T = TypeVar("_T") + + +class Stream(Generic[_T]): + """Provides the core interface to iterate over a synchronous stream response.""" + + response: httpx.Response + + _decoder: SSEBytesDecoder + + def __init__( + self, + *, + cast_to: type[_T], + response: httpx.Response, + client: Kernel, + ) -> None: + self.response = response + self._cast_to = cast_to + self._client = client + self._decoder = client._make_sse_decoder() + self._iterator = self.__stream__() + + def __next__(self) -> _T: + return self._iterator.__next__() + + def __iter__(self) -> Iterator[_T]: + for item in self._iterator: + yield item + + def _iter_events(self) -> Iterator[ServerSentEvent]: + yield from self._decoder.iter_bytes(self.response.iter_bytes()) + + def __stream__(self) -> Iterator[_T]: + cast_to = cast(Any, self._cast_to) + response = self.response + process_data = self._client._process_response_data + iterator = self._iter_events() + + try: + for sse in iterator: + yield process_data(data=sse.json(), cast_to=cast_to, 
response=response) + finally: + # Ensure the response is closed even if the consumer doesn't read all data + response.close() + + def __enter__(self) -> Self: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.close() + + def close(self) -> None: + """ + Close the response and release the connection. + + Automatically called if the response body is read to completion. + """ + self.response.close() + + +class AsyncStream(Generic[_T]): + """Provides the core interface to iterate over an asynchronous stream response.""" + + response: httpx.Response + + _decoder: SSEDecoder | SSEBytesDecoder + + def __init__( + self, + *, + cast_to: type[_T], + response: httpx.Response, + client: AsyncKernel, + ) -> None: + self.response = response + self._cast_to = cast_to + self._client = client + self._decoder = client._make_sse_decoder() + self._iterator = self.__stream__() + + async def __anext__(self) -> _T: + return await self._iterator.__anext__() + + async def __aiter__(self) -> AsyncIterator[_T]: + async for item in self._iterator: + yield item + + async def _iter_events(self) -> AsyncIterator[ServerSentEvent]: + async for sse in self._decoder.aiter_bytes(self.response.aiter_bytes()): + yield sse + + async def __stream__(self) -> AsyncIterator[_T]: + cast_to = cast(Any, self._cast_to) + response = self.response + process_data = self._client._process_response_data + iterator = self._iter_events() + + try: + async for sse in iterator: + yield process_data(data=sse.json(), cast_to=cast_to, response=response) + finally: + # Ensure the response is closed even if the consumer doesn't read all data + await response.aclose() + + async def __aenter__(self) -> Self: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + await self.close() + + async def close(self) -> None: + """ + Close the response and release the connection. + + Automatically called if the response body is read to completion. 
+ """ + await self.response.aclose() + + +class ServerSentEvent: + def __init__( + self, + *, + event: str | None = None, + data: str | None = None, + id: str | None = None, + retry: int | None = None, + ) -> None: + if data is None: + data = "" + + self._id = id + self._data = data + self._event = event or None + self._retry = retry + + @property + def event(self) -> str | None: + return self._event + + @property + def id(self) -> str | None: + return self._id + + @property + def retry(self) -> int | None: + return self._retry + + @property + def data(self) -> str: + return self._data + + def json(self) -> Any: + return json.loads(self.data) + + @override + def __repr__(self) -> str: + return f"ServerSentEvent(event={self.event}, data={self.data}, id={self.id}, retry={self.retry})" + + +class SSEDecoder: + _data: list[str] + _event: str | None + _retry: int | None + _last_event_id: str | None + + def __init__(self) -> None: + self._event = None + self._data = [] + self._last_event_id = None + self._retry = None + + def iter_bytes(self, iterator: Iterator[bytes]) -> Iterator[ServerSentEvent]: + """Given an iterator that yields raw binary data, iterate over it & yield every event encountered""" + for chunk in self._iter_chunks(iterator): + # Split before decoding so splitlines() only uses \r and \n + for raw_line in chunk.splitlines(): + line = raw_line.decode("utf-8") + sse = self.decode(line) + if sse: + yield sse + + def _iter_chunks(self, iterator: Iterator[bytes]) -> Iterator[bytes]: + """Given an iterator that yields raw binary data, iterate over it and yield individual SSE chunks""" + data = b"" + for chunk in iterator: + for line in chunk.splitlines(keepends=True): + data += line + if data.endswith((b"\r\r", b"\n\n", b"\r\n\r\n")): + yield data + data = b"" + if data: + yield data + + async def aiter_bytes(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[ServerSentEvent]: + """Given an iterator that yields raw binary data, iterate over it & yield every event encountered""" + async for chunk in self._aiter_chunks(iterator): + # Split before decoding so splitlines() only uses \r and \n + for raw_line in chunk.splitlines(): + line = raw_line.decode("utf-8") + sse = self.decode(line) + if sse: + yield sse + + async def _aiter_chunks(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[bytes]: + """Given an iterator that yields raw binary data, iterate over it and yield individual SSE chunks""" + data = b"" + async for chunk in iterator: + for line in chunk.splitlines(keepends=True): + data += line + if data.endswith((b"\r\r", b"\n\n", b"\r\n\r\n")): + yield data + data = b"" + if data: + yield data + + def decode(self, line: str) -> ServerSentEvent | None: + # See: https://html.spec.whatwg.org/multipage/server-sent-events.html#event-stream-interpretation # noqa: E501 + + if not line: + if not self._event and not self._data and not self._last_event_id and self._retry is None: + return None + + sse = ServerSentEvent( + event=self._event, + data="\n".join(self._data), + id=self._last_event_id, + retry=self._retry, + ) + + # NOTE: as per the SSE spec, do not reset last_event_id. 
+ self._event = None + self._data = [] + self._retry = None + + return sse + + if line.startswith(":"): + return None + + fieldname, _, value = line.partition(":") + + if value.startswith(" "): + value = value[1:] + + if fieldname == "event": + self._event = value + elif fieldname == "data": + self._data.append(value) + elif fieldname == "id": + if "\0" in value: + pass + else: + self._last_event_id = value + elif fieldname == "retry": + try: + self._retry = int(value) + except (TypeError, ValueError): + pass + else: + pass # Field is ignored. + + return None + + +@runtime_checkable +class SSEBytesDecoder(Protocol): + def iter_bytes(self, iterator: Iterator[bytes]) -> Iterator[ServerSentEvent]: + """Given an iterator that yields raw binary data, iterate over it & yield every event encountered""" + ... + + def aiter_bytes(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[ServerSentEvent]: + """Given an async iterator that yields raw binary data, iterate over it & yield every event encountered""" + ... + + +def is_stream_class_type(typ: type) -> TypeGuard[type[Stream[object]] | type[AsyncStream[object]]]: + """TypeGuard for determining whether or not the given type is a subclass of `Stream` / `AsyncStream`""" + origin = get_origin(typ) or typ + return inspect.isclass(origin) and issubclass(origin, (Stream, AsyncStream)) + + +def extract_stream_chunk_type( + stream_cls: type, + *, + failure_message: str | None = None, +) -> type: + """Given a type like `Stream[T]`, returns the generic type variable `T`. + + This also handles the case where a concrete subclass is given, e.g. + ```py + class MyStream(Stream[bytes]): + ... + + extract_stream_chunk_type(MyStream) -> bytes + ``` + """ + from ._base_client import Stream, AsyncStream + + return extract_type_var_from_base( + stream_cls, + index=0, + generic_bases=cast("tuple[type, ...]", (Stream, AsyncStream)), + failure_message=failure_message, + ) diff --git a/src/kernel/_types.py b/src/kernel/_types.py new file mode 100644 index 0000000..275ffbb --- /dev/null +++ b/src/kernel/_types.py @@ -0,0 +1,261 @@ +from __future__ import annotations + +from os import PathLike +from typing import ( + IO, + TYPE_CHECKING, + Any, + Dict, + List, + Type, + Tuple, + Union, + Mapping, + TypeVar, + Callable, + Iterator, + Optional, + Sequence, +) +from typing_extensions import ( + Set, + Literal, + Protocol, + TypeAlias, + TypedDict, + SupportsIndex, + overload, + override, + runtime_checkable, +) + +import httpx +import pydantic +from httpx import URL, Proxy, Timeout, Response, BaseTransport, AsyncBaseTransport + +if TYPE_CHECKING: + from ._models import BaseModel + from ._response import APIResponse, AsyncAPIResponse + +Transport = BaseTransport +AsyncTransport = AsyncBaseTransport +Query = Mapping[str, object] +Body = object +AnyMapping = Mapping[str, object] +ModelT = TypeVar("ModelT", bound=pydantic.BaseModel) +_T = TypeVar("_T") + + +# Approximates httpx internal ProxiesTypes and RequestFiles types +# while adding support for `PathLike` instances +ProxiesDict = Dict["str | URL", Union[None, str, URL, Proxy]] +ProxiesTypes = Union[str, Proxy, ProxiesDict] +if TYPE_CHECKING: + Base64FileInput = Union[IO[bytes], PathLike[str]] + FileContent = Union[IO[bytes], bytes, PathLike[str]] +else: + Base64FileInput = Union[IO[bytes], PathLike] + FileContent = Union[IO[bytes], bytes, PathLike] # PathLike is not subscriptable in Python 3.8. 
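+# For illustration only (hypothetical values): a bare file handle, raw bytes, or a
+# (filename, contents, content_type) tuple such as ("report.pdf", b"%PDF-...", "application/pdf")
+# are all acceptable values of the `FileTypes` union defined below.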
+FileTypes = Union[ + # file (or bytes) + FileContent, + # (filename, file (or bytes)) + Tuple[Optional[str], FileContent], + # (filename, file (or bytes), content_type) + Tuple[Optional[str], FileContent, Optional[str]], + # (filename, file (or bytes), content_type, headers) + Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], +] +RequestFiles = Union[Mapping[str, FileTypes], Sequence[Tuple[str, FileTypes]]] + +# duplicate of the above but without our custom file support +HttpxFileContent = Union[IO[bytes], bytes] +HttpxFileTypes = Union[ + # file (or bytes) + HttpxFileContent, + # (filename, file (or bytes)) + Tuple[Optional[str], HttpxFileContent], + # (filename, file (or bytes), content_type) + Tuple[Optional[str], HttpxFileContent, Optional[str]], + # (filename, file (or bytes), content_type, headers) + Tuple[Optional[str], HttpxFileContent, Optional[str], Mapping[str, str]], +] +HttpxRequestFiles = Union[Mapping[str, HttpxFileTypes], Sequence[Tuple[str, HttpxFileTypes]]] + +# Workaround to support (cast_to: Type[ResponseT]) -> ResponseT +# where ResponseT includes `None`. In order to support directly +# passing `None`, overloads would have to be defined for every +# method that uses `ResponseT` which would lead to an unacceptable +# amount of code duplication and make it unreadable. See _base_client.py +# for example usage. +# +# This unfortunately means that you will either have +# to import this type and pass it explicitly: +# +# from kernel import NoneType +# client.get('/foo', cast_to=NoneType) +# +# or build it yourself: +# +# client.get('/foo', cast_to=type(None)) +if TYPE_CHECKING: + NoneType: Type[None] +else: + NoneType = type(None) + + +class RequestOptions(TypedDict, total=False): + headers: Headers + max_retries: int + timeout: float | Timeout | None + params: Query + extra_json: AnyMapping + idempotency_key: str + follow_redirects: bool + + +# Sentinel class used until PEP 0661 is accepted +class NotGiven: + """ + For parameters with a meaningful None value, we need to distinguish between + the user explicitly passing None, and the user not passing the parameter at + all. + + User code shouldn't need to use not_given directly. + + For example: + + ```py + def create(timeout: Timeout | None | NotGiven = not_given): ... + + + create(timeout=1) # 1s timeout + create(timeout=None) # No timeout + create() # Default timeout behavior + ``` + """ + + def __bool__(self) -> Literal[False]: + return False + + @override + def __repr__(self) -> str: + return "NOT_GIVEN" + + +not_given = NotGiven() +# for backwards compatibility: +NOT_GIVEN = NotGiven() + + +class Omit: + """ + To explicitly omit something from being sent in a request, use `omit`. + + ```py + # as the default `Content-Type` header is `application/json` that will be sent + client.post("/upload/files", files={"file": b"my raw file content"}) + + # you can't explicitly override the header as it has to be dynamically generated + # to look something like: 'multipart/form-data; boundary=0d8382fcf5f8c3be01ca2e11002d2983' + client.post(..., headers={"Content-Type": "multipart/form-data"}) + + # instead you can remove the default `application/json` header by passing omit + client.post(..., headers={"Content-Type": omit}) + ``` + """ + + def __bool__(self) -> Literal[False]: + return False + + +omit = Omit() + + +@runtime_checkable +class ModelBuilderProtocol(Protocol): + @classmethod + def build( + cls: type[_T], + *, + response: Response, + data: object, + ) -> _T: ... 
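+# Illustrative sketch (hypothetical helper, not part of this module): both sentinels
+# are falsy but distinct from `None`, so request-building code can drop omitted
+# values while still honouring an explicit `None`, e.g.
+#
+#   def drop_sentinels(params: dict) -> dict:
+#       return {k: v for k, v in params.items() if not isinstance(v, (NotGiven, Omit))}
+#
+#   drop_sentinels({"timeout": NOT_GIVEN, "user": None})  # -> {"user": None}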
+ + +Headers = Mapping[str, Union[str, Omit]] + + +class HeadersLikeProtocol(Protocol): + def get(self, __key: str) -> str | None: ... + + +HeadersLike = Union[Headers, HeadersLikeProtocol] + +ResponseT = TypeVar( + "ResponseT", + bound=Union[ + object, + str, + None, + "BaseModel", + List[Any], + Dict[str, Any], + Response, + ModelBuilderProtocol, + "APIResponse[Any]", + "AsyncAPIResponse[Any]", + ], +) + +StrBytesIntFloat = Union[str, bytes, int, float] + +# Note: copied from Pydantic +# https://github.com/pydantic/pydantic/blob/6f31f8f68ef011f84357330186f603ff295312fd/pydantic/main.py#L79 +IncEx: TypeAlias = Union[Set[int], Set[str], Mapping[int, Union["IncEx", bool]], Mapping[str, Union["IncEx", bool]]] + +PostParser = Callable[[Any], Any] + + +@runtime_checkable +class InheritsGeneric(Protocol): + """Represents a type that has inherited from `Generic` + + The `__orig_bases__` property can be used to determine the resolved + type variable for a given base class. + """ + + __orig_bases__: tuple[_GenericAlias] + + +class _GenericAlias(Protocol): + __origin__: type[object] + + +class HttpxSendArgs(TypedDict, total=False): + auth: httpx.Auth + follow_redirects: bool + + +_T_co = TypeVar("_T_co", covariant=True) + + +if TYPE_CHECKING: + # This works because str.__contains__ does not accept object (either in typeshed or at runtime) + # https://github.com/hauntsaninja/useful_types/blob/5e9710f3875107d068e7679fd7fec9cfab0eff3b/useful_types/__init__.py#L285 + # + # Note: index() and count() methods are intentionally omitted to allow pyright to properly + # infer TypedDict types when dict literals are used in lists assigned to SequenceNotStr. + class SequenceNotStr(Protocol[_T_co]): + @overload + def __getitem__(self, index: SupportsIndex, /) -> _T_co: ... + @overload + def __getitem__(self, index: slice, /) -> Sequence[_T_co]: ... + def __contains__(self, value: object, /) -> bool: ... + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[_T_co]: ... + def __reversed__(self) -> Iterator[_T_co]: ... 
+else: + # just point this to a normal `Sequence` at runtime to avoid having to special case + # deserializing our custom sequence type + SequenceNotStr = Sequence diff --git a/src/kernel/_utils/__init__.py b/src/kernel/_utils/__init__.py new file mode 100644 index 0000000..dc64e29 --- /dev/null +++ b/src/kernel/_utils/__init__.py @@ -0,0 +1,64 @@ +from ._sync import asyncify as asyncify +from ._proxy import LazyProxy as LazyProxy +from ._utils import ( + flatten as flatten, + is_dict as is_dict, + is_list as is_list, + is_given as is_given, + is_tuple as is_tuple, + json_safe as json_safe, + lru_cache as lru_cache, + is_mapping as is_mapping, + is_tuple_t as is_tuple_t, + is_iterable as is_iterable, + is_sequence as is_sequence, + coerce_float as coerce_float, + is_mapping_t as is_mapping_t, + removeprefix as removeprefix, + removesuffix as removesuffix, + extract_files as extract_files, + is_sequence_t as is_sequence_t, + required_args as required_args, + coerce_boolean as coerce_boolean, + coerce_integer as coerce_integer, + file_from_path as file_from_path, + strip_not_given as strip_not_given, + deepcopy_minimal as deepcopy_minimal, + get_async_library as get_async_library, + maybe_coerce_float as maybe_coerce_float, + get_required_header as get_required_header, + maybe_coerce_boolean as maybe_coerce_boolean, + maybe_coerce_integer as maybe_coerce_integer, +) +from ._compat import ( + get_args as get_args, + is_union as is_union, + get_origin as get_origin, + is_typeddict as is_typeddict, + is_literal_type as is_literal_type, +) +from ._typing import ( + is_list_type as is_list_type, + is_union_type as is_union_type, + extract_type_arg as extract_type_arg, + is_iterable_type as is_iterable_type, + is_required_type as is_required_type, + is_sequence_type as is_sequence_type, + is_annotated_type as is_annotated_type, + is_type_alias_type as is_type_alias_type, + strip_annotated_type as strip_annotated_type, + extract_type_var_from_base as extract_type_var_from_base, +) +from ._streams import consume_sync_iterator as consume_sync_iterator, consume_async_iterator as consume_async_iterator +from ._transform import ( + PropertyInfo as PropertyInfo, + transform as transform, + async_transform as async_transform, + maybe_transform as maybe_transform, + async_maybe_transform as async_maybe_transform, +) +from ._reflection import ( + function_has_argument as function_has_argument, + assert_signatures_in_sync as assert_signatures_in_sync, +) +from ._datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime diff --git a/src/kernel/_utils/_compat.py b/src/kernel/_utils/_compat.py new file mode 100644 index 0000000..dd70323 --- /dev/null +++ b/src/kernel/_utils/_compat.py @@ -0,0 +1,45 @@ +from __future__ import annotations + +import sys +import typing_extensions +from typing import Any, Type, Union, Literal, Optional +from datetime import date, datetime +from typing_extensions import get_args as _get_args, get_origin as _get_origin + +from .._types import StrBytesIntFloat +from ._datetime_parse import parse_date as _parse_date, parse_datetime as _parse_datetime + +_LITERAL_TYPES = {Literal, typing_extensions.Literal} + + +def get_args(tp: type[Any]) -> tuple[Any, ...]: + return _get_args(tp) + + +def get_origin(tp: type[Any]) -> type[Any] | None: + return _get_origin(tp) + + +def is_union(tp: Optional[Type[Any]]) -> bool: + if sys.version_info < (3, 10): + return tp is Union # type: ignore[comparison-overlap] + else: + import types + + return tp is Union or tp is 
types.UnionType + + +def is_typeddict(tp: Type[Any]) -> bool: + return typing_extensions.is_typeddict(tp) + + +def is_literal_type(tp: Type[Any]) -> bool: + return get_origin(tp) in _LITERAL_TYPES + + +def parse_date(value: Union[date, StrBytesIntFloat]) -> date: + return _parse_date(value) + + +def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime: + return _parse_datetime(value) diff --git a/src/kernel/_utils/_datetime_parse.py b/src/kernel/_utils/_datetime_parse.py new file mode 100644 index 0000000..7cb9d9e --- /dev/null +++ b/src/kernel/_utils/_datetime_parse.py @@ -0,0 +1,136 @@ +""" +This file contains code from https://github.com/pydantic/pydantic/blob/main/pydantic/v1/datetime_parse.py +without the Pydantic v1 specific errors. +""" + +from __future__ import annotations + +import re +from typing import Dict, Union, Optional +from datetime import date, datetime, timezone, timedelta + +from .._types import StrBytesIntFloat + +date_expr = r"(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})" +time_expr = ( + r"(?P<hour>\d{1,2}):(?P<minute>\d{1,2})" + r"(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?" + r"(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$" +) + +date_re = re.compile(f"{date_expr}$") +datetime_re = re.compile(f"{date_expr}[T ]{time_expr}") + + +EPOCH = datetime(1970, 1, 1) +# if greater than this, the number is in ms, if less than or equal it's in seconds +# (in seconds this is 11th October 2603, in ms it's 20th August 1970) +MS_WATERSHED = int(2e10) +# slightly more than datetime.max in ns - (datetime.max - EPOCH).total_seconds() * 1e9 +MAX_NUMBER = int(3e20) + + +def _get_numeric(value: StrBytesIntFloat, native_expected_type: str) -> Union[None, int, float]: + if isinstance(value, (int, float)): + return value + try: + return float(value) + except ValueError: + return None + except TypeError: + raise TypeError(f"invalid type; expected {native_expected_type}, string, bytes, int or float") from None + + +def _from_unix_seconds(seconds: Union[int, float]) -> datetime: + if seconds > MAX_NUMBER: + return datetime.max + elif seconds < -MAX_NUMBER: + return datetime.min + + while abs(seconds) > MS_WATERSHED: + seconds /= 1000 + dt = EPOCH + timedelta(seconds=seconds) + return dt.replace(tzinfo=timezone.utc) + + +def _parse_timezone(value: Optional[str]) -> Union[None, int, timezone]: + if value == "Z": + return timezone.utc + elif value is not None: + offset_mins = int(value[-2:]) if len(value) > 3 else 0 + offset = 60 * int(value[1:3]) + offset_mins + if value[0] == "-": + offset = -offset + return timezone(timedelta(minutes=offset)) + else: + return None + + +def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime: + """ + Parse a datetime/int/float/string and return a datetime.datetime. + + This function supports time zone offsets. When the input contains one, + the output uses a timezone with a fixed offset from UTC. + + Raise ValueError if the input is well formatted but not a valid datetime. + Raise ValueError if the input isn't well formatted.
+ """ + if isinstance(value, datetime): + return value + + number = _get_numeric(value, "datetime") + if number is not None: + return _from_unix_seconds(number) + + if isinstance(value, bytes): + value = value.decode() + + assert not isinstance(value, (float, int)) + + match = datetime_re.match(value) + if match is None: + raise ValueError("invalid datetime format") + + kw = match.groupdict() + if kw["microsecond"]: + kw["microsecond"] = kw["microsecond"].ljust(6, "0") + + tzinfo = _parse_timezone(kw.pop("tzinfo")) + kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None} + kw_["tzinfo"] = tzinfo + + return datetime(**kw_) # type: ignore + + +def parse_date(value: Union[date, StrBytesIntFloat]) -> date: + """ + Parse a date/int/float/string and return a datetime.date. + + Raise ValueError if the input is well formatted but not a valid date. + Raise ValueError if the input isn't well formatted. + """ + if isinstance(value, date): + if isinstance(value, datetime): + return value.date() + else: + return value + + number = _get_numeric(value, "date") + if number is not None: + return _from_unix_seconds(number).date() + + if isinstance(value, bytes): + value = value.decode() + + assert not isinstance(value, (float, int)) + match = date_re.match(value) + if match is None: + raise ValueError("invalid date format") + + kw = {k: int(v) for k, v in match.groupdict().items()} + + try: + return date(**kw) + except ValueError: + raise ValueError("invalid date format") from None diff --git a/src/kernel/_utils/_logs.py b/src/kernel/_utils/_logs.py new file mode 100644 index 0000000..4eff94b --- /dev/null +++ b/src/kernel/_utils/_logs.py @@ -0,0 +1,25 @@ +import os +import logging + +logger: logging.Logger = logging.getLogger("kernel") +httpx_logger: logging.Logger = logging.getLogger("httpx") + + +def _basic_config() -> None: + # e.g. [2023-10-05 14:12:26 - kernel._base_client:818 - DEBUG] HTTP Request: POST http://127.0.0.1:4010/foo/bar "200 OK" + logging.basicConfig( + format="[%(asctime)s - %(name)s:%(lineno)d - %(levelname)s] %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + ) + + +def setup_logging() -> None: + env = os.environ.get("KERNEL_LOG") + if env == "debug": + _basic_config() + logger.setLevel(logging.DEBUG) + httpx_logger.setLevel(logging.DEBUG) + elif env == "info": + _basic_config() + logger.setLevel(logging.INFO) + httpx_logger.setLevel(logging.INFO) diff --git a/src/kernel/_utils/_proxy.py b/src/kernel/_utils/_proxy.py new file mode 100644 index 0000000..0f239a3 --- /dev/null +++ b/src/kernel/_utils/_proxy.py @@ -0,0 +1,65 @@ +from __future__ import annotations + +from abc import ABC, abstractmethod +from typing import Generic, TypeVar, Iterable, cast +from typing_extensions import override + +T = TypeVar("T") + + +class LazyProxy(Generic[T], ABC): + """Implements data methods to pretend that an instance is another instance. + + This includes forwarding attribute access and other methods. + """ + + # Note: we have to special case proxies that themselves return proxies + # to support using a proxy as a catch-all for any random access, e.g. 
`proxy.foo.bar.baz` + + def __getattr__(self, attr: str) -> object: + proxied = self.__get_proxied__() + if isinstance(proxied, LazyProxy): + return proxied # pyright: ignore + return getattr(proxied, attr) + + @override + def __repr__(self) -> str: + proxied = self.__get_proxied__() + if isinstance(proxied, LazyProxy): + return proxied.__class__.__name__ + return repr(self.__get_proxied__()) + + @override + def __str__(self) -> str: + proxied = self.__get_proxied__() + if isinstance(proxied, LazyProxy): + return proxied.__class__.__name__ + return str(proxied) + + @override + def __dir__(self) -> Iterable[str]: + proxied = self.__get_proxied__() + if isinstance(proxied, LazyProxy): + return [] + return proxied.__dir__() + + @property # type: ignore + @override + def __class__(self) -> type: # pyright: ignore + try: + proxied = self.__get_proxied__() + except Exception: + return type(self) + if issubclass(type(proxied), LazyProxy): + return type(proxied) + return proxied.__class__ + + def __get_proxied__(self) -> T: + return self.__load__() + + def __as_proxied__(self) -> T: + """Helper method that returns the current proxy, typed as the loaded object""" + return cast(T, self) + + @abstractmethod + def __load__(self) -> T: ... diff --git a/src/kernel/_utils/_reflection.py b/src/kernel/_utils/_reflection.py new file mode 100644 index 0000000..89aa712 --- /dev/null +++ b/src/kernel/_utils/_reflection.py @@ -0,0 +1,42 @@ +from __future__ import annotations + +import inspect +from typing import Any, Callable + + +def function_has_argument(func: Callable[..., Any], arg_name: str) -> bool: + """Returns whether or not the given function has a specific parameter""" + sig = inspect.signature(func) + return arg_name in sig.parameters + + +def assert_signatures_in_sync( + source_func: Callable[..., Any], + check_func: Callable[..., Any], + *, + exclude_params: set[str] = set(), +) -> None: + """Ensure that the signature of the second function matches the first.""" + + check_sig = inspect.signature(check_func) + source_sig = inspect.signature(source_func) + + errors: list[str] = [] + + for name, source_param in source_sig.parameters.items(): + if name in exclude_params: + continue + + custom_param = check_sig.parameters.get(name) + if not custom_param: + errors.append(f"the `{name}` param is missing") + continue + + if custom_param.annotation != source_param.annotation: + errors.append( + f"types for the `{name}` param do not match; source={repr(source_param.annotation)} checking={repr(custom_param.annotation)}" + ) + continue + + if errors: + raise AssertionError(f"{len(errors)} errors encountered when comparing signatures:\n\n" + "\n\n".join(errors)) diff --git a/src/kernel/_utils/_resources_proxy.py b/src/kernel/_utils/_resources_proxy.py new file mode 100644 index 0000000..006a639 --- /dev/null +++ b/src/kernel/_utils/_resources_proxy.py @@ -0,0 +1,24 @@ +from __future__ import annotations + +from typing import Any +from typing_extensions import override + +from ._proxy import LazyProxy + + +class ResourcesProxy(LazyProxy[Any]): + """A proxy for the `kernel.resources` module.
+ + This is used so that we can lazily import `kernel.resources` only when + needed *and* so that users can just import `kernel` and reference `kernel.resources` + """ + + @override + def __load__(self) -> Any: + import importlib + + mod = importlib.import_module("kernel.resources") + return mod + + +resources = ResourcesProxy().__as_proxied__() diff --git a/src/kernel/_utils/_streams.py b/src/kernel/_utils/_streams.py new file mode 100644 index 0000000..f4a0208 --- /dev/null +++ b/src/kernel/_utils/_streams.py @@ -0,0 +1,12 @@ +from typing import Any +from typing_extensions import Iterator, AsyncIterator + + +def consume_sync_iterator(iterator: Iterator[Any]) -> None: + for _ in iterator: + ... + + +async def consume_async_iterator(iterator: AsyncIterator[Any]) -> None: + async for _ in iterator: + ... diff --git a/src/kernel/_utils/_sync.py b/src/kernel/_utils/_sync.py new file mode 100644 index 0000000..f6027c1 --- /dev/null +++ b/src/kernel/_utils/_sync.py @@ -0,0 +1,58 @@ +from __future__ import annotations + +import asyncio +import functools +from typing import TypeVar, Callable, Awaitable +from typing_extensions import ParamSpec + +import anyio +import sniffio +import anyio.to_thread + +T_Retval = TypeVar("T_Retval") +T_ParamSpec = ParamSpec("T_ParamSpec") + + +async def to_thread( + func: Callable[T_ParamSpec, T_Retval], /, *args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs +) -> T_Retval: + if sniffio.current_async_library() == "asyncio": + return await asyncio.to_thread(func, *args, **kwargs) + + return await anyio.to_thread.run_sync( + functools.partial(func, *args, **kwargs), + ) + + +# inspired by `asyncer`, https://github.com/tiangolo/asyncer +def asyncify(function: Callable[T_ParamSpec, T_Retval]) -> Callable[T_ParamSpec, Awaitable[T_Retval]]: + """ + Take a blocking function and create an async one that receives the same + positional and keyword arguments. + + Usage: + + ```python + def blocking_func(arg1, arg2, kwarg1=None): + # blocking code + return result + + + result = asyncify(blocking_function)(arg1, arg2, kwarg1=value1) + ``` + + ## Arguments + + `function`: a blocking regular callable (e.g. a function) + + ## Return + + An async function that takes the same positional and keyword arguments as the + original one, that when called runs the same original function in a thread worker + and returns the result. 
+ """ + + async def wrapper(*args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs) -> T_Retval: + return await to_thread(function, *args, **kwargs) + + return wrapper diff --git a/src/kernel/_utils/_transform.py b/src/kernel/_utils/_transform.py new file mode 100644 index 0000000..5207549 --- /dev/null +++ b/src/kernel/_utils/_transform.py @@ -0,0 +1,457 @@ +from __future__ import annotations + +import io +import base64 +import pathlib +from typing import Any, Mapping, TypeVar, cast +from datetime import date, datetime +from typing_extensions import Literal, get_args, override, get_type_hints as _get_type_hints + +import anyio +import pydantic + +from ._utils import ( + is_list, + is_given, + lru_cache, + is_mapping, + is_iterable, + is_sequence, +) +from .._files import is_base64_file_input +from ._compat import get_origin, is_typeddict +from ._typing import ( + is_list_type, + is_union_type, + extract_type_arg, + is_iterable_type, + is_required_type, + is_sequence_type, + is_annotated_type, + strip_annotated_type, +) + +_T = TypeVar("_T") + + +# TODO: support for drilling globals() and locals() +# TODO: ensure works correctly with forward references in all cases + + +PropertyFormat = Literal["iso8601", "base64", "custom"] + + +class PropertyInfo: + """Metadata class to be used in Annotated types to provide information about a given type. + + For example: + + class MyParams(TypedDict): + account_holder_name: Annotated[str, PropertyInfo(alias='accountHolderName')] + + This means that {'account_holder_name': 'Robert'} will be transformed to {'accountHolderName': 'Robert'} before being sent to the API. + """ + + alias: str | None + format: PropertyFormat | None + format_template: str | None + discriminator: str | None + + def __init__( + self, + *, + alias: str | None = None, + format: PropertyFormat | None = None, + format_template: str | None = None, + discriminator: str | None = None, + ) -> None: + self.alias = alias + self.format = format + self.format_template = format_template + self.discriminator = discriminator + + @override + def __repr__(self) -> str: + return f"{self.__class__.__name__}(alias='{self.alias}', format={self.format}, format_template='{self.format_template}', discriminator='{self.discriminator}')" + + +def maybe_transform( + data: object, + expected_type: object, +) -> Any | None: + """Wrapper over `transform()` that allows `None` to be passed. + + See `transform()` for more details. + """ + if data is None: + return None + return transform(data, expected_type) + + +# Wrapper over _transform_recursive providing fake types +def transform( + data: _T, + expected_type: object, +) -> _T: + """Transform dictionaries based off of type information from the given type, for example: + + ```py + class Params(TypedDict, total=False): + card_id: Required[Annotated[str, PropertyInfo(alias="cardID")]] + + + transformed = transform({"card_id": ""}, Params) + # {'cardID': ''} + ``` + + Any keys / data that does not have type information given will be included as is. + + It should be noted that the transformations that this function does are not represented in the type system. + """ + transformed = _transform_recursive(data, annotation=cast(type, expected_type)) + return cast(_T, transformed) + + +@lru_cache(maxsize=8096) +def _get_annotated_type(type_: type) -> type | None: + """If the given type is an `Annotated` type then it is returned, if not `None` is returned. + + This also unwraps the type when applicable, e.g. 
`Required[Annotated[T, ...]]` + """ + if is_required_type(type_): + # Unwrap `Required[Annotated[T, ...]]` to `Annotated[T, ...]` + type_ = get_args(type_)[0] + + if is_annotated_type(type_): + return type_ + + return None + + +def _maybe_transform_key(key: str, type_: type) -> str: + """Transform the given `data` based on the annotations provided in `type_`. + + Note: this function only looks at `Annotated` types that contain `PropertyInfo` metadata. + """ + annotated_type = _get_annotated_type(type_) + if annotated_type is None: + # no `Annotated` definition for this type, no transformation needed + return key + + # ignore the first argument as it is the actual type + annotations = get_args(annotated_type)[1:] + for annotation in annotations: + if isinstance(annotation, PropertyInfo) and annotation.alias is not None: + return annotation.alias + + return key + + +def _no_transform_needed(annotation: type) -> bool: + return annotation == float or annotation == int + + +def _transform_recursive( + data: object, + *, + annotation: type, + inner_type: type | None = None, +) -> object: + """Transform the given data against the expected type. + + Args: + annotation: The direct type annotation given to the particular piece of data. + This may or may not be wrapped in metadata types, e.g. `Required[T]`, `Annotated[T, ...]` etc + + inner_type: If applicable, this is the "inside" type. This is useful in certain cases where the outside type + is a container type such as `List[T]`. In that case `inner_type` should be set to `T` so that each entry in + the list can be transformed using the metadata from the container type. + + Defaults to the same value as the `annotation` argument. + """ + from .._compat import model_dump + + if inner_type is None: + inner_type = annotation + + stripped_type = strip_annotated_type(inner_type) + origin = get_origin(stripped_type) or stripped_type + if is_typeddict(stripped_type) and is_mapping(data): + return _transform_typeddict(data, stripped_type) + + if origin == dict and is_mapping(data): + items_type = get_args(stripped_type)[1] + return {key: _transform_recursive(value, annotation=items_type) for key, value in data.items()} + + if ( + # List[T] + (is_list_type(stripped_type) and is_list(data)) + # Iterable[T] + or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str)) + # Sequence[T] + or (is_sequence_type(stripped_type) and is_sequence(data) and not isinstance(data, str)) + ): + # dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually + # intended as an iterable, so we don't transform it. + if isinstance(data, dict): + return cast(object, data) + + inner_type = extract_type_arg(stripped_type, 0) + if _no_transform_needed(inner_type): + # for some types there is no need to transform anything, so we can get a small + # perf boost from skipping that work. + # + # but we still need to convert to a list to ensure the data is json-serializable + if is_list(data): + return data + return list(data) + + return [_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data] + + if is_union_type(stripped_type): + # For union types we run the transformation against all subtypes to ensure that everything is transformed. + # + # TODO: there may be edge cases where the same normalized field name will transform to two different names + # in different subtypes. 
+ for subtype in get_args(stripped_type): + data = _transform_recursive(data, annotation=annotation, inner_type=subtype) + return data + + if isinstance(data, pydantic.BaseModel): + return model_dump(data, exclude_unset=True, mode="json") + + annotated_type = _get_annotated_type(annotation) + if annotated_type is None: + return data + + # ignore the first argument as it is the actual type + annotations = get_args(annotated_type)[1:] + for annotation in annotations: + if isinstance(annotation, PropertyInfo) and annotation.format is not None: + return _format_data(data, annotation.format, annotation.format_template) + + return data + + +def _format_data(data: object, format_: PropertyFormat, format_template: str | None) -> object: + if isinstance(data, (date, datetime)): + if format_ == "iso8601": + return data.isoformat() + + if format_ == "custom" and format_template is not None: + return data.strftime(format_template) + + if format_ == "base64" and is_base64_file_input(data): + binary: str | bytes | None = None + + if isinstance(data, pathlib.Path): + binary = data.read_bytes() + elif isinstance(data, io.IOBase): + binary = data.read() + + if isinstance(binary, str): # type: ignore[unreachable] + binary = binary.encode() + + if not isinstance(binary, bytes): + raise RuntimeError(f"Could not read bytes from {data}; Received {type(binary)}") + + return base64.b64encode(binary).decode("ascii") + + return data + + +def _transform_typeddict( + data: Mapping[str, object], + expected_type: type, +) -> Mapping[str, object]: + result: dict[str, object] = {} + annotations = get_type_hints(expected_type, include_extras=True) + for key, value in data.items(): + if not is_given(value): + # we don't need to include omitted values here as they'll + # be stripped out before the request is sent anyway + continue + + type_ = annotations.get(key) + if type_ is None: + # we do not have a type annotation for this field, leave it as is + result[key] = value + else: + result[_maybe_transform_key(key, type_)] = _transform_recursive(value, annotation=type_) + return result + + +async def async_maybe_transform( + data: object, + expected_type: object, +) -> Any | None: + """Wrapper over `async_transform()` that allows `None` to be passed. + + See `async_transform()` for more details. + """ + if data is None: + return None + return await async_transform(data, expected_type) + + +async def async_transform( + data: _T, + expected_type: object, +) -> _T: + """Transform dictionaries based off of type information from the given type, for example: + + ```py + class Params(TypedDict, total=False): + card_id: Required[Annotated[str, PropertyInfo(alias="cardID")]] + + + transformed = transform({"card_id": ""}, Params) + # {'cardID': ''} + ``` + + Any keys / data that does not have type information given will be included as is. + + It should be noted that the transformations that this function does are not represented in the type system. + """ + transformed = await _async_transform_recursive(data, annotation=cast(type, expected_type)) + return cast(_T, transformed) + + +async def _async_transform_recursive( + data: object, + *, + annotation: type, + inner_type: type | None = None, +) -> object: + """Transform the given data against the expected type. + + Args: + annotation: The direct type annotation given to the particular piece of data. + This may or may not be wrapped in metadata types, e.g. `Required[T]`, `Annotated[T, ...]` etc + + inner_type: If applicable, this is the "inside" type. 
This is useful in certain cases where the outside type + is a container type such as `List[T]`. In that case `inner_type` should be set to `T` so that each entry in + the list can be transformed using the metadata from the container type. + + Defaults to the same value as the `annotation` argument. + """ + from .._compat import model_dump + + if inner_type is None: + inner_type = annotation + + stripped_type = strip_annotated_type(inner_type) + origin = get_origin(stripped_type) or stripped_type + if is_typeddict(stripped_type) and is_mapping(data): + return await _async_transform_typeddict(data, stripped_type) + + if origin == dict and is_mapping(data): + items_type = get_args(stripped_type)[1] + return {key: _transform_recursive(value, annotation=items_type) for key, value in data.items()} + + if ( + # List[T] + (is_list_type(stripped_type) and is_list(data)) + # Iterable[T] + or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str)) + # Sequence[T] + or (is_sequence_type(stripped_type) and is_sequence(data) and not isinstance(data, str)) + ): + # dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually + # intended as an iterable, so we don't transform it. + if isinstance(data, dict): + return cast(object, data) + + inner_type = extract_type_arg(stripped_type, 0) + if _no_transform_needed(inner_type): + # for some types there is no need to transform anything, so we can get a small + # perf boost from skipping that work. + # + # but we still need to convert to a list to ensure the data is json-serializable + if is_list(data): + return data + return list(data) + + return [await _async_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data] + + if is_union_type(stripped_type): + # For union types we run the transformation against all subtypes to ensure that everything is transformed. + # + # TODO: there may be edge cases where the same normalized field name will transform to two different names + # in different subtypes. 
+ for subtype in get_args(stripped_type): + data = await _async_transform_recursive(data, annotation=annotation, inner_type=subtype) + return data + + if isinstance(data, pydantic.BaseModel): + return model_dump(data, exclude_unset=True, mode="json") + + annotated_type = _get_annotated_type(annotation) + if annotated_type is None: + return data + + # ignore the first argument as it is the actual type + annotations = get_args(annotated_type)[1:] + for annotation in annotations: + if isinstance(annotation, PropertyInfo) and annotation.format is not None: + return await _async_format_data(data, annotation.format, annotation.format_template) + + return data + + +async def _async_format_data(data: object, format_: PropertyFormat, format_template: str | None) -> object: + if isinstance(data, (date, datetime)): + if format_ == "iso8601": + return data.isoformat() + + if format_ == "custom" and format_template is not None: + return data.strftime(format_template) + + if format_ == "base64" and is_base64_file_input(data): + binary: str | bytes | None = None + + if isinstance(data, pathlib.Path): + binary = await anyio.Path(data).read_bytes() + elif isinstance(data, io.IOBase): + binary = data.read() + + if isinstance(binary, str): # type: ignore[unreachable] + binary = binary.encode() + + if not isinstance(binary, bytes): + raise RuntimeError(f"Could not read bytes from {data}; Received {type(binary)}") + + return base64.b64encode(binary).decode("ascii") + + return data + + +async def _async_transform_typeddict( + data: Mapping[str, object], + expected_type: type, +) -> Mapping[str, object]: + result: dict[str, object] = {} + annotations = get_type_hints(expected_type, include_extras=True) + for key, value in data.items(): + if not is_given(value): + # we don't need to include omitted values here as they'll + # be stripped out before the request is sent anyway + continue + + type_ = annotations.get(key) + if type_ is None: + # we do not have a type annotation for this field, leave it as is + result[key] = value + else: + result[_maybe_transform_key(key, type_)] = await _async_transform_recursive(value, annotation=type_) + return result + + +@lru_cache(maxsize=8096) +def get_type_hints( + obj: Any, + globalns: dict[str, Any] | None = None, + localns: Mapping[str, Any] | None = None, + include_extras: bool = False, +) -> dict[str, Any]: + return _get_type_hints(obj, globalns=globalns, localns=localns, include_extras=include_extras) diff --git a/src/kernel/_utils/_typing.py b/src/kernel/_utils/_typing.py new file mode 100644 index 0000000..193109f --- /dev/null +++ b/src/kernel/_utils/_typing.py @@ -0,0 +1,156 @@ +from __future__ import annotations + +import sys +import typing +import typing_extensions +from typing import Any, TypeVar, Iterable, cast +from collections import abc as _c_abc +from typing_extensions import ( + TypeIs, + Required, + Annotated, + get_args, + get_origin, +) + +from ._utils import lru_cache +from .._types import InheritsGeneric +from ._compat import is_union as _is_union + + +def is_annotated_type(typ: type) -> bool: + return get_origin(typ) == Annotated + + +def is_list_type(typ: type) -> bool: + return (get_origin(typ) or typ) == list + + +def is_sequence_type(typ: type) -> bool: + origin = get_origin(typ) or typ + return origin == typing_extensions.Sequence or origin == typing.Sequence or origin == _c_abc.Sequence + + +def is_iterable_type(typ: type) -> bool: + """If the given type is `typing.Iterable[T]`""" + origin = get_origin(typ) or typ + return origin == Iterable or 
origin == _c_abc.Iterable + + +def is_union_type(typ: type) -> bool: + return _is_union(get_origin(typ)) + + +def is_required_type(typ: type) -> bool: + return get_origin(typ) == Required + + +def is_typevar(typ: type) -> bool: + # type ignore is required because type checkers + # think this expression will always return False + return type(typ) == TypeVar # type: ignore + + +_TYPE_ALIAS_TYPES: tuple[type[typing_extensions.TypeAliasType], ...] = (typing_extensions.TypeAliasType,) +if sys.version_info >= (3, 12): + _TYPE_ALIAS_TYPES = (*_TYPE_ALIAS_TYPES, typing.TypeAliasType) + + +def is_type_alias_type(tp: Any, /) -> TypeIs[typing_extensions.TypeAliasType]: + """Return whether the provided argument is an instance of `TypeAliasType`. + + ```python + type Int = int + is_type_alias_type(Int) + # > True + Str = TypeAliasType("Str", str) + is_type_alias_type(Str) + # > True + ``` + """ + return isinstance(tp, _TYPE_ALIAS_TYPES) + + +# Extracts T from Annotated[T, ...] or from Required[Annotated[T, ...]] +@lru_cache(maxsize=8096) +def strip_annotated_type(typ: type) -> type: + if is_required_type(typ) or is_annotated_type(typ): + return strip_annotated_type(cast(type, get_args(typ)[0])) + + return typ + + +def extract_type_arg(typ: type, index: int) -> type: + args = get_args(typ) + try: + return cast(type, args[index]) + except IndexError as err: + raise RuntimeError(f"Expected type {typ} to have a type argument at index {index} but it did not") from err + + +def extract_type_var_from_base( + typ: type, + *, + generic_bases: tuple[type, ...], + index: int, + failure_message: str | None = None, +) -> type: + """Given a type like `Foo[T]`, returns the generic type variable `T`. + + This also handles the case where a concrete subclass is given, e.g. + ```py + class MyResponse(Foo[bytes]): + ... + + extract_type_var(MyResponse, bases=(Foo,), index=0) -> bytes + ``` + + And where a generic subclass is given: + ```py + _T = TypeVar('_T') + class MyResponse(Foo[_T]): + ... + + extract_type_var(MyResponse[bytes], bases=(Foo,), index=0) -> bytes + ``` + """ + cls = cast(object, get_origin(typ) or typ) + if cls in generic_bases: # pyright: ignore[reportUnnecessaryContains] + # we're given the class directly + return extract_type_arg(typ, index) + + # if a subclass is given + # --- + # this is needed as __orig_bases__ is not present in the typeshed stubs + # because it is intended to be for internal use only, however there does + # not seem to be a way to resolve generic TypeVars for inherited subclasses + # without using it. + if isinstance(cls, InheritsGeneric): + target_base_class: Any | None = None + for base in cls.__orig_bases__: + if base.__origin__ in generic_bases: + target_base_class = base + break + + if target_base_class is None: + raise RuntimeError( + "Could not find the generic base class;\n" + "This should never happen;\n" + f"Does {cls} inherit from one of {generic_bases} ?" + ) + + extracted = extract_type_arg(target_base_class, index) + if is_typevar(extracted): + # If the extracted type argument is itself a type variable + # then that means the subclass itself is generic, so we have + # to resolve the type argument from the class itself, not + # the base class. + # + # Note: if there is more than 1 type argument, the subclass could + # change the ordering of the type arguments, this is not currently + # supported. 
+ return extract_type_arg(typ, index) + + return extracted + + raise RuntimeError(failure_message or f"Could not resolve inner type variable at index {index} for {typ}") diff --git a/src/kernel/_utils/_utils.py b/src/kernel/_utils/_utils.py new file mode 100644 index 0000000..eec7f4a --- /dev/null +++ b/src/kernel/_utils/_utils.py @@ -0,0 +1,421 @@ +from __future__ import annotations + +import os +import re +import inspect +import functools +from typing import ( + Any, + Tuple, + Mapping, + TypeVar, + Callable, + Iterable, + Sequence, + cast, + overload, +) +from pathlib import Path +from datetime import date, datetime +from typing_extensions import TypeGuard + +import sniffio + +from .._types import Omit, NotGiven, FileTypes, HeadersLike + +_T = TypeVar("_T") +_TupleT = TypeVar("_TupleT", bound=Tuple[object, ...]) +_MappingT = TypeVar("_MappingT", bound=Mapping[str, object]) +_SequenceT = TypeVar("_SequenceT", bound=Sequence[object]) +CallableT = TypeVar("CallableT", bound=Callable[..., Any]) + + +def flatten(t: Iterable[Iterable[_T]]) -> list[_T]: + return [item for sublist in t for item in sublist] + + +def extract_files( + # TODO: this needs to take Dict but variance issues..... + # create protocol type ? + query: Mapping[str, object], + *, + paths: Sequence[Sequence[str]], +) -> list[tuple[str, FileTypes]]: + """Recursively extract files from the given dictionary based on specified paths. + + A path may look like this ['foo', 'files', '<array>', 'data']. + + Note: this mutates the given dictionary. + """ + files: list[tuple[str, FileTypes]] = [] + for path in paths: + files.extend(_extract_items(query, path, index=0, flattened_key=None)) + return files + + +def _extract_items( + obj: object, + path: Sequence[str], + *, + index: int, + flattened_key: str | None, +) -> list[tuple[str, FileTypes]]: + try: + key = path[index] + except IndexError: + if not is_given(obj): + # no value was provided - we can safely ignore + return [] + + # cyclical import + from .._files import assert_is_file_content + + # We have exhausted the path, return the entry we found. + assert flattened_key is not None + + if is_list(obj): + files: list[tuple[str, FileTypes]] = [] + for entry in obj: + assert_is_file_content(entry, key=flattened_key + "[]" if flattened_key else "") + files.append((flattened_key + "[]", cast(FileTypes, entry))) + return files + + assert_is_file_content(obj, key=flattened_key) + return [(flattened_key, cast(FileTypes, obj))] + + index += 1 + if is_dict(obj): + try: + # We are at the last entry in the path so we must remove the field + if (len(path)) == index: + item = obj.pop(key) + else: + item = obj[key] + except KeyError: + # Key was not present in the dictionary, this is not indicative of an error + # as the given path may not point to a required field. We also do not want + # to enforce required fields as the API may differ from the spec in some cases. + return [] + if flattened_key is None: + flattened_key = key + else: + flattened_key += f"[{key}]" + return _extract_items( + item, + path, + index=index, + flattened_key=flattened_key, + ) + elif is_list(obj): + if key != "<array>": + return [] + + return flatten( + [ + _extract_items( + item, + path, + index=index, + flattened_key=flattened_key + "[]" if flattened_key is not None else "[]", + ) + for item in obj + ] + ) + + # Something unexpected was passed, just ignore it.
+ return [] + + +def is_given(obj: _T | NotGiven | Omit) -> TypeGuard[_T]: + return not isinstance(obj, NotGiven) and not isinstance(obj, Omit) + + +# Type safe methods for narrowing types with TypeVars. +# The default narrowing for isinstance(obj, dict) is dict[unknown, unknown], +# however this cause Pyright to rightfully report errors. As we know we don't +# care about the contained types we can safely use `object` in its place. +# +# There are two separate functions defined, `is_*` and `is_*_t` for different use cases. +# `is_*` is for when you're dealing with an unknown input +# `is_*_t` is for when you're narrowing a known union type to a specific subset + + +def is_tuple(obj: object) -> TypeGuard[tuple[object, ...]]: + return isinstance(obj, tuple) + + +def is_tuple_t(obj: _TupleT | object) -> TypeGuard[_TupleT]: + return isinstance(obj, tuple) + + +def is_sequence(obj: object) -> TypeGuard[Sequence[object]]: + return isinstance(obj, Sequence) + + +def is_sequence_t(obj: _SequenceT | object) -> TypeGuard[_SequenceT]: + return isinstance(obj, Sequence) + + +def is_mapping(obj: object) -> TypeGuard[Mapping[str, object]]: + return isinstance(obj, Mapping) + + +def is_mapping_t(obj: _MappingT | object) -> TypeGuard[_MappingT]: + return isinstance(obj, Mapping) + + +def is_dict(obj: object) -> TypeGuard[dict[object, object]]: + return isinstance(obj, dict) + + +def is_list(obj: object) -> TypeGuard[list[object]]: + return isinstance(obj, list) + + +def is_iterable(obj: object) -> TypeGuard[Iterable[object]]: + return isinstance(obj, Iterable) + + +def deepcopy_minimal(item: _T) -> _T: + """Minimal reimplementation of copy.deepcopy() that will only copy certain object types: + + - mappings, e.g. `dict` + - list + + This is done for performance reasons. + """ + if is_mapping(item): + return cast(_T, {k: deepcopy_minimal(v) for k, v in item.items()}) + if is_list(item): + return cast(_T, [deepcopy_minimal(entry) for entry in item]) + return item + + +# copied from https://github.com/Rapptz/RoboDanny +def human_join(seq: Sequence[str], *, delim: str = ", ", final: str = "or") -> str: + size = len(seq) + if size == 0: + return "" + + if size == 1: + return seq[0] + + if size == 2: + return f"{seq[0]} {final} {seq[1]}" + + return delim.join(seq[:-1]) + f" {final} {seq[-1]}" + + +def quote(string: str) -> str: + """Add single quotation marks around the given string. Does *not* do any escaping.""" + return f"'{string}'" + + +def required_args(*variants: Sequence[str]) -> Callable[[CallableT], CallableT]: + """Decorator to enforce a given set of arguments or variants of arguments are passed to the decorated function. + + Useful for enforcing runtime validation of overloaded functions. + + Example usage: + ```py + @overload + def foo(*, a: str) -> str: ... + + + @overload + def foo(*, b: bool) -> str: ... + + + # This enforces the same constraints that a static type checker would + # i.e. that either a or b must be passed to the function + @required_args(["a"], ["b"]) + def foo(*, a: str | None = None, b: bool | None = None) -> str: ... 
+ ``` + """ + + def inner(func: CallableT) -> CallableT: + params = inspect.signature(func).parameters + positional = [ + name + for name, param in params.items() + if param.kind + in { + param.POSITIONAL_ONLY, + param.POSITIONAL_OR_KEYWORD, + } + ] + + @functools.wraps(func) + def wrapper(*args: object, **kwargs: object) -> object: + given_params: set[str] = set() + for i, _ in enumerate(args): + try: + given_params.add(positional[i]) + except IndexError: + raise TypeError( + f"{func.__name__}() takes {len(positional)} argument(s) but {len(args)} were given" + ) from None + + for key in kwargs.keys(): + given_params.add(key) + + for variant in variants: + matches = all((param in given_params for param in variant)) + if matches: + break + else: # no break + if len(variants) > 1: + variations = human_join( + ["(" + human_join([quote(arg) for arg in variant], final="and") + ")" for variant in variants] + ) + msg = f"Missing required arguments; Expected either {variations} arguments to be given" + else: + assert len(variants) > 0 + + # TODO: this error message is not deterministic + missing = list(set(variants[0]) - given_params) + if len(missing) > 1: + msg = f"Missing required arguments: {human_join([quote(arg) for arg in missing])}" + else: + msg = f"Missing required argument: {quote(missing[0])}" + raise TypeError(msg) + return func(*args, **kwargs) + + return wrapper # type: ignore + + return inner + + +_K = TypeVar("_K") +_V = TypeVar("_V") + + +@overload +def strip_not_given(obj: None) -> None: ... + + +@overload +def strip_not_given(obj: Mapping[_K, _V | NotGiven]) -> dict[_K, _V]: ... + + +@overload +def strip_not_given(obj: object) -> object: ... + + +def strip_not_given(obj: object | None) -> object: + """Remove all top-level keys where their values are instances of `NotGiven`""" + if obj is None: + return None + + if not is_mapping(obj): + return obj + + return {key: value for key, value in obj.items() if not isinstance(value, NotGiven)} + + +def coerce_integer(val: str) -> int: + return int(val, base=10) + + +def coerce_float(val: str) -> float: + return float(val) + + +def coerce_boolean(val: str) -> bool: + return val == "true" or val == "1" or val == "on" + + +def maybe_coerce_integer(val: str | None) -> int | None: + if val is None: + return None + return coerce_integer(val) + + +def maybe_coerce_float(val: str | None) -> float | None: + if val is None: + return None + return coerce_float(val) + + +def maybe_coerce_boolean(val: str | None) -> bool | None: + if val is None: + return None + return coerce_boolean(val) + + +def removeprefix(string: str, prefix: str) -> str: + """Remove a prefix from a string. + + Backport of `str.removeprefix` for Python < 3.9 + """ + if string.startswith(prefix): + return string[len(prefix) :] + return string + + +def removesuffix(string: str, suffix: str) -> str: + """Remove a suffix from a string. 
+ + Backport of `str.removesuffix` for Python < 3.9 + """ + if string.endswith(suffix): + return string[: -len(suffix)] + return string + + +def file_from_path(path: str) -> FileTypes: + contents = Path(path).read_bytes() + file_name = os.path.basename(path) + return (file_name, contents) + + +def get_required_header(headers: HeadersLike, header: str) -> str: + lower_header = header.lower() + if is_mapping_t(headers): + # mypy doesn't understand the type narrowing here + for k, v in headers.items(): # type: ignore + if k.lower() == lower_header and isinstance(v, str): + return v + + # to deal with the case where the header looks like Stainless-Event-Id + intercaps_header = re.sub(r"([^\w])(\w)", lambda pat: pat.group(1) + pat.group(2).upper(), header.capitalize()) + + for normalized_header in [header, lower_header, header.upper(), intercaps_header]: + value = headers.get(normalized_header) + if value: + return value + + raise ValueError(f"Could not find {header} header") + + +def get_async_library() -> str: + try: + return sniffio.current_async_library() + except Exception: + return "false" + + +def lru_cache(*, maxsize: int | None = 128) -> Callable[[CallableT], CallableT]: + """A version of functools.lru_cache that retains the type signature + for the wrapped function arguments. + """ + wrapper = functools.lru_cache( # noqa: TID251 + maxsize=maxsize, + ) + return cast(Any, wrapper) # type: ignore[no-any-return] + + +def json_safe(data: object) -> object: + """Translates a mapping / sequence recursively in the same fashion + as `pydantic` v2's `model_dump(mode="json")`. + """ + if is_mapping(data): + return {json_safe(key): json_safe(value) for key, value in data.items()} + + if is_iterable(data) and not isinstance(data, (str, bytes, bytearray)): + return [json_safe(item) for item in data] + + if isinstance(data, (datetime, date)): + return data.isoformat() + + return data diff --git a/src/kernel/_version.py b/src/kernel/_version.py new file mode 100644 index 0000000..17d46b5 --- /dev/null +++ b/src/kernel/_version.py @@ -0,0 +1,4 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +__title__ = "kernel" +__version__ = "0.24.0" # x-release-please-version diff --git a/src/kernel/lib/.keep b/src/kernel/lib/.keep new file mode 100644 index 0000000..5e2c99f --- /dev/null +++ b/src/kernel/lib/.keep @@ -0,0 +1,4 @@ +File generated from our OpenAPI spec by Stainless. + +This directory can be used to store custom files to expand the SDK. +It is ignored by Stainless code generation and its content (other than this keep file) won't be touched. \ No newline at end of file diff --git a/src/kernel/pagination.py b/src/kernel/pagination.py new file mode 100644 index 0000000..cdf83c2 --- /dev/null +++ b/src/kernel/pagination.py @@ -0,0 +1,102 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
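+#
+# Usage note (illustrative sketch, not part of the generated code): the offset-based
+# pages defined below are returned by paginated list methods such as `apps.list()`.
+# A typical consumption pattern, assuming the top-level client class is named
+# `Kernel`, looks like:
+#
+#     client = Kernel()
+#     page = client.apps.list(limit=20)
+#     for app in page:  # iterating a page is expected to fetch subsequent pages
+#         print(app)    # for as long as has_next_page() returns True
+#
+# Pagination state comes from the `X-Has-More` and `X-Next-Offset` response headers
+# (see `build()`), and the next request's `offset` is computed in `next_page_info()`
+# as `next_offset + len(items)`.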
+ +from typing import Any, List, Type, Generic, Mapping, TypeVar, Optional, cast +from typing_extensions import override + +from httpx import Response + +from ._utils import is_mapping, maybe_coerce_boolean, maybe_coerce_integer +from ._models import BaseModel +from ._base_client import BasePage, PageInfo, BaseSyncPage, BaseAsyncPage + +__all__ = ["SyncOffsetPagination", "AsyncOffsetPagination"] + +_BaseModelT = TypeVar("_BaseModelT", bound=BaseModel) + +_T = TypeVar("_T") + + +class SyncOffsetPagination(BaseSyncPage[_T], BasePage[_T], Generic[_T]): + items: List[_T] + has_more: Optional[bool] = None + next_offset: Optional[int] = None + + @override + def _get_page_items(self) -> List[_T]: + items = self.items + if not items: + return [] + return items + + @override + def has_next_page(self) -> bool: + has_more = self.has_more + if has_more is not None and has_more is False: + return False + + return super().has_next_page() + + @override + def next_page_info(self) -> Optional[PageInfo]: + next_offset = self.next_offset + if next_offset is None: + return None # type: ignore[unreachable] + + length = len(self._get_page_items()) + current_count = next_offset + length + + return PageInfo(params={"offset": current_count}) + + @classmethod + def build(cls: Type[_BaseModelT], *, response: Response, data: object) -> _BaseModelT: # noqa: ARG003 + return cls.construct( + None, + **{ + **(cast(Mapping[str, Any], data) if is_mapping(data) else {"items": data}), + "has_more": maybe_coerce_boolean(response.headers.get("X-Has-More")), + "next_offset": maybe_coerce_integer(response.headers.get("X-Next-Offset")), + }, + ) + + +class AsyncOffsetPagination(BaseAsyncPage[_T], BasePage[_T], Generic[_T]): + items: List[_T] + has_more: Optional[bool] = None + next_offset: Optional[int] = None + + @override + def _get_page_items(self) -> List[_T]: + items = self.items + if not items: + return [] + return items + + @override + def has_next_page(self) -> bool: + has_more = self.has_more + if has_more is not None and has_more is False: + return False + + return super().has_next_page() + + @override + def next_page_info(self) -> Optional[PageInfo]: + next_offset = self.next_offset + if next_offset is None: + return None # type: ignore[unreachable] + + length = len(self._get_page_items()) + current_count = next_offset + length + + return PageInfo(params={"offset": current_count}) + + @classmethod + def build(cls: Type[_BaseModelT], *, response: Response, data: object) -> _BaseModelT: # noqa: ARG003 + return cls.construct( + None, + **{ + **(cast(Mapping[str, Any], data) if is_mapping(data) else {"items": data}), + "has_more": maybe_coerce_boolean(response.headers.get("X-Has-More")), + "next_offset": maybe_coerce_integer(response.headers.get("X-Next-Offset")), + }, + ) diff --git a/src/kernel/py.typed b/src/kernel/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/src/kernel/resources/__init__.py b/src/kernel/resources/__init__.py new file mode 100644 index 0000000..e6e8103 --- /dev/null +++ b/src/kernel/resources/__init__.py @@ -0,0 +1,145 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
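+#
+# Note (illustrative, not part of the generated code): this package re-exports each
+# resource class together with its raw-response and streaming-response wrappers. In
+# application code these are normally reached through properties on the client rather
+# than imported directly, e.g. (assuming the client class is named `Kernel`):
+#
+#     client = Kernel()
+#     client.apps.list()              # AppsResource
+#     client.agents.auth.create(...)  # AuthResource nested under AgentsResource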
+ +from .apps import ( + AppsResource, + AsyncAppsResource, + AppsResourceWithRawResponse, + AsyncAppsResourceWithRawResponse, + AppsResourceWithStreamingResponse, + AsyncAppsResourceWithStreamingResponse, +) +from .agents import ( + AgentsResource, + AsyncAgentsResource, + AgentsResourceWithRawResponse, + AsyncAgentsResourceWithRawResponse, + AgentsResourceWithStreamingResponse, + AsyncAgentsResourceWithStreamingResponse, +) +from .proxies import ( + ProxiesResource, + AsyncProxiesResource, + ProxiesResourceWithRawResponse, + AsyncProxiesResourceWithRawResponse, + ProxiesResourceWithStreamingResponse, + AsyncProxiesResourceWithStreamingResponse, +) +from .browsers import ( + BrowsersResource, + AsyncBrowsersResource, + BrowsersResourceWithRawResponse, + AsyncBrowsersResourceWithRawResponse, + BrowsersResourceWithStreamingResponse, + AsyncBrowsersResourceWithStreamingResponse, +) +from .profiles import ( + ProfilesResource, + AsyncProfilesResource, + ProfilesResourceWithRawResponse, + AsyncProfilesResourceWithRawResponse, + ProfilesResourceWithStreamingResponse, + AsyncProfilesResourceWithStreamingResponse, +) +from .extensions import ( + ExtensionsResource, + AsyncExtensionsResource, + ExtensionsResourceWithRawResponse, + AsyncExtensionsResourceWithRawResponse, + ExtensionsResourceWithStreamingResponse, + AsyncExtensionsResourceWithStreamingResponse, +) +from .credentials import ( + CredentialsResource, + AsyncCredentialsResource, + CredentialsResourceWithRawResponse, + AsyncCredentialsResourceWithRawResponse, + CredentialsResourceWithStreamingResponse, + AsyncCredentialsResourceWithStreamingResponse, +) +from .deployments import ( + DeploymentsResource, + AsyncDeploymentsResource, + DeploymentsResourceWithRawResponse, + AsyncDeploymentsResourceWithRawResponse, + DeploymentsResourceWithStreamingResponse, + AsyncDeploymentsResourceWithStreamingResponse, +) +from .invocations import ( + InvocationsResource, + AsyncInvocationsResource, + InvocationsResourceWithRawResponse, + AsyncInvocationsResourceWithRawResponse, + InvocationsResourceWithStreamingResponse, + AsyncInvocationsResourceWithStreamingResponse, +) +from .browser_pools import ( + BrowserPoolsResource, + AsyncBrowserPoolsResource, + BrowserPoolsResourceWithRawResponse, + AsyncBrowserPoolsResourceWithRawResponse, + BrowserPoolsResourceWithStreamingResponse, + AsyncBrowserPoolsResourceWithStreamingResponse, +) + +__all__ = [ + "DeploymentsResource", + "AsyncDeploymentsResource", + "DeploymentsResourceWithRawResponse", + "AsyncDeploymentsResourceWithRawResponse", + "DeploymentsResourceWithStreamingResponse", + "AsyncDeploymentsResourceWithStreamingResponse", + "AppsResource", + "AsyncAppsResource", + "AppsResourceWithRawResponse", + "AsyncAppsResourceWithRawResponse", + "AppsResourceWithStreamingResponse", + "AsyncAppsResourceWithStreamingResponse", + "InvocationsResource", + "AsyncInvocationsResource", + "InvocationsResourceWithRawResponse", + "AsyncInvocationsResourceWithRawResponse", + "InvocationsResourceWithStreamingResponse", + "AsyncInvocationsResourceWithStreamingResponse", + "BrowsersResource", + "AsyncBrowsersResource", + "BrowsersResourceWithRawResponse", + "AsyncBrowsersResourceWithRawResponse", + "BrowsersResourceWithStreamingResponse", + "AsyncBrowsersResourceWithStreamingResponse", + "ProfilesResource", + "AsyncProfilesResource", + "ProfilesResourceWithRawResponse", + "AsyncProfilesResourceWithRawResponse", + "ProfilesResourceWithStreamingResponse", + "AsyncProfilesResourceWithStreamingResponse", + "ProxiesResource", + 
"AsyncProxiesResource", + "ProxiesResourceWithRawResponse", + "AsyncProxiesResourceWithRawResponse", + "ProxiesResourceWithStreamingResponse", + "AsyncProxiesResourceWithStreamingResponse", + "ExtensionsResource", + "AsyncExtensionsResource", + "ExtensionsResourceWithRawResponse", + "AsyncExtensionsResourceWithRawResponse", + "ExtensionsResourceWithStreamingResponse", + "AsyncExtensionsResourceWithStreamingResponse", + "BrowserPoolsResource", + "AsyncBrowserPoolsResource", + "BrowserPoolsResourceWithRawResponse", + "AsyncBrowserPoolsResourceWithRawResponse", + "BrowserPoolsResourceWithStreamingResponse", + "AsyncBrowserPoolsResourceWithStreamingResponse", + "AgentsResource", + "AsyncAgentsResource", + "AgentsResourceWithRawResponse", + "AsyncAgentsResourceWithRawResponse", + "AgentsResourceWithStreamingResponse", + "AsyncAgentsResourceWithStreamingResponse", + "CredentialsResource", + "AsyncCredentialsResource", + "CredentialsResourceWithRawResponse", + "AsyncCredentialsResourceWithRawResponse", + "CredentialsResourceWithStreamingResponse", + "AsyncCredentialsResourceWithStreamingResponse", +] diff --git a/src/kernel/resources/agents/__init__.py b/src/kernel/resources/agents/__init__.py new file mode 100644 index 0000000..cb159eb --- /dev/null +++ b/src/kernel/resources/agents/__init__.py @@ -0,0 +1,33 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from .auth import ( + AuthResource, + AsyncAuthResource, + AuthResourceWithRawResponse, + AsyncAuthResourceWithRawResponse, + AuthResourceWithStreamingResponse, + AsyncAuthResourceWithStreamingResponse, +) +from .agents import ( + AgentsResource, + AsyncAgentsResource, + AgentsResourceWithRawResponse, + AsyncAgentsResourceWithRawResponse, + AgentsResourceWithStreamingResponse, + AsyncAgentsResourceWithStreamingResponse, +) + +__all__ = [ + "AuthResource", + "AsyncAuthResource", + "AuthResourceWithRawResponse", + "AsyncAuthResourceWithRawResponse", + "AuthResourceWithStreamingResponse", + "AsyncAuthResourceWithStreamingResponse", + "AgentsResource", + "AsyncAgentsResource", + "AgentsResourceWithRawResponse", + "AsyncAgentsResourceWithRawResponse", + "AgentsResourceWithStreamingResponse", + "AsyncAgentsResourceWithStreamingResponse", +] diff --git a/src/kernel/resources/agents/agents.py b/src/kernel/resources/agents/agents.py new file mode 100644 index 0000000..6999bd5 --- /dev/null +++ b/src/kernel/resources/agents/agents.py @@ -0,0 +1,102 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from ..._compat import cached_property +from .auth.auth import ( + AuthResource, + AsyncAuthResource, + AuthResourceWithRawResponse, + AsyncAuthResourceWithRawResponse, + AuthResourceWithStreamingResponse, + AsyncAuthResourceWithStreamingResponse, +) +from ..._resource import SyncAPIResource, AsyncAPIResource + +__all__ = ["AgentsResource", "AsyncAgentsResource"] + + +class AgentsResource(SyncAPIResource): + @cached_property + def auth(self) -> AuthResource: + return AuthResource(self._client) + + @cached_property + def with_raw_response(self) -> AgentsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. 
+ + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return AgentsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AgentsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return AgentsResourceWithStreamingResponse(self) + + +class AsyncAgentsResource(AsyncAPIResource): + @cached_property + def auth(self) -> AsyncAuthResource: + return AsyncAuthResource(self._client) + + @cached_property + def with_raw_response(self) -> AsyncAgentsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return AsyncAgentsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncAgentsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return AsyncAgentsResourceWithStreamingResponse(self) + + +class AgentsResourceWithRawResponse: + def __init__(self, agents: AgentsResource) -> None: + self._agents = agents + + @cached_property + def auth(self) -> AuthResourceWithRawResponse: + return AuthResourceWithRawResponse(self._agents.auth) + + +class AsyncAgentsResourceWithRawResponse: + def __init__(self, agents: AsyncAgentsResource) -> None: + self._agents = agents + + @cached_property + def auth(self) -> AsyncAuthResourceWithRawResponse: + return AsyncAuthResourceWithRawResponse(self._agents.auth) + + +class AgentsResourceWithStreamingResponse: + def __init__(self, agents: AgentsResource) -> None: + self._agents = agents + + @cached_property + def auth(self) -> AuthResourceWithStreamingResponse: + return AuthResourceWithStreamingResponse(self._agents.auth) + + +class AsyncAgentsResourceWithStreamingResponse: + def __init__(self, agents: AsyncAgentsResource) -> None: + self._agents = agents + + @cached_property + def auth(self) -> AsyncAuthResourceWithStreamingResponse: + return AsyncAuthResourceWithStreamingResponse(self._agents.auth) diff --git a/src/kernel/resources/agents/auth/__init__.py b/src/kernel/resources/agents/auth/__init__.py new file mode 100644 index 0000000..6130549 --- /dev/null +++ b/src/kernel/resources/agents/auth/__init__.py @@ -0,0 +1,33 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
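+#
+# Note (illustrative, not part of the generated code): the `InvocationsResource`
+# re-exported here is the auth-agent invocations sub-resource (backing the
+# `/agents/auth/invocations` endpoints) and is distinct from the top-level
+# invocations resource re-exported from `kernel/resources/__init__.py`.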
+ +from .auth import ( + AuthResource, + AsyncAuthResource, + AuthResourceWithRawResponse, + AsyncAuthResourceWithRawResponse, + AuthResourceWithStreamingResponse, + AsyncAuthResourceWithStreamingResponse, +) +from .invocations import ( + InvocationsResource, + AsyncInvocationsResource, + InvocationsResourceWithRawResponse, + AsyncInvocationsResourceWithRawResponse, + InvocationsResourceWithStreamingResponse, + AsyncInvocationsResourceWithStreamingResponse, +) + +__all__ = [ + "InvocationsResource", + "AsyncInvocationsResource", + "InvocationsResourceWithRawResponse", + "AsyncInvocationsResourceWithRawResponse", + "InvocationsResourceWithStreamingResponse", + "AsyncInvocationsResourceWithStreamingResponse", + "AuthResource", + "AsyncAuthResource", + "AuthResourceWithRawResponse", + "AsyncAuthResourceWithRawResponse", + "AuthResourceWithStreamingResponse", + "AsyncAuthResourceWithStreamingResponse", +] diff --git a/src/kernel/resources/agents/auth/auth.py b/src/kernel/resources/agents/auth/auth.py new file mode 100644 index 0000000..4a541f7 --- /dev/null +++ b/src/kernel/resources/agents/auth/auth.py @@ -0,0 +1,560 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import httpx + +from ...._types import Body, Omit, Query, Headers, NoneType, NotGiven, SequenceNotStr, omit, not_given +from ...._utils import maybe_transform, async_maybe_transform +from ...._compat import cached_property +from .invocations import ( + InvocationsResource, + AsyncInvocationsResource, + InvocationsResourceWithRawResponse, + AsyncInvocationsResourceWithRawResponse, + InvocationsResourceWithStreamingResponse, + AsyncInvocationsResourceWithStreamingResponse, +) +from ...._resource import SyncAPIResource, AsyncAPIResource +from ...._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ....pagination import SyncOffsetPagination, AsyncOffsetPagination +from ...._base_client import AsyncPaginator, make_request_options +from ....types.agents import auth_list_params, auth_create_params +from ....types.agents.auth_agent import AuthAgent + +__all__ = ["AuthResource", "AsyncAuthResource"] + + +class AuthResource(SyncAPIResource): + @cached_property + def invocations(self) -> InvocationsResource: + return InvocationsResource(self._client) + + @cached_property + def with_raw_response(self) -> AuthResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return AuthResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AuthResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return AuthResourceWithStreamingResponse(self) + + def create( + self, + *, + domain: str, + profile_name: str, + allowed_domains: SequenceNotStr[str] | Omit = omit, + credential_name: str | Omit = omit, + login_url: str | Omit = omit, + proxy: auth_create_params.Proxy | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
+ # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AuthAgent: + """ + Creates a new auth agent for the specified domain and profile combination, or + returns an existing one if it already exists. This is idempotent - calling with + the same domain and profile will return the same agent. Does NOT start an + invocation - use POST /agents/auth/invocations to start an auth flow. + + Args: + domain: Domain for authentication + + profile_name: Name of the profile to use for this auth agent + + allowed_domains: Additional domains that are valid for this auth agent's authentication flow + (besides the primary domain). Useful when login pages redirect to different + domains. + + credential_name: Optional name of an existing credential to use for this auth agent. If provided, + the credential will be linked to the agent and its values will be used to + auto-fill the login form on invocation. + + login_url: Optional login page URL. If provided, will be stored on the agent and used to + skip discovery in future invocations. + + proxy: Optional proxy configuration + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._post( + "/agents/auth", + body=maybe_transform( + { + "domain": domain, + "profile_name": profile_name, + "allowed_domains": allowed_domains, + "credential_name": credential_name, + "login_url": login_url, + "proxy": proxy, + }, + auth_create_params.AuthCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=AuthAgent, + ) + + def retrieve( + self, + id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AuthAgent: + """Retrieve an auth agent by its ID. + + Returns the current authentication status of + the managed profile. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return self._get( + f"/agents/auth/{id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=AuthAgent, + ) + + def list( + self, + *, + domain: str | Omit = omit, + limit: int | Omit = omit, + offset: int | Omit = omit, + profile_name: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> SyncOffsetPagination[AuthAgent]: + """ + List auth agents with optional filters for profile_name and domain. + + Args: + domain: Filter by domain + + limit: Maximum number of results to return + + offset: Number of results to skip + + profile_name: Filter by profile name + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get_api_list( + "/agents/auth", + page=SyncOffsetPagination[AuthAgent], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "domain": domain, + "limit": limit, + "offset": offset, + "profile_name": profile_name, + }, + auth_list_params.AuthListParams, + ), + ), + model=AuthAgent, + ) + + def delete( + self, + id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """Deletes an auth agent and terminates its workflow. + + This will: + + - Soft delete the auth agent record + - Gracefully terminate the agent's Temporal workflow + - Cancel any in-progress invocations + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return self._delete( + f"/agents/auth/{id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + +class AsyncAuthResource(AsyncAPIResource): + @cached_property + def invocations(self) -> AsyncInvocationsResource: + return AsyncInvocationsResource(self._client) + + @cached_property + def with_raw_response(self) -> AsyncAuthResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return AsyncAuthResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncAuthResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
+ + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return AsyncAuthResourceWithStreamingResponse(self) + + async def create( + self, + *, + domain: str, + profile_name: str, + allowed_domains: SequenceNotStr[str] | Omit = omit, + credential_name: str | Omit = omit, + login_url: str | Omit = omit, + proxy: auth_create_params.Proxy | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AuthAgent: + """ + Creates a new auth agent for the specified domain and profile combination, or + returns an existing one if it already exists. This is idempotent - calling with + the same domain and profile will return the same agent. Does NOT start an + invocation - use POST /agents/auth/invocations to start an auth flow. + + Args: + domain: Domain for authentication + + profile_name: Name of the profile to use for this auth agent + + allowed_domains: Additional domains that are valid for this auth agent's authentication flow + (besides the primary domain). Useful when login pages redirect to different + domains. + + credential_name: Optional name of an existing credential to use for this auth agent. If provided, + the credential will be linked to the agent and its values will be used to + auto-fill the login form on invocation. + + login_url: Optional login page URL. If provided, will be stored on the agent and used to + skip discovery in future invocations. + + proxy: Optional proxy configuration + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._post( + "/agents/auth", + body=await async_maybe_transform( + { + "domain": domain, + "profile_name": profile_name, + "allowed_domains": allowed_domains, + "credential_name": credential_name, + "login_url": login_url, + "proxy": proxy, + }, + auth_create_params.AuthCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=AuthAgent, + ) + + async def retrieve( + self, + id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AuthAgent: + """Retrieve an auth agent by its ID. + + Returns the current authentication status of + the managed profile. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return await self._get( + f"/agents/auth/{id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=AuthAgent, + ) + + def list( + self, + *, + domain: str | Omit = omit, + limit: int | Omit = omit, + offset: int | Omit = omit, + profile_name: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AsyncPaginator[AuthAgent, AsyncOffsetPagination[AuthAgent]]: + """ + List auth agents with optional filters for profile_name and domain. + + Args: + domain: Filter by domain + + limit: Maximum number of results to return + + offset: Number of results to skip + + profile_name: Filter by profile name + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get_api_list( + "/agents/auth", + page=AsyncOffsetPagination[AuthAgent], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "domain": domain, + "limit": limit, + "offset": offset, + "profile_name": profile_name, + }, + auth_list_params.AuthListParams, + ), + ), + model=AuthAgent, + ) + + async def delete( + self, + id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """Deletes an auth agent and terminates its workflow. 
+ + This will: + + - Soft delete the auth agent record + - Gracefully terminate the agent's Temporal workflow + - Cancel any in-progress invocations + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return await self._delete( + f"/agents/auth/{id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + +class AuthResourceWithRawResponse: + def __init__(self, auth: AuthResource) -> None: + self._auth = auth + + self.create = to_raw_response_wrapper( + auth.create, + ) + self.retrieve = to_raw_response_wrapper( + auth.retrieve, + ) + self.list = to_raw_response_wrapper( + auth.list, + ) + self.delete = to_raw_response_wrapper( + auth.delete, + ) + + @cached_property + def invocations(self) -> InvocationsResourceWithRawResponse: + return InvocationsResourceWithRawResponse(self._auth.invocations) + + +class AsyncAuthResourceWithRawResponse: + def __init__(self, auth: AsyncAuthResource) -> None: + self._auth = auth + + self.create = async_to_raw_response_wrapper( + auth.create, + ) + self.retrieve = async_to_raw_response_wrapper( + auth.retrieve, + ) + self.list = async_to_raw_response_wrapper( + auth.list, + ) + self.delete = async_to_raw_response_wrapper( + auth.delete, + ) + + @cached_property + def invocations(self) -> AsyncInvocationsResourceWithRawResponse: + return AsyncInvocationsResourceWithRawResponse(self._auth.invocations) + + +class AuthResourceWithStreamingResponse: + def __init__(self, auth: AuthResource) -> None: + self._auth = auth + + self.create = to_streamed_response_wrapper( + auth.create, + ) + self.retrieve = to_streamed_response_wrapper( + auth.retrieve, + ) + self.list = to_streamed_response_wrapper( + auth.list, + ) + self.delete = to_streamed_response_wrapper( + auth.delete, + ) + + @cached_property + def invocations(self) -> InvocationsResourceWithStreamingResponse: + return InvocationsResourceWithStreamingResponse(self._auth.invocations) + + +class AsyncAuthResourceWithStreamingResponse: + def __init__(self, auth: AsyncAuthResource) -> None: + self._auth = auth + + self.create = async_to_streamed_response_wrapper( + auth.create, + ) + self.retrieve = async_to_streamed_response_wrapper( + auth.retrieve, + ) + self.list = async_to_streamed_response_wrapper( + auth.list, + ) + self.delete = async_to_streamed_response_wrapper( + auth.delete, + ) + + @cached_property + def invocations(self) -> AsyncInvocationsResourceWithStreamingResponse: + return AsyncInvocationsResourceWithStreamingResponse(self._auth.invocations) diff --git a/src/kernel/resources/agents/auth/invocations.py b/src/kernel/resources/agents/auth/invocations.py new file mode 100644 index 0000000..aa1c4da --- /dev/null +++ b/src/kernel/resources/agents/auth/invocations.py @@ -0,0 +1,575 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
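+#
+# Flow overview (illustrative sketch, not part of the generated code): an auth
+# invocation is typically driven end to end roughly like this, assuming the client
+# class is named `Kernel`:
+#
+#     client = Kernel()
+#     inv = client.agents.auth.invocations.create(auth_agent_id="...")
+#     # create() starts the auth flow and returns a hosted URL for the user
+#     # exchange() trades the handoff code for a JWT (no API key required)
+#     # submit() sends either `field_values={...}` or `sso_button="..."`
+#     #   (one of the two is required, enforced by @required_args)
+#     # retrieve() is then polled until the invocation reports completion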
+ +from __future__ import annotations + +from typing import Dict +from typing_extensions import overload + +import httpx + +from ...._types import Body, Omit, Query, Headers, NotGiven, omit, not_given +from ...._utils import required_args, maybe_transform, async_maybe_transform +from ...._compat import cached_property +from ...._resource import SyncAPIResource, AsyncAPIResource +from ...._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ...._base_client import make_request_options +from ....types.agents.auth import invocation_create_params, invocation_submit_params, invocation_exchange_params +from ....types.agents.agent_auth_submit_response import AgentAuthSubmitResponse +from ....types.agents.agent_auth_invocation_response import AgentAuthInvocationResponse +from ....types.agents.auth.invocation_exchange_response import InvocationExchangeResponse +from ....types.agents.auth_agent_invocation_create_response import AuthAgentInvocationCreateResponse + +__all__ = ["InvocationsResource", "AsyncInvocationsResource"] + + +class InvocationsResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> InvocationsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return InvocationsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> InvocationsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return InvocationsResourceWithStreamingResponse(self) + + def create( + self, + *, + auth_agent_id: str, + save_credential_as: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AuthAgentInvocationCreateResponse: + """Creates a new authentication invocation for the specified auth agent. + + This + starts the auth flow and returns a hosted URL for the user to complete + authentication. + + Args: + auth_agent_id: ID of the auth agent to create an invocation for + + save_credential_as: If provided, saves the submitted credentials under this name upon successful + login. The credential will be linked to the auth agent for automatic + re-authentication. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._post( + "/agents/auth/invocations", + body=maybe_transform( + { + "auth_agent_id": auth_agent_id, + "save_credential_as": save_credential_as, + }, + invocation_create_params.InvocationCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=AuthAgentInvocationCreateResponse, + ) + + def retrieve( + self, + invocation_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AgentAuthInvocationResponse: + """Returns invocation details including status, app_name, and domain. + + Supports both + API key and JWT (from exchange endpoint) authentication. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not invocation_id: + raise ValueError(f"Expected a non-empty value for `invocation_id` but received {invocation_id!r}") + return self._get( + f"/agents/auth/invocations/{invocation_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=AgentAuthInvocationResponse, + ) + + def exchange( + self, + invocation_id: str, + *, + code: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> InvocationExchangeResponse: + """Validates the handoff code and returns a JWT token for subsequent requests. + + No + authentication required (the handoff code serves as the credential). 
+ + Args: + code: Handoff code from start endpoint + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not invocation_id: + raise ValueError(f"Expected a non-empty value for `invocation_id` but received {invocation_id!r}") + return self._post( + f"/agents/auth/invocations/{invocation_id}/exchange", + body=maybe_transform({"code": code}, invocation_exchange_params.InvocationExchangeParams), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=InvocationExchangeResponse, + ) + + @overload + def submit( + self, + invocation_id: str, + *, + field_values: Dict[str, str], + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AgentAuthSubmitResponse: + """Submits field values for the discovered login form. + + Returns immediately after + submission is accepted. Poll the invocation endpoint to track progress and get + results. + + Args: + field_values: Values for the discovered login fields + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... + + @overload + def submit( + self, + invocation_id: str, + *, + sso_button: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AgentAuthSubmitResponse: + """Submits field values for the discovered login form. + + Returns immediately after + submission is accepted. Poll the invocation endpoint to track progress and get + results. + + Args: + sso_button: Selector of SSO button to click + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... + + @required_args(["field_values"], ["sso_button"]) + def submit( + self, + invocation_id: str, + *, + field_values: Dict[str, str] | Omit = omit, + sso_button: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AgentAuthSubmitResponse: + if not invocation_id: + raise ValueError(f"Expected a non-empty value for `invocation_id` but received {invocation_id!r}") + return self._post( + f"/agents/auth/invocations/{invocation_id}/submit", + body=maybe_transform( + { + "field_values": field_values, + "sso_button": sso_button, + }, + invocation_submit_params.InvocationSubmitParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=AgentAuthSubmitResponse, + ) + + +class AsyncInvocationsResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncInvocationsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return AsyncInvocationsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncInvocationsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return AsyncInvocationsResourceWithStreamingResponse(self) + + async def create( + self, + *, + auth_agent_id: str, + save_credential_as: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AuthAgentInvocationCreateResponse: + """Creates a new authentication invocation for the specified auth agent. + + This + starts the auth flow and returns a hosted URL for the user to complete + authentication. + + Args: + auth_agent_id: ID of the auth agent to create an invocation for + + save_credential_as: If provided, saves the submitted credentials under this name upon successful + login. The credential will be linked to the auth agent for automatic + re-authentication. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._post( + "/agents/auth/invocations", + body=await async_maybe_transform( + { + "auth_agent_id": auth_agent_id, + "save_credential_as": save_credential_as, + }, + invocation_create_params.InvocationCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=AuthAgentInvocationCreateResponse, + ) + + async def retrieve( + self, + invocation_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AgentAuthInvocationResponse: + """Returns invocation details including status, app_name, and domain. + + Supports both + API key and JWT (from exchange endpoint) authentication. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not invocation_id: + raise ValueError(f"Expected a non-empty value for `invocation_id` but received {invocation_id!r}") + return await self._get( + f"/agents/auth/invocations/{invocation_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=AgentAuthInvocationResponse, + ) + + async def exchange( + self, + invocation_id: str, + *, + code: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> InvocationExchangeResponse: + """Validates the handoff code and returns a JWT token for subsequent requests. + + No + authentication required (the handoff code serves as the credential). + + Args: + code: Handoff code from start endpoint + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not invocation_id: + raise ValueError(f"Expected a non-empty value for `invocation_id` but received {invocation_id!r}") + return await self._post( + f"/agents/auth/invocations/{invocation_id}/exchange", + body=await async_maybe_transform({"code": code}, invocation_exchange_params.InvocationExchangeParams), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=InvocationExchangeResponse, + ) + + @overload + async def submit( + self, + invocation_id: str, + *, + field_values: Dict[str, str], + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AgentAuthSubmitResponse: + """Submits field values for the discovered login form. + + Returns immediately after + submission is accepted. Poll the invocation endpoint to track progress and get + results. + + Args: + field_values: Values for the discovered login fields + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... 
+ + @overload + async def submit( + self, + invocation_id: str, + *, + sso_button: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AgentAuthSubmitResponse: + """Submits field values for the discovered login form. + + Returns immediately after + submission is accepted. Poll the invocation endpoint to track progress and get + results. + + Args: + sso_button: Selector of SSO button to click + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... + + @required_args(["field_values"], ["sso_button"]) + async def submit( + self, + invocation_id: str, + *, + field_values: Dict[str, str] | Omit = omit, + sso_button: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AgentAuthSubmitResponse: + if not invocation_id: + raise ValueError(f"Expected a non-empty value for `invocation_id` but received {invocation_id!r}") + return await self._post( + f"/agents/auth/invocations/{invocation_id}/submit", + body=await async_maybe_transform( + { + "field_values": field_values, + "sso_button": sso_button, + }, + invocation_submit_params.InvocationSubmitParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=AgentAuthSubmitResponse, + ) + + +class InvocationsResourceWithRawResponse: + def __init__(self, invocations: InvocationsResource) -> None: + self._invocations = invocations + + self.create = to_raw_response_wrapper( + invocations.create, + ) + self.retrieve = to_raw_response_wrapper( + invocations.retrieve, + ) + self.exchange = to_raw_response_wrapper( + invocations.exchange, + ) + self.submit = to_raw_response_wrapper( + invocations.submit, + ) + + +class AsyncInvocationsResourceWithRawResponse: + def __init__(self, invocations: AsyncInvocationsResource) -> None: + self._invocations = invocations + + self.create = async_to_raw_response_wrapper( + invocations.create, + ) + self.retrieve = async_to_raw_response_wrapper( + invocations.retrieve, + ) + self.exchange = async_to_raw_response_wrapper( + invocations.exchange, + ) + self.submit = async_to_raw_response_wrapper( + invocations.submit, + ) + + +class InvocationsResourceWithStreamingResponse: + def __init__(self, invocations: InvocationsResource) -> None: + self._invocations = invocations + + self.create = to_streamed_response_wrapper( + invocations.create, + ) + self.retrieve = to_streamed_response_wrapper( + invocations.retrieve, + ) + self.exchange = to_streamed_response_wrapper( + invocations.exchange, + ) + self.submit = to_streamed_response_wrapper( + invocations.submit, + ) + + +class 
AsyncInvocationsResourceWithStreamingResponse: + def __init__(self, invocations: AsyncInvocationsResource) -> None: + self._invocations = invocations + + self.create = async_to_streamed_response_wrapper( + invocations.create, + ) + self.retrieve = async_to_streamed_response_wrapper( + invocations.retrieve, + ) + self.exchange = async_to_streamed_response_wrapper( + invocations.exchange, + ) + self.submit = async_to_streamed_response_wrapper( + invocations.submit, + ) diff --git a/src/kernel/resources/apps.py b/src/kernel/resources/apps.py new file mode 100644 index 0000000..0443e73 --- /dev/null +++ b/src/kernel/resources/apps.py @@ -0,0 +1,212 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import httpx + +from ..types import app_list_params +from .._types import Body, Omit, Query, Headers, NotGiven, omit, not_given +from .._utils import maybe_transform +from .._compat import cached_property +from .._resource import SyncAPIResource, AsyncAPIResource +from .._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ..pagination import SyncOffsetPagination, AsyncOffsetPagination +from .._base_client import AsyncPaginator, make_request_options +from ..types.app_list_response import AppListResponse + +__all__ = ["AppsResource", "AsyncAppsResource"] + + +class AppsResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> AppsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return AppsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AppsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return AppsResourceWithStreamingResponse(self) + + def list( + self, + *, + app_name: str | Omit = omit, + limit: int | Omit = omit, + offset: int | Omit = omit, + version: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> SyncOffsetPagination[AppListResponse]: + """List applications. + + Optionally filter by app name and/or version label. + + Args: + app_name: Filter results by application name. + + limit: Limit the number of apps to return. + + offset: Offset the number of apps to return. + + version: Filter results by version label. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get_api_list( + "/apps", + page=SyncOffsetPagination[AppListResponse], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "app_name": app_name, + "limit": limit, + "offset": offset, + "version": version, + }, + app_list_params.AppListParams, + ), + ), + model=AppListResponse, + ) + + +class AsyncAppsResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncAppsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return AsyncAppsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncAppsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return AsyncAppsResourceWithStreamingResponse(self) + + def list( + self, + *, + app_name: str | Omit = omit, + limit: int | Omit = omit, + offset: int | Omit = omit, + version: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AsyncPaginator[AppListResponse, AsyncOffsetPagination[AppListResponse]]: + """List applications. + + Optionally filter by app name and/or version label. + + Args: + app_name: Filter results by application name. + + limit: Limit the number of apps to return. + + offset: Offset the number of apps to return. + + version: Filter results by version label. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get_api_list( + "/apps", + page=AsyncOffsetPagination[AppListResponse], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "app_name": app_name, + "limit": limit, + "offset": offset, + "version": version, + }, + app_list_params.AppListParams, + ), + ), + model=AppListResponse, + ) + + +class AppsResourceWithRawResponse: + def __init__(self, apps: AppsResource) -> None: + self._apps = apps + + self.list = to_raw_response_wrapper( + apps.list, + ) + + +class AsyncAppsResourceWithRawResponse: + def __init__(self, apps: AsyncAppsResource) -> None: + self._apps = apps + + self.list = async_to_raw_response_wrapper( + apps.list, + ) + + +class AppsResourceWithStreamingResponse: + def __init__(self, apps: AppsResource) -> None: + self._apps = apps + + self.list = to_streamed_response_wrapper( + apps.list, + ) + + +class AsyncAppsResourceWithStreamingResponse: + def __init__(self, apps: AsyncAppsResource) -> None: + self._apps = apps + + self.list = async_to_streamed_response_wrapper( + apps.list, + ) diff --git a/src/kernel/resources/browser_pools.py b/src/kernel/resources/browser_pools.py new file mode 100644 index 0000000..5a4bf61 --- /dev/null +++ b/src/kernel/resources/browser_pools.py @@ -0,0 +1,1022 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Iterable + +import httpx + +from ..types import ( + browser_pool_create_params, + browser_pool_delete_params, + browser_pool_update_params, + browser_pool_acquire_params, + browser_pool_release_params, +) +from .._types import Body, Omit, Query, Headers, NoneType, NotGiven, omit, not_given +from .._utils import maybe_transform, async_maybe_transform +from .._compat import cached_property +from .._resource import SyncAPIResource, AsyncAPIResource +from .._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from .._base_client import make_request_options +from ..types.browser_pool import BrowserPool +from ..types.browser_pool_list_response import BrowserPoolListResponse +from ..types.browser_pool_acquire_response import BrowserPoolAcquireResponse +from ..types.shared_params.browser_profile import BrowserProfile +from ..types.shared_params.browser_viewport import BrowserViewport +from ..types.shared_params.browser_extension import BrowserExtension + +__all__ = ["BrowserPoolsResource", "AsyncBrowserPoolsResource"] + + +class BrowserPoolsResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> BrowserPoolsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return BrowserPoolsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> BrowserPoolsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
+ + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return BrowserPoolsResourceWithStreamingResponse(self) + + def create( + self, + *, + size: int, + extensions: Iterable[BrowserExtension] | Omit = omit, + fill_rate_per_minute: int | Omit = omit, + headless: bool | Omit = omit, + kiosk_mode: bool | Omit = omit, + name: str | Omit = omit, + profile: BrowserProfile | Omit = omit, + proxy_id: str | Omit = omit, + stealth: bool | Omit = omit, + timeout_seconds: int | Omit = omit, + viewport: BrowserViewport | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> BrowserPool: + """ + Create a new browser pool with the specified configuration and size. + + Args: + size: Number of browsers to create in the pool + + extensions: List of browser extensions to load into the session. Provide each by id or name. + + fill_rate_per_minute: Percentage of the pool to fill per minute. Defaults to 10%. + + headless: If true, launches the browser using a headless image. Defaults to false. + + kiosk_mode: If true, launches the browser in kiosk mode to hide address bar and tabs in live + view. + + name: Optional name for the browser pool. Must be unique within the organization. + + profile: Profile selection for the browser session. Provide either id or name. If + specified, the matching profile will be loaded into the browser session. + Profiles must be created beforehand. + + proxy_id: Optional proxy to associate to the browser session. Must reference a proxy + belonging to the caller's org. + + stealth: If true, launches the browser in stealth mode to reduce detection by anti-bot + mechanisms. + + timeout_seconds: Default idle timeout in seconds for browsers acquired from this pool before they + are destroyed. Defaults to 600 seconds if not specified + + viewport: Initial browser window size in pixels with optional refresh rate. If omitted, + image defaults apply (1920x1080@25). Only specific viewport configurations are + supported. The server will reject unsupported combinations. Supported + resolutions are: 2560x1440@10, 1920x1080@25, 1920x1200@25, 1440x900@25, + 1024x768@60, 1200x800@60 If refresh_rate is not provided, it will be + automatically determined from the width and height if they match a supported + configuration exactly. 
Note: Higher resolutions may affect the responsiveness of + live view browser + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._post( + "/browser_pools", + body=maybe_transform( + { + "size": size, + "extensions": extensions, + "fill_rate_per_minute": fill_rate_per_minute, + "headless": headless, + "kiosk_mode": kiosk_mode, + "name": name, + "profile": profile, + "proxy_id": proxy_id, + "stealth": stealth, + "timeout_seconds": timeout_seconds, + "viewport": viewport, + }, + browser_pool_create_params.BrowserPoolCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=BrowserPool, + ) + + def retrieve( + self, + id_or_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> BrowserPool: + """ + Retrieve details for a single browser pool by its ID or name. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + return self._get( + f"/browser_pools/{id_or_name}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=BrowserPool, + ) + + def update( + self, + id_or_name: str, + *, + size: int, + discard_all_idle: bool | Omit = omit, + extensions: Iterable[BrowserExtension] | Omit = omit, + fill_rate_per_minute: int | Omit = omit, + headless: bool | Omit = omit, + kiosk_mode: bool | Omit = omit, + name: str | Omit = omit, + profile: BrowserProfile | Omit = omit, + proxy_id: str | Omit = omit, + stealth: bool | Omit = omit, + timeout_seconds: int | Omit = omit, + viewport: BrowserViewport | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> BrowserPool: + """ + Updates the configuration used to create browsers in the pool. + + Args: + size: Number of browsers to create in the pool + + discard_all_idle: Whether to discard all idle browsers and rebuild the pool immediately. Defaults + to false. + + extensions: List of browser extensions to load into the session. Provide each by id or name. + + fill_rate_per_minute: Percentage of the pool to fill per minute. Defaults to 10%. + + headless: If true, launches the browser using a headless image. Defaults to false. 
+ + kiosk_mode: If true, launches the browser in kiosk mode to hide address bar and tabs in live + view. + + name: Optional name for the browser pool. Must be unique within the organization. + + profile: Profile selection for the browser session. Provide either id or name. If + specified, the matching profile will be loaded into the browser session. + Profiles must be created beforehand. + + proxy_id: Optional proxy to associate to the browser session. Must reference a proxy + belonging to the caller's org. + + stealth: If true, launches the browser in stealth mode to reduce detection by anti-bot + mechanisms. + + timeout_seconds: Default idle timeout in seconds for browsers acquired from this pool before they + are destroyed. Defaults to 600 seconds if not specified + + viewport: Initial browser window size in pixels with optional refresh rate. If omitted, + image defaults apply (1920x1080@25). Only specific viewport configurations are + supported. The server will reject unsupported combinations. Supported + resolutions are: 2560x1440@10, 1920x1080@25, 1920x1200@25, 1440x900@25, + 1024x768@60, 1200x800@60 If refresh_rate is not provided, it will be + automatically determined from the width and height if they match a supported + configuration exactly. Note: Higher resolutions may affect the responsiveness of + live view browser + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + return self._patch( + f"/browser_pools/{id_or_name}", + body=maybe_transform( + { + "size": size, + "discard_all_idle": discard_all_idle, + "extensions": extensions, + "fill_rate_per_minute": fill_rate_per_minute, + "headless": headless, + "kiosk_mode": kiosk_mode, + "name": name, + "profile": profile, + "proxy_id": proxy_id, + "stealth": stealth, + "timeout_seconds": timeout_seconds, + "viewport": viewport, + }, + browser_pool_update_params.BrowserPoolUpdateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=BrowserPool, + ) + + def list( + self, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> BrowserPoolListResponse: + """List browser pools owned by the caller's organization.""" + return self._get( + "/browser_pools", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=BrowserPoolListResponse, + ) + + def delete( + self, + id_or_name: str, + *, + force: bool | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """Delete a browser pool and all browsers in it. + + By default, deletion is blocked if + browsers are currently leased. Use force=true to terminate leased browsers. + + Args: + force: If true, force delete even if browsers are currently leased. Leased browsers + will be terminated. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return self._delete( + f"/browser_pools/{id_or_name}", + body=maybe_transform({"force": force}, browser_pool_delete_params.BrowserPoolDeleteParams), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + def acquire( + self, + id_or_name: str, + *, + acquire_timeout_seconds: int | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> BrowserPoolAcquireResponse: + """Long-polling endpoint to acquire a browser from the pool. + + Returns immediately + when a browser is available, or returns 204 No Content when the poll times out. + The client should retry the request to continue waiting for a browser. The + acquired browser will use the pool's timeout_seconds for its idle timeout. + + Args: + acquire_timeout_seconds: Maximum number of seconds to wait for a browser to be available. Defaults to the + calculated time it would take to fill the pool at the currently configured fill + rate. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + return self._post( + f"/browser_pools/{id_or_name}/acquire", + body=maybe_transform( + {"acquire_timeout_seconds": acquire_timeout_seconds}, + browser_pool_acquire_params.BrowserPoolAcquireParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=BrowserPoolAcquireResponse, + ) + + def flush( + self, + id_or_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Destroys all idle browsers in the pool; leased browsers are not affected. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return self._post( + f"/browser_pools/{id_or_name}/flush", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + def release( + self, + id_or_name: str, + *, + session_id: str, + reuse: bool | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Release a browser back to the pool, optionally recreating the browser instance. + + Args: + session_id: Browser session ID to release back to the pool + + reuse: Whether to reuse the browser instance or destroy it and create a new one. + Defaults to true. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return self._post( + f"/browser_pools/{id_or_name}/release", + body=maybe_transform( + { + "session_id": session_id, + "reuse": reuse, + }, + browser_pool_release_params.BrowserPoolReleaseParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + +class AsyncBrowserPoolsResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncBrowserPoolsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return AsyncBrowserPoolsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncBrowserPoolsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
+ + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return AsyncBrowserPoolsResourceWithStreamingResponse(self) + + async def create( + self, + *, + size: int, + extensions: Iterable[BrowserExtension] | Omit = omit, + fill_rate_per_minute: int | Omit = omit, + headless: bool | Omit = omit, + kiosk_mode: bool | Omit = omit, + name: str | Omit = omit, + profile: BrowserProfile | Omit = omit, + proxy_id: str | Omit = omit, + stealth: bool | Omit = omit, + timeout_seconds: int | Omit = omit, + viewport: BrowserViewport | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> BrowserPool: + """ + Create a new browser pool with the specified configuration and size. + + Args: + size: Number of browsers to create in the pool + + extensions: List of browser extensions to load into the session. Provide each by id or name. + + fill_rate_per_minute: Percentage of the pool to fill per minute. Defaults to 10%. + + headless: If true, launches the browser using a headless image. Defaults to false. + + kiosk_mode: If true, launches the browser in kiosk mode to hide address bar and tabs in live + view. + + name: Optional name for the browser pool. Must be unique within the organization. + + profile: Profile selection for the browser session. Provide either id or name. If + specified, the matching profile will be loaded into the browser session. + Profiles must be created beforehand. + + proxy_id: Optional proxy to associate to the browser session. Must reference a proxy + belonging to the caller's org. + + stealth: If true, launches the browser in stealth mode to reduce detection by anti-bot + mechanisms. + + timeout_seconds: Default idle timeout in seconds for browsers acquired from this pool before they + are destroyed. Defaults to 600 seconds if not specified + + viewport: Initial browser window size in pixels with optional refresh rate. If omitted, + image defaults apply (1920x1080@25). Only specific viewport configurations are + supported. The server will reject unsupported combinations. Supported + resolutions are: 2560x1440@10, 1920x1080@25, 1920x1200@25, 1440x900@25, + 1024x768@60, 1200x800@60 If refresh_rate is not provided, it will be + automatically determined from the width and height if they match a supported + configuration exactly. 
Note: Higher resolutions may affect the responsiveness of + live view browser + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._post( + "/browser_pools", + body=await async_maybe_transform( + { + "size": size, + "extensions": extensions, + "fill_rate_per_minute": fill_rate_per_minute, + "headless": headless, + "kiosk_mode": kiosk_mode, + "name": name, + "profile": profile, + "proxy_id": proxy_id, + "stealth": stealth, + "timeout_seconds": timeout_seconds, + "viewport": viewport, + }, + browser_pool_create_params.BrowserPoolCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=BrowserPool, + ) + + async def retrieve( + self, + id_or_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> BrowserPool: + """ + Retrieve details for a single browser pool by its ID or name. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + return await self._get( + f"/browser_pools/{id_or_name}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=BrowserPool, + ) + + async def update( + self, + id_or_name: str, + *, + size: int, + discard_all_idle: bool | Omit = omit, + extensions: Iterable[BrowserExtension] | Omit = omit, + fill_rate_per_minute: int | Omit = omit, + headless: bool | Omit = omit, + kiosk_mode: bool | Omit = omit, + name: str | Omit = omit, + profile: BrowserProfile | Omit = omit, + proxy_id: str | Omit = omit, + stealth: bool | Omit = omit, + timeout_seconds: int | Omit = omit, + viewport: BrowserViewport | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> BrowserPool: + """ + Updates the configuration used to create browsers in the pool. + + Args: + size: Number of browsers to create in the pool + + discard_all_idle: Whether to discard all idle browsers and rebuild the pool immediately. Defaults + to false. + + extensions: List of browser extensions to load into the session. Provide each by id or name. + + fill_rate_per_minute: Percentage of the pool to fill per minute. Defaults to 10%. + + headless: If true, launches the browser using a headless image. Defaults to false. 
+ + kiosk_mode: If true, launches the browser in kiosk mode to hide address bar and tabs in live + view. + + name: Optional name for the browser pool. Must be unique within the organization. + + profile: Profile selection for the browser session. Provide either id or name. If + specified, the matching profile will be loaded into the browser session. + Profiles must be created beforehand. + + proxy_id: Optional proxy to associate to the browser session. Must reference a proxy + belonging to the caller's org. + + stealth: If true, launches the browser in stealth mode to reduce detection by anti-bot + mechanisms. + + timeout_seconds: Default idle timeout in seconds for browsers acquired from this pool before they + are destroyed. Defaults to 600 seconds if not specified + + viewport: Initial browser window size in pixels with optional refresh rate. If omitted, + image defaults apply (1920x1080@25). Only specific viewport configurations are + supported. The server will reject unsupported combinations. Supported + resolutions are: 2560x1440@10, 1920x1080@25, 1920x1200@25, 1440x900@25, + 1024x768@60, 1200x800@60 If refresh_rate is not provided, it will be + automatically determined from the width and height if they match a supported + configuration exactly. Note: Higher resolutions may affect the responsiveness of + live view browser + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + return await self._patch( + f"/browser_pools/{id_or_name}", + body=await async_maybe_transform( + { + "size": size, + "discard_all_idle": discard_all_idle, + "extensions": extensions, + "fill_rate_per_minute": fill_rate_per_minute, + "headless": headless, + "kiosk_mode": kiosk_mode, + "name": name, + "profile": profile, + "proxy_id": proxy_id, + "stealth": stealth, + "timeout_seconds": timeout_seconds, + "viewport": viewport, + }, + browser_pool_update_params.BrowserPoolUpdateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=BrowserPool, + ) + + async def list( + self, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> BrowserPoolListResponse: + """List browser pools owned by the caller's organization.""" + return await self._get( + "/browser_pools", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=BrowserPoolListResponse, + ) + + async def delete( + self, + id_or_name: str, + *, + force: bool | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """Delete a browser pool and all browsers in it. + + By default, deletion is blocked if + browsers are currently leased. Use force=true to terminate leased browsers. + + Args: + force: If true, force delete even if browsers are currently leased. Leased browsers + will be terminated. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return await self._delete( + f"/browser_pools/{id_or_name}", + body=await async_maybe_transform({"force": force}, browser_pool_delete_params.BrowserPoolDeleteParams), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + async def acquire( + self, + id_or_name: str, + *, + acquire_timeout_seconds: int | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> BrowserPoolAcquireResponse: + """Long-polling endpoint to acquire a browser from the pool. + + Returns immediately + when a browser is available, or returns 204 No Content when the poll times out. + The client should retry the request to continue waiting for a browser. The + acquired browser will use the pool's timeout_seconds for its idle timeout. + + Args: + acquire_timeout_seconds: Maximum number of seconds to wait for a browser to be available. Defaults to the + calculated time it would take to fill the pool at the currently configured fill + rate. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + return await self._post( + f"/browser_pools/{id_or_name}/acquire", + body=await async_maybe_transform( + {"acquire_timeout_seconds": acquire_timeout_seconds}, + browser_pool_acquire_params.BrowserPoolAcquireParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=BrowserPoolAcquireResponse, + ) + + async def flush( + self, + id_or_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Destroys all idle browsers in the pool; leased browsers are not affected. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return await self._post( + f"/browser_pools/{id_or_name}/flush", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + async def release( + self, + id_or_name: str, + *, + session_id: str, + reuse: bool | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Release a browser back to the pool, optionally recreating the browser instance. + + Args: + session_id: Browser session ID to release back to the pool + + reuse: Whether to reuse the browser instance or destroy it and create a new one. + Defaults to true. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return await self._post( + f"/browser_pools/{id_or_name}/release", + body=await async_maybe_transform( + { + "session_id": session_id, + "reuse": reuse, + }, + browser_pool_release_params.BrowserPoolReleaseParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + +class BrowserPoolsResourceWithRawResponse: + def __init__(self, browser_pools: BrowserPoolsResource) -> None: + self._browser_pools = browser_pools + + self.create = to_raw_response_wrapper( + browser_pools.create, + ) + self.retrieve = to_raw_response_wrapper( + browser_pools.retrieve, + ) + self.update = to_raw_response_wrapper( + browser_pools.update, + ) + self.list = to_raw_response_wrapper( + browser_pools.list, + ) + self.delete = to_raw_response_wrapper( + browser_pools.delete, + ) + self.acquire = to_raw_response_wrapper( + browser_pools.acquire, + ) + self.flush = to_raw_response_wrapper( + browser_pools.flush, + ) + self.release = to_raw_response_wrapper( + browser_pools.release, + ) + + +class AsyncBrowserPoolsResourceWithRawResponse: + def __init__(self, browser_pools: AsyncBrowserPoolsResource) -> None: + self._browser_pools = browser_pools + + self.create = async_to_raw_response_wrapper( + browser_pools.create, + ) + self.retrieve = async_to_raw_response_wrapper( + browser_pools.retrieve, + ) + 
self.update = async_to_raw_response_wrapper( + browser_pools.update, + ) + self.list = async_to_raw_response_wrapper( + browser_pools.list, + ) + self.delete = async_to_raw_response_wrapper( + browser_pools.delete, + ) + self.acquire = async_to_raw_response_wrapper( + browser_pools.acquire, + ) + self.flush = async_to_raw_response_wrapper( + browser_pools.flush, + ) + self.release = async_to_raw_response_wrapper( + browser_pools.release, + ) + + +class BrowserPoolsResourceWithStreamingResponse: + def __init__(self, browser_pools: BrowserPoolsResource) -> None: + self._browser_pools = browser_pools + + self.create = to_streamed_response_wrapper( + browser_pools.create, + ) + self.retrieve = to_streamed_response_wrapper( + browser_pools.retrieve, + ) + self.update = to_streamed_response_wrapper( + browser_pools.update, + ) + self.list = to_streamed_response_wrapper( + browser_pools.list, + ) + self.delete = to_streamed_response_wrapper( + browser_pools.delete, + ) + self.acquire = to_streamed_response_wrapper( + browser_pools.acquire, + ) + self.flush = to_streamed_response_wrapper( + browser_pools.flush, + ) + self.release = to_streamed_response_wrapper( + browser_pools.release, + ) + + +class AsyncBrowserPoolsResourceWithStreamingResponse: + def __init__(self, browser_pools: AsyncBrowserPoolsResource) -> None: + self._browser_pools = browser_pools + + self.create = async_to_streamed_response_wrapper( + browser_pools.create, + ) + self.retrieve = async_to_streamed_response_wrapper( + browser_pools.retrieve, + ) + self.update = async_to_streamed_response_wrapper( + browser_pools.update, + ) + self.list = async_to_streamed_response_wrapper( + browser_pools.list, + ) + self.delete = async_to_streamed_response_wrapper( + browser_pools.delete, + ) + self.acquire = async_to_streamed_response_wrapper( + browser_pools.acquire, + ) + self.flush = async_to_streamed_response_wrapper( + browser_pools.flush, + ) + self.release = async_to_streamed_response_wrapper( + browser_pools.release, + ) diff --git a/src/kernel/resources/browsers/__init__.py b/src/kernel/resources/browsers/__init__.py new file mode 100644 index 0000000..a1acee2 --- /dev/null +++ b/src/kernel/resources/browsers/__init__.py @@ -0,0 +1,103 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
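# A brief usage sketch for the `*WithRawResponse` / `*WithStreamingResponse` wrappers
# generated above for browser pools (the same pattern repeats for every resource in this
# SDK). The `Kernel` client class and its `browser_pools` property are assumptions here,
# defined elsewhere in the SDK rather than in this diff:
#
#     from kernel import Kernel
#
#     client = Kernel()
#
#     # Raw response: returns the HTTP response so headers can be inspected;
#     # `.parse()` then yields the usual typed model.
#     response = client.browser_pools.with_raw_response.list()
#     print(response.headers)
#     pools = response.parse()
#
#     # Streaming response: the body is not read eagerly; use it as a context manager.
#     with client.browser_pools.with_streaming_response.list() as response:
#         print(response.headers)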
+ +from .fs import ( + FsResource, + AsyncFsResource, + FsResourceWithRawResponse, + AsyncFsResourceWithRawResponse, + FsResourceWithStreamingResponse, + AsyncFsResourceWithStreamingResponse, +) +from .logs import ( + LogsResource, + AsyncLogsResource, + LogsResourceWithRawResponse, + AsyncLogsResourceWithRawResponse, + LogsResourceWithStreamingResponse, + AsyncLogsResourceWithStreamingResponse, +) +from .process import ( + ProcessResource, + AsyncProcessResource, + ProcessResourceWithRawResponse, + AsyncProcessResourceWithRawResponse, + ProcessResourceWithStreamingResponse, + AsyncProcessResourceWithStreamingResponse, +) +from .replays import ( + ReplaysResource, + AsyncReplaysResource, + ReplaysResourceWithRawResponse, + AsyncReplaysResourceWithRawResponse, + ReplaysResourceWithStreamingResponse, + AsyncReplaysResourceWithStreamingResponse, +) +from .browsers import ( + BrowsersResource, + AsyncBrowsersResource, + BrowsersResourceWithRawResponse, + AsyncBrowsersResourceWithRawResponse, + BrowsersResourceWithStreamingResponse, + AsyncBrowsersResourceWithStreamingResponse, +) +from .computer import ( + ComputerResource, + AsyncComputerResource, + ComputerResourceWithRawResponse, + AsyncComputerResourceWithRawResponse, + ComputerResourceWithStreamingResponse, + AsyncComputerResourceWithStreamingResponse, +) +from .playwright import ( + PlaywrightResource, + AsyncPlaywrightResource, + PlaywrightResourceWithRawResponse, + AsyncPlaywrightResourceWithRawResponse, + PlaywrightResourceWithStreamingResponse, + AsyncPlaywrightResourceWithStreamingResponse, +) + +__all__ = [ + "ReplaysResource", + "AsyncReplaysResource", + "ReplaysResourceWithRawResponse", + "AsyncReplaysResourceWithRawResponse", + "ReplaysResourceWithStreamingResponse", + "AsyncReplaysResourceWithStreamingResponse", + "FsResource", + "AsyncFsResource", + "FsResourceWithRawResponse", + "AsyncFsResourceWithRawResponse", + "FsResourceWithStreamingResponse", + "AsyncFsResourceWithStreamingResponse", + "ProcessResource", + "AsyncProcessResource", + "ProcessResourceWithRawResponse", + "AsyncProcessResourceWithRawResponse", + "ProcessResourceWithStreamingResponse", + "AsyncProcessResourceWithStreamingResponse", + "LogsResource", + "AsyncLogsResource", + "LogsResourceWithRawResponse", + "AsyncLogsResourceWithRawResponse", + "LogsResourceWithStreamingResponse", + "AsyncLogsResourceWithStreamingResponse", + "ComputerResource", + "AsyncComputerResource", + "ComputerResourceWithRawResponse", + "AsyncComputerResourceWithRawResponse", + "ComputerResourceWithStreamingResponse", + "AsyncComputerResourceWithStreamingResponse", + "PlaywrightResource", + "AsyncPlaywrightResource", + "PlaywrightResourceWithRawResponse", + "AsyncPlaywrightResourceWithRawResponse", + "PlaywrightResourceWithStreamingResponse", + "AsyncPlaywrightResourceWithStreamingResponse", + "BrowsersResource", + "AsyncBrowsersResource", + "BrowsersResourceWithRawResponse", + "AsyncBrowsersResourceWithRawResponse", + "BrowsersResourceWithStreamingResponse", + "AsyncBrowsersResourceWithStreamingResponse", +] diff --git a/src/kernel/resources/browsers/browsers.py b/src/kernel/resources/browsers/browsers.py new file mode 100644 index 0000000..8050a7d --- /dev/null +++ b/src/kernel/resources/browsers/browsers.py @@ -0,0 +1,976 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
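# Note: the package `__init__` above re-exports every sub-resource listed in its
# `__all__`, so the sync and async variants can be imported directly from the
# subpackage, for example:
#
#     from kernel.resources.browsers import BrowsersResource, AsyncBrowsersResource
#     from kernel.resources.browsers import ReplaysResource, AsyncPlaywrightResource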
+ +from __future__ import annotations + +import typing_extensions +from typing import Mapping, Iterable, cast + +import httpx + +from .logs import ( + LogsResource, + AsyncLogsResource, + LogsResourceWithRawResponse, + AsyncLogsResourceWithRawResponse, + LogsResourceWithStreamingResponse, + AsyncLogsResourceWithStreamingResponse, +) +from .fs.fs import ( + FsResource, + AsyncFsResource, + FsResourceWithRawResponse, + AsyncFsResourceWithRawResponse, + FsResourceWithStreamingResponse, + AsyncFsResourceWithStreamingResponse, +) +from ...types import ( + browser_list_params, + browser_create_params, + browser_delete_params, + browser_load_extensions_params, +) +from .process import ( + ProcessResource, + AsyncProcessResource, + ProcessResourceWithRawResponse, + AsyncProcessResourceWithRawResponse, + ProcessResourceWithStreamingResponse, + AsyncProcessResourceWithStreamingResponse, +) +from .replays import ( + ReplaysResource, + AsyncReplaysResource, + ReplaysResourceWithRawResponse, + AsyncReplaysResourceWithRawResponse, + ReplaysResourceWithStreamingResponse, + AsyncReplaysResourceWithStreamingResponse, +) +from ..._types import Body, Omit, Query, Headers, NoneType, NotGiven, omit, not_given +from ..._utils import extract_files, maybe_transform, deepcopy_minimal, async_maybe_transform +from .computer import ( + ComputerResource, + AsyncComputerResource, + ComputerResourceWithRawResponse, + AsyncComputerResourceWithRawResponse, + ComputerResourceWithStreamingResponse, + AsyncComputerResourceWithStreamingResponse, +) +from ..._compat import cached_property +from .playwright import ( + PlaywrightResource, + AsyncPlaywrightResource, + PlaywrightResourceWithRawResponse, + AsyncPlaywrightResourceWithRawResponse, + PlaywrightResourceWithStreamingResponse, + AsyncPlaywrightResourceWithStreamingResponse, +) +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ...pagination import SyncOffsetPagination, AsyncOffsetPagination +from ..._base_client import AsyncPaginator, make_request_options +from ...types.browser_list_response import BrowserListResponse +from ...types.browser_create_response import BrowserCreateResponse +from ...types.browser_persistence_param import BrowserPersistenceParam +from ...types.browser_retrieve_response import BrowserRetrieveResponse +from ...types.shared_params.browser_profile import BrowserProfile +from ...types.shared_params.browser_viewport import BrowserViewport +from ...types.shared_params.browser_extension import BrowserExtension + +__all__ = ["BrowsersResource", "AsyncBrowsersResource"] + + +class BrowsersResource(SyncAPIResource): + @cached_property + def replays(self) -> ReplaysResource: + return ReplaysResource(self._client) + + @cached_property + def fs(self) -> FsResource: + return FsResource(self._client) + + @cached_property + def process(self) -> ProcessResource: + return ProcessResource(self._client) + + @cached_property + def logs(self) -> LogsResource: + return LogsResource(self._client) + + @cached_property + def computer(self) -> ComputerResource: + return ComputerResource(self._client) + + @cached_property + def playwright(self) -> PlaywrightResource: + return PlaywrightResource(self._client) + + @cached_property + def with_raw_response(self) -> BrowsersResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object 
instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return BrowsersResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> BrowsersResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return BrowsersResourceWithStreamingResponse(self) + + def create( + self, + *, + extensions: Iterable[BrowserExtension] | Omit = omit, + headless: bool | Omit = omit, + invocation_id: str | Omit = omit, + kiosk_mode: bool | Omit = omit, + persistence: BrowserPersistenceParam | Omit = omit, + profile: BrowserProfile | Omit = omit, + proxy_id: str | Omit = omit, + stealth: bool | Omit = omit, + timeout_seconds: int | Omit = omit, + viewport: BrowserViewport | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> BrowserCreateResponse: + """ + Create a new browser session from within an action. + + Args: + extensions: List of browser extensions to load into the session. Provide each by id or name. + + headless: If true, launches the browser using a headless image (no VNC/GUI). Defaults to + false. + + invocation_id: action invocation ID + + kiosk_mode: If true, launches the browser in kiosk mode to hide address bar and tabs in live + view. + + persistence: DEPRECATED: Use timeout_seconds (up to 72 hours) and Profiles instead. + + profile: Profile selection for the browser session. Provide either id or name. If + specified, the matching profile will be loaded into the browser session. + Profiles must be created beforehand. + + proxy_id: Optional proxy to associate to the browser session. Must reference a proxy + belonging to the caller's org. + + stealth: If true, launches the browser in stealth mode to reduce detection by anti-bot + mechanisms. + + timeout_seconds: The number of seconds of inactivity before the browser session is terminated. + Activity includes CDP connections and live view connections. Defaults to 60 + seconds. Minimum allowed is 10 seconds. Maximum allowed is 259200 (72 hours). We + check for inactivity every 5 seconds, so the actual timeout behavior you will + see is +/- 5 seconds around the specified value. + + viewport: Initial browser window size in pixels with optional refresh rate. If omitted, + image defaults apply (1920x1080@25). Only specific viewport configurations are + supported. The server will reject unsupported combinations. Supported + resolutions are: 2560x1440@10, 1920x1080@25, 1920x1200@25, 1440x900@25, + 1024x768@60, 1200x800@60 If refresh_rate is not provided, it will be + automatically determined from the width and height if they match a supported + configuration exactly. 
Note: Higher resolutions may affect the responsiveness of + live view browser + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._post( + "/browsers", + body=maybe_transform( + { + "extensions": extensions, + "headless": headless, + "invocation_id": invocation_id, + "kiosk_mode": kiosk_mode, + "persistence": persistence, + "profile": profile, + "proxy_id": proxy_id, + "stealth": stealth, + "timeout_seconds": timeout_seconds, + "viewport": viewport, + }, + browser_create_params.BrowserCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=BrowserCreateResponse, + ) + + def retrieve( + self, + id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> BrowserRetrieveResponse: + """ + Get information about a browser session. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return self._get( + f"/browsers/{id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=BrowserRetrieveResponse, + ) + + def list( + self, + *, + include_deleted: bool | Omit = omit, + limit: int | Omit = omit, + offset: int | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> SyncOffsetPagination[BrowserListResponse]: + """List all browser sessions with pagination support. + + Use include_deleted=true to + include soft-deleted sessions in the results. + + Args: + include_deleted: When true, includes soft-deleted browser sessions in the results alongside + active sessions. + + limit: Maximum number of results to return. Defaults to 20, maximum 100. + + offset: Number of results to skip. Defaults to 0. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get_api_list( + "/browsers", + page=SyncOffsetPagination[BrowserListResponse], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "include_deleted": include_deleted, + "limit": limit, + "offset": offset, + }, + browser_list_params.BrowserListParams, + ), + ), + model=BrowserListResponse, + ) + + @typing_extensions.deprecated("deprecated") + def delete( + self, + *, + persistent_id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """DEPRECATED: Use DELETE /browsers/{id} instead. + + Delete a persistent browser + session by its persistent_id. + + Args: + persistent_id: Persistent browser identifier + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return self._delete( + "/browsers", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform({"persistent_id": persistent_id}, browser_delete_params.BrowserDeleteParams), + ), + cast_to=NoneType, + ) + + def delete_by_id( + self, + id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Delete a browser session by ID + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return self._delete( + f"/browsers/{id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + def load_extensions( + self, + id: str, + *, + extensions: Iterable[browser_load_extensions_params.Extension], + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Loads one or more unpacked extensions and restarts Chromium on the browser + instance. + + Args: + extensions: List of extensions to upload and activate + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + body = deepcopy_minimal({"extensions": extensions}) + files = extract_files(cast(Mapping[str, object], body), paths=[["extensions", "", "zip_file"]]) + # It should be noted that the actual Content-Type header that will be + # sent to the server will contain a `boundary` parameter, e.g. + # multipart/form-data; boundary=---abc-- + extra_headers["Content-Type"] = "multipart/form-data" + return self._post( + f"/browsers/{id}/extensions", + body=maybe_transform(body, browser_load_extensions_params.BrowserLoadExtensionsParams), + files=files, + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + +class AsyncBrowsersResource(AsyncAPIResource): + @cached_property + def replays(self) -> AsyncReplaysResource: + return AsyncReplaysResource(self._client) + + @cached_property + def fs(self) -> AsyncFsResource: + return AsyncFsResource(self._client) + + @cached_property + def process(self) -> AsyncProcessResource: + return AsyncProcessResource(self._client) + + @cached_property + def logs(self) -> AsyncLogsResource: + return AsyncLogsResource(self._client) + + @cached_property + def computer(self) -> AsyncComputerResource: + return AsyncComputerResource(self._client) + + @cached_property + def playwright(self) -> AsyncPlaywrightResource: + return AsyncPlaywrightResource(self._client) + + @cached_property + def with_raw_response(self) -> AsyncBrowsersResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return AsyncBrowsersResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncBrowsersResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return AsyncBrowsersResourceWithStreamingResponse(self) + + async def create( + self, + *, + extensions: Iterable[BrowserExtension] | Omit = omit, + headless: bool | Omit = omit, + invocation_id: str | Omit = omit, + kiosk_mode: bool | Omit = omit, + persistence: BrowserPersistenceParam | Omit = omit, + profile: BrowserProfile | Omit = omit, + proxy_id: str | Omit = omit, + stealth: bool | Omit = omit, + timeout_seconds: int | Omit = omit, + viewport: BrowserViewport | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
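`load_extensions` is a multipart endpoint: `extract_files` pulls each `zip_file` out of the body and the request goes out as `multipart/form-data`. A hedged sketch of calling it; the shape of an extension entry beyond the `zip_file` file part is not shown in this diff.

```python
from kernel import Kernel  # assumed client class

client = Kernel()

# Upload an unpacked extension packaged as a zip archive; the endpoint
# restarts Chromium on the instance after loading it.
with open("my-extension.zip", "rb") as zip_file:
    client.browsers.load_extensions(
        "browser_123",  # placeholder session id
        extensions=[{"zip_file": zip_file}],  # other per-extension fields, if any, are assumed
    )
```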
+ # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> BrowserCreateResponse: + """ + Create a new browser session from within an action. + + Args: + extensions: List of browser extensions to load into the session. Provide each by id or name. + + headless: If true, launches the browser using a headless image (no VNC/GUI). Defaults to + false. + + invocation_id: action invocation ID + + kiosk_mode: If true, launches the browser in kiosk mode to hide address bar and tabs in live + view. + + persistence: DEPRECATED: Use timeout_seconds (up to 72 hours) and Profiles instead. + + profile: Profile selection for the browser session. Provide either id or name. If + specified, the matching profile will be loaded into the browser session. + Profiles must be created beforehand. + + proxy_id: Optional proxy to associate to the browser session. Must reference a proxy + belonging to the caller's org. + + stealth: If true, launches the browser in stealth mode to reduce detection by anti-bot + mechanisms. + + timeout_seconds: The number of seconds of inactivity before the browser session is terminated. + Activity includes CDP connections and live view connections. Defaults to 60 + seconds. Minimum allowed is 10 seconds. Maximum allowed is 259200 (72 hours). We + check for inactivity every 5 seconds, so the actual timeout behavior you will + see is +/- 5 seconds around the specified value. + + viewport: Initial browser window size in pixels with optional refresh rate. If omitted, + image defaults apply (1920x1080@25). Only specific viewport configurations are + supported. The server will reject unsupported combinations. Supported + resolutions are: 2560x1440@10, 1920x1080@25, 1920x1200@25, 1440x900@25, + 1024x768@60, 1200x800@60 If refresh_rate is not provided, it will be + automatically determined from the width and height if they match a supported + configuration exactly. Note: Higher resolutions may affect the responsiveness of + live view browser + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._post( + "/browsers", + body=await async_maybe_transform( + { + "extensions": extensions, + "headless": headless, + "invocation_id": invocation_id, + "kiosk_mode": kiosk_mode, + "persistence": persistence, + "profile": profile, + "proxy_id": proxy_id, + "stealth": stealth, + "timeout_seconds": timeout_seconds, + "viewport": viewport, + }, + browser_create_params.BrowserCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=BrowserCreateResponse, + ) + + async def retrieve( + self, + id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> BrowserRetrieveResponse: + """ + Get information about a browser session. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return await self._get( + f"/browsers/{id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=BrowserRetrieveResponse, + ) + + def list( + self, + *, + include_deleted: bool | Omit = omit, + limit: int | Omit = omit, + offset: int | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AsyncPaginator[BrowserListResponse, AsyncOffsetPagination[BrowserListResponse]]: + """List all browser sessions with pagination support. + + Use include_deleted=true to + include soft-deleted sessions in the results. + + Args: + include_deleted: When true, includes soft-deleted browser sessions in the results alongside + active sessions. + + limit: Maximum number of results to return. Defaults to 20, maximum 100. + + offset: Number of results to skip. Defaults to 0. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get_api_list( + "/browsers", + page=AsyncOffsetPagination[BrowserListResponse], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "include_deleted": include_deleted, + "limit": limit, + "offset": offset, + }, + browser_list_params.BrowserListParams, + ), + ), + model=BrowserListResponse, + ) + + @typing_extensions.deprecated("deprecated") + async def delete( + self, + *, + persistent_id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """DEPRECATED: Use DELETE /browsers/{id} instead. + + Delete a persistent browser + session by its persistent_id. 
+ + Args: + persistent_id: Persistent browser identifier + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return await self._delete( + "/browsers", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + {"persistent_id": persistent_id}, browser_delete_params.BrowserDeleteParams + ), + ), + cast_to=NoneType, + ) + + async def delete_by_id( + self, + id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Delete a browser session by ID + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return await self._delete( + f"/browsers/{id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + async def load_extensions( + self, + id: str, + *, + extensions: Iterable[browser_load_extensions_params.Extension], + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Loads one or more unpacked extensions and restarts Chromium on the browser + instance. + + Args: + extensions: List of extensions to upload and activate + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + body = deepcopy_minimal({"extensions": extensions}) + files = extract_files(cast(Mapping[str, object], body), paths=[["extensions", "", "zip_file"]]) + # It should be noted that the actual Content-Type header that will be + # sent to the server will contain a `boundary` parameter, e.g. 
+ # multipart/form-data; boundary=---abc-- + extra_headers["Content-Type"] = "multipart/form-data" + return await self._post( + f"/browsers/{id}/extensions", + body=await async_maybe_transform(body, browser_load_extensions_params.BrowserLoadExtensionsParams), + files=files, + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + +class BrowsersResourceWithRawResponse: + def __init__(self, browsers: BrowsersResource) -> None: + self._browsers = browsers + + self.create = to_raw_response_wrapper( + browsers.create, + ) + self.retrieve = to_raw_response_wrapper( + browsers.retrieve, + ) + self.list = to_raw_response_wrapper( + browsers.list, + ) + self.delete = ( # pyright: ignore[reportDeprecated] + to_raw_response_wrapper( + browsers.delete, # pyright: ignore[reportDeprecated], + ) + ) + self.delete_by_id = to_raw_response_wrapper( + browsers.delete_by_id, + ) + self.load_extensions = to_raw_response_wrapper( + browsers.load_extensions, + ) + + @cached_property + def replays(self) -> ReplaysResourceWithRawResponse: + return ReplaysResourceWithRawResponse(self._browsers.replays) + + @cached_property + def fs(self) -> FsResourceWithRawResponse: + return FsResourceWithRawResponse(self._browsers.fs) + + @cached_property + def process(self) -> ProcessResourceWithRawResponse: + return ProcessResourceWithRawResponse(self._browsers.process) + + @cached_property + def logs(self) -> LogsResourceWithRawResponse: + return LogsResourceWithRawResponse(self._browsers.logs) + + @cached_property + def computer(self) -> ComputerResourceWithRawResponse: + return ComputerResourceWithRawResponse(self._browsers.computer) + + @cached_property + def playwright(self) -> PlaywrightResourceWithRawResponse: + return PlaywrightResourceWithRawResponse(self._browsers.playwright) + + +class AsyncBrowsersResourceWithRawResponse: + def __init__(self, browsers: AsyncBrowsersResource) -> None: + self._browsers = browsers + + self.create = async_to_raw_response_wrapper( + browsers.create, + ) + self.retrieve = async_to_raw_response_wrapper( + browsers.retrieve, + ) + self.list = async_to_raw_response_wrapper( + browsers.list, + ) + self.delete = ( # pyright: ignore[reportDeprecated] + async_to_raw_response_wrapper( + browsers.delete, # pyright: ignore[reportDeprecated], + ) + ) + self.delete_by_id = async_to_raw_response_wrapper( + browsers.delete_by_id, + ) + self.load_extensions = async_to_raw_response_wrapper( + browsers.load_extensions, + ) + + @cached_property + def replays(self) -> AsyncReplaysResourceWithRawResponse: + return AsyncReplaysResourceWithRawResponse(self._browsers.replays) + + @cached_property + def fs(self) -> AsyncFsResourceWithRawResponse: + return AsyncFsResourceWithRawResponse(self._browsers.fs) + + @cached_property + def process(self) -> AsyncProcessResourceWithRawResponse: + return AsyncProcessResourceWithRawResponse(self._browsers.process) + + @cached_property + def logs(self) -> AsyncLogsResourceWithRawResponse: + return AsyncLogsResourceWithRawResponse(self._browsers.logs) + + @cached_property + def computer(self) -> AsyncComputerResourceWithRawResponse: + return AsyncComputerResourceWithRawResponse(self._browsers.computer) + + @cached_property + def playwright(self) -> AsyncPlaywrightResourceWithRawResponse: + return AsyncPlaywrightResourceWithRawResponse(self._browsers.playwright) + + +class BrowsersResourceWithStreamingResponse: + def __init__(self, browsers: BrowsersResource) -> None: + 
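The async resource mirrors the sync surface, and the `*WithRawResponse` wrappers defined above can prefix any method to return the raw HTTP response instead of the parsed model. A sketch under the assumption that the package also exports an `AsyncKernel` client:

```python
import asyncio

from kernel import AsyncKernel  # assumed async client class


async def main() -> None:
    client = AsyncKernel()

    session = await client.browsers.create(
        stealth=True,
        viewport={"width": 1920, "height": 1080},  # must be one of the supported resolutions
    )

    # Raw response access: same call, but status and headers are available too.
    raw = await client.browsers.with_raw_response.retrieve(session.session_id)  # field name assumed
    print(raw.headers.get("content-type"), raw.parse())

    async for item in client.browsers.list(include_deleted=True):
        print(item)

    await client.browsers.delete_by_id(session.session_id)


asyncio.run(main())
```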
self._browsers = browsers + + self.create = to_streamed_response_wrapper( + browsers.create, + ) + self.retrieve = to_streamed_response_wrapper( + browsers.retrieve, + ) + self.list = to_streamed_response_wrapper( + browsers.list, + ) + self.delete = ( # pyright: ignore[reportDeprecated] + to_streamed_response_wrapper( + browsers.delete, # pyright: ignore[reportDeprecated], + ) + ) + self.delete_by_id = to_streamed_response_wrapper( + browsers.delete_by_id, + ) + self.load_extensions = to_streamed_response_wrapper( + browsers.load_extensions, + ) + + @cached_property + def replays(self) -> ReplaysResourceWithStreamingResponse: + return ReplaysResourceWithStreamingResponse(self._browsers.replays) + + @cached_property + def fs(self) -> FsResourceWithStreamingResponse: + return FsResourceWithStreamingResponse(self._browsers.fs) + + @cached_property + def process(self) -> ProcessResourceWithStreamingResponse: + return ProcessResourceWithStreamingResponse(self._browsers.process) + + @cached_property + def logs(self) -> LogsResourceWithStreamingResponse: + return LogsResourceWithStreamingResponse(self._browsers.logs) + + @cached_property + def computer(self) -> ComputerResourceWithStreamingResponse: + return ComputerResourceWithStreamingResponse(self._browsers.computer) + + @cached_property + def playwright(self) -> PlaywrightResourceWithStreamingResponse: + return PlaywrightResourceWithStreamingResponse(self._browsers.playwright) + + +class AsyncBrowsersResourceWithStreamingResponse: + def __init__(self, browsers: AsyncBrowsersResource) -> None: + self._browsers = browsers + + self.create = async_to_streamed_response_wrapper( + browsers.create, + ) + self.retrieve = async_to_streamed_response_wrapper( + browsers.retrieve, + ) + self.list = async_to_streamed_response_wrapper( + browsers.list, + ) + self.delete = ( # pyright: ignore[reportDeprecated] + async_to_streamed_response_wrapper( + browsers.delete, # pyright: ignore[reportDeprecated], + ) + ) + self.delete_by_id = async_to_streamed_response_wrapper( + browsers.delete_by_id, + ) + self.load_extensions = async_to_streamed_response_wrapper( + browsers.load_extensions, + ) + + @cached_property + def replays(self) -> AsyncReplaysResourceWithStreamingResponse: + return AsyncReplaysResourceWithStreamingResponse(self._browsers.replays) + + @cached_property + def fs(self) -> AsyncFsResourceWithStreamingResponse: + return AsyncFsResourceWithStreamingResponse(self._browsers.fs) + + @cached_property + def process(self) -> AsyncProcessResourceWithStreamingResponse: + return AsyncProcessResourceWithStreamingResponse(self._browsers.process) + + @cached_property + def logs(self) -> AsyncLogsResourceWithStreamingResponse: + return AsyncLogsResourceWithStreamingResponse(self._browsers.logs) + + @cached_property + def computer(self) -> AsyncComputerResourceWithStreamingResponse: + return AsyncComputerResourceWithStreamingResponse(self._browsers.computer) + + @cached_property + def playwright(self) -> AsyncPlaywrightResourceWithStreamingResponse: + return AsyncPlaywrightResourceWithStreamingResponse(self._browsers.playwright) diff --git a/src/kernel/resources/browsers/computer.py b/src/kernel/resources/browsers/computer.py new file mode 100644 index 0000000..c23dd3d --- /dev/null +++ b/src/kernel/resources/browsers/computer.py @@ -0,0 +1,1041 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import Iterable +from typing_extensions import Literal + +import httpx + +from ..._types import Body, Omit, Query, Headers, NoneType, NotGiven, SequenceNotStr, omit, not_given +from ..._utils import maybe_transform, async_maybe_transform +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( + BinaryAPIResponse, + AsyncBinaryAPIResponse, + StreamedBinaryAPIResponse, + AsyncStreamedBinaryAPIResponse, + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + to_custom_raw_response_wrapper, + async_to_streamed_response_wrapper, + to_custom_streamed_response_wrapper, + async_to_custom_raw_response_wrapper, + async_to_custom_streamed_response_wrapper, +) +from ..._base_client import make_request_options +from ...types.browsers import ( + computer_scroll_params, + computer_press_key_params, + computer_type_text_params, + computer_drag_mouse_params, + computer_move_mouse_params, + computer_click_mouse_params, + computer_capture_screenshot_params, + computer_set_cursor_visibility_params, +) +from ...types.browsers.computer_set_cursor_visibility_response import ComputerSetCursorVisibilityResponse + +__all__ = ["ComputerResource", "AsyncComputerResource"] + + +class ComputerResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> ComputerResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return ComputerResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> ComputerResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return ComputerResourceWithStreamingResponse(self) + + def capture_screenshot( + self, + id: str, + *, + region: computer_capture_screenshot_params.Region | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> BinaryAPIResponse: + """ + Capture a screenshot of the browser instance + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "image/png", **(extra_headers or {})} + return self._post( + f"/browsers/{id}/computer/screenshot", + body=maybe_transform( + {"region": region}, computer_capture_screenshot_params.ComputerCaptureScreenshotParams + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=BinaryAPIResponse, + ) + + def click_mouse( + self, + id: str, + *, + x: int, + y: int, + button: Literal["left", "right", "middle", "back", "forward"] | Omit = omit, + click_type: Literal["down", "up", "click"] | Omit = omit, + hold_keys: SequenceNotStr[str] | Omit = omit, + num_clicks: int | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Simulate a mouse click action on the browser instance + + Args: + x: X coordinate of the click position + + y: Y coordinate of the click position + + button: Mouse button to interact with + + click_type: Type of click action + + hold_keys: Modifier keys to hold during the click + + num_clicks: Number of times to repeat the click + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return self._post( + f"/browsers/{id}/computer/click_mouse", + body=maybe_transform( + { + "x": x, + "y": y, + "button": button, + "click_type": click_type, + "hold_keys": hold_keys, + "num_clicks": num_clicks, + }, + computer_click_mouse_params.ComputerClickMouseParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + def drag_mouse( + self, + id: str, + *, + path: Iterable[Iterable[int]], + button: Literal["left", "middle", "right"] | Omit = omit, + delay: int | Omit = omit, + hold_keys: SequenceNotStr[str] | Omit = omit, + step_delay_ms: int | Omit = omit, + steps_per_segment: int | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
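Together, `capture_screenshot` (a binary `image/png` response) and `click_mouse` cover the basic observe-then-act loop on a session. A hedged sketch; `write_to_file` is the usual binary-response helper in this SDK family but is an assumption here.

```python
from kernel import Kernel  # assumed client class

client = Kernel()
browser_id = "browser_123"  # placeholder session id

# Grab the current frame (optionally limited to a region via `region=`).
shot = client.browsers.computer.capture_screenshot(browser_id)
shot.write_to_file("frame.png")  # BinaryAPIResponse helper name assumed

# Double-click at (640, 360) with the left button.
client.browsers.computer.click_mouse(browser_id, x=640, y=360, button="left", num_clicks=2)
```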
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Drag the mouse along a path + + Args: + path: Ordered list of [x, y] coordinate pairs to move through while dragging. Must + contain at least 2 points. + + button: Mouse button to drag with + + delay: Delay in milliseconds between button down and starting to move along the path. + + hold_keys: Modifier keys to hold during the drag + + step_delay_ms: Delay in milliseconds between relative steps while dragging (not the initial + delay). + + steps_per_segment: Number of relative move steps per segment in the path. Minimum 1. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return self._post( + f"/browsers/{id}/computer/drag_mouse", + body=maybe_transform( + { + "path": path, + "button": button, + "delay": delay, + "hold_keys": hold_keys, + "step_delay_ms": step_delay_ms, + "steps_per_segment": steps_per_segment, + }, + computer_drag_mouse_params.ComputerDragMouseParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + def move_mouse( + self, + id: str, + *, + x: int, + y: int, + hold_keys: SequenceNotStr[str] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Move the mouse cursor to the specified coordinates on the browser instance + + Args: + x: X coordinate to move the cursor to + + y: Y coordinate to move the cursor to + + hold_keys: Modifier keys to hold during the move + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return self._post( + f"/browsers/{id}/computer/move_mouse", + body=maybe_transform( + { + "x": x, + "y": y, + "hold_keys": hold_keys, + }, + computer_move_mouse_params.ComputerMoveMouseParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + def press_key( + self, + id: str, + *, + keys: SequenceNotStr[str], + duration: int | Omit = omit, + hold_keys: SequenceNotStr[str] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Press one or more keys on the host computer + + Args: + keys: List of key symbols to press. Each item should be a key symbol supported by + xdotool (see X11 keysym definitions). Examples include "Return", "Shift", + "Ctrl", "Alt", "F5". Items in this list could also be combinations, e.g. + "Ctrl+t" or "Ctrl+Shift+Tab". + + duration: Duration to hold the keys down in milliseconds. If omitted or 0, keys are + tapped. + + hold_keys: Optional modifier keys to hold during the key press sequence. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return self._post( + f"/browsers/{id}/computer/press_key", + body=maybe_transform( + { + "keys": keys, + "duration": duration, + "hold_keys": hold_keys, + }, + computer_press_key_params.ComputerPressKeyParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + def scroll( + self, + id: str, + *, + x: int, + y: int, + delta_x: int | Omit = omit, + delta_y: int | Omit = omit, + hold_keys: SequenceNotStr[str] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Scroll the mouse wheel at a position on the host computer + + Args: + x: X coordinate at which to perform the scroll + + y: Y coordinate at which to perform the scroll + + delta_x: Horizontal scroll amount. Positive scrolls right, negative scrolls left. + + delta_y: Vertical scroll amount. Positive scrolls down, negative scrolls up. + + hold_keys: Modifier keys to hold during the scroll + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return self._post( + f"/browsers/{id}/computer/scroll", + body=maybe_transform( + { + "x": x, + "y": y, + "delta_x": delta_x, + "delta_y": delta_y, + "hold_keys": hold_keys, + }, + computer_scroll_params.ComputerScrollParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + def set_cursor_visibility( + self, + id: str, + *, + hidden: bool, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
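`drag_mouse`, `press_key`, and `scroll` take the same session id plus action-specific parameters; key symbols follow xdotool/X11 keysym names and may be combinations such as "Ctrl+t". A short sketch (client name assumed as before):

```python
from kernel import Kernel  # assumed client class

client = Kernel()
browser_id = "browser_123"  # placeholder session id

# Drag from (100, 100) to (400, 300) through one intermediate point.
client.browsers.computer.drag_mouse(
    browser_id,
    path=[[100, 100], [250, 200], [400, 300]],
    steps_per_segment=10,
)

# Open a new tab, then scroll the page down by 500 units at (640, 360).
client.browsers.computer.press_key(browser_id, keys=["Ctrl+t"])
client.browsers.computer.scroll(browser_id, x=640, y=360, delta_y=500)
```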
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ComputerSetCursorVisibilityResponse: + """ + Set cursor visibility + + Args: + hidden: Whether the cursor should be hidden or visible + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return self._post( + f"/browsers/{id}/computer/cursor", + body=maybe_transform( + {"hidden": hidden}, computer_set_cursor_visibility_params.ComputerSetCursorVisibilityParams + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ComputerSetCursorVisibilityResponse, + ) + + def type_text( + self, + id: str, + *, + text: str, + delay: int | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Type text on the browser instance + + Args: + text: Text to type on the browser instance + + delay: Delay in milliseconds between keystrokes + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return self._post( + f"/browsers/{id}/computer/type", + body=maybe_transform( + { + "text": text, + "delay": delay, + }, + computer_type_text_params.ComputerTypeTextParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + +class AsyncComputerResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncComputerResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return AsyncComputerResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncComputerResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return AsyncComputerResourceWithStreamingResponse(self) + + async def capture_screenshot( + self, + id: str, + *, + region: computer_capture_screenshot_params.Region | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
+ # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AsyncBinaryAPIResponse: + """ + Capture a screenshot of the browser instance + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "image/png", **(extra_headers or {})} + return await self._post( + f"/browsers/{id}/computer/screenshot", + body=await async_maybe_transform( + {"region": region}, computer_capture_screenshot_params.ComputerCaptureScreenshotParams + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=AsyncBinaryAPIResponse, + ) + + async def click_mouse( + self, + id: str, + *, + x: int, + y: int, + button: Literal["left", "right", "middle", "back", "forward"] | Omit = omit, + click_type: Literal["down", "up", "click"] | Omit = omit, + hold_keys: SequenceNotStr[str] | Omit = omit, + num_clicks: int | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Simulate a mouse click action on the browser instance + + Args: + x: X coordinate of the click position + + y: Y coordinate of the click position + + button: Mouse button to interact with + + click_type: Type of click action + + hold_keys: Modifier keys to hold during the click + + num_clicks: Number of times to repeat the click + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return await self._post( + f"/browsers/{id}/computer/click_mouse", + body=await async_maybe_transform( + { + "x": x, + "y": y, + "button": button, + "click_type": click_type, + "hold_keys": hold_keys, + "num_clicks": num_clicks, + }, + computer_click_mouse_params.ComputerClickMouseParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + async def drag_mouse( + self, + id: str, + *, + path: Iterable[Iterable[int]], + button: Literal["left", "middle", "right"] | Omit = omit, + delay: int | Omit = omit, + hold_keys: SequenceNotStr[str] | Omit = omit, + step_delay_ms: int | Omit = omit, + steps_per_segment: int | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
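For large binary payloads, the `with_streaming_response` wrapper avoids reading the whole body eagerly. Combined with the async screenshot endpoint it looks roughly like this; the client name and the `stream_to_file` helper are assumed to follow this SDK family's usual conventions.

```python
import asyncio

from kernel import AsyncKernel  # assumed async client class


async def main() -> None:
    client = AsyncKernel()
    # Stream the PNG to disk without buffering the whole response in memory.
    async with client.browsers.computer.with_streaming_response.capture_screenshot(
        "browser_123"  # placeholder session id
    ) as response:
        await response.stream_to_file("frame.png")  # helper name assumed


asyncio.run(main())
```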
+ # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Drag the mouse along a path + + Args: + path: Ordered list of [x, y] coordinate pairs to move through while dragging. Must + contain at least 2 points. + + button: Mouse button to drag with + + delay: Delay in milliseconds between button down and starting to move along the path. + + hold_keys: Modifier keys to hold during the drag + + step_delay_ms: Delay in milliseconds between relative steps while dragging (not the initial + delay). + + steps_per_segment: Number of relative move steps per segment in the path. Minimum 1. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return await self._post( + f"/browsers/{id}/computer/drag_mouse", + body=await async_maybe_transform( + { + "path": path, + "button": button, + "delay": delay, + "hold_keys": hold_keys, + "step_delay_ms": step_delay_ms, + "steps_per_segment": steps_per_segment, + }, + computer_drag_mouse_params.ComputerDragMouseParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + async def move_mouse( + self, + id: str, + *, + x: int, + y: int, + hold_keys: SequenceNotStr[str] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Move the mouse cursor to the specified coordinates on the browser instance + + Args: + x: X coordinate to move the cursor to + + y: Y coordinate to move the cursor to + + hold_keys: Modifier keys to hold during the move + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return await self._post( + f"/browsers/{id}/computer/move_mouse", + body=await async_maybe_transform( + { + "x": x, + "y": y, + "hold_keys": hold_keys, + }, + computer_move_mouse_params.ComputerMoveMouseParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + async def press_key( + self, + id: str, + *, + keys: SequenceNotStr[str], + duration: int | Omit = omit, + hold_keys: SequenceNotStr[str] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
+ # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Press one or more keys on the host computer + + Args: + keys: List of key symbols to press. Each item should be a key symbol supported by + xdotool (see X11 keysym definitions). Examples include "Return", "Shift", + "Ctrl", "Alt", "F5". Items in this list could also be combinations, e.g. + "Ctrl+t" or "Ctrl+Shift+Tab". + + duration: Duration to hold the keys down in milliseconds. If omitted or 0, keys are + tapped. + + hold_keys: Optional modifier keys to hold during the key press sequence. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return await self._post( + f"/browsers/{id}/computer/press_key", + body=await async_maybe_transform( + { + "keys": keys, + "duration": duration, + "hold_keys": hold_keys, + }, + computer_press_key_params.ComputerPressKeyParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + async def scroll( + self, + id: str, + *, + x: int, + y: int, + delta_x: int | Omit = omit, + delta_y: int | Omit = omit, + hold_keys: SequenceNotStr[str] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Scroll the mouse wheel at a position on the host computer + + Args: + x: X coordinate at which to perform the scroll + + y: Y coordinate at which to perform the scroll + + delta_x: Horizontal scroll amount. Positive scrolls right, negative scrolls left. + + delta_y: Vertical scroll amount. Positive scrolls down, negative scrolls up. 
+ + hold_keys: Modifier keys to hold during the scroll + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return await self._post( + f"/browsers/{id}/computer/scroll", + body=await async_maybe_transform( + { + "x": x, + "y": y, + "delta_x": delta_x, + "delta_y": delta_y, + "hold_keys": hold_keys, + }, + computer_scroll_params.ComputerScrollParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + async def set_cursor_visibility( + self, + id: str, + *, + hidden: bool, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ComputerSetCursorVisibilityResponse: + """ + Set cursor visibility + + Args: + hidden: Whether the cursor should be hidden or visible + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return await self._post( + f"/browsers/{id}/computer/cursor", + body=await async_maybe_transform( + {"hidden": hidden}, computer_set_cursor_visibility_params.ComputerSetCursorVisibilityParams + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ComputerSetCursorVisibilityResponse, + ) + + async def type_text( + self, + id: str, + *, + text: str, + delay: int | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Type text on the browser instance + + Args: + text: Text to type on the browser instance + + delay: Delay in milliseconds between keystrokes + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return await self._post( + f"/browsers/{id}/computer/type", + body=await async_maybe_transform( + { + "text": text, + "delay": delay, + }, + computer_type_text_params.ComputerTypeTextParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + +class ComputerResourceWithRawResponse: + def __init__(self, computer: ComputerResource) -> None: + self._computer = computer + + self.capture_screenshot = to_custom_raw_response_wrapper( + computer.capture_screenshot, + BinaryAPIResponse, + ) + self.click_mouse = to_raw_response_wrapper( + computer.click_mouse, + ) + self.drag_mouse = to_raw_response_wrapper( + computer.drag_mouse, + ) + self.move_mouse = to_raw_response_wrapper( + computer.move_mouse, + ) + self.press_key = to_raw_response_wrapper( + computer.press_key, + ) + self.scroll = to_raw_response_wrapper( + computer.scroll, + ) + self.set_cursor_visibility = to_raw_response_wrapper( + computer.set_cursor_visibility, + ) + self.type_text = to_raw_response_wrapper( + computer.type_text, + ) + + +class AsyncComputerResourceWithRawResponse: + def __init__(self, computer: AsyncComputerResource) -> None: + self._computer = computer + + self.capture_screenshot = async_to_custom_raw_response_wrapper( + computer.capture_screenshot, + AsyncBinaryAPIResponse, + ) + self.click_mouse = async_to_raw_response_wrapper( + computer.click_mouse, + ) + self.drag_mouse = async_to_raw_response_wrapper( + computer.drag_mouse, + ) + self.move_mouse = async_to_raw_response_wrapper( + computer.move_mouse, + ) + self.press_key = async_to_raw_response_wrapper( + computer.press_key, + ) + self.scroll = async_to_raw_response_wrapper( + computer.scroll, + ) + self.set_cursor_visibility = async_to_raw_response_wrapper( + computer.set_cursor_visibility, + ) + self.type_text = async_to_raw_response_wrapper( + computer.type_text, + ) + + +class ComputerResourceWithStreamingResponse: + def __init__(self, computer: ComputerResource) -> None: + self._computer = computer + + self.capture_screenshot = to_custom_streamed_response_wrapper( + computer.capture_screenshot, + StreamedBinaryAPIResponse, + ) + self.click_mouse = to_streamed_response_wrapper( + computer.click_mouse, + ) + self.drag_mouse = to_streamed_response_wrapper( + computer.drag_mouse, + ) + self.move_mouse = to_streamed_response_wrapper( + computer.move_mouse, + ) + self.press_key = to_streamed_response_wrapper( + computer.press_key, + ) + self.scroll = to_streamed_response_wrapper( + computer.scroll, + ) + self.set_cursor_visibility = to_streamed_response_wrapper( + computer.set_cursor_visibility, + ) + self.type_text = to_streamed_response_wrapper( + computer.type_text, + ) + + +class 
AsyncComputerResourceWithStreamingResponse: + def __init__(self, computer: AsyncComputerResource) -> None: + self._computer = computer + + self.capture_screenshot = async_to_custom_streamed_response_wrapper( + computer.capture_screenshot, + AsyncStreamedBinaryAPIResponse, + ) + self.click_mouse = async_to_streamed_response_wrapper( + computer.click_mouse, + ) + self.drag_mouse = async_to_streamed_response_wrapper( + computer.drag_mouse, + ) + self.move_mouse = async_to_streamed_response_wrapper( + computer.move_mouse, + ) + self.press_key = async_to_streamed_response_wrapper( + computer.press_key, + ) + self.scroll = async_to_streamed_response_wrapper( + computer.scroll, + ) + self.set_cursor_visibility = async_to_streamed_response_wrapper( + computer.set_cursor_visibility, + ) + self.type_text = async_to_streamed_response_wrapper( + computer.type_text, + ) diff --git a/src/kernel/resources/browsers/fs/__init__.py b/src/kernel/resources/browsers/fs/__init__.py new file mode 100644 index 0000000..8195b3f --- /dev/null +++ b/src/kernel/resources/browsers/fs/__init__.py @@ -0,0 +1,33 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from .fs import ( + FsResource, + AsyncFsResource, + FsResourceWithRawResponse, + AsyncFsResourceWithRawResponse, + FsResourceWithStreamingResponse, + AsyncFsResourceWithStreamingResponse, +) +from .watch import ( + WatchResource, + AsyncWatchResource, + WatchResourceWithRawResponse, + AsyncWatchResourceWithRawResponse, + WatchResourceWithStreamingResponse, + AsyncWatchResourceWithStreamingResponse, +) + +__all__ = [ + "WatchResource", + "AsyncWatchResource", + "WatchResourceWithRawResponse", + "AsyncWatchResourceWithRawResponse", + "WatchResourceWithStreamingResponse", + "AsyncWatchResourceWithStreamingResponse", + "FsResource", + "AsyncFsResource", + "FsResourceWithRawResponse", + "AsyncFsResourceWithRawResponse", + "FsResourceWithStreamingResponse", + "AsyncFsResourceWithStreamingResponse", +] diff --git a/src/kernel/resources/browsers/fs/fs.py b/src/kernel/resources/browsers/fs/fs.py new file mode 100644 index 0000000..0da0bdd --- /dev/null +++ b/src/kernel/resources/browsers/fs/fs.py @@ -0,0 +1,1364 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import Mapping, Iterable, cast + +import httpx + +from .watch import ( + WatchResource, + AsyncWatchResource, + WatchResourceWithRawResponse, + AsyncWatchResourceWithRawResponse, + WatchResourceWithStreamingResponse, + AsyncWatchResourceWithStreamingResponse, +) +from ...._files import read_file_content, async_read_file_content +from ...._types import Body, Omit, Query, Headers, NoneType, NotGiven, FileTypes, FileContent, omit, not_given +from ...._utils import extract_files, maybe_transform, deepcopy_minimal, async_maybe_transform +from ...._compat import cached_property +from ...._resource import SyncAPIResource, AsyncAPIResource +from ...._response import ( + BinaryAPIResponse, + AsyncBinaryAPIResponse, + StreamedBinaryAPIResponse, + AsyncStreamedBinaryAPIResponse, + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + to_custom_raw_response_wrapper, + async_to_streamed_response_wrapper, + to_custom_streamed_response_wrapper, + async_to_custom_raw_response_wrapper, + async_to_custom_streamed_response_wrapper, +) +from ...._base_client import make_request_options +from ....types.browsers import ( + f_move_params, + f_upload_params, + f_file_info_params, + f_read_file_params, + f_list_files_params, + f_upload_zip_params, + f_write_file_params, + f_delete_file_params, + f_create_directory_params, + f_delete_directory_params, + f_download_dir_zip_params, + f_set_file_permissions_params, +) +from ....types.browsers.f_file_info_response import FFileInfoResponse +from ....types.browsers.f_list_files_response import FListFilesResponse + +__all__ = ["FsResource", "AsyncFsResource"] + + +class FsResource(SyncAPIResource): + @cached_property + def watch(self) -> WatchResource: + return WatchResource(self._client) + + @cached_property + def with_raw_response(self) -> FsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return FsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> FsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return FsResourceWithStreamingResponse(self) + + def create_directory( + self, + id: str, + *, + path: str, + mode: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Create a new directory + + Args: + path: Absolute directory path to create. + + mode: Optional directory mode (octal string, e.g. 755). Defaults to 755. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return self._put( + f"/browsers/{id}/fs/create_directory", + body=maybe_transform( + { + "path": path, + "mode": mode, + }, + f_create_directory_params.FCreateDirectoryParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + def delete_directory( + self, + id: str, + *, + path: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Delete a directory + + Args: + path: Absolute path to delete. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return self._put( + f"/browsers/{id}/fs/delete_directory", + body=maybe_transform({"path": path}, f_delete_directory_params.FDeleteDirectoryParams), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + def delete_file( + self, + id: str, + *, + path: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Delete a file + + Args: + path: Absolute path to delete. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return self._put( + f"/browsers/{id}/fs/delete_file", + body=maybe_transform({"path": path}, f_delete_file_params.FDeleteFileParams), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + def download_dir_zip( + self, + id: str, + *, + path: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> BinaryAPIResponse: + """ + Returns a ZIP file containing the contents of the specified directory. + + Args: + path: Absolute directory path to archive and download. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "application/zip", **(extra_headers or {})} + return self._get( + f"/browsers/{id}/fs/download_dir_zip", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform({"path": path}, f_download_dir_zip_params.FDownloadDirZipParams), + ), + cast_to=BinaryAPIResponse, + ) + + def file_info( + self, + id: str, + *, + path: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> FFileInfoResponse: + """ + Get information about a file or directory + + Args: + path: Absolute path of the file or directory. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return self._get( + f"/browsers/{id}/fs/file_info", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform({"path": path}, f_file_info_params.FFileInfoParams), + ), + cast_to=FFileInfoResponse, + ) + + def list_files( + self, + id: str, + *, + path: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> FListFilesResponse: + """ + List files in a directory + + Args: + path: Absolute directory path. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return self._get( + f"/browsers/{id}/fs/list_files", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform({"path": path}, f_list_files_params.FListFilesParams), + ), + cast_to=FListFilesResponse, + ) + + def move( + self, + id: str, + *, + dest_path: str, + src_path: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Move or rename a file or directory + + Args: + dest_path: Absolute destination path. + + src_path: Absolute source path. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return self._put( + f"/browsers/{id}/fs/move", + body=maybe_transform( + { + "dest_path": dest_path, + "src_path": src_path, + }, + f_move_params.FMoveParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + def read_file( + self, + id: str, + *, + path: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> BinaryAPIResponse: + """ + Read file contents + + Args: + path: Absolute file path to read. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "application/octet-stream", **(extra_headers or {})} + return self._get( + f"/browsers/{id}/fs/read_file", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform({"path": path}, f_read_file_params.FReadFileParams), + ), + cast_to=BinaryAPIResponse, + ) + + def set_file_permissions( + self, + id: str, + *, + mode: str, + path: str, + group: str | Omit = omit, + owner: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
+ # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Set file or directory permissions/ownership + + Args: + mode: File mode bits (octal string, e.g. 644). + + path: Absolute path whose permissions are to be changed. + + group: New group name or GID. + + owner: New owner username or UID. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return self._put( + f"/browsers/{id}/fs/set_file_permissions", + body=maybe_transform( + { + "mode": mode, + "path": path, + "group": group, + "owner": owner, + }, + f_set_file_permissions_params.FSetFilePermissionsParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + def upload( + self, + id: str, + *, + files: Iterable[f_upload_params.File], + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Allows uploading single or multiple files to the remote filesystem. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + body = deepcopy_minimal({"files": files}) + extracted_files = extract_files(cast(Mapping[str, object], body), paths=[["files", "", "file"]]) + # It should be noted that the actual Content-Type header that will be + # sent to the server will contain a `boundary` parameter, e.g. + # multipart/form-data; boundary=---abc-- + extra_headers["Content-Type"] = "multipart/form-data" + return self._post( + f"/browsers/{id}/fs/upload", + body=maybe_transform(body, f_upload_params.FUploadParams), + files=extracted_files, + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + def upload_zip( + self, + id: str, + *, + dest_path: str, + zip_file: FileTypes, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Upload a zip file and extract its contents to the specified destination path. + + Args: + dest_path: Absolute destination directory to extract the archive to. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + body = deepcopy_minimal( + { + "dest_path": dest_path, + "zip_file": zip_file, + } + ) + files = extract_files(cast(Mapping[str, object], body), paths=[["zip_file"]]) + # It should be noted that the actual Content-Type header that will be + # sent to the server will contain a `boundary` parameter, e.g. + # multipart/form-data; boundary=---abc-- + extra_headers["Content-Type"] = "multipart/form-data" + return self._post( + f"/browsers/{id}/fs/upload_zip", + body=maybe_transform(body, f_upload_zip_params.FUploadZipParams), + files=files, + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + def write_file( + self, + id: str, + contents: FileContent, + *, + path: str, + mode: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Write or create a file + + Args: + path: Destination absolute file path. + + mode: Optional file mode (octal string, e.g. 644). Defaults to 644. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + extra_headers["Content-Type"] = "application/octet-stream" + return self._put( + f"/browsers/{id}/fs/write_file", + body=read_file_content(contents), + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "path": path, + "mode": mode, + }, + f_write_file_params.FWriteFileParams, + ), + ), + cast_to=NoneType, + ) + + +class AsyncFsResource(AsyncAPIResource): + @cached_property + def watch(self) -> AsyncWatchResource: + return AsyncWatchResource(self._client) + + @cached_property + def with_raw_response(self) -> AsyncFsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. 
+ + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return AsyncFsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncFsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return AsyncFsResourceWithStreamingResponse(self) + + async def create_directory( + self, + id: str, + *, + path: str, + mode: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Create a new directory + + Args: + path: Absolute directory path to create. + + mode: Optional directory mode (octal string, e.g. 755). Defaults to 755. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return await self._put( + f"/browsers/{id}/fs/create_directory", + body=await async_maybe_transform( + { + "path": path, + "mode": mode, + }, + f_create_directory_params.FCreateDirectoryParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + async def delete_directory( + self, + id: str, + *, + path: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Delete a directory + + Args: + path: Absolute path to delete. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return await self._put( + f"/browsers/{id}/fs/delete_directory", + body=await async_maybe_transform({"path": path}, f_delete_directory_params.FDeleteDirectoryParams), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + async def delete_file( + self, + id: str, + *, + path: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Delete a file + + Args: + path: Absolute path to delete. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return await self._put( + f"/browsers/{id}/fs/delete_file", + body=await async_maybe_transform({"path": path}, f_delete_file_params.FDeleteFileParams), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + async def download_dir_zip( + self, + id: str, + *, + path: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AsyncBinaryAPIResponse: + """ + Returns a ZIP file containing the contents of the specified directory. + + Args: + path: Absolute directory path to archive and download. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "application/zip", **(extra_headers or {})} + return await self._get( + f"/browsers/{id}/fs/download_dir_zip", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform({"path": path}, f_download_dir_zip_params.FDownloadDirZipParams), + ), + cast_to=AsyncBinaryAPIResponse, + ) + + async def file_info( + self, + id: str, + *, + path: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> FFileInfoResponse: + """ + Get information about a file or directory + + Args: + path: Absolute path of the file or directory. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return await self._get( + f"/browsers/{id}/fs/file_info", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform({"path": path}, f_file_info_params.FFileInfoParams), + ), + cast_to=FFileInfoResponse, + ) + + async def list_files( + self, + id: str, + *, + path: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> FListFilesResponse: + """ + List files in a directory + + Args: + path: Absolute directory path. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return await self._get( + f"/browsers/{id}/fs/list_files", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform({"path": path}, f_list_files_params.FListFilesParams), + ), + cast_to=FListFilesResponse, + ) + + async def move( + self, + id: str, + *, + dest_path: str, + src_path: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Move or rename a file or directory + + Args: + dest_path: Absolute destination path. + + src_path: Absolute source path. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return await self._put( + f"/browsers/{id}/fs/move", + body=await async_maybe_transform( + { + "dest_path": dest_path, + "src_path": src_path, + }, + f_move_params.FMoveParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + async def read_file( + self, + id: str, + *, + path: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
+ # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AsyncBinaryAPIResponse: + """ + Read file contents + + Args: + path: Absolute file path to read. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "application/octet-stream", **(extra_headers or {})} + return await self._get( + f"/browsers/{id}/fs/read_file", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform({"path": path}, f_read_file_params.FReadFileParams), + ), + cast_to=AsyncBinaryAPIResponse, + ) + + async def set_file_permissions( + self, + id: str, + *, + mode: str, + path: str, + group: str | Omit = omit, + owner: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Set file or directory permissions/ownership + + Args: + mode: File mode bits (octal string, e.g. 644). + + path: Absolute path whose permissions are to be changed. + + group: New group name or GID. + + owner: New owner username or UID. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return await self._put( + f"/browsers/{id}/fs/set_file_permissions", + body=await async_maybe_transform( + { + "mode": mode, + "path": path, + "group": group, + "owner": owner, + }, + f_set_file_permissions_params.FSetFilePermissionsParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + async def upload( + self, + id: str, + *, + files: Iterable[f_upload_params.File], + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Allows uploading single or multiple files to the remote filesystem. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + body = deepcopy_minimal({"files": files}) + extracted_files = extract_files(cast(Mapping[str, object], body), paths=[["files", "", "file"]]) + # It should be noted that the actual Content-Type header that will be + # sent to the server will contain a `boundary` parameter, e.g. + # multipart/form-data; boundary=---abc-- + extra_headers["Content-Type"] = "multipart/form-data" + return await self._post( + f"/browsers/{id}/fs/upload", + body=await async_maybe_transform(body, f_upload_params.FUploadParams), + files=extracted_files, + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + async def upload_zip( + self, + id: str, + *, + dest_path: str, + zip_file: FileTypes, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Upload a zip file and extract its contents to the specified destination path. + + Args: + dest_path: Absolute destination directory to extract the archive to. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + body = deepcopy_minimal( + { + "dest_path": dest_path, + "zip_file": zip_file, + } + ) + files = extract_files(cast(Mapping[str, object], body), paths=[["zip_file"]]) + # It should be noted that the actual Content-Type header that will be + # sent to the server will contain a `boundary` parameter, e.g. + # multipart/form-data; boundary=---abc-- + extra_headers["Content-Type"] = "multipart/form-data" + return await self._post( + f"/browsers/{id}/fs/upload_zip", + body=await async_maybe_transform(body, f_upload_zip_params.FUploadZipParams), + files=files, + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + async def write_file( + self, + id: str, + contents: FileContent, + *, + path: str, + mode: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Write or create a file + + Args: + path: Destination absolute file path. 
+ + mode: Optional file mode (octal string, e.g. 644). Defaults to 644. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + extra_headers["Content-Type"] = "application/octet-stream" + return await self._put( + f"/browsers/{id}/fs/write_file", + body=await async_read_file_content(contents), + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + { + "path": path, + "mode": mode, + }, + f_write_file_params.FWriteFileParams, + ), + ), + cast_to=NoneType, + ) + + +class FsResourceWithRawResponse: + def __init__(self, fs: FsResource) -> None: + self._fs = fs + + self.create_directory = to_raw_response_wrapper( + fs.create_directory, + ) + self.delete_directory = to_raw_response_wrapper( + fs.delete_directory, + ) + self.delete_file = to_raw_response_wrapper( + fs.delete_file, + ) + self.download_dir_zip = to_custom_raw_response_wrapper( + fs.download_dir_zip, + BinaryAPIResponse, + ) + self.file_info = to_raw_response_wrapper( + fs.file_info, + ) + self.list_files = to_raw_response_wrapper( + fs.list_files, + ) + self.move = to_raw_response_wrapper( + fs.move, + ) + self.read_file = to_custom_raw_response_wrapper( + fs.read_file, + BinaryAPIResponse, + ) + self.set_file_permissions = to_raw_response_wrapper( + fs.set_file_permissions, + ) + self.upload = to_raw_response_wrapper( + fs.upload, + ) + self.upload_zip = to_raw_response_wrapper( + fs.upload_zip, + ) + self.write_file = to_raw_response_wrapper( + fs.write_file, + ) + + @cached_property + def watch(self) -> WatchResourceWithRawResponse: + return WatchResourceWithRawResponse(self._fs.watch) + + +class AsyncFsResourceWithRawResponse: + def __init__(self, fs: AsyncFsResource) -> None: + self._fs = fs + + self.create_directory = async_to_raw_response_wrapper( + fs.create_directory, + ) + self.delete_directory = async_to_raw_response_wrapper( + fs.delete_directory, + ) + self.delete_file = async_to_raw_response_wrapper( + fs.delete_file, + ) + self.download_dir_zip = async_to_custom_raw_response_wrapper( + fs.download_dir_zip, + AsyncBinaryAPIResponse, + ) + self.file_info = async_to_raw_response_wrapper( + fs.file_info, + ) + self.list_files = async_to_raw_response_wrapper( + fs.list_files, + ) + self.move = async_to_raw_response_wrapper( + fs.move, + ) + self.read_file = async_to_custom_raw_response_wrapper( + fs.read_file, + AsyncBinaryAPIResponse, + ) + self.set_file_permissions = async_to_raw_response_wrapper( + fs.set_file_permissions, + ) + self.upload = async_to_raw_response_wrapper( + fs.upload, + ) + self.upload_zip = async_to_raw_response_wrapper( + fs.upload_zip, + ) + self.write_file = async_to_raw_response_wrapper( + fs.write_file, + ) + + @cached_property + def watch(self) -> AsyncWatchResourceWithRawResponse: + return AsyncWatchResourceWithRawResponse(self._fs.watch) + + +class FsResourceWithStreamingResponse: + def __init__(self, fs: FsResource) -> None: + self._fs = fs + + self.create_directory = to_streamed_response_wrapper( + fs.create_directory, + ) + self.delete_directory = to_streamed_response_wrapper( + fs.delete_directory, + ) + self.delete_file = 
to_streamed_response_wrapper( + fs.delete_file, + ) + self.download_dir_zip = to_custom_streamed_response_wrapper( + fs.download_dir_zip, + StreamedBinaryAPIResponse, + ) + self.file_info = to_streamed_response_wrapper( + fs.file_info, + ) + self.list_files = to_streamed_response_wrapper( + fs.list_files, + ) + self.move = to_streamed_response_wrapper( + fs.move, + ) + self.read_file = to_custom_streamed_response_wrapper( + fs.read_file, + StreamedBinaryAPIResponse, + ) + self.set_file_permissions = to_streamed_response_wrapper( + fs.set_file_permissions, + ) + self.upload = to_streamed_response_wrapper( + fs.upload, + ) + self.upload_zip = to_streamed_response_wrapper( + fs.upload_zip, + ) + self.write_file = to_streamed_response_wrapper( + fs.write_file, + ) + + @cached_property + def watch(self) -> WatchResourceWithStreamingResponse: + return WatchResourceWithStreamingResponse(self._fs.watch) + + +class AsyncFsResourceWithStreamingResponse: + def __init__(self, fs: AsyncFsResource) -> None: + self._fs = fs + + self.create_directory = async_to_streamed_response_wrapper( + fs.create_directory, + ) + self.delete_directory = async_to_streamed_response_wrapper( + fs.delete_directory, + ) + self.delete_file = async_to_streamed_response_wrapper( + fs.delete_file, + ) + self.download_dir_zip = async_to_custom_streamed_response_wrapper( + fs.download_dir_zip, + AsyncStreamedBinaryAPIResponse, + ) + self.file_info = async_to_streamed_response_wrapper( + fs.file_info, + ) + self.list_files = async_to_streamed_response_wrapper( + fs.list_files, + ) + self.move = async_to_streamed_response_wrapper( + fs.move, + ) + self.read_file = async_to_custom_streamed_response_wrapper( + fs.read_file, + AsyncStreamedBinaryAPIResponse, + ) + self.set_file_permissions = async_to_streamed_response_wrapper( + fs.set_file_permissions, + ) + self.upload = async_to_streamed_response_wrapper( + fs.upload, + ) + self.upload_zip = async_to_streamed_response_wrapper( + fs.upload_zip, + ) + self.write_file = async_to_streamed_response_wrapper( + fs.write_file, + ) + + @cached_property + def watch(self) -> AsyncWatchResourceWithStreamingResponse: + return AsyncWatchResourceWithStreamingResponse(self._fs.watch) diff --git a/src/kernel/resources/browsers/fs/watch.py b/src/kernel/resources/browsers/fs/watch.py new file mode 100644 index 0000000..2a5c1e3 --- /dev/null +++ b/src/kernel/resources/browsers/fs/watch.py @@ -0,0 +1,369 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
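Taken together, the FsResource methods in fs.py above map one-to-one onto the /browsers/{id}/fs/* endpoints. A minimal usage sketch follows; it assumes the package exposes a `Kernel` client class, that the browsers resource mounts this as `client.browsers.fs` (the usual Stainless layout), and that the browser ID shown is hypothetical:

from kernel import Kernel  # assumed top-level client export

client = Kernel()  # assumes the API key is picked up from the environment
browser_id = "browser_123"  # hypothetical browser ID

# Create a directory, write a file into it, then read it back.
client.browsers.fs.create_directory(browser_id, path="/tmp/demo", mode="755")
client.browsers.fs.write_file(browser_id, b"hello, world\n", path="/tmp/demo/hello.txt")
data = client.browsers.fs.read_file(browser_id, path="/tmp/demo/hello.txt")
print(data.read())  # BinaryAPIResponse; .read() assumed to return the raw bytes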
+ +from __future__ import annotations + +import httpx + +from ...._types import Body, Omit, Query, Headers, NoneType, NotGiven, omit, not_given +from ...._utils import maybe_transform, async_maybe_transform +from ...._compat import cached_property +from ...._resource import SyncAPIResource, AsyncAPIResource +from ...._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ...._streaming import Stream, AsyncStream +from ...._base_client import make_request_options +from ....types.browsers.fs import watch_start_params +from ....types.browsers.fs.watch_start_response import WatchStartResponse +from ....types.browsers.fs.watch_events_response import WatchEventsResponse + +__all__ = ["WatchResource", "AsyncWatchResource"] + + +class WatchResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> WatchResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return WatchResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> WatchResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return WatchResourceWithStreamingResponse(self) + + def events( + self, + watch_id: str, + *, + id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> Stream[WatchEventsResponse]: + """ + Stream filesystem events for a watch + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + if not watch_id: + raise ValueError(f"Expected a non-empty value for `watch_id` but received {watch_id!r}") + extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} + return self._get( + f"/browsers/{id}/fs/watch/{watch_id}/events", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=WatchEventsResponse, + stream=True, + stream_cls=Stream[WatchEventsResponse], + ) + + def start( + self, + id: str, + *, + path: str, + recursive: bool | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> WatchStartResponse: + """ + Watch a directory for changes + + Args: + path: Directory to watch. + + recursive: Whether to watch recursively. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return self._post( + f"/browsers/{id}/fs/watch", + body=maybe_transform( + { + "path": path, + "recursive": recursive, + }, + watch_start_params.WatchStartParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=WatchStartResponse, + ) + + def stop( + self, + watch_id: str, + *, + id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Stop watching a directory + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + if not watch_id: + raise ValueError(f"Expected a non-empty value for `watch_id` but received {watch_id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return self._delete( + f"/browsers/{id}/fs/watch/{watch_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + +class AsyncWatchResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncWatchResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return AsyncWatchResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncWatchResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return AsyncWatchResourceWithStreamingResponse(self) + + async def events( + self, + watch_id: str, + *, + id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AsyncStream[WatchEventsResponse]: + """ + Stream filesystem events for a watch + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + if not watch_id: + raise ValueError(f"Expected a non-empty value for `watch_id` but received {watch_id!r}") + extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} + return await self._get( + f"/browsers/{id}/fs/watch/{watch_id}/events", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=WatchEventsResponse, + stream=True, + stream_cls=AsyncStream[WatchEventsResponse], + ) + + async def start( + self, + id: str, + *, + path: str, + recursive: bool | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> WatchStartResponse: + """ + Watch a directory for changes + + Args: + path: Directory to watch. + + recursive: Whether to watch recursively. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return await self._post( + f"/browsers/{id}/fs/watch", + body=await async_maybe_transform( + { + "path": path, + "recursive": recursive, + }, + watch_start_params.WatchStartParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=WatchStartResponse, + ) + + async def stop( + self, + watch_id: str, + *, + id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Stop watching a directory + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + if not watch_id: + raise ValueError(f"Expected a non-empty value for `watch_id` but received {watch_id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return await self._delete( + f"/browsers/{id}/fs/watch/{watch_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + +class WatchResourceWithRawResponse: + def __init__(self, watch: WatchResource) -> None: + self._watch = watch + + self.events = to_raw_response_wrapper( + watch.events, + ) + self.start = to_raw_response_wrapper( + watch.start, + ) + self.stop = to_raw_response_wrapper( + watch.stop, + ) + + +class AsyncWatchResourceWithRawResponse: + def __init__(self, watch: AsyncWatchResource) -> None: + self._watch = watch + + self.events = async_to_raw_response_wrapper( + watch.events, + ) + self.start = async_to_raw_response_wrapper( + watch.start, + ) + self.stop = async_to_raw_response_wrapper( + watch.stop, + ) + + +class WatchResourceWithStreamingResponse: + def __init__(self, watch: WatchResource) -> None: + self._watch = watch + + self.events = to_streamed_response_wrapper( + watch.events, + ) + self.start = to_streamed_response_wrapper( + watch.start, + ) + self.stop = to_streamed_response_wrapper( + watch.stop, + ) + + +class AsyncWatchResourceWithStreamingResponse: + def __init__(self, watch: AsyncWatchResource) -> None: + self._watch = watch + + self.events = async_to_streamed_response_wrapper( + watch.events, + ) + self.start = async_to_streamed_response_wrapper( + watch.start, + ) + self.stop = async_to_streamed_response_wrapper( + watch.stop, + ) diff --git a/src/kernel/resources/browsers/logs.py b/src/kernel/resources/browsers/logs.py new file mode 100644 index 0000000..ab97a70 --- /dev/null +++ b/src/kernel/resources/browsers/logs.py @@ -0,0 +1,214 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal + +import httpx + +from ..._types import Body, Omit, Query, Headers, NotGiven, omit, not_given +from ..._utils import maybe_transform, async_maybe_transform +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ..._streaming import Stream, AsyncStream +from ..._base_client import make_request_options +from ...types.browsers import log_stream_params +from ...types.shared.log_event import LogEvent + +__all__ = ["LogsResource", "AsyncLogsResource"] + + +class LogsResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> LogsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. 
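The watch sub-resource above starts a directory watch, streams its events over SSE, and stops it. A hedged sketch, reusing the client and browser_id assumptions from the fs example; the field name on WatchStartResponse is assumed:

started = client.browsers.fs.watch.start(browser_id, path="/tmp/demo", recursive=True)
watch_id = started.watch_id  # assumed field name on WatchStartResponse

# events() returns a Stream[WatchEventsResponse]; iterating it consumes the SSE feed.
for event in client.browsers.fs.watch.events(watch_id, id=browser_id):
    print(event)
    break  # sample a single event for the sketch

client.browsers.fs.watch.stop(watch_id, id=browser_id)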
+ + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return LogsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> LogsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return LogsResourceWithStreamingResponse(self) + + def stream( + self, + id: str, + *, + source: Literal["path", "supervisor"], + follow: bool | Omit = omit, + path: str | Omit = omit, + supervisor_process: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> Stream[LogEvent]: + """ + Stream log files on the browser instance via SSE + + Args: + path: only required if source is path + + supervisor_process: only required if source is supervisor + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} + return self._get( + f"/browsers/{id}/logs/stream", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "source": source, + "follow": follow, + "path": path, + "supervisor_process": supervisor_process, + }, + log_stream_params.LogStreamParams, + ), + ), + cast_to=LogEvent, + stream=True, + stream_cls=Stream[LogEvent], + ) + + +class AsyncLogsResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncLogsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return AsyncLogsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncLogsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return AsyncLogsResourceWithStreamingResponse(self) + + async def stream( + self, + id: str, + *, + source: Literal["path", "supervisor"], + follow: bool | Omit = omit, + path: str | Omit = omit, + supervisor_process: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AsyncStream[LogEvent]: + """ + Stream log files on the browser instance via SSE + + Args: + path: only required if source is path + + supervisor_process: only required if source is supervisor + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} + return await self._get( + f"/browsers/{id}/logs/stream", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + { + "source": source, + "follow": follow, + "path": path, + "supervisor_process": supervisor_process, + }, + log_stream_params.LogStreamParams, + ), + ), + cast_to=LogEvent, + stream=True, + stream_cls=AsyncStream[LogEvent], + ) + + +class LogsResourceWithRawResponse: + def __init__(self, logs: LogsResource) -> None: + self._logs = logs + + self.stream = to_raw_response_wrapper( + logs.stream, + ) + + +class AsyncLogsResourceWithRawResponse: + def __init__(self, logs: AsyncLogsResource) -> None: + self._logs = logs + + self.stream = async_to_raw_response_wrapper( + logs.stream, + ) + + +class LogsResourceWithStreamingResponse: + def __init__(self, logs: LogsResource) -> None: + self._logs = logs + + self.stream = to_streamed_response_wrapper( + logs.stream, + ) + + +class AsyncLogsResourceWithStreamingResponse: + def __init__(self, logs: AsyncLogsResource) -> None: + self._logs = logs + + self.stream = async_to_streamed_response_wrapper( + logs.stream, + ) diff --git a/src/kernel/resources/browsers/playwright.py b/src/kernel/resources/browsers/playwright.py new file mode 100644 index 0000000..5c47e3b --- /dev/null +++ b/src/kernel/resources/browsers/playwright.py @@ -0,0 +1,205 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import httpx + +from ..._types import Body, Omit, Query, Headers, NotGiven, omit, not_given +from ..._utils import maybe_transform, async_maybe_transform +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ..._base_client import make_request_options +from ...types.browsers import playwright_execute_params +from ...types.browsers.playwright_execute_response import PlaywrightExecuteResponse + +__all__ = ["PlaywrightResource", "AsyncPlaywrightResource"] + + +class PlaywrightResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> PlaywrightResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. 
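LogsResource.stream above returns a Stream[LogEvent] fed by the SSE endpoint; `source` selects whether a file path or a supervisor-managed process is tailed. A hedged sketch under the same client assumptions as above, with a hypothetical process name:

for log_event in client.browsers.logs.stream(
    browser_id,
    source="supervisor",
    supervisor_process="chromium",  # hypothetical; required when source="supervisor"
    follow=True,
):
    print(log_event)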
+ + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return PlaywrightResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> PlaywrightResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return PlaywrightResourceWithStreamingResponse(self) + + def execute( + self, + id: str, + *, + code: str, + timeout_sec: int | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> PlaywrightExecuteResponse: + """ + Execute arbitrary Playwright code in a fresh execution context against the + browser. The code runs in the same VM as the browser, minimizing latency and + maximizing throughput. It has access to 'page', 'context', and 'browser' + variables. It can `return` a value, and this value is returned in the response. + + Args: + code: TypeScript/JavaScript code to execute. The code has access to 'page', 'context', + and 'browser' variables. It runs within a function, so you can use a return + statement at the end to return a value. This value is returned as the `result` + property in the response. Example: "await page.goto('https://example.com'); + return await page.title();" + + timeout_sec: Maximum execution time in seconds. Default is 60. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return self._post( + f"/browsers/{id}/playwright/execute", + body=maybe_transform( + { + "code": code, + "timeout_sec": timeout_sec, + }, + playwright_execute_params.PlaywrightExecuteParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=PlaywrightExecuteResponse, + ) + + +class AsyncPlaywrightResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncPlaywrightResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return AsyncPlaywrightResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncPlaywrightResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
+ + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return AsyncPlaywrightResourceWithStreamingResponse(self) + + async def execute( + self, + id: str, + *, + code: str, + timeout_sec: int | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> PlaywrightExecuteResponse: + """ + Execute arbitrary Playwright code in a fresh execution context against the + browser. The code runs in the same VM as the browser, minimizing latency and + maximizing throughput. It has access to 'page', 'context', and 'browser' + variables. It can `return` a value, and this value is returned in the response. + + Args: + code: TypeScript/JavaScript code to execute. The code has access to 'page', 'context', + and 'browser' variables. It runs within a function, so you can use a return + statement at the end to return a value. This value is returned as the `result` + property in the response. Example: "await page.goto('https://example.com'); + return await page.title();" + + timeout_sec: Maximum execution time in seconds. Default is 60. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return await self._post( + f"/browsers/{id}/playwright/execute", + body=await async_maybe_transform( + { + "code": code, + "timeout_sec": timeout_sec, + }, + playwright_execute_params.PlaywrightExecuteParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=PlaywrightExecuteResponse, + ) + + +class PlaywrightResourceWithRawResponse: + def __init__(self, playwright: PlaywrightResource) -> None: + self._playwright = playwright + + self.execute = to_raw_response_wrapper( + playwright.execute, + ) + + +class AsyncPlaywrightResourceWithRawResponse: + def __init__(self, playwright: AsyncPlaywrightResource) -> None: + self._playwright = playwright + + self.execute = async_to_raw_response_wrapper( + playwright.execute, + ) + + +class PlaywrightResourceWithStreamingResponse: + def __init__(self, playwright: PlaywrightResource) -> None: + self._playwright = playwright + + self.execute = to_streamed_response_wrapper( + playwright.execute, + ) + + +class AsyncPlaywrightResourceWithStreamingResponse: + def __init__(self, playwright: AsyncPlaywrightResource) -> None: + self._playwright = playwright + + self.execute = async_to_streamed_response_wrapper( + playwright.execute, + ) diff --git a/src/kernel/resources/browsers/process.py b/src/kernel/resources/browsers/process.py new file mode 100644 index 0000000..f5c4341 --- /dev/null +++ b/src/kernel/resources/browsers/process.py @@ -0,0 +1,742 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
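+# Illustrative usage (editorial sketch, not part of the Stainless-generated file): how the
+# process resource defined in this module is typically called from application code. It
+# assumes the SDK's top-level client is exported as `Kernel` and that this resource is
+# mounted at `client.browsers.process`; the browser id below is hypothetical.
+#
+#     from kernel import Kernel
+#
+#     client = Kernel()  # reads the API key from the environment
+#     result = client.browsers.process.exec(
+#         id="browser_abc123",  # hypothetical browser session id
+#         command="echo",
+#         args=["hello from the browser VM"],
+#         timeout_sec=10,
+#     )
+#     print(result)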
+ +from __future__ import annotations + +from typing import Dict, Optional +from typing_extensions import Literal + +import httpx + +from ..._types import Body, Omit, Query, Headers, NotGiven, SequenceNotStr, omit, not_given +from ..._utils import maybe_transform, async_maybe_transform +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ..._streaming import Stream, AsyncStream +from ..._base_client import make_request_options +from ...types.browsers import process_exec_params, process_kill_params, process_spawn_params, process_stdin_params +from ...types.browsers.process_exec_response import ProcessExecResponse +from ...types.browsers.process_kill_response import ProcessKillResponse +from ...types.browsers.process_spawn_response import ProcessSpawnResponse +from ...types.browsers.process_stdin_response import ProcessStdinResponse +from ...types.browsers.process_status_response import ProcessStatusResponse +from ...types.browsers.process_stdout_stream_response import ProcessStdoutStreamResponse + +__all__ = ["ProcessResource", "AsyncProcessResource"] + + +class ProcessResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> ProcessResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return ProcessResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> ProcessResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return ProcessResourceWithStreamingResponse(self) + + def exec( + self, + id: str, + *, + command: str, + args: SequenceNotStr[str] | Omit = omit, + as_root: bool | Omit = omit, + as_user: Optional[str] | Omit = omit, + cwd: Optional[str] | Omit = omit, + env: Dict[str, str] | Omit = omit, + timeout_sec: Optional[int] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProcessExecResponse: + """ + Execute a command synchronously + + Args: + command: Executable or shell command to run. + + args: Command arguments. + + as_root: Run the process with root privileges. + + as_user: Run the process as this user. + + cwd: Working directory (absolute path) to run the command in. + + env: Environment variables to set for the process. + + timeout_sec: Maximum execution time in seconds. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return self._post( + f"/browsers/{id}/process/exec", + body=maybe_transform( + { + "command": command, + "args": args, + "as_root": as_root, + "as_user": as_user, + "cwd": cwd, + "env": env, + "timeout_sec": timeout_sec, + }, + process_exec_params.ProcessExecParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ProcessExecResponse, + ) + + def kill( + self, + process_id: str, + *, + id: str, + signal: Literal["TERM", "KILL", "INT", "HUP"], + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProcessKillResponse: + """ + Send signal to process + + Args: + signal: Signal to send. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + if not process_id: + raise ValueError(f"Expected a non-empty value for `process_id` but received {process_id!r}") + return self._post( + f"/browsers/{id}/process/{process_id}/kill", + body=maybe_transform({"signal": signal}, process_kill_params.ProcessKillParams), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ProcessKillResponse, + ) + + def spawn( + self, + id: str, + *, + command: str, + args: SequenceNotStr[str] | Omit = omit, + as_root: bool | Omit = omit, + as_user: Optional[str] | Omit = omit, + cwd: Optional[str] | Omit = omit, + env: Dict[str, str] | Omit = omit, + timeout_sec: Optional[int] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProcessSpawnResponse: + """ + Execute a command asynchronously + + Args: + command: Executable or shell command to run. + + args: Command arguments. + + as_root: Run the process with root privileges. + + as_user: Run the process as this user. + + cwd: Working directory (absolute path) to run the command in. + + env: Environment variables to set for the process. + + timeout_sec: Maximum execution time in seconds. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return self._post( + f"/browsers/{id}/process/spawn", + body=maybe_transform( + { + "command": command, + "args": args, + "as_root": as_root, + "as_user": as_user, + "cwd": cwd, + "env": env, + "timeout_sec": timeout_sec, + }, + process_spawn_params.ProcessSpawnParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ProcessSpawnResponse, + ) + + def status( + self, + process_id: str, + *, + id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProcessStatusResponse: + """ + Get process status + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + if not process_id: + raise ValueError(f"Expected a non-empty value for `process_id` but received {process_id!r}") + return self._get( + f"/browsers/{id}/process/{process_id}/status", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ProcessStatusResponse, + ) + + def stdin( + self, + process_id: str, + *, + id: str, + data_b64: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProcessStdinResponse: + """ + Write to process stdin + + Args: + data_b64: Base64-encoded data to write. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + if not process_id: + raise ValueError(f"Expected a non-empty value for `process_id` but received {process_id!r}") + return self._post( + f"/browsers/{id}/process/{process_id}/stdin", + body=maybe_transform({"data_b64": data_b64}, process_stdin_params.ProcessStdinParams), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ProcessStdinResponse, + ) + + def stdout_stream( + self, + process_id: str, + *, + id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> Stream[ProcessStdoutStreamResponse]: + """ + Stream process stdout via SSE + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + if not process_id: + raise ValueError(f"Expected a non-empty value for `process_id` but received {process_id!r}") + extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} + return self._get( + f"/browsers/{id}/process/{process_id}/stdout/stream", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ProcessStdoutStreamResponse, + stream=True, + stream_cls=Stream[ProcessStdoutStreamResponse], + ) + + +class AsyncProcessResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncProcessResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return AsyncProcessResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncProcessResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return AsyncProcessResourceWithStreamingResponse(self) + + async def exec( + self, + id: str, + *, + command: str, + args: SequenceNotStr[str] | Omit = omit, + as_root: bool | Omit = omit, + as_user: Optional[str] | Omit = omit, + cwd: Optional[str] | Omit = omit, + env: Dict[str, str] | Omit = omit, + timeout_sec: Optional[int] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProcessExecResponse: + """ + Execute a command synchronously + + Args: + command: Executable or shell command to run. + + args: Command arguments. + + as_root: Run the process with root privileges. + + as_user: Run the process as this user. + + cwd: Working directory (absolute path) to run the command in. + + env: Environment variables to set for the process. + + timeout_sec: Maximum execution time in seconds. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return await self._post( + f"/browsers/{id}/process/exec", + body=await async_maybe_transform( + { + "command": command, + "args": args, + "as_root": as_root, + "as_user": as_user, + "cwd": cwd, + "env": env, + "timeout_sec": timeout_sec, + }, + process_exec_params.ProcessExecParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ProcessExecResponse, + ) + + async def kill( + self, + process_id: str, + *, + id: str, + signal: Literal["TERM", "KILL", "INT", "HUP"], + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProcessKillResponse: + """ + Send signal to process + + Args: + signal: Signal to send. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + if not process_id: + raise ValueError(f"Expected a non-empty value for `process_id` but received {process_id!r}") + return await self._post( + f"/browsers/{id}/process/{process_id}/kill", + body=await async_maybe_transform({"signal": signal}, process_kill_params.ProcessKillParams), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ProcessKillResponse, + ) + + async def spawn( + self, + id: str, + *, + command: str, + args: SequenceNotStr[str] | Omit = omit, + as_root: bool | Omit = omit, + as_user: Optional[str] | Omit = omit, + cwd: Optional[str] | Omit = omit, + env: Dict[str, str] | Omit = omit, + timeout_sec: Optional[int] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProcessSpawnResponse: + """ + Execute a command asynchronously + + Args: + command: Executable or shell command to run. + + args: Command arguments. + + as_root: Run the process with root privileges. + + as_user: Run the process as this user. + + cwd: Working directory (absolute path) to run the command in. + + env: Environment variables to set for the process. + + timeout_sec: Maximum execution time in seconds. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return await self._post( + f"/browsers/{id}/process/spawn", + body=await async_maybe_transform( + { + "command": command, + "args": args, + "as_root": as_root, + "as_user": as_user, + "cwd": cwd, + "env": env, + "timeout_sec": timeout_sec, + }, + process_spawn_params.ProcessSpawnParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ProcessSpawnResponse, + ) + + async def status( + self, + process_id: str, + *, + id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProcessStatusResponse: + """ + Get process status + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + if not process_id: + raise ValueError(f"Expected a non-empty value for `process_id` but received {process_id!r}") + return await self._get( + f"/browsers/{id}/process/{process_id}/status", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ProcessStatusResponse, + ) + + async def stdin( + self, + process_id: str, + *, + id: str, + data_b64: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProcessStdinResponse: + """ + Write to process stdin + + Args: + data_b64: Base64-encoded data to write. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + if not process_id: + raise ValueError(f"Expected a non-empty value for `process_id` but received {process_id!r}") + return await self._post( + f"/browsers/{id}/process/{process_id}/stdin", + body=await async_maybe_transform({"data_b64": data_b64}, process_stdin_params.ProcessStdinParams), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ProcessStdinResponse, + ) + + async def stdout_stream( + self, + process_id: str, + *, + id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AsyncStream[ProcessStdoutStreamResponse]: + """ + Stream process stdout via SSE + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + if not process_id: + raise ValueError(f"Expected a non-empty value for `process_id` but received {process_id!r}") + extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} + return await self._get( + f"/browsers/{id}/process/{process_id}/stdout/stream", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ProcessStdoutStreamResponse, + stream=True, + stream_cls=AsyncStream[ProcessStdoutStreamResponse], + ) + + +class ProcessResourceWithRawResponse: + def __init__(self, process: ProcessResource) -> None: + self._process = process + + self.exec = to_raw_response_wrapper( + process.exec, + ) + self.kill = to_raw_response_wrapper( + process.kill, + ) + self.spawn = to_raw_response_wrapper( + process.spawn, + ) + self.status = to_raw_response_wrapper( + process.status, + ) + self.stdin = to_raw_response_wrapper( + process.stdin, + ) + self.stdout_stream = to_raw_response_wrapper( + process.stdout_stream, + ) + + +class AsyncProcessResourceWithRawResponse: + def __init__(self, process: AsyncProcessResource) -> None: + self._process = process + + self.exec = async_to_raw_response_wrapper( + process.exec, + ) + self.kill = async_to_raw_response_wrapper( + process.kill, + ) + self.spawn = async_to_raw_response_wrapper( + process.spawn, + ) + self.status = async_to_raw_response_wrapper( + process.status, + ) + self.stdin = async_to_raw_response_wrapper( + process.stdin, + ) + self.stdout_stream = async_to_raw_response_wrapper( + process.stdout_stream, + ) + + +class ProcessResourceWithStreamingResponse: + def __init__(self, process: ProcessResource) -> None: + self._process = process + + self.exec = to_streamed_response_wrapper( + process.exec, + ) + self.kill = to_streamed_response_wrapper( + process.kill, 
+ ) + self.spawn = to_streamed_response_wrapper( + process.spawn, + ) + self.status = to_streamed_response_wrapper( + process.status, + ) + self.stdin = to_streamed_response_wrapper( + process.stdin, + ) + self.stdout_stream = to_streamed_response_wrapper( + process.stdout_stream, + ) + + +class AsyncProcessResourceWithStreamingResponse: + def __init__(self, process: AsyncProcessResource) -> None: + self._process = process + + self.exec = async_to_streamed_response_wrapper( + process.exec, + ) + self.kill = async_to_streamed_response_wrapper( + process.kill, + ) + self.spawn = async_to_streamed_response_wrapper( + process.spawn, + ) + self.status = async_to_streamed_response_wrapper( + process.status, + ) + self.stdin = async_to_streamed_response_wrapper( + process.stdin, + ) + self.stdout_stream = async_to_streamed_response_wrapper( + process.stdout_stream, + ) diff --git a/src/kernel/resources/browsers/replays.py b/src/kernel/resources/browsers/replays.py new file mode 100644 index 0000000..8a1d199 --- /dev/null +++ b/src/kernel/resources/browsers/replays.py @@ -0,0 +1,454 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import httpx + +from ..._types import Body, Omit, Query, Headers, NoneType, NotGiven, omit, not_given +from ..._utils import maybe_transform, async_maybe_transform +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( + BinaryAPIResponse, + AsyncBinaryAPIResponse, + StreamedBinaryAPIResponse, + AsyncStreamedBinaryAPIResponse, + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + to_custom_raw_response_wrapper, + async_to_streamed_response_wrapper, + to_custom_streamed_response_wrapper, + async_to_custom_raw_response_wrapper, + async_to_custom_streamed_response_wrapper, +) +from ..._base_client import make_request_options +from ...types.browsers import replay_start_params +from ...types.browsers.replay_list_response import ReplayListResponse +from ...types.browsers.replay_start_response import ReplayStartResponse + +__all__ = ["ReplaysResource", "AsyncReplaysResource"] + + +class ReplaysResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> ReplaysResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return ReplaysResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> ReplaysResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return ReplaysResourceWithStreamingResponse(self) + + def list( + self, + id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ReplayListResponse: + """ + List all replays for the specified browser session. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return self._get( + f"/browsers/{id}/replays", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ReplayListResponse, + ) + + def download( + self, + replay_id: str, + *, + id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> BinaryAPIResponse: + """ + Download or stream the specified replay recording. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + if not replay_id: + raise ValueError(f"Expected a non-empty value for `replay_id` but received {replay_id!r}") + extra_headers = {"Accept": "video/mp4", **(extra_headers or {})} + return self._get( + f"/browsers/{id}/replays/{replay_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=BinaryAPIResponse, + ) + + def start( + self, + id: str, + *, + framerate: int | Omit = omit, + max_duration_in_seconds: int | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ReplayStartResponse: + """ + Start recording the browser session and return a replay ID. + + Args: + framerate: Recording framerate in fps. + + max_duration_in_seconds: Maximum recording duration in seconds. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return self._post( + f"/browsers/{id}/replays", + body=maybe_transform( + { + "framerate": framerate, + "max_duration_in_seconds": max_duration_in_seconds, + }, + replay_start_params.ReplayStartParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ReplayStartResponse, + ) + + def stop( + self, + replay_id: str, + *, + id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
+ # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Stop the specified replay recording and persist the video. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + if not replay_id: + raise ValueError(f"Expected a non-empty value for `replay_id` but received {replay_id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return self._post( + f"/browsers/{id}/replays/{replay_id}/stop", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + +class AsyncReplaysResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncReplaysResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return AsyncReplaysResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncReplaysResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return AsyncReplaysResourceWithStreamingResponse(self) + + async def list( + self, + id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ReplayListResponse: + """ + List all replays for the specified browser session. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return await self._get( + f"/browsers/{id}/replays", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ReplayListResponse, + ) + + async def download( + self, + replay_id: str, + *, + id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AsyncBinaryAPIResponse: + """ + Download or stream the specified replay recording. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + if not replay_id: + raise ValueError(f"Expected a non-empty value for `replay_id` but received {replay_id!r}") + extra_headers = {"Accept": "video/mp4", **(extra_headers or {})} + return await self._get( + f"/browsers/{id}/replays/{replay_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=AsyncBinaryAPIResponse, + ) + + async def start( + self, + id: str, + *, + framerate: int | Omit = omit, + max_duration_in_seconds: int | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ReplayStartResponse: + """ + Start recording the browser session and return a replay ID. + + Args: + framerate: Recording framerate in fps. + + max_duration_in_seconds: Maximum recording duration in seconds. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return await self._post( + f"/browsers/{id}/replays", + body=await async_maybe_transform( + { + "framerate": framerate, + "max_duration_in_seconds": max_duration_in_seconds, + }, + replay_start_params.ReplayStartParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ReplayStartResponse, + ) + + async def stop( + self, + replay_id: str, + *, + id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Stop the specified replay recording and persist the video. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + if not replay_id: + raise ValueError(f"Expected a non-empty value for `replay_id` but received {replay_id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return await self._post( + f"/browsers/{id}/replays/{replay_id}/stop", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + +class ReplaysResourceWithRawResponse: + def __init__(self, replays: ReplaysResource) -> None: + self._replays = replays + + self.list = to_raw_response_wrapper( + replays.list, + ) + self.download = to_custom_raw_response_wrapper( + replays.download, + BinaryAPIResponse, + ) + self.start = to_raw_response_wrapper( + replays.start, + ) + self.stop = to_raw_response_wrapper( + replays.stop, + ) + + +class AsyncReplaysResourceWithRawResponse: + def __init__(self, replays: AsyncReplaysResource) -> None: + self._replays = replays + + self.list = async_to_raw_response_wrapper( + replays.list, + ) + self.download = async_to_custom_raw_response_wrapper( + replays.download, + AsyncBinaryAPIResponse, + ) + self.start = async_to_raw_response_wrapper( + replays.start, + ) + self.stop = async_to_raw_response_wrapper( + replays.stop, + ) + + +class ReplaysResourceWithStreamingResponse: + def __init__(self, replays: ReplaysResource) -> None: + self._replays = replays + + self.list = to_streamed_response_wrapper( + replays.list, + ) + self.download = to_custom_streamed_response_wrapper( + replays.download, + StreamedBinaryAPIResponse, + ) + self.start = to_streamed_response_wrapper( + replays.start, + ) + self.stop = to_streamed_response_wrapper( + replays.stop, + ) + + +class AsyncReplaysResourceWithStreamingResponse: + def __init__(self, replays: AsyncReplaysResource) -> None: + self._replays = replays + + self.list = async_to_streamed_response_wrapper( + replays.list, + ) + self.download = async_to_custom_streamed_response_wrapper( + replays.download, + AsyncStreamedBinaryAPIResponse, + ) + self.start = async_to_streamed_response_wrapper( + replays.start, + ) + self.stop = async_to_streamed_response_wrapper( + replays.stop, + ) diff --git a/src/kernel/resources/credentials.py b/src/kernel/resources/credentials.py new file mode 100644 index 0000000..30e72e8 --- /dev/null +++ b/src/kernel/resources/credentials.py @@ -0,0 +1,711 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
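+# Illustrative usage (editorial sketch, not part of the Stainless-generated file): creating a
+# credential and fetching a current TOTP code with the resource defined in this module. It
+# assumes the SDK's top-level client is exported as `Kernel` and that this resource is
+# mounted at `client.credentials`; the domain, values, and TOTP secret below are hypothetical.
+#
+#     from kernel import Kernel
+#
+#     client = Kernel()  # reads the API key from the environment
+#     client.credentials.create(
+#         domain="example.com",
+#         name="example-login",
+#         values={"username": "alice", "password": "hunter2"},
+#         totp_secret="JBSWY3DPEHPK3PXP",  # hypothetical Base32 secret
+#     )
+#     code = client.credentials.totp_code("example-login")
+#     print(code)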
+ +from __future__ import annotations + +from typing import Dict, Optional + +import httpx + +from ..types import credential_list_params, credential_create_params, credential_update_params +from .._types import Body, Omit, Query, Headers, NoneType, NotGiven, omit, not_given +from .._utils import maybe_transform, async_maybe_transform +from .._compat import cached_property +from .._resource import SyncAPIResource, AsyncAPIResource +from .._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ..pagination import SyncOffsetPagination, AsyncOffsetPagination +from .._base_client import AsyncPaginator, make_request_options +from ..types.credential import Credential +from ..types.credential_totp_code_response import CredentialTotpCodeResponse + +__all__ = ["CredentialsResource", "AsyncCredentialsResource"] + + +class CredentialsResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> CredentialsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return CredentialsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> CredentialsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return CredentialsResourceWithStreamingResponse(self) + + def create( + self, + *, + domain: str, + name: str, + values: Dict[str, str], + sso_provider: str | Omit = omit, + totp_secret: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> Credential: + """ + Create a new credential for storing login information. + + Args: + domain: Target domain this credential is for + + name: Unique name for the credential within the organization + + values: Field name to value mapping (e.g., username, password) + + sso_provider: If set, indicates this credential should be used with the specified SSO provider + (e.g., google, github, microsoft). When the target site has a matching SSO + button, it will be clicked first before filling credential values on the + identity provider's login page. + + totp_secret: Base32-encoded TOTP secret for generating one-time passwords. Used for automatic + 2FA during login. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._post( + "/credentials", + body=maybe_transform( + { + "domain": domain, + "name": name, + "values": values, + "sso_provider": sso_provider, + "totp_secret": totp_secret, + }, + credential_create_params.CredentialCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Credential, + ) + + def retrieve( + self, + id_or_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> Credential: + """Retrieve a credential by its ID or name. + + Credential values are not returned. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + return self._get( + f"/credentials/{id_or_name}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Credential, + ) + + def update( + self, + id_or_name: str, + *, + name: str | Omit = omit, + sso_provider: Optional[str] | Omit = omit, + totp_secret: str | Omit = omit, + values: Dict[str, str] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> Credential: + """Update a credential's name or values. + + When values are provided, they are merged + with existing values (new keys are added, existing keys are overwritten). + + Args: + name: New name for the credential + + sso_provider: If set, indicates this credential should be used with the specified SSO + provider. Set to empty string or null to remove. + + totp_secret: Base32-encoded TOTP secret for generating one-time passwords. Spaces and + formatting are automatically normalized. Set to empty string to remove. + + values: Field name to value mapping. Values are merged with existing values (new keys + added, existing keys overwritten). 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + return self._patch( + f"/credentials/{id_or_name}", + body=maybe_transform( + { + "name": name, + "sso_provider": sso_provider, + "totp_secret": totp_secret, + "values": values, + }, + credential_update_params.CredentialUpdateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Credential, + ) + + def list( + self, + *, + domain: str | Omit = omit, + limit: int | Omit = omit, + offset: int | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> SyncOffsetPagination[Credential]: + """List credentials owned by the caller's organization. + + Credential values are not + returned. + + Args: + domain: Filter by domain + + limit: Maximum number of results to return + + offset: Number of results to skip + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get_api_list( + "/credentials", + page=SyncOffsetPagination[Credential], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "domain": domain, + "limit": limit, + "offset": offset, + }, + credential_list_params.CredentialListParams, + ), + ), + model=Credential, + ) + + def delete( + self, + id_or_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Delete a credential by its ID or name. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return self._delete( + f"/credentials/{id_or_name}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + def totp_code( + self, + id_or_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
+ # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> CredentialTotpCodeResponse: + """ + Returns the current 6-digit TOTP code for a credential with a configured + totp_secret. Use this to complete 2FA setup on sites or when you need a fresh + code. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + return self._get( + f"/credentials/{id_or_name}/totp-code", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=CredentialTotpCodeResponse, + ) + + +class AsyncCredentialsResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncCredentialsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return AsyncCredentialsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncCredentialsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return AsyncCredentialsResourceWithStreamingResponse(self) + + async def create( + self, + *, + domain: str, + name: str, + values: Dict[str, str], + sso_provider: str | Omit = omit, + totp_secret: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> Credential: + """ + Create a new credential for storing login information. + + Args: + domain: Target domain this credential is for + + name: Unique name for the credential within the organization + + values: Field name to value mapping (e.g., username, password) + + sso_provider: If set, indicates this credential should be used with the specified SSO provider + (e.g., google, github, microsoft). When the target site has a matching SSO + button, it will be clicked first before filling credential values on the + identity provider's login page. + + totp_secret: Base32-encoded TOTP secret for generating one-time passwords. Used for automatic + 2FA during login. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._post( + "/credentials", + body=await async_maybe_transform( + { + "domain": domain, + "name": name, + "values": values, + "sso_provider": sso_provider, + "totp_secret": totp_secret, + }, + credential_create_params.CredentialCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Credential, + ) + + async def retrieve( + self, + id_or_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> Credential: + """Retrieve a credential by its ID or name. + + Credential values are not returned. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + return await self._get( + f"/credentials/{id_or_name}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Credential, + ) + + async def update( + self, + id_or_name: str, + *, + name: str | Omit = omit, + sso_provider: Optional[str] | Omit = omit, + totp_secret: str | Omit = omit, + values: Dict[str, str] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> Credential: + """Update a credential's name or values. + + When values are provided, they are merged + with existing values (new keys are added, existing keys are overwritten). + + Args: + name: New name for the credential + + sso_provider: If set, indicates this credential should be used with the specified SSO + provider. Set to empty string or null to remove. + + totp_secret: Base32-encoded TOTP secret for generating one-time passwords. Spaces and + formatting are automatically normalized. Set to empty string to remove. + + values: Field name to value mapping. Values are merged with existing values (new keys + added, existing keys overwritten). 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + return await self._patch( + f"/credentials/{id_or_name}", + body=await async_maybe_transform( + { + "name": name, + "sso_provider": sso_provider, + "totp_secret": totp_secret, + "values": values, + }, + credential_update_params.CredentialUpdateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Credential, + ) + + def list( + self, + *, + domain: str | Omit = omit, + limit: int | Omit = omit, + offset: int | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AsyncPaginator[Credential, AsyncOffsetPagination[Credential]]: + """List credentials owned by the caller's organization. + + Credential values are not + returned. + + Args: + domain: Filter by domain + + limit: Maximum number of results to return + + offset: Number of results to skip + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get_api_list( + "/credentials", + page=AsyncOffsetPagination[Credential], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "domain": domain, + "limit": limit, + "offset": offset, + }, + credential_list_params.CredentialListParams, + ), + ), + model=Credential, + ) + + async def delete( + self, + id_or_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Delete a credential by its ID or name. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return await self._delete( + f"/credentials/{id_or_name}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + async def totp_code( + self, + id_or_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> CredentialTotpCodeResponse: + """ + Returns the current 6-digit TOTP code for a credential with a configured + totp_secret. Use this to complete 2FA setup on sites or when you need a fresh + code. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + return await self._get( + f"/credentials/{id_or_name}/totp-code", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=CredentialTotpCodeResponse, + ) + + +class CredentialsResourceWithRawResponse: + def __init__(self, credentials: CredentialsResource) -> None: + self._credentials = credentials + + self.create = to_raw_response_wrapper( + credentials.create, + ) + self.retrieve = to_raw_response_wrapper( + credentials.retrieve, + ) + self.update = to_raw_response_wrapper( + credentials.update, + ) + self.list = to_raw_response_wrapper( + credentials.list, + ) + self.delete = to_raw_response_wrapper( + credentials.delete, + ) + self.totp_code = to_raw_response_wrapper( + credentials.totp_code, + ) + + +class AsyncCredentialsResourceWithRawResponse: + def __init__(self, credentials: AsyncCredentialsResource) -> None: + self._credentials = credentials + + self.create = async_to_raw_response_wrapper( + credentials.create, + ) + self.retrieve = async_to_raw_response_wrapper( + credentials.retrieve, + ) + self.update = async_to_raw_response_wrapper( + credentials.update, + ) + self.list = async_to_raw_response_wrapper( + credentials.list, + ) + self.delete = async_to_raw_response_wrapper( + credentials.delete, + ) + self.totp_code = async_to_raw_response_wrapper( + credentials.totp_code, + ) + + +class CredentialsResourceWithStreamingResponse: + def __init__(self, credentials: CredentialsResource) -> None: + self._credentials = credentials + + self.create = to_streamed_response_wrapper( + credentials.create, + ) + self.retrieve = to_streamed_response_wrapper( + credentials.retrieve, + ) + self.update = to_streamed_response_wrapper( + credentials.update, + ) + self.list = to_streamed_response_wrapper( + credentials.list, + ) + self.delete = 
to_streamed_response_wrapper( + credentials.delete, + ) + self.totp_code = to_streamed_response_wrapper( + credentials.totp_code, + ) + + +class AsyncCredentialsResourceWithStreamingResponse: + def __init__(self, credentials: AsyncCredentialsResource) -> None: + self._credentials = credentials + + self.create = async_to_streamed_response_wrapper( + credentials.create, + ) + self.retrieve = async_to_streamed_response_wrapper( + credentials.retrieve, + ) + self.update = async_to_streamed_response_wrapper( + credentials.update, + ) + self.list = async_to_streamed_response_wrapper( + credentials.list, + ) + self.delete = async_to_streamed_response_wrapper( + credentials.delete, + ) + self.totp_code = async_to_streamed_response_wrapper( + credentials.totp_code, + ) diff --git a/src/kernel/resources/deployments.py b/src/kernel/resources/deployments.py new file mode 100644 index 0000000..f924531 --- /dev/null +++ b/src/kernel/resources/deployments.py @@ -0,0 +1,547 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Any, Dict, Mapping, cast +from typing_extensions import Literal + +import httpx + +from ..types import deployment_list_params, deployment_create_params, deployment_follow_params +from .._types import Body, Omit, Query, Headers, NotGiven, FileTypes, omit, not_given +from .._utils import extract_files, maybe_transform, deepcopy_minimal, async_maybe_transform +from .._compat import cached_property +from .._resource import SyncAPIResource, AsyncAPIResource +from .._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from .._streaming import Stream, AsyncStream +from ..pagination import SyncOffsetPagination, AsyncOffsetPagination +from .._base_client import AsyncPaginator, make_request_options +from ..types.deployment_list_response import DeploymentListResponse +from ..types.deployment_create_response import DeploymentCreateResponse +from ..types.deployment_follow_response import DeploymentFollowResponse +from ..types.deployment_retrieve_response import DeploymentRetrieveResponse + +__all__ = ["DeploymentsResource", "AsyncDeploymentsResource"] + + +class DeploymentsResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> DeploymentsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return DeploymentsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> DeploymentsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return DeploymentsResourceWithStreamingResponse(self) + + def create( + self, + *, + entrypoint_rel_path: str | Omit = omit, + env_vars: Dict[str, str] | Omit = omit, + file: FileTypes | Omit = omit, + force: bool | Omit = omit, + region: Literal["aws.us-east-1a"] | Omit = omit, + source: deployment_create_params.Source | Omit = omit, + version: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
+ # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> DeploymentCreateResponse: + """ + Create a new deployment. + + Args: + entrypoint_rel_path: Relative path to the entrypoint of the application + + env_vars: Map of environment variables to set for the deployed application. Each key-value + pair represents an environment variable. + + file: ZIP file containing the application source directory + + force: Allow overwriting an existing app version + + region: Region for deployment. Currently we only support "aws.us-east-1a" + + source: Source from which to fetch application code. + + version: Version of the application. Can be any string. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + body = deepcopy_minimal( + { + "entrypoint_rel_path": entrypoint_rel_path, + "env_vars": env_vars, + "file": file, + "force": force, + "region": region, + "source": source, + "version": version, + } + ) + files = extract_files(cast(Mapping[str, object], body), paths=[["file"]]) + # It should be noted that the actual Content-Type header that will be + # sent to the server will contain a `boundary` parameter, e.g. + # multipart/form-data; boundary=---abc-- + extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})} + return self._post( + "/deployments", + body=maybe_transform(body, deployment_create_params.DeploymentCreateParams), + files=files, + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=DeploymentCreateResponse, + ) + + def retrieve( + self, + id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> DeploymentRetrieveResponse: + """ + Get information about a deployment's status. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return self._get( + f"/deployments/{id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=DeploymentRetrieveResponse, + ) + + def list( + self, + *, + app_name: str | Omit = omit, + limit: int | Omit = omit, + offset: int | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> SyncOffsetPagination[DeploymentListResponse]: + """List deployments. + + Optionally filter by application name. + + Args: + app_name: Filter results by application name. + + limit: Limit the number of deployments to return. + + offset: Offset the number of deployments to return. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get_api_list( + "/deployments", + page=SyncOffsetPagination[DeploymentListResponse], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "app_name": app_name, + "limit": limit, + "offset": offset, + }, + deployment_list_params.DeploymentListParams, + ), + ), + model=DeploymentListResponse, + ) + + def follow( + self, + id: str, + *, + since: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> Stream[DeploymentFollowResponse]: + """ + Establishes a Server-Sent Events (SSE) stream that delivers real-time logs and + status updates for a deployment. The stream terminates automatically once the + deployment reaches a terminal state. + + Args: + since: Show logs since the given time (RFC timestamps or durations like 5m). + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} + return self._get( + f"/deployments/{id}/events", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform({"since": since}, deployment_follow_params.DeploymentFollowParams), + ), + cast_to=cast( + Any, DeploymentFollowResponse + ), # Union types cannot be passed in as arguments in the type system + stream=True, + stream_cls=Stream[DeploymentFollowResponse], + ) + + +class AsyncDeploymentsResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncDeploymentsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return AsyncDeploymentsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncDeploymentsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
+ + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return AsyncDeploymentsResourceWithStreamingResponse(self) + + async def create( + self, + *, + entrypoint_rel_path: str | Omit = omit, + env_vars: Dict[str, str] | Omit = omit, + file: FileTypes | Omit = omit, + force: bool | Omit = omit, + region: Literal["aws.us-east-1a"] | Omit = omit, + source: deployment_create_params.Source | Omit = omit, + version: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> DeploymentCreateResponse: + """ + Create a new deployment. + + Args: + entrypoint_rel_path: Relative path to the entrypoint of the application + + env_vars: Map of environment variables to set for the deployed application. Each key-value + pair represents an environment variable. + + file: ZIP file containing the application source directory + + force: Allow overwriting an existing app version + + region: Region for deployment. Currently we only support "aws.us-east-1a" + + source: Source from which to fetch application code. + + version: Version of the application. Can be any string. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + body = deepcopy_minimal( + { + "entrypoint_rel_path": entrypoint_rel_path, + "env_vars": env_vars, + "file": file, + "force": force, + "region": region, + "source": source, + "version": version, + } + ) + files = extract_files(cast(Mapping[str, object], body), paths=[["file"]]) + # It should be noted that the actual Content-Type header that will be + # sent to the server will contain a `boundary` parameter, e.g. + # multipart/form-data; boundary=---abc-- + extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})} + return await self._post( + "/deployments", + body=await async_maybe_transform(body, deployment_create_params.DeploymentCreateParams), + files=files, + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=DeploymentCreateResponse, + ) + + async def retrieve( + self, + id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> DeploymentRetrieveResponse: + """ + Get information about a deployment's status. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return await self._get( + f"/deployments/{id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=DeploymentRetrieveResponse, + ) + + def list( + self, + *, + app_name: str | Omit = omit, + limit: int | Omit = omit, + offset: int | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AsyncPaginator[DeploymentListResponse, AsyncOffsetPagination[DeploymentListResponse]]: + """List deployments. + + Optionally filter by application name. + + Args: + app_name: Filter results by application name. + + limit: Limit the number of deployments to return. + + offset: Offset the number of deployments to return. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get_api_list( + "/deployments", + page=AsyncOffsetPagination[DeploymentListResponse], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "app_name": app_name, + "limit": limit, + "offset": offset, + }, + deployment_list_params.DeploymentListParams, + ), + ), + model=DeploymentListResponse, + ) + + async def follow( + self, + id: str, + *, + since: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AsyncStream[DeploymentFollowResponse]: + """ + Establishes a Server-Sent Events (SSE) stream that delivers real-time logs and + status updates for a deployment. The stream terminates automatically once the + deployment reaches a terminal state. + + Args: + since: Show logs since the given time (RFC timestamps or durations like 5m). 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} + return await self._get( + f"/deployments/{id}/events", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform({"since": since}, deployment_follow_params.DeploymentFollowParams), + ), + cast_to=cast( + Any, DeploymentFollowResponse + ), # Union types cannot be passed in as arguments in the type system + stream=True, + stream_cls=AsyncStream[DeploymentFollowResponse], + ) + + +class DeploymentsResourceWithRawResponse: + def __init__(self, deployments: DeploymentsResource) -> None: + self._deployments = deployments + + self.create = to_raw_response_wrapper( + deployments.create, + ) + self.retrieve = to_raw_response_wrapper( + deployments.retrieve, + ) + self.list = to_raw_response_wrapper( + deployments.list, + ) + self.follow = to_raw_response_wrapper( + deployments.follow, + ) + + +class AsyncDeploymentsResourceWithRawResponse: + def __init__(self, deployments: AsyncDeploymentsResource) -> None: + self._deployments = deployments + + self.create = async_to_raw_response_wrapper( + deployments.create, + ) + self.retrieve = async_to_raw_response_wrapper( + deployments.retrieve, + ) + self.list = async_to_raw_response_wrapper( + deployments.list, + ) + self.follow = async_to_raw_response_wrapper( + deployments.follow, + ) + + +class DeploymentsResourceWithStreamingResponse: + def __init__(self, deployments: DeploymentsResource) -> None: + self._deployments = deployments + + self.create = to_streamed_response_wrapper( + deployments.create, + ) + self.retrieve = to_streamed_response_wrapper( + deployments.retrieve, + ) + self.list = to_streamed_response_wrapper( + deployments.list, + ) + self.follow = to_streamed_response_wrapper( + deployments.follow, + ) + + +class AsyncDeploymentsResourceWithStreamingResponse: + def __init__(self, deployments: AsyncDeploymentsResource) -> None: + self._deployments = deployments + + self.create = async_to_streamed_response_wrapper( + deployments.create, + ) + self.retrieve = async_to_streamed_response_wrapper( + deployments.retrieve, + ) + self.list = async_to_streamed_response_wrapper( + deployments.list, + ) + self.follow = async_to_streamed_response_wrapper( + deployments.follow, + ) diff --git a/src/kernel/resources/extensions.py b/src/kernel/resources/extensions.py new file mode 100644 index 0000000..69497b1 --- /dev/null +++ b/src/kernel/resources/extensions.py @@ -0,0 +1,539 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
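+#
+# Illustrative only, not part of the generated module: a minimal sketch of how the
+# extension endpoints defined below might be called. It assumes the package exposes a
+# `Kernel` client class wired to this resource as `client.extensions`, and the Chrome
+# Web Store URL shown is a placeholder.
+#
+#     from kernel import Kernel
+#
+#     client = Kernel()
+#
+#     # Fetch an unpacked extension from the Chrome Web Store as a ZIP archive and
+#     # save it locally; the binary endpoints below return a response object with .read().
+#     archive = client.extensions.download_from_chrome_store(
+#         url="https://chromewebstore.google.com/detail/<extension-id>",
+#         os="linux",
+#     )
+#     with open("extension.zip", "wb") as f:
+#         f.write(archive.read())
+#
+#     # Upload the archive back under a memorable name for later reference.
+#     uploaded = client.extensions.upload(file=open("extension.zip", "rb"), name="my-extension")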
+ +from __future__ import annotations + +from typing import Mapping, cast +from typing_extensions import Literal + +import httpx + +from ..types import extension_upload_params, extension_download_from_chrome_store_params +from .._types import Body, Omit, Query, Headers, NoneType, NotGiven, FileTypes, omit, not_given +from .._utils import extract_files, maybe_transform, deepcopy_minimal, async_maybe_transform +from .._compat import cached_property +from .._resource import SyncAPIResource, AsyncAPIResource +from .._response import ( + BinaryAPIResponse, + AsyncBinaryAPIResponse, + StreamedBinaryAPIResponse, + AsyncStreamedBinaryAPIResponse, + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + to_custom_raw_response_wrapper, + async_to_streamed_response_wrapper, + to_custom_streamed_response_wrapper, + async_to_custom_raw_response_wrapper, + async_to_custom_streamed_response_wrapper, +) +from .._base_client import make_request_options +from ..types.extension_list_response import ExtensionListResponse +from ..types.extension_upload_response import ExtensionUploadResponse + +__all__ = ["ExtensionsResource", "AsyncExtensionsResource"] + + +class ExtensionsResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> ExtensionsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return ExtensionsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> ExtensionsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return ExtensionsResourceWithStreamingResponse(self) + + def list( + self, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ExtensionListResponse: + """List extensions owned by the caller's organization.""" + return self._get( + "/extensions", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ExtensionListResponse, + ) + + def delete( + self, + id_or_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Delete an extension by its ID or by its name. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return self._delete( + f"/extensions/{id_or_name}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + def download( + self, + id_or_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> BinaryAPIResponse: + """ + Download the extension as a ZIP archive by ID or name. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + extra_headers = {"Accept": "application/octet-stream", **(extra_headers or {})} + return self._get( + f"/extensions/{id_or_name}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=BinaryAPIResponse, + ) + + def download_from_chrome_store( + self, + *, + url: str, + os: Literal["win", "mac", "linux"] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> BinaryAPIResponse: + """ + Returns a ZIP archive containing the unpacked extension fetched from the Chrome + Web Store. + + Args: + url: Chrome Web Store URL for the extension. + + os: Target operating system for the extension package. Defaults to linux. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + extra_headers = {"Accept": "application/octet-stream", **(extra_headers or {})} + return self._get( + "/extensions/from_chrome_store", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "url": url, + "os": os, + }, + extension_download_from_chrome_store_params.ExtensionDownloadFromChromeStoreParams, + ), + ), + cast_to=BinaryAPIResponse, + ) + + def upload( + self, + *, + file: FileTypes, + name: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
+ # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ExtensionUploadResponse: + """Upload a zip file containing an unpacked browser extension. + + Optionally provide a + unique name for later reference. + + Args: + file: ZIP file containing the browser extension. + + name: Optional unique name within the organization to reference this extension. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + body = deepcopy_minimal( + { + "file": file, + "name": name, + } + ) + files = extract_files(cast(Mapping[str, object], body), paths=[["file"]]) + # It should be noted that the actual Content-Type header that will be + # sent to the server will contain a `boundary` parameter, e.g. + # multipart/form-data; boundary=---abc-- + extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})} + return self._post( + "/extensions", + body=maybe_transform(body, extension_upload_params.ExtensionUploadParams), + files=files, + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ExtensionUploadResponse, + ) + + +class AsyncExtensionsResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncExtensionsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return AsyncExtensionsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncExtensionsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return AsyncExtensionsResourceWithStreamingResponse(self) + + async def list( + self, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ExtensionListResponse: + """List extensions owned by the caller's organization.""" + return await self._get( + "/extensions", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ExtensionListResponse, + ) + + async def delete( + self, + id_or_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Delete an extension by its ID or by its name. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return await self._delete( + f"/extensions/{id_or_name}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + async def download( + self, + id_or_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AsyncBinaryAPIResponse: + """ + Download the extension as a ZIP archive by ID or name. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + extra_headers = {"Accept": "application/octet-stream", **(extra_headers or {})} + return await self._get( + f"/extensions/{id_or_name}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=AsyncBinaryAPIResponse, + ) + + async def download_from_chrome_store( + self, + *, + url: str, + os: Literal["win", "mac", "linux"] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AsyncBinaryAPIResponse: + """ + Returns a ZIP archive containing the unpacked extension fetched from the Chrome + Web Store. + + Args: + url: Chrome Web Store URL for the extension. + + os: Target operating system for the extension package. Defaults to linux. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + extra_headers = {"Accept": "application/octet-stream", **(extra_headers or {})} + return await self._get( + "/extensions/from_chrome_store", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + { + "url": url, + "os": os, + }, + extension_download_from_chrome_store_params.ExtensionDownloadFromChromeStoreParams, + ), + ), + cast_to=AsyncBinaryAPIResponse, + ) + + async def upload( + self, + *, + file: FileTypes, + name: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ExtensionUploadResponse: + """Upload a zip file containing an unpacked browser extension. + + Optionally provide a + unique name for later reference. + + Args: + file: ZIP file containing the browser extension. + + name: Optional unique name within the organization to reference this extension. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + body = deepcopy_minimal( + { + "file": file, + "name": name, + } + ) + files = extract_files(cast(Mapping[str, object], body), paths=[["file"]]) + # It should be noted that the actual Content-Type header that will be + # sent to the server will contain a `boundary` parameter, e.g. 
+ # multipart/form-data; boundary=---abc-- + extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})} + return await self._post( + "/extensions", + body=await async_maybe_transform(body, extension_upload_params.ExtensionUploadParams), + files=files, + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ExtensionUploadResponse, + ) + + +class ExtensionsResourceWithRawResponse: + def __init__(self, extensions: ExtensionsResource) -> None: + self._extensions = extensions + + self.list = to_raw_response_wrapper( + extensions.list, + ) + self.delete = to_raw_response_wrapper( + extensions.delete, + ) + self.download = to_custom_raw_response_wrapper( + extensions.download, + BinaryAPIResponse, + ) + self.download_from_chrome_store = to_custom_raw_response_wrapper( + extensions.download_from_chrome_store, + BinaryAPIResponse, + ) + self.upload = to_raw_response_wrapper( + extensions.upload, + ) + + +class AsyncExtensionsResourceWithRawResponse: + def __init__(self, extensions: AsyncExtensionsResource) -> None: + self._extensions = extensions + + self.list = async_to_raw_response_wrapper( + extensions.list, + ) + self.delete = async_to_raw_response_wrapper( + extensions.delete, + ) + self.download = async_to_custom_raw_response_wrapper( + extensions.download, + AsyncBinaryAPIResponse, + ) + self.download_from_chrome_store = async_to_custom_raw_response_wrapper( + extensions.download_from_chrome_store, + AsyncBinaryAPIResponse, + ) + self.upload = async_to_raw_response_wrapper( + extensions.upload, + ) + + +class ExtensionsResourceWithStreamingResponse: + def __init__(self, extensions: ExtensionsResource) -> None: + self._extensions = extensions + + self.list = to_streamed_response_wrapper( + extensions.list, + ) + self.delete = to_streamed_response_wrapper( + extensions.delete, + ) + self.download = to_custom_streamed_response_wrapper( + extensions.download, + StreamedBinaryAPIResponse, + ) + self.download_from_chrome_store = to_custom_streamed_response_wrapper( + extensions.download_from_chrome_store, + StreamedBinaryAPIResponse, + ) + self.upload = to_streamed_response_wrapper( + extensions.upload, + ) + + +class AsyncExtensionsResourceWithStreamingResponse: + def __init__(self, extensions: AsyncExtensionsResource) -> None: + self._extensions = extensions + + self.list = async_to_streamed_response_wrapper( + extensions.list, + ) + self.delete = async_to_streamed_response_wrapper( + extensions.delete, + ) + self.download = async_to_custom_streamed_response_wrapper( + extensions.download, + AsyncStreamedBinaryAPIResponse, + ) + self.download_from_chrome_store = async_to_custom_streamed_response_wrapper( + extensions.download_from_chrome_store, + AsyncStreamedBinaryAPIResponse, + ) + self.upload = async_to_streamed_response_wrapper( + extensions.upload, + ) diff --git a/src/kernel/resources/invocations.py b/src/kernel/resources/invocations.py new file mode 100644 index 0000000..3b812d4 --- /dev/null +++ b/src/kernel/resources/invocations.py @@ -0,0 +1,762 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
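+#
+# Illustrative only, not part of the generated module: a minimal sketch of invoking an
+# action asynchronously and then following its event stream, assuming a `Kernel` client
+# wired to this resource as `client.invocations`. The app/action names, version, and the
+# `id` field read off the create response are placeholders/assumptions.
+#
+#     from kernel import Kernel
+#
+#     client = Kernel()
+#
+#     # Queue an async invocation; the docs below note the API replies 202 Accepted
+#     # with status "queued" when async_ is set.
+#     invocation = client.invocations.create(
+#         app_name="my-app",
+#         version="1.0.0",
+#         action_name="my-action",
+#         payload='{"query": "example"}',
+#         async_=True,
+#         async_timeout_seconds=600,
+#     )
+#
+#     # Stream logs and status updates over SSE until the invocation reaches a terminal state.
+#     for event in client.invocations.follow(invocation.id):
+#         print(event)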
+ +from __future__ import annotations + +from typing import Any, cast +from typing_extensions import Literal + +import httpx + +from ..types import invocation_list_params, invocation_create_params, invocation_follow_params, invocation_update_params +from .._types import Body, Omit, Query, Headers, NoneType, NotGiven, omit, not_given +from .._utils import maybe_transform, async_maybe_transform +from .._compat import cached_property +from .._resource import SyncAPIResource, AsyncAPIResource +from .._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from .._streaming import Stream, AsyncStream +from ..pagination import SyncOffsetPagination, AsyncOffsetPagination +from .._base_client import AsyncPaginator, make_request_options +from ..types.invocation_list_response import InvocationListResponse +from ..types.invocation_create_response import InvocationCreateResponse +from ..types.invocation_follow_response import InvocationFollowResponse +from ..types.invocation_update_response import InvocationUpdateResponse +from ..types.invocation_retrieve_response import InvocationRetrieveResponse + +__all__ = ["InvocationsResource", "AsyncInvocationsResource"] + + +class InvocationsResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> InvocationsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return InvocationsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> InvocationsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return InvocationsResourceWithStreamingResponse(self) + + def create( + self, + *, + action_name: str, + app_name: str, + version: str, + async_: bool | Omit = omit, + async_timeout_seconds: int | Omit = omit, + payload: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> InvocationCreateResponse: + """ + Invoke an action. + + Args: + action_name: Name of the action to invoke + + app_name: Name of the application + + version: Version of the application + + async_: If true, invoke asynchronously. When set, the API responds 202 Accepted with + status "queued". + + async_timeout_seconds: Timeout in seconds for async invocations (min 10, max 3600). Only applies when + async is true. + + payload: Input data for the action, sent as a JSON string. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._post( + "/invocations", + body=maybe_transform( + { + "action_name": action_name, + "app_name": app_name, + "version": version, + "async_": async_, + "async_timeout_seconds": async_timeout_seconds, + "payload": payload, + }, + invocation_create_params.InvocationCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=InvocationCreateResponse, + ) + + def retrieve( + self, + id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> InvocationRetrieveResponse: + """ + Get details about an invocation's status and output. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return self._get( + f"/invocations/{id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=InvocationRetrieveResponse, + ) + + def update( + self, + id: str, + *, + status: Literal["succeeded", "failed"], + output: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> InvocationUpdateResponse: + """Update an invocation's status or output. + + This can be used to cancel an + invocation by setting the status to "failed". + + Args: + status: New status for the invocation. + + output: Updated output of the invocation rendered as JSON string. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return self._patch( + f"/invocations/{id}", + body=maybe_transform( + { + "status": status, + "output": output, + }, + invocation_update_params.InvocationUpdateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=InvocationUpdateResponse, + ) + + def list( + self, + *, + action_name: str | Omit = omit, + app_name: str | Omit = omit, + deployment_id: str | Omit = omit, + limit: int | Omit = omit, + offset: int | Omit = omit, + since: str | Omit = omit, + status: Literal["queued", "running", "succeeded", "failed"] | Omit = omit, + version: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> SyncOffsetPagination[InvocationListResponse]: + """List invocations. + + Optionally filter by application name, action name, status, + deployment ID, or start time. + + Args: + action_name: Filter results by action name. + + app_name: Filter results by application name. + + deployment_id: Filter results by deployment ID. + + limit: Limit the number of invocations to return. + + offset: Offset the number of invocations to return. + + since: Show invocations that have started since the given time (RFC timestamps or + durations like 5m). + + status: Filter results by invocation status. + + version: Filter results by application version. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get_api_list( + "/invocations", + page=SyncOffsetPagination[InvocationListResponse], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "action_name": action_name, + "app_name": app_name, + "deployment_id": deployment_id, + "limit": limit, + "offset": offset, + "since": since, + "status": status, + "version": version, + }, + invocation_list_params.InvocationListParams, + ), + ), + model=InvocationListResponse, + ) + + def delete_browsers( + self, + id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Delete all browser sessions created within the specified invocation. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return self._delete( + f"/invocations/{id}/browsers", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + def follow( + self, + id: str, + *, + since: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> Stream[InvocationFollowResponse]: + """ + Establishes a Server-Sent Events (SSE) stream that delivers real-time logs and + status updates for an invocation. The stream terminates automatically once the + invocation reaches a terminal state. + + Args: + since: Show logs since the given time (RFC timestamps or durations like 5m). + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} + return self._get( + f"/invocations/{id}/events", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform({"since": since}, invocation_follow_params.InvocationFollowParams), + ), + cast_to=cast( + Any, InvocationFollowResponse + ), # Union types cannot be passed in as arguments in the type system + stream=True, + stream_cls=Stream[InvocationFollowResponse], + ) + + +class AsyncInvocationsResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncInvocationsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return AsyncInvocationsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncInvocationsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return AsyncInvocationsResourceWithStreamingResponse(self) + + async def create( + self, + *, + action_name: str, + app_name: str, + version: str, + async_: bool | Omit = omit, + async_timeout_seconds: int | Omit = omit, + payload: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
+ # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> InvocationCreateResponse: + """ + Invoke an action. + + Args: + action_name: Name of the action to invoke + + app_name: Name of the application + + version: Version of the application + + async_: If true, invoke asynchronously. When set, the API responds 202 Accepted with + status "queued". + + async_timeout_seconds: Timeout in seconds for async invocations (min 10, max 3600). Only applies when + async is true. + + payload: Input data for the action, sent as a JSON string. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._post( + "/invocations", + body=await async_maybe_transform( + { + "action_name": action_name, + "app_name": app_name, + "version": version, + "async_": async_, + "async_timeout_seconds": async_timeout_seconds, + "payload": payload, + }, + invocation_create_params.InvocationCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=InvocationCreateResponse, + ) + + async def retrieve( + self, + id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> InvocationRetrieveResponse: + """ + Get details about an invocation's status and output. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return await self._get( + f"/invocations/{id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=InvocationRetrieveResponse, + ) + + async def update( + self, + id: str, + *, + status: Literal["succeeded", "failed"], + output: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> InvocationUpdateResponse: + """Update an invocation's status or output. + + This can be used to cancel an + invocation by setting the status to "failed". + + Args: + status: New status for the invocation. + + output: Updated output of the invocation rendered as JSON string. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return await self._patch( + f"/invocations/{id}", + body=await async_maybe_transform( + { + "status": status, + "output": output, + }, + invocation_update_params.InvocationUpdateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=InvocationUpdateResponse, + ) + + def list( + self, + *, + action_name: str | Omit = omit, + app_name: str | Omit = omit, + deployment_id: str | Omit = omit, + limit: int | Omit = omit, + offset: int | Omit = omit, + since: str | Omit = omit, + status: Literal["queued", "running", "succeeded", "failed"] | Omit = omit, + version: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AsyncPaginator[InvocationListResponse, AsyncOffsetPagination[InvocationListResponse]]: + """List invocations. + + Optionally filter by application name, action name, status, + deployment ID, or start time. + + Args: + action_name: Filter results by action name. + + app_name: Filter results by application name. + + deployment_id: Filter results by deployment ID. + + limit: Limit the number of invocations to return. + + offset: Offset the number of invocations to return. + + since: Show invocations that have started since the given time (RFC timestamps or + durations like 5m). + + status: Filter results by invocation status. + + version: Filter results by application version. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get_api_list( + "/invocations", + page=AsyncOffsetPagination[InvocationListResponse], + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "action_name": action_name, + "app_name": app_name, + "deployment_id": deployment_id, + "limit": limit, + "offset": offset, + "since": since, + "status": status, + "version": version, + }, + invocation_list_params.InvocationListParams, + ), + ), + model=InvocationListResponse, + ) + + async def delete_browsers( + self, + id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Delete all browser sessions created within the specified invocation. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return await self._delete( + f"/invocations/{id}/browsers", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + async def follow( + self, + id: str, + *, + since: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AsyncStream[InvocationFollowResponse]: + """ + Establishes a Server-Sent Events (SSE) stream that delivers real-time logs and + status updates for an invocation. The stream terminates automatically once the + invocation reaches a terminal state. + + Args: + since: Show logs since the given time (RFC timestamps or durations like 5m). + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})} + return await self._get( + f"/invocations/{id}/events", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform({"since": since}, invocation_follow_params.InvocationFollowParams), + ), + cast_to=cast( + Any, InvocationFollowResponse + ), # Union types cannot be passed in as arguments in the type system + stream=True, + stream_cls=AsyncStream[InvocationFollowResponse], + ) + + +class InvocationsResourceWithRawResponse: + def __init__(self, invocations: InvocationsResource) -> None: + self._invocations = invocations + + self.create = to_raw_response_wrapper( + invocations.create, + ) + self.retrieve = to_raw_response_wrapper( + invocations.retrieve, + ) + self.update = to_raw_response_wrapper( + invocations.update, + ) + self.list = to_raw_response_wrapper( + invocations.list, + ) + self.delete_browsers = to_raw_response_wrapper( + invocations.delete_browsers, + ) + self.follow = to_raw_response_wrapper( + invocations.follow, + ) + + +class AsyncInvocationsResourceWithRawResponse: + def __init__(self, invocations: AsyncInvocationsResource) -> None: + self._invocations = invocations + + self.create = async_to_raw_response_wrapper( + invocations.create, + ) + self.retrieve = async_to_raw_response_wrapper( + invocations.retrieve, + ) + self.update = async_to_raw_response_wrapper( + invocations.update, + ) + self.list = async_to_raw_response_wrapper( + invocations.list, + ) + self.delete_browsers = async_to_raw_response_wrapper( + invocations.delete_browsers, + ) + self.follow = async_to_raw_response_wrapper( + invocations.follow, + ) + + +class 
InvocationsResourceWithStreamingResponse: + def __init__(self, invocations: InvocationsResource) -> None: + self._invocations = invocations + + self.create = to_streamed_response_wrapper( + invocations.create, + ) + self.retrieve = to_streamed_response_wrapper( + invocations.retrieve, + ) + self.update = to_streamed_response_wrapper( + invocations.update, + ) + self.list = to_streamed_response_wrapper( + invocations.list, + ) + self.delete_browsers = to_streamed_response_wrapper( + invocations.delete_browsers, + ) + self.follow = to_streamed_response_wrapper( + invocations.follow, + ) + + +class AsyncInvocationsResourceWithStreamingResponse: + def __init__(self, invocations: AsyncInvocationsResource) -> None: + self._invocations = invocations + + self.create = async_to_streamed_response_wrapper( + invocations.create, + ) + self.retrieve = async_to_streamed_response_wrapper( + invocations.retrieve, + ) + self.update = async_to_streamed_response_wrapper( + invocations.update, + ) + self.list = async_to_streamed_response_wrapper( + invocations.list, + ) + self.delete_browsers = async_to_streamed_response_wrapper( + invocations.delete_browsers, + ) + self.follow = async_to_streamed_response_wrapper( + invocations.follow, + ) diff --git a/src/kernel/resources/profiles.py b/src/kernel/resources/profiles.py new file mode 100644 index 0000000..86064d5 --- /dev/null +++ b/src/kernel/resources/profiles.py @@ -0,0 +1,474 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import httpx + +from ..types import profile_create_params +from .._types import Body, Omit, Query, Headers, NoneType, NotGiven, omit, not_given +from .._utils import maybe_transform, async_maybe_transform +from .._compat import cached_property +from .._resource import SyncAPIResource, AsyncAPIResource +from .._response import ( + BinaryAPIResponse, + AsyncBinaryAPIResponse, + StreamedBinaryAPIResponse, + AsyncStreamedBinaryAPIResponse, + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + to_custom_raw_response_wrapper, + async_to_streamed_response_wrapper, + to_custom_streamed_response_wrapper, + async_to_custom_raw_response_wrapper, + async_to_custom_streamed_response_wrapper, +) +from .._base_client import make_request_options +from ..types.profile import Profile +from ..types.profile_list_response import ProfileListResponse + +__all__ = ["ProfilesResource", "AsyncProfilesResource"] + + +class ProfilesResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> ProfilesResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return ProfilesResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> ProfilesResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return ProfilesResourceWithStreamingResponse(self) + + def create( + self, + *, + name: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
+ # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> Profile: + """ + Create a browser profile that can be used to load state into future browser + sessions. + + Args: + name: Optional name of the profile. Must be unique within the organization. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._post( + "/profiles", + body=maybe_transform({"name": name}, profile_create_params.ProfileCreateParams), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Profile, + ) + + def retrieve( + self, + id_or_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> Profile: + """ + Retrieve details for a single profile by its ID or name. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + return self._get( + f"/profiles/{id_or_name}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Profile, + ) + + def list( + self, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProfileListResponse: + """List profiles with optional filtering and pagination.""" + return self._get( + "/profiles", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ProfileListResponse, + ) + + def delete( + self, + id_or_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Delete a profile by its ID or by its name. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return self._delete( + f"/profiles/{id_or_name}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + def download( + self, + id_or_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> BinaryAPIResponse: + """Download the profile. + + Profiles are JSON files containing the pieces of state + that we save. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + extra_headers = {"Accept": "application/octet-stream", **(extra_headers or {})} + return self._get( + f"/profiles/{id_or_name}/download", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=BinaryAPIResponse, + ) + + +class AsyncProfilesResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncProfilesResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return AsyncProfilesResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncProfilesResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return AsyncProfilesResourceWithStreamingResponse(self) + + async def create( + self, + *, + name: str | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> Profile: + """ + Create a browser profile that can be used to load state into future browser + sessions. + + Args: + name: Optional name of the profile. Must be unique within the organization. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._post( + "/profiles", + body=await async_maybe_transform({"name": name}, profile_create_params.ProfileCreateParams), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Profile, + ) + + async def retrieve( + self, + id_or_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> Profile: + """ + Retrieve details for a single profile by its ID or name. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + return await self._get( + f"/profiles/{id_or_name}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Profile, + ) + + async def list( + self, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProfileListResponse: + """List profiles with optional filtering and pagination.""" + return await self._get( + "/profiles", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ProfileListResponse, + ) + + async def delete( + self, + id_or_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """ + Delete a profile by its ID or by its name. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return await self._delete( + f"/profiles/{id_or_name}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + async def download( + self, + id_or_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AsyncBinaryAPIResponse: + """Download the profile. + + Profiles are JSON files containing the pieces of state + that we save. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id_or_name: + raise ValueError(f"Expected a non-empty value for `id_or_name` but received {id_or_name!r}") + extra_headers = {"Accept": "application/octet-stream", **(extra_headers or {})} + return await self._get( + f"/profiles/{id_or_name}/download", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=AsyncBinaryAPIResponse, + ) + + +class ProfilesResourceWithRawResponse: + def __init__(self, profiles: ProfilesResource) -> None: + self._profiles = profiles + + self.create = to_raw_response_wrapper( + profiles.create, + ) + self.retrieve = to_raw_response_wrapper( + profiles.retrieve, + ) + self.list = to_raw_response_wrapper( + profiles.list, + ) + self.delete = to_raw_response_wrapper( + profiles.delete, + ) + self.download = to_custom_raw_response_wrapper( + profiles.download, + BinaryAPIResponse, + ) + + +class AsyncProfilesResourceWithRawResponse: + def __init__(self, profiles: AsyncProfilesResource) -> None: + self._profiles = profiles + + self.create = async_to_raw_response_wrapper( + profiles.create, + ) + self.retrieve = async_to_raw_response_wrapper( + profiles.retrieve, + ) + self.list = async_to_raw_response_wrapper( + profiles.list, + ) + self.delete = async_to_raw_response_wrapper( + profiles.delete, + ) + self.download = async_to_custom_raw_response_wrapper( + profiles.download, + AsyncBinaryAPIResponse, + ) + + +class ProfilesResourceWithStreamingResponse: + def __init__(self, profiles: ProfilesResource) -> None: + self._profiles = profiles + + self.create = to_streamed_response_wrapper( + profiles.create, + ) + self.retrieve = to_streamed_response_wrapper( + profiles.retrieve, + ) + self.list = to_streamed_response_wrapper( + profiles.list, + ) + self.delete = to_streamed_response_wrapper( + profiles.delete, + ) + self.download = to_custom_streamed_response_wrapper( + profiles.download, + StreamedBinaryAPIResponse, + ) + + +class AsyncProfilesResourceWithStreamingResponse: + def __init__(self, 
profiles: AsyncProfilesResource) -> None: + self._profiles = profiles + + self.create = async_to_streamed_response_wrapper( + profiles.create, + ) + self.retrieve = async_to_streamed_response_wrapper( + profiles.retrieve, + ) + self.list = async_to_streamed_response_wrapper( + profiles.list, + ) + self.delete = async_to_streamed_response_wrapper( + profiles.delete, + ) + self.download = async_to_custom_streamed_response_wrapper( + profiles.download, + AsyncStreamedBinaryAPIResponse, + ) diff --git a/src/kernel/resources/proxies.py b/src/kernel/resources/proxies.py new file mode 100644 index 0000000..6574a25 --- /dev/null +++ b/src/kernel/resources/proxies.py @@ -0,0 +1,496 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal + +import httpx + +from ..types import proxy_create_params +from .._types import Body, Omit, Query, Headers, NoneType, NotGiven, omit, not_given +from .._utils import maybe_transform, async_maybe_transform +from .._compat import cached_property +from .._resource import SyncAPIResource, AsyncAPIResource +from .._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from .._base_client import make_request_options +from ..types.proxy_list_response import ProxyListResponse +from ..types.proxy_check_response import ProxyCheckResponse +from ..types.proxy_create_response import ProxyCreateResponse +from ..types.proxy_retrieve_response import ProxyRetrieveResponse + +__all__ = ["ProxiesResource", "AsyncProxiesResource"] + + +class ProxiesResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> ProxiesResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return ProxiesResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> ProxiesResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return ProxiesResourceWithStreamingResponse(self) + + def create( + self, + *, + type: Literal["datacenter", "isp", "residential", "mobile", "custom"], + config: proxy_create_params.Config | Omit = omit, + name: str | Omit = omit, + protocol: Literal["http", "https"] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProxyCreateResponse: + """ + Create a new proxy configuration for the caller's organization. + + Args: + type: Proxy type to use. In terms of quality for avoiding bot-detection, from best to + worst: `mobile` > `residential` > `isp` > `datacenter`. + + config: Configuration specific to the selected proxy `type`. + + name: Readable name of the proxy. + + protocol: Protocol to use for the proxy connection. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._post( + "/proxies", + body=maybe_transform( + { + "type": type, + "config": config, + "name": name, + "protocol": protocol, + }, + proxy_create_params.ProxyCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ProxyCreateResponse, + ) + + def retrieve( + self, + id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProxyRetrieveResponse: + """ + Retrieve a proxy belonging to the caller's organization by ID. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return self._get( + f"/proxies/{id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ProxyRetrieveResponse, + ) + + def list( + self, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProxyListResponse: + """List proxies owned by the caller's organization.""" + return self._get( + "/proxies", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ProxyListResponse, + ) + + def delete( + self, + id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """Soft delete a proxy. + + Sessions referencing it are not modified. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return self._delete( + f"/proxies/{id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + def check( + self, + id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProxyCheckResponse: + """ + Run a health check on the proxy to verify it's working. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return self._post( + f"/proxies/{id}/check", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ProxyCheckResponse, + ) + + +class AsyncProxiesResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncProxiesResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#accessing-raw-response-data-eg-headers + """ + return AsyncProxiesResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncProxiesResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/kernel/kernel-python-sdk#with_streaming_response + """ + return AsyncProxiesResourceWithStreamingResponse(self) + + async def create( + self, + *, + type: Literal["datacenter", "isp", "residential", "mobile", "custom"], + config: proxy_create_params.Config | Omit = omit, + name: str | Omit = omit, + protocol: Literal["http", "https"] | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProxyCreateResponse: + """ + Create a new proxy configuration for the caller's organization. + + Args: + type: Proxy type to use. In terms of quality for avoiding bot-detection, from best to + worst: `mobile` > `residential` > `isp` > `datacenter`. + + config: Configuration specific to the selected proxy `type`. + + name: Readable name of the proxy. 
+ + protocol: Protocol to use for the proxy connection. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._post( + "/proxies", + body=await async_maybe_transform( + { + "type": type, + "config": config, + "name": name, + "protocol": protocol, + }, + proxy_create_params.ProxyCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ProxyCreateResponse, + ) + + async def retrieve( + self, + id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProxyRetrieveResponse: + """ + Retrieve a proxy belonging to the caller's organization by ID. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return await self._get( + f"/proxies/{id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ProxyRetrieveResponse, + ) + + async def list( + self, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProxyListResponse: + """List proxies owned by the caller's organization.""" + return await self._get( + "/proxies", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ProxyListResponse, + ) + + async def delete( + self, + id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + """Soft delete a proxy. + + Sessions referencing it are not modified. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + extra_headers = {"Accept": "*/*", **(extra_headers or {})} + return await self._delete( + f"/proxies/{id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=NoneType, + ) + + async def check( + self, + id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProxyCheckResponse: + """ + Run a health check on the proxy to verify it's working. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not id: + raise ValueError(f"Expected a non-empty value for `id` but received {id!r}") + return await self._post( + f"/proxies/{id}/check", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ProxyCheckResponse, + ) + + +class ProxiesResourceWithRawResponse: + def __init__(self, proxies: ProxiesResource) -> None: + self._proxies = proxies + + self.create = to_raw_response_wrapper( + proxies.create, + ) + self.retrieve = to_raw_response_wrapper( + proxies.retrieve, + ) + self.list = to_raw_response_wrapper( + proxies.list, + ) + self.delete = to_raw_response_wrapper( + proxies.delete, + ) + self.check = to_raw_response_wrapper( + proxies.check, + ) + + +class AsyncProxiesResourceWithRawResponse: + def __init__(self, proxies: AsyncProxiesResource) -> None: + self._proxies = proxies + + self.create = async_to_raw_response_wrapper( + proxies.create, + ) + self.retrieve = async_to_raw_response_wrapper( + proxies.retrieve, + ) + self.list = async_to_raw_response_wrapper( + proxies.list, + ) + self.delete = async_to_raw_response_wrapper( + proxies.delete, + ) + self.check = async_to_raw_response_wrapper( + proxies.check, + ) + + +class ProxiesResourceWithStreamingResponse: + def __init__(self, proxies: ProxiesResource) -> None: + self._proxies = proxies + + self.create = to_streamed_response_wrapper( + proxies.create, + ) + self.retrieve = to_streamed_response_wrapper( + proxies.retrieve, + ) + self.list = to_streamed_response_wrapper( + proxies.list, + ) + self.delete = to_streamed_response_wrapper( + proxies.delete, + ) + self.check = to_streamed_response_wrapper( + proxies.check, + ) + + +class AsyncProxiesResourceWithStreamingResponse: + def __init__(self, proxies: AsyncProxiesResource) -> None: + self._proxies = proxies + + self.create = async_to_streamed_response_wrapper( + proxies.create, + ) + self.retrieve = async_to_streamed_response_wrapper( + proxies.retrieve, + ) + self.list = async_to_streamed_response_wrapper( + proxies.list, + ) + self.delete = async_to_streamed_response_wrapper( + 
proxies.delete, + ) + self.check = async_to_streamed_response_wrapper( + proxies.check, + ) diff --git a/src/kernel/types/__init__.py b/src/kernel/types/__init__.py new file mode 100644 index 0000000..0665e53 --- /dev/null +++ b/src/kernel/types/__init__.py @@ -0,0 +1,71 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from .shared import ( + LogEvent as LogEvent, + AppAction as AppAction, + ErrorEvent as ErrorEvent, + ErrorModel as ErrorModel, + ErrorDetail as ErrorDetail, + BrowserProfile as BrowserProfile, + HeartbeatEvent as HeartbeatEvent, + BrowserViewport as BrowserViewport, + BrowserExtension as BrowserExtension, +) +from .profile import Profile as Profile +from .credential import Credential as Credential +from .browser_pool import BrowserPool as BrowserPool +from .app_list_params import AppListParams as AppListParams +from .app_list_response import AppListResponse as AppListResponse +from .browser_list_params import BrowserListParams as BrowserListParams +from .browser_persistence import BrowserPersistence as BrowserPersistence +from .proxy_create_params import ProxyCreateParams as ProxyCreateParams +from .proxy_list_response import ProxyListResponse as ProxyListResponse +from .proxy_check_response import ProxyCheckResponse as ProxyCheckResponse +from .browser_create_params import BrowserCreateParams as BrowserCreateParams +from .browser_delete_params import BrowserDeleteParams as BrowserDeleteParams +from .browser_list_response import BrowserListResponse as BrowserListResponse +from .profile_create_params import ProfileCreateParams as ProfileCreateParams +from .profile_list_response import ProfileListResponse as ProfileListResponse +from .proxy_create_response import ProxyCreateResponse as ProxyCreateResponse +from .credential_list_params import CredentialListParams as CredentialListParams +from .deployment_list_params import DeploymentListParams as DeploymentListParams +from .deployment_state_event import DeploymentStateEvent as DeploymentStateEvent +from .invocation_list_params import InvocationListParams as InvocationListParams +from .invocation_state_event import InvocationStateEvent as InvocationStateEvent +from .browser_create_response import BrowserCreateResponse as BrowserCreateResponse +from .extension_list_response import ExtensionListResponse as ExtensionListResponse +from .extension_upload_params import ExtensionUploadParams as ExtensionUploadParams +from .proxy_retrieve_response import ProxyRetrieveResponse as ProxyRetrieveResponse +from .credential_create_params import CredentialCreateParams as CredentialCreateParams +from .credential_update_params import CredentialUpdateParams as CredentialUpdateParams +from .deployment_create_params import DeploymentCreateParams as DeploymentCreateParams +from .deployment_follow_params import DeploymentFollowParams as DeploymentFollowParams +from .deployment_list_response import DeploymentListResponse as DeploymentListResponse +from .invocation_create_params import InvocationCreateParams as InvocationCreateParams +from .invocation_follow_params import InvocationFollowParams as InvocationFollowParams +from .invocation_list_response import InvocationListResponse as InvocationListResponse +from .invocation_update_params import InvocationUpdateParams as InvocationUpdateParams +from .browser_persistence_param import BrowserPersistenceParam as BrowserPersistenceParam +from .browser_retrieve_response import BrowserRetrieveResponse as BrowserRetrieveResponse 
+from .extension_upload_response import ExtensionUploadResponse as ExtensionUploadResponse +from .browser_pool_create_params import BrowserPoolCreateParams as BrowserPoolCreateParams +from .browser_pool_delete_params import BrowserPoolDeleteParams as BrowserPoolDeleteParams +from .browser_pool_list_response import BrowserPoolListResponse as BrowserPoolListResponse +from .browser_pool_update_params import BrowserPoolUpdateParams as BrowserPoolUpdateParams +from .deployment_create_response import DeploymentCreateResponse as DeploymentCreateResponse +from .deployment_follow_response import DeploymentFollowResponse as DeploymentFollowResponse +from .invocation_create_response import InvocationCreateResponse as InvocationCreateResponse +from .invocation_follow_response import InvocationFollowResponse as InvocationFollowResponse +from .invocation_update_response import InvocationUpdateResponse as InvocationUpdateResponse +from .browser_pool_acquire_params import BrowserPoolAcquireParams as BrowserPoolAcquireParams +from .browser_pool_release_params import BrowserPoolReleaseParams as BrowserPoolReleaseParams +from .deployment_retrieve_response import DeploymentRetrieveResponse as DeploymentRetrieveResponse +from .invocation_retrieve_response import InvocationRetrieveResponse as InvocationRetrieveResponse +from .browser_pool_acquire_response import BrowserPoolAcquireResponse as BrowserPoolAcquireResponse +from .credential_totp_code_response import CredentialTotpCodeResponse as CredentialTotpCodeResponse +from .browser_load_extensions_params import BrowserLoadExtensionsParams as BrowserLoadExtensionsParams +from .extension_download_from_chrome_store_params import ( + ExtensionDownloadFromChromeStoreParams as ExtensionDownloadFromChromeStoreParams, +) diff --git a/src/kernel/types/agents/__init__.py b/src/kernel/types/agents/__init__.py new file mode 100644 index 0000000..2ecdef6 --- /dev/null +++ b/src/kernel/types/agents/__init__.py @@ -0,0 +1,13 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from .auth_agent import AuthAgent as AuthAgent +from .auth_list_params import AuthListParams as AuthListParams +from .discovered_field import DiscoveredField as DiscoveredField +from .auth_create_params import AuthCreateParams as AuthCreateParams +from .agent_auth_submit_response import AgentAuthSubmitResponse as AgentAuthSubmitResponse +from .agent_auth_invocation_response import AgentAuthInvocationResponse as AgentAuthInvocationResponse +from .auth_agent_invocation_create_response import ( + AuthAgentInvocationCreateResponse as AuthAgentInvocationCreateResponse, +) diff --git a/src/kernel/types/agents/agent_auth_invocation_response.py b/src/kernel/types/agents/agent_auth_invocation_response.py new file mode 100644 index 0000000..42b54a4 --- /dev/null +++ b/src/kernel/types/agents/agent_auth_invocation_response.py @@ -0,0 +1,75 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import List, Optional +from datetime import datetime +from typing_extensions import Literal + +from ..._models import BaseModel +from .discovered_field import DiscoveredField + +__all__ = ["AgentAuthInvocationResponse", "PendingSSOButton"] + + +class PendingSSOButton(BaseModel): + """An SSO button for signing in with an external identity provider""" + + label: str + """Visible button text""" + + provider: str + """Identity provider name""" + + selector: str + """XPath selector for the button""" + + +class AgentAuthInvocationResponse(BaseModel): + """Response from get invocation endpoint""" + + app_name: str + """App name (org name at time of invocation creation)""" + + domain: str + """Domain for authentication""" + + expires_at: datetime + """When the handoff code expires""" + + status: Literal["IN_PROGRESS", "SUCCESS", "EXPIRED", "CANCELED", "FAILED"] + """Invocation status""" + + step: Literal[ + "initialized", "discovering", "awaiting_input", "awaiting_external_action", "submitting", "completed", "expired" + ] + """Current step in the invocation workflow""" + + type: Literal["login", "auto_login", "reauth"] + """The invocation type: + + - login: First-time authentication + - reauth: Re-authentication for previously authenticated agents + - auto_login: Legacy type (no longer created, kept for backward compatibility) + """ + + error_message: Optional[str] = None + """Error message explaining why the invocation failed (present when status=FAILED)""" + + external_action_message: Optional[str] = None + """ + Instructions for user when external action is required (present when + step=awaiting_external_action) + """ + + live_view_url: Optional[str] = None + """Browser live view URL for debugging the invocation""" + + pending_fields: Optional[List[DiscoveredField]] = None + """Fields currently awaiting input (present when step=awaiting_input)""" + + pending_sso_buttons: Optional[List[PendingSSOButton]] = None + """SSO buttons available on the page (present when step=awaiting_input)""" + + submitted_fields: Optional[List[str]] = None + """ + Names of fields that have been submitted (present when step=submitting or later) + """ diff --git a/src/kernel/types/agents/agent_auth_submit_response.py b/src/kernel/types/agents/agent_auth_submit_response.py new file mode 100644 index 0000000..8cb0df1 --- /dev/null +++ b/src/kernel/types/agents/agent_auth_submit_response.py @@ -0,0 +1,14 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from ..._models import BaseModel + +__all__ = ["AgentAuthSubmitResponse"] + + +class AgentAuthSubmitResponse(BaseModel): + """ + Response from submit endpoint - returns immediately after submission is accepted + """ + + accepted: bool + """Whether the submission was accepted for processing""" diff --git a/src/kernel/types/agents/auth/__init__.py b/src/kernel/types/agents/auth/__init__.py new file mode 100644 index 0000000..41e8ba8 --- /dev/null +++ b/src/kernel/types/agents/auth/__init__.py @@ -0,0 +1,8 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
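The `AgentAuthInvocationResponse` model defined above drives the hosted login flow: a caller re-fetches the invocation and branches on `step` until `status` reaches a terminal value. The sketch below is illustrative only; it assumes a caller-supplied `get_invocation` callable that returns this model, since the resource method that actually performs the HTTP request is not part of this diff.

    # Illustrative sketch only: `get_invocation` stands in for whatever SDK call
    # returns an AgentAuthInvocationResponse; it is not defined in this diff.
    import time
    from typing import Callable

    from kernel.types.agents import AgentAuthInvocationResponse

    def wait_for_next_action(
        get_invocation: Callable[[str], AgentAuthInvocationResponse],
        invocation_id: str,
    ) -> AgentAuthInvocationResponse:
        while True:
            inv = get_invocation(invocation_id)
            if inv.status in ("SUCCESS", "EXPIRED", "CANCELED", "FAILED"):
                return inv  # terminal state reached
            if inv.step == "awaiting_input" and inv.pending_fields:
                # Login fields discovered on the page; surface them to the user.
                for field in inv.pending_fields:
                    print(f"{field.label} ({field.type}) -> {field.selector}")
                return inv
            if inv.step == "awaiting_external_action":
                print(inv.external_action_message or "External action required")
            time.sleep(2)
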
+ +from __future__ import annotations + +from .invocation_create_params import InvocationCreateParams as InvocationCreateParams +from .invocation_submit_params import InvocationSubmitParams as InvocationSubmitParams +from .invocation_exchange_params import InvocationExchangeParams as InvocationExchangeParams +from .invocation_exchange_response import InvocationExchangeResponse as InvocationExchangeResponse diff --git a/src/kernel/types/agents/auth/invocation_create_params.py b/src/kernel/types/agents/auth/invocation_create_params.py new file mode 100644 index 0000000..b2727e0 --- /dev/null +++ b/src/kernel/types/agents/auth/invocation_create_params.py @@ -0,0 +1,19 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +__all__ = ["InvocationCreateParams"] + + +class InvocationCreateParams(TypedDict, total=False): + auth_agent_id: Required[str] + """ID of the auth agent to create an invocation for""" + + save_credential_as: str + """ + If provided, saves the submitted credentials under this name upon successful + login. The credential will be linked to the auth agent for automatic + re-authentication. + """ diff --git a/src/kernel/types/agents/auth/invocation_exchange_params.py b/src/kernel/types/agents/auth/invocation_exchange_params.py new file mode 100644 index 0000000..71e4d18 --- /dev/null +++ b/src/kernel/types/agents/auth/invocation_exchange_params.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +__all__ = ["InvocationExchangeParams"] + + +class InvocationExchangeParams(TypedDict, total=False): + code: Required[str] + """Handoff code from start endpoint""" diff --git a/src/kernel/types/agents/auth/invocation_exchange_response.py b/src/kernel/types/agents/auth/invocation_exchange_response.py new file mode 100644 index 0000000..710d9c3 --- /dev/null +++ b/src/kernel/types/agents/auth/invocation_exchange_response.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from ...._models import BaseModel + +__all__ = ["InvocationExchangeResponse"] + + +class InvocationExchangeResponse(BaseModel): + """Response from exchange endpoint""" + + invocation_id: str + """Invocation ID""" + + jwt: str + """JWT token with invocation_id claim (30 minute TTL)""" diff --git a/src/kernel/types/agents/auth/invocation_submit_params.py b/src/kernel/types/agents/auth/invocation_submit_params.py new file mode 100644 index 0000000..ad9f9c1 --- /dev/null +++ b/src/kernel/types/agents/auth/invocation_submit_params.py @@ -0,0 +1,21 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
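Read together, the invocation types above and the submit params defined just below describe a three-step handoff: create an invocation for an auth agent, exchange the one-time handoff code for a short-lived JWT, then submit either discovered field values or an SSO button selector. The sketch below shows that sequence under the assumption that the SDK exposes these endpoints as `client.agents.auth.invocations.*`; those accessor names (and the `Kernel` client class) are not shown in this diff and are used only for illustration.

    # Assumed accessors: client.agents.auth.invocations.{create, exchange, submit}.
    # Only the request/response shapes come from this diff; the method names and
    # the Kernel client class are illustrative assumptions.
    from kernel import Kernel

    client = Kernel()

    # 1. Start an invocation for an existing auth agent; optionally save the
    #    submitted credentials for later automatic re-authentication.
    created = client.agents.auth.invocations.create(
        auth_agent_id="aa_123",
        save_credential_as="acme-login",
    )

    # 2. Exchange the one-time handoff code for a JWT scoped to the invocation.
    exchanged = client.agents.auth.invocations.exchange(code=created.handoff_code)

    # 3. Submit values for the discovered fields (or `sso_button=<selector>`);
    #    the exact call signature may differ from this sketch.
    client.agents.auth.invocations.submit(
        exchanged.invocation_id,
        field_values={"email": "user@example.com", "password": "..."},
    )
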
+ +from __future__ import annotations + +from typing import Dict, Union +from typing_extensions import Required, TypeAlias, TypedDict + +__all__ = ["InvocationSubmitParams", "Variant0", "Variant1"] + + +class Variant0(TypedDict, total=False): + field_values: Required[Dict[str, str]] + """Values for the discovered login fields""" + + +class Variant1(TypedDict, total=False): + sso_button: Required[str] + """Selector of SSO button to click""" + + +InvocationSubmitParams: TypeAlias = Union[Variant0, Variant1] diff --git a/src/kernel/types/agents/auth_agent.py b/src/kernel/types/agents/auth_agent.py new file mode 100644 index 0000000..33fc46b --- /dev/null +++ b/src/kernel/types/agents/auth_agent.py @@ -0,0 +1,54 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional +from datetime import datetime +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["AuthAgent"] + + +class AuthAgent(BaseModel): + """ + An auth agent that manages authentication for a specific domain and profile combination + """ + + id: str + """Unique identifier for the auth agent""" + + domain: str + """Target domain for authentication""" + + profile_name: str + """Name of the profile associated with this auth agent""" + + status: Literal["AUTHENTICATED", "NEEDS_AUTH"] + """Current authentication status of the managed profile""" + + allowed_domains: Optional[List[str]] = None + """ + Additional domains that are valid for this auth agent's authentication flow + (besides the primary domain). Useful when login pages redirect to different + domains. + """ + + can_reauth: Optional[bool] = None + """ + Whether automatic re-authentication is possible (has credential_id, selectors, + and login_url) + """ + + credential_id: Optional[str] = None + """ID of the linked credential for automatic re-authentication""" + + credential_name: Optional[str] = None + """Name of the linked credential for automatic re-authentication""" + + has_selectors: Optional[bool] = None + """ + Whether this auth agent has stored selectors for deterministic re-authentication + """ + + last_auth_check_at: Optional[datetime] = None + """When the last authentication check was performed""" diff --git a/src/kernel/types/agents/auth_agent_invocation_create_response.py b/src/kernel/types/agents/auth_agent_invocation_create_response.py new file mode 100644 index 0000000..6027f4d --- /dev/null +++ b/src/kernel/types/agents/auth_agent_invocation_create_response.py @@ -0,0 +1,32 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from datetime import datetime +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["AuthAgentInvocationCreateResponse"] + + +class AuthAgentInvocationCreateResponse(BaseModel): + """Response from creating an invocation. 
Always returns an invocation_id.""" + + expires_at: datetime + """When the handoff code expires.""" + + handoff_code: str + """One-time code for handoff.""" + + hosted_url: str + """URL to redirect user to.""" + + invocation_id: str + """Unique identifier for the invocation.""" + + type: Literal["login", "auto_login", "reauth"] + """The invocation type: + + - login: First-time authentication + - reauth: Re-authentication for previously authenticated agents + - auto_login: Legacy type (no longer created, kept for backward compatibility) + """ diff --git a/src/kernel/types/agents/auth_create_params.py b/src/kernel/types/agents/auth_create_params.py new file mode 100644 index 0000000..b792d56 --- /dev/null +++ b/src/kernel/types/agents/auth_create_params.py @@ -0,0 +1,48 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +from ..._types import SequenceNotStr + +__all__ = ["AuthCreateParams", "Proxy"] + + +class AuthCreateParams(TypedDict, total=False): + domain: Required[str] + """Domain for authentication""" + + profile_name: Required[str] + """Name of the profile to use for this auth agent""" + + allowed_domains: SequenceNotStr[str] + """ + Additional domains that are valid for this auth agent's authentication flow + (besides the primary domain). Useful when login pages redirect to different + domains. + """ + + credential_name: str + """Optional name of an existing credential to use for this auth agent. + + If provided, the credential will be linked to the agent and its values will be + used to auto-fill the login form on invocation. + """ + + login_url: str + """Optional login page URL. + + If provided, will be stored on the agent and used to skip discovery in future + invocations. + """ + + proxy: Proxy + """Optional proxy configuration""" + + +class Proxy(TypedDict, total=False): + """Optional proxy configuration""" + + proxy_id: str + """ID of the proxy to use""" diff --git a/src/kernel/types/agents/auth_list_params.py b/src/kernel/types/agents/auth_list_params.py new file mode 100644 index 0000000..52d5337 --- /dev/null +++ b/src/kernel/types/agents/auth_list_params.py @@ -0,0 +1,21 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["AuthListParams"] + + +class AuthListParams(TypedDict, total=False): + domain: str + """Filter by domain""" + + limit: int + """Maximum number of results to return""" + + offset: int + """Number of results to skip""" + + profile_name: str + """Filter by profile name""" diff --git a/src/kernel/types/agents/discovered_field.py b/src/kernel/types/agents/discovered_field.py new file mode 100644 index 0000000..72ac294 --- /dev/null +++ b/src/kernel/types/agents/discovered_field.py @@ -0,0 +1,30 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import Optional +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["DiscoveredField"] + + +class DiscoveredField(BaseModel): + """A discovered form field""" + + label: str + """Field label""" + + name: str + """Field name""" + + selector: str + """CSS selector for the field""" + + type: Literal["text", "email", "password", "tel", "number", "url", "code", "totp"] + """Field type""" + + placeholder: Optional[str] = None + """Field placeholder""" + + required: Optional[bool] = None + """Whether field is required""" diff --git a/src/kernel/types/app_list_params.py b/src/kernel/types/app_list_params.py new file mode 100644 index 0000000..296ded5 --- /dev/null +++ b/src/kernel/types/app_list_params.py @@ -0,0 +1,21 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["AppListParams"] + + +class AppListParams(TypedDict, total=False): + app_name: str + """Filter results by application name.""" + + limit: int + """Limit the number of apps to return.""" + + offset: int + """Offset the number of apps to return.""" + + version: str + """Filter results by version label.""" diff --git a/src/kernel/types/app_list_response.py b/src/kernel/types/app_list_response.py new file mode 100644 index 0000000..338f506 --- /dev/null +++ b/src/kernel/types/app_list_response.py @@ -0,0 +1,34 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Dict, List +from typing_extensions import Literal + +from .._models import BaseModel +from .shared.app_action import AppAction + +__all__ = ["AppListResponse"] + + +class AppListResponse(BaseModel): + """Summary of an application version.""" + + id: str + """Unique identifier for the app version""" + + actions: List[AppAction] + """List of actions available on the app""" + + app_name: str + """Name of the application""" + + deployment: str + """Deployment ID""" + + env_vars: Dict[str, str] + """Environment variables configured for this app version""" + + region: Literal["aws.us-east-1a"] + """Deployment region code""" + + version: str + """Version label for the application""" diff --git a/src/kernel/types/browser_create_params.py b/src/kernel/types/browser_create_params.py new file mode 100644 index 0000000..0818760 --- /dev/null +++ b/src/kernel/types/browser_create_params.py @@ -0,0 +1,79 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Iterable +from typing_extensions import TypedDict + +from .browser_persistence_param import BrowserPersistenceParam +from .shared_params.browser_profile import BrowserProfile +from .shared_params.browser_viewport import BrowserViewport +from .shared_params.browser_extension import BrowserExtension + +__all__ = ["BrowserCreateParams"] + + +class BrowserCreateParams(TypedDict, total=False): + extensions: Iterable[BrowserExtension] + """List of browser extensions to load into the session. + + Provide each by id or name. + """ + + headless: bool + """If true, launches the browser using a headless image (no VNC/GUI). + + Defaults to false. + """ + + invocation_id: str + """action invocation ID""" + + kiosk_mode: bool + """ + If true, launches the browser in kiosk mode to hide address bar and tabs in live + view. 
+ """ + + persistence: BrowserPersistenceParam + """DEPRECATED: Use timeout_seconds (up to 72 hours) and Profiles instead.""" + + profile: BrowserProfile + """Profile selection for the browser session. + + Provide either id or name. If specified, the matching profile will be loaded + into the browser session. Profiles must be created beforehand. + """ + + proxy_id: str + """Optional proxy to associate to the browser session. + + Must reference a proxy belonging to the caller's org. + """ + + stealth: bool + """ + If true, launches the browser in stealth mode to reduce detection by anti-bot + mechanisms. + """ + + timeout_seconds: int + """The number of seconds of inactivity before the browser session is terminated. + + Activity includes CDP connections and live view connections. Defaults to 60 + seconds. Minimum allowed is 10 seconds. Maximum allowed is 259200 (72 hours). We + check for inactivity every 5 seconds, so the actual timeout behavior you will + see is +/- 5 seconds around the specified value. + """ + + viewport: BrowserViewport + """Initial browser window size in pixels with optional refresh rate. + + If omitted, image defaults apply (1920x1080@25). Only specific viewport + configurations are supported. The server will reject unsupported combinations. + Supported resolutions are: 2560x1440@10, 1920x1080@25, 1920x1200@25, + 1440x900@25, 1024x768@60, 1200x800@60 If refresh_rate is not provided, it will + be automatically determined from the width and height if they match a supported + configuration exactly. Note: Higher resolutions may affect the responsiveness of + live view browser + """ diff --git a/src/kernel/types/browser_create_response.py b/src/kernel/types/browser_create_response.py new file mode 100644 index 0000000..efff854 --- /dev/null +++ b/src/kernel/types/browser_create_response.py @@ -0,0 +1,64 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from datetime import datetime + +from .profile import Profile +from .._models import BaseModel +from .browser_persistence import BrowserPersistence +from .shared.browser_viewport import BrowserViewport + +__all__ = ["BrowserCreateResponse"] + + +class BrowserCreateResponse(BaseModel): + cdp_ws_url: str + """Websocket URL for Chrome DevTools Protocol connections to the browser session""" + + created_at: datetime + """When the browser session was created.""" + + headless: bool + """Whether the browser session is running in headless mode.""" + + session_id: str + """Unique identifier for the browser session""" + + stealth: bool + """Whether the browser session is running in stealth mode.""" + + timeout_seconds: int + """The number of seconds of inactivity before the browser session is terminated.""" + + browser_live_view_url: Optional[str] = None + """Remote URL for live viewing the browser session. + + Only available for non-headless browsers. + """ + + deleted_at: Optional[datetime] = None + """When the browser session was soft-deleted. 
Only present for deleted sessions.""" + + kiosk_mode: Optional[bool] = None + """Whether the browser session is running in kiosk mode.""" + + persistence: Optional[BrowserPersistence] = None + """DEPRECATED: Use timeout_seconds (up to 72 hours) and Profiles instead.""" + + profile: Optional[Profile] = None + """Browser profile metadata.""" + + proxy_id: Optional[str] = None + """ID of the proxy associated with this browser session, if any.""" + + viewport: Optional[BrowserViewport] = None + """Initial browser window size in pixels with optional refresh rate. + + If omitted, image defaults apply (1920x1080@25). Only specific viewport + configurations are supported. The server will reject unsupported combinations. + Supported resolutions are: 2560x1440@10, 1920x1080@25, 1920x1200@25, + 1440x900@25, 1024x768@60, 1200x800@60 If refresh_rate is not provided, it will + be automatically determined from the width and height if they match a supported + configuration exactly. Note: Higher resolutions may affect the responsiveness of + live view browser + """ diff --git a/src/kernel/types/browser_delete_params.py b/src/kernel/types/browser_delete_params.py new file mode 100644 index 0000000..4c5b1c6 --- /dev/null +++ b/src/kernel/types/browser_delete_params.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +__all__ = ["BrowserDeleteParams"] + + +class BrowserDeleteParams(TypedDict, total=False): + persistent_id: Required[str] + """Persistent browser identifier""" diff --git a/src/kernel/types/browser_list_params.py b/src/kernel/types/browser_list_params.py new file mode 100644 index 0000000..20837be --- /dev/null +++ b/src/kernel/types/browser_list_params.py @@ -0,0 +1,21 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["BrowserListParams"] + + +class BrowserListParams(TypedDict, total=False): + include_deleted: bool + """ + When true, includes soft-deleted browser sessions in the results alongside + active sessions. + """ + + limit: int + """Maximum number of results to return. Defaults to 20, maximum 100.""" + + offset: int + """Number of results to skip. Defaults to 0.""" diff --git a/src/kernel/types/browser_list_response.py b/src/kernel/types/browser_list_response.py new file mode 100644 index 0000000..3ce2648 --- /dev/null +++ b/src/kernel/types/browser_list_response.py @@ -0,0 +1,64 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import Optional +from datetime import datetime + +from .profile import Profile +from .._models import BaseModel +from .browser_persistence import BrowserPersistence +from .shared.browser_viewport import BrowserViewport + +__all__ = ["BrowserListResponse"] + + +class BrowserListResponse(BaseModel): + cdp_ws_url: str + """Websocket URL for Chrome DevTools Protocol connections to the browser session""" + + created_at: datetime + """When the browser session was created.""" + + headless: bool + """Whether the browser session is running in headless mode.""" + + session_id: str + """Unique identifier for the browser session""" + + stealth: bool + """Whether the browser session is running in stealth mode.""" + + timeout_seconds: int + """The number of seconds of inactivity before the browser session is terminated.""" + + browser_live_view_url: Optional[str] = None + """Remote URL for live viewing the browser session. + + Only available for non-headless browsers. + """ + + deleted_at: Optional[datetime] = None + """When the browser session was soft-deleted. Only present for deleted sessions.""" + + kiosk_mode: Optional[bool] = None + """Whether the browser session is running in kiosk mode.""" + + persistence: Optional[BrowserPersistence] = None + """DEPRECATED: Use timeout_seconds (up to 72 hours) and Profiles instead.""" + + profile: Optional[Profile] = None + """Browser profile metadata.""" + + proxy_id: Optional[str] = None + """ID of the proxy associated with this browser session, if any.""" + + viewport: Optional[BrowserViewport] = None + """Initial browser window size in pixels with optional refresh rate. + + If omitted, image defaults apply (1920x1080@25). Only specific viewport + configurations are supported. The server will reject unsupported combinations. + Supported resolutions are: 2560x1440@10, 1920x1080@25, 1920x1200@25, + 1440x900@25, 1024x768@60, 1200x800@60 If refresh_rate is not provided, it will + be automatically determined from the width and height if they match a supported + configuration exactly. Note: Higher resolutions may affect the responsiveness of + live view browser + """ diff --git a/src/kernel/types/browser_load_extensions_params.py b/src/kernel/types/browser_load_extensions_params.py new file mode 100644 index 0000000..6212380 --- /dev/null +++ b/src/kernel/types/browser_load_extensions_params.py @@ -0,0 +1,26 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Iterable +from typing_extensions import Required, TypedDict + +from .._types import FileTypes + +__all__ = ["BrowserLoadExtensionsParams", "Extension"] + + +class BrowserLoadExtensionsParams(TypedDict, total=False): + extensions: Required[Iterable[Extension]] + """List of extensions to upload and activate""" + + +class Extension(TypedDict, total=False): + name: Required[str] + """Folder name to place the extension under /home/kernel/extensions/""" + + zip_file: Required[FileTypes] + """ + Zip archive containing an unpacked Chromium extension (must include + manifest.json) + """ diff --git a/src/kernel/types/browser_persistence.py b/src/kernel/types/browser_persistence.py new file mode 100644 index 0000000..381d630 --- /dev/null +++ b/src/kernel/types/browser_persistence.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from .._models import BaseModel + +__all__ = ["BrowserPersistence"] + + +class BrowserPersistence(BaseModel): + """DEPRECATED: Use timeout_seconds (up to 72 hours) and Profiles instead.""" + + id: str + """DEPRECATED: Unique identifier for the persistent browser session.""" diff --git a/src/kernel/types/browser_persistence_param.py b/src/kernel/types/browser_persistence_param.py new file mode 100644 index 0000000..6109abf --- /dev/null +++ b/src/kernel/types/browser_persistence_param.py @@ -0,0 +1,14 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +__all__ = ["BrowserPersistenceParam"] + + +class BrowserPersistenceParam(TypedDict, total=False): + """DEPRECATED: Use timeout_seconds (up to 72 hours) and Profiles instead.""" + + id: Required[str] + """DEPRECATED: Unique identifier for the persistent browser session.""" diff --git a/src/kernel/types/browser_pool.py b/src/kernel/types/browser_pool.py new file mode 100644 index 0000000..1694313 --- /dev/null +++ b/src/kernel/types/browser_pool.py @@ -0,0 +1,98 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional +from datetime import datetime + +from .._models import BaseModel +from .shared.browser_profile import BrowserProfile +from .shared.browser_viewport import BrowserViewport +from .shared.browser_extension import BrowserExtension + +__all__ = ["BrowserPool", "BrowserPoolConfig"] + + +class BrowserPoolConfig(BaseModel): + """Configuration used to create all browsers in this pool""" + + size: int + """Number of browsers to create in the pool""" + + extensions: Optional[List[BrowserExtension]] = None + """List of browser extensions to load into the session. + + Provide each by id or name. + """ + + fill_rate_per_minute: Optional[int] = None + """Percentage of the pool to fill per minute. Defaults to 10%.""" + + headless: Optional[bool] = None + """If true, launches the browser using a headless image. Defaults to false.""" + + kiosk_mode: Optional[bool] = None + """ + If true, launches the browser in kiosk mode to hide address bar and tabs in live + view. + """ + + name: Optional[str] = None + """Optional name for the browser pool. Must be unique within the organization.""" + + profile: Optional[BrowserProfile] = None + """Profile selection for the browser session. + + Provide either id or name. If specified, the matching profile will be loaded + into the browser session. Profiles must be created beforehand. + """ + + proxy_id: Optional[str] = None + """Optional proxy to associate to the browser session. + + Must reference a proxy belonging to the caller's org. + """ + + stealth: Optional[bool] = None + """ + If true, launches the browser in stealth mode to reduce detection by anti-bot + mechanisms. + """ + + timeout_seconds: Optional[int] = None + """ + Default idle timeout in seconds for browsers acquired from this pool before they + are destroyed. Defaults to 600 seconds if not specified + """ + + viewport: Optional[BrowserViewport] = None + """Initial browser window size in pixels with optional refresh rate. + + If omitted, image defaults apply (1920x1080@25). Only specific viewport + configurations are supported. The server will reject unsupported combinations. 
+ Supported resolutions are: 2560x1440@10, 1920x1080@25, 1920x1200@25, + 1440x900@25, 1024x768@60, 1200x800@60 If refresh_rate is not provided, it will + be automatically determined from the width and height if they match a supported + configuration exactly. Note: Higher resolutions may affect the responsiveness of + live view browser + """ + + +class BrowserPool(BaseModel): + """A browser pool containing multiple identically configured browsers.""" + + id: str + """Unique identifier for the browser pool""" + + acquired_count: int + """Number of browsers currently acquired from the pool""" + + available_count: int + """Number of browsers currently available in the pool""" + + browser_pool_config: BrowserPoolConfig + """Configuration used to create all browsers in this pool""" + + created_at: datetime + """Timestamp when the browser pool was created""" + + name: Optional[str] = None + """Browser pool name, if set""" diff --git a/src/kernel/types/browser_pool_acquire_params.py b/src/kernel/types/browser_pool_acquire_params.py new file mode 100644 index 0000000..d0df921 --- /dev/null +++ b/src/kernel/types/browser_pool_acquire_params.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["BrowserPoolAcquireParams"] + + +class BrowserPoolAcquireParams(TypedDict, total=False): + acquire_timeout_seconds: int + """Maximum number of seconds to wait for a browser to be available. + + Defaults to the calculated time it would take to fill the pool at the currently + configured fill rate. + """ diff --git a/src/kernel/types/browser_pool_acquire_response.py b/src/kernel/types/browser_pool_acquire_response.py new file mode 100644 index 0000000..4b70a87 --- /dev/null +++ b/src/kernel/types/browser_pool_acquire_response.py @@ -0,0 +1,64 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from datetime import datetime + +from .profile import Profile +from .._models import BaseModel +from .browser_persistence import BrowserPersistence +from .shared.browser_viewport import BrowserViewport + +__all__ = ["BrowserPoolAcquireResponse"] + + +class BrowserPoolAcquireResponse(BaseModel): + cdp_ws_url: str + """Websocket URL for Chrome DevTools Protocol connections to the browser session""" + + created_at: datetime + """When the browser session was created.""" + + headless: bool + """Whether the browser session is running in headless mode.""" + + session_id: str + """Unique identifier for the browser session""" + + stealth: bool + """Whether the browser session is running in stealth mode.""" + + timeout_seconds: int + """The number of seconds of inactivity before the browser session is terminated.""" + + browser_live_view_url: Optional[str] = None + """Remote URL for live viewing the browser session. + + Only available for non-headless browsers. + """ + + deleted_at: Optional[datetime] = None + """When the browser session was soft-deleted. 
Only present for deleted sessions.""" + + kiosk_mode: Optional[bool] = None + """Whether the browser session is running in kiosk mode.""" + + persistence: Optional[BrowserPersistence] = None + """DEPRECATED: Use timeout_seconds (up to 72 hours) and Profiles instead.""" + + profile: Optional[Profile] = None + """Browser profile metadata.""" + + proxy_id: Optional[str] = None + """ID of the proxy associated with this browser session, if any.""" + + viewport: Optional[BrowserViewport] = None + """Initial browser window size in pixels with optional refresh rate. + + If omitted, image defaults apply (1920x1080@25). Only specific viewport + configurations are supported. The server will reject unsupported combinations. + Supported resolutions are: 2560x1440@10, 1920x1080@25, 1920x1200@25, + 1440x900@25, 1024x768@60, 1200x800@60 If refresh_rate is not provided, it will + be automatically determined from the width and height if they match a supported + configuration exactly. Note: Higher resolutions may affect the responsiveness of + live view browser + """ diff --git a/src/kernel/types/browser_pool_create_params.py b/src/kernel/types/browser_pool_create_params.py new file mode 100644 index 0000000..6c8e815 --- /dev/null +++ b/src/kernel/types/browser_pool_create_params.py @@ -0,0 +1,75 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Iterable +from typing_extensions import Required, TypedDict + +from .shared_params.browser_profile import BrowserProfile +from .shared_params.browser_viewport import BrowserViewport +from .shared_params.browser_extension import BrowserExtension + +__all__ = ["BrowserPoolCreateParams"] + + +class BrowserPoolCreateParams(TypedDict, total=False): + size: Required[int] + """Number of browsers to create in the pool""" + + extensions: Iterable[BrowserExtension] + """List of browser extensions to load into the session. + + Provide each by id or name. + """ + + fill_rate_per_minute: int + """Percentage of the pool to fill per minute. Defaults to 10%.""" + + headless: bool + """If true, launches the browser using a headless image. Defaults to false.""" + + kiosk_mode: bool + """ + If true, launches the browser in kiosk mode to hide address bar and tabs in live + view. + """ + + name: str + """Optional name for the browser pool. Must be unique within the organization.""" + + profile: BrowserProfile + """Profile selection for the browser session. + + Provide either id or name. If specified, the matching profile will be loaded + into the browser session. Profiles must be created beforehand. + """ + + proxy_id: str + """Optional proxy to associate to the browser session. + + Must reference a proxy belonging to the caller's org. + """ + + stealth: bool + """ + If true, launches the browser in stealth mode to reduce detection by anti-bot + mechanisms. + """ + + timeout_seconds: int + """ + Default idle timeout in seconds for browsers acquired from this pool before they + are destroyed. Defaults to 600 seconds if not specified + """ + + viewport: BrowserViewport + """Initial browser window size in pixels with optional refresh rate. + + If omitted, image defaults apply (1920x1080@25). Only specific viewport + configurations are supported. The server will reject unsupported combinations. 
+ Supported resolutions are: 2560x1440@10, 1920x1080@25, 1920x1200@25, + 1440x900@25, 1024x768@60, 1200x800@60 If refresh_rate is not provided, it will + be automatically determined from the width and height if they match a supported + configuration exactly. Note: Higher resolutions may affect the responsiveness of + live view browser + """ diff --git a/src/kernel/types/browser_pool_delete_params.py b/src/kernel/types/browser_pool_delete_params.py new file mode 100644 index 0000000..0a63c0f --- /dev/null +++ b/src/kernel/types/browser_pool_delete_params.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["BrowserPoolDeleteParams"] + + +class BrowserPoolDeleteParams(TypedDict, total=False): + force: bool + """If true, force delete even if browsers are currently leased. + + Leased browsers will be terminated. + """ diff --git a/src/kernel/types/browser_pool_list_response.py b/src/kernel/types/browser_pool_list_response.py new file mode 100644 index 0000000..a11c4de --- /dev/null +++ b/src/kernel/types/browser_pool_list_response.py @@ -0,0 +1,10 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List +from typing_extensions import TypeAlias + +from .browser_pool import BrowserPool + +__all__ = ["BrowserPoolListResponse"] + +BrowserPoolListResponse: TypeAlias = List[BrowserPool] diff --git a/src/kernel/types/browser_pool_release_params.py b/src/kernel/types/browser_pool_release_params.py new file mode 100644 index 0000000..104b0b0 --- /dev/null +++ b/src/kernel/types/browser_pool_release_params.py @@ -0,0 +1,18 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +__all__ = ["BrowserPoolReleaseParams"] + + +class BrowserPoolReleaseParams(TypedDict, total=False): + session_id: Required[str] + """Browser session ID to release back to the pool""" + + reuse: bool + """Whether to reuse the browser instance or destroy it and create a new one. + + Defaults to true. + """ diff --git a/src/kernel/types/browser_pool_update_params.py b/src/kernel/types/browser_pool_update_params.py new file mode 100644 index 0000000..2cd3be7 --- /dev/null +++ b/src/kernel/types/browser_pool_update_params.py @@ -0,0 +1,81 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Iterable +from typing_extensions import Required, TypedDict + +from .shared_params.browser_profile import BrowserProfile +from .shared_params.browser_viewport import BrowserViewport +from .shared_params.browser_extension import BrowserExtension + +__all__ = ["BrowserPoolUpdateParams"] + + +class BrowserPoolUpdateParams(TypedDict, total=False): + size: Required[int] + """Number of browsers to create in the pool""" + + discard_all_idle: bool + """Whether to discard all idle browsers and rebuild the pool immediately. + + Defaults to false. + """ + + extensions: Iterable[BrowserExtension] + """List of browser extensions to load into the session. + + Provide each by id or name. + """ + + fill_rate_per_minute: int + """Percentage of the pool to fill per minute. Defaults to 10%.""" + + headless: bool + """If true, launches the browser using a headless image. 
Defaults to false.""" + + kiosk_mode: bool + """ + If true, launches the browser in kiosk mode to hide address bar and tabs in live + view. + """ + + name: str + """Optional name for the browser pool. Must be unique within the organization.""" + + profile: BrowserProfile + """Profile selection for the browser session. + + Provide either id or name. If specified, the matching profile will be loaded + into the browser session. Profiles must be created beforehand. + """ + + proxy_id: str + """Optional proxy to associate to the browser session. + + Must reference a proxy belonging to the caller's org. + """ + + stealth: bool + """ + If true, launches the browser in stealth mode to reduce detection by anti-bot + mechanisms. + """ + + timeout_seconds: int + """ + Default idle timeout in seconds for browsers acquired from this pool before they + are destroyed. Defaults to 600 seconds if not specified + """ + + viewport: BrowserViewport + """Initial browser window size in pixels with optional refresh rate. + + If omitted, image defaults apply (1920x1080@25). Only specific viewport + configurations are supported. The server will reject unsupported combinations. + Supported resolutions are: 2560x1440@10, 1920x1080@25, 1920x1200@25, + 1440x900@25, 1024x768@60, 1200x800@60 If refresh_rate is not provided, it will + be automatically determined from the width and height if they match a supported + configuration exactly. Note: Higher resolutions may affect the responsiveness of + live view browser + """ diff --git a/src/kernel/types/browser_retrieve_response.py b/src/kernel/types/browser_retrieve_response.py new file mode 100644 index 0000000..12f58a5 --- /dev/null +++ b/src/kernel/types/browser_retrieve_response.py @@ -0,0 +1,64 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from datetime import datetime + +from .profile import Profile +from .._models import BaseModel +from .browser_persistence import BrowserPersistence +from .shared.browser_viewport import BrowserViewport + +__all__ = ["BrowserRetrieveResponse"] + + +class BrowserRetrieveResponse(BaseModel): + cdp_ws_url: str + """Websocket URL for Chrome DevTools Protocol connections to the browser session""" + + created_at: datetime + """When the browser session was created.""" + + headless: bool + """Whether the browser session is running in headless mode.""" + + session_id: str + """Unique identifier for the browser session""" + + stealth: bool + """Whether the browser session is running in stealth mode.""" + + timeout_seconds: int + """The number of seconds of inactivity before the browser session is terminated.""" + + browser_live_view_url: Optional[str] = None + """Remote URL for live viewing the browser session. + + Only available for non-headless browsers. + """ + + deleted_at: Optional[datetime] = None + """When the browser session was soft-deleted. Only present for deleted sessions.""" + + kiosk_mode: Optional[bool] = None + """Whether the browser session is running in kiosk mode.""" + + persistence: Optional[BrowserPersistence] = None + """DEPRECATED: Use timeout_seconds (up to 72 hours) and Profiles instead.""" + + profile: Optional[Profile] = None + """Browser profile metadata.""" + + proxy_id: Optional[str] = None + """ID of the proxy associated with this browser session, if any.""" + + viewport: Optional[BrowserViewport] = None + """Initial browser window size in pixels with optional refresh rate. + + If omitted, image defaults apply (1920x1080@25). 
Only specific viewport + configurations are supported. The server will reject unsupported combinations. + Supported resolutions are: 2560x1440@10, 1920x1080@25, 1920x1200@25, + 1440x900@25, 1024x768@60, 1200x800@60 If refresh_rate is not provided, it will + be automatically determined from the width and height if they match a supported + configuration exactly. Note: Higher resolutions may affect the responsiveness of + live view browser + """ diff --git a/src/kernel/types/browsers/__init__.py b/src/kernel/types/browsers/__init__.py new file mode 100644 index 0000000..546fdc6 --- /dev/null +++ b/src/kernel/types/browsers/__init__.py @@ -0,0 +1,47 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from .f_move_params import FMoveParams as FMoveParams +from .f_upload_params import FUploadParams as FUploadParams +from .log_stream_params import LogStreamParams as LogStreamParams +from .f_file_info_params import FFileInfoParams as FFileInfoParams +from .f_read_file_params import FReadFileParams as FReadFileParams +from .f_list_files_params import FListFilesParams as FListFilesParams +from .f_upload_zip_params import FUploadZipParams as FUploadZipParams +from .f_write_file_params import FWriteFileParams as FWriteFileParams +from .process_exec_params import ProcessExecParams as ProcessExecParams +from .process_kill_params import ProcessKillParams as ProcessKillParams +from .replay_start_params import ReplayStartParams as ReplayStartParams +from .f_delete_file_params import FDeleteFileParams as FDeleteFileParams +from .f_file_info_response import FFileInfoResponse as FFileInfoResponse +from .process_spawn_params import ProcessSpawnParams as ProcessSpawnParams +from .process_stdin_params import ProcessStdinParams as ProcessStdinParams +from .replay_list_response import ReplayListResponse as ReplayListResponse +from .f_list_files_response import FListFilesResponse as FListFilesResponse +from .process_exec_response import ProcessExecResponse as ProcessExecResponse +from .process_kill_response import ProcessKillResponse as ProcessKillResponse +from .replay_start_response import ReplayStartResponse as ReplayStartResponse +from .computer_scroll_params import ComputerScrollParams as ComputerScrollParams +from .process_spawn_response import ProcessSpawnResponse as ProcessSpawnResponse +from .process_stdin_response import ProcessStdinResponse as ProcessStdinResponse +from .process_status_response import ProcessStatusResponse as ProcessStatusResponse +from .computer_press_key_params import ComputerPressKeyParams as ComputerPressKeyParams +from .computer_type_text_params import ComputerTypeTextParams as ComputerTypeTextParams +from .f_create_directory_params import FCreateDirectoryParams as FCreateDirectoryParams +from .f_delete_directory_params import FDeleteDirectoryParams as FDeleteDirectoryParams +from .f_download_dir_zip_params import FDownloadDirZipParams as FDownloadDirZipParams +from .playwright_execute_params import PlaywrightExecuteParams as PlaywrightExecuteParams +from .computer_drag_mouse_params import ComputerDragMouseParams as ComputerDragMouseParams +from .computer_move_mouse_params import ComputerMoveMouseParams as ComputerMoveMouseParams +from .computer_click_mouse_params import ComputerClickMouseParams as ComputerClickMouseParams +from .playwright_execute_response import PlaywrightExecuteResponse as PlaywrightExecuteResponse +from .f_set_file_permissions_params import FSetFilePermissionsParams as 
FSetFilePermissionsParams +from .process_stdout_stream_response import ProcessStdoutStreamResponse as ProcessStdoutStreamResponse +from .computer_capture_screenshot_params import ComputerCaptureScreenshotParams as ComputerCaptureScreenshotParams +from .computer_set_cursor_visibility_params import ( + ComputerSetCursorVisibilityParams as ComputerSetCursorVisibilityParams, +) +from .computer_set_cursor_visibility_response import ( + ComputerSetCursorVisibilityResponse as ComputerSetCursorVisibilityResponse, +) diff --git a/src/kernel/types/browsers/computer_capture_screenshot_params.py b/src/kernel/types/browsers/computer_capture_screenshot_params.py new file mode 100644 index 0000000..942cef3 --- /dev/null +++ b/src/kernel/types/browsers/computer_capture_screenshot_params.py @@ -0,0 +1,25 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +__all__ = ["ComputerCaptureScreenshotParams", "Region"] + + +class ComputerCaptureScreenshotParams(TypedDict, total=False): + region: Region + + +class Region(TypedDict, total=False): + height: Required[int] + """Height of the region in pixels""" + + width: Required[int] + """Width of the region in pixels""" + + x: Required[int] + """X coordinate of the region's top-left corner""" + + y: Required[int] + """Y coordinate of the region's top-left corner""" diff --git a/src/kernel/types/browsers/computer_click_mouse_params.py b/src/kernel/types/browsers/computer_click_mouse_params.py new file mode 100644 index 0000000..9bde2e6 --- /dev/null +++ b/src/kernel/types/browsers/computer_click_mouse_params.py @@ -0,0 +1,29 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal, Required, TypedDict + +from ..._types import SequenceNotStr + +__all__ = ["ComputerClickMouseParams"] + + +class ComputerClickMouseParams(TypedDict, total=False): + x: Required[int] + """X coordinate of the click position""" + + y: Required[int] + """Y coordinate of the click position""" + + button: Literal["left", "right", "middle", "back", "forward"] + """Mouse button to interact with""" + + click_type: Literal["down", "up", "click"] + """Type of click action""" + + hold_keys: SequenceNotStr[str] + """Modifier keys to hold during the click""" + + num_clicks: int + """Number of times to repeat the click""" diff --git a/src/kernel/types/browsers/computer_drag_mouse_params.py b/src/kernel/types/browsers/computer_drag_mouse_params.py new file mode 100644 index 0000000..fb03b4b --- /dev/null +++ b/src/kernel/types/browsers/computer_drag_mouse_params.py @@ -0,0 +1,36 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Iterable +from typing_extensions import Literal, Required, TypedDict + +from ..._types import SequenceNotStr + +__all__ = ["ComputerDragMouseParams"] + + +class ComputerDragMouseParams(TypedDict, total=False): + path: Required[Iterable[Iterable[int]]] + """Ordered list of [x, y] coordinate pairs to move through while dragging. + + Must contain at least 2 points. 
+ """ + + button: Literal["left", "middle", "right"] + """Mouse button to drag with""" + + delay: int + """Delay in milliseconds between button down and starting to move along the path.""" + + hold_keys: SequenceNotStr[str] + """Modifier keys to hold during the drag""" + + step_delay_ms: int + """ + Delay in milliseconds between relative steps while dragging (not the initial + delay). + """ + + steps_per_segment: int + """Number of relative move steps per segment in the path. Minimum 1.""" diff --git a/src/kernel/types/browsers/computer_move_mouse_params.py b/src/kernel/types/browsers/computer_move_mouse_params.py new file mode 100644 index 0000000..1769e07 --- /dev/null +++ b/src/kernel/types/browsers/computer_move_mouse_params.py @@ -0,0 +1,20 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +from ..._types import SequenceNotStr + +__all__ = ["ComputerMoveMouseParams"] + + +class ComputerMoveMouseParams(TypedDict, total=False): + x: Required[int] + """X coordinate to move the cursor to""" + + y: Required[int] + """Y coordinate to move the cursor to""" + + hold_keys: SequenceNotStr[str] + """Modifier keys to hold during the move""" diff --git a/src/kernel/types/browsers/computer_press_key_params.py b/src/kernel/types/browsers/computer_press_key_params.py new file mode 100644 index 0000000..ea2c9b4 --- /dev/null +++ b/src/kernel/types/browsers/computer_press_key_params.py @@ -0,0 +1,28 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +from ..._types import SequenceNotStr + +__all__ = ["ComputerPressKeyParams"] + + +class ComputerPressKeyParams(TypedDict, total=False): + keys: Required[SequenceNotStr[str]] + """List of key symbols to press. + + Each item should be a key symbol supported by xdotool (see X11 keysym + definitions). Examples include "Return", "Shift", "Ctrl", "Alt", "F5". Items in + this list could also be combinations, e.g. "Ctrl+t" or "Ctrl+Shift+Tab". + """ + + duration: int + """Duration to hold the keys down in milliseconds. + + If omitted or 0, keys are tapped. + """ + + hold_keys: SequenceNotStr[str] + """Optional modifier keys to hold during the key press sequence.""" diff --git a/src/kernel/types/browsers/computer_scroll_params.py b/src/kernel/types/browsers/computer_scroll_params.py new file mode 100644 index 0000000..110cb30 --- /dev/null +++ b/src/kernel/types/browsers/computer_scroll_params.py @@ -0,0 +1,26 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +from ..._types import SequenceNotStr + +__all__ = ["ComputerScrollParams"] + + +class ComputerScrollParams(TypedDict, total=False): + x: Required[int] + """X coordinate at which to perform the scroll""" + + y: Required[int] + """Y coordinate at which to perform the scroll""" + + delta_x: int + """Horizontal scroll amount. Positive scrolls right, negative scrolls left.""" + + delta_y: int + """Vertical scroll amount. 
Positive scrolls down, negative scrolls up.""" + + hold_keys: SequenceNotStr[str] + """Modifier keys to hold during the scroll""" diff --git a/src/kernel/types/browsers/computer_set_cursor_visibility_params.py b/src/kernel/types/browsers/computer_set_cursor_visibility_params.py new file mode 100644 index 0000000..f003ee9 --- /dev/null +++ b/src/kernel/types/browsers/computer_set_cursor_visibility_params.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +__all__ = ["ComputerSetCursorVisibilityParams"] + + +class ComputerSetCursorVisibilityParams(TypedDict, total=False): + hidden: Required[bool] + """Whether the cursor should be hidden or visible""" diff --git a/src/kernel/types/browsers/computer_set_cursor_visibility_response.py b/src/kernel/types/browsers/computer_set_cursor_visibility_response.py new file mode 100644 index 0000000..0e07023 --- /dev/null +++ b/src/kernel/types/browsers/computer_set_cursor_visibility_response.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from ..._models import BaseModel + +__all__ = ["ComputerSetCursorVisibilityResponse"] + + +class ComputerSetCursorVisibilityResponse(BaseModel): + """Generic OK response.""" + + ok: bool + """Indicates success.""" diff --git a/src/kernel/types/browsers/computer_type_text_params.py b/src/kernel/types/browsers/computer_type_text_params.py new file mode 100644 index 0000000..3a2c513 --- /dev/null +++ b/src/kernel/types/browsers/computer_type_text_params.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +__all__ = ["ComputerTypeTextParams"] + + +class ComputerTypeTextParams(TypedDict, total=False): + text: Required[str] + """Text to type on the browser instance""" + + delay: int + """Delay in milliseconds between keystrokes""" diff --git a/src/kernel/types/browsers/f_create_directory_params.py b/src/kernel/types/browsers/f_create_directory_params.py new file mode 100644 index 0000000..20924f3 --- /dev/null +++ b/src/kernel/types/browsers/f_create_directory_params.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +__all__ = ["FCreateDirectoryParams"] + + +class FCreateDirectoryParams(TypedDict, total=False): + path: Required[str] + """Absolute directory path to create.""" + + mode: str + """Optional directory mode (octal string, e.g. 755). Defaults to 755.""" diff --git a/src/kernel/types/browsers/f_delete_directory_params.py b/src/kernel/types/browsers/f_delete_directory_params.py new file mode 100644 index 0000000..8f5a086 --- /dev/null +++ b/src/kernel/types/browsers/f_delete_directory_params.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +__all__ = ["FDeleteDirectoryParams"] + + +class FDeleteDirectoryParams(TypedDict, total=False): + path: Required[str] + """Absolute path to delete.""" diff --git a/src/kernel/types/browsers/f_delete_file_params.py b/src/kernel/types/browsers/f_delete_file_params.py new file mode 100644 index 0000000..d79bb8a --- /dev/null +++ b/src/kernel/types/browsers/f_delete_file_params.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +__all__ = ["FDeleteFileParams"] + + +class FDeleteFileParams(TypedDict, total=False): + path: Required[str] + """Absolute path to delete.""" diff --git a/src/kernel/types/browsers/f_download_dir_zip_params.py b/src/kernel/types/browsers/f_download_dir_zip_params.py new file mode 100644 index 0000000..88212c6 --- /dev/null +++ b/src/kernel/types/browsers/f_download_dir_zip_params.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +__all__ = ["FDownloadDirZipParams"] + + +class FDownloadDirZipParams(TypedDict, total=False): + path: Required[str] + """Absolute directory path to archive and download.""" diff --git a/src/kernel/types/browsers/f_file_info_params.py b/src/kernel/types/browsers/f_file_info_params.py new file mode 100644 index 0000000..9ddf41e --- /dev/null +++ b/src/kernel/types/browsers/f_file_info_params.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +__all__ = ["FFileInfoParams"] + + +class FFileInfoParams(TypedDict, total=False): + path: Required[str] + """Absolute path of the file or directory.""" diff --git a/src/kernel/types/browsers/f_file_info_response.py b/src/kernel/types/browsers/f_file_info_response.py new file mode 100644 index 0000000..7da1574 --- /dev/null +++ b/src/kernel/types/browsers/f_file_info_response.py @@ -0,0 +1,27 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from datetime import datetime + +from ..._models import BaseModel + +__all__ = ["FFileInfoResponse"] + + +class FFileInfoResponse(BaseModel): + is_dir: bool + """Whether the path is a directory.""" + + mod_time: datetime + """Last modification time.""" + + mode: str + """File mode bits (e.g., "drwxr-xr-x" or "-rw-r--r--").""" + + name: str + """Base name of the file or directory.""" + + path: str + """Absolute path.""" + + size_bytes: int + """Size in bytes. 0 for directories.""" diff --git a/src/kernel/types/browsers/f_list_files_params.py b/src/kernel/types/browsers/f_list_files_params.py new file mode 100644 index 0000000..87026f5 --- /dev/null +++ b/src/kernel/types/browsers/f_list_files_params.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +__all__ = ["FListFilesParams"] + + +class FListFilesParams(TypedDict, total=False): + path: Required[str] + """Absolute directory path.""" diff --git a/src/kernel/types/browsers/f_list_files_response.py b/src/kernel/types/browsers/f_list_files_response.py new file mode 100644 index 0000000..9fca14b --- /dev/null +++ b/src/kernel/types/browsers/f_list_files_response.py @@ -0,0 +1,32 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List +from datetime import datetime +from typing_extensions import TypeAlias + +from ..._models import BaseModel + +__all__ = ["FListFilesResponse", "FListFilesResponseItem"] + + +class FListFilesResponseItem(BaseModel): + is_dir: bool + """Whether the path is a directory.""" + + mod_time: datetime + """Last modification time.""" + + mode: str + """File mode bits (e.g., "drwxr-xr-x" or "-rw-r--r--").""" + + name: str + """Base name of the file or directory.""" + + path: str + """Absolute path.""" + + size_bytes: int + """Size in bytes. 0 for directories.""" + + +FListFilesResponse: TypeAlias = List[FListFilesResponseItem] diff --git a/src/kernel/types/browsers/f_move_params.py b/src/kernel/types/browsers/f_move_params.py new file mode 100644 index 0000000..d324cc9 --- /dev/null +++ b/src/kernel/types/browsers/f_move_params.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +__all__ = ["FMoveParams"] + + +class FMoveParams(TypedDict, total=False): + dest_path: Required[str] + """Absolute destination path.""" + + src_path: Required[str] + """Absolute source path.""" diff --git a/src/kernel/types/browsers/f_read_file_params.py b/src/kernel/types/browsers/f_read_file_params.py new file mode 100644 index 0000000..ee5d2e9 --- /dev/null +++ b/src/kernel/types/browsers/f_read_file_params.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +__all__ = ["FReadFileParams"] + + +class FReadFileParams(TypedDict, total=False): + path: Required[str] + """Absolute file path to read.""" diff --git a/src/kernel/types/browsers/f_set_file_permissions_params.py b/src/kernel/types/browsers/f_set_file_permissions_params.py new file mode 100644 index 0000000..5a02c1e --- /dev/null +++ b/src/kernel/types/browsers/f_set_file_permissions_params.py @@ -0,0 +1,21 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +__all__ = ["FSetFilePermissionsParams"] + + +class FSetFilePermissionsParams(TypedDict, total=False): + mode: Required[str] + """File mode bits (octal string, e.g. 644).""" + + path: Required[str] + """Absolute path whose permissions are to be changed.""" + + group: str + """New group name or GID.""" + + owner: str + """New owner username or UID.""" diff --git a/src/kernel/types/browsers/f_upload_params.py b/src/kernel/types/browsers/f_upload_params.py new file mode 100644 index 0000000..8f6534b --- /dev/null +++ b/src/kernel/types/browsers/f_upload_params.py @@ -0,0 +1,21 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import Iterable +from typing_extensions import Required, TypedDict + +from ..._types import FileTypes + +__all__ = ["FUploadParams", "File"] + + +class FUploadParams(TypedDict, total=False): + files: Required[Iterable[File]] + + +class File(TypedDict, total=False): + dest_path: Required[str] + """Absolute destination path to write the file.""" + + file: Required[FileTypes] diff --git a/src/kernel/types/browsers/f_upload_zip_params.py b/src/kernel/types/browsers/f_upload_zip_params.py new file mode 100644 index 0000000..4646e05 --- /dev/null +++ b/src/kernel/types/browsers/f_upload_zip_params.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +from ..._types import FileTypes + +__all__ = ["FUploadZipParams"] + + +class FUploadZipParams(TypedDict, total=False): + dest_path: Required[str] + """Absolute destination directory to extract the archive to.""" + + zip_file: Required[FileTypes] diff --git a/src/kernel/types/browsers/f_write_file_params.py b/src/kernel/types/browsers/f_write_file_params.py new file mode 100644 index 0000000..557eac1 --- /dev/null +++ b/src/kernel/types/browsers/f_write_file_params.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +__all__ = ["FWriteFileParams"] + + +class FWriteFileParams(TypedDict, total=False): + path: Required[str] + """Destination absolute file path.""" + + mode: str + """Optional file mode (octal string, e.g. 644). Defaults to 644.""" diff --git a/src/kernel/types/browsers/fs/__init__.py b/src/kernel/types/browsers/fs/__init__.py new file mode 100644 index 0000000..ebd13d9 --- /dev/null +++ b/src/kernel/types/browsers/fs/__init__.py @@ -0,0 +1,7 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from .watch_start_params import WatchStartParams as WatchStartParams +from .watch_start_response import WatchStartResponse as WatchStartResponse +from .watch_events_response import WatchEventsResponse as WatchEventsResponse diff --git a/src/kernel/types/browsers/fs/watch_events_response.py b/src/kernel/types/browsers/fs/watch_events_response.py new file mode 100644 index 0000000..5778a30 --- /dev/null +++ b/src/kernel/types/browsers/fs/watch_events_response.py @@ -0,0 +1,24 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from typing_extensions import Literal + +from ...._models import BaseModel + +__all__ = ["WatchEventsResponse"] + + +class WatchEventsResponse(BaseModel): + """Filesystem change event.""" + + path: str + """Absolute path of the file or directory.""" + + type: Literal["CREATE", "WRITE", "DELETE", "RENAME"] + """Event type.""" + + is_dir: Optional[bool] = None + """Whether the affected path is a directory.""" + + name: Optional[str] = None + """Base name of the file or directory affected.""" diff --git a/src/kernel/types/browsers/fs/watch_start_params.py b/src/kernel/types/browsers/fs/watch_start_params.py new file mode 100644 index 0000000..5afddb1 --- /dev/null +++ b/src/kernel/types/browsers/fs/watch_start_params.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +__all__ = ["WatchStartParams"] + + +class WatchStartParams(TypedDict, total=False): + path: Required[str] + """Directory to watch.""" + + recursive: bool + """Whether to watch recursively.""" diff --git a/src/kernel/types/browsers/fs/watch_start_response.py b/src/kernel/types/browsers/fs/watch_start_response.py new file mode 100644 index 0000000..b9f78e4 --- /dev/null +++ b/src/kernel/types/browsers/fs/watch_start_response.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional + +from ...._models import BaseModel + +__all__ = ["WatchStartResponse"] + + +class WatchStartResponse(BaseModel): + watch_id: Optional[str] = None + """Unique identifier for the directory watch""" diff --git a/src/kernel/types/browsers/log_stream_params.py b/src/kernel/types/browsers/log_stream_params.py new file mode 100644 index 0000000..2eeb9b3 --- /dev/null +++ b/src/kernel/types/browsers/log_stream_params.py @@ -0,0 +1,19 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["LogStreamParams"] + + +class LogStreamParams(TypedDict, total=False): + source: Required[Literal["path", "supervisor"]] + + follow: bool + + path: str + """only required if source is path""" + + supervisor_process: str + """only required if source is supervisor""" diff --git a/src/kernel/types/browsers/playwright_execute_params.py b/src/kernel/types/browsers/playwright_execute_params.py new file mode 100644 index 0000000..948a74c --- /dev/null +++ b/src/kernel/types/browsers/playwright_execute_params.py @@ -0,0 +1,21 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +__all__ = ["PlaywrightExecuteParams"] + + +class PlaywrightExecuteParams(TypedDict, total=False): + code: Required[str] + """TypeScript/JavaScript code to execute. + + The code has access to 'page', 'context', and 'browser' variables. It runs + within a function, so you can use a return statement at the end to return a + value. This value is returned as the `result` property in the response. Example: + "await page.goto('https://example.com'); return await page.title();" + """ + + timeout_sec: int + """Maximum execution time in seconds. Default is 60.""" diff --git a/src/kernel/types/browsers/playwright_execute_response.py b/src/kernel/types/browsers/playwright_execute_response.py new file mode 100644 index 0000000..d53080d --- /dev/null +++ b/src/kernel/types/browsers/playwright_execute_response.py @@ -0,0 +1,26 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import Optional + +from ..._models import BaseModel + +__all__ = ["PlaywrightExecuteResponse"] + + +class PlaywrightExecuteResponse(BaseModel): + """Result of Playwright code execution""" + + success: bool + """Whether the code executed successfully""" + + error: Optional[str] = None + """Error message if execution failed""" + + result: Optional[object] = None + """The value returned by the code (if any)""" + + stderr: Optional[str] = None + """Standard error from the execution""" + + stdout: Optional[str] = None + """Standard output from the execution""" diff --git a/src/kernel/types/browsers/process_exec_params.py b/src/kernel/types/browsers/process_exec_params.py new file mode 100644 index 0000000..a6481c1 --- /dev/null +++ b/src/kernel/types/browsers/process_exec_params.py @@ -0,0 +1,33 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Dict, Optional +from typing_extensions import Required, TypedDict + +from ..._types import SequenceNotStr + +__all__ = ["ProcessExecParams"] + + +class ProcessExecParams(TypedDict, total=False): + command: Required[str] + """Executable or shell command to run.""" + + args: SequenceNotStr[str] + """Command arguments.""" + + as_root: bool + """Run the process with root privileges.""" + + as_user: Optional[str] + """Run the process as this user.""" + + cwd: Optional[str] + """Working directory (absolute path) to run the command in.""" + + env: Dict[str, str] + """Environment variables to set for the process.""" + + timeout_sec: Optional[int] + """Maximum execution time in seconds.""" diff --git a/src/kernel/types/browsers/process_exec_response.py b/src/kernel/types/browsers/process_exec_response.py new file mode 100644 index 0000000..a5e4b77 --- /dev/null +++ b/src/kernel/types/browsers/process_exec_response.py @@ -0,0 +1,23 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional + +from ..._models import BaseModel + +__all__ = ["ProcessExecResponse"] + + +class ProcessExecResponse(BaseModel): + """Result of a synchronous command execution.""" + + duration_ms: Optional[int] = None + """Execution duration in milliseconds.""" + + exit_code: Optional[int] = None + """Process exit code.""" + + stderr_b64: Optional[str] = None + """Base64-encoded stderr buffer.""" + + stdout_b64: Optional[str] = None + """Base64-encoded stdout buffer.""" diff --git a/src/kernel/types/browsers/process_kill_params.py b/src/kernel/types/browsers/process_kill_params.py new file mode 100644 index 0000000..84be277 --- /dev/null +++ b/src/kernel/types/browsers/process_kill_params.py @@ -0,0 +1,14 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["ProcessKillParams"] + + +class ProcessKillParams(TypedDict, total=False): + id: Required[str] + + signal: Required[Literal["TERM", "KILL", "INT", "HUP"]] + """Signal to send.""" diff --git a/src/kernel/types/browsers/process_kill_response.py b/src/kernel/types/browsers/process_kill_response.py new file mode 100644 index 0000000..6706e88 --- /dev/null +++ b/src/kernel/types/browsers/process_kill_response.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
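A hedged sketch of the synchronous exec types above: it builds the request params and shows how the base64-encoded output would be decoded. The response object is constructed locally purely for illustration; in practice it comes back from the API.

import base64

from kernel.types.browsers.process_exec_params import ProcessExecParams
from kernel.types.browsers.process_exec_response import ProcessExecResponse

# Request body for a one-shot command execution.
params: ProcessExecParams = {
    "command": "ls",
    "args": ["-la"],
    "cwd": "/tmp",
    "timeout_sec": 10,
}

# Built inline here only to demonstrate decoding the base64 stdout buffer.
resp = ProcessExecResponse(exit_code=0, stdout_b64=base64.b64encode(b"total 0\n").decode())
stdout = base64.b64decode(resp.stdout_b64 or "").decode()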
+ +from ..._models import BaseModel + +__all__ = ["ProcessKillResponse"] + + +class ProcessKillResponse(BaseModel): + """Generic OK response.""" + + ok: bool + """Indicates success.""" diff --git a/src/kernel/types/browsers/process_spawn_params.py b/src/kernel/types/browsers/process_spawn_params.py new file mode 100644 index 0000000..8e901cb --- /dev/null +++ b/src/kernel/types/browsers/process_spawn_params.py @@ -0,0 +1,33 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Dict, Optional +from typing_extensions import Required, TypedDict + +from ..._types import SequenceNotStr + +__all__ = ["ProcessSpawnParams"] + + +class ProcessSpawnParams(TypedDict, total=False): + command: Required[str] + """Executable or shell command to run.""" + + args: SequenceNotStr[str] + """Command arguments.""" + + as_root: bool + """Run the process with root privileges.""" + + as_user: Optional[str] + """Run the process as this user.""" + + cwd: Optional[str] + """Working directory (absolute path) to run the command in.""" + + env: Dict[str, str] + """Environment variables to set for the process.""" + + timeout_sec: Optional[int] + """Maximum execution time in seconds.""" diff --git a/src/kernel/types/browsers/process_spawn_response.py b/src/kernel/types/browsers/process_spawn_response.py new file mode 100644 index 0000000..0cda64d --- /dev/null +++ b/src/kernel/types/browsers/process_spawn_response.py @@ -0,0 +1,21 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from datetime import datetime + +from ..._models import BaseModel + +__all__ = ["ProcessSpawnResponse"] + + +class ProcessSpawnResponse(BaseModel): + """Information about a spawned process.""" + + pid: Optional[int] = None + """OS process ID.""" + + process_id: Optional[str] = None + """Server-assigned identifier for the process.""" + + started_at: Optional[datetime] = None + """Timestamp when the process started.""" diff --git a/src/kernel/types/browsers/process_status_response.py b/src/kernel/types/browsers/process_status_response.py new file mode 100644 index 0000000..91c7724 --- /dev/null +++ b/src/kernel/types/browsers/process_status_response.py @@ -0,0 +1,24 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["ProcessStatusResponse"] + + +class ProcessStatusResponse(BaseModel): + """Current status of a process.""" + + cpu_pct: Optional[float] = None + """Estimated CPU usage percentage.""" + + exit_code: Optional[int] = None + """Exit code if the process has exited.""" + + mem_bytes: Optional[int] = None + """Estimated resident memory usage in bytes.""" + + state: Optional[Literal["running", "exited"]] = None + """Process state.""" diff --git a/src/kernel/types/browsers/process_stdin_params.py b/src/kernel/types/browsers/process_stdin_params.py new file mode 100644 index 0000000..9ece9a5 --- /dev/null +++ b/src/kernel/types/browsers/process_stdin_params.py @@ -0,0 +1,14 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +__all__ = ["ProcessStdinParams"] + + +class ProcessStdinParams(TypedDict, total=False): + id: Required[str] + + data_b64: Required[str] + """Base64-encoded data to write.""" diff --git a/src/kernel/types/browsers/process_stdin_response.py b/src/kernel/types/browsers/process_stdin_response.py new file mode 100644 index 0000000..be3c798 --- /dev/null +++ b/src/kernel/types/browsers/process_stdin_response.py @@ -0,0 +1,14 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional + +from ..._models import BaseModel + +__all__ = ["ProcessStdinResponse"] + + +class ProcessStdinResponse(BaseModel): + """Result of writing to stdin.""" + + written_bytes: Optional[int] = None + """Number of bytes written.""" diff --git a/src/kernel/types/browsers/process_stdout_stream_response.py b/src/kernel/types/browsers/process_stdout_stream_response.py new file mode 100644 index 0000000..6e911f5 --- /dev/null +++ b/src/kernel/types/browsers/process_stdout_stream_response.py @@ -0,0 +1,24 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["ProcessStdoutStreamResponse"] + + +class ProcessStdoutStreamResponse(BaseModel): + """SSE payload representing process output or lifecycle events.""" + + data_b64: Optional[str] = None + """Base64-encoded data from the process stream.""" + + event: Optional[Literal["exit"]] = None + """Lifecycle event type.""" + + exit_code: Optional[int] = None + """Exit code when the event is "exit".""" + + stream: Optional[Literal["stdout", "stderr"]] = None + """Source stream of the data chunk.""" diff --git a/src/kernel/types/browsers/replay_list_response.py b/src/kernel/types/browsers/replay_list_response.py new file mode 100644 index 0000000..8cf9d54 --- /dev/null +++ b/src/kernel/types/browsers/replay_list_response.py @@ -0,0 +1,28 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional +from datetime import datetime +from typing_extensions import TypeAlias + +from ..._models import BaseModel + +__all__ = ["ReplayListResponse", "ReplayListResponseItem"] + + +class ReplayListResponseItem(BaseModel): + """Information about a browser replay recording.""" + + replay_id: str + """Unique identifier for the replay recording.""" + + finished_at: Optional[datetime] = None + """Timestamp when replay finished""" + + replay_view_url: Optional[str] = None + """URL for viewing the replay recording.""" + + started_at: Optional[datetime] = None + """Timestamp when replay started""" + + +ReplayListResponse: TypeAlias = List[ReplayListResponseItem] diff --git a/src/kernel/types/browsers/replay_start_params.py b/src/kernel/types/browsers/replay_start_params.py new file mode 100644 index 0000000..d668386 --- /dev/null +++ b/src/kernel/types/browsers/replay_start_params.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
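A small sketch for the stdin and stdout-stream types above: data must be base64-encoded on the way in and decoded on the way out. The process id used here is a made-up placeholder.

import base64

from kernel.types.browsers.process_stdin_params import ProcessStdinParams
from kernel.types.browsers.process_stdout_stream_response import ProcessStdoutStreamResponse

# Write a line to a spawned process's stdin (the id is a hypothetical placeholder).
stdin_params: ProcessStdinParams = {
    "id": "proc_123",
    "data_b64": base64.b64encode(b"hello\n").decode(),
}

def chunk_text(event: ProcessStdoutStreamResponse) -> str:
    # Data chunks arrive base64-encoded; lifecycle events (event == "exit") carry no data.
    return base64.b64decode(event.data_b64).decode() if event.data_b64 else ""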
+ +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["ReplayStartParams"] + + +class ReplayStartParams(TypedDict, total=False): + framerate: int + """Recording framerate in fps.""" + + max_duration_in_seconds: int + """Maximum recording duration in seconds.""" diff --git a/src/kernel/types/browsers/replay_start_response.py b/src/kernel/types/browsers/replay_start_response.py new file mode 100644 index 0000000..ac4130b --- /dev/null +++ b/src/kernel/types/browsers/replay_start_response.py @@ -0,0 +1,24 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from datetime import datetime + +from ..._models import BaseModel + +__all__ = ["ReplayStartResponse"] + + +class ReplayStartResponse(BaseModel): + """Information about a browser replay recording.""" + + replay_id: str + """Unique identifier for the replay recording.""" + + finished_at: Optional[datetime] = None + """Timestamp when replay finished""" + + replay_view_url: Optional[str] = None + """URL for viewing the replay recording.""" + + started_at: Optional[datetime] = None + """Timestamp when replay started""" diff --git a/src/kernel/types/credential.py b/src/kernel/types/credential.py new file mode 100644 index 0000000..8ae733b --- /dev/null +++ b/src/kernel/types/credential.py @@ -0,0 +1,47 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from datetime import datetime + +from .._models import BaseModel + +__all__ = ["Credential"] + + +class Credential(BaseModel): + """A stored credential for automatic re-authentication""" + + id: str + """Unique identifier for the credential""" + + created_at: datetime + """When the credential was created""" + + domain: str + """Target domain this credential is for""" + + name: str + """Unique name for the credential within the organization""" + + updated_at: datetime + """When the credential was last updated""" + + has_totp_secret: Optional[bool] = None + """Whether this credential has a TOTP secret configured for automatic 2FA""" + + sso_provider: Optional[str] = None + """ + If set, indicates this credential should be used with the specified SSO provider + (e.g., google, github, microsoft). When the target site has a matching SSO + button, it will be clicked first before filling credential values on the + identity provider's login page. + """ + + totp_code: Optional[str] = None + """Current 6-digit TOTP code. + + Only included in create/update responses when totp_secret was just set. + """ + + totp_code_expires_at: Optional[datetime] = None + """When the totp_code expires. Only included when totp_code is present.""" diff --git a/src/kernel/types/credential_create_params.py b/src/kernel/types/credential_create_params.py new file mode 100644 index 0000000..94964b9 --- /dev/null +++ b/src/kernel/types/credential_create_params.py @@ -0,0 +1,33 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import Dict +from typing_extensions import Required, TypedDict + +__all__ = ["CredentialCreateParams"] + + +class CredentialCreateParams(TypedDict, total=False): + domain: Required[str] + """Target domain this credential is for""" + + name: Required[str] + """Unique name for the credential within the organization""" + + values: Required[Dict[str, str]] + """Field name to value mapping (e.g., username, password)""" + + sso_provider: str + """ + If set, indicates this credential should be used with the specified SSO provider + (e.g., google, github, microsoft). When the target site has a matching SSO + button, it will be clicked first before filling credential values on the + identity provider's login page. + """ + + totp_secret: str + """Base32-encoded TOTP secret for generating one-time passwords. + + Used for automatic 2FA during login. + """ diff --git a/src/kernel/types/credential_list_params.py b/src/kernel/types/credential_list_params.py new file mode 100644 index 0000000..945909e --- /dev/null +++ b/src/kernel/types/credential_list_params.py @@ -0,0 +1,18 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["CredentialListParams"] + + +class CredentialListParams(TypedDict, total=False): + domain: str + """Filter by domain""" + + limit: int + """Maximum number of results to return""" + + offset: int + """Number of results to skip""" diff --git a/src/kernel/types/credential_totp_code_response.py b/src/kernel/types/credential_totp_code_response.py new file mode 100644 index 0000000..670f4e7 --- /dev/null +++ b/src/kernel/types/credential_totp_code_response.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from datetime import datetime + +from .._models import BaseModel + +__all__ = ["CredentialTotpCodeResponse"] + + +class CredentialTotpCodeResponse(BaseModel): + code: str + """Current 6-digit TOTP code""" + + expires_at: datetime + """When this code expires (ISO 8601 timestamp)""" diff --git a/src/kernel/types/credential_update_params.py b/src/kernel/types/credential_update_params.py new file mode 100644 index 0000000..c42209e --- /dev/null +++ b/src/kernel/types/credential_update_params.py @@ -0,0 +1,34 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Dict, Optional +from typing_extensions import TypedDict + +__all__ = ["CredentialUpdateParams"] + + +class CredentialUpdateParams(TypedDict, total=False): + name: str + """New name for the credential""" + + sso_provider: Optional[str] + """If set, indicates this credential should be used with the specified SSO + provider. + + Set to empty string or null to remove. + """ + + totp_secret: str + """Base32-encoded TOTP secret for generating one-time passwords. + + Spaces and formatting are automatically normalized. Set to empty string to + remove. + """ + + values: Dict[str, str] + """Field name to value mapping. + + Values are merged with existing values (new keys added, existing keys + overwritten). + """ diff --git a/src/kernel/types/deployment_create_params.py b/src/kernel/types/deployment_create_params.py new file mode 100644 index 0000000..84d3d87 --- /dev/null +++ b/src/kernel/types/deployment_create_params.py @@ -0,0 +1,68 @@ +# File generated from our OpenAPI spec by Stainless. 
See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Dict +from typing_extensions import Literal, Required, TypedDict + +from .._types import FileTypes + +__all__ = ["DeploymentCreateParams", "Source", "SourceAuth"] + + +class DeploymentCreateParams(TypedDict, total=False): + entrypoint_rel_path: str + """Relative path to the entrypoint of the application""" + + env_vars: Dict[str, str] + """Map of environment variables to set for the deployed application. + + Each key-value pair represents an environment variable. + """ + + file: FileTypes + """ZIP file containing the application source directory""" + + force: bool + """Allow overwriting an existing app version""" + + region: Literal["aws.us-east-1a"] + """Region for deployment. Currently we only support "aws.us-east-1a" """ + + source: Source + """Source from which to fetch application code.""" + + version: str + """Version of the application. Can be any string.""" + + +class SourceAuth(TypedDict, total=False): + """Authentication for private repositories.""" + + token: Required[str] + """GitHub PAT or installation access token""" + + method: Required[Literal["github_token"]] + """Auth method""" + + +class Source(TypedDict, total=False): + """Source from which to fetch application code.""" + + entrypoint: Required[str] + """Relative path to the application entrypoint within the selected path.""" + + ref: Required[str] + """Git ref (branch, tag, or commit SHA) to fetch.""" + + type: Required[Literal["github"]] + """Source type identifier.""" + + url: Required[str] + """Base repository URL (without blob/tree suffixes).""" + + auth: SourceAuth + """Authentication for private repositories.""" + + path: str + """Path within the repo to deploy (omit to use repo root).""" diff --git a/src/kernel/types/deployment_create_response.py b/src/kernel/types/deployment_create_response.py new file mode 100644 index 0000000..5746c97 --- /dev/null +++ b/src/kernel/types/deployment_create_response.py @@ -0,0 +1,37 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Dict, Optional +from datetime import datetime +from typing_extensions import Literal + +from .._models import BaseModel + +__all__ = ["DeploymentCreateResponse"] + + +class DeploymentCreateResponse(BaseModel): + """Deployment record information.""" + + id: str + """Unique identifier for the deployment""" + + created_at: datetime + """Timestamp when the deployment was created""" + + region: Literal["aws.us-east-1a"] + """Deployment region code""" + + status: Literal["queued", "in_progress", "running", "failed", "stopped"] + """Current status of the deployment""" + + entrypoint_rel_path: Optional[str] = None + """Relative path to the application entrypoint""" + + env_vars: Optional[Dict[str, str]] = None + """Environment variables configured for this deployment""" + + status_reason: Optional[str] = None + """Status reason""" + + updated_at: Optional[datetime] = None + """Timestamp when the deployment was last updated""" diff --git a/src/kernel/types/deployment_follow_params.py b/src/kernel/types/deployment_follow_params.py new file mode 100644 index 0000000..861f161 --- /dev/null +++ b/src/kernel/types/deployment_follow_params.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
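A minimal sketch of a GitHub-sourced deployment request using the params above; the repository URL, ref, and entrypoint are placeholders.

from kernel.types.deployment_create_params import DeploymentCreateParams, Source

source: Source = {
    "type": "github",
    "url": "https://github.com/acme/sample-app",  # placeholder repository
    "ref": "main",
    "entrypoint": "src/index.ts",  # placeholder entrypoint within the repo
}

params: DeploymentCreateParams = {
    "source": source,
    "version": "1.0.0",
    "region": "aws.us-east-1a",
    "env_vars": {"LOG_LEVEL": "debug"},
}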
+ +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["DeploymentFollowParams"] + + +class DeploymentFollowParams(TypedDict, total=False): + since: str + """Show logs since the given time (RFC timestamps or durations like 5m).""" diff --git a/src/kernel/types/deployment_follow_response.py b/src/kernel/types/deployment_follow_response.py new file mode 100644 index 0000000..d6de222 --- /dev/null +++ b/src/kernel/types/deployment_follow_response.py @@ -0,0 +1,49 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Dict, List, Union, Optional +from datetime import datetime +from typing_extensions import Literal, Annotated, TypeAlias + +from .._utils import PropertyInfo +from .._models import BaseModel +from .shared.log_event import LogEvent +from .shared.app_action import AppAction +from .shared.error_event import ErrorEvent +from .deployment_state_event import DeploymentStateEvent +from .shared.heartbeat_event import HeartbeatEvent + +__all__ = ["DeploymentFollowResponse", "AppVersionSummaryEvent"] + + +class AppVersionSummaryEvent(BaseModel): + """Summary of an application version.""" + + id: str + """Unique identifier for the app version""" + + actions: List[AppAction] + """List of actions available on the app""" + + app_name: str + """Name of the application""" + + event: Literal["app_version_summary"] + """Event type identifier (always "app_version_summary").""" + + region: Literal["aws.us-east-1a"] + """Deployment region code""" + + timestamp: datetime + """Time the state was reported.""" + + version: str + """Version label for the application""" + + env_vars: Optional[Dict[str, str]] = None + """Environment variables configured for this app version""" + + +DeploymentFollowResponse: TypeAlias = Annotated[ + Union[LogEvent, DeploymentStateEvent, AppVersionSummaryEvent, ErrorEvent, HeartbeatEvent], + PropertyInfo(discriminator="event"), +] diff --git a/src/kernel/types/deployment_list_params.py b/src/kernel/types/deployment_list_params.py new file mode 100644 index 0000000..54124da --- /dev/null +++ b/src/kernel/types/deployment_list_params.py @@ -0,0 +1,18 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["DeploymentListParams"] + + +class DeploymentListParams(TypedDict, total=False): + app_name: str + """Filter results by application name.""" + + limit: int + """Limit the number of deployments to return.""" + + offset: int + """Offset the number of deployments to return.""" diff --git a/src/kernel/types/deployment_list_response.py b/src/kernel/types/deployment_list_response.py new file mode 100644 index 0000000..d7719d4 --- /dev/null +++ b/src/kernel/types/deployment_list_response.py @@ -0,0 +1,37 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
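A sketch of consuming the discriminated follow-event union above; only fields defined in this section are accessed, and other event types fall through to a generic label.

from kernel.types.deployment_state_event import DeploymentStateEvent
from kernel.types.deployment_follow_response import DeploymentFollowResponse, AppVersionSummaryEvent

def summarize(event: DeploymentFollowResponse) -> str:
    # The union is discriminated on the `event` field, so isinstance checks are safe once parsed.
    if isinstance(event, DeploymentStateEvent):
        return f"deployment {event.deployment.id}: {event.deployment.status}"
    if isinstance(event, AppVersionSummaryEvent):
        return f"app {event.app_name} v{event.version} deployed"
    return type(event).__name__  # log / error / heartbeat events handled elsewhere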
+ +from typing import Dict, Optional +from datetime import datetime +from typing_extensions import Literal + +from .._models import BaseModel + +__all__ = ["DeploymentListResponse"] + + +class DeploymentListResponse(BaseModel): + """Deployment record information.""" + + id: str + """Unique identifier for the deployment""" + + created_at: datetime + """Timestamp when the deployment was created""" + + region: Literal["aws.us-east-1a"] + """Deployment region code""" + + status: Literal["queued", "in_progress", "running", "failed", "stopped"] + """Current status of the deployment""" + + entrypoint_rel_path: Optional[str] = None + """Relative path to the application entrypoint""" + + env_vars: Optional[Dict[str, str]] = None + """Environment variables configured for this deployment""" + + status_reason: Optional[str] = None + """Status reason""" + + updated_at: Optional[datetime] = None + """Timestamp when the deployment was last updated""" diff --git a/src/kernel/types/deployment_retrieve_response.py b/src/kernel/types/deployment_retrieve_response.py new file mode 100644 index 0000000..3601c86 --- /dev/null +++ b/src/kernel/types/deployment_retrieve_response.py @@ -0,0 +1,37 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Dict, Optional +from datetime import datetime +from typing_extensions import Literal + +from .._models import BaseModel + +__all__ = ["DeploymentRetrieveResponse"] + + +class DeploymentRetrieveResponse(BaseModel): + """Deployment record information.""" + + id: str + """Unique identifier for the deployment""" + + created_at: datetime + """Timestamp when the deployment was created""" + + region: Literal["aws.us-east-1a"] + """Deployment region code""" + + status: Literal["queued", "in_progress", "running", "failed", "stopped"] + """Current status of the deployment""" + + entrypoint_rel_path: Optional[str] = None + """Relative path to the application entrypoint""" + + env_vars: Optional[Dict[str, str]] = None + """Environment variables configured for this deployment""" + + status_reason: Optional[str] = None + """Status reason""" + + updated_at: Optional[datetime] = None + """Timestamp when the deployment was last updated""" diff --git a/src/kernel/types/deployment_state_event.py b/src/kernel/types/deployment_state_event.py new file mode 100644 index 0000000..cc221c7 --- /dev/null +++ b/src/kernel/types/deployment_state_event.py @@ -0,0 +1,50 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import Dict, Optional +from datetime import datetime +from typing_extensions import Literal + +from .._models import BaseModel + +__all__ = ["DeploymentStateEvent", "Deployment"] + + +class Deployment(BaseModel): + """Deployment record information.""" + + id: str + """Unique identifier for the deployment""" + + created_at: datetime + """Timestamp when the deployment was created""" + + region: Literal["aws.us-east-1a"] + """Deployment region code""" + + status: Literal["queued", "in_progress", "running", "failed", "stopped"] + """Current status of the deployment""" + + entrypoint_rel_path: Optional[str] = None + """Relative path to the application entrypoint""" + + env_vars: Optional[Dict[str, str]] = None + """Environment variables configured for this deployment""" + + status_reason: Optional[str] = None + """Status reason""" + + updated_at: Optional[datetime] = None + """Timestamp when the deployment was last updated""" + + +class DeploymentStateEvent(BaseModel): + """An event representing the current state of a deployment.""" + + deployment: Deployment + """Deployment record information.""" + + event: Literal["deployment_state"] + """Event type identifier (always "deployment_state").""" + + timestamp: datetime + """Time the state was reported.""" diff --git a/src/kernel/types/extension_download_from_chrome_store_params.py b/src/kernel/types/extension_download_from_chrome_store_params.py new file mode 100644 index 0000000..e9ca538 --- /dev/null +++ b/src/kernel/types/extension_download_from_chrome_store_params.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["ExtensionDownloadFromChromeStoreParams"] + + +class ExtensionDownloadFromChromeStoreParams(TypedDict, total=False): + url: Required[str] + """Chrome Web Store URL for the extension.""" + + os: Literal["win", "mac", "linux"] + """Target operating system for the extension package. Defaults to linux.""" diff --git a/src/kernel/types/extension_list_response.py b/src/kernel/types/extension_list_response.py new file mode 100644 index 0000000..79a5c99 --- /dev/null +++ b/src/kernel/types/extension_list_response.py @@ -0,0 +1,34 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional +from datetime import datetime +from typing_extensions import TypeAlias + +from .._models import BaseModel + +__all__ = ["ExtensionListResponse", "ExtensionListResponseItem"] + + +class ExtensionListResponseItem(BaseModel): + """A browser extension uploaded to Kernel.""" + + id: str + """Unique identifier for the extension""" + + created_at: datetime + """Timestamp when the extension was created""" + + size_bytes: int + """Size of the extension archive in bytes""" + + last_used_at: Optional[datetime] = None + """Timestamp when the extension was last used""" + + name: Optional[str] = None + """Optional, easier-to-reference name for the extension. + + Must be unique within the organization. + """ + + +ExtensionListResponse: TypeAlias = List[ExtensionListResponseItem] diff --git a/src/kernel/types/extension_upload_params.py b/src/kernel/types/extension_upload_params.py new file mode 100644 index 0000000..d36dde3 --- /dev/null +++ b/src/kernel/types/extension_upload_params.py @@ -0,0 +1,17 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
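A small sketch of the Chrome Web Store download params above; the store URL is a placeholder, not a real extension.

from kernel.types.extension_download_from_chrome_store_params import ExtensionDownloadFromChromeStoreParams

params: ExtensionDownloadFromChromeStoreParams = {
    "url": "https://chromewebstore.google.com/detail/example-extension/abcdefghijklmnop",  # placeholder URL
    "os": "linux",  # optional; linux is also the documented default
}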
+ +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +from .._types import FileTypes + +__all__ = ["ExtensionUploadParams"] + + +class ExtensionUploadParams(TypedDict, total=False): + file: Required[FileTypes] + """ZIP file containing the browser extension.""" + + name: str + """Optional unique name within the organization to reference this extension.""" diff --git a/src/kernel/types/extension_upload_response.py b/src/kernel/types/extension_upload_response.py new file mode 100644 index 0000000..1b3be22 --- /dev/null +++ b/src/kernel/types/extension_upload_response.py @@ -0,0 +1,30 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from datetime import datetime + +from .._models import BaseModel + +__all__ = ["ExtensionUploadResponse"] + + +class ExtensionUploadResponse(BaseModel): + """A browser extension uploaded to Kernel.""" + + id: str + """Unique identifier for the extension""" + + created_at: datetime + """Timestamp when the extension was created""" + + size_bytes: int + """Size of the extension archive in bytes""" + + last_used_at: Optional[datetime] = None + """Timestamp when the extension was last used""" + + name: Optional[str] = None + """Optional, easier-to-reference name for the extension. + + Must be unique within the organization. + """ diff --git a/src/kernel/types/invocation_create_params.py b/src/kernel/types/invocation_create_params.py new file mode 100644 index 0000000..288656a --- /dev/null +++ b/src/kernel/types/invocation_create_params.py @@ -0,0 +1,35 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, Annotated, TypedDict + +from .._utils import PropertyInfo + +__all__ = ["InvocationCreateParams"] + + +class InvocationCreateParams(TypedDict, total=False): + action_name: Required[str] + """Name of the action to invoke""" + + app_name: Required[str] + """Name of the application""" + + version: Required[str] + """Version of the application""" + + async_: Annotated[bool, PropertyInfo(alias="async")] + """If true, invoke asynchronously. + + When set, the API responds 202 Accepted with status "queued". + """ + + async_timeout_seconds: int + """Timeout in seconds for async invocations (min 10, max 3600). + + Only applies when async is true. + """ + + payload: str + """Input data for the action, sent as a JSON string.""" diff --git a/src/kernel/types/invocation_create_response.py b/src/kernel/types/invocation_create_response.py new file mode 100644 index 0000000..21fbcf3 --- /dev/null +++ b/src/kernel/types/invocation_create_response.py @@ -0,0 +1,28 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from typing_extensions import Literal + +from .._models import BaseModel + +__all__ = ["InvocationCreateResponse"] + + +class InvocationCreateResponse(BaseModel): + id: str + """ID of the invocation""" + + action_name: str + """Name of the action invoked""" + + status: Literal["queued", "running", "succeeded", "failed"] + """Status of the invocation""" + + output: Optional[str] = None + """The return value of the action that was invoked, rendered as a JSON string. + + This could be: string, number, boolean, array, object, or null. 
+ """ + + status_reason: Optional[str] = None + """Status reason""" diff --git a/src/kernel/types/invocation_follow_params.py b/src/kernel/types/invocation_follow_params.py new file mode 100644 index 0000000..6784781 --- /dev/null +++ b/src/kernel/types/invocation_follow_params.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["InvocationFollowParams"] + + +class InvocationFollowParams(TypedDict, total=False): + since: str + """Show logs since the given time (RFC timestamps or durations like 5m).""" diff --git a/src/kernel/types/invocation_follow_response.py b/src/kernel/types/invocation_follow_response.py new file mode 100644 index 0000000..2effbde --- /dev/null +++ b/src/kernel/types/invocation_follow_response.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Union +from typing_extensions import Annotated, TypeAlias + +from .._utils import PropertyInfo +from .shared.log_event import LogEvent +from .shared.error_event import ErrorEvent +from .invocation_state_event import InvocationStateEvent +from .shared.heartbeat_event import HeartbeatEvent + +__all__ = ["InvocationFollowResponse"] + +InvocationFollowResponse: TypeAlias = Annotated[ + Union[LogEvent, InvocationStateEvent, ErrorEvent, HeartbeatEvent], PropertyInfo(discriminator="event") +] diff --git a/src/kernel/types/invocation_list_params.py b/src/kernel/types/invocation_list_params.py new file mode 100644 index 0000000..9673f2d --- /dev/null +++ b/src/kernel/types/invocation_list_params.py @@ -0,0 +1,36 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal, TypedDict + +__all__ = ["InvocationListParams"] + + +class InvocationListParams(TypedDict, total=False): + action_name: str + """Filter results by action name.""" + + app_name: str + """Filter results by application name.""" + + deployment_id: str + """Filter results by deployment ID.""" + + limit: int + """Limit the number of invocations to return.""" + + offset: int + """Offset the number of invocations to return.""" + + since: str + """ + Show invocations that have started since the given time (RFC timestamps or + durations like 5m). + """ + + status: Literal["queued", "running", "succeeded", "failed"] + """Filter results by invocation status.""" + + version: str + """Filter results by application version.""" diff --git a/src/kernel/types/invocation_list_response.py b/src/kernel/types/invocation_list_response.py new file mode 100644 index 0000000..e635b4d --- /dev/null +++ b/src/kernel/types/invocation_list_response.py @@ -0,0 +1,50 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
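A minimal sketch of an async invocation request using the params above; the app, action, and payload values are placeholders, and `async_` serializes to the `async` wire field via its alias.

import json

from kernel.types.invocation_create_params import InvocationCreateParams

params: InvocationCreateParams = {
    "app_name": "my-app",        # placeholder application name
    "version": "1.0.0",
    "action_name": "scrape",     # placeholder action name
    "payload": json.dumps({"url": "https://example.com"}),  # payload is sent as a JSON string
    "async_": True,              # sent as "async"; enables the 202 Accepted / queued flow
    "async_timeout_seconds": 600,
}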
+ +from typing import Optional +from datetime import datetime +from typing_extensions import Literal + +from .._models import BaseModel + +__all__ = ["InvocationListResponse"] + + +class InvocationListResponse(BaseModel): + id: str + """ID of the invocation""" + + action_name: str + """Name of the action invoked""" + + app_name: str + """Name of the application""" + + started_at: datetime + """RFC 3339 Nanoseconds timestamp when the invocation started""" + + status: Literal["queued", "running", "succeeded", "failed"] + """Status of the invocation""" + + version: str + """Version label for the application""" + + finished_at: Optional[datetime] = None + """ + RFC 3339 Nanoseconds timestamp when the invocation finished (null if still + running) + """ + + output: Optional[str] = None + """Output produced by the action, rendered as a JSON string. + + This could be: string, number, boolean, array, object, or null. + """ + + payload: Optional[str] = None + """Payload provided to the invocation. + + This is a string that can be parsed as JSON. + """ + + status_reason: Optional[str] = None + """Status reason""" diff --git a/src/kernel/types/invocation_retrieve_response.py b/src/kernel/types/invocation_retrieve_response.py new file mode 100644 index 0000000..580424e --- /dev/null +++ b/src/kernel/types/invocation_retrieve_response.py @@ -0,0 +1,50 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from datetime import datetime +from typing_extensions import Literal + +from .._models import BaseModel + +__all__ = ["InvocationRetrieveResponse"] + + +class InvocationRetrieveResponse(BaseModel): + id: str + """ID of the invocation""" + + action_name: str + """Name of the action invoked""" + + app_name: str + """Name of the application""" + + started_at: datetime + """RFC 3339 Nanoseconds timestamp when the invocation started""" + + status: Literal["queued", "running", "succeeded", "failed"] + """Status of the invocation""" + + version: str + """Version label for the application""" + + finished_at: Optional[datetime] = None + """ + RFC 3339 Nanoseconds timestamp when the invocation finished (null if still + running) + """ + + output: Optional[str] = None + """Output produced by the action, rendered as a JSON string. + + This could be: string, number, boolean, array, object, or null. + """ + + payload: Optional[str] = None + """Payload provided to the invocation. + + This is a string that can be parsed as JSON. + """ + + status_reason: Optional[str] = None + """Status reason""" diff --git a/src/kernel/types/invocation_state_event.py b/src/kernel/types/invocation_state_event.py new file mode 100644 index 0000000..f32bf8e --- /dev/null +++ b/src/kernel/types/invocation_state_event.py @@ -0,0 +1,62 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import Optional +from datetime import datetime +from typing_extensions import Literal + +from .._models import BaseModel + +__all__ = ["InvocationStateEvent", "Invocation"] + + +class Invocation(BaseModel): + id: str + """ID of the invocation""" + + action_name: str + """Name of the action invoked""" + + app_name: str + """Name of the application""" + + started_at: datetime + """RFC 3339 Nanoseconds timestamp when the invocation started""" + + status: Literal["queued", "running", "succeeded", "failed"] + """Status of the invocation""" + + version: str + """Version label for the application""" + + finished_at: Optional[datetime] = None + """ + RFC 3339 Nanoseconds timestamp when the invocation finished (null if still + running) + """ + + output: Optional[str] = None + """Output produced by the action, rendered as a JSON string. + + This could be: string, number, boolean, array, object, or null. + """ + + payload: Optional[str] = None + """Payload provided to the invocation. + + This is a string that can be parsed as JSON. + """ + + status_reason: Optional[str] = None + """Status reason""" + + +class InvocationStateEvent(BaseModel): + """An event representing the current state of an invocation.""" + + event: Literal["invocation_state"] + """Event type identifier (always "invocation_state").""" + + invocation: Invocation + + timestamp: datetime + """Time the state was reported.""" diff --git a/src/kernel/types/invocation_update_params.py b/src/kernel/types/invocation_update_params.py new file mode 100644 index 0000000..72ccf5d --- /dev/null +++ b/src/kernel/types/invocation_update_params.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["InvocationUpdateParams"] + + +class InvocationUpdateParams(TypedDict, total=False): + status: Required[Literal["succeeded", "failed"]] + """New status for the invocation.""" + + output: str + """Updated output of the invocation rendered as JSON string.""" diff --git a/src/kernel/types/invocation_update_response.py b/src/kernel/types/invocation_update_response.py new file mode 100644 index 0000000..3bcc8bc --- /dev/null +++ b/src/kernel/types/invocation_update_response.py @@ -0,0 +1,50 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from datetime import datetime +from typing_extensions import Literal + +from .._models import BaseModel + +__all__ = ["InvocationUpdateResponse"] + + +class InvocationUpdateResponse(BaseModel): + id: str + """ID of the invocation""" + + action_name: str + """Name of the action invoked""" + + app_name: str + """Name of the application""" + + started_at: datetime + """RFC 3339 Nanoseconds timestamp when the invocation started""" + + status: Literal["queued", "running", "succeeded", "failed"] + """Status of the invocation""" + + version: str + """Version label for the application""" + + finished_at: Optional[datetime] = None + """ + RFC 3339 Nanoseconds timestamp when the invocation finished (null if still + running) + """ + + output: Optional[str] = None + """Output produced by the action, rendered as a JSON string. + + This could be: string, number, boolean, array, object, or null. + """ + + payload: Optional[str] = None + """Payload provided to the invocation. + + This is a string that can be parsed as JSON. 
+ """ + + status_reason: Optional[str] = None + """Status reason""" diff --git a/src/kernel/types/profile.py b/src/kernel/types/profile.py new file mode 100644 index 0000000..e141aa0 --- /dev/null +++ b/src/kernel/types/profile.py @@ -0,0 +1,27 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from datetime import datetime + +from .._models import BaseModel + +__all__ = ["Profile"] + + +class Profile(BaseModel): + """Browser profile metadata.""" + + id: str + """Unique identifier for the profile""" + + created_at: datetime + """Timestamp when the profile was created""" + + last_used_at: Optional[datetime] = None + """Timestamp when the profile was last used""" + + name: Optional[str] = None + """Optional, easier-to-reference name for the profile""" + + updated_at: Optional[datetime] = None + """Timestamp when the profile was last updated""" diff --git a/src/kernel/types/profile_create_params.py b/src/kernel/types/profile_create_params.py new file mode 100644 index 0000000..0b2b12a --- /dev/null +++ b/src/kernel/types/profile_create_params.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["ProfileCreateParams"] + + +class ProfileCreateParams(TypedDict, total=False): + name: str + """Optional name of the profile. Must be unique within the organization.""" diff --git a/src/kernel/types/profile_list_response.py b/src/kernel/types/profile_list_response.py new file mode 100644 index 0000000..24b2744 --- /dev/null +++ b/src/kernel/types/profile_list_response.py @@ -0,0 +1,10 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List +from typing_extensions import TypeAlias + +from .profile import Profile + +__all__ = ["ProfileListResponse"] + +ProfileListResponse: TypeAlias = List[Profile] diff --git a/src/kernel/types/proxy_check_response.py b/src/kernel/types/proxy_check_response.py new file mode 100644 index 0000000..dc45f4f --- /dev/null +++ b/src/kernel/types/proxy_check_response.py @@ -0,0 +1,195 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Union, Optional +from datetime import datetime +from typing_extensions import Literal, TypeAlias + +from .._models import BaseModel + +__all__ = [ + "ProxyCheckResponse", + "Config", + "ConfigDatacenterProxyConfig", + "ConfigIspProxyConfig", + "ConfigResidentialProxyConfig", + "ConfigMobileProxyConfig", + "ConfigCustomProxyConfig", +] + + +class ConfigDatacenterProxyConfig(BaseModel): + """Configuration for a datacenter proxy.""" + + country: Optional[str] = None + """ISO 3166 country code. Defaults to US if not provided.""" + + +class ConfigIspProxyConfig(BaseModel): + """Configuration for an ISP proxy.""" + + country: Optional[str] = None + """ISO 3166 country code. Defaults to US if not provided.""" + + +class ConfigResidentialProxyConfig(BaseModel): + """Configuration for residential proxies.""" + + asn: Optional[str] = None + """Autonomous system number. See https://bgp.potaroo.net/cidr/autnums.html""" + + city: Optional[str] = None + """City name (no spaces, e.g. + + `sanfrancisco`). If provided, `country` must also be provided. 
+ """ + + country: Optional[str] = None + """ISO 3166 country code.""" + + os: Optional[Literal["windows", "macos", "android"]] = None + """Operating system of the residential device.""" + + state: Optional[str] = None + """Two-letter state code.""" + + zip: Optional[str] = None + """US ZIP code.""" + + +class ConfigMobileProxyConfig(BaseModel): + """Configuration for mobile proxies.""" + + asn: Optional[str] = None + """Autonomous system number. See https://bgp.potaroo.net/cidr/autnums.html""" + + carrier: Optional[ + Literal[ + "a1", + "aircel", + "airtel", + "att", + "celcom", + "chinamobile", + "claro", + "comcast", + "cox", + "digi", + "dt", + "docomo", + "dtac", + "etisalat", + "idea", + "kyivstar", + "meo", + "megafon", + "mtn", + "mtnza", + "mts", + "optus", + "orange", + "qwest", + "reliance_jio", + "robi", + "sprint", + "telefonica", + "telstra", + "tmobile", + "tigo", + "tim", + "verizon", + "vimpelcom", + "vodacomza", + "vodafone", + "vivo", + "zain", + "vivabo", + "telenormyanmar", + "kcelljsc", + "swisscom", + "singtel", + "asiacell", + "windit", + "cellc", + "ooredoo", + "drei", + "umobile", + "cableone", + "proximus", + "tele2", + "mobitel", + "o2", + "bouygues", + "free", + "sfr", + "digicel", + ] + ] = None + """Mobile carrier.""" + + city: Optional[str] = None + """City name (no spaces, e.g. + + `sanfrancisco`). If provided, `country` must also be provided. + """ + + country: Optional[str] = None + """ISO 3166 country code""" + + state: Optional[str] = None + """Two-letter state code.""" + + zip: Optional[str] = None + """US ZIP code.""" + + +class ConfigCustomProxyConfig(BaseModel): + """Configuration for a custom proxy (e.g., private proxy server).""" + + host: str + """Proxy host address or IP.""" + + port: int + """Proxy port.""" + + has_password: Optional[bool] = None + """Whether the proxy has a password.""" + + username: Optional[str] = None + """Username for proxy authentication.""" + + +Config: TypeAlias = Union[ + ConfigDatacenterProxyConfig, + ConfigIspProxyConfig, + ConfigResidentialProxyConfig, + ConfigMobileProxyConfig, + ConfigCustomProxyConfig, +] + + +class ProxyCheckResponse(BaseModel): + """Configuration for routing traffic through a proxy.""" + + type: Literal["datacenter", "isp", "residential", "mobile", "custom"] + """Proxy type to use. + + In terms of quality for avoiding bot-detection, from best to worst: `mobile` > + `residential` > `isp` > `datacenter`. + """ + + id: Optional[str] = None + + config: Optional[Config] = None + """Configuration specific to the selected proxy `type`.""" + + last_checked: Optional[datetime] = None + """Timestamp of the last health check performed on this proxy.""" + + name: Optional[str] = None + """Readable name of the proxy.""" + + protocol: Optional[Literal["http", "https"]] = None + """Protocol to use for the proxy connection.""" + + status: Optional[Literal["available", "unavailable"]] = None + """Current health status of the proxy.""" diff --git a/src/kernel/types/proxy_create_params.py b/src/kernel/types/proxy_create_params.py new file mode 100644 index 0000000..0a3536f --- /dev/null +++ b/src/kernel/types/proxy_create_params.py @@ -0,0 +1,182 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import Union +from typing_extensions import Literal, Required, TypeAlias, TypedDict + +__all__ = [ + "ProxyCreateParams", + "Config", + "ConfigDatacenterProxyConfig", + "ConfigIspProxyConfig", + "ConfigResidentialProxyConfig", + "ConfigMobileProxyConfig", + "ConfigCreateCustomProxyConfig", +] + + +class ProxyCreateParams(TypedDict, total=False): + type: Required[Literal["datacenter", "isp", "residential", "mobile", "custom"]] + """Proxy type to use. + + In terms of quality for avoiding bot-detection, from best to worst: `mobile` > + `residential` > `isp` > `datacenter`. + """ + + config: Config + """Configuration specific to the selected proxy `type`.""" + + name: str + """Readable name of the proxy.""" + + protocol: Literal["http", "https"] + """Protocol to use for the proxy connection.""" + + +class ConfigDatacenterProxyConfig(TypedDict, total=False): + """Configuration for a datacenter proxy.""" + + country: str + """ISO 3166 country code. Defaults to US if not provided.""" + + +class ConfigIspProxyConfig(TypedDict, total=False): + """Configuration for an ISP proxy.""" + + country: str + """ISO 3166 country code. Defaults to US if not provided.""" + + +class ConfigResidentialProxyConfig(TypedDict, total=False): + """Configuration for residential proxies.""" + + asn: str + """Autonomous system number. See https://bgp.potaroo.net/cidr/autnums.html""" + + city: str + """City name (no spaces, e.g. + + `sanfrancisco`). If provided, `country` must also be provided. + """ + + country: str + """ISO 3166 country code.""" + + os: Literal["windows", "macos", "android"] + """Operating system of the residential device.""" + + state: str + """Two-letter state code.""" + + zip: str + """US ZIP code.""" + + +class ConfigMobileProxyConfig(TypedDict, total=False): + """Configuration for mobile proxies.""" + + asn: str + """Autonomous system number. See https://bgp.potaroo.net/cidr/autnums.html""" + + carrier: Literal[ + "a1", + "aircel", + "airtel", + "att", + "celcom", + "chinamobile", + "claro", + "comcast", + "cox", + "digi", + "dt", + "docomo", + "dtac", + "etisalat", + "idea", + "kyivstar", + "meo", + "megafon", + "mtn", + "mtnza", + "mts", + "optus", + "orange", + "qwest", + "reliance_jio", + "robi", + "sprint", + "telefonica", + "telstra", + "tmobile", + "tigo", + "tim", + "verizon", + "vimpelcom", + "vodacomza", + "vodafone", + "vivo", + "zain", + "vivabo", + "telenormyanmar", + "kcelljsc", + "swisscom", + "singtel", + "asiacell", + "windit", + "cellc", + "ooredoo", + "drei", + "umobile", + "cableone", + "proximus", + "tele2", + "mobitel", + "o2", + "bouygues", + "free", + "sfr", + "digicel", + ] + """Mobile carrier.""" + + city: str + """City name (no spaces, e.g. + + `sanfrancisco`). If provided, `country` must also be provided. 
+ """ + + country: str + """ISO 3166 country code""" + + state: str + """Two-letter state code.""" + + zip: str + """US ZIP code.""" + + +class ConfigCreateCustomProxyConfig(TypedDict, total=False): + """Configuration for a custom proxy (e.g., private proxy server).""" + + host: Required[str] + """Proxy host address or IP.""" + + port: Required[int] + """Proxy port.""" + + password: str + """Password for proxy authentication.""" + + username: str + """Username for proxy authentication.""" + + +Config: TypeAlias = Union[ + ConfigDatacenterProxyConfig, + ConfigIspProxyConfig, + ConfigResidentialProxyConfig, + ConfigMobileProxyConfig, + ConfigCreateCustomProxyConfig, +] diff --git a/src/kernel/types/proxy_create_response.py b/src/kernel/types/proxy_create_response.py new file mode 100644 index 0000000..dc474ab --- /dev/null +++ b/src/kernel/types/proxy_create_response.py @@ -0,0 +1,195 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Union, Optional +from datetime import datetime +from typing_extensions import Literal, TypeAlias + +from .._models import BaseModel + +__all__ = [ + "ProxyCreateResponse", + "Config", + "ConfigDatacenterProxyConfig", + "ConfigIspProxyConfig", + "ConfigResidentialProxyConfig", + "ConfigMobileProxyConfig", + "ConfigCustomProxyConfig", +] + + +class ConfigDatacenterProxyConfig(BaseModel): + """Configuration for a datacenter proxy.""" + + country: Optional[str] = None + """ISO 3166 country code. Defaults to US if not provided.""" + + +class ConfigIspProxyConfig(BaseModel): + """Configuration for an ISP proxy.""" + + country: Optional[str] = None + """ISO 3166 country code. Defaults to US if not provided.""" + + +class ConfigResidentialProxyConfig(BaseModel): + """Configuration for residential proxies.""" + + asn: Optional[str] = None + """Autonomous system number. See https://bgp.potaroo.net/cidr/autnums.html""" + + city: Optional[str] = None + """City name (no spaces, e.g. + + `sanfrancisco`). If provided, `country` must also be provided. + """ + + country: Optional[str] = None + """ISO 3166 country code.""" + + os: Optional[Literal["windows", "macos", "android"]] = None + """Operating system of the residential device.""" + + state: Optional[str] = None + """Two-letter state code.""" + + zip: Optional[str] = None + """US ZIP code.""" + + +class ConfigMobileProxyConfig(BaseModel): + """Configuration for mobile proxies.""" + + asn: Optional[str] = None + """Autonomous system number. See https://bgp.potaroo.net/cidr/autnums.html""" + + carrier: Optional[ + Literal[ + "a1", + "aircel", + "airtel", + "att", + "celcom", + "chinamobile", + "claro", + "comcast", + "cox", + "digi", + "dt", + "docomo", + "dtac", + "etisalat", + "idea", + "kyivstar", + "meo", + "megafon", + "mtn", + "mtnza", + "mts", + "optus", + "orange", + "qwest", + "reliance_jio", + "robi", + "sprint", + "telefonica", + "telstra", + "tmobile", + "tigo", + "tim", + "verizon", + "vimpelcom", + "vodacomza", + "vodafone", + "vivo", + "zain", + "vivabo", + "telenormyanmar", + "kcelljsc", + "swisscom", + "singtel", + "asiacell", + "windit", + "cellc", + "ooredoo", + "drei", + "umobile", + "cableone", + "proximus", + "tele2", + "mobitel", + "o2", + "bouygues", + "free", + "sfr", + "digicel", + ] + ] = None + """Mobile carrier.""" + + city: Optional[str] = None + """City name (no spaces, e.g. + + `sanfrancisco`). If provided, `country` must also be provided. 
+ """ + + country: Optional[str] = None + """ISO 3166 country code""" + + state: Optional[str] = None + """Two-letter state code.""" + + zip: Optional[str] = None + """US ZIP code.""" + + +class ConfigCustomProxyConfig(BaseModel): + """Configuration for a custom proxy (e.g., private proxy server).""" + + host: str + """Proxy host address or IP.""" + + port: int + """Proxy port.""" + + has_password: Optional[bool] = None + """Whether the proxy has a password.""" + + username: Optional[str] = None + """Username for proxy authentication.""" + + +Config: TypeAlias = Union[ + ConfigDatacenterProxyConfig, + ConfigIspProxyConfig, + ConfigResidentialProxyConfig, + ConfigMobileProxyConfig, + ConfigCustomProxyConfig, +] + + +class ProxyCreateResponse(BaseModel): + """Configuration for routing traffic through a proxy.""" + + type: Literal["datacenter", "isp", "residential", "mobile", "custom"] + """Proxy type to use. + + In terms of quality for avoiding bot-detection, from best to worst: `mobile` > + `residential` > `isp` > `datacenter`. + """ + + id: Optional[str] = None + + config: Optional[Config] = None + """Configuration specific to the selected proxy `type`.""" + + last_checked: Optional[datetime] = None + """Timestamp of the last health check performed on this proxy.""" + + name: Optional[str] = None + """Readable name of the proxy.""" + + protocol: Optional[Literal["http", "https"]] = None + """Protocol to use for the proxy connection.""" + + status: Optional[Literal["available", "unavailable"]] = None + """Current health status of the proxy.""" diff --git a/src/kernel/types/proxy_list_response.py b/src/kernel/types/proxy_list_response.py new file mode 100644 index 0000000..08c846f --- /dev/null +++ b/src/kernel/types/proxy_list_response.py @@ -0,0 +1,199 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Union, Optional +from datetime import datetime +from typing_extensions import Literal, TypeAlias + +from .._models import BaseModel + +__all__ = [ + "ProxyListResponse", + "ProxyListResponseItem", + "ProxyListResponseItemConfig", + "ProxyListResponseItemConfigDatacenterProxyConfig", + "ProxyListResponseItemConfigIspProxyConfig", + "ProxyListResponseItemConfigResidentialProxyConfig", + "ProxyListResponseItemConfigMobileProxyConfig", + "ProxyListResponseItemConfigCustomProxyConfig", +] + + +class ProxyListResponseItemConfigDatacenterProxyConfig(BaseModel): + """Configuration for a datacenter proxy.""" + + country: Optional[str] = None + """ISO 3166 country code. Defaults to US if not provided.""" + + +class ProxyListResponseItemConfigIspProxyConfig(BaseModel): + """Configuration for an ISP proxy.""" + + country: Optional[str] = None + """ISO 3166 country code. Defaults to US if not provided.""" + + +class ProxyListResponseItemConfigResidentialProxyConfig(BaseModel): + """Configuration for residential proxies.""" + + asn: Optional[str] = None + """Autonomous system number. See https://bgp.potaroo.net/cidr/autnums.html""" + + city: Optional[str] = None + """City name (no spaces, e.g. + + `sanfrancisco`). If provided, `country` must also be provided. 
+ """ + + country: Optional[str] = None + """ISO 3166 country code.""" + + os: Optional[Literal["windows", "macos", "android"]] = None + """Operating system of the residential device.""" + + state: Optional[str] = None + """Two-letter state code.""" + + zip: Optional[str] = None + """US ZIP code.""" + + +class ProxyListResponseItemConfigMobileProxyConfig(BaseModel): + """Configuration for mobile proxies.""" + + asn: Optional[str] = None + """Autonomous system number. See https://bgp.potaroo.net/cidr/autnums.html""" + + carrier: Optional[ + Literal[ + "a1", + "aircel", + "airtel", + "att", + "celcom", + "chinamobile", + "claro", + "comcast", + "cox", + "digi", + "dt", + "docomo", + "dtac", + "etisalat", + "idea", + "kyivstar", + "meo", + "megafon", + "mtn", + "mtnza", + "mts", + "optus", + "orange", + "qwest", + "reliance_jio", + "robi", + "sprint", + "telefonica", + "telstra", + "tmobile", + "tigo", + "tim", + "verizon", + "vimpelcom", + "vodacomza", + "vodafone", + "vivo", + "zain", + "vivabo", + "telenormyanmar", + "kcelljsc", + "swisscom", + "singtel", + "asiacell", + "windit", + "cellc", + "ooredoo", + "drei", + "umobile", + "cableone", + "proximus", + "tele2", + "mobitel", + "o2", + "bouygues", + "free", + "sfr", + "digicel", + ] + ] = None + """Mobile carrier.""" + + city: Optional[str] = None + """City name (no spaces, e.g. + + `sanfrancisco`). If provided, `country` must also be provided. + """ + + country: Optional[str] = None + """ISO 3166 country code""" + + state: Optional[str] = None + """Two-letter state code.""" + + zip: Optional[str] = None + """US ZIP code.""" + + +class ProxyListResponseItemConfigCustomProxyConfig(BaseModel): + """Configuration for a custom proxy (e.g., private proxy server).""" + + host: str + """Proxy host address or IP.""" + + port: int + """Proxy port.""" + + has_password: Optional[bool] = None + """Whether the proxy has a password.""" + + username: Optional[str] = None + """Username for proxy authentication.""" + + +ProxyListResponseItemConfig: TypeAlias = Union[ + ProxyListResponseItemConfigDatacenterProxyConfig, + ProxyListResponseItemConfigIspProxyConfig, + ProxyListResponseItemConfigResidentialProxyConfig, + ProxyListResponseItemConfigMobileProxyConfig, + ProxyListResponseItemConfigCustomProxyConfig, +] + + +class ProxyListResponseItem(BaseModel): + """Configuration for routing traffic through a proxy.""" + + type: Literal["datacenter", "isp", "residential", "mobile", "custom"] + """Proxy type to use. + + In terms of quality for avoiding bot-detection, from best to worst: `mobile` > + `residential` > `isp` > `datacenter`. + """ + + id: Optional[str] = None + + config: Optional[ProxyListResponseItemConfig] = None + """Configuration specific to the selected proxy `type`.""" + + last_checked: Optional[datetime] = None + """Timestamp of the last health check performed on this proxy.""" + + name: Optional[str] = None + """Readable name of the proxy.""" + + protocol: Optional[Literal["http", "https"]] = None + """Protocol to use for the proxy connection.""" + + status: Optional[Literal["available", "unavailable"]] = None + """Current health status of the proxy.""" + + +ProxyListResponse: TypeAlias = List[ProxyListResponseItem] diff --git a/src/kernel/types/proxy_retrieve_response.py b/src/kernel/types/proxy_retrieve_response.py new file mode 100644 index 0000000..24c7b96 --- /dev/null +++ b/src/kernel/types/proxy_retrieve_response.py @@ -0,0 +1,195 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import Union, Optional +from datetime import datetime +from typing_extensions import Literal, TypeAlias + +from .._models import BaseModel + +__all__ = [ + "ProxyRetrieveResponse", + "Config", + "ConfigDatacenterProxyConfig", + "ConfigIspProxyConfig", + "ConfigResidentialProxyConfig", + "ConfigMobileProxyConfig", + "ConfigCustomProxyConfig", +] + + +class ConfigDatacenterProxyConfig(BaseModel): + """Configuration for a datacenter proxy.""" + + country: Optional[str] = None + """ISO 3166 country code. Defaults to US if not provided.""" + + +class ConfigIspProxyConfig(BaseModel): + """Configuration for an ISP proxy.""" + + country: Optional[str] = None + """ISO 3166 country code. Defaults to US if not provided.""" + + +class ConfigResidentialProxyConfig(BaseModel): + """Configuration for residential proxies.""" + + asn: Optional[str] = None + """Autonomous system number. See https://bgp.potaroo.net/cidr/autnums.html""" + + city: Optional[str] = None + """City name (no spaces, e.g. + + `sanfrancisco`). If provided, `country` must also be provided. + """ + + country: Optional[str] = None + """ISO 3166 country code.""" + + os: Optional[Literal["windows", "macos", "android"]] = None + """Operating system of the residential device.""" + + state: Optional[str] = None + """Two-letter state code.""" + + zip: Optional[str] = None + """US ZIP code.""" + + +class ConfigMobileProxyConfig(BaseModel): + """Configuration for mobile proxies.""" + + asn: Optional[str] = None + """Autonomous system number. See https://bgp.potaroo.net/cidr/autnums.html""" + + carrier: Optional[ + Literal[ + "a1", + "aircel", + "airtel", + "att", + "celcom", + "chinamobile", + "claro", + "comcast", + "cox", + "digi", + "dt", + "docomo", + "dtac", + "etisalat", + "idea", + "kyivstar", + "meo", + "megafon", + "mtn", + "mtnza", + "mts", + "optus", + "orange", + "qwest", + "reliance_jio", + "robi", + "sprint", + "telefonica", + "telstra", + "tmobile", + "tigo", + "tim", + "verizon", + "vimpelcom", + "vodacomza", + "vodafone", + "vivo", + "zain", + "vivabo", + "telenormyanmar", + "kcelljsc", + "swisscom", + "singtel", + "asiacell", + "windit", + "cellc", + "ooredoo", + "drei", + "umobile", + "cableone", + "proximus", + "tele2", + "mobitel", + "o2", + "bouygues", + "free", + "sfr", + "digicel", + ] + ] = None + """Mobile carrier.""" + + city: Optional[str] = None + """City name (no spaces, e.g. + + `sanfrancisco`). If provided, `country` must also be provided. + """ + + country: Optional[str] = None + """ISO 3166 country code""" + + state: Optional[str] = None + """Two-letter state code.""" + + zip: Optional[str] = None + """US ZIP code.""" + + +class ConfigCustomProxyConfig(BaseModel): + """Configuration for a custom proxy (e.g., private proxy server).""" + + host: str + """Proxy host address or IP.""" + + port: int + """Proxy port.""" + + has_password: Optional[bool] = None + """Whether the proxy has a password.""" + + username: Optional[str] = None + """Username for proxy authentication.""" + + +Config: TypeAlias = Union[ + ConfigDatacenterProxyConfig, + ConfigIspProxyConfig, + ConfigResidentialProxyConfig, + ConfigMobileProxyConfig, + ConfigCustomProxyConfig, +] + + +class ProxyRetrieveResponse(BaseModel): + """Configuration for routing traffic through a proxy.""" + + type: Literal["datacenter", "isp", "residential", "mobile", "custom"] + """Proxy type to use. + + In terms of quality for avoiding bot-detection, from best to worst: `mobile` > + `residential` > `isp` > `datacenter`. 
+ """ + + id: Optional[str] = None + + config: Optional[Config] = None + """Configuration specific to the selected proxy `type`.""" + + last_checked: Optional[datetime] = None + """Timestamp of the last health check performed on this proxy.""" + + name: Optional[str] = None + """Readable name of the proxy.""" + + protocol: Optional[Literal["http", "https"]] = None + """Protocol to use for the proxy connection.""" + + status: Optional[Literal["available", "unavailable"]] = None + """Current health status of the proxy.""" diff --git a/src/kernel/types/shared/__init__.py b/src/kernel/types/shared/__init__.py new file mode 100644 index 0000000..6b64919 --- /dev/null +++ b/src/kernel/types/shared/__init__.py @@ -0,0 +1,11 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from .log_event import LogEvent as LogEvent +from .app_action import AppAction as AppAction +from .error_event import ErrorEvent as ErrorEvent +from .error_model import ErrorModel as ErrorModel +from .error_detail import ErrorDetail as ErrorDetail +from .browser_profile import BrowserProfile as BrowserProfile +from .heartbeat_event import HeartbeatEvent as HeartbeatEvent +from .browser_viewport import BrowserViewport as BrowserViewport +from .browser_extension import BrowserExtension as BrowserExtension diff --git a/src/kernel/types/shared/app_action.py b/src/kernel/types/shared/app_action.py new file mode 100644 index 0000000..1babce1 --- /dev/null +++ b/src/kernel/types/shared/app_action.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from ..._models import BaseModel + +__all__ = ["AppAction"] + + +class AppAction(BaseModel): + """An action available on the app""" + + name: str + """Name of the action""" diff --git a/src/kernel/types/shared/browser_extension.py b/src/kernel/types/shared/browser_extension.py new file mode 100644 index 0000000..a91d2dc --- /dev/null +++ b/src/kernel/types/shared/browser_extension.py @@ -0,0 +1,23 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional + +from ..._models import BaseModel + +__all__ = ["BrowserExtension"] + + +class BrowserExtension(BaseModel): + """Extension selection for the browser session. + + Provide either id or name of an extension uploaded to Kernel. + """ + + id: Optional[str] = None + """Extension ID to load for this browser session""" + + name: Optional[str] = None + """Extension name to load for this browser session (instead of id). + + Must be 1-255 characters, using letters, numbers, dots, underscores, or hyphens. + """ diff --git a/src/kernel/types/shared/browser_profile.py b/src/kernel/types/shared/browser_profile.py new file mode 100644 index 0000000..4aadc31 --- /dev/null +++ b/src/kernel/types/shared/browser_profile.py @@ -0,0 +1,30 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional + +from ..._models import BaseModel + +__all__ = ["BrowserProfile"] + + +class BrowserProfile(BaseModel): + """Profile selection for the browser session. + + Provide either id or name. If specified, the + matching profile will be loaded into the browser session. Profiles must be created beforehand. + """ + + id: Optional[str] = None + """Profile ID to load for this browser session""" + + name: Optional[str] = None + """Profile name to load for this browser session (instead of id). 
+ + Must be 1-255 characters, using letters, numbers, dots, underscores, or hyphens. + """ + + save_changes: Optional[bool] = None + """ + If true, save changes made during the session back to the profile when the + session ends. + """ diff --git a/src/kernel/types/shared/browser_viewport.py b/src/kernel/types/shared/browser_viewport.py new file mode 100644 index 0000000..ab8f427 --- /dev/null +++ b/src/kernel/types/shared/browser_viewport.py @@ -0,0 +1,30 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional + +from ..._models import BaseModel + +__all__ = ["BrowserViewport"] + + +class BrowserViewport(BaseModel): + """Initial browser window size in pixels with optional refresh rate. + + If omitted, image defaults apply (1920x1080@25). + Only specific viewport configurations are supported. The server will reject unsupported combinations. + Supported resolutions are: 2560x1440@10, 1920x1080@25, 1920x1200@25, 1440x900@25, 1024x768@60, 1200x800@60 + If refresh_rate is not provided, it will be automatically determined from the width and height if they match a supported configuration exactly. + Note: Higher resolutions may affect the responsiveness of live view browser + """ + + height: int + """Browser window height in pixels.""" + + width: int + """Browser window width in pixels.""" + + refresh_rate: Optional[int] = None + """Display refresh rate in Hz. + + If omitted, automatically determined from width and height. + """ diff --git a/src/kernel/types/shared/error_detail.py b/src/kernel/types/shared/error_detail.py new file mode 100644 index 0000000..24e655f --- /dev/null +++ b/src/kernel/types/shared/error_detail.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional + +from ..._models import BaseModel + +__all__ = ["ErrorDetail"] + + +class ErrorDetail(BaseModel): + code: Optional[str] = None + """Lower-level error code providing more specific detail""" + + message: Optional[str] = None + """Further detail about the error""" diff --git a/src/kernel/types/shared/error_event.py b/src/kernel/types/shared/error_event.py new file mode 100644 index 0000000..35175f5 --- /dev/null +++ b/src/kernel/types/shared/error_event.py @@ -0,0 +1,21 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from datetime import datetime +from typing_extensions import Literal + +from ..._models import BaseModel +from .error_model import ErrorModel + +__all__ = ["ErrorEvent"] + + +class ErrorEvent(BaseModel): + """An error event from the application.""" + + error: ErrorModel + + event: Literal["error"] + """Event type identifier (always "error").""" + + timestamp: datetime + """Time the error occurred.""" diff --git a/src/kernel/types/shared/error_model.py b/src/kernel/types/shared/error_model.py new file mode 100644 index 0000000..6cb4811 --- /dev/null +++ b/src/kernel/types/shared/error_model.py @@ -0,0 +1,21 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
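# --- Illustrative sketch; not part of the generated SDK files in this diff ---
# Viewport values must match one of the supported combinations listed in the
# BrowserViewport docstring above (e.g. 1920x1080@25 or 1024x768@60).
viewport = {"width": 1920, "height": 1080, "refresh_rate": 25}

# refresh_rate may be omitted when width/height exactly match a supported
# configuration; per the docstring, the server then infers it (here, 25 Hz).
viewport_inferred = {"width": 1920, "height": 1080}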
+ +from typing import List, Optional + +from ..._models import BaseModel +from .error_detail import ErrorDetail + +__all__ = ["ErrorModel"] + + +class ErrorModel(BaseModel): + code: str + """Application-specific error code (machine-readable)""" + + message: str + """Human-readable error description for debugging""" + + details: Optional[List[ErrorDetail]] = None + """Additional error details (for multiple errors)""" + + inner_error: Optional[ErrorDetail] = None diff --git a/src/kernel/types/shared/heartbeat_event.py b/src/kernel/types/shared/heartbeat_event.py new file mode 100644 index 0000000..3745e9b --- /dev/null +++ b/src/kernel/types/shared/heartbeat_event.py @@ -0,0 +1,18 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from datetime import datetime +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["HeartbeatEvent"] + + +class HeartbeatEvent(BaseModel): + """Heartbeat event sent periodically to keep SSE connection alive.""" + + event: Literal["sse_heartbeat"] + """Event type identifier (always "sse_heartbeat").""" + + timestamp: datetime + """Time the heartbeat was sent.""" diff --git a/src/kernel/types/shared/log_event.py b/src/kernel/types/shared/log_event.py new file mode 100644 index 0000000..078b6ec --- /dev/null +++ b/src/kernel/types/shared/log_event.py @@ -0,0 +1,21 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from datetime import datetime +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["LogEvent"] + + +class LogEvent(BaseModel): + """A log entry from the application.""" + + event: Literal["log"] + """Event type identifier (always "log").""" + + message: str + """Log message text.""" + + timestamp: datetime + """Time the log entry was produced.""" diff --git a/src/kernel/types/shared_params/__init__.py b/src/kernel/types/shared_params/__init__.py new file mode 100644 index 0000000..de63c64 --- /dev/null +++ b/src/kernel/types/shared_params/__init__.py @@ -0,0 +1,5 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from .browser_profile import BrowserProfile as BrowserProfile +from .browser_viewport import BrowserViewport as BrowserViewport +from .browser_extension import BrowserExtension as BrowserExtension diff --git a/src/kernel/types/shared_params/browser_extension.py b/src/kernel/types/shared_params/browser_extension.py new file mode 100644 index 0000000..e6c2b8f --- /dev/null +++ b/src/kernel/types/shared_params/browser_extension.py @@ -0,0 +1,23 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["BrowserExtension"] + + +class BrowserExtension(TypedDict, total=False): + """Extension selection for the browser session. + + Provide either id or name of an extension uploaded to Kernel. + """ + + id: str + """Extension ID to load for this browser session""" + + name: str + """Extension name to load for this browser session (instead of id). + + Must be 1-255 characters, using letters, numbers, dots, underscores, or hyphens. + """ diff --git a/src/kernel/types/shared_params/browser_profile.py b/src/kernel/types/shared_params/browser_profile.py new file mode 100644 index 0000000..51187db --- /dev/null +++ b/src/kernel/types/shared_params/browser_profile.py @@ -0,0 +1,30 @@ +# File generated from our OpenAPI spec by Stainless. 
See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["BrowserProfile"] + + +class BrowserProfile(TypedDict, total=False): + """Profile selection for the browser session. + + Provide either id or name. If specified, the + matching profile will be loaded into the browser session. Profiles must be created beforehand. + """ + + id: str + """Profile ID to load for this browser session""" + + name: str + """Profile name to load for this browser session (instead of id). + + Must be 1-255 characters, using letters, numbers, dots, underscores, or hyphens. + """ + + save_changes: bool + """ + If true, save changes made during the session back to the profile when the + session ends. + """ diff --git a/src/kernel/types/shared_params/browser_viewport.py b/src/kernel/types/shared_params/browser_viewport.py new file mode 100644 index 0000000..9236547 --- /dev/null +++ b/src/kernel/types/shared_params/browser_viewport.py @@ -0,0 +1,30 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +__all__ = ["BrowserViewport"] + + +class BrowserViewport(TypedDict, total=False): + """Initial browser window size in pixels with optional refresh rate. + + If omitted, image defaults apply (1920x1080@25). + Only specific viewport configurations are supported. The server will reject unsupported combinations. + Supported resolutions are: 2560x1440@10, 1920x1080@25, 1920x1200@25, 1440x900@25, 1024x768@60, 1200x800@60 + If refresh_rate is not provided, it will be automatically determined from the width and height if they match a supported configuration exactly. + Note: Higher resolutions may affect the responsiveness of live view browser + """ + + height: Required[int] + """Browser window height in pixels.""" + + width: Required[int] + """Browser window width in pixels.""" + + refresh_rate: int + """Display refresh rate in Hz. + + If omitted, automatically determined from width and height. + """ diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..fd8019a --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. diff --git a/tests/api_resources/__init__.py b/tests/api_resources/__init__.py new file mode 100644 index 0000000..fd8019a --- /dev/null +++ b/tests/api_resources/__init__.py @@ -0,0 +1 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. diff --git a/tests/api_resources/agents/__init__.py b/tests/api_resources/agents/__init__.py new file mode 100644 index 0000000..fd8019a --- /dev/null +++ b/tests/api_resources/agents/__init__.py @@ -0,0 +1 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. diff --git a/tests/api_resources/agents/auth/__init__.py b/tests/api_resources/agents/auth/__init__.py new file mode 100644 index 0000000..fd8019a --- /dev/null +++ b/tests/api_resources/agents/auth/__init__.py @@ -0,0 +1 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. diff --git a/tests/api_resources/agents/auth/test_invocations.py b/tests/api_resources/agents/auth/test_invocations.py new file mode 100644 index 0000000..1bae66d --- /dev/null +++ b/tests/api_resources/agents/auth/test_invocations.py @@ -0,0 +1,497 @@ +# File generated from our OpenAPI spec by Stainless. 
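# --- Illustrative sketch; not part of the generated SDK files in this diff ---
# Dispatching on the shared SSE event models defined above. Assumes `event` is
# an already-parsed LogEvent, ErrorEvent, or HeartbeatEvent instance; how such
# events are received from a stream is not shown here.
from typing import Union

from kernel.types.shared import LogEvent, ErrorEvent, HeartbeatEvent


def handle_event(event: Union[LogEvent, ErrorEvent, HeartbeatEvent]) -> None:
    if isinstance(event, LogEvent):
        print(f"[{event.timestamp}] {event.message}")
    elif isinstance(event, ErrorEvent):
        print(f"[{event.timestamp}] {event.error.code}: {event.error.message}")
    else:
        # HeartbeatEvent: keep-alive only, nothing to act on.
        pass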
See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from kernel import Kernel, AsyncKernel +from tests.utils import assert_matches_type +from kernel.types.agents import AgentAuthSubmitResponse, AgentAuthInvocationResponse, AuthAgentInvocationCreateResponse +from kernel.types.agents.auth import ( + InvocationExchangeResponse, +) + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestInvocations: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_create(self, client: Kernel) -> None: + invocation = client.agents.auth.invocations.create( + auth_agent_id="abc123xyz", + ) + assert_matches_type(AuthAgentInvocationCreateResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_create_with_all_params(self, client: Kernel) -> None: + invocation = client.agents.auth.invocations.create( + auth_agent_id="abc123xyz", + save_credential_as="my-netflix-login", + ) + assert_matches_type(AuthAgentInvocationCreateResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_create(self, client: Kernel) -> None: + response = client.agents.auth.invocations.with_raw_response.create( + auth_agent_id="abc123xyz", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + invocation = response.parse() + assert_matches_type(AuthAgentInvocationCreateResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_create(self, client: Kernel) -> None: + with client.agents.auth.invocations.with_streaming_response.create( + auth_agent_id="abc123xyz", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + invocation = response.parse() + assert_matches_type(AuthAgentInvocationCreateResponse, invocation, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_retrieve(self, client: Kernel) -> None: + invocation = client.agents.auth.invocations.retrieve( + "invocation_id", + ) + assert_matches_type(AgentAuthInvocationResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_retrieve(self, client: Kernel) -> None: + response = client.agents.auth.invocations.with_raw_response.retrieve( + "invocation_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + invocation = response.parse() + assert_matches_type(AgentAuthInvocationResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_retrieve(self, client: Kernel) -> None: + with client.agents.auth.invocations.with_streaming_response.retrieve( + "invocation_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + invocation = response.parse() + assert_matches_type(AgentAuthInvocationResponse, invocation, path=["response"]) + + assert cast(Any, response.is_closed) is 
True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_retrieve(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `invocation_id` but received ''"): + client.agents.auth.invocations.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_exchange(self, client: Kernel) -> None: + invocation = client.agents.auth.invocations.exchange( + invocation_id="invocation_id", + code="abc123xyz", + ) + assert_matches_type(InvocationExchangeResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_exchange(self, client: Kernel) -> None: + response = client.agents.auth.invocations.with_raw_response.exchange( + invocation_id="invocation_id", + code="abc123xyz", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + invocation = response.parse() + assert_matches_type(InvocationExchangeResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_exchange(self, client: Kernel) -> None: + with client.agents.auth.invocations.with_streaming_response.exchange( + invocation_id="invocation_id", + code="abc123xyz", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + invocation = response.parse() + assert_matches_type(InvocationExchangeResponse, invocation, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_exchange(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `invocation_id` but received ''"): + client.agents.auth.invocations.with_raw_response.exchange( + invocation_id="", + code="abc123xyz", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_submit_overload_1(self, client: Kernel) -> None: + invocation = client.agents.auth.invocations.submit( + invocation_id="invocation_id", + field_values={ + "email": "user@example.com", + "password": "********", + }, + ) + assert_matches_type(AgentAuthSubmitResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_submit_overload_1(self, client: Kernel) -> None: + response = client.agents.auth.invocations.with_raw_response.submit( + invocation_id="invocation_id", + field_values={ + "email": "user@example.com", + "password": "********", + }, + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + invocation = response.parse() + assert_matches_type(AgentAuthSubmitResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_submit_overload_1(self, client: Kernel) -> None: + with client.agents.auth.invocations.with_streaming_response.submit( + invocation_id="invocation_id", + field_values={ + "email": "user@example.com", + "password": "********", + }, + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + invocation = response.parse() + assert_matches_type(AgentAuthSubmitResponse, invocation, path=["response"]) + + 
assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_submit_overload_1(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `invocation_id` but received ''"): + client.agents.auth.invocations.with_raw_response.submit( + invocation_id="", + field_values={ + "email": "user@example.com", + "password": "********", + }, + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_submit_overload_2(self, client: Kernel) -> None: + invocation = client.agents.auth.invocations.submit( + invocation_id="invocation_id", + sso_button="xpath=//button[contains(text(), 'Continue with Google')]", + ) + assert_matches_type(AgentAuthSubmitResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_submit_overload_2(self, client: Kernel) -> None: + response = client.agents.auth.invocations.with_raw_response.submit( + invocation_id="invocation_id", + sso_button="xpath=//button[contains(text(), 'Continue with Google')]", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + invocation = response.parse() + assert_matches_type(AgentAuthSubmitResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_submit_overload_2(self, client: Kernel) -> None: + with client.agents.auth.invocations.with_streaming_response.submit( + invocation_id="invocation_id", + sso_button="xpath=//button[contains(text(), 'Continue with Google')]", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + invocation = response.parse() + assert_matches_type(AgentAuthSubmitResponse, invocation, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_submit_overload_2(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `invocation_id` but received ''"): + client.agents.auth.invocations.with_raw_response.submit( + invocation_id="", + sso_button="xpath=//button[contains(text(), 'Continue with Google')]", + ) + + +class TestAsyncInvocations: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_create(self, async_client: AsyncKernel) -> None: + invocation = await async_client.agents.auth.invocations.create( + auth_agent_id="abc123xyz", + ) + assert_matches_type(AuthAgentInvocationCreateResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_create_with_all_params(self, async_client: AsyncKernel) -> None: + invocation = await async_client.agents.auth.invocations.create( + auth_agent_id="abc123xyz", + save_credential_as="my-netflix-login", + ) + assert_matches_type(AuthAgentInvocationCreateResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_create(self, async_client: AsyncKernel) -> None: + response = await async_client.agents.auth.invocations.with_raw_response.create( + 
auth_agent_id="abc123xyz", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + invocation = await response.parse() + assert_matches_type(AuthAgentInvocationCreateResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_create(self, async_client: AsyncKernel) -> None: + async with async_client.agents.auth.invocations.with_streaming_response.create( + auth_agent_id="abc123xyz", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + invocation = await response.parse() + assert_matches_type(AuthAgentInvocationCreateResponse, invocation, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_retrieve(self, async_client: AsyncKernel) -> None: + invocation = await async_client.agents.auth.invocations.retrieve( + "invocation_id", + ) + assert_matches_type(AgentAuthInvocationResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncKernel) -> None: + response = await async_client.agents.auth.invocations.with_raw_response.retrieve( + "invocation_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + invocation = await response.parse() + assert_matches_type(AgentAuthInvocationResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncKernel) -> None: + async with async_client.agents.auth.invocations.with_streaming_response.retrieve( + "invocation_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + invocation = await response.parse() + assert_matches_type(AgentAuthInvocationResponse, invocation, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_retrieve(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `invocation_id` but received ''"): + await async_client.agents.auth.invocations.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_exchange(self, async_client: AsyncKernel) -> None: + invocation = await async_client.agents.auth.invocations.exchange( + invocation_id="invocation_id", + code="abc123xyz", + ) + assert_matches_type(InvocationExchangeResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_exchange(self, async_client: AsyncKernel) -> None: + response = await async_client.agents.auth.invocations.with_raw_response.exchange( + invocation_id="invocation_id", + code="abc123xyz", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + invocation = await response.parse() + assert_matches_type(InvocationExchangeResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_exchange(self, 
async_client: AsyncKernel) -> None: + async with async_client.agents.auth.invocations.with_streaming_response.exchange( + invocation_id="invocation_id", + code="abc123xyz", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + invocation = await response.parse() + assert_matches_type(InvocationExchangeResponse, invocation, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_exchange(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `invocation_id` but received ''"): + await async_client.agents.auth.invocations.with_raw_response.exchange( + invocation_id="", + code="abc123xyz", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_submit_overload_1(self, async_client: AsyncKernel) -> None: + invocation = await async_client.agents.auth.invocations.submit( + invocation_id="invocation_id", + field_values={ + "email": "user@example.com", + "password": "********", + }, + ) + assert_matches_type(AgentAuthSubmitResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_submit_overload_1(self, async_client: AsyncKernel) -> None: + response = await async_client.agents.auth.invocations.with_raw_response.submit( + invocation_id="invocation_id", + field_values={ + "email": "user@example.com", + "password": "********", + }, + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + invocation = await response.parse() + assert_matches_type(AgentAuthSubmitResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_submit_overload_1(self, async_client: AsyncKernel) -> None: + async with async_client.agents.auth.invocations.with_streaming_response.submit( + invocation_id="invocation_id", + field_values={ + "email": "user@example.com", + "password": "********", + }, + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + invocation = await response.parse() + assert_matches_type(AgentAuthSubmitResponse, invocation, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_submit_overload_1(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `invocation_id` but received ''"): + await async_client.agents.auth.invocations.with_raw_response.submit( + invocation_id="", + field_values={ + "email": "user@example.com", + "password": "********", + }, + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_submit_overload_2(self, async_client: AsyncKernel) -> None: + invocation = await async_client.agents.auth.invocations.submit( + invocation_id="invocation_id", + sso_button="xpath=//button[contains(text(), 'Continue with Google')]", + ) + assert_matches_type(AgentAuthSubmitResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_submit_overload_2(self, async_client: AsyncKernel) -> None: + response = await 
async_client.agents.auth.invocations.with_raw_response.submit( + invocation_id="invocation_id", + sso_button="xpath=//button[contains(text(), 'Continue with Google')]", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + invocation = await response.parse() + assert_matches_type(AgentAuthSubmitResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_submit_overload_2(self, async_client: AsyncKernel) -> None: + async with async_client.agents.auth.invocations.with_streaming_response.submit( + invocation_id="invocation_id", + sso_button="xpath=//button[contains(text(), 'Continue with Google')]", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + invocation = await response.parse() + assert_matches_type(AgentAuthSubmitResponse, invocation, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_submit_overload_2(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `invocation_id` but received ''"): + await async_client.agents.auth.invocations.with_raw_response.submit( + invocation_id="", + sso_button="xpath=//button[contains(text(), 'Continue with Google')]", + ) diff --git a/tests/api_resources/agents/test_auth.py b/tests/api_resources/agents/test_auth.py new file mode 100644 index 0000000..9855ef8 --- /dev/null +++ b/tests/api_resources/agents/test_auth.py @@ -0,0 +1,371 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
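# --- Illustrative sketch; not part of the generated SDK files in this diff ---
# The auth-invocation endpoints exercised by the tests above, outside the test
# harness. Assumes `Kernel()` picks up credentials from the environment; the
# ids below are placeholders copied from the tests, and no attributes are read
# from the response models since their fields are not shown in this diff.
from kernel import Kernel

client = Kernel()

client.agents.auth.invocations.create(
    auth_agent_id="abc123xyz",
    save_credential_as="my-netflix-login",
)

client.agents.auth.invocations.retrieve("invocation_id")

# Either submit credential field values...
client.agents.auth.invocations.submit(
    invocation_id="invocation_id",
    field_values={"email": "user@example.com", "password": "********"},
)

# ...or point the agent at an SSO button instead.
client.agents.auth.invocations.submit(
    invocation_id="invocation_id",
    sso_button="xpath=//button[contains(text(), 'Continue with Google')]",
)

client.agents.auth.invocations.exchange(invocation_id="invocation_id", code="abc123xyz")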
+ +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from kernel import Kernel, AsyncKernel +from tests.utils import assert_matches_type +from kernel.pagination import SyncOffsetPagination, AsyncOffsetPagination +from kernel.types.agents import AuthAgent + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestAuth: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_create(self, client: Kernel) -> None: + auth = client.agents.auth.create( + domain="netflix.com", + profile_name="user-123", + ) + assert_matches_type(AuthAgent, auth, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_create_with_all_params(self, client: Kernel) -> None: + auth = client.agents.auth.create( + domain="netflix.com", + profile_name="user-123", + allowed_domains=["login.netflix.com", "auth.netflix.com"], + credential_name="my-netflix-login", + login_url="https://netflix.com/login", + proxy={"proxy_id": "proxy_id"}, + ) + assert_matches_type(AuthAgent, auth, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_create(self, client: Kernel) -> None: + response = client.agents.auth.with_raw_response.create( + domain="netflix.com", + profile_name="user-123", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + auth = response.parse() + assert_matches_type(AuthAgent, auth, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_create(self, client: Kernel) -> None: + with client.agents.auth.with_streaming_response.create( + domain="netflix.com", + profile_name="user-123", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + auth = response.parse() + assert_matches_type(AuthAgent, auth, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_retrieve(self, client: Kernel) -> None: + auth = client.agents.auth.retrieve( + "id", + ) + assert_matches_type(AuthAgent, auth, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_retrieve(self, client: Kernel) -> None: + response = client.agents.auth.with_raw_response.retrieve( + "id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + auth = response.parse() + assert_matches_type(AuthAgent, auth, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_retrieve(self, client: Kernel) -> None: + with client.agents.auth.with_streaming_response.retrieve( + "id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + auth = response.parse() + assert_matches_type(AuthAgent, auth, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_retrieve(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + 
client.agents.auth.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list(self, client: Kernel) -> None: + auth = client.agents.auth.list() + assert_matches_type(SyncOffsetPagination[AuthAgent], auth, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list_with_all_params(self, client: Kernel) -> None: + auth = client.agents.auth.list( + domain="domain", + limit=100, + offset=0, + profile_name="profile_name", + ) + assert_matches_type(SyncOffsetPagination[AuthAgent], auth, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_list(self, client: Kernel) -> None: + response = client.agents.auth.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + auth = response.parse() + assert_matches_type(SyncOffsetPagination[AuthAgent], auth, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_list(self, client: Kernel) -> None: + with client.agents.auth.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + auth = response.parse() + assert_matches_type(SyncOffsetPagination[AuthAgent], auth, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_delete(self, client: Kernel) -> None: + auth = client.agents.auth.delete( + "id", + ) + assert auth is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_delete(self, client: Kernel) -> None: + response = client.agents.auth.with_raw_response.delete( + "id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + auth = response.parse() + assert auth is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_delete(self, client: Kernel) -> None: + with client.agents.auth.with_streaming_response.delete( + "id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + auth = response.parse() + assert auth is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_delete(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.agents.auth.with_raw_response.delete( + "", + ) + + +class TestAsyncAuth: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_create(self, async_client: AsyncKernel) -> None: + auth = await async_client.agents.auth.create( + domain="netflix.com", + profile_name="user-123", + ) + assert_matches_type(AuthAgent, auth, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_create_with_all_params(self, async_client: AsyncKernel) -> None: + auth = await async_client.agents.auth.create( + domain="netflix.com", + profile_name="user-123", + 
allowed_domains=["login.netflix.com", "auth.netflix.com"], + credential_name="my-netflix-login", + login_url="https://netflix.com/login", + proxy={"proxy_id": "proxy_id"}, + ) + assert_matches_type(AuthAgent, auth, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_create(self, async_client: AsyncKernel) -> None: + response = await async_client.agents.auth.with_raw_response.create( + domain="netflix.com", + profile_name="user-123", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + auth = await response.parse() + assert_matches_type(AuthAgent, auth, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_create(self, async_client: AsyncKernel) -> None: + async with async_client.agents.auth.with_streaming_response.create( + domain="netflix.com", + profile_name="user-123", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + auth = await response.parse() + assert_matches_type(AuthAgent, auth, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_retrieve(self, async_client: AsyncKernel) -> None: + auth = await async_client.agents.auth.retrieve( + "id", + ) + assert_matches_type(AuthAgent, auth, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncKernel) -> None: + response = await async_client.agents.auth.with_raw_response.retrieve( + "id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + auth = await response.parse() + assert_matches_type(AuthAgent, auth, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncKernel) -> None: + async with async_client.agents.auth.with_streaming_response.retrieve( + "id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + auth = await response.parse() + assert_matches_type(AuthAgent, auth, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_retrieve(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.agents.auth.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list(self, async_client: AsyncKernel) -> None: + auth = await async_client.agents.auth.list() + assert_matches_type(AsyncOffsetPagination[AuthAgent], auth, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list_with_all_params(self, async_client: AsyncKernel) -> None: + auth = await async_client.agents.auth.list( + domain="domain", + limit=100, + offset=0, + profile_name="profile_name", + ) + assert_matches_type(AsyncOffsetPagination[AuthAgent], auth, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_list(self, async_client: 
AsyncKernel) -> None: + response = await async_client.agents.auth.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + auth = await response.parse() + assert_matches_type(AsyncOffsetPagination[AuthAgent], auth, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_list(self, async_client: AsyncKernel) -> None: + async with async_client.agents.auth.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + auth = await response.parse() + assert_matches_type(AsyncOffsetPagination[AuthAgent], auth, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_delete(self, async_client: AsyncKernel) -> None: + auth = await async_client.agents.auth.delete( + "id", + ) + assert auth is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_delete(self, async_client: AsyncKernel) -> None: + response = await async_client.agents.auth.with_raw_response.delete( + "id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + auth = await response.parse() + assert auth is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_delete(self, async_client: AsyncKernel) -> None: + async with async_client.agents.auth.with_streaming_response.delete( + "id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + auth = await response.parse() + assert auth is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_delete(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.agents.auth.with_raw_response.delete( + "", + ) diff --git a/tests/api_resources/browsers/__init__.py b/tests/api_resources/browsers/__init__.py new file mode 100644 index 0000000..fd8019a --- /dev/null +++ b/tests/api_resources/browsers/__init__.py @@ -0,0 +1 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. diff --git a/tests/api_resources/browsers/fs/__init__.py b/tests/api_resources/browsers/fs/__init__.py new file mode 100644 index 0000000..fd8019a --- /dev/null +++ b/tests/api_resources/browsers/fs/__init__.py @@ -0,0 +1 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. diff --git a/tests/api_resources/browsers/fs/test_watch.py b/tests/api_resources/browsers/fs/test_watch.py new file mode 100644 index 0000000..683e154 --- /dev/null +++ b/tests/api_resources/browsers/fs/test_watch.py @@ -0,0 +1,342 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from kernel import Kernel, AsyncKernel +from tests.utils import assert_matches_type +from kernel.types.browsers.fs import WatchStartResponse + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestWatch: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_method_events(self, client: Kernel) -> None: + watch_stream = client.browsers.fs.watch.events( + watch_id="watch_id", + id="id", + ) + watch_stream.response.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_raw_response_events(self, client: Kernel) -> None: + response = client.browsers.fs.watch.with_raw_response.events( + watch_id="watch_id", + id="id", + ) + + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + stream = response.parse() + stream.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_streaming_response_events(self, client: Kernel) -> None: + with client.browsers.fs.watch.with_streaming_response.events( + watch_id="watch_id", + id="id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + stream = response.parse() + stream.close() + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_path_params_events(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.fs.watch.with_raw_response.events( + watch_id="watch_id", + id="", + ) + + with pytest.raises(ValueError, match=r"Expected a non-empty value for `watch_id` but received ''"): + client.browsers.fs.watch.with_raw_response.events( + watch_id="", + id="id", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_start(self, client: Kernel) -> None: + watch = client.browsers.fs.watch.start( + id="id", + path="path", + ) + assert_matches_type(WatchStartResponse, watch, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_start_with_all_params(self, client: Kernel) -> None: + watch = client.browsers.fs.watch.start( + id="id", + path="path", + recursive=True, + ) + assert_matches_type(WatchStartResponse, watch, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_start(self, client: Kernel) -> None: + response = client.browsers.fs.watch.with_raw_response.start( + id="id", + path="path", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + watch = response.parse() + assert_matches_type(WatchStartResponse, watch, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_start(self, client: Kernel) -> None: + with client.browsers.fs.watch.with_streaming_response.start( + id="id", + path="path", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + watch = response.parse() + assert_matches_type(WatchStartResponse, watch, 
path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_start(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.fs.watch.with_raw_response.start( + id="", + path="path", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_stop(self, client: Kernel) -> None: + watch = client.browsers.fs.watch.stop( + watch_id="watch_id", + id="id", + ) + assert watch is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_stop(self, client: Kernel) -> None: + response = client.browsers.fs.watch.with_raw_response.stop( + watch_id="watch_id", + id="id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + watch = response.parse() + assert watch is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_stop(self, client: Kernel) -> None: + with client.browsers.fs.watch.with_streaming_response.stop( + watch_id="watch_id", + id="id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + watch = response.parse() + assert watch is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_stop(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.fs.watch.with_raw_response.stop( + watch_id="watch_id", + id="", + ) + + with pytest.raises(ValueError, match=r"Expected a non-empty value for `watch_id` but received ''"): + client.browsers.fs.watch.with_raw_response.stop( + watch_id="", + id="id", + ) + + +class TestAsyncWatch: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_method_events(self, async_client: AsyncKernel) -> None: + watch_stream = await async_client.browsers.fs.watch.events( + watch_id="watch_id", + id="id", + ) + await watch_stream.response.aclose() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_raw_response_events(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.fs.watch.with_raw_response.events( + watch_id="watch_id", + id="id", + ) + + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + stream = await response.parse() + await stream.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_streaming_response_events(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.fs.watch.with_streaming_response.events( + watch_id="watch_id", + id="id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + stream = await response.parse() + await stream.close() + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_path_params_events(self, async_client: 
AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.fs.watch.with_raw_response.events( + watch_id="watch_id", + id="", + ) + + with pytest.raises(ValueError, match=r"Expected a non-empty value for `watch_id` but received ''"): + await async_client.browsers.fs.watch.with_raw_response.events( + watch_id="", + id="id", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_start(self, async_client: AsyncKernel) -> None: + watch = await async_client.browsers.fs.watch.start( + id="id", + path="path", + ) + assert_matches_type(WatchStartResponse, watch, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_start_with_all_params(self, async_client: AsyncKernel) -> None: + watch = await async_client.browsers.fs.watch.start( + id="id", + path="path", + recursive=True, + ) + assert_matches_type(WatchStartResponse, watch, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_start(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.fs.watch.with_raw_response.start( + id="id", + path="path", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + watch = await response.parse() + assert_matches_type(WatchStartResponse, watch, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_start(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.fs.watch.with_streaming_response.start( + id="id", + path="path", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + watch = await response.parse() + assert_matches_type(WatchStartResponse, watch, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_start(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.fs.watch.with_raw_response.start( + id="", + path="path", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_stop(self, async_client: AsyncKernel) -> None: + watch = await async_client.browsers.fs.watch.stop( + watch_id="watch_id", + id="id", + ) + assert watch is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_stop(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.fs.watch.with_raw_response.stop( + watch_id="watch_id", + id="id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + watch = await response.parse() + assert watch is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_stop(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.fs.watch.with_streaming_response.stop( + watch_id="watch_id", + id="id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + watch = await response.parse() + assert watch is None + + assert cast(Any, 
response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_stop(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.fs.watch.with_raw_response.stop( + watch_id="watch_id", + id="", + ) + + with pytest.raises(ValueError, match=r"Expected a non-empty value for `watch_id` but received ''"): + await async_client.browsers.fs.watch.with_raw_response.stop( + watch_id="", + id="id", + ) diff --git a/tests/api_resources/browsers/test_computer.py b/tests/api_resources/browsers/test_computer.py new file mode 100644 index 0000000..7634b89 --- /dev/null +++ b/tests/api_resources/browsers/test_computer.py @@ -0,0 +1,988 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import httpx +import pytest +from respx import MockRouter + +from kernel import Kernel, AsyncKernel +from tests.utils import assert_matches_type +from kernel._response import ( + BinaryAPIResponse, + AsyncBinaryAPIResponse, + StreamedBinaryAPIResponse, + AsyncStreamedBinaryAPIResponse, +) +from kernel.types.browsers import ( + ComputerSetCursorVisibilityResponse, +) + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestComputer: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_method_capture_screenshot(self, client: Kernel, respx_mock: MockRouter) -> None: + respx_mock.post("/browsers/id/computer/screenshot").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + computer = client.browsers.computer.capture_screenshot( + id="id", + ) + assert computer.is_closed + assert computer.json() == {"foo": "bar"} + assert cast(Any, computer.is_closed) is True + assert isinstance(computer, BinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_method_capture_screenshot_with_all_params(self, client: Kernel, respx_mock: MockRouter) -> None: + respx_mock.post("/browsers/id/computer/screenshot").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + computer = client.browsers.computer.capture_screenshot( + id="id", + region={ + "height": 0, + "width": 0, + "x": 0, + "y": 0, + }, + ) + assert computer.is_closed + assert computer.json() == {"foo": "bar"} + assert cast(Any, computer.is_closed) is True + assert isinstance(computer, BinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_raw_response_capture_screenshot(self, client: Kernel, respx_mock: MockRouter) -> None: + respx_mock.post("/browsers/id/computer/screenshot").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + computer = client.browsers.computer.with_raw_response.capture_screenshot( + id="id", + ) + + assert computer.is_closed is True + assert computer.http_request.headers.get("X-Stainless-Lang") == "python" + assert computer.json() == {"foo": "bar"} + assert isinstance(computer, BinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_streaming_response_capture_screenshot(self, client: Kernel, respx_mock: MockRouter) -> None: + respx_mock.post("/browsers/id/computer/screenshot").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + with client.browsers.computer.with_streaming_response.capture_screenshot( + 
id="id", + ) as computer: + assert not computer.is_closed + assert computer.http_request.headers.get("X-Stainless-Lang") == "python" + + assert computer.json() == {"foo": "bar"} + assert cast(Any, computer.is_closed) is True + assert isinstance(computer, StreamedBinaryAPIResponse) + + assert cast(Any, computer.is_closed) is True + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_path_params_capture_screenshot(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.computer.with_raw_response.capture_screenshot( + id="", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_click_mouse(self, client: Kernel) -> None: + computer = client.browsers.computer.click_mouse( + id="id", + x=0, + y=0, + ) + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_click_mouse_with_all_params(self, client: Kernel) -> None: + computer = client.browsers.computer.click_mouse( + id="id", + x=0, + y=0, + button="left", + click_type="down", + hold_keys=["string"], + num_clicks=0, + ) + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_click_mouse(self, client: Kernel) -> None: + response = client.browsers.computer.with_raw_response.click_mouse( + id="id", + x=0, + y=0, + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + computer = response.parse() + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_click_mouse(self, client: Kernel) -> None: + with client.browsers.computer.with_streaming_response.click_mouse( + id="id", + x=0, + y=0, + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + computer = response.parse() + assert computer is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_click_mouse(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.computer.with_raw_response.click_mouse( + id="", + x=0, + y=0, + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_drag_mouse(self, client: Kernel) -> None: + computer = client.browsers.computer.drag_mouse( + id="id", + path=[[0, 0], [0, 0]], + ) + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_drag_mouse_with_all_params(self, client: Kernel) -> None: + computer = client.browsers.computer.drag_mouse( + id="id", + path=[[0, 0], [0, 0]], + button="left", + delay=0, + hold_keys=["string"], + step_delay_ms=0, + steps_per_segment=1, + ) + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_drag_mouse(self, client: Kernel) -> None: + response = client.browsers.computer.with_raw_response.drag_mouse( + id="id", + path=[[0, 0], [0, 0]], + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + computer = response.parse() + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_drag_mouse(self, client: 
Kernel) -> None: + with client.browsers.computer.with_streaming_response.drag_mouse( + id="id", + path=[[0, 0], [0, 0]], + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + computer = response.parse() + assert computer is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_drag_mouse(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.computer.with_raw_response.drag_mouse( + id="", + path=[[0, 0], [0, 0]], + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_move_mouse(self, client: Kernel) -> None: + computer = client.browsers.computer.move_mouse( + id="id", + x=0, + y=0, + ) + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_move_mouse_with_all_params(self, client: Kernel) -> None: + computer = client.browsers.computer.move_mouse( + id="id", + x=0, + y=0, + hold_keys=["string"], + ) + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_move_mouse(self, client: Kernel) -> None: + response = client.browsers.computer.with_raw_response.move_mouse( + id="id", + x=0, + y=0, + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + computer = response.parse() + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_move_mouse(self, client: Kernel) -> None: + with client.browsers.computer.with_streaming_response.move_mouse( + id="id", + x=0, + y=0, + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + computer = response.parse() + assert computer is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_move_mouse(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.computer.with_raw_response.move_mouse( + id="", + x=0, + y=0, + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_press_key(self, client: Kernel) -> None: + computer = client.browsers.computer.press_key( + id="id", + keys=["string"], + ) + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_press_key_with_all_params(self, client: Kernel) -> None: + computer = client.browsers.computer.press_key( + id="id", + keys=["string"], + duration=0, + hold_keys=["string"], + ) + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_press_key(self, client: Kernel) -> None: + response = client.browsers.computer.with_raw_response.press_key( + id="id", + keys=["string"], + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + computer = response.parse() + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_press_key(self, client: Kernel) -> None: + with client.browsers.computer.with_streaming_response.press_key( + id="id", + 
keys=["string"], + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + computer = response.parse() + assert computer is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_press_key(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.computer.with_raw_response.press_key( + id="", + keys=["string"], + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_scroll(self, client: Kernel) -> None: + computer = client.browsers.computer.scroll( + id="id", + x=0, + y=0, + ) + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_scroll_with_all_params(self, client: Kernel) -> None: + computer = client.browsers.computer.scroll( + id="id", + x=0, + y=0, + delta_x=0, + delta_y=0, + hold_keys=["string"], + ) + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_scroll(self, client: Kernel) -> None: + response = client.browsers.computer.with_raw_response.scroll( + id="id", + x=0, + y=0, + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + computer = response.parse() + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_scroll(self, client: Kernel) -> None: + with client.browsers.computer.with_streaming_response.scroll( + id="id", + x=0, + y=0, + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + computer = response.parse() + assert computer is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_scroll(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.computer.with_raw_response.scroll( + id="", + x=0, + y=0, + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_set_cursor_visibility(self, client: Kernel) -> None: + computer = client.browsers.computer.set_cursor_visibility( + id="id", + hidden=True, + ) + assert_matches_type(ComputerSetCursorVisibilityResponse, computer, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_set_cursor_visibility(self, client: Kernel) -> None: + response = client.browsers.computer.with_raw_response.set_cursor_visibility( + id="id", + hidden=True, + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + computer = response.parse() + assert_matches_type(ComputerSetCursorVisibilityResponse, computer, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_set_cursor_visibility(self, client: Kernel) -> None: + with client.browsers.computer.with_streaming_response.set_cursor_visibility( + id="id", + hidden=True, + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + computer = response.parse() + assert_matches_type(ComputerSetCursorVisibilityResponse, computer, 
path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_set_cursor_visibility(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.computer.with_raw_response.set_cursor_visibility( + id="", + hidden=True, + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_type_text(self, client: Kernel) -> None: + computer = client.browsers.computer.type_text( + id="id", + text="text", + ) + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_type_text_with_all_params(self, client: Kernel) -> None: + computer = client.browsers.computer.type_text( + id="id", + text="text", + delay=0, + ) + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_type_text(self, client: Kernel) -> None: + response = client.browsers.computer.with_raw_response.type_text( + id="id", + text="text", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + computer = response.parse() + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_type_text(self, client: Kernel) -> None: + with client.browsers.computer.with_streaming_response.type_text( + id="id", + text="text", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + computer = response.parse() + assert computer is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_type_text(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.computer.with_raw_response.type_text( + id="", + text="text", + ) + + +class TestAsyncComputer: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def test_method_capture_screenshot(self, async_client: AsyncKernel, respx_mock: MockRouter) -> None: + respx_mock.post("/browsers/id/computer/screenshot").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + computer = await async_client.browsers.computer.capture_screenshot( + id="id", + ) + assert computer.is_closed + assert await computer.json() == {"foo": "bar"} + assert cast(Any, computer.is_closed) is True + assert isinstance(computer, AsyncBinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def test_method_capture_screenshot_with_all_params( + self, async_client: AsyncKernel, respx_mock: MockRouter + ) -> None: + respx_mock.post("/browsers/id/computer/screenshot").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + computer = await async_client.browsers.computer.capture_screenshot( + id="id", + region={ + "height": 0, + "width": 0, + "x": 0, + "y": 0, + }, + ) + assert computer.is_closed + assert await computer.json() == {"foo": "bar"} + assert cast(Any, computer.is_closed) is True + assert isinstance(computer, AsyncBinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def 
test_raw_response_capture_screenshot(self, async_client: AsyncKernel, respx_mock: MockRouter) -> None: + respx_mock.post("/browsers/id/computer/screenshot").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + computer = await async_client.browsers.computer.with_raw_response.capture_screenshot( + id="id", + ) + + assert computer.is_closed is True + assert computer.http_request.headers.get("X-Stainless-Lang") == "python" + assert await computer.json() == {"foo": "bar"} + assert isinstance(computer, AsyncBinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def test_streaming_response_capture_screenshot( + self, async_client: AsyncKernel, respx_mock: MockRouter + ) -> None: + respx_mock.post("/browsers/id/computer/screenshot").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + async with async_client.browsers.computer.with_streaming_response.capture_screenshot( + id="id", + ) as computer: + assert not computer.is_closed + assert computer.http_request.headers.get("X-Stainless-Lang") == "python" + + assert await computer.json() == {"foo": "bar"} + assert cast(Any, computer.is_closed) is True + assert isinstance(computer, AsyncStreamedBinaryAPIResponse) + + assert cast(Any, computer.is_closed) is True + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def test_path_params_capture_screenshot(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.computer.with_raw_response.capture_screenshot( + id="", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_click_mouse(self, async_client: AsyncKernel) -> None: + computer = await async_client.browsers.computer.click_mouse( + id="id", + x=0, + y=0, + ) + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_click_mouse_with_all_params(self, async_client: AsyncKernel) -> None: + computer = await async_client.browsers.computer.click_mouse( + id="id", + x=0, + y=0, + button="left", + click_type="down", + hold_keys=["string"], + num_clicks=0, + ) + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_click_mouse(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.computer.with_raw_response.click_mouse( + id="id", + x=0, + y=0, + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + computer = await response.parse() + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_click_mouse(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.computer.with_streaming_response.click_mouse( + id="id", + x=0, + y=0, + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + computer = await response.parse() + assert computer is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_click_mouse(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.computer.with_raw_response.click_mouse( + id="", + x=0, + y=0, + ) + + 
@pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_drag_mouse(self, async_client: AsyncKernel) -> None: + computer = await async_client.browsers.computer.drag_mouse( + id="id", + path=[[0, 0], [0, 0]], + ) + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_drag_mouse_with_all_params(self, async_client: AsyncKernel) -> None: + computer = await async_client.browsers.computer.drag_mouse( + id="id", + path=[[0, 0], [0, 0]], + button="left", + delay=0, + hold_keys=["string"], + step_delay_ms=0, + steps_per_segment=1, + ) + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_drag_mouse(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.computer.with_raw_response.drag_mouse( + id="id", + path=[[0, 0], [0, 0]], + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + computer = await response.parse() + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_drag_mouse(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.computer.with_streaming_response.drag_mouse( + id="id", + path=[[0, 0], [0, 0]], + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + computer = await response.parse() + assert computer is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_drag_mouse(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.computer.with_raw_response.drag_mouse( + id="", + path=[[0, 0], [0, 0]], + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_move_mouse(self, async_client: AsyncKernel) -> None: + computer = await async_client.browsers.computer.move_mouse( + id="id", + x=0, + y=0, + ) + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_move_mouse_with_all_params(self, async_client: AsyncKernel) -> None: + computer = await async_client.browsers.computer.move_mouse( + id="id", + x=0, + y=0, + hold_keys=["string"], + ) + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_move_mouse(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.computer.with_raw_response.move_mouse( + id="id", + x=0, + y=0, + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + computer = await response.parse() + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_move_mouse(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.computer.with_streaming_response.move_mouse( + id="id", + x=0, + y=0, + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + computer = await response.parse() + assert computer is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + 
@parametrize + async def test_path_params_move_mouse(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.computer.with_raw_response.move_mouse( + id="", + x=0, + y=0, + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_press_key(self, async_client: AsyncKernel) -> None: + computer = await async_client.browsers.computer.press_key( + id="id", + keys=["string"], + ) + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_press_key_with_all_params(self, async_client: AsyncKernel) -> None: + computer = await async_client.browsers.computer.press_key( + id="id", + keys=["string"], + duration=0, + hold_keys=["string"], + ) + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_press_key(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.computer.with_raw_response.press_key( + id="id", + keys=["string"], + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + computer = await response.parse() + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_press_key(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.computer.with_streaming_response.press_key( + id="id", + keys=["string"], + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + computer = await response.parse() + assert computer is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_press_key(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.computer.with_raw_response.press_key( + id="", + keys=["string"], + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_scroll(self, async_client: AsyncKernel) -> None: + computer = await async_client.browsers.computer.scroll( + id="id", + x=0, + y=0, + ) + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_scroll_with_all_params(self, async_client: AsyncKernel) -> None: + computer = await async_client.browsers.computer.scroll( + id="id", + x=0, + y=0, + delta_x=0, + delta_y=0, + hold_keys=["string"], + ) + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_scroll(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.computer.with_raw_response.scroll( + id="id", + x=0, + y=0, + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + computer = await response.parse() + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_scroll(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.computer.with_streaming_response.scroll( + id="id", + x=0, + y=0, + ) as response: + assert not response.is_closed + assert 
response.http_request.headers.get("X-Stainless-Lang") == "python" + + computer = await response.parse() + assert computer is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_scroll(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.computer.with_raw_response.scroll( + id="", + x=0, + y=0, + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_set_cursor_visibility(self, async_client: AsyncKernel) -> None: + computer = await async_client.browsers.computer.set_cursor_visibility( + id="id", + hidden=True, + ) + assert_matches_type(ComputerSetCursorVisibilityResponse, computer, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_set_cursor_visibility(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.computer.with_raw_response.set_cursor_visibility( + id="id", + hidden=True, + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + computer = await response.parse() + assert_matches_type(ComputerSetCursorVisibilityResponse, computer, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_set_cursor_visibility(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.computer.with_streaming_response.set_cursor_visibility( + id="id", + hidden=True, + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + computer = await response.parse() + assert_matches_type(ComputerSetCursorVisibilityResponse, computer, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_set_cursor_visibility(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.computer.with_raw_response.set_cursor_visibility( + id="", + hidden=True, + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_type_text(self, async_client: AsyncKernel) -> None: + computer = await async_client.browsers.computer.type_text( + id="id", + text="text", + ) + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_type_text_with_all_params(self, async_client: AsyncKernel) -> None: + computer = await async_client.browsers.computer.type_text( + id="id", + text="text", + delay=0, + ) + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_type_text(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.computer.with_raw_response.type_text( + id="id", + text="text", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + computer = await response.parse() + assert computer is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_type_text(self, async_client: AsyncKernel) -> None: + async with 
async_client.browsers.computer.with_streaming_response.type_text( + id="id", + text="text", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + computer = await response.parse() + assert computer is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_type_text(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.computer.with_raw_response.type_text( + id="", + text="text", + ) diff --git a/tests/api_resources/browsers/test_fs.py b/tests/api_resources/browsers/test_fs.py new file mode 100644 index 0000000..38e07b7 --- /dev/null +++ b/tests/api_resources/browsers/test_fs.py @@ -0,0 +1,1309 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import httpx +import pytest +from respx import MockRouter + +from kernel import Kernel, AsyncKernel +from tests.utils import assert_matches_type +from kernel._response import ( + BinaryAPIResponse, + AsyncBinaryAPIResponse, + StreamedBinaryAPIResponse, + AsyncStreamedBinaryAPIResponse, +) +from kernel.types.browsers import ( + FFileInfoResponse, + FListFilesResponse, +) + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestFs: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_create_directory(self, client: Kernel) -> None: + f = client.browsers.fs.create_directory( + id="id", + path="/J!", + ) + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_create_directory_with_all_params(self, client: Kernel) -> None: + f = client.browsers.fs.create_directory( + id="id", + path="/J!", + mode="0611", + ) + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_create_directory(self, client: Kernel) -> None: + response = client.browsers.fs.with_raw_response.create_directory( + id="id", + path="/J!", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + f = response.parse() + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_create_directory(self, client: Kernel) -> None: + with client.browsers.fs.with_streaming_response.create_directory( + id="id", + path="/J!", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + f = response.parse() + assert f is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_create_directory(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.fs.with_raw_response.create_directory( + id="", + path="/J!", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_delete_directory(self, client: Kernel) -> None: + f = client.browsers.fs.delete_directory( + id="id", + path="/J!", + ) + assert f is None + + 
@pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_delete_directory(self, client: Kernel) -> None: + response = client.browsers.fs.with_raw_response.delete_directory( + id="id", + path="/J!", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + f = response.parse() + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_delete_directory(self, client: Kernel) -> None: + with client.browsers.fs.with_streaming_response.delete_directory( + id="id", + path="/J!", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + f = response.parse() + assert f is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_delete_directory(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.fs.with_raw_response.delete_directory( + id="", + path="/J!", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_delete_file(self, client: Kernel) -> None: + f = client.browsers.fs.delete_file( + id="id", + path="/J!", + ) + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_delete_file(self, client: Kernel) -> None: + response = client.browsers.fs.with_raw_response.delete_file( + id="id", + path="/J!", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + f = response.parse() + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_delete_file(self, client: Kernel) -> None: + with client.browsers.fs.with_streaming_response.delete_file( + id="id", + path="/J!", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + f = response.parse() + assert f is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_delete_file(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.fs.with_raw_response.delete_file( + id="", + path="/J!", + ) + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_method_download_dir_zip(self, client: Kernel, respx_mock: MockRouter) -> None: + respx_mock.get("/browsers/id/fs/download_dir_zip").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + f = client.browsers.fs.download_dir_zip( + id="id", + path="/J!", + ) + assert f.is_closed + assert f.json() == {"foo": "bar"} + assert cast(Any, f.is_closed) is True + assert isinstance(f, BinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_raw_response_download_dir_zip(self, client: Kernel, respx_mock: MockRouter) -> None: + respx_mock.get("/browsers/id/fs/download_dir_zip").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + f = client.browsers.fs.with_raw_response.download_dir_zip( + id="id", + path="/J!", + ) + + assert f.is_closed is True + assert f.http_request.headers.get("X-Stainless-Lang") == "python" + assert f.json() == {"foo": "bar"} + assert isinstance(f, BinaryAPIResponse) + + 
@parametrize + @pytest.mark.respx(base_url=base_url) + def test_streaming_response_download_dir_zip(self, client: Kernel, respx_mock: MockRouter) -> None: + respx_mock.get("/browsers/id/fs/download_dir_zip").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + with client.browsers.fs.with_streaming_response.download_dir_zip( + id="id", + path="/J!", + ) as f: + assert not f.is_closed + assert f.http_request.headers.get("X-Stainless-Lang") == "python" + + assert f.json() == {"foo": "bar"} + assert cast(Any, f.is_closed) is True + assert isinstance(f, StreamedBinaryAPIResponse) + + assert cast(Any, f.is_closed) is True + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_path_params_download_dir_zip(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.fs.with_raw_response.download_dir_zip( + id="", + path="/J!", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_file_info(self, client: Kernel) -> None: + f = client.browsers.fs.file_info( + id="id", + path="/J!", + ) + assert_matches_type(FFileInfoResponse, f, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_file_info(self, client: Kernel) -> None: + response = client.browsers.fs.with_raw_response.file_info( + id="id", + path="/J!", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + f = response.parse() + assert_matches_type(FFileInfoResponse, f, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_file_info(self, client: Kernel) -> None: + with client.browsers.fs.with_streaming_response.file_info( + id="id", + path="/J!", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + f = response.parse() + assert_matches_type(FFileInfoResponse, f, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_file_info(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.fs.with_raw_response.file_info( + id="", + path="/J!", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list_files(self, client: Kernel) -> None: + f = client.browsers.fs.list_files( + id="id", + path="/J!", + ) + assert_matches_type(FListFilesResponse, f, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_list_files(self, client: Kernel) -> None: + response = client.browsers.fs.with_raw_response.list_files( + id="id", + path="/J!", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + f = response.parse() + assert_matches_type(FListFilesResponse, f, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_list_files(self, client: Kernel) -> None: + with client.browsers.fs.with_streaming_response.list_files( + id="id", + path="/J!", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + f = response.parse() + assert_matches_type(FListFilesResponse, f, 
path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_list_files(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.fs.with_raw_response.list_files( + id="", + path="/J!", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_move(self, client: Kernel) -> None: + f = client.browsers.fs.move( + id="id", + dest_path="/J!", + src_path="/J!", + ) + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_move(self, client: Kernel) -> None: + response = client.browsers.fs.with_raw_response.move( + id="id", + dest_path="/J!", + src_path="/J!", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + f = response.parse() + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_move(self, client: Kernel) -> None: + with client.browsers.fs.with_streaming_response.move( + id="id", + dest_path="/J!", + src_path="/J!", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + f = response.parse() + assert f is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_move(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.fs.with_raw_response.move( + id="", + dest_path="/J!", + src_path="/J!", + ) + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_method_read_file(self, client: Kernel, respx_mock: MockRouter) -> None: + respx_mock.get("/browsers/id/fs/read_file").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + f = client.browsers.fs.read_file( + id="id", + path="/J!", + ) + assert f.is_closed + assert f.json() == {"foo": "bar"} + assert cast(Any, f.is_closed) is True + assert isinstance(f, BinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_raw_response_read_file(self, client: Kernel, respx_mock: MockRouter) -> None: + respx_mock.get("/browsers/id/fs/read_file").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + f = client.browsers.fs.with_raw_response.read_file( + id="id", + path="/J!", + ) + + assert f.is_closed is True + assert f.http_request.headers.get("X-Stainless-Lang") == "python" + assert f.json() == {"foo": "bar"} + assert isinstance(f, BinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_streaming_response_read_file(self, client: Kernel, respx_mock: MockRouter) -> None: + respx_mock.get("/browsers/id/fs/read_file").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + with client.browsers.fs.with_streaming_response.read_file( + id="id", + path="/J!", + ) as f: + assert not f.is_closed + assert f.http_request.headers.get("X-Stainless-Lang") == "python" + + assert f.json() == {"foo": "bar"} + assert cast(Any, f.is_closed) is True + assert isinstance(f, StreamedBinaryAPIResponse) + + assert cast(Any, f.is_closed) is True + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_path_params_read_file(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value 
for `id` but received ''"): + client.browsers.fs.with_raw_response.read_file( + id="", + path="/J!", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_set_file_permissions(self, client: Kernel) -> None: + f = client.browsers.fs.set_file_permissions( + id="id", + mode="0611", + path="/J!", + ) + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_set_file_permissions_with_all_params(self, client: Kernel) -> None: + f = client.browsers.fs.set_file_permissions( + id="id", + mode="0611", + path="/J!", + group="group", + owner="owner", + ) + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_set_file_permissions(self, client: Kernel) -> None: + response = client.browsers.fs.with_raw_response.set_file_permissions( + id="id", + mode="0611", + path="/J!", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + f = response.parse() + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_set_file_permissions(self, client: Kernel) -> None: + with client.browsers.fs.with_streaming_response.set_file_permissions( + id="id", + mode="0611", + path="/J!", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + f = response.parse() + assert f is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_set_file_permissions(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.fs.with_raw_response.set_file_permissions( + id="", + mode="0611", + path="/J!", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_upload(self, client: Kernel) -> None: + f = client.browsers.fs.upload( + id="id", + files=[ + { + "dest_path": "/J!", + "file": b"raw file contents", + } + ], + ) + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_upload(self, client: Kernel) -> None: + response = client.browsers.fs.with_raw_response.upload( + id="id", + files=[ + { + "dest_path": "/J!", + "file": b"raw file contents", + } + ], + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + f = response.parse() + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_upload(self, client: Kernel) -> None: + with client.browsers.fs.with_streaming_response.upload( + id="id", + files=[ + { + "dest_path": "/J!", + "file": b"raw file contents", + } + ], + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + f = response.parse() + assert f is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_upload(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.fs.with_raw_response.upload( + id="", + files=[ + { + "dest_path": "/J!", + "file": b"raw file contents", + } + ], + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + 
@parametrize + def test_method_upload_zip(self, client: Kernel) -> None: + f = client.browsers.fs.upload_zip( + id="id", + dest_path="/J!", + zip_file=b"raw file contents", + ) + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_upload_zip(self, client: Kernel) -> None: + response = client.browsers.fs.with_raw_response.upload_zip( + id="id", + dest_path="/J!", + zip_file=b"raw file contents", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + f = response.parse() + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_upload_zip(self, client: Kernel) -> None: + with client.browsers.fs.with_streaming_response.upload_zip( + id="id", + dest_path="/J!", + zip_file=b"raw file contents", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + f = response.parse() + assert f is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_upload_zip(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.fs.with_raw_response.upload_zip( + id="", + dest_path="/J!", + zip_file=b"raw file contents", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_write_file(self, client: Kernel) -> None: + f = client.browsers.fs.write_file( + id="id", + contents=b"raw file contents", + path="/J!", + ) + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_write_file_with_all_params(self, client: Kernel) -> None: + f = client.browsers.fs.write_file( + id="id", + contents=b"raw file contents", + path="/J!", + mode="0611", + ) + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_write_file(self, client: Kernel) -> None: + response = client.browsers.fs.with_raw_response.write_file( + id="id", + contents=b"raw file contents", + path="/J!", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + f = response.parse() + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_write_file(self, client: Kernel) -> None: + with client.browsers.fs.with_streaming_response.write_file( + id="id", + contents=b"raw file contents", + path="/J!", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + f = response.parse() + assert f is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_write_file(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.fs.with_raw_response.write_file( + id="", + contents=b"raw file contents", + path="/J!", + ) + + +class TestAsyncFs: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_create_directory(self, async_client: AsyncKernel) -> 
None: + f = await async_client.browsers.fs.create_directory( + id="id", + path="/J!", + ) + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_create_directory_with_all_params(self, async_client: AsyncKernel) -> None: + f = await async_client.browsers.fs.create_directory( + id="id", + path="/J!", + mode="0611", + ) + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_create_directory(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.fs.with_raw_response.create_directory( + id="id", + path="/J!", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + f = await response.parse() + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_create_directory(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.fs.with_streaming_response.create_directory( + id="id", + path="/J!", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + f = await response.parse() + assert f is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_create_directory(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.fs.with_raw_response.create_directory( + id="", + path="/J!", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_delete_directory(self, async_client: AsyncKernel) -> None: + f = await async_client.browsers.fs.delete_directory( + id="id", + path="/J!", + ) + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_delete_directory(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.fs.with_raw_response.delete_directory( + id="id", + path="/J!", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + f = await response.parse() + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_delete_directory(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.fs.with_streaming_response.delete_directory( + id="id", + path="/J!", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + f = await response.parse() + assert f is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_delete_directory(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.fs.with_raw_response.delete_directory( + id="", + path="/J!", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_delete_file(self, async_client: AsyncKernel) -> None: + f = await async_client.browsers.fs.delete_file( + id="id", + path="/J!", + ) + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def 
test_raw_response_delete_file(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.fs.with_raw_response.delete_file( + id="id", + path="/J!", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + f = await response.parse() + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_delete_file(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.fs.with_streaming_response.delete_file( + id="id", + path="/J!", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + f = await response.parse() + assert f is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_delete_file(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.fs.with_raw_response.delete_file( + id="", + path="/J!", + ) + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def test_method_download_dir_zip(self, async_client: AsyncKernel, respx_mock: MockRouter) -> None: + respx_mock.get("/browsers/id/fs/download_dir_zip").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + f = await async_client.browsers.fs.download_dir_zip( + id="id", + path="/J!", + ) + assert f.is_closed + assert await f.json() == {"foo": "bar"} + assert cast(Any, f.is_closed) is True + assert isinstance(f, AsyncBinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def test_raw_response_download_dir_zip(self, async_client: AsyncKernel, respx_mock: MockRouter) -> None: + respx_mock.get("/browsers/id/fs/download_dir_zip").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + f = await async_client.browsers.fs.with_raw_response.download_dir_zip( + id="id", + path="/J!", + ) + + assert f.is_closed is True + assert f.http_request.headers.get("X-Stainless-Lang") == "python" + assert await f.json() == {"foo": "bar"} + assert isinstance(f, AsyncBinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def test_streaming_response_download_dir_zip(self, async_client: AsyncKernel, respx_mock: MockRouter) -> None: + respx_mock.get("/browsers/id/fs/download_dir_zip").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + async with async_client.browsers.fs.with_streaming_response.download_dir_zip( + id="id", + path="/J!", + ) as f: + assert not f.is_closed + assert f.http_request.headers.get("X-Stainless-Lang") == "python" + + assert await f.json() == {"foo": "bar"} + assert cast(Any, f.is_closed) is True + assert isinstance(f, AsyncStreamedBinaryAPIResponse) + + assert cast(Any, f.is_closed) is True + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def test_path_params_download_dir_zip(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.fs.with_raw_response.download_dir_zip( + id="", + path="/J!", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_file_info(self, async_client: AsyncKernel) -> None: + f = await async_client.browsers.fs.file_info( + id="id", + path="/J!", + ) + assert_matches_type(FFileInfoResponse, f, path=["response"]) + 
+ @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_file_info(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.fs.with_raw_response.file_info( + id="id", + path="/J!", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + f = await response.parse() + assert_matches_type(FFileInfoResponse, f, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_file_info(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.fs.with_streaming_response.file_info( + id="id", + path="/J!", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + f = await response.parse() + assert_matches_type(FFileInfoResponse, f, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_file_info(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.fs.with_raw_response.file_info( + id="", + path="/J!", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list_files(self, async_client: AsyncKernel) -> None: + f = await async_client.browsers.fs.list_files( + id="id", + path="/J!", + ) + assert_matches_type(FListFilesResponse, f, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_list_files(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.fs.with_raw_response.list_files( + id="id", + path="/J!", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + f = await response.parse() + assert_matches_type(FListFilesResponse, f, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_list_files(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.fs.with_streaming_response.list_files( + id="id", + path="/J!", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + f = await response.parse() + assert_matches_type(FListFilesResponse, f, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_list_files(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.fs.with_raw_response.list_files( + id="", + path="/J!", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_move(self, async_client: AsyncKernel) -> None: + f = await async_client.browsers.fs.move( + id="id", + dest_path="/J!", + src_path="/J!", + ) + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_move(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.fs.with_raw_response.move( + id="id", + dest_path="/J!", + src_path="/J!", + ) + + assert response.is_closed is True + assert 
response.http_request.headers.get("X-Stainless-Lang") == "python" + f = await response.parse() + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_move(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.fs.with_streaming_response.move( + id="id", + dest_path="/J!", + src_path="/J!", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + f = await response.parse() + assert f is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_move(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.fs.with_raw_response.move( + id="", + dest_path="/J!", + src_path="/J!", + ) + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def test_method_read_file(self, async_client: AsyncKernel, respx_mock: MockRouter) -> None: + respx_mock.get("/browsers/id/fs/read_file").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + f = await async_client.browsers.fs.read_file( + id="id", + path="/J!", + ) + assert f.is_closed + assert await f.json() == {"foo": "bar"} + assert cast(Any, f.is_closed) is True + assert isinstance(f, AsyncBinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def test_raw_response_read_file(self, async_client: AsyncKernel, respx_mock: MockRouter) -> None: + respx_mock.get("/browsers/id/fs/read_file").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + f = await async_client.browsers.fs.with_raw_response.read_file( + id="id", + path="/J!", + ) + + assert f.is_closed is True + assert f.http_request.headers.get("X-Stainless-Lang") == "python" + assert await f.json() == {"foo": "bar"} + assert isinstance(f, AsyncBinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def test_streaming_response_read_file(self, async_client: AsyncKernel, respx_mock: MockRouter) -> None: + respx_mock.get("/browsers/id/fs/read_file").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + async with async_client.browsers.fs.with_streaming_response.read_file( + id="id", + path="/J!", + ) as f: + assert not f.is_closed + assert f.http_request.headers.get("X-Stainless-Lang") == "python" + + assert await f.json() == {"foo": "bar"} + assert cast(Any, f.is_closed) is True + assert isinstance(f, AsyncStreamedBinaryAPIResponse) + + assert cast(Any, f.is_closed) is True + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def test_path_params_read_file(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.fs.with_raw_response.read_file( + id="", + path="/J!", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_set_file_permissions(self, async_client: AsyncKernel) -> None: + f = await async_client.browsers.fs.set_file_permissions( + id="id", + mode="0611", + path="/J!", + ) + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_set_file_permissions_with_all_params(self, async_client: AsyncKernel) -> None: + f = await async_client.browsers.fs.set_file_permissions( + id="id", + mode="0611", + path="/J!", + group="group", 
+ owner="owner", + ) + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_set_file_permissions(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.fs.with_raw_response.set_file_permissions( + id="id", + mode="0611", + path="/J!", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + f = await response.parse() + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_set_file_permissions(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.fs.with_streaming_response.set_file_permissions( + id="id", + mode="0611", + path="/J!", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + f = await response.parse() + assert f is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_set_file_permissions(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.fs.with_raw_response.set_file_permissions( + id="", + mode="0611", + path="/J!", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_upload(self, async_client: AsyncKernel) -> None: + f = await async_client.browsers.fs.upload( + id="id", + files=[ + { + "dest_path": "/J!", + "file": b"raw file contents", + } + ], + ) + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_upload(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.fs.with_raw_response.upload( + id="id", + files=[ + { + "dest_path": "/J!", + "file": b"raw file contents", + } + ], + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + f = await response.parse() + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_upload(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.fs.with_streaming_response.upload( + id="id", + files=[ + { + "dest_path": "/J!", + "file": b"raw file contents", + } + ], + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + f = await response.parse() + assert f is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_upload(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.fs.with_raw_response.upload( + id="", + files=[ + { + "dest_path": "/J!", + "file": b"raw file contents", + } + ], + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_upload_zip(self, async_client: AsyncKernel) -> None: + f = await async_client.browsers.fs.upload_zip( + id="id", + dest_path="/J!", + zip_file=b"raw file contents", + ) + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_upload_zip(self, async_client: AsyncKernel) -> None: + response = await 
async_client.browsers.fs.with_raw_response.upload_zip( + id="id", + dest_path="/J!", + zip_file=b"raw file contents", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + f = await response.parse() + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_upload_zip(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.fs.with_streaming_response.upload_zip( + id="id", + dest_path="/J!", + zip_file=b"raw file contents", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + f = await response.parse() + assert f is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_upload_zip(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.fs.with_raw_response.upload_zip( + id="", + dest_path="/J!", + zip_file=b"raw file contents", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_write_file(self, async_client: AsyncKernel) -> None: + f = await async_client.browsers.fs.write_file( + id="id", + contents=b"raw file contents", + path="/J!", + ) + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_write_file_with_all_params(self, async_client: AsyncKernel) -> None: + f = await async_client.browsers.fs.write_file( + id="id", + contents=b"raw file contents", + path="/J!", + mode="0611", + ) + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_write_file(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.fs.with_raw_response.write_file( + id="id", + contents=b"raw file contents", + path="/J!", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + f = await response.parse() + assert f is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_write_file(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.fs.with_streaming_response.write_file( + id="id", + contents=b"raw file contents", + path="/J!", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + f = await response.parse() + assert f is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_write_file(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.fs.with_raw_response.write_file( + id="", + contents=b"raw file contents", + path="/J!", + ) diff --git a/tests/api_resources/browsers/test_logs.py b/tests/api_resources/browsers/test_logs.py new file mode 100644 index 0000000..6aac62f --- /dev/null +++ b/tests/api_resources/browsers/test_logs.py @@ -0,0 +1,136 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from kernel import Kernel, AsyncKernel + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestLogs: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_method_stream(self, client: Kernel) -> None: + log_stream = client.browsers.logs.stream( + id="id", + source="path", + ) + log_stream.response.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_method_stream_with_all_params(self, client: Kernel) -> None: + log_stream = client.browsers.logs.stream( + id="id", + source="path", + follow=True, + path="path", + supervisor_process="supervisor_process", + ) + log_stream.response.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_raw_response_stream(self, client: Kernel) -> None: + response = client.browsers.logs.with_raw_response.stream( + id="id", + source="path", + ) + + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + stream = response.parse() + stream.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_streaming_response_stream(self, client: Kernel) -> None: + with client.browsers.logs.with_streaming_response.stream( + id="id", + source="path", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + stream = response.parse() + stream.close() + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_path_params_stream(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.logs.with_raw_response.stream( + id="", + source="path", + ) + + +class TestAsyncLogs: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_method_stream(self, async_client: AsyncKernel) -> None: + log_stream = await async_client.browsers.logs.stream( + id="id", + source="path", + ) + await log_stream.response.aclose() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_method_stream_with_all_params(self, async_client: AsyncKernel) -> None: + log_stream = await async_client.browsers.logs.stream( + id="id", + source="path", + follow=True, + path="path", + supervisor_process="supervisor_process", + ) + await log_stream.response.aclose() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_raw_response_stream(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.logs.with_raw_response.stream( + id="id", + source="path", + ) + + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + stream = await response.parse() + await stream.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_streaming_response_stream(self, 
async_client: AsyncKernel) -> None: + async with async_client.browsers.logs.with_streaming_response.stream( + id="id", + source="path", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + stream = await response.parse() + await stream.close() + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_path_params_stream(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.logs.with_raw_response.stream( + id="", + source="path", + ) diff --git a/tests/api_resources/browsers/test_playwright.py b/tests/api_resources/browsers/test_playwright.py new file mode 100644 index 0000000..cb79410 --- /dev/null +++ b/tests/api_resources/browsers/test_playwright.py @@ -0,0 +1,136 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from kernel import Kernel, AsyncKernel +from tests.utils import assert_matches_type +from kernel.types.browsers import PlaywrightExecuteResponse + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestPlaywright: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_execute(self, client: Kernel) -> None: + playwright = client.browsers.playwright.execute( + id="id", + code="code", + ) + assert_matches_type(PlaywrightExecuteResponse, playwright, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_execute_with_all_params(self, client: Kernel) -> None: + playwright = client.browsers.playwright.execute( + id="id", + code="code", + timeout_sec=1, + ) + assert_matches_type(PlaywrightExecuteResponse, playwright, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_execute(self, client: Kernel) -> None: + response = client.browsers.playwright.with_raw_response.execute( + id="id", + code="code", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + playwright = response.parse() + assert_matches_type(PlaywrightExecuteResponse, playwright, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_execute(self, client: Kernel) -> None: + with client.browsers.playwright.with_streaming_response.execute( + id="id", + code="code", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + playwright = response.parse() + assert_matches_type(PlaywrightExecuteResponse, playwright, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_execute(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.playwright.with_raw_response.execute( + id="", + code="code", + ) + + +class TestAsyncPlaywright: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], 
indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_execute(self, async_client: AsyncKernel) -> None: + playwright = await async_client.browsers.playwright.execute( + id="id", + code="code", + ) + assert_matches_type(PlaywrightExecuteResponse, playwright, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_execute_with_all_params(self, async_client: AsyncKernel) -> None: + playwright = await async_client.browsers.playwright.execute( + id="id", + code="code", + timeout_sec=1, + ) + assert_matches_type(PlaywrightExecuteResponse, playwright, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_execute(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.playwright.with_raw_response.execute( + id="id", + code="code", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + playwright = await response.parse() + assert_matches_type(PlaywrightExecuteResponse, playwright, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_execute(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.playwright.with_streaming_response.execute( + id="id", + code="code", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + playwright = await response.parse() + assert_matches_type(PlaywrightExecuteResponse, playwright, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_execute(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.playwright.with_raw_response.execute( + id="", + code="code", + ) diff --git a/tests/api_resources/browsers/test_process.py b/tests/api_resources/browsers/test_process.py new file mode 100644 index 0000000..6997762 --- /dev/null +++ b/tests/api_resources/browsers/test_process.py @@ -0,0 +1,708 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from kernel import Kernel, AsyncKernel +from tests.utils import assert_matches_type +from kernel.types.browsers import ( + ProcessExecResponse, + ProcessKillResponse, + ProcessSpawnResponse, + ProcessStdinResponse, + ProcessStatusResponse, +) + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestProcess: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_exec(self, client: Kernel) -> None: + process = client.browsers.process.exec( + id="id", + command="command", + ) + assert_matches_type(ProcessExecResponse, process, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_exec_with_all_params(self, client: Kernel) -> None: + process = client.browsers.process.exec( + id="id", + command="command", + args=["string"], + as_root=True, + as_user="as_user", + cwd="/J!", + env={"foo": "string"}, + timeout_sec=0, + ) + assert_matches_type(ProcessExecResponse, process, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_exec(self, client: Kernel) -> None: + response = client.browsers.process.with_raw_response.exec( + id="id", + command="command", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + process = response.parse() + assert_matches_type(ProcessExecResponse, process, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_exec(self, client: Kernel) -> None: + with client.browsers.process.with_streaming_response.exec( + id="id", + command="command", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + process = response.parse() + assert_matches_type(ProcessExecResponse, process, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_exec(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.process.with_raw_response.exec( + id="", + command="command", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_kill(self, client: Kernel) -> None: + process = client.browsers.process.kill( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="id", + signal="TERM", + ) + assert_matches_type(ProcessKillResponse, process, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_kill(self, client: Kernel) -> None: + response = client.browsers.process.with_raw_response.kill( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="id", + signal="TERM", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + process = response.parse() + assert_matches_type(ProcessKillResponse, process, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_kill(self, client: Kernel) -> None: + with client.browsers.process.with_streaming_response.kill( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + 
id="id", + signal="TERM", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + process = response.parse() + assert_matches_type(ProcessKillResponse, process, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_kill(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.process.with_raw_response.kill( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="", + signal="TERM", + ) + + with pytest.raises(ValueError, match=r"Expected a non-empty value for `process_id` but received ''"): + client.browsers.process.with_raw_response.kill( + process_id="", + id="id", + signal="TERM", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_spawn(self, client: Kernel) -> None: + process = client.browsers.process.spawn( + id="id", + command="command", + ) + assert_matches_type(ProcessSpawnResponse, process, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_spawn_with_all_params(self, client: Kernel) -> None: + process = client.browsers.process.spawn( + id="id", + command="command", + args=["string"], + as_root=True, + as_user="as_user", + cwd="/J!", + env={"foo": "string"}, + timeout_sec=0, + ) + assert_matches_type(ProcessSpawnResponse, process, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_spawn(self, client: Kernel) -> None: + response = client.browsers.process.with_raw_response.spawn( + id="id", + command="command", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + process = response.parse() + assert_matches_type(ProcessSpawnResponse, process, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_spawn(self, client: Kernel) -> None: + with client.browsers.process.with_streaming_response.spawn( + id="id", + command="command", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + process = response.parse() + assert_matches_type(ProcessSpawnResponse, process, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_spawn(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.process.with_raw_response.spawn( + id="", + command="command", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_status(self, client: Kernel) -> None: + process = client.browsers.process.status( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="id", + ) + assert_matches_type(ProcessStatusResponse, process, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_status(self, client: Kernel) -> None: + response = client.browsers.process.with_raw_response.status( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + process = response.parse() + 
assert_matches_type(ProcessStatusResponse, process, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_status(self, client: Kernel) -> None: + with client.browsers.process.with_streaming_response.status( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + process = response.parse() + assert_matches_type(ProcessStatusResponse, process, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_status(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.process.with_raw_response.status( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="", + ) + + with pytest.raises(ValueError, match=r"Expected a non-empty value for `process_id` but received ''"): + client.browsers.process.with_raw_response.status( + process_id="", + id="id", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_stdin(self, client: Kernel) -> None: + process = client.browsers.process.stdin( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="id", + data_b64="data_b64", + ) + assert_matches_type(ProcessStdinResponse, process, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_stdin(self, client: Kernel) -> None: + response = client.browsers.process.with_raw_response.stdin( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="id", + data_b64="data_b64", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + process = response.parse() + assert_matches_type(ProcessStdinResponse, process, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_stdin(self, client: Kernel) -> None: + with client.browsers.process.with_streaming_response.stdin( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="id", + data_b64="data_b64", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + process = response.parse() + assert_matches_type(ProcessStdinResponse, process, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_stdin(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.process.with_raw_response.stdin( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="", + data_b64="data_b64", + ) + + with pytest.raises(ValueError, match=r"Expected a non-empty value for `process_id` but received ''"): + client.browsers.process.with_raw_response.stdin( + process_id="", + id="id", + data_b64="data_b64", + ) + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_method_stdout_stream(self, client: Kernel) -> None: + process_stream = client.browsers.process.stdout_stream( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="id", + ) + process_stream.response.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def 
test_raw_response_stdout_stream(self, client: Kernel) -> None: + response = client.browsers.process.with_raw_response.stdout_stream( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="id", + ) + + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + stream = response.parse() + stream.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_streaming_response_stdout_stream(self, client: Kernel) -> None: + with client.browsers.process.with_streaming_response.stdout_stream( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + stream = response.parse() + stream.close() + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_path_params_stdout_stream(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.process.with_raw_response.stdout_stream( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="", + ) + + with pytest.raises(ValueError, match=r"Expected a non-empty value for `process_id` but received ''"): + client.browsers.process.with_raw_response.stdout_stream( + process_id="", + id="id", + ) + + +class TestAsyncProcess: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_exec(self, async_client: AsyncKernel) -> None: + process = await async_client.browsers.process.exec( + id="id", + command="command", + ) + assert_matches_type(ProcessExecResponse, process, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_exec_with_all_params(self, async_client: AsyncKernel) -> None: + process = await async_client.browsers.process.exec( + id="id", + command="command", + args=["string"], + as_root=True, + as_user="as_user", + cwd="/J!", + env={"foo": "string"}, + timeout_sec=0, + ) + assert_matches_type(ProcessExecResponse, process, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_exec(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.process.with_raw_response.exec( + id="id", + command="command", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + process = await response.parse() + assert_matches_type(ProcessExecResponse, process, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_exec(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.process.with_streaming_response.exec( + id="id", + command="command", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + process = await response.parse() + assert_matches_type(ProcessExecResponse, process, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_exec(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, 
match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.process.with_raw_response.exec( + id="", + command="command", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_kill(self, async_client: AsyncKernel) -> None: + process = await async_client.browsers.process.kill( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="id", + signal="TERM", + ) + assert_matches_type(ProcessKillResponse, process, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_kill(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.process.with_raw_response.kill( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="id", + signal="TERM", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + process = await response.parse() + assert_matches_type(ProcessKillResponse, process, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_kill(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.process.with_streaming_response.kill( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="id", + signal="TERM", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + process = await response.parse() + assert_matches_type(ProcessKillResponse, process, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_kill(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.process.with_raw_response.kill( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="", + signal="TERM", + ) + + with pytest.raises(ValueError, match=r"Expected a non-empty value for `process_id` but received ''"): + await async_client.browsers.process.with_raw_response.kill( + process_id="", + id="id", + signal="TERM", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_spawn(self, async_client: AsyncKernel) -> None: + process = await async_client.browsers.process.spawn( + id="id", + command="command", + ) + assert_matches_type(ProcessSpawnResponse, process, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_spawn_with_all_params(self, async_client: AsyncKernel) -> None: + process = await async_client.browsers.process.spawn( + id="id", + command="command", + args=["string"], + as_root=True, + as_user="as_user", + cwd="/J!", + env={"foo": "string"}, + timeout_sec=0, + ) + assert_matches_type(ProcessSpawnResponse, process, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_spawn(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.process.with_raw_response.spawn( + id="id", + command="command", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + process = await response.parse() + assert_matches_type(ProcessSpawnResponse, process, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def 
test_streaming_response_spawn(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.process.with_streaming_response.spawn( + id="id", + command="command", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + process = await response.parse() + assert_matches_type(ProcessSpawnResponse, process, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_spawn(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.process.with_raw_response.spawn( + id="", + command="command", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_status(self, async_client: AsyncKernel) -> None: + process = await async_client.browsers.process.status( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="id", + ) + assert_matches_type(ProcessStatusResponse, process, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_status(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.process.with_raw_response.status( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + process = await response.parse() + assert_matches_type(ProcessStatusResponse, process, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_status(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.process.with_streaming_response.status( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + process = await response.parse() + assert_matches_type(ProcessStatusResponse, process, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_status(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.process.with_raw_response.status( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="", + ) + + with pytest.raises(ValueError, match=r"Expected a non-empty value for `process_id` but received ''"): + await async_client.browsers.process.with_raw_response.status( + process_id="", + id="id", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_stdin(self, async_client: AsyncKernel) -> None: + process = await async_client.browsers.process.stdin( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="id", + data_b64="data_b64", + ) + assert_matches_type(ProcessStdinResponse, process, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_stdin(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.process.with_raw_response.stdin( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="id", + data_b64="data_b64", + ) + + assert response.is_closed is True + assert 
response.http_request.headers.get("X-Stainless-Lang") == "python" + process = await response.parse() + assert_matches_type(ProcessStdinResponse, process, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_stdin(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.process.with_streaming_response.stdin( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="id", + data_b64="data_b64", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + process = await response.parse() + assert_matches_type(ProcessStdinResponse, process, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_stdin(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.process.with_raw_response.stdin( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="", + data_b64="data_b64", + ) + + with pytest.raises(ValueError, match=r"Expected a non-empty value for `process_id` but received ''"): + await async_client.browsers.process.with_raw_response.stdin( + process_id="", + id="id", + data_b64="data_b64", + ) + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_method_stdout_stream(self, async_client: AsyncKernel) -> None: + process_stream = await async_client.browsers.process.stdout_stream( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="id", + ) + await process_stream.response.aclose() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_raw_response_stdout_stream(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.process.with_raw_response.stdout_stream( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="id", + ) + + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + stream = await response.parse() + await stream.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_streaming_response_stdout_stream(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.process.with_streaming_response.stdout_stream( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + stream = await response.parse() + await stream.close() + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_path_params_stdout_stream(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.process.with_raw_response.stdout_stream( + process_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + id="", + ) + + with pytest.raises(ValueError, match=r"Expected a non-empty value for `process_id` but received ''"): + await async_client.browsers.process.with_raw_response.stdout_stream( + process_id="", + id="id", + ) diff --git a/tests/api_resources/browsers/test_replays.py b/tests/api_resources/browsers/test_replays.py new file mode 100644 index 
0000000..df1fed5 --- /dev/null +++ b/tests/api_resources/browsers/test_replays.py @@ -0,0 +1,444 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import httpx +import pytest +from respx import MockRouter + +from kernel import Kernel, AsyncKernel +from tests.utils import assert_matches_type +from kernel._response import ( + BinaryAPIResponse, + AsyncBinaryAPIResponse, + StreamedBinaryAPIResponse, + AsyncStreamedBinaryAPIResponse, +) +from kernel.types.browsers import ReplayListResponse, ReplayStartResponse + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestReplays: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list(self, client: Kernel) -> None: + replay = client.browsers.replays.list( + "id", + ) + assert_matches_type(ReplayListResponse, replay, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_list(self, client: Kernel) -> None: + response = client.browsers.replays.with_raw_response.list( + "id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + replay = response.parse() + assert_matches_type(ReplayListResponse, replay, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_list(self, client: Kernel) -> None: + with client.browsers.replays.with_streaming_response.list( + "id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + replay = response.parse() + assert_matches_type(ReplayListResponse, replay, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_list(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.replays.with_raw_response.list( + "", + ) + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_method_download(self, client: Kernel, respx_mock: MockRouter) -> None: + respx_mock.get("/browsers/id/replays/replay_id").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + replay = client.browsers.replays.download( + replay_id="replay_id", + id="id", + ) + assert replay.is_closed + assert replay.json() == {"foo": "bar"} + assert cast(Any, replay.is_closed) is True + assert isinstance(replay, BinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_raw_response_download(self, client: Kernel, respx_mock: MockRouter) -> None: + respx_mock.get("/browsers/id/replays/replay_id").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + replay = client.browsers.replays.with_raw_response.download( + replay_id="replay_id", + id="id", + ) + + assert replay.is_closed is True + assert replay.http_request.headers.get("X-Stainless-Lang") == "python" + assert replay.json() == {"foo": "bar"} + assert isinstance(replay, BinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_streaming_response_download(self, client: Kernel, respx_mock: MockRouter) -> None: + 
respx_mock.get("/browsers/id/replays/replay_id").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + with client.browsers.replays.with_streaming_response.download( + replay_id="replay_id", + id="id", + ) as replay: + assert not replay.is_closed + assert replay.http_request.headers.get("X-Stainless-Lang") == "python" + + assert replay.json() == {"foo": "bar"} + assert cast(Any, replay.is_closed) is True + assert isinstance(replay, StreamedBinaryAPIResponse) + + assert cast(Any, replay.is_closed) is True + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_path_params_download(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.replays.with_raw_response.download( + replay_id="replay_id", + id="", + ) + + with pytest.raises(ValueError, match=r"Expected a non-empty value for `replay_id` but received ''"): + client.browsers.replays.with_raw_response.download( + replay_id="", + id="id", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_start(self, client: Kernel) -> None: + replay = client.browsers.replays.start( + id="id", + ) + assert_matches_type(ReplayStartResponse, replay, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_start_with_all_params(self, client: Kernel) -> None: + replay = client.browsers.replays.start( + id="id", + framerate=1, + max_duration_in_seconds=1, + ) + assert_matches_type(ReplayStartResponse, replay, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_start(self, client: Kernel) -> None: + response = client.browsers.replays.with_raw_response.start( + id="id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + replay = response.parse() + assert_matches_type(ReplayStartResponse, replay, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_start(self, client: Kernel) -> None: + with client.browsers.replays.with_streaming_response.start( + id="id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + replay = response.parse() + assert_matches_type(ReplayStartResponse, replay, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_start(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.replays.with_raw_response.start( + id="", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_stop(self, client: Kernel) -> None: + replay = client.browsers.replays.stop( + replay_id="replay_id", + id="id", + ) + assert replay is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_stop(self, client: Kernel) -> None: + response = client.browsers.replays.with_raw_response.stop( + replay_id="replay_id", + id="id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + replay = response.parse() + assert replay is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_stop(self, client: Kernel) -> None: + 
with client.browsers.replays.with_streaming_response.stop( + replay_id="replay_id", + id="id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + replay = response.parse() + assert replay is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_stop(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.replays.with_raw_response.stop( + replay_id="replay_id", + id="", + ) + + with pytest.raises(ValueError, match=r"Expected a non-empty value for `replay_id` but received ''"): + client.browsers.replays.with_raw_response.stop( + replay_id="", + id="id", + ) + + +class TestAsyncReplays: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list(self, async_client: AsyncKernel) -> None: + replay = await async_client.browsers.replays.list( + "id", + ) + assert_matches_type(ReplayListResponse, replay, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_list(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.replays.with_raw_response.list( + "id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + replay = await response.parse() + assert_matches_type(ReplayListResponse, replay, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_list(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.replays.with_streaming_response.list( + "id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + replay = await response.parse() + assert_matches_type(ReplayListResponse, replay, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_list(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.replays.with_raw_response.list( + "", + ) + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def test_method_download(self, async_client: AsyncKernel, respx_mock: MockRouter) -> None: + respx_mock.get("/browsers/id/replays/replay_id").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + replay = await async_client.browsers.replays.download( + replay_id="replay_id", + id="id", + ) + assert replay.is_closed + assert await replay.json() == {"foo": "bar"} + assert cast(Any, replay.is_closed) is True + assert isinstance(replay, AsyncBinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def test_raw_response_download(self, async_client: AsyncKernel, respx_mock: MockRouter) -> None: + respx_mock.get("/browsers/id/replays/replay_id").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + replay = await async_client.browsers.replays.with_raw_response.download( + replay_id="replay_id", + id="id", + ) + + assert replay.is_closed is True + assert 
replay.http_request.headers.get("X-Stainless-Lang") == "python" + assert await replay.json() == {"foo": "bar"} + assert isinstance(replay, AsyncBinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def test_streaming_response_download(self, async_client: AsyncKernel, respx_mock: MockRouter) -> None: + respx_mock.get("/browsers/id/replays/replay_id").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + async with async_client.browsers.replays.with_streaming_response.download( + replay_id="replay_id", + id="id", + ) as replay: + assert not replay.is_closed + assert replay.http_request.headers.get("X-Stainless-Lang") == "python" + + assert await replay.json() == {"foo": "bar"} + assert cast(Any, replay.is_closed) is True + assert isinstance(replay, AsyncStreamedBinaryAPIResponse) + + assert cast(Any, replay.is_closed) is True + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def test_path_params_download(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.replays.with_raw_response.download( + replay_id="replay_id", + id="", + ) + + with pytest.raises(ValueError, match=r"Expected a non-empty value for `replay_id` but received ''"): + await async_client.browsers.replays.with_raw_response.download( + replay_id="", + id="id", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_start(self, async_client: AsyncKernel) -> None: + replay = await async_client.browsers.replays.start( + id="id", + ) + assert_matches_type(ReplayStartResponse, replay, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_start_with_all_params(self, async_client: AsyncKernel) -> None: + replay = await async_client.browsers.replays.start( + id="id", + framerate=1, + max_duration_in_seconds=1, + ) + assert_matches_type(ReplayStartResponse, replay, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_start(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.replays.with_raw_response.start( + id="id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + replay = await response.parse() + assert_matches_type(ReplayStartResponse, replay, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_start(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.replays.with_streaming_response.start( + id="id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + replay = await response.parse() + assert_matches_type(ReplayStartResponse, replay, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_start(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.replays.with_raw_response.start( + id="", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_stop(self, async_client: AsyncKernel) -> None: + replay = await async_client.browsers.replays.stop( + 
replay_id="replay_id", + id="id", + ) + assert replay is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_stop(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.replays.with_raw_response.stop( + replay_id="replay_id", + id="id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + replay = await response.parse() + assert replay is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_stop(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.replays.with_streaming_response.stop( + replay_id="replay_id", + id="id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + replay = await response.parse() + assert replay is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_stop(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.replays.with_raw_response.stop( + replay_id="replay_id", + id="", + ) + + with pytest.raises(ValueError, match=r"Expected a non-empty value for `replay_id` but received ''"): + await async_client.browsers.replays.with_raw_response.stop( + replay_id="", + id="id", + ) diff --git a/tests/api_resources/test_apps.py b/tests/api_resources/test_apps.py new file mode 100644 index 0000000..7475bcd --- /dev/null +++ b/tests/api_resources/test_apps.py @@ -0,0 +1,103 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from kernel import Kernel, AsyncKernel +from tests.utils import assert_matches_type +from kernel.types import AppListResponse +from kernel.pagination import SyncOffsetPagination, AsyncOffsetPagination + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestApps: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list(self, client: Kernel) -> None: + app = client.apps.list() + assert_matches_type(SyncOffsetPagination[AppListResponse], app, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list_with_all_params(self, client: Kernel) -> None: + app = client.apps.list( + app_name="app_name", + limit=1, + offset=0, + version="version", + ) + assert_matches_type(SyncOffsetPagination[AppListResponse], app, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_list(self, client: Kernel) -> None: + response = client.apps.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + app = response.parse() + assert_matches_type(SyncOffsetPagination[AppListResponse], app, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_list(self, client: Kernel) -> None: + with client.apps.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + app = response.parse() + assert_matches_type(SyncOffsetPagination[AppListResponse], app, path=["response"]) + + assert cast(Any, response.is_closed) is True + + +class TestAsyncApps: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list(self, async_client: AsyncKernel) -> None: + app = await async_client.apps.list() + assert_matches_type(AsyncOffsetPagination[AppListResponse], app, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list_with_all_params(self, async_client: AsyncKernel) -> None: + app = await async_client.apps.list( + app_name="app_name", + limit=1, + offset=0, + version="version", + ) + assert_matches_type(AsyncOffsetPagination[AppListResponse], app, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_list(self, async_client: AsyncKernel) -> None: + response = await async_client.apps.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + app = await response.parse() + assert_matches_type(AsyncOffsetPagination[AppListResponse], app, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_list(self, async_client: AsyncKernel) -> None: + async with async_client.apps.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + app = await response.parse() 
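+ # parse() on a streaming response deserializes the body into the typed page object
+ # (AsyncOffsetPagination here), mirroring what the non-streaming list() call returns directly.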
+ assert_matches_type(AsyncOffsetPagination[AppListResponse], app, path=["response"]) + + assert cast(Any, response.is_closed) is True diff --git a/tests/api_resources/test_browser_pools.py b/tests/api_resources/test_browser_pools.py new file mode 100644 index 0000000..6a8f164 --- /dev/null +++ b/tests/api_resources/test_browser_pools.py @@ -0,0 +1,856 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from kernel import Kernel, AsyncKernel +from tests.utils import assert_matches_type +from kernel.types import ( + BrowserPool, + BrowserPoolListResponse, + BrowserPoolAcquireResponse, +) + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestBrowserPools: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_create(self, client: Kernel) -> None: + browser_pool = client.browser_pools.create( + size=10, + ) + assert_matches_type(BrowserPool, browser_pool, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_create_with_all_params(self, client: Kernel) -> None: + browser_pool = client.browser_pools.create( + size=10, + extensions=[ + { + "id": "id", + "name": "name", + } + ], + fill_rate_per_minute=0, + headless=False, + kiosk_mode=True, + name="my-pool", + profile={ + "id": "id", + "name": "name", + "save_changes": True, + }, + proxy_id="proxy_id", + stealth=True, + timeout_seconds=60, + viewport={ + "height": 800, + "width": 1280, + "refresh_rate": 60, + }, + ) + assert_matches_type(BrowserPool, browser_pool, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_create(self, client: Kernel) -> None: + response = client.browser_pools.with_raw_response.create( + size=10, + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser_pool = response.parse() + assert_matches_type(BrowserPool, browser_pool, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_create(self, client: Kernel) -> None: + with client.browser_pools.with_streaming_response.create( + size=10, + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser_pool = response.parse() + assert_matches_type(BrowserPool, browser_pool, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_retrieve(self, client: Kernel) -> None: + browser_pool = client.browser_pools.retrieve( + "id_or_name", + ) + assert_matches_type(BrowserPool, browser_pool, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_retrieve(self, client: Kernel) -> None: + response = client.browser_pools.with_raw_response.retrieve( + "id_or_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser_pool = response.parse() + assert_matches_type(BrowserPool, browser_pool, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_retrieve(self, 
client: Kernel) -> None: + with client.browser_pools.with_streaming_response.retrieve( + "id_or_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser_pool = response.parse() + assert_matches_type(BrowserPool, browser_pool, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_retrieve(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + client.browser_pools.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_update(self, client: Kernel) -> None: + browser_pool = client.browser_pools.update( + id_or_name="id_or_name", + size=10, + ) + assert_matches_type(BrowserPool, browser_pool, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_update_with_all_params(self, client: Kernel) -> None: + browser_pool = client.browser_pools.update( + id_or_name="id_or_name", + size=10, + discard_all_idle=False, + extensions=[ + { + "id": "id", + "name": "name", + } + ], + fill_rate_per_minute=0, + headless=False, + kiosk_mode=True, + name="my-pool", + profile={ + "id": "id", + "name": "name", + "save_changes": True, + }, + proxy_id="proxy_id", + stealth=True, + timeout_seconds=60, + viewport={ + "height": 800, + "width": 1280, + "refresh_rate": 60, + }, + ) + assert_matches_type(BrowserPool, browser_pool, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_update(self, client: Kernel) -> None: + response = client.browser_pools.with_raw_response.update( + id_or_name="id_or_name", + size=10, + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser_pool = response.parse() + assert_matches_type(BrowserPool, browser_pool, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_update(self, client: Kernel) -> None: + with client.browser_pools.with_streaming_response.update( + id_or_name="id_or_name", + size=10, + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser_pool = response.parse() + assert_matches_type(BrowserPool, browser_pool, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_update(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + client.browser_pools.with_raw_response.update( + id_or_name="", + size=10, + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list(self, client: Kernel) -> None: + browser_pool = client.browser_pools.list() + assert_matches_type(BrowserPoolListResponse, browser_pool, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_list(self, client: Kernel) -> None: + response = client.browser_pools.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser_pool = response.parse() + 
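# with_raw_response reads the body eagerly, so the response is already closed at this point and parse() simply returns the typed model. +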
assert_matches_type(BrowserPoolListResponse, browser_pool, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_list(self, client: Kernel) -> None: + with client.browser_pools.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser_pool = response.parse() + assert_matches_type(BrowserPoolListResponse, browser_pool, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_delete(self, client: Kernel) -> None: + browser_pool = client.browser_pools.delete( + id_or_name="id_or_name", + ) + assert browser_pool is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_delete_with_all_params(self, client: Kernel) -> None: + browser_pool = client.browser_pools.delete( + id_or_name="id_or_name", + force=True, + ) + assert browser_pool is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_delete(self, client: Kernel) -> None: + response = client.browser_pools.with_raw_response.delete( + id_or_name="id_or_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser_pool = response.parse() + assert browser_pool is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_delete(self, client: Kernel) -> None: + with client.browser_pools.with_streaming_response.delete( + id_or_name="id_or_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser_pool = response.parse() + assert browser_pool is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_delete(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + client.browser_pools.with_raw_response.delete( + id_or_name="", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_acquire(self, client: Kernel) -> None: + browser_pool = client.browser_pools.acquire( + id_or_name="id_or_name", + ) + assert_matches_type(BrowserPoolAcquireResponse, browser_pool, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_acquire_with_all_params(self, client: Kernel) -> None: + browser_pool = client.browser_pools.acquire( + id_or_name="id_or_name", + acquire_timeout_seconds=0, + ) + assert_matches_type(BrowserPoolAcquireResponse, browser_pool, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_acquire(self, client: Kernel) -> None: + response = client.browser_pools.with_raw_response.acquire( + id_or_name="id_or_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser_pool = response.parse() + assert_matches_type(BrowserPoolAcquireResponse, browser_pool, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_acquire(self, client: Kernel) -> None: + with client.browser_pools.with_streaming_response.acquire( + id_or_name="id_or_name", + ) as 
response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser_pool = response.parse() + assert_matches_type(BrowserPoolAcquireResponse, browser_pool, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_acquire(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + client.browser_pools.with_raw_response.acquire( + id_or_name="", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_flush(self, client: Kernel) -> None: + browser_pool = client.browser_pools.flush( + "id_or_name", + ) + assert browser_pool is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_flush(self, client: Kernel) -> None: + response = client.browser_pools.with_raw_response.flush( + "id_or_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser_pool = response.parse() + assert browser_pool is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_flush(self, client: Kernel) -> None: + with client.browser_pools.with_streaming_response.flush( + "id_or_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser_pool = response.parse() + assert browser_pool is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_flush(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + client.browser_pools.with_raw_response.flush( + "", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_release(self, client: Kernel) -> None: + browser_pool = client.browser_pools.release( + id_or_name="id_or_name", + session_id="ts8iy3sg25ibheguyni2lg9t", + ) + assert browser_pool is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_release_with_all_params(self, client: Kernel) -> None: + browser_pool = client.browser_pools.release( + id_or_name="id_or_name", + session_id="ts8iy3sg25ibheguyni2lg9t", + reuse=False, + ) + assert browser_pool is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_release(self, client: Kernel) -> None: + response = client.browser_pools.with_raw_response.release( + id_or_name="id_or_name", + session_id="ts8iy3sg25ibheguyni2lg9t", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser_pool = response.parse() + assert browser_pool is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_release(self, client: Kernel) -> None: + with client.browser_pools.with_streaming_response.release( + id_or_name="id_or_name", + session_id="ts8iy3sg25ibheguyni2lg9t", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser_pool = response.parse() + assert browser_pool is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + 
@parametrize + def test_path_params_release(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + client.browser_pools.with_raw_response.release( + id_or_name="", + session_id="ts8iy3sg25ibheguyni2lg9t", + ) + + +class TestAsyncBrowserPools: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_create(self, async_client: AsyncKernel) -> None: + browser_pool = await async_client.browser_pools.create( + size=10, + ) + assert_matches_type(BrowserPool, browser_pool, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_create_with_all_params(self, async_client: AsyncKernel) -> None: + browser_pool = await async_client.browser_pools.create( + size=10, + extensions=[ + { + "id": "id", + "name": "name", + } + ], + fill_rate_per_minute=0, + headless=False, + kiosk_mode=True, + name="my-pool", + profile={ + "id": "id", + "name": "name", + "save_changes": True, + }, + proxy_id="proxy_id", + stealth=True, + timeout_seconds=60, + viewport={ + "height": 800, + "width": 1280, + "refresh_rate": 60, + }, + ) + assert_matches_type(BrowserPool, browser_pool, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_create(self, async_client: AsyncKernel) -> None: + response = await async_client.browser_pools.with_raw_response.create( + size=10, + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser_pool = await response.parse() + assert_matches_type(BrowserPool, browser_pool, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_create(self, async_client: AsyncKernel) -> None: + async with async_client.browser_pools.with_streaming_response.create( + size=10, + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser_pool = await response.parse() + assert_matches_type(BrowserPool, browser_pool, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_retrieve(self, async_client: AsyncKernel) -> None: + browser_pool = await async_client.browser_pools.retrieve( + "id_or_name", + ) + assert_matches_type(BrowserPool, browser_pool, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncKernel) -> None: + response = await async_client.browser_pools.with_raw_response.retrieve( + "id_or_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser_pool = await response.parse() + assert_matches_type(BrowserPool, browser_pool, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncKernel) -> None: + async with async_client.browser_pools.with_streaming_response.retrieve( + "id_or_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser_pool = 
await response.parse() + assert_matches_type(BrowserPool, browser_pool, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_retrieve(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + await async_client.browser_pools.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_update(self, async_client: AsyncKernel) -> None: + browser_pool = await async_client.browser_pools.update( + id_or_name="id_or_name", + size=10, + ) + assert_matches_type(BrowserPool, browser_pool, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_update_with_all_params(self, async_client: AsyncKernel) -> None: + browser_pool = await async_client.browser_pools.update( + id_or_name="id_or_name", + size=10, + discard_all_idle=False, + extensions=[ + { + "id": "id", + "name": "name", + } + ], + fill_rate_per_minute=0, + headless=False, + kiosk_mode=True, + name="my-pool", + profile={ + "id": "id", + "name": "name", + "save_changes": True, + }, + proxy_id="proxy_id", + stealth=True, + timeout_seconds=60, + viewport={ + "height": 800, + "width": 1280, + "refresh_rate": 60, + }, + ) + assert_matches_type(BrowserPool, browser_pool, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_update(self, async_client: AsyncKernel) -> None: + response = await async_client.browser_pools.with_raw_response.update( + id_or_name="id_or_name", + size=10, + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser_pool = await response.parse() + assert_matches_type(BrowserPool, browser_pool, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_update(self, async_client: AsyncKernel) -> None: + async with async_client.browser_pools.with_streaming_response.update( + id_or_name="id_or_name", + size=10, + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser_pool = await response.parse() + assert_matches_type(BrowserPool, browser_pool, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_update(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + await async_client.browser_pools.with_raw_response.update( + id_or_name="", + size=10, + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list(self, async_client: AsyncKernel) -> None: + browser_pool = await async_client.browser_pools.list() + assert_matches_type(BrowserPoolListResponse, browser_pool, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_list(self, async_client: AsyncKernel) -> None: + response = await async_client.browser_pools.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser_pool = await response.parse() + 
assert_matches_type(BrowserPoolListResponse, browser_pool, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_list(self, async_client: AsyncKernel) -> None: + async with async_client.browser_pools.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser_pool = await response.parse() + assert_matches_type(BrowserPoolListResponse, browser_pool, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_delete(self, async_client: AsyncKernel) -> None: + browser_pool = await async_client.browser_pools.delete( + id_or_name="id_or_name", + ) + assert browser_pool is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_delete_with_all_params(self, async_client: AsyncKernel) -> None: + browser_pool = await async_client.browser_pools.delete( + id_or_name="id_or_name", + force=True, + ) + assert browser_pool is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_delete(self, async_client: AsyncKernel) -> None: + response = await async_client.browser_pools.with_raw_response.delete( + id_or_name="id_or_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser_pool = await response.parse() + assert browser_pool is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_delete(self, async_client: AsyncKernel) -> None: + async with async_client.browser_pools.with_streaming_response.delete( + id_or_name="id_or_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser_pool = await response.parse() + assert browser_pool is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_delete(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + await async_client.browser_pools.with_raw_response.delete( + id_or_name="", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_acquire(self, async_client: AsyncKernel) -> None: + browser_pool = await async_client.browser_pools.acquire( + id_or_name="id_or_name", + ) + assert_matches_type(BrowserPoolAcquireResponse, browser_pool, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_acquire_with_all_params(self, async_client: AsyncKernel) -> None: + browser_pool = await async_client.browser_pools.acquire( + id_or_name="id_or_name", + acquire_timeout_seconds=0, + ) + assert_matches_type(BrowserPoolAcquireResponse, browser_pool, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_acquire(self, async_client: AsyncKernel) -> None: + response = await async_client.browser_pools.with_raw_response.acquire( + id_or_name="id_or_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser_pool = await response.parse() + 
assert_matches_type(BrowserPoolAcquireResponse, browser_pool, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_acquire(self, async_client: AsyncKernel) -> None: + async with async_client.browser_pools.with_streaming_response.acquire( + id_or_name="id_or_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser_pool = await response.parse() + assert_matches_type(BrowserPoolAcquireResponse, browser_pool, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_acquire(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + await async_client.browser_pools.with_raw_response.acquire( + id_or_name="", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_flush(self, async_client: AsyncKernel) -> None: + browser_pool = await async_client.browser_pools.flush( + "id_or_name", + ) + assert browser_pool is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_flush(self, async_client: AsyncKernel) -> None: + response = await async_client.browser_pools.with_raw_response.flush( + "id_or_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser_pool = await response.parse() + assert browser_pool is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_flush(self, async_client: AsyncKernel) -> None: + async with async_client.browser_pools.with_streaming_response.flush( + "id_or_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser_pool = await response.parse() + assert browser_pool is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_flush(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + await async_client.browser_pools.with_raw_response.flush( + "", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_release(self, async_client: AsyncKernel) -> None: + browser_pool = await async_client.browser_pools.release( + id_or_name="id_or_name", + session_id="ts8iy3sg25ibheguyni2lg9t", + ) + assert browser_pool is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_release_with_all_params(self, async_client: AsyncKernel) -> None: + browser_pool = await async_client.browser_pools.release( + id_or_name="id_or_name", + session_id="ts8iy3sg25ibheguyni2lg9t", + reuse=False, + ) + assert browser_pool is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_release(self, async_client: AsyncKernel) -> None: + response = await async_client.browser_pools.with_raw_response.release( + id_or_name="id_or_name", + session_id="ts8iy3sg25ibheguyni2lg9t", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser_pool = await 
response.parse() + assert browser_pool is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_release(self, async_client: AsyncKernel) -> None: + async with async_client.browser_pools.with_streaming_response.release( + id_or_name="id_or_name", + session_id="ts8iy3sg25ibheguyni2lg9t", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser_pool = await response.parse() + assert browser_pool is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_release(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + await async_client.browser_pools.with_raw_response.release( + id_or_name="", + session_id="ts8iy3sg25ibheguyni2lg9t", + ) diff --git a/tests/api_resources/test_browsers.py b/tests/api_resources/test_browsers.py new file mode 100644 index 0000000..a766656 --- /dev/null +++ b/tests/api_resources/test_browsers.py @@ -0,0 +1,599 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from kernel import Kernel, AsyncKernel +from tests.utils import assert_matches_type +from kernel.types import ( + BrowserListResponse, + BrowserCreateResponse, + BrowserRetrieveResponse, +) +from kernel.pagination import SyncOffsetPagination, AsyncOffsetPagination + +# pyright: reportDeprecated=false + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestBrowsers: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_create(self, client: Kernel) -> None: + browser = client.browsers.create() + assert_matches_type(BrowserCreateResponse, browser, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_create_with_all_params(self, client: Kernel) -> None: + browser = client.browsers.create( + extensions=[ + { + "id": "id", + "name": "name", + } + ], + headless=False, + invocation_id="rr33xuugxj9h0bkf1rdt2bet", + kiosk_mode=True, + persistence={"id": "my-awesome-browser-for-user-1234"}, + profile={ + "id": "id", + "name": "name", + "save_changes": True, + }, + proxy_id="proxy_id", + stealth=True, + timeout_seconds=10, + viewport={ + "height": 800, + "width": 1280, + "refresh_rate": 60, + }, + ) + assert_matches_type(BrowserCreateResponse, browser, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_create(self, client: Kernel) -> None: + response = client.browsers.with_raw_response.create() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser = response.parse() + assert_matches_type(BrowserCreateResponse, browser, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_create(self, client: Kernel) -> None: + with client.browsers.with_streaming_response.create() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser = response.parse() + 
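# Within the context manager the streaming response is still open; it is closed automatically when the block exits, which the trailing is_closed assertion verifies. +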
assert_matches_type(BrowserCreateResponse, browser, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_retrieve(self, client: Kernel) -> None: + browser = client.browsers.retrieve( + "htzv5orfit78e1m2biiifpbv", + ) + assert_matches_type(BrowserRetrieveResponse, browser, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_retrieve(self, client: Kernel) -> None: + response = client.browsers.with_raw_response.retrieve( + "htzv5orfit78e1m2biiifpbv", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser = response.parse() + assert_matches_type(BrowserRetrieveResponse, browser, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_retrieve(self, client: Kernel) -> None: + with client.browsers.with_streaming_response.retrieve( + "htzv5orfit78e1m2biiifpbv", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser = response.parse() + assert_matches_type(BrowserRetrieveResponse, browser, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_retrieve(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list(self, client: Kernel) -> None: + browser = client.browsers.list() + assert_matches_type(SyncOffsetPagination[BrowserListResponse], browser, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list_with_all_params(self, client: Kernel) -> None: + browser = client.browsers.list( + include_deleted=True, + limit=1, + offset=0, + ) + assert_matches_type(SyncOffsetPagination[BrowserListResponse], browser, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_list(self, client: Kernel) -> None: + response = client.browsers.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser = response.parse() + assert_matches_type(SyncOffsetPagination[BrowserListResponse], browser, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_list(self, client: Kernel) -> None: + with client.browsers.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser = response.parse() + assert_matches_type(SyncOffsetPagination[BrowserListResponse], browser, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_delete(self, client: Kernel) -> None: + with pytest.warns(DeprecationWarning): + browser = client.browsers.delete( + persistent_id="persistent_id", + ) + + assert browser is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_delete(self, client: Kernel) -> None: + with 
pytest.warns(DeprecationWarning): + response = client.browsers.with_raw_response.delete( + persistent_id="persistent_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser = response.parse() + assert browser is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_delete(self, client: Kernel) -> None: + with pytest.warns(DeprecationWarning): + with client.browsers.with_streaming_response.delete( + persistent_id="persistent_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser = response.parse() + assert browser is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_delete_by_id(self, client: Kernel) -> None: + browser = client.browsers.delete_by_id( + "htzv5orfit78e1m2biiifpbv", + ) + assert browser is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_delete_by_id(self, client: Kernel) -> None: + response = client.browsers.with_raw_response.delete_by_id( + "htzv5orfit78e1m2biiifpbv", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser = response.parse() + assert browser is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_delete_by_id(self, client: Kernel) -> None: + with client.browsers.with_streaming_response.delete_by_id( + "htzv5orfit78e1m2biiifpbv", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser = response.parse() + assert browser is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_delete_by_id(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.with_raw_response.delete_by_id( + "", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_load_extensions(self, client: Kernel) -> None: + browser = client.browsers.load_extensions( + id="id", + extensions=[ + { + "name": "name", + "zip_file": b"raw file contents", + } + ], + ) + assert browser is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_load_extensions(self, client: Kernel) -> None: + response = client.browsers.with_raw_response.load_extensions( + id="id", + extensions=[ + { + "name": "name", + "zip_file": b"raw file contents", + } + ], + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser = response.parse() + assert browser is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_load_extensions(self, client: Kernel) -> None: + with client.browsers.with_streaming_response.load_extensions( + id="id", + extensions=[ + { + "name": "name", + "zip_file": b"raw file contents", + } + ], + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser = response.parse() + assert browser is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are 
disabled") + @parametrize + def test_path_params_load_extensions(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.browsers.with_raw_response.load_extensions( + id="", + extensions=[ + { + "name": "name", + "zip_file": b"raw file contents", + } + ], + ) + + +class TestAsyncBrowsers: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_create(self, async_client: AsyncKernel) -> None: + browser = await async_client.browsers.create() + assert_matches_type(BrowserCreateResponse, browser, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_create_with_all_params(self, async_client: AsyncKernel) -> None: + browser = await async_client.browsers.create( + extensions=[ + { + "id": "id", + "name": "name", + } + ], + headless=False, + invocation_id="rr33xuugxj9h0bkf1rdt2bet", + kiosk_mode=True, + persistence={"id": "my-awesome-browser-for-user-1234"}, + profile={ + "id": "id", + "name": "name", + "save_changes": True, + }, + proxy_id="proxy_id", + stealth=True, + timeout_seconds=10, + viewport={ + "height": 800, + "width": 1280, + "refresh_rate": 60, + }, + ) + assert_matches_type(BrowserCreateResponse, browser, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_create(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.with_raw_response.create() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser = await response.parse() + assert_matches_type(BrowserCreateResponse, browser, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_create(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.with_streaming_response.create() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser = await response.parse() + assert_matches_type(BrowserCreateResponse, browser, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_retrieve(self, async_client: AsyncKernel) -> None: + browser = await async_client.browsers.retrieve( + "htzv5orfit78e1m2biiifpbv", + ) + assert_matches_type(BrowserRetrieveResponse, browser, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.with_raw_response.retrieve( + "htzv5orfit78e1m2biiifpbv", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser = await response.parse() + assert_matches_type(BrowserRetrieveResponse, browser, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.with_streaming_response.retrieve( + "htzv5orfit78e1m2biiifpbv", + ) as response: + assert not response.is_closed + assert 
response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser = await response.parse() + assert_matches_type(BrowserRetrieveResponse, browser, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_retrieve(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list(self, async_client: AsyncKernel) -> None: + browser = await async_client.browsers.list() + assert_matches_type(AsyncOffsetPagination[BrowserListResponse], browser, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list_with_all_params(self, async_client: AsyncKernel) -> None: + browser = await async_client.browsers.list( + include_deleted=True, + limit=1, + offset=0, + ) + assert_matches_type(AsyncOffsetPagination[BrowserListResponse], browser, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_list(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser = await response.parse() + assert_matches_type(AsyncOffsetPagination[BrowserListResponse], browser, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_list(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser = await response.parse() + assert_matches_type(AsyncOffsetPagination[BrowserListResponse], browser, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_delete(self, async_client: AsyncKernel) -> None: + with pytest.warns(DeprecationWarning): + browser = await async_client.browsers.delete( + persistent_id="persistent_id", + ) + + assert browser is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_delete(self, async_client: AsyncKernel) -> None: + with pytest.warns(DeprecationWarning): + response = await async_client.browsers.with_raw_response.delete( + persistent_id="persistent_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser = await response.parse() + assert browser is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_delete(self, async_client: AsyncKernel) -> None: + with pytest.warns(DeprecationWarning): + async with async_client.browsers.with_streaming_response.delete( + persistent_id="persistent_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser = await response.parse() + assert browser is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def 
test_method_delete_by_id(self, async_client: AsyncKernel) -> None: + browser = await async_client.browsers.delete_by_id( + "htzv5orfit78e1m2biiifpbv", + ) + assert browser is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_delete_by_id(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.with_raw_response.delete_by_id( + "htzv5orfit78e1m2biiifpbv", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser = await response.parse() + assert browser is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_delete_by_id(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.with_streaming_response.delete_by_id( + "htzv5orfit78e1m2biiifpbv", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser = await response.parse() + assert browser is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_delete_by_id(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.with_raw_response.delete_by_id( + "", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_load_extensions(self, async_client: AsyncKernel) -> None: + browser = await async_client.browsers.load_extensions( + id="id", + extensions=[ + { + "name": "name", + "zip_file": b"raw file contents", + } + ], + ) + assert browser is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_load_extensions(self, async_client: AsyncKernel) -> None: + response = await async_client.browsers.with_raw_response.load_extensions( + id="id", + extensions=[ + { + "name": "name", + "zip_file": b"raw file contents", + } + ], + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + browser = await response.parse() + assert browser is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_load_extensions(self, async_client: AsyncKernel) -> None: + async with async_client.browsers.with_streaming_response.load_extensions( + id="id", + extensions=[ + { + "name": "name", + "zip_file": b"raw file contents", + } + ], + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + browser = await response.parse() + assert browser is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_load_extensions(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.browsers.with_raw_response.load_extensions( + id="", + extensions=[ + { + "name": "name", + "zip_file": b"raw file contents", + } + ], + ) diff --git a/tests/api_resources/test_credentials.py b/tests/api_resources/test_credentials.py new file mode 100644 index 0000000..b609868 --- /dev/null +++ b/tests/api_resources/test_credentials.py @@ -0,0 +1,598 @@ +# File generated from our OpenAPI spec by Stainless. 
See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from kernel import Kernel, AsyncKernel +from tests.utils import assert_matches_type +from kernel.types import ( + Credential, + CredentialTotpCodeResponse, +) +from kernel.pagination import SyncOffsetPagination, AsyncOffsetPagination + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestCredentials: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_create(self, client: Kernel) -> None: + credential = client.credentials.create( + domain="netflix.com", + name="my-netflix-login", + values={ + "username": "user@example.com", + "password": "mysecretpassword", + }, + ) + assert_matches_type(Credential, credential, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_create_with_all_params(self, client: Kernel) -> None: + credential = client.credentials.create( + domain="netflix.com", + name="my-netflix-login", + values={ + "username": "user@example.com", + "password": "mysecretpassword", + }, + sso_provider="google", + totp_secret="JBSWY3DPEHPK3PXP", + ) + assert_matches_type(Credential, credential, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_create(self, client: Kernel) -> None: + response = client.credentials.with_raw_response.create( + domain="netflix.com", + name="my-netflix-login", + values={ + "username": "user@example.com", + "password": "mysecretpassword", + }, + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + credential = response.parse() + assert_matches_type(Credential, credential, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_create(self, client: Kernel) -> None: + with client.credentials.with_streaming_response.create( + domain="netflix.com", + name="my-netflix-login", + values={ + "username": "user@example.com", + "password": "mysecretpassword", + }, + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + credential = response.parse() + assert_matches_type(Credential, credential, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_retrieve(self, client: Kernel) -> None: + credential = client.credentials.retrieve( + "id_or_name", + ) + assert_matches_type(Credential, credential, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_retrieve(self, client: Kernel) -> None: + response = client.credentials.with_raw_response.retrieve( + "id_or_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + credential = response.parse() + assert_matches_type(Credential, credential, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_retrieve(self, client: Kernel) -> None: + with client.credentials.with_streaming_response.retrieve( + "id_or_name", + ) as response: + assert not response.is_closed + assert 
response.http_request.headers.get("X-Stainless-Lang") == "python" + + credential = response.parse() + assert_matches_type(Credential, credential, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_retrieve(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + client.credentials.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_update(self, client: Kernel) -> None: + credential = client.credentials.update( + id_or_name="id_or_name", + ) + assert_matches_type(Credential, credential, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_update_with_all_params(self, client: Kernel) -> None: + credential = client.credentials.update( + id_or_name="id_or_name", + name="my-updated-login", + sso_provider="google", + totp_secret="JBSWY3DPEHPK3PXP", + values={ + "username": "user@example.com", + "password": "newpassword", + }, + ) + assert_matches_type(Credential, credential, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_update(self, client: Kernel) -> None: + response = client.credentials.with_raw_response.update( + id_or_name="id_or_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + credential = response.parse() + assert_matches_type(Credential, credential, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_update(self, client: Kernel) -> None: + with client.credentials.with_streaming_response.update( + id_or_name="id_or_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + credential = response.parse() + assert_matches_type(Credential, credential, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_update(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + client.credentials.with_raw_response.update( + id_or_name="", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list(self, client: Kernel) -> None: + credential = client.credentials.list() + assert_matches_type(SyncOffsetPagination[Credential], credential, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list_with_all_params(self, client: Kernel) -> None: + credential = client.credentials.list( + domain="domain", + limit=100, + offset=0, + ) + assert_matches_type(SyncOffsetPagination[Credential], credential, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_list(self, client: Kernel) -> None: + response = client.credentials.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + credential = response.parse() + assert_matches_type(SyncOffsetPagination[Credential], credential, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def 
test_streaming_response_list(self, client: Kernel) -> None: + with client.credentials.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + credential = response.parse() + assert_matches_type(SyncOffsetPagination[Credential], credential, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_delete(self, client: Kernel) -> None: + credential = client.credentials.delete( + "id_or_name", + ) + assert credential is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_delete(self, client: Kernel) -> None: + response = client.credentials.with_raw_response.delete( + "id_or_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + credential = response.parse() + assert credential is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_delete(self, client: Kernel) -> None: + with client.credentials.with_streaming_response.delete( + "id_or_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + credential = response.parse() + assert credential is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_delete(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + client.credentials.with_raw_response.delete( + "", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_totp_code(self, client: Kernel) -> None: + credential = client.credentials.totp_code( + "id_or_name", + ) + assert_matches_type(CredentialTotpCodeResponse, credential, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_totp_code(self, client: Kernel) -> None: + response = client.credentials.with_raw_response.totp_code( + "id_or_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + credential = response.parse() + assert_matches_type(CredentialTotpCodeResponse, credential, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_totp_code(self, client: Kernel) -> None: + with client.credentials.with_streaming_response.totp_code( + "id_or_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + credential = response.parse() + assert_matches_type(CredentialTotpCodeResponse, credential, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_totp_code(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + client.credentials.with_raw_response.totp_code( + "", + ) + + +class TestAsyncCredentials: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def 
test_method_create(self, async_client: AsyncKernel) -> None: + credential = await async_client.credentials.create( + domain="netflix.com", + name="my-netflix-login", + values={ + "username": "user@example.com", + "password": "mysecretpassword", + }, + ) + assert_matches_type(Credential, credential, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_create_with_all_params(self, async_client: AsyncKernel) -> None: + credential = await async_client.credentials.create( + domain="netflix.com", + name="my-netflix-login", + values={ + "username": "user@example.com", + "password": "mysecretpassword", + }, + sso_provider="google", + totp_secret="JBSWY3DPEHPK3PXP", + ) + assert_matches_type(Credential, credential, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_create(self, async_client: AsyncKernel) -> None: + response = await async_client.credentials.with_raw_response.create( + domain="netflix.com", + name="my-netflix-login", + values={ + "username": "user@example.com", + "password": "mysecretpassword", + }, + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + credential = await response.parse() + assert_matches_type(Credential, credential, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_create(self, async_client: AsyncKernel) -> None: + async with async_client.credentials.with_streaming_response.create( + domain="netflix.com", + name="my-netflix-login", + values={ + "username": "user@example.com", + "password": "mysecretpassword", + }, + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + credential = await response.parse() + assert_matches_type(Credential, credential, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_retrieve(self, async_client: AsyncKernel) -> None: + credential = await async_client.credentials.retrieve( + "id_or_name", + ) + assert_matches_type(Credential, credential, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncKernel) -> None: + response = await async_client.credentials.with_raw_response.retrieve( + "id_or_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + credential = await response.parse() + assert_matches_type(Credential, credential, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncKernel) -> None: + async with async_client.credentials.with_streaming_response.retrieve( + "id_or_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + credential = await response.parse() + assert_matches_type(Credential, credential, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_retrieve(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + await 
async_client.credentials.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_update(self, async_client: AsyncKernel) -> None: + credential = await async_client.credentials.update( + id_or_name="id_or_name", + ) + assert_matches_type(Credential, credential, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_update_with_all_params(self, async_client: AsyncKernel) -> None: + credential = await async_client.credentials.update( + id_or_name="id_or_name", + name="my-updated-login", + sso_provider="google", + totp_secret="JBSWY3DPEHPK3PXP", + values={ + "username": "user@example.com", + "password": "newpassword", + }, + ) + assert_matches_type(Credential, credential, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_update(self, async_client: AsyncKernel) -> None: + response = await async_client.credentials.with_raw_response.update( + id_or_name="id_or_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + credential = await response.parse() + assert_matches_type(Credential, credential, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_update(self, async_client: AsyncKernel) -> None: + async with async_client.credentials.with_streaming_response.update( + id_or_name="id_or_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + credential = await response.parse() + assert_matches_type(Credential, credential, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_update(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + await async_client.credentials.with_raw_response.update( + id_or_name="", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list(self, async_client: AsyncKernel) -> None: + credential = await async_client.credentials.list() + assert_matches_type(AsyncOffsetPagination[Credential], credential, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list_with_all_params(self, async_client: AsyncKernel) -> None: + credential = await async_client.credentials.list( + domain="domain", + limit=100, + offset=0, + ) + assert_matches_type(AsyncOffsetPagination[Credential], credential, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_list(self, async_client: AsyncKernel) -> None: + response = await async_client.credentials.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + credential = await response.parse() + assert_matches_type(AsyncOffsetPagination[Credential], credential, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_list(self, async_client: AsyncKernel) -> None: + async with async_client.credentials.with_streaming_response.list() as response: + assert not response.is_closed + assert 
response.http_request.headers.get("X-Stainless-Lang") == "python" + + credential = await response.parse() + assert_matches_type(AsyncOffsetPagination[Credential], credential, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_delete(self, async_client: AsyncKernel) -> None: + credential = await async_client.credentials.delete( + "id_or_name", + ) + assert credential is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_delete(self, async_client: AsyncKernel) -> None: + response = await async_client.credentials.with_raw_response.delete( + "id_or_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + credential = await response.parse() + assert credential is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_delete(self, async_client: AsyncKernel) -> None: + async with async_client.credentials.with_streaming_response.delete( + "id_or_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + credential = await response.parse() + assert credential is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_delete(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + await async_client.credentials.with_raw_response.delete( + "", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_totp_code(self, async_client: AsyncKernel) -> None: + credential = await async_client.credentials.totp_code( + "id_or_name", + ) + assert_matches_type(CredentialTotpCodeResponse, credential, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_totp_code(self, async_client: AsyncKernel) -> None: + response = await async_client.credentials.with_raw_response.totp_code( + "id_or_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + credential = await response.parse() + assert_matches_type(CredentialTotpCodeResponse, credential, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_totp_code(self, async_client: AsyncKernel) -> None: + async with async_client.credentials.with_streaming_response.totp_code( + "id_or_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + credential = await response.parse() + assert_matches_type(CredentialTotpCodeResponse, credential, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_totp_code(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + await async_client.credentials.with_raw_response.totp_code( + "", + ) diff --git a/tests/api_resources/test_deployments.py b/tests/api_resources/test_deployments.py new file mode 100644 index 0000000..6c3354e --- /dev/null +++ 
b/tests/api_resources/test_deployments.py @@ -0,0 +1,393 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from kernel import Kernel, AsyncKernel +from tests.utils import assert_matches_type +from kernel.types import ( + DeploymentListResponse, + DeploymentCreateResponse, + DeploymentRetrieveResponse, +) +from kernel.pagination import SyncOffsetPagination, AsyncOffsetPagination + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestDeployments: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_create(self, client: Kernel) -> None: + deployment = client.deployments.create() + assert_matches_type(DeploymentCreateResponse, deployment, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_create_with_all_params(self, client: Kernel) -> None: + deployment = client.deployments.create( + entrypoint_rel_path="src/app.py", + env_vars={"FOO": "bar"}, + file=b"raw file contents", + force=False, + region="aws.us-east-1a", + source={ + "entrypoint": "src/index.ts", + "ref": "main", + "type": "github", + "url": "https://github.com/org/repo", + "auth": { + "token": "ghs_***", + "method": "github_token", + }, + "path": "apps/api", + }, + version="1.0.0", + ) + assert_matches_type(DeploymentCreateResponse, deployment, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_create(self, client: Kernel) -> None: + response = client.deployments.with_raw_response.create() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + deployment = response.parse() + assert_matches_type(DeploymentCreateResponse, deployment, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_create(self, client: Kernel) -> None: + with client.deployments.with_streaming_response.create() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + deployment = response.parse() + assert_matches_type(DeploymentCreateResponse, deployment, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_retrieve(self, client: Kernel) -> None: + deployment = client.deployments.retrieve( + "id", + ) + assert_matches_type(DeploymentRetrieveResponse, deployment, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_retrieve(self, client: Kernel) -> None: + response = client.deployments.with_raw_response.retrieve( + "id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + deployment = response.parse() + assert_matches_type(DeploymentRetrieveResponse, deployment, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_retrieve(self, client: Kernel) -> None: + with client.deployments.with_streaming_response.retrieve( + "id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + 
deployment = response.parse() + assert_matches_type(DeploymentRetrieveResponse, deployment, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_retrieve(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.deployments.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list(self, client: Kernel) -> None: + deployment = client.deployments.list() + assert_matches_type(SyncOffsetPagination[DeploymentListResponse], deployment, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list_with_all_params(self, client: Kernel) -> None: + deployment = client.deployments.list( + app_name="app_name", + limit=1, + offset=0, + ) + assert_matches_type(SyncOffsetPagination[DeploymentListResponse], deployment, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_list(self, client: Kernel) -> None: + response = client.deployments.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + deployment = response.parse() + assert_matches_type(SyncOffsetPagination[DeploymentListResponse], deployment, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_list(self, client: Kernel) -> None: + with client.deployments.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + deployment = response.parse() + assert_matches_type(SyncOffsetPagination[DeploymentListResponse], deployment, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_method_follow(self, client: Kernel) -> None: + deployment_stream = client.deployments.follow( + id="id", + ) + deployment_stream.response.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_method_follow_with_all_params(self, client: Kernel) -> None: + deployment_stream = client.deployments.follow( + id="id", + since="2025-06-20T12:00:00Z", + ) + deployment_stream.response.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_raw_response_follow(self, client: Kernel) -> None: + response = client.deployments.with_raw_response.follow( + id="id", + ) + + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + stream = response.parse() + stream.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_streaming_response_follow(self, client: Kernel) -> None: + with client.deployments.with_streaming_response.follow( + id="id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + stream = response.parse() + stream.close() + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_path_params_follow(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a 
non-empty value for `id` but received ''"): + client.deployments.with_raw_response.follow( + id="", + ) + + +class TestAsyncDeployments: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_create(self, async_client: AsyncKernel) -> None: + deployment = await async_client.deployments.create() + assert_matches_type(DeploymentCreateResponse, deployment, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_create_with_all_params(self, async_client: AsyncKernel) -> None: + deployment = await async_client.deployments.create( + entrypoint_rel_path="src/app.py", + env_vars={"FOO": "bar"}, + file=b"raw file contents", + force=False, + region="aws.us-east-1a", + source={ + "entrypoint": "src/index.ts", + "ref": "main", + "type": "github", + "url": "https://github.com/org/repo", + "auth": { + "token": "ghs_***", + "method": "github_token", + }, + "path": "apps/api", + }, + version="1.0.0", + ) + assert_matches_type(DeploymentCreateResponse, deployment, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_create(self, async_client: AsyncKernel) -> None: + response = await async_client.deployments.with_raw_response.create() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + deployment = await response.parse() + assert_matches_type(DeploymentCreateResponse, deployment, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_create(self, async_client: AsyncKernel) -> None: + async with async_client.deployments.with_streaming_response.create() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + deployment = await response.parse() + assert_matches_type(DeploymentCreateResponse, deployment, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_retrieve(self, async_client: AsyncKernel) -> None: + deployment = await async_client.deployments.retrieve( + "id", + ) + assert_matches_type(DeploymentRetrieveResponse, deployment, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncKernel) -> None: + response = await async_client.deployments.with_raw_response.retrieve( + "id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + deployment = await response.parse() + assert_matches_type(DeploymentRetrieveResponse, deployment, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncKernel) -> None: + async with async_client.deployments.with_streaming_response.retrieve( + "id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + deployment = await response.parse() + assert_matches_type(DeploymentRetrieveResponse, deployment, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are 
disabled") + @parametrize + async def test_path_params_retrieve(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.deployments.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list(self, async_client: AsyncKernel) -> None: + deployment = await async_client.deployments.list() + assert_matches_type(AsyncOffsetPagination[DeploymentListResponse], deployment, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list_with_all_params(self, async_client: AsyncKernel) -> None: + deployment = await async_client.deployments.list( + app_name="app_name", + limit=1, + offset=0, + ) + assert_matches_type(AsyncOffsetPagination[DeploymentListResponse], deployment, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_list(self, async_client: AsyncKernel) -> None: + response = await async_client.deployments.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + deployment = await response.parse() + assert_matches_type(AsyncOffsetPagination[DeploymentListResponse], deployment, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_list(self, async_client: AsyncKernel) -> None: + async with async_client.deployments.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + deployment = await response.parse() + assert_matches_type(AsyncOffsetPagination[DeploymentListResponse], deployment, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_method_follow(self, async_client: AsyncKernel) -> None: + deployment_stream = await async_client.deployments.follow( + id="id", + ) + await deployment_stream.response.aclose() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_method_follow_with_all_params(self, async_client: AsyncKernel) -> None: + deployment_stream = await async_client.deployments.follow( + id="id", + since="2025-06-20T12:00:00Z", + ) + await deployment_stream.response.aclose() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_raw_response_follow(self, async_client: AsyncKernel) -> None: + response = await async_client.deployments.with_raw_response.follow( + id="id", + ) + + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + stream = await response.parse() + await stream.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_streaming_response_follow(self, async_client: AsyncKernel) -> None: + async with async_client.deployments.with_streaming_response.follow( + id="id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + stream = await response.parse() + await stream.close() + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async 
def test_path_params_follow(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.deployments.with_raw_response.follow( + id="", + ) diff --git a/tests/api_resources/test_extensions.py b/tests/api_resources/test_extensions.py new file mode 100644 index 0000000..5d61f32 --- /dev/null +++ b/tests/api_resources/test_extensions.py @@ -0,0 +1,477 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import httpx +import pytest +from respx import MockRouter + +from kernel import Kernel, AsyncKernel +from tests.utils import assert_matches_type +from kernel.types import ( + ExtensionListResponse, + ExtensionUploadResponse, +) +from kernel._response import ( + BinaryAPIResponse, + AsyncBinaryAPIResponse, + StreamedBinaryAPIResponse, + AsyncStreamedBinaryAPIResponse, +) + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestExtensions: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list(self, client: Kernel) -> None: + extension = client.extensions.list() + assert_matches_type(ExtensionListResponse, extension, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_list(self, client: Kernel) -> None: + response = client.extensions.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + extension = response.parse() + assert_matches_type(ExtensionListResponse, extension, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_list(self, client: Kernel) -> None: + with client.extensions.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + extension = response.parse() + assert_matches_type(ExtensionListResponse, extension, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_delete(self, client: Kernel) -> None: + extension = client.extensions.delete( + "id_or_name", + ) + assert extension is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_delete(self, client: Kernel) -> None: + response = client.extensions.with_raw_response.delete( + "id_or_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + extension = response.parse() + assert extension is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_delete(self, client: Kernel) -> None: + with client.extensions.with_streaming_response.delete( + "id_or_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + extension = response.parse() + assert extension is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_delete(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a 
non-empty value for `id_or_name` but received ''"): + client.extensions.with_raw_response.delete( + "", + ) + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_method_download(self, client: Kernel, respx_mock: MockRouter) -> None: + respx_mock.get("/extensions/id_or_name").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + extension = client.extensions.download( + "id_or_name", + ) + assert extension.is_closed + assert extension.json() == {"foo": "bar"} + assert cast(Any, extension.is_closed) is True + assert isinstance(extension, BinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_raw_response_download(self, client: Kernel, respx_mock: MockRouter) -> None: + respx_mock.get("/extensions/id_or_name").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + extension = client.extensions.with_raw_response.download( + "id_or_name", + ) + + assert extension.is_closed is True + assert extension.http_request.headers.get("X-Stainless-Lang") == "python" + assert extension.json() == {"foo": "bar"} + assert isinstance(extension, BinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_streaming_response_download(self, client: Kernel, respx_mock: MockRouter) -> None: + respx_mock.get("/extensions/id_or_name").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + with client.extensions.with_streaming_response.download( + "id_or_name", + ) as extension: + assert not extension.is_closed + assert extension.http_request.headers.get("X-Stainless-Lang") == "python" + + assert extension.json() == {"foo": "bar"} + assert cast(Any, extension.is_closed) is True + assert isinstance(extension, StreamedBinaryAPIResponse) + + assert cast(Any, extension.is_closed) is True + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_path_params_download(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + client.extensions.with_raw_response.download( + "", + ) + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_method_download_from_chrome_store(self, client: Kernel, respx_mock: MockRouter) -> None: + respx_mock.get("/extensions/from_chrome_store").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + extension = client.extensions.download_from_chrome_store( + url="url", + ) + assert extension.is_closed + assert extension.json() == {"foo": "bar"} + assert cast(Any, extension.is_closed) is True + assert isinstance(extension, BinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_method_download_from_chrome_store_with_all_params(self, client: Kernel, respx_mock: MockRouter) -> None: + respx_mock.get("/extensions/from_chrome_store").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + extension = client.extensions.download_from_chrome_store( + url="url", + os="win", + ) + assert extension.is_closed + assert extension.json() == {"foo": "bar"} + assert cast(Any, extension.is_closed) is True + assert isinstance(extension, BinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_raw_response_download_from_chrome_store(self, client: Kernel, respx_mock: MockRouter) -> None: + respx_mock.get("/extensions/from_chrome_store").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + extension = client.extensions.with_raw_response.download_from_chrome_store( + url="url", + ) + + assert extension.is_closed is True + assert 
extension.http_request.headers.get("X-Stainless-Lang") == "python" + assert extension.json() == {"foo": "bar"} + assert isinstance(extension, BinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_streaming_response_download_from_chrome_store(self, client: Kernel, respx_mock: MockRouter) -> None: + respx_mock.get("/extensions/from_chrome_store").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + with client.extensions.with_streaming_response.download_from_chrome_store( + url="url", + ) as extension: + assert not extension.is_closed + assert extension.http_request.headers.get("X-Stainless-Lang") == "python" + + assert extension.json() == {"foo": "bar"} + assert cast(Any, extension.is_closed) is True + assert isinstance(extension, StreamedBinaryAPIResponse) + + assert cast(Any, extension.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_upload(self, client: Kernel) -> None: + extension = client.extensions.upload( + file=b"raw file contents", + ) + assert_matches_type(ExtensionUploadResponse, extension, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_upload_with_all_params(self, client: Kernel) -> None: + extension = client.extensions.upload( + file=b"raw file contents", + name="name", + ) + assert_matches_type(ExtensionUploadResponse, extension, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_upload(self, client: Kernel) -> None: + response = client.extensions.with_raw_response.upload( + file=b"raw file contents", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + extension = response.parse() + assert_matches_type(ExtensionUploadResponse, extension, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_upload(self, client: Kernel) -> None: + with client.extensions.with_streaming_response.upload( + file=b"raw file contents", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + extension = response.parse() + assert_matches_type(ExtensionUploadResponse, extension, path=["response"]) + + assert cast(Any, response.is_closed) is True + + +class TestAsyncExtensions: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list(self, async_client: AsyncKernel) -> None: + extension = await async_client.extensions.list() + assert_matches_type(ExtensionListResponse, extension, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_list(self, async_client: AsyncKernel) -> None: + response = await async_client.extensions.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + extension = await response.parse() + assert_matches_type(ExtensionListResponse, extension, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_list(self, async_client: AsyncKernel) -> None: + async with async_client.extensions.with_streaming_response.list() as response: + assert not 
response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + extension = await response.parse() + assert_matches_type(ExtensionListResponse, extension, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_delete(self, async_client: AsyncKernel) -> None: + extension = await async_client.extensions.delete( + "id_or_name", + ) + assert extension is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_delete(self, async_client: AsyncKernel) -> None: + response = await async_client.extensions.with_raw_response.delete( + "id_or_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + extension = await response.parse() + assert extension is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_delete(self, async_client: AsyncKernel) -> None: + async with async_client.extensions.with_streaming_response.delete( + "id_or_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + extension = await response.parse() + assert extension is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_delete(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + await async_client.extensions.with_raw_response.delete( + "", + ) + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def test_method_download(self, async_client: AsyncKernel, respx_mock: MockRouter) -> None: + respx_mock.get("/extensions/id_or_name").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + extension = await async_client.extensions.download( + "id_or_name", + ) + assert extension.is_closed + assert await extension.json() == {"foo": "bar"} + assert cast(Any, extension.is_closed) is True + assert isinstance(extension, AsyncBinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def test_raw_response_download(self, async_client: AsyncKernel, respx_mock: MockRouter) -> None: + respx_mock.get("/extensions/id_or_name").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + extension = await async_client.extensions.with_raw_response.download( + "id_or_name", + ) + + assert extension.is_closed is True + assert extension.http_request.headers.get("X-Stainless-Lang") == "python" + assert await extension.json() == {"foo": "bar"} + assert isinstance(extension, AsyncBinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def test_streaming_response_download(self, async_client: AsyncKernel, respx_mock: MockRouter) -> None: + respx_mock.get("/extensions/id_or_name").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + async with async_client.extensions.with_streaming_response.download( + "id_or_name", + ) as extension: + assert not extension.is_closed + assert extension.http_request.headers.get("X-Stainless-Lang") == "python" + + assert await extension.json() == {"foo": "bar"} + assert cast(Any, extension.is_closed) is True + assert isinstance(extension, AsyncStreamedBinaryAPIResponse) + + assert cast(Any, extension.is_closed) is True + + @parametrize + 
@pytest.mark.respx(base_url=base_url) + async def test_path_params_download(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + await async_client.extensions.with_raw_response.download( + "", + ) + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def test_method_download_from_chrome_store(self, async_client: AsyncKernel, respx_mock: MockRouter) -> None: + respx_mock.get("/extensions/from_chrome_store").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + extension = await async_client.extensions.download_from_chrome_store( + url="url", + ) + assert extension.is_closed + assert await extension.json() == {"foo": "bar"} + assert cast(Any, extension.is_closed) is True + assert isinstance(extension, AsyncBinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def test_method_download_from_chrome_store_with_all_params( + self, async_client: AsyncKernel, respx_mock: MockRouter + ) -> None: + respx_mock.get("/extensions/from_chrome_store").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + extension = await async_client.extensions.download_from_chrome_store( + url="url", + os="win", + ) + assert extension.is_closed + assert await extension.json() == {"foo": "bar"} + assert cast(Any, extension.is_closed) is True + assert isinstance(extension, AsyncBinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def test_raw_response_download_from_chrome_store( + self, async_client: AsyncKernel, respx_mock: MockRouter + ) -> None: + respx_mock.get("/extensions/from_chrome_store").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + extension = await async_client.extensions.with_raw_response.download_from_chrome_store( + url="url", + ) + + assert extension.is_closed is True + assert extension.http_request.headers.get("X-Stainless-Lang") == "python" + assert await extension.json() == {"foo": "bar"} + assert isinstance(extension, AsyncBinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def test_streaming_response_download_from_chrome_store( + self, async_client: AsyncKernel, respx_mock: MockRouter + ) -> None: + respx_mock.get("/extensions/from_chrome_store").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + async with async_client.extensions.with_streaming_response.download_from_chrome_store( + url="url", + ) as extension: + assert not extension.is_closed + assert extension.http_request.headers.get("X-Stainless-Lang") == "python" + + assert await extension.json() == {"foo": "bar"} + assert cast(Any, extension.is_closed) is True + assert isinstance(extension, AsyncStreamedBinaryAPIResponse) + + assert cast(Any, extension.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_upload(self, async_client: AsyncKernel) -> None: + extension = await async_client.extensions.upload( + file=b"raw file contents", + ) + assert_matches_type(ExtensionUploadResponse, extension, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_upload_with_all_params(self, async_client: AsyncKernel) -> None: + extension = await async_client.extensions.upload( + file=b"raw file contents", + name="name", + ) + assert_matches_type(ExtensionUploadResponse, extension, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def 
test_raw_response_upload(self, async_client: AsyncKernel) -> None: + response = await async_client.extensions.with_raw_response.upload( + file=b"raw file contents", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + extension = await response.parse() + assert_matches_type(ExtensionUploadResponse, extension, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_upload(self, async_client: AsyncKernel) -> None: + async with async_client.extensions.with_streaming_response.upload( + file=b"raw file contents", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + extension = await response.parse() + assert_matches_type(ExtensionUploadResponse, extension, path=["response"]) + + assert cast(Any, response.is_closed) is True diff --git a/tests/api_resources/test_invocations.py b/tests/api_resources/test_invocations.py new file mode 100644 index 0000000..40c0545 --- /dev/null +++ b/tests/api_resources/test_invocations.py @@ -0,0 +1,602 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from kernel import Kernel, AsyncKernel +from tests.utils import assert_matches_type +from kernel.types import ( + InvocationListResponse, + InvocationCreateResponse, + InvocationUpdateResponse, + InvocationRetrieveResponse, +) +from kernel.pagination import SyncOffsetPagination, AsyncOffsetPagination + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestInvocations: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_create(self, client: Kernel) -> None: + invocation = client.invocations.create( + action_name="analyze", + app_name="my-app", + version="1.0.0", + ) + assert_matches_type(InvocationCreateResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_create_with_all_params(self, client: Kernel) -> None: + invocation = client.invocations.create( + action_name="analyze", + app_name="my-app", + version="1.0.0", + async_=True, + async_timeout_seconds=600, + payload='{"data":"example input"}', + ) + assert_matches_type(InvocationCreateResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_create(self, client: Kernel) -> None: + response = client.invocations.with_raw_response.create( + action_name="analyze", + app_name="my-app", + version="1.0.0", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + invocation = response.parse() + assert_matches_type(InvocationCreateResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_create(self, client: Kernel) -> None: + with client.invocations.with_streaming_response.create( + action_name="analyze", + app_name="my-app", + version="1.0.0", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + invocation = response.parse() + assert_matches_type(InvocationCreateResponse, invocation, 
path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_retrieve(self, client: Kernel) -> None: + invocation = client.invocations.retrieve( + "rr33xuugxj9h0bkf1rdt2bet", + ) + assert_matches_type(InvocationRetrieveResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_retrieve(self, client: Kernel) -> None: + response = client.invocations.with_raw_response.retrieve( + "rr33xuugxj9h0bkf1rdt2bet", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + invocation = response.parse() + assert_matches_type(InvocationRetrieveResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_retrieve(self, client: Kernel) -> None: + with client.invocations.with_streaming_response.retrieve( + "rr33xuugxj9h0bkf1rdt2bet", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + invocation = response.parse() + assert_matches_type(InvocationRetrieveResponse, invocation, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_retrieve(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.invocations.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_update(self, client: Kernel) -> None: + invocation = client.invocations.update( + id="id", + status="succeeded", + ) + assert_matches_type(InvocationUpdateResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_update_with_all_params(self, client: Kernel) -> None: + invocation = client.invocations.update( + id="id", + status="succeeded", + output="output", + ) + assert_matches_type(InvocationUpdateResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_update(self, client: Kernel) -> None: + response = client.invocations.with_raw_response.update( + id="id", + status="succeeded", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + invocation = response.parse() + assert_matches_type(InvocationUpdateResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_update(self, client: Kernel) -> None: + with client.invocations.with_streaming_response.update( + id="id", + status="succeeded", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + invocation = response.parse() + assert_matches_type(InvocationUpdateResponse, invocation, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_update(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.invocations.with_raw_response.update( + id="", + status="succeeded", + ) + + @pytest.mark.skip(reason="Prism tests 
are disabled") + @parametrize + def test_method_list(self, client: Kernel) -> None: + invocation = client.invocations.list() + assert_matches_type(SyncOffsetPagination[InvocationListResponse], invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list_with_all_params(self, client: Kernel) -> None: + invocation = client.invocations.list( + action_name="action_name", + app_name="app_name", + deployment_id="deployment_id", + limit=1, + offset=0, + since="2025-06-20T12:00:00Z", + status="queued", + version="version", + ) + assert_matches_type(SyncOffsetPagination[InvocationListResponse], invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_list(self, client: Kernel) -> None: + response = client.invocations.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + invocation = response.parse() + assert_matches_type(SyncOffsetPagination[InvocationListResponse], invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_list(self, client: Kernel) -> None: + with client.invocations.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + invocation = response.parse() + assert_matches_type(SyncOffsetPagination[InvocationListResponse], invocation, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_delete_browsers(self, client: Kernel) -> None: + invocation = client.invocations.delete_browsers( + "id", + ) + assert invocation is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_delete_browsers(self, client: Kernel) -> None: + response = client.invocations.with_raw_response.delete_browsers( + "id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + invocation = response.parse() + assert invocation is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_delete_browsers(self, client: Kernel) -> None: + with client.invocations.with_streaming_response.delete_browsers( + "id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + invocation = response.parse() + assert invocation is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_delete_browsers(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.invocations.with_raw_response.delete_browsers( + "", + ) + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_method_follow(self, client: Kernel) -> None: + invocation_stream = client.invocations.follow( + id="id", + ) + invocation_stream.response.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_method_follow_with_all_params(self, client: Kernel) -> None: + invocation_stream = client.invocations.follow( + id="id", + since="2025-06-20T12:00:00Z", + ) + 
invocation_stream.response.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_raw_response_follow(self, client: Kernel) -> None: + response = client.invocations.with_raw_response.follow( + id="id", + ) + + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + stream = response.parse() + stream.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_streaming_response_follow(self, client: Kernel) -> None: + with client.invocations.with_streaming_response.follow( + id="id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + stream = response.parse() + stream.close() + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + def test_path_params_follow(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.invocations.with_raw_response.follow( + id="", + ) + + +class TestAsyncInvocations: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_create(self, async_client: AsyncKernel) -> None: + invocation = await async_client.invocations.create( + action_name="analyze", + app_name="my-app", + version="1.0.0", + ) + assert_matches_type(InvocationCreateResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_create_with_all_params(self, async_client: AsyncKernel) -> None: + invocation = await async_client.invocations.create( + action_name="analyze", + app_name="my-app", + version="1.0.0", + async_=True, + async_timeout_seconds=600, + payload='{"data":"example input"}', + ) + assert_matches_type(InvocationCreateResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_create(self, async_client: AsyncKernel) -> None: + response = await async_client.invocations.with_raw_response.create( + action_name="analyze", + app_name="my-app", + version="1.0.0", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + invocation = await response.parse() + assert_matches_type(InvocationCreateResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_create(self, async_client: AsyncKernel) -> None: + async with async_client.invocations.with_streaming_response.create( + action_name="analyze", + app_name="my-app", + version="1.0.0", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + invocation = await response.parse() + assert_matches_type(InvocationCreateResponse, invocation, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_retrieve(self, async_client: AsyncKernel) -> None: + invocation = await async_client.invocations.retrieve( + "rr33xuugxj9h0bkf1rdt2bet", + ) + assert_matches_type(InvocationRetrieveResponse, invocation, 
path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncKernel) -> None: + response = await async_client.invocations.with_raw_response.retrieve( + "rr33xuugxj9h0bkf1rdt2bet", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + invocation = await response.parse() + assert_matches_type(InvocationRetrieveResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncKernel) -> None: + async with async_client.invocations.with_streaming_response.retrieve( + "rr33xuugxj9h0bkf1rdt2bet", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + invocation = await response.parse() + assert_matches_type(InvocationRetrieveResponse, invocation, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_retrieve(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.invocations.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_update(self, async_client: AsyncKernel) -> None: + invocation = await async_client.invocations.update( + id="id", + status="succeeded", + ) + assert_matches_type(InvocationUpdateResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_update_with_all_params(self, async_client: AsyncKernel) -> None: + invocation = await async_client.invocations.update( + id="id", + status="succeeded", + output="output", + ) + assert_matches_type(InvocationUpdateResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_update(self, async_client: AsyncKernel) -> None: + response = await async_client.invocations.with_raw_response.update( + id="id", + status="succeeded", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + invocation = await response.parse() + assert_matches_type(InvocationUpdateResponse, invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_update(self, async_client: AsyncKernel) -> None: + async with async_client.invocations.with_streaming_response.update( + id="id", + status="succeeded", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + invocation = await response.parse() + assert_matches_type(InvocationUpdateResponse, invocation, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_update(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.invocations.with_raw_response.update( + id="", + status="succeeded", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list(self, async_client: 
AsyncKernel) -> None: + invocation = await async_client.invocations.list() + assert_matches_type(AsyncOffsetPagination[InvocationListResponse], invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list_with_all_params(self, async_client: AsyncKernel) -> None: + invocation = await async_client.invocations.list( + action_name="action_name", + app_name="app_name", + deployment_id="deployment_id", + limit=1, + offset=0, + since="2025-06-20T12:00:00Z", + status="queued", + version="version", + ) + assert_matches_type(AsyncOffsetPagination[InvocationListResponse], invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_list(self, async_client: AsyncKernel) -> None: + response = await async_client.invocations.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + invocation = await response.parse() + assert_matches_type(AsyncOffsetPagination[InvocationListResponse], invocation, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_list(self, async_client: AsyncKernel) -> None: + async with async_client.invocations.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + invocation = await response.parse() + assert_matches_type(AsyncOffsetPagination[InvocationListResponse], invocation, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_delete_browsers(self, async_client: AsyncKernel) -> None: + invocation = await async_client.invocations.delete_browsers( + "id", + ) + assert invocation is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_delete_browsers(self, async_client: AsyncKernel) -> None: + response = await async_client.invocations.with_raw_response.delete_browsers( + "id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + invocation = await response.parse() + assert invocation is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_delete_browsers(self, async_client: AsyncKernel) -> None: + async with async_client.invocations.with_streaming_response.delete_browsers( + "id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + invocation = await response.parse() + assert invocation is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_delete_browsers(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.invocations.with_raw_response.delete_browsers( + "", + ) + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_method_follow(self, async_client: AsyncKernel) -> None: + invocation_stream = await async_client.invocations.follow( + id="id", + ) + await invocation_stream.response.aclose() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream 
responses") + @parametrize + async def test_method_follow_with_all_params(self, async_client: AsyncKernel) -> None: + invocation_stream = await async_client.invocations.follow( + id="id", + since="2025-06-20T12:00:00Z", + ) + await invocation_stream.response.aclose() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_raw_response_follow(self, async_client: AsyncKernel) -> None: + response = await async_client.invocations.with_raw_response.follow( + id="id", + ) + + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + stream = await response.parse() + await stream.close() + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_streaming_response_follow(self, async_client: AsyncKernel) -> None: + async with async_client.invocations.with_streaming_response.follow( + id="id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + stream = await response.parse() + await stream.close() + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism doesn't support text/event-stream responses") + @parametrize + async def test_path_params_follow(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.invocations.with_raw_response.follow( + id="", + ) diff --git a/tests/api_resources/test_profiles.py b/tests/api_resources/test_profiles.py new file mode 100644 index 0000000..6c97855 --- /dev/null +++ b/tests/api_resources/test_profiles.py @@ -0,0 +1,428 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +import os +from typing import Any, cast + +import httpx +import pytest +from respx import MockRouter + +from kernel import Kernel, AsyncKernel +from tests.utils import assert_matches_type +from kernel.types import Profile, ProfileListResponse +from kernel._response import ( + BinaryAPIResponse, + AsyncBinaryAPIResponse, + StreamedBinaryAPIResponse, + AsyncStreamedBinaryAPIResponse, +) + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestProfiles: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_create(self, client: Kernel) -> None: + profile = client.profiles.create() + assert_matches_type(Profile, profile, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_create_with_all_params(self, client: Kernel) -> None: + profile = client.profiles.create( + name="name", + ) + assert_matches_type(Profile, profile, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_create(self, client: Kernel) -> None: + response = client.profiles.with_raw_response.create() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + profile = response.parse() + assert_matches_type(Profile, profile, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_create(self, client: Kernel) -> None: + with client.profiles.with_streaming_response.create() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + profile = response.parse() + assert_matches_type(Profile, profile, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_retrieve(self, client: Kernel) -> None: + profile = client.profiles.retrieve( + "id_or_name", + ) + assert_matches_type(Profile, profile, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_retrieve(self, client: Kernel) -> None: + response = client.profiles.with_raw_response.retrieve( + "id_or_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + profile = response.parse() + assert_matches_type(Profile, profile, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_retrieve(self, client: Kernel) -> None: + with client.profiles.with_streaming_response.retrieve( + "id_or_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + profile = response.parse() + assert_matches_type(Profile, profile, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_retrieve(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + client.profiles.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list(self, client: Kernel) -> None: + profile = 
client.profiles.list() + assert_matches_type(ProfileListResponse, profile, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_list(self, client: Kernel) -> None: + response = client.profiles.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + profile = response.parse() + assert_matches_type(ProfileListResponse, profile, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_list(self, client: Kernel) -> None: + with client.profiles.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + profile = response.parse() + assert_matches_type(ProfileListResponse, profile, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_delete(self, client: Kernel) -> None: + profile = client.profiles.delete( + "id_or_name", + ) + assert profile is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_delete(self, client: Kernel) -> None: + response = client.profiles.with_raw_response.delete( + "id_or_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + profile = response.parse() + assert profile is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_delete(self, client: Kernel) -> None: + with client.profiles.with_streaming_response.delete( + "id_or_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + profile = response.parse() + assert profile is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_delete(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + client.profiles.with_raw_response.delete( + "", + ) + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_method_download(self, client: Kernel, respx_mock: MockRouter) -> None: + respx_mock.get("/profiles/id_or_name/download").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + profile = client.profiles.download( + "id_or_name", + ) + assert profile.is_closed + assert profile.json() == {"foo": "bar"} + assert cast(Any, profile.is_closed) is True + assert isinstance(profile, BinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_raw_response_download(self, client: Kernel, respx_mock: MockRouter) -> None: + respx_mock.get("/profiles/id_or_name/download").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + profile = client.profiles.with_raw_response.download( + "id_or_name", + ) + + assert profile.is_closed is True + assert profile.http_request.headers.get("X-Stainless-Lang") == "python" + assert profile.json() == {"foo": "bar"} + assert isinstance(profile, BinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_streaming_response_download(self, client: Kernel, respx_mock: MockRouter) -> None: + respx_mock.get("/profiles/id_or_name/download").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + with 
client.profiles.with_streaming_response.download( + "id_or_name", + ) as profile: + assert not profile.is_closed + assert profile.http_request.headers.get("X-Stainless-Lang") == "python" + + assert profile.json() == {"foo": "bar"} + assert cast(Any, profile.is_closed) is True + assert isinstance(profile, StreamedBinaryAPIResponse) + + assert cast(Any, profile.is_closed) is True + + @parametrize + @pytest.mark.respx(base_url=base_url) + def test_path_params_download(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + client.profiles.with_raw_response.download( + "", + ) + + +class TestAsyncProfiles: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_create(self, async_client: AsyncKernel) -> None: + profile = await async_client.profiles.create() + assert_matches_type(Profile, profile, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_create_with_all_params(self, async_client: AsyncKernel) -> None: + profile = await async_client.profiles.create( + name="name", + ) + assert_matches_type(Profile, profile, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_create(self, async_client: AsyncKernel) -> None: + response = await async_client.profiles.with_raw_response.create() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + profile = await response.parse() + assert_matches_type(Profile, profile, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_create(self, async_client: AsyncKernel) -> None: + async with async_client.profiles.with_streaming_response.create() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + profile = await response.parse() + assert_matches_type(Profile, profile, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_retrieve(self, async_client: AsyncKernel) -> None: + profile = await async_client.profiles.retrieve( + "id_or_name", + ) + assert_matches_type(Profile, profile, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncKernel) -> None: + response = await async_client.profiles.with_raw_response.retrieve( + "id_or_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + profile = await response.parse() + assert_matches_type(Profile, profile, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncKernel) -> None: + async with async_client.profiles.with_streaming_response.retrieve( + "id_or_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + profile = await response.parse() + assert_matches_type(Profile, profile, path=["response"]) + + assert cast(Any, response.is_closed) is True + + 
@pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_retrieve(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + await async_client.profiles.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list(self, async_client: AsyncKernel) -> None: + profile = await async_client.profiles.list() + assert_matches_type(ProfileListResponse, profile, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_list(self, async_client: AsyncKernel) -> None: + response = await async_client.profiles.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + profile = await response.parse() + assert_matches_type(ProfileListResponse, profile, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_list(self, async_client: AsyncKernel) -> None: + async with async_client.profiles.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + profile = await response.parse() + assert_matches_type(ProfileListResponse, profile, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_delete(self, async_client: AsyncKernel) -> None: + profile = await async_client.profiles.delete( + "id_or_name", + ) + assert profile is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_delete(self, async_client: AsyncKernel) -> None: + response = await async_client.profiles.with_raw_response.delete( + "id_or_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + profile = await response.parse() + assert profile is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_delete(self, async_client: AsyncKernel) -> None: + async with async_client.profiles.with_streaming_response.delete( + "id_or_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + profile = await response.parse() + assert profile is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_delete(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + await async_client.profiles.with_raw_response.delete( + "", + ) + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def test_method_download(self, async_client: AsyncKernel, respx_mock: MockRouter) -> None: + respx_mock.get("/profiles/id_or_name/download").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + profile = await async_client.profiles.download( + "id_or_name", + ) + assert profile.is_closed + assert await profile.json() == {"foo": "bar"} + assert cast(Any, profile.is_closed) is True + assert isinstance(profile, AsyncBinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def 
test_raw_response_download(self, async_client: AsyncKernel, respx_mock: MockRouter) -> None: + respx_mock.get("/profiles/id_or_name/download").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + profile = await async_client.profiles.with_raw_response.download( + "id_or_name", + ) + + assert profile.is_closed is True + assert profile.http_request.headers.get("X-Stainless-Lang") == "python" + assert await profile.json() == {"foo": "bar"} + assert isinstance(profile, AsyncBinaryAPIResponse) + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def test_streaming_response_download(self, async_client: AsyncKernel, respx_mock: MockRouter) -> None: + respx_mock.get("/profiles/id_or_name/download").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + async with async_client.profiles.with_streaming_response.download( + "id_or_name", + ) as profile: + assert not profile.is_closed + assert profile.http_request.headers.get("X-Stainless-Lang") == "python" + + assert await profile.json() == {"foo": "bar"} + assert cast(Any, profile.is_closed) is True + assert isinstance(profile, AsyncStreamedBinaryAPIResponse) + + assert cast(Any, profile.is_closed) is True + + @parametrize + @pytest.mark.respx(base_url=base_url) + async def test_path_params_download(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id_or_name` but received ''"): + await async_client.profiles.with_raw_response.download( + "", + ) diff --git a/tests/api_resources/test_proxies.py b/tests/api_resources/test_proxies.py new file mode 100644 index 0000000..ed858e8 --- /dev/null +++ b/tests/api_resources/test_proxies.py @@ -0,0 +1,427 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from kernel import Kernel, AsyncKernel +from tests.utils import assert_matches_type +from kernel.types import ( + ProxyListResponse, + ProxyCheckResponse, + ProxyCreateResponse, + ProxyRetrieveResponse, +) + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestProxies: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_create(self, client: Kernel) -> None: + proxy = client.proxies.create( + type="datacenter", + ) + assert_matches_type(ProxyCreateResponse, proxy, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_create_with_all_params(self, client: Kernel) -> None: + proxy = client.proxies.create( + type="datacenter", + config={"country": "US"}, + name="name", + protocol="http", + ) + assert_matches_type(ProxyCreateResponse, proxy, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_create(self, client: Kernel) -> None: + response = client.proxies.with_raw_response.create( + type="datacenter", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + proxy = response.parse() + assert_matches_type(ProxyCreateResponse, proxy, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_create(self, client: Kernel) -> None: + with client.proxies.with_streaming_response.create( + type="datacenter", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + proxy = response.parse() + assert_matches_type(ProxyCreateResponse, proxy, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_retrieve(self, client: Kernel) -> None: + proxy = client.proxies.retrieve( + "id", + ) + assert_matches_type(ProxyRetrieveResponse, proxy, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_retrieve(self, client: Kernel) -> None: + response = client.proxies.with_raw_response.retrieve( + "id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + proxy = response.parse() + assert_matches_type(ProxyRetrieveResponse, proxy, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_retrieve(self, client: Kernel) -> None: + with client.proxies.with_streaming_response.retrieve( + "id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + proxy = response.parse() + assert_matches_type(ProxyRetrieveResponse, proxy, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_retrieve(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.proxies.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_list(self, client: Kernel) -> 
None: + proxy = client.proxies.list() + assert_matches_type(ProxyListResponse, proxy, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_list(self, client: Kernel) -> None: + response = client.proxies.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + proxy = response.parse() + assert_matches_type(ProxyListResponse, proxy, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_list(self, client: Kernel) -> None: + with client.proxies.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + proxy = response.parse() + assert_matches_type(ProxyListResponse, proxy, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_delete(self, client: Kernel) -> None: + proxy = client.proxies.delete( + "id", + ) + assert proxy is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_delete(self, client: Kernel) -> None: + response = client.proxies.with_raw_response.delete( + "id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + proxy = response.parse() + assert proxy is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_delete(self, client: Kernel) -> None: + with client.proxies.with_streaming_response.delete( + "id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + proxy = response.parse() + assert proxy is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_delete(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + client.proxies.with_raw_response.delete( + "", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_check(self, client: Kernel) -> None: + proxy = client.proxies.check( + "id", + ) + assert_matches_type(ProxyCheckResponse, proxy, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_check(self, client: Kernel) -> None: + response = client.proxies.with_raw_response.check( + "id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + proxy = response.parse() + assert_matches_type(ProxyCheckResponse, proxy, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_check(self, client: Kernel) -> None: + with client.proxies.with_streaming_response.check( + "id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + proxy = response.parse() + assert_matches_type(ProxyCheckResponse, proxy, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_check(self, client: Kernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but 
received ''"): + client.proxies.with_raw_response.check( + "", + ) + + +class TestAsyncProxies: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_create(self, async_client: AsyncKernel) -> None: + proxy = await async_client.proxies.create( + type="datacenter", + ) + assert_matches_type(ProxyCreateResponse, proxy, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_create_with_all_params(self, async_client: AsyncKernel) -> None: + proxy = await async_client.proxies.create( + type="datacenter", + config={"country": "US"}, + name="name", + protocol="http", + ) + assert_matches_type(ProxyCreateResponse, proxy, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_create(self, async_client: AsyncKernel) -> None: + response = await async_client.proxies.with_raw_response.create( + type="datacenter", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + proxy = await response.parse() + assert_matches_type(ProxyCreateResponse, proxy, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_create(self, async_client: AsyncKernel) -> None: + async with async_client.proxies.with_streaming_response.create( + type="datacenter", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + proxy = await response.parse() + assert_matches_type(ProxyCreateResponse, proxy, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_retrieve(self, async_client: AsyncKernel) -> None: + proxy = await async_client.proxies.retrieve( + "id", + ) + assert_matches_type(ProxyRetrieveResponse, proxy, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncKernel) -> None: + response = await async_client.proxies.with_raw_response.retrieve( + "id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + proxy = await response.parse() + assert_matches_type(ProxyRetrieveResponse, proxy, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncKernel) -> None: + async with async_client.proxies.with_streaming_response.retrieve( + "id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + proxy = await response.parse() + assert_matches_type(ProxyRetrieveResponse, proxy, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_retrieve(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.proxies.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_list(self, async_client: 
AsyncKernel) -> None: + proxy = await async_client.proxies.list() + assert_matches_type(ProxyListResponse, proxy, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_list(self, async_client: AsyncKernel) -> None: + response = await async_client.proxies.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + proxy = await response.parse() + assert_matches_type(ProxyListResponse, proxy, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_list(self, async_client: AsyncKernel) -> None: + async with async_client.proxies.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + proxy = await response.parse() + assert_matches_type(ProxyListResponse, proxy, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_delete(self, async_client: AsyncKernel) -> None: + proxy = await async_client.proxies.delete( + "id", + ) + assert proxy is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_delete(self, async_client: AsyncKernel) -> None: + response = await async_client.proxies.with_raw_response.delete( + "id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + proxy = await response.parse() + assert proxy is None + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_delete(self, async_client: AsyncKernel) -> None: + async with async_client.proxies.with_streaming_response.delete( + "id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + proxy = await response.parse() + assert proxy is None + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_delete(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.proxies.with_raw_response.delete( + "", + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_check(self, async_client: AsyncKernel) -> None: + proxy = await async_client.proxies.check( + "id", + ) + assert_matches_type(ProxyCheckResponse, proxy, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_check(self, async_client: AsyncKernel) -> None: + response = await async_client.proxies.with_raw_response.check( + "id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + proxy = await response.parse() + assert_matches_type(ProxyCheckResponse, proxy, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_check(self, async_client: AsyncKernel) -> None: + async with async_client.proxies.with_streaming_response.check( + "id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + proxy = await response.parse() + 
assert_matches_type(ProxyCheckResponse, proxy, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_check(self, async_client: AsyncKernel) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): + await async_client.proxies.with_raw_response.check( + "", + ) diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..c860af0 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,84 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +import logging +from typing import TYPE_CHECKING, Iterator, AsyncIterator + +import httpx +import pytest +from pytest_asyncio import is_async_test + +from kernel import Kernel, AsyncKernel, DefaultAioHttpClient +from kernel._utils import is_dict + +if TYPE_CHECKING: + from _pytest.fixtures import FixtureRequest # pyright: ignore[reportPrivateImportUsage] + +pytest.register_assert_rewrite("tests.utils") + +logging.getLogger("kernel").setLevel(logging.DEBUG) + + +# automatically add `pytest.mark.asyncio()` to all of our async tests +# so we don't have to add that boilerplate everywhere +def pytest_collection_modifyitems(items: list[pytest.Function]) -> None: + pytest_asyncio_tests = (item for item in items if is_async_test(item)) + session_scope_marker = pytest.mark.asyncio(loop_scope="session") + for async_test in pytest_asyncio_tests: + async_test.add_marker(session_scope_marker, append=False) + + # We skip tests that use both the aiohttp client and respx_mock as respx_mock + # doesn't support custom transports. + for item in items: + if "async_client" not in item.fixturenames or "respx_mock" not in item.fixturenames: + continue + + if not hasattr(item, "callspec"): + continue + + async_client_param = item.callspec.params.get("async_client") + if is_dict(async_client_param) and async_client_param.get("http_client") == "aiohttp": + item.add_marker(pytest.mark.skip(reason="aiohttp client is not compatible with respx_mock")) + + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + +api_key = "My API Key" + + +@pytest.fixture(scope="session") +def client(request: FixtureRequest) -> Iterator[Kernel]: + strict = getattr(request, "param", True) + if not isinstance(strict, bool): + raise TypeError(f"Unexpected fixture parameter type {type(strict)}, expected {bool}") + + with Kernel(base_url=base_url, api_key=api_key, _strict_response_validation=strict) as client: + yield client + + +@pytest.fixture(scope="session") +async def async_client(request: FixtureRequest) -> AsyncIterator[AsyncKernel]: + param = getattr(request, "param", True) + + # defaults + strict = True + http_client: None | httpx.AsyncClient = None + + if isinstance(param, bool): + strict = param + elif is_dict(param): + strict = param.get("strict", True) + assert isinstance(strict, bool) + + http_client_type = param.get("http_client", "httpx") + if http_client_type == "aiohttp": + http_client = DefaultAioHttpClient() + else: + raise TypeError(f"Unexpected fixture parameter type {type(param)}, expected bool or dict") + + async with AsyncKernel( + base_url=base_url, api_key=api_key, _strict_response_validation=strict, http_client=http_client + ) as client: + yield client diff --git a/tests/sample_file.txt b/tests/sample_file.txt new file mode 100644 index 0000000..af5626b --- /dev/null +++ b/tests/sample_file.txt 
@@ -0,0 +1 @@ +Hello, world! diff --git a/tests/test_client.py b/tests/test_client.py new file mode 100644 index 0000000..95661e8 --- /dev/null +++ b/tests/test_client.py @@ -0,0 +1,1733 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import gc +import os +import sys +import json +import asyncio +import inspect +import tracemalloc +from typing import Any, Union, cast +from unittest import mock +from typing_extensions import Literal + +import httpx +import pytest +from respx import MockRouter +from pydantic import ValidationError + +from kernel import Kernel, AsyncKernel, APIResponseValidationError +from kernel._types import Omit +from kernel._utils import asyncify +from kernel._models import BaseModel, FinalRequestOptions +from kernel._exceptions import KernelError, APIStatusError, APITimeoutError, APIResponseValidationError +from kernel._base_client import ( + DEFAULT_TIMEOUT, + HTTPX_DEFAULT_TIMEOUT, + BaseClient, + OtherPlatform, + DefaultHttpxClient, + DefaultAsyncHttpxClient, + get_platform, + make_request_options, +) + +from .utils import update_env + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") +api_key = "My API Key" + + +def _get_params(client: BaseClient[Any, Any]) -> dict[str, str]: + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + url = httpx.URL(request.url) + return dict(url.params) + + +def _low_retry_timeout(*_args: Any, **_kwargs: Any) -> float: + return 0.1 + + +def _get_open_connections(client: Kernel | AsyncKernel) -> int: + transport = client._client._transport + assert isinstance(transport, httpx.HTTPTransport) or isinstance(transport, httpx.AsyncHTTPTransport) + + pool = transport._pool + return len(pool._requests) + + +class TestKernel: + @pytest.mark.respx(base_url=base_url) + def test_raw_response(self, respx_mock: MockRouter, client: Kernel) -> None: + respx_mock.post("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + response = client.post("/foo", cast_to=httpx.Response) + assert response.status_code == 200 + assert isinstance(response, httpx.Response) + assert response.json() == {"foo": "bar"} + + @pytest.mark.respx(base_url=base_url) + def test_raw_response_for_binary(self, respx_mock: MockRouter, client: Kernel) -> None: + respx_mock.post("/foo").mock( + return_value=httpx.Response(200, headers={"Content-Type": "application/binary"}, content='{"foo": "bar"}') + ) + + response = client.post("/foo", cast_to=httpx.Response) + assert response.status_code == 200 + assert isinstance(response, httpx.Response) + assert response.json() == {"foo": "bar"} + + def test_copy(self, client: Kernel) -> None: + copied = client.copy() + assert id(copied) != id(client) + + copied = client.copy(api_key="another My API Key") + assert copied.api_key == "another My API Key" + assert client.api_key == "My API Key" + + def test_copy_default_options(self, client: Kernel) -> None: + # options that have a default are overridden correctly + copied = client.copy(max_retries=7) + assert copied.max_retries == 7 + assert client.max_retries == 2 + + copied2 = copied.copy(max_retries=6) + assert copied2.max_retries == 6 + assert copied.max_retries == 7 + + # timeout + assert isinstance(client.timeout, httpx.Timeout) + copied = client.copy(timeout=None) + assert copied.timeout is None + assert isinstance(client.timeout, httpx.Timeout) + + def test_copy_default_headers(self) -> None: + client = Kernel( + base_url=base_url, 
api_key=api_key, _strict_response_validation=True, default_headers={"X-Foo": "bar"} + ) + assert client.default_headers["X-Foo"] == "bar" + + # does not override the already given value when not specified + copied = client.copy() + assert copied.default_headers["X-Foo"] == "bar" + + # merges already given headers + copied = client.copy(default_headers={"X-Bar": "stainless"}) + assert copied.default_headers["X-Foo"] == "bar" + assert copied.default_headers["X-Bar"] == "stainless" + + # uses new values for any already given headers + copied = client.copy(default_headers={"X-Foo": "stainless"}) + assert copied.default_headers["X-Foo"] == "stainless" + + # set_default_headers + + # completely overrides already set values + copied = client.copy(set_default_headers={}) + assert copied.default_headers.get("X-Foo") is None + + copied = client.copy(set_default_headers={"X-Bar": "Robert"}) + assert copied.default_headers["X-Bar"] == "Robert" + + with pytest.raises( + ValueError, + match="`default_headers` and `set_default_headers` arguments are mutually exclusive", + ): + client.copy(set_default_headers={}, default_headers={"X-Foo": "Bar"}) + client.close() + + def test_copy_default_query(self) -> None: + client = Kernel( + base_url=base_url, api_key=api_key, _strict_response_validation=True, default_query={"foo": "bar"} + ) + assert _get_params(client)["foo"] == "bar" + + # does not override the already given value when not specified + copied = client.copy() + assert _get_params(copied)["foo"] == "bar" + + # merges already given params + copied = client.copy(default_query={"bar": "stainless"}) + params = _get_params(copied) + assert params["foo"] == "bar" + assert params["bar"] == "stainless" + + # uses new values for any already given headers + copied = client.copy(default_query={"foo": "stainless"}) + assert _get_params(copied)["foo"] == "stainless" + + # set_default_query + + # completely overrides already set values + copied = client.copy(set_default_query={}) + assert _get_params(copied) == {} + + copied = client.copy(set_default_query={"bar": "Robert"}) + assert _get_params(copied)["bar"] == "Robert" + + with pytest.raises( + ValueError, + # TODO: update + match="`default_query` and `set_default_query` arguments are mutually exclusive", + ): + client.copy(set_default_query={}, default_query={"foo": "Bar"}) + + client.close() + + def test_copy_signature(self, client: Kernel) -> None: + # ensure the same parameters that can be passed to the client are defined in the `.copy()` method + init_signature = inspect.signature( + # mypy doesn't like that we access the `__init__` property. + client.__init__, # type: ignore[misc] + ) + copy_signature = inspect.signature(client.copy) + exclude_params = {"transport", "proxies", "_strict_response_validation"} + + for name in init_signature.parameters.keys(): + if name in exclude_params: + continue + + copy_param = copy_signature.parameters.get(name) + assert copy_param is not None, f"copy() signature is missing the {name} param" + + @pytest.mark.skipif(sys.version_info >= (3, 10), reason="fails because of a memory leak that started from 3.12") + def test_copy_build_request(self, client: Kernel) -> None: + options = FinalRequestOptions(method="get", url="/foo") + + def build_request(options: FinalRequestOptions) -> None: + client_copy = client.copy() + client_copy._build_request(options) + + # ensure that the machinery is warmed up before tracing starts. 
+ build_request(options) + gc.collect() + + tracemalloc.start(1000) + + snapshot_before = tracemalloc.take_snapshot() + + ITERATIONS = 10 + for _ in range(ITERATIONS): + build_request(options) + + gc.collect() + snapshot_after = tracemalloc.take_snapshot() + + tracemalloc.stop() + + def add_leak(leaks: list[tracemalloc.StatisticDiff], diff: tracemalloc.StatisticDiff) -> None: + if diff.count == 0: + # Avoid false positives by considering only leaks (i.e. allocations that persist). + return + + if diff.count % ITERATIONS != 0: + # Avoid false positives by considering only leaks that appear per iteration. + return + + for frame in diff.traceback: + if any( + frame.filename.endswith(fragment) + for fragment in [ + # to_raw_response_wrapper leaks through the @functools.wraps() decorator. + # + # removing the decorator fixes the leak for reasons we don't understand. + "kernel/_legacy_response.py", + "kernel/_response.py", + # pydantic.BaseModel.model_dump || pydantic.BaseModel.dict leak memory for some reason. + "kernel/_compat.py", + # Standard library leaks we don't care about. + "/logging/__init__.py", + ] + ): + return + + leaks.append(diff) + + leaks: list[tracemalloc.StatisticDiff] = [] + for diff in snapshot_after.compare_to(snapshot_before, "traceback"): + add_leak(leaks, diff) + if leaks: + for leak in leaks: + print("MEMORY LEAK:", leak) + for frame in leak.traceback: + print(frame) + raise AssertionError() + + def test_request_timeout(self, client: Kernel) -> None: + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == DEFAULT_TIMEOUT + + request = client._build_request(FinalRequestOptions(method="get", url="/foo", timeout=httpx.Timeout(100.0))) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == httpx.Timeout(100.0) + + def test_client_timeout_option(self) -> None: + client = Kernel(base_url=base_url, api_key=api_key, _strict_response_validation=True, timeout=httpx.Timeout(0)) + + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == httpx.Timeout(0) + + client.close() + + def test_http_client_timeout_option(self) -> None: + # custom timeout given to the httpx client should be used + with httpx.Client(timeout=None) as http_client: + client = Kernel( + base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client + ) + + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == httpx.Timeout(None) + + client.close() + + # no timeout given to the httpx client should not use the httpx default + with httpx.Client() as http_client: + client = Kernel( + base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client + ) + + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == DEFAULT_TIMEOUT + + client.close() + + # explicitly passing the default timeout currently results in it being ignored + with httpx.Client(timeout=HTTPX_DEFAULT_TIMEOUT) as http_client: + client = Kernel( + base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client + ) + + request = 
client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == DEFAULT_TIMEOUT # our default + + client.close() + + async def test_invalid_http_client(self) -> None: + with pytest.raises(TypeError, match="Invalid `http_client` arg"): + async with httpx.AsyncClient() as http_client: + Kernel( + base_url=base_url, + api_key=api_key, + _strict_response_validation=True, + http_client=cast(Any, http_client), + ) + + def test_default_headers_option(self) -> None: + test_client = Kernel( + base_url=base_url, api_key=api_key, _strict_response_validation=True, default_headers={"X-Foo": "bar"} + ) + request = test_client._build_request(FinalRequestOptions(method="get", url="/foo")) + assert request.headers.get("x-foo") == "bar" + assert request.headers.get("x-stainless-lang") == "python" + + test_client2 = Kernel( + base_url=base_url, + api_key=api_key, + _strict_response_validation=True, + default_headers={ + "X-Foo": "stainless", + "X-Stainless-Lang": "my-overriding-header", + }, + ) + request = test_client2._build_request(FinalRequestOptions(method="get", url="/foo")) + assert request.headers.get("x-foo") == "stainless" + assert request.headers.get("x-stainless-lang") == "my-overriding-header" + + test_client.close() + test_client2.close() + + def test_validate_headers(self) -> None: + client = Kernel(base_url=base_url, api_key=api_key, _strict_response_validation=True) + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + assert request.headers.get("Authorization") == f"Bearer {api_key}" + + with pytest.raises(KernelError): + with update_env(**{"KERNEL_API_KEY": Omit()}): + client2 = Kernel(base_url=base_url, api_key=None, _strict_response_validation=True) + _ = client2 + + def test_default_query_option(self) -> None: + client = Kernel( + base_url=base_url, api_key=api_key, _strict_response_validation=True, default_query={"query_param": "bar"} + ) + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + url = httpx.URL(request.url) + assert dict(url.params) == {"query_param": "bar"} + + request = client._build_request( + FinalRequestOptions( + method="get", + url="/foo", + params={"foo": "baz", "query_param": "overridden"}, + ) + ) + url = httpx.URL(request.url) + assert dict(url.params) == {"foo": "baz", "query_param": "overridden"} + + client.close() + + def test_request_extra_json(self, client: Kernel) -> None: + request = client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + json_data={"foo": "bar"}, + extra_json={"baz": False}, + ), + ) + data = json.loads(request.content.decode("utf-8")) + assert data == {"foo": "bar", "baz": False} + + request = client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + extra_json={"baz": False}, + ), + ) + data = json.loads(request.content.decode("utf-8")) + assert data == {"baz": False} + + # `extra_json` takes priority over `json_data` when keys clash + request = client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + json_data={"foo": "bar", "baz": True}, + extra_json={"baz": None}, + ), + ) + data = json.loads(request.content.decode("utf-8")) + assert data == {"foo": "bar", "baz": None} + + def test_request_extra_headers(self, client: Kernel) -> None: + request = client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + **make_request_options(extra_headers={"X-Foo": "Foo"}), + ), + ) + assert 
request.headers.get("X-Foo") == "Foo" + + # `extra_headers` takes priority over `default_headers` when keys clash + request = client.with_options(default_headers={"X-Bar": "true"})._build_request( + FinalRequestOptions( + method="post", + url="/foo", + **make_request_options( + extra_headers={"X-Bar": "false"}, + ), + ), + ) + assert request.headers.get("X-Bar") == "false" + + def test_request_extra_query(self, client: Kernel) -> None: + request = client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + **make_request_options( + extra_query={"my_query_param": "Foo"}, + ), + ), + ) + params = dict(request.url.params) + assert params == {"my_query_param": "Foo"} + + # if both `query` and `extra_query` are given, they are merged + request = client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + **make_request_options( + query={"bar": "1"}, + extra_query={"foo": "2"}, + ), + ), + ) + params = dict(request.url.params) + assert params == {"bar": "1", "foo": "2"} + + # `extra_query` takes priority over `query` when keys clash + request = client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + **make_request_options( + query={"foo": "1"}, + extra_query={"foo": "2"}, + ), + ), + ) + params = dict(request.url.params) + assert params == {"foo": "2"} + + def test_multipart_repeating_array(self, client: Kernel) -> None: + request = client._build_request( + FinalRequestOptions.construct( + method="post", + url="/foo", + headers={"Content-Type": "multipart/form-data; boundary=6b7ba517decee4a450543ea6ae821c82"}, + json_data={"array": ["foo", "bar"]}, + files=[("foo.txt", b"hello world")], + ) + ) + + assert request.read().split(b"\r\n") == [ + b"--6b7ba517decee4a450543ea6ae821c82", + b'Content-Disposition: form-data; name="array[]"', + b"", + b"foo", + b"--6b7ba517decee4a450543ea6ae821c82", + b'Content-Disposition: form-data; name="array[]"', + b"", + b"bar", + b"--6b7ba517decee4a450543ea6ae821c82", + b'Content-Disposition: form-data; name="foo.txt"; filename="upload"', + b"Content-Type: application/octet-stream", + b"", + b"hello world", + b"--6b7ba517decee4a450543ea6ae821c82--", + b"", + ] + + @pytest.mark.respx(base_url=base_url) + def test_basic_union_response(self, respx_mock: MockRouter, client: Kernel) -> None: + class Model1(BaseModel): + name: str + + class Model2(BaseModel): + foo: str + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + response = client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) + assert isinstance(response, Model2) + assert response.foo == "bar" + + @pytest.mark.respx(base_url=base_url) + def test_union_response_different_types(self, respx_mock: MockRouter, client: Kernel) -> None: + """Union of objects with the same field name using a different type""" + + class Model1(BaseModel): + foo: int + + class Model2(BaseModel): + foo: str + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + response = client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) + assert isinstance(response, Model2) + assert response.foo == "bar" + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": 1})) + + response = client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) + assert isinstance(response, Model1) + assert response.foo == 1 + + @pytest.mark.respx(base_url=base_url) + def test_non_application_json_content_type_for_json_data(self, respx_mock: MockRouter, client: Kernel) -> None: + """ + Response that sets 
Content-Type to something other than application/json but returns json data + """ + + class Model(BaseModel): + foo: int + + respx_mock.get("/foo").mock( + return_value=httpx.Response( + 200, + content=json.dumps({"foo": 2}), + headers={"Content-Type": "application/text"}, + ) + ) + + response = client.get("/foo", cast_to=Model) + assert isinstance(response, Model) + assert response.foo == 2 + + def test_base_url_setter(self) -> None: + client = Kernel(base_url="https://example.com/from_init", api_key=api_key, _strict_response_validation=True) + assert client.base_url == "https://example.com/from_init/" + + client.base_url = "https://example.com/from_setter" # type: ignore[assignment] + + assert client.base_url == "https://example.com/from_setter/" + + client.close() + + def test_base_url_env(self) -> None: + with update_env(KERNEL_BASE_URL="http://localhost:5000/from/env"): + client = Kernel(api_key=api_key, _strict_response_validation=True) + assert client.base_url == "http://localhost:5000/from/env/" + + # explicit environment arg requires explicitness + with update_env(KERNEL_BASE_URL="http://localhost:5000/from/env"): + with pytest.raises(ValueError, match=r"you must pass base_url=None"): + Kernel(api_key=api_key, _strict_response_validation=True, environment="production") + + client = Kernel(base_url=None, api_key=api_key, _strict_response_validation=True, environment="production") + assert str(client.base_url).startswith("https://api.onkernel.com/") + + client.close() + + @pytest.mark.parametrize( + "client", + [ + Kernel(base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True), + Kernel( + base_url="http://localhost:5000/custom/path/", + api_key=api_key, + _strict_response_validation=True, + http_client=httpx.Client(), + ), + ], + ids=["standard", "custom http client"], + ) + def test_base_url_trailing_slash(self, client: Kernel) -> None: + request = client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + json_data={"foo": "bar"}, + ), + ) + assert request.url == "http://localhost:5000/custom/path/foo" + client.close() + + @pytest.mark.parametrize( + "client", + [ + Kernel(base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True), + Kernel( + base_url="http://localhost:5000/custom/path/", + api_key=api_key, + _strict_response_validation=True, + http_client=httpx.Client(), + ), + ], + ids=["standard", "custom http client"], + ) + def test_base_url_no_trailing_slash(self, client: Kernel) -> None: + request = client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + json_data={"foo": "bar"}, + ), + ) + assert request.url == "http://localhost:5000/custom/path/foo" + client.close() + + @pytest.mark.parametrize( + "client", + [ + Kernel(base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True), + Kernel( + base_url="http://localhost:5000/custom/path/", + api_key=api_key, + _strict_response_validation=True, + http_client=httpx.Client(), + ), + ], + ids=["standard", "custom http client"], + ) + def test_absolute_request_url(self, client: Kernel) -> None: + request = client._build_request( + FinalRequestOptions( + method="post", + url="https://myapi.com/foo", + json_data={"foo": "bar"}, + ), + ) + assert request.url == "https://myapi.com/foo" + client.close() + + def test_copied_client_does_not_close_http(self) -> None: + test_client = Kernel(base_url=base_url, api_key=api_key, _strict_response_validation=True) + assert not 
test_client.is_closed() + + copied = test_client.copy() + assert copied is not test_client + + del copied + + assert not test_client.is_closed() + + def test_client_context_manager(self) -> None: + test_client = Kernel(base_url=base_url, api_key=api_key, _strict_response_validation=True) + with test_client as c2: + assert c2 is test_client + assert not c2.is_closed() + assert not test_client.is_closed() + assert test_client.is_closed() + + @pytest.mark.respx(base_url=base_url) + def test_client_response_validation_error(self, respx_mock: MockRouter, client: Kernel) -> None: + class Model(BaseModel): + foo: str + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": {"invalid": True}})) + + with pytest.raises(APIResponseValidationError) as exc: + client.get("/foo", cast_to=Model) + + assert isinstance(exc.value.__cause__, ValidationError) + + def test_client_max_retries_validation(self) -> None: + with pytest.raises(TypeError, match=r"max_retries cannot be None"): + Kernel(base_url=base_url, api_key=api_key, _strict_response_validation=True, max_retries=cast(Any, None)) + + @pytest.mark.respx(base_url=base_url) + def test_received_text_for_expected_json(self, respx_mock: MockRouter) -> None: + class Model(BaseModel): + name: str + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, text="my-custom-format")) + + strict_client = Kernel(base_url=base_url, api_key=api_key, _strict_response_validation=True) + + with pytest.raises(APIResponseValidationError): + strict_client.get("/foo", cast_to=Model) + + non_strict_client = Kernel(base_url=base_url, api_key=api_key, _strict_response_validation=False) + + response = non_strict_client.get("/foo", cast_to=Model) + assert isinstance(response, str) # type: ignore[unreachable] + + strict_client.close() + non_strict_client.close() + + @pytest.mark.parametrize( + "remaining_retries,retry_after,timeout", + [ + [3, "20", 20], + [3, "0", 0.5], + [3, "-10", 0.5], + [3, "60", 60], + [3, "61", 0.5], + [3, "Fri, 29 Sep 2023 16:26:57 GMT", 20], + [3, "Fri, 29 Sep 2023 16:26:37 GMT", 0.5], + [3, "Fri, 29 Sep 2023 16:26:27 GMT", 0.5], + [3, "Fri, 29 Sep 2023 16:27:37 GMT", 60], + [3, "Fri, 29 Sep 2023 16:27:38 GMT", 0.5], + [3, "99999999999999999999999999999999999", 0.5], + [3, "Zun, 29 Sep 2023 16:26:27 GMT", 0.5], + [3, "", 0.5], + [2, "", 0.5 * 2.0], + [1, "", 0.5 * 4.0], + [-1100, "", 8], # test large number potentially overflowing + ], + ) + @mock.patch("time.time", mock.MagicMock(return_value=1696004797)) + def test_parse_retry_after_header( + self, remaining_retries: int, retry_after: str, timeout: float, client: Kernel + ) -> None: + headers = httpx.Headers({"retry-after": retry_after}) + options = FinalRequestOptions(method="get", url="/foo", max_retries=3) + calculated = client._calculate_retry_timeout(remaining_retries, options, headers) + assert calculated == pytest.approx(timeout, 0.5 * 0.875) # pyright: ignore[reportUnknownMemberType] + + @mock.patch("kernel._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter, client: Kernel) -> None: + respx_mock.post("/browsers").mock(side_effect=httpx.TimeoutException("Test timeout error")) + + with pytest.raises(APITimeoutError): + client.browsers.with_streaming_response.create().__enter__() + + assert _get_open_connections(client) == 0 + + @mock.patch("kernel._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + 
@pytest.mark.respx(base_url=base_url) + def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter, client: Kernel) -> None: + respx_mock.post("/browsers").mock(return_value=httpx.Response(500)) + + with pytest.raises(APIStatusError): + client.browsers.with_streaming_response.create().__enter__() + assert _get_open_connections(client) == 0 + + @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) + @mock.patch("kernel._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + @pytest.mark.parametrize("failure_mode", ["status", "exception"]) + def test_retries_taken( + self, + client: Kernel, + failures_before_success: int, + failure_mode: Literal["status", "exception"], + respx_mock: MockRouter, + ) -> None: + client = client.with_options(max_retries=4) + + nb_retries = 0 + + def retry_handler(_request: httpx.Request) -> httpx.Response: + nonlocal nb_retries + if nb_retries < failures_before_success: + nb_retries += 1 + if failure_mode == "exception": + raise RuntimeError("oops") + return httpx.Response(500) + return httpx.Response(200) + + respx_mock.post("/browsers").mock(side_effect=retry_handler) + + response = client.browsers.with_raw_response.create() + + assert response.retries_taken == failures_before_success + assert int(response.http_request.headers.get("x-stainless-retry-count")) == failures_before_success + + @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) + @mock.patch("kernel._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + def test_omit_retry_count_header( + self, client: Kernel, failures_before_success: int, respx_mock: MockRouter + ) -> None: + client = client.with_options(max_retries=4) + + nb_retries = 0 + + def retry_handler(_request: httpx.Request) -> httpx.Response: + nonlocal nb_retries + if nb_retries < failures_before_success: + nb_retries += 1 + return httpx.Response(500) + return httpx.Response(200) + + respx_mock.post("/browsers").mock(side_effect=retry_handler) + + response = client.browsers.with_raw_response.create(extra_headers={"x-stainless-retry-count": Omit()}) + + assert len(response.http_request.headers.get_list("x-stainless-retry-count")) == 0 + + @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) + @mock.patch("kernel._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + def test_overwrite_retry_count_header( + self, client: Kernel, failures_before_success: int, respx_mock: MockRouter + ) -> None: + client = client.with_options(max_retries=4) + + nb_retries = 0 + + def retry_handler(_request: httpx.Request) -> httpx.Response: + nonlocal nb_retries + if nb_retries < failures_before_success: + nb_retries += 1 + return httpx.Response(500) + return httpx.Response(200) + + respx_mock.post("/browsers").mock(side_effect=retry_handler) + + response = client.browsers.with_raw_response.create(extra_headers={"x-stainless-retry-count": "42"}) + + assert response.http_request.headers.get("x-stainless-retry-count") == "42" + + def test_proxy_environment_variables(self, monkeypatch: pytest.MonkeyPatch) -> None: + # Test that the proxy environment variables are set correctly + monkeypatch.setenv("HTTPS_PROXY", "https://example.org") + + client = DefaultHttpxClient() + + mounts = tuple(client._mounts.items()) + assert len(mounts) == 1 + assert mounts[0][0].pattern == "https://" + + 
@pytest.mark.filterwarnings("ignore:.*deprecated.*:DeprecationWarning") + def test_default_client_creation(self) -> None: + # Ensure that the client can be initialized without any exceptions + DefaultHttpxClient( + verify=True, + cert=None, + trust_env=True, + http1=True, + http2=False, + limits=httpx.Limits(max_connections=100, max_keepalive_connections=20), + ) + + @pytest.mark.respx(base_url=base_url) + def test_follow_redirects(self, respx_mock: MockRouter, client: Kernel) -> None: + # Test that the default follow_redirects=True allows following redirects + respx_mock.post("/redirect").mock( + return_value=httpx.Response(302, headers={"Location": f"{base_url}/redirected"}) + ) + respx_mock.get("/redirected").mock(return_value=httpx.Response(200, json={"status": "ok"})) + + response = client.post("/redirect", body={"key": "value"}, cast_to=httpx.Response) + assert response.status_code == 200 + assert response.json() == {"status": "ok"} + + @pytest.mark.respx(base_url=base_url) + def test_follow_redirects_disabled(self, respx_mock: MockRouter, client: Kernel) -> None: + # Test that follow_redirects=False prevents following redirects + respx_mock.post("/redirect").mock( + return_value=httpx.Response(302, headers={"Location": f"{base_url}/redirected"}) + ) + + with pytest.raises(APIStatusError) as exc_info: + client.post("/redirect", body={"key": "value"}, options={"follow_redirects": False}, cast_to=httpx.Response) + + assert exc_info.value.response.status_code == 302 + assert exc_info.value.response.headers["Location"] == f"{base_url}/redirected" + + +class TestAsyncKernel: + @pytest.mark.respx(base_url=base_url) + async def test_raw_response(self, respx_mock: MockRouter, async_client: AsyncKernel) -> None: + respx_mock.post("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + response = await async_client.post("/foo", cast_to=httpx.Response) + assert response.status_code == 200 + assert isinstance(response, httpx.Response) + assert response.json() == {"foo": "bar"} + + @pytest.mark.respx(base_url=base_url) + async def test_raw_response_for_binary(self, respx_mock: MockRouter, async_client: AsyncKernel) -> None: + respx_mock.post("/foo").mock( + return_value=httpx.Response(200, headers={"Content-Type": "application/binary"}, content='{"foo": "bar"}') + ) + + response = await async_client.post("/foo", cast_to=httpx.Response) + assert response.status_code == 200 + assert isinstance(response, httpx.Response) + assert response.json() == {"foo": "bar"} + + def test_copy(self, async_client: AsyncKernel) -> None: + copied = async_client.copy() + assert id(copied) != id(async_client) + + copied = async_client.copy(api_key="another My API Key") + assert copied.api_key == "another My API Key" + assert async_client.api_key == "My API Key" + + def test_copy_default_options(self, async_client: AsyncKernel) -> None: + # options that have a default are overridden correctly + copied = async_client.copy(max_retries=7) + assert copied.max_retries == 7 + assert async_client.max_retries == 2 + + copied2 = copied.copy(max_retries=6) + assert copied2.max_retries == 6 + assert copied.max_retries == 7 + + # timeout + assert isinstance(async_client.timeout, httpx.Timeout) + copied = async_client.copy(timeout=None) + assert copied.timeout is None + assert isinstance(async_client.timeout, httpx.Timeout) + + async def test_copy_default_headers(self) -> None: + client = AsyncKernel( + base_url=base_url, api_key=api_key, _strict_response_validation=True, default_headers={"X-Foo": "bar"} + ) + 
assert client.default_headers["X-Foo"] == "bar" + + # does not override the already given value when not specified + copied = client.copy() + assert copied.default_headers["X-Foo"] == "bar" + + # merges already given headers + copied = client.copy(default_headers={"X-Bar": "stainless"}) + assert copied.default_headers["X-Foo"] == "bar" + assert copied.default_headers["X-Bar"] == "stainless" + + # uses new values for any already given headers + copied = client.copy(default_headers={"X-Foo": "stainless"}) + assert copied.default_headers["X-Foo"] == "stainless" + + # set_default_headers + + # completely overrides already set values + copied = client.copy(set_default_headers={}) + assert copied.default_headers.get("X-Foo") is None + + copied = client.copy(set_default_headers={"X-Bar": "Robert"}) + assert copied.default_headers["X-Bar"] == "Robert" + + with pytest.raises( + ValueError, + match="`default_headers` and `set_default_headers` arguments are mutually exclusive", + ): + client.copy(set_default_headers={}, default_headers={"X-Foo": "Bar"}) + await client.close() + + async def test_copy_default_query(self) -> None: + client = AsyncKernel( + base_url=base_url, api_key=api_key, _strict_response_validation=True, default_query={"foo": "bar"} + ) + assert _get_params(client)["foo"] == "bar" + + # does not override the already given value when not specified + copied = client.copy() + assert _get_params(copied)["foo"] == "bar" + + # merges already given params + copied = client.copy(default_query={"bar": "stainless"}) + params = _get_params(copied) + assert params["foo"] == "bar" + assert params["bar"] == "stainless" + + # uses new values for any already given headers + copied = client.copy(default_query={"foo": "stainless"}) + assert _get_params(copied)["foo"] == "stainless" + + # set_default_query + + # completely overrides already set values + copied = client.copy(set_default_query={}) + assert _get_params(copied) == {} + + copied = client.copy(set_default_query={"bar": "Robert"}) + assert _get_params(copied)["bar"] == "Robert" + + with pytest.raises( + ValueError, + # TODO: update + match="`default_query` and `set_default_query` arguments are mutually exclusive", + ): + client.copy(set_default_query={}, default_query={"foo": "Bar"}) + + await client.close() + + def test_copy_signature(self, async_client: AsyncKernel) -> None: + # ensure the same parameters that can be passed to the client are defined in the `.copy()` method + init_signature = inspect.signature( + # mypy doesn't like that we access the `__init__` property. + async_client.__init__, # type: ignore[misc] + ) + copy_signature = inspect.signature(async_client.copy) + exclude_params = {"transport", "proxies", "_strict_response_validation"} + + for name in init_signature.parameters.keys(): + if name in exclude_params: + continue + + copy_param = copy_signature.parameters.get(name) + assert copy_param is not None, f"copy() signature is missing the {name} param" + + @pytest.mark.skipif(sys.version_info >= (3, 10), reason="fails because of a memory leak that started from 3.12") + def test_copy_build_request(self, async_client: AsyncKernel) -> None: + options = FinalRequestOptions(method="get", url="/foo") + + def build_request(options: FinalRequestOptions) -> None: + client_copy = async_client.copy() + client_copy._build_request(options) + + # ensure that the machinery is warmed up before tracing starts. 
+ build_request(options) + gc.collect() + + tracemalloc.start(1000) + + snapshot_before = tracemalloc.take_snapshot() + + ITERATIONS = 10 + for _ in range(ITERATIONS): + build_request(options) + + gc.collect() + snapshot_after = tracemalloc.take_snapshot() + + tracemalloc.stop() + + def add_leak(leaks: list[tracemalloc.StatisticDiff], diff: tracemalloc.StatisticDiff) -> None: + if diff.count == 0: + # Avoid false positives by considering only leaks (i.e. allocations that persist). + return + + if diff.count % ITERATIONS != 0: + # Avoid false positives by considering only leaks that appear per iteration. + return + + for frame in diff.traceback: + if any( + frame.filename.endswith(fragment) + for fragment in [ + # to_raw_response_wrapper leaks through the @functools.wraps() decorator. + # + # removing the decorator fixes the leak for reasons we don't understand. + "kernel/_legacy_response.py", + "kernel/_response.py", + # pydantic.BaseModel.model_dump || pydantic.BaseModel.dict leak memory for some reason. + "kernel/_compat.py", + # Standard library leaks we don't care about. + "/logging/__init__.py", + ] + ): + return + + leaks.append(diff) + + leaks: list[tracemalloc.StatisticDiff] = [] + for diff in snapshot_after.compare_to(snapshot_before, "traceback"): + add_leak(leaks, diff) + if leaks: + for leak in leaks: + print("MEMORY LEAK:", leak) + for frame in leak.traceback: + print(frame) + raise AssertionError() + + async def test_request_timeout(self, async_client: AsyncKernel) -> None: + request = async_client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == DEFAULT_TIMEOUT + + request = async_client._build_request( + FinalRequestOptions(method="get", url="/foo", timeout=httpx.Timeout(100.0)) + ) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == httpx.Timeout(100.0) + + async def test_client_timeout_option(self) -> None: + client = AsyncKernel( + base_url=base_url, api_key=api_key, _strict_response_validation=True, timeout=httpx.Timeout(0) + ) + + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == httpx.Timeout(0) + + await client.close() + + async def test_http_client_timeout_option(self) -> None: + # custom timeout given to the httpx client should be used + async with httpx.AsyncClient(timeout=None) as http_client: + client = AsyncKernel( + base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client + ) + + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == httpx.Timeout(None) + + await client.close() + + # no timeout given to the httpx client should not use the httpx default + async with httpx.AsyncClient() as http_client: + client = AsyncKernel( + base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client + ) + + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == DEFAULT_TIMEOUT + + await client.close() + + # explicitly passing the default timeout currently results in it being ignored + async with httpx.AsyncClient(timeout=HTTPX_DEFAULT_TIMEOUT) as http_client: + client = AsyncKernel( + base_url=base_url, 
api_key=api_key, _strict_response_validation=True, http_client=http_client + ) + + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == DEFAULT_TIMEOUT # our default + + await client.close() + + def test_invalid_http_client(self) -> None: + with pytest.raises(TypeError, match="Invalid `http_client` arg"): + with httpx.Client() as http_client: + AsyncKernel( + base_url=base_url, + api_key=api_key, + _strict_response_validation=True, + http_client=cast(Any, http_client), + ) + + async def test_default_headers_option(self) -> None: + test_client = AsyncKernel( + base_url=base_url, api_key=api_key, _strict_response_validation=True, default_headers={"X-Foo": "bar"} + ) + request = test_client._build_request(FinalRequestOptions(method="get", url="/foo")) + assert request.headers.get("x-foo") == "bar" + assert request.headers.get("x-stainless-lang") == "python" + + test_client2 = AsyncKernel( + base_url=base_url, + api_key=api_key, + _strict_response_validation=True, + default_headers={ + "X-Foo": "stainless", + "X-Stainless-Lang": "my-overriding-header", + }, + ) + request = test_client2._build_request(FinalRequestOptions(method="get", url="/foo")) + assert request.headers.get("x-foo") == "stainless" + assert request.headers.get("x-stainless-lang") == "my-overriding-header" + + await test_client.close() + await test_client2.close() + + def test_validate_headers(self) -> None: + client = AsyncKernel(base_url=base_url, api_key=api_key, _strict_response_validation=True) + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + assert request.headers.get("Authorization") == f"Bearer {api_key}" + + with pytest.raises(KernelError): + with update_env(**{"KERNEL_API_KEY": Omit()}): + client2 = AsyncKernel(base_url=base_url, api_key=None, _strict_response_validation=True) + _ = client2 + + async def test_default_query_option(self) -> None: + client = AsyncKernel( + base_url=base_url, api_key=api_key, _strict_response_validation=True, default_query={"query_param": "bar"} + ) + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + url = httpx.URL(request.url) + assert dict(url.params) == {"query_param": "bar"} + + request = client._build_request( + FinalRequestOptions( + method="get", + url="/foo", + params={"foo": "baz", "query_param": "overridden"}, + ) + ) + url = httpx.URL(request.url) + assert dict(url.params) == {"foo": "baz", "query_param": "overridden"} + + await client.close() + + def test_request_extra_json(self, client: Kernel) -> None: + request = client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + json_data={"foo": "bar"}, + extra_json={"baz": False}, + ), + ) + data = json.loads(request.content.decode("utf-8")) + assert data == {"foo": "bar", "baz": False} + + request = client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + extra_json={"baz": False}, + ), + ) + data = json.loads(request.content.decode("utf-8")) + assert data == {"baz": False} + + # `extra_json` takes priority over `json_data` when keys clash + request = client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + json_data={"foo": "bar", "baz": True}, + extra_json={"baz": None}, + ), + ) + data = json.loads(request.content.decode("utf-8")) + assert data == {"foo": "bar", "baz": None} + + def test_request_extra_headers(self, client: Kernel) -> None: + request = client._build_request( + 
FinalRequestOptions( + method="post", + url="/foo", + **make_request_options(extra_headers={"X-Foo": "Foo"}), + ), + ) + assert request.headers.get("X-Foo") == "Foo" + + # `extra_headers` takes priority over `default_headers` when keys clash + request = client.with_options(default_headers={"X-Bar": "true"})._build_request( + FinalRequestOptions( + method="post", + url="/foo", + **make_request_options( + extra_headers={"X-Bar": "false"}, + ), + ), + ) + assert request.headers.get("X-Bar") == "false" + + def test_request_extra_query(self, client: Kernel) -> None: + request = client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + **make_request_options( + extra_query={"my_query_param": "Foo"}, + ), + ), + ) + params = dict(request.url.params) + assert params == {"my_query_param": "Foo"} + + # if both `query` and `extra_query` are given, they are merged + request = client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + **make_request_options( + query={"bar": "1"}, + extra_query={"foo": "2"}, + ), + ), + ) + params = dict(request.url.params) + assert params == {"bar": "1", "foo": "2"} + + # `extra_query` takes priority over `query` when keys clash + request = client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + **make_request_options( + query={"foo": "1"}, + extra_query={"foo": "2"}, + ), + ), + ) + params = dict(request.url.params) + assert params == {"foo": "2"} + + def test_multipart_repeating_array(self, async_client: AsyncKernel) -> None: + request = async_client._build_request( + FinalRequestOptions.construct( + method="post", + url="/foo", + headers={"Content-Type": "multipart/form-data; boundary=6b7ba517decee4a450543ea6ae821c82"}, + json_data={"array": ["foo", "bar"]}, + files=[("foo.txt", b"hello world")], + ) + ) + + assert request.read().split(b"\r\n") == [ + b"--6b7ba517decee4a450543ea6ae821c82", + b'Content-Disposition: form-data; name="array[]"', + b"", + b"foo", + b"--6b7ba517decee4a450543ea6ae821c82", + b'Content-Disposition: form-data; name="array[]"', + b"", + b"bar", + b"--6b7ba517decee4a450543ea6ae821c82", + b'Content-Disposition: form-data; name="foo.txt"; filename="upload"', + b"Content-Type: application/octet-stream", + b"", + b"hello world", + b"--6b7ba517decee4a450543ea6ae821c82--", + b"", + ] + + @pytest.mark.respx(base_url=base_url) + async def test_basic_union_response(self, respx_mock: MockRouter, async_client: AsyncKernel) -> None: + class Model1(BaseModel): + name: str + + class Model2(BaseModel): + foo: str + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + response = await async_client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) + assert isinstance(response, Model2) + assert response.foo == "bar" + + @pytest.mark.respx(base_url=base_url) + async def test_union_response_different_types(self, respx_mock: MockRouter, async_client: AsyncKernel) -> None: + """Union of objects with the same field name using a different type""" + + class Model1(BaseModel): + foo: int + + class Model2(BaseModel): + foo: str + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + response = await async_client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) + assert isinstance(response, Model2) + assert response.foo == "bar" + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": 1})) + + response = await async_client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) + assert isinstance(response, 
Model1) + assert response.foo == 1 + + @pytest.mark.respx(base_url=base_url) + async def test_non_application_json_content_type_for_json_data( + self, respx_mock: MockRouter, async_client: AsyncKernel + ) -> None: + """ + Response that sets Content-Type to something other than application/json but returns json data + """ + + class Model(BaseModel): + foo: int + + respx_mock.get("/foo").mock( + return_value=httpx.Response( + 200, + content=json.dumps({"foo": 2}), + headers={"Content-Type": "application/text"}, + ) + ) + + response = await async_client.get("/foo", cast_to=Model) + assert isinstance(response, Model) + assert response.foo == 2 + + async def test_base_url_setter(self) -> None: + client = AsyncKernel( + base_url="https://example.com/from_init", api_key=api_key, _strict_response_validation=True + ) + assert client.base_url == "https://example.com/from_init/" + + client.base_url = "https://example.com/from_setter" # type: ignore[assignment] + + assert client.base_url == "https://example.com/from_setter/" + + await client.close() + + async def test_base_url_env(self) -> None: + with update_env(KERNEL_BASE_URL="http://localhost:5000/from/env"): + client = AsyncKernel(api_key=api_key, _strict_response_validation=True) + assert client.base_url == "http://localhost:5000/from/env/" + + # explicit environment arg requires explicitness + with update_env(KERNEL_BASE_URL="http://localhost:5000/from/env"): + with pytest.raises(ValueError, match=r"you must pass base_url=None"): + AsyncKernel(api_key=api_key, _strict_response_validation=True, environment="production") + + client = AsyncKernel( + base_url=None, api_key=api_key, _strict_response_validation=True, environment="production" + ) + assert str(client.base_url).startswith("https://api.onkernel.com/") + + await client.close() + + @pytest.mark.parametrize( + "client", + [ + AsyncKernel( + base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True + ), + AsyncKernel( + base_url="http://localhost:5000/custom/path/", + api_key=api_key, + _strict_response_validation=True, + http_client=httpx.AsyncClient(), + ), + ], + ids=["standard", "custom http client"], + ) + async def test_base_url_trailing_slash(self, client: AsyncKernel) -> None: + request = client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + json_data={"foo": "bar"}, + ), + ) + assert request.url == "http://localhost:5000/custom/path/foo" + await client.close() + + @pytest.mark.parametrize( + "client", + [ + AsyncKernel( + base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True + ), + AsyncKernel( + base_url="http://localhost:5000/custom/path/", + api_key=api_key, + _strict_response_validation=True, + http_client=httpx.AsyncClient(), + ), + ], + ids=["standard", "custom http client"], + ) + async def test_base_url_no_trailing_slash(self, client: AsyncKernel) -> None: + request = client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + json_data={"foo": "bar"}, + ), + ) + assert request.url == "http://localhost:5000/custom/path/foo" + await client.close() + + @pytest.mark.parametrize( + "client", + [ + AsyncKernel( + base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True + ), + AsyncKernel( + base_url="http://localhost:5000/custom/path/", + api_key=api_key, + _strict_response_validation=True, + http_client=httpx.AsyncClient(), + ), + ], + ids=["standard", "custom http client"], + ) + async def 
test_absolute_request_url(self, client: AsyncKernel) -> None: + request = client._build_request( + FinalRequestOptions( + method="post", + url="https://myapi.com/foo", + json_data={"foo": "bar"}, + ), + ) + assert request.url == "https://myapi.com/foo" + await client.close() + + async def test_copied_client_does_not_close_http(self) -> None: + test_client = AsyncKernel(base_url=base_url, api_key=api_key, _strict_response_validation=True) + assert not test_client.is_closed() + + copied = test_client.copy() + assert copied is not test_client + + del copied + + await asyncio.sleep(0.2) + assert not test_client.is_closed() + + async def test_client_context_manager(self) -> None: + test_client = AsyncKernel(base_url=base_url, api_key=api_key, _strict_response_validation=True) + async with test_client as c2: + assert c2 is test_client + assert not c2.is_closed() + assert not test_client.is_closed() + assert test_client.is_closed() + + @pytest.mark.respx(base_url=base_url) + async def test_client_response_validation_error(self, respx_mock: MockRouter, async_client: AsyncKernel) -> None: + class Model(BaseModel): + foo: str + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": {"invalid": True}})) + + with pytest.raises(APIResponseValidationError) as exc: + await async_client.get("/foo", cast_to=Model) + + assert isinstance(exc.value.__cause__, ValidationError) + + async def test_client_max_retries_validation(self) -> None: + with pytest.raises(TypeError, match=r"max_retries cannot be None"): + AsyncKernel( + base_url=base_url, api_key=api_key, _strict_response_validation=True, max_retries=cast(Any, None) + ) + + @pytest.mark.respx(base_url=base_url) + async def test_received_text_for_expected_json(self, respx_mock: MockRouter) -> None: + class Model(BaseModel): + name: str + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, text="my-custom-format")) + + strict_client = AsyncKernel(base_url=base_url, api_key=api_key, _strict_response_validation=True) + + with pytest.raises(APIResponseValidationError): + await strict_client.get("/foo", cast_to=Model) + + non_strict_client = AsyncKernel(base_url=base_url, api_key=api_key, _strict_response_validation=False) + + response = await non_strict_client.get("/foo", cast_to=Model) + assert isinstance(response, str) # type: ignore[unreachable] + + await strict_client.close() + await non_strict_client.close() + + @pytest.mark.parametrize( + "remaining_retries,retry_after,timeout", + [ + [3, "20", 20], + [3, "0", 0.5], + [3, "-10", 0.5], + [3, "60", 60], + [3, "61", 0.5], + [3, "Fri, 29 Sep 2023 16:26:57 GMT", 20], + [3, "Fri, 29 Sep 2023 16:26:37 GMT", 0.5], + [3, "Fri, 29 Sep 2023 16:26:27 GMT", 0.5], + [3, "Fri, 29 Sep 2023 16:27:37 GMT", 60], + [3, "Fri, 29 Sep 2023 16:27:38 GMT", 0.5], + [3, "99999999999999999999999999999999999", 0.5], + [3, "Zun, 29 Sep 2023 16:26:27 GMT", 0.5], + [3, "", 0.5], + [2, "", 0.5 * 2.0], + [1, "", 0.5 * 4.0], + [-1100, "", 8], # test large number potentially overflowing + ], + ) + @mock.patch("time.time", mock.MagicMock(return_value=1696004797)) + async def test_parse_retry_after_header( + self, remaining_retries: int, retry_after: str, timeout: float, async_client: AsyncKernel + ) -> None: + headers = httpx.Headers({"retry-after": retry_after}) + options = FinalRequestOptions(method="get", url="/foo", max_retries=3) + calculated = async_client._calculate_retry_timeout(remaining_retries, options, headers) + assert calculated == pytest.approx(timeout, 0.5 * 0.875) # pyright: 
ignore[reportUnknownMemberType] + + @mock.patch("kernel._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + async def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter, async_client: AsyncKernel) -> None: + respx_mock.post("/browsers").mock(side_effect=httpx.TimeoutException("Test timeout error")) + + with pytest.raises(APITimeoutError): + await async_client.browsers.with_streaming_response.create().__aenter__() + + assert _get_open_connections(async_client) == 0 + + @mock.patch("kernel._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + async def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter, async_client: AsyncKernel) -> None: + respx_mock.post("/browsers").mock(return_value=httpx.Response(500)) + + with pytest.raises(APIStatusError): + await async_client.browsers.with_streaming_response.create().__aenter__() + assert _get_open_connections(async_client) == 0 + + @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) + @mock.patch("kernel._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + @pytest.mark.parametrize("failure_mode", ["status", "exception"]) + async def test_retries_taken( + self, + async_client: AsyncKernel, + failures_before_success: int, + failure_mode: Literal["status", "exception"], + respx_mock: MockRouter, + ) -> None: + client = async_client.with_options(max_retries=4) + + nb_retries = 0 + + def retry_handler(_request: httpx.Request) -> httpx.Response: + nonlocal nb_retries + if nb_retries < failures_before_success: + nb_retries += 1 + if failure_mode == "exception": + raise RuntimeError("oops") + return httpx.Response(500) + return httpx.Response(200) + + respx_mock.post("/browsers").mock(side_effect=retry_handler) + + response = await client.browsers.with_raw_response.create() + + assert response.retries_taken == failures_before_success + assert int(response.http_request.headers.get("x-stainless-retry-count")) == failures_before_success + + @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) + @mock.patch("kernel._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + async def test_omit_retry_count_header( + self, async_client: AsyncKernel, failures_before_success: int, respx_mock: MockRouter + ) -> None: + client = async_client.with_options(max_retries=4) + + nb_retries = 0 + + def retry_handler(_request: httpx.Request) -> httpx.Response: + nonlocal nb_retries + if nb_retries < failures_before_success: + nb_retries += 1 + return httpx.Response(500) + return httpx.Response(200) + + respx_mock.post("/browsers").mock(side_effect=retry_handler) + + response = await client.browsers.with_raw_response.create(extra_headers={"x-stainless-retry-count": Omit()}) + + assert len(response.http_request.headers.get_list("x-stainless-retry-count")) == 0 + + @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) + @mock.patch("kernel._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + async def test_overwrite_retry_count_header( + self, async_client: AsyncKernel, failures_before_success: int, respx_mock: MockRouter + ) -> None: + client = async_client.with_options(max_retries=4) + + nb_retries = 0 + + def retry_handler(_request: httpx.Request) -> httpx.Response: + nonlocal nb_retries + if nb_retries < 
failures_before_success: + nb_retries += 1 + return httpx.Response(500) + return httpx.Response(200) + + respx_mock.post("/browsers").mock(side_effect=retry_handler) + + response = await client.browsers.with_raw_response.create(extra_headers={"x-stainless-retry-count": "42"}) + + assert response.http_request.headers.get("x-stainless-retry-count") == "42" + + async def test_get_platform(self) -> None: + platform = await asyncify(get_platform)() + assert isinstance(platform, (str, OtherPlatform)) + + async def test_proxy_environment_variables(self, monkeypatch: pytest.MonkeyPatch) -> None: + # Test that the proxy environment variables are set correctly + monkeypatch.setenv("HTTPS_PROXY", "https://example.org") + + client = DefaultAsyncHttpxClient() + + mounts = tuple(client._mounts.items()) + assert len(mounts) == 1 + assert mounts[0][0].pattern == "https://" + + @pytest.mark.filterwarnings("ignore:.*deprecated.*:DeprecationWarning") + async def test_default_client_creation(self) -> None: + # Ensure that the client can be initialized without any exceptions + DefaultAsyncHttpxClient( + verify=True, + cert=None, + trust_env=True, + http1=True, + http2=False, + limits=httpx.Limits(max_connections=100, max_keepalive_connections=20), + ) + + @pytest.mark.respx(base_url=base_url) + async def test_follow_redirects(self, respx_mock: MockRouter, async_client: AsyncKernel) -> None: + # Test that the default follow_redirects=True allows following redirects + respx_mock.post("/redirect").mock( + return_value=httpx.Response(302, headers={"Location": f"{base_url}/redirected"}) + ) + respx_mock.get("/redirected").mock(return_value=httpx.Response(200, json={"status": "ok"})) + + response = await async_client.post("/redirect", body={"key": "value"}, cast_to=httpx.Response) + assert response.status_code == 200 + assert response.json() == {"status": "ok"} + + @pytest.mark.respx(base_url=base_url) + async def test_follow_redirects_disabled(self, respx_mock: MockRouter, async_client: AsyncKernel) -> None: + # Test that follow_redirects=False prevents following redirects + respx_mock.post("/redirect").mock( + return_value=httpx.Response(302, headers={"Location": f"{base_url}/redirected"}) + ) + + with pytest.raises(APIStatusError) as exc_info: + await async_client.post( + "/redirect", body={"key": "value"}, options={"follow_redirects": False}, cast_to=httpx.Response + ) + + assert exc_info.value.response.status_code == 302 + assert exc_info.value.response.headers["Location"] == f"{base_url}/redirected" diff --git a/tests/test_deepcopy.py b/tests/test_deepcopy.py new file mode 100644 index 0000000..83b72cd --- /dev/null +++ b/tests/test_deepcopy.py @@ -0,0 +1,58 @@ +from kernel._utils import deepcopy_minimal + + +def assert_different_identities(obj1: object, obj2: object) -> None: + assert obj1 == obj2 + assert id(obj1) != id(obj2) + + +def test_simple_dict() -> None: + obj1 = {"foo": "bar"} + obj2 = deepcopy_minimal(obj1) + assert_different_identities(obj1, obj2) + + +def test_nested_dict() -> None: + obj1 = {"foo": {"bar": True}} + obj2 = deepcopy_minimal(obj1) + assert_different_identities(obj1, obj2) + assert_different_identities(obj1["foo"], obj2["foo"]) + + +def test_complex_nested_dict() -> None: + obj1 = {"foo": {"bar": [{"hello": "world"}]}} + obj2 = deepcopy_minimal(obj1) + assert_different_identities(obj1, obj2) + assert_different_identities(obj1["foo"], obj2["foo"]) + assert_different_identities(obj1["foo"]["bar"], obj2["foo"]["bar"]) + assert_different_identities(obj1["foo"]["bar"][0], 
obj2["foo"]["bar"][0]) + + +def test_simple_list() -> None: + obj1 = ["a", "b", "c"] + obj2 = deepcopy_minimal(obj1) + assert_different_identities(obj1, obj2) + + +def test_nested_list() -> None: + obj1 = ["a", [1, 2, 3]] + obj2 = deepcopy_minimal(obj1) + assert_different_identities(obj1, obj2) + assert_different_identities(obj1[1], obj2[1]) + + +class MyObject: ... + + +def test_ignores_other_types() -> None: + # custom classes + my_obj = MyObject() + obj1 = {"foo": my_obj} + obj2 = deepcopy_minimal(obj1) + assert_different_identities(obj1, obj2) + assert obj1["foo"] is my_obj + + # tuples + obj3 = ("a", "b") + obj4 = deepcopy_minimal(obj3) + assert obj3 is obj4 diff --git a/tests/test_extract_files.py b/tests/test_extract_files.py new file mode 100644 index 0000000..e5cf4a1 --- /dev/null +++ b/tests/test_extract_files.py @@ -0,0 +1,64 @@ +from __future__ import annotations + +from typing import Sequence + +import pytest + +from kernel._types import FileTypes +from kernel._utils import extract_files + + +def test_removes_files_from_input() -> None: + query = {"foo": "bar"} + assert extract_files(query, paths=[]) == [] + assert query == {"foo": "bar"} + + query2 = {"foo": b"Bar", "hello": "world"} + assert extract_files(query2, paths=[["foo"]]) == [("foo", b"Bar")] + assert query2 == {"hello": "world"} + + query3 = {"foo": {"foo": {"bar": b"Bar"}}, "hello": "world"} + assert extract_files(query3, paths=[["foo", "foo", "bar"]]) == [("foo[foo][bar]", b"Bar")] + assert query3 == {"foo": {"foo": {}}, "hello": "world"} + + query4 = {"foo": {"bar": b"Bar", "baz": "foo"}, "hello": "world"} + assert extract_files(query4, paths=[["foo", "bar"]]) == [("foo[bar]", b"Bar")] + assert query4 == {"hello": "world", "foo": {"baz": "foo"}} + + +def test_multiple_files() -> None: + query = {"documents": [{"file": b"My first file"}, {"file": b"My second file"}]} + assert extract_files(query, paths=[["documents", "", "file"]]) == [ + ("documents[][file]", b"My first file"), + ("documents[][file]", b"My second file"), + ] + assert query == {"documents": [{}, {}]} + + +@pytest.mark.parametrize( + "query,paths,expected", + [ + [ + {"foo": {"bar": "baz"}}, + [["foo", "", "bar"]], + [], + ], + [ + {"foo": ["bar", "baz"]}, + [["foo", "bar"]], + [], + ], + [ + {"foo": {"bar": "baz"}}, + [["foo", "foo"]], + [], + ], + ], + ids=["dict expecting array", "array expecting dict", "unknown keys"], +) +def test_ignores_incorrect_paths( + query: dict[str, object], + paths: Sequence[Sequence[str]], + expected: list[tuple[str, FileTypes]], +) -> None: + assert extract_files(query, paths=paths) == expected diff --git a/tests/test_files.py b/tests/test_files.py new file mode 100644 index 0000000..62b874f --- /dev/null +++ b/tests/test_files.py @@ -0,0 +1,51 @@ +from pathlib import Path + +import anyio +import pytest +from dirty_equals import IsDict, IsList, IsBytes, IsTuple + +from kernel._files import to_httpx_files, async_to_httpx_files + +readme_path = Path(__file__).parent.parent.joinpath("README.md") + + +def test_pathlib_includes_file_name() -> None: + result = to_httpx_files({"file": readme_path}) + print(result) + assert result == IsDict({"file": IsTuple("README.md", IsBytes())}) + + +def test_tuple_input() -> None: + result = to_httpx_files([("file", readme_path)]) + print(result) + assert result == IsList(IsTuple("file", IsTuple("README.md", IsBytes()))) + + +@pytest.mark.asyncio +async def test_async_pathlib_includes_file_name() -> None: + result = await async_to_httpx_files({"file": readme_path}) + print(result) + 
assert result == IsDict({"file": IsTuple("README.md", IsBytes())}) + + +@pytest.mark.asyncio +async def test_async_supports_anyio_path() -> None: + result = await async_to_httpx_files({"file": anyio.Path(readme_path)}) + print(result) + assert result == IsDict({"file": IsTuple("README.md", IsBytes())}) + + +@pytest.mark.asyncio +async def test_async_tuple_input() -> None: + result = await async_to_httpx_files([("file", readme_path)]) + print(result) + assert result == IsList(IsTuple("file", IsTuple("README.md", IsBytes()))) + + +def test_string_not_allowed() -> None: + with pytest.raises(TypeError, match="Expected file types input to be a FileContent type or to be a tuple"): + to_httpx_files( + { + "file": "foo", # type: ignore + } + ) diff --git a/tests/test_models.py b/tests/test_models.py new file mode 100644 index 0000000..78f0fd3 --- /dev/null +++ b/tests/test_models.py @@ -0,0 +1,963 @@ +import json +from typing import TYPE_CHECKING, Any, Dict, List, Union, Optional, cast +from datetime import datetime, timezone +from typing_extensions import Literal, Annotated, TypeAliasType + +import pytest +import pydantic +from pydantic import Field + +from kernel._utils import PropertyInfo +from kernel._compat import PYDANTIC_V1, parse_obj, model_dump, model_json +from kernel._models import DISCRIMINATOR_CACHE, BaseModel, construct_type + + +class BasicModel(BaseModel): + foo: str + + +@pytest.mark.parametrize("value", ["hello", 1], ids=["correct type", "mismatched"]) +def test_basic(value: object) -> None: + m = BasicModel.construct(foo=value) + assert m.foo == value + + +def test_directly_nested_model() -> None: + class NestedModel(BaseModel): + nested: BasicModel + + m = NestedModel.construct(nested={"foo": "Foo!"}) + assert m.nested.foo == "Foo!" + + # mismatched types + m = NestedModel.construct(nested="hello!") + assert cast(Any, m.nested) == "hello!" 
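+ # Note (descriptive comment, not in the original patch): BaseModel.construct() is non-validating, so a mismatched input such as the string above is stored as-is rather than coerced or rejected; the tests below exercise the same behaviour for optional, list, and dict fields.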
+ + +def test_optional_nested_model() -> None: + class NestedModel(BaseModel): + nested: Optional[BasicModel] + + m1 = NestedModel.construct(nested=None) + assert m1.nested is None + + m2 = NestedModel.construct(nested={"foo": "bar"}) + assert m2.nested is not None + assert m2.nested.foo == "bar" + + # mismatched types + m3 = NestedModel.construct(nested={"foo"}) + assert isinstance(cast(Any, m3.nested), set) + assert cast(Any, m3.nested) == {"foo"} + + +def test_list_nested_model() -> None: + class NestedModel(BaseModel): + nested: List[BasicModel] + + m = NestedModel.construct(nested=[{"foo": "bar"}, {"foo": "2"}]) + assert m.nested is not None + assert isinstance(m.nested, list) + assert len(m.nested) == 2 + assert m.nested[0].foo == "bar" + assert m.nested[1].foo == "2" + + # mismatched types + m = NestedModel.construct(nested=True) + assert cast(Any, m.nested) is True + + m = NestedModel.construct(nested=[False]) + assert cast(Any, m.nested) == [False] + + +def test_optional_list_nested_model() -> None: + class NestedModel(BaseModel): + nested: Optional[List[BasicModel]] + + m1 = NestedModel.construct(nested=[{"foo": "bar"}, {"foo": "2"}]) + assert m1.nested is not None + assert isinstance(m1.nested, list) + assert len(m1.nested) == 2 + assert m1.nested[0].foo == "bar" + assert m1.nested[1].foo == "2" + + m2 = NestedModel.construct(nested=None) + assert m2.nested is None + + # mismatched types + m3 = NestedModel.construct(nested={1}) + assert cast(Any, m3.nested) == {1} + + m4 = NestedModel.construct(nested=[False]) + assert cast(Any, m4.nested) == [False] + + +def test_list_optional_items_nested_model() -> None: + class NestedModel(BaseModel): + nested: List[Optional[BasicModel]] + + m = NestedModel.construct(nested=[None, {"foo": "bar"}]) + assert m.nested is not None + assert isinstance(m.nested, list) + assert len(m.nested) == 2 + assert m.nested[0] is None + assert m.nested[1] is not None + assert m.nested[1].foo == "bar" + + # mismatched types + m3 = NestedModel.construct(nested="foo") + assert cast(Any, m3.nested) == "foo" + + m4 = NestedModel.construct(nested=[False]) + assert cast(Any, m4.nested) == [False] + + +def test_list_mismatched_type() -> None: + class NestedModel(BaseModel): + nested: List[str] + + m = NestedModel.construct(nested=False) + assert cast(Any, m.nested) is False + + +def test_raw_dictionary() -> None: + class NestedModel(BaseModel): + nested: Dict[str, str] + + m = NestedModel.construct(nested={"hello": "world"}) + assert m.nested == {"hello": "world"} + + # mismatched types + m = NestedModel.construct(nested=False) + assert cast(Any, m.nested) is False + + +def test_nested_dictionary_model() -> None: + class NestedModel(BaseModel): + nested: Dict[str, BasicModel] + + m = NestedModel.construct(nested={"hello": {"foo": "bar"}}) + assert isinstance(m.nested, dict) + assert m.nested["hello"].foo == "bar" + + # mismatched types + m = NestedModel.construct(nested={"hello": False}) + assert cast(Any, m.nested["hello"]) is False + + +def test_unknown_fields() -> None: + m1 = BasicModel.construct(foo="foo", unknown=1) + assert m1.foo == "foo" + assert cast(Any, m1).unknown == 1 + + m2 = BasicModel.construct(foo="foo", unknown={"foo_bar": True}) + assert m2.foo == "foo" + assert cast(Any, m2).unknown == {"foo_bar": True} + + assert model_dump(m2) == {"foo": "foo", "unknown": {"foo_bar": True}} + + +def test_strict_validation_unknown_fields() -> None: + class Model(BaseModel): + foo: str + + model = parse_obj(Model, dict(foo="hello!", user="Robert")) + assert 
model.foo == "hello!" + assert cast(Any, model).user == "Robert" + + assert model_dump(model) == {"foo": "hello!", "user": "Robert"} + + +def test_aliases() -> None: + class Model(BaseModel): + my_field: int = Field(alias="myField") + + m = Model.construct(myField=1) + assert m.my_field == 1 + + # mismatched types + m = Model.construct(myField={"hello": False}) + assert cast(Any, m.my_field) == {"hello": False} + + +def test_repr() -> None: + model = BasicModel(foo="bar") + assert str(model) == "BasicModel(foo='bar')" + assert repr(model) == "BasicModel(foo='bar')" + + +def test_repr_nested_model() -> None: + class Child(BaseModel): + name: str + age: int + + class Parent(BaseModel): + name: str + child: Child + + model = Parent(name="Robert", child=Child(name="Foo", age=5)) + assert str(model) == "Parent(name='Robert', child=Child(name='Foo', age=5))" + assert repr(model) == "Parent(name='Robert', child=Child(name='Foo', age=5))" + + +def test_optional_list() -> None: + class Submodel(BaseModel): + name: str + + class Model(BaseModel): + items: Optional[List[Submodel]] + + m = Model.construct(items=None) + assert m.items is None + + m = Model.construct(items=[]) + assert m.items == [] + + m = Model.construct(items=[{"name": "Robert"}]) + assert m.items is not None + assert len(m.items) == 1 + assert m.items[0].name == "Robert" + + +def test_nested_union_of_models() -> None: + class Submodel1(BaseModel): + bar: bool + + class Submodel2(BaseModel): + thing: str + + class Model(BaseModel): + foo: Union[Submodel1, Submodel2] + + m = Model.construct(foo={"thing": "hello"}) + assert isinstance(m.foo, Submodel2) + assert m.foo.thing == "hello" + + +def test_nested_union_of_mixed_types() -> None: + class Submodel1(BaseModel): + bar: bool + + class Model(BaseModel): + foo: Union[Submodel1, Literal[True], Literal["CARD_HOLDER"]] + + m = Model.construct(foo=True) + assert m.foo is True + + m = Model.construct(foo="CARD_HOLDER") + assert m.foo == "CARD_HOLDER" + + m = Model.construct(foo={"bar": False}) + assert isinstance(m.foo, Submodel1) + assert m.foo.bar is False + + +def test_nested_union_multiple_variants() -> None: + class Submodel1(BaseModel): + bar: bool + + class Submodel2(BaseModel): + thing: str + + class Submodel3(BaseModel): + foo: int + + class Model(BaseModel): + foo: Union[Submodel1, Submodel2, None, Submodel3] + + m = Model.construct(foo={"thing": "hello"}) + assert isinstance(m.foo, Submodel2) + assert m.foo.thing == "hello" + + m = Model.construct(foo=None) + assert m.foo is None + + m = Model.construct() + assert m.foo is None + + m = Model.construct(foo={"foo": "1"}) + assert isinstance(m.foo, Submodel3) + assert m.foo.foo == 1 + + +def test_nested_union_invalid_data() -> None: + class Submodel1(BaseModel): + level: int + + class Submodel2(BaseModel): + name: str + + class Model(BaseModel): + foo: Union[Submodel1, Submodel2] + + m = Model.construct(foo=True) + assert cast(bool, m.foo) is True + + m = Model.construct(foo={"name": 3}) + if PYDANTIC_V1: + assert isinstance(m.foo, Submodel2) + assert m.foo.name == "3" + else: + assert isinstance(m.foo, Submodel1) + assert m.foo.name == 3 # type: ignore + + +def test_list_of_unions() -> None: + class Submodel1(BaseModel): + level: int + + class Submodel2(BaseModel): + name: str + + class Model(BaseModel): + items: List[Union[Submodel1, Submodel2]] + + m = Model.construct(items=[{"level": 1}, {"name": "Robert"}]) + assert len(m.items) == 2 + assert isinstance(m.items[0], Submodel1) + assert m.items[0].level == 1 + assert 
isinstance(m.items[1], Submodel2) + assert m.items[1].name == "Robert" + + m = Model.construct(items=[{"level": -1}, 156]) + assert len(m.items) == 2 + assert isinstance(m.items[0], Submodel1) + assert m.items[0].level == -1 + assert cast(Any, m.items[1]) == 156 + + +def test_union_of_lists() -> None: + class SubModel1(BaseModel): + level: int + + class SubModel2(BaseModel): + name: str + + class Model(BaseModel): + items: Union[List[SubModel1], List[SubModel2]] + + # with one valid entry + m = Model.construct(items=[{"name": "Robert"}]) + assert len(m.items) == 1 + assert isinstance(m.items[0], SubModel2) + assert m.items[0].name == "Robert" + + # with two entries pointing to different types + m = Model.construct(items=[{"level": 1}, {"name": "Robert"}]) + assert len(m.items) == 2 + assert isinstance(m.items[0], SubModel1) + assert m.items[0].level == 1 + assert isinstance(m.items[1], SubModel1) + assert cast(Any, m.items[1]).name == "Robert" + + # with two entries pointing to *completely* different types + m = Model.construct(items=[{"level": -1}, 156]) + assert len(m.items) == 2 + assert isinstance(m.items[0], SubModel1) + assert m.items[0].level == -1 + assert cast(Any, m.items[1]) == 156 + + +def test_dict_of_union() -> None: + class SubModel1(BaseModel): + name: str + + class SubModel2(BaseModel): + foo: str + + class Model(BaseModel): + data: Dict[str, Union[SubModel1, SubModel2]] + + m = Model.construct(data={"hello": {"name": "there"}, "foo": {"foo": "bar"}}) + assert len(list(m.data.keys())) == 2 + assert isinstance(m.data["hello"], SubModel1) + assert m.data["hello"].name == "there" + assert isinstance(m.data["foo"], SubModel2) + assert m.data["foo"].foo == "bar" + + # TODO: test mismatched type + + +def test_double_nested_union() -> None: + class SubModel1(BaseModel): + name: str + + class SubModel2(BaseModel): + bar: str + + class Model(BaseModel): + data: Dict[str, List[Union[SubModel1, SubModel2]]] + + m = Model.construct(data={"foo": [{"bar": "baz"}, {"name": "Robert"}]}) + assert len(m.data["foo"]) == 2 + + entry1 = m.data["foo"][0] + assert isinstance(entry1, SubModel2) + assert entry1.bar == "baz" + + entry2 = m.data["foo"][1] + assert isinstance(entry2, SubModel1) + assert entry2.name == "Robert" + + # TODO: test mismatched type + + +def test_union_of_dict() -> None: + class SubModel1(BaseModel): + name: str + + class SubModel2(BaseModel): + foo: str + + class Model(BaseModel): + data: Union[Dict[str, SubModel1], Dict[str, SubModel2]] + + m = Model.construct(data={"hello": {"name": "there"}, "foo": {"foo": "bar"}}) + assert len(list(m.data.keys())) == 2 + assert isinstance(m.data["hello"], SubModel1) + assert m.data["hello"].name == "there" + assert isinstance(m.data["foo"], SubModel1) + assert cast(Any, m.data["foo"]).foo == "bar" + + +def test_iso8601_datetime() -> None: + class Model(BaseModel): + created_at: datetime + + expected = datetime(2019, 12, 27, 18, 11, 19, 117000, tzinfo=timezone.utc) + + if PYDANTIC_V1: + expected_json = '{"created_at": "2019-12-27T18:11:19.117000+00:00"}' + else: + expected_json = '{"created_at":"2019-12-27T18:11:19.117000Z"}' + + model = Model.construct(created_at="2019-12-27T18:11:19.117Z") + assert model.created_at == expected + assert model_json(model) == expected_json + + model = parse_obj(Model, dict(created_at="2019-12-27T18:11:19.117Z")) + assert model.created_at == expected + assert model_json(model) == expected_json + + +def test_does_not_coerce_int() -> None: + class Model(BaseModel): + bar: int + + assert 
Model.construct(bar=1).bar == 1 + assert Model.construct(bar=10.9).bar == 10.9 + assert Model.construct(bar="19").bar == "19" # type: ignore[comparison-overlap] + assert Model.construct(bar=False).bar is False + + +def test_int_to_float_safe_conversion() -> None: + class Model(BaseModel): + float_field: float + + m = Model.construct(float_field=10) + assert m.float_field == 10.0 + assert isinstance(m.float_field, float) + + m = Model.construct(float_field=10.12) + assert m.float_field == 10.12 + assert isinstance(m.float_field, float) + + # number too big + m = Model.construct(float_field=2**53 + 1) + assert m.float_field == 2**53 + 1 + assert isinstance(m.float_field, int) + + +def test_deprecated_alias() -> None: + class Model(BaseModel): + resource_id: str = Field(alias="model_id") + + @property + def model_id(self) -> str: + return self.resource_id + + m = Model.construct(model_id="id") + assert m.model_id == "id" + assert m.resource_id == "id" + assert m.resource_id is m.model_id + + m = parse_obj(Model, {"model_id": "id"}) + assert m.model_id == "id" + assert m.resource_id == "id" + assert m.resource_id is m.model_id + + +def test_omitted_fields() -> None: + class Model(BaseModel): + resource_id: Optional[str] = None + + m = Model.construct() + assert m.resource_id is None + assert "resource_id" not in m.model_fields_set + + m = Model.construct(resource_id=None) + assert m.resource_id is None + assert "resource_id" in m.model_fields_set + + m = Model.construct(resource_id="foo") + assert m.resource_id == "foo" + assert "resource_id" in m.model_fields_set + + +def test_to_dict() -> None: + class Model(BaseModel): + foo: Optional[str] = Field(alias="FOO", default=None) + + m = Model(FOO="hello") + assert m.to_dict() == {"FOO": "hello"} + assert m.to_dict(use_api_names=False) == {"foo": "hello"} + + m2 = Model() + assert m2.to_dict() == {} + assert m2.to_dict(exclude_unset=False) == {"FOO": None} + assert m2.to_dict(exclude_unset=False, exclude_none=True) == {} + assert m2.to_dict(exclude_unset=False, exclude_defaults=True) == {} + + m3 = Model(FOO=None) + assert m3.to_dict() == {"FOO": None} + assert m3.to_dict(exclude_none=True) == {} + assert m3.to_dict(exclude_defaults=True) == {} + + class Model2(BaseModel): + created_at: datetime + + time_str = "2024-03-21T11:39:01.275859" + m4 = Model2.construct(created_at=time_str) + assert m4.to_dict(mode="python") == {"created_at": datetime.fromisoformat(time_str)} + assert m4.to_dict(mode="json") == {"created_at": time_str} + + if PYDANTIC_V1: + with pytest.raises(ValueError, match="warnings is only supported in Pydantic v2"): + m.to_dict(warnings=False) + + +def test_forwards_compat_model_dump_method() -> None: + class Model(BaseModel): + foo: Optional[str] = Field(alias="FOO", default=None) + + m = Model(FOO="hello") + assert m.model_dump() == {"foo": "hello"} + assert m.model_dump(include={"bar"}) == {} + assert m.model_dump(exclude={"foo"}) == {} + assert m.model_dump(by_alias=True) == {"FOO": "hello"} + + m2 = Model() + assert m2.model_dump() == {"foo": None} + assert m2.model_dump(exclude_unset=True) == {} + assert m2.model_dump(exclude_none=True) == {} + assert m2.model_dump(exclude_defaults=True) == {} + + m3 = Model(FOO=None) + assert m3.model_dump() == {"foo": None} + assert m3.model_dump(exclude_none=True) == {} + + if PYDANTIC_V1: + with pytest.raises(ValueError, match="round_trip is only supported in Pydantic v2"): + m.model_dump(round_trip=True) + + with pytest.raises(ValueError, match="warnings is only supported in Pydantic 
v2"): + m.model_dump(warnings=False) + + +def test_compat_method_no_error_for_warnings() -> None: + class Model(BaseModel): + foo: Optional[str] + + m = Model(foo="hello") + assert isinstance(model_dump(m, warnings=False), dict) + + +def test_to_json() -> None: + class Model(BaseModel): + foo: Optional[str] = Field(alias="FOO", default=None) + + m = Model(FOO="hello") + assert json.loads(m.to_json()) == {"FOO": "hello"} + assert json.loads(m.to_json(use_api_names=False)) == {"foo": "hello"} + + if PYDANTIC_V1: + assert m.to_json(indent=None) == '{"FOO": "hello"}' + else: + assert m.to_json(indent=None) == '{"FOO":"hello"}' + + m2 = Model() + assert json.loads(m2.to_json()) == {} + assert json.loads(m2.to_json(exclude_unset=False)) == {"FOO": None} + assert json.loads(m2.to_json(exclude_unset=False, exclude_none=True)) == {} + assert json.loads(m2.to_json(exclude_unset=False, exclude_defaults=True)) == {} + + m3 = Model(FOO=None) + assert json.loads(m3.to_json()) == {"FOO": None} + assert json.loads(m3.to_json(exclude_none=True)) == {} + + if PYDANTIC_V1: + with pytest.raises(ValueError, match="warnings is only supported in Pydantic v2"): + m.to_json(warnings=False) + + +def test_forwards_compat_model_dump_json_method() -> None: + class Model(BaseModel): + foo: Optional[str] = Field(alias="FOO", default=None) + + m = Model(FOO="hello") + assert json.loads(m.model_dump_json()) == {"foo": "hello"} + assert json.loads(m.model_dump_json(include={"bar"})) == {} + assert json.loads(m.model_dump_json(include={"foo"})) == {"foo": "hello"} + assert json.loads(m.model_dump_json(by_alias=True)) == {"FOO": "hello"} + + assert m.model_dump_json(indent=2) == '{\n "foo": "hello"\n}' + + m2 = Model() + assert json.loads(m2.model_dump_json()) == {"foo": None} + assert json.loads(m2.model_dump_json(exclude_unset=True)) == {} + assert json.loads(m2.model_dump_json(exclude_none=True)) == {} + assert json.loads(m2.model_dump_json(exclude_defaults=True)) == {} + + m3 = Model(FOO=None) + assert json.loads(m3.model_dump_json()) == {"foo": None} + assert json.loads(m3.model_dump_json(exclude_none=True)) == {} + + if PYDANTIC_V1: + with pytest.raises(ValueError, match="round_trip is only supported in Pydantic v2"): + m.model_dump_json(round_trip=True) + + with pytest.raises(ValueError, match="warnings is only supported in Pydantic v2"): + m.model_dump_json(warnings=False) + + +def test_type_compat() -> None: + # our model type can be assigned to Pydantic's model type + + def takes_pydantic(model: pydantic.BaseModel) -> None: # noqa: ARG001 + ... 
+ + class OurModel(BaseModel): + foo: Optional[str] = None + + takes_pydantic(OurModel()) + + +def test_annotated_types() -> None: + class Model(BaseModel): + value: str + + m = construct_type( + value={"value": "foo"}, + type_=cast(Any, Annotated[Model, "random metadata"]), + ) + assert isinstance(m, Model) + assert m.value == "foo" + + +def test_discriminated_unions_invalid_data() -> None: + class A(BaseModel): + type: Literal["a"] + + data: str + + class B(BaseModel): + type: Literal["b"] + + data: int + + m = construct_type( + value={"type": "b", "data": "foo"}, + type_=cast(Any, Annotated[Union[A, B], PropertyInfo(discriminator="type")]), + ) + assert isinstance(m, B) + assert m.type == "b" + assert m.data == "foo" # type: ignore[comparison-overlap] + + m = construct_type( + value={"type": "a", "data": 100}, + type_=cast(Any, Annotated[Union[A, B], PropertyInfo(discriminator="type")]), + ) + assert isinstance(m, A) + assert m.type == "a" + if PYDANTIC_V1: + # pydantic v1 automatically converts inputs to strings + # if the expected type is a str + assert m.data == "100" + else: + assert m.data == 100 # type: ignore[comparison-overlap] + + +def test_discriminated_unions_unknown_variant() -> None: + class A(BaseModel): + type: Literal["a"] + + data: str + + class B(BaseModel): + type: Literal["b"] + + data: int + + m = construct_type( + value={"type": "c", "data": None, "new_thing": "bar"}, + type_=cast(Any, Annotated[Union[A, B], PropertyInfo(discriminator="type")]), + ) + + # just chooses the first variant + assert isinstance(m, A) + assert m.type == "c" # type: ignore[comparison-overlap] + assert m.data == None # type: ignore[unreachable] + assert m.new_thing == "bar" + + +def test_discriminated_unions_invalid_data_nested_unions() -> None: + class A(BaseModel): + type: Literal["a"] + + data: str + + class B(BaseModel): + type: Literal["b"] + + data: int + + class C(BaseModel): + type: Literal["c"] + + data: bool + + m = construct_type( + value={"type": "b", "data": "foo"}, + type_=cast(Any, Annotated[Union[Union[A, B], C], PropertyInfo(discriminator="type")]), + ) + assert isinstance(m, B) + assert m.type == "b" + assert m.data == "foo" # type: ignore[comparison-overlap] + + m = construct_type( + value={"type": "c", "data": "foo"}, + type_=cast(Any, Annotated[Union[Union[A, B], C], PropertyInfo(discriminator="type")]), + ) + assert isinstance(m, C) + assert m.type == "c" + assert m.data == "foo" # type: ignore[comparison-overlap] + + +def test_discriminated_unions_with_aliases_invalid_data() -> None: + class A(BaseModel): + foo_type: Literal["a"] = Field(alias="type") + + data: str + + class B(BaseModel): + foo_type: Literal["b"] = Field(alias="type") + + data: int + + m = construct_type( + value={"type": "b", "data": "foo"}, + type_=cast(Any, Annotated[Union[A, B], PropertyInfo(discriminator="foo_type")]), + ) + assert isinstance(m, B) + assert m.foo_type == "b" + assert m.data == "foo" # type: ignore[comparison-overlap] + + m = construct_type( + value={"type": "a", "data": 100}, + type_=cast(Any, Annotated[Union[A, B], PropertyInfo(discriminator="foo_type")]), + ) + assert isinstance(m, A) + assert m.foo_type == "a" + if PYDANTIC_V1: + # pydantic v1 automatically converts inputs to strings + # if the expected type is a str + assert m.data == "100" + else: + assert m.data == 100 # type: ignore[comparison-overlap] + + +def test_discriminated_unions_overlapping_discriminators_invalid_data() -> None: + class A(BaseModel): + type: Literal["a"] + + data: bool + + class B(BaseModel): + 
type: Literal["a"] + + data: int + + m = construct_type( + value={"type": "a", "data": "foo"}, + type_=cast(Any, Annotated[Union[A, B], PropertyInfo(discriminator="type")]), + ) + assert isinstance(m, B) + assert m.type == "a" + assert m.data == "foo" # type: ignore[comparison-overlap] + + +def test_discriminated_unions_invalid_data_uses_cache() -> None: + class A(BaseModel): + type: Literal["a"] + + data: str + + class B(BaseModel): + type: Literal["b"] + + data: int + + UnionType = cast(Any, Union[A, B]) + + assert not DISCRIMINATOR_CACHE.get(UnionType) + + m = construct_type( + value={"type": "b", "data": "foo"}, type_=cast(Any, Annotated[UnionType, PropertyInfo(discriminator="type")]) + ) + assert isinstance(m, B) + assert m.type == "b" + assert m.data == "foo" # type: ignore[comparison-overlap] + + discriminator = DISCRIMINATOR_CACHE.get(UnionType) + assert discriminator is not None + + m = construct_type( + value={"type": "b", "data": "foo"}, type_=cast(Any, Annotated[UnionType, PropertyInfo(discriminator="type")]) + ) + assert isinstance(m, B) + assert m.type == "b" + assert m.data == "foo" # type: ignore[comparison-overlap] + + # if the discriminator details object stays the same between invocations then + # we hit the cache + assert DISCRIMINATOR_CACHE.get(UnionType) is discriminator + + +@pytest.mark.skipif(PYDANTIC_V1, reason="TypeAliasType is not supported in Pydantic v1") +def test_type_alias_type() -> None: + Alias = TypeAliasType("Alias", str) # pyright: ignore + + class Model(BaseModel): + alias: Alias + union: Union[int, Alias] + + m = construct_type(value={"alias": "foo", "union": "bar"}, type_=Model) + assert isinstance(m, Model) + assert isinstance(m.alias, str) + assert m.alias == "foo" + assert isinstance(m.union, str) + assert m.union == "bar" + + +@pytest.mark.skipif(PYDANTIC_V1, reason="TypeAliasType is not supported in Pydantic v1") +def test_field_named_cls() -> None: + class Model(BaseModel): + cls: str + + m = construct_type(value={"cls": "foo"}, type_=Model) + assert isinstance(m, Model) + assert isinstance(m.cls, str) + + +def test_discriminated_union_case() -> None: + class A(BaseModel): + type: Literal["a"] + + data: bool + + class B(BaseModel): + type: Literal["b"] + + data: List[Union[A, object]] + + class ModelA(BaseModel): + type: Literal["modelA"] + + data: int + + class ModelB(BaseModel): + type: Literal["modelB"] + + required: str + + data: Union[A, B] + + # when constructing ModelA | ModelB, value data doesn't match ModelB exactly - missing `required` + m = construct_type( + value={"type": "modelB", "data": {"type": "a", "data": True}}, + type_=cast(Any, Annotated[Union[ModelA, ModelB], PropertyInfo(discriminator="type")]), + ) + + assert isinstance(m, ModelB) + + +def test_nested_discriminated_union() -> None: + class InnerType1(BaseModel): + type: Literal["type_1"] + + class InnerModel(BaseModel): + inner_value: str + + class InnerType2(BaseModel): + type: Literal["type_2"] + some_inner_model: InnerModel + + class Type1(BaseModel): + base_type: Literal["base_type_1"] + value: Annotated[ + Union[ + InnerType1, + InnerType2, + ], + PropertyInfo(discriminator="type"), + ] + + class Type2(BaseModel): + base_type: Literal["base_type_2"] + + T = Annotated[ + Union[ + Type1, + Type2, + ], + PropertyInfo(discriminator="base_type"), + ] + + model = construct_type( + type_=T, + value={ + "base_type": "base_type_1", + "value": { + "type": "type_2", + }, + }, + ) + assert isinstance(model, Type1) + assert isinstance(model.value, InnerType2) + + 
+@pytest.mark.skipif(PYDANTIC_V1, reason="this is only supported in pydantic v2 for now") +def test_extra_properties() -> None: + class Item(BaseModel): + prop: int + + class Model(BaseModel): + __pydantic_extra__: Dict[str, Item] = Field(init=False) # pyright: ignore[reportIncompatibleVariableOverride] + + other: str + + if TYPE_CHECKING: + + def __getattr__(self, attr: str) -> Item: ... + + model = construct_type( + type_=Model, + value={ + "a": {"prop": 1}, + "other": "foo", + }, + ) + assert isinstance(model, Model) + assert model.a.prop == 1 + assert isinstance(model.a, Item) + assert model.other == "foo" diff --git a/tests/test_qs.py b/tests/test_qs.py new file mode 100644 index 0000000..78ae641 --- /dev/null +++ b/tests/test_qs.py @@ -0,0 +1,78 @@ +from typing import Any, cast +from functools import partial +from urllib.parse import unquote + +import pytest + +from kernel._qs import Querystring, stringify + + +def test_empty() -> None: + assert stringify({}) == "" + assert stringify({"a": {}}) == "" + assert stringify({"a": {"b": {"c": {}}}}) == "" + + +def test_basic() -> None: + assert stringify({"a": 1}) == "a=1" + assert stringify({"a": "b"}) == "a=b" + assert stringify({"a": True}) == "a=true" + assert stringify({"a": False}) == "a=false" + assert stringify({"a": 1.23456}) == "a=1.23456" + assert stringify({"a": None}) == "" + + +@pytest.mark.parametrize("method", ["class", "function"]) +def test_nested_dotted(method: str) -> None: + if method == "class": + serialise = Querystring(nested_format="dots").stringify + else: + serialise = partial(stringify, nested_format="dots") + + assert unquote(serialise({"a": {"b": "c"}})) == "a.b=c" + assert unquote(serialise({"a": {"b": "c", "d": "e", "f": "g"}})) == "a.b=c&a.d=e&a.f=g" + assert unquote(serialise({"a": {"b": {"c": {"d": "e"}}}})) == "a.b.c.d=e" + assert unquote(serialise({"a": {"b": True}})) == "a.b=true" + + +def test_nested_brackets() -> None: + assert unquote(stringify({"a": {"b": "c"}})) == "a[b]=c" + assert unquote(stringify({"a": {"b": "c", "d": "e", "f": "g"}})) == "a[b]=c&a[d]=e&a[f]=g" + assert unquote(stringify({"a": {"b": {"c": {"d": "e"}}}})) == "a[b][c][d]=e" + assert unquote(stringify({"a": {"b": True}})) == "a[b]=true" + + +@pytest.mark.parametrize("method", ["class", "function"]) +def test_array_comma(method: str) -> None: + if method == "class": + serialise = Querystring(array_format="comma").stringify + else: + serialise = partial(stringify, array_format="comma") + + assert unquote(serialise({"in": ["foo", "bar"]})) == "in=foo,bar" + assert unquote(serialise({"a": {"b": [True, False]}})) == "a[b]=true,false" + assert unquote(serialise({"a": {"b": [True, False, None, True]}})) == "a[b]=true,false,true" + + +def test_array_repeat() -> None: + assert unquote(stringify({"in": ["foo", "bar"]})) == "in=foo&in=bar" + assert unquote(stringify({"a": {"b": [True, False]}})) == "a[b]=true&a[b]=false" + assert unquote(stringify({"a": {"b": [True, False, None, True]}})) == "a[b]=true&a[b]=false&a[b]=true" + assert unquote(stringify({"in": ["foo", {"b": {"c": ["d", "e"]}}]})) == "in=foo&in[b][c]=d&in[b][c]=e" + + +@pytest.mark.parametrize("method", ["class", "function"]) +def test_array_brackets(method: str) -> None: + if method == "class": + serialise = Querystring(array_format="brackets").stringify + else: + serialise = partial(stringify, array_format="brackets") + + assert unquote(serialise({"in": ["foo", "bar"]})) == "in[]=foo&in[]=bar" + assert unquote(serialise({"a": {"b": [True, False]}})) == 
"a[b][]=true&a[b][]=false" + assert unquote(serialise({"a": {"b": [True, False, None, True]}})) == "a[b][]=true&a[b][]=false&a[b][]=true" + + +def test_unknown_array_format() -> None: + with pytest.raises(NotImplementedError, match="Unknown array_format value: foo, choose from comma, repeat"): + stringify({"a": ["foo", "bar"]}, array_format=cast(Any, "foo")) diff --git a/tests/test_required_args.py b/tests/test_required_args.py new file mode 100644 index 0000000..7186db8 --- /dev/null +++ b/tests/test_required_args.py @@ -0,0 +1,111 @@ +from __future__ import annotations + +import pytest + +from kernel._utils import required_args + + +def test_too_many_positional_params() -> None: + @required_args(["a"]) + def foo(a: str | None = None) -> str | None: + return a + + with pytest.raises(TypeError, match=r"foo\(\) takes 1 argument\(s\) but 2 were given"): + foo("a", "b") # type: ignore + + +def test_positional_param() -> None: + @required_args(["a"]) + def foo(a: str | None = None) -> str | None: + return a + + assert foo("a") == "a" + assert foo(None) is None + assert foo(a="b") == "b" + + with pytest.raises(TypeError, match="Missing required argument: 'a'"): + foo() + + +def test_keyword_only_param() -> None: + @required_args(["a"]) + def foo(*, a: str | None = None) -> str | None: + return a + + assert foo(a="a") == "a" + assert foo(a=None) is None + assert foo(a="b") == "b" + + with pytest.raises(TypeError, match="Missing required argument: 'a'"): + foo() + + +def test_multiple_params() -> None: + @required_args(["a", "b", "c"]) + def foo(a: str = "", *, b: str = "", c: str = "") -> str | None: + return f"{a} {b} {c}" + + assert foo(a="a", b="b", c="c") == "a b c" + + error_message = r"Missing required arguments.*" + + with pytest.raises(TypeError, match=error_message): + foo() + + with pytest.raises(TypeError, match=error_message): + foo(a="a") + + with pytest.raises(TypeError, match=error_message): + foo(b="b") + + with pytest.raises(TypeError, match=error_message): + foo(c="c") + + with pytest.raises(TypeError, match=r"Missing required argument: 'a'"): + foo(b="a", c="c") + + with pytest.raises(TypeError, match=r"Missing required argument: 'b'"): + foo("a", c="c") + + +def test_multiple_variants() -> None: + @required_args(["a"], ["b"]) + def foo(*, a: str | None = None, b: str | None = None) -> str | None: + return a if a is not None else b + + assert foo(a="foo") == "foo" + assert foo(b="bar") == "bar" + assert foo(a=None) is None + assert foo(b=None) is None + + # TODO: this error message could probably be improved + with pytest.raises( + TypeError, + match=r"Missing required arguments; Expected either \('a'\) or \('b'\) arguments to be given", + ): + foo() + + +def test_multiple_params_multiple_variants() -> None: + @required_args(["a", "b"], ["c"]) + def foo(*, a: str | None = None, b: str | None = None, c: str | None = None) -> str | None: + if a is not None: + return a + if b is not None: + return b + return c + + error_message = r"Missing required arguments; Expected either \('a' and 'b'\) or \('c'\) arguments to be given" + + with pytest.raises(TypeError, match=error_message): + foo(a="foo") + + with pytest.raises(TypeError, match=error_message): + foo(b="bar") + + with pytest.raises(TypeError, match=error_message): + foo() + + assert foo(a=None, b="bar") == "bar" + assert foo(c=None) is None + assert foo(c="foo") == "foo" diff --git a/tests/test_response.py b/tests/test_response.py new file mode 100644 index 0000000..bf62a9b --- /dev/null +++ b/tests/test_response.py @@ -0,0 
+1,277 @@ +import json +from typing import Any, List, Union, cast +from typing_extensions import Annotated + +import httpx +import pytest +import pydantic + +from kernel import Kernel, BaseModel, AsyncKernel +from kernel._response import ( + APIResponse, + BaseAPIResponse, + AsyncAPIResponse, + BinaryAPIResponse, + AsyncBinaryAPIResponse, + extract_response_type, +) +from kernel._streaming import Stream +from kernel._base_client import FinalRequestOptions + + +class ConcreteBaseAPIResponse(APIResponse[bytes]): ... + + +class ConcreteAPIResponse(APIResponse[List[str]]): ... + + +class ConcreteAsyncAPIResponse(APIResponse[httpx.Response]): ... + + +def test_extract_response_type_direct_classes() -> None: + assert extract_response_type(BaseAPIResponse[str]) == str + assert extract_response_type(APIResponse[str]) == str + assert extract_response_type(AsyncAPIResponse[str]) == str + + +def test_extract_response_type_direct_class_missing_type_arg() -> None: + with pytest.raises( + RuntimeError, + match="Expected type to have a type argument at index 0 but it did not", + ): + extract_response_type(AsyncAPIResponse) + + +def test_extract_response_type_concrete_subclasses() -> None: + assert extract_response_type(ConcreteBaseAPIResponse) == bytes + assert extract_response_type(ConcreteAPIResponse) == List[str] + assert extract_response_type(ConcreteAsyncAPIResponse) == httpx.Response + + +def test_extract_response_type_binary_response() -> None: + assert extract_response_type(BinaryAPIResponse) == bytes + assert extract_response_type(AsyncBinaryAPIResponse) == bytes + + +class PydanticModel(pydantic.BaseModel): ... + + +def test_response_parse_mismatched_basemodel(client: Kernel) -> None: + response = APIResponse( + raw=httpx.Response(200, content=b"foo"), + client=client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + with pytest.raises( + TypeError, + match="Pydantic models must subclass our base model type, e.g. `from kernel import BaseModel`", + ): + response.parse(to=PydanticModel) + + +@pytest.mark.asyncio +async def test_async_response_parse_mismatched_basemodel(async_client: AsyncKernel) -> None: + response = AsyncAPIResponse( + raw=httpx.Response(200, content=b"foo"), + client=async_client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + with pytest.raises( + TypeError, + match="Pydantic models must subclass our base model type, e.g. 
`from kernel import BaseModel`", + ): + await response.parse(to=PydanticModel) + + +def test_response_parse_custom_stream(client: Kernel) -> None: + response = APIResponse( + raw=httpx.Response(200, content=b"foo"), + client=client, + stream=True, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + stream = response.parse(to=Stream[int]) + assert stream._cast_to == int + + +@pytest.mark.asyncio +async def test_async_response_parse_custom_stream(async_client: AsyncKernel) -> None: + response = AsyncAPIResponse( + raw=httpx.Response(200, content=b"foo"), + client=async_client, + stream=True, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + stream = await response.parse(to=Stream[int]) + assert stream._cast_to == int + + +class CustomModel(BaseModel): + foo: str + bar: int + + +def test_response_parse_custom_model(client: Kernel) -> None: + response = APIResponse( + raw=httpx.Response(200, content=json.dumps({"foo": "hello!", "bar": 2})), + client=client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + obj = response.parse(to=CustomModel) + assert obj.foo == "hello!" + assert obj.bar == 2 + + +@pytest.mark.asyncio +async def test_async_response_parse_custom_model(async_client: AsyncKernel) -> None: + response = AsyncAPIResponse( + raw=httpx.Response(200, content=json.dumps({"foo": "hello!", "bar": 2})), + client=async_client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + obj = await response.parse(to=CustomModel) + assert obj.foo == "hello!" + assert obj.bar == 2 + + +def test_response_parse_annotated_type(client: Kernel) -> None: + response = APIResponse( + raw=httpx.Response(200, content=json.dumps({"foo": "hello!", "bar": 2})), + client=client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + obj = response.parse( + to=cast("type[CustomModel]", Annotated[CustomModel, "random metadata"]), + ) + assert obj.foo == "hello!" + assert obj.bar == 2 + + +async def test_async_response_parse_annotated_type(async_client: AsyncKernel) -> None: + response = AsyncAPIResponse( + raw=httpx.Response(200, content=json.dumps({"foo": "hello!", "bar": 2})), + client=async_client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + obj = await response.parse( + to=cast("type[CustomModel]", Annotated[CustomModel, "random metadata"]), + ) + assert obj.foo == "hello!" 
+ assert obj.bar == 2 + + +@pytest.mark.parametrize( + "content, expected", + [ + ("false", False), + ("true", True), + ("False", False), + ("True", True), + ("TrUe", True), + ("FalSe", False), + ], +) +def test_response_parse_bool(client: Kernel, content: str, expected: bool) -> None: + response = APIResponse( + raw=httpx.Response(200, content=content), + client=client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + result = response.parse(to=bool) + assert result is expected + + +@pytest.mark.parametrize( + "content, expected", + [ + ("false", False), + ("true", True), + ("False", False), + ("True", True), + ("TrUe", True), + ("FalSe", False), + ], +) +async def test_async_response_parse_bool(client: AsyncKernel, content: str, expected: bool) -> None: + response = AsyncAPIResponse( + raw=httpx.Response(200, content=content), + client=client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + result = await response.parse(to=bool) + assert result is expected + + +class OtherModel(BaseModel): + a: str + + +@pytest.mark.parametrize("client", [False], indirect=True) # loose validation +def test_response_parse_expect_model_union_non_json_content(client: Kernel) -> None: + response = APIResponse( + raw=httpx.Response(200, content=b"foo", headers={"Content-Type": "application/text"}), + client=client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + obj = response.parse(to=cast(Any, Union[CustomModel, OtherModel])) + assert isinstance(obj, str) + assert obj == "foo" + + +@pytest.mark.asyncio +@pytest.mark.parametrize("async_client", [False], indirect=True) # loose validation +async def test_async_response_parse_expect_model_union_non_json_content(async_client: AsyncKernel) -> None: + response = AsyncAPIResponse( + raw=httpx.Response(200, content=b"foo", headers={"Content-Type": "application/text"}), + client=async_client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + obj = await response.parse(to=cast(Any, Union[CustomModel, OtherModel])) + assert isinstance(obj, str) + assert obj == "foo" diff --git a/tests/test_streaming.py b/tests/test_streaming.py new file mode 100644 index 0000000..4b8e4e4 --- /dev/null +++ b/tests/test_streaming.py @@ -0,0 +1,248 @@ +from __future__ import annotations + +from typing import Iterator, AsyncIterator + +import httpx +import pytest + +from kernel import Kernel, AsyncKernel +from kernel._streaming import Stream, AsyncStream, ServerSentEvent + + +@pytest.mark.asyncio +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_basic(sync: bool, client: Kernel, async_client: AsyncKernel) -> None: + def body() -> Iterator[bytes]: + yield b"event: completion\n" + yield b'data: {"foo":true}\n' + yield b"\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event == "completion" + assert sse.json() == {"foo": True} + + await assert_empty_iter(iterator) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_data_missing_event(sync: bool, client: Kernel, async_client: AsyncKernel) -> None: + def body() -> Iterator[bytes]: + yield b'data: {"foo":true}\n' + yield b"\n" + + iterator = 
make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event is None + assert sse.json() == {"foo": True} + + await assert_empty_iter(iterator) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_event_missing_data(sync: bool, client: Kernel, async_client: AsyncKernel) -> None: + def body() -> Iterator[bytes]: + yield b"event: ping\n" + yield b"\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event == "ping" + assert sse.data == "" + + await assert_empty_iter(iterator) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_multiple_events(sync: bool, client: Kernel, async_client: AsyncKernel) -> None: + def body() -> Iterator[bytes]: + yield b"event: ping\n" + yield b"\n" + yield b"event: completion\n" + yield b"\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event == "ping" + assert sse.data == "" + + sse = await iter_next(iterator) + assert sse.event == "completion" + assert sse.data == "" + + await assert_empty_iter(iterator) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_multiple_events_with_data(sync: bool, client: Kernel, async_client: AsyncKernel) -> None: + def body() -> Iterator[bytes]: + yield b"event: ping\n" + yield b'data: {"foo":true}\n' + yield b"\n" + yield b"event: completion\n" + yield b'data: {"bar":false}\n' + yield b"\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event == "ping" + assert sse.json() == {"foo": True} + + sse = await iter_next(iterator) + assert sse.event == "completion" + assert sse.json() == {"bar": False} + + await assert_empty_iter(iterator) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_multiple_data_lines_with_empty_line(sync: bool, client: Kernel, async_client: AsyncKernel) -> None: + def body() -> Iterator[bytes]: + yield b"event: ping\n" + yield b"data: {\n" + yield b'data: "foo":\n' + yield b"data: \n" + yield b"data:\n" + yield b"data: true}\n" + yield b"\n\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event == "ping" + assert sse.json() == {"foo": True} + assert sse.data == '{\n"foo":\n\n\ntrue}' + + await assert_empty_iter(iterator) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_data_json_escaped_double_new_line(sync: bool, client: Kernel, async_client: AsyncKernel) -> None: + def body() -> Iterator[bytes]: + yield b"event: ping\n" + yield b'data: {"foo": "my long\\n\\ncontent"}' + yield b"\n\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event == "ping" + assert sse.json() == {"foo": "my long\n\ncontent"} + + await assert_empty_iter(iterator) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_multiple_data_lines(sync: bool, client: Kernel, 
async_client: AsyncKernel) -> None: + def body() -> Iterator[bytes]: + yield b"event: ping\n" + yield b"data: {\n" + yield b'data: "foo":\n' + yield b"data: true}\n" + yield b"\n\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event == "ping" + assert sse.json() == {"foo": True} + + await assert_empty_iter(iterator) + + +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_special_new_line_character( + sync: bool, + client: Kernel, + async_client: AsyncKernel, +) -> None: + def body() -> Iterator[bytes]: + yield b'data: {"content":" culpa"}\n' + yield b"\n" + yield b'data: {"content":" \xe2\x80\xa8"}\n' + yield b"\n" + yield b'data: {"content":"foo"}\n' + yield b"\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event is None + assert sse.json() == {"content": " culpa"} + + sse = await iter_next(iterator) + assert sse.event is None + assert sse.json() == {"content": " 
"} + + sse = await iter_next(iterator) + assert sse.event is None + assert sse.json() == {"content": "foo"} + + await assert_empty_iter(iterator) + + +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_multi_byte_character_multiple_chunks( + sync: bool, + client: Kernel, + async_client: AsyncKernel, +) -> None: + def body() -> Iterator[bytes]: + yield b'data: {"content":"' + # bytes taken from the string 'известни' and arbitrarily split + # so that some multi-byte characters span multiple chunks + yield b"\xd0" + yield b"\xb8\xd0\xb7\xd0" + yield b"\xb2\xd0\xb5\xd1\x81\xd1\x82\xd0\xbd\xd0\xb8" + yield b'"}\n' + yield b"\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event is None + assert sse.json() == {"content": "известни"} + + +async def to_aiter(iter: Iterator[bytes]) -> AsyncIterator[bytes]: + for chunk in iter: + yield chunk + + +async def iter_next(iter: Iterator[ServerSentEvent] | AsyncIterator[ServerSentEvent]) -> ServerSentEvent: + if isinstance(iter, AsyncIterator): + return await iter.__anext__() + + return next(iter) + + +async def assert_empty_iter(iter: Iterator[ServerSentEvent] | AsyncIterator[ServerSentEvent]) -> None: + with pytest.raises((StopAsyncIteration, RuntimeError)): + await iter_next(iter) + + +def make_event_iterator( + content: Iterator[bytes], + *, + sync: bool, + client: Kernel, + async_client: AsyncKernel, +) -> Iterator[ServerSentEvent] | AsyncIterator[ServerSentEvent]: + if sync: + return Stream(cast_to=object, client=client, response=httpx.Response(200, content=content))._iter_events() + + return AsyncStream( + cast_to=object, client=async_client, response=httpx.Response(200, content=to_aiter(content)) + )._iter_events() diff --git a/tests/test_transform.py b/tests/test_transform.py new file mode 100644 index 0000000..68ca9b2 --- /dev/null +++ b/tests/test_transform.py @@ -0,0 +1,460 @@ +from __future__ import annotations + +import io +import pathlib +from typing import Any, Dict, List, Union, TypeVar, Iterable, Optional, cast +from datetime import date, datetime +from typing_extensions import Required, Annotated, TypedDict + +import pytest + +from kernel._types import Base64FileInput, omit, not_given +from kernel._utils import ( + PropertyInfo, + transform as _transform, + parse_datetime, + async_transform as _async_transform, +) +from kernel._compat import PYDANTIC_V1 +from kernel._models import BaseModel + +_T = TypeVar("_T") + +SAMPLE_FILE_PATH = pathlib.Path(__file__).parent.joinpath("sample_file.txt") + + +async def transform( + data: _T, + expected_type: object, + use_async: bool, +) -> _T: + if use_async: + return await _async_transform(data, expected_type=expected_type) + + return _transform(data, expected_type=expected_type) + + +parametrize = pytest.mark.parametrize("use_async", [False, True], ids=["sync", "async"]) + + +class Foo1(TypedDict): + foo_bar: Annotated[str, PropertyInfo(alias="fooBar")] + + +@parametrize +@pytest.mark.asyncio +async def test_top_level_alias(use_async: bool) -> None: + assert await transform({"foo_bar": "hello"}, expected_type=Foo1, use_async=use_async) == {"fooBar": "hello"} + + +class Foo2(TypedDict): + bar: Bar2 + + +class Bar2(TypedDict): + this_thing: Annotated[int, PropertyInfo(alias="this__thing")] + baz: Annotated[Baz2, PropertyInfo(alias="Baz")] + + +class Baz2(TypedDict): + my_baz: Annotated[str, PropertyInfo(alias="myBaz")] + + +@parametrize +@pytest.mark.asyncio 
+async def test_recursive_typeddict(use_async: bool) -> None: + assert await transform({"bar": {"this_thing": 1}}, Foo2, use_async) == {"bar": {"this__thing": 1}} + assert await transform({"bar": {"baz": {"my_baz": "foo"}}}, Foo2, use_async) == {"bar": {"Baz": {"myBaz": "foo"}}} + + +class Foo3(TypedDict): + things: List[Bar3] + + +class Bar3(TypedDict): + my_field: Annotated[str, PropertyInfo(alias="myField")] + + +@parametrize +@pytest.mark.asyncio +async def test_list_of_typeddict(use_async: bool) -> None: + result = await transform({"things": [{"my_field": "foo"}, {"my_field": "foo2"}]}, Foo3, use_async) + assert result == {"things": [{"myField": "foo"}, {"myField": "foo2"}]} + + +class Foo4(TypedDict): + foo: Union[Bar4, Baz4] + + +class Bar4(TypedDict): + foo_bar: Annotated[str, PropertyInfo(alias="fooBar")] + + +class Baz4(TypedDict): + foo_baz: Annotated[str, PropertyInfo(alias="fooBaz")] + + +@parametrize +@pytest.mark.asyncio +async def test_union_of_typeddict(use_async: bool) -> None: + assert await transform({"foo": {"foo_bar": "bar"}}, Foo4, use_async) == {"foo": {"fooBar": "bar"}} + assert await transform({"foo": {"foo_baz": "baz"}}, Foo4, use_async) == {"foo": {"fooBaz": "baz"}} + assert await transform({"foo": {"foo_baz": "baz", "foo_bar": "bar"}}, Foo4, use_async) == { + "foo": {"fooBaz": "baz", "fooBar": "bar"} + } + + +class Foo5(TypedDict): + foo: Annotated[Union[Bar4, List[Baz4]], PropertyInfo(alias="FOO")] + + +class Bar5(TypedDict): + foo_bar: Annotated[str, PropertyInfo(alias="fooBar")] + + +class Baz5(TypedDict): + foo_baz: Annotated[str, PropertyInfo(alias="fooBaz")] + + +@parametrize +@pytest.mark.asyncio +async def test_union_of_list(use_async: bool) -> None: + assert await transform({"foo": {"foo_bar": "bar"}}, Foo5, use_async) == {"FOO": {"fooBar": "bar"}} + assert await transform( + { + "foo": [ + {"foo_baz": "baz"}, + {"foo_baz": "baz"}, + ] + }, + Foo5, + use_async, + ) == {"FOO": [{"fooBaz": "baz"}, {"fooBaz": "baz"}]} + + +class Foo6(TypedDict): + bar: Annotated[str, PropertyInfo(alias="Bar")] + + +@parametrize +@pytest.mark.asyncio +async def test_includes_unknown_keys(use_async: bool) -> None: + assert await transform({"bar": "bar", "baz_": {"FOO": 1}}, Foo6, use_async) == { + "Bar": "bar", + "baz_": {"FOO": 1}, + } + + +class Foo7(TypedDict): + bar: Annotated[List[Bar7], PropertyInfo(alias="bAr")] + foo: Bar7 + + +class Bar7(TypedDict): + foo: str + + +@parametrize +@pytest.mark.asyncio +async def test_ignores_invalid_input(use_async: bool) -> None: + assert await transform({"bar": ""}, Foo7, use_async) == {"bAr": ""} + assert await transform({"foo": ""}, Foo7, use_async) == {"foo": ""} + + +class DatetimeDict(TypedDict, total=False): + foo: Annotated[datetime, PropertyInfo(format="iso8601")] + + bar: Annotated[Optional[datetime], PropertyInfo(format="iso8601")] + + required: Required[Annotated[Optional[datetime], PropertyInfo(format="iso8601")]] + + list_: Required[Annotated[Optional[List[datetime]], PropertyInfo(format="iso8601")]] + + union: Annotated[Union[int, datetime], PropertyInfo(format="iso8601")] + + +class DateDict(TypedDict, total=False): + foo: Annotated[date, PropertyInfo(format="iso8601")] + + +class DatetimeModel(BaseModel): + foo: datetime + + +class DateModel(BaseModel): + foo: Optional[date] + + +@parametrize +@pytest.mark.asyncio +async def test_iso8601_format(use_async: bool) -> None: + dt = datetime.fromisoformat("2023-02-23T14:16:36.337692+00:00") + tz = "+00:00" if PYDANTIC_V1 else "Z" + assert await transform({"foo": dt}, 
DatetimeDict, use_async) == {"foo": "2023-02-23T14:16:36.337692+00:00"} # type: ignore[comparison-overlap] + assert await transform(DatetimeModel(foo=dt), Any, use_async) == {"foo": "2023-02-23T14:16:36.337692" + tz} # type: ignore[comparison-overlap] + + dt = dt.replace(tzinfo=None) + assert await transform({"foo": dt}, DatetimeDict, use_async) == {"foo": "2023-02-23T14:16:36.337692"} # type: ignore[comparison-overlap] + assert await transform(DatetimeModel(foo=dt), Any, use_async) == {"foo": "2023-02-23T14:16:36.337692"} # type: ignore[comparison-overlap] + + assert await transform({"foo": None}, DateDict, use_async) == {"foo": None} # type: ignore[comparison-overlap] + assert await transform(DateModel(foo=None), Any, use_async) == {"foo": None} # type: ignore + assert await transform({"foo": date.fromisoformat("2023-02-23")}, DateDict, use_async) == {"foo": "2023-02-23"} # type: ignore[comparison-overlap] + assert await transform(DateModel(foo=date.fromisoformat("2023-02-23")), DateDict, use_async) == { + "foo": "2023-02-23" + } # type: ignore[comparison-overlap] + + +@parametrize +@pytest.mark.asyncio +async def test_optional_iso8601_format(use_async: bool) -> None: + dt = datetime.fromisoformat("2023-02-23T14:16:36.337692+00:00") + assert await transform({"bar": dt}, DatetimeDict, use_async) == {"bar": "2023-02-23T14:16:36.337692+00:00"} # type: ignore[comparison-overlap] + + assert await transform({"bar": None}, DatetimeDict, use_async) == {"bar": None} + + +@parametrize +@pytest.mark.asyncio +async def test_required_iso8601_format(use_async: bool) -> None: + dt = datetime.fromisoformat("2023-02-23T14:16:36.337692+00:00") + assert await transform({"required": dt}, DatetimeDict, use_async) == { + "required": "2023-02-23T14:16:36.337692+00:00" + } # type: ignore[comparison-overlap] + + assert await transform({"required": None}, DatetimeDict, use_async) == {"required": None} + + +@parametrize +@pytest.mark.asyncio +async def test_union_datetime(use_async: bool) -> None: + dt = datetime.fromisoformat("2023-02-23T14:16:36.337692+00:00") + assert await transform({"union": dt}, DatetimeDict, use_async) == { # type: ignore[comparison-overlap] + "union": "2023-02-23T14:16:36.337692+00:00" + } + + assert await transform({"union": "foo"}, DatetimeDict, use_async) == {"union": "foo"} + + +@parametrize +@pytest.mark.asyncio +async def test_nested_list_iso6801_format(use_async: bool) -> None: + dt1 = datetime.fromisoformat("2023-02-23T14:16:36.337692+00:00") + dt2 = parse_datetime("2022-01-15T06:34:23Z") + assert await transform({"list_": [dt1, dt2]}, DatetimeDict, use_async) == { # type: ignore[comparison-overlap] + "list_": ["2023-02-23T14:16:36.337692+00:00", "2022-01-15T06:34:23+00:00"] + } + + +@parametrize +@pytest.mark.asyncio +async def test_datetime_custom_format(use_async: bool) -> None: + dt = parse_datetime("2022-01-15T06:34:23Z") + + result = await transform(dt, Annotated[datetime, PropertyInfo(format="custom", format_template="%H")], use_async) + assert result == "06" # type: ignore[comparison-overlap] + + +class DateDictWithRequiredAlias(TypedDict, total=False): + required_prop: Required[Annotated[date, PropertyInfo(format="iso8601", alias="prop")]] + + +@parametrize +@pytest.mark.asyncio +async def test_datetime_with_alias(use_async: bool) -> None: + assert await transform({"required_prop": None}, DateDictWithRequiredAlias, use_async) == {"prop": None} # type: ignore[comparison-overlap] + assert await transform( + {"required_prop": date.fromisoformat("2023-02-23")}, 
DateDictWithRequiredAlias, use_async + ) == {"prop": "2023-02-23"} # type: ignore[comparison-overlap] + + +class MyModel(BaseModel): + foo: str + + +@parametrize +@pytest.mark.asyncio +async def test_pydantic_model_to_dictionary(use_async: bool) -> None: + assert cast(Any, await transform(MyModel(foo="hi!"), Any, use_async)) == {"foo": "hi!"} + assert cast(Any, await transform(MyModel.construct(foo="hi!"), Any, use_async)) == {"foo": "hi!"} + + +@parametrize +@pytest.mark.asyncio +async def test_pydantic_empty_model(use_async: bool) -> None: + assert cast(Any, await transform(MyModel.construct(), Any, use_async)) == {} + + +@parametrize +@pytest.mark.asyncio +async def test_pydantic_unknown_field(use_async: bool) -> None: + assert cast(Any, await transform(MyModel.construct(my_untyped_field=True), Any, use_async)) == { + "my_untyped_field": True + } + + +@parametrize +@pytest.mark.asyncio +async def test_pydantic_mismatched_types(use_async: bool) -> None: + model = MyModel.construct(foo=True) + if PYDANTIC_V1: + params = await transform(model, Any, use_async) + else: + with pytest.warns(UserWarning): + params = await transform(model, Any, use_async) + assert cast(Any, params) == {"foo": True} + + +@parametrize +@pytest.mark.asyncio +async def test_pydantic_mismatched_object_type(use_async: bool) -> None: + model = MyModel.construct(foo=MyModel.construct(hello="world")) + if PYDANTIC_V1: + params = await transform(model, Any, use_async) + else: + with pytest.warns(UserWarning): + params = await transform(model, Any, use_async) + assert cast(Any, params) == {"foo": {"hello": "world"}} + + +class ModelNestedObjects(BaseModel): + nested: MyModel + + +@parametrize +@pytest.mark.asyncio +async def test_pydantic_nested_objects(use_async: bool) -> None: + model = ModelNestedObjects.construct(nested={"foo": "stainless"}) + assert isinstance(model.nested, MyModel) + assert cast(Any, await transform(model, Any, use_async)) == {"nested": {"foo": "stainless"}} + + +class ModelWithDefaultField(BaseModel): + foo: str + with_none_default: Union[str, None] = None + with_str_default: str = "foo" + + +@parametrize +@pytest.mark.asyncio +async def test_pydantic_default_field(use_async: bool) -> None: + # should be excluded when defaults are used + model = ModelWithDefaultField.construct() + assert model.with_none_default is None + assert model.with_str_default == "foo" + assert cast(Any, await transform(model, Any, use_async)) == {} + + # should be included when the default value is explicitly given + model = ModelWithDefaultField.construct(with_none_default=None, with_str_default="foo") + assert model.with_none_default is None + assert model.with_str_default == "foo" + assert cast(Any, await transform(model, Any, use_async)) == {"with_none_default": None, "with_str_default": "foo"} + + # should be included when a non-default value is explicitly given + model = ModelWithDefaultField.construct(with_none_default="bar", with_str_default="baz") + assert model.with_none_default == "bar" + assert model.with_str_default == "baz" + assert cast(Any, await transform(model, Any, use_async)) == {"with_none_default": "bar", "with_str_default": "baz"} + + +class TypedDictIterableUnion(TypedDict): + foo: Annotated[Union[Bar8, Iterable[Baz8]], PropertyInfo(alias="FOO")] + + +class Bar8(TypedDict): + foo_bar: Annotated[str, PropertyInfo(alias="fooBar")] + + +class Baz8(TypedDict): + foo_baz: Annotated[str, PropertyInfo(alias="fooBaz")] + + +@parametrize +@pytest.mark.asyncio +async def test_iterable_of_dictionaries(use_async: 
bool) -> None: + assert await transform({"foo": [{"foo_baz": "bar"}]}, TypedDictIterableUnion, use_async) == { + "FOO": [{"fooBaz": "bar"}] + } + assert cast(Any, await transform({"foo": ({"foo_baz": "bar"},)}, TypedDictIterableUnion, use_async)) == { + "FOO": [{"fooBaz": "bar"}] + } + + def my_iter() -> Iterable[Baz8]: + yield {"foo_baz": "hello"} + yield {"foo_baz": "world"} + + assert await transform({"foo": my_iter()}, TypedDictIterableUnion, use_async) == { + "FOO": [{"fooBaz": "hello"}, {"fooBaz": "world"}] + } + + +@parametrize +@pytest.mark.asyncio +async def test_dictionary_items(use_async: bool) -> None: + class DictItems(TypedDict): + foo_baz: Annotated[str, PropertyInfo(alias="fooBaz")] + + assert await transform({"foo": {"foo_baz": "bar"}}, Dict[str, DictItems], use_async) == {"foo": {"fooBaz": "bar"}} + + +class TypedDictIterableUnionStr(TypedDict): + foo: Annotated[Union[str, Iterable[Baz8]], PropertyInfo(alias="FOO")] + + +@parametrize +@pytest.mark.asyncio +async def test_iterable_union_str(use_async: bool) -> None: + assert await transform({"foo": "bar"}, TypedDictIterableUnionStr, use_async) == {"FOO": "bar"} + assert cast(Any, await transform(iter([{"foo_baz": "bar"}]), Union[str, Iterable[Baz8]], use_async)) == [ + {"fooBaz": "bar"} + ] + + +class TypedDictBase64Input(TypedDict): + foo: Annotated[Union[str, Base64FileInput], PropertyInfo(format="base64")] + + +@parametrize +@pytest.mark.asyncio +async def test_base64_file_input(use_async: bool) -> None: + # strings are left as-is + assert await transform({"foo": "bar"}, TypedDictBase64Input, use_async) == {"foo": "bar"} + + # pathlib.Path is automatically converted to base64 + assert await transform({"foo": SAMPLE_FILE_PATH}, TypedDictBase64Input, use_async) == { + "foo": "SGVsbG8sIHdvcmxkIQo=" + } # type: ignore[comparison-overlap] + + # io instances are automatically converted to base64 + assert await transform({"foo": io.StringIO("Hello, world!")}, TypedDictBase64Input, use_async) == { + "foo": "SGVsbG8sIHdvcmxkIQ==" + } # type: ignore[comparison-overlap] + assert await transform({"foo": io.BytesIO(b"Hello, world!")}, TypedDictBase64Input, use_async) == { + "foo": "SGVsbG8sIHdvcmxkIQ==" + } # type: ignore[comparison-overlap] + + +@parametrize +@pytest.mark.asyncio +async def test_transform_skipping(use_async: bool) -> None: + # lists of ints are left as-is + data = [1, 2, 3] + assert await transform(data, List[int], use_async) is data + + # iterables of ints are converted to a list + data = iter([1, 2, 3]) + assert await transform(data, Iterable[int], use_async) == [1, 2, 3] + + +@parametrize +@pytest.mark.asyncio +async def test_strips_notgiven(use_async: bool) -> None: + assert await transform({"foo_bar": "bar"}, Foo1, use_async) == {"fooBar": "bar"} + assert await transform({"foo_bar": not_given}, Foo1, use_async) == {} + + +@parametrize +@pytest.mark.asyncio +async def test_strips_omit(use_async: bool) -> None: + assert await transform({"foo_bar": "bar"}, Foo1, use_async) == {"fooBar": "bar"} + assert await transform({"foo_bar": omit}, Foo1, use_async) == {} diff --git a/tests/test_utils/test_datetime_parse.py b/tests/test_utils/test_datetime_parse.py new file mode 100644 index 0000000..f626532 --- /dev/null +++ b/tests/test_utils/test_datetime_parse.py @@ -0,0 +1,110 @@ +""" +Copied from https://github.com/pydantic/pydantic/blob/v1.10.22/tests/test_datetime_parse.py +with modifications so it works without pydantic v1 imports. 
+""" + +from typing import Type, Union +from datetime import date, datetime, timezone, timedelta + +import pytest + +from kernel._utils import parse_date, parse_datetime + + +def create_tz(minutes: int) -> timezone: + return timezone(timedelta(minutes=minutes)) + + +@pytest.mark.parametrize( + "value,result", + [ + # Valid inputs + ("1494012444.883309", date(2017, 5, 5)), + (b"1494012444.883309", date(2017, 5, 5)), + (1_494_012_444.883_309, date(2017, 5, 5)), + ("1494012444", date(2017, 5, 5)), + (1_494_012_444, date(2017, 5, 5)), + (0, date(1970, 1, 1)), + ("2012-04-23", date(2012, 4, 23)), + (b"2012-04-23", date(2012, 4, 23)), + ("2012-4-9", date(2012, 4, 9)), + (date(2012, 4, 9), date(2012, 4, 9)), + (datetime(2012, 4, 9, 12, 15), date(2012, 4, 9)), + # Invalid inputs + ("x20120423", ValueError), + ("2012-04-56", ValueError), + (19_999_999_999, date(2603, 10, 11)), # just before watershed + (20_000_000_001, date(1970, 8, 20)), # just after watershed + (1_549_316_052, date(2019, 2, 4)), # nowish in s + (1_549_316_052_104, date(2019, 2, 4)), # nowish in ms + (1_549_316_052_104_324, date(2019, 2, 4)), # nowish in μs + (1_549_316_052_104_324_096, date(2019, 2, 4)), # nowish in ns + ("infinity", date(9999, 12, 31)), + ("inf", date(9999, 12, 31)), + (float("inf"), date(9999, 12, 31)), + ("infinity ", date(9999, 12, 31)), + (int("1" + "0" * 100), date(9999, 12, 31)), + (1e1000, date(9999, 12, 31)), + ("-infinity", date(1, 1, 1)), + ("-inf", date(1, 1, 1)), + ("nan", ValueError), + ], +) +def test_date_parsing(value: Union[str, bytes, int, float], result: Union[date, Type[Exception]]) -> None: + if type(result) == type and issubclass(result, Exception): # pyright: ignore[reportUnnecessaryIsInstance] + with pytest.raises(result): + parse_date(value) + else: + assert parse_date(value) == result + + +@pytest.mark.parametrize( + "value,result", + [ + # Valid inputs + # values in seconds + ("1494012444.883309", datetime(2017, 5, 5, 19, 27, 24, 883_309, tzinfo=timezone.utc)), + (1_494_012_444.883_309, datetime(2017, 5, 5, 19, 27, 24, 883_309, tzinfo=timezone.utc)), + ("1494012444", datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)), + (b"1494012444", datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)), + (1_494_012_444, datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)), + # values in ms + ("1494012444000.883309", datetime(2017, 5, 5, 19, 27, 24, 883, tzinfo=timezone.utc)), + ("-1494012444000.883309", datetime(1922, 8, 29, 4, 32, 35, 999117, tzinfo=timezone.utc)), + (1_494_012_444_000, datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)), + ("2012-04-23T09:15:00", datetime(2012, 4, 23, 9, 15)), + ("2012-4-9 4:8:16", datetime(2012, 4, 9, 4, 8, 16)), + ("2012-04-23T09:15:00Z", datetime(2012, 4, 23, 9, 15, 0, 0, timezone.utc)), + ("2012-4-9 4:8:16-0320", datetime(2012, 4, 9, 4, 8, 16, 0, create_tz(-200))), + ("2012-04-23T10:20:30.400+02:30", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(150))), + ("2012-04-23T10:20:30.400+02", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(120))), + ("2012-04-23T10:20:30.400-02", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(-120))), + (b"2012-04-23T10:20:30.400-02", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(-120))), + (datetime(2017, 5, 5), datetime(2017, 5, 5)), + (0, datetime(1970, 1, 1, 0, 0, 0, tzinfo=timezone.utc)), + # Invalid inputs + ("x20120423091500", ValueError), + ("2012-04-56T09:15:90", ValueError), + ("2012-04-23T11:05:00-25:00", ValueError), + (19_999_999_999, datetime(2603, 10, 11, 11, 33, 19, 
tzinfo=timezone.utc)), # just before watershed + (20_000_000_001, datetime(1970, 8, 20, 11, 33, 20, 1000, tzinfo=timezone.utc)), # just after watershed + (1_549_316_052, datetime(2019, 2, 4, 21, 34, 12, 0, tzinfo=timezone.utc)), # nowish in s + (1_549_316_052_104, datetime(2019, 2, 4, 21, 34, 12, 104_000, tzinfo=timezone.utc)), # nowish in ms + (1_549_316_052_104_324, datetime(2019, 2, 4, 21, 34, 12, 104_324, tzinfo=timezone.utc)), # nowish in μs + (1_549_316_052_104_324_096, datetime(2019, 2, 4, 21, 34, 12, 104_324, tzinfo=timezone.utc)), # nowish in ns + ("infinity", datetime(9999, 12, 31, 23, 59, 59, 999999)), + ("inf", datetime(9999, 12, 31, 23, 59, 59, 999999)), + ("inf ", datetime(9999, 12, 31, 23, 59, 59, 999999)), + (1e50, datetime(9999, 12, 31, 23, 59, 59, 999999)), + (float("inf"), datetime(9999, 12, 31, 23, 59, 59, 999999)), + ("-infinity", datetime(1, 1, 1, 0, 0)), + ("-inf", datetime(1, 1, 1, 0, 0)), + ("nan", ValueError), + ], +) +def test_datetime_parsing(value: Union[str, bytes, int, float], result: Union[datetime, Type[Exception]]) -> None: + if type(result) == type and issubclass(result, Exception): # pyright: ignore[reportUnnecessaryIsInstance] + with pytest.raises(result): + parse_datetime(value) + else: + assert parse_datetime(value) == result diff --git a/tests/test_utils/test_proxy.py b/tests/test_utils/test_proxy.py new file mode 100644 index 0000000..8c9c8ae --- /dev/null +++ b/tests/test_utils/test_proxy.py @@ -0,0 +1,34 @@ +import operator +from typing import Any +from typing_extensions import override + +from kernel._utils import LazyProxy + + +class RecursiveLazyProxy(LazyProxy[Any]): + @override + def __load__(self) -> Any: + return self + + def __call__(self, *_args: Any, **_kwds: Any) -> Any: + raise RuntimeError("This should never be called!") + + +def test_recursive_proxy() -> None: + proxy = RecursiveLazyProxy() + assert repr(proxy) == "RecursiveLazyProxy" + assert str(proxy) == "RecursiveLazyProxy" + assert dir(proxy) == [] + assert type(proxy).__name__ == "RecursiveLazyProxy" + assert type(operator.attrgetter("name.foo.bar.baz")(proxy)).__name__ == "RecursiveLazyProxy" + + +def test_isinstance_does_not_error() -> None: + class AlwaysErrorProxy(LazyProxy[Any]): + @override + def __load__(self) -> Any: + raise RuntimeError("Mocking missing dependency") + + proxy = AlwaysErrorProxy() + assert not isinstance(proxy, dict) + assert isinstance(proxy, LazyProxy) diff --git a/tests/test_utils/test_typing.py b/tests/test_utils/test_typing.py new file mode 100644 index 0000000..3b18d48 --- /dev/null +++ b/tests/test_utils/test_typing.py @@ -0,0 +1,73 @@ +from __future__ import annotations + +from typing import Generic, TypeVar, cast + +from kernel._utils import extract_type_var_from_base + +_T = TypeVar("_T") +_T2 = TypeVar("_T2") +_T3 = TypeVar("_T3") + + +class BaseGeneric(Generic[_T]): ... + + +class SubclassGeneric(BaseGeneric[_T]): ... + + +class BaseGenericMultipleTypeArgs(Generic[_T, _T2, _T3]): ... + + +class SubclassGenericMultipleTypeArgs(BaseGenericMultipleTypeArgs[_T, _T2, _T3]): ... + + +class SubclassDifferentOrderGenericMultipleTypeArgs(BaseGenericMultipleTypeArgs[_T2, _T, _T3]): ... 
+ + +def test_extract_type_var() -> None: + assert ( + extract_type_var_from_base( + BaseGeneric[int], + index=0, + generic_bases=cast("tuple[type, ...]", (BaseGeneric,)), + ) + == int + ) + + +def test_extract_type_var_generic_subclass() -> None: + assert ( + extract_type_var_from_base( + SubclassGeneric[int], + index=0, + generic_bases=cast("tuple[type, ...]", (BaseGeneric,)), + ) + == int + ) + + +def test_extract_type_var_multiple() -> None: + typ = BaseGenericMultipleTypeArgs[int, str, None] + + generic_bases = cast("tuple[type, ...]", (BaseGenericMultipleTypeArgs,)) + assert extract_type_var_from_base(typ, index=0, generic_bases=generic_bases) == int + assert extract_type_var_from_base(typ, index=1, generic_bases=generic_bases) == str + assert extract_type_var_from_base(typ, index=2, generic_bases=generic_bases) == type(None) + + +def test_extract_type_var_generic_subclass_multiple() -> None: + typ = SubclassGenericMultipleTypeArgs[int, str, None] + + generic_bases = cast("tuple[type, ...]", (BaseGenericMultipleTypeArgs,)) + assert extract_type_var_from_base(typ, index=0, generic_bases=generic_bases) == int + assert extract_type_var_from_base(typ, index=1, generic_bases=generic_bases) == str + assert extract_type_var_from_base(typ, index=2, generic_bases=generic_bases) == type(None) + + +def test_extract_type_var_generic_subclass_different_ordering_multiple() -> None: + typ = SubclassDifferentOrderGenericMultipleTypeArgs[int, str, None] + + generic_bases = cast("tuple[type, ...]", (BaseGenericMultipleTypeArgs,)) + assert extract_type_var_from_base(typ, index=0, generic_bases=generic_bases) == int + assert extract_type_var_from_base(typ, index=1, generic_bases=generic_bases) == str + assert extract_type_var_from_base(typ, index=2, generic_bases=generic_bases) == type(None) diff --git a/tests/utils.py b/tests/utils.py new file mode 100644 index 0000000..3147457 --- /dev/null +++ b/tests/utils.py @@ -0,0 +1,167 @@ +from __future__ import annotations + +import os +import inspect +import traceback +import contextlib +from typing import Any, TypeVar, Iterator, Sequence, cast +from datetime import date, datetime +from typing_extensions import Literal, get_args, get_origin, assert_type + +from kernel._types import Omit, NoneType +from kernel._utils import ( + is_dict, + is_list, + is_list_type, + is_union_type, + extract_type_arg, + is_sequence_type, + is_annotated_type, + is_type_alias_type, +) +from kernel._compat import PYDANTIC_V1, field_outer_type, get_model_fields +from kernel._models import BaseModel + +BaseModelT = TypeVar("BaseModelT", bound=BaseModel) + + +def assert_matches_model(model: type[BaseModelT], value: BaseModelT, *, path: list[str]) -> bool: + for name, field in get_model_fields(model).items(): + field_value = getattr(value, name) + if PYDANTIC_V1: + # in v1 nullability was structured differently + # https://docs.pydantic.dev/2.0/migration/#required-optional-and-nullable-fields + allow_none = getattr(field, "allow_none", False) + else: + allow_none = False + + assert_matches_type( + field_outer_type(field), + field_value, + path=[*path, name], + allow_none=allow_none, + ) + + return True + + +# Note: the `path` argument is only used to improve error messages when `--showlocals` is used +def assert_matches_type( + type_: Any, + value: object, + *, + path: list[str], + allow_none: bool = False, +) -> None: + if is_type_alias_type(type_): + type_ = type_.__value__ + + # unwrap `Annotated[T, ...]` -> `T` + if is_annotated_type(type_): + type_ = extract_type_arg(type_, 0) + + 
if allow_none and value is None: + return + + if type_ is None or type_ is NoneType: + assert value is None + return + + origin = get_origin(type_) or type_ + + if is_list_type(type_): + return _assert_list_type(type_, value) + + if is_sequence_type(type_): + assert isinstance(value, Sequence) + inner_type = get_args(type_)[0] + for entry in value: # type: ignore + assert_type(inner_type, entry) # type: ignore + return + + if origin == str: + assert isinstance(value, str) + elif origin == int: + assert isinstance(value, int) + elif origin == bool: + assert isinstance(value, bool) + elif origin == float: + assert isinstance(value, float) + elif origin == bytes: + assert isinstance(value, bytes) + elif origin == datetime: + assert isinstance(value, datetime) + elif origin == date: + assert isinstance(value, date) + elif origin == object: + # nothing to do here, the expected type is unknown + pass + elif origin == Literal: + assert value in get_args(type_) + elif origin == dict: + assert is_dict(value) + + args = get_args(type_) + key_type = args[0] + items_type = args[1] + + for key, item in value.items(): + assert_matches_type(key_type, key, path=[*path, ""]) + assert_matches_type(items_type, item, path=[*path, ""]) + elif is_union_type(type_): + variants = get_args(type_) + + try: + none_index = variants.index(type(None)) + except ValueError: + pass + else: + # special case Optional[T] for better error messages + if len(variants) == 2: + if value is None: + # valid + return + + return assert_matches_type(type_=variants[not none_index], value=value, path=path) + + for i, variant in enumerate(variants): + try: + assert_matches_type(variant, value, path=[*path, f"variant {i}"]) + return + except AssertionError: + traceback.print_exc() + continue + + raise AssertionError("Did not match any variants") + elif issubclass(origin, BaseModel): + assert isinstance(value, type_) + assert assert_matches_model(type_, cast(Any, value), path=path) + elif inspect.isclass(origin) and origin.__name__ == "HttpxBinaryResponseContent": + assert value.__class__.__name__ == "HttpxBinaryResponseContent" + else: + assert None, f"Unhandled field type: {type_}" + + +def _assert_list_type(type_: type[object], value: object) -> None: + assert is_list(value) + + inner_type = get_args(type_)[0] + for entry in value: + assert_type(inner_type, entry) # type: ignore + + +@contextlib.contextmanager +def update_env(**new_env: str | Omit) -> Iterator[None]: + old = os.environ.copy() + + try: + for name, value in new_env.items(): + if isinstance(value, Omit): + os.environ.pop(name, None) + else: + os.environ[name] = value + + yield None + finally: + os.environ.clear() + os.environ.update(old)