mirror of
https://github.com/anomalyco/opencode-sdk-python.git
synced 2026-04-26 11:40:54 +00:00
feat(api): update via SDK Studio
This commit is contained in:
parent
604017133e
commit
ff05a4adf0
130 changed files with 17166 additions and 1 deletions
9
.devcontainer/Dockerfile
Normal file
9
.devcontainer/Dockerfile
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
ARG VARIANT="3.9"
|
||||
FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT}
|
||||
|
||||
USER vscode
|
||||
|
||||
RUN curl -sSf https://rye.astral.sh/get | RYE_VERSION="0.44.0" RYE_INSTALL_OPTION="--yes" bash
|
||||
ENV PATH=/home/vscode/.rye/shims:$PATH
|
||||
|
||||
RUN echo "[[ -d .venv ]] && source .venv/bin/activate || export PATH=\$PATH" >> /home/vscode/.bashrc
|
||||
43
.devcontainer/devcontainer.json
Normal file
43
.devcontainer/devcontainer.json
Normal file
|
|
@ -0,0 +1,43 @@
|
|||
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
|
||||
// README at: https://github.com/devcontainers/templates/tree/main/src/debian
|
||||
{
|
||||
"name": "Debian",
|
||||
"build": {
|
||||
"dockerfile": "Dockerfile",
|
||||
"context": ".."
|
||||
},
|
||||
|
||||
"postStartCommand": "rye sync --all-features",
|
||||
|
||||
"customizations": {
|
||||
"vscode": {
|
||||
"extensions": [
|
||||
"ms-python.python"
|
||||
],
|
||||
"settings": {
|
||||
"terminal.integrated.shell.linux": "/bin/bash",
|
||||
"python.pythonPath": ".venv/bin/python",
|
||||
"python.defaultInterpreterPath": ".venv/bin/python",
|
||||
"python.typeChecking": "basic",
|
||||
"terminal.integrated.env.linux": {
|
||||
"PATH": "/home/vscode/.rye/shims:${env:PATH}"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"features": {
|
||||
"ghcr.io/devcontainers/features/node:1": {}
|
||||
}
|
||||
|
||||
// Features to add to the dev container. More info: https://containers.dev/features.
|
||||
// "features": {},
|
||||
|
||||
// Use 'forwardPorts' to make a list of ports inside the container available locally.
|
||||
// "forwardPorts": [],
|
||||
|
||||
// Configure tool-specific properties.
|
||||
// "customizations": {},
|
||||
|
||||
// Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
|
||||
// "remoteUser": "root"
|
||||
}
|
||||
80
.github/workflows/ci.yml
vendored
Normal file
80
.github/workflows/ci.yml
vendored
Normal file
|
|
@ -0,0 +1,80 @@
|
|||
name: CI
|
||||
on:
|
||||
push:
|
||||
branches-ignore:
|
||||
- 'generated'
|
||||
- 'codegen/**'
|
||||
- 'integrated/**'
|
||||
- 'stl-preview-head/**'
|
||||
- 'stl-preview-base/**'
|
||||
pull_request:
|
||||
branches-ignore:
|
||||
- 'stl-preview-head/**'
|
||||
- 'stl-preview-base/**'
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
timeout-minutes: 10
|
||||
name: lint
|
||||
runs-on: ${{ github.repository == 'stainless-sdks/opencode-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install Rye
|
||||
run: |
|
||||
curl -sSf https://rye.astral.sh/get | bash
|
||||
echo "$HOME/.rye/shims" >> $GITHUB_PATH
|
||||
env:
|
||||
RYE_VERSION: '0.44.0'
|
||||
RYE_INSTALL_OPTION: '--yes'
|
||||
|
||||
- name: Install dependencies
|
||||
run: rye sync --all-features
|
||||
|
||||
- name: Run lints
|
||||
run: ./scripts/lint
|
||||
|
||||
upload:
|
||||
if: github.repository == 'stainless-sdks/opencode-python'
|
||||
timeout-minutes: 10
|
||||
name: upload
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write
|
||||
runs-on: depot-ubuntu-24.04
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Get GitHub OIDC Token
|
||||
id: github-oidc
|
||||
uses: actions/github-script@v6
|
||||
with:
|
||||
script: core.setOutput('github_token', await core.getIDToken());
|
||||
|
||||
- name: Upload tarball
|
||||
env:
|
||||
URL: https://pkg.stainless.com/s
|
||||
AUTH: ${{ steps.github-oidc.outputs.github_token }}
|
||||
SHA: ${{ github.sha }}
|
||||
run: ./scripts/utils/upload-artifact.sh
|
||||
|
||||
test:
|
||||
timeout-minutes: 10
|
||||
name: test
|
||||
runs-on: ${{ github.repository == 'stainless-sdks/opencode-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install Rye
|
||||
run: |
|
||||
curl -sSf https://rye.astral.sh/get | bash
|
||||
echo "$HOME/.rye/shims" >> $GITHUB_PATH
|
||||
env:
|
||||
RYE_VERSION: '0.44.0'
|
||||
RYE_INSTALL_OPTION: '--yes'
|
||||
|
||||
- name: Bootstrap
|
||||
run: ./scripts/bootstrap
|
||||
|
||||
- name: Run tests
|
||||
run: ./scripts/test
|
||||
38
.github/workflows/create-releases.yml
vendored
Normal file
38
.github/workflows/create-releases.yml
vendored
Normal file
|
|
@ -0,0 +1,38 @@
|
|||
name: Create releases
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 5 * * *' # every day at 5am UTC
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
release:
|
||||
name: release
|
||||
if: github.ref == 'refs/heads/main' && github.repository == 'sst/opencode-sdk-python'
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: stainless-api/trigger-release-please@v1
|
||||
id: release
|
||||
with:
|
||||
repo: ${{ github.event.repository.full_name }}
|
||||
stainless-api-key: ${{ secrets.STAINLESS_API_KEY }}
|
||||
|
||||
- name: Install Rye
|
||||
if: ${{ steps.release.outputs.releases_created }}
|
||||
run: |
|
||||
curl -sSf https://rye.astral.sh/get | bash
|
||||
echo "$HOME/.rye/shims" >> $GITHUB_PATH
|
||||
env:
|
||||
RYE_VERSION: '0.44.0'
|
||||
RYE_INSTALL_OPTION: '--yes'
|
||||
|
||||
- name: Publish to PyPI
|
||||
if: ${{ steps.release.outputs.releases_created }}
|
||||
run: |
|
||||
bash ./bin/publish-pypi
|
||||
env:
|
||||
PYPI_TOKEN: ${{ secrets.OPENCODE_PYPI_TOKEN || secrets.PYPI_TOKEN }}
|
||||
27
.github/workflows/publish-pypi.yml
vendored
Normal file
27
.github/workflows/publish-pypi.yml
vendored
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
# workflow for re-running publishing to PyPI in case it fails for some reason
|
||||
# you can run this workflow by navigating to https://www.github.com/sst/opencode-sdk-python/actions/workflows/publish-pypi.yml
|
||||
name: Publish PyPI
|
||||
on:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
name: publish
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install Rye
|
||||
run: |
|
||||
curl -sSf https://rye.astral.sh/get | bash
|
||||
echo "$HOME/.rye/shims" >> $GITHUB_PATH
|
||||
env:
|
||||
RYE_VERSION: '0.44.0'
|
||||
RYE_INSTALL_OPTION: '--yes'
|
||||
|
||||
- name: Publish to PyPI
|
||||
run: |
|
||||
bash ./bin/publish-pypi
|
||||
env:
|
||||
PYPI_TOKEN: ${{ secrets.OPENCODE_PYPI_TOKEN || secrets.PYPI_TOKEN }}
|
||||
22
.github/workflows/release-doctor.yml
vendored
Normal file
22
.github/workflows/release-doctor.yml
vendored
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
name: Release Doctor
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
release_doctor:
|
||||
name: release doctor
|
||||
runs-on: ubuntu-latest
|
||||
if: github.repository == 'sst/opencode-sdk-python' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch' || startsWith(github.head_ref, 'release-please') || github.head_ref == 'next')
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check release environment
|
||||
run: |
|
||||
bash ./bin/check-release-environment
|
||||
env:
|
||||
STAINLESS_API_KEY: ${{ secrets.STAINLESS_API_KEY }}
|
||||
PYPI_TOKEN: ${{ secrets.OPENCODE_PYPI_TOKEN || secrets.PYPI_TOKEN }}
|
||||
16
.gitignore
vendored
Normal file
16
.gitignore
vendored
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
.prism.log
|
||||
.vscode
|
||||
_dev
|
||||
|
||||
__pycache__
|
||||
.mypy_cache
|
||||
|
||||
dist
|
||||
|
||||
.venv
|
||||
.idea
|
||||
|
||||
.env
|
||||
.envrc
|
||||
codegen.log
|
||||
Brewfile.lock.json
|
||||
1
.python-version
Normal file
1
.python-version
Normal file
|
|
@ -0,0 +1 @@
|
|||
3.9.18
|
||||
3
.release-please-manifest.json
Normal file
3
.release-please-manifest.json
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
{
|
||||
".": "0.0.1-alpha.0"
|
||||
}
|
||||
4
.stats.yml
Normal file
4
.stats.yml
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
configured_endpoints: 16
|
||||
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/opencode%2Fopencode-3c79948402e96d2aae6e46095db2cf80759750d1b042d6f91281a72c415b14de.yml
|
||||
openapi_spec_hash: f9c2fc5988f0a30397929995c2be2c85
|
||||
config_hash: 482f0765aa5f3dbc38c35bc576a4946e
|
||||
2
Brewfile
Normal file
2
Brewfile
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
brew "rye"
|
||||
|
||||
128
CONTRIBUTING.md
Normal file
128
CONTRIBUTING.md
Normal file
|
|
@ -0,0 +1,128 @@
|
|||
## Setting up the environment
|
||||
|
||||
### With Rye
|
||||
|
||||
We use [Rye](https://rye.astral.sh/) to manage dependencies because it will automatically provision a Python environment with the expected Python version. To set it up, run:
|
||||
|
||||
```sh
|
||||
$ ./scripts/bootstrap
|
||||
```
|
||||
|
||||
Or [install Rye manually](https://rye.astral.sh/guide/installation/) and run:
|
||||
|
||||
```sh
|
||||
$ rye sync --all-features
|
||||
```
|
||||
|
||||
You can then run scripts using `rye run python script.py` or by activating the virtual environment:
|
||||
|
||||
```sh
|
||||
# Activate the virtual environment - https://docs.python.org/3/library/venv.html#how-venvs-work
|
||||
$ source .venv/bin/activate
|
||||
|
||||
# now you can omit the `rye run` prefix
|
||||
$ python script.py
|
||||
```
|
||||
|
||||
### Without Rye
|
||||
|
||||
Alternatively if you don't want to install `Rye`, you can stick with the standard `pip` setup by ensuring you have the Python version specified in `.python-version`, create a virtual environment however you desire and then install dependencies using this command:
|
||||
|
||||
```sh
|
||||
$ pip install -r requirements-dev.lock
|
||||
```
|
||||
|
||||
## Modifying/Adding code
|
||||
|
||||
Most of the SDK is generated code. Modifications to code will be persisted between generations, but may
|
||||
result in merge conflicts between manual patches and changes from the generator. The generator will never
|
||||
modify the contents of the `src/opencode/lib/` and `examples/` directories.
|
||||
|
||||
## Adding and running examples
|
||||
|
||||
All files in the `examples/` directory are not modified by the generator and can be freely edited or added to.
|
||||
|
||||
```py
|
||||
# add an example to examples/<your-example>.py
|
||||
|
||||
#!/usr/bin/env -S rye run python
|
||||
…
|
||||
```
|
||||
|
||||
```sh
|
||||
$ chmod +x examples/<your-example>.py
|
||||
# run the example against your api
|
||||
$ ./examples/<your-example>.py
|
||||
```
|
||||
|
||||
## Using the repository from source
|
||||
|
||||
If you’d like to use the repository from source, you can either install from git or link to a cloned repository:
|
||||
|
||||
To install via git:
|
||||
|
||||
```sh
|
||||
$ pip install git+ssh://git@github.com/sst/opencode-sdk-python.git
|
||||
```
|
||||
|
||||
Alternatively, you can build from source and install the wheel file:
|
||||
|
||||
Building this package will create two files in the `dist/` directory, a `.tar.gz` containing the source files and a `.whl` that can be used to install the package efficiently.
|
||||
|
||||
To create a distributable version of the library, all you have to do is run this command:
|
||||
|
||||
```sh
|
||||
$ rye build
|
||||
# or
|
||||
$ python -m build
|
||||
```
|
||||
|
||||
Then to install:
|
||||
|
||||
```sh
|
||||
$ pip install ./path-to-wheel-file.whl
|
||||
```
|
||||
|
||||
## Running tests
|
||||
|
||||
Most tests require you to [set up a mock server](https://github.com/stoplightio/prism) against the OpenAPI spec to run the tests.
|
||||
|
||||
```sh
|
||||
# you will need npm installed
|
||||
$ npx prism mock path/to/your/openapi.yml
|
||||
```
|
||||
|
||||
```sh
|
||||
$ ./scripts/test
|
||||
```
|
||||
|
||||
## Linting and formatting
|
||||
|
||||
This repository uses [ruff](https://github.com/astral-sh/ruff) and
|
||||
[black](https://github.com/psf/black) to format the code in the repository.
|
||||
|
||||
To lint:
|
||||
|
||||
```sh
|
||||
$ ./scripts/lint
|
||||
```
|
||||
|
||||
To format and fix all ruff issues automatically:
|
||||
|
||||
```sh
|
||||
$ ./scripts/format
|
||||
```
|
||||
|
||||
## Publishing and releases
|
||||
|
||||
Changes made to this repository via the automated release PR pipeline should publish to PyPI automatically. If
|
||||
the changes aren't made through the automated pipeline, you may want to make releases manually.
|
||||
|
||||
### Publish with a GitHub workflow
|
||||
|
||||
You can release to package managers by using [the `Publish PyPI` GitHub action](https://www.github.com/sst/opencode-sdk-python/actions/workflows/publish-pypi.yml). This requires a setup organization or repository secret to be set up.
|
||||
|
||||
### Publish manually
|
||||
|
||||
If you need to manually release a package, you can run the `bin/publish-pypi` script with a `PYPI_TOKEN` set on
|
||||
the environment.
|
||||
201
LICENSE
Normal file
201
LICENSE
Normal file
|
|
@ -0,0 +1,201 @@
|
|||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright 2025 Opencode
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
351
README.md
351
README.md
|
|
@ -1 +1,350 @@
|
|||
# opencode-python
|
||||
# Opencode Python API library
|
||||
|
||||
[>)](https://pypi.org/project/opencode/)
|
||||
|
||||
The Opencode Python library provides convenient access to the Opencode REST API from any Python 3.8+
|
||||
application. The library includes type definitions for all request params and response fields,
|
||||
and offers both synchronous and asynchronous clients powered by [httpx](https://github.com/encode/httpx).
|
||||
|
||||
It is generated with [Stainless](https://www.stainless.com/).
|
||||
|
||||
## Documentation
|
||||
|
||||
The REST API documentation can be found on [opencode.ai](https://opencode.ai/docs). The full API of this library can be found in [api.md](api.md).
|
||||
|
||||
## Installation
|
||||
|
||||
```sh
|
||||
# install from the production repo
|
||||
pip install git+ssh://git@github.com/sst/opencode-sdk-python.git
|
||||
```
|
||||
|
||||
> [!NOTE]
|
||||
> Once this package is [published to PyPI](https://www.stainless.com/docs/guides/publish), this will become: `pip install --pre opencode`
|
||||
|
||||
## Usage
|
||||
|
||||
The full API of this library can be found in [api.md](api.md).
|
||||
|
||||
```python
|
||||
from opencode import Opencode
|
||||
|
||||
client = Opencode()
|
||||
|
||||
events = client.event.list()
|
||||
```
|
||||
|
||||
## Async usage
|
||||
|
||||
Simply import `AsyncOpencode` instead of `Opencode` and use `await` with each API call:
|
||||
|
||||
```python
|
||||
import asyncio
|
||||
from opencode import AsyncOpencode
|
||||
|
||||
client = AsyncOpencode()
|
||||
|
||||
|
||||
async def main() -> None:
|
||||
events = await client.event.list()
|
||||
|
||||
|
||||
asyncio.run(main())
|
||||
```
|
||||
|
||||
Functionality between the synchronous and asynchronous clients is otherwise identical.
|
||||
|
||||
### With aiohttp
|
||||
|
||||
By default, the async client uses `httpx` for HTTP requests. However, for improved concurrency performance you may also use `aiohttp` as the HTTP backend.
|
||||
|
||||
You can enable this by installing `aiohttp`:
|
||||
|
||||
```sh
|
||||
# install from the production repo
|
||||
pip install 'opencode[aiohttp] @ git+ssh://git@github.com/sst/opencode-sdk-python.git'
|
||||
```
|
||||
|
||||
Then you can enable it by instantiating the client with `http_client=DefaultAioHttpClient()`:
|
||||
|
||||
```python
|
||||
import asyncio
|
||||
from opencode import DefaultAioHttpClient
|
||||
from opencode import AsyncOpencode
|
||||
|
||||
|
||||
async def main() -> None:
|
||||
async with AsyncOpencode(
|
||||
http_client=DefaultAioHttpClient(),
|
||||
) as client:
|
||||
events = await client.event.list()
|
||||
|
||||
|
||||
asyncio.run(main())
|
||||
```
|
||||
|
||||
## Using types
|
||||
|
||||
Nested request parameters are [TypedDicts](https://docs.python.org/3/library/typing.html#typing.TypedDict). Responses are [Pydantic models](https://docs.pydantic.dev) which also provide helper methods for things like:
|
||||
|
||||
- Serializing back into JSON, `model.to_json()`
|
||||
- Converting to a dictionary, `model.to_dict()`
|
||||
|
||||
Typed requests and responses provide autocomplete and documentation within your editor. If you would like to see type errors in VS Code to help catch bugs earlier, set `python.analysis.typeCheckingMode` to `basic`.
|
||||
|
||||
## Handling errors
|
||||
|
||||
When the library is unable to connect to the API (for example, due to network connection problems or a timeout), a subclass of `opencode.APIConnectionError` is raised.
|
||||
|
||||
When the API returns a non-success status code (that is, 4xx or 5xx
|
||||
response), a subclass of `opencode.APIStatusError` is raised, containing `status_code` and `response` properties.
|
||||
|
||||
All errors inherit from `opencode.APIError`.
|
||||
|
||||
```python
|
||||
import opencode
|
||||
from opencode import Opencode
|
||||
|
||||
client = Opencode()
|
||||
|
||||
try:
|
||||
client.event.list()
|
||||
except opencode.APIConnectionError as e:
|
||||
print("The server could not be reached")
|
||||
print(e.__cause__) # an underlying Exception, likely raised within httpx.
|
||||
except opencode.RateLimitError as e:
|
||||
print("A 429 status code was received; we should back off a bit.")
|
||||
except opencode.APIStatusError as e:
|
||||
print("Another non-200-range status code was received")
|
||||
print(e.status_code)
|
||||
print(e.response)
|
||||
```
|
||||
|
||||
Error codes are as follows:
|
||||
|
||||
| Status Code | Error Type |
|
||||
| ----------- | -------------------------- |
|
||||
| 400 | `BadRequestError` |
|
||||
| 401 | `AuthenticationError` |
|
||||
| 403 | `PermissionDeniedError` |
|
||||
| 404 | `NotFoundError` |
|
||||
| 422 | `UnprocessableEntityError` |
|
||||
| 429 | `RateLimitError` |
|
||||
| >=500 | `InternalServerError` |
|
||||
| N/A | `APIConnectionError` |
|
||||
|
||||
### Retries
|
||||
|
||||
Certain errors are automatically retried 2 times by default, with a short exponential backoff.
|
||||
Connection errors (for example, due to a network connectivity problem), 408 Request Timeout, 409 Conflict,
|
||||
429 Rate Limit, and >=500 Internal errors are all retried by default.
|
||||
|
||||
You can use the `max_retries` option to configure or disable retry settings:
|
||||
|
||||
```python
|
||||
from opencode import Opencode
|
||||
|
||||
# Configure the default for all requests:
|
||||
client = Opencode(
|
||||
# default is 2
|
||||
max_retries=0,
|
||||
)
|
||||
|
||||
# Or, configure per-request:
|
||||
client.with_options(max_retries=5).event.list()
|
||||
```
|
||||
|
||||
### Timeouts
|
||||
|
||||
By default requests time out after 1 minute. You can configure this with a `timeout` option,
|
||||
which accepts a float or an [`httpx.Timeout`](https://www.python-httpx.org/advanced/timeouts/#fine-tuning-the-configuration) object:
|
||||
|
||||
```python
|
||||
from opencode import Opencode
|
||||
|
||||
# Configure the default for all requests:
|
||||
client = Opencode(
|
||||
# 20 seconds (default is 1 minute)
|
||||
timeout=20.0,
|
||||
)
|
||||
|
||||
# More granular control:
|
||||
client = Opencode(
|
||||
timeout=httpx.Timeout(60.0, read=5.0, write=10.0, connect=2.0),
|
||||
)
|
||||
|
||||
# Override per-request:
|
||||
client.with_options(timeout=5.0).event.list()
|
||||
```
|
||||
|
||||
On timeout, an `APITimeoutError` is thrown.
|
||||
|
||||
Note that requests that time out are [retried twice by default](#retries).
|
||||
|
||||
## Advanced
|
||||
|
||||
### Logging
|
||||
|
||||
We use the standard library [`logging`](https://docs.python.org/3/library/logging.html) module.
|
||||
|
||||
You can enable logging by setting the environment variable `OPENCODE_LOG` to `info`.
|
||||
|
||||
```shell
|
||||
$ export OPENCODE_LOG=info
|
||||
```
|
||||
|
||||
Or to `debug` for more verbose logging.
|
||||
|
||||
### How to tell whether `None` means `null` or missing
|
||||
|
||||
In an API response, a field may be explicitly `null`, or missing entirely; in either case, its value is `None` in this library. You can differentiate the two cases with `.model_fields_set`:
|
||||
|
||||
```py
|
||||
if response.my_field is None:
|
||||
if 'my_field' not in response.model_fields_set:
|
||||
print('Got json like {}, without a "my_field" key present at all.')
|
||||
else:
|
||||
print('Got json like {"my_field": null}.')
|
||||
```
|
||||
|
||||
### Accessing raw response data (e.g. headers)
|
||||
|
||||
The "raw" Response object can be accessed by prefixing `.with_raw_response.` to any HTTP method call, e.g.,
|
||||
|
||||
```py
|
||||
from opencode import Opencode
|
||||
|
||||
client = Opencode()
|
||||
response = client.event.with_raw_response.list()
|
||||
print(response.headers.get('X-My-Header'))
|
||||
|
||||
event = response.parse() # get the object that `event.list()` would have returned
|
||||
print(event)
|
||||
```
|
||||
|
||||
These methods return an [`APIResponse`](https://github.com/sst/opencode-sdk-python/tree/main/src/opencode/_response.py) object.
|
||||
|
||||
The async client returns an [`AsyncAPIResponse`](https://github.com/sst/opencode-sdk-python/tree/main/src/opencode/_response.py) with the same structure, the only difference being `await`able methods for reading the response content.
|
||||
|
||||
#### `.with_streaming_response`
|
||||
|
||||
The above interface eagerly reads the full response body when you make the request, which may not always be what you want.
|
||||
|
||||
To stream the response body, use `.with_streaming_response` instead, which requires a context manager and only reads the response body once you call `.read()`, `.text()`, `.json()`, `.iter_bytes()`, `.iter_text()`, `.iter_lines()` or `.parse()`. In the async client, these are async methods.
|
||||
|
||||
```python
|
||||
with client.event.with_streaming_response.list() as response:
|
||||
print(response.headers.get("X-My-Header"))
|
||||
|
||||
for line in response.iter_lines():
|
||||
print(line)
|
||||
```
|
||||
|
||||
The context manager is required so that the response will reliably be closed.
|
||||
|
||||
### Making custom/undocumented requests
|
||||
|
||||
This library is typed for convenient access to the documented API.
|
||||
|
||||
If you need to access undocumented endpoints, params, or response properties, the library can still be used.
|
||||
|
||||
#### Undocumented endpoints
|
||||
|
||||
To make requests to undocumented endpoints, you can make requests using `client.get`, `client.post`, and other
|
||||
http verbs. Options on the client will be respected (such as retries) when making this request.
|
||||
|
||||
```py
|
||||
import httpx
|
||||
|
||||
response = client.post(
|
||||
"/foo",
|
||||
cast_to=httpx.Response,
|
||||
body={"my_param": True},
|
||||
)
|
||||
|
||||
print(response.headers.get("x-foo"))
|
||||
```
|
||||
|
||||
#### Undocumented request params
|
||||
|
||||
If you want to explicitly send an extra param, you can do so with the `extra_query`, `extra_body`, and `extra_headers` request
|
||||
options.
|
||||
|
||||
#### Undocumented response properties
|
||||
|
||||
To access undocumented response properties, you can access the extra fields like `response.unknown_prop`. You
|
||||
can also get all the extra fields on the Pydantic model as a dict with
|
||||
[`response.model_extra`](https://docs.pydantic.dev/latest/api/base_model/#pydantic.BaseModel.model_extra).
|
||||
|
||||
### Configuring the HTTP client
|
||||
|
||||
You can directly override the [httpx client](https://www.python-httpx.org/api/#client) to customize it for your use case, including:
|
||||
|
||||
- Support for [proxies](https://www.python-httpx.org/advanced/proxies/)
|
||||
- Custom [transports](https://www.python-httpx.org/advanced/transports/)
|
||||
- Additional [advanced](https://www.python-httpx.org/advanced/clients/) functionality
|
||||
|
||||
```python
|
||||
import httpx
|
||||
from opencode import Opencode, DefaultHttpxClient
|
||||
|
||||
client = Opencode(
|
||||
# Or use the `OPENCODE_BASE_URL` env var
|
||||
base_url="http://my.test.server.example.com:8083",
|
||||
http_client=DefaultHttpxClient(
|
||||
proxy="http://my.test.proxy.example.com",
|
||||
transport=httpx.HTTPTransport(local_address="0.0.0.0"),
|
||||
),
|
||||
)
|
||||
```
|
||||
|
||||
You can also customize the client on a per-request basis by using `with_options()`:
|
||||
|
||||
```python
|
||||
client.with_options(http_client=DefaultHttpxClient(...))
|
||||
```
|
||||
|
||||
### Managing HTTP resources
|
||||
|
||||
By default the library closes underlying HTTP connections whenever the client is [garbage collected](https://docs.python.org/3/reference/datamodel.html#object.__del__). You can manually close the client using the `.close()` method if desired, or with a context manager that closes when exiting.
|
||||
|
||||
```py
|
||||
from opencode import Opencode
|
||||
|
||||
with Opencode() as client:
|
||||
# make requests here
|
||||
...
|
||||
|
||||
# HTTP client is now closed
|
||||
```
|
||||
|
||||
## Versioning
|
||||
|
||||
This package generally follows [SemVer](https://semver.org/spec/v2.0.0.html) conventions, though certain backwards-incompatible changes may be released as minor versions:
|
||||
|
||||
1. Changes that only affect static types, without breaking runtime behavior.
|
||||
2. Changes to library internals which are technically public but not intended or documented for external use. _(Please open a GitHub issue to let us know if you are relying on such internals.)_
|
||||
3. Changes that we do not expect to impact the vast majority of users in practice.
|
||||
|
||||
We take backwards-compatibility seriously and work hard to ensure you can rely on a smooth upgrade experience.
|
||||
|
||||
We are keen for your feedback; please open an [issue](https://www.github.com/sst/opencode-sdk-python/issues) with questions, bugs, or suggestions.
|
||||
|
||||
### Determining the installed version
|
||||
|
||||
If you've upgraded to the latest version but aren't seeing any new features you were expecting then your python environment is likely still using an older version.
|
||||
|
||||
You can determine the version that is being used at runtime with:
|
||||
|
||||
```py
|
||||
import opencode
|
||||
print(opencode.__version__)
|
||||
```
|
||||
|
||||
## Requirements
|
||||
|
||||
Python 3.8 or higher.
|
||||
|
||||
## Contributing
|
||||
|
||||
See [the contributing documentation](./CONTRIBUTING.md).
|
||||
|
|
|
|||
27
SECURITY.md
Normal file
27
SECURITY.md
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
# Security Policy
|
||||
|
||||
## Reporting Security Issues
|
||||
|
||||
This SDK is generated by [Stainless Software Inc](http://stainless.com). Stainless takes security seriously, and encourages you to report any security vulnerability promptly so that appropriate action can be taken.
|
||||
|
||||
To report a security issue, please contact the Stainless team at security@stainless.com.
|
||||
|
||||
## Responsible Disclosure
|
||||
|
||||
We appreciate the efforts of security researchers and individuals who help us maintain the security of
|
||||
SDKs we generate. If you believe you have found a security vulnerability, please adhere to responsible
|
||||
disclosure practices by allowing us a reasonable amount of time to investigate and address the issue
|
||||
before making any information public.
|
||||
|
||||
## Reporting Non-SDK Related Security Issues
|
||||
|
||||
If you encounter security issues that are not directly related to SDKs but pertain to the services
|
||||
or products provided by Opencode, please follow the respective company's security reporting guidelines.
|
||||
|
||||
### Opencode Terms and Policies
|
||||
|
||||
Please contact hello@sst.dev for any questions or concerns regarding the security of our services.
|
||||
|
||||
---
|
||||
|
||||
Thank you for helping us keep the SDKs and systems they interact with secure.
|
||||
97
api.md
Normal file
97
api.md
Normal file
|
|
@ -0,0 +1,97 @@
|
|||
# Event
|
||||
|
||||
Types:
|
||||
|
||||
```python
|
||||
from opencode.types import EventListResponse
|
||||
```
|
||||
|
||||
Methods:
|
||||
|
||||
- <code title="get /event">client.event.<a href="./src/opencode/resources/event.py">list</a>() -> <a href="./src/opencode/types/event_list_response.py">EventListResponse</a></code>
|
||||
|
||||
# App
|
||||
|
||||
Types:
|
||||
|
||||
```python
|
||||
from opencode.types import App, AppInitResponse
|
||||
```
|
||||
|
||||
Methods:
|
||||
|
||||
- <code title="get /app">client.app.<a href="./src/opencode/resources/app.py">get</a>() -> <a href="./src/opencode/types/app.py">App</a></code>
|
||||
- <code title="post /app/init">client.app.<a href="./src/opencode/resources/app.py">init</a>() -> <a href="./src/opencode/types/app_init_response.py">AppInitResponse</a></code>
|
||||
|
||||
# File
|
||||
|
||||
Types:
|
||||
|
||||
```python
|
||||
from opencode.types import FileSearchResponse
|
||||
```
|
||||
|
||||
Methods:
|
||||
|
||||
- <code title="get /file">client.file.<a href="./src/opencode/resources/file.py">search</a>(\*\*<a href="src/opencode/types/file_search_params.py">params</a>) -> <a href="./src/opencode/types/file_search_response.py">FileSearchResponse</a></code>
|
||||
|
||||
# Config
|
||||
|
||||
Types:
|
||||
|
||||
```python
|
||||
from opencode.types import (
|
||||
Config,
|
||||
Keybinds,
|
||||
McpLocal,
|
||||
McpRemote,
|
||||
Model,
|
||||
Provider,
|
||||
ConfigProvidersResponse,
|
||||
)
|
||||
```
|
||||
|
||||
Methods:
|
||||
|
||||
- <code title="get /config">client.config.<a href="./src/opencode/resources/config.py">get</a>() -> <a href="./src/opencode/types/config.py">Config</a></code>
|
||||
- <code title="get /config/providers">client.config.<a href="./src/opencode/resources/config.py">providers</a>() -> <a href="./src/opencode/types/config_providers_response.py">ConfigProvidersResponse</a></code>
|
||||
|
||||
# Session
|
||||
|
||||
Types:
|
||||
|
||||
```python
|
||||
from opencode.types import (
|
||||
FilePart,
|
||||
Message,
|
||||
MessagePart,
|
||||
ReasoningPart,
|
||||
Session,
|
||||
SourceURLPart,
|
||||
StepStartPart,
|
||||
TextPart,
|
||||
ToolCall,
|
||||
ToolInvocationPart,
|
||||
ToolPartialCall,
|
||||
ToolResult,
|
||||
SessionListResponse,
|
||||
SessionDeleteResponse,
|
||||
SessionAbortResponse,
|
||||
SessionInitResponse,
|
||||
SessionMessagesResponse,
|
||||
SessionSummarizeResponse,
|
||||
)
|
||||
```
|
||||
|
||||
Methods:
|
||||
|
||||
- <code title="post /session">client.session.<a href="./src/opencode/resources/session.py">create</a>() -> <a href="./src/opencode/types/session.py">Session</a></code>
|
||||
- <code title="get /session">client.session.<a href="./src/opencode/resources/session.py">list</a>() -> <a href="./src/opencode/types/session_list_response.py">SessionListResponse</a></code>
|
||||
- <code title="delete /session/{id}">client.session.<a href="./src/opencode/resources/session.py">delete</a>(id) -> <a href="./src/opencode/types/session_delete_response.py">SessionDeleteResponse</a></code>
|
||||
- <code title="post /session/{id}/abort">client.session.<a href="./src/opencode/resources/session.py">abort</a>(id) -> <a href="./src/opencode/types/session_abort_response.py">SessionAbortResponse</a></code>
|
||||
- <code title="post /session/{id}/message">client.session.<a href="./src/opencode/resources/session.py">chat</a>(id, \*\*<a href="src/opencode/types/session_chat_params.py">params</a>) -> <a href="./src/opencode/types/message.py">Message</a></code>
|
||||
- <code title="post /session/{id}/init">client.session.<a href="./src/opencode/resources/session.py">init</a>(id, \*\*<a href="src/opencode/types/session_init_params.py">params</a>) -> <a href="./src/opencode/types/session_init_response.py">SessionInitResponse</a></code>
|
||||
- <code title="get /session/{id}/message">client.session.<a href="./src/opencode/resources/session.py">messages</a>(id) -> <a href="./src/opencode/types/session_messages_response.py">SessionMessagesResponse</a></code>
|
||||
- <code title="post /session/{id}/share">client.session.<a href="./src/opencode/resources/session.py">share</a>(id) -> <a href="./src/opencode/types/session.py">Session</a></code>
|
||||
- <code title="post /session/{id}/summarize">client.session.<a href="./src/opencode/resources/session.py">summarize</a>(id, \*\*<a href="src/opencode/types/session_summarize_params.py">params</a>) -> <a href="./src/opencode/types/session_summarize_response.py">SessionSummarizeResponse</a></code>
|
||||
- <code title="delete /session/{id}/share">client.session.<a href="./src/opencode/resources/session.py">unshare</a>(id) -> <a href="./src/opencode/types/session.py">Session</a></code>
|
||||
25
bin/check-release-environment
Normal file
25
bin/check-release-environment
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
errors=()
|
||||
|
||||
if [ -z "${STAINLESS_API_KEY}" ]; then
|
||||
errors+=("The STAINLESS_API_KEY secret has not been set. Please contact Stainless for an API key & set it in your organization secrets on GitHub.")
|
||||
fi
|
||||
|
||||
if [ -z "${PYPI_TOKEN}" ]; then
|
||||
errors+=("The PYPI_TOKEN secret has not been set. Please set it in either this repository's secrets or your organization secrets.")
|
||||
fi
|
||||
|
||||
lenErrors=${#errors[@]}
|
||||
|
||||
if [[ lenErrors -gt 0 ]]; then
|
||||
echo -e "Found the following errors in the release environment:\n"
|
||||
|
||||
for error in "${errors[@]}"; do
|
||||
echo -e "- $error\n"
|
||||
done
|
||||
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "The environment is ready to push releases!"
|
||||
6
bin/publish-pypi
Normal file
6
bin/publish-pypi
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
set -eux
|
||||
mkdir -p dist
|
||||
rye build --clean
|
||||
rye publish --yes --token=$PYPI_TOKEN
|
||||
4
examples/.keep
Normal file
4
examples/.keep
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
File generated from our OpenAPI spec by Stainless.
|
||||
|
||||
This directory can be used to store example files demonstrating usage of this SDK.
|
||||
It is ignored by Stainless code generation and its content (other than this keep file) won't be touched.
|
||||
50
mypy.ini
Normal file
50
mypy.ini
Normal file
|
|
@ -0,0 +1,50 @@
|
|||
[mypy]
|
||||
pretty = True
|
||||
show_error_codes = True
|
||||
|
||||
# Exclude _files.py because mypy isn't smart enough to apply
|
||||
# the correct type narrowing and as this is an internal module
|
||||
# it's fine to just use Pyright.
|
||||
#
|
||||
# We also exclude our `tests` as mypy doesn't always infer
|
||||
# types correctly and Pyright will still catch any type errors.
|
||||
exclude = ^(src/opencode/_files\.py|_dev/.*\.py|tests/.*)$
|
||||
|
||||
strict_equality = True
|
||||
implicit_reexport = True
|
||||
check_untyped_defs = True
|
||||
no_implicit_optional = True
|
||||
|
||||
warn_return_any = True
|
||||
warn_unreachable = True
|
||||
warn_unused_configs = True
|
||||
|
||||
# Turn these options off as it could cause conflicts
|
||||
# with the Pyright options.
|
||||
warn_unused_ignores = False
|
||||
warn_redundant_casts = False
|
||||
|
||||
disallow_any_generics = True
|
||||
disallow_untyped_defs = True
|
||||
disallow_untyped_calls = True
|
||||
disallow_subclassing_any = True
|
||||
disallow_incomplete_defs = True
|
||||
disallow_untyped_decorators = True
|
||||
cache_fine_grained = True
|
||||
|
||||
# By default, mypy reports an error if you assign a value to the result
|
||||
# of a function call that doesn't return anything. We do this in our test
|
||||
# cases:
|
||||
# ```
|
||||
# result = ...
|
||||
# assert result is None
|
||||
# ```
|
||||
# Changing this codegen to make mypy happy would increase complexity
|
||||
# and would not be worth it.
|
||||
disable_error_code = func-returns-value,overload-cannot-match
|
||||
|
||||
# https://github.com/python/mypy/issues/12162
|
||||
[mypy.overrides]
|
||||
module = "black.files.*"
|
||||
ignore_errors = true
|
||||
ignore_missing_imports = true
|
||||
9
noxfile.py
Normal file
9
noxfile.py
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
import nox
|
||||
|
||||
|
||||
@nox.session(reuse_venv=True, name="test-pydantic-v1")
|
||||
def test_pydantic_v1(session: nox.Session) -> None:
|
||||
session.install("-r", "requirements-dev.lock")
|
||||
session.install("pydantic<2")
|
||||
|
||||
session.run("pytest", "--showlocals", "--ignore=tests/functional", *session.posargs)
|
||||
210
pyproject.toml
Normal file
210
pyproject.toml
Normal file
|
|
@ -0,0 +1,210 @@
|
|||
[project]
|
||||
name = "opencode"
|
||||
version = "0.0.1-alpha.0"
|
||||
description = "The official Python library for the opencode API"
|
||||
dynamic = ["readme"]
|
||||
license = "Apache-2.0"
|
||||
authors = [
|
||||
{ name = "Opencode", email = "hello@sst.dev" },
|
||||
]
|
||||
dependencies = [
|
||||
"httpx>=0.23.0, <1",
|
||||
"pydantic>=1.9.0, <3",
|
||||
"typing-extensions>=4.10, <5",
|
||||
"anyio>=3.5.0, <5",
|
||||
"distro>=1.7.0, <2",
|
||||
"sniffio",
|
||||
]
|
||||
requires-python = ">= 3.8"
|
||||
classifiers = [
|
||||
"Typing :: Typed",
|
||||
"Intended Audience :: Developers",
|
||||
"Programming Language :: Python :: 3.8",
|
||||
"Programming Language :: Python :: 3.9",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
"Programming Language :: Python :: 3.12",
|
||||
"Operating System :: OS Independent",
|
||||
"Operating System :: POSIX",
|
||||
"Operating System :: MacOS",
|
||||
"Operating System :: POSIX :: Linux",
|
||||
"Operating System :: Microsoft :: Windows",
|
||||
"Topic :: Software Development :: Libraries :: Python Modules",
|
||||
"License :: OSI Approved :: Apache Software License"
|
||||
]
|
||||
|
||||
[project.urls]
|
||||
Homepage = "https://github.com/sst/opencode-sdk-python"
|
||||
Repository = "https://github.com/sst/opencode-sdk-python"
|
||||
|
||||
[project.optional-dependencies]
|
||||
aiohttp = ["aiohttp", "httpx_aiohttp>=0.1.6"]
|
||||
|
||||
[tool.rye]
|
||||
managed = true
|
||||
# version pins are in requirements-dev.lock
|
||||
dev-dependencies = [
|
||||
"pyright==1.1.399",
|
||||
"mypy",
|
||||
"respx",
|
||||
"pytest",
|
||||
"pytest-asyncio",
|
||||
"ruff",
|
||||
"time-machine",
|
||||
"nox",
|
||||
"dirty-equals>=0.6.0",
|
||||
"importlib-metadata>=6.7.0",
|
||||
"rich>=13.7.1",
|
||||
"nest_asyncio==1.6.0",
|
||||
"pytest-xdist>=3.6.1",
|
||||
]
|
||||
|
||||
[tool.rye.scripts]
|
||||
format = { chain = [
|
||||
"format:ruff",
|
||||
"format:docs",
|
||||
"fix:ruff",
|
||||
# run formatting again to fix any inconsistencies when imports are stripped
|
||||
"format:ruff",
|
||||
]}
|
||||
"format:docs" = "python scripts/utils/ruffen-docs.py README.md api.md"
|
||||
"format:ruff" = "ruff format"
|
||||
|
||||
"lint" = { chain = [
|
||||
"check:ruff",
|
||||
"typecheck",
|
||||
"check:importable",
|
||||
]}
|
||||
"check:ruff" = "ruff check ."
|
||||
"fix:ruff" = "ruff check --fix ."
|
||||
|
||||
"check:importable" = "python -c 'import opencode'"
|
||||
|
||||
typecheck = { chain = [
|
||||
"typecheck:pyright",
|
||||
"typecheck:mypy"
|
||||
]}
|
||||
"typecheck:pyright" = "pyright"
|
||||
"typecheck:verify-types" = "pyright --verifytypes opencode --ignoreexternal"
|
||||
"typecheck:mypy" = "mypy ."
|
||||
|
||||
[build-system]
|
||||
requires = ["hatchling==1.26.3", "hatch-fancy-pypi-readme"]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
[tool.hatch.build]
|
||||
include = [
|
||||
"src/*"
|
||||
]
|
||||
|
||||
[tool.hatch.build.targets.wheel]
|
||||
packages = ["src/opencode"]
|
||||
|
||||
[tool.hatch.build.targets.sdist]
|
||||
# Basically everything except hidden files/directories (such as .github, .devcontainers, .python-version, etc)
|
||||
include = [
|
||||
"/*.toml",
|
||||
"/*.json",
|
||||
"/*.lock",
|
||||
"/*.md",
|
||||
"/mypy.ini",
|
||||
"/noxfile.py",
|
||||
"bin/*",
|
||||
"examples/*",
|
||||
"src/*",
|
||||
"tests/*",
|
||||
]
|
||||
|
||||
[tool.hatch.metadata.hooks.fancy-pypi-readme]
|
||||
content-type = "text/markdown"
|
||||
|
||||
[[tool.hatch.metadata.hooks.fancy-pypi-readme.fragments]]
|
||||
path = "README.md"
|
||||
|
||||
[[tool.hatch.metadata.hooks.fancy-pypi-readme.substitutions]]
|
||||
# replace relative links with absolute links
|
||||
pattern = '\[(.+?)\]\(((?!https?://)\S+?)\)'
|
||||
replacement = '[\1](https://github.com/sst/opencode-sdk-python/tree/main/\g<2>)'
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
testpaths = ["tests"]
|
||||
addopts = "--tb=short -n auto"
|
||||
xfail_strict = true
|
||||
asyncio_mode = "auto"
|
||||
asyncio_default_fixture_loop_scope = "session"
|
||||
filterwarnings = [
|
||||
"error"
|
||||
]
|
||||
|
||||
[tool.pyright]
|
||||
# this enables practically every flag given by pyright.
|
||||
# there are a couple of flags that are still disabled by
|
||||
# default in strict mode as they are experimental and niche.
|
||||
typeCheckingMode = "strict"
|
||||
pythonVersion = "3.8"
|
||||
|
||||
exclude = [
|
||||
"_dev",
|
||||
".venv",
|
||||
".nox",
|
||||
]
|
||||
|
||||
reportImplicitOverride = true
|
||||
reportOverlappingOverload = false
|
||||
|
||||
reportImportCycles = false
|
||||
reportPrivateUsage = false
|
||||
|
||||
[tool.ruff]
|
||||
line-length = 120
|
||||
output-format = "grouped"
|
||||
target-version = "py37"
|
||||
|
||||
[tool.ruff.format]
|
||||
docstring-code-format = true
|
||||
|
||||
[tool.ruff.lint]
|
||||
select = [
|
||||
# isort
|
||||
"I",
|
||||
# bugbear rules
|
||||
"B",
|
||||
# remove unused imports
|
||||
"F401",
|
||||
# bare except statements
|
||||
"E722",
|
||||
# unused arguments
|
||||
"ARG",
|
||||
# print statements
|
||||
"T201",
|
||||
"T203",
|
||||
# misuse of typing.TYPE_CHECKING
|
||||
"TC004",
|
||||
# import rules
|
||||
"TID251",
|
||||
]
|
||||
ignore = [
|
||||
# mutable defaults
|
||||
"B006",
|
||||
]
|
||||
unfixable = [
|
||||
# disable auto fix for print statements
|
||||
"T201",
|
||||
"T203",
|
||||
]
|
||||
|
||||
[tool.ruff.lint.flake8-tidy-imports.banned-api]
|
||||
"functools.lru_cache".msg = "This function does not retain type information for the wrapped function's arguments; The `lru_cache` function from `_utils` should be used instead"
|
||||
|
||||
[tool.ruff.lint.isort]
|
||||
length-sort = true
|
||||
length-sort-straight = true
|
||||
combine-as-imports = true
|
||||
extra-standard-library = ["typing_extensions"]
|
||||
known-first-party = ["opencode", "tests"]
|
||||
|
||||
[tool.ruff.lint.per-file-ignores]
|
||||
"bin/**.py" = ["T201", "T203"]
|
||||
"scripts/**.py" = ["T201", "T203"]
|
||||
"tests/**.py" = ["T201", "T203"]
|
||||
"examples/**.py" = ["T201", "T203"]
|
||||
66
release-please-config.json
Normal file
66
release-please-config.json
Normal file
|
|
@ -0,0 +1,66 @@
|
|||
{
|
||||
"packages": {
|
||||
".": {}
|
||||
},
|
||||
"$schema": "https://raw.githubusercontent.com/stainless-api/release-please/main/schemas/config.json",
|
||||
"include-v-in-tag": true,
|
||||
"include-component-in-tag": false,
|
||||
"versioning": "prerelease",
|
||||
"prerelease": true,
|
||||
"bump-minor-pre-major": true,
|
||||
"bump-patch-for-minor-pre-major": false,
|
||||
"pull-request-header": "Automated Release PR",
|
||||
"pull-request-title-pattern": "release: ${version}",
|
||||
"changelog-sections": [
|
||||
{
|
||||
"type": "feat",
|
||||
"section": "Features"
|
||||
},
|
||||
{
|
||||
"type": "fix",
|
||||
"section": "Bug Fixes"
|
||||
},
|
||||
{
|
||||
"type": "perf",
|
||||
"section": "Performance Improvements"
|
||||
},
|
||||
{
|
||||
"type": "revert",
|
||||
"section": "Reverts"
|
||||
},
|
||||
{
|
||||
"type": "chore",
|
||||
"section": "Chores"
|
||||
},
|
||||
{
|
||||
"type": "docs",
|
||||
"section": "Documentation"
|
||||
},
|
||||
{
|
||||
"type": "style",
|
||||
"section": "Styles"
|
||||
},
|
||||
{
|
||||
"type": "refactor",
|
||||
"section": "Refactors"
|
||||
},
|
||||
{
|
||||
"type": "test",
|
||||
"section": "Tests",
|
||||
"hidden": true
|
||||
},
|
||||
{
|
||||
"type": "build",
|
||||
"section": "Build System"
|
||||
},
|
||||
{
|
||||
"type": "ci",
|
||||
"section": "Continuous Integration",
|
||||
"hidden": true
|
||||
}
|
||||
],
|
||||
"release-type": "python",
|
||||
"extra-files": [
|
||||
"src/opencode/_version.py"
|
||||
]
|
||||
}
|
||||
135
requirements-dev.lock
Normal file
135
requirements-dev.lock
Normal file
|
|
@ -0,0 +1,135 @@
|
|||
# generated by rye
|
||||
# use `rye lock` or `rye sync` to update this lockfile
|
||||
#
|
||||
# last locked with the following flags:
|
||||
# pre: false
|
||||
# features: []
|
||||
# all-features: true
|
||||
# with-sources: false
|
||||
# generate-hashes: false
|
||||
# universal: false
|
||||
|
||||
-e file:.
|
||||
aiohappyeyeballs==2.6.1
|
||||
# via aiohttp
|
||||
aiohttp==3.12.8
|
||||
# via httpx-aiohttp
|
||||
# via opencode
|
||||
aiosignal==1.3.2
|
||||
# via aiohttp
|
||||
annotated-types==0.6.0
|
||||
# via pydantic
|
||||
anyio==4.4.0
|
||||
# via httpx
|
||||
# via opencode
|
||||
argcomplete==3.1.2
|
||||
# via nox
|
||||
async-timeout==5.0.1
|
||||
# via aiohttp
|
||||
attrs==25.3.0
|
||||
# via aiohttp
|
||||
certifi==2023.7.22
|
||||
# via httpcore
|
||||
# via httpx
|
||||
colorlog==6.7.0
|
||||
# via nox
|
||||
dirty-equals==0.6.0
|
||||
distlib==0.3.7
|
||||
# via virtualenv
|
||||
distro==1.8.0
|
||||
# via opencode
|
||||
exceptiongroup==1.2.2
|
||||
# via anyio
|
||||
# via pytest
|
||||
execnet==2.1.1
|
||||
# via pytest-xdist
|
||||
filelock==3.12.4
|
||||
# via virtualenv
|
||||
frozenlist==1.6.2
|
||||
# via aiohttp
|
||||
# via aiosignal
|
||||
h11==0.14.0
|
||||
# via httpcore
|
||||
httpcore==1.0.2
|
||||
# via httpx
|
||||
httpx==0.28.1
|
||||
# via httpx-aiohttp
|
||||
# via opencode
|
||||
# via respx
|
||||
httpx-aiohttp==0.1.6
|
||||
# via opencode
|
||||
idna==3.4
|
||||
# via anyio
|
||||
# via httpx
|
||||
# via yarl
|
||||
importlib-metadata==7.0.0
|
||||
iniconfig==2.0.0
|
||||
# via pytest
|
||||
markdown-it-py==3.0.0
|
||||
# via rich
|
||||
mdurl==0.1.2
|
||||
# via markdown-it-py
|
||||
multidict==6.4.4
|
||||
# via aiohttp
|
||||
# via yarl
|
||||
mypy==1.14.1
|
||||
mypy-extensions==1.0.0
|
||||
# via mypy
|
||||
nest-asyncio==1.6.0
|
||||
nodeenv==1.8.0
|
||||
# via pyright
|
||||
nox==2023.4.22
|
||||
packaging==23.2
|
||||
# via nox
|
||||
# via pytest
|
||||
platformdirs==3.11.0
|
||||
# via virtualenv
|
||||
pluggy==1.5.0
|
||||
# via pytest
|
||||
propcache==0.3.1
|
||||
# via aiohttp
|
||||
# via yarl
|
||||
pydantic==2.10.3
|
||||
# via opencode
|
||||
pydantic-core==2.27.1
|
||||
# via pydantic
|
||||
pygments==2.18.0
|
||||
# via rich
|
||||
pyright==1.1.399
|
||||
pytest==8.3.3
|
||||
# via pytest-asyncio
|
||||
# via pytest-xdist
|
||||
pytest-asyncio==0.24.0
|
||||
pytest-xdist==3.7.0
|
||||
python-dateutil==2.8.2
|
||||
# via time-machine
|
||||
pytz==2023.3.post1
|
||||
# via dirty-equals
|
||||
respx==0.22.0
|
||||
rich==13.7.1
|
||||
ruff==0.9.4
|
||||
setuptools==68.2.2
|
||||
# via nodeenv
|
||||
six==1.16.0
|
||||
# via python-dateutil
|
||||
sniffio==1.3.0
|
||||
# via anyio
|
||||
# via opencode
|
||||
time-machine==2.9.0
|
||||
tomli==2.0.2
|
||||
# via mypy
|
||||
# via pytest
|
||||
typing-extensions==4.12.2
|
||||
# via anyio
|
||||
# via multidict
|
||||
# via mypy
|
||||
# via opencode
|
||||
# via pydantic
|
||||
# via pydantic-core
|
||||
# via pyright
|
||||
virtualenv==20.24.5
|
||||
# via nox
|
||||
yarl==1.20.0
|
||||
# via aiohttp
|
||||
zipp==3.17.0
|
||||
# via importlib-metadata
|
||||
72
requirements.lock
Normal file
72
requirements.lock
Normal file
|
|
@ -0,0 +1,72 @@
|
|||
# generated by rye
|
||||
# use `rye lock` or `rye sync` to update this lockfile
|
||||
#
|
||||
# last locked with the following flags:
|
||||
# pre: false
|
||||
# features: []
|
||||
# all-features: true
|
||||
# with-sources: false
|
||||
# generate-hashes: false
|
||||
# universal: false
|
||||
|
||||
-e file:.
|
||||
aiohappyeyeballs==2.6.1
|
||||
# via aiohttp
|
||||
aiohttp==3.12.8
|
||||
# via httpx-aiohttp
|
||||
# via opencode
|
||||
aiosignal==1.3.2
|
||||
# via aiohttp
|
||||
annotated-types==0.6.0
|
||||
# via pydantic
|
||||
anyio==4.4.0
|
||||
# via httpx
|
||||
# via opencode
|
||||
async-timeout==5.0.1
|
||||
# via aiohttp
|
||||
attrs==25.3.0
|
||||
# via aiohttp
|
||||
certifi==2023.7.22
|
||||
# via httpcore
|
||||
# via httpx
|
||||
distro==1.8.0
|
||||
# via opencode
|
||||
exceptiongroup==1.2.2
|
||||
# via anyio
|
||||
frozenlist==1.6.2
|
||||
# via aiohttp
|
||||
# via aiosignal
|
||||
h11==0.14.0
|
||||
# via httpcore
|
||||
httpcore==1.0.2
|
||||
# via httpx
|
||||
httpx==0.28.1
|
||||
# via httpx-aiohttp
|
||||
# via opencode
|
||||
httpx-aiohttp==0.1.6
|
||||
# via opencode
|
||||
idna==3.4
|
||||
# via anyio
|
||||
# via httpx
|
||||
# via yarl
|
||||
multidict==6.4.4
|
||||
# via aiohttp
|
||||
# via yarl
|
||||
propcache==0.3.1
|
||||
# via aiohttp
|
||||
# via yarl
|
||||
pydantic==2.10.3
|
||||
# via opencode
|
||||
pydantic-core==2.27.1
|
||||
# via pydantic
|
||||
sniffio==1.3.0
|
||||
# via anyio
|
||||
# via opencode
|
||||
typing-extensions==4.12.2
|
||||
# via anyio
|
||||
# via multidict
|
||||
# via opencode
|
||||
# via pydantic
|
||||
# via pydantic-core
|
||||
yarl==1.20.0
|
||||
# via aiohttp
|
||||
19
scripts/bootstrap
Executable file
19
scripts/bootstrap
Executable file
|
|
@ -0,0 +1,19 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
cd "$(dirname "$0")/.."
|
||||
|
||||
if ! command -v rye >/dev/null 2>&1 && [ -f "Brewfile" ] && [ "$(uname -s)" = "Darwin" ]; then
|
||||
brew bundle check >/dev/null 2>&1 || {
|
||||
echo "==> Installing Homebrew dependencies…"
|
||||
brew bundle
|
||||
}
|
||||
fi
|
||||
|
||||
echo "==> Installing Python dependencies…"
|
||||
|
||||
# experimental uv support makes installations significantly faster
|
||||
rye config --set-bool behavior.use-uv=true
|
||||
|
||||
rye sync --all-features
|
||||
8
scripts/format
Executable file
8
scripts/format
Executable file
|
|
@ -0,0 +1,8 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
cd "$(dirname "$0")/.."
|
||||
|
||||
echo "==> Running formatters"
|
||||
rye run format
|
||||
11
scripts/lint
Executable file
11
scripts/lint
Executable file
|
|
@ -0,0 +1,11 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
cd "$(dirname "$0")/.."
|
||||
|
||||
echo "==> Running lints"
|
||||
rye run lint
|
||||
|
||||
echo "==> Making sure it imports"
|
||||
rye run python -c 'import opencode'
|
||||
41
scripts/mock
Executable file
41
scripts/mock
Executable file
|
|
@ -0,0 +1,41 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
cd "$(dirname "$0")/.."
|
||||
|
||||
if [[ -n "$1" && "$1" != '--'* ]]; then
|
||||
URL="$1"
|
||||
shift
|
||||
else
|
||||
URL="$(grep 'openapi_spec_url' .stats.yml | cut -d' ' -f2)"
|
||||
fi
|
||||
|
||||
# Check if the URL is empty
|
||||
if [ -z "$URL" ]; then
|
||||
echo "Error: No OpenAPI spec path/url provided or found in .stats.yml"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "==> Starting mock server with URL ${URL}"
|
||||
|
||||
# Run prism mock on the given spec
|
||||
if [ "$1" == "--daemon" ]; then
|
||||
npm exec --package=@stainless-api/prism-cli@5.8.5 -- prism mock "$URL" &> .prism.log &
|
||||
|
||||
# Wait for server to come online
|
||||
echo -n "Waiting for server"
|
||||
while ! grep -q "✖ fatal\|Prism is listening" ".prism.log" ; do
|
||||
echo -n "."
|
||||
sleep 0.1
|
||||
done
|
||||
|
||||
if grep -q "✖ fatal" ".prism.log"; then
|
||||
cat .prism.log
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo
|
||||
else
|
||||
npm exec --package=@stainless-api/prism-cli@5.8.5 -- prism mock "$URL"
|
||||
fi
|
||||
61
scripts/test
Executable file
61
scripts/test
Executable file
|
|
@ -0,0 +1,61 @@
|
|||
#!/usr/bin/env bash

set -e

cd "$(dirname "$0")/.."

RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[0;33m'
NC='\033[0m' # No Color

# Succeeds iff a prism mock server is already answering on the default port.
function prism_is_running() {
  curl --silent "http://localhost:4010" >/dev/null 2>&1
}

# Kill every process listening on the given TCP port.
kill_server_on_port() {
  pids=$(lsof -t -i tcp:"$1" || echo "")
  if [ "$pids" != "" ]; then
    # `lsof -t` prints one pid per line. Intentionally rely on word splitting
    # so each pid becomes its own argument; quoting would pass a single
    # newline-joined string that `kill` rejects when several listeners exist.
    # shellcheck disable=SC2086
    kill $pids
    echo "Stopped $pids."
  fi
}

# True when the caller points the suite at a live server via TEST_API_BASE_URL.
function is_overriding_api_base_url() {
  [ -n "$TEST_API_BASE_URL" ]
}

if ! is_overriding_api_base_url && ! prism_is_running ; then
  # When we exit this script, make sure to kill the background mock server process
  trap 'kill_server_on_port 4010' EXIT

  # Start the dev server
  ./scripts/mock --daemon
fi

if is_overriding_api_base_url ; then
  echo -e "${GREEN}✔ Running tests against ${TEST_API_BASE_URL}${NC}"
  echo
elif ! prism_is_running ; then
  echo -e "${RED}ERROR:${NC} The test suite will not run without a mock Prism server"
  echo -e "running against your OpenAPI spec."
  echo
  echo -e "To run the server, pass in the path or url of your OpenAPI"
  echo -e "spec to the prism command:"
  echo
  echo -e " \$ ${YELLOW}npm exec --package=@stoplight/prism-cli@~5.3.2 -- prism mock path/to/your.openapi.yml${NC}"
  echo
  exit 1
else
  echo -e "${GREEN}✔ Mock prism server is running with your OpenAPI spec${NC}"
  echo
fi

export DEFER_PYDANTIC_BUILD=false

echo "==> Running tests"
rye run pytest "$@"

echo "==> Running Pydantic v1 tests"
rye run nox -s test-pydantic-v1 -- "$@"
|
||||
167
scripts/utils/ruffen-docs.py
Normal file
167
scripts/utils/ruffen-docs.py
Normal file
|
|
@ -0,0 +1,167 @@
|
|||
# fork of https://github.com/asottile/blacken-docs adapted for ruff
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
import sys
|
||||
import argparse
|
||||
import textwrap
|
||||
import contextlib
|
||||
import subprocess
|
||||
from typing import Match, Optional, Sequence, Generator, NamedTuple, cast
|
||||
|
||||
# Fenced markdown block of the form ```python ... ``` — captures the opening
# fence (with its indentation), the code body, and the closing fence.
MD_RE = re.compile(
    r"(?P<before>^(?P<indent> *)```\s*python\n)" r"(?P<code>.*?)" r"(?P<after>^(?P=indent)```\s*$)",
    re.DOTALL | re.MULTILINE,
)
# Fenced markdown block containing a REPL transcript: ```pycon ... ```
MD_PYCON_RE = re.compile(
    r"(?P<before>^(?P<indent> *)```\s*pycon\n)" r"(?P<code>.*?)" r"(?P<after>^(?P=indent)```.*$)",
    re.DOTALL | re.MULTILINE,
)
# REPL prompt markers used when splitting and reassembling pycon transcripts.
PYCON_PREFIX = ">>> "
PYCON_CONTINUATION_PREFIX = "..."
PYCON_CONTINUATION_RE = re.compile(
    rf"^{re.escape(PYCON_CONTINUATION_PREFIX)}( |$)",
)
# Line length passed to ruff when reformatting embedded snippets.
DEFAULT_LINE_LENGTH = 100
|
||||
|
||||
|
||||
class CodeBlockError(NamedTuple):
    """A formatting failure for one code block: where it was and what went wrong."""

    # Character offset of the failing block within the source document.
    offset: int
    # The exception raised while formatting that block.
    exc: Exception
|
||||
|
||||
|
||||
def format_str(
    src: str,
) -> tuple[str, Sequence[CodeBlockError]]:
    """Reformat every fenced python/pycon block in a markdown string.

    Returns the rewritten document plus a list of CodeBlockError for blocks
    that the formatter failed on (those blocks are left unchanged).
    """
    errors: list[CodeBlockError] = []

    @contextlib.contextmanager
    def _collect_error(match: Match[str]) -> Generator[None, None, None]:
        # Record formatter failures instead of aborting the whole document.
        try:
            yield
        except Exception as e:
            errors.append(CodeBlockError(match.start(), e))

    def _md_match(match: Match[str]) -> str:
        # Plain ```python block: dedent, format, re-indent to the fence level.
        code = textwrap.dedent(match["code"])
        with _collect_error(match):
            code = format_code_block(code)
        code = textwrap.indent(code, match["indent"])
        return f"{match['before']}{code}{match['after']}"

    def _pycon_match(match: Match[str]) -> str:
        # ```pycon block: strip >>> / ... prompts, format each statement
        # fragment, then re-emit it with prompts restored.
        code = ""
        fragment = cast(Optional[str], None)

        def finish_fragment() -> None:
            # Flush the accumulated REPL fragment (if any) into `code`.
            nonlocal code
            nonlocal fragment

            if fragment is not None:
                with _collect_error(match):
                    fragment = format_code_block(fragment)
                fragment_lines = fragment.splitlines()
                code += f"{PYCON_PREFIX}{fragment_lines[0]}\n"
                for line in fragment_lines[1:]:
                    # Skip blank lines to handle Black adding a blank above
                    # functions within blocks. A blank line would end the REPL
                    # continuation prompt.
                    #
                    # >>> if True:
                    # ...     def f():
                    # ...         pass
                    # ...
                    if line:
                        code += f"{PYCON_CONTINUATION_PREFIX} {line}\n"
                if fragment_lines[-1].startswith(" "):
                    # Fragment ends inside an indented body: close it with a
                    # bare continuation prompt, as the REPL would show.
                    code += f"{PYCON_CONTINUATION_PREFIX}\n"
                fragment = None

        indentation = None
        for line in match["code"].splitlines():
            orig_line, line = line, line.lstrip()
            if indentation is None and line:
                # First non-blank line fixes the transcript's indentation.
                indentation = len(orig_line) - len(line)
            continuation_match = PYCON_CONTINUATION_RE.match(line)
            if continuation_match and fragment is not None:
                # "..." line continues the current statement fragment.
                fragment += line[continuation_match.end() :] + "\n"
            else:
                finish_fragment()
                if line.startswith(PYCON_PREFIX):
                    # ">>> " starts a new statement fragment.
                    fragment = line[len(PYCON_PREFIX) :] + "\n"
                else:
                    # Output line: pass through unchanged (minus indentation).
                    code += orig_line[indentation:] + "\n"
        finish_fragment()
        return code

    def _md_pycon_match(match: Match[str]) -> str:
        code = _pycon_match(match)
        code = textwrap.indent(code, match["indent"])
        return f"{match['before']}{code}{match['after']}"

    src = MD_RE.sub(_md_match, src)
    src = MD_PYCON_RE.sub(_md_pycon_match, src)
    return src, errors
|
||||
|
||||
|
||||
def format_code_block(code: str) -> str:
    """Run one Python snippet through `ruff format` and return the result."""
    ruff_cmd = [
        sys.executable,
        "-m",
        "ruff",
        "format",
        "--stdin-filename=script.py",
        f"--line-length={DEFAULT_LINE_LENGTH}",
    ]
    # ruff reads the snippet on stdin and emits the formatted code on stdout.
    return subprocess.check_output(ruff_cmd, encoding="utf-8", input=code)
|
||||
|
||||
|
||||
def format_file(
    filename: str,
    skip_errors: bool,
) -> int:
    """Reformat the fenced code blocks of `filename` in place.

    Returns 1 when a code block failed to parse and `skip_errors` is false,
    otherwise 0. The file is rewritten only when its contents changed.
    """
    with open(filename, encoding="UTF-8") as f:
        contents = f.read()
    new_contents, errors = format_str(contents)
    for error in errors:
        lineno = contents[: error.offset].count("\n") + 1
        # Report the real file and line (previously a hard-coded "(unknown)"
        # placeholder) so the message is actionable from CI logs.
        print(f"{filename}:{lineno}: code block parse error {error.exc}")
    if errors and not skip_errors:
        return 1
    if contents != new_contents:
        print(f"{filename}: Rewriting...")
        with open(filename, "w", encoding="UTF-8") as f:
            f.write(new_contents)
        return 0
    else:
        return 0
|
||||
|
||||
|
||||
def main(argv: Sequence[str] | None = None) -> int:
    """CLI entry point: reformat the code blocks of each given file.

    Returns the bitwise OR of the per-file exit statuses.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-l", "--line-length", type=int, default=DEFAULT_LINE_LENGTH)
    parser.add_argument("-S", "--skip-string-normalization", action="store_true")
    parser.add_argument("-E", "--skip-errors", action="store_true")
    parser.add_argument("filenames", nargs="*")
    args = parser.parse_args(argv)

    status = 0
    for name in args.filenames:
        status |= format_file(name, skip_errors=args.skip_errors)
    return status
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Propagate the per-file status bits as the process exit code.
    raise SystemExit(main())
|
||||
25
scripts/utils/upload-artifact.sh
Executable file
25
scripts/utils/upload-artifact.sh
Executable file
|
|
@ -0,0 +1,25 @@
|
|||
#!/usr/bin/env bash
set -exuo pipefail

# Ask the artifact service (at $URL, authorized via $AUTH) for a one-time
# signed upload URL. Both variables must be supplied by the CI environment;
# `set -u` above makes a missing one fail fast.
RESPONSE=$(curl -X POST "$URL" \
  -H "Authorization: Bearer $AUTH" \
  -H "Content-Type: application/json")

SIGNED_URL=$(echo "$RESPONSE" | jq -r '.url')

# jq prints the literal string "null" when the key is absent.
if [[ "$SIGNED_URL" == "null" ]]; then
  echo -e "\033[31mFailed to get signed URL.\033[0m"
  exit 1
fi

# Tar the working directory and stream it straight to the signed URL;
# capture curl's verbose output so the HTTP status can be checked below.
UPLOAD_RESPONSE=$(tar -cz . | curl -v -X PUT \
  -H "Content-Type: application/gzip" \
  --data-binary @- "$SIGNED_URL" 2>&1)

if echo "$UPLOAD_RESPONSE" | grep -q "HTTP/[0-9.]* 200"; then
  echo -e "\033[32mUploaded build to Stainless storage.\033[0m"
  echo -e "\033[32mInstallation: pip install --pre 'https://pkg.stainless.com/s/opencode-python/$SHA'\033[0m"
else
  echo -e "\033[31mFailed to upload artifact.\033[0m"
  exit 1
fi
|
||||
100
src/opencode/__init__.py
Normal file
100
src/opencode/__init__.py
Normal file
|
|
@ -0,0 +1,100 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
import typing as _t
|
||||
|
||||
from . import types
|
||||
from ._types import NOT_GIVEN, Omit, NoneType, NotGiven, Transport, ProxiesTypes
|
||||
from ._utils import file_from_path
|
||||
from ._client import (
|
||||
Client,
|
||||
Stream,
|
||||
Timeout,
|
||||
Opencode,
|
||||
Transport,
|
||||
AsyncClient,
|
||||
AsyncStream,
|
||||
AsyncOpencode,
|
||||
RequestOptions,
|
||||
)
|
||||
from ._models import BaseModel
|
||||
from ._version import __title__, __version__
|
||||
from ._response import APIResponse as APIResponse, AsyncAPIResponse as AsyncAPIResponse
|
||||
from ._constants import DEFAULT_TIMEOUT, DEFAULT_MAX_RETRIES, DEFAULT_CONNECTION_LIMITS
|
||||
from ._exceptions import (
|
||||
APIError,
|
||||
ConflictError,
|
||||
NotFoundError,
|
||||
OpencodeError,
|
||||
APIStatusError,
|
||||
RateLimitError,
|
||||
APITimeoutError,
|
||||
BadRequestError,
|
||||
APIConnectionError,
|
||||
AuthenticationError,
|
||||
InternalServerError,
|
||||
PermissionDeniedError,
|
||||
UnprocessableEntityError,
|
||||
APIResponseValidationError,
|
||||
)
|
||||
from ._base_client import DefaultHttpxClient, DefaultAioHttpClient, DefaultAsyncHttpxClient
|
||||
from ._utils._logs import setup_logging as _setup_logging
|
||||
|
||||
__all__ = [
|
||||
"types",
|
||||
"__version__",
|
||||
"__title__",
|
||||
"NoneType",
|
||||
"Transport",
|
||||
"ProxiesTypes",
|
||||
"NotGiven",
|
||||
"NOT_GIVEN",
|
||||
"Omit",
|
||||
"OpencodeError",
|
||||
"APIError",
|
||||
"APIStatusError",
|
||||
"APITimeoutError",
|
||||
"APIConnectionError",
|
||||
"APIResponseValidationError",
|
||||
"BadRequestError",
|
||||
"AuthenticationError",
|
||||
"PermissionDeniedError",
|
||||
"NotFoundError",
|
||||
"ConflictError",
|
||||
"UnprocessableEntityError",
|
||||
"RateLimitError",
|
||||
"InternalServerError",
|
||||
"Timeout",
|
||||
"RequestOptions",
|
||||
"Client",
|
||||
"AsyncClient",
|
||||
"Stream",
|
||||
"AsyncStream",
|
||||
"Opencode",
|
||||
"AsyncOpencode",
|
||||
"file_from_path",
|
||||
"BaseModel",
|
||||
"DEFAULT_TIMEOUT",
|
||||
"DEFAULT_MAX_RETRIES",
|
||||
"DEFAULT_CONNECTION_LIMITS",
|
||||
"DefaultHttpxClient",
|
||||
"DefaultAsyncHttpxClient",
|
||||
"DefaultAioHttpClient",
|
||||
]
|
||||
|
||||
if not _t.TYPE_CHECKING:
    # Runtime-only re-export: the lazy `resources` proxy must not be imported
    # during type checking.
    from ._utils._resources_proxy import resources as resources

_setup_logging()

# Update the __module__ attribute for exported symbols so that
# error messages point to this module instead of the module
# it was originally defined in, e.g.
# opencode._exceptions.NotFoundError -> opencode.NotFoundError
__locals = locals()
for __name in __all__:
    if not __name.startswith("__"):
        try:
            __locals[__name].__module__ = "opencode"
        except (TypeError, AttributeError):
            # Some of our exported symbols are builtins which we can't set attributes for.
            pass
|
||||
1985
src/opencode/_base_client.py
Normal file
1985
src/opencode/_base_client.py
Normal file
File diff suppressed because it is too large
Load diff
393
src/opencode/_client.py
Normal file
393
src/opencode/_client.py
Normal file
|
|
@ -0,0 +1,393 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from typing import Any, Union, Mapping
|
||||
from typing_extensions import Self, override
|
||||
|
||||
import httpx
|
||||
|
||||
from . import _exceptions
|
||||
from ._qs import Querystring
|
||||
from ._types import (
|
||||
NOT_GIVEN,
|
||||
Omit,
|
||||
Timeout,
|
||||
NotGiven,
|
||||
Transport,
|
||||
ProxiesTypes,
|
||||
RequestOptions,
|
||||
)
|
||||
from ._utils import is_given, get_async_library
|
||||
from ._version import __version__
|
||||
from .resources import app, file, event, config, session
|
||||
from ._streaming import Stream as Stream, AsyncStream as AsyncStream
|
||||
from ._exceptions import APIStatusError
|
||||
from ._base_client import (
|
||||
DEFAULT_MAX_RETRIES,
|
||||
SyncAPIClient,
|
||||
AsyncAPIClient,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"Timeout",
|
||||
"Transport",
|
||||
"ProxiesTypes",
|
||||
"RequestOptions",
|
||||
"Opencode",
|
||||
"AsyncOpencode",
|
||||
"Client",
|
||||
"AsyncClient",
|
||||
]
|
||||
|
||||
|
||||
class Opencode(SyncAPIClient):
    """Synchronous client for the Opencode API, exposing one attribute per resource."""

    event: event.EventResource
    app: app.AppResource
    file: file.FileResource
    config: config.ConfigResource
    session: session.SessionResource
    with_raw_response: OpencodeWithRawResponse
    with_streaming_response: OpencodeWithStreamedResponse

    # client options

    def __init__(
        self,
        *,
        base_url: str | httpx.URL | None = None,
        timeout: Union[float, Timeout, None, NotGiven] = NOT_GIVEN,
        max_retries: int = DEFAULT_MAX_RETRIES,
        default_headers: Mapping[str, str] | None = None,
        default_query: Mapping[str, object] | None = None,
        # Configure a custom httpx client.
        # We provide a `DefaultHttpxClient` class that you can pass to retain the default values we use for `limits`, `timeout` & `follow_redirects`.
        # See the [httpx documentation](https://www.python-httpx.org/api/#client) for more details.
        http_client: httpx.Client | None = None,
        # Enable or disable schema validation for data returned by the API.
        # When enabled an error APIResponseValidationError is raised
        # if the API responds with invalid data for the expected schema.
        #
        # This parameter may be removed or changed in the future.
        # If you rely on this feature, please open a GitHub issue
        # outlining your use-case to help us decide if it should be
        # part of our public interface in the future.
        _strict_response_validation: bool = False,
    ) -> None:
        """Construct a new synchronous Opencode client instance."""
        if base_url is None:
            base_url = os.environ.get("OPENCODE_BASE_URL")
        if base_url is None:
            # Plain literal (was a placeholder-less f-string): default to the
            # local development server when no base URL is configured.
            base_url = "http://localhost:54321"

        super().__init__(
            version=__version__,
            base_url=base_url,
            max_retries=max_retries,
            timeout=timeout,
            http_client=http_client,
            custom_headers=default_headers,
            custom_query=default_query,
            _strict_response_validation=_strict_response_validation,
        )

        self.event = event.EventResource(self)
        self.app = app.AppResource(self)
        self.file = file.FileResource(self)
        self.config = config.ConfigResource(self)
        self.session = session.SessionResource(self)
        self.with_raw_response = OpencodeWithRawResponse(self)
        self.with_streaming_response = OpencodeWithStreamedResponse(self)

    @property
    @override
    def qs(self) -> Querystring:
        """Query-string serializer; arrays are encoded comma-separated."""
        return Querystring(array_format="comma")

    @property
    @override
    def default_headers(self) -> dict[str, str | Omit]:
        return {
            **super().default_headers,
            "X-Stainless-Async": "false",
            **self._custom_headers,
        }

    def copy(
        self,
        *,
        base_url: str | httpx.URL | None = None,
        timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
        http_client: httpx.Client | None = None,
        max_retries: int | NotGiven = NOT_GIVEN,
        default_headers: Mapping[str, str] | None = None,
        set_default_headers: Mapping[str, str] | None = None,
        default_query: Mapping[str, object] | None = None,
        set_default_query: Mapping[str, object] | None = None,
        _extra_kwargs: Mapping[str, Any] = {},
    ) -> Self:
        """
        Create a new client instance re-using the same options given to the current client with optional overriding.
        """
        if default_headers is not None and set_default_headers is not None:
            raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive")

        if default_query is not None and set_default_query is not None:
            raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive")

        # `default_*` merges with the existing values; `set_default_*` replaces them.
        headers = self._custom_headers
        if default_headers is not None:
            headers = {**headers, **default_headers}
        elif set_default_headers is not None:
            headers = set_default_headers

        params = self._custom_query
        if default_query is not None:
            params = {**params, **default_query}
        elif set_default_query is not None:
            params = set_default_query

        http_client = http_client or self._client
        return self.__class__(
            base_url=base_url or self.base_url,
            timeout=self.timeout if isinstance(timeout, NotGiven) else timeout,
            http_client=http_client,
            max_retries=max_retries if is_given(max_retries) else self.max_retries,
            default_headers=headers,
            default_query=params,
            **_extra_kwargs,
        )

    # Alias for `copy` for nicer inline usage, e.g.
    # client.with_options(timeout=10).foo.create(...)
    with_options = copy

    @override
    def _make_status_error(
        self,
        err_msg: str,
        *,
        body: object,
        response: httpx.Response,
    ) -> APIStatusError:
        """Map an HTTP error response to the matching typed exception."""
        if response.status_code == 400:
            return _exceptions.BadRequestError(err_msg, response=response, body=body)

        if response.status_code == 401:
            return _exceptions.AuthenticationError(err_msg, response=response, body=body)

        if response.status_code == 403:
            return _exceptions.PermissionDeniedError(err_msg, response=response, body=body)

        if response.status_code == 404:
            return _exceptions.NotFoundError(err_msg, response=response, body=body)

        if response.status_code == 409:
            return _exceptions.ConflictError(err_msg, response=response, body=body)

        if response.status_code == 422:
            return _exceptions.UnprocessableEntityError(err_msg, response=response, body=body)

        if response.status_code == 429:
            return _exceptions.RateLimitError(err_msg, response=response, body=body)

        if response.status_code >= 500:
            return _exceptions.InternalServerError(err_msg, response=response, body=body)
        return APIStatusError(err_msg, response=response, body=body)
|
||||
|
||||
|
||||
class AsyncOpencode(AsyncAPIClient):
    """Asynchronous client for the Opencode API, exposing one attribute per resource."""

    event: event.AsyncEventResource
    app: app.AsyncAppResource
    file: file.AsyncFileResource
    config: config.AsyncConfigResource
    session: session.AsyncSessionResource
    with_raw_response: AsyncOpencodeWithRawResponse
    with_streaming_response: AsyncOpencodeWithStreamedResponse

    # client options

    def __init__(
        self,
        *,
        base_url: str | httpx.URL | None = None,
        timeout: Union[float, Timeout, None, NotGiven] = NOT_GIVEN,
        max_retries: int = DEFAULT_MAX_RETRIES,
        default_headers: Mapping[str, str] | None = None,
        default_query: Mapping[str, object] | None = None,
        # Configure a custom httpx client.
        # We provide a `DefaultAsyncHttpxClient` class that you can pass to retain the default values we use for `limits`, `timeout` & `follow_redirects`.
        # See the [httpx documentation](https://www.python-httpx.org/api/#asyncclient) for more details.
        http_client: httpx.AsyncClient | None = None,
        # Enable or disable schema validation for data returned by the API.
        # When enabled an error APIResponseValidationError is raised
        # if the API responds with invalid data for the expected schema.
        #
        # This parameter may be removed or changed in the future.
        # If you rely on this feature, please open a GitHub issue
        # outlining your use-case to help us decide if it should be
        # part of our public interface in the future.
        _strict_response_validation: bool = False,
    ) -> None:
        """Construct a new async AsyncOpencode client instance."""
        if base_url is None:
            base_url = os.environ.get("OPENCODE_BASE_URL")
        if base_url is None:
            # Plain literal (was a placeholder-less f-string): default to the
            # local development server when no base URL is configured.
            base_url = "http://localhost:54321"

        super().__init__(
            version=__version__,
            base_url=base_url,
            max_retries=max_retries,
            timeout=timeout,
            http_client=http_client,
            custom_headers=default_headers,
            custom_query=default_query,
            _strict_response_validation=_strict_response_validation,
        )

        self.event = event.AsyncEventResource(self)
        self.app = app.AsyncAppResource(self)
        self.file = file.AsyncFileResource(self)
        self.config = config.AsyncConfigResource(self)
        self.session = session.AsyncSessionResource(self)
        self.with_raw_response = AsyncOpencodeWithRawResponse(self)
        self.with_streaming_response = AsyncOpencodeWithStreamedResponse(self)

    @property
    @override
    def qs(self) -> Querystring:
        """Query-string serializer; arrays are encoded comma-separated."""
        return Querystring(array_format="comma")

    @property
    @override
    def default_headers(self) -> dict[str, str | Omit]:
        return {
            **super().default_headers,
            "X-Stainless-Async": f"async:{get_async_library()}",
            **self._custom_headers,
        }

    def copy(
        self,
        *,
        base_url: str | httpx.URL | None = None,
        timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
        http_client: httpx.AsyncClient | None = None,
        max_retries: int | NotGiven = NOT_GIVEN,
        default_headers: Mapping[str, str] | None = None,
        set_default_headers: Mapping[str, str] | None = None,
        default_query: Mapping[str, object] | None = None,
        set_default_query: Mapping[str, object] | None = None,
        _extra_kwargs: Mapping[str, Any] = {},
    ) -> Self:
        """
        Create a new client instance re-using the same options given to the current client with optional overriding.
        """
        if default_headers is not None and set_default_headers is not None:
            raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive")

        if default_query is not None and set_default_query is not None:
            raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive")

        # `default_*` merges with the existing values; `set_default_*` replaces them.
        headers = self._custom_headers
        if default_headers is not None:
            headers = {**headers, **default_headers}
        elif set_default_headers is not None:
            headers = set_default_headers

        params = self._custom_query
        if default_query is not None:
            params = {**params, **default_query}
        elif set_default_query is not None:
            params = set_default_query

        http_client = http_client or self._client
        return self.__class__(
            base_url=base_url or self.base_url,
            timeout=self.timeout if isinstance(timeout, NotGiven) else timeout,
            http_client=http_client,
            max_retries=max_retries if is_given(max_retries) else self.max_retries,
            default_headers=headers,
            default_query=params,
            **_extra_kwargs,
        )

    # Alias for `copy` for nicer inline usage, e.g.
    # client.with_options(timeout=10).foo.create(...)
    with_options = copy

    @override
    def _make_status_error(
        self,
        err_msg: str,
        *,
        body: object,
        response: httpx.Response,
    ) -> APIStatusError:
        """Map an HTTP error response to the matching typed exception."""
        if response.status_code == 400:
            return _exceptions.BadRequestError(err_msg, response=response, body=body)

        if response.status_code == 401:
            return _exceptions.AuthenticationError(err_msg, response=response, body=body)

        if response.status_code == 403:
            return _exceptions.PermissionDeniedError(err_msg, response=response, body=body)

        if response.status_code == 404:
            return _exceptions.NotFoundError(err_msg, response=response, body=body)

        if response.status_code == 409:
            return _exceptions.ConflictError(err_msg, response=response, body=body)

        if response.status_code == 422:
            return _exceptions.UnprocessableEntityError(err_msg, response=response, body=body)

        if response.status_code == 429:
            return _exceptions.RateLimitError(err_msg, response=response, body=body)

        if response.status_code >= 500:
            return _exceptions.InternalServerError(err_msg, response=response, body=body)
        return APIStatusError(err_msg, response=response, body=body)
|
||||
|
||||
|
||||
class OpencodeWithRawResponse:
    """View over an `Opencode` client whose resource methods return raw responses."""

    def __init__(self, client: Opencode) -> None:
        self.event = event.EventResourceWithRawResponse(client.event)
        self.app = app.AppResourceWithRawResponse(client.app)
        self.file = file.FileResourceWithRawResponse(client.file)
        self.config = config.ConfigResourceWithRawResponse(client.config)
        self.session = session.SessionResourceWithRawResponse(client.session)
|
||||
|
||||
|
||||
class AsyncOpencodeWithRawResponse:
    """View over an `AsyncOpencode` client whose resource methods return raw responses."""

    def __init__(self, client: AsyncOpencode) -> None:
        self.event = event.AsyncEventResourceWithRawResponse(client.event)
        self.app = app.AsyncAppResourceWithRawResponse(client.app)
        self.file = file.AsyncFileResourceWithRawResponse(client.file)
        self.config = config.AsyncConfigResourceWithRawResponse(client.config)
        self.session = session.AsyncSessionResourceWithRawResponse(client.session)
|
||||
|
||||
|
||||
class OpencodeWithStreamedResponse:
    """View over an `Opencode` client whose resource methods stream response content."""

    def __init__(self, client: Opencode) -> None:
        self.event = event.EventResourceWithStreamingResponse(client.event)
        self.app = app.AppResourceWithStreamingResponse(client.app)
        self.file = file.FileResourceWithStreamingResponse(client.file)
        self.config = config.ConfigResourceWithStreamingResponse(client.config)
        self.session = session.SessionResourceWithStreamingResponse(client.session)
|
||||
|
||||
|
||||
class AsyncOpencodeWithStreamedResponse:
    """View over an `AsyncOpencode` client whose resource methods stream response content."""

    def __init__(self, client: AsyncOpencode) -> None:
        self.event = event.AsyncEventResourceWithStreamingResponse(client.event)
        self.app = app.AsyncAppResourceWithStreamingResponse(client.app)
        self.file = file.AsyncFileResourceWithStreamingResponse(client.file)
        self.config = config.AsyncConfigResourceWithStreamingResponse(client.config)
        self.session = session.AsyncSessionResourceWithStreamingResponse(client.session)
|
||||
|
||||
|
||||
# Backwards-compatible aliases so users can refer to the generated clients
# by the generic names `Client` / `AsyncClient`.
Client = Opencode

AsyncClient = AsyncOpencode
|
||||
219
src/opencode/_compat.py
Normal file
219
src/opencode/_compat.py
Normal file
|
|
@ -0,0 +1,219 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any, Union, Generic, TypeVar, Callable, cast, overload
|
||||
from datetime import date, datetime
|
||||
from typing_extensions import Self, Literal
|
||||
|
||||
import pydantic
|
||||
from pydantic.fields import FieldInfo
|
||||
|
||||
from ._types import IncEx, StrBytesIntFloat
|
||||
|
||||
_T = TypeVar("_T")
|
||||
_ModelT = TypeVar("_ModelT", bound=pydantic.BaseModel)
|
||||
|
||||
# --------------- Pydantic v2 compatibility ---------------
|
||||
|
||||
# Pyright incorrectly reports some of our functions as overriding a method when they don't
|
||||
# pyright: reportIncompatibleMethodOverride=false
|
||||
|
||||
PYDANTIC_V2 = pydantic.VERSION.startswith("2.")
|
||||
|
||||
# v1 re-exports
|
||||
if TYPE_CHECKING:
|
||||
|
||||
def parse_date(value: date | StrBytesIntFloat) -> date: # noqa: ARG001
|
||||
...
|
||||
|
||||
def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime: # noqa: ARG001
|
||||
...
|
||||
|
||||
def get_args(t: type[Any]) -> tuple[Any, ...]: # noqa: ARG001
|
||||
...
|
||||
|
||||
def is_union(tp: type[Any] | None) -> bool: # noqa: ARG001
|
||||
...
|
||||
|
||||
def get_origin(t: type[Any]) -> type[Any] | None: # noqa: ARG001
|
||||
...
|
||||
|
||||
def is_literal_type(type_: type[Any]) -> bool: # noqa: ARG001
|
||||
...
|
||||
|
||||
def is_typeddict(type_: type[Any]) -> bool: # noqa: ARG001
|
||||
...
|
||||
|
||||
else:
|
||||
if PYDANTIC_V2:
|
||||
from pydantic.v1.typing import (
|
||||
get_args as get_args,
|
||||
is_union as is_union,
|
||||
get_origin as get_origin,
|
||||
is_typeddict as is_typeddict,
|
||||
is_literal_type as is_literal_type,
|
||||
)
|
||||
from pydantic.v1.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
|
||||
else:
|
||||
from pydantic.typing import (
|
||||
get_args as get_args,
|
||||
is_union as is_union,
|
||||
get_origin as get_origin,
|
||||
is_typeddict as is_typeddict,
|
||||
is_literal_type as is_literal_type,
|
||||
)
|
||||
from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
|
||||
|
||||
|
||||
# refactored config
|
||||
if TYPE_CHECKING:
|
||||
from pydantic import ConfigDict as ConfigDict
|
||||
else:
|
||||
if PYDANTIC_V2:
|
||||
from pydantic import ConfigDict
|
||||
else:
|
||||
# TODO: provide an error message here?
|
||||
ConfigDict = None
|
||||
|
||||
|
||||
# renamed methods / properties
|
||||
def parse_obj(model: type[_ModelT], value: object) -> _ModelT:
    """Validate `value` into an instance of `model`, across pydantic v1/v2."""
    if not PYDANTIC_V2:
        return cast(_ModelT, model.parse_obj(value))  # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
    return model.model_validate(value)
|
||||
|
||||
|
||||
def field_is_required(field: FieldInfo) -> bool:
    """Whether the given model field must be supplied by the caller."""
    if not PYDANTIC_V2:
        return field.required  # type: ignore
    return field.is_required()
|
||||
|
||||
|
||||
def field_get_default(field: FieldInfo) -> Any:
    """Return the field's default value, or None when it has no usable default."""
    value = field.get_default()
    if PYDANTIC_V2:
        # v2 marks "no default" with a sentinel; normalize it to None.
        from pydantic_core import PydanticUndefined

        if value == PydanticUndefined:
            return None
        return value
    return value
|
||||
|
||||
|
||||
def field_outer_type(field: FieldInfo) -> Any:
    """Return the field's declared (outer) type annotation."""
    if not PYDANTIC_V2:
        return field.outer_type_  # type: ignore
    return field.annotation
|
||||
|
||||
|
||||
def get_model_config(model: type[pydantic.BaseModel]) -> Any:
    """Return the model's configuration object for either pydantic major version."""
    if not PYDANTIC_V2:
        return model.__config__  # type: ignore
    return model.model_config
|
||||
|
||||
|
||||
def get_model_fields(model: type[pydantic.BaseModel]) -> dict[str, FieldInfo]:
    """Return the mapping of field name -> FieldInfo for the model class."""
    if not PYDANTIC_V2:
        return model.__fields__  # type: ignore
    return model.model_fields
|
||||
|
||||
|
||||
def model_copy(model: _ModelT, *, deep: bool = False) -> _ModelT:
    """Copy a model instance, optionally deep-copying nested values."""
    if not PYDANTIC_V2:
        return model.copy(deep=deep)  # type: ignore
    return model.model_copy(deep=deep)
|
||||
|
||||
|
||||
def model_json(model: pydantic.BaseModel, *, indent: int | None = None) -> str:
    """Serialize a model instance to a JSON string."""
    if not PYDANTIC_V2:
        return model.json(indent=indent)  # type: ignore
    return model.model_dump_json(indent=indent)
|
||||
|
||||
|
||||
def model_dump(
    model: pydantic.BaseModel,
    *,
    exclude: IncEx | None = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    warnings: bool = True,
    mode: Literal["json", "python"] = "python",
) -> dict[str, Any]:
    """Dump a model to a plain dict, papering over pydantic v1/v2 differences."""
    # `hasattr` also catches v1-style models that have been given a
    # `model_dump` shim, not just genuine v2 installations.
    if PYDANTIC_V2 or hasattr(model, "model_dump"):
        return model.model_dump(
            mode=mode,
            exclude=exclude,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            # warnings are not supported in Pydantic v1
            warnings=warnings if PYDANTIC_V2 else True,
        )
    # Pydantic v1 fallback: `.dict()` has no `mode` / `warnings` parameters.
    return cast(
        "dict[str, Any]",
        model.dict(  # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
            exclude=exclude,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
        ),
    )
|
||||
|
||||
|
||||
def model_parse(model: type[_ModelT], data: Any) -> _ModelT:
    """Validate/parse `data` into an instance of `model` across pydantic versions."""
    parse = model.model_validate if PYDANTIC_V2 else model.parse_obj  # pyright: ignore[reportDeprecated]
    return parse(data)
|
||||
|
||||
|
||||
# generic models
#
# `GenericModel` papers over the pydantic v1/v2 split: v1 requires a dedicated
# `pydantic.generics.GenericModel` base class for generic models, while v2
# folds that support into `pydantic.BaseModel` itself.
if TYPE_CHECKING:

    class GenericModel(pydantic.BaseModel): ...

else:
    if PYDANTIC_V2:
        # there no longer needs to be a distinction in v2 but
        # we still have to create our own subclass to avoid
        # inconsistent MRO ordering errors
        class GenericModel(pydantic.BaseModel): ...

    else:
        import pydantic.generics

        class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ...
|
||||
|
||||
|
||||
# cached properties
if TYPE_CHECKING:
    cached_property = property

    # we define a separate type (copied from typeshed)
    # that represents that `cached_property` is `set`able
    # at runtime, which differs from `@property`.
    #
    # this is a separate type as editors likely special case
    # `@property` and we don't want to cause issues just to have
    # more helpful internal types.

    class typed_cached_property(Generic[_T]):
        # stub-only declarations; at runtime this name is just
        # `functools.cached_property` (see the `else` branch below)
        func: Callable[[Any], _T]
        attrname: str | None

        def __init__(self, func: Callable[[Any], _T]) -> None: ...

        @overload
        def __get__(self, instance: None, owner: type[Any] | None = None) -> Self: ...

        @overload
        def __get__(self, instance: object, owner: type[Any] | None = None) -> _T: ...

        def __get__(self, instance: object, owner: type[Any] | None = None) -> _T | Self:
            raise NotImplementedError()

        def __set_name__(self, owner: type[Any], name: str) -> None: ...

        # __set__ is not defined at runtime, but @cached_property is designed to be settable
        def __set__(self, instance: object, value: _T) -> None: ...
else:
    # at runtime both names resolve to the stdlib implementation
    from functools import cached_property as cached_property

    typed_cached_property = cached_property
|
||||
14
src/opencode/_constants.py
Normal file
14
src/opencode/_constants.py
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

import httpx

# NOTE(review): internal-looking header names — presumably consumed by the
# SDK's request/response plumbing rather than sent to the API; verify at call sites.
RAW_RESPONSE_HEADER = "X-Stainless-Raw-Response"
OVERRIDE_CAST_TO_HEADER = "____stainless_override_cast_to"

# default timeout is 1 minute
DEFAULT_TIMEOUT = httpx.Timeout(timeout=60, connect=5.0)
DEFAULT_MAX_RETRIES = 2
DEFAULT_CONNECTION_LIMITS = httpx.Limits(max_connections=100, max_keepalive_connections=20)

# retry backoff bounds (httpx timeouts above are in seconds; these
# presumably are too — confirm at the retry call site)
INITIAL_RETRY_DELAY = 0.5
MAX_RETRY_DELAY = 8.0
|
||||
108
src/opencode/_exceptions.py
Normal file
108
src/opencode/_exceptions.py
Normal file
|
|
@ -0,0 +1,108 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing_extensions import Literal
|
||||
|
||||
import httpx
|
||||
|
||||
__all__ = [
|
||||
"BadRequestError",
|
||||
"AuthenticationError",
|
||||
"PermissionDeniedError",
|
||||
"NotFoundError",
|
||||
"ConflictError",
|
||||
"UnprocessableEntityError",
|
||||
"RateLimitError",
|
||||
"InternalServerError",
|
||||
]
|
||||
|
||||
|
||||
class OpencodeError(Exception):
    """Base exception that all other exceptions in this module inherit from."""

    pass
|
||||
|
||||
|
||||
class APIError(OpencodeError):
    """An error raised while communicating with the API.

    Carries the originating `httpx.Request` and, when available, the decoded
    response body.
    """

    message: str
    request: httpx.Request

    body: object | None
    """The API response body.

    If the API responded with a valid JSON structure then this property will be the
    decoded result.

    If it isn't a valid JSON structure then this will be the raw response.

    If there was no response associated with this error then it will be `None`.
    """

    def __init__(self, message: str, request: httpx.Request, *, body: object | None) -> None:  # noqa: ARG002
        super().__init__(message)
        self.request = request
        self.message = message
        self.body = body
|
||||
|
||||
|
||||
class APIResponseValidationError(APIError):
    """Raised when an API response could not be validated against the expected schema."""

    response: httpx.Response
    status_code: int

    def __init__(self, response: httpx.Response, body: object | None, *, message: str | None = None) -> None:
        super().__init__(message or "Data returned by API invalid for expected schema.", response.request, body=body)
        self.response = response
        self.status_code = response.status_code
|
||||
|
||||
|
||||
class APIStatusError(APIError):
    """Raised when an API response has a status code of 4xx or 5xx."""

    response: httpx.Response
    status_code: int

    def __init__(self, message: str, *, response: httpx.Response, body: object | None) -> None:
        super().__init__(message, response.request, body=body)
        self.response = response
        self.status_code = response.status_code
|
||||
|
||||
|
||||
class APIConnectionError(APIError):
    """Raised when a request could not be completed due to a connection failure."""

    def __init__(self, *, message: str = "Connection error.", request: httpx.Request) -> None:
        super().__init__(message, request, body=None)
|
||||
|
||||
|
||||
class APITimeoutError(APIConnectionError):
    """Raised when a request times out (a specialization of connection errors)."""

    def __init__(self, request: httpx.Request) -> None:
        super().__init__(message="Request timed out.", request=request)
|
||||
|
||||
|
||||
# Concrete `APIStatusError` subclasses, one per common HTTP client/server
# error status. Each narrows `status_code` to the matching literal value.


class BadRequestError(APIStatusError):
    status_code: Literal[400] = 400  # pyright: ignore[reportIncompatibleVariableOverride]


class AuthenticationError(APIStatusError):
    status_code: Literal[401] = 401  # pyright: ignore[reportIncompatibleVariableOverride]


class PermissionDeniedError(APIStatusError):
    status_code: Literal[403] = 403  # pyright: ignore[reportIncompatibleVariableOverride]


class NotFoundError(APIStatusError):
    status_code: Literal[404] = 404  # pyright: ignore[reportIncompatibleVariableOverride]


class ConflictError(APIStatusError):
    status_code: Literal[409] = 409  # pyright: ignore[reportIncompatibleVariableOverride]


class UnprocessableEntityError(APIStatusError):
    status_code: Literal[422] = 422  # pyright: ignore[reportIncompatibleVariableOverride]


class RateLimitError(APIStatusError):
    status_code: Literal[429] = 429  # pyright: ignore[reportIncompatibleVariableOverride]


class InternalServerError(APIStatusError):
    # NOTE(review): no fixed status literal here — presumably raised for any
    # >= 500 response; confirm at the raise site.
    pass
|
||||
123
src/opencode/_files.py
Normal file
123
src/opencode/_files.py
Normal file
|
|
@ -0,0 +1,123 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import io
|
||||
import os
|
||||
import pathlib
|
||||
from typing import overload
|
||||
from typing_extensions import TypeGuard
|
||||
|
||||
import anyio
|
||||
|
||||
from ._types import (
|
||||
FileTypes,
|
||||
FileContent,
|
||||
RequestFiles,
|
||||
HttpxFileTypes,
|
||||
Base64FileInput,
|
||||
HttpxFileContent,
|
||||
HttpxRequestFiles,
|
||||
)
|
||||
from ._utils import is_tuple_t, is_mapping_t, is_sequence_t
|
||||
|
||||
|
||||
def is_base64_file_input(obj: object) -> TypeGuard[Base64FileInput]:
    """Whether `obj` is a file-like (`io.IOBase`) or path-like (`os.PathLike`) value."""
    return isinstance(obj, (io.IOBase, os.PathLike))
|
||||
|
||||
|
||||
def is_file_content(obj: object) -> TypeGuard[FileContent]:
    """Whether `obj` is an acceptable raw file payload: bytes, a tuple, a
    file-like object or a path-like object."""
    return isinstance(obj, (bytes, tuple, io.IOBase, os.PathLike))
|
||||
|
||||
|
||||
def assert_is_file_content(obj: object, *, key: str | None = None) -> None:
    """Raise `RuntimeError` unless `obj` passes `is_file_content`.

    `key` (when given) names the mapping entry being checked, to make the
    error message point at the offending field.
    """
    if is_file_content(obj):
        return

    if key is not None:
        prefix = f"Expected entry at `{key}`"
    else:
        prefix = f"Expected file input `{obj!r}`"
    raise RuntimeError(
        f"{prefix} to be bytes, an io.IOBase instance, PathLike or a tuple but received {type(obj)} instead."
    ) from None
|
||||
|
||||
|
||||
@overload
def to_httpx_files(files: None) -> None: ...


@overload
def to_httpx_files(files: RequestFiles) -> HttpxRequestFiles: ...


def to_httpx_files(files: RequestFiles | None) -> HttpxRequestFiles | None:
    """Convert our `RequestFiles` input (mapping or key/value sequence) into
    the structure httpx expects, transforming each file entry along the way."""
    if files is None:
        return None

    if is_mapping_t(files):
        return {key: _transform_file(file) for key, file in files.items()}
    if is_sequence_t(files):
        return [(key, _transform_file(file)) for key, file in files]

    raise TypeError(f"Unexpected file type input {type(files)}, expected mapping or sequence")
|
||||
|
||||
|
||||
def _transform_file(file: FileTypes) -> HttpxFileTypes:
    """Normalize a single file entry for httpx.

    Path-like values are read eagerly into a `(name, bytes)` pair; raw content
    (bytes/file-like) passes through unchanged; tuple entries have their
    content element (index 1) read while the rest of the tuple is preserved.

    Raises:
        TypeError: if `file` is neither file content nor a tuple.
    """
    if is_file_content(file):
        if isinstance(file, os.PathLike):
            path = pathlib.Path(file)
            return (path.name, path.read_bytes())

        return file

    if is_tuple_t(file):
        return (file[0], _read_file_content(file[1]), *file[2:])

    # fix: this literal had a stray `f` prefix with no placeholders (F541)
    raise TypeError("Expected file types input to be a FileContent type or to be a tuple")
|
||||
|
||||
|
||||
def _read_file_content(file: FileContent) -> HttpxFileContent:
|
||||
if isinstance(file, os.PathLike):
|
||||
return pathlib.Path(file).read_bytes()
|
||||
return file
|
||||
|
||||
|
||||
@overload
async def async_to_httpx_files(files: None) -> None: ...


@overload
async def async_to_httpx_files(files: RequestFiles) -> HttpxRequestFiles: ...


async def async_to_httpx_files(files: RequestFiles | None) -> HttpxRequestFiles | None:
    """Async variant of `to_httpx_files`: convert our `RequestFiles` input
    (mapping or key/value sequence) into the structure httpx expects, reading
    path-like entries asynchronously via anyio.

    Raises:
        TypeError: if `files` is neither a mapping nor a sequence.
    """
    if files is None:
        return None

    if is_mapping_t(files):
        files = {key: await _async_transform_file(file) for key, file in files.items()}
    elif is_sequence_t(files):
        files = [(key, await _async_transform_file(file)) for key, file in files]
    else:
        # fix: this message was missing its `f` prefix, so `{type(files)}` was
        # rendered literally instead of being interpolated (the sync version
        # `to_httpx_files` already had it right)
        raise TypeError(f"Unexpected file type input {type(files)}, expected mapping or sequence")

    return files
|
||||
|
||||
|
||||
async def _async_transform_file(file: FileTypes) -> HttpxFileTypes:
    """Async counterpart of `_transform_file`.

    Path-like values are read via `anyio.Path` into a `(name, bytes)` pair;
    raw content passes through unchanged; tuple entries have their content
    element (index 1) read while the rest of the tuple is preserved.

    Raises:
        TypeError: if `file` is neither file content nor a tuple.
    """
    if is_file_content(file):
        if isinstance(file, os.PathLike):
            path = anyio.Path(file)
            return (path.name, await path.read_bytes())

        return file

    if is_tuple_t(file):
        return (file[0], await _async_read_file_content(file[1]), *file[2:])

    # fix: this literal had a stray `f` prefix with no placeholders (F541)
    raise TypeError("Expected file types input to be a FileContent type or to be a tuple")
|
||||
|
||||
|
||||
async def _async_read_file_content(file: FileContent) -> HttpxFileContent:
    """Asynchronously read path-like inputs into bytes; pass other content through."""
    if not isinstance(file, os.PathLike):
        return file

    return await anyio.Path(file).read_bytes()
|
||||
805
src/opencode/_models.py
Normal file
805
src/opencode/_models.py
Normal file
|
|
@ -0,0 +1,805 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import inspect
|
||||
from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, cast
|
||||
from datetime import date, datetime
|
||||
from typing_extensions import (
|
||||
Unpack,
|
||||
Literal,
|
||||
ClassVar,
|
||||
Protocol,
|
||||
Required,
|
||||
ParamSpec,
|
||||
TypedDict,
|
||||
TypeGuard,
|
||||
final,
|
||||
override,
|
||||
runtime_checkable,
|
||||
)
|
||||
|
||||
import pydantic
|
||||
from pydantic.fields import FieldInfo
|
||||
|
||||
from ._types import (
|
||||
Body,
|
||||
IncEx,
|
||||
Query,
|
||||
ModelT,
|
||||
Headers,
|
||||
Timeout,
|
||||
NotGiven,
|
||||
AnyMapping,
|
||||
HttpxRequestFiles,
|
||||
)
|
||||
from ._utils import (
|
||||
PropertyInfo,
|
||||
is_list,
|
||||
is_given,
|
||||
json_safe,
|
||||
lru_cache,
|
||||
is_mapping,
|
||||
parse_date,
|
||||
coerce_boolean,
|
||||
parse_datetime,
|
||||
strip_not_given,
|
||||
extract_type_arg,
|
||||
is_annotated_type,
|
||||
is_type_alias_type,
|
||||
strip_annotated_type,
|
||||
)
|
||||
from ._compat import (
|
||||
PYDANTIC_V2,
|
||||
ConfigDict,
|
||||
GenericModel as BaseGenericModel,
|
||||
get_args,
|
||||
is_union,
|
||||
parse_obj,
|
||||
get_origin,
|
||||
is_literal_type,
|
||||
get_model_config,
|
||||
get_model_fields,
|
||||
field_get_default,
|
||||
)
|
||||
from ._constants import RAW_RESPONSE_HEADER
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pydantic_core.core_schema import ModelField, ModelSchema, LiteralSchema, ModelFieldsSchema
|
||||
|
||||
__all__ = ["BaseModel", "GenericModel"]
|
||||
|
||||
_T = TypeVar("_T")
|
||||
_BaseModelT = TypeVar("_BaseModelT", bound="BaseModel")
|
||||
|
||||
P = ParamSpec("P")
|
||||
|
||||
|
||||
@runtime_checkable
class _ConfigProtocol(Protocol):
    # Structural type matching a pydantic v1-style `Config` class; used in
    # `BaseModel.construct` to detect `allow_population_by_field_name`.
    allow_population_by_field_name: bool
|
||||
|
||||
|
||||
class BaseModel(pydantic.BaseModel):
    """Shared base model for all API response types.

    Works under both pydantic v1 and v2: configures `extra="allow"` on both,
    overrides `construct()` to recursively build nested values without
    validation, and (on v1) provides forwards-compat shims for the v2
    `model_*` method names.
    """

    if PYDANTIC_V2:
        model_config: ClassVar[ConfigDict] = ConfigDict(
            extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true"))
        )
    else:

        @property
        @override
        def model_fields_set(self) -> set[str]:
            # a forwards-compat shim for pydantic v2
            return self.__fields_set__  # type: ignore

        class Config(pydantic.BaseConfig):  # pyright: ignore[reportDeprecated]
            extra: Any = pydantic.Extra.allow  # type: ignore

    def to_dict(
        self,
        *,
        mode: Literal["json", "python"] = "python",
        use_api_names: bool = True,
        exclude_unset: bool = True,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
        warnings: bool = True,
    ) -> dict[str, object]:
        """Recursively generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

        By default, fields that were not set by the API will not be included,
        and keys will match the API response, *not* the property names from the model.

        For example, if the API responds with `"fooBar": true` but we've defined a `foo_bar: bool` property,
        the output will use the `"fooBar"` key (unless `use_api_names=False` is passed).

        Args:
            mode:
                If mode is 'json', the dictionary will only contain JSON serializable types. e.g. `datetime` will be turned into a string, `"2024-3-22T18:11:19.117000Z"`.
                If mode is 'python', the dictionary may contain any Python objects. e.g. `datetime(2024, 3, 22)`

            use_api_names: Whether to use the key that the API responded with or the property name. Defaults to `True`.
            exclude_unset: Whether to exclude fields that have not been explicitly set.
            exclude_defaults: Whether to exclude fields that are set to their default value from the output.
            exclude_none: Whether to exclude fields that have a value of `None` from the output.
            warnings: Whether to log warnings when invalid fields are encountered. This is only supported in Pydantic v2.
        """
        return self.model_dump(
            mode=mode,
            by_alias=use_api_names,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
            warnings=warnings,
        )

    def to_json(
        self,
        *,
        indent: int | None = 2,
        use_api_names: bool = True,
        exclude_unset: bool = True,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
        warnings: bool = True,
    ) -> str:
        """Generates a JSON string representing this model as it would be received from or sent to the API (but with indentation).

        By default, fields that were not set by the API will not be included,
        and keys will match the API response, *not* the property names from the model.

        For example, if the API responds with `"fooBar": true` but we've defined a `foo_bar: bool` property,
        the output will use the `"fooBar"` key (unless `use_api_names=False` is passed).

        Args:
            indent: Indentation to use in the JSON output. If `None` is passed, the output will be compact. Defaults to `2`
            use_api_names: Whether to use the key that the API responded with or the property name. Defaults to `True`.
            exclude_unset: Whether to exclude fields that have not been explicitly set.
            exclude_defaults: Whether to exclude fields that have the default value.
            exclude_none: Whether to exclude fields that have a value of `None`.
            warnings: Whether to show any warnings that occurred during serialization. This is only supported in Pydantic v2.
        """
        return self.model_dump_json(
            indent=indent,
            by_alias=use_api_names,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
            warnings=warnings,
        )

    @override
    def __str__(self) -> str:
        # mypy complains about an invalid self arg
        return f"{self.__repr_name__()}({self.__repr_str__(', ')})"  # type: ignore[misc]

    # Override the 'construct' method in a way that supports recursive parsing without validation.
    # Based on https://github.com/samuelcolvin/pydantic/issues/1168#issuecomment-817742836.
    @classmethod
    @override
    def construct(  # pyright: ignore[reportIncompatibleMethodOverride]
        __cls: Type[ModelT],
        _fields_set: set[str] | None = None,
        **values: object,
    ) -> ModelT:
        # bypass __init__ (and therefore validation) entirely
        m = __cls.__new__(__cls)
        fields_values: dict[str, object] = {}

        config = get_model_config(__cls)
        # v1 configs are classes (`_ConfigProtocol`), v2 configs are dicts
        populate_by_name = (
            config.allow_population_by_field_name
            if isinstance(config, _ConfigProtocol)
            else config.get("populate_by_name")
        )

        if _fields_set is None:
            _fields_set = set()

        model_fields = get_model_fields(__cls)
        for name, field in model_fields.items():
            # prefer the alias key, falling back to the field name when the
            # alias is absent from `values` and population-by-name is enabled
            key = field.alias
            if key is None or (key not in values and populate_by_name):
                key = name

            if key in values:
                fields_values[name] = _construct_field(value=values[key], field=field, key=key)
                _fields_set.add(name)
            else:
                fields_values[name] = field_get_default(field)

        # collect keys not declared on the model (`extra="allow"`)
        _extra = {}
        for key, value in values.items():
            if key not in model_fields:
                if PYDANTIC_V2:
                    _extra[key] = value
                else:
                    _fields_set.add(key)
                    fields_values[key] = value

        object.__setattr__(m, "__dict__", fields_values)

        if PYDANTIC_V2:
            # these properties are copied from Pydantic's `model_construct()` method
            object.__setattr__(m, "__pydantic_private__", None)
            object.__setattr__(m, "__pydantic_extra__", _extra)
            object.__setattr__(m, "__pydantic_fields_set__", _fields_set)
        else:
            # init_private_attributes() does not exist in v2
            m._init_private_attributes()  # type: ignore

            # copied from Pydantic v1's `construct()` method
            object.__setattr__(m, "__fields_set__", _fields_set)

        return m

    if not TYPE_CHECKING:
        # type checkers incorrectly complain about this assignment
        # because the type signatures are technically different
        # although not in practice
        model_construct = construct

    if not PYDANTIC_V2:
        # we define aliases for some of the new pydantic v2 methods so
        # that we can just document these methods without having to specify
        # a specific pydantic version as some users may not know which
        # pydantic version they are currently using

        @override
        def model_dump(
            self,
            *,
            mode: Literal["json", "python"] | str = "python",
            include: IncEx | None = None,
            exclude: IncEx | None = None,
            by_alias: bool = False,
            exclude_unset: bool = False,
            exclude_defaults: bool = False,
            exclude_none: bool = False,
            round_trip: bool = False,
            warnings: bool | Literal["none", "warn", "error"] = True,
            context: dict[str, Any] | None = None,
            serialize_as_any: bool = False,
        ) -> dict[str, Any]:
            """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump

            Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

            Args:
                mode: The mode in which `to_python` should run.
                    If mode is 'json', the dictionary will only contain JSON serializable types.
                    If mode is 'python', the dictionary may contain any Python objects.
                include: A list of fields to include in the output.
                exclude: A list of fields to exclude from the output.
                by_alias: Whether to use the field's alias in the dictionary key if defined.
                exclude_unset: Whether to exclude fields that are unset or None from the output.
                exclude_defaults: Whether to exclude fields that are set to their default value from the output.
                exclude_none: Whether to exclude fields that have a value of `None` from the output.
                round_trip: Whether to enable serialization and deserialization round-trip support.
                warnings: Whether to log warnings when invalid fields are encountered.

            Returns:
                A dictionary representation of the model.
            """
            # reject v2-only options up-front rather than silently ignoring them
            if mode not in {"json", "python"}:
                raise ValueError("mode must be either 'json' or 'python'")
            if round_trip != False:
                raise ValueError("round_trip is only supported in Pydantic v2")
            if warnings != True:
                raise ValueError("warnings is only supported in Pydantic v2")
            if context is not None:
                raise ValueError("context is only supported in Pydantic v2")
            if serialize_as_any != False:
                raise ValueError("serialize_as_any is only supported in Pydantic v2")
            dumped = super().dict(  # pyright: ignore[reportDeprecated]
                include=include,
                exclude=exclude,
                by_alias=by_alias,
                exclude_unset=exclude_unset,
                exclude_defaults=exclude_defaults,
                exclude_none=exclude_none,
            )

            return cast(dict[str, Any], json_safe(dumped)) if mode == "json" else dumped

        @override
        def model_dump_json(
            self,
            *,
            indent: int | None = None,
            include: IncEx | None = None,
            exclude: IncEx | None = None,
            by_alias: bool = False,
            exclude_unset: bool = False,
            exclude_defaults: bool = False,
            exclude_none: bool = False,
            round_trip: bool = False,
            warnings: bool | Literal["none", "warn", "error"] = True,
            context: dict[str, Any] | None = None,
            serialize_as_any: bool = False,
        ) -> str:
            """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump_json

            Generates a JSON representation of the model using Pydantic's `to_json` method.

            Args:
                indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
                include: Field(s) to include in the JSON output. Can take either a string or set of strings.
                exclude: Field(s) to exclude from the JSON output. Can take either a string or set of strings.
                by_alias: Whether to serialize using field aliases.
                exclude_unset: Whether to exclude fields that have not been explicitly set.
                exclude_defaults: Whether to exclude fields that have the default value.
                exclude_none: Whether to exclude fields that have a value of `None`.
                round_trip: Whether to use serialization/deserialization between JSON and class instance.
                warnings: Whether to show any warnings that occurred during serialization.

            Returns:
                A JSON string representation of the model.
            """
            # reject v2-only options up-front rather than silently ignoring them
            if round_trip != False:
                raise ValueError("round_trip is only supported in Pydantic v2")
            if warnings != True:
                raise ValueError("warnings is only supported in Pydantic v2")
            if context is not None:
                raise ValueError("context is only supported in Pydantic v2")
            if serialize_as_any != False:
                raise ValueError("serialize_as_any is only supported in Pydantic v2")
            return super().json(  # type: ignore[reportDeprecated]
                indent=indent,
                include=include,
                exclude=exclude,
                by_alias=by_alias,
                exclude_unset=exclude_unset,
                exclude_defaults=exclude_defaults,
                exclude_none=exclude_none,
            )
|
||||
|
||||
|
||||
def _construct_field(value: object, field: FieldInfo, key: str) -> object:
    """Construct a single field's value; `None` falls back to the field default."""
    if value is None:
        return field_get_default(field)

    type_ = field.annotation if PYDANTIC_V2 else cast(type, field.outer_type_)  # type: ignore
    if type_ is None:
        raise RuntimeError(f"Unexpected field type is None for {key}")

    return construct_type(value=value, type_=type_)
|
||||
|
||||
|
||||
def is_basemodel(type_: type) -> bool:
    """Returns whether or not the given type is either a `BaseModel` or a union of `BaseModel`"""
    if is_union(type_):
        return any(is_basemodel(variant) for variant in get_args(type_))

    return is_basemodel_type(type_)
|
||||
|
||||
|
||||
def is_basemodel_type(type_: type) -> TypeGuard[type[BaseModel] | type[GenericModel]]:
    """TypeGuard: is `type_` (or its generic origin) a subclass of our model bases?"""
    origin = get_origin(type_) or type_
    return inspect.isclass(origin) and issubclass(origin, (BaseModel, GenericModel))
|
||||
|
||||
|
||||
def build(
    base_model_cls: Callable[P, _BaseModelT],
    *args: P.args,
    **kwargs: P.kwargs,
) -> _BaseModelT:
    """Construct a BaseModel class without validation.

    This is useful for cases where you need to instantiate a `BaseModel`
    from an API response as this provides type-safe params which isn't supported
    by helpers like `construct_type()`.

    ```py
    build(MyModel, my_field_a="foo", my_field_b=123)
    ```
    """
    # the ParamSpec typing permits positional args, but construction only
    # supports keyword arguments
    if args:
        raise TypeError(
            "Received positional arguments which are not supported; Keyword arguments must be used instead",
        )

    return cast(_BaseModelT, construct_type(type_=base_model_cls, value=kwargs))
|
||||
|
||||
|
||||
def construct_type_unchecked(*, value: object, type_: type[_T]) -> _T:
    """Loose coercion to the expected type with construction of nested values.

    Note: the returned value from this function is not guaranteed to match the
    given type.
    """
    constructed = construct_type(value=value, type_=type_)
    return cast(_T, constructed)
|
||||
|
||||
|
||||
def construct_type(*, value: object, type_: object) -> object:
    """Loose coercion to the expected type with construction of nested values.

    If the given value does not match the expected type then it is returned as-is.
    """

    # store a reference to the original type we were given before we extract any inner
    # types so that we can properly resolve forward references in `TypeAliasType` annotations
    original_type = None

    # we allow `object` as the input type because otherwise, passing things like
    # `Literal['value']` will be reported as a type error by type checkers
    type_ = cast("type[object]", type_)
    if is_type_alias_type(type_):
        original_type = type_  # type: ignore[unreachable]
        type_ = type_.__value__  # type: ignore[unreachable]

    # unwrap `Annotated[T, ...]` -> `T`, keeping the metadata for
    # discriminated-union resolution below
    if is_annotated_type(type_):
        meta: tuple[Any, ...] = get_args(type_)[1:]
        type_ = extract_type_arg(type_, 0)
    else:
        meta = tuple()

    # we need to use the origin class for any types that are subscripted generics
    # e.g. Dict[str, object]
    origin = get_origin(type_) or type_
    args = get_args(type_)

    if is_union(origin):
        # first try strict validation of the whole union
        try:
            return validate_type(type_=cast("type[object]", original_type or type_), value=value)
        except Exception:
            pass

        # if the type is a discriminated union then we want to construct the right variant
        # in the union, even if the data doesn't match exactly, otherwise we'd break code
        # that relies on the constructed class types, e.g.
        #
        # class FooType:
        #   kind: Literal['foo']
        #   value: str
        #
        # class BarType:
        #   kind: Literal['bar']
        #   value: int
        #
        # without this block, if the data we get is something like `{'kind': 'bar', 'value': 'foo'}` then
        # we'd end up constructing `FooType` when it should be `BarType`.
        discriminator = _build_discriminated_union_meta(union=type_, meta_annotations=meta)
        if discriminator and is_mapping(value):
            variant_value = value.get(discriminator.field_alias_from or discriminator.field_name)
            if variant_value and isinstance(variant_value, str):
                variant_type = discriminator.mapping.get(variant_value)
                if variant_type:
                    return construct_type(type_=variant_type, value=value)

        # if the data is not valid, use the first variant that doesn't fail while deserializing
        for variant in args:
            try:
                return construct_type(value=value, type_=variant)
            except Exception:
                continue

        raise RuntimeError(f"Could not convert data into a valid instance of {type_}")

    if origin == dict:
        if not is_mapping(value):
            return value

        _, items_type = get_args(type_)  # Dict[_, items_type]
        return {key: construct_type(value=item, type_=items_type) for key, item in value.items()}

    if (
        not is_literal_type(type_)
        and inspect.isclass(origin)
        and (issubclass(origin, BaseModel) or issubclass(origin, GenericModel))
    ):
        # a list of mappings becomes a list of constructed models; other
        # list entries are left untouched
        if is_list(value):
            return [cast(Any, type_).construct(**entry) if is_mapping(entry) else entry for entry in value]

        if is_mapping(value):
            if issubclass(type_, BaseModel):
                return type_.construct(**value)  # type: ignore[arg-type]

            return cast(Any, type_).construct(**value)

    if origin == list:
        if not is_list(value):
            return value

        inner_type = args[0]  # List[inner_type]
        return [construct_type(value=entry, type_=inner_type) for entry in value]

    if origin == float:
        if isinstance(value, int):
            # only coerce int -> float when the conversion is exact
            coerced = float(value)
            if coerced != value:
                return value
            return coerced

        return value

    if type_ == datetime:
        try:
            return parse_datetime(value)  # type: ignore
        except Exception:
            return value

    if type_ == date:
        try:
            return parse_date(value)  # type: ignore
        except Exception:
            return value

    # anything unrecognised is passed through untouched
    return value
|
||||
|
||||
|
||||
@runtime_checkable
class CachedDiscriminatorType(Protocol):
    # Structural type for a union that has already had its discriminator
    # metadata computed and cached on the `__discriminator__` attribute.
    __discriminator__: DiscriminatorDetails
|
||||
|
||||
|
||||
class DiscriminatorDetails:
    """Resolved discriminator metadata for a discriminated union."""

    field_name: str
    """The name of the discriminator field in the variant class, e.g.

    ```py
    class Foo(BaseModel):
        type: Literal['foo']
    ```

    Will result in field_name='type'
    """

    field_alias_from: str | None
    """The name of the discriminator field in the API response, e.g.

    ```py
    class Foo(BaseModel):
        type: Literal['foo'] = Field(alias='type_from_api')
    ```

    Will result in field_alias_from='type_from_api'
    """

    mapping: dict[str, type]
    """Mapping of discriminator value to variant type, e.g.

    {'foo': FooVariant, 'bar': BarVariant}
    """

    def __init__(
        self,
        *,
        mapping: dict[str, type],
        discriminator_field: str,
        discriminator_alias: str | None,
    ) -> None:
        self.mapping = mapping
        self.field_name = discriminator_field
        self.field_alias_from = discriminator_alias
|
||||
|
||||
|
||||
def _build_discriminated_union_meta(*, union: type, meta_annotations: tuple[Any, ...]) -> DiscriminatorDetails | None:
    """Compute (and cache on the union) discriminator metadata for a union type.

    Returns `None` when no `PropertyInfo(discriminator=...)` annotation is
    present or no variant contributes a usable string literal value.
    """
    # fast path: a previous call already cached the details on the union object
    if isinstance(union, CachedDiscriminatorType):
        return union.__discriminator__

    discriminator_field_name: str | None = None

    # the discriminator field name is declared via a PropertyInfo annotation
    # in the union's Annotated[] metadata
    for annotation in meta_annotations:
        if isinstance(annotation, PropertyInfo) and annotation.discriminator is not None:
            discriminator_field_name = annotation.discriminator
            break

    if not discriminator_field_name:
        return None

    mapping: dict[str, type] = {}
    discriminator_alias: str | None = None

    # collect the literal discriminator values declared by each variant model
    for variant in get_args(union):
        variant = strip_annotated_type(variant)
        if is_basemodel_type(variant):
            if PYDANTIC_V2:
                # pydantic v2: read the field out of the model's core schema
                field = _extract_field_schema_pv2(variant, discriminator_field_name)
                if not field:
                    continue

                # Note: if one variant defines an alias then they all should
                discriminator_alias = field.get("serialization_alias")

                field_schema = field["schema"]

                if field_schema["type"] == "literal":
                    for entry in cast("LiteralSchema", field_schema)["expected"]:
                        if isinstance(entry, str):
                            mapping[entry] = variant
            else:
                # pydantic v1: read the (deprecated) __fields__ mapping instead
                field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name)  # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
                if not field_info:
                    continue

                # Note: if one variant defines an alias then they all should
                discriminator_alias = field_info.alias

                if (annotation := getattr(field_info, "annotation", None)) and is_literal_type(annotation):
                    for entry in get_args(annotation):
                        if isinstance(entry, str):
                            mapping[entry] = variant

    if not mapping:
        return None

    details = DiscriminatorDetails(
        mapping=mapping,
        discriminator_field=discriminator_field_name,
        discriminator_alias=discriminator_alias,
    )
    # cache on the union object so subsequent calls take the fast path above
    cast(CachedDiscriminatorType, union).__discriminator__ = details
    return details
|
||||
|
||||
|
||||
def _extract_field_schema_pv2(model: type[BaseModel], field_name: str) -> ModelField | None:
|
||||
schema = model.__pydantic_core_schema__
|
||||
if schema["type"] == "definitions":
|
||||
schema = schema["schema"]
|
||||
|
||||
if schema["type"] != "model":
|
||||
return None
|
||||
|
||||
schema = cast("ModelSchema", schema)
|
||||
fields_schema = schema["schema"]
|
||||
if fields_schema["type"] != "model-fields":
|
||||
return None
|
||||
|
||||
fields_schema = cast("ModelFieldsSchema", fields_schema)
|
||||
field = fields_schema["fields"].get(field_name)
|
||||
if not field:
|
||||
return None
|
||||
|
||||
return cast("ModelField", field) # pyright: ignore[reportUnnecessaryCast]
|
||||
|
||||
|
||||
def validate_type(*, type_: type[_T], value: object) -> _T:
    """Strict validation that the given value matches the expected type"""
    # BaseModel subclasses go through pydantic's own object parsing
    if inspect.isclass(type_) and issubclass(type_, pydantic.BaseModel):
        return cast(_T, parse_obj(type_, value))

    # everything else (primitives, containers, unions, ...) is validated by
    # `_validate_non_model_type`, whose implementation differs per pydantic
    # major version (TypeAdapter on v2, a synthesized root model on v1)
    return cast(_T, _validate_non_model_type(type_=type_, value=value))
|
||||
|
||||
|
||||
def set_pydantic_config(typ: Any, config: pydantic.ConfigDict) -> None:
    """Attach a Pydantic config to ``typ`` via its ``__pydantic_config__`` attribute.

    Pydantic v2 reads this attribute when generating schemas for arbitrary
    classes; Pydantic v1 ignores it entirely, making this a no-op there.
    """
    typ.__pydantic_config__ = config
|
||||
|
||||
|
||||
# our use of subclassing here causes weirdness for type checkers,
# so we just pretend that we don't subclass
if TYPE_CHECKING:
    GenericModel = BaseModel
else:

    class GenericModel(BaseGenericModel, BaseModel):
        # runtime-only bridge: combines the generic base with our BaseModel so
        # generic models share our construct/serialization behaviour
        pass
|
||||
|
||||
|
||||
if PYDANTIC_V2:
    from pydantic import TypeAdapter as _TypeAdapter

    # memoise TypeAdapter instances per type so repeated validations of the
    # same type reuse one adapter instead of constructing a new one each call
    _CachedTypeAdapter = cast("TypeAdapter[object]", lru_cache(maxsize=None)(_TypeAdapter))

    if TYPE_CHECKING:
        from pydantic import TypeAdapter
    else:
        TypeAdapter = _CachedTypeAdapter

    def _validate_non_model_type(*, type_: type[_T], value: object) -> _T:
        # pydantic v2: TypeAdapter handles arbitrary (non-BaseModel) types
        return TypeAdapter(type_).validate_python(value)

elif not TYPE_CHECKING:  # TODO: condition is weird

    class RootModel(GenericModel, Generic[_T]):
        """Used as a placeholder to easily convert runtime types to a Pydantic format
        to provide validation.

        For example:
        ```py
        validated = RootModel[int](__root__="5").__root__
        # validated: 5
        ```
        """

        __root__: _T

    def _validate_non_model_type(*, type_: type[_T], value: object) -> _T:
        # pydantic v1: wrap the type in a synthetic root model and validate
        # the value against it, then unwrap the validated result
        model = _create_pydantic_model(type_).validate(value)
        return cast(_T, model.__root__)

    def _create_pydantic_model(type_: _T) -> Type[RootModel[_T]]:
        return RootModel[type_]  # type: ignore
|
||||
|
||||
|
||||
class FinalRequestOptionsInput(TypedDict, total=False):
    # Keyword-argument shape accepted by `FinalRequestOptions.construct()`.
    # Only `method` and `url` are required; all other keys are optional.
    method: Required[str]
    url: Required[str]
    params: Query
    headers: Headers
    max_retries: int
    timeout: float | Timeout | None
    files: HttpxRequestFiles | None
    idempotency_key: str
    json_data: Body
    extra_json: AnyMapping
    follow_redirects: bool
|
||||
|
||||
|
||||
@final
class FinalRequestOptions(pydantic.BaseModel):
    # Fully-resolved options for a single HTTP request. `NotGiven` sentinel
    # defaults distinguish "not provided" from an explicit `None`.
    method: str
    url: str
    params: Query = {}
    headers: Union[Headers, NotGiven] = NotGiven()
    max_retries: Union[int, NotGiven] = NotGiven()
    timeout: Union[float, Timeout, None, NotGiven] = NotGiven()
    files: Union[HttpxRequestFiles, None] = None
    idempotency_key: Union[str, None] = None
    post_parser: Union[Callable[[Any], Any], NotGiven] = NotGiven()
    follow_redirects: Union[bool, None] = None

    # It should be noted that we cannot use `json` here as that would override
    # a BaseModel method in an incompatible fashion.
    json_data: Union[Body, None] = None
    extra_json: Union[AnyMapping, None] = None

    # arbitrary (non-pydantic) field types such as Timeout require this config
    if PYDANTIC_V2:
        model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True)
    else:

        class Config(pydantic.BaseConfig):  # pyright: ignore[reportDeprecated]
            arbitrary_types_allowed: bool = True

    def get_max_retries(self, max_retries: int) -> int:
        """Return the configured max retries, falling back to the given default
        when the option was not provided."""
        if isinstance(self.max_retries, NotGiven):
            return max_retries
        return self.max_retries

    def _strip_raw_response_header(self) -> None:
        # Remove the internal raw-response marker header; copies the headers
        # mapping first so the caller's original mapping is never mutated.
        if not is_given(self.headers):
            return

        if self.headers.get(RAW_RESPONSE_HEADER):
            self.headers = {**self.headers}
            self.headers.pop(RAW_RESPONSE_HEADER)

    # override the `construct` method so that we can run custom transformations.
    # this is necessary as we don't want to do any actual runtime type checking
    # (which means we can't use validators) but we do want to ensure that `NotGiven`
    # values are not present
    #
    # type ignore required because we're adding explicit types to `**values`
    @classmethod
    def construct(  # type: ignore
        cls,
        _fields_set: set[str] | None = None,
        **values: Unpack[FinalRequestOptionsInput],
    ) -> FinalRequestOptions:
        kwargs: dict[str, Any] = {
            # we unconditionally call `strip_not_given` on any value
            # as it will just ignore any non-mapping types
            key: strip_not_given(value)
            for key, value in values.items()
        }
        if PYDANTIC_V2:
            return super().model_construct(_fields_set, **kwargs)
        return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs))  # pyright: ignore[reportDeprecated]

    if not TYPE_CHECKING:
        # type checkers incorrectly complain about this assignment
        model_construct = construct
|
||||
150
src/opencode/_qs.py
Normal file
150
src/opencode/_qs.py
Normal file
|
|
@ -0,0 +1,150 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from typing import Any, List, Tuple, Union, Mapping, TypeVar
|
||||
from urllib.parse import parse_qs, urlencode
|
||||
from typing_extensions import Literal, get_args
|
||||
|
||||
from ._types import NOT_GIVEN, NotGiven, NotGivenOr
|
||||
from ._utils import flatten
|
||||
|
||||
_T = TypeVar("_T")


# How array values are serialized: "repeat" -> `a=1&a=2`, "comma" -> `a=1,2`,
# "brackets" -> `a[]=1&a[]=2`; "indices" is declared but not implemented yet.
ArrayFormat = Literal["comma", "repeat", "indices", "brackets"]
# How nested mapping keys are serialized: "dots" -> `a.b`, "brackets" -> `a[b]`.
NestedFormat = Literal["dots", "brackets"]

PrimitiveData = Union[str, int, float, bool, None]
# this should be Data = Union[PrimitiveData, "List[Data]", "Tuple[Data]", "Mapping[str, Data]"]
# https://github.com/microsoft/pyright/issues/3555
Data = Union[PrimitiveData, List[Any], Tuple[Any], "Mapping[str, Any]"]
Params = Mapping[str, Data]
|
||||
|
||||
|
||||
class Querystring:
    """Serializes nested mappings and arrays into URL query strings, with
    configurable array and nesting formats (see `ArrayFormat`/`NestedFormat`).
    """

    array_format: ArrayFormat
    nested_format: NestedFormat

    def __init__(
        self,
        *,
        array_format: ArrayFormat = "repeat",
        nested_format: NestedFormat = "brackets",
    ) -> None:
        self.array_format = array_format
        self.nested_format = nested_format

    def parse(self, query: str) -> Mapping[str, object]:
        """Parse a query string into a mapping of key -> list of values."""
        # Note: custom format syntax is not supported yet
        return parse_qs(query)

    def stringify(
        self,
        params: Params,
        *,
        array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN,
        nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN,
    ) -> str:
        """Serialize `params` into a single URL-encoded query string."""
        return urlencode(
            self.stringify_items(
                params,
                array_format=array_format,
                nested_format=nested_format,
            )
        )

    def stringify_items(
        self,
        params: Params,
        *,
        array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN,
        nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN,
    ) -> list[tuple[str, str]]:
        """Serialize `params` into a flat list of `(key, value)` string pairs."""
        # per-call format overrides fall back to this instance's defaults
        opts = Options(
            qs=self,
            array_format=array_format,
            nested_format=nested_format,
        )
        return flatten([self._stringify_item(key, value, opts) for key, value in params.items()])

    def _stringify_item(
        self,
        key: str,
        value: Data,
        opts: Options,
    ) -> list[tuple[str, str]]:
        # Recursively flatten a single key/value pair into `(key, str)` items.
        if isinstance(value, Mapping):
            items: list[tuple[str, str]] = []
            nested_format = opts.nested_format
            for subkey, subvalue in value.items():
                items.extend(
                    self._stringify_item(
                        # TODO: error if unknown format
                        f"{key}.{subkey}" if nested_format == "dots" else f"{key}[{subkey}]",
                        subvalue,
                        opts,
                    )
                )
            return items

        if isinstance(value, (list, tuple)):
            array_format = opts.array_format
            if array_format == "comma":
                # single pair with comma-joined values; None entries are dropped
                return [
                    (
                        key,
                        ",".join(self._primitive_value_to_str(item) for item in value if item is not None),
                    ),
                ]
            elif array_format == "repeat":
                items = []
                for item in value:
                    items.extend(self._stringify_item(key, item, opts))
                return items
            elif array_format == "indices":
                raise NotImplementedError("The array indices format is not supported yet")
            elif array_format == "brackets":
                items = []
                key = key + "[]"
                for item in value:
                    items.extend(self._stringify_item(key, item, opts))
                return items
            else:
                raise NotImplementedError(
                    f"Unknown array_format value: {array_format}, choose from {', '.join(get_args(ArrayFormat))}"
                )

        # scalar value; empty serializations (None and "") are dropped entirely
        serialised = self._primitive_value_to_str(value)
        if not serialised:
            return []
        return [(key, serialised)]

    def _primitive_value_to_str(self, value: PrimitiveData) -> str:
        # copied from httpx
        if value is True:
            return "true"
        elif value is False:
            return "false"
        elif value is None:
            return ""
        return str(value)
|
||||
|
||||
|
||||
# module-level default instance plus convenience aliases so callers can use
# `parse(...)` / `stringify(...)` without constructing a Querystring themselves
_qs = Querystring()
parse = _qs.parse
stringify = _qs.stringify
stringify_items = _qs.stringify_items
|
||||
|
||||
|
||||
class Options:
    # Resolved per-call serialization options: any format not explicitly given
    # falls back to the owning Querystring's configured defaults.
    array_format: ArrayFormat
    nested_format: NestedFormat

    def __init__(
        self,
        qs: Querystring = _qs,
        *,
        array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN,
        nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN,
    ) -> None:
        # fall back to `qs`'s defaults for anything left as NOT_GIVEN
        self.array_format = qs.array_format if isinstance(array_format, NotGiven) else array_format
        self.nested_format = qs.nested_format if isinstance(nested_format, NotGiven) else nested_format
|
||||
43
src/opencode/_resource.py
Normal file
43
src/opencode/_resource.py
Normal file
|
|
@ -0,0 +1,43 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import time
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import anyio
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ._client import Opencode, AsyncOpencode
|
||||
|
||||
|
||||
class SyncAPIResource:
    """Base class for synchronous API resources.

    Stores the owning client and exposes its HTTP helpers (``get``, ``post``,
    ``patch``, ``put``, ``delete``, ``get_api_list``) as private shortcuts.
    """

    _client: Opencode

    def __init__(self, client: Opencode) -> None:
        self._client = client
        # mirror the client's request helpers as `_<verb>` attributes
        for verb in ("get", "post", "patch", "put", "delete", "get_api_list"):
            setattr(self, "_" + verb, getattr(client, verb))

    def _sleep(self, seconds: float) -> None:
        """Synchronously pause the current thread for ``seconds`` seconds."""
        time.sleep(seconds)
|
||||
|
||||
|
||||
class AsyncAPIResource:
    """Base class for asynchronous API resources.

    Stores the owning client and exposes its HTTP helpers (``get``, ``post``,
    ``patch``, ``put``, ``delete``, ``get_api_list``) as private shortcuts.
    """

    _client: AsyncOpencode

    def __init__(self, client: AsyncOpencode) -> None:
        self._client = client
        # mirror the client's request helpers as `_<verb>` attributes
        for verb in ("get", "post", "patch", "put", "delete", "get_api_list"):
            setattr(self, "_" + verb, getattr(client, verb))

    async def _sleep(self, seconds: float) -> None:
        """Asynchronously pause for ``seconds`` seconds without blocking the event loop."""
        await anyio.sleep(seconds)
|
||||
830
src/opencode/_response.py
Normal file
830
src/opencode/_response.py
Normal file
|
|
@ -0,0 +1,830 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import inspect
|
||||
import logging
|
||||
import datetime
|
||||
import functools
|
||||
from types import TracebackType
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
Union,
|
||||
Generic,
|
||||
TypeVar,
|
||||
Callable,
|
||||
Iterator,
|
||||
AsyncIterator,
|
||||
cast,
|
||||
overload,
|
||||
)
|
||||
from typing_extensions import Awaitable, ParamSpec, override, get_origin
|
||||
|
||||
import anyio
|
||||
import httpx
|
||||
import pydantic
|
||||
|
||||
from ._types import NoneType
|
||||
from ._utils import is_given, extract_type_arg, is_annotated_type, is_type_alias_type, extract_type_var_from_base
|
||||
from ._models import BaseModel, is_basemodel
|
||||
from ._constants import RAW_RESPONSE_HEADER, OVERRIDE_CAST_TO_HEADER
|
||||
from ._streaming import Stream, AsyncStream, is_stream_class_type, extract_stream_chunk_type
|
||||
from ._exceptions import OpencodeError, APIResponseValidationError
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ._models import FinalRequestOptions
|
||||
from ._base_client import BaseClient
|
||||
|
||||
|
||||
P = ParamSpec("P")
|
||||
R = TypeVar("R")
|
||||
_T = TypeVar("_T")
|
||||
_APIResponseT = TypeVar("_APIResponseT", bound="APIResponse[Any]")
|
||||
_AsyncAPIResponseT = TypeVar("_AsyncAPIResponseT", bound="AsyncAPIResponse[Any]")
|
||||
|
||||
log: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class BaseAPIResponse(Generic[R]):
    """Shared state and parsing machinery for sync/async API responses.

    Wraps the raw `httpx.Response` together with the request options and the
    target type (`_cast_to`) that `.parse()` should produce. Parsed results
    are cached per target type in `_parsed_by_type`.
    """

    _cast_to: type[R]
    _client: BaseClient[Any, Any]
    _parsed_by_type: dict[type[Any], Any]
    _is_sse_stream: bool
    _stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None
    _options: FinalRequestOptions

    http_response: httpx.Response

    retries_taken: int
    """The number of retries made. If no retries happened this will be `0`"""

    def __init__(
        self,
        *,
        raw: httpx.Response,
        cast_to: type[R],
        client: BaseClient[Any, Any],
        stream: bool,
        stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None,
        options: FinalRequestOptions,
        retries_taken: int = 0,
    ) -> None:
        self._cast_to = cast_to
        self._client = client
        self._parsed_by_type = {}
        self._is_sse_stream = stream
        self._stream_cls = stream_cls
        self._options = options
        self.http_response = raw
        self.retries_taken = retries_taken

    @property
    def headers(self) -> httpx.Headers:
        """The response headers."""
        return self.http_response.headers

    @property
    def http_request(self) -> httpx.Request:
        """Returns the httpx Request instance associated with the current response."""
        return self.http_response.request

    @property
    def status_code(self) -> int:
        """The HTTP status code of the response."""
        return self.http_response.status_code

    @property
    def url(self) -> httpx.URL:
        """Returns the URL for which the request was made."""
        return self.http_response.url

    @property
    def method(self) -> str:
        """The HTTP method of the originating request."""
        return self.http_request.method

    @property
    def http_version(self) -> str:
        """The HTTP protocol version used for the response."""
        return self.http_response.http_version

    @property
    def elapsed(self) -> datetime.timedelta:
        """The time taken for the complete request/response cycle to complete."""
        return self.http_response.elapsed

    @property
    def is_closed(self) -> bool:
        """Whether or not the response body has been closed.

        If this is False then there is response data that has not been read yet.
        You must either fully consume the response body or call `.close()`
        before discarding the response to prevent resource leaks.
        """
        return self.http_response.is_closed

    @override
    def __repr__(self) -> str:
        return (
            f"<{self.__class__.__name__} [{self.status_code} {self.http_response.reason_phrase}] type={self._cast_to}>"
        )

    def _parse(self, *, to: type[_T] | None = None) -> R | _T:
        """Convert the raw response into `to` (or `self._cast_to`).

        Handles SSE stream wrapping, primitive coercions (str/bytes/int/float/
        bool), `httpx.Response` passthrough, and JSON -> model parsing via the
        client's `_process_response_data`.
        """
        cast_to = to if to is not None else self._cast_to

        # unwrap `TypeAlias('Name', T)` -> `T`
        if is_type_alias_type(cast_to):
            cast_to = cast_to.__value__  # type: ignore[unreachable]

        # unwrap `Annotated[T, ...]` -> `T`
        if cast_to and is_annotated_type(cast_to):
            cast_to = extract_type_arg(cast_to, 0)

        origin = get_origin(cast_to) or cast_to

        if self._is_sse_stream:
            if to:
                if not is_stream_class_type(to):
                    raise TypeError(f"Expected custom parse type to be a subclass of {Stream} or {AsyncStream}")

                return cast(
                    _T,
                    to(
                        cast_to=extract_stream_chunk_type(
                            to,
                            failure_message="Expected custom stream type to be passed with a type argument, e.g. Stream[ChunkType]",
                        ),
                        response=self.http_response,
                        client=cast(Any, self._client),
                    ),
                )

            if self._stream_cls:
                return cast(
                    R,
                    self._stream_cls(
                        cast_to=extract_stream_chunk_type(self._stream_cls),
                        response=self.http_response,
                        client=cast(Any, self._client),
                    ),
                )

            # no explicit stream class anywhere; fall back to the client default
            stream_cls = cast("type[Stream[Any]] | type[AsyncStream[Any]] | None", self._client._default_stream_cls)
            if stream_cls is None:
                raise MissingStreamClassError()

            return cast(
                R,
                stream_cls(
                    cast_to=cast_to,
                    response=self.http_response,
                    client=cast(Any, self._client),
                ),
            )

        if cast_to is NoneType:
            return cast(R, None)

        response = self.http_response
        if cast_to == str:
            return cast(R, response.text)

        if cast_to == bytes:
            return cast(R, response.content)

        if cast_to == int:
            return cast(R, int(response.text))

        if cast_to == float:
            return cast(R, float(response.text))

        if cast_to == bool:
            return cast(R, response.text.lower() == "true")

        if origin == APIResponse:
            raise RuntimeError("Unexpected state - cast_to is `APIResponse`")

        if inspect.isclass(origin) and issubclass(origin, httpx.Response):
            # Because of the invariance of our ResponseT TypeVar, users can subclass httpx.Response
            # and pass that class to our request functions. We cannot change the variance to be either
            # covariant or contravariant as that makes our usage of ResponseT illegal. We could construct
            # the response class ourselves but that is something that should be supported directly in httpx
            # as it would be easy to incorrectly construct the Response object due to the multitude of arguments.
            if cast_to != httpx.Response:
                raise ValueError("Subclasses of httpx.Response cannot be passed to `cast_to`")
            return cast(R, response)

        if (
            inspect.isclass(
                origin  # pyright: ignore[reportUnknownArgumentType]
            )
            and not issubclass(origin, BaseModel)
            and issubclass(origin, pydantic.BaseModel)
        ):
            raise TypeError("Pydantic models must subclass our base model type, e.g. `from opencode import BaseModel`")

        if (
            cast_to is not object
            and origin is not list
            and origin is not dict
            and origin is not Union
            and not issubclass(origin, BaseModel)
        ):
            raise RuntimeError(
                f"Unsupported type, expected {cast_to} to be a subclass of {BaseModel}, {dict}, {list}, {Union}, {NoneType}, {str} or {httpx.Response}."
            )

        # split is required to handle cases where additional information is included
        # in the response, e.g. application/json; charset=utf-8
        content_type, *_ = response.headers.get("content-type", "*").split(";")
        if not content_type.endswith("json"):
            if is_basemodel(cast_to):
                # the server may still have sent JSON despite the content type;
                # try to parse it before giving up
                try:
                    data = response.json()
                except Exception as exc:
                    log.debug("Could not read JSON from response data due to %s - %s", type(exc), exc)
                else:
                    return self._client._process_response_data(
                        data=data,
                        cast_to=cast_to,  # type: ignore
                        response=response,
                    )

            if self._client._strict_response_validation:
                raise APIResponseValidationError(
                    response=response,
                    message=f"Expected Content-Type response header to be `application/json` but received `{content_type}` instead.",
                    body=response.text,
                )

            # If the API responds with content that isn't JSON then we just return
            # the (decoded) text without performing any parsing so that you can still
            # handle the response however you need to.
            return response.text  # type: ignore

        data = response.json()

        return self._client._process_response_data(
            data=data,
            cast_to=cast_to,  # type: ignore
            response=response,
        )
|
||||
|
||||
|
||||
class APIResponse(BaseAPIResponse[R]):
    # Synchronous response wrapper; see AsyncAPIResponse for the async twin.
    @overload
    def parse(self, *, to: type[_T]) -> _T: ...

    @overload
    def parse(self) -> R: ...

    def parse(self, *, to: type[_T] | None = None) -> R | _T:
        """Returns the rich python representation of this response's data.

        For lower-level control, see `.read()`, `.json()`, `.iter_bytes()`.

        You can customise the type that the response is parsed into through
        the `to` argument, e.g.

        ```py
        from opencode import BaseModel


        class MyModel(BaseModel):
            foo: str


        obj = response.parse(to=MyModel)
        print(obj.foo)
        ```

        We support parsing:
        - `BaseModel`
        - `dict`
        - `list`
        - `Union`
        - `str`
        - `int`
        - `float`
        - `httpx.Response`
        """
        # parsed results are memoised per requested target type
        cache_key = to if to is not None else self._cast_to
        cached = self._parsed_by_type.get(cache_key)
        if cached is not None:
            return cached  # type: ignore[no-any-return]

        if not self._is_sse_stream:
            # ensure the body is fully read before parsing (streams are lazy)
            self.read()

        parsed = self._parse(to=to)
        if is_given(self._options.post_parser):
            parsed = self._options.post_parser(parsed)

        self._parsed_by_type[cache_key] = parsed
        return parsed

    def read(self) -> bytes:
        """Read and return the binary response content."""
        try:
            return self.http_response.read()
        except httpx.StreamConsumed as exc:
            # The default error raised by httpx isn't very
            # helpful in our case so we re-raise it with
            # a different error message.
            raise StreamAlreadyConsumed() from exc

    def text(self) -> str:
        """Read and decode the response content into a string."""
        self.read()
        return self.http_response.text

    def json(self) -> object:
        """Read and decode the JSON response content."""
        self.read()
        return self.http_response.json()

    def close(self) -> None:
        """Close the response and release the connection.

        Automatically called if the response body is read to completion.
        """
        self.http_response.close()

    def iter_bytes(self, chunk_size: int | None = None) -> Iterator[bytes]:
        """
        A byte-iterator over the decoded response content.

        This automatically handles gzip, deflate and brotli encoded responses.
        """
        for chunk in self.http_response.iter_bytes(chunk_size):
            yield chunk

    def iter_text(self, chunk_size: int | None = None) -> Iterator[str]:
        """A str-iterator over the decoded response content
        that handles both gzip, deflate, etc but also detects the content's
        string encoding.
        """
        for chunk in self.http_response.iter_text(chunk_size):
            yield chunk

    def iter_lines(self) -> Iterator[str]:
        """Like `iter_text()` but will only yield chunks for each line"""
        for chunk in self.http_response.iter_lines():
            yield chunk
|
||||
|
||||
|
||||
class AsyncAPIResponse(BaseAPIResponse[R]):
    # Asynchronous response wrapper; mirrors APIResponse with awaitable I/O.
    @overload
    async def parse(self, *, to: type[_T]) -> _T: ...

    @overload
    async def parse(self) -> R: ...

    async def parse(self, *, to: type[_T] | None = None) -> R | _T:
        """Returns the rich python representation of this response's data.

        For lower-level control, see `.read()`, `.json()`, `.iter_bytes()`.

        You can customise the type that the response is parsed into through
        the `to` argument, e.g.

        ```py
        from opencode import BaseModel


        class MyModel(BaseModel):
            foo: str


        obj = response.parse(to=MyModel)
        print(obj.foo)
        ```

        We support parsing:
        - `BaseModel`
        - `dict`
        - `list`
        - `Union`
        - `str`
        - `int`
        - `float`
        - `httpx.Response`
        """
        # parsed results are memoised per requested target type
        cache_key = to if to is not None else self._cast_to
        cached = self._parsed_by_type.get(cache_key)
        if cached is not None:
            return cached  # type: ignore[no-any-return]

        if not self._is_sse_stream:
            # ensure the body is fully read before parsing (streams are lazy)
            await self.read()

        parsed = self._parse(to=to)
        if is_given(self._options.post_parser):
            parsed = self._options.post_parser(parsed)

        self._parsed_by_type[cache_key] = parsed
        return parsed

    async def read(self) -> bytes:
        """Read and return the binary response content."""
        try:
            return await self.http_response.aread()
        except httpx.StreamConsumed as exc:
            # the default error raised by httpx isn't very
            # helpful in our case so we re-raise it with
            # a different error message
            raise StreamAlreadyConsumed() from exc

    async def text(self) -> str:
        """Read and decode the response content into a string."""
        await self.read()
        return self.http_response.text

    async def json(self) -> object:
        """Read and decode the JSON response content."""
        await self.read()
        return self.http_response.json()

    async def close(self) -> None:
        """Close the response and release the connection.

        Automatically called if the response body is read to completion.
        """
        await self.http_response.aclose()

    async def iter_bytes(self, chunk_size: int | None = None) -> AsyncIterator[bytes]:
        """
        A byte-iterator over the decoded response content.

        This automatically handles gzip, deflate and brotli encoded responses.
        """
        async for chunk in self.http_response.aiter_bytes(chunk_size):
            yield chunk

    async def iter_text(self, chunk_size: int | None = None) -> AsyncIterator[str]:
        """A str-iterator over the decoded response content
        that handles both gzip, deflate, etc but also detects the content's
        string encoding.
        """
        async for chunk in self.http_response.aiter_text(chunk_size):
            yield chunk

    async def iter_lines(self) -> AsyncIterator[str]:
        """Like `iter_text()` but will only yield chunks for each line"""
        async for chunk in self.http_response.aiter_lines():
            yield chunk
|
||||
|
||||
|
||||
class BinaryAPIResponse(APIResponse[bytes]):
    """APIResponse specialised for binary payloads, with a file-writing helper.

    Note: If you want to stream the response data instead of eagerly reading it
    all at once then you should use `.with_streaming_response` when making
    the API request, e.g. `.with_streaming_response.get_binary_response()`
    """

    def write_to_file(
        self,
        file: str | os.PathLike[str],
    ) -> None:
        """Write the full response body to the given file.

        Accepts a filename or any path-like object, e.g. pathlib.Path.

        Note: to stream the data to disk rather than buffering it in memory
        first, use `.with_streaming_response` when making the API request,
        e.g. `.with_streaming_response.get_binary_response()`
        """
        with open(file, mode="wb") as out:
            for chunk in self.iter_bytes():
                out.write(chunk)
|
||||
|
||||
|
||||
class AsyncBinaryAPIResponse(AsyncAPIResponse[bytes]):
    """AsyncAPIResponse specialised for binary payloads, with a file-writing helper.

    Note: If you want to stream the response data instead of eagerly reading it
    all at once then you should use `.with_streaming_response` when making
    the API request, e.g. `.with_streaming_response.get_binary_response()`
    """

    async def write_to_file(
        self,
        file: str | os.PathLike[str],
    ) -> None:
        """Write the full response body to the given file.

        Accepts a filename or any path-like object, e.g. pathlib.Path.

        Note: to stream the data to disk rather than buffering it in memory
        first, use `.with_streaming_response` when making the API request,
        e.g. `.with_streaming_response.get_binary_response()`
        """
        destination = anyio.Path(file)
        async with await destination.open(mode="wb") as out:
            async for chunk in self.iter_bytes():
                await out.write(chunk)
|
||||
|
||||
|
||||
class StreamedBinaryAPIResponse(APIResponse[bytes]):
    """`APIResponse` subclass for streaming binary payloads to disk."""

    def stream_to_file(
        self,
        file: str | os.PathLike[str],
        *,
        chunk_size: int | None = None,
    ) -> None:
        """Stream the response body to the given file, chunk by chunk.

        Accepts a filename or any path-like object, e.g. pathlib.Path
        """
        with open(file, "wb") as handle:
            for chunk in self.iter_bytes(chunk_size):
                handle.write(chunk)
|
||||
|
||||
|
||||
class AsyncStreamedBinaryAPIResponse(AsyncAPIResponse[bytes]):
    """Async `APIResponse` subclass for streaming binary payloads to disk."""

    async def stream_to_file(
        self,
        file: str | os.PathLike[str],
        *,
        chunk_size: int | None = None,
    ) -> None:
        """Stream the response body to the given file, chunk by chunk.

        Accepts a filename or any path-like object, e.g. pathlib.Path
        """
        destination = anyio.Path(file)
        handle = await destination.open(mode="wb")
        async with handle as out:
            async for chunk in self.iter_bytes(chunk_size):
                await out.write(chunk)
|
||||
|
||||
|
||||
class MissingStreamClassError(TypeError):
    """Raised when `stream=True` was requested without a matching `stream_cls`."""

    def __init__(self) -> None:
        message = "The `stream` argument was set to `True` but the `stream_cls` argument was not given. See `opencode._streaming` for reference"
        super().__init__(message)
|
||||
|
||||
|
||||
class StreamAlreadyConsumed(OpencodeError):
    """
    Attempted to read or stream content, but the content has already
    been streamed.

    This can happen if you use a method like `.iter_lines()` and then attempt
    to read the entire response body afterwards, e.g.

    ```py
    response = await client.post(...)
    async for line in response.iter_lines():
        ...  # do something with `line`

    content = await response.read()
    # ^ error
    ```

    If you want this behaviour you'll need to either manually accumulate the response
    content or call `await response.read()` before iterating over the stream.
    """

    def __init__(self) -> None:
        # Runtime message mirrors the class docstring; keep the two in sync.
        message = (
            "Attempted to read or stream some content, but the content has "
            "already been streamed. "
            "This could be due to attempting to stream the response "
            "content more than once."
            "\n\n"
            "You can fix this by manually accumulating the response content while streaming "
            "or by calling `.read()` before starting to stream."
        )
        super().__init__(message)
|
||||
|
||||
|
||||
class ResponseContextManager(Generic[_APIResponseT]):
    """Defers the wrapped request until `__enter__` is called and guarantees
    that the resulting response is closed when the context manager exits.
    """

    def __init__(self, request_func: Callable[[], _APIResponseT]) -> None:
        self._request_func = request_func
        self.__response: _APIResponseT | None = None

    def __enter__(self) -> _APIResponseT:
        # The request is only issued here, not at construction time.
        response = self._request_func()
        self.__response = response
        return response

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        response = self.__response
        if response is not None:
            response.close()
|
||||
|
||||
|
||||
class AsyncResponseContextManager(Generic[_AsyncAPIResponseT]):
    """Defers awaiting the wrapped request until `__aenter__` and guarantees
    that the resulting response is closed when the context manager exits.
    """

    def __init__(self, api_request: Awaitable[_AsyncAPIResponseT]) -> None:
        self._api_request = api_request
        self.__response: _AsyncAPIResponseT | None = None

    async def __aenter__(self) -> _AsyncAPIResponseT:
        # The request coroutine is only awaited here, not at construction time.
        response = await self._api_request
        self.__response = response
        return response

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        response = self.__response
        if response is not None:
            await response.close()
|
||||
|
||||
|
||||
def to_streamed_response_wrapper(func: Callable[P, R]) -> Callable[P, ResponseContextManager[APIResponse[R]]]:
    """Wrap a bound API method so calling it returns a context manager that
    streams the raw `APIResponse` object instead of the parsed model.
    """

    @functools.wraps(func)
    def wrapped(*args: P.args, **kwargs: P.kwargs) -> ResponseContextManager[APIResponse[R]]:
        # Tell the base client that a streamed raw response is wanted.
        headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {}), RAW_RESPONSE_HEADER: "stream"}
        kwargs["extra_headers"] = headers
        return ResponseContextManager(
            cast(Callable[[], APIResponse[R]], functools.partial(func, *args, **kwargs))
        )

    return wrapped
|
||||
|
||||
|
||||
def async_to_streamed_response_wrapper(
    func: Callable[P, Awaitable[R]],
) -> Callable[P, AsyncResponseContextManager[AsyncAPIResponse[R]]]:
    """Wrap a bound async API method so calling it returns an async context
    manager that streams the raw `AsyncAPIResponse` instead of the parsed model.
    """

    @functools.wraps(func)
    def wrapped(*args: P.args, **kwargs: P.kwargs) -> AsyncResponseContextManager[AsyncAPIResponse[R]]:
        # Tell the base client that a streamed raw response is wanted.
        headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {}), RAW_RESPONSE_HEADER: "stream"}
        kwargs["extra_headers"] = headers
        # The coroutine is created here but not awaited until `__aenter__`.
        return AsyncResponseContextManager(cast(Awaitable[AsyncAPIResponse[R]], func(*args, **kwargs)))

    return wrapped
|
||||
|
||||
|
||||
def to_custom_streamed_response_wrapper(
    func: Callable[P, object],
    response_cls: type[_APIResponseT],
) -> Callable[P, ResponseContextManager[_APIResponseT]]:
    """Wrap a bound API method so calling it returns a context manager that
    streams the response as an instance of the given `APIResponse` subclass.

    Note: the given `response_cls` *must* be concrete, e.g. `class BinaryAPIResponse(APIResponse[bytes])`
    """

    @functools.wraps(func)
    def wrapped(*args: P.args, **kwargs: P.kwargs) -> ResponseContextManager[_APIResponseT]:
        # Request streaming and override the response class the client builds.
        headers: dict[str, Any] = {
            **(cast(Any, kwargs.get("extra_headers")) or {}),
            RAW_RESPONSE_HEADER: "stream",
            OVERRIDE_CAST_TO_HEADER: response_cls,
        }
        kwargs["extra_headers"] = headers
        return ResponseContextManager(
            cast(Callable[[], _APIResponseT], functools.partial(func, *args, **kwargs))
        )

    return wrapped
|
||||
|
||||
|
||||
def async_to_custom_streamed_response_wrapper(
    func: Callable[P, Awaitable[object]],
    response_cls: type[_AsyncAPIResponseT],
) -> Callable[P, AsyncResponseContextManager[_AsyncAPIResponseT]]:
    """Wrap a bound async API method so calling it returns an async context
    manager that streams the response as the given `APIResponse` subclass.

    Note: the given `response_cls` *must* be concrete, e.g. `class BinaryAPIResponse(APIResponse[bytes])`
    """

    @functools.wraps(func)
    def wrapped(*args: P.args, **kwargs: P.kwargs) -> AsyncResponseContextManager[_AsyncAPIResponseT]:
        # Request streaming and override the response class the client builds.
        headers: dict[str, Any] = {
            **(cast(Any, kwargs.get("extra_headers")) or {}),
            RAW_RESPONSE_HEADER: "stream",
            OVERRIDE_CAST_TO_HEADER: response_cls,
        }
        kwargs["extra_headers"] = headers
        # The coroutine is created here but not awaited until `__aenter__`.
        return AsyncResponseContextManager(cast(Awaitable[_AsyncAPIResponseT], func(*args, **kwargs)))

    return wrapped
|
||||
|
||||
|
||||
def to_raw_response_wrapper(func: Callable[P, R]) -> Callable[P, APIResponse[R]]:
    """Wrap a bound API method so that it returns the raw `APIResponse` object
    directly instead of the parsed model.
    """

    @functools.wraps(func)
    def wrapped(*args: P.args, **kwargs: P.kwargs) -> APIResponse[R]:
        # Tell the base client to hand back the raw response.
        headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {}), RAW_RESPONSE_HEADER: "raw"}
        kwargs["extra_headers"] = headers
        return cast(APIResponse[R], func(*args, **kwargs))

    return wrapped
|
||||
|
||||
|
||||
def async_to_raw_response_wrapper(func: Callable[P, Awaitable[R]]) -> Callable[P, Awaitable[AsyncAPIResponse[R]]]:
    """Wrap a bound async API method so that it returns the raw
    `AsyncAPIResponse` object directly instead of the parsed model.
    """

    @functools.wraps(func)
    async def wrapped(*args: P.args, **kwargs: P.kwargs) -> AsyncAPIResponse[R]:
        # Tell the base client to hand back the raw response.
        headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {}), RAW_RESPONSE_HEADER: "raw"}
        kwargs["extra_headers"] = headers
        return cast(AsyncAPIResponse[R], await func(*args, **kwargs))

    return wrapped
|
||||
|
||||
|
||||
def to_custom_raw_response_wrapper(
    func: Callable[P, object],
    response_cls: type[_APIResponseT],
) -> Callable[P, _APIResponseT]:
    """Wrap a bound API method so that it returns an instance of the given
    `APIResponse` subclass directly.

    Note: the given `response_cls` *must* be concrete, e.g. `class BinaryAPIResponse(APIResponse[bytes])`
    """

    @functools.wraps(func)
    def wrapped(*args: P.args, **kwargs: P.kwargs) -> _APIResponseT:
        # Request the raw response and override the response class the client builds.
        headers: dict[str, Any] = {
            **(cast(Any, kwargs.get("extra_headers")) or {}),
            RAW_RESPONSE_HEADER: "raw",
            OVERRIDE_CAST_TO_HEADER: response_cls,
        }
        kwargs["extra_headers"] = headers
        return cast(_APIResponseT, func(*args, **kwargs))

    return wrapped
|
||||
|
||||
|
||||
def async_to_custom_raw_response_wrapper(
    func: Callable[P, Awaitable[object]],
    response_cls: type[_AsyncAPIResponseT],
) -> Callable[P, Awaitable[_AsyncAPIResponseT]]:
    """Wrap a bound async API method so that awaiting it yields an instance of
    the given `APIResponse` subclass directly.

    Note: the given `response_cls` *must* be concrete, e.g. `class BinaryAPIResponse(APIResponse[bytes])`
    """

    @functools.wraps(func)
    def wrapped(*args: P.args, **kwargs: P.kwargs) -> Awaitable[_AsyncAPIResponseT]:
        # Request the raw response and override the response class the client builds.
        headers: dict[str, Any] = {
            **(cast(Any, kwargs.get("extra_headers")) or {}),
            RAW_RESPONSE_HEADER: "raw",
            OVERRIDE_CAST_TO_HEADER: response_cls,
        }
        kwargs["extra_headers"] = headers
        return cast(Awaitable[_AsyncAPIResponseT], func(*args, **kwargs))

    return wrapped
|
||||
|
||||
|
||||
def extract_response_type(typ: type[BaseAPIResponse[Any]]) -> type:
    """Given a type like `APIResponse[T]`, returns the generic type variable `T`.

    This also handles the case where a concrete subclass is given, e.g.
    ```py
    class MyResponse(APIResponse[bytes]):
        ...


    extract_response_type(MyResponse) -> bytes
    ```
    """
    # The type variable may be bound on any of the three response base classes;
    # index 0 selects the first (and only) type parameter.
    return extract_type_var_from_base(
        typ,
        generic_bases=cast("tuple[type, ...]", (BaseAPIResponse, APIResponse, AsyncAPIResponse)),
        index=0,
    )
|
||||
333
src/opencode/_streaming.py
Normal file
333
src/opencode/_streaming.py
Normal file
|
|
@ -0,0 +1,333 @@
|
|||
# Note: initially copied from https://github.com/florimondmanca/httpx-sse/blob/master/src/httpx_sse/_decoders.py
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import inspect
|
||||
from types import TracebackType
|
||||
from typing import TYPE_CHECKING, Any, Generic, TypeVar, Iterator, AsyncIterator, cast
|
||||
from typing_extensions import Self, Protocol, TypeGuard, override, get_origin, runtime_checkable
|
||||
|
||||
import httpx
|
||||
|
||||
from ._utils import extract_type_var_from_base
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ._client import Opencode, AsyncOpencode
|
||||
|
||||
|
||||
_T = TypeVar("_T")
|
||||
|
||||
|
||||
class Stream(Generic[_T]):
    """Provides the core interface to iterate over a synchronous stream response."""

    # The underlying HTTP response whose body is being decoded as SSE.
    response: httpx.Response

    _decoder: SSEBytesDecoder

    def __init__(
        self,
        *,
        cast_to: type[_T],
        response: httpx.Response,
        client: Opencode,
    ) -> None:
        self.response = response
        self._cast_to = cast_to
        self._client = client
        self._decoder = client._make_sse_decoder()
        # Create the generator once so `__next__` and `__iter__` share a
        # single pass over the response body.
        self._iterator = self.__stream__()

    def __next__(self) -> _T:
        return self._iterator.__next__()

    def __iter__(self) -> Iterator[_T]:
        for item in self._iterator:
            yield item

    def _iter_events(self) -> Iterator[ServerSentEvent]:
        # Decode raw bytes from the HTTP response into SSE events.
        yield from self._decoder.iter_bytes(self.response.iter_bytes())

    def __stream__(self) -> Iterator[_T]:
        """Yield each SSE `data:` payload parsed as JSON and cast to `_T`."""
        cast_to = cast(Any, self._cast_to)
        response = self.response
        process_data = self._client._process_response_data
        iterator = self._iter_events()

        for sse in iterator:
            # `sse.json()` parses the event data — payloads are expected to be JSON.
            yield process_data(data=sse.json(), cast_to=cast_to, response=response)

        # Ensure the entire stream is consumed
        for _sse in iterator:
            ...

    def __enter__(self) -> Self:
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        self.close()

    def close(self) -> None:
        """
        Close the response and release the connection.

        Automatically called if the response body is read to completion.
        """
        self.response.close()
|
||||
|
||||
|
||||
class AsyncStream(Generic[_T]):
    """Provides the core interface to iterate over an asynchronous stream response."""

    # The underlying HTTP response whose body is being decoded as SSE.
    response: httpx.Response

    _decoder: SSEDecoder | SSEBytesDecoder

    def __init__(
        self,
        *,
        cast_to: type[_T],
        response: httpx.Response,
        client: AsyncOpencode,
    ) -> None:
        self.response = response
        self._cast_to = cast_to
        self._client = client
        self._decoder = client._make_sse_decoder()
        # Create the async generator once so `__anext__` and `__aiter__`
        # share a single pass over the response body.
        self._iterator = self.__stream__()

    async def __anext__(self) -> _T:
        return await self._iterator.__anext__()

    async def __aiter__(self) -> AsyncIterator[_T]:
        async for item in self._iterator:
            yield item

    async def _iter_events(self) -> AsyncIterator[ServerSentEvent]:
        # Decode raw bytes from the HTTP response into SSE events.
        async for sse in self._decoder.aiter_bytes(self.response.aiter_bytes()):
            yield sse

    async def __stream__(self) -> AsyncIterator[_T]:
        """Yield each SSE `data:` payload parsed as JSON and cast to `_T`."""
        cast_to = cast(Any, self._cast_to)
        response = self.response
        process_data = self._client._process_response_data
        iterator = self._iter_events()

        async for sse in iterator:
            # `sse.json()` parses the event data — payloads are expected to be JSON.
            yield process_data(data=sse.json(), cast_to=cast_to, response=response)

        # Ensure the entire stream is consumed
        async for _sse in iterator:
            ...

    async def __aenter__(self) -> Self:
        return self

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        await self.close()

    async def close(self) -> None:
        """
        Close the response and release the connection.

        Automatically called if the response body is read to completion.
        """
        await self.response.aclose()
|
||||
|
||||
|
||||
class ServerSentEvent:
    """A single decoded Server-Sent Events message."""

    def __init__(
        self,
        *,
        event: str | None = None,
        data: str | None = None,
        id: str | None = None,
        retry: int | None = None,
    ) -> None:
        self._id = id
        # Missing data is normalized to the empty string.
        self._data = "" if data is None else data
        self._event = event or None
        self._retry = retry

    @property
    def event(self) -> str | None:
        return self._event

    @property
    def id(self) -> str | None:
        return self._id

    @property
    def retry(self) -> int | None:
        return self._retry

    @property
    def data(self) -> str:
        return self._data

    def json(self) -> Any:
        """Parse the event data as JSON."""
        return json.loads(self._data)

    def __repr__(self) -> str:
        return f"ServerSentEvent(event={self.event}, data={self.data}, id={self.id}, retry={self.retry})"
|
||||
|
||||
|
||||
class SSEDecoder:
    """Incremental decoder for the Server-Sent Events wire format."""

    _data: list[str]  # accumulated `data:` lines for the in-progress event
    _event: str | None  # current `event:` field, if any
    _retry: int | None  # current `retry:` field, if any
    _last_event_id: str | None  # last seen `id:` field (persists across events)

    def __init__(self) -> None:
        self._event = None
        self._data = []
        self._last_event_id = None
        self._retry = None

    def iter_bytes(self, iterator: Iterator[bytes]) -> Iterator[ServerSentEvent]:
        """Given an iterator that yields raw binary data, iterate over it & yield every event encountered"""
        for chunk in self._iter_chunks(iterator):
            # Split before decoding so splitlines() only uses \r and \n
            for raw_line in chunk.splitlines():
                line = raw_line.decode("utf-8")
                sse = self.decode(line)
                if sse:
                    yield sse

    def _iter_chunks(self, iterator: Iterator[bytes]) -> Iterator[bytes]:
        """Given an iterator that yields raw binary data, iterate over it and yield individual SSE chunks"""
        data = b""
        for chunk in iterator:
            for line in chunk.splitlines(keepends=True):
                data += line
                # A blank line (double newline, any line-ending style) terminates an event.
                if data.endswith((b"\r\r", b"\n\n", b"\r\n\r\n")):
                    yield data
                    data = b""
        # Flush any trailing partial chunk.
        if data:
            yield data

    async def aiter_bytes(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[ServerSentEvent]:
        """Given an iterator that yields raw binary data, iterate over it & yield every event encountered"""
        async for chunk in self._aiter_chunks(iterator):
            # Split before decoding so splitlines() only uses \r and \n
            for raw_line in chunk.splitlines():
                line = raw_line.decode("utf-8")
                sse = self.decode(line)
                if sse:
                    yield sse

    async def _aiter_chunks(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[bytes]:
        """Given an iterator that yields raw binary data, iterate over it and yield individual SSE chunks"""
        data = b""
        async for chunk in iterator:
            for line in chunk.splitlines(keepends=True):
                data += line
                # A blank line (double newline, any line-ending style) terminates an event.
                if data.endswith((b"\r\r", b"\n\n", b"\r\n\r\n")):
                    yield data
                    data = b""
        # Flush any trailing partial chunk.
        if data:
            yield data

    def decode(self, line: str) -> ServerSentEvent | None:
        """Feed a single line into the decoder.

        Returns a completed `ServerSentEvent` when a blank line ends an event,
        otherwise accumulates state and returns None.
        """
        # See: https://html.spec.whatwg.org/multipage/server-sent-events.html#event-stream-interpretation  # noqa: E501

        if not line:
            # Blank line: dispatch the accumulated event, unless nothing was seen.
            if not self._event and not self._data and not self._last_event_id and self._retry is None:
                return None

            sse = ServerSentEvent(
                event=self._event,
                data="\n".join(self._data),
                id=self._last_event_id,
                retry=self._retry,
            )

            # NOTE: as per the SSE spec, do not reset last_event_id.
            self._event = None
            self._data = []
            self._retry = None

            return sse

        if line.startswith(":"):
            # Comment line per the SSE spec — ignored.
            return None

        fieldname, _, value = line.partition(":")

        # A single leading space in the value is stripped per the spec.
        if value.startswith(" "):
            value = value[1:]

        if fieldname == "event":
            self._event = value
        elif fieldname == "data":
            self._data.append(value)
        elif fieldname == "id":
            # Per the spec, ids containing NUL are ignored.
            if "\0" in value:
                pass
            else:
                self._last_event_id = value
        elif fieldname == "retry":
            try:
                self._retry = int(value)
            except (TypeError, ValueError):
                pass
        else:
            pass  # Field is ignored.

        return None
|
||||
|
||||
|
||||
@runtime_checkable
class SSEBytesDecoder(Protocol):
    """Structural interface for SSE decoders operating on raw byte iterators.

    Note: `runtime_checkable` only checks method presence in `isinstance`
    checks, not signatures.
    """

    def iter_bytes(self, iterator: Iterator[bytes]) -> Iterator[ServerSentEvent]:
        """Given an iterator that yields raw binary data, iterate over it & yield every event encountered"""
        ...

    def aiter_bytes(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[ServerSentEvent]:
        """Given an async iterator that yields raw binary data, iterate over it & yield every event encountered"""
        ...
|
||||
|
||||
|
||||
def is_stream_class_type(typ: type) -> TypeGuard[type[Stream[object]] | type[AsyncStream[object]]]:
    """TypeGuard for determining whether or not the given type is a subclass of `Stream` / `AsyncStream`"""
    # `get_origin` unwraps parameterized aliases like `Stream[Chunk]` to the
    # runtime class; fall back to `typ` itself for plain (unsubscripted) types.
    origin = get_origin(typ) or typ
    return inspect.isclass(origin) and issubclass(origin, (Stream, AsyncStream))
|
||||
|
||||
|
||||
def extract_stream_chunk_type(
    stream_cls: type,
    *,
    failure_message: str | None = None,
) -> type:
    """Given a type like `Stream[T]`, returns the generic type variable `T`.

    This also handles the case where a concrete subclass is given, e.g.
    ```py
    class MyStream(Stream[bytes]):
        ...


    extract_stream_chunk_type(MyStream) -> bytes
    ```
    """
    # Local import — presumably to avoid an import cycle with `._base_client`;
    # note it shadows this module's own Stream/AsyncStream names. TODO confirm.
    from ._base_client import Stream, AsyncStream

    return extract_type_var_from_base(
        stream_cls,
        index=0,
        generic_bases=cast("tuple[type, ...]", (Stream, AsyncStream)),
        failure_message=failure_message,
    )
|
||||
219
src/opencode/_types.py
Normal file
219
src/opencode/_types.py
Normal file
|
|
@ -0,0 +1,219 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from os import PathLike
|
||||
from typing import (
|
||||
IO,
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
Dict,
|
||||
List,
|
||||
Type,
|
||||
Tuple,
|
||||
Union,
|
||||
Mapping,
|
||||
TypeVar,
|
||||
Callable,
|
||||
Optional,
|
||||
Sequence,
|
||||
)
|
||||
from typing_extensions import Set, Literal, Protocol, TypeAlias, TypedDict, override, runtime_checkable
|
||||
|
||||
import httpx
|
||||
import pydantic
|
||||
from httpx import URL, Proxy, Timeout, Response, BaseTransport, AsyncBaseTransport
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ._models import BaseModel
|
||||
from ._response import APIResponse, AsyncAPIResponse
|
||||
|
||||
Transport = BaseTransport
|
||||
AsyncTransport = AsyncBaseTransport
|
||||
Query = Mapping[str, object]
|
||||
Body = object
|
||||
AnyMapping = Mapping[str, object]
|
||||
ModelT = TypeVar("ModelT", bound=pydantic.BaseModel)
|
||||
_T = TypeVar("_T")
|
||||
|
||||
|
||||
# Approximates httpx internal ProxiesTypes and RequestFiles types
|
||||
# while adding support for `PathLike` instances
|
||||
ProxiesDict = Dict["str | URL", Union[None, str, URL, Proxy]]
|
||||
ProxiesTypes = Union[str, Proxy, ProxiesDict]
|
||||
if TYPE_CHECKING:
|
||||
Base64FileInput = Union[IO[bytes], PathLike[str]]
|
||||
FileContent = Union[IO[bytes], bytes, PathLike[str]]
|
||||
else:
|
||||
Base64FileInput = Union[IO[bytes], PathLike]
|
||||
FileContent = Union[IO[bytes], bytes, PathLike] # PathLike is not subscriptable in Python 3.8.
|
||||
FileTypes = Union[
|
||||
# file (or bytes)
|
||||
FileContent,
|
||||
# (filename, file (or bytes))
|
||||
Tuple[Optional[str], FileContent],
|
||||
# (filename, file (or bytes), content_type)
|
||||
Tuple[Optional[str], FileContent, Optional[str]],
|
||||
# (filename, file (or bytes), content_type, headers)
|
||||
Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]],
|
||||
]
|
||||
RequestFiles = Union[Mapping[str, FileTypes], Sequence[Tuple[str, FileTypes]]]
|
||||
|
||||
# duplicate of the above but without our custom file support
|
||||
HttpxFileContent = Union[IO[bytes], bytes]
|
||||
HttpxFileTypes = Union[
|
||||
# file (or bytes)
|
||||
HttpxFileContent,
|
||||
# (filename, file (or bytes))
|
||||
Tuple[Optional[str], HttpxFileContent],
|
||||
# (filename, file (or bytes), content_type)
|
||||
Tuple[Optional[str], HttpxFileContent, Optional[str]],
|
||||
# (filename, file (or bytes), content_type, headers)
|
||||
Tuple[Optional[str], HttpxFileContent, Optional[str], Mapping[str, str]],
|
||||
]
|
||||
HttpxRequestFiles = Union[Mapping[str, HttpxFileTypes], Sequence[Tuple[str, HttpxFileTypes]]]
|
||||
|
||||
# Workaround to support (cast_to: Type[ResponseT]) -> ResponseT
|
||||
# where ResponseT includes `None`. In order to support directly
|
||||
# passing `None`, overloads would have to be defined for every
|
||||
# method that uses `ResponseT` which would lead to an unacceptable
|
||||
# amount of code duplication and make it unreadable. See _base_client.py
|
||||
# for example usage.
|
||||
#
|
||||
# This unfortunately means that you will either have
|
||||
# to import this type and pass it explicitly:
|
||||
#
|
||||
# from opencode import NoneType
|
||||
# client.get('/foo', cast_to=NoneType)
|
||||
#
|
||||
# or build it yourself:
|
||||
#
|
||||
# client.get('/foo', cast_to=type(None))
|
||||
if TYPE_CHECKING:
|
||||
NoneType: Type[None]
|
||||
else:
|
||||
NoneType = type(None)
|
||||
|
||||
|
||||
class RequestOptions(TypedDict, total=False):
    """Per-request overrides accepted by client request methods.

    All keys are optional (`total=False`); absent keys fall back to
    client-level defaults.
    """

    headers: Headers  # extra/override request headers
    max_retries: int  # per-request retry-count override
    timeout: float | Timeout | None  # per-request timeout; None disables it
    params: Query  # additional query string parameters
    extra_json: AnyMapping  # extra properties merged into the JSON body
    idempotency_key: str  # key used to deduplicate retried requests
    follow_redirects: bool  # per-request redirect behavior override
|
||||
|
||||
|
||||
# Sentinel class used until PEP 0661 is accepted
|
||||
class NotGiven:
    """
    A sentinel singleton class used to distinguish omitted keyword arguments
    from those passed in with the value None (which may have different behavior).

    For example:

    ```py
    def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ...


    get(timeout=1)  # 1s timeout
    get(timeout=None)  # No timeout
    get()  # Default timeout behavior, which may not be statically known at the method definition.
    ```
    """

    def __bool__(self) -> Literal[False]:
        # Always falsy so `if not value:` treats an omitted argument like a default.
        return False

    def __repr__(self) -> str:
        return "NOT_GIVEN"
|
||||
|
||||
|
||||
NotGivenOr = Union[_T, NotGiven]
|
||||
NOT_GIVEN = NotGiven()
|
||||
|
||||
|
||||
class Omit:
    """In certain situations you need to be able to represent a case where a default value has
    to be explicitly removed and `None` is not an appropriate substitute, for example:

    ```py
    # as the default `Content-Type` header is `application/json` that will be sent
    client.post("/upload/files", files={"file": b"my raw file content"})

    # you can't explicitly override the header as it has to be dynamically generated
    # to look something like: 'multipart/form-data; boundary=0d8382fcf5f8c3be01ca2e11002d2983'
    client.post(..., headers={"Content-Type": "multipart/form-data"})

    # instead you can remove the default `application/json` header by passing Omit
    client.post(..., headers={"Content-Type": Omit()})
    ```
    """

    def __bool__(self) -> Literal[False]:
        # Always falsy so header-merging logic can drop omitted entries.
        return False
|
||||
|
||||
|
||||
@runtime_checkable
class ModelBuilderProtocol(Protocol):
    """Protocol for model types that construct themselves from a raw HTTP
    response plus already-parsed data (instead of plain validation)."""

    @classmethod
    def build(
        cls: type[_T],
        *,
        response: Response,
        data: object,
    ) -> _T: ...
|
||||
|
||||
|
||||
Headers = Mapping[str, Union[str, Omit]]
|
||||
|
||||
|
||||
class HeadersLikeProtocol(Protocol):
|
||||
def get(self, __key: str) -> str | None: ...
|
||||
|
||||
|
||||
HeadersLike = Union[Headers, HeadersLikeProtocol]
|
||||
|
||||
ResponseT = TypeVar(
|
||||
"ResponseT",
|
||||
bound=Union[
|
||||
object,
|
||||
str,
|
||||
None,
|
||||
"BaseModel",
|
||||
List[Any],
|
||||
Dict[str, Any],
|
||||
Response,
|
||||
ModelBuilderProtocol,
|
||||
"APIResponse[Any]",
|
||||
"AsyncAPIResponse[Any]",
|
||||
],
|
||||
)
|
||||
|
||||
StrBytesIntFloat = Union[str, bytes, int, float]
|
||||
|
||||
# Note: copied from Pydantic
|
||||
# https://github.com/pydantic/pydantic/blob/6f31f8f68ef011f84357330186f603ff295312fd/pydantic/main.py#L79
|
||||
IncEx: TypeAlias = Union[Set[int], Set[str], Mapping[int, Union["IncEx", bool]], Mapping[str, Union["IncEx", bool]]]
|
||||
|
||||
PostParser = Callable[[Any], Any]
|
||||
|
||||
|
||||
@runtime_checkable
|
||||
class InheritsGeneric(Protocol):
|
||||
"""Represents a type that has inherited from `Generic`
|
||||
|
||||
The `__orig_bases__` property can be used to determine the resolved
|
||||
type variable for a given base class.
|
||||
"""
|
||||
|
||||
__orig_bases__: tuple[_GenericAlias]
|
||||
|
||||
|
||||
class _GenericAlias(Protocol):
|
||||
__origin__: type[object]
|
||||
|
||||
|
||||
class HttpxSendArgs(TypedDict, total=False):
|
||||
auth: httpx.Auth
|
||||
follow_redirects: bool
|
||||
57
src/opencode/_utils/__init__.py
Normal file
57
src/opencode/_utils/__init__.py
Normal file
|
|
@ -0,0 +1,57 @@
|
|||
from ._sync import asyncify as asyncify
|
||||
from ._proxy import LazyProxy as LazyProxy
|
||||
from ._utils import (
|
||||
flatten as flatten,
|
||||
is_dict as is_dict,
|
||||
is_list as is_list,
|
||||
is_given as is_given,
|
||||
is_tuple as is_tuple,
|
||||
json_safe as json_safe,
|
||||
lru_cache as lru_cache,
|
||||
is_mapping as is_mapping,
|
||||
is_tuple_t as is_tuple_t,
|
||||
parse_date as parse_date,
|
||||
is_iterable as is_iterable,
|
||||
is_sequence as is_sequence,
|
||||
coerce_float as coerce_float,
|
||||
is_mapping_t as is_mapping_t,
|
||||
removeprefix as removeprefix,
|
||||
removesuffix as removesuffix,
|
||||
extract_files as extract_files,
|
||||
is_sequence_t as is_sequence_t,
|
||||
required_args as required_args,
|
||||
coerce_boolean as coerce_boolean,
|
||||
coerce_integer as coerce_integer,
|
||||
file_from_path as file_from_path,
|
||||
parse_datetime as parse_datetime,
|
||||
strip_not_given as strip_not_given,
|
||||
deepcopy_minimal as deepcopy_minimal,
|
||||
get_async_library as get_async_library,
|
||||
maybe_coerce_float as maybe_coerce_float,
|
||||
get_required_header as get_required_header,
|
||||
maybe_coerce_boolean as maybe_coerce_boolean,
|
||||
maybe_coerce_integer as maybe_coerce_integer,
|
||||
)
|
||||
from ._typing import (
|
||||
is_list_type as is_list_type,
|
||||
is_union_type as is_union_type,
|
||||
extract_type_arg as extract_type_arg,
|
||||
is_iterable_type as is_iterable_type,
|
||||
is_required_type as is_required_type,
|
||||
is_annotated_type as is_annotated_type,
|
||||
is_type_alias_type as is_type_alias_type,
|
||||
strip_annotated_type as strip_annotated_type,
|
||||
extract_type_var_from_base as extract_type_var_from_base,
|
||||
)
|
||||
from ._streams import consume_sync_iterator as consume_sync_iterator, consume_async_iterator as consume_async_iterator
|
||||
from ._transform import (
|
||||
PropertyInfo as PropertyInfo,
|
||||
transform as transform,
|
||||
async_transform as async_transform,
|
||||
maybe_transform as maybe_transform,
|
||||
async_maybe_transform as async_maybe_transform,
|
||||
)
|
||||
from ._reflection import (
|
||||
function_has_argument as function_has_argument,
|
||||
assert_signatures_in_sync as assert_signatures_in_sync,
|
||||
)
|
||||
25
src/opencode/_utils/_logs.py
Normal file
25
src/opencode/_utils/_logs.py
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
import os
import logging

# Package-level logger plus the logger used by the underlying HTTP client (httpx).
logger: logging.Logger = logging.getLogger("opencode")
httpx_logger: logging.Logger = logging.getLogger("httpx")


def _basic_config() -> None:
    """Install the default stderr handler with a timestamped log format.

    e.g. [2023-10-05 14:12:26 - opencode._base_client:818 - DEBUG] HTTP Request: POST http://127.0.0.1:4010/foo/bar "200 OK"
    """
    logging.basicConfig(
        format="[%(asctime)s - %(name)s:%(lineno)d - %(levelname)s] %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
    )


def setup_logging() -> None:
    """Configure SDK logging from the `OPENCODE_LOG` environment variable.

    Recognized values are "debug" and "info"; any other value (or no value)
    leaves logging configuration untouched.
    """
    env = os.environ.get("OPENCODE_LOG")
    if env not in ("debug", "info"):
        return
    _basic_config()
    level = logging.DEBUG if env == "debug" else logging.INFO
    logger.setLevel(level)
    httpx_logger.setLevel(level)
|
||||
65
src/opencode/_utils/_proxy.py
Normal file
65
src/opencode/_utils/_proxy.py
Normal file
|
|
@ -0,0 +1,65 @@
|
|||
from __future__ import annotations

from abc import ABC, abstractmethod
from typing import Generic, TypeVar, Iterable, cast
from typing_extensions import override

# Type of the object being proxied.
T = TypeVar("T")


class LazyProxy(Generic[T], ABC):
    """Implements data methods to pretend that an instance is another instance.

    This includes forwarding attribute access and other methods.

    Subclasses implement `__load__()` to produce the proxied object; the proxy
    re-resolves it via `__get_proxied__()` on every access.
    """

    # Note: we have to special case proxies that themselves return proxies
    # to support using a proxy as a catch-all for any random access, e.g. `proxy.foo.bar.baz`

    def __getattr__(self, attr: str) -> object:
        # Only called when normal attribute lookup fails, so attributes defined
        # on the proxy class itself are never forwarded.
        proxied = self.__get_proxied__()
        if isinstance(proxied, LazyProxy):
            return proxied  # pyright: ignore
        return getattr(proxied, attr)

    @override
    def __repr__(self) -> str:
        proxied = self.__get_proxied__()
        if isinstance(proxied, LazyProxy):
            # avoid recursing into a proxy-of-proxy; just show the class name
            return proxied.__class__.__name__
        return repr(self.__get_proxied__())

    @override
    def __str__(self) -> str:
        proxied = self.__get_proxied__()
        if isinstance(proxied, LazyProxy):
            return proxied.__class__.__name__
        return str(proxied)

    @override
    def __dir__(self) -> Iterable[str]:
        proxied = self.__get_proxied__()
        if isinstance(proxied, LazyProxy):
            return []
        return proxied.__dir__()

    # Overriding `__class__` makes `isinstance()` checks against the proxy
    # report the proxied object's class instead of the proxy's.
    @property  # type: ignore
    @override
    def __class__(self) -> type:  # pyright: ignore
        try:
            proxied = self.__get_proxied__()
        except Exception:
            # loading failed; fall back to the proxy's own type
            return type(self)
        if issubclass(type(proxied), LazyProxy):
            return type(proxied)
        return proxied.__class__

    def __get_proxied__(self) -> T:
        # no caching here: the target is loaded on every access
        return self.__load__()

    def __as_proxied__(self) -> T:
        """Helper method that returns the current proxy, typed as the loaded object"""
        return cast(T, self)

    @abstractmethod
    def __load__(self) -> T: ...
|
||||
42
src/opencode/_utils/_reflection.py
Normal file
42
src/opencode/_utils/_reflection.py
Normal file
|
|
@ -0,0 +1,42 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import inspect
|
||||
from typing import Any, Callable
|
||||
|
||||
|
||||
def function_has_argument(func: Callable[..., Any], arg_name: str) -> bool:
    """Return True if `func`'s signature declares a parameter named `arg_name`."""
    return arg_name in inspect.signature(func).parameters
|
||||
|
||||
|
||||
def assert_signatures_in_sync(
    source_func: Callable[..., Any],
    check_func: Callable[..., Any],
    *,
    exclude_params: set[str] = set(),
) -> None:
    """Ensure that the signature of the second function matches the first.

    Args:
        source_func: The function whose signature is the source of truth.
        check_func: The function whose signature is validated against `source_func`.
        exclude_params: Parameter names to skip during comparison. The default is a
            shared empty set; it is never mutated, so that is safe.

    Raises:
        AssertionError: listing every missing parameter and annotation mismatch.
    """
    check_sig = inspect.signature(check_func)
    source_sig = inspect.signature(source_func)

    errors: list[str] = []

    for name, source_param in source_sig.parameters.items():
        if name in exclude_params:
            continue

        custom_param = check_sig.parameters.get(name)
        if not custom_param:
            errors.append(f"the `{name}` param is missing")
            continue

        if custom_param.annotation != source_param.annotation:
            # fixed grammar: was "are do not match"
            errors.append(
                f"types for the `{name}` param do not match; source={repr(source_param.annotation)} checking={repr(custom_param.annotation)}"
            )

    if errors:
        raise AssertionError(f"{len(errors)} errors encountered when comparing signatures:\n\n" + "\n\n".join(errors))
|
||||
24
src/opencode/_utils/_resources_proxy.py
Normal file
24
src/opencode/_utils/_resources_proxy.py
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
from typing_extensions import override
|
||||
|
||||
from ._proxy import LazyProxy
|
||||
|
||||
|
||||
class ResourcesProxy(LazyProxy[Any]):
    """Lazy stand-in for the `opencode.resources` module.

    Lets users write `opencode.resources` after `import opencode` while the
    real module is only imported on first use.
    """

    @override
    def __load__(self) -> Any:
        # imported lazily so that merely importing `opencode` stays cheap
        import importlib

        return importlib.import_module("opencode.resources")
|
||||
|
||||
|
||||
resources = ResourcesProxy().__as_proxied__()
|
||||
12
src/opencode/_utils/_streams.py
Normal file
12
src/opencode/_utils/_streams.py
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
from typing import Any
|
||||
from typing_extensions import Iterator, AsyncIterator
|
||||
|
||||
|
||||
def consume_sync_iterator(iterator: Iterator[Any]) -> None:
    """Exhaust the given iterator, discarding every item."""
    for _item in iterator:
        pass
|
||||
|
||||
|
||||
async def consume_async_iterator(iterator: AsyncIterator[Any]) -> None:
    """Exhaust the given async iterator, discarding every item."""
    async for _item in iterator:
        pass
|
||||
86
src/opencode/_utils/_sync.py
Normal file
86
src/opencode/_utils/_sync.py
Normal file
|
|
@ -0,0 +1,86 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
import asyncio
|
||||
import functools
|
||||
import contextvars
|
||||
from typing import Any, TypeVar, Callable, Awaitable
|
||||
from typing_extensions import ParamSpec
|
||||
|
||||
import anyio
|
||||
import sniffio
|
||||
import anyio.to_thread
|
||||
|
||||
T_Retval = TypeVar("T_Retval")
|
||||
T_ParamSpec = ParamSpec("T_ParamSpec")
|
||||
|
||||
|
||||
if sys.version_info >= (3, 9):
    # asyncio.to_thread was added in Python 3.9; use it directly when available.
    _asyncio_to_thread = asyncio.to_thread
else:
    # backport of https://docs.python.org/3/library/asyncio-task.html#asyncio.to_thread
    # for Python 3.8 support
    async def _asyncio_to_thread(
        func: Callable[T_ParamSpec, T_Retval], /, *args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs
    ) -> Any:
        """Asynchronously run function *func* in a separate thread.

        Any *args and **kwargs supplied for this function are directly passed
        to *func*. Also, the current :class:`contextvars.Context` is propagated,
        allowing context variables from the main thread to be accessed in the
        separate thread.

        Returns a coroutine that can be awaited to get the eventual result of *func*.
        """
        loop = asyncio.events.get_running_loop()
        # copy_context() + ctx.run make the caller's context variables visible
        # inside the worker thread, matching asyncio.to_thread's behavior.
        ctx = contextvars.copy_context()
        func_call = functools.partial(ctx.run, func, *args, **kwargs)
        # None selects the event loop's default ThreadPoolExecutor
        return await loop.run_in_executor(None, func_call)
|
||||
|
||||
|
||||
async def to_thread(
    func: Callable[T_ParamSpec, T_Retval], /, *args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs
) -> T_Retval:
    """Run the blocking `func` in a worker thread and await its result.

    Dispatches on the running async library (via sniffio): under asyncio it uses
    `asyncio.to_thread` (or the 3.8 backport above); otherwise (e.g. trio) it
    falls back to anyio's thread offloading.
    """
    if sniffio.current_async_library() == "asyncio":
        return await _asyncio_to_thread(func, *args, **kwargs)

    # anyio's run_sync takes a zero-arg callable, so bind the arguments first
    return await anyio.to_thread.run_sync(
        functools.partial(func, *args, **kwargs),
    )
|
||||
|
||||
|
||||
# inspired by `asyncer`, https://github.com/tiangolo/asyncer
def asyncify(function: Callable[T_ParamSpec, T_Retval]) -> Callable[T_ParamSpec, Awaitable[T_Retval]]:
    """
    Take a blocking function and create an async one that receives the same
    positional and keyword arguments. For python version 3.9 and above, it uses
    asyncio.to_thread to run the function in a separate thread. For python version
    3.8, it uses locally defined copy of the asyncio.to_thread function which was
    introduced in python 3.9.

    Usage:

    ```python
    def blocking_func(arg1, arg2, kwarg1=None):
        # blocking code
        return result


    result = asyncify(blocking_function)(arg1, arg2, kwarg1=value1)
    ```

    ## Arguments

    `function`: a blocking regular callable (e.g. a function)

    ## Return

    An async function that takes the same positional and keyword arguments as the
    original one, that when called runs the same original function in a thread worker
    and returns the result.
    """

    # functools.wraps preserves the wrapped function's metadata (__name__,
    # __doc__, __wrapped__) so introspection and debugging see the original
    # callable rather than an anonymous `wrapper`.
    @functools.wraps(function)
    async def wrapper(*args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs) -> T_Retval:
        return await to_thread(function, *args, **kwargs)

    return wrapper
|
||||
447
src/opencode/_utils/_transform.py
Normal file
447
src/opencode/_utils/_transform.py
Normal file
|
|
@ -0,0 +1,447 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import io
|
||||
import base64
|
||||
import pathlib
|
||||
from typing import Any, Mapping, TypeVar, cast
|
||||
from datetime import date, datetime
|
||||
from typing_extensions import Literal, get_args, override, get_type_hints as _get_type_hints
|
||||
|
||||
import anyio
|
||||
import pydantic
|
||||
|
||||
from ._utils import (
|
||||
is_list,
|
||||
is_given,
|
||||
lru_cache,
|
||||
is_mapping,
|
||||
is_iterable,
|
||||
)
|
||||
from .._files import is_base64_file_input
|
||||
from ._typing import (
|
||||
is_list_type,
|
||||
is_union_type,
|
||||
extract_type_arg,
|
||||
is_iterable_type,
|
||||
is_required_type,
|
||||
is_annotated_type,
|
||||
strip_annotated_type,
|
||||
)
|
||||
from .._compat import get_origin, model_dump, is_typeddict
|
||||
|
||||
_T = TypeVar("_T")
|
||||
|
||||
|
||||
# TODO: support for drilling globals() and locals()
|
||||
# TODO: ensure works correctly with forward references in all cases
|
||||
|
||||
|
||||
PropertyFormat = Literal["iso8601", "base64", "custom"]
|
||||
|
||||
|
||||
class PropertyInfo:
|
||||
"""Metadata class to be used in Annotated types to provide information about a given type.
|
||||
|
||||
For example:
|
||||
|
||||
class MyParams(TypedDict):
|
||||
account_holder_name: Annotated[str, PropertyInfo(alias='accountHolderName')]
|
||||
|
||||
This means that {'account_holder_name': 'Robert'} will be transformed to {'accountHolderName': 'Robert'} before being sent to the API.
|
||||
"""
|
||||
|
||||
alias: str | None
|
||||
format: PropertyFormat | None
|
||||
format_template: str | None
|
||||
discriminator: str | None
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
alias: str | None = None,
|
||||
format: PropertyFormat | None = None,
|
||||
format_template: str | None = None,
|
||||
discriminator: str | None = None,
|
||||
) -> None:
|
||||
self.alias = alias
|
||||
self.format = format
|
||||
self.format_template = format_template
|
||||
self.discriminator = discriminator
|
||||
|
||||
@override
|
||||
def __repr__(self) -> str:
|
||||
return f"{self.__class__.__name__}(alias='{self.alias}', format={self.format}, format_template='{self.format_template}', discriminator='{self.discriminator}')"
|
||||
|
||||
|
||||
def maybe_transform(
|
||||
data: object,
|
||||
expected_type: object,
|
||||
) -> Any | None:
|
||||
"""Wrapper over `transform()` that allows `None` to be passed.
|
||||
|
||||
See `transform()` for more details.
|
||||
"""
|
||||
if data is None:
|
||||
return None
|
||||
return transform(data, expected_type)
|
||||
|
||||
|
||||
# Wrapper over _transform_recursive providing fake types
def transform(
    data: _T,
    expected_type: object,
) -> _T:
    """Transform dictionaries based off of type information from the given type, for example:

    ```py
    class Params(TypedDict, total=False):
        card_id: Required[Annotated[str, PropertyInfo(alias="cardID")]]


    transformed = transform({"card_id": "<my card ID>"}, Params)
    # {'cardID': '<my card ID>'}
    ```

    Any keys / data that does not have type information given will be included as is.

    It should be noted that the transformations that this function does are not represented in the type system.
    """
    # the cast to `type` mirrors the runtime reality: expected_type is treated as a type
    return cast(_T, _transform_recursive(data, annotation=cast(type, expected_type)))
|
||||
|
||||
|
||||
@lru_cache(maxsize=8096)
def _get_annotated_type(type_: type) -> type | None:
    """If the given type is an `Annotated` type then it is returned, if not `None` is returned.

    This also unwraps the type when applicable, e.g. `Required[Annotated[T, ...]]`
    """
    if is_required_type(type_):
        # Required[Annotated[T, ...]] -> Annotated[T, ...]
        type_ = get_args(type_)[0]

    return type_ if is_annotated_type(type_) else None
|
||||
|
||||
|
||||
def _maybe_transform_key(key: str, type_: type) -> str:
    """Return the wire-format key for `key`, honoring a `PropertyInfo(alias=...)` on `type_`.

    Note: only `Annotated` types carrying `PropertyInfo` metadata are considered;
    anything else leaves the key unchanged.
    """
    annotated_type = _get_annotated_type(type_)
    if annotated_type is not None:
        # index 0 is the underlying type itself, not metadata — skip it
        for meta in get_args(annotated_type)[1:]:
            if isinstance(meta, PropertyInfo) and meta.alias is not None:
                return meta.alias
    return key
|
||||
|
||||
|
||||
def _no_transform_needed(annotation: type) -> bool:
|
||||
return annotation == float or annotation == int
|
||||
|
||||
|
||||
def _transform_recursive(
    data: object,
    *,
    annotation: type,
    inner_type: type | None = None,
) -> object:
    """Transform the given data against the expected type.

    Args:
        annotation: The direct type annotation given to the particular piece of data.
            This may or may not be wrapped in metadata types, e.g. `Required[T]`, `Annotated[T, ...]` etc

        inner_type: If applicable, this is the "inside" type. This is useful in certain cases where the outside type
            is a container type such as `List[T]`. In that case `inner_type` should be set to `T` so that each entry in
            the list can be transformed using the metadata from the container type.

            Defaults to the same value as the `annotation` argument.
    """
    if inner_type is None:
        inner_type = annotation

    # dispatch on the *stripped* type (Annotated/Required wrappers removed)
    stripped_type = strip_annotated_type(inner_type)
    origin = get_origin(stripped_type) or stripped_type
    if is_typeddict(stripped_type) and is_mapping(data):
        return _transform_typeddict(data, stripped_type)

    if origin == dict and is_mapping(data):
        # Dict[K, V]: transform each value against V (index 1 of the args)
        items_type = get_args(stripped_type)[1]
        return {key: _transform_recursive(value, annotation=items_type) for key, value in data.items()}

    if (
        # List[T]
        (is_list_type(stripped_type) and is_list(data))
        # Iterable[T]
        or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str))
    ):
        # dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually
        # intended as an iterable, so we don't transform it.
        if isinstance(data, dict):
            return cast(object, data)

        inner_type = extract_type_arg(stripped_type, 0)
        if _no_transform_needed(inner_type):
            # for some types there is no need to transform anything, so we can get a small
            # perf boost from skipping that work.
            #
            # but we still need to convert to a list to ensure the data is json-serializable
            if is_list(data):
                return data
            return list(data)

        return [_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data]

    if is_union_type(stripped_type):
        # For union types we run the transformation against all subtypes to ensure that everything is transformed.
        #
        # TODO: there may be edge cases where the same normalized field name will transform to two different names
        # in different subtypes.
        for subtype in get_args(stripped_type):
            data = _transform_recursive(data, annotation=annotation, inner_type=subtype)
        return data

    if isinstance(data, pydantic.BaseModel):
        # serialize models with only their explicitly-set fields, in JSON mode
        return model_dump(data, exclude_unset=True, mode="json")

    # leaf value: apply any PropertyInfo format from the *outer* annotation
    annotated_type = _get_annotated_type(annotation)
    if annotated_type is None:
        return data

    # ignore the first argument as it is the actual type
    annotations = get_args(annotated_type)[1:]
    for annotation in annotations:
        if isinstance(annotation, PropertyInfo) and annotation.format is not None:
            return _format_data(data, annotation.format, annotation.format_template)

    return data
|
||||
|
||||
|
||||
def _format_data(data: object, format_: PropertyFormat, format_template: str | None) -> object:
    """Serialize `data` per the requested `PropertyInfo` format.

    Dates/datetimes honor "iso8601" and "custom" (strftime); file-like inputs
    honor "base64". Any unmatched combination returns `data` unchanged.
    """
    if isinstance(data, (date, datetime)):
        if format_ == "iso8601":
            return data.isoformat()
        if format_ == "custom" and format_template is not None:
            return data.strftime(format_template)

    if format_ == "base64" and is_base64_file_input(data):
        if isinstance(data, pathlib.Path):
            binary: str | bytes | None = data.read_bytes()
        elif isinstance(data, io.IOBase):
            binary = data.read()
        else:
            binary = None

        # some IO streams yield str; normalize to bytes before encoding
        if isinstance(binary, str):  # type: ignore[unreachable]
            binary = binary.encode()

        if not isinstance(binary, bytes):
            raise RuntimeError(f"Could not read bytes from {data}; Received {type(binary)}")

        return base64.b64encode(binary).decode("ascii")

    return data
|
||||
|
||||
|
||||
def _transform_typeddict(
    data: Mapping[str, object],
    expected_type: type,
) -> Mapping[str, object]:
    """Alias keys and transform values of `data` using `expected_type`'s annotations."""
    annotations = get_type_hints(expected_type, include_extras=True)
    result: dict[str, object] = {}
    for key, value in data.items():
        # NotGiven sentinels are dropped here; they would be stripped out
        # before the request is sent anyway.
        if not is_given(value):
            continue

        field_type = annotations.get(key)
        if field_type is None:
            # no type annotation for this field: pass it through untouched
            result[key] = value
        else:
            result[_maybe_transform_key(key, field_type)] = _transform_recursive(value, annotation=field_type)
    return result
|
||||
|
||||
|
||||
async def async_maybe_transform(
|
||||
data: object,
|
||||
expected_type: object,
|
||||
) -> Any | None:
|
||||
"""Wrapper over `async_transform()` that allows `None` to be passed.
|
||||
|
||||
See `async_transform()` for more details.
|
||||
"""
|
||||
if data is None:
|
||||
return None
|
||||
return await async_transform(data, expected_type)
|
||||
|
||||
|
||||
async def async_transform(
    data: _T,
    expected_type: object,
) -> _T:
    """Transform dictionaries based off of type information from the given type, for example:

    ```py
    class Params(TypedDict, total=False):
        card_id: Required[Annotated[str, PropertyInfo(alias="cardID")]]


    transformed = transform({"card_id": "<my card ID>"}, Params)
    # {'cardID': '<my card ID>'}
    ```

    Any keys / data that does not have type information given will be included as is.

    It should be noted that the transformations that this function does are not represented in the type system.
    """
    # the cast to `type` mirrors the runtime reality: expected_type is treated as a type
    return cast(_T, await _async_transform_recursive(data, annotation=cast(type, expected_type)))
|
||||
|
||||
|
||||
async def _async_transform_recursive(
    data: object,
    *,
    annotation: type,
    inner_type: type | None = None,
) -> object:
    """Transform the given data against the expected type.

    Args:
        annotation: The direct type annotation given to the particular piece of data.
            This may or may not be wrapped in metadata types, e.g. `Required[T]`, `Annotated[T, ...]` etc

        inner_type: If applicable, this is the "inside" type. This is useful in certain cases where the outside type
            is a container type such as `List[T]`. In that case `inner_type` should be set to `T` so that each entry in
            the list can be transformed using the metadata from the container type.

            Defaults to the same value as the `annotation` argument.
    """
    if inner_type is None:
        inner_type = annotation

    # dispatch on the *stripped* type (Annotated/Required wrappers removed)
    stripped_type = strip_annotated_type(inner_type)
    origin = get_origin(stripped_type) or stripped_type
    if is_typeddict(stripped_type) and is_mapping(data):
        return await _async_transform_typeddict(data, stripped_type)

    if origin == dict and is_mapping(data):
        items_type = get_args(stripped_type)[1]
        # Fix: recurse through the *async* variant so nested values (e.g. base64
        # file reads in `_async_format_data`) stay on the non-blocking path,
        # consistent with the list/typeddict branches below. Previously this
        # called the sync `_transform_recursive`.
        return {key: await _async_transform_recursive(value, annotation=items_type) for key, value in data.items()}

    if (
        # List[T]
        (is_list_type(stripped_type) and is_list(data))
        # Iterable[T]
        or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str))
    ):
        # dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually
        # intended as an iterable, so we don't transform it.
        if isinstance(data, dict):
            return cast(object, data)

        inner_type = extract_type_arg(stripped_type, 0)
        if _no_transform_needed(inner_type):
            # for some types there is no need to transform anything, so we can get a small
            # perf boost from skipping that work.
            #
            # but we still need to convert to a list to ensure the data is json-serializable
            if is_list(data):
                return data
            return list(data)

        return [await _async_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data]

    if is_union_type(stripped_type):
        # For union types we run the transformation against all subtypes to ensure that everything is transformed.
        #
        # TODO: there may be edge cases where the same normalized field name will transform to two different names
        # in different subtypes.
        for subtype in get_args(stripped_type):
            data = await _async_transform_recursive(data, annotation=annotation, inner_type=subtype)
        return data

    if isinstance(data, pydantic.BaseModel):
        # serialize models with only their explicitly-set fields, in JSON mode
        return model_dump(data, exclude_unset=True, mode="json")

    # leaf value: apply any PropertyInfo format from the *outer* annotation
    annotated_type = _get_annotated_type(annotation)
    if annotated_type is None:
        return data

    # ignore the first argument as it is the actual type
    annotations = get_args(annotated_type)[1:]
    for annotation in annotations:
        if isinstance(annotation, PropertyInfo) and annotation.format is not None:
            return await _async_format_data(data, annotation.format, annotation.format_template)

    return data
|
||||
|
||||
|
||||
async def _async_format_data(data: object, format_: PropertyFormat, format_template: str | None) -> object:
    """Async variant of `_format_data`; filesystem reads go through anyio to avoid blocking."""
    if isinstance(data, (date, datetime)):
        if format_ == "iso8601":
            return data.isoformat()
        if format_ == "custom" and format_template is not None:
            return data.strftime(format_template)

    if format_ == "base64" and is_base64_file_input(data):
        if isinstance(data, pathlib.Path):
            # async path read keeps the event loop unblocked
            binary: str | bytes | None = await anyio.Path(data).read_bytes()
        elif isinstance(data, io.IOBase):
            binary = data.read()
        else:
            binary = None

        # some IO streams yield str; normalize to bytes before encoding
        if isinstance(binary, str):  # type: ignore[unreachable]
            binary = binary.encode()

        if not isinstance(binary, bytes):
            raise RuntimeError(f"Could not read bytes from {data}; Received {type(binary)}")

        return base64.b64encode(binary).decode("ascii")

    return data
|
||||
|
||||
|
||||
async def _async_transform_typeddict(
    data: Mapping[str, object],
    expected_type: type,
) -> Mapping[str, object]:
    """Async variant of `_transform_typeddict`: alias keys and transform values."""
    annotations = get_type_hints(expected_type, include_extras=True)
    result: dict[str, object] = {}
    for key, value in data.items():
        # NotGiven sentinels are dropped here; they would be stripped out
        # before the request is sent anyway.
        if not is_given(value):
            continue

        field_type = annotations.get(key)
        if field_type is None:
            # no type annotation for this field: pass it through untouched
            result[key] = value
        else:
            result[_maybe_transform_key(key, field_type)] = await _async_transform_recursive(value, annotation=field_type)
    return result
|
||||
|
||||
|
||||
@lru_cache(maxsize=8096)
def get_type_hints(
    obj: Any,
    globalns: dict[str, Any] | None = None,
    localns: Mapping[str, Any] | None = None,
    include_extras: bool = False,
) -> dict[str, Any]:
    # Cached wrapper around typing_extensions.get_type_hints: resolving hints is
    # relatively expensive and the same types are inspected repeatedly during
    # request transformation.
    # NOTE(review): lru_cache requires hashable args — callers appear to pass
    # only `obj`/`include_extras`, leaving the mapping params as None; confirm.
    return _get_type_hints(obj, globalns=globalns, localns=localns, include_extras=include_extras)
|
||||
151
src/opencode/_utils/_typing.py
Normal file
151
src/opencode/_utils/_typing.py
Normal file
|
|
@ -0,0 +1,151 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
import typing
|
||||
import typing_extensions
|
||||
from typing import Any, TypeVar, Iterable, cast
|
||||
from collections import abc as _c_abc
|
||||
from typing_extensions import (
|
||||
TypeIs,
|
||||
Required,
|
||||
Annotated,
|
||||
get_args,
|
||||
get_origin,
|
||||
)
|
||||
|
||||
from ._utils import lru_cache
|
||||
from .._types import InheritsGeneric
|
||||
from .._compat import is_union as _is_union
|
||||
|
||||
|
||||
def is_annotated_type(typ: type) -> bool:
    """Whether `typ` is an `Annotated[...]` form."""
    origin = get_origin(typ)
    return origin == Annotated
|
||||
|
||||
|
||||
def is_list_type(typ: type) -> bool:
    """Whether `typ` is `list` itself or a parameterized `List[T]`."""
    origin = get_origin(typ) or typ
    return origin == list
|
||||
|
||||
|
||||
def is_iterable_type(typ: type) -> bool:
|
||||
"""If the given type is `typing.Iterable[T]`"""
|
||||
origin = get_origin(typ) or typ
|
||||
return origin == Iterable or origin == _c_abc.Iterable
|
||||
|
||||
|
||||
def is_union_type(typ: type) -> bool:
    # `_is_union` (from .._compat) is applied to the origin so both
    # `Union[X, Y]` and PEP 604 `X | Y` forms are recognized.
    return _is_union(get_origin(typ))
|
||||
|
||||
|
||||
def is_required_type(typ: type) -> bool:
|
||||
return get_origin(typ) == Required
|
||||
|
||||
|
||||
def is_typevar(typ: type) -> bool:
    # type ignore is required because type checkers
    # think this expression will always return False
    #
    # NOTE: an exact `type()` comparison is used (not isinstance), so only
    # plain TypeVar instances match — subclasses would not.
    return type(typ) == TypeVar  # type: ignore
|
||||
|
||||
|
||||
# Classes that represent a `type X = ...` alias at runtime. On 3.12+ the
# stdlib's typing.TypeAliasType is a distinct class from the
# typing_extensions backport, so both must be checked.
_TYPE_ALIAS_TYPES: tuple[type[typing_extensions.TypeAliasType], ...] = (typing_extensions.TypeAliasType,)
if sys.version_info >= (3, 12):
    _TYPE_ALIAS_TYPES = (*_TYPE_ALIAS_TYPES, typing.TypeAliasType)


def is_type_alias_type(tp: Any, /) -> TypeIs[typing_extensions.TypeAliasType]:
    """Return whether the provided argument is an instance of `TypeAliasType`.

    ```python
    type Int = int
    is_type_alias_type(Int)
    # > True
    Str = TypeAliasType("Str", str)
    is_type_alias_type(Str)
    # > True
    ```
    """
    return isinstance(tp, _TYPE_ALIAS_TYPES)
|
||||
|
||||
|
||||
# Extracts T from Annotated[T, ...] or from Required[Annotated[T, ...]]
@lru_cache(maxsize=8096)
def strip_annotated_type(typ: type) -> type:
    """Unwrap nested `Annotated[...]`/`Required[...]` wrappers, returning the inner type."""
    while is_required_type(typ) or is_annotated_type(typ):
        # the wrapped type is always the first type argument
        typ = cast(type, get_args(typ)[0])
    return typ
|
||||
|
||||
|
||||
def extract_type_arg(typ: type, index: int) -> type:
    """Return the `index`-th type argument of `typ`.

    Raises:
        RuntimeError: when `typ` has no type argument at that index.
    """
    try:
        return cast(type, get_args(typ)[index])
    except IndexError as err:
        raise RuntimeError(f"Expected type {typ} to have a type argument at index {index} but it did not") from err
|
||||
|
||||
|
||||
def extract_type_var_from_base(
    typ: type,
    *,
    generic_bases: tuple[type, ...],
    index: int,
    failure_message: str | None = None,
) -> type:
    """Given a type like `Foo[T]`, returns the generic type variable `T`.

    This also handles the case where a concrete subclass is given, e.g.
    ```py
    class MyResponse(Foo[bytes]):
        ...

    extract_type_var(MyResponse, bases=(Foo,), index=0) -> bytes
    ```

    And where a generic subclass is given:
    ```py
    _T = TypeVar('_T')
    class MyResponse(Foo[_T]):
        ...

    extract_type_var(MyResponse[bytes], bases=(Foo,), index=0) -> bytes
    ```
    """
    cls = cast(object, get_origin(typ) or typ)
    if cls in generic_bases:  # pyright: ignore[reportUnnecessaryContains]
        # we're given the class directly
        return extract_type_arg(typ, index)

    # if a subclass is given
    # ---
    # this is needed as __orig_bases__ is not present in the typeshed stubs
    # because it is intended to be for internal use only, however there does
    # not seem to be a way to resolve generic TypeVars for inherited subclasses
    # without using it.
    if isinstance(cls, InheritsGeneric):
        # find the (parameterized) base that matches one of `generic_bases`
        target_base_class: Any | None = None
        for base in cls.__orig_bases__:
            if base.__origin__ in generic_bases:
                target_base_class = base
                break

        if target_base_class is None:
            raise RuntimeError(
                "Could not find the generic base class;\n"
                "This should never happen;\n"
                f"Does {cls} inherit from one of {generic_bases} ?"
            )

        extracted = extract_type_arg(target_base_class, index)
        if is_typevar(extracted):
            # If the extracted type argument is itself a type variable
            # then that means the subclass itself is generic, so we have
            # to resolve the type argument from the class itself, not
            # the base class.
            #
            # Note: if there is more than 1 type argument, the subclass could
            # change the ordering of the type arguments, this is not currently
            # supported.
            return extract_type_arg(typ, index)

        return extracted

    raise RuntimeError(failure_message or f"Could not resolve inner type variable at index {index} for {typ}")
|
||||
422
src/opencode/_utils/_utils.py
Normal file
422
src/opencode/_utils/_utils.py
Normal file
|
|
@ -0,0 +1,422 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import re
|
||||
import inspect
|
||||
import functools
|
||||
from typing import (
|
||||
Any,
|
||||
Tuple,
|
||||
Mapping,
|
||||
TypeVar,
|
||||
Callable,
|
||||
Iterable,
|
||||
Sequence,
|
||||
cast,
|
||||
overload,
|
||||
)
|
||||
from pathlib import Path
|
||||
from datetime import date, datetime
|
||||
from typing_extensions import TypeGuard
|
||||
|
||||
import sniffio
|
||||
|
||||
from .._types import NotGiven, FileTypes, NotGivenOr, HeadersLike
|
||||
from .._compat import parse_date as parse_date, parse_datetime as parse_datetime
|
||||
|
||||
_T = TypeVar("_T")
|
||||
_TupleT = TypeVar("_TupleT", bound=Tuple[object, ...])
|
||||
_MappingT = TypeVar("_MappingT", bound=Mapping[str, object])
|
||||
_SequenceT = TypeVar("_SequenceT", bound=Sequence[object])
|
||||
CallableT = TypeVar("CallableT", bound=Callable[..., Any])
|
||||
|
||||
|
||||
def flatten(t: Iterable[Iterable[_T]]) -> list[_T]:
    """Flatten one level of nesting: ``[[a, b], [c]] -> [a, b, c]``."""
    result: list[_T] = []
    for sublist in t:
        result.extend(sublist)
    return result
|
||||
|
||||
|
||||
def extract_files(
    # TODO: this needs to take Dict but variance issues.....
    # create protocol type ?
    query: Mapping[str, object],
    *,
    paths: Sequence[Sequence[str]],
) -> list[tuple[str, FileTypes]]:
    """Recursively extract files from the given dictionary based on specified paths.

    A path may look like this ['foo', 'files', '<array>', 'data'].

    Note: this mutates the given dictionary.
    """
    # Walk every path and collect the (flattened_key, file) pairs in one pass.
    return [
        extracted
        for path in paths
        for extracted in _extract_items(query, path, index=0, flattened_key=None)
    ]
|
||||
|
||||
|
||||
def _extract_items(
    obj: object,
    path: Sequence[str],
    *,
    index: int,
    flattened_key: str | None,
) -> list[tuple[str, FileTypes]]:
    """Recursive worker for `extract_files`.

    Walks `path[index:]` into `obj`, popping the final dict entry it finds and
    returning it as a `(flattened_key, file)` pair (or several pairs when the
    leaf is a list). `flattened_key` accumulates the bracketed multipart form
    key, e.g. `foo[files][]`. Missing keys are silently skipped — a path may
    point at an optional field.
    """
    try:
        key = path[index]
    except IndexError:
        if isinstance(obj, NotGiven):
            # no value was provided - we can safely ignore
            return []

        # cyclical import
        from .._files import assert_is_file_content

        # We have exhausted the path, return the entry we found.
        assert flattened_key is not None

        if is_list(obj):
            # A list leaf produces one entry per element, keyed with a `[]` suffix.
            files: list[tuple[str, FileTypes]] = []
            for entry in obj:
                assert_is_file_content(entry, key=flattened_key + "[]" if flattened_key else "")
                files.append((flattened_key + "[]", cast(FileTypes, entry)))
            return files

        assert_is_file_content(obj, key=flattened_key)
        return [(flattened_key, cast(FileTypes, obj))]

    index += 1
    if is_dict(obj):
        try:
            # We are at the last entry in the path so we must remove the field
            if (len(path)) == index:
                item = obj.pop(key)
            else:
                item = obj[key]
        except KeyError:
            # Key was not present in the dictionary, this is not indicative of an error
            # as the given path may not point to a required field. We also do not want
            # to enforce required fields as the API may differ from the spec in some cases.
            return []
        if flattened_key is None:
            flattened_key = key
        else:
            flattened_key += f"[{key}]"
        return _extract_items(
            item,
            path,
            index=index,
            flattened_key=flattened_key,
        )
    elif is_list(obj):
        # Only descend into lists when the path explicitly expects one.
        if key != "<array>":
            return []

        return flatten(
            [
                _extract_items(
                    item,
                    path,
                    index=index,
                    flattened_key=flattened_key + "[]" if flattened_key is not None else "[]",
                )
                for item in obj
            ]
        )

    # Something unexpected was passed, just ignore it.
    return []
|
||||
|
||||
|
||||
def is_given(obj: NotGivenOr[_T]) -> TypeGuard[_T]:
    """Narrow an optional-sentinel value: True when `obj` is a real value, not `NotGiven`."""
    return not isinstance(obj, NotGiven)
|
||||
|
||||
|
||||
# Type safe methods for narrowing types with TypeVars.
|
||||
# The default narrowing for isinstance(obj, dict) is dict[unknown, unknown],
|
||||
# however this cause Pyright to rightfully report errors. As we know we don't
|
||||
# care about the contained types we can safely use `object` in it's place.
|
||||
#
|
||||
# There are two separate functions defined, `is_*` and `is_*_t` for different use cases.
|
||||
# `is_*` is for when you're dealing with an unknown input
|
||||
# `is_*_t` is for when you're narrowing a known union type to a specific subset
|
||||
|
||||
|
||||
def is_tuple(obj: object) -> TypeGuard[tuple[object, ...]]:
    """Narrow an unknown object to `tuple[object, ...]`."""
    return isinstance(obj, tuple)
|
||||
|
||||
|
||||
def is_tuple_t(obj: _TupleT | object) -> TypeGuard[_TupleT]:
    """Narrow a known union type down to its tuple member."""
    return isinstance(obj, tuple)
|
||||
|
||||
|
||||
def is_sequence(obj: object) -> TypeGuard[Sequence[object]]:
    """Narrow an unknown object to `Sequence[object]` (note: `str` is a Sequence too)."""
    return isinstance(obj, Sequence)
|
||||
|
||||
|
||||
def is_sequence_t(obj: _SequenceT | object) -> TypeGuard[_SequenceT]:
    """Narrow a known union type down to its Sequence member."""
    return isinstance(obj, Sequence)
|
||||
|
||||
|
||||
def is_mapping(obj: object) -> TypeGuard[Mapping[str, object]]:
    """Narrow an unknown object to `Mapping[str, object]`."""
    return isinstance(obj, Mapping)
|
||||
|
||||
|
||||
def is_mapping_t(obj: _MappingT | object) -> TypeGuard[_MappingT]:
    """Narrow a known union type down to its Mapping member."""
    return isinstance(obj, Mapping)
|
||||
|
||||
|
||||
def is_dict(obj: object) -> TypeGuard[dict[object, object]]:
    """Narrow an unknown object to `dict[object, object]`."""
    return isinstance(obj, dict)
|
||||
|
||||
|
||||
def is_list(obj: object) -> TypeGuard[list[object]]:
    """Narrow an unknown object to `list[object]`."""
    return isinstance(obj, list)
|
||||
|
||||
|
||||
def is_iterable(obj: object) -> TypeGuard[Iterable[object]]:
    """Narrow an unknown object to `Iterable[object]` (note: `str`/`bytes` are iterable too)."""
    return isinstance(obj, Iterable)
|
||||
|
||||
|
||||
def deepcopy_minimal(item: _T) -> _T:
    """Minimal reimplementation of copy.deepcopy() that will only copy certain object types:

    - mappings, e.g. `dict`
    - list

    This is done for performance reasons. Every other value is returned as-is
    (shared with the original structure).
    """
    if isinstance(item, Mapping):
        return cast("_T", {key: deepcopy_minimal(value) for key, value in item.items()})
    if isinstance(item, list):
        return cast("_T", [deepcopy_minimal(entry) for entry in item])
    return item
|
||||
|
||||
|
||||
# copied from https://github.com/Rapptz/RoboDanny
def human_join(seq: Sequence[str], *, delim: str = ", ", final: str = "or") -> str:
    """Join strings for human display, e.g. ``["a", "b", "c"] -> "a, b or c"``."""
    if not seq:
        return ""

    if len(seq) == 1:
        return seq[0]

    if len(seq) == 2:
        return f"{seq[0]} {final} {seq[1]}"

    head = delim.join(seq[:-1])
    return f"{head} {final} {seq[-1]}"
|
||||
|
||||
|
||||
def quote(string: str) -> str:
    """Add single quotation marks around the given string. Does *not* do any escaping."""
    return "'" + string + "'"
|
||||
|
||||
|
||||
def required_args(*variants: Sequence[str]) -> Callable[[CallableT], CallableT]:
    """Decorator to enforce a given set of arguments or variants of arguments are passed to the decorated function.

    Useful for enforcing runtime validation of overloaded functions.

    Example usage:
    ```py
    @overload
    def foo(*, a: str) -> str: ...


    @overload
    def foo(*, b: bool) -> str: ...


    # This enforces the same constraints that a static type checker would
    # i.e. that either a or b must be passed to the function
    @required_args(["a"], ["b"])
    def foo(*, a: str | None = None, b: bool | None = None) -> str: ...
    ```
    """

    def inner(func: CallableT) -> CallableT:
        params = inspect.signature(func).parameters
        # Names that can be supplied positionally, in declaration order, so a
        # positional argument at index i can be mapped back to its name.
        positional = [
            name
            for name, param in params.items()
            if param.kind
            in {
                param.POSITIONAL_ONLY,
                param.POSITIONAL_OR_KEYWORD,
            }
        ]

        @functools.wraps(func)
        def wrapper(*args: object, **kwargs: object) -> object:
            # Collect the set of parameter names the caller actually provided.
            given_params: set[str] = set()
            for i, _ in enumerate(args):
                try:
                    given_params.add(positional[i])
                except IndexError:
                    raise TypeError(
                        f"{func.__name__}() takes {len(positional)} argument(s) but {len(args)} were given"
                    ) from None

            for key in kwargs.keys():
                given_params.add(key)

            # Accept the call if ANY variant is fully satisfied.
            for variant in variants:
                matches = all((param in given_params for param in variant))
                if matches:
                    break
            else:  # no break — no variant matched, build an error message
                if len(variants) > 1:
                    variations = human_join(
                        ["(" + human_join([quote(arg) for arg in variant], final="and") + ")" for variant in variants]
                    )
                    msg = f"Missing required arguments; Expected either {variations} arguments to be given"
                else:
                    assert len(variants) > 0

                    # TODO: this error message is not deterministic
                    missing = list(set(variants[0]) - given_params)
                    if len(missing) > 1:
                        msg = f"Missing required arguments: {human_join([quote(arg) for arg in missing])}"
                    else:
                        msg = f"Missing required argument: {quote(missing[0])}"
                raise TypeError(msg)
            return func(*args, **kwargs)

        return wrapper  # type: ignore

    return inner
|
||||
|
||||
|
||||
_K = TypeVar("_K")
|
||||
_V = TypeVar("_V")
|
||||
|
||||
|
||||
@overload
def strip_not_given(obj: None) -> None: ...


@overload
def strip_not_given(obj: Mapping[_K, _V | NotGiven]) -> dict[_K, _V]: ...


@overload
def strip_not_given(obj: object) -> object: ...


def strip_not_given(obj: object | None) -> object:
    """Remove all top-level keys where their values are instances of `NotGiven`"""
    if obj is None:
        return None

    # Non-mapping inputs are passed through unchanged (see the `object` overload).
    if not is_mapping(obj):
        return obj

    # Note: only the top level is filtered; nested mappings keep their NotGiven values.
    return {key: value for key, value in obj.items() if not isinstance(value, NotGiven)}
|
||||
|
||||
|
||||
def coerce_integer(val: str) -> int:
    """Parse `val` as a base-10 integer; raises ValueError on malformed input."""
    return int(val, 10)
|
||||
|
||||
|
||||
def coerce_float(val: str) -> float:
    """Parse `val` as a float; raises ValueError on malformed input."""
    return float(val)
|
||||
|
||||
|
||||
def coerce_boolean(val: str) -> bool:
    """Interpret `val` as a boolean: exactly "true", "1" or "on" (case-sensitive) are truthy."""
    return val in ("true", "1", "on")
|
||||
|
||||
|
||||
def maybe_coerce_integer(val: str | None) -> int | None:
    """Like `coerce_integer`, but passes `None` through unchanged."""
    return None if val is None else int(val, base=10)
|
||||
|
||||
|
||||
def maybe_coerce_float(val: str | None) -> float | None:
    """Like `coerce_float`, but passes `None` through unchanged."""
    return None if val is None else float(val)
|
||||
|
||||
|
||||
def maybe_coerce_boolean(val: str | None) -> bool | None:
    """Like `coerce_boolean`, but passes `None` through unchanged."""
    if val is None:
        return None
    # Same truthy set as coerce_boolean: case-sensitive "true" / "1" / "on".
    return val == "true" or val == "1" or val == "on"
|
||||
|
||||
|
||||
def removeprefix(string: str, prefix: str) -> str:
    """Remove a prefix from a string.

    Backport of `str.removeprefix` for Python < 3.9
    """
    if not string.startswith(prefix):
        return string
    return string[len(prefix) :]
|
||||
|
||||
|
||||
def removesuffix(string: str, suffix: str) -> str:
    """Remove a suffix from a string.

    Backport of `str.removesuffix` for Python < 3.9

    Bug fix: the empty suffix must be guarded explicitly. `"abc".endswith("")`
    is True and `string[: -len("")]` is `string[:0]`, i.e. `""`, whereas
    `str.removesuffix("")` returns the string unchanged.
    """
    if suffix and string.endswith(suffix):
        return string[: -len(suffix)]
    return string
|
||||
|
||||
|
||||
def file_from_path(path: str) -> FileTypes:
    """Load the file at `path` fully into memory and return a `(filename, contents)` pair."""
    file_name = os.path.basename(path)
    contents = Path(path).read_bytes()
    return file_name, contents
|
||||
|
||||
|
||||
def get_required_header(headers: HeadersLike, header: str) -> str:
    """Look up `header` case-insensitively and raise ValueError if it is absent.

    First scans mapping-like headers key-by-key (lowercased comparison), then
    falls back to `.get()` with several casings of the name.
    NOTE(review): the fallback assumes `headers` has a `.get()` method (e.g. an
    httpx.Headers-style object) — confirm against HeadersLike's definition.
    """
    lower_header = header.lower()
    if is_mapping_t(headers):
        # mypy doesn't understand the type narrowing here
        for k, v in headers.items():  # type: ignore
            if k.lower() == lower_header and isinstance(v, str):
                return v

    # to deal with the case where the header looks like Stainless-Event-Id
    intercaps_header = re.sub(r"([^\w])(\w)", lambda pat: pat.group(1) + pat.group(2).upper(), header.capitalize())

    for normalized_header in [header, lower_header, header.upper(), intercaps_header]:
        value = headers.get(normalized_header)
        if value:
            return value

    raise ValueError(f"Could not find {header} header")
|
||||
|
||||
|
||||
def get_async_library() -> str:
    """Return the name of the running async library (via sniffio), or the
    literal string "false" when not inside a recognised async context.

    NOTE(review): the "false" sentinel (rather than None) appears intentional —
    presumably consumed as a plain string, e.g. in telemetry headers; confirm
    before changing.
    """
    try:
        return sniffio.current_async_library()
    except Exception:
        return "false"
|
||||
|
||||
|
||||
def lru_cache(*, maxsize: int | None = 128) -> Callable[[CallableT], CallableT]:
    """A version of functools.lru_cache that retains the type signature
    for the wrapped function arguments.
    """
    decorator = functools.lru_cache(maxsize=maxsize)  # noqa: TID251
    return cast(Any, decorator)  # type: ignore[no-any-return]
|
||||
|
||||
|
||||
def json_safe(data: object) -> object:
    """Translates a mapping / sequence recursively in the same fashion
    as `pydantic` v2's `model_dump(mode="json")`.

    Mappings become dicts, non-string iterables become lists, and
    datetimes/dates become ISO-8601 strings; anything else is returned as-is.
    """
    if isinstance(data, Mapping):
        return {json_safe(key): json_safe(value) for key, value in data.items()}

    if isinstance(data, (datetime, date)):
        return data.isoformat()

    # Strings and byte strings are iterable but must be kept whole.
    if isinstance(data, (str, bytes, bytearray)):
        return data

    if isinstance(data, Iterable):
        return [json_safe(item) for item in data]

    return data
|
||||
4
src/opencode/_version.py
Normal file
4
src/opencode/_version.py
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

# Distribution name of the package.
__title__ = "opencode"
# Current version; the trailing marker lets release-please rewrite this line.
__version__ = "0.0.1-alpha.0"  # x-release-please-version
|
||||
4
src/opencode/lib/.keep
Normal file
4
src/opencode/lib/.keep
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
File generated from our OpenAPI spec by Stainless.
|
||||
|
||||
This directory can be used to store custom files to expand the SDK.
|
||||
It is ignored by Stainless code generation and its content (other than this keep file) won't be touched.
|
||||
0
src/opencode/py.typed
Normal file
0
src/opencode/py.typed
Normal file
75
src/opencode/resources/__init__.py
Normal file
75
src/opencode/resources/__init__.py
Normal file
|
|
@ -0,0 +1,75 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from .app import (
|
||||
AppResource,
|
||||
AsyncAppResource,
|
||||
AppResourceWithRawResponse,
|
||||
AsyncAppResourceWithRawResponse,
|
||||
AppResourceWithStreamingResponse,
|
||||
AsyncAppResourceWithStreamingResponse,
|
||||
)
|
||||
from .file import (
|
||||
FileResource,
|
||||
AsyncFileResource,
|
||||
FileResourceWithRawResponse,
|
||||
AsyncFileResourceWithRawResponse,
|
||||
FileResourceWithStreamingResponse,
|
||||
AsyncFileResourceWithStreamingResponse,
|
||||
)
|
||||
from .event import (
|
||||
EventResource,
|
||||
AsyncEventResource,
|
||||
EventResourceWithRawResponse,
|
||||
AsyncEventResourceWithRawResponse,
|
||||
EventResourceWithStreamingResponse,
|
||||
AsyncEventResourceWithStreamingResponse,
|
||||
)
|
||||
from .config import (
|
||||
ConfigResource,
|
||||
AsyncConfigResource,
|
||||
ConfigResourceWithRawResponse,
|
||||
AsyncConfigResourceWithRawResponse,
|
||||
ConfigResourceWithStreamingResponse,
|
||||
AsyncConfigResourceWithStreamingResponse,
|
||||
)
|
||||
from .session import (
|
||||
SessionResource,
|
||||
AsyncSessionResource,
|
||||
SessionResourceWithRawResponse,
|
||||
AsyncSessionResourceWithRawResponse,
|
||||
SessionResourceWithStreamingResponse,
|
||||
AsyncSessionResourceWithStreamingResponse,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"EventResource",
|
||||
"AsyncEventResource",
|
||||
"EventResourceWithRawResponse",
|
||||
"AsyncEventResourceWithRawResponse",
|
||||
"EventResourceWithStreamingResponse",
|
||||
"AsyncEventResourceWithStreamingResponse",
|
||||
"AppResource",
|
||||
"AsyncAppResource",
|
||||
"AppResourceWithRawResponse",
|
||||
"AsyncAppResourceWithRawResponse",
|
||||
"AppResourceWithStreamingResponse",
|
||||
"AsyncAppResourceWithStreamingResponse",
|
||||
"FileResource",
|
||||
"AsyncFileResource",
|
||||
"FileResourceWithRawResponse",
|
||||
"AsyncFileResourceWithRawResponse",
|
||||
"FileResourceWithStreamingResponse",
|
||||
"AsyncFileResourceWithStreamingResponse",
|
||||
"ConfigResource",
|
||||
"AsyncConfigResource",
|
||||
"ConfigResourceWithRawResponse",
|
||||
"AsyncConfigResourceWithRawResponse",
|
||||
"ConfigResourceWithStreamingResponse",
|
||||
"AsyncConfigResourceWithStreamingResponse",
|
||||
"SessionResource",
|
||||
"AsyncSessionResource",
|
||||
"SessionResourceWithRawResponse",
|
||||
"AsyncSessionResourceWithRawResponse",
|
||||
"SessionResourceWithStreamingResponse",
|
||||
"AsyncSessionResourceWithStreamingResponse",
|
||||
]
|
||||
186
src/opencode/resources/app.py
Normal file
186
src/opencode/resources/app.py
Normal file
|
|
@ -0,0 +1,186 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import httpx
|
||||
|
||||
from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
|
||||
from .._compat import cached_property
|
||||
from .._resource import SyncAPIResource, AsyncAPIResource
|
||||
from .._response import (
|
||||
to_raw_response_wrapper,
|
||||
to_streamed_response_wrapper,
|
||||
async_to_raw_response_wrapper,
|
||||
async_to_streamed_response_wrapper,
|
||||
)
|
||||
from ..types.app import App
|
||||
from .._base_client import make_request_options
|
||||
from ..types.app_init_response import AppInitResponse
|
||||
|
||||
__all__ = ["AppResource", "AsyncAppResource"]
|
||||
|
||||
|
||||
class AppResource(SyncAPIResource):
    """Synchronous API resource for the app endpoints (`GET /app`, `POST /app/init`)."""

    @cached_property
    def with_raw_response(self) -> AppResourceWithRawResponse:
        """
        This property can be used as a prefix for any HTTP method call to return
        the raw response object instead of the parsed content.

        For more information, see https://www.github.com/sst/opencode-sdk-python#accessing-raw-response-data-eg-headers
        """
        return AppResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> AppResourceWithStreamingResponse:
        """
        An alternative to `.with_raw_response` that doesn't eagerly read the response body.

        For more information, see https://www.github.com/sst/opencode-sdk-python#with_streaming_response
        """
        return AppResourceWithStreamingResponse(self)

    def get(
        self,
        *,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> App:
        """Get app info"""
        return self._get(
            "/app",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=App,
        )

    def init(
        self,
        *,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> AppInitResponse:
        """Initialize the app"""
        return self._post(
            "/app/init",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=AppInitResponse,
        )
|
||||
|
||||
|
||||
class AsyncAppResource(AsyncAPIResource):
    """Asynchronous counterpart of `AppResource` (`GET /app`, `POST /app/init`)."""

    @cached_property
    def with_raw_response(self) -> AsyncAppResourceWithRawResponse:
        """
        This property can be used as a prefix for any HTTP method call to return
        the raw response object instead of the parsed content.

        For more information, see https://www.github.com/sst/opencode-sdk-python#accessing-raw-response-data-eg-headers
        """
        return AsyncAppResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> AsyncAppResourceWithStreamingResponse:
        """
        An alternative to `.with_raw_response` that doesn't eagerly read the response body.

        For more information, see https://www.github.com/sst/opencode-sdk-python#with_streaming_response
        """
        return AsyncAppResourceWithStreamingResponse(self)

    async def get(
        self,
        *,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> App:
        """Get app info"""
        return await self._get(
            "/app",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=App,
        )

    async def init(
        self,
        *,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> AppInitResponse:
        """Initialize the app"""
        return await self._post(
            "/app/init",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=AppInitResponse,
        )
|
||||
|
||||
|
||||
class AppResourceWithRawResponse:
    """Wraps each `AppResource` method to return the raw HTTP response."""

    def __init__(self, app: AppResource) -> None:
        self._app = app

        self.get = to_raw_response_wrapper(
            app.get,
        )
        self.init = to_raw_response_wrapper(
            app.init,
        )
|
||||
|
||||
|
||||
class AsyncAppResourceWithRawResponse:
    """Wraps each `AsyncAppResource` method to return the raw HTTP response."""

    def __init__(self, app: AsyncAppResource) -> None:
        self._app = app

        self.get = async_to_raw_response_wrapper(
            app.get,
        )
        self.init = async_to_raw_response_wrapper(
            app.init,
        )
|
||||
|
||||
|
||||
class AppResourceWithStreamingResponse:
    """Wraps each `AppResource` method to stream the response body lazily."""

    def __init__(self, app: AppResource) -> None:
        self._app = app

        self.get = to_streamed_response_wrapper(
            app.get,
        )
        self.init = to_streamed_response_wrapper(
            app.init,
        )
|
||||
|
||||
|
||||
class AsyncAppResourceWithStreamingResponse:
    """Wraps each `AsyncAppResource` method to stream the response body lazily."""

    def __init__(self, app: AsyncAppResource) -> None:
        self._app = app

        self.get = async_to_streamed_response_wrapper(
            app.get,
        )
        self.init = async_to_streamed_response_wrapper(
            app.init,
        )
|
||||
186
src/opencode/resources/config.py
Normal file
186
src/opencode/resources/config.py
Normal file
|
|
@ -0,0 +1,186 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import httpx
|
||||
|
||||
from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
|
||||
from .._compat import cached_property
|
||||
from .._resource import SyncAPIResource, AsyncAPIResource
|
||||
from .._response import (
|
||||
to_raw_response_wrapper,
|
||||
to_streamed_response_wrapper,
|
||||
async_to_raw_response_wrapper,
|
||||
async_to_streamed_response_wrapper,
|
||||
)
|
||||
from .._base_client import make_request_options
|
||||
from ..types.config import Config
|
||||
from ..types.config_providers_response import ConfigProvidersResponse
|
||||
|
||||
__all__ = ["ConfigResource", "AsyncConfigResource"]
|
||||
|
||||
|
||||
class ConfigResource(SyncAPIResource):
    """Synchronous API resource for configuration endpoints (`GET /config`, `GET /config/providers`)."""

    @cached_property
    def with_raw_response(self) -> ConfigResourceWithRawResponse:
        """
        This property can be used as a prefix for any HTTP method call to return
        the raw response object instead of the parsed content.

        For more information, see https://www.github.com/sst/opencode-sdk-python#accessing-raw-response-data-eg-headers
        """
        return ConfigResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> ConfigResourceWithStreamingResponse:
        """
        An alternative to `.with_raw_response` that doesn't eagerly read the response body.

        For more information, see https://www.github.com/sst/opencode-sdk-python#with_streaming_response
        """
        return ConfigResourceWithStreamingResponse(self)

    def get(
        self,
        *,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> Config:
        """Get config info"""
        return self._get(
            "/config",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=Config,
        )

    def providers(
        self,
        *,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> ConfigProvidersResponse:
        """List all providers"""
        return self._get(
            "/config/providers",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=ConfigProvidersResponse,
        )
|
||||
|
||||
|
||||
class AsyncConfigResource(AsyncAPIResource):
    """Asynchronous counterpart of `ConfigResource` (`GET /config`, `GET /config/providers`)."""

    @cached_property
    def with_raw_response(self) -> AsyncConfigResourceWithRawResponse:
        """
        This property can be used as a prefix for any HTTP method call to return
        the raw response object instead of the parsed content.

        For more information, see https://www.github.com/sst/opencode-sdk-python#accessing-raw-response-data-eg-headers
        """
        return AsyncConfigResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> AsyncConfigResourceWithStreamingResponse:
        """
        An alternative to `.with_raw_response` that doesn't eagerly read the response body.

        For more information, see https://www.github.com/sst/opencode-sdk-python#with_streaming_response
        """
        return AsyncConfigResourceWithStreamingResponse(self)

    async def get(
        self,
        *,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> Config:
        """Get config info"""
        return await self._get(
            "/config",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=Config,
        )

    async def providers(
        self,
        *,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> ConfigProvidersResponse:
        """List all providers"""
        return await self._get(
            "/config/providers",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=ConfigProvidersResponse,
        )
|
||||
|
||||
|
||||
class ConfigResourceWithRawResponse:
    """Wraps each `ConfigResource` method to return the raw HTTP response."""

    def __init__(self, config: ConfigResource) -> None:
        self._config = config

        self.get = to_raw_response_wrapper(
            config.get,
        )
        self.providers = to_raw_response_wrapper(
            config.providers,
        )
|
||||
|
||||
|
||||
class AsyncConfigResourceWithRawResponse:
    """Wraps each `AsyncConfigResource` method to return the raw HTTP response."""

    def __init__(self, config: AsyncConfigResource) -> None:
        self._config = config

        self.get = async_to_raw_response_wrapper(
            config.get,
        )
        self.providers = async_to_raw_response_wrapper(
            config.providers,
        )
|
||||
|
||||
|
||||
class ConfigResourceWithStreamingResponse:
    """Wraps each `ConfigResource` method to stream the response body lazily."""

    def __init__(self, config: ConfigResource) -> None:
        self._config = config

        self.get = to_streamed_response_wrapper(
            config.get,
        )
        self.providers = to_streamed_response_wrapper(
            config.providers,
        )
|
||||
|
||||
|
||||
class AsyncConfigResourceWithStreamingResponse:
    """Wraps each `AsyncConfigResource` method to stream the response body lazily."""

    def __init__(self, config: AsyncConfigResource) -> None:
        self._config = config

        self.get = async_to_streamed_response_wrapper(
            config.get,
        )
        self.providers = async_to_streamed_response_wrapper(
            config.providers,
        )
|
||||
143
src/opencode/resources/event.py
Normal file
143
src/opencode/resources/event.py
Normal file
|
|
@ -0,0 +1,143 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, cast
|
||||
|
||||
import httpx
|
||||
|
||||
from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
|
||||
from .._compat import cached_property
|
||||
from .._resource import SyncAPIResource, AsyncAPIResource
|
||||
from .._response import (
|
||||
to_raw_response_wrapper,
|
||||
to_streamed_response_wrapper,
|
||||
async_to_raw_response_wrapper,
|
||||
async_to_streamed_response_wrapper,
|
||||
)
|
||||
from .._base_client import make_request_options
|
||||
from ..types.event_list_response import EventListResponse
|
||||
|
||||
__all__ = ["EventResource", "AsyncEventResource"]
|
||||
|
||||
|
||||
class EventResource(SyncAPIResource):
    """Synchronous API resource for the event endpoint (`GET /event`)."""

    @cached_property
    def with_raw_response(self) -> EventResourceWithRawResponse:
        """
        This property can be used as a prefix for any HTTP method call to return
        the raw response object instead of the parsed content.

        For more information, see https://www.github.com/sst/opencode-sdk-python#accessing-raw-response-data-eg-headers
        """
        return EventResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> EventResourceWithStreamingResponse:
        """
        An alternative to `.with_raw_response` that doesn't eagerly read the response body.

        For more information, see https://www.github.com/sst/opencode-sdk-python#with_streaming_response
        """
        return EventResourceWithStreamingResponse(self)

    def list(
        self,
        *,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> EventListResponse:
        """Get events"""
        # The outer cast gives callers the union type; the inner cast works around
        # the fact that union types cannot be passed as runtime arguments.
        return cast(
            EventListResponse,
            self._get(
                "/event",
                options=make_request_options(
                    extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
                ),
                cast_to=cast(Any, EventListResponse),  # Union types cannot be passed in as arguments in the type system
            ),
        )
|
||||
|
||||
|
||||
class AsyncEventResource(AsyncAPIResource):
|
||||
@cached_property
|
||||
def with_raw_response(self) -> AsyncEventResourceWithRawResponse:
|
||||
"""
|
||||
This property can be used as a prefix for any HTTP method call to return
|
||||
the raw response object instead of the parsed content.
|
||||
|
||||
For more information, see https://www.github.com/sst/opencode-sdk-python#accessing-raw-response-data-eg-headers
|
||||
"""
|
||||
return AsyncEventResourceWithRawResponse(self)
|
||||
|
||||
@cached_property
|
||||
def with_streaming_response(self) -> AsyncEventResourceWithStreamingResponse:
|
||||
"""
|
||||
An alternative to `.with_raw_response` that doesn't eagerly read the response body.
|
||||
|
||||
For more information, see https://www.github.com/sst/opencode-sdk-python#with_streaming_response
|
||||
"""
|
||||
return AsyncEventResourceWithStreamingResponse(self)
|
||||
|
||||
async def list(
|
||||
self,
|
||||
*,
|
||||
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
|
||||
# The extra values given here take precedence over values defined on the client or passed to this method.
|
||||
extra_headers: Headers | None = None,
|
||||
extra_query: Query | None = None,
|
||||
extra_body: Body | None = None,
|
||||
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
|
||||
) -> EventListResponse:
|
||||
"""Get events"""
|
||||
return cast(
|
||||
EventListResponse,
|
||||
await self._get(
|
||||
"/event",
|
||||
options=make_request_options(
|
||||
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
|
||||
),
|
||||
cast_to=cast(Any, EventListResponse), # Union types cannot be passed in as arguments in the type system
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class EventResourceWithRawResponse:
|
||||
def __init__(self, event: EventResource) -> None:
|
||||
self._event = event
|
||||
|
||||
self.list = to_raw_response_wrapper(
|
||||
event.list,
|
||||
)
|
||||
|
||||
|
||||
class AsyncEventResourceWithRawResponse:
|
||||
def __init__(self, event: AsyncEventResource) -> None:
|
||||
self._event = event
|
||||
|
||||
self.list = async_to_raw_response_wrapper(
|
||||
event.list,
|
||||
)
|
||||
|
||||
|
||||
class EventResourceWithStreamingResponse:
|
||||
def __init__(self, event: EventResource) -> None:
|
||||
self._event = event
|
||||
|
||||
self.list = to_streamed_response_wrapper(
|
||||
event.list,
|
||||
)
|
||||
|
||||
|
||||
class AsyncEventResourceWithStreamingResponse:
|
||||
def __init__(self, event: AsyncEventResource) -> None:
|
||||
self._event = event
|
||||
|
||||
self.list = async_to_streamed_response_wrapper(
|
||||
event.list,
|
||||
)
|
||||
169
src/opencode/resources/file.py
Normal file
169
src/opencode/resources/file.py
Normal file
|
|
@ -0,0 +1,169 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import httpx
|
||||
|
||||
from ..types import file_search_params
|
||||
from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
|
||||
from .._utils import maybe_transform, async_maybe_transform
|
||||
from .._compat import cached_property
|
||||
from .._resource import SyncAPIResource, AsyncAPIResource
|
||||
from .._response import (
|
||||
to_raw_response_wrapper,
|
||||
to_streamed_response_wrapper,
|
||||
async_to_raw_response_wrapper,
|
||||
async_to_streamed_response_wrapper,
|
||||
)
|
||||
from .._base_client import make_request_options
|
||||
from ..types.file_search_response import FileSearchResponse
|
||||
|
||||
__all__ = ["FileResource", "AsyncFileResource"]
|
||||
|
||||
|
||||
class FileResource(SyncAPIResource):
|
||||
@cached_property
|
||||
def with_raw_response(self) -> FileResourceWithRawResponse:
|
||||
"""
|
||||
This property can be used as a prefix for any HTTP method call to return
|
||||
the raw response object instead of the parsed content.
|
||||
|
||||
For more information, see https://www.github.com/sst/opencode-sdk-python#accessing-raw-response-data-eg-headers
|
||||
"""
|
||||
return FileResourceWithRawResponse(self)
|
||||
|
||||
@cached_property
|
||||
def with_streaming_response(self) -> FileResourceWithStreamingResponse:
|
||||
"""
|
||||
An alternative to `.with_raw_response` that doesn't eagerly read the response body.
|
||||
|
||||
For more information, see https://www.github.com/sst/opencode-sdk-python#with_streaming_response
|
||||
"""
|
||||
return FileResourceWithStreamingResponse(self)
|
||||
|
||||
def search(
|
||||
self,
|
||||
*,
|
||||
query: str,
|
||||
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
|
||||
# The extra values given here take precedence over values defined on the client or passed to this method.
|
||||
extra_headers: Headers | None = None,
|
||||
extra_query: Query | None = None,
|
||||
extra_body: Body | None = None,
|
||||
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
|
||||
) -> FileSearchResponse:
|
||||
"""
|
||||
Search for files
|
||||
|
||||
Args:
|
||||
extra_headers: Send extra headers
|
||||
|
||||
extra_query: Add additional query parameters to the request
|
||||
|
||||
extra_body: Add additional JSON properties to the request
|
||||
|
||||
timeout: Override the client-level default timeout for this request, in seconds
|
||||
"""
|
||||
return self._get(
|
||||
"/file",
|
||||
options=make_request_options(
|
||||
extra_headers=extra_headers,
|
||||
extra_query=extra_query,
|
||||
extra_body=extra_body,
|
||||
timeout=timeout,
|
||||
query=maybe_transform({"query": query}, file_search_params.FileSearchParams),
|
||||
),
|
||||
cast_to=FileSearchResponse,
|
||||
)
|
||||
|
||||
|
||||
class AsyncFileResource(AsyncAPIResource):
|
||||
@cached_property
|
||||
def with_raw_response(self) -> AsyncFileResourceWithRawResponse:
|
||||
"""
|
||||
This property can be used as a prefix for any HTTP method call to return
|
||||
the raw response object instead of the parsed content.
|
||||
|
||||
For more information, see https://www.github.com/sst/opencode-sdk-python#accessing-raw-response-data-eg-headers
|
||||
"""
|
||||
return AsyncFileResourceWithRawResponse(self)
|
||||
|
||||
@cached_property
|
||||
def with_streaming_response(self) -> AsyncFileResourceWithStreamingResponse:
|
||||
"""
|
||||
An alternative to `.with_raw_response` that doesn't eagerly read the response body.
|
||||
|
||||
For more information, see https://www.github.com/sst/opencode-sdk-python#with_streaming_response
|
||||
"""
|
||||
return AsyncFileResourceWithStreamingResponse(self)
|
||||
|
||||
async def search(
|
||||
self,
|
||||
*,
|
||||
query: str,
|
||||
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
|
||||
# The extra values given here take precedence over values defined on the client or passed to this method.
|
||||
extra_headers: Headers | None = None,
|
||||
extra_query: Query | None = None,
|
||||
extra_body: Body | None = None,
|
||||
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
|
||||
) -> FileSearchResponse:
|
||||
"""
|
||||
Search for files
|
||||
|
||||
Args:
|
||||
extra_headers: Send extra headers
|
||||
|
||||
extra_query: Add additional query parameters to the request
|
||||
|
||||
extra_body: Add additional JSON properties to the request
|
||||
|
||||
timeout: Override the client-level default timeout for this request, in seconds
|
||||
"""
|
||||
return await self._get(
|
||||
"/file",
|
||||
options=make_request_options(
|
||||
extra_headers=extra_headers,
|
||||
extra_query=extra_query,
|
||||
extra_body=extra_body,
|
||||
timeout=timeout,
|
||||
query=await async_maybe_transform({"query": query}, file_search_params.FileSearchParams),
|
||||
),
|
||||
cast_to=FileSearchResponse,
|
||||
)
|
||||
|
||||
|
||||
class FileResourceWithRawResponse:
|
||||
def __init__(self, file: FileResource) -> None:
|
||||
self._file = file
|
||||
|
||||
self.search = to_raw_response_wrapper(
|
||||
file.search,
|
||||
)
|
||||
|
||||
|
||||
class AsyncFileResourceWithRawResponse:
|
||||
def __init__(self, file: AsyncFileResource) -> None:
|
||||
self._file = file
|
||||
|
||||
self.search = async_to_raw_response_wrapper(
|
||||
file.search,
|
||||
)
|
||||
|
||||
|
||||
class FileResourceWithStreamingResponse:
|
||||
def __init__(self, file: FileResource) -> None:
|
||||
self._file = file
|
||||
|
||||
self.search = to_streamed_response_wrapper(
|
||||
file.search,
|
||||
)
|
||||
|
||||
|
||||
class AsyncFileResourceWithStreamingResponse:
|
||||
def __init__(self, file: AsyncFileResource) -> None:
|
||||
self._file = file
|
||||
|
||||
self.search = async_to_streamed_response_wrapper(
|
||||
file.search,
|
||||
)
|
||||
895
src/opencode/resources/session.py
Normal file
895
src/opencode/resources/session.py
Normal file
|
|
@ -0,0 +1,895 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Iterable
|
||||
|
||||
import httpx
|
||||
|
||||
from ..types import session_chat_params, session_init_params, session_summarize_params
|
||||
from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
|
||||
from .._utils import maybe_transform, async_maybe_transform
|
||||
from .._compat import cached_property
|
||||
from .._resource import SyncAPIResource, AsyncAPIResource
|
||||
from .._response import (
|
||||
to_raw_response_wrapper,
|
||||
to_streamed_response_wrapper,
|
||||
async_to_raw_response_wrapper,
|
||||
async_to_streamed_response_wrapper,
|
||||
)
|
||||
from .._base_client import make_request_options
|
||||
from ..types.message import Message
|
||||
from ..types.session import Session
|
||||
from ..types.message_part_param import MessagePartParam
|
||||
from ..types.session_init_response import SessionInitResponse
|
||||
from ..types.session_list_response import SessionListResponse
|
||||
from ..types.session_abort_response import SessionAbortResponse
|
||||
from ..types.session_delete_response import SessionDeleteResponse
|
||||
from ..types.session_messages_response import SessionMessagesResponse
|
||||
from ..types.session_summarize_response import SessionSummarizeResponse
|
||||
|
||||
__all__ = ["SessionResource", "AsyncSessionResource"]
|
||||
|
||||
|
||||
class SessionResource(SyncAPIResource):
|
||||
@cached_property
|
||||
def with_raw_response(self) -> SessionResourceWithRawResponse:
|
||||
"""
|
||||
This property can be used as a prefix for any HTTP method call to return
|
||||
the raw response object instead of the parsed content.
|
||||
|
||||
For more information, see https://www.github.com/sst/opencode-sdk-python#accessing-raw-response-data-eg-headers
|
||||
"""
|
||||
return SessionResourceWithRawResponse(self)
|
||||
|
||||
@cached_property
|
||||
def with_streaming_response(self) -> SessionResourceWithStreamingResponse:
|
||||
"""
|
||||
An alternative to `.with_raw_response` that doesn't eagerly read the response body.
|
||||
|
||||
For more information, see https://www.github.com/sst/opencode-sdk-python#with_streaming_response
|
||||
"""
|
||||
return SessionResourceWithStreamingResponse(self)
|
||||
|
||||
def create(
|
||||
self,
|
||||
*,
|
||||
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
|
||||
# The extra values given here take precedence over values defined on the client or passed to this method.
|
||||
extra_headers: Headers | None = None,
|
||||
extra_query: Query | None = None,
|
||||
extra_body: Body | None = None,
|
||||
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
|
||||
) -> Session:
|
||||
"""Create a new session"""
|
||||
return self._post(
|
||||
"/session",
|
||||
options=make_request_options(
|
||||
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
|
||||
),
|
||||
cast_to=Session,
|
||||
)
|
||||
|
||||
def list(
|
||||
self,
|
||||
*,
|
||||
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
|
||||
# The extra values given here take precedence over values defined on the client or passed to this method.
|
||||
extra_headers: Headers | None = None,
|
||||
extra_query: Query | None = None,
|
||||
extra_body: Body | None = None,
|
||||
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
|
||||
) -> SessionListResponse:
|
||||
"""List all sessions"""
|
||||
return self._get(
|
||||
"/session",
|
||||
options=make_request_options(
|
||||
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
|
||||
),
|
||||
cast_to=SessionListResponse,
|
||||
)
|
||||
|
||||
def delete(
|
||||
self,
|
||||
id: str,
|
||||
*,
|
||||
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
|
||||
# The extra values given here take precedence over values defined on the client or passed to this method.
|
||||
extra_headers: Headers | None = None,
|
||||
extra_query: Query | None = None,
|
||||
extra_body: Body | None = None,
|
||||
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
|
||||
) -> SessionDeleteResponse:
|
||||
"""
|
||||
Delete a session and all its data
|
||||
|
||||
Args:
|
||||
extra_headers: Send extra headers
|
||||
|
||||
extra_query: Add additional query parameters to the request
|
||||
|
||||
extra_body: Add additional JSON properties to the request
|
||||
|
||||
timeout: Override the client-level default timeout for this request, in seconds
|
||||
"""
|
||||
if not id:
|
||||
raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
|
||||
return self._delete(
|
||||
f"/session/{id}",
|
||||
options=make_request_options(
|
||||
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
|
||||
),
|
||||
cast_to=SessionDeleteResponse,
|
||||
)
|
||||
|
||||
def abort(
|
||||
self,
|
||||
id: str,
|
||||
*,
|
||||
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
|
||||
# The extra values given here take precedence over values defined on the client or passed to this method.
|
||||
extra_headers: Headers | None = None,
|
||||
extra_query: Query | None = None,
|
||||
extra_body: Body | None = None,
|
||||
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
|
||||
) -> SessionAbortResponse:
|
||||
"""
|
||||
Abort a session
|
||||
|
||||
Args:
|
||||
extra_headers: Send extra headers
|
||||
|
||||
extra_query: Add additional query parameters to the request
|
||||
|
||||
extra_body: Add additional JSON properties to the request
|
||||
|
||||
timeout: Override the client-level default timeout for this request, in seconds
|
||||
"""
|
||||
if not id:
|
||||
raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
|
||||
return self._post(
|
||||
f"/session/{id}/abort",
|
||||
options=make_request_options(
|
||||
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
|
||||
),
|
||||
cast_to=SessionAbortResponse,
|
||||
)
|
||||
|
||||
def chat(
|
||||
self,
|
||||
id: str,
|
||||
*,
|
||||
model_id: str,
|
||||
parts: Iterable[MessagePartParam],
|
||||
provider_id: str,
|
||||
session_id: str,
|
||||
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
|
||||
# The extra values given here take precedence over values defined on the client or passed to this method.
|
||||
extra_headers: Headers | None = None,
|
||||
extra_query: Query | None = None,
|
||||
extra_body: Body | None = None,
|
||||
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
|
||||
) -> Message:
|
||||
"""
|
||||
Create and send a new message to a session
|
||||
|
||||
Args:
|
||||
extra_headers: Send extra headers
|
||||
|
||||
extra_query: Add additional query parameters to the request
|
||||
|
||||
extra_body: Add additional JSON properties to the request
|
||||
|
||||
timeout: Override the client-level default timeout for this request, in seconds
|
||||
"""
|
||||
if not id:
|
||||
raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
|
||||
return self._post(
|
||||
f"/session/{id}/message",
|
||||
body=maybe_transform(
|
||||
{
|
||||
"model_id": model_id,
|
||||
"parts": parts,
|
||||
"provider_id": provider_id,
|
||||
"session_id": session_id,
|
||||
},
|
||||
session_chat_params.SessionChatParams,
|
||||
),
|
||||
options=make_request_options(
|
||||
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
|
||||
),
|
||||
cast_to=Message,
|
||||
)
|
||||
|
||||
def init(
|
||||
self,
|
||||
id: str,
|
||||
*,
|
||||
model_id: str,
|
||||
provider_id: str,
|
||||
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
|
||||
# The extra values given here take precedence over values defined on the client or passed to this method.
|
||||
extra_headers: Headers | None = None,
|
||||
extra_query: Query | None = None,
|
||||
extra_body: Body | None = None,
|
||||
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
|
||||
) -> SessionInitResponse:
|
||||
"""
|
||||
Analyze the app and create an AGENTS.md file
|
||||
|
||||
Args:
|
||||
id: Session ID
|
||||
|
||||
extra_headers: Send extra headers
|
||||
|
||||
extra_query: Add additional query parameters to the request
|
||||
|
||||
extra_body: Add additional JSON properties to the request
|
||||
|
||||
timeout: Override the client-level default timeout for this request, in seconds
|
||||
"""
|
||||
if not id:
|
||||
raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
|
||||
return self._post(
|
||||
f"/session/{id}/init",
|
||||
body=maybe_transform(
|
||||
{
|
||||
"model_id": model_id,
|
||||
"provider_id": provider_id,
|
||||
},
|
||||
session_init_params.SessionInitParams,
|
||||
),
|
||||
options=make_request_options(
|
||||
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
|
||||
),
|
||||
cast_to=SessionInitResponse,
|
||||
)
|
||||
|
||||
def messages(
|
||||
self,
|
||||
id: str,
|
||||
*,
|
||||
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
|
||||
# The extra values given here take precedence over values defined on the client or passed to this method.
|
||||
extra_headers: Headers | None = None,
|
||||
extra_query: Query | None = None,
|
||||
extra_body: Body | None = None,
|
||||
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
|
||||
) -> SessionMessagesResponse:
|
||||
"""
|
||||
List messages for a session
|
||||
|
||||
Args:
|
||||
id: Session ID
|
||||
|
||||
extra_headers: Send extra headers
|
||||
|
||||
extra_query: Add additional query parameters to the request
|
||||
|
||||
extra_body: Add additional JSON properties to the request
|
||||
|
||||
timeout: Override the client-level default timeout for this request, in seconds
|
||||
"""
|
||||
if not id:
|
||||
raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
|
||||
return self._get(
|
||||
f"/session/{id}/message",
|
||||
options=make_request_options(
|
||||
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
|
||||
),
|
||||
cast_to=SessionMessagesResponse,
|
||||
)
|
||||
|
||||
def share(
|
||||
self,
|
||||
id: str,
|
||||
*,
|
||||
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
|
||||
# The extra values given here take precedence over values defined on the client or passed to this method.
|
||||
extra_headers: Headers | None = None,
|
||||
extra_query: Query | None = None,
|
||||
extra_body: Body | None = None,
|
||||
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
|
||||
) -> Session:
|
||||
"""
|
||||
Share a session
|
||||
|
||||
Args:
|
||||
extra_headers: Send extra headers
|
||||
|
||||
extra_query: Add additional query parameters to the request
|
||||
|
||||
extra_body: Add additional JSON properties to the request
|
||||
|
||||
timeout: Override the client-level default timeout for this request, in seconds
|
||||
"""
|
||||
if not id:
|
||||
raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
|
||||
return self._post(
|
||||
f"/session/{id}/share",
|
||||
options=make_request_options(
|
||||
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
|
||||
),
|
||||
cast_to=Session,
|
||||
)
|
||||
|
||||
def summarize(
|
||||
self,
|
||||
id: str,
|
||||
*,
|
||||
model_id: str,
|
||||
provider_id: str,
|
||||
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
|
||||
# The extra values given here take precedence over values defined on the client or passed to this method.
|
||||
extra_headers: Headers | None = None,
|
||||
extra_query: Query | None = None,
|
||||
extra_body: Body | None = None,
|
||||
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
|
||||
) -> SessionSummarizeResponse:
|
||||
"""
|
||||
Summarize the session
|
||||
|
||||
Args:
|
||||
id: Session ID
|
||||
|
||||
extra_headers: Send extra headers
|
||||
|
||||
extra_query: Add additional query parameters to the request
|
||||
|
||||
extra_body: Add additional JSON properties to the request
|
||||
|
||||
timeout: Override the client-level default timeout for this request, in seconds
|
||||
"""
|
||||
if not id:
|
||||
raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
|
||||
return self._post(
|
||||
f"/session/{id}/summarize",
|
||||
body=maybe_transform(
|
||||
{
|
||||
"model_id": model_id,
|
||||
"provider_id": provider_id,
|
||||
},
|
||||
session_summarize_params.SessionSummarizeParams,
|
||||
),
|
||||
options=make_request_options(
|
||||
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
|
||||
),
|
||||
cast_to=SessionSummarizeResponse,
|
||||
)
|
||||
|
||||
def unshare(
|
||||
self,
|
||||
id: str,
|
||||
*,
|
||||
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
|
||||
# The extra values given here take precedence over values defined on the client or passed to this method.
|
||||
extra_headers: Headers | None = None,
|
||||
extra_query: Query | None = None,
|
||||
extra_body: Body | None = None,
|
||||
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
|
||||
) -> Session:
|
||||
"""
|
||||
Unshare the session
|
||||
|
||||
Args:
|
||||
extra_headers: Send extra headers
|
||||
|
||||
extra_query: Add additional query parameters to the request
|
||||
|
||||
extra_body: Add additional JSON properties to the request
|
||||
|
||||
timeout: Override the client-level default timeout for this request, in seconds
|
||||
"""
|
||||
if not id:
|
||||
raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
|
||||
return self._delete(
|
||||
f"/session/{id}/share",
|
||||
options=make_request_options(
|
||||
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
|
||||
),
|
||||
cast_to=Session,
|
||||
)
|
||||
|
||||
|
||||
class AsyncSessionResource(AsyncAPIResource):
|
||||
@cached_property
|
||||
def with_raw_response(self) -> AsyncSessionResourceWithRawResponse:
|
||||
"""
|
||||
This property can be used as a prefix for any HTTP method call to return
|
||||
the raw response object instead of the parsed content.
|
||||
|
||||
For more information, see https://www.github.com/sst/opencode-sdk-python#accessing-raw-response-data-eg-headers
|
||||
"""
|
||||
return AsyncSessionResourceWithRawResponse(self)
|
||||
|
||||
@cached_property
|
||||
def with_streaming_response(self) -> AsyncSessionResourceWithStreamingResponse:
|
||||
"""
|
||||
An alternative to `.with_raw_response` that doesn't eagerly read the response body.
|
||||
|
||||
For more information, see https://www.github.com/sst/opencode-sdk-python#with_streaming_response
|
||||
"""
|
||||
return AsyncSessionResourceWithStreamingResponse(self)
|
||||
|
||||
async def create(
|
||||
self,
|
||||
*,
|
||||
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
|
||||
# The extra values given here take precedence over values defined on the client or passed to this method.
|
||||
extra_headers: Headers | None = None,
|
||||
extra_query: Query | None = None,
|
||||
extra_body: Body | None = None,
|
||||
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
|
||||
) -> Session:
|
||||
"""Create a new session"""
|
||||
return await self._post(
|
||||
"/session",
|
||||
options=make_request_options(
|
||||
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
|
||||
),
|
||||
cast_to=Session,
|
||||
)
|
||||
|
||||
async def list(
|
||||
self,
|
||||
*,
|
||||
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
|
||||
# The extra values given here take precedence over values defined on the client or passed to this method.
|
||||
extra_headers: Headers | None = None,
|
||||
extra_query: Query | None = None,
|
||||
extra_body: Body | None = None,
|
||||
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
|
||||
) -> SessionListResponse:
|
||||
"""List all sessions"""
|
||||
return await self._get(
|
||||
"/session",
|
||||
options=make_request_options(
|
||||
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
|
||||
),
|
||||
cast_to=SessionListResponse,
|
||||
)
|
||||
|
||||
async def delete(
|
||||
self,
|
||||
id: str,
|
||||
*,
|
||||
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
|
||||
# The extra values given here take precedence over values defined on the client or passed to this method.
|
||||
extra_headers: Headers | None = None,
|
||||
extra_query: Query | None = None,
|
||||
extra_body: Body | None = None,
|
||||
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
|
||||
) -> SessionDeleteResponse:
|
||||
"""
|
||||
Delete a session and all its data
|
||||
|
||||
Args:
|
||||
extra_headers: Send extra headers
|
||||
|
||||
extra_query: Add additional query parameters to the request
|
||||
|
||||
extra_body: Add additional JSON properties to the request
|
||||
|
||||
timeout: Override the client-level default timeout for this request, in seconds
|
||||
"""
|
||||
if not id:
|
||||
raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
|
||||
return await self._delete(
|
||||
f"/session/{id}",
|
||||
options=make_request_options(
|
||||
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
|
||||
),
|
||||
cast_to=SessionDeleteResponse,
|
||||
)
|
||||
|
||||
async def abort(
|
||||
self,
|
||||
id: str,
|
||||
*,
|
||||
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
|
||||
# The extra values given here take precedence over values defined on the client or passed to this method.
|
||||
extra_headers: Headers | None = None,
|
||||
extra_query: Query | None = None,
|
||||
extra_body: Body | None = None,
|
||||
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
|
||||
) -> SessionAbortResponse:
|
||||
"""
|
||||
Abort a session
|
||||
|
||||
Args:
|
||||
extra_headers: Send extra headers
|
||||
|
||||
extra_query: Add additional query parameters to the request
|
||||
|
||||
extra_body: Add additional JSON properties to the request
|
||||
|
||||
timeout: Override the client-level default timeout for this request, in seconds
|
||||
"""
|
||||
if not id:
|
||||
raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
|
||||
return await self._post(
|
||||
f"/session/{id}/abort",
|
||||
options=make_request_options(
|
||||
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
|
||||
),
|
||||
cast_to=SessionAbortResponse,
|
||||
)
|
||||
|
||||
async def chat(
|
||||
self,
|
||||
id: str,
|
||||
*,
|
||||
model_id: str,
|
||||
parts: Iterable[MessagePartParam],
|
||||
provider_id: str,
|
||||
session_id: str,
|
||||
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
|
||||
# The extra values given here take precedence over values defined on the client or passed to this method.
|
||||
extra_headers: Headers | None = None,
|
||||
extra_query: Query | None = None,
|
||||
extra_body: Body | None = None,
|
||||
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
|
||||
) -> Message:
|
||||
"""
|
||||
Create and send a new message to a session
|
||||
|
||||
Args:
|
||||
extra_headers: Send extra headers
|
||||
|
||||
extra_query: Add additional query parameters to the request
|
||||
|
||||
extra_body: Add additional JSON properties to the request
|
||||
|
||||
timeout: Override the client-level default timeout for this request, in seconds
|
||||
"""
|
||||
if not id:
|
||||
raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
|
||||
return await self._post(
|
||||
f"/session/{id}/message",
|
||||
body=await async_maybe_transform(
|
||||
{
|
||||
"model_id": model_id,
|
||||
"parts": parts,
|
||||
"provider_id": provider_id,
|
||||
"session_id": session_id,
|
||||
},
|
||||
session_chat_params.SessionChatParams,
|
||||
),
|
||||
options=make_request_options(
|
||||
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
|
||||
),
|
||||
cast_to=Message,
|
||||
)
|
||||
|
||||
async def init(
|
||||
self,
|
||||
id: str,
|
||||
*,
|
||||
model_id: str,
|
||||
provider_id: str,
|
||||
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
|
||||
# The extra values given here take precedence over values defined on the client or passed to this method.
|
||||
extra_headers: Headers | None = None,
|
||||
extra_query: Query | None = None,
|
||||
extra_body: Body | None = None,
|
||||
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
|
||||
) -> SessionInitResponse:
|
||||
"""
|
||||
Analyze the app and create an AGENTS.md file
|
||||
|
||||
Args:
|
||||
id: Session ID
|
||||
|
||||
extra_headers: Send extra headers
|
||||
|
||||
extra_query: Add additional query parameters to the request
|
||||
|
||||
extra_body: Add additional JSON properties to the request
|
||||
|
||||
timeout: Override the client-level default timeout for this request, in seconds
|
||||
"""
|
||||
if not id:
|
||||
raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
|
||||
return await self._post(
|
||||
f"/session/{id}/init",
|
||||
body=await async_maybe_transform(
|
||||
{
|
||||
"model_id": model_id,
|
||||
"provider_id": provider_id,
|
||||
},
|
||||
session_init_params.SessionInitParams,
|
||||
),
|
||||
options=make_request_options(
|
||||
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
|
||||
),
|
||||
cast_to=SessionInitResponse,
|
||||
)
|
||||
|
||||
async def messages(
    self,
    id: str,
    *,
    # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
    # The extra values given here take precedence over values defined on the client or passed to this method.
    extra_headers: Headers | None = None,
    extra_query: Query | None = None,
    extra_body: Body | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> SessionMessagesResponse:
    """
    List messages for a session

    Args:
      id: Session ID

      extra_headers: Send extra headers

      extra_query: Add additional query parameters to the request

      extra_body: Add additional JSON properties to the request

      timeout: Override the client-level default timeout for this request, in seconds
    """
    if not id:
        raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
    request_options = make_request_options(
        extra_headers=extra_headers,
        extra_query=extra_query,
        extra_body=extra_body,
        timeout=timeout,
    )
    # Read-only endpoint: GET with no request body.
    return await self._get(
        f"/session/{id}/message",
        options=request_options,
        cast_to=SessionMessagesResponse,
    )
|
||||
|
||||
async def share(
    self,
    id: str,
    *,
    # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
    # The extra values given here take precedence over values defined on the client or passed to this method.
    extra_headers: Headers | None = None,
    extra_query: Query | None = None,
    extra_body: Body | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> Session:
    """
    Share a session

    Args:
      id: Session ID

      extra_headers: Send extra headers

      extra_query: Add additional query parameters to the request

      extra_body: Add additional JSON properties to the request

      timeout: Override the client-level default timeout for this request, in seconds
    """
    if not id:
        raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
    request_options = make_request_options(
        extra_headers=extra_headers,
        extra_query=extra_query,
        extra_body=extra_body,
        timeout=timeout,
    )
    # POST with no body: the session id in the path is the only input.
    return await self._post(
        f"/session/{id}/share",
        options=request_options,
        cast_to=Session,
    )
|
||||
|
||||
async def summarize(
    self,
    id: str,
    *,
    model_id: str,
    provider_id: str,
    # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
    # The extra values given here take precedence over values defined on the client or passed to this method.
    extra_headers: Headers | None = None,
    extra_query: Query | None = None,
    extra_body: Body | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> SessionSummarizeResponse:
    """
    Summarize the session

    Args:
      id: Session ID

      extra_headers: Send extra headers

      extra_query: Add additional query parameters to the request

      extra_body: Add additional JSON properties to the request

      timeout: Override the client-level default timeout for this request, in seconds
    """
    if not id:
        raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
    # Serialize the request payload against the generated params schema.
    payload = await async_maybe_transform(
        {
            "model_id": model_id,
            "provider_id": provider_id,
        },
        session_summarize_params.SessionSummarizeParams,
    )
    request_options = make_request_options(
        extra_headers=extra_headers,
        extra_query=extra_query,
        extra_body=extra_body,
        timeout=timeout,
    )
    return await self._post(
        f"/session/{id}/summarize",
        body=payload,
        options=request_options,
        cast_to=SessionSummarizeResponse,
    )
|
||||
|
||||
async def unshare(
    self,
    id: str,
    *,
    # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
    # The extra values given here take precedence over values defined on the client or passed to this method.
    extra_headers: Headers | None = None,
    extra_query: Query | None = None,
    extra_body: Body | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> Session:
    """
    Unshare the session

    Args:
      id: Session ID

      extra_headers: Send extra headers

      extra_query: Add additional query parameters to the request

      extra_body: Add additional JSON properties to the request

      timeout: Override the client-level default timeout for this request, in seconds
    """
    if not id:
        raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
    request_options = make_request_options(
        extra_headers=extra_headers,
        extra_query=extra_query,
        extra_body=extra_body,
        timeout=timeout,
    )
    # Unsharing is modeled as DELETE on the same /share sub-resource used by share().
    return await self._delete(
        f"/session/{id}/share",
        options=request_options,
        cast_to=Session,
    )
|
||||
|
||||
|
||||
class SessionResourceWithRawResponse:
    """View over ``SessionResource`` whose methods return raw HTTP responses."""

    def __init__(self, session: SessionResource) -> None:
        self._session = session

        # Wrap each resource method so callers get the raw response object.
        self.create = to_raw_response_wrapper(session.create)
        self.list = to_raw_response_wrapper(session.list)
        self.delete = to_raw_response_wrapper(session.delete)
        self.abort = to_raw_response_wrapper(session.abort)
        self.chat = to_raw_response_wrapper(session.chat)
        self.init = to_raw_response_wrapper(session.init)
        self.messages = to_raw_response_wrapper(session.messages)
        self.share = to_raw_response_wrapper(session.share)
        self.summarize = to_raw_response_wrapper(session.summarize)
        self.unshare = to_raw_response_wrapper(session.unshare)
|
||||
|
||||
|
||||
class AsyncSessionResourceWithRawResponse:
    """View over ``AsyncSessionResource`` whose methods return raw HTTP responses."""

    def __init__(self, session: AsyncSessionResource) -> None:
        self._session = session

        # Wrap each resource method so callers get the raw response object.
        self.create = async_to_raw_response_wrapper(session.create)
        self.list = async_to_raw_response_wrapper(session.list)
        self.delete = async_to_raw_response_wrapper(session.delete)
        self.abort = async_to_raw_response_wrapper(session.abort)
        self.chat = async_to_raw_response_wrapper(session.chat)
        self.init = async_to_raw_response_wrapper(session.init)
        self.messages = async_to_raw_response_wrapper(session.messages)
        self.share = async_to_raw_response_wrapper(session.share)
        self.summarize = async_to_raw_response_wrapper(session.summarize)
        self.unshare = async_to_raw_response_wrapper(session.unshare)
|
||||
|
||||
|
||||
class SessionResourceWithStreamingResponse:
    """View over ``SessionResource`` whose methods return streamed responses."""

    def __init__(self, session: SessionResource) -> None:
        self._session = session

        # Wrap each resource method so callers can stream the response body.
        self.create = to_streamed_response_wrapper(session.create)
        self.list = to_streamed_response_wrapper(session.list)
        self.delete = to_streamed_response_wrapper(session.delete)
        self.abort = to_streamed_response_wrapper(session.abort)
        self.chat = to_streamed_response_wrapper(session.chat)
        self.init = to_streamed_response_wrapper(session.init)
        self.messages = to_streamed_response_wrapper(session.messages)
        self.share = to_streamed_response_wrapper(session.share)
        self.summarize = to_streamed_response_wrapper(session.summarize)
        self.unshare = to_streamed_response_wrapper(session.unshare)
|
||||
|
||||
|
||||
class AsyncSessionResourceWithStreamingResponse:
    """View over ``AsyncSessionResource`` whose methods return streamed responses."""

    def __init__(self, session: AsyncSessionResource) -> None:
        self._session = session

        # Wrap each resource method so callers can stream the response body.
        self.create = async_to_streamed_response_wrapper(session.create)
        self.list = async_to_streamed_response_wrapper(session.list)
        self.delete = async_to_streamed_response_wrapper(session.delete)
        self.abort = async_to_streamed_response_wrapper(session.abort)
        self.chat = async_to_streamed_response_wrapper(session.chat)
        self.init = async_to_streamed_response_wrapper(session.init)
        self.messages = async_to_streamed_response_wrapper(session.messages)
        self.share = async_to_streamed_response_wrapper(session.share)
        self.summarize = async_to_streamed_response_wrapper(session.summarize)
        self.unshare = async_to_streamed_response_wrapper(session.unshare)
|
||||
47
src/opencode/types/__init__.py
Normal file
47
src/opencode/types/__init__.py
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from .app import App as App
|
||||
from .model import Model as Model
|
||||
from .config import Config as Config
|
||||
from .message import Message as Message
|
||||
from .session import Session as Session
|
||||
from .keybinds import Keybinds as Keybinds
|
||||
from .provider import Provider as Provider
|
||||
from .file_part import FilePart as FilePart
|
||||
from .mcp_local import McpLocal as McpLocal
|
||||
from .text_part import TextPart as TextPart
|
||||
from .tool_call import ToolCall as ToolCall
|
||||
from .mcp_remote import McpRemote as McpRemote
|
||||
from .tool_result import ToolResult as ToolResult
|
||||
from .message_part import MessagePart as MessagePart
|
||||
from .reasoning_part import ReasoningPart as ReasoningPart
|
||||
from .file_part_param import FilePartParam as FilePartParam
|
||||
from .source_url_part import SourceURLPart as SourceURLPart
|
||||
from .step_start_part import StepStartPart as StepStartPart
|
||||
from .text_part_param import TextPartParam as TextPartParam
|
||||
from .tool_call_param import ToolCallParam as ToolCallParam
|
||||
from .app_init_response import AppInitResponse as AppInitResponse
|
||||
from .tool_partial_call import ToolPartialCall as ToolPartialCall
|
||||
from .tool_result_param import ToolResultParam as ToolResultParam
|
||||
from .file_search_params import FileSearchParams as FileSearchParams
|
||||
from .message_part_param import MessagePartParam as MessagePartParam
|
||||
from .event_list_response import EventListResponse as EventListResponse
|
||||
from .session_chat_params import SessionChatParams as SessionChatParams
|
||||
from .session_init_params import SessionInitParams as SessionInitParams
|
||||
from .file_search_response import FileSearchResponse as FileSearchResponse
|
||||
from .reasoning_part_param import ReasoningPartParam as ReasoningPartParam
|
||||
from .tool_invocation_part import ToolInvocationPart as ToolInvocationPart
|
||||
from .session_init_response import SessionInitResponse as SessionInitResponse
|
||||
from .session_list_response import SessionListResponse as SessionListResponse
|
||||
from .source_url_part_param import SourceURLPartParam as SourceURLPartParam
|
||||
from .step_start_part_param import StepStartPartParam as StepStartPartParam
|
||||
from .session_abort_response import SessionAbortResponse as SessionAbortResponse
|
||||
from .session_delete_response import SessionDeleteResponse as SessionDeleteResponse
|
||||
from .tool_partial_call_param import ToolPartialCallParam as ToolPartialCallParam
|
||||
from .session_summarize_params import SessionSummarizeParams as SessionSummarizeParams
|
||||
from .config_providers_response import ConfigProvidersResponse as ConfigProvidersResponse
|
||||
from .session_messages_response import SessionMessagesResponse as SessionMessagesResponse
|
||||
from .session_summarize_response import SessionSummarizeResponse as SessionSummarizeResponse
|
||||
from .tool_invocation_part_param import ToolInvocationPartParam as ToolInvocationPartParam
|
||||
37
src/opencode/types/app.py
Normal file
37
src/opencode/types/app.py
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from .._models import BaseModel
|
||||
|
||||
__all__ = ["App", "Path", "Time"]
|
||||
|
||||
|
||||
class Path(BaseModel):
    """Filesystem locations resolved for the running app (from the OpenAPI spec)."""

    config: str

    cwd: str

    data: str

    root: str

    state: str


class Time(BaseModel):
    """Timestamps for the app lifecycle."""

    # Present once the app has been initialized; unit (epoch seconds vs ms)
    # is not shown here — confirm against the server spec.
    initialized: Optional[float] = None


class App(BaseModel):
    """Information about the running opencode app instance."""

    # Whether the project directory is a git repository (boolean flag per schema).
    git: bool

    hostname: str

    path: Path

    project: str

    time: Time

    user: str
|
||||
7
src/opencode/types/app_init_response.py
Normal file
7
src/opencode/types/app_init_response.py
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
__all__ = ["AppInitResponse"]
|
||||
|
||||
# The app-init endpoint returns a bare boolean (success flag per the generated
# OpenAPI schema; exact semantics are defined server-side).
AppInitResponse: TypeAlias = bool
|
||||
133
src/opencode/types/config.py
Normal file
133
src/opencode/types/config.py
Normal file
|
|
@ -0,0 +1,133 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import Dict, List, Union, Optional
|
||||
from typing_extensions import Annotated, TypeAlias
|
||||
|
||||
from pydantic import Field as FieldInfo
|
||||
|
||||
from .._utils import PropertyInfo
|
||||
from .._models import BaseModel
|
||||
from .keybinds import Keybinds
|
||||
from .mcp_local import McpLocal
|
||||
from .mcp_remote import McpRemote
|
||||
|
||||
__all__ = [
|
||||
"Config",
|
||||
"Experimental",
|
||||
"ExperimentalHook",
|
||||
"ExperimentalHookFileEdited",
|
||||
"ExperimentalHookSessionCompleted",
|
||||
"Mcp",
|
||||
"Provider",
|
||||
"ProviderModels",
|
||||
"ProviderModelsCost",
|
||||
"ProviderModelsLimit",
|
||||
]
|
||||
|
||||
|
||||
class ExperimentalHookFileEdited(BaseModel):
    """Hook invoked when a file is edited."""

    # Command and arguments to execute.
    command: List[str]

    # Extra environment variables for the hook process, if any.
    environment: Optional[Dict[str, str]] = None


class ExperimentalHookSessionCompleted(BaseModel):
    """Hook invoked when a session completes."""

    command: List[str]

    environment: Optional[Dict[str, str]] = None


class ExperimentalHook(BaseModel):
    """Experimental lifecycle hooks."""

    # Keyed hooks run on file edits; key semantics (e.g. glob or extension)
    # are not visible here — confirm against the spec.
    file_edited: Optional[Dict[str, List[ExperimentalHookFileEdited]]] = None

    session_completed: Optional[List[ExperimentalHookSessionCompleted]] = None


class Experimental(BaseModel):
    """Container for experimental configuration options."""

    hook: Optional[ExperimentalHook] = None


# MCP server configuration: discriminated union on the `type` field
# ("local" -> McpLocal, "remote" -> McpRemote).
Mcp: TypeAlias = Annotated[Union[McpLocal, McpRemote], PropertyInfo(discriminator="type")]


class ProviderModelsCost(BaseModel):
    """Pricing figures for a model."""

    input: float

    output: float

    cache_read: Optional[float] = None

    cache_write: Optional[float] = None


class ProviderModelsLimit(BaseModel):
    """Size limits for a model."""

    context: float

    output: float


class ProviderModels(BaseModel):
    """Model definition/override inside a provider configuration."""

    id: Optional[str] = None

    attachment: Optional[bool] = None

    cost: Optional[ProviderModelsCost] = None

    limit: Optional[ProviderModelsLimit] = None

    name: Optional[str] = None

    # Free-form provider-specific options.
    options: Optional[Dict[str, object]] = None

    reasoning: Optional[bool] = None

    temperature: Optional[bool] = None

    tool_call: Optional[bool] = None


class Provider(BaseModel):
    """Custom provider configuration and model overrides."""

    # NOTE(review): keys are presumably model ids — confirm against the spec.
    models: Dict[str, ProviderModels]

    id: Optional[str] = None

    api: Optional[str] = None

    env: Optional[List[str]] = None

    name: Optional[str] = None

    npm: Optional[str] = None

    options: Optional[Dict[str, object]] = None


class Config(BaseModel):
    """Top-level opencode configuration (generated from the OpenAPI spec)."""

    # Aliased because "$schema" is not a valid Python identifier.
    schema_: Optional[str] = FieldInfo(alias="$schema", default=None)
    """JSON schema reference for configuration validation"""

    autoshare: Optional[bool] = None
    """Share newly created sessions automatically"""

    autoupdate: Optional[bool] = None
    """Automatically update to the latest version"""

    disabled_providers: Optional[List[str]] = None
    """Disable providers that are loaded automatically"""

    experimental: Optional[Experimental] = None

    keybinds: Optional[Keybinds] = None
    """Custom keybind configurations"""

    mcp: Optional[Dict[str, Mcp]] = None
    """MCP (Model Context Protocol) server configurations"""

    model: Optional[str] = None
    """Model to use in the format of provider/model, eg anthropic/claude-2"""

    provider: Optional[Dict[str, Provider]] = None
    """Custom provider configurations and model overrides"""

    theme: Optional[str] = None
    """Theme name to use for the interface"""
|
||||
14
src/opencode/types/config_providers_response.py
Normal file
14
src/opencode/types/config_providers_response.py
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import Dict, List
|
||||
|
||||
from .._models import BaseModel
|
||||
from .provider import Provider
|
||||
|
||||
__all__ = ["ConfigProvidersResponse"]
|
||||
|
||||
|
||||
class ConfigProvidersResponse(BaseModel):
    """Response body for the config providers endpoint."""

    # NOTE(review): mapping is presumably provider id -> default model id —
    # confirm against the server spec.
    default: Dict[str, str]

    providers: List[Provider]
|
||||
205
src/opencode/types/event_list_response.py
Normal file
205
src/opencode/types/event_list_response.py
Normal file
|
|
@ -0,0 +1,205 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import Dict, Union, Optional
|
||||
from typing_extensions import Literal, Annotated, TypeAlias
|
||||
|
||||
from pydantic import Field as FieldInfo
|
||||
|
||||
from .._utils import PropertyInfo
|
||||
from .message import Message
|
||||
from .session import Session
|
||||
from .._models import BaseModel
|
||||
from .message_part import MessagePart
|
||||
|
||||
__all__ = [
|
||||
"EventListResponse",
|
||||
"EventStorageWrite",
|
||||
"EventStorageWriteProperties",
|
||||
"EventInstallationUpdated",
|
||||
"EventInstallationUpdatedProperties",
|
||||
"EventLspClientDiagnostics",
|
||||
"EventLspClientDiagnosticsProperties",
|
||||
"EventPermissionUpdated",
|
||||
"EventPermissionUpdatedProperties",
|
||||
"EventPermissionUpdatedPropertiesTime",
|
||||
"EventMessageUpdated",
|
||||
"EventMessageUpdatedProperties",
|
||||
"EventMessagePartUpdated",
|
||||
"EventMessagePartUpdatedProperties",
|
||||
"EventSessionUpdated",
|
||||
"EventSessionUpdatedProperties",
|
||||
"EventSessionDeleted",
|
||||
"EventSessionDeletedProperties",
|
||||
"EventSessionError",
|
||||
"EventSessionErrorProperties",
|
||||
"EventSessionErrorPropertiesError",
|
||||
"EventSessionErrorPropertiesErrorProviderAuthError",
|
||||
"EventSessionErrorPropertiesErrorProviderAuthErrorData",
|
||||
"EventSessionErrorPropertiesErrorUnknownError",
|
||||
"EventSessionErrorPropertiesErrorUnknownErrorData",
|
||||
"EventSessionErrorPropertiesErrorMessageOutputLengthError",
|
||||
]
|
||||
|
||||
|
||||
class EventStorageWriteProperties(BaseModel):
    """Payload of a storage-write event."""

    key: str

    content: Optional[object] = None


class EventStorageWrite(BaseModel):
    # Discriminated by the literal `type` field, like all event variants below.
    properties: EventStorageWriteProperties

    type: Literal["storage.write"]


class EventInstallationUpdatedProperties(BaseModel):
    version: str


class EventInstallationUpdated(BaseModel):
    properties: EventInstallationUpdatedProperties

    type: Literal["installation.updated"]


class EventLspClientDiagnosticsProperties(BaseModel):
    path: str

    # Wire name is camelCase "serverID"; aliased to snake_case here.
    server_id: str = FieldInfo(alias="serverID")


class EventLspClientDiagnostics(BaseModel):
    properties: EventLspClientDiagnosticsProperties

    type: Literal["lsp.client.diagnostics"]


class EventPermissionUpdatedPropertiesTime(BaseModel):
    created: float


class EventPermissionUpdatedProperties(BaseModel):
    id: str

    metadata: Dict[str, object]

    session_id: str = FieldInfo(alias="sessionID")

    time: EventPermissionUpdatedPropertiesTime

    title: str


class EventPermissionUpdated(BaseModel):
    properties: EventPermissionUpdatedProperties

    type: Literal["permission.updated"]


class EventMessageUpdatedProperties(BaseModel):
    info: Message


class EventMessageUpdated(BaseModel):
    properties: EventMessageUpdatedProperties

    type: Literal["message.updated"]


class EventMessagePartUpdatedProperties(BaseModel):
    message_id: str = FieldInfo(alias="messageID")

    part: MessagePart

    session_id: str = FieldInfo(alias="sessionID")


class EventMessagePartUpdated(BaseModel):
    properties: EventMessagePartUpdatedProperties

    type: Literal["message.part.updated"]


class EventSessionUpdatedProperties(BaseModel):
    info: Session


class EventSessionUpdated(BaseModel):
    properties: EventSessionUpdatedProperties

    type: Literal["session.updated"]


class EventSessionDeletedProperties(BaseModel):
    info: Session


class EventSessionDeleted(BaseModel):
    properties: EventSessionDeletedProperties

    type: Literal["session.deleted"]


class EventSessionErrorPropertiesErrorProviderAuthErrorData(BaseModel):
    message: str

    provider_id: str = FieldInfo(alias="providerID")


class EventSessionErrorPropertiesErrorProviderAuthError(BaseModel):
    data: EventSessionErrorPropertiesErrorProviderAuthErrorData

    name: Literal["ProviderAuthError"]


class EventSessionErrorPropertiesErrorUnknownErrorData(BaseModel):
    message: str


class EventSessionErrorPropertiesErrorUnknownError(BaseModel):
    data: EventSessionErrorPropertiesErrorUnknownErrorData

    name: Literal["UnknownError"]


class EventSessionErrorPropertiesErrorMessageOutputLengthError(BaseModel):
    data: object

    name: Literal["MessageOutputLengthError"]


# Session-error payload: discriminated union on the error `name` field.
EventSessionErrorPropertiesError: TypeAlias = Annotated[
    Union[
        EventSessionErrorPropertiesErrorProviderAuthError,
        EventSessionErrorPropertiesErrorUnknownError,
        EventSessionErrorPropertiesErrorMessageOutputLengthError,
    ],
    PropertyInfo(discriminator="name"),
]


class EventSessionErrorProperties(BaseModel):
    error: Optional[EventSessionErrorPropertiesError] = None


class EventSessionError(BaseModel):
    properties: EventSessionErrorProperties

    type: Literal["session.error"]


# Top-level event union: discriminated on the event `type` literal.
EventListResponse: TypeAlias = Annotated[
    Union[
        EventStorageWrite,
        EventInstallationUpdated,
        EventLspClientDiagnostics,
        EventPermissionUpdated,
        EventMessageUpdated,
        EventMessagePartUpdated,
        EventSessionUpdated,
        EventSessionDeleted,
        EventSessionError,
    ],
    PropertyInfo(discriminator="type"),
]
|
||||
20
src/opencode/types/file_part.py
Normal file
20
src/opencode/types/file_part.py
Normal file
|
|
@ -0,0 +1,20 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import Optional
|
||||
from typing_extensions import Literal
|
||||
|
||||
from pydantic import Field as FieldInfo
|
||||
|
||||
from .._models import BaseModel
|
||||
|
||||
__all__ = ["FilePart"]
|
||||
|
||||
|
||||
class FilePart(BaseModel):
    """File attachment part of a message."""

    # Wire name is camelCase "mediaType"; aliased to snake_case here.
    media_type: str = FieldInfo(alias="mediaType")

    type: Literal["file"]

    url: str

    filename: Optional[str] = None
|
||||
19
src/opencode/types/file_part_param.py
Normal file
19
src/opencode/types/file_part_param.py
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing_extensions import Literal, Required, Annotated, TypedDict
|
||||
|
||||
from .._utils import PropertyInfo
|
||||
|
||||
__all__ = ["FilePartParam"]
|
||||
|
||||
|
||||
class FilePartParam(TypedDict, total=False):
    """Request-side counterpart of FilePart (serialized as camelCase on the wire)."""

    media_type: Required[Annotated[str, PropertyInfo(alias="mediaType")]]

    type: Required[Literal["file"]]

    url: Required[str]

    # Optional because the TypedDict is total=False and the key is not Required.
    filename: str
|
||||
11
src/opencode/types/file_search_params.py
Normal file
11
src/opencode/types/file_search_params.py
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing_extensions import Required, TypedDict
|
||||
|
||||
__all__ = ["FileSearchParams"]
|
||||
|
||||
|
||||
class FileSearchParams(TypedDict, total=False):
    """Query parameters for the file-search endpoint."""

    query: Required[str]
|
||||
8
src/opencode/types/file_search_response.py
Normal file
8
src/opencode/types/file_search_response.py
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import List
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
__all__ = ["FileSearchResponse"]
|
||||
|
||||
# The file-search endpoint returns a plain list of strings (presumably file
# paths — confirm against the server spec).
FileSearchResponse: TypeAlias = List[str]
|
||||
92
src/opencode/types/keybinds.py
Normal file
92
src/opencode/types/keybinds.py
Normal file
|
|
@ -0,0 +1,92 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import Field as FieldInfo
|
||||
|
||||
from .._models import BaseModel
|
||||
|
||||
__all__ = ["Keybinds"]
|
||||
|
||||
|
||||
class Keybinds(BaseModel):
    """Custom keybind configuration; every binding is optional (server defaults apply)."""

    app_exit: Optional[str] = None
    """Exit the application"""

    editor_open: Optional[str] = None
    """Open external editor"""

    help: Optional[str] = None
    """Show help dialog"""

    history_next: Optional[str] = None
    """Navigate to next history item"""

    history_previous: Optional[str] = None
    """Navigate to previous history item"""

    input_clear: Optional[str] = None
    """Clear input field"""

    input_newline: Optional[str] = None
    """Insert newline in input"""

    input_paste: Optional[str] = None
    """Paste from clipboard"""

    input_submit: Optional[str] = None
    """Submit input"""

    leader: Optional[str] = None
    """Leader key for keybind combinations"""

    messages_first: Optional[str] = None
    """Navigate to first message"""

    messages_half_page_down: Optional[str] = None
    """Scroll messages down by half page"""

    messages_half_page_up: Optional[str] = None
    """Scroll messages up by half page"""

    messages_last: Optional[str] = None
    """Navigate to last message"""

    messages_next: Optional[str] = None
    """Navigate to next message"""

    messages_page_down: Optional[str] = None
    """Scroll messages down by one page"""

    messages_page_up: Optional[str] = None
    """Scroll messages up by one page"""

    messages_previous: Optional[str] = None
    """Navigate to previous message"""

    # Wire name is "model_list"; the generator renames model_-prefixed fields
    # to avoid clashing with pydantic's protected "model_" namespace.
    api_model_list: Optional[str] = FieldInfo(alias="model_list", default=None)
    """List available models"""

    project_init: Optional[str] = None
    """Initialize project configuration"""

    session_compact: Optional[str] = None
    """Toggle compact mode for session"""

    session_interrupt: Optional[str] = None
    """Interrupt current session"""

    session_list: Optional[str] = None
    """List all sessions"""

    session_new: Optional[str] = None
    """Create a new session"""

    session_share: Optional[str] = None
    """Share current session"""

    theme_list: Optional[str] = None
    """List available themes"""

    tool_details: Optional[str] = None
    """Show tool details"""
|
||||
19
src/opencode/types/mcp_local.py
Normal file
19
src/opencode/types/mcp_local.py
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import Dict, List, Optional
|
||||
from typing_extensions import Literal
|
||||
|
||||
from .._models import BaseModel
|
||||
|
||||
__all__ = ["McpLocal"]
|
||||
|
||||
|
||||
class McpLocal(BaseModel):
    """Locally spawned MCP server (the "local" arm of the Mcp union)."""

    command: List[str]
    """Command and arguments to run the MCP server"""

    # Discriminator value used by the Mcp union in config.py.
    type: Literal["local"]
    """Type of MCP server connection"""

    environment: Optional[Dict[str, str]] = None
    """Environment variables to set when running the MCP server"""
|
||||
15
src/opencode/types/mcp_remote.py
Normal file
15
src/opencode/types/mcp_remote.py
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing_extensions import Literal
|
||||
|
||||
from .._models import BaseModel
|
||||
|
||||
__all__ = ["McpRemote"]
|
||||
|
||||
|
||||
class McpRemote(BaseModel):
    """Remotely hosted MCP server (the "remote" arm of the Mcp union)."""

    # Discriminator value used by the Mcp union in config.py.
    type: Literal["remote"]
    """Type of MCP server connection"""

    url: str
    """URL of the remote MCP server"""
|
||||
146
src/opencode/types/message.py
Normal file
146
src/opencode/types/message.py
Normal file
|
|
@ -0,0 +1,146 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import TYPE_CHECKING, Dict, List, Union, Optional
|
||||
from typing_extensions import Literal, Annotated, TypeAlias
|
||||
|
||||
from pydantic import Field as FieldInfo
|
||||
|
||||
from .._utils import PropertyInfo
|
||||
from .._models import BaseModel
|
||||
from .message_part import MessagePart
|
||||
|
||||
__all__ = [
|
||||
"Message",
|
||||
"Metadata",
|
||||
"MetadataTime",
|
||||
"MetadataTool",
|
||||
"MetadataToolTime",
|
||||
"MetadataAssistant",
|
||||
"MetadataAssistantPath",
|
||||
"MetadataAssistantTokens",
|
||||
"MetadataAssistantTokensCache",
|
||||
"MetadataError",
|
||||
"MetadataErrorProviderAuthError",
|
||||
"MetadataErrorProviderAuthErrorData",
|
||||
"MetadataErrorUnknownError",
|
||||
"MetadataErrorUnknownErrorData",
|
||||
"MetadataErrorMessageOutputLengthError",
|
||||
]
|
||||
|
||||
|
||||
class MetadataTime(BaseModel):
    """Creation/completion timestamps for a message."""

    created: float

    # Absent while the message is still in progress.
    completed: Optional[float] = None


class MetadataToolTime(BaseModel):
    """Start/end timestamps for a tool invocation."""

    end: float

    start: float


class MetadataTool(BaseModel):
    """Per-tool metadata; also accepts arbitrary extra properties."""

    time: MetadataToolTime

    title: str

    if TYPE_CHECKING:
        # Stub to indicate that arbitrary properties are accepted.
        # To access properties that are not valid identifiers you can use `getattr`, e.g.
        # `getattr(obj, '$type')`
        def __getattr__(self, attr: str) -> object: ...


class MetadataAssistantPath(BaseModel):
    """Filesystem context the assistant ran in."""

    cwd: str

    root: str


class MetadataAssistantTokensCache(BaseModel):
    """Cache token counts."""

    read: float

    write: float


class MetadataAssistantTokens(BaseModel):
    """Token usage breakdown for an assistant message."""

    cache: MetadataAssistantTokensCache

    input: float

    output: float

    reasoning: float


class MetadataAssistant(BaseModel):
    """Metadata attached to assistant messages."""

    cost: float

    # Wire name is "modelID"; renamed with the api_ prefix to avoid pydantic's
    # protected "model_" namespace.
    api_model_id: str = FieldInfo(alias="modelID")

    path: MetadataAssistantPath

    provider_id: str = FieldInfo(alias="providerID")

    system: List[str]

    tokens: MetadataAssistantTokens

    summary: Optional[bool] = None


class MetadataErrorProviderAuthErrorData(BaseModel):
    message: str

    provider_id: str = FieldInfo(alias="providerID")


class MetadataErrorProviderAuthError(BaseModel):
    # Error variants are discriminated by the literal `name` field.
    data: MetadataErrorProviderAuthErrorData

    name: Literal["ProviderAuthError"]


class MetadataErrorUnknownErrorData(BaseModel):
    message: str


class MetadataErrorUnknownError(BaseModel):
    data: MetadataErrorUnknownErrorData

    name: Literal["UnknownError"]


class MetadataErrorMessageOutputLengthError(BaseModel):
    data: object

    name: Literal["MessageOutputLengthError"]
|
||||
|
||||
|
||||
MetadataError: TypeAlias = Annotated[
|
||||
Union[MetadataErrorProviderAuthError, MetadataErrorUnknownError, MetadataErrorMessageOutputLengthError],
|
||||
PropertyInfo(discriminator="name"),
|
||||
]
|
||||
|
||||
|
||||
class Metadata(BaseModel):
|
||||
session_id: str = FieldInfo(alias="sessionID")
|
||||
|
||||
time: MetadataTime
|
||||
|
||||
tool: Dict[str, MetadataTool]
|
||||
|
||||
assistant: Optional[MetadataAssistant] = None
|
||||
|
||||
error: Optional[MetadataError] = None
|
||||
|
||||
|
||||
class Message(BaseModel):
|
||||
id: str
|
||||
|
||||
metadata: Metadata
|
||||
|
||||
parts: List[MessagePart]
|
||||
|
||||
role: Literal["user", "assistant"]
|
||||
19
src/opencode/types/message_part.py
Normal file
19
src/opencode/types/message_part.py
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing import Union
from typing_extensions import Annotated, TypeAlias

from .._utils import PropertyInfo
from .file_part import FilePart
from .text_part import TextPart
from .reasoning_part import ReasoningPart
from .source_url_part import SourceURLPart
from .step_start_part import StepStartPart
from .tool_invocation_part import ToolInvocationPart

__all__ = ["MessagePart"]

# Tagged union of all response-side message parts, discriminated by "type".
MessagePart: TypeAlias = Annotated[
    Union[TextPart, ReasoningPart, ToolInvocationPart, SourceURLPart, FilePart, StepStartPart],
    PropertyInfo(discriminator="type"),
]
19
src/opencode/types/message_part_param.py
Normal file
19
src/opencode/types/message_part_param.py
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

from typing import Union
from typing_extensions import TypeAlias

from .file_part_param import FilePartParam
from .text_part_param import TextPartParam
from .reasoning_part_param import ReasoningPartParam
from .source_url_part_param import SourceURLPartParam
from .step_start_part_param import StepStartPartParam
from .tool_invocation_part_param import ToolInvocationPartParam

__all__ = ["MessagePartParam"]

# Request-side counterpart of MessagePart; plain Union (no discriminator needed
# on input, since callers set "type" explicitly in each TypedDict).
MessagePartParam: TypeAlias = Union[
    TextPartParam, ReasoningPartParam, ToolInvocationPartParam, SourceURLPartParam, FilePartParam, StepStartPartParam
]
43
src/opencode/types/model.py
Normal file
43
src/opencode/types/model.py
Normal file
|
|
@ -0,0 +1,43 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing import Dict, Optional

from .._models import BaseModel

__all__ = ["Model", "Cost", "Limit"]


class Cost(BaseModel):
    """Pricing for a model.

    NOTE(review): currency/denomination (e.g. per-million-token USD) is not
    stated in this file — confirm against the API spec.
    """

    input: float

    output: float

    cache_read: Optional[float] = None

    cache_write: Optional[float] = None


class Limit(BaseModel):
    """Token limits for a model (context window and maximum output)."""

    context: float

    output: float


class Model(BaseModel):
    """A language model offered by a provider, with capability flags and pricing."""

    id: str

    # Capability flags: whether the model supports attachments, reasoning,
    # temperature control, and tool calling respectively.
    attachment: bool

    cost: Cost

    limit: Limit

    name: str

    # Provider-specific options; schema is opaque at this layer.
    options: Dict[str, object]

    reasoning: bool

    temperature: bool

    tool_call: bool
22
src/opencode/types/provider.py
Normal file
22
src/opencode/types/provider.py
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing import Dict, List, Optional

from .model import Model
from .._models import BaseModel

__all__ = ["Provider"]


class Provider(BaseModel):
    """A model provider and the models it exposes, keyed by model id."""

    id: str

    # Environment variable names the provider reads (e.g. for credentials) —
    # NOTE(review): inferred from the field name; confirm against the spec.
    env: List[str]

    models: Dict[str, Model]

    name: str

    api: Optional[str] = None

    npm: Optional[str] = None
18
src/opencode/types/reasoning_part.py
Normal file
18
src/opencode/types/reasoning_part.py
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing import Dict, Optional
from typing_extensions import Literal

from pydantic import Field as FieldInfo

from .._models import BaseModel

__all__ = ["ReasoningPart"]


class ReasoningPart(BaseModel):
    """A message part carrying model reasoning text (``type == "reasoning"``)."""

    text: str

    type: Literal["reasoning"]

    # Opaque provider-specific extras; wire name is camelCase "providerMetadata".
    provider_metadata: Optional[Dict[str, object]] = FieldInfo(alias="providerMetadata", default=None)
18
src/opencode/types/reasoning_part_param.py
Normal file
18
src/opencode/types/reasoning_part_param.py
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

from typing import Dict
from typing_extensions import Literal, Required, Annotated, TypedDict

from .._utils import PropertyInfo

__all__ = ["ReasoningPartParam"]


class ReasoningPartParam(TypedDict, total=False):
    """Request-side shape of a reasoning message part (mirrors ReasoningPart)."""

    text: Required[str]

    type: Required[Literal["reasoning"]]

    # Optional (total=False); serialized as camelCase "providerMetadata".
    provider_metadata: Annotated[Dict[str, object], PropertyInfo(alias="providerMetadata")]
33
src/opencode/types/session.py
Normal file
33
src/opencode/types/session.py
Normal file
|
|
@ -0,0 +1,33 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing import Optional

from pydantic import Field as FieldInfo

from .._models import BaseModel

__all__ = ["Session", "Time", "Share"]


class Time(BaseModel):
    """Creation/update timestamps for a session.

    Numeric; units not stated here — NOTE(review): confirm epoch unit in the spec.
    """

    created: float

    updated: float


class Share(BaseModel):
    """Public sharing info for a session."""

    url: str


class Session(BaseModel):
    """A chat session."""

    id: str

    time: Time

    title: str

    version: str

    # Set when this session was forked/branched from another; wire name "parentID".
    parent_id: Optional[str] = FieldInfo(alias="parentID", default=None)

    # Present only when the session has been shared.
    share: Optional[Share] = None
7
src/opencode/types/session_abort_response.py
Normal file
7
src/opencode/types/session_abort_response.py
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing_extensions import TypeAlias

__all__ = ["SessionAbortResponse"]

# The abort endpoint returns a bare boolean success flag.
SessionAbortResponse: TypeAlias = bool
21
src/opencode/types/session_chat_params.py
Normal file
21
src/opencode/types/session_chat_params.py
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

from typing import Iterable
from typing_extensions import Required, Annotated, TypedDict

from .._utils import PropertyInfo
from .message_part_param import MessagePartParam

__all__ = ["SessionChatParams"]


class SessionChatParams(TypedDict, total=False):
    """Parameters for sending message parts to a session's chat endpoint.

    All fields are required; snake_case names are serialized to the camelCase
    wire names given by their aliases.
    """

    model_id: Required[Annotated[str, PropertyInfo(alias="modelID")]]

    parts: Required[Iterable[MessagePartParam]]

    provider_id: Required[Annotated[str, PropertyInfo(alias="providerID")]]

    session_id: Required[Annotated[str, PropertyInfo(alias="sessionID")]]
7
src/opencode/types/session_delete_response.py
Normal file
7
src/opencode/types/session_delete_response.py
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing_extensions import TypeAlias

__all__ = ["SessionDeleteResponse"]

# The delete endpoint returns a bare boolean success flag.
SessionDeleteResponse: TypeAlias = bool
15
src/opencode/types/session_init_params.py
Normal file
15
src/opencode/types/session_init_params.py
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

from typing_extensions import Required, Annotated, TypedDict

from .._utils import PropertyInfo

__all__ = ["SessionInitParams"]


class SessionInitParams(TypedDict, total=False):
    """Parameters for initializing a session; both fields are required.

    Serialized to camelCase wire names via the aliases below.
    """

    model_id: Required[Annotated[str, PropertyInfo(alias="modelID")]]

    provider_id: Required[Annotated[str, PropertyInfo(alias="providerID")]]
7
src/opencode/types/session_init_response.py
Normal file
7
src/opencode/types/session_init_response.py
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing_extensions import TypeAlias

__all__ = ["SessionInitResponse"]

# The init endpoint returns a bare boolean success flag.
SessionInitResponse: TypeAlias = bool
10
src/opencode/types/session_list_response.py
Normal file
10
src/opencode/types/session_list_response.py
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing import List
from typing_extensions import TypeAlias

from .session import Session

__all__ = ["SessionListResponse"]

# The list endpoint returns a plain JSON array of sessions.
SessionListResponse: TypeAlias = List[Session]
10
src/opencode/types/session_messages_response.py
Normal file
10
src/opencode/types/session_messages_response.py
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing import List
from typing_extensions import TypeAlias

from .message import Message

__all__ = ["SessionMessagesResponse"]

# The messages endpoint returns a plain JSON array of messages.
SessionMessagesResponse: TypeAlias = List[Message]
15
src/opencode/types/session_summarize_params.py
Normal file
15
src/opencode/types/session_summarize_params.py
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

from typing_extensions import Required, Annotated, TypedDict

from .._utils import PropertyInfo

__all__ = ["SessionSummarizeParams"]


class SessionSummarizeParams(TypedDict, total=False):
    """Parameters for summarizing a session; both fields are required.

    Serialized to camelCase wire names via the aliases below.
    """

    model_id: Required[Annotated[str, PropertyInfo(alias="modelID")]]

    provider_id: Required[Annotated[str, PropertyInfo(alias="providerID")]]
7
src/opencode/types/session_summarize_response.py
Normal file
7
src/opencode/types/session_summarize_response.py
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing_extensions import TypeAlias

__all__ = ["SessionSummarizeResponse"]

# The summarize endpoint returns a bare boolean success flag.
SessionSummarizeResponse: TypeAlias = bool
22
src/opencode/types/source_url_part.py
Normal file
22
src/opencode/types/source_url_part.py
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing import Dict, Optional
from typing_extensions import Literal

from pydantic import Field as FieldInfo

from .._models import BaseModel

__all__ = ["SourceURLPart"]


class SourceURLPart(BaseModel):
    """A message part referencing an external source by URL (``type == "source-url"``)."""

    # Wire name is "sourceId" (lowercase d), unlike the "ID" suffix used elsewhere.
    source_id: str = FieldInfo(alias="sourceId")

    type: Literal["source-url"]

    url: str

    # Opaque provider-specific extras; wire name "providerMetadata".
    provider_metadata: Optional[Dict[str, object]] = FieldInfo(alias="providerMetadata", default=None)

    title: Optional[str] = None
22
src/opencode/types/source_url_part_param.py
Normal file
22
src/opencode/types/source_url_part_param.py
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

from typing import Dict
from typing_extensions import Literal, Required, Annotated, TypedDict

from .._utils import PropertyInfo

__all__ = ["SourceURLPartParam"]


class SourceURLPartParam(TypedDict, total=False):
    """Request-side shape of a source-URL message part (mirrors SourceURLPart)."""

    # Wire name is "sourceId" (lowercase d).
    source_id: Required[Annotated[str, PropertyInfo(alias="sourceId")]]

    type: Required[Literal["source-url"]]

    url: Required[str]

    # Optional (total=False); serialized as "providerMetadata".
    provider_metadata: Annotated[Dict[str, object], PropertyInfo(alias="providerMetadata")]

    title: str
11
src/opencode/types/step_start_part.py
Normal file
11
src/opencode/types/step_start_part.py
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing_extensions import Literal

from .._models import BaseModel

__all__ = ["StepStartPart"]


class StepStartPart(BaseModel):
    """A marker part with no payload (``type == "step-start"``)."""

    type: Literal["step-start"]
11
src/opencode/types/step_start_part_param.py
Normal file
11
src/opencode/types/step_start_part_param.py
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

from typing_extensions import Literal, Required, TypedDict

__all__ = ["StepStartPartParam"]


class StepStartPartParam(TypedDict, total=False):
    """Request-side shape of a step-start marker part (mirrors StepStartPart)."""

    type: Required[Literal["step-start"]]
13
src/opencode/types/text_part.py
Normal file
13
src/opencode/types/text_part.py
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing_extensions import Literal

from .._models import BaseModel

__all__ = ["TextPart"]


class TextPart(BaseModel):
    """A plain-text message part (``type == "text"``)."""

    text: str

    type: Literal["text"]
13
src/opencode/types/text_part_param.py
Normal file
13
src/opencode/types/text_part_param.py
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

from typing_extensions import Literal, Required, TypedDict

__all__ = ["TextPartParam"]


class TextPartParam(TypedDict, total=False):
    """Request-side shape of a plain-text message part (mirrors TextPart)."""

    text: Required[str]

    type: Required[Literal["text"]]
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue