mirror of
https://github.com/anomalyco/opencode-sdk-python.git
synced 2026-04-28 20:50:07 +00:00
feat(api): update via SDK Studio
This commit is contained in:
parent
ddb79fc19d
commit
a6cf7c5b2a
109 changed files with 159 additions and 153 deletions
100
src/opencode_ai/__init__.py
Normal file
100
src/opencode_ai/__init__.py
Normal file
|
|
@ -0,0 +1,100 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
import typing as _t
|
||||
|
||||
from . import types
|
||||
from ._types import NOT_GIVEN, Omit, NoneType, NotGiven, Transport, ProxiesTypes
|
||||
from ._utils import file_from_path
|
||||
from ._client import (
|
||||
Client,
|
||||
Stream,
|
||||
Timeout,
|
||||
Opencode,
|
||||
Transport,
|
||||
AsyncClient,
|
||||
AsyncStream,
|
||||
AsyncOpencode,
|
||||
RequestOptions,
|
||||
)
|
||||
from ._models import BaseModel
|
||||
from ._version import __title__, __version__
|
||||
from ._response import APIResponse as APIResponse, AsyncAPIResponse as AsyncAPIResponse
|
||||
from ._constants import DEFAULT_TIMEOUT, DEFAULT_MAX_RETRIES, DEFAULT_CONNECTION_LIMITS
|
||||
from ._exceptions import (
|
||||
APIError,
|
||||
ConflictError,
|
||||
NotFoundError,
|
||||
OpencodeError,
|
||||
APIStatusError,
|
||||
RateLimitError,
|
||||
APITimeoutError,
|
||||
BadRequestError,
|
||||
APIConnectionError,
|
||||
AuthenticationError,
|
||||
InternalServerError,
|
||||
PermissionDeniedError,
|
||||
UnprocessableEntityError,
|
||||
APIResponseValidationError,
|
||||
)
|
||||
from ._base_client import DefaultHttpxClient, DefaultAioHttpClient, DefaultAsyncHttpxClient
|
||||
from ._utils._logs import setup_logging as _setup_logging
|
||||
|
||||
# Public API of the `opencode_ai` package; order matters for `import *`.
__all__ = [
    "types", "__version__", "__title__",
    "NoneType", "Transport", "ProxiesTypes", "NotGiven", "NOT_GIVEN", "Omit",
    "OpencodeError", "APIError", "APIStatusError", "APITimeoutError",
    "APIConnectionError", "APIResponseValidationError",
    "BadRequestError", "AuthenticationError", "PermissionDeniedError",
    "NotFoundError", "ConflictError", "UnprocessableEntityError",
    "RateLimitError", "InternalServerError",
    "Timeout", "RequestOptions",
    "Client", "AsyncClient", "Stream", "AsyncStream", "Opencode", "AsyncOpencode",
    "file_from_path", "BaseModel",
    "DEFAULT_TIMEOUT", "DEFAULT_MAX_RETRIES", "DEFAULT_CONNECTION_LIMITS",
    "DefaultHttpxClient", "DefaultAsyncHttpxClient", "DefaultAioHttpClient",
]

if not _t.TYPE_CHECKING:
    from ._utils._resources_proxy import resources as resources

_setup_logging()

# Update the __module__ attribute for exported symbols so that
# error messages point to this module instead of the module
# it was originally defined in, e.g.
# opencode_ai._exceptions.NotFoundError -> opencode_ai.NotFoundError
__locals = locals()
for __name in __all__:
    if __name.startswith("__"):
        continue  # dunders such as __version__ are left untouched
    try:
        setattr(__locals[__name], "__module__", "opencode_ai")
    except (TypeError, AttributeError):
        # Some of our exported symbols are builtins which we can't set attributes for.
        pass
|
||||
1985
src/opencode_ai/_base_client.py
Normal file
1985
src/opencode_ai/_base_client.py
Normal file
File diff suppressed because it is too large
Load diff
393
src/opencode_ai/_client.py
Normal file
393
src/opencode_ai/_client.py
Normal file
|
|
@ -0,0 +1,393 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from typing import Any, Union, Mapping
|
||||
from typing_extensions import Self, override
|
||||
|
||||
import httpx
|
||||
|
||||
from . import _exceptions
|
||||
from ._qs import Querystring
|
||||
from ._types import (
|
||||
NOT_GIVEN,
|
||||
Omit,
|
||||
Timeout,
|
||||
NotGiven,
|
||||
Transport,
|
||||
ProxiesTypes,
|
||||
RequestOptions,
|
||||
)
|
||||
from ._utils import is_given, get_async_library
|
||||
from ._version import __version__
|
||||
from .resources import app, file, event, config, session
|
||||
from ._streaming import Stream as Stream, AsyncStream as AsyncStream
|
||||
from ._exceptions import APIStatusError
|
||||
from ._base_client import (
|
||||
DEFAULT_MAX_RETRIES,
|
||||
SyncAPIClient,
|
||||
AsyncAPIClient,
|
||||
)
|
||||
|
||||
# Names re-exported from this module; the package __init__ imports these by name.
__all__ = [
    "Timeout",
    "Transport",
    "ProxiesTypes",
    "RequestOptions",
    "Opencode",
    "AsyncOpencode",
    "Client",
    "AsyncClient",
]
|
||||
|
||||
|
||||
class Opencode(SyncAPIClient):
    """Synchronous client for the opencode API.

    Exposes one attribute per API resource (``event``, ``app``, ``file``,
    ``config``, ``session``) plus ``with_raw_response`` /
    ``with_streaming_response`` accessors wrapping those same resources.
    """

    event: event.EventResource
    app: app.AppResource
    file: file.FileResource
    config: config.ConfigResource
    session: session.SessionResource
    with_raw_response: OpencodeWithRawResponse
    with_streaming_response: OpencodeWithStreamedResponse

    # client options

    def __init__(
        self,
        *,
        base_url: str | httpx.URL | None = None,
        timeout: Union[float, Timeout, None, NotGiven] = NOT_GIVEN,
        max_retries: int = DEFAULT_MAX_RETRIES,
        default_headers: Mapping[str, str] | None = None,
        default_query: Mapping[str, object] | None = None,
        # Configure a custom httpx client.
        # We provide a `DefaultHttpxClient` class that you can pass to retain the default values we use for `limits`, `timeout` & `follow_redirects`.
        # See the [httpx documentation](https://www.python-httpx.org/api/#client) for more details.
        http_client: httpx.Client | None = None,
        # Enable or disable schema validation for data returned by the API.
        # When enabled an error APIResponseValidationError is raised
        # if the API responds with invalid data for the expected schema.
        #
        # This parameter may be removed or changed in the future.
        # If you rely on this feature, please open a GitHub issue
        # outlining your use-case to help us decide if it should be
        # part of our public interface in the future.
        _strict_response_validation: bool = False,
    ) -> None:
        """Construct a new synchronous Opencode client instance.

        The base URL is resolved in order: the explicit `base_url` argument,
        the `OPENCODE_BASE_URL` environment variable, then the default
        `http://localhost:54321`.
        """
        if base_url is None:
            base_url = os.environ.get("OPENCODE_BASE_URL")
        if base_url is None:
            # Fix: this was a pointless f-string with no placeholders (ruff F541);
            # a plain string literal is equivalent.
            base_url = "http://localhost:54321"

        super().__init__(
            version=__version__,
            base_url=base_url,
            max_retries=max_retries,
            timeout=timeout,
            http_client=http_client,
            custom_headers=default_headers,
            custom_query=default_query,
            _strict_response_validation=_strict_response_validation,
        )

        self.event = event.EventResource(self)
        self.app = app.AppResource(self)
        self.file = file.FileResource(self)
        self.config = config.ConfigResource(self)
        self.session = session.SessionResource(self)
        self.with_raw_response = OpencodeWithRawResponse(self)
        self.with_streaming_response = OpencodeWithStreamedResponse(self)

    @property
    @override
    def qs(self) -> Querystring:
        """Query-string serializer; array values are encoded comma-separated."""
        return Querystring(array_format="comma")

    @property
    @override
    def default_headers(self) -> dict[str, str | Omit]:
        """Base headers plus the sync marker, overridable by custom headers."""
        return {
            **super().default_headers,
            "X-Stainless-Async": "false",
            **self._custom_headers,
        }

    def copy(
        self,
        *,
        base_url: str | httpx.URL | None = None,
        timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
        http_client: httpx.Client | None = None,
        max_retries: int | NotGiven = NOT_GIVEN,
        default_headers: Mapping[str, str] | None = None,
        set_default_headers: Mapping[str, str] | None = None,
        default_query: Mapping[str, object] | None = None,
        set_default_query: Mapping[str, object] | None = None,
        # Never mutated (only unpacked below), so a shared empty-dict default is safe.
        _extra_kwargs: Mapping[str, Any] = {},
    ) -> Self:
        """
        Create a new client instance re-using the same options given to the current client with optional overriding.

        `default_*` arguments merge with the current values while `set_default_*`
        replace them outright; passing both for the same option raises ValueError.
        """
        if default_headers is not None and set_default_headers is not None:
            raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive")

        if default_query is not None and set_default_query is not None:
            raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive")

        headers = self._custom_headers
        if default_headers is not None:
            headers = {**headers, **default_headers}
        elif set_default_headers is not None:
            headers = set_default_headers

        params = self._custom_query
        if default_query is not None:
            params = {**params, **default_query}
        elif set_default_query is not None:
            params = set_default_query

        http_client = http_client or self._client
        return self.__class__(
            base_url=base_url or self.base_url,
            timeout=self.timeout if isinstance(timeout, NotGiven) else timeout,
            http_client=http_client,
            max_retries=max_retries if is_given(max_retries) else self.max_retries,
            default_headers=headers,
            default_query=params,
            **_extra_kwargs,
        )

    # Alias for `copy` for nicer inline usage, e.g.
    # client.with_options(timeout=10).foo.create(...)
    with_options = copy

    @override
    def _make_status_error(
        self,
        err_msg: str,
        *,
        body: object,
        response: httpx.Response,
    ) -> APIStatusError:
        """Map an HTTP status code to the matching APIStatusError subclass."""
        if response.status_code == 400:
            return _exceptions.BadRequestError(err_msg, response=response, body=body)

        if response.status_code == 401:
            return _exceptions.AuthenticationError(err_msg, response=response, body=body)

        if response.status_code == 403:
            return _exceptions.PermissionDeniedError(err_msg, response=response, body=body)

        if response.status_code == 404:
            return _exceptions.NotFoundError(err_msg, response=response, body=body)

        if response.status_code == 409:
            return _exceptions.ConflictError(err_msg, response=response, body=body)

        if response.status_code == 422:
            return _exceptions.UnprocessableEntityError(err_msg, response=response, body=body)

        if response.status_code == 429:
            return _exceptions.RateLimitError(err_msg, response=response, body=body)

        if response.status_code >= 500:
            return _exceptions.InternalServerError(err_msg, response=response, body=body)
        return APIStatusError(err_msg, response=response, body=body)
|
||||
|
||||
|
||||
class AsyncOpencode(AsyncAPIClient):
    """Asynchronous client for the opencode API.

    Async counterpart of `Opencode`: one attribute per API resource plus
    `with_raw_response` / `with_streaming_response` accessors.
    """

    event: event.AsyncEventResource
    app: app.AsyncAppResource
    file: file.AsyncFileResource
    config: config.AsyncConfigResource
    session: session.AsyncSessionResource
    with_raw_response: AsyncOpencodeWithRawResponse
    with_streaming_response: AsyncOpencodeWithStreamedResponse

    # client options

    def __init__(
        self,
        *,
        base_url: str | httpx.URL | None = None,
        timeout: Union[float, Timeout, None, NotGiven] = NOT_GIVEN,
        max_retries: int = DEFAULT_MAX_RETRIES,
        default_headers: Mapping[str, str] | None = None,
        default_query: Mapping[str, object] | None = None,
        # Configure a custom httpx client.
        # We provide a `DefaultAsyncHttpxClient` class that you can pass to retain the default values we use for `limits`, `timeout` & `follow_redirects`.
        # See the [httpx documentation](https://www.python-httpx.org/api/#asyncclient) for more details.
        http_client: httpx.AsyncClient | None = None,
        # Enable or disable schema validation for data returned by the API.
        # When enabled an error APIResponseValidationError is raised
        # if the API responds with invalid data for the expected schema.
        #
        # This parameter may be removed or changed in the future.
        # If you rely on this feature, please open a GitHub issue
        # outlining your use-case to help us decide if it should be
        # part of our public interface in the future.
        _strict_response_validation: bool = False,
    ) -> None:
        """Construct a new async AsyncOpencode client instance.

        The base URL is resolved in order: the explicit `base_url` argument,
        the `OPENCODE_BASE_URL` environment variable, then the default
        `http://localhost:54321`.
        """
        if base_url is None:
            base_url = os.environ.get("OPENCODE_BASE_URL")
        if base_url is None:
            # Fix: this was a pointless f-string with no placeholders (ruff F541);
            # a plain string literal is equivalent.
            base_url = "http://localhost:54321"

        super().__init__(
            version=__version__,
            base_url=base_url,
            max_retries=max_retries,
            timeout=timeout,
            http_client=http_client,
            custom_headers=default_headers,
            custom_query=default_query,
            _strict_response_validation=_strict_response_validation,
        )

        self.event = event.AsyncEventResource(self)
        self.app = app.AsyncAppResource(self)
        self.file = file.AsyncFileResource(self)
        self.config = config.AsyncConfigResource(self)
        self.session = session.AsyncSessionResource(self)
        self.with_raw_response = AsyncOpencodeWithRawResponse(self)
        self.with_streaming_response = AsyncOpencodeWithStreamedResponse(self)

    @property
    @override
    def qs(self) -> Querystring:
        """Query-string serializer; array values are encoded comma-separated."""
        return Querystring(array_format="comma")

    @property
    @override
    def default_headers(self) -> dict[str, str | Omit]:
        """Base headers plus the async-library marker, overridable by custom headers."""
        return {
            **super().default_headers,
            "X-Stainless-Async": f"async:{get_async_library()}",
            **self._custom_headers,
        }

    def copy(
        self,
        *,
        base_url: str | httpx.URL | None = None,
        timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
        http_client: httpx.AsyncClient | None = None,
        max_retries: int | NotGiven = NOT_GIVEN,
        default_headers: Mapping[str, str] | None = None,
        set_default_headers: Mapping[str, str] | None = None,
        default_query: Mapping[str, object] | None = None,
        set_default_query: Mapping[str, object] | None = None,
        # Never mutated (only unpacked below), so a shared empty-dict default is safe.
        _extra_kwargs: Mapping[str, Any] = {},
    ) -> Self:
        """
        Create a new client instance re-using the same options given to the current client with optional overriding.

        `default_*` arguments merge with the current values while `set_default_*`
        replace them outright; passing both for the same option raises ValueError.
        """
        if default_headers is not None and set_default_headers is not None:
            raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive")

        if default_query is not None and set_default_query is not None:
            raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive")

        headers = self._custom_headers
        if default_headers is not None:
            headers = {**headers, **default_headers}
        elif set_default_headers is not None:
            headers = set_default_headers

        params = self._custom_query
        if default_query is not None:
            params = {**params, **default_query}
        elif set_default_query is not None:
            params = set_default_query

        http_client = http_client or self._client
        return self.__class__(
            base_url=base_url or self.base_url,
            timeout=self.timeout if isinstance(timeout, NotGiven) else timeout,
            http_client=http_client,
            max_retries=max_retries if is_given(max_retries) else self.max_retries,
            default_headers=headers,
            default_query=params,
            **_extra_kwargs,
        )

    # Alias for `copy` for nicer inline usage, e.g.
    # client.with_options(timeout=10).foo.create(...)
    with_options = copy

    @override
    def _make_status_error(
        self,
        err_msg: str,
        *,
        body: object,
        response: httpx.Response,
    ) -> APIStatusError:
        """Map an HTTP status code to the matching APIStatusError subclass."""
        if response.status_code == 400:
            return _exceptions.BadRequestError(err_msg, response=response, body=body)

        if response.status_code == 401:
            return _exceptions.AuthenticationError(err_msg, response=response, body=body)

        if response.status_code == 403:
            return _exceptions.PermissionDeniedError(err_msg, response=response, body=body)

        if response.status_code == 404:
            return _exceptions.NotFoundError(err_msg, response=response, body=body)

        if response.status_code == 409:
            return _exceptions.ConflictError(err_msg, response=response, body=body)

        if response.status_code == 422:
            return _exceptions.UnprocessableEntityError(err_msg, response=response, body=body)

        if response.status_code == 429:
            return _exceptions.RateLimitError(err_msg, response=response, body=body)

        if response.status_code >= 500:
            return _exceptions.InternalServerError(err_msg, response=response, body=body)
        return APIStatusError(err_msg, response=response, body=body)
|
||||
|
||||
|
||||
class OpencodeWithRawResponse:
    """Wraps each resource of an `Opencode` client in its *WithRawResponse variant."""

    def __init__(self, client: Opencode) -> None:
        self.session = session.SessionResourceWithRawResponse(client.session)
        self.config = config.ConfigResourceWithRawResponse(client.config)
        self.file = file.FileResourceWithRawResponse(client.file)
        self.app = app.AppResourceWithRawResponse(client.app)
        self.event = event.EventResourceWithRawResponse(client.event)
|
||||
|
||||
|
||||
class AsyncOpencodeWithRawResponse:
    """Wraps each resource of an `AsyncOpencode` client in its Async*WithRawResponse variant."""

    def __init__(self, client: AsyncOpencode) -> None:
        self.session = session.AsyncSessionResourceWithRawResponse(client.session)
        self.config = config.AsyncConfigResourceWithRawResponse(client.config)
        self.file = file.AsyncFileResourceWithRawResponse(client.file)
        self.app = app.AsyncAppResourceWithRawResponse(client.app)
        self.event = event.AsyncEventResourceWithRawResponse(client.event)
|
||||
|
||||
|
||||
class OpencodeWithStreamedResponse:
    """Wraps each resource of an `Opencode` client in its *WithStreamingResponse variant."""

    def __init__(self, client: Opencode) -> None:
        self.session = session.SessionResourceWithStreamingResponse(client.session)
        self.config = config.ConfigResourceWithStreamingResponse(client.config)
        self.file = file.FileResourceWithStreamingResponse(client.file)
        self.app = app.AppResourceWithStreamingResponse(client.app)
        self.event = event.EventResourceWithStreamingResponse(client.event)
|
||||
|
||||
|
||||
class AsyncOpencodeWithStreamedResponse:
    """Wraps each resource of an `AsyncOpencode` client in its Async*WithStreamingResponse variant."""

    def __init__(self, client: AsyncOpencode) -> None:
        self.session = session.AsyncSessionResourceWithStreamingResponse(client.session)
        self.config = config.AsyncConfigResourceWithStreamingResponse(client.config)
        self.file = file.AsyncFileResourceWithStreamingResponse(client.file)
        self.app = app.AsyncAppResourceWithStreamingResponse(client.app)
        self.event = event.AsyncEventResourceWithStreamingResponse(client.event)
|
||||
|
||||
|
||||
# Convenience aliases for the concrete client classes.
Client = Opencode

AsyncClient = AsyncOpencode
|
||||
219
src/opencode_ai/_compat.py
Normal file
219
src/opencode_ai/_compat.py
Normal file
|
|
@ -0,0 +1,219 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any, Union, Generic, TypeVar, Callable, cast, overload
|
||||
from datetime import date, datetime
|
||||
from typing_extensions import Self, Literal
|
||||
|
||||
import pydantic
|
||||
from pydantic.fields import FieldInfo
|
||||
|
||||
from ._types import IncEx, StrBytesIntFloat
|
||||
|
||||
_T = TypeVar("_T")
|
||||
_ModelT = TypeVar("_ModelT", bound=pydantic.BaseModel)
|
||||
|
||||
# --------------- Pydantic v2 compatibility ---------------
|
||||
|
||||
# Pyright incorrectly reports some of our functions as overriding a method when they don't
|
||||
# pyright: reportIncompatibleMethodOverride=false
|
||||
|
||||
PYDANTIC_V2 = pydantic.VERSION.startswith("2.")
|
||||
|
||||
# v1 re-exports
# One stable import location for pydantic typing/date helpers that works on
# both pydantic majors (v2 vendors the v1 API under `pydantic.v1`).
if TYPE_CHECKING:
    # Typing-only stubs: type checkers see a single signature regardless of
    # which pydantic major is installed at runtime.

    def parse_date(value: date | StrBytesIntFloat) -> date:  # noqa: ARG001
        ...

    def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:  # noqa: ARG001
        ...

    def get_args(t: type[Any]) -> tuple[Any, ...]:  # noqa: ARG001
        ...

    def is_union(tp: type[Any] | None) -> bool:  # noqa: ARG001
        ...

    def get_origin(t: type[Any]) -> type[Any] | None:  # noqa: ARG001
        ...

    def is_literal_type(type_: type[Any]) -> bool:  # noqa: ARG001
        ...

    def is_typeddict(type_: type[Any]) -> bool:  # noqa: ARG001
        ...

else:
    if PYDANTIC_V2:
        # pydantic v2 exposes the entire v1 API under the `pydantic.v1` namespace.
        from pydantic.v1.typing import (
            get_args as get_args,
            is_union as is_union,
            get_origin as get_origin,
            is_typeddict as is_typeddict,
            is_literal_type as is_literal_type,
        )
        from pydantic.v1.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
    else:
        from pydantic.typing import (
            get_args as get_args,
            is_union as is_union,
            get_origin as get_origin,
            is_typeddict as is_typeddict,
            is_literal_type as is_literal_type,
        )
        from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
|
||||
|
||||
|
||||
# refactored config
if TYPE_CHECKING:
    from pydantic import ConfigDict as ConfigDict
else:
    if PYDANTIC_V2:
        from pydantic import ConfigDict
    else:
        # ConfigDict only exists in pydantic v2; under v1 the name is bound to
        # None, so any attempt to use it fails loudly (if unhelpfully).
        # TODO: provide an error message here?
        ConfigDict = None
|
||||
|
||||
|
||||
# renamed methods / properties
|
||||
def parse_obj(model: type[_ModelT], value: object) -> _ModelT:
    """Validate *value* into an instance of *model* across pydantic v1/v2."""
    if not PYDANTIC_V2:
        # v1 spelling; the cast silences the deprecated-typed return.
        return cast(_ModelT, model.parse_obj(value))  # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
    return model.model_validate(value)
|
||||
|
||||
|
||||
def field_is_required(field: FieldInfo) -> bool:
    """Whether *field* has no default and must be supplied by the caller."""
    if not PYDANTIC_V2:
        return field.required  # type: ignore
    return field.is_required()
|
||||
|
||||
|
||||
def field_get_default(field: FieldInfo) -> Any:
    """Return the field's default value, normalising "no default" to None.

    Pydantic v2 signals a missing default with the `PydanticUndefined`
    sentinel, which callers here should never see.
    """
    value = field.get_default()
    if PYDANTIC_V2:
        from pydantic_core import PydanticUndefined

        # PydanticUndefined is a singleton sentinel: identity comparison is the
        # idiomatic check (and doesn't rely on `__eq__` semantics of `value`).
        if value is PydanticUndefined:
            return None
    return value
|
||||
|
||||
|
||||
def field_outer_type(field: FieldInfo) -> Any:
    """Return the annotated (outer) type of *field*, across pydantic versions."""
    if not PYDANTIC_V2:
        return field.outer_type_  # type: ignore
    return field.annotation
|
||||
|
||||
|
||||
def get_model_config(model: type[pydantic.BaseModel]) -> Any:
    """Return the model's configuration (v2 `model_config` / v1 `__config__`)."""
    if not PYDANTIC_V2:
        return model.__config__  # type: ignore
    return model.model_config
|
||||
|
||||
|
||||
def get_model_fields(model: type[pydantic.BaseModel]) -> dict[str, FieldInfo]:
    """Return the model's field map (v2 `model_fields` / v1 `__fields__`)."""
    if not PYDANTIC_V2:
        return model.__fields__  # type: ignore
    return model.model_fields
|
||||
|
||||
|
||||
def model_copy(model: _ModelT, *, deep: bool = False) -> _ModelT:
    """Copy a model instance, optionally deep-copying nested data."""
    if not PYDANTIC_V2:
        return model.copy(deep=deep)  # type: ignore
    return model.model_copy(deep=deep)
|
||||
|
||||
|
||||
def model_json(model: pydantic.BaseModel, *, indent: int | None = None) -> str:
    """Serialise *model* to a JSON string across pydantic versions."""
    if not PYDANTIC_V2:
        return model.json(indent=indent)  # type: ignore
    return model.model_dump_json(indent=indent)
|
||||
|
||||
|
||||
def model_dump(
    model: pydantic.BaseModel,
    *,
    exclude: IncEx | None = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    warnings: bool = True,
    mode: Literal["json", "python"] = "python",
) -> dict[str, Any]:
    """Serialise *model* to a plain dict across pydantic versions."""
    if not (PYDANTIC_V2 or hasattr(model, "model_dump")):
        # pydantic v1 fallback: `mode` and `warnings` are not supported there.
        dumped = model.dict(  # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
            exclude=exclude,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
        )
        return cast("dict[str, Any]", dumped)
    return model.model_dump(
        mode=mode,
        exclude=exclude,
        exclude_unset=exclude_unset,
        exclude_defaults=exclude_defaults,
        # warnings are not supported in Pydantic v1
        warnings=warnings if PYDANTIC_V2 else True,
    )
|
||||
|
||||
|
||||
def model_parse(model: type[_ModelT], data: Any) -> _ModelT:
    """Parse *data* into *model* (v2 `model_validate` / v1 `parse_obj`)."""
    if not PYDANTIC_V2:
        return model.parse_obj(data)  # pyright: ignore[reportDeprecated]
    return model.model_validate(data)
|
||||
|
||||
|
||||
# generic models
# A single `GenericModel` base usable on both pydantic majors.
if TYPE_CHECKING:

    class GenericModel(pydantic.BaseModel): ...

else:
    if PYDANTIC_V2:
        # there no longer needs to be a distinction in v2 but
        # we still have to create our own subclass to avoid
        # inconsistent MRO ordering errors
        class GenericModel(pydantic.BaseModel): ...

    else:
        import pydantic.generics

        # v1 keeps generic-model support in a dedicated base class.
        class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ...
|
||||
|
||||
|
||||
# cached properties
if TYPE_CHECKING:
    cached_property = property

    # we define a separate type (copied from typeshed)
    # that represents that `cached_property` is `set`able
    # at runtime, which differs from `@property`.
    #
    # this is a separate type as editors likely special case
    # `@property` and we don't want to cause issues just to have
    # more helpful internal types.

    class typed_cached_property(Generic[_T]):
        # the wrapped function and the attribute name it is bound to
        func: Callable[[Any], _T]
        attrname: str | None

        def __init__(self, func: Callable[[Any], _T]) -> None: ...

        @overload
        def __get__(self, instance: None, owner: type[Any] | None = None) -> Self: ...

        @overload
        def __get__(self, instance: object, owner: type[Any] | None = None) -> _T: ...

        def __get__(self, instance: object, owner: type[Any] | None = None) -> _T | Self:
            raise NotImplementedError()

        def __set_name__(self, owner: type[Any], name: str) -> None: ...

        # __set__ is not defined at runtime, but @cached_property is designed to be settable
        def __set__(self, instance: object, value: _T) -> None: ...
else:
    from functools import cached_property as cached_property

    # At runtime the "typed" variant is simply functools.cached_property.
    typed_cached_property = cached_property
|
||||
14
src/opencode_ai/_constants.py
Normal file
14
src/opencode_ai/_constants.py
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
import httpx
|
||||
|
||||
# NOTE(review): presumably internal marker headers used by the request/response
# machinery — confirm against _base_client.py.
RAW_RESPONSE_HEADER = "X-Stainless-Raw-Response"
OVERRIDE_CAST_TO_HEADER = "____stainless_override_cast_to"

# default timeout is 1 minute
DEFAULT_TIMEOUT = httpx.Timeout(timeout=60, connect=5.0)
DEFAULT_MAX_RETRIES = 2
DEFAULT_CONNECTION_LIMITS = httpx.Limits(max_connections=100, max_keepalive_connections=20)

# Retry backoff bounds in seconds (usage lives outside this view — TODO confirm).
INITIAL_RETRY_DELAY = 0.5
MAX_RETRY_DELAY = 8.0
|
||||
108
src/opencode_ai/_exceptions.py
Normal file
108
src/opencode_ai/_exceptions.py
Normal file
|
|
@ -0,0 +1,108 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing_extensions import Literal
|
||||
|
||||
import httpx
|
||||
|
||||
# Public names for `from opencode_ai._exceptions import *`.
# NOTE(review): only the concrete HTTP-status subclasses are listed; the base
# classes (OpencodeError, APIError, APIStatusError, ...) are imported by name
# in the package __init__ instead — confirm the omission is intentional.
__all__ = [
    "BadRequestError",
    "AuthenticationError",
    "PermissionDeniedError",
    "NotFoundError",
    "ConflictError",
    "UnprocessableEntityError",
    "RateLimitError",
    "InternalServerError",
]
|
||||
|
||||
|
||||
class OpencodeError(Exception):
    """Root of the SDK's exception hierarchy."""
|
||||
|
||||
|
||||
class APIError(OpencodeError):
    """Base class for errors that carry the originating HTTP request."""

    message: str
    request: httpx.Request

    body: object | None
    """The API response body.

    If the API responded with a valid JSON structure then this property will be the
    decoded result.

    If it isn't a valid JSON structure then this will be the raw response.

    If there was no response associated with this error then it will be `None`.
    """

    def __init__(self, message: str, request: httpx.Request, *, body: object | None) -> None:  # noqa: ARG002
        super().__init__(message)
        self.message = message
        self.request = request
        self.body = body
|
||||
|
||||
|
||||
class APIResponseValidationError(APIError):
    """Raised when the API returns data that fails schema validation."""

    response: httpx.Response
    status_code: int

    def __init__(self, response: httpx.Response, body: object | None, *, message: str | None = None) -> None:
        detail = message or "Data returned by API invalid for expected schema."
        super().__init__(detail, response.request, body=body)
        self.status_code = response.status_code
        self.response = response
|
||||
|
||||
|
||||
class APIStatusError(APIError):
    """Raised when an API response has a status code of 4xx or 5xx."""

    response: httpx.Response
    status_code: int

    def __init__(self, message: str, *, response: httpx.Response, body: object | None) -> None:
        super().__init__(message, response.request, body=body)
        self.status_code = response.status_code
        self.response = response
|
||||
|
||||
|
||||
class APIConnectionError(APIError):
    """Raised when a request could not be completed (no response received)."""

    def __init__(self, *, message: str = "Connection error.", request: httpx.Request) -> None:
        super().__init__(message, request, body=None)
|
||||
|
||||
|
||||
class APITimeoutError(APIConnectionError):
    """Connection error specialised for request timeouts."""

    def __init__(self, request: httpx.Request) -> None:
        super().__init__(message="Request timed out.", request=request)
|
||||
|
||||
|
||||
class BadRequestError(APIStatusError):
    """Error for HTTP 400 responses."""

    status_code: Literal[400] = 400  # pyright: ignore[reportIncompatibleVariableOverride]
|
||||
|
||||
|
||||
class AuthenticationError(APIStatusError):
    """Error for HTTP 401 responses."""

    status_code: Literal[401] = 401  # pyright: ignore[reportIncompatibleVariableOverride]
|
||||
|
||||
|
||||
class PermissionDeniedError(APIStatusError):
    """Error for HTTP 403 responses."""

    status_code: Literal[403] = 403  # pyright: ignore[reportIncompatibleVariableOverride]
|
||||
|
||||
|
||||
class NotFoundError(APIStatusError):
    """Error for HTTP 404 responses."""

    status_code: Literal[404] = 404  # pyright: ignore[reportIncompatibleVariableOverride]
|
||||
|
||||
|
||||
class ConflictError(APIStatusError):
    """Error for HTTP 409 responses."""

    status_code: Literal[409] = 409  # pyright: ignore[reportIncompatibleVariableOverride]
|
||||
|
||||
|
||||
class UnprocessableEntityError(APIStatusError):
    """Error for HTTP 422 responses."""

    status_code: Literal[422] = 422  # pyright: ignore[reportIncompatibleVariableOverride]
|
||||
|
||||
|
||||
class RateLimitError(APIStatusError):
    """Error for HTTP 429 responses."""

    status_code: Literal[429] = 429  # pyright: ignore[reportIncompatibleVariableOverride]
|
||||
|
||||
|
||||
class InternalServerError(APIStatusError):
    """Error for 5xx responses; keeps the inherited `int` status_code (no single code)."""
|
||||
123
src/opencode_ai/_files.py
Normal file
123
src/opencode_ai/_files.py
Normal file
|
|
@ -0,0 +1,123 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import io
|
||||
import os
|
||||
import pathlib
|
||||
from typing import overload
|
||||
from typing_extensions import TypeGuard
|
||||
|
||||
import anyio
|
||||
|
||||
from ._types import (
|
||||
FileTypes,
|
||||
FileContent,
|
||||
RequestFiles,
|
||||
HttpxFileTypes,
|
||||
Base64FileInput,
|
||||
HttpxFileContent,
|
||||
HttpxRequestFiles,
|
||||
)
|
||||
from ._utils import is_tuple_t, is_mapping_t, is_sequence_t
|
||||
|
||||
|
||||
def is_base64_file_input(obj: object) -> TypeGuard[Base64FileInput]:
    """True when *obj* is a file-like object (io.IOBase) or a path (os.PathLike)."""
    return isinstance(obj, (io.IOBase, os.PathLike))
|
||||
|
||||
|
||||
def is_file_content(obj: object) -> TypeGuard[FileContent]:
    """True when *obj* is an accepted raw file-content value."""
    return isinstance(obj, (bytes, tuple, io.IOBase, os.PathLike))
|
||||
|
||||
|
||||
def assert_is_file_content(obj: object, *, key: str | None = None) -> None:
    """Raise ``RuntimeError`` unless *obj* passes ``is_file_content``.

    *key* (when given) names the mapping entry being validated so the error
    message can point at the offending field.
    """
    if is_file_content(obj):
        return

    prefix = f"Expected entry at `{key}`" if key is not None else f"Expected file input `{obj!r}`"
    raise RuntimeError(
        f"{prefix} to be bytes, an io.IOBase instance, PathLike or a tuple but received {type(obj)} instead."
    ) from None
||||
|
||||
|
||||
@overload
def to_httpx_files(files: None) -> None: ...


@overload
def to_httpx_files(files: RequestFiles) -> HttpxRequestFiles: ...


def to_httpx_files(files: RequestFiles | None) -> HttpxRequestFiles | None:
    """Convert user-supplied request files (a mapping or a sequence of pairs) into the shape httpx expects.

    Raises:
        TypeError: if *files* is neither a mapping nor a sequence.
    """
    if files is None:
        return None

    if is_mapping_t(files):
        return {name: _transform_file(contents) for name, contents in files.items()}
    if is_sequence_t(files):
        return [(name, _transform_file(contents)) for name, contents in files]
    raise TypeError(f"Unexpected file type input {type(files)}, expected mapping or sequence")
||||
|
||||
|
||||
def _transform_file(file: FileTypes) -> HttpxFileTypes:
    """Normalize a single file entry for httpx.

    Path-like content is eagerly read into a ``(filename, bytes)`` pair; a
    ``(name, content, ...)`` tuple gets its content element read if it is a
    path; any other accepted content passes through unchanged.

    Raises:
        TypeError: if *file* is neither file content nor a tuple.
    """
    if is_file_content(file):
        if isinstance(file, os.PathLike):
            path = pathlib.Path(file)
            return (path.name, path.read_bytes())

        return file

    if is_tuple_t(file):
        return (file[0], _read_file_content(file[1]), *file[2:])

    # fix: the original used an `f`-string with no placeholders (ruff F541)
    raise TypeError("Expected file types input to be a FileContent type or to be a tuple")
||||
|
||||
|
||||
def _read_file_content(file: FileContent) -> HttpxFileContent:
    """Materialize path-like content into bytes; anything already loaded is returned untouched."""
    return pathlib.Path(file).read_bytes() if isinstance(file, os.PathLike) else file
||||
|
||||
|
||||
@overload
async def async_to_httpx_files(files: None) -> None: ...


@overload
async def async_to_httpx_files(files: RequestFiles) -> HttpxRequestFiles: ...


async def async_to_httpx_files(files: RequestFiles | None) -> HttpxRequestFiles | None:
    """Async variant of `to_httpx_files`; path-like contents are read via `_async_transform_file`.

    Raises:
        TypeError: if *files* is neither a mapping nor a sequence of pairs.
    """
    if files is None:
        return None

    if is_mapping_t(files):
        files = {key: await _async_transform_file(file) for key, file in files.items()}
    elif is_sequence_t(files):
        files = [(key, await _async_transform_file(file)) for key, file in files]
    else:
        # bug fix: the original message lacked the `f` prefix, so callers saw the
        # literal text "{type(files)}" instead of the offending type (the sync
        # twin `to_httpx_files` formats it correctly).
        raise TypeError(f"Unexpected file type input {type(files)}, expected mapping or sequence")

    return files
||||
|
||||
|
||||
async def _async_transform_file(file: FileTypes) -> HttpxFileTypes:
    """Async counterpart of `_transform_file`: path-likes are read through `anyio.Path`.

    Raises:
        TypeError: if *file* is neither file content nor a tuple.
    """
    if is_file_content(file):
        if isinstance(file, os.PathLike):
            path = anyio.Path(file)
            return (path.name, await path.read_bytes())

        return file

    if is_tuple_t(file):
        return (file[0], await _async_read_file_content(file[1]), *file[2:])

    # fix: the original used an `f`-string with no placeholders (ruff F541)
    raise TypeError("Expected file types input to be a FileContent type or to be a tuple")
||||
|
||||
|
||||
async def _async_read_file_content(file: FileContent) -> HttpxFileContent:
    """Read path-like content through anyio; pass through anything already materialized."""
    if not isinstance(file, os.PathLike):
        return file

    return await anyio.Path(file).read_bytes()
||||
805
src/opencode_ai/_models.py
Normal file
805
src/opencode_ai/_models.py
Normal file
|
|
@ -0,0 +1,805 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import inspect
|
||||
from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, cast
|
||||
from datetime import date, datetime
|
||||
from typing_extensions import (
|
||||
Unpack,
|
||||
Literal,
|
||||
ClassVar,
|
||||
Protocol,
|
||||
Required,
|
||||
ParamSpec,
|
||||
TypedDict,
|
||||
TypeGuard,
|
||||
final,
|
||||
override,
|
||||
runtime_checkable,
|
||||
)
|
||||
|
||||
import pydantic
|
||||
from pydantic.fields import FieldInfo
|
||||
|
||||
from ._types import (
|
||||
Body,
|
||||
IncEx,
|
||||
Query,
|
||||
ModelT,
|
||||
Headers,
|
||||
Timeout,
|
||||
NotGiven,
|
||||
AnyMapping,
|
||||
HttpxRequestFiles,
|
||||
)
|
||||
from ._utils import (
|
||||
PropertyInfo,
|
||||
is_list,
|
||||
is_given,
|
||||
json_safe,
|
||||
lru_cache,
|
||||
is_mapping,
|
||||
parse_date,
|
||||
coerce_boolean,
|
||||
parse_datetime,
|
||||
strip_not_given,
|
||||
extract_type_arg,
|
||||
is_annotated_type,
|
||||
is_type_alias_type,
|
||||
strip_annotated_type,
|
||||
)
|
||||
from ._compat import (
|
||||
PYDANTIC_V2,
|
||||
ConfigDict,
|
||||
GenericModel as BaseGenericModel,
|
||||
get_args,
|
||||
is_union,
|
||||
parse_obj,
|
||||
get_origin,
|
||||
is_literal_type,
|
||||
get_model_config,
|
||||
get_model_fields,
|
||||
field_get_default,
|
||||
)
|
||||
from ._constants import RAW_RESPONSE_HEADER
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pydantic_core.core_schema import ModelField, ModelSchema, LiteralSchema, ModelFieldsSchema
|
||||
|
||||
__all__ = ["BaseModel", "GenericModel"]

# Generic placeholders used throughout this module.
_T = TypeVar("_T")
_BaseModelT = TypeVar("_BaseModelT", bound="BaseModel")

# Captures the parameter list of a model constructor for `build()`.
P = ParamSpec("P")
||||
|
||||
@runtime_checkable
class _ConfigProtocol(Protocol):
    """Structural type matching a pydantic v1 ``Config`` class (checked via ``isinstance`` in ``BaseModel.construct``)."""

    allow_population_by_field_name: bool
||||
|
||||
|
||||
class BaseModel(pydantic.BaseModel):
    """Base class for all generated API models; smooths over pydantic v1/v2 differences."""

    if PYDANTIC_V2:
        # `extra="allow"` keeps unknown API fields instead of dropping them;
        # deferred schema building can be toggled via DEFER_PYDANTIC_BUILD.
        model_config: ClassVar[ConfigDict] = ConfigDict(
            extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true"))
        )
    else:

        @property
        @override
        def model_fields_set(self) -> set[str]:
            # a forwards-compat shim for pydantic v2
            return self.__fields_set__  # type: ignore

        class Config(pydantic.BaseConfig):  # pyright: ignore[reportDeprecated]
            extra: Any = pydantic.Extra.allow  # type: ignore

    def to_dict(
        self,
        *,
        mode: Literal["json", "python"] = "python",
        use_api_names: bool = True,
        exclude_unset: bool = True,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
        warnings: bool = True,
    ) -> dict[str, object]:
        """Recursively generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

        By default, fields that were not set by the API will not be included,
        and keys will match the API response, *not* the property names from the model.

        For example, if the API responds with `"fooBar": true` but we've defined a `foo_bar: bool` property,
        the output will use the `"fooBar"` key (unless `use_api_names=False` is passed).

        Args:
            mode:
                If mode is 'json', the dictionary will only contain JSON serializable types. e.g. `datetime` will be turned into a string, `"2024-3-22T18:11:19.117000Z"`.
                If mode is 'python', the dictionary may contain any Python objects. e.g. `datetime(2024, 3, 22)`

            use_api_names: Whether to use the key that the API responded with or the property name. Defaults to `True`.
            exclude_unset: Whether to exclude fields that have not been explicitly set.
            exclude_defaults: Whether to exclude fields that are set to their default value from the output.
            exclude_none: Whether to exclude fields that have a value of `None` from the output.
            warnings: Whether to log warnings when invalid fields are encountered. This is only supported in Pydantic v2.
        """
        return self.model_dump(
            mode=mode,
            by_alias=use_api_names,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
            warnings=warnings,
        )

    def to_json(
        self,
        *,
        indent: int | None = 2,
        use_api_names: bool = True,
        exclude_unset: bool = True,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
        warnings: bool = True,
    ) -> str:
        """Generates a JSON string representing this model as it would be received from or sent to the API (but with indentation).

        By default, fields that were not set by the API will not be included,
        and keys will match the API response, *not* the property names from the model.

        For example, if the API responds with `"fooBar": true` but we've defined a `foo_bar: bool` property,
        the output will use the `"fooBar"` key (unless `use_api_names=False` is passed).

        Args:
            indent: Indentation to use in the JSON output. If `None` is passed, the output will be compact. Defaults to `2`
            use_api_names: Whether to use the key that the API responded with or the property name. Defaults to `True`.
            exclude_unset: Whether to exclude fields that have not been explicitly set.
            exclude_defaults: Whether to exclude fields that have the default value.
            exclude_none: Whether to exclude fields that have a value of `None`.
            warnings: Whether to show any warnings that occurred during serialization. This is only supported in Pydantic v2.
        """
        return self.model_dump_json(
            indent=indent,
            by_alias=use_api_names,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
            warnings=warnings,
        )

    @override
    def __str__(self) -> str:
        # mypy complains about an invalid self arg
        return f"{self.__repr_name__()}({self.__repr_str__(', ')})"  # type: ignore[misc]

    # Override the 'construct' method in a way that supports recursive parsing without validation.
    # Based on https://github.com/samuelcolvin/pydantic/issues/1168#issuecomment-817742836.
    @classmethod
    @override
    def construct(  # pyright: ignore[reportIncompatibleMethodOverride]
        __cls: Type[ModelT],
        _fields_set: set[str] | None = None,
        **values: object,
    ) -> ModelT:
        """Build a model instance from raw values without running validation, recursing into nested models."""
        m = __cls.__new__(__cls)
        fields_values: dict[str, object] = {}

        config = get_model_config(__cls)
        # v1 exposes `allow_population_by_field_name` on a Config class; v2 uses
        # a `populate_by_name` key in a ConfigDict-style mapping.
        populate_by_name = (
            config.allow_population_by_field_name
            if isinstance(config, _ConfigProtocol)
            else config.get("populate_by_name")
        )

        if _fields_set is None:
            _fields_set = set()

        model_fields = get_model_fields(__cls)
        for name, field in model_fields.items():
            # prefer the API alias; fall back to the property name when the alias
            # is absent from `values` and populate-by-name is enabled
            key = field.alias
            if key is None or (key not in values and populate_by_name):
                key = name

            if key in values:
                fields_values[name] = _construct_field(value=values[key], field=field, key=key)
                _fields_set.add(name)
            else:
                fields_values[name] = field_get_default(field)

        # collect unknown keys: v2 stores them as "extra", v1 puts them straight
        # into the instance dict
        _extra = {}
        for key, value in values.items():
            if key not in model_fields:
                if PYDANTIC_V2:
                    _extra[key] = value
                else:
                    _fields_set.add(key)
                    fields_values[key] = value

        object.__setattr__(m, "__dict__", fields_values)

        if PYDANTIC_V2:
            # these properties are copied from Pydantic's `model_construct()` method
            object.__setattr__(m, "__pydantic_private__", None)
            object.__setattr__(m, "__pydantic_extra__", _extra)
            object.__setattr__(m, "__pydantic_fields_set__", _fields_set)
        else:
            # init_private_attributes() does not exist in v2
            m._init_private_attributes()  # type: ignore

            # copied from Pydantic v1's `construct()` method
            object.__setattr__(m, "__fields_set__", _fields_set)

        return m

    if not TYPE_CHECKING:
        # type checkers incorrectly complain about this assignment
        # because the type signatures are technically different
        # although not in practice
        model_construct = construct

    if not PYDANTIC_V2:
        # we define aliases for some of the new pydantic v2 methods so
        # that we can just document these methods without having to specify
        # a specific pydantic version as some users may not know which
        # pydantic version they are currently using

        @override
        def model_dump(
            self,
            *,
            mode: Literal["json", "python"] | str = "python",
            include: IncEx | None = None,
            exclude: IncEx | None = None,
            by_alias: bool = False,
            exclude_unset: bool = False,
            exclude_defaults: bool = False,
            exclude_none: bool = False,
            round_trip: bool = False,
            warnings: bool | Literal["none", "warn", "error"] = True,
            context: dict[str, Any] | None = None,
            serialize_as_any: bool = False,
        ) -> dict[str, Any]:
            """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump

            Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

            Args:
                mode: The mode in which `to_python` should run.
                    If mode is 'json', the dictionary will only contain JSON serializable types.
                    If mode is 'python', the dictionary may contain any Python objects.
                include: A list of fields to include in the output.
                exclude: A list of fields to exclude from the output.
                by_alias: Whether to use the field's alias in the dictionary key if defined.
                exclude_unset: Whether to exclude fields that are unset or None from the output.
                exclude_defaults: Whether to exclude fields that are set to their default value from the output.
                exclude_none: Whether to exclude fields that have a value of `None` from the output.
                round_trip: Whether to enable serialization and deserialization round-trip support.
                warnings: Whether to log warnings when invalid fields are encountered.

            Returns:
                A dictionary representation of the model.
            """
            # The v2-only keyword arguments are accepted for signature compatibility
            # but rejected at runtime on v1.
            if mode not in {"json", "python"}:
                raise ValueError("mode must be either 'json' or 'python'")
            if round_trip != False:
                raise ValueError("round_trip is only supported in Pydantic v2")
            if warnings != True:
                raise ValueError("warnings is only supported in Pydantic v2")
            if context is not None:
                raise ValueError("context is only supported in Pydantic v2")
            if serialize_as_any != False:
                raise ValueError("serialize_as_any is only supported in Pydantic v2")
            dumped = super().dict(  # pyright: ignore[reportDeprecated]
                include=include,
                exclude=exclude,
                by_alias=by_alias,
                exclude_unset=exclude_unset,
                exclude_defaults=exclude_defaults,
                exclude_none=exclude_none,
            )

            return cast(dict[str, Any], json_safe(dumped)) if mode == "json" else dumped

        @override
        def model_dump_json(
            self,
            *,
            indent: int | None = None,
            include: IncEx | None = None,
            exclude: IncEx | None = None,
            by_alias: bool = False,
            exclude_unset: bool = False,
            exclude_defaults: bool = False,
            exclude_none: bool = False,
            round_trip: bool = False,
            warnings: bool | Literal["none", "warn", "error"] = True,
            context: dict[str, Any] | None = None,
            serialize_as_any: bool = False,
        ) -> str:
            """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump_json

            Generates a JSON representation of the model using Pydantic's `to_json` method.

            Args:
                indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
                include: Field(s) to include in the JSON output. Can take either a string or set of strings.
                exclude: Field(s) to exclude from the JSON output. Can take either a string or set of strings.
                by_alias: Whether to serialize using field aliases.
                exclude_unset: Whether to exclude fields that have not been explicitly set.
                exclude_defaults: Whether to exclude fields that have the default value.
                exclude_none: Whether to exclude fields that have a value of `None`.
                round_trip: Whether to use serialization/deserialization between JSON and class instance.
                warnings: Whether to show any warnings that occurred during serialization.

            Returns:
                A JSON string representation of the model.
            """
            # As in model_dump(): v2-only options are rejected on v1.
            if round_trip != False:
                raise ValueError("round_trip is only supported in Pydantic v2")
            if warnings != True:
                raise ValueError("warnings is only supported in Pydantic v2")
            if context is not None:
                raise ValueError("context is only supported in Pydantic v2")
            if serialize_as_any != False:
                raise ValueError("serialize_as_any is only supported in Pydantic v2")
            return super().json(  # type: ignore[reportDeprecated]
                indent=indent,
                include=include,
                exclude=exclude,
                by_alias=by_alias,
                exclude_unset=exclude_unset,
                exclude_defaults=exclude_defaults,
                exclude_none=exclude_none,
            )
||||
|
||||
|
||||
def _construct_field(value: object, field: FieldInfo, key: str) -> object:
    """Construct one model field's value; a missing/None API value falls back to the field default."""
    if value is None:
        return field_get_default(field)

    # pydantic v2 exposes the annotation directly; v1 keeps it on `outer_type_`
    annotation = field.annotation if PYDANTIC_V2 else cast(type, field.outer_type_)  # type: ignore
    if annotation is None:
        raise RuntimeError(f"Unexpected field type is None for {key}")

    return construct_type(value=value, type_=annotation)
||||
|
||||
|
||||
def is_basemodel(type_: type) -> bool:
    """Returns whether or not the given type is either a `BaseModel` or a union of `BaseModel`"""
    if is_union(type_):
        return any(is_basemodel(variant) for variant in get_args(type_))

    return is_basemodel_type(type_)
||||
|
||||
|
||||
def is_basemodel_type(type_: type) -> TypeGuard[type[BaseModel] | type[GenericModel]]:
    """True when *type_* (or its generic origin) is a class deriving from our BaseModel or GenericModel."""
    origin = get_origin(type_) or type_
    return inspect.isclass(origin) and issubclass(origin, (BaseModel, GenericModel))
||||
|
||||
|
||||
def build(
    base_model_cls: Callable[P, _BaseModelT],
    *args: P.args,
    **kwargs: P.kwargs,
) -> _BaseModelT:
    """Construct a BaseModel class without validation.

    This is useful for cases where you need to instantiate a `BaseModel`
    from an API response as this provides type-safe params which isn't supported
    by helpers like `construct_type()`.

    ```py
    build(MyModel, my_field_a="foo", my_field_b=123)
    ```
    """
    if len(args) > 0:
        raise TypeError(
            "Received positional arguments which are not supported; Keyword arguments must be used instead",
        )

    constructed = construct_type(type_=base_model_cls, value=kwargs)
    return cast(_BaseModelT, constructed)
||||
|
||||
|
||||
def construct_type_unchecked(*, value: object, type_: type[_T]) -> _T:
    """Loose coercion to the expected type with construction of nested values.

    Note: the returned value from this function is not guaranteed to match the
    given type.
    """
    # Thin typed wrapper over `construct_type` that lets callers keep the `_T` type.
    return cast(_T, construct_type(value=value, type_=type_))
||||
|
||||
|
||||
def construct_type(*, value: object, type_: object) -> object:
    """Loose coercion to the expected type with construction of nested values.

    If the given value does not match the expected type then it is returned as-is.
    """

    # store a reference to the original type we were given before we extract any inner
    # types so that we can properly resolve forward references in `TypeAliasType` annotations
    original_type = None

    # we allow `object` as the input type because otherwise, passing things like
    # `Literal['value']` will be reported as a type error by type checkers
    type_ = cast("type[object]", type_)
    if is_type_alias_type(type_):
        original_type = type_  # type: ignore[unreachable]
        type_ = type_.__value__  # type: ignore[unreachable]

    # unwrap `Annotated[T, ...]` -> `T`
    if is_annotated_type(type_):
        meta: tuple[Any, ...] = get_args(type_)[1:]
        type_ = extract_type_arg(type_, 0)
    else:
        meta = tuple()

    # we need to use the origin class for any types that are subscripted generics
    # e.g. Dict[str, object]
    origin = get_origin(type_) or type_
    args = get_args(type_)

    if is_union(origin):
        # first try strict validation of the whole union
        try:
            return validate_type(type_=cast("type[object]", original_type or type_), value=value)
        except Exception:
            pass

        # if the type is a discriminated union then we want to construct the right variant
        # in the union, even if the data doesn't match exactly, otherwise we'd break code
        # that relies on the constructed class types, e.g.
        #
        # class FooType:
        #     kind: Literal['foo']
        #     value: str
        #
        # class BarType:
        #     kind: Literal['bar']
        #     value: int
        #
        # without this block, if the data we get is something like `{'kind': 'bar', 'value': 'foo'}` then
        # we'd end up constructing `FooType` when it should be `BarType`.
        discriminator = _build_discriminated_union_meta(union=type_, meta_annotations=meta)
        if discriminator and is_mapping(value):
            variant_value = value.get(discriminator.field_alias_from or discriminator.field_name)
            if variant_value and isinstance(variant_value, str):
                variant_type = discriminator.mapping.get(variant_value)
                if variant_type:
                    return construct_type(type_=variant_type, value=value)

        # if the data is not valid, use the first variant that doesn't fail while deserializing
        for variant in args:
            try:
                return construct_type(value=value, type_=variant)
            except Exception:
                continue

        raise RuntimeError(f"Could not convert data into a valid instance of {type_}")

    if origin == dict:
        if not is_mapping(value):
            return value

        _, items_type = get_args(type_)  # Dict[_, items_type]
        return {key: construct_type(value=item, type_=items_type) for key, item in value.items()}

    if (
        not is_literal_type(type_)
        and inspect.isclass(origin)
        and (issubclass(origin, BaseModel) or issubclass(origin, GenericModel))
    ):
        if is_list(value):
            return [cast(Any, type_).construct(**entry) if is_mapping(entry) else entry for entry in value]

        if is_mapping(value):
            if issubclass(type_, BaseModel):
                return type_.construct(**value)  # type: ignore[arg-type]

            return cast(Any, type_).construct(**value)

    if origin == list:
        if not is_list(value):
            return value

        inner_type = args[0]  # List[inner_type]
        return [construct_type(value=entry, type_=inner_type) for entry in value]

    if origin == float:
        if isinstance(value, int):
            coerced = float(value)
            # keep the original int when float conversion would lose precision
            if coerced != value:
                return value
            return coerced

        return value

    if type_ == datetime:
        try:
            return parse_datetime(value)  # type: ignore
        except Exception:
            return value

    if type_ == date:
        try:
            return parse_date(value)  # type: ignore
        except Exception:
            return value

    return value
||||
|
||||
|
||||
@runtime_checkable
class CachedDiscriminatorType(Protocol):
    """Structural type for a union object whose discriminator metadata has already been computed and cached."""

    __discriminator__: DiscriminatorDetails
||||
|
||||
|
||||
class DiscriminatorDetails:
    """Pre-computed metadata describing the discriminator field of a tagged union."""

    field_name: str
    """The name of the discriminator field in the variant class, e.g.

    ```py
    class Foo(BaseModel):
        type: Literal['foo']
    ```

    Will result in field_name='type'
    """

    field_alias_from: str | None
    """The name of the discriminator field in the API response, e.g.

    ```py
    class Foo(BaseModel):
        type: Literal['foo'] = Field(alias='type_from_api')
    ```

    Will result in field_alias_from='type_from_api'
    """

    mapping: dict[str, type]
    """Mapping of discriminator value to variant type, e.g.

    {'foo': FooVariant, 'bar': BarVariant}
    """

    def __init__(
        self,
        *,
        mapping: dict[str, type],
        discriminator_field: str,
        discriminator_alias: str | None,
    ) -> None:
        self.mapping = mapping
        self.field_name = discriminator_field
        self.field_alias_from = discriminator_alias
||||
|
||||
|
||||
def _build_discriminated_union_meta(*, union: type, meta_annotations: tuple[Any, ...]) -> DiscriminatorDetails | None:
    """Compute (and cache on the union object) discriminator metadata from `PropertyInfo` annotations.

    Returns None when no discriminator is declared or no variant exposes a
    string literal value for it.
    """
    # reuse previously computed metadata when present
    if isinstance(union, CachedDiscriminatorType):
        return union.__discriminator__

    discriminator_field_name: str | None = None

    # the discriminator field name comes from a `PropertyInfo` annotation on the union
    for annotation in meta_annotations:
        if isinstance(annotation, PropertyInfo) and annotation.discriminator is not None:
            discriminator_field_name = annotation.discriminator
            break

    if not discriminator_field_name:
        return None

    mapping: dict[str, type] = {}
    discriminator_alias: str | None = None

    for variant in get_args(union):
        variant = strip_annotated_type(variant)
        if is_basemodel_type(variant):
            if PYDANTIC_V2:
                field = _extract_field_schema_pv2(variant, discriminator_field_name)
                if not field:
                    continue

                # Note: if one variant defines an alias then they all should
                discriminator_alias = field.get("serialization_alias")

                field_schema = field["schema"]

                # only string literal values can be used as discriminator keys
                if field_schema["type"] == "literal":
                    for entry in cast("LiteralSchema", field_schema)["expected"]:
                        if isinstance(entry, str):
                            mapping[entry] = variant
            else:
                field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name)  # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
                if not field_info:
                    continue

                # Note: if one variant defines an alias then they all should
                discriminator_alias = field_info.alias

                if (annotation := getattr(field_info, "annotation", None)) and is_literal_type(annotation):
                    for entry in get_args(annotation):
                        if isinstance(entry, str):
                            mapping[entry] = variant

    if not mapping:
        return None

    details = DiscriminatorDetails(
        mapping=mapping,
        discriminator_field=discriminator_field_name,
        discriminator_alias=discriminator_alias,
    )
    # cache for subsequent calls (see CachedDiscriminatorType)
    cast(CachedDiscriminatorType, union).__discriminator__ = details
    return details
||||
|
||||
|
||||
def _extract_field_schema_pv2(model: type[BaseModel], field_name: str) -> ModelField | None:
    """Dig the pydantic-core field schema for *field_name* out of a v2 model, or None when the schema shape doesn't match."""
    schema = model.__pydantic_core_schema__
    if schema["type"] == "definitions":
        schema = schema["schema"]

    if schema["type"] != "model":
        return None

    model_schema = cast("ModelSchema", schema)
    fields_schema = model_schema["schema"]
    if fields_schema["type"] != "model-fields":
        return None

    field = cast("ModelFieldsSchema", fields_schema)["fields"].get(field_name)
    return cast("ModelField", field) if field else None  # pyright: ignore[reportUnnecessaryCast]
||||
|
||||
|
||||
def validate_type(*, type_: type[_T], value: object) -> _T:
    """Strict validation that the given value matches the expected type"""
    is_model_class = inspect.isclass(type_) and issubclass(type_, pydantic.BaseModel)
    if is_model_class:
        return cast(_T, parse_obj(type_, value))

    return cast(_T, _validate_non_model_type(type_=type_, value=value))
||||
|
||||
|
||||
def set_pydantic_config(typ: Any, config: pydantic.ConfigDict) -> None:
    """Attach a pydantic config to *typ* by setting its ``__pydantic_config__`` attribute.

    Note: this is a no-op on Pydantic v1.
    """
    # setattr (rather than direct assignment) because `typ` is an arbitrary object
    setattr(typ, "__pydantic_config__", config)  # noqa: B010
||||
|
||||
|
||||
# our use of subclassing here causes weirdness for type checkers,
|
||||
# so we just pretend that we don't subclass
|
||||
if TYPE_CHECKING:
    # For type checkers, generic models are indistinguishable from BaseModel
    # (see the comment above about subclassing weirdness).
    GenericModel = BaseModel
else:

    class GenericModel(BaseGenericModel, BaseModel):
        pass
||||
|
||||
|
||||
if PYDANTIC_V2:
    from pydantic import TypeAdapter as _TypeAdapter

    # cache one adapter per type so repeated validations reuse it
    _CachedTypeAdapter = cast("TypeAdapter[object]", lru_cache(maxsize=None)(_TypeAdapter))

    if TYPE_CHECKING:
        from pydantic import TypeAdapter
    else:
        TypeAdapter = _CachedTypeAdapter

    def _validate_non_model_type(*, type_: type[_T], value: object) -> _T:
        """Strictly validate *value* against an arbitrary (non-BaseModel) type via pydantic v2."""
        return TypeAdapter(type_).validate_python(value)

elif not TYPE_CHECKING:  # TODO: condition is weird

    class RootModel(GenericModel, Generic[_T]):
        """Used as a placeholder to easily convert runtime types to a Pydantic format
        to provide validation.

        For example:
        ```py
        validated = RootModel[int](__root__="5").__root__
        # validated: 5
        ```
        """

        __root__: _T

    def _validate_non_model_type(*, type_: type[_T], value: object) -> _T:
        """Pydantic v1 fallback: wrap the type in a RootModel to validate it."""
        model = _create_pydantic_model(type_).validate(value)
        return cast(_T, model.__root__)

    def _create_pydantic_model(type_: _T) -> Type[RootModel[_T]]:
        return RootModel[type_]  # type: ignore
||||
|
||||
|
||||
class FinalRequestOptionsInput(TypedDict, total=False):
    """Keyword-argument shape accepted by ``FinalRequestOptions.construct``."""

    method: Required[str]
    url: Required[str]
    params: Query
    headers: Headers
    max_retries: int
    timeout: float | Timeout | None
    files: HttpxRequestFiles | None
    idempotency_key: str
    json_data: Body
    extra_json: AnyMapping
    follow_redirects: bool
||||
|
||||
|
||||
@final
class FinalRequestOptions(pydantic.BaseModel):
    """Fully-resolved options for a single HTTP request."""

    method: str
    url: str
    params: Query = {}
    headers: Union[Headers, NotGiven] = NotGiven()
    max_retries: Union[int, NotGiven] = NotGiven()
    timeout: Union[float, Timeout, None, NotGiven] = NotGiven()
    files: Union[HttpxRequestFiles, None] = None
    idempotency_key: Union[str, None] = None
    post_parser: Union[Callable[[Any], Any], NotGiven] = NotGiven()
    follow_redirects: Union[bool, None] = None

    # It should be noted that we cannot use `json` here as that would override
    # a BaseModel method in an incompatible fashion.
    json_data: Union[Body, None] = None
    extra_json: Union[AnyMapping, None] = None

    if PYDANTIC_V2:
        model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True)
    else:

        class Config(pydantic.BaseConfig):  # pyright: ignore[reportDeprecated]
            arbitrary_types_allowed: bool = True

    def get_max_retries(self, max_retries: int) -> int:
        """Return the configured max retries, falling back to *max_retries* when not given."""
        if isinstance(self.max_retries, NotGiven):
            return max_retries
        return self.max_retries

    def _strip_raw_response_header(self) -> None:
        # Remove the internal raw-response marker header before sending;
        # copies the headers mapping first to avoid mutating a shared dict.
        if not is_given(self.headers):
            return

        if self.headers.get(RAW_RESPONSE_HEADER):
            self.headers = {**self.headers}
            self.headers.pop(RAW_RESPONSE_HEADER)

    # override the `construct` method so that we can run custom transformations.
    # this is necessary as we don't want to do any actual runtime type checking
    # (which means we can't use validators) but we do want to ensure that `NotGiven`
    # values are not present
    #
    # type ignore required because we're adding explicit types to `**values`
    @classmethod
    def construct(  # type: ignore
        cls,
        _fields_set: set[str] | None = None,
        **values: Unpack[FinalRequestOptionsInput],
    ) -> FinalRequestOptions:
        kwargs: dict[str, Any] = {
            # we unconditionally call `strip_not_given` on any value
            # as it will just ignore any non-mapping types
            key: strip_not_given(value)
            for key, value in values.items()
        }
        if PYDANTIC_V2:
            return super().model_construct(_fields_set, **kwargs)
        return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs))  # pyright: ignore[reportDeprecated]

    if not TYPE_CHECKING:
        # type checkers incorrectly complain about this assignment
        model_construct = construct
||||
150
src/opencode_ai/_qs.py
Normal file
150
src/opencode_ai/_qs.py
Normal file
|
|
@ -0,0 +1,150 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from typing import Any, List, Tuple, Union, Mapping, TypeVar
|
||||
from urllib.parse import parse_qs, urlencode
|
||||
from typing_extensions import Literal, get_args
|
||||
|
||||
from ._types import NOT_GIVEN, NotGiven, NotGivenOr
|
||||
from ._utils import flatten
|
||||
|
||||
_T = TypeVar("_T")


# Supported encodings for sequence values ("a=1&a=2" vs "a=1,2" vs "a[]=1")
# and for flattening nested mapping keys ("a.b" vs "a[b]").
ArrayFormat = Literal["comma", "repeat", "indices", "brackets"]
NestedFormat = Literal["dots", "brackets"]

PrimitiveData = Union[str, int, float, bool, None]
# this should be Data = Union[PrimitiveData, "List[Data]", "Tuple[Data]", "Mapping[str, Data]"]
# https://github.com/microsoft/pyright/issues/3555
Data = Union[PrimitiveData, List[Any], Tuple[Any], "Mapping[str, Any]"]
Params = Mapping[str, Data]
|
||||
|
||||
|
||||
class Querystring:
    """Serialises nested mappings and sequences into URL query strings.

    `array_format` controls how sequence values are encoded and
    `nested_format` controls how nested mapping keys are flattened; both can
    be overridden per call.
    """

    array_format: ArrayFormat
    nested_format: NestedFormat

    def __init__(
        self,
        *,
        array_format: ArrayFormat = "repeat",
        nested_format: NestedFormat = "brackets",
    ) -> None:
        self.array_format = array_format
        self.nested_format = nested_format

    def parse(self, query: str) -> Mapping[str, object]:
        # Note: custom format syntax is not supported yet
        return parse_qs(query)

    def stringify(
        self,
        params: Params,
        *,
        array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN,
        nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN,
    ) -> str:
        """Return *params* encoded as a percent-escaped query string."""
        pairs = self.stringify_items(
            params,
            array_format=array_format,
            nested_format=nested_format,
        )
        return urlencode(pairs)

    def stringify_items(
        self,
        params: Params,
        *,
        array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN,
        nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN,
    ) -> list[tuple[str, str]]:
        """Return *params* flattened into a list of `(key, value)` string pairs."""
        resolved = Options(
            qs=self,
            array_format=array_format,
            nested_format=nested_format,
        )
        return flatten([self._stringify_item(key, value, resolved) for key, value in params.items()])

    def _stringify_item(
        self,
        key: str,
        value: Data,
        opts: Options,
    ) -> list[tuple[str, str]]:
        # Nested mapping: recurse with a compound key per the nested format.
        if isinstance(value, Mapping):
            pairs: list[tuple[str, str]] = []
            for child_key, child_value in value.items():
                # TODO: error if unknown format
                compound = f"{key}.{child_key}" if opts.nested_format == "dots" else f"{key}[{child_key}]"
                pairs.extend(self._stringify_item(compound, child_value, opts))
            return pairs

        # Sequence: encoding depends on the resolved array format.
        if isinstance(value, (list, tuple)):
            array_format = opts.array_format
            if array_format == "comma":
                # `None` entries are dropped from the comma-joined value.
                joined = ",".join(self._primitive_value_to_str(entry) for entry in value if entry is not None)
                return [(key, joined)]
            if array_format == "repeat":
                return [pair for entry in value for pair in self._stringify_item(key, entry, opts)]
            if array_format == "indices":
                raise NotImplementedError("The array indices format is not supported yet")
            if array_format == "brackets":
                bracketed = key + "[]"
                return [pair for entry in value for pair in self._stringify_item(bracketed, entry, opts)]
            raise NotImplementedError(
                f"Unknown array_format value: {array_format}, choose from {', '.join(get_args(ArrayFormat))}"
            )

        # Primitive: empty serialisations (None / "") are omitted entirely.
        serialised = self._primitive_value_to_str(value)
        if not serialised:
            return []
        return [(key, serialised)]

    def _primitive_value_to_str(self, value: PrimitiveData) -> str:
        # copied from httpx — identity checks are deliberate: the int `1`
        # must stringify as "1", never "true".
        if value is None:
            return ""
        if value is True:
            return "true"
        if value is False:
            return "false"
        return str(value)
|
||||
|
||||
|
||||
# Module-level default instance plus convenience aliases so callers can use
# `parse(...)` / `stringify(...)` without constructing a Querystring.
_qs = Querystring()
parse = _qs.parse
stringify = _qs.stringify
stringify_items = _qs.stringify_items
|
||||
|
||||
|
||||
class Options:
    """Formatting options resolved for a single stringify call.

    Anything not explicitly given falls back to the owning Querystring's
    configured defaults.
    """

    array_format: ArrayFormat
    nested_format: NestedFormat

    def __init__(
        self,
        qs: Querystring = _qs,
        *,
        array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN,
        nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN,
    ) -> None:
        if isinstance(array_format, NotGiven):
            self.array_format = qs.array_format
        else:
            self.array_format = array_format
        if isinstance(nested_format, NotGiven):
            self.nested_format = qs.nested_format
        else:
            self.nested_format = nested_format
|
||||
43
src/opencode_ai/_resource.py
Normal file
43
src/opencode_ai/_resource.py
Normal file
|
|
@ -0,0 +1,43 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import time
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import anyio
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ._client import Opencode, AsyncOpencode
|
||||
|
||||
|
||||
class SyncAPIResource:
    """Base class for synchronous resource namespaces.

    Stores the owning client and aliases its HTTP request helpers as private
    attributes so subclasses can write e.g. ``self._get(...)``.
    """

    _client: Opencode

    def __init__(self, client: Opencode) -> None:
        self._client = client
        # Bind the client's request helpers onto this resource instance.
        for name in ("get", "post", "patch", "put", "delete", "get_api_list"):
            setattr(self, f"_{name}", getattr(client, name))

    def _sleep(self, seconds: float) -> None:
        # Indirection point so waiting can be stubbed out in tests.
        time.sleep(seconds)
|
||||
|
||||
|
||||
class AsyncAPIResource:
    """Base class for asynchronous resource namespaces.

    Stores the owning async client and aliases its HTTP request helpers as
    private attributes so subclasses can write e.g. ``await self._get(...)``.
    """

    _client: AsyncOpencode

    def __init__(self, client: AsyncOpencode) -> None:
        self._client = client
        # Bind the client's request helpers onto this resource instance.
        for name in ("get", "post", "patch", "put", "delete", "get_api_list"):
            setattr(self, f"_{name}", getattr(client, name))

    async def _sleep(self, seconds: float) -> None:
        # Indirection point so waiting can be stubbed out in tests.
        await anyio.sleep(seconds)
|
||||
832
src/opencode_ai/_response.py
Normal file
832
src/opencode_ai/_response.py
Normal file
|
|
@ -0,0 +1,832 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import inspect
|
||||
import logging
|
||||
import datetime
|
||||
import functools
|
||||
from types import TracebackType
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
Union,
|
||||
Generic,
|
||||
TypeVar,
|
||||
Callable,
|
||||
Iterator,
|
||||
AsyncIterator,
|
||||
cast,
|
||||
overload,
|
||||
)
|
||||
from typing_extensions import Awaitable, ParamSpec, override, get_origin
|
||||
|
||||
import anyio
|
||||
import httpx
|
||||
import pydantic
|
||||
|
||||
from ._types import NoneType
|
||||
from ._utils import is_given, extract_type_arg, is_annotated_type, is_type_alias_type, extract_type_var_from_base
|
||||
from ._models import BaseModel, is_basemodel
|
||||
from ._constants import RAW_RESPONSE_HEADER, OVERRIDE_CAST_TO_HEADER
|
||||
from ._streaming import Stream, AsyncStream, is_stream_class_type, extract_stream_chunk_type
|
||||
from ._exceptions import OpencodeError, APIResponseValidationError
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ._models import FinalRequestOptions
|
||||
from ._base_client import BaseClient
|
||||
|
||||
|
||||
# Type variables used throughout this module.
P = ParamSpec("P")  # parameters of a wrapped API method
R = TypeVar("R")  # the parsed response type
_T = TypeVar("_T")
_APIResponseT = TypeVar("_APIResponseT", bound="APIResponse[Any]")
_AsyncAPIResponseT = TypeVar("_AsyncAPIResponseT", bound="AsyncAPIResponse[Any]")

log: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class BaseAPIResponse(Generic[R]):
    """Shared state and parsing logic for sync and async API responses.

    Wraps the underlying `httpx.Response` together with everything needed to
    turn its body into the declared `cast_to` type (or an SSE stream).
    """

    _cast_to: type[R]
    _client: BaseClient[Any, Any]
    # cache of parsed results keyed by target type, so repeated `.parse()`
    # calls don't re-deserialize the body
    _parsed_by_type: dict[type[Any], Any]
    _is_sse_stream: bool
    _stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None
    _options: FinalRequestOptions

    http_response: httpx.Response

    retries_taken: int
    """The number of retries made. If no retries happened this will be `0`"""

    def __init__(
        self,
        *,
        raw: httpx.Response,
        cast_to: type[R],
        client: BaseClient[Any, Any],
        stream: bool,
        stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None,
        options: FinalRequestOptions,
        retries_taken: int = 0,
    ) -> None:
        self._cast_to = cast_to
        self._client = client
        self._parsed_by_type = {}
        self._is_sse_stream = stream
        self._stream_cls = stream_cls
        self._options = options
        self.http_response = raw
        self.retries_taken = retries_taken

    @property
    def headers(self) -> httpx.Headers:
        """The response headers."""
        return self.http_response.headers

    @property
    def http_request(self) -> httpx.Request:
        """Returns the httpx Request instance associated with the current response."""
        return self.http_response.request

    @property
    def status_code(self) -> int:
        """The HTTP status code of the response."""
        return self.http_response.status_code

    @property
    def url(self) -> httpx.URL:
        """Returns the URL for which the request was made."""
        return self.http_response.url

    @property
    def method(self) -> str:
        """The HTTP method of the originating request."""
        return self.http_request.method

    @property
    def http_version(self) -> str:
        """The HTTP protocol version used, e.g. "HTTP/1.1"."""
        return self.http_response.http_version

    @property
    def elapsed(self) -> datetime.timedelta:
        """The time taken for the complete request/response cycle to complete."""
        return self.http_response.elapsed

    @property
    def is_closed(self) -> bool:
        """Whether or not the response body has been closed.

        If this is False then there is response data that has not been read yet.
        You must either fully consume the response body or call `.close()`
        before discarding the response to prevent resource leaks.
        """
        return self.http_response.is_closed

    @override
    def __repr__(self) -> str:
        return (
            f"<{self.__class__.__name__} [{self.status_code} {self.http_response.reason_phrase}] type={self._cast_to}>"
        )

    def _parse(self, *, to: type[_T] | None = None) -> R | _T:
        """Convert the raw response into *to* (or the stored `_cast_to`).

        Handles, in order: SSE stream construction, `None`/primitive casts,
        `httpx.Response` passthrough, model/dict/list/Union deserialization,
        and a plain-text fallback for non-JSON content types.
        """
        cast_to = to if to is not None else self._cast_to

        # unwrap `TypeAlias('Name', T)` -> `T`
        if is_type_alias_type(cast_to):
            cast_to = cast_to.__value__  # type: ignore[unreachable]

        # unwrap `Annotated[T, ...]` -> `T`
        if cast_to and is_annotated_type(cast_to):
            cast_to = extract_type_arg(cast_to, 0)

        origin = get_origin(cast_to) or cast_to

        if self._is_sse_stream:
            # Stream class resolution order: explicit `to` override, then the
            # per-request `_stream_cls`, then the client-wide default.
            if to:
                if not is_stream_class_type(to):
                    raise TypeError(f"Expected custom parse type to be a subclass of {Stream} or {AsyncStream}")

                return cast(
                    _T,
                    to(
                        cast_to=extract_stream_chunk_type(
                            to,
                            failure_message="Expected custom stream type to be passed with a type argument, e.g. Stream[ChunkType]",
                        ),
                        response=self.http_response,
                        client=cast(Any, self._client),
                    ),
                )

            if self._stream_cls:
                return cast(
                    R,
                    self._stream_cls(
                        cast_to=extract_stream_chunk_type(self._stream_cls),
                        response=self.http_response,
                        client=cast(Any, self._client),
                    ),
                )

            stream_cls = cast("type[Stream[Any]] | type[AsyncStream[Any]] | None", self._client._default_stream_cls)
            if stream_cls is None:
                raise MissingStreamClassError()

            return cast(
                R,
                stream_cls(
                    cast_to=cast_to,
                    response=self.http_response,
                    client=cast(Any, self._client),
                ),
            )

        if cast_to is NoneType:
            return cast(R, None)

        response = self.http_response
        if cast_to == str:
            return cast(R, response.text)

        if cast_to == bytes:
            return cast(R, response.content)

        if cast_to == int:
            return cast(R, int(response.text))

        if cast_to == float:
            return cast(R, float(response.text))

        if cast_to == bool:
            return cast(R, response.text.lower() == "true")

        if origin == APIResponse:
            raise RuntimeError("Unexpected state - cast_to is `APIResponse`")

        if inspect.isclass(origin) and issubclass(origin, httpx.Response):
            # Because of the invariance of our ResponseT TypeVar, users can subclass httpx.Response
            # and pass that class to our request functions. We cannot change the variance to be either
            # covariant or contravariant as that makes our usage of ResponseT illegal. We could construct
            # the response class ourselves but that is something that should be supported directly in httpx
            # as it would be easy to incorrectly construct the Response object due to the multitude of arguments.
            if cast_to != httpx.Response:
                raise ValueError(f"Subclasses of httpx.Response cannot be passed to `cast_to`")
            return cast(R, response)

        if (
            inspect.isclass(
                origin  # pyright: ignore[reportUnknownArgumentType]
            )
            and not issubclass(origin, BaseModel)
            and issubclass(origin, pydantic.BaseModel)
        ):
            raise TypeError(
                "Pydantic models must subclass our base model type, e.g. `from opencode_ai import BaseModel`"
            )

        if (
            cast_to is not object
            and not origin is list
            and not origin is dict
            and not origin is Union
            and not issubclass(origin, BaseModel)
        ):
            raise RuntimeError(
                f"Unsupported type, expected {cast_to} to be a subclass of {BaseModel}, {dict}, {list}, {Union}, {NoneType}, {str} or {httpx.Response}."
            )

        # split is required to handle cases where additional information is included
        # in the response, e.g. application/json; charset=utf-8
        content_type, *_ = response.headers.get("content-type", "*").split(";")
        if not content_type.endswith("json"):
            # Non-JSON content type: still attempt a model parse in case the
            # server mislabelled a JSON body, otherwise fall through.
            if is_basemodel(cast_to):
                try:
                    data = response.json()
                except Exception as exc:
                    log.debug("Could not read JSON from response data due to %s - %s", type(exc), exc)
                else:
                    return self._client._process_response_data(
                        data=data,
                        cast_to=cast_to,  # type: ignore
                        response=response,
                    )

            if self._client._strict_response_validation:
                raise APIResponseValidationError(
                    response=response,
                    message=f"Expected Content-Type response header to be `application/json` but received `{content_type}` instead.",
                    body=response.text,
                )

            # If the API responds with content that isn't JSON then we just return
            # the (decoded) text without performing any parsing so that you can still
            # handle the response however you need to.
            return response.text  # type: ignore

        data = response.json()

        return self._client._process_response_data(
            data=data,
            cast_to=cast_to,  # type: ignore
            response=response,
        )
|
||||
|
||||
|
||||
class APIResponse(BaseAPIResponse[R]):
    """Synchronous API response wrapper exposing parsing and body helpers."""

    @overload
    def parse(self, *, to: type[_T]) -> _T: ...

    @overload
    def parse(self) -> R: ...

    def parse(self, *, to: type[_T] | None = None) -> R | _T:
        """Returns the rich python representation of this response's data.

        For lower-level control, see `.read()`, `.json()`, `.iter_bytes()`.

        You can customise the type that the response is parsed into through
        the `to` argument, e.g.

        ```py
        from opencode_ai import BaseModel


        class MyModel(BaseModel):
            foo: str


        obj = response.parse(to=MyModel)
        print(obj.foo)
        ```

        We support parsing:
        - `BaseModel`
        - `dict`
        - `list`
        - `Union`
        - `str`
        - `int`
        - `float`
        - `httpx.Response`
        """
        # Results are memoised per target type so repeat calls are cheap.
        cache_key = to if to is not None else self._cast_to
        cached = self._parsed_by_type.get(cache_key)
        if cached is not None:
            return cached  # type: ignore[no-any-return]

        # Fully consume the body first (unless streaming SSE) so `_parse`
        # can access `.text` / `.json()`.
        if not self._is_sse_stream:
            self.read()

        parsed = self._parse(to=to)
        if is_given(self._options.post_parser):
            parsed = self._options.post_parser(parsed)

        self._parsed_by_type[cache_key] = parsed
        return parsed

    def read(self) -> bytes:
        """Read and return the binary response content."""
        try:
            return self.http_response.read()
        except httpx.StreamConsumed as exc:
            # The default error raised by httpx isn't very
            # helpful in our case so we re-raise it with
            # a different error message.
            raise StreamAlreadyConsumed() from exc

    def text(self) -> str:
        """Read and decode the response content into a string."""
        self.read()
        return self.http_response.text

    def json(self) -> object:
        """Read and decode the JSON response content."""
        self.read()
        return self.http_response.json()

    def close(self) -> None:
        """Close the response and release the connection.

        Automatically called if the response body is read to completion.
        """
        self.http_response.close()

    def iter_bytes(self, chunk_size: int | None = None) -> Iterator[bytes]:
        """
        A byte-iterator over the decoded response content.

        This automatically handles gzip, deflate and brotli encoded responses.
        """
        for chunk in self.http_response.iter_bytes(chunk_size):
            yield chunk

    def iter_text(self, chunk_size: int | None = None) -> Iterator[str]:
        """A str-iterator over the decoded response content
        that handles both gzip, deflate, etc but also detects the content's
        string encoding.
        """
        for chunk in self.http_response.iter_text(chunk_size):
            yield chunk

    def iter_lines(self) -> Iterator[str]:
        """Like `iter_text()` but will only yield chunks for each line"""
        for chunk in self.http_response.iter_lines():
            yield chunk
|
||||
|
||||
|
||||
class AsyncAPIResponse(BaseAPIResponse[R]):
    """Asynchronous API response wrapper exposing parsing and body helpers."""

    @overload
    async def parse(self, *, to: type[_T]) -> _T: ...

    @overload
    async def parse(self) -> R: ...

    async def parse(self, *, to: type[_T] | None = None) -> R | _T:
        """Returns the rich python representation of this response's data.

        For lower-level control, see `.read()`, `.json()`, `.iter_bytes()`.

        You can customise the type that the response is parsed into through
        the `to` argument, e.g.

        ```py
        from opencode_ai import BaseModel


        class MyModel(BaseModel):
            foo: str


        obj = response.parse(to=MyModel)
        print(obj.foo)
        ```

        We support parsing:
        - `BaseModel`
        - `dict`
        - `list`
        - `Union`
        - `str`
        - `int`
        - `float`
        - `httpx.Response`
        """
        # Results are memoised per target type so repeat calls are cheap.
        cache_key = to if to is not None else self._cast_to
        cached = self._parsed_by_type.get(cache_key)
        if cached is not None:
            return cached  # type: ignore[no-any-return]

        # Fully consume the body first (unless streaming SSE) so `_parse`
        # can access `.text` / `.json()`.
        if not self._is_sse_stream:
            await self.read()

        parsed = self._parse(to=to)
        if is_given(self._options.post_parser):
            parsed = self._options.post_parser(parsed)

        self._parsed_by_type[cache_key] = parsed
        return parsed

    async def read(self) -> bytes:
        """Read and return the binary response content."""
        try:
            return await self.http_response.aread()
        except httpx.StreamConsumed as exc:
            # the default error raised by httpx isn't very
            # helpful in our case so we re-raise it with
            # a different error message
            raise StreamAlreadyConsumed() from exc

    async def text(self) -> str:
        """Read and decode the response content into a string."""
        await self.read()
        return self.http_response.text

    async def json(self) -> object:
        """Read and decode the JSON response content."""
        await self.read()
        return self.http_response.json()

    async def close(self) -> None:
        """Close the response and release the connection.

        Automatically called if the response body is read to completion.
        """
        await self.http_response.aclose()

    async def iter_bytes(self, chunk_size: int | None = None) -> AsyncIterator[bytes]:
        """
        A byte-iterator over the decoded response content.

        This automatically handles gzip, deflate and brotli encoded responses.
        """
        async for chunk in self.http_response.aiter_bytes(chunk_size):
            yield chunk

    async def iter_text(self, chunk_size: int | None = None) -> AsyncIterator[str]:
        """A str-iterator over the decoded response content
        that handles both gzip, deflate, etc but also detects the content's
        string encoding.
        """
        async for chunk in self.http_response.aiter_text(chunk_size):
            yield chunk

    async def iter_lines(self) -> AsyncIterator[str]:
        """Like `iter_text()` but will only yield chunks for each line"""
        async for chunk in self.http_response.aiter_lines():
            yield chunk
|
||||
|
||||
|
||||
class BinaryAPIResponse(APIResponse[bytes]):
    """Subclass of APIResponse providing helpers for dealing with binary data.

    Note: If you want to stream the response data instead of eagerly reading it
    all at once then you should use `.with_streaming_response` when making
    the API request, e.g. `.with_streaming_response.get_binary_response()`
    """

    def write_to_file(
        self,
        file: str | os.PathLike[str],
    ) -> None:
        """Write the output to the given file.

        Accepts a filename or any path-like object, e.g. pathlib.Path

        Note: if you want to stream the data to the file instead of writing
        all at once then you should use `.with_streaming_response` when making
        the API request, e.g. `.with_streaming_response.get_binary_response()`
        """
        with open(file, mode="wb") as f:
            # writelines() accepts any iterable of bytes chunks; it does not
            # add separators, so the body is written verbatim.
            f.writelines(self.iter_bytes())
|
||||
|
||||
|
||||
class AsyncBinaryAPIResponse(AsyncAPIResponse[bytes]):
    """Subclass of APIResponse providing helpers for dealing with binary data.

    Note: If you want to stream the response data instead of eagerly reading it
    all at once then you should use `.with_streaming_response` when making
    the API request, e.g. `.with_streaming_response.get_binary_response()`
    """

    async def write_to_file(
        self,
        file: str | os.PathLike[str],
    ) -> None:
        """Write the output to the given file.

        Accepts a filename or any path-like object, e.g. pathlib.Path

        Note: if you want to stream the data to the file instead of writing
        all at once then you should use `.with_streaming_response` when making
        the API request, e.g. `.with_streaming_response.get_binary_response()`
        """
        target = anyio.Path(file)
        handle = await target.open(mode="wb")
        async with handle as f:
            async for chunk in self.iter_bytes():
                await f.write(chunk)
|
||||
|
||||
|
||||
class StreamedBinaryAPIResponse(APIResponse[bytes]):
    """APIResponse subclass for streaming binary bodies to disk chunk by chunk."""

    def stream_to_file(
        self,
        file: str | os.PathLike[str],
        *,
        chunk_size: int | None = None,
    ) -> None:
        """Streams the output to the given file.

        Accepts a filename or any path-like object, e.g. pathlib.Path
        """
        with open(file, mode="wb") as f:
            # writelines() consumes the chunk iterator lazily, so only one
            # chunk is held in memory at a time.
            f.writelines(self.iter_bytes(chunk_size))
|
||||
|
||||
|
||||
class AsyncStreamedBinaryAPIResponse(AsyncAPIResponse[bytes]):
    """Async APIResponse subclass for streaming binary bodies to disk chunk by chunk."""

    async def stream_to_file(
        self,
        file: str | os.PathLike[str],
        *,
        chunk_size: int | None = None,
    ) -> None:
        """Streams the output to the given file.

        Accepts a filename or any path-like object, e.g. pathlib.Path
        """
        target = anyio.Path(file)
        handle = await target.open(mode="wb")
        async with handle as f:
            async for chunk in self.iter_bytes(chunk_size):
                await f.write(chunk)
|
||||
|
||||
|
||||
class MissingStreamClassError(TypeError):
    """Raised when a streaming request is made without any stream class to
    construct — neither per-request nor as a client default."""

    def __init__(self) -> None:
        message = "The `stream` argument was set to `True` but the `stream_cls` argument was not given. See `opencode_ai._streaming` for reference"
        super().__init__(message)
|
||||
|
||||
|
||||
class StreamAlreadyConsumed(OpencodeError):
    """
    Attempted to read or stream content, but the content has already
    been streamed.

    This can happen if you use a method like `.iter_lines()` and then attempt
    to read the entire response body afterwards, e.g.

    ```py
    response = await client.post(...)
    async for line in response.iter_lines():
        ...  # do something with `line`

    content = await response.read()
    # ^ error
    ```

    If you want this behaviour you'll need to either manually accumulate the response
    content or call `await response.read()` before iterating over the stream.
    """

    def __init__(self) -> None:
        super().__init__(
            "Attempted to read or stream some content, but the content has "
            "already been streamed. "
            "This could be due to attempting to stream the response "
            "content more than once."
            "\n\n"
            "You can fix this by manually accumulating the response content while streaming "
            "or by calling `.read()` before starting to stream."
        )
|
||||
|
||||
|
||||
class ResponseContextManager(Generic[_APIResponseT]):
    """Defers the wrapped request until the context is entered and
    guarantees the response is closed when the context exits.
    """

    def __init__(self, request_func: Callable[[], _APIResponseT]) -> None:
        self._request_func = request_func
        self.__response: _APIResponseT | None = None

    def __enter__(self) -> _APIResponseT:
        # The request is only issued here, not at construction time.
        self.__response = self._request_func()
        return self.__response

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        response = self.__response
        if response is not None:
            response.close()
|
||||
|
||||
|
||||
class AsyncResponseContextManager(Generic[_AsyncAPIResponseT]):
    """Defers awaiting the wrapped request until the context is entered and
    guarantees the response is closed when the context exits.
    """

    def __init__(self, api_request: Awaitable[_AsyncAPIResponseT]) -> None:
        self._api_request = api_request
        self.__response: _AsyncAPIResponseT | None = None

    async def __aenter__(self) -> _AsyncAPIResponseT:
        # The request is only awaited here, not at construction time.
        self.__response = await self._api_request
        return self.__response

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        response = self.__response
        if response is not None:
            await response.close()
|
||||
|
||||
|
||||
def to_streamed_response_wrapper(func: Callable[P, R]) -> Callable[P, ResponseContextManager[APIResponse[R]]]:
    """Wrap a bound sync API method so calling it yields a context manager
    that streams the raw `APIResponse` object instead of a parsed value.
    """

    @functools.wraps(func)
    def wrapped(*args: P.args, **kwargs: P.kwargs) -> ResponseContextManager[APIResponse[R]]:
        # Copy any caller-provided headers (never mutate their dict) and mark
        # the request so the client hands back the raw streamed response.
        headers: dict[str, str] = dict(cast(Any, kwargs.get("extra_headers")) or {})
        headers[RAW_RESPONSE_HEADER] = "stream"
        kwargs["extra_headers"] = headers

        # Defer the actual request until the context manager is entered.
        deferred = functools.partial(func, *args, **kwargs)
        return ResponseContextManager(cast(Callable[[], APIResponse[R]], deferred))

    return wrapped
|
||||
|
||||
|
||||
def async_to_streamed_response_wrapper(
    func: Callable[P, Awaitable[R]],
) -> Callable[P, AsyncResponseContextManager[AsyncAPIResponse[R]]]:
    """Wrap a bound async API method so calling it yields an async context
    manager that streams the raw `AsyncAPIResponse` object directly.
    """

    @functools.wraps(func)
    def wrapped(*args: P.args, **kwargs: P.kwargs) -> AsyncResponseContextManager[AsyncAPIResponse[R]]:
        # Copy any caller-provided headers (never mutate their dict) and mark
        # the request so the client hands back the raw streamed response.
        headers: dict[str, str] = dict(cast(Any, kwargs.get("extra_headers")) or {})
        headers[RAW_RESPONSE_HEADER] = "stream"
        kwargs["extra_headers"] = headers

        # Calling the async method produces an awaitable; it is only awaited
        # once the returned context manager is entered.
        pending = func(*args, **kwargs)
        return AsyncResponseContextManager(cast(Awaitable[AsyncAPIResponse[R]], pending))

    return wrapped
|
||||
|
||||
|
||||
def to_custom_streamed_response_wrapper(
    func: Callable[P, object],
    response_cls: type[_APIResponseT],
) -> Callable[P, ResponseContextManager[_APIResponseT]]:
    """Wrap a bound sync API method so it streams and returns *response_cls*
    directly.

    Note: the given `response_cls` *must* be concrete, e.g. `class BinaryAPIResponse(APIResponse[bytes])`
    """

    @functools.wraps(func)
    def wrapped(*args: P.args, **kwargs: P.kwargs) -> ResponseContextManager[_APIResponseT]:
        # Copy caller headers, then flag both streaming mode and the
        # response class the client should instantiate.
        headers: dict[str, Any] = dict(cast(Any, kwargs.get("extra_headers")) or {})
        headers[RAW_RESPONSE_HEADER] = "stream"
        headers[OVERRIDE_CAST_TO_HEADER] = response_cls
        kwargs["extra_headers"] = headers

        # Defer the actual request until the context manager is entered.
        deferred = functools.partial(func, *args, **kwargs)
        return ResponseContextManager(cast(Callable[[], _APIResponseT], deferred))

    return wrapped
|
||||
|
||||
|
||||
def async_to_custom_streamed_response_wrapper(
    func: Callable[P, Awaitable[object]],
    response_cls: type[_AsyncAPIResponseT],
) -> Callable[P, AsyncResponseContextManager[_AsyncAPIResponseT]]:
    """Wrap a bound async API method so it streams and returns *response_cls*
    directly.

    Note: the given `response_cls` *must* be concrete, e.g. `class BinaryAPIResponse(APIResponse[bytes])`
    """

    @functools.wraps(func)
    def wrapped(*args: P.args, **kwargs: P.kwargs) -> AsyncResponseContextManager[_AsyncAPIResponseT]:
        # Copy caller headers, then flag both streaming mode and the
        # response class the client should instantiate.
        headers: dict[str, Any] = dict(cast(Any, kwargs.get("extra_headers")) or {})
        headers[RAW_RESPONSE_HEADER] = "stream"
        headers[OVERRIDE_CAST_TO_HEADER] = response_cls
        kwargs["extra_headers"] = headers

        # The awaitable is only awaited once the context manager is entered.
        pending = func(*args, **kwargs)
        return AsyncResponseContextManager(cast(Awaitable[_AsyncAPIResponseT], pending))

    return wrapped
|
||||
|
||||
|
||||
def to_raw_response_wrapper(func: Callable[P, R]) -> Callable[P, APIResponse[R]]:
    """Higher order function that takes one of our bound API methods and wraps it
    to support returning the raw `APIResponse` object directly.
    """

    @functools.wraps(func)
    def wrapped(*args: P.args, **kwargs: P.kwargs) -> APIResponse[R]:
        merged_headers: dict[str, str] = dict(cast(Any, kwargs.get("extra_headers")) or {})
        # Signal the transport layer to hand back the raw response object.
        merged_headers[RAW_RESPONSE_HEADER] = "raw"
        kwargs["extra_headers"] = merged_headers
        return cast(APIResponse[R], func(*args, **kwargs))

    return wrapped
|
||||
|
||||
|
||||
def async_to_raw_response_wrapper(func: Callable[P, Awaitable[R]]) -> Callable[P, Awaitable[AsyncAPIResponse[R]]]:
    """Higher order function that takes one of our bound API methods and wraps it
    to support returning the raw `APIResponse` object directly.
    """

    @functools.wraps(func)
    async def wrapped(*args: P.args, **kwargs: P.kwargs) -> AsyncAPIResponse[R]:
        merged_headers: dict[str, str] = dict(cast(Any, kwargs.get("extra_headers")) or {})
        # Signal the transport layer to hand back the raw response object.
        merged_headers[RAW_RESPONSE_HEADER] = "raw"
        kwargs["extra_headers"] = merged_headers
        result = await func(*args, **kwargs)
        return cast(AsyncAPIResponse[R], result)

    return wrapped
|
||||
|
||||
|
||||
def to_custom_raw_response_wrapper(
    func: Callable[P, object],
    response_cls: type[_APIResponseT],
) -> Callable[P, _APIResponseT]:
    """Higher order function that takes one of our bound API methods and an `APIResponse` class
    and wraps the method to support returning the given response class directly.

    Note: the given `response_cls` *must* be concrete, e.g. `class BinaryAPIResponse(APIResponse[bytes])`
    """

    @functools.wraps(func)
    def wrapped(*args: P.args, **kwargs: P.kwargs) -> _APIResponseT:
        merged_headers: dict[str, Any] = dict(cast(Any, kwargs.get("extra_headers")) or {})
        # Request the raw response, cast to the caller-supplied concrete class.
        merged_headers[RAW_RESPONSE_HEADER] = "raw"
        merged_headers[OVERRIDE_CAST_TO_HEADER] = response_cls
        kwargs["extra_headers"] = merged_headers
        return cast(_APIResponseT, func(*args, **kwargs))

    return wrapped
|
||||
|
||||
|
||||
def async_to_custom_raw_response_wrapper(
    func: Callable[P, Awaitable[object]],
    response_cls: type[_AsyncAPIResponseT],
) -> Callable[P, Awaitable[_AsyncAPIResponseT]]:
    """Higher order function that takes one of our bound API methods and an `APIResponse` class
    and wraps the method to support returning the given response class directly.

    Note: the given `response_cls` *must* be concrete, e.g. `class BinaryAPIResponse(APIResponse[bytes])`
    """

    @functools.wraps(func)
    def wrapped(*args: P.args, **kwargs: P.kwargs) -> Awaitable[_AsyncAPIResponseT]:
        merged_headers: dict[str, Any] = dict(cast(Any, kwargs.get("extra_headers")) or {})
        # Request the raw response, cast to the caller-supplied concrete class.
        merged_headers[RAW_RESPONSE_HEADER] = "raw"
        merged_headers[OVERRIDE_CAST_TO_HEADER] = response_cls
        kwargs["extra_headers"] = merged_headers
        # NOTE: deliberately not `async def` — the coroutine is returned unawaited.
        return cast(Awaitable[_AsyncAPIResponseT], func(*args, **kwargs))

    return wrapped
|
||||
|
||||
|
||||
def extract_response_type(typ: type[BaseAPIResponse[Any]]) -> type:
    """Given a type like `APIResponse[T]`, returns the generic type variable `T`.

    This also handles the case where a concrete subclass is given, e.g.
    ```py
    class MyResponse(APIResponse[bytes]):
        ...

    extract_response_type(MyResponse) -> bytes
    ```
    """
    # Resolve the first type argument (`index=0`) against any of the known
    # response base classes in the type's inheritance chain.
    return extract_type_var_from_base(
        typ,
        generic_bases=cast("tuple[type, ...]", (BaseAPIResponse, APIResponse, AsyncAPIResponse)),
        index=0,
    )
|
||||
333
src/opencode_ai/_streaming.py
Normal file
333
src/opencode_ai/_streaming.py
Normal file
|
|
@ -0,0 +1,333 @@
|
|||
# Note: initially copied from https://github.com/florimondmanca/httpx-sse/blob/master/src/httpx_sse/_decoders.py
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import inspect
|
||||
from types import TracebackType
|
||||
from typing import TYPE_CHECKING, Any, Generic, TypeVar, Iterator, AsyncIterator, cast
|
||||
from typing_extensions import Self, Protocol, TypeGuard, override, get_origin, runtime_checkable
|
||||
|
||||
import httpx
|
||||
|
||||
from ._utils import extract_type_var_from_base
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ._client import Opencode, AsyncOpencode
|
||||
|
||||
|
||||
_T = TypeVar("_T")
|
||||
|
||||
|
||||
class Stream(Generic[_T]):
    """Provides the core interface to iterate over a synchronous stream response."""

    # The underlying HTTP response whose bytes are decoded as SSE.
    response: httpx.Response

    # Decoder turning raw bytes into `ServerSentEvent`s; supplied by the client.
    _decoder: SSEBytesDecoder

    def __init__(
        self,
        *,
        cast_to: type[_T],
        response: httpx.Response,
        client: Opencode,
    ) -> None:
        self.response = response
        self._cast_to = cast_to
        self._client = client
        self._decoder = client._make_sse_decoder()
        # Create the generator eagerly so __next__ and __iter__ share one state.
        self._iterator = self.__stream__()

    def __next__(self) -> _T:
        return self._iterator.__next__()

    def __iter__(self) -> Iterator[_T]:
        for item in self._iterator:
            yield item

    def _iter_events(self) -> Iterator[ServerSentEvent]:
        # Decode the raw HTTP byte stream into discrete server-sent events.
        yield from self._decoder.iter_bytes(self.response.iter_bytes())

    def __stream__(self) -> Iterator[_T]:
        """Yield each SSE payload, JSON-decoded and coerced to `cast_to`."""
        cast_to = cast(Any, self._cast_to)
        response = self.response
        process_data = self._client._process_response_data
        iterator = self._iter_events()

        for sse in iterator:
            yield process_data(data=sse.json(), cast_to=cast_to, response=response)

        # Ensure the entire stream is consumed
        for _sse in iterator:
            ...

    def __enter__(self) -> Self:
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        self.close()

    def close(self) -> None:
        """
        Close the response and release the connection.

        Automatically called if the response body is read to completion.
        """
        self.response.close()
|
||||
|
||||
|
||||
class AsyncStream(Generic[_T]):
    """Provides the core interface to iterate over an asynchronous stream response."""

    # The underlying HTTP response whose bytes are decoded as SSE.
    response: httpx.Response

    # Decoder turning raw bytes into `ServerSentEvent`s; supplied by the client.
    _decoder: SSEDecoder | SSEBytesDecoder

    def __init__(
        self,
        *,
        cast_to: type[_T],
        response: httpx.Response,
        client: AsyncOpencode,
    ) -> None:
        self.response = response
        self._cast_to = cast_to
        self._client = client
        self._decoder = client._make_sse_decoder()
        # Create the async generator eagerly so __anext__ and __aiter__ share one state.
        self._iterator = self.__stream__()

    async def __anext__(self) -> _T:
        return await self._iterator.__anext__()

    # NOTE: `async def` with `yield` makes this an async-generator function, so
    # calling it returns the async iterator directly, as `async for` expects.
    async def __aiter__(self) -> AsyncIterator[_T]:
        async for item in self._iterator:
            yield item

    async def _iter_events(self) -> AsyncIterator[ServerSentEvent]:
        # Decode the raw HTTP byte stream into discrete server-sent events.
        async for sse in self._decoder.aiter_bytes(self.response.aiter_bytes()):
            yield sse

    async def __stream__(self) -> AsyncIterator[_T]:
        """Yield each SSE payload, JSON-decoded and coerced to `cast_to`."""
        cast_to = cast(Any, self._cast_to)
        response = self.response
        process_data = self._client._process_response_data
        iterator = self._iter_events()

        async for sse in iterator:
            yield process_data(data=sse.json(), cast_to=cast_to, response=response)

        # Ensure the entire stream is consumed
        async for _sse in iterator:
            ...

    async def __aenter__(self) -> Self:
        return self

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        await self.close()

    async def close(self) -> None:
        """
        Close the response and release the connection.

        Automatically called if the response body is read to completion.
        """
        await self.response.aclose()
|
||||
|
||||
|
||||
class ServerSentEvent:
    """A single parsed server-sent event: event name, data payload, id and retry hint."""

    def __init__(
        self,
        *,
        event: str | None = None,
        data: str | None = None,
        id: str | None = None,
        retry: int | None = None,
    ) -> None:
        self._id = id
        # A missing payload is normalised to the empty string.
        self._data = "" if data is None else data
        # An empty event name is treated the same as no event name.
        self._event = event or None
        self._retry = retry

    @property
    def data(self) -> str:
        return self._data

    @property
    def event(self) -> str | None:
        return self._event

    @property
    def id(self) -> str | None:
        return self._id

    @property
    def retry(self) -> int | None:
        return self._retry

    def json(self) -> Any:
        """Decode the `data` payload as JSON."""
        return json.loads(self.data)

    @override
    def __repr__(self) -> str:
        return f"ServerSentEvent(event={self.event}, data={self.data}, id={self.id}, retry={self.retry})"
|
||||
|
||||
|
||||
class SSEDecoder:
    """Incremental decoder that turns raw SSE bytes into `ServerSentEvent` objects."""

    # Accumulated `data:` lines for the event currently being built.
    _data: list[str]
    # Pending `event:` field, if any.
    _event: str | None
    # Pending `retry:` field, if any.
    _retry: int | None
    # Last seen `id:` field; per the SSE spec this persists across events.
    _last_event_id: str | None

    def __init__(self) -> None:
        self._event = None
        self._data = []
        self._last_event_id = None
        self._retry = None

    def iter_bytes(self, iterator: Iterator[bytes]) -> Iterator[ServerSentEvent]:
        """Given an iterator that yields raw binary data, iterate over it & yield every event encountered"""
        for chunk in self._iter_chunks(iterator):
            # Split before decoding so splitlines() only uses \r and \n
            for raw_line in chunk.splitlines():
                line = raw_line.decode("utf-8")
                sse = self.decode(line)
                if sse:
                    yield sse

    def _iter_chunks(self, iterator: Iterator[bytes]) -> Iterator[bytes]:
        """Given an iterator that yields raw binary data, iterate over it and yield individual SSE chunks"""
        data = b""
        for chunk in iterator:
            for line in chunk.splitlines(keepends=True):
                data += line
                # A blank line (double newline in any style) terminates one event block.
                if data.endswith((b"\r\r", b"\n\n", b"\r\n\r\n")):
                    yield data
                    data = b""
        if data:
            yield data

    async def aiter_bytes(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[ServerSentEvent]:
        """Given an iterator that yields raw binary data, iterate over it & yield every event encountered"""
        async for chunk in self._aiter_chunks(iterator):
            # Split before decoding so splitlines() only uses \r and \n
            for raw_line in chunk.splitlines():
                line = raw_line.decode("utf-8")
                sse = self.decode(line)
                if sse:
                    yield sse

    async def _aiter_chunks(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[bytes]:
        """Given an iterator that yields raw binary data, iterate over it and yield individual SSE chunks"""
        data = b""
        async for chunk in iterator:
            for line in chunk.splitlines(keepends=True):
                data += line
                # A blank line (double newline in any style) terminates one event block.
                if data.endswith((b"\r\r", b"\n\n", b"\r\n\r\n")):
                    yield data
                    data = b""
        if data:
            yield data

    def decode(self, line: str) -> ServerSentEvent | None:
        """Feed one decoded line; returns a completed event on a blank line, else None."""
        # See: https://html.spec.whatwg.org/multipage/server-sent-events.html#event-stream-interpretation # noqa: E501

        if not line:
            # A blank line dispatches the accumulated event — unless nothing
            # has been accumulated at all.
            if not self._event and not self._data and not self._last_event_id and self._retry is None:
                return None

            sse = ServerSentEvent(
                event=self._event,
                data="\n".join(self._data),
                id=self._last_event_id,
                retry=self._retry,
            )

            # NOTE: as per the SSE spec, do not reset last_event_id.
            self._event = None
            self._data = []
            self._retry = None

            return sse

        if line.startswith(":"):
            # Comment line — ignored per the spec.
            return None

        fieldname, _, value = line.partition(":")

        # A single leading space after the colon is separator syntax, not payload.
        if value.startswith(" "):
            value = value[1:]

        if fieldname == "event":
            self._event = value
        elif fieldname == "data":
            self._data.append(value)
        elif fieldname == "id":
            # Per the spec, ids containing NUL are ignored.
            if "\0" in value:
                pass
            else:
                self._last_event_id = value
        elif fieldname == "retry":
            try:
                self._retry = int(value)
            except (TypeError, ValueError):
                # Non-integer retry values are ignored per the spec.
                pass
        else:
            pass  # Field is ignored.

        return None
|
||||
|
||||
|
||||
@runtime_checkable
class SSEBytesDecoder(Protocol):
    """Structural interface for decoders that turn raw bytes into `ServerSentEvent`s."""

    def iter_bytes(self, iterator: Iterator[bytes]) -> Iterator[ServerSentEvent]:
        """Given an iterator that yields raw binary data, iterate over it & yield every event encountered"""
        ...

    def aiter_bytes(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[ServerSentEvent]:
        """Given an async iterator that yields raw binary data, iterate over it & yield every event encountered"""
        ...
|
||||
|
||||
|
||||
def is_stream_class_type(typ: type) -> TypeGuard[type[Stream[object]] | type[AsyncStream[object]]]:
    """TypeGuard for determining whether or not the given type is a subclass of `Stream` / `AsyncStream`"""
    # Unwrap parameterised forms like `Stream[Foo]` to the underlying class.
    base = get_origin(typ)
    if base is None:
        base = typ
    return inspect.isclass(base) and issubclass(base, (Stream, AsyncStream))
|
||||
|
||||
|
||||
def extract_stream_chunk_type(
    stream_cls: type,
    *,
    failure_message: str | None = None,
) -> type:
    """Given a type like `Stream[T]`, returns the generic type variable `T`.

    This also handles the case where a concrete subclass is given, e.g.
    ```py
    class MyStream(Stream[bytes]):
        ...

    extract_stream_chunk_type(MyStream) -> bytes
    ```
    """
    # NOTE(review): imported locally rather than at module level — presumably to
    # avoid a circular import with `_base_client`; confirm before hoisting.
    from ._base_client import Stream, AsyncStream

    return extract_type_var_from_base(
        stream_cls,
        index=0,
        generic_bases=cast("tuple[type, ...]", (Stream, AsyncStream)),
        failure_message=failure_message,
    )
|
||||
219
src/opencode_ai/_types.py
Normal file
219
src/opencode_ai/_types.py
Normal file
|
|
@ -0,0 +1,219 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from os import PathLike
|
||||
from typing import (
|
||||
IO,
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
Dict,
|
||||
List,
|
||||
Type,
|
||||
Tuple,
|
||||
Union,
|
||||
Mapping,
|
||||
TypeVar,
|
||||
Callable,
|
||||
Optional,
|
||||
Sequence,
|
||||
)
|
||||
from typing_extensions import Set, Literal, Protocol, TypeAlias, TypedDict, override, runtime_checkable
|
||||
|
||||
import httpx
|
||||
import pydantic
|
||||
from httpx import URL, Proxy, Timeout, Response, BaseTransport, AsyncBaseTransport
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ._models import BaseModel
|
||||
from ._response import APIResponse, AsyncAPIResponse
|
||||
|
||||
# Shorthand aliases for commonly-used httpx and request/response types.
Transport = BaseTransport
AsyncTransport = AsyncBaseTransport
Query = Mapping[str, object]
Body = object
AnyMapping = Mapping[str, object]
ModelT = TypeVar("ModelT", bound=pydantic.BaseModel)
_T = TypeVar("_T")


# Approximates httpx internal ProxiesTypes and RequestFiles types
# while adding support for `PathLike` instances
ProxiesDict = Dict["str | URL", Union[None, str, URL, Proxy]]
ProxiesTypes = Union[str, Proxy, ProxiesDict]
if TYPE_CHECKING:
    Base64FileInput = Union[IO[bytes], PathLike[str]]
    FileContent = Union[IO[bytes], bytes, PathLike[str]]
else:
    Base64FileInput = Union[IO[bytes], PathLike]
    FileContent = Union[IO[bytes], bytes, PathLike]  # PathLike is not subscriptable in Python 3.8.
FileTypes = Union[
    # file (or bytes)
    FileContent,
    # (filename, file (or bytes))
    Tuple[Optional[str], FileContent],
    # (filename, file (or bytes), content_type)
    Tuple[Optional[str], FileContent, Optional[str]],
    # (filename, file (or bytes), content_type, headers)
    Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]],
]
RequestFiles = Union[Mapping[str, FileTypes], Sequence[Tuple[str, FileTypes]]]

# duplicate of the above but without our custom file support
HttpxFileContent = Union[IO[bytes], bytes]
HttpxFileTypes = Union[
    # file (or bytes)
    HttpxFileContent,
    # (filename, file (or bytes))
    Tuple[Optional[str], HttpxFileContent],
    # (filename, file (or bytes), content_type)
    Tuple[Optional[str], HttpxFileContent, Optional[str]],
    # (filename, file (or bytes), content_type, headers)
    Tuple[Optional[str], HttpxFileContent, Optional[str], Mapping[str, str]],
]
HttpxRequestFiles = Union[Mapping[str, HttpxFileTypes], Sequence[Tuple[str, HttpxFileTypes]]]

# Workaround to support (cast_to: Type[ResponseT]) -> ResponseT
# where ResponseT includes `None`. In order to support directly
# passing `None`, overloads would have to be defined for every
# method that uses `ResponseT` which would lead to an unacceptable
# amount of code duplication and make it unreadable. See _base_client.py
# for example usage.
#
# This unfortunately means that you will either have
# to import this type and pass it explicitly:
#
# from opencode_ai import NoneType
# client.get('/foo', cast_to=NoneType)
#
# or build it yourself:
#
# client.get('/foo', cast_to=type(None))
if TYPE_CHECKING:
    NoneType: Type[None]
else:
    NoneType = type(None)
|
||||
|
||||
|
||||
class RequestOptions(TypedDict, total=False):
    """Per-request overrides accepted by client request methods."""

    headers: Headers
    max_retries: int
    timeout: float | Timeout | None
    params: Query
    extra_json: AnyMapping
    idempotency_key: str
    follow_redirects: bool


# Sentinel class used until PEP 0661 is accepted
class NotGiven:
    """
    A sentinel singleton class used to distinguish omitted keyword arguments
    from those passed in with the value None (which may have different behavior).

    For example:

    ```py
    def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ...


    get(timeout=1)  # 1s timeout
    get(timeout=None)  # No timeout
    get()  # Default timeout behavior, which may not be statically known at the method definition.
    ```
    """

    def __bool__(self) -> Literal[False]:
        # Falsy so truthiness checks treat an omitted argument as absent.
        return False

    @override
    def __repr__(self) -> str:
        return "NOT_GIVEN"


# Convenience alias: a value of type _T, or the NotGiven sentinel.
NotGivenOr = Union[_T, NotGiven]
# The canonical sentinel instance used throughout the SDK.
NOT_GIVEN = NotGiven()
|
||||
|
||||
|
||||
class Omit:
    """Sentinel that removes a default value entirely, where `None` would be wrong.

    ```py
    # as the default `Content-Type` header is `application/json` that will be sent
    client.post("/upload/files", files={"file": b"my raw file content"})

    # you can't explicitly override the header as it has to be dynamically generated
    # to look something like: 'multipart/form-data; boundary=0d8382fcf5f8c3be01ca2e11002d2983'
    client.post(..., headers={"Content-Type": "multipart/form-data"})

    # instead you can remove the default `application/json` header by passing Omit
    client.post(..., headers={"Content-Type": Omit()})
    ```
    """

    def __bool__(self) -> Literal[False]:
        # Falsy so truthiness checks treat an omitted value as absent.
        return False
|
||||
|
||||
|
||||
@runtime_checkable
class ModelBuilderProtocol(Protocol):
    """Implemented by models that construct themselves from a raw HTTP response."""

    @classmethod
    def build(
        cls: type[_T],
        *,
        response: Response,
        data: object,
    ) -> _T: ...


# Header values may be `Omit` to strip a default header from the request.
Headers = Mapping[str, Union[str, Omit]]


class HeadersLikeProtocol(Protocol):
    """Anything with a string `get` accessor, e.g. `httpx.Headers`."""

    def get(self, __key: str) -> str | None: ...


HeadersLike = Union[Headers, HeadersLikeProtocol]

# Every shape a client method may be asked to cast a response to.
ResponseT = TypeVar(
    "ResponseT",
    bound=Union[
        object,
        str,
        None,
        "BaseModel",
        List[Any],
        Dict[str, Any],
        Response,
        ModelBuilderProtocol,
        "APIResponse[Any]",
        "AsyncAPIResponse[Any]",
    ],
)

StrBytesIntFloat = Union[str, bytes, int, float]

# Note: copied from Pydantic
# https://github.com/pydantic/pydantic/blob/6f31f8f68ef011f84357330186f603ff295312fd/pydantic/main.py#L79
IncEx: TypeAlias = Union[Set[int], Set[str], Mapping[int, Union["IncEx", bool]], Mapping[str, Union["IncEx", bool]]]

# Hook applied to a parsed response before it is returned to the caller.
PostParser = Callable[[Any], Any]


@runtime_checkable
class InheritsGeneric(Protocol):
    """Represents a type that has inherited from `Generic`

    The `__orig_bases__` property can be used to determine the resolved
    type variable for a given base class.
    """

    __orig_bases__: tuple[_GenericAlias]


class _GenericAlias(Protocol):
    """Structural stand-in for typing's private generic-alias objects."""

    __origin__: type[object]


class HttpxSendArgs(TypedDict, total=False):
    """Keyword arguments forwarded to httpx's send call."""

    auth: httpx.Auth
    follow_redirects: bool
|
||||
57
src/opencode_ai/_utils/__init__.py
Normal file
57
src/opencode_ai/_utils/__init__.py
Normal file
|
|
@ -0,0 +1,57 @@
|
|||
from ._sync import asyncify as asyncify
|
||||
from ._proxy import LazyProxy as LazyProxy
|
||||
from ._utils import (
|
||||
flatten as flatten,
|
||||
is_dict as is_dict,
|
||||
is_list as is_list,
|
||||
is_given as is_given,
|
||||
is_tuple as is_tuple,
|
||||
json_safe as json_safe,
|
||||
lru_cache as lru_cache,
|
||||
is_mapping as is_mapping,
|
||||
is_tuple_t as is_tuple_t,
|
||||
parse_date as parse_date,
|
||||
is_iterable as is_iterable,
|
||||
is_sequence as is_sequence,
|
||||
coerce_float as coerce_float,
|
||||
is_mapping_t as is_mapping_t,
|
||||
removeprefix as removeprefix,
|
||||
removesuffix as removesuffix,
|
||||
extract_files as extract_files,
|
||||
is_sequence_t as is_sequence_t,
|
||||
required_args as required_args,
|
||||
coerce_boolean as coerce_boolean,
|
||||
coerce_integer as coerce_integer,
|
||||
file_from_path as file_from_path,
|
||||
parse_datetime as parse_datetime,
|
||||
strip_not_given as strip_not_given,
|
||||
deepcopy_minimal as deepcopy_minimal,
|
||||
get_async_library as get_async_library,
|
||||
maybe_coerce_float as maybe_coerce_float,
|
||||
get_required_header as get_required_header,
|
||||
maybe_coerce_boolean as maybe_coerce_boolean,
|
||||
maybe_coerce_integer as maybe_coerce_integer,
|
||||
)
|
||||
from ._typing import (
|
||||
is_list_type as is_list_type,
|
||||
is_union_type as is_union_type,
|
||||
extract_type_arg as extract_type_arg,
|
||||
is_iterable_type as is_iterable_type,
|
||||
is_required_type as is_required_type,
|
||||
is_annotated_type as is_annotated_type,
|
||||
is_type_alias_type as is_type_alias_type,
|
||||
strip_annotated_type as strip_annotated_type,
|
||||
extract_type_var_from_base as extract_type_var_from_base,
|
||||
)
|
||||
from ._streams import consume_sync_iterator as consume_sync_iterator, consume_async_iterator as consume_async_iterator
|
||||
from ._transform import (
|
||||
PropertyInfo as PropertyInfo,
|
||||
transform as transform,
|
||||
async_transform as async_transform,
|
||||
maybe_transform as maybe_transform,
|
||||
async_maybe_transform as async_maybe_transform,
|
||||
)
|
||||
from ._reflection import (
|
||||
function_has_argument as function_has_argument,
|
||||
assert_signatures_in_sync as assert_signatures_in_sync,
|
||||
)
|
||||
25
src/opencode_ai/_utils/_logs.py
Normal file
25
src/opencode_ai/_utils/_logs.py
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
import os
import logging

logger: logging.Logger = logging.getLogger("opencode_ai")
httpx_logger: logging.Logger = logging.getLogger("httpx")


def _basic_config() -> None:
    """Install the SDK's default root-logger format."""
    # e.g. [2023-10-05 14:12:26 - opencode_ai._base_client:818 - DEBUG] HTTP Request: POST http://127.0.0.1:4010/foo/bar "200 OK"
    logging.basicConfig(
        format="[%(asctime)s - %(name)s:%(lineno)d - %(levelname)s] %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
    )


def setup_logging() -> None:
    """Configure SDK + httpx logging from the OPENCODE_LOG env var ("debug" or "info")."""
    level_by_env = {"debug": logging.DEBUG, "info": logging.INFO}
    selected = level_by_env.get(os.environ.get("OPENCODE_LOG"))
    if selected is None:
        # Unset or unrecognised value: leave logging untouched.
        return
    _basic_config()
    logger.setLevel(selected)
    httpx_logger.setLevel(selected)
|
||||
65
src/opencode_ai/_utils/_proxy.py
Normal file
65
src/opencode_ai/_utils/_proxy.py
Normal file
|
|
@ -0,0 +1,65 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Generic, TypeVar, Iterable, cast
|
||||
from typing_extensions import override
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
|
||||
class LazyProxy(Generic[T], ABC):
    """Implements data methods to pretend that an instance is another instance.

    This includes forwarding attribute access and other methods.
    """

    # Note: we have to special case proxies that themselves return proxies
    # to support using a proxy as a catch-all for any random access, e.g. `proxy.foo.bar.baz`

    def __getattr__(self, attr: str) -> object:
        proxied = self.__get_proxied__()
        if isinstance(proxied, LazyProxy):
            return proxied  # pyright: ignore
        return getattr(proxied, attr)

    @override
    def __repr__(self) -> str:
        proxied = self.__get_proxied__()
        if isinstance(proxied, LazyProxy):
            return proxied.__class__.__name__
        return repr(self.__get_proxied__())

    @override
    def __str__(self) -> str:
        proxied = self.__get_proxied__()
        if isinstance(proxied, LazyProxy):
            return proxied.__class__.__name__
        return str(proxied)

    @override
    def __dir__(self) -> Iterable[str]:
        proxied = self.__get_proxied__()
        if isinstance(proxied, LazyProxy):
            return []
        return proxied.__dir__()

    # Overriding __class__ makes isinstance() checks see the proxied type.
    @property  # type: ignore
    @override
    def __class__(self) -> type:  # pyright: ignore
        try:
            proxied = self.__get_proxied__()
        except Exception:
            # If loading fails, fall back to the proxy's own type.
            return type(self)
        if issubclass(type(proxied), LazyProxy):
            return type(proxied)
        return proxied.__class__

    def __get_proxied__(self) -> T:
        # NOTE(review): __load__ runs on every access — assumes implementations
        # are cheap or cache internally; confirm for new subclasses.
        return self.__load__()

    def __as_proxied__(self) -> T:
        """Helper method that returns the current proxy, typed as the loaded object"""
        return cast(T, self)

    @abstractmethod
    def __load__(self) -> T: ...
|
||||
42
src/opencode_ai/_utils/_reflection.py
Normal file
42
src/opencode_ai/_utils/_reflection.py
Normal file
|
|
@ -0,0 +1,42 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import inspect
|
||||
from typing import Any, Callable
|
||||
|
||||
|
||||
def function_has_argument(func: Callable[..., Any], arg_name: str) -> bool:
    """Returns whether or not the given function has a specific parameter"""
    return arg_name in inspect.signature(func).parameters
|
||||
|
||||
|
||||
def assert_signatures_in_sync(
    source_func: Callable[..., Any],
    check_func: Callable[..., Any],
    *,
    exclude_params: set[str] = set(),  # noqa: B006 — read-only; never mutated
) -> None:
    """Ensure that the signature of the second function matches the first.

    Compares each parameter of ``source_func`` (except those in
    ``exclude_params``) against ``check_func`` by name and annotation.

    Raises:
        AssertionError: listing every missing or mismatched parameter.
    """
    check_sig = inspect.signature(check_func)
    source_sig = inspect.signature(source_func)

    errors: list[str] = []

    for name, source_param in source_sig.parameters.items():
        if name in exclude_params:
            continue

        custom_param = check_sig.parameters.get(name)
        if not custom_param:
            errors.append(f"the `{name}` param is missing")
            continue

        if custom_param.annotation != source_param.annotation:
            # Fixed message wording (was "are do not match").
            errors.append(
                f"types for the `{name}` param do not match; source={repr(source_param.annotation)} checking={repr(custom_param.annotation)}"
            )
            continue

    if errors:
        raise AssertionError(f"{len(errors)} errors encountered when comparing signatures:\n\n" + "\n\n".join(errors))
|
||||
24
src/opencode_ai/_utils/_resources_proxy.py
Normal file
24
src/opencode_ai/_utils/_resources_proxy.py
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
from typing_extensions import override
|
||||
|
||||
from ._proxy import LazyProxy
|
||||
|
||||
|
||||
class ResourcesProxy(LazyProxy[Any]):
    """A proxy for the `opencode_ai.resources` module.

    This is used so that we can lazily import `opencode_ai.resources` only when
    needed *and* so that users can just import `opencode_ai` and reference `opencode_ai.resources`
    """

    @override
    def __load__(self) -> Any:
        import importlib

        # Deferred import so the resources package is only loaded on first access.
        mod = importlib.import_module("opencode_ai.resources")
        return mod


# Module-level singleton, typed as though it were the loaded module itself.
resources = ResourcesProxy().__as_proxied__()
|
||||
12
src/opencode_ai/_utils/_streams.py
Normal file
12
src/opencode_ai/_utils/_streams.py
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
from typing import Any
|
||||
from typing_extensions import Iterator, AsyncIterator
|
||||
|
||||
|
||||
def consume_sync_iterator(iterator: Iterator[Any]) -> None:
    """Drain *iterator* to completion, discarding every yielded value."""
    for _item in iterator:
        pass
|
||||
|
||||
|
||||
async def consume_async_iterator(iterator: AsyncIterator[Any]) -> None:
    """Drain *iterator* to completion, discarding every yielded value."""
    async for _item in iterator:
        pass
|
||||
86
src/opencode_ai/_utils/_sync.py
Normal file
86
src/opencode_ai/_utils/_sync.py
Normal file
|
|
@ -0,0 +1,86 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
import asyncio
|
||||
import functools
|
||||
import contextvars
|
||||
from typing import Any, TypeVar, Callable, Awaitable
|
||||
from typing_extensions import ParamSpec
|
||||
|
||||
import anyio
|
||||
import sniffio
|
||||
import anyio.to_thread
|
||||
|
||||
T_Retval = TypeVar("T_Retval")
|
||||
T_ParamSpec = ParamSpec("T_ParamSpec")
|
||||
|
||||
|
||||
# On 3.9+ use the stdlib implementation directly; otherwise fall back to a
# vendored copy so the SDK still supports Python 3.8.
if sys.version_info >= (3, 9):
    _asyncio_to_thread = asyncio.to_thread
else:
    # backport of https://docs.python.org/3/library/asyncio-task.html#asyncio.to_thread
    # for Python 3.8 support
    async def _asyncio_to_thread(
        func: Callable[T_ParamSpec, T_Retval], /, *args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs
    ) -> Any:
        """Asynchronously run function *func* in a separate thread.

        Any *args and **kwargs supplied for this function are directly passed
        to *func*. Also, the current :class:`contextvars.Context` is propagated,
        allowing context variables from the main thread to be accessed in the
        separate thread.

        Returns a coroutine that can be awaited to get the eventual result of *func*.
        """
        loop = asyncio.events.get_running_loop()
        ctx = contextvars.copy_context()
        # ctx.run makes the caller's context variables visible inside the worker thread.
        func_call = functools.partial(ctx.run, func, *args, **kwargs)
        return await loop.run_in_executor(None, func_call)
|
||||
|
||||
|
||||
async def to_thread(
    func: Callable[T_ParamSpec, T_Retval], /, *args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs
) -> T_Retval:
    """Run the blocking *func* in a worker thread and await its result.

    On asyncio the (possibly backported) asyncio-native implementation is used;
    any other backend detected by sniffio goes through anyio's portable
    thread offloading.
    """
    if sniffio.current_async_library() == "asyncio":
        return await _asyncio_to_thread(func, *args, **kwargs)

    bound = functools.partial(func, *args, **kwargs)
    return await anyio.to_thread.run_sync(bound)
|
||||
|
||||
|
||||
# inspired by `asyncer`, https://github.com/tiangolo/asyncer
|
||||
# inspired by `asyncer`, https://github.com/tiangolo/asyncer
def asyncify(function: Callable[T_ParamSpec, T_Retval]) -> Callable[T_ParamSpec, Awaitable[T_Retval]]:
    """
    Take a blocking function and create an async one that receives the same
    positional and keyword arguments. For python version 3.9 and above, it uses
    asyncio.to_thread to run the function in a separate thread. For python version
    3.8, it uses locally defined copy of the asyncio.to_thread function which was
    introduced in python 3.9.

    Usage:

    ```python
    def blocking_func(arg1, arg2, kwarg1=None):
        # blocking code
        return result


    result = asyncify(blocking_function)(arg1, arg2, kwarg1=value1)
    ```

    ## Arguments

    `function`: a blocking regular callable (e.g. a function)

    ## Return

    An async function that takes the same positional and keyword arguments as the
    original one, that when called runs the same original function in a thread worker
    and returns the result.
    """

    async def async_wrapper(*args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs) -> T_Retval:
        # Offload the blocking call to a worker thread on the current async backend.
        return await to_thread(function, *args, **kwargs)

    return async_wrapper
|
||||
447
src/opencode_ai/_utils/_transform.py
Normal file
447
src/opencode_ai/_utils/_transform.py
Normal file
|
|
@ -0,0 +1,447 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import io
|
||||
import base64
|
||||
import pathlib
|
||||
from typing import Any, Mapping, TypeVar, cast
|
||||
from datetime import date, datetime
|
||||
from typing_extensions import Literal, get_args, override, get_type_hints as _get_type_hints
|
||||
|
||||
import anyio
|
||||
import pydantic
|
||||
|
||||
from ._utils import (
|
||||
is_list,
|
||||
is_given,
|
||||
lru_cache,
|
||||
is_mapping,
|
||||
is_iterable,
|
||||
)
|
||||
from .._files import is_base64_file_input
|
||||
from ._typing import (
|
||||
is_list_type,
|
||||
is_union_type,
|
||||
extract_type_arg,
|
||||
is_iterable_type,
|
||||
is_required_type,
|
||||
is_annotated_type,
|
||||
strip_annotated_type,
|
||||
)
|
||||
from .._compat import get_origin, model_dump, is_typeddict
|
||||
|
||||
_T = TypeVar("_T")
|
||||
|
||||
|
||||
# TODO: support for drilling globals() and locals()
|
||||
# TODO: ensure works correctly with forward references in all cases
|
||||
|
||||
|
||||
PropertyFormat = Literal["iso8601", "base64", "custom"]
|
||||
|
||||
|
||||
class PropertyInfo:
    """Metadata class to be used in Annotated types to provide information about a given type.

    For example:

    class MyParams(TypedDict):
        account_holder_name: Annotated[str, PropertyInfo(alias='accountHolderName')]

    This means that {'account_holder_name': 'Robert'} will be transformed to {'accountHolderName': 'Robert'} before being sent to the API.
    """

    # wire-format name to send instead of the Python attribute name
    alias: str | None
    # how to serialise the value, see `PropertyFormat`
    format: PropertyFormat | None
    # strftime-style template, only used with format="custom"
    format_template: str | None
    # discriminator field name for tagged unions
    discriminator: str | None

    def __init__(
        self,
        *,
        alias: str | None = None,
        format: PropertyFormat | None = None,
        format_template: str | None = None,
        discriminator: str | None = None,
    ) -> None:
        self.alias = alias
        self.format = format
        self.format_template = format_template
        self.discriminator = discriminator

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}(alias='{self.alias}', format={self.format}, format_template='{self.format_template}', discriminator='{self.discriminator}')"
|
||||
|
||||
|
||||
def maybe_transform(
    data: object,
    expected_type: object,
) -> Any | None:
    """Wrapper over `transform()` that allows `None` to be passed.

    See `transform()` for more details.
    """
    return None if data is None else transform(data, expected_type)
|
||||
|
||||
|
||||
# Wrapper over _transform_recursive providing fake types
|
||||
# Wrapper over _transform_recursive providing fake types
def transform(
    data: _T,
    expected_type: object,
) -> _T:
    """Transform dictionaries based off of type information from the given type, for example:

    ```py
    class Params(TypedDict, total=False):
        card_id: Required[Annotated[str, PropertyInfo(alias="cardID")]]


    transformed = transform({"card_id": "<my card ID>"}, Params)
    # {'cardID': '<my card ID>'}
    ```

    Any keys / data that does not have type information given will be included as is.

    It should be noted that the transformations that this function does are not represented in the type system.
    """
    result = _transform_recursive(data, annotation=cast(type, expected_type))
    return cast(_T, result)
|
||||
|
||||
|
||||
@lru_cache(maxsize=8096)
def _get_annotated_type(type_: type) -> type | None:
    """If the given type is an `Annotated` type then it is returned, if not `None` is returned.

    This also unwraps the type when applicable, e.g. `Required[Annotated[T, ...]]`
    """
    if is_required_type(type_):
        # Unwrap `Required[Annotated[T, ...]]` to `Annotated[T, ...]`
        type_ = get_args(type_)[0]

    return type_ if is_annotated_type(type_) else None
|
||||
|
||||
|
||||
def _maybe_transform_key(key: str, type_: type) -> str:
    """Transform the given `data` based on the annotations provided in `type_`.

    Note: this function only looks at `Annotated` types that contain `PropertyInfo` metadata.
    """
    annotated_type = _get_annotated_type(type_)
    if annotated_type is None:
        # no `Annotated` definition for this type, no transformation needed
        return key

    # skip the first argument as it is the actual type
    for meta in get_args(annotated_type)[1:]:
        if isinstance(meta, PropertyInfo) and meta.alias is not None:
            return meta.alias

    return key
|
||||
|
||||
|
||||
def _no_transform_needed(annotation: type) -> bool:
|
||||
return annotation == float or annotation == int
|
||||
|
||||
|
||||
def _transform_recursive(
    data: object,
    *,
    annotation: type,
    inner_type: type | None = None,
) -> object:
    """Transform the given data against the expected type.

    Dispatches on the (stripped) type: TypedDicts, dicts, lists/iterables and
    unions recurse; pydantic models are dumped to JSON-safe form; finally any
    `PropertyInfo.format` metadata on `annotation` is applied to the leaf value.

    Args:
        annotation: The direct type annotation given to the particular piece of data.
            This may or may not be wrapped in metadata types, e.g. `Required[T]`, `Annotated[T, ...]` etc

        inner_type: If applicable, this is the "inside" type. This is useful in certain cases where the outside type
            is a container type such as `List[T]`. In that case `inner_type` should be set to `T` so that each entry in
            the list can be transformed using the metadata from the container type.

            Defaults to the same value as the `annotation` argument.
    """
    if inner_type is None:
        inner_type = annotation

    stripped_type = strip_annotated_type(inner_type)
    origin = get_origin(stripped_type) or stripped_type
    if is_typeddict(stripped_type) and is_mapping(data):
        return _transform_typeddict(data, stripped_type)

    if origin == dict and is_mapping(data):
        # Recurse into each value using the dict's value-type argument.
        items_type = get_args(stripped_type)[1]
        return {key: _transform_recursive(value, annotation=items_type) for key, value in data.items()}

    if (
        # List[T]
        (is_list_type(stripped_type) and is_list(data))
        # Iterable[T]
        or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str))
    ):
        # dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually
        # intended as an iterable, so we don't transform it.
        if isinstance(data, dict):
            return cast(object, data)

        inner_type = extract_type_arg(stripped_type, 0)
        if _no_transform_needed(inner_type):
            # for some types there is no need to transform anything, so we can get a small
            # perf boost from skipping that work.
            #
            # but we still need to convert to a list to ensure the data is json-serializable
            if is_list(data):
                return data
            return list(data)

        # NOTE: `annotation` (not `inner_type`) is passed through so the
        # container's own `Annotated` metadata still applies to each entry.
        return [_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data]

    if is_union_type(stripped_type):
        # For union types we run the transformation against all subtypes to ensure that everything is transformed.
        #
        # TODO: there may be edge cases where the same normalized field name will transform to two different names
        # in different subtypes.
        for subtype in get_args(stripped_type):
            data = _transform_recursive(data, annotation=annotation, inner_type=subtype)
        return data

    if isinstance(data, pydantic.BaseModel):
        # Dump models in JSON mode so nested values are JSON-serialisable.
        return model_dump(data, exclude_unset=True, mode="json")

    annotated_type = _get_annotated_type(annotation)
    if annotated_type is None:
        return data

    # ignore the first argument as it is the actual type
    annotations = get_args(annotated_type)[1:]
    for annotation in annotations:
        if isinstance(annotation, PropertyInfo) and annotation.format is not None:
            return _format_data(data, annotation.format, annotation.format_template)

    return data
|
||||
|
||||
|
||||
def _format_data(data: object, format_: PropertyFormat, format_template: str | None) -> object:
|
||||
if isinstance(data, (date, datetime)):
|
||||
if format_ == "iso8601":
|
||||
return data.isoformat()
|
||||
|
||||
if format_ == "custom" and format_template is not None:
|
||||
return data.strftime(format_template)
|
||||
|
||||
if format_ == "base64" and is_base64_file_input(data):
|
||||
binary: str | bytes | None = None
|
||||
|
||||
if isinstance(data, pathlib.Path):
|
||||
binary = data.read_bytes()
|
||||
elif isinstance(data, io.IOBase):
|
||||
binary = data.read()
|
||||
|
||||
if isinstance(binary, str): # type: ignore[unreachable]
|
||||
binary = binary.encode()
|
||||
|
||||
if not isinstance(binary, bytes):
|
||||
raise RuntimeError(f"Could not read bytes from {data}; Received {type(binary)}")
|
||||
|
||||
return base64.b64encode(binary).decode("ascii")
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def _transform_typeddict(
    data: Mapping[str, object],
    expected_type: type,
) -> Mapping[str, object]:
    """Transform each entry of `data` according to `expected_type`'s field annotations."""
    transformed: dict[str, object] = {}
    hints = get_type_hints(expected_type, include_extras=True)
    for key, value in data.items():
        if not is_given(value):
            # we don't need to include `NotGiven` values here as they'll
            # be stripped out before the request is sent anyway
            continue

        field_type = hints.get(key)
        if field_type is None:
            # we do not have a type annotation for this field, leave it as is
            transformed[key] = value
        else:
            transformed[_maybe_transform_key(key, field_type)] = _transform_recursive(value, annotation=field_type)
    return transformed
|
||||
|
||||
|
||||
async def async_maybe_transform(
    data: object,
    expected_type: object,
) -> Any | None:
    """Wrapper over `async_transform()` that allows `None` to be passed.

    See `async_transform()` for more details.
    """
    return None if data is None else await async_transform(data, expected_type)
|
||||
|
||||
|
||||
async def async_transform(
    data: _T,
    expected_type: object,
) -> _T:
    """Async counterpart of `transform()`: transform dictionaries based off of type
    information from the given type, for example:

    ```py
    class Params(TypedDict, total=False):
        card_id: Required[Annotated[str, PropertyInfo(alias="cardID")]]


    transformed = await async_transform({"card_id": "<my card ID>"}, Params)
    # {'cardID': '<my card ID>'}
    ```

    Any keys / data that does not have type information given will be included as is.

    It should be noted that the transformations that this function does are not represented in the type system.
    """
    result = await _async_transform_recursive(data, annotation=cast(type, expected_type))
    return cast(_T, result)
|
||||
|
||||
|
||||
async def _async_transform_recursive(
    data: object,
    *,
    annotation: type,
    inner_type: type | None = None,
) -> object:
    """Transform the given data against the expected type.

    Async variant of `_transform_recursive`: formatting that touches the
    filesystem goes through `_async_format_data` (non-blocking reads).

    Args:
        annotation: The direct type annotation given to the particular piece of data.
            This may or may not be wrapped in metadata types, e.g. `Required[T]`, `Annotated[T, ...]` etc

        inner_type: If applicable, this is the "inside" type. This is useful in certain cases where the outside type
            is a container type such as `List[T]`. In that case `inner_type` should be set to `T` so that each entry in
            the list can be transformed using the metadata from the container type.

            Defaults to the same value as the `annotation` argument.
    """
    if inner_type is None:
        inner_type = annotation

    stripped_type = strip_annotated_type(inner_type)
    origin = get_origin(stripped_type) or stripped_type
    if is_typeddict(stripped_type) and is_mapping(data):
        return await _async_transform_typeddict(data, stripped_type)

    if origin == dict and is_mapping(data):
        items_type = get_args(stripped_type)[1]
        # Fix: previously this called the *sync* `_transform_recursive` on dict
        # values, which skipped the async formatting path (e.g. non-blocking
        # base64 file reads in `_async_format_data`). Keep the async pipeline
        # all the way down.
        return {key: await _async_transform_recursive(value, annotation=items_type) for key, value in data.items()}

    if (
        # List[T]
        (is_list_type(stripped_type) and is_list(data))
        # Iterable[T]
        or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str))
    ):
        # dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually
        # intended as an iterable, so we don't transform it.
        if isinstance(data, dict):
            return cast(object, data)

        inner_type = extract_type_arg(stripped_type, 0)
        if _no_transform_needed(inner_type):
            # for some types there is no need to transform anything, so we can get a small
            # perf boost from skipping that work.
            #
            # but we still need to convert to a list to ensure the data is json-serializable
            if is_list(data):
                return data
            return list(data)

        # `annotation` (not `inner_type`) is forwarded so the container's own
        # `Annotated` metadata still applies to each entry.
        return [await _async_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data]

    if is_union_type(stripped_type):
        # For union types we run the transformation against all subtypes to ensure that everything is transformed.
        #
        # TODO: there may be edge cases where the same normalized field name will transform to two different names
        # in different subtypes.
        for subtype in get_args(stripped_type):
            data = await _async_transform_recursive(data, annotation=annotation, inner_type=subtype)
        return data

    if isinstance(data, pydantic.BaseModel):
        # Dump models in JSON mode so nested values are JSON-serialisable.
        return model_dump(data, exclude_unset=True, mode="json")

    annotated_type = _get_annotated_type(annotation)
    if annotated_type is None:
        return data

    # ignore the first argument as it is the actual type
    annotations = get_args(annotated_type)[1:]
    for annotation in annotations:
        if isinstance(annotation, PropertyInfo) and annotation.format is not None:
            return await _async_format_data(data, annotation.format, annotation.format_template)

    return data
|
||||
|
||||
|
||||
async def _async_format_data(data: object, format_: PropertyFormat, format_template: str | None) -> object:
|
||||
if isinstance(data, (date, datetime)):
|
||||
if format_ == "iso8601":
|
||||
return data.isoformat()
|
||||
|
||||
if format_ == "custom" and format_template is not None:
|
||||
return data.strftime(format_template)
|
||||
|
||||
if format_ == "base64" and is_base64_file_input(data):
|
||||
binary: str | bytes | None = None
|
||||
|
||||
if isinstance(data, pathlib.Path):
|
||||
binary = await anyio.Path(data).read_bytes()
|
||||
elif isinstance(data, io.IOBase):
|
||||
binary = data.read()
|
||||
|
||||
if isinstance(binary, str): # type: ignore[unreachable]
|
||||
binary = binary.encode()
|
||||
|
||||
if not isinstance(binary, bytes):
|
||||
raise RuntimeError(f"Could not read bytes from {data}; Received {type(binary)}")
|
||||
|
||||
return base64.b64encode(binary).decode("ascii")
|
||||
|
||||
return data
|
||||
|
||||
|
||||
async def _async_transform_typeddict(
    data: Mapping[str, object],
    expected_type: type,
) -> Mapping[str, object]:
    """Async twin of `_transform_typeddict`."""
    transformed: dict[str, object] = {}
    hints = get_type_hints(expected_type, include_extras=True)
    for key, value in data.items():
        if not is_given(value):
            # we don't need to include `NotGiven` values here as they'll
            # be stripped out before the request is sent anyway
            continue

        field_type = hints.get(key)
        if field_type is None:
            # we do not have a type annotation for this field, leave it as is
            transformed[key] = value
        else:
            transformed[_maybe_transform_key(key, field_type)] = await _async_transform_recursive(
                value, annotation=field_type
            )
    return transformed
|
||||
|
||||
|
||||
@lru_cache(maxsize=8096)
def get_type_hints(
    obj: Any,
    globalns: dict[str, Any] | None = None,
    localns: Mapping[str, Any] | None = None,
    include_extras: bool = False,
) -> dict[str, Any]:
    """Memoized drop-in wrapper around `typing_extensions.get_type_hints`.

    Results are cached per argument combination via `lru_cache`; callers must
    pass hashable arguments (the module's own call sites pass types only).
    """
    return _get_type_hints(obj, globalns=globalns, localns=localns, include_extras=include_extras)
|
||||
151
src/opencode_ai/_utils/_typing.py
Normal file
151
src/opencode_ai/_utils/_typing.py
Normal file
|
|
@ -0,0 +1,151 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
import typing
|
||||
import typing_extensions
|
||||
from typing import Any, TypeVar, Iterable, cast
|
||||
from collections import abc as _c_abc
|
||||
from typing_extensions import (
|
||||
TypeIs,
|
||||
Required,
|
||||
Annotated,
|
||||
get_args,
|
||||
get_origin,
|
||||
)
|
||||
|
||||
from ._utils import lru_cache
|
||||
from .._types import InheritsGeneric
|
||||
from .._compat import is_union as _is_union
|
||||
|
||||
|
||||
def is_annotated_type(typ: type) -> bool:
    """Whether `typ` is an `Annotated[...]` form."""
    origin = get_origin(typ)
    return origin == Annotated
|
||||
|
||||
|
||||
def is_list_type(typ: type) -> bool:
    """Whether `typ` is `list` or a parameterised `list[...]`."""
    origin = get_origin(typ) or typ
    return origin == list
|
||||
|
||||
|
||||
def is_iterable_type(typ: type) -> bool:
    """If the given type is `typing.Iterable[T]`"""
    origin = get_origin(typ) or typ
    # either the typing form or the collections.abc form counts
    return origin in (Iterable, _c_abc.Iterable)
|
||||
|
||||
|
||||
def is_union_type(typ: type) -> bool:
    """Whether the origin of `typ` is a union form (delegates to the compat `_is_union` helper)."""
    return _is_union(get_origin(typ))
|
||||
|
||||
|
||||
def is_required_type(typ: type) -> bool:
|
||||
return get_origin(typ) == Required
|
||||
|
||||
|
||||
def is_typevar(typ: type) -> bool:
    """Whether `typ` is itself a `TypeVar` instance (exact type match, deliberately not isinstance)."""
    # type ignore is required because type checkers
    # think this expression will always return False
    return type(typ) == TypeVar  # type: ignore
|
||||
|
||||
|
||||
# All classes that can represent a `type X = ...` alias.
# `typing.TypeAliasType` only exists on 3.12+, so it is folded in conditionally
# alongside the `typing_extensions` backport.
_TYPE_ALIAS_TYPES: tuple[type[typing_extensions.TypeAliasType], ...] = (typing_extensions.TypeAliasType,)
if sys.version_info >= (3, 12):
    _TYPE_ALIAS_TYPES = (*_TYPE_ALIAS_TYPES, typing.TypeAliasType)


def is_type_alias_type(tp: Any, /) -> TypeIs[typing_extensions.TypeAliasType]:
    """Return whether the provided argument is an instance of `TypeAliasType`.

    ```python
    type Int = int
    is_type_alias_type(Int)
    # > True
    Str = TypeAliasType("Str", str)
    is_type_alias_type(Str)
    # > True
    ```
    """
    return isinstance(tp, _TYPE_ALIAS_TYPES)
|
||||
|
||||
|
||||
# Extracts T from Annotated[T, ...] or from Required[Annotated[T, ...]]
|
||||
# Extracts T from Annotated[T, ...] or from Required[Annotated[T, ...]]
@lru_cache(maxsize=8096)
def strip_annotated_type(typ: type) -> type:
    """Peel off any `Required[...]` / `Annotated[...]` wrappers, however deeply nested."""
    while is_required_type(typ) or is_annotated_type(typ):
        typ = cast(type, get_args(typ)[0])

    return typ
|
||||
|
||||
|
||||
def extract_type_arg(typ: type, index: int) -> type:
    """Return the `index`-th type argument of `typ`, raising `RuntimeError` when absent."""
    type_args = get_args(typ)
    try:
        return cast(type, type_args[index])
    except IndexError as err:
        raise RuntimeError(f"Expected type {typ} to have a type argument at index {index} but it did not") from err
|
||||
|
||||
|
||||
def extract_type_var_from_base(
    typ: type,
    *,
    generic_bases: tuple[type, ...],
    index: int,
    failure_message: str | None = None,
) -> type:
    """Given a type like `Foo[T]`, returns the generic type variable `T`.

    This also handles the case where a concrete subclass is given, e.g.
    ```py
    class MyResponse(Foo[bytes]):
        ...

    extract_type_var(MyResponse, bases=(Foo,), index=0) -> bytes
    ```

    And where a generic subclass is given:
    ```py
    _T = TypeVar('_T')
    class MyResponse(Foo[_T]):
        ...

    extract_type_var(MyResponse[bytes], bases=(Foo,), index=0) -> bytes
    ```
    """
    # Normalise `Foo[bytes]` to `Foo` so it can be compared against the bases.
    cls = cast(object, get_origin(typ) or typ)
    if cls in generic_bases:  # pyright: ignore[reportUnnecessaryContains]
        # we're given the class directly
        return extract_type_arg(typ, index)

    # if a subclass is given
    # ---
    # this is needed as __orig_bases__ is not present in the typeshed stubs
    # because it is intended to be for internal use only, however there does
    # not seem to be a way to resolve generic TypeVars for inherited subclasses
    # without using it.
    if isinstance(cls, InheritsGeneric):
        target_base_class: Any | None = None
        for base in cls.__orig_bases__:
            if base.__origin__ in generic_bases:
                target_base_class = base
                break

        if target_base_class is None:
            raise RuntimeError(
                "Could not find the generic base class;\n"
                "This should never happen;\n"
                f"Does {cls} inherit from one of {generic_bases} ?"
            )

        extracted = extract_type_arg(target_base_class, index)
        if is_typevar(extracted):
            # If the extracted type argument is itself a type variable
            # then that means the subclass itself is generic, so we have
            # to resolve the type argument from the class itself, not
            # the base class.
            #
            # Note: if there is more than 1 type argument, the subclass could
            # change the ordering of the type arguments, this is not currently
            # supported.
            return extract_type_arg(typ, index)

        return extracted

    raise RuntimeError(failure_message or f"Could not resolve inner type variable at index {index} for {typ}")
|
||||
422
src/opencode_ai/_utils/_utils.py
Normal file
422
src/opencode_ai/_utils/_utils.py
Normal file
|
|
@ -0,0 +1,422 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import re
|
||||
import inspect
|
||||
import functools
|
||||
from typing import (
|
||||
Any,
|
||||
Tuple,
|
||||
Mapping,
|
||||
TypeVar,
|
||||
Callable,
|
||||
Iterable,
|
||||
Sequence,
|
||||
cast,
|
||||
overload,
|
||||
)
|
||||
from pathlib import Path
|
||||
from datetime import date, datetime
|
||||
from typing_extensions import TypeGuard
|
||||
|
||||
import sniffio
|
||||
|
||||
from .._types import NotGiven, FileTypes, NotGivenOr, HeadersLike
|
||||
from .._compat import parse_date as parse_date, parse_datetime as parse_datetime
|
||||
|
||||
_T = TypeVar("_T")
|
||||
_TupleT = TypeVar("_TupleT", bound=Tuple[object, ...])
|
||||
_MappingT = TypeVar("_MappingT", bound=Mapping[str, object])
|
||||
_SequenceT = TypeVar("_SequenceT", bound=Sequence[object])
|
||||
CallableT = TypeVar("CallableT", bound=Callable[..., Any])
|
||||
|
||||
|
||||
def flatten(t: Iterable[Iterable[_T]]) -> list[_T]:
    """Flatten one level of nesting into a single list."""
    flat: list[_T] = []
    for sublist in t:
        flat.extend(sublist)
    return flat
|
||||
|
||||
|
||||
def extract_files(
    # TODO: this needs to take Dict but variance issues.....
    # create protocol type ?
    query: Mapping[str, object],
    *,
    paths: Sequence[Sequence[str]],
) -> list[tuple[str, FileTypes]]:
    """Recursively extract files from the given dictionary based on specified paths.

    A path may look like this ['foo', 'files', '<array>', 'data'].

    Note: this mutates the given dictionary.
    """
    collected: list[tuple[str, FileTypes]] = []
    for path in paths:
        collected.extend(_extract_items(query, path, index=0, flattened_key=None))
    return collected
|
||||
|
||||
|
||||
def _extract_items(
    obj: object,
    path: Sequence[str],
    *,
    index: int,
    flattened_key: str | None,
) -> list[tuple[str, FileTypes]]:
    """Recursive worker for `extract_files`.

    Walks `obj` following `path[index:]`; when the path is exhausted the value
    found there is validated as file content, popped out of its parent dict and
    returned as `(multipart_key, file)` pairs. `flattened_key` accumulates the
    bracketed multipart key, e.g. `foo[files][]`.
    """
    try:
        key = path[index]
    except IndexError:
        if isinstance(obj, NotGiven):
            # no value was provided - we can safely ignore
            return []

        # cyclical import
        from .._files import assert_is_file_content

        # We have exhausted the path, return the entry we found.
        assert flattened_key is not None

        if is_list(obj):
            # A list leaf fans out into one `key[]` entry per element.
            files: list[tuple[str, FileTypes]] = []
            for entry in obj:
                assert_is_file_content(entry, key=flattened_key + "[]" if flattened_key else "")
                files.append((flattened_key + "[]", cast(FileTypes, entry)))
            return files

        assert_is_file_content(obj, key=flattened_key)
        return [(flattened_key, cast(FileTypes, obj))]

    index += 1
    if is_dict(obj):
        try:
            # We are at the last entry in the path so we must remove the field
            if (len(path)) == index:
                item = obj.pop(key)
            else:
                item = obj[key]
        except KeyError:
            # Key was not present in the dictionary, this is not indicative of an error
            # as the given path may not point to a required field. We also do not want
            # to enforce required fields as the API may differ from the spec in some cases.
            return []
        if flattened_key is None:
            flattened_key = key
        else:
            flattened_key += f"[{key}]"
        return _extract_items(
            item,
            path,
            index=index,
            flattened_key=flattened_key,
        )
    elif is_list(obj):
        # Only the `<array>` wildcard may traverse a list level.
        if key != "<array>":
            return []

        return flatten(
            [
                _extract_items(
                    item,
                    path,
                    index=index,
                    flattened_key=flattened_key + "[]" if flattened_key is not None else "[]",
                )
                for item in obj
            ]
        )

    # Something unexpected was passed, just ignore it.
    return []
|
||||
|
||||
|
||||
def is_given(obj: NotGivenOr[_T]) -> TypeGuard[_T]:
    """Narrow away the `NotGiven` sentinel: True iff `obj` is an actual value."""
    return not isinstance(obj, NotGiven)
|
||||
|
||||
|
||||
# Type safe methods for narrowing types with TypeVars.
|
||||
# The default narrowing for isinstance(obj, dict) is dict[unknown, unknown],
|
||||
# however this cause Pyright to rightfully report errors. As we know we don't
|
||||
# care about the contained types we can safely use `object` in it's place.
|
||||
#
|
||||
# There are two separate functions defined, `is_*` and `is_*_t` for different use cases.
|
||||
# `is_*` is for when you're dealing with an unknown input
|
||||
# `is_*_t` is for when you're narrowing a known union type to a specific subset
|
||||
|
||||
|
||||
def is_tuple(obj: object) -> TypeGuard[tuple[object, ...]]:
    """Narrow an unknown `obj` to a tuple."""
    return isinstance(obj, tuple)


def is_tuple_t(obj: _TupleT | object) -> TypeGuard[_TupleT]:
    """Narrow a known union containing a tuple type down to that tuple type."""
    return isinstance(obj, tuple)


def is_sequence(obj: object) -> TypeGuard[Sequence[object]]:
    """Narrow an unknown `obj` to a `Sequence`."""
    return isinstance(obj, Sequence)


def is_sequence_t(obj: _SequenceT | object) -> TypeGuard[_SequenceT]:
    """Narrow a known union containing a sequence type down to that sequence type."""
    return isinstance(obj, Sequence)


def is_mapping(obj: object) -> TypeGuard[Mapping[str, object]]:
    """Narrow an unknown `obj` to a `Mapping`."""
    return isinstance(obj, Mapping)


def is_mapping_t(obj: _MappingT | object) -> TypeGuard[_MappingT]:
    """Narrow a known union containing a mapping type down to that mapping type."""
    return isinstance(obj, Mapping)


def is_dict(obj: object) -> TypeGuard[dict[object, object]]:
    """Narrow an unknown `obj` to a `dict`."""
    return isinstance(obj, dict)


def is_list(obj: object) -> TypeGuard[list[object]]:
    """Narrow an unknown `obj` to a `list`."""
    return isinstance(obj, list)


def is_iterable(obj: object) -> TypeGuard[Iterable[object]]:
    """Narrow an unknown `obj` to an `Iterable` (note: strings and dicts are iterable too)."""
    return isinstance(obj, Iterable)
|
||||
|
||||
|
||||
def deepcopy_minimal(item: _T) -> _T:
    """Minimal reimplementation of copy.deepcopy() that will only copy certain object types:

    - mappings, e.g. `dict`
    - list

    This is done for performance reasons.
    """
    if is_mapping(item):
        copied = {key: deepcopy_minimal(value) for key, value in item.items()}
        return cast(_T, copied)
    if is_list(item):
        return cast(_T, [deepcopy_minimal(element) for element in item])
    # Everything else (scalars, tuples, custom objects, ...) is shared as-is.
    return item
|
||||
|
||||
|
||||
# copied from https://github.com/Rapptz/RoboDanny
|
||||
# copied from https://github.com/Rapptz/RoboDanny
def human_join(seq: Sequence[str], *, delim: str = ", ", final: str = "or") -> str:
    """Join `seq` into natural prose, e.g. ['a', 'b', 'c'] -> 'a, b or c'."""
    count = len(seq)
    if count == 0:
        return ""
    if count == 1:
        return seq[0]
    if count == 2:
        return f"{seq[0]} {final} {seq[1]}"
    return delim.join(seq[:-1]) + f" {final} {seq[-1]}"
|
||||
|
||||
|
||||
def quote(string: str) -> str:
    """Add single quotation marks around the given string. Does *not* do any escaping."""
    return "'" + string + "'"
|
||||
|
||||
|
||||
def required_args(*variants: Sequence[str]) -> Callable[[CallableT], CallableT]:
    """Decorator to enforce a given set of arguments or variants of arguments are passed to the decorated function.

    Useful for enforcing runtime validation of overloaded functions.

    Example usage:
    ```py
    @overload
    def foo(*, a: str) -> str: ...


    @overload
    def foo(*, b: bool) -> str: ...


    # This enforces the same constraints that a static type checker would
    # i.e. that either a or b must be passed to the function
    @required_args(["a"], ["b"])
    def foo(*, a: str | None = None, b: bool | None = None) -> str: ...
    ```
    """

    def inner(func: CallableT) -> CallableT:
        signature_params = inspect.signature(func).parameters
        # Names that can be supplied positionally, in declaration order.
        positional_names = [
            name
            for name, param in signature_params.items()
            if param.kind
            in {
                param.POSITIONAL_ONLY,
                param.POSITIONAL_OR_KEYWORD,
            }
        ]

        @functools.wraps(func)
        def wrapper(*args: object, **kwargs: object) -> object:
            provided: set[str] = set()
            # Map positional arguments back to their parameter names.
            for pos, _ in enumerate(args):
                try:
                    provided.add(positional_names[pos])
                except IndexError:
                    raise TypeError(
                        f"{func.__name__}() takes {len(positional_names)} argument(s) but {len(args)} were given"
                    ) from None

            provided.update(kwargs.keys())

            for variant in variants:
                if all(param in provided for param in variant):
                    break
            else:  # no break -> no variant was fully satisfied
                if len(variants) > 1:
                    variations = human_join(
                        ["(" + human_join([quote(arg) for arg in variant], final="and") + ")" for variant in variants]
                    )
                    msg = f"Missing required arguments; Expected either {variations} arguments to be given"
                else:
                    assert len(variants) > 0

                    # TODO: this error message is not deterministic
                    missing = list(set(variants[0]) - provided)
                    if len(missing) > 1:
                        msg = f"Missing required arguments: {human_join([quote(arg) for arg in missing])}"
                    else:
                        msg = f"Missing required argument: {quote(missing[0])}"
                raise TypeError(msg)
            return func(*args, **kwargs)

        return wrapper  # type: ignore

    return inner
|
||||
|
||||
|
||||
_K = TypeVar("_K")
_V = TypeVar("_V")


@overload
def strip_not_given(obj: None) -> None: ...


@overload
def strip_not_given(obj: Mapping[_K, _V | NotGiven]) -> dict[_K, _V]: ...


@overload
def strip_not_given(obj: object) -> object: ...


def strip_not_given(obj: object | None) -> object:
    """Remove all top-level keys where their values are instances of `NotGiven`"""
    if obj is None:
        return None
    if not is_mapping(obj):
        # Non-mapping inputs pass through untouched.
        return obj
    stripped = {key: value for key, value in obj.items() if not isinstance(value, NotGiven)}
    return stripped
|
||||
|
||||
|
||||
def coerce_integer(val: str) -> int:
    """Parse *val* as a base-10 integer."""
    return int(val, 10)
||||
def coerce_float(val: str) -> float:
    """Parse *val* as a floating point number."""
    parsed = float(val)
    return parsed
||||
def coerce_boolean(val: str) -> bool:
    """Return True for the (case-sensitive) truthy strings "true", "1" and "on"."""
    return val in ("true", "1", "on")
||||
def maybe_coerce_integer(val: str | None) -> int | None:
    """Parse *val* as a base-10 integer, passing ``None`` through unchanged."""
    if val is None:
        return None
    return int(val, base=10)
||||
def maybe_coerce_float(val: str | None) -> float | None:
    """Parse *val* as a float, passing ``None`` through unchanged."""
    if val is None:
        return None
    return float(val)
||||
def maybe_coerce_boolean(val: str | None) -> bool | None:
    """Coerce *val* to bool ("true"/"1"/"on" are truthy), passing ``None`` through."""
    if val is None:
        return None
    return val in ("true", "1", "on")
||||
def removeprefix(string: str, prefix: str) -> str:
    """Remove a prefix from a string.

    Backport of `str.removeprefix` for Python < 3.9
    """
    return string[len(prefix) :] if string.startswith(prefix) else string
||||
def removesuffix(string: str, suffix: str) -> str:
    """Remove a suffix from a string.

    Backport of `str.removesuffix` for Python < 3.9
    """
    # Guard against an empty suffix: ``string.endswith("")`` is always True and
    # ``string[: -0]`` evaluates to ``string[:0]`` (the empty string), which
    # would wrongly return "" for every input. ``str.removesuffix("")`` returns
    # the string unchanged; PEP 616's reference implementation uses this guard.
    if suffix and string.endswith(suffix):
        return string[: -len(suffix)]
    return string
||||
def file_from_path(path: str) -> FileTypes:
    """Read the file at *path* and return a ``(file_name, contents)`` tuple."""
    file_name = os.path.basename(path)
    contents = Path(path).read_bytes()
    return (file_name, contents)
||||
def get_required_header(headers: HeadersLike, header: str) -> str:
    """Return the value of *header*, trying several casings; raise ValueError if absent."""
    lower_header = header.lower()
    if is_mapping_t(headers):
        # Case-insensitive scan over mapping-style headers; only str values count.
        # mypy doesn't understand the type narrowing here
        for k, v in headers.items():  # type: ignore
            if k.lower() == lower_header and isinstance(v, str):
                return v

    # to deal with the case where the header looks like Stainless-Event-Id
    intercaps_header = re.sub(r"([^\w])(\w)", lambda pat: pat.group(1) + pat.group(2).upper(), header.capitalize())

    # Fallback: try a handful of common casings via .get().
    # NOTE(review): assumes non-mapping HeadersLike objects expose a dict-like
    # `.get()` (e.g. httpx.Headers) — confirm against the HeadersLike alias.
    for normalized_header in [header, lower_header, header.upper(), intercaps_header]:
        value = headers.get(normalized_header)
        if value:
            return value

    raise ValueError(f"Could not find {header} header")
|
||||
|
||||
|
||||
def get_async_library() -> str:
    """Name of the currently running async library (e.g. "asyncio"), or "false" if none."""
    try:
        library = sniffio.current_async_library()
    except Exception:
        # Not inside an async event loop (or sniffio unavailable).
        return "false"
    return library
|
||||
|
||||
|
||||
def lru_cache(*, maxsize: int | None = 128) -> Callable[[CallableT], CallableT]:
    """A version of functools.lru_cache that retains the type signature
    for the wrapped function arguments.
    """
    decorator = functools.lru_cache(maxsize=maxsize)  # noqa: TID251
    # The cast keeps the decorated function's original signature for type checkers.
    return cast(Any, decorator)  # type: ignore[no-any-return]
|
||||
|
||||
|
||||
def json_safe(data: object) -> object:
    """Translates a mapping / sequence recursively in the same fashion
    as `pydantic` v2's `model_dump(mode="json")`.
    """
    if is_mapping(data):
        # Keys are converted too, mirroring pydantic's JSON-mode dump.
        return {json_safe(key): json_safe(value) for key, value in data.items()}

    # Strings/bytes are iterable but must stay scalar.
    if is_iterable(data) and not isinstance(data, (str, bytes, bytearray)):
        return [json_safe(element) for element in data]

    if isinstance(data, (datetime, date)):
        return data.isoformat()

    return data
|
||||
4
src/opencode_ai/_version.py
Normal file
4
src/opencode_ai/_version.py
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

# Package metadata; the version string is stamped by release-please.
__title__ = "opencode_ai"
__version__ = "0.1.0-alpha.1"  # x-release-please-version
|
||||
4
src/opencode_ai/lib/.keep
Normal file
4
src/opencode_ai/lib/.keep
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
File generated from our OpenAPI spec by Stainless.
|
||||
|
||||
This directory can be used to store custom files to expand the SDK.
|
||||
It is ignored by Stainless code generation and its content (other than this keep file) won't be touched.
|
||||
0
src/opencode_ai/py.typed
Normal file
0
src/opencode_ai/py.typed
Normal file
75
src/opencode_ai/resources/__init__.py
Normal file
75
src/opencode_ai/resources/__init__.py
Normal file
|
|
@ -0,0 +1,75 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from .app import (
|
||||
AppResource,
|
||||
AsyncAppResource,
|
||||
AppResourceWithRawResponse,
|
||||
AsyncAppResourceWithRawResponse,
|
||||
AppResourceWithStreamingResponse,
|
||||
AsyncAppResourceWithStreamingResponse,
|
||||
)
|
||||
from .file import (
|
||||
FileResource,
|
||||
AsyncFileResource,
|
||||
FileResourceWithRawResponse,
|
||||
AsyncFileResourceWithRawResponse,
|
||||
FileResourceWithStreamingResponse,
|
||||
AsyncFileResourceWithStreamingResponse,
|
||||
)
|
||||
from .event import (
|
||||
EventResource,
|
||||
AsyncEventResource,
|
||||
EventResourceWithRawResponse,
|
||||
AsyncEventResourceWithRawResponse,
|
||||
EventResourceWithStreamingResponse,
|
||||
AsyncEventResourceWithStreamingResponse,
|
||||
)
|
||||
from .config import (
|
||||
ConfigResource,
|
||||
AsyncConfigResource,
|
||||
ConfigResourceWithRawResponse,
|
||||
AsyncConfigResourceWithRawResponse,
|
||||
ConfigResourceWithStreamingResponse,
|
||||
AsyncConfigResourceWithStreamingResponse,
|
||||
)
|
||||
from .session import (
|
||||
SessionResource,
|
||||
AsyncSessionResource,
|
||||
SessionResourceWithRawResponse,
|
||||
AsyncSessionResourceWithRawResponse,
|
||||
SessionResourceWithStreamingResponse,
|
||||
AsyncSessionResourceWithStreamingResponse,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"EventResource",
|
||||
"AsyncEventResource",
|
||||
"EventResourceWithRawResponse",
|
||||
"AsyncEventResourceWithRawResponse",
|
||||
"EventResourceWithStreamingResponse",
|
||||
"AsyncEventResourceWithStreamingResponse",
|
||||
"AppResource",
|
||||
"AsyncAppResource",
|
||||
"AppResourceWithRawResponse",
|
||||
"AsyncAppResourceWithRawResponse",
|
||||
"AppResourceWithStreamingResponse",
|
||||
"AsyncAppResourceWithStreamingResponse",
|
||||
"FileResource",
|
||||
"AsyncFileResource",
|
||||
"FileResourceWithRawResponse",
|
||||
"AsyncFileResourceWithRawResponse",
|
||||
"FileResourceWithStreamingResponse",
|
||||
"AsyncFileResourceWithStreamingResponse",
|
||||
"ConfigResource",
|
||||
"AsyncConfigResource",
|
||||
"ConfigResourceWithRawResponse",
|
||||
"AsyncConfigResourceWithRawResponse",
|
||||
"ConfigResourceWithStreamingResponse",
|
||||
"AsyncConfigResourceWithStreamingResponse",
|
||||
"SessionResource",
|
||||
"AsyncSessionResource",
|
||||
"SessionResourceWithRawResponse",
|
||||
"AsyncSessionResourceWithRawResponse",
|
||||
"SessionResourceWithStreamingResponse",
|
||||
"AsyncSessionResourceWithStreamingResponse",
|
||||
]
|
||||
186
src/opencode_ai/resources/app.py
Normal file
186
src/opencode_ai/resources/app.py
Normal file
|
|
@ -0,0 +1,186 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import httpx
|
||||
|
||||
from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
|
||||
from .._compat import cached_property
|
||||
from .._resource import SyncAPIResource, AsyncAPIResource
|
||||
from .._response import (
|
||||
to_raw_response_wrapper,
|
||||
to_streamed_response_wrapper,
|
||||
async_to_raw_response_wrapper,
|
||||
async_to_streamed_response_wrapper,
|
||||
)
|
||||
from ..types.app import App
|
||||
from .._base_client import make_request_options
|
||||
from ..types.app_init_response import AppInitResponse
|
||||
|
||||
__all__ = ["AppResource", "AsyncAppResource"]
|
||||
|
||||
|
||||
class AppResource(SyncAPIResource):
    # Synchronous client surface for the /app endpoints.

    @cached_property
    def with_raw_response(self) -> AppResourceWithRawResponse:
        """
        This property can be used as a prefix for any HTTP method call to return
        the raw response object instead of the parsed content.

        For more information, see https://www.github.com/sst/opencode-sdk-python#accessing-raw-response-data-eg-headers
        """
        return AppResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> AppResourceWithStreamingResponse:
        """
        An alternative to `.with_raw_response` that doesn't eagerly read the response body.

        For more information, see https://www.github.com/sst/opencode-sdk-python#with_streaming_response
        """
        return AppResourceWithStreamingResponse(self)

    def get(
        self,
        *,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> App:
        """Get app info"""
        # GET /app, parsed into the App model.
        return self._get(
            "/app",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=App,
        )

    def init(
        self,
        *,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> AppInitResponse:
        """Initialize the app"""
        # POST /app/init with no request body.
        return self._post(
            "/app/init",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=AppInitResponse,
        )


class AsyncAppResource(AsyncAPIResource):
    # Asynchronous counterpart of AppResource; same endpoints, awaited.

    @cached_property
    def with_raw_response(self) -> AsyncAppResourceWithRawResponse:
        """
        This property can be used as a prefix for any HTTP method call to return
        the raw response object instead of the parsed content.

        For more information, see https://www.github.com/sst/opencode-sdk-python#accessing-raw-response-data-eg-headers
        """
        return AsyncAppResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> AsyncAppResourceWithStreamingResponse:
        """
        An alternative to `.with_raw_response` that doesn't eagerly read the response body.

        For more information, see https://www.github.com/sst/opencode-sdk-python#with_streaming_response
        """
        return AsyncAppResourceWithStreamingResponse(self)

    async def get(
        self,
        *,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> App:
        """Get app info"""
        return await self._get(
            "/app",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=App,
        )

    async def init(
        self,
        *,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> AppInitResponse:
        """Initialize the app"""
        return await self._post(
            "/app/init",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=AppInitResponse,
        )


class AppResourceWithRawResponse:
    # Wraps each AppResource method so calls return the raw HTTP response.

    def __init__(self, app: AppResource) -> None:
        self._app = app

        self.get = to_raw_response_wrapper(
            app.get,
        )
        self.init = to_raw_response_wrapper(
            app.init,
        )


class AsyncAppResourceWithRawResponse:
    # Async variant of AppResourceWithRawResponse.

    def __init__(self, app: AsyncAppResource) -> None:
        self._app = app

        self.get = async_to_raw_response_wrapper(
            app.get,
        )
        self.init = async_to_raw_response_wrapper(
            app.init,
        )


class AppResourceWithStreamingResponse:
    # Wraps each AppResource method so the response body is streamed lazily.

    def __init__(self, app: AppResource) -> None:
        self._app = app

        self.get = to_streamed_response_wrapper(
            app.get,
        )
        self.init = to_streamed_response_wrapper(
            app.init,
        )


class AsyncAppResourceWithStreamingResponse:
    # Async variant of AppResourceWithStreamingResponse.

    def __init__(self, app: AsyncAppResource) -> None:
        self._app = app

        self.get = async_to_streamed_response_wrapper(
            app.get,
        )
        self.init = async_to_streamed_response_wrapper(
            app.init,
        )
|
||||
186
src/opencode_ai/resources/config.py
Normal file
186
src/opencode_ai/resources/config.py
Normal file
|
|
@ -0,0 +1,186 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import httpx
|
||||
|
||||
from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
|
||||
from .._compat import cached_property
|
||||
from .._resource import SyncAPIResource, AsyncAPIResource
|
||||
from .._response import (
|
||||
to_raw_response_wrapper,
|
||||
to_streamed_response_wrapper,
|
||||
async_to_raw_response_wrapper,
|
||||
async_to_streamed_response_wrapper,
|
||||
)
|
||||
from .._base_client import make_request_options
|
||||
from ..types.config import Config
|
||||
from ..types.config_providers_response import ConfigProvidersResponse
|
||||
|
||||
__all__ = ["ConfigResource", "AsyncConfigResource"]
|
||||
|
||||
|
||||
class ConfigResource(SyncAPIResource):
    # Synchronous client surface for the /config endpoints.

    @cached_property
    def with_raw_response(self) -> ConfigResourceWithRawResponse:
        """
        This property can be used as a prefix for any HTTP method call to return
        the raw response object instead of the parsed content.

        For more information, see https://www.github.com/sst/opencode-sdk-python#accessing-raw-response-data-eg-headers
        """
        return ConfigResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> ConfigResourceWithStreamingResponse:
        """
        An alternative to `.with_raw_response` that doesn't eagerly read the response body.

        For more information, see https://www.github.com/sst/opencode-sdk-python#with_streaming_response
        """
        return ConfigResourceWithStreamingResponse(self)

    def get(
        self,
        *,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> Config:
        """Get config info"""
        return self._get(
            "/config",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=Config,
        )

    def providers(
        self,
        *,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> ConfigProvidersResponse:
        """List all providers"""
        return self._get(
            "/config/providers",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=ConfigProvidersResponse,
        )


class AsyncConfigResource(AsyncAPIResource):
    # Asynchronous counterpart of ConfigResource; same endpoints, awaited.

    @cached_property
    def with_raw_response(self) -> AsyncConfigResourceWithRawResponse:
        """
        This property can be used as a prefix for any HTTP method call to return
        the raw response object instead of the parsed content.

        For more information, see https://www.github.com/sst/opencode-sdk-python#accessing-raw-response-data-eg-headers
        """
        return AsyncConfigResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> AsyncConfigResourceWithStreamingResponse:
        """
        An alternative to `.with_raw_response` that doesn't eagerly read the response body.

        For more information, see https://www.github.com/sst/opencode-sdk-python#with_streaming_response
        """
        return AsyncConfigResourceWithStreamingResponse(self)

    async def get(
        self,
        *,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> Config:
        """Get config info"""
        return await self._get(
            "/config",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=Config,
        )

    async def providers(
        self,
        *,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> ConfigProvidersResponse:
        """List all providers"""
        return await self._get(
            "/config/providers",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=ConfigProvidersResponse,
        )


class ConfigResourceWithRawResponse:
    # Wraps each ConfigResource method so calls return the raw HTTP response.

    def __init__(self, config: ConfigResource) -> None:
        self._config = config

        self.get = to_raw_response_wrapper(
            config.get,
        )
        self.providers = to_raw_response_wrapper(
            config.providers,
        )


class AsyncConfigResourceWithRawResponse:
    # Async variant of ConfigResourceWithRawResponse.

    def __init__(self, config: AsyncConfigResource) -> None:
        self._config = config

        self.get = async_to_raw_response_wrapper(
            config.get,
        )
        self.providers = async_to_raw_response_wrapper(
            config.providers,
        )


class ConfigResourceWithStreamingResponse:
    # Wraps each ConfigResource method so the response body is streamed lazily.

    def __init__(self, config: ConfigResource) -> None:
        self._config = config

        self.get = to_streamed_response_wrapper(
            config.get,
        )
        self.providers = to_streamed_response_wrapper(
            config.providers,
        )


class AsyncConfigResourceWithStreamingResponse:
    # Async variant of ConfigResourceWithStreamingResponse.

    def __init__(self, config: AsyncConfigResource) -> None:
        self._config = config

        self.get = async_to_streamed_response_wrapper(
            config.get,
        )
        self.providers = async_to_streamed_response_wrapper(
            config.providers,
        )
|
||||
143
src/opencode_ai/resources/event.py
Normal file
143
src/opencode_ai/resources/event.py
Normal file
|
|
@ -0,0 +1,143 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, cast
|
||||
|
||||
import httpx
|
||||
|
||||
from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
|
||||
from .._compat import cached_property
|
||||
from .._resource import SyncAPIResource, AsyncAPIResource
|
||||
from .._response import (
|
||||
to_raw_response_wrapper,
|
||||
to_streamed_response_wrapper,
|
||||
async_to_raw_response_wrapper,
|
||||
async_to_streamed_response_wrapper,
|
||||
)
|
||||
from .._base_client import make_request_options
|
||||
from ..types.event_list_response import EventListResponse
|
||||
|
||||
__all__ = ["EventResource", "AsyncEventResource"]
|
||||
|
||||
|
||||
class EventResource(SyncAPIResource):
    # Synchronous client surface for the /event endpoint.

    @cached_property
    def with_raw_response(self) -> EventResourceWithRawResponse:
        """
        This property can be used as a prefix for any HTTP method call to return
        the raw response object instead of the parsed content.

        For more information, see https://www.github.com/sst/opencode-sdk-python#accessing-raw-response-data-eg-headers
        """
        return EventResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> EventResourceWithStreamingResponse:
        """
        An alternative to `.with_raw_response` that doesn't eagerly read the response body.

        For more information, see https://www.github.com/sst/opencode-sdk-python#with_streaming_response
        """
        return EventResourceWithStreamingResponse(self)

    def list(
        self,
        *,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> EventListResponse:
        """Get events"""
        # EventListResponse is a union, so the outer cast restores the precise
        # return type that cast_to cannot express directly.
        return cast(
            EventListResponse,
            self._get(
                "/event",
                options=make_request_options(
                    extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
                ),
                cast_to=cast(Any, EventListResponse),  # Union types cannot be passed in as arguments in the type system
            ),
        )


class AsyncEventResource(AsyncAPIResource):
    # Asynchronous counterpart of EventResource.

    @cached_property
    def with_raw_response(self) -> AsyncEventResourceWithRawResponse:
        """
        This property can be used as a prefix for any HTTP method call to return
        the raw response object instead of the parsed content.

        For more information, see https://www.github.com/sst/opencode-sdk-python#accessing-raw-response-data-eg-headers
        """
        return AsyncEventResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> AsyncEventResourceWithStreamingResponse:
        """
        An alternative to `.with_raw_response` that doesn't eagerly read the response body.

        For more information, see https://www.github.com/sst/opencode-sdk-python#with_streaming_response
        """
        return AsyncEventResourceWithStreamingResponse(self)

    async def list(
        self,
        *,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> EventListResponse:
        """Get events"""
        return cast(
            EventListResponse,
            await self._get(
                "/event",
                options=make_request_options(
                    extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
                ),
                cast_to=cast(Any, EventListResponse),  # Union types cannot be passed in as arguments in the type system
            ),
        )


class EventResourceWithRawResponse:
    # Wraps EventResource.list so calls return the raw HTTP response.

    def __init__(self, event: EventResource) -> None:
        self._event = event

        self.list = to_raw_response_wrapper(
            event.list,
        )


class AsyncEventResourceWithRawResponse:
    # Async variant of EventResourceWithRawResponse.

    def __init__(self, event: AsyncEventResource) -> None:
        self._event = event

        self.list = async_to_raw_response_wrapper(
            event.list,
        )


class EventResourceWithStreamingResponse:
    # Wraps EventResource.list so the response body is streamed lazily.

    def __init__(self, event: EventResource) -> None:
        self._event = event

        self.list = to_streamed_response_wrapper(
            event.list,
        )


class AsyncEventResourceWithStreamingResponse:
    # Async variant of EventResourceWithStreamingResponse.

    def __init__(self, event: AsyncEventResource) -> None:
        self._event = event

        self.list = async_to_streamed_response_wrapper(
            event.list,
        )
|
||||
169
src/opencode_ai/resources/file.py
Normal file
169
src/opencode_ai/resources/file.py
Normal file
|
|
@ -0,0 +1,169 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import httpx
|
||||
|
||||
from ..types import file_search_params
|
||||
from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
|
||||
from .._utils import maybe_transform, async_maybe_transform
|
||||
from .._compat import cached_property
|
||||
from .._resource import SyncAPIResource, AsyncAPIResource
|
||||
from .._response import (
|
||||
to_raw_response_wrapper,
|
||||
to_streamed_response_wrapper,
|
||||
async_to_raw_response_wrapper,
|
||||
async_to_streamed_response_wrapper,
|
||||
)
|
||||
from .._base_client import make_request_options
|
||||
from ..types.file_search_response import FileSearchResponse
|
||||
|
||||
__all__ = ["FileResource", "AsyncFileResource"]
|
||||
|
||||
|
||||
class FileResource(SyncAPIResource):
    # Synchronous client surface for the /file search endpoint.

    @cached_property
    def with_raw_response(self) -> FileResourceWithRawResponse:
        """
        This property can be used as a prefix for any HTTP method call to return
        the raw response object instead of the parsed content.

        For more information, see https://www.github.com/sst/opencode-sdk-python#accessing-raw-response-data-eg-headers
        """
        return FileResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> FileResourceWithStreamingResponse:
        """
        An alternative to `.with_raw_response` that doesn't eagerly read the response body.

        For more information, see https://www.github.com/sst/opencode-sdk-python#with_streaming_response
        """
        return FileResourceWithStreamingResponse(self)

    def search(
        self,
        *,
        query: str,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> FileSearchResponse:
        """
        Search for files

        Args:
          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        # `query` is sent as a URL query parameter, serialized via FileSearchParams.
        return self._get(
            "/file",
            options=make_request_options(
                extra_headers=extra_headers,
                extra_query=extra_query,
                extra_body=extra_body,
                timeout=timeout,
                query=maybe_transform({"query": query}, file_search_params.FileSearchParams),
            ),
            cast_to=FileSearchResponse,
        )
|
||||
|
||||
|
||||
class AsyncFileResource(AsyncAPIResource):
    """Async counterpart of ``FileResource`` for the `/file` endpoint."""

    @cached_property
    def with_raw_response(self) -> AsyncFileResourceWithRawResponse:
        """Prefix accessor returning raw (unparsed) HTTP responses.

        For more information, see https://www.github.com/sst/opencode-sdk-python#accessing-raw-response-data-eg-headers
        """
        return AsyncFileResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> AsyncFileResourceWithStreamingResponse:
        """Streaming-friendly variant of `.with_raw_response`.

        Unlike `.with_raw_response`, the response body is not read eagerly.

        For more information, see https://www.github.com/sst/opencode-sdk-python#with_streaming_response
        """
        return AsyncFileResourceWithStreamingResponse(self)

    async def search(
        self,
        *,
        query: str,
        # The keyword arguments below allow request-level overrides; values given
        # here take precedence over anything configured on the client.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> FileSearchResponse:
        """Search for files

        Args:
          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        request_options = make_request_options(
            extra_headers=extra_headers,
            extra_query=extra_query,
            extra_body=extra_body,
            timeout=timeout,
            query=await async_maybe_transform({"query": query}, file_search_params.FileSearchParams),
        )
        return await self._get("/file", options=request_options, cast_to=FileSearchResponse)
||||
|
||||
|
||||
class FileResourceWithRawResponse:
    """Wraps every ``FileResource`` method to return the raw HTTP response."""

    def __init__(self, file: FileResource) -> None:
        self._file = file
        self.search = to_raw_response_wrapper(file.search)
||||
|
||||
|
||||
class AsyncFileResourceWithRawResponse:
    """Wraps every ``AsyncFileResource`` method to return the raw HTTP response."""

    def __init__(self, file: AsyncFileResource) -> None:
        self._file = file
        self.search = async_to_raw_response_wrapper(file.search)
||||
|
||||
|
||||
class FileResourceWithStreamingResponse:
    """Wraps every ``FileResource`` method to return a streamed response."""

    def __init__(self, file: FileResource) -> None:
        self._file = file
        self.search = to_streamed_response_wrapper(file.search)
||||
|
||||
|
||||
class AsyncFileResourceWithStreamingResponse:
    """Wraps every ``AsyncFileResource`` method to return a streamed response."""

    def __init__(self, file: AsyncFileResource) -> None:
        self._file = file
        self.search = async_to_streamed_response_wrapper(file.search)
||||
895
src/opencode_ai/resources/session.py
Normal file
895
src/opencode_ai/resources/session.py
Normal file
|
|
@ -0,0 +1,895 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Iterable
|
||||
|
||||
import httpx
|
||||
|
||||
from ..types import session_chat_params, session_init_params, session_summarize_params
|
||||
from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
|
||||
from .._utils import maybe_transform, async_maybe_transform
|
||||
from .._compat import cached_property
|
||||
from .._resource import SyncAPIResource, AsyncAPIResource
|
||||
from .._response import (
|
||||
to_raw_response_wrapper,
|
||||
to_streamed_response_wrapper,
|
||||
async_to_raw_response_wrapper,
|
||||
async_to_streamed_response_wrapper,
|
||||
)
|
||||
from .._base_client import make_request_options
|
||||
from ..types.message import Message
|
||||
from ..types.session import Session
|
||||
from ..types.message_part_param import MessagePartParam
|
||||
from ..types.session_init_response import SessionInitResponse
|
||||
from ..types.session_list_response import SessionListResponse
|
||||
from ..types.session_abort_response import SessionAbortResponse
|
||||
from ..types.session_delete_response import SessionDeleteResponse
|
||||
from ..types.session_messages_response import SessionMessagesResponse
|
||||
from ..types.session_summarize_response import SessionSummarizeResponse
|
||||
|
||||
__all__ = ["SessionResource", "AsyncSessionResource"]
|
||||
|
||||
|
||||
class SessionResource(SyncAPIResource):
    """Synchronous client surface for the `/session` endpoints."""

    @cached_property
    def with_raw_response(self) -> SessionResourceWithRawResponse:
        """Prefix accessor returning raw (unparsed) HTTP responses.

        For more information, see https://www.github.com/sst/opencode-sdk-python#accessing-raw-response-data-eg-headers
        """
        return SessionResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> SessionResourceWithStreamingResponse:
        """Streaming-friendly variant of `.with_raw_response`.

        Unlike `.with_raw_response`, the response body is not read eagerly.

        For more information, see https://www.github.com/sst/opencode-sdk-python#with_streaming_response
        """
        return SessionResourceWithStreamingResponse(self)

    def create(
        self,
        *,
        # The keyword arguments below allow request-level overrides; values given
        # here take precedence over anything configured on the client.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> Session:
        """Create a new session"""
        opts = make_request_options(
            extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
        )
        return self._post("/session", options=opts, cast_to=Session)

    def list(
        self,
        *,
        # The keyword arguments below allow request-level overrides; values given
        # here take precedence over anything configured on the client.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> SessionListResponse:
        """List all sessions"""
        opts = make_request_options(
            extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
        )
        return self._get("/session", options=opts, cast_to=SessionListResponse)

    def delete(
        self,
        id: str,
        *,
        # The keyword arguments below allow request-level overrides; values given
        # here take precedence over anything configured on the client.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> SessionDeleteResponse:
        """Delete a session and all its data

        Args:
          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        opts = make_request_options(
            extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
        )
        return self._delete(f"/session/{id}", options=opts, cast_to=SessionDeleteResponse)

    def abort(
        self,
        id: str,
        *,
        # The keyword arguments below allow request-level overrides; values given
        # here take precedence over anything configured on the client.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> SessionAbortResponse:
        """Abort a session

        Args:
          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        opts = make_request_options(
            extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
        )
        return self._post(f"/session/{id}/abort", options=opts, cast_to=SessionAbortResponse)

    def chat(
        self,
        id: str,
        *,
        model_id: str,
        parts: Iterable[MessagePartParam],
        provider_id: str,
        session_id: str,
        # The keyword arguments below allow request-level overrides; values given
        # here take precedence over anything configured on the client.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> Message:
        """Create and send a new message to a session

        Args:
          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        payload = maybe_transform(
            {
                "model_id": model_id,
                "parts": parts,
                "provider_id": provider_id,
                "session_id": session_id,
            },
            session_chat_params.SessionChatParams,
        )
        opts = make_request_options(
            extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
        )
        return self._post(f"/session/{id}/message", body=payload, options=opts, cast_to=Message)

    def init(
        self,
        id: str,
        *,
        model_id: str,
        provider_id: str,
        # The keyword arguments below allow request-level overrides; values given
        # here take precedence over anything configured on the client.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> SessionInitResponse:
        """Analyze the app and create an AGENTS.md file

        Args:
          id: Session ID

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        payload = maybe_transform(
            {
                "model_id": model_id,
                "provider_id": provider_id,
            },
            session_init_params.SessionInitParams,
        )
        opts = make_request_options(
            extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
        )
        return self._post(f"/session/{id}/init", body=payload, options=opts, cast_to=SessionInitResponse)

    def messages(
        self,
        id: str,
        *,
        # The keyword arguments below allow request-level overrides; values given
        # here take precedence over anything configured on the client.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> SessionMessagesResponse:
        """List messages for a session

        Args:
          id: Session ID

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        opts = make_request_options(
            extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
        )
        return self._get(f"/session/{id}/message", options=opts, cast_to=SessionMessagesResponse)

    def share(
        self,
        id: str,
        *,
        # The keyword arguments below allow request-level overrides; values given
        # here take precedence over anything configured on the client.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> Session:
        """Share a session

        Args:
          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        opts = make_request_options(
            extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
        )
        return self._post(f"/session/{id}/share", options=opts, cast_to=Session)

    def summarize(
        self,
        id: str,
        *,
        model_id: str,
        provider_id: str,
        # The keyword arguments below allow request-level overrides; values given
        # here take precedence over anything configured on the client.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> SessionSummarizeResponse:
        """Summarize the session

        Args:
          id: Session ID

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        payload = maybe_transform(
            {
                "model_id": model_id,
                "provider_id": provider_id,
            },
            session_summarize_params.SessionSummarizeParams,
        )
        opts = make_request_options(
            extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
        )
        return self._post(f"/session/{id}/summarize", body=payload, options=opts, cast_to=SessionSummarizeResponse)

    def unshare(
        self,
        id: str,
        *,
        # The keyword arguments below allow request-level overrides; values given
        # here take precedence over anything configured on the client.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> Session:
        """Unshare the session

        Args:
          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        opts = make_request_options(
            extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
        )
        return self._delete(f"/session/{id}/share", options=opts, cast_to=Session)
|
||||
|
||||
|
||||
class AsyncSessionResource(AsyncAPIResource):
    """Asynchronous client surface for the `/session` endpoints."""

    @cached_property
    def with_raw_response(self) -> AsyncSessionResourceWithRawResponse:
        """Prefix accessor returning raw (unparsed) HTTP responses.

        For more information, see https://www.github.com/sst/opencode-sdk-python#accessing-raw-response-data-eg-headers
        """
        return AsyncSessionResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> AsyncSessionResourceWithStreamingResponse:
        """Streaming-friendly variant of `.with_raw_response`.

        Unlike `.with_raw_response`, the response body is not read eagerly.

        For more information, see https://www.github.com/sst/opencode-sdk-python#with_streaming_response
        """
        return AsyncSessionResourceWithStreamingResponse(self)

    async def create(
        self,
        *,
        # The keyword arguments below allow request-level overrides; values given
        # here take precedence over anything configured on the client.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> Session:
        """Create a new session"""
        opts = make_request_options(
            extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
        )
        return await self._post("/session", options=opts, cast_to=Session)

    async def list(
        self,
        *,
        # The keyword arguments below allow request-level overrides; values given
        # here take precedence over anything configured on the client.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> SessionListResponse:
        """List all sessions"""
        opts = make_request_options(
            extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
        )
        return await self._get("/session", options=opts, cast_to=SessionListResponse)

    async def delete(
        self,
        id: str,
        *,
        # The keyword arguments below allow request-level overrides; values given
        # here take precedence over anything configured on the client.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> SessionDeleteResponse:
        """Delete a session and all its data

        Args:
          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        opts = make_request_options(
            extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
        )
        return await self._delete(f"/session/{id}", options=opts, cast_to=SessionDeleteResponse)

    async def abort(
        self,
        id: str,
        *,
        # The keyword arguments below allow request-level overrides; values given
        # here take precedence over anything configured on the client.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> SessionAbortResponse:
        """Abort a session

        Args:
          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        opts = make_request_options(
            extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
        )
        return await self._post(f"/session/{id}/abort", options=opts, cast_to=SessionAbortResponse)

    async def chat(
        self,
        id: str,
        *,
        model_id: str,
        parts: Iterable[MessagePartParam],
        provider_id: str,
        session_id: str,
        # The keyword arguments below allow request-level overrides; values given
        # here take precedence over anything configured on the client.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> Message:
        """Create and send a new message to a session

        Args:
          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        payload = await async_maybe_transform(
            {
                "model_id": model_id,
                "parts": parts,
                "provider_id": provider_id,
                "session_id": session_id,
            },
            session_chat_params.SessionChatParams,
        )
        opts = make_request_options(
            extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
        )
        return await self._post(f"/session/{id}/message", body=payload, options=opts, cast_to=Message)

    async def init(
        self,
        id: str,
        *,
        model_id: str,
        provider_id: str,
        # The keyword arguments below allow request-level overrides; values given
        # here take precedence over anything configured on the client.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> SessionInitResponse:
        """Analyze the app and create an AGENTS.md file

        Args:
          id: Session ID

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        payload = await async_maybe_transform(
            {
                "model_id": model_id,
                "provider_id": provider_id,
            },
            session_init_params.SessionInitParams,
        )
        opts = make_request_options(
            extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
        )
        return await self._post(f"/session/{id}/init", body=payload, options=opts, cast_to=SessionInitResponse)

    async def messages(
        self,
        id: str,
        *,
        # The keyword arguments below allow request-level overrides; values given
        # here take precedence over anything configured on the client.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> SessionMessagesResponse:
        """List messages for a session

        Args:
          id: Session ID

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        opts = make_request_options(
            extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
        )
        return await self._get(f"/session/{id}/message", options=opts, cast_to=SessionMessagesResponse)

    async def share(
        self,
        id: str,
        *,
        # The keyword arguments below allow request-level overrides; values given
        # here take precedence over anything configured on the client.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> Session:
        """Share a session

        Args:
          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        opts = make_request_options(
            extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
        )
        return await self._post(f"/session/{id}/share", options=opts, cast_to=Session)

    async def summarize(
        self,
        id: str,
        *,
        model_id: str,
        provider_id: str,
        # The keyword arguments below allow request-level overrides; values given
        # here take precedence over anything configured on the client.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> SessionSummarizeResponse:
        """Summarize the session

        Args:
          id: Session ID

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        payload = await async_maybe_transform(
            {
                "model_id": model_id,
                "provider_id": provider_id,
            },
            session_summarize_params.SessionSummarizeParams,
        )
        opts = make_request_options(
            extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
        )
        return await self._post(f"/session/{id}/summarize", body=payload, options=opts, cast_to=SessionSummarizeResponse)

    async def unshare(
        self,
        id: str,
        *,
        # The keyword arguments below allow request-level overrides; values given
        # here take precedence over anything configured on the client.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> Session:
        """Unshare the session

        Args:
          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not id:
            raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
        opts = make_request_options(
            extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
        )
        return await self._delete(f"/session/{id}/share", options=opts, cast_to=Session)
|
||||
|
||||
|
||||
class SessionResourceWithRawResponse:
    """Wraps every ``SessionResource`` method to return the raw HTTP response."""

    def __init__(self, session: SessionResource) -> None:
        self._session = session
        self.create = to_raw_response_wrapper(session.create)
        self.list = to_raw_response_wrapper(session.list)
        self.delete = to_raw_response_wrapper(session.delete)
        self.abort = to_raw_response_wrapper(session.abort)
        self.chat = to_raw_response_wrapper(session.chat)
        self.init = to_raw_response_wrapper(session.init)
        self.messages = to_raw_response_wrapper(session.messages)
        self.share = to_raw_response_wrapper(session.share)
        self.summarize = to_raw_response_wrapper(session.summarize)
        self.unshare = to_raw_response_wrapper(session.unshare)
|
||||
|
||||
|
||||
class AsyncSessionResourceWithRawResponse:
|
||||
def __init__(self, session: AsyncSessionResource) -> None:
|
||||
self._session = session
|
||||
|
||||
self.create = async_to_raw_response_wrapper(
|
||||
session.create,
|
||||
)
|
||||
self.list = async_to_raw_response_wrapper(
|
||||
session.list,
|
||||
)
|
||||
self.delete = async_to_raw_response_wrapper(
|
||||
session.delete,
|
||||
)
|
||||
self.abort = async_to_raw_response_wrapper(
|
||||
session.abort,
|
||||
)
|
||||
self.chat = async_to_raw_response_wrapper(
|
||||
session.chat,
|
||||
)
|
||||
self.init = async_to_raw_response_wrapper(
|
||||
session.init,
|
||||
)
|
||||
self.messages = async_to_raw_response_wrapper(
|
||||
session.messages,
|
||||
)
|
||||
self.share = async_to_raw_response_wrapper(
|
||||
session.share,
|
||||
)
|
||||
self.summarize = async_to_raw_response_wrapper(
|
||||
session.summarize,
|
||||
)
|
||||
self.unshare = async_to_raw_response_wrapper(
|
||||
session.unshare,
|
||||
)
|
||||
|
||||
|
||||
class SessionResourceWithStreamingResponse:
|
||||
def __init__(self, session: SessionResource) -> None:
|
||||
self._session = session
|
||||
|
||||
self.create = to_streamed_response_wrapper(
|
||||
session.create,
|
||||
)
|
||||
self.list = to_streamed_response_wrapper(
|
||||
session.list,
|
||||
)
|
||||
self.delete = to_streamed_response_wrapper(
|
||||
session.delete,
|
||||
)
|
||||
self.abort = to_streamed_response_wrapper(
|
||||
session.abort,
|
||||
)
|
||||
self.chat = to_streamed_response_wrapper(
|
||||
session.chat,
|
||||
)
|
||||
self.init = to_streamed_response_wrapper(
|
||||
session.init,
|
||||
)
|
||||
self.messages = to_streamed_response_wrapper(
|
||||
session.messages,
|
||||
)
|
||||
self.share = to_streamed_response_wrapper(
|
||||
session.share,
|
||||
)
|
||||
self.summarize = to_streamed_response_wrapper(
|
||||
session.summarize,
|
||||
)
|
||||
self.unshare = to_streamed_response_wrapper(
|
||||
session.unshare,
|
||||
)
|
||||
|
||||
|
||||
class AsyncSessionResourceWithStreamingResponse:
|
||||
def __init__(self, session: AsyncSessionResource) -> None:
|
||||
self._session = session
|
||||
|
||||
self.create = async_to_streamed_response_wrapper(
|
||||
session.create,
|
||||
)
|
||||
self.list = async_to_streamed_response_wrapper(
|
||||
session.list,
|
||||
)
|
||||
self.delete = async_to_streamed_response_wrapper(
|
||||
session.delete,
|
||||
)
|
||||
self.abort = async_to_streamed_response_wrapper(
|
||||
session.abort,
|
||||
)
|
||||
self.chat = async_to_streamed_response_wrapper(
|
||||
session.chat,
|
||||
)
|
||||
self.init = async_to_streamed_response_wrapper(
|
||||
session.init,
|
||||
)
|
||||
self.messages = async_to_streamed_response_wrapper(
|
||||
session.messages,
|
||||
)
|
||||
self.share = async_to_streamed_response_wrapper(
|
||||
session.share,
|
||||
)
|
||||
self.summarize = async_to_streamed_response_wrapper(
|
||||
session.summarize,
|
||||
)
|
||||
self.unshare = async_to_streamed_response_wrapper(
|
||||
session.unshare,
|
||||
)
|
||||
48
src/opencode_ai/types/__init__.py
Normal file
48
src/opencode_ai/types/__init__.py
Normal file
|
|
@ -0,0 +1,48 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from .app import App as App
|
||||
from .model import Model as Model
|
||||
from .config import Config as Config
|
||||
from .shared import UnknownError as UnknownError, ProviderAuthError as ProviderAuthError
|
||||
from .message import Message as Message
|
||||
from .session import Session as Session
|
||||
from .keybinds import Keybinds as Keybinds
|
||||
from .provider import Provider as Provider
|
||||
from .file_part import FilePart as FilePart
|
||||
from .mcp_local import McpLocal as McpLocal
|
||||
from .text_part import TextPart as TextPart
|
||||
from .tool_call import ToolCall as ToolCall
|
||||
from .mcp_remote import McpRemote as McpRemote
|
||||
from .tool_result import ToolResult as ToolResult
|
||||
from .message_part import MessagePart as MessagePart
|
||||
from .reasoning_part import ReasoningPart as ReasoningPart
|
||||
from .file_part_param import FilePartParam as FilePartParam
|
||||
from .source_url_part import SourceURLPart as SourceURLPart
|
||||
from .step_start_part import StepStartPart as StepStartPart
|
||||
from .text_part_param import TextPartParam as TextPartParam
|
||||
from .tool_call_param import ToolCallParam as ToolCallParam
|
||||
from .app_init_response import AppInitResponse as AppInitResponse
|
||||
from .tool_partial_call import ToolPartialCall as ToolPartialCall
|
||||
from .tool_result_param import ToolResultParam as ToolResultParam
|
||||
from .file_search_params import FileSearchParams as FileSearchParams
|
||||
from .message_part_param import MessagePartParam as MessagePartParam
|
||||
from .event_list_response import EventListResponse as EventListResponse
|
||||
from .session_chat_params import SessionChatParams as SessionChatParams
|
||||
from .session_init_params import SessionInitParams as SessionInitParams
|
||||
from .file_search_response import FileSearchResponse as FileSearchResponse
|
||||
from .reasoning_part_param import ReasoningPartParam as ReasoningPartParam
|
||||
from .tool_invocation_part import ToolInvocationPart as ToolInvocationPart
|
||||
from .session_init_response import SessionInitResponse as SessionInitResponse
|
||||
from .session_list_response import SessionListResponse as SessionListResponse
|
||||
from .source_url_part_param import SourceURLPartParam as SourceURLPartParam
|
||||
from .step_start_part_param import StepStartPartParam as StepStartPartParam
|
||||
from .session_abort_response import SessionAbortResponse as SessionAbortResponse
|
||||
from .session_delete_response import SessionDeleteResponse as SessionDeleteResponse
|
||||
from .tool_partial_call_param import ToolPartialCallParam as ToolPartialCallParam
|
||||
from .session_summarize_params import SessionSummarizeParams as SessionSummarizeParams
|
||||
from .config_providers_response import ConfigProvidersResponse as ConfigProvidersResponse
|
||||
from .session_messages_response import SessionMessagesResponse as SessionMessagesResponse
|
||||
from .session_summarize_response import SessionSummarizeResponse as SessionSummarizeResponse
|
||||
from .tool_invocation_part_param import ToolInvocationPartParam as ToolInvocationPartParam
|
||||
37
src/opencode_ai/types/app.py
Normal file
37
src/opencode_ai/types/app.py
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from .._models import BaseModel
|
||||
|
||||
__all__ = ["App", "Path", "Time"]
|
||||
|
||||
|
||||
class Path(BaseModel):
|
||||
config: str
|
||||
|
||||
cwd: str
|
||||
|
||||
data: str
|
||||
|
||||
root: str
|
||||
|
||||
state: str
|
||||
|
||||
|
||||
class Time(BaseModel):
|
||||
initialized: Optional[float] = None
|
||||
|
||||
|
||||
class App(BaseModel):
|
||||
git: bool
|
||||
|
||||
hostname: str
|
||||
|
||||
path: Path
|
||||
|
||||
project: str
|
||||
|
||||
time: Time
|
||||
|
||||
user: str
|
||||
7
src/opencode_ai/types/app_init_response.py
Normal file
7
src/opencode_ai/types/app_init_response.py
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
__all__ = ["AppInitResponse"]
|
||||
|
||||
AppInitResponse: TypeAlias = bool
|
||||
133
src/opencode_ai/types/config.py
Normal file
133
src/opencode_ai/types/config.py
Normal file
|
|
@ -0,0 +1,133 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import Dict, List, Union, Optional
|
||||
from typing_extensions import Annotated, TypeAlias
|
||||
|
||||
from pydantic import Field as FieldInfo
|
||||
|
||||
from .._utils import PropertyInfo
|
||||
from .._models import BaseModel
|
||||
from .keybinds import Keybinds
|
||||
from .mcp_local import McpLocal
|
||||
from .mcp_remote import McpRemote
|
||||
|
||||
__all__ = [
|
||||
"Config",
|
||||
"Experimental",
|
||||
"ExperimentalHook",
|
||||
"ExperimentalHookFileEdited",
|
||||
"ExperimentalHookSessionCompleted",
|
||||
"Mcp",
|
||||
"Provider",
|
||||
"ProviderModels",
|
||||
"ProviderModelsCost",
|
||||
"ProviderModelsLimit",
|
||||
]
|
||||
|
||||
|
||||
class ExperimentalHookFileEdited(BaseModel):
|
||||
command: List[str]
|
||||
|
||||
environment: Optional[Dict[str, str]] = None
|
||||
|
||||
|
||||
class ExperimentalHookSessionCompleted(BaseModel):
|
||||
command: List[str]
|
||||
|
||||
environment: Optional[Dict[str, str]] = None
|
||||
|
||||
|
||||
class ExperimentalHook(BaseModel):
|
||||
file_edited: Optional[Dict[str, List[ExperimentalHookFileEdited]]] = None
|
||||
|
||||
session_completed: Optional[List[ExperimentalHookSessionCompleted]] = None
|
||||
|
||||
|
||||
class Experimental(BaseModel):
|
||||
hook: Optional[ExperimentalHook] = None
|
||||
|
||||
|
||||
Mcp: TypeAlias = Annotated[Union[McpLocal, McpRemote], PropertyInfo(discriminator="type")]
|
||||
|
||||
|
||||
class ProviderModelsCost(BaseModel):
|
||||
input: float
|
||||
|
||||
output: float
|
||||
|
||||
cache_read: Optional[float] = None
|
||||
|
||||
cache_write: Optional[float] = None
|
||||
|
||||
|
||||
class ProviderModelsLimit(BaseModel):
|
||||
context: float
|
||||
|
||||
output: float
|
||||
|
||||
|
||||
class ProviderModels(BaseModel):
|
||||
id: Optional[str] = None
|
||||
|
||||
attachment: Optional[bool] = None
|
||||
|
||||
cost: Optional[ProviderModelsCost] = None
|
||||
|
||||
limit: Optional[ProviderModelsLimit] = None
|
||||
|
||||
name: Optional[str] = None
|
||||
|
||||
options: Optional[Dict[str, object]] = None
|
||||
|
||||
reasoning: Optional[bool] = None
|
||||
|
||||
temperature: Optional[bool] = None
|
||||
|
||||
tool_call: Optional[bool] = None
|
||||
|
||||
|
||||
class Provider(BaseModel):
|
||||
models: Dict[str, ProviderModels]
|
||||
|
||||
id: Optional[str] = None
|
||||
|
||||
api: Optional[str] = None
|
||||
|
||||
env: Optional[List[str]] = None
|
||||
|
||||
name: Optional[str] = None
|
||||
|
||||
npm: Optional[str] = None
|
||||
|
||||
options: Optional[Dict[str, object]] = None
|
||||
|
||||
|
||||
class Config(BaseModel):
|
||||
schema_: Optional[str] = FieldInfo(alias="$schema", default=None)
|
||||
"""JSON schema reference for configuration validation"""
|
||||
|
||||
autoshare: Optional[bool] = None
|
||||
"""Share newly created sessions automatically"""
|
||||
|
||||
autoupdate: Optional[bool] = None
|
||||
"""Automatically update to the latest version"""
|
||||
|
||||
disabled_providers: Optional[List[str]] = None
|
||||
"""Disable providers that are loaded automatically"""
|
||||
|
||||
experimental: Optional[Experimental] = None
|
||||
|
||||
keybinds: Optional[Keybinds] = None
|
||||
"""Custom keybind configurations"""
|
||||
|
||||
mcp: Optional[Dict[str, Mcp]] = None
|
||||
"""MCP (Model Context Protocol) server configurations"""
|
||||
|
||||
model: Optional[str] = None
|
||||
"""Model to use in the format of provider/model, eg anthropic/claude-2"""
|
||||
|
||||
provider: Optional[Dict[str, Provider]] = None
|
||||
"""Custom provider configurations and model overrides"""
|
||||
|
||||
theme: Optional[str] = None
|
||||
"""Theme name to use for the interface"""
|
||||
14
src/opencode_ai/types/config_providers_response.py
Normal file
14
src/opencode_ai/types/config_providers_response.py
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import Dict, List
|
||||
|
||||
from .._models import BaseModel
|
||||
from .provider import Provider
|
||||
|
||||
__all__ = ["ConfigProvidersResponse"]
|
||||
|
||||
|
||||
class ConfigProvidersResponse(BaseModel):
|
||||
default: Dict[str, str]
|
||||
|
||||
providers: List[Provider]
|
||||
177
src/opencode_ai/types/event_list_response.py
Normal file
177
src/opencode_ai/types/event_list_response.py
Normal file
|
|
@ -0,0 +1,177 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import Dict, Union, Optional
|
||||
from typing_extensions import Literal, Annotated, TypeAlias
|
||||
|
||||
from pydantic import Field as FieldInfo
|
||||
|
||||
from .._utils import PropertyInfo
|
||||
from .message import Message
|
||||
from .session import Session
|
||||
from .._models import BaseModel
|
||||
from .message_part import MessagePart
|
||||
from .shared.unknown_error import UnknownError
|
||||
from .shared.provider_auth_error import ProviderAuthError
|
||||
|
||||
__all__ = [
|
||||
"EventListResponse",
|
||||
"EventStorageWrite",
|
||||
"EventStorageWriteProperties",
|
||||
"EventInstallationUpdated",
|
||||
"EventInstallationUpdatedProperties",
|
||||
"EventLspClientDiagnostics",
|
||||
"EventLspClientDiagnosticsProperties",
|
||||
"EventPermissionUpdated",
|
||||
"EventPermissionUpdatedProperties",
|
||||
"EventPermissionUpdatedPropertiesTime",
|
||||
"EventMessageUpdated",
|
||||
"EventMessageUpdatedProperties",
|
||||
"EventMessagePartUpdated",
|
||||
"EventMessagePartUpdatedProperties",
|
||||
"EventSessionUpdated",
|
||||
"EventSessionUpdatedProperties",
|
||||
"EventSessionDeleted",
|
||||
"EventSessionDeletedProperties",
|
||||
"EventSessionError",
|
||||
"EventSessionErrorProperties",
|
||||
"EventSessionErrorPropertiesError",
|
||||
"EventSessionErrorPropertiesErrorMessageOutputLengthError",
|
||||
]
|
||||
|
||||
|
||||
class EventStorageWriteProperties(BaseModel):
|
||||
key: str
|
||||
|
||||
content: Optional[object] = None
|
||||
|
||||
|
||||
class EventStorageWrite(BaseModel):
|
||||
properties: EventStorageWriteProperties
|
||||
|
||||
type: Literal["storage.write"]
|
||||
|
||||
|
||||
class EventInstallationUpdatedProperties(BaseModel):
|
||||
version: str
|
||||
|
||||
|
||||
class EventInstallationUpdated(BaseModel):
|
||||
properties: EventInstallationUpdatedProperties
|
||||
|
||||
type: Literal["installation.updated"]
|
||||
|
||||
|
||||
class EventLspClientDiagnosticsProperties(BaseModel):
|
||||
path: str
|
||||
|
||||
server_id: str = FieldInfo(alias="serverID")
|
||||
|
||||
|
||||
class EventLspClientDiagnostics(BaseModel):
|
||||
properties: EventLspClientDiagnosticsProperties
|
||||
|
||||
type: Literal["lsp.client.diagnostics"]
|
||||
|
||||
|
||||
class EventPermissionUpdatedPropertiesTime(BaseModel):
|
||||
created: float
|
||||
|
||||
|
||||
class EventPermissionUpdatedProperties(BaseModel):
|
||||
id: str
|
||||
|
||||
metadata: Dict[str, object]
|
||||
|
||||
session_id: str = FieldInfo(alias="sessionID")
|
||||
|
||||
time: EventPermissionUpdatedPropertiesTime
|
||||
|
||||
title: str
|
||||
|
||||
|
||||
class EventPermissionUpdated(BaseModel):
|
||||
properties: EventPermissionUpdatedProperties
|
||||
|
||||
type: Literal["permission.updated"]
|
||||
|
||||
|
||||
class EventMessageUpdatedProperties(BaseModel):
|
||||
info: Message
|
||||
|
||||
|
||||
class EventMessageUpdated(BaseModel):
|
||||
properties: EventMessageUpdatedProperties
|
||||
|
||||
type: Literal["message.updated"]
|
||||
|
||||
|
||||
class EventMessagePartUpdatedProperties(BaseModel):
|
||||
message_id: str = FieldInfo(alias="messageID")
|
||||
|
||||
part: MessagePart
|
||||
|
||||
session_id: str = FieldInfo(alias="sessionID")
|
||||
|
||||
|
||||
class EventMessagePartUpdated(BaseModel):
|
||||
properties: EventMessagePartUpdatedProperties
|
||||
|
||||
type: Literal["message.part.updated"]
|
||||
|
||||
|
||||
class EventSessionUpdatedProperties(BaseModel):
|
||||
info: Session
|
||||
|
||||
|
||||
class EventSessionUpdated(BaseModel):
|
||||
properties: EventSessionUpdatedProperties
|
||||
|
||||
type: Literal["session.updated"]
|
||||
|
||||
|
||||
class EventSessionDeletedProperties(BaseModel):
|
||||
info: Session
|
||||
|
||||
|
||||
class EventSessionDeleted(BaseModel):
|
||||
properties: EventSessionDeletedProperties
|
||||
|
||||
type: Literal["session.deleted"]
|
||||
|
||||
|
||||
class EventSessionErrorPropertiesErrorMessageOutputLengthError(BaseModel):
|
||||
data: object
|
||||
|
||||
name: Literal["MessageOutputLengthError"]
|
||||
|
||||
|
||||
EventSessionErrorPropertiesError: TypeAlias = Annotated[
|
||||
Union[ProviderAuthError, UnknownError, EventSessionErrorPropertiesErrorMessageOutputLengthError],
|
||||
PropertyInfo(discriminator="name"),
|
||||
]
|
||||
|
||||
|
||||
class EventSessionErrorProperties(BaseModel):
|
||||
error: Optional[EventSessionErrorPropertiesError] = None
|
||||
|
||||
|
||||
class EventSessionError(BaseModel):
|
||||
properties: EventSessionErrorProperties
|
||||
|
||||
type: Literal["session.error"]
|
||||
|
||||
|
||||
EventListResponse: TypeAlias = Annotated[
|
||||
Union[
|
||||
EventStorageWrite,
|
||||
EventInstallationUpdated,
|
||||
EventLspClientDiagnostics,
|
||||
EventPermissionUpdated,
|
||||
EventMessageUpdated,
|
||||
EventMessagePartUpdated,
|
||||
EventSessionUpdated,
|
||||
EventSessionDeleted,
|
||||
EventSessionError,
|
||||
],
|
||||
PropertyInfo(discriminator="type"),
|
||||
]
|
||||
20
src/opencode_ai/types/file_part.py
Normal file
20
src/opencode_ai/types/file_part.py
Normal file
|
|
@ -0,0 +1,20 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import Optional
|
||||
from typing_extensions import Literal
|
||||
|
||||
from pydantic import Field as FieldInfo
|
||||
|
||||
from .._models import BaseModel
|
||||
|
||||
__all__ = ["FilePart"]
|
||||
|
||||
|
||||
class FilePart(BaseModel):
|
||||
media_type: str = FieldInfo(alias="mediaType")
|
||||
|
||||
type: Literal["file"]
|
||||
|
||||
url: str
|
||||
|
||||
filename: Optional[str] = None
|
||||
19
src/opencode_ai/types/file_part_param.py
Normal file
19
src/opencode_ai/types/file_part_param.py
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing_extensions import Literal, Required, Annotated, TypedDict
|
||||
|
||||
from .._utils import PropertyInfo
|
||||
|
||||
__all__ = ["FilePartParam"]
|
||||
|
||||
|
||||
class FilePartParam(TypedDict, total=False):
|
||||
media_type: Required[Annotated[str, PropertyInfo(alias="mediaType")]]
|
||||
|
||||
type: Required[Literal["file"]]
|
||||
|
||||
url: Required[str]
|
||||
|
||||
filename: str
|
||||
11
src/opencode_ai/types/file_search_params.py
Normal file
11
src/opencode_ai/types/file_search_params.py
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing_extensions import Required, TypedDict
|
||||
|
||||
__all__ = ["FileSearchParams"]
|
||||
|
||||
|
||||
class FileSearchParams(TypedDict, total=False):
|
||||
query: Required[str]
|
||||
8
src/opencode_ai/types/file_search_response.py
Normal file
8
src/opencode_ai/types/file_search_response.py
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import List
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
__all__ = ["FileSearchResponse"]
|
||||
|
||||
FileSearchResponse: TypeAlias = List[str]
|
||||
92
src/opencode_ai/types/keybinds.py
Normal file
92
src/opencode_ai/types/keybinds.py
Normal file
|
|
@ -0,0 +1,92 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import Field as FieldInfo
|
||||
|
||||
from .._models import BaseModel
|
||||
|
||||
__all__ = ["Keybinds"]
|
||||
|
||||
|
||||
class Keybinds(BaseModel):
|
||||
app_exit: Optional[str] = None
|
||||
"""Exit the application"""
|
||||
|
||||
editor_open: Optional[str] = None
|
||||
"""Open external editor"""
|
||||
|
||||
help: Optional[str] = None
|
||||
"""Show help dialog"""
|
||||
|
||||
history_next: Optional[str] = None
|
||||
"""Navigate to next history item"""
|
||||
|
||||
history_previous: Optional[str] = None
|
||||
"""Navigate to previous history item"""
|
||||
|
||||
input_clear: Optional[str] = None
|
||||
"""Clear input field"""
|
||||
|
||||
input_newline: Optional[str] = None
|
||||
"""Insert newline in input"""
|
||||
|
||||
input_paste: Optional[str] = None
|
||||
"""Paste from clipboard"""
|
||||
|
||||
input_submit: Optional[str] = None
|
||||
"""Submit input"""
|
||||
|
||||
leader: Optional[str] = None
|
||||
"""Leader key for keybind combinations"""
|
||||
|
||||
messages_first: Optional[str] = None
|
||||
"""Navigate to first message"""
|
||||
|
||||
messages_half_page_down: Optional[str] = None
|
||||
"""Scroll messages down by half page"""
|
||||
|
||||
messages_half_page_up: Optional[str] = None
|
||||
"""Scroll messages up by half page"""
|
||||
|
||||
messages_last: Optional[str] = None
|
||||
"""Navigate to last message"""
|
||||
|
||||
messages_next: Optional[str] = None
|
||||
"""Navigate to next message"""
|
||||
|
||||
messages_page_down: Optional[str] = None
|
||||
"""Scroll messages down by one page"""
|
||||
|
||||
messages_page_up: Optional[str] = None
|
||||
"""Scroll messages up by one page"""
|
||||
|
||||
messages_previous: Optional[str] = None
|
||||
"""Navigate to previous message"""
|
||||
|
||||
api_model_list: Optional[str] = FieldInfo(alias="model_list", default=None)
|
||||
"""List available models"""
|
||||
|
||||
project_init: Optional[str] = None
|
||||
"""Initialize project configuration"""
|
||||
|
||||
session_compact: Optional[str] = None
|
||||
"""Toggle compact mode for session"""
|
||||
|
||||
session_interrupt: Optional[str] = None
|
||||
"""Interrupt current session"""
|
||||
|
||||
session_list: Optional[str] = None
|
||||
"""List all sessions"""
|
||||
|
||||
session_new: Optional[str] = None
|
||||
"""Create a new session"""
|
||||
|
||||
session_share: Optional[str] = None
|
||||
"""Share current session"""
|
||||
|
||||
theme_list: Optional[str] = None
|
||||
"""List available themes"""
|
||||
|
||||
tool_details: Optional[str] = None
|
||||
"""Show tool details"""
|
||||
19
src/opencode_ai/types/mcp_local.py
Normal file
19
src/opencode_ai/types/mcp_local.py
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import Dict, List, Optional
|
||||
from typing_extensions import Literal
|
||||
|
||||
from .._models import BaseModel
|
||||
|
||||
__all__ = ["McpLocal"]
|
||||
|
||||
|
||||
class McpLocal(BaseModel):
|
||||
command: List[str]
|
||||
"""Command and arguments to run the MCP server"""
|
||||
|
||||
type: Literal["local"]
|
||||
"""Type of MCP server connection"""
|
||||
|
||||
environment: Optional[Dict[str, str]] = None
|
||||
"""Environment variables to set when running the MCP server"""
|
||||
15
src/opencode_ai/types/mcp_remote.py
Normal file
15
src/opencode_ai/types/mcp_remote.py
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing_extensions import Literal
|
||||
|
||||
from .._models import BaseModel
|
||||
|
||||
__all__ = ["McpRemote"]
|
||||
|
||||
|
||||
class McpRemote(BaseModel):
|
||||
type: Literal["remote"]
|
||||
"""Type of MCP server connection"""
|
||||
|
||||
url: str
|
||||
"""URL of the remote MCP server"""
|
||||
121
src/opencode_ai/types/message.py
Normal file
121
src/opencode_ai/types/message.py
Normal file
|
|
@ -0,0 +1,121 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import TYPE_CHECKING, Dict, List, Union, Optional
|
||||
from typing_extensions import Literal, Annotated, TypeAlias
|
||||
|
||||
from pydantic import Field as FieldInfo
|
||||
|
||||
from .._utils import PropertyInfo
|
||||
from .._models import BaseModel
|
||||
from .message_part import MessagePart
|
||||
from .shared.unknown_error import UnknownError
|
||||
from .shared.provider_auth_error import ProviderAuthError
|
||||
|
||||
__all__ = [
|
||||
"Message",
|
||||
"Metadata",
|
||||
"MetadataTime",
|
||||
"MetadataTool",
|
||||
"MetadataToolTime",
|
||||
"MetadataAssistant",
|
||||
"MetadataAssistantPath",
|
||||
"MetadataAssistantTokens",
|
||||
"MetadataAssistantTokensCache",
|
||||
"MetadataError",
|
||||
"MetadataErrorMessageOutputLengthError",
|
||||
]
|
||||
|
||||
|
||||
class MetadataTime(BaseModel):
|
||||
created: float
|
||||
|
||||
completed: Optional[float] = None
|
||||
|
||||
|
||||
class MetadataToolTime(BaseModel):
|
||||
end: float
|
||||
|
||||
start: float
|
||||
|
||||
|
||||
class MetadataTool(BaseModel):
|
||||
time: MetadataToolTime
|
||||
|
||||
title: str
|
||||
|
||||
if TYPE_CHECKING:
|
||||
# Stub to indicate that arbitrary properties are accepted.
|
||||
# To access properties that are not valid identifiers you can use `getattr`, e.g.
|
||||
# `getattr(obj, '$type')`
|
||||
def __getattr__(self, attr: str) -> object: ...
|
||||
|
||||
|
||||
class MetadataAssistantPath(BaseModel):
|
||||
cwd: str
|
||||
|
||||
root: str
|
||||
|
||||
|
||||
class MetadataAssistantTokensCache(BaseModel):
|
||||
read: float
|
||||
|
||||
write: float
|
||||
|
||||
|
||||
class MetadataAssistantTokens(BaseModel):
|
||||
cache: MetadataAssistantTokensCache
|
||||
|
||||
input: float
|
||||
|
||||
output: float
|
||||
|
||||
reasoning: float
|
||||
|
||||
|
||||
class MetadataAssistant(BaseModel):
|
||||
cost: float
|
||||
|
||||
api_model_id: str = FieldInfo(alias="modelID")
|
||||
|
||||
path: MetadataAssistantPath
|
||||
|
||||
provider_id: str = FieldInfo(alias="providerID")
|
||||
|
||||
system: List[str]
|
||||
|
||||
tokens: MetadataAssistantTokens
|
||||
|
||||
summary: Optional[bool] = None
|
||||
|
||||
|
||||
class MetadataErrorMessageOutputLengthError(BaseModel):
|
||||
data: object
|
||||
|
||||
name: Literal["MessageOutputLengthError"]
|
||||
|
||||
|
||||
MetadataError: TypeAlias = Annotated[
|
||||
Union[ProviderAuthError, UnknownError, MetadataErrorMessageOutputLengthError], PropertyInfo(discriminator="name")
|
||||
]
|
||||
|
||||
|
||||
class Metadata(BaseModel):
|
||||
session_id: str = FieldInfo(alias="sessionID")
|
||||
|
||||
time: MetadataTime
|
||||
|
||||
tool: Dict[str, MetadataTool]
|
||||
|
||||
assistant: Optional[MetadataAssistant] = None
|
||||
|
||||
error: Optional[MetadataError] = None
|
||||
|
||||
|
||||
class Message(BaseModel):
|
||||
id: str
|
||||
|
||||
metadata: Metadata
|
||||
|
||||
parts: List[MessagePart]
|
||||
|
||||
role: Literal["user", "assistant"]
|
||||
19
src/opencode_ai/types/message_part.py
Normal file
19
src/opencode_ai/types/message_part.py
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import Union
|
||||
from typing_extensions import Annotated, TypeAlias
|
||||
|
||||
from .._utils import PropertyInfo
|
||||
from .file_part import FilePart
|
||||
from .text_part import TextPart
|
||||
from .reasoning_part import ReasoningPart
|
||||
from .source_url_part import SourceURLPart
|
||||
from .step_start_part import StepStartPart
|
||||
from .tool_invocation_part import ToolInvocationPart
|
||||
|
||||
__all__ = ["MessagePart"]
|
||||
|
||||
MessagePart: TypeAlias = Annotated[
|
||||
Union[TextPart, ReasoningPart, ToolInvocationPart, SourceURLPart, FilePart, StepStartPart],
|
||||
PropertyInfo(discriminator="type"),
|
||||
]
|
||||
19
src/opencode_ai/types/message_part_param.py
Normal file
19
src/opencode_ai/types/message_part_param.py
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Union
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
from .file_part_param import FilePartParam
|
||||
from .text_part_param import TextPartParam
|
||||
from .reasoning_part_param import ReasoningPartParam
|
||||
from .source_url_part_param import SourceURLPartParam
|
||||
from .step_start_part_param import StepStartPartParam
|
||||
from .tool_invocation_part_param import ToolInvocationPartParam
|
||||
|
||||
__all__ = ["MessagePartParam"]
|
||||
|
||||
MessagePartParam: TypeAlias = Union[
|
||||
TextPartParam, ReasoningPartParam, ToolInvocationPartParam, SourceURLPartParam, FilePartParam, StepStartPartParam
|
||||
]
|
||||
43
src/opencode_ai/types/model.py
Normal file
43
src/opencode_ai/types/model.py
Normal file
|
|
@ -0,0 +1,43 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import Dict, Optional
|
||||
|
||||
from .._models import BaseModel
|
||||
|
||||
__all__ = ["Model", "Cost", "Limit"]
|
||||
|
||||
|
||||
class Cost(BaseModel):
|
||||
input: float
|
||||
|
||||
output: float
|
||||
|
||||
cache_read: Optional[float] = None
|
||||
|
||||
cache_write: Optional[float] = None
|
||||
|
||||
|
||||
class Limit(BaseModel):
|
||||
context: float
|
||||
|
||||
output: float
|
||||
|
||||
|
||||
class Model(BaseModel):
|
||||
id: str
|
||||
|
||||
attachment: bool
|
||||
|
||||
cost: Cost
|
||||
|
||||
limit: Limit
|
||||
|
||||
name: str
|
||||
|
||||
options: Dict[str, object]
|
||||
|
||||
reasoning: bool
|
||||
|
||||
temperature: bool
|
||||
|
||||
tool_call: bool
|
||||
22
src/opencode_ai/types/provider.py
Normal file
22
src/opencode_ai/types/provider.py
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
from .model import Model
|
||||
from .._models import BaseModel
|
||||
|
||||
__all__ = ["Provider"]
|
||||
|
||||
|
||||
class Provider(BaseModel):
|
||||
id: str
|
||||
|
||||
env: List[str]
|
||||
|
||||
models: Dict[str, Model]
|
||||
|
||||
name: str
|
||||
|
||||
api: Optional[str] = None
|
||||
|
||||
npm: Optional[str] = None
|
||||
18
src/opencode_ai/types/reasoning_part.py
Normal file
18
src/opencode_ai/types/reasoning_part.py
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import Dict, Optional
|
||||
from typing_extensions import Literal
|
||||
|
||||
from pydantic import Field as FieldInfo
|
||||
|
||||
from .._models import BaseModel
|
||||
|
||||
__all__ = ["ReasoningPart"]
|
||||
|
||||
|
||||
class ReasoningPart(BaseModel):
|
||||
text: str
|
||||
|
||||
type: Literal["reasoning"]
|
||||
|
||||
provider_metadata: Optional[Dict[str, object]] = FieldInfo(alias="providerMetadata", default=None)
|
||||
18
src/opencode_ai/types/reasoning_part_param.py
Normal file
18
src/opencode_ai/types/reasoning_part_param.py
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Dict
|
||||
from typing_extensions import Literal, Required, Annotated, TypedDict
|
||||
|
||||
from .._utils import PropertyInfo
|
||||
|
||||
__all__ = ["ReasoningPartParam"]
|
||||
|
||||
|
||||
class ReasoningPartParam(TypedDict, total=False):
|
||||
text: Required[str]
|
||||
|
||||
type: Required[Literal["reasoning"]]
|
||||
|
||||
provider_metadata: Annotated[Dict[str, object], PropertyInfo(alias="providerMetadata")]
|
||||
33
src/opencode_ai/types/session.py
Normal file
33
src/opencode_ai/types/session.py
Normal file
|
|
@ -0,0 +1,33 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import Field as FieldInfo
|
||||
|
||||
from .._models import BaseModel
|
||||
|
||||
__all__ = ["Session", "Time", "Share"]
|
||||
|
||||
|
||||
class Time(BaseModel):
    """Creation/update timestamps for a session.

    Values are floats; presumably epoch timestamps — confirm against the spec.
    """

    created: float

    updated: float


class Share(BaseModel):
    """Sharing information for a session."""

    # Public URL of the shared session.
    url: str


class Session(BaseModel):
    """A conversation session."""

    # Unique session identifier.
    id: str

    # Creation/update timestamps.
    time: Time

    # Human-readable session title.
    title: str

    # Version string; semantics not visible here — confirm against the spec.
    version: str

    # Parent session ID, if any; wire name is ``parentID``.
    parent_id: Optional[str] = FieldInfo(alias="parentID", default=None)

    # Present when the session has been shared.
    share: Optional[Share] = None
|
||||
7
src/opencode_ai/types/session_abort_response.py
Normal file
7
src/opencode_ai/types/session_abort_response.py
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
__all__ = ["SessionAbortResponse"]
|
||||
|
||||
# The abort endpoint returns a bare boolean (presumably success/failure — confirm).
SessionAbortResponse: TypeAlias = bool
|
||||
21
src/opencode_ai/types/session_chat_params.py
Normal file
21
src/opencode_ai/types/session_chat_params.py
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Iterable
|
||||
from typing_extensions import Required, Annotated, TypedDict
|
||||
|
||||
from .._utils import PropertyInfo
|
||||
from .message_part_param import MessagePartParam
|
||||
|
||||
__all__ = ["SessionChatParams"]
|
||||
|
||||
|
||||
class SessionChatParams(TypedDict, total=False):
    """Parameters for sending a chat message to a session.

    All four keys are required; snake_case names are serialized to their
    camelCase wire names via ``PropertyInfo`` aliases.
    """

    # Serialized as ``modelID``.
    model_id: Required[Annotated[str, PropertyInfo(alias="modelID")]]

    # Message parts composing the chat message.
    parts: Required[Iterable[MessagePartParam]]

    # Serialized as ``providerID``.
    provider_id: Required[Annotated[str, PropertyInfo(alias="providerID")]]

    # Serialized as ``sessionID``.
    session_id: Required[Annotated[str, PropertyInfo(alias="sessionID")]]
|
||||
7
src/opencode_ai/types/session_delete_response.py
Normal file
7
src/opencode_ai/types/session_delete_response.py
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
__all__ = ["SessionDeleteResponse"]
|
||||
|
||||
# The delete endpoint returns a bare boolean (presumably success/failure — confirm).
SessionDeleteResponse: TypeAlias = bool
|
||||
15
src/opencode_ai/types/session_init_params.py
Normal file
15
src/opencode_ai/types/session_init_params.py
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing_extensions import Required, Annotated, TypedDict
|
||||
|
||||
from .._utils import PropertyInfo
|
||||
|
||||
__all__ = ["SessionInitParams"]
|
||||
|
||||
|
||||
class SessionInitParams(TypedDict, total=False):
    """Parameters for initializing a session; both keys are required."""

    # Serialized as ``modelID``.
    model_id: Required[Annotated[str, PropertyInfo(alias="modelID")]]

    # Serialized as ``providerID``.
    provider_id: Required[Annotated[str, PropertyInfo(alias="providerID")]]
|
||||
7
src/opencode_ai/types/session_init_response.py
Normal file
7
src/opencode_ai/types/session_init_response.py
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
__all__ = ["SessionInitResponse"]
|
||||
|
||||
# The init endpoint returns a bare boolean (presumably success/failure — confirm).
SessionInitResponse: TypeAlias = bool
|
||||
10
src/opencode_ai/types/session_list_response.py
Normal file
10
src/opencode_ai/types/session_list_response.py
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import List
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
from .session import Session
|
||||
|
||||
__all__ = ["SessionListResponse"]
|
||||
|
||||
# The list endpoint returns a plain JSON array of sessions.
SessionListResponse: TypeAlias = List[Session]
|
||||
10
src/opencode_ai/types/session_messages_response.py
Normal file
10
src/opencode_ai/types/session_messages_response.py
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import List
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
from .message import Message
|
||||
|
||||
__all__ = ["SessionMessagesResponse"]
|
||||
|
||||
# The messages endpoint returns a plain JSON array of messages.
SessionMessagesResponse: TypeAlias = List[Message]
|
||||
15
src/opencode_ai/types/session_summarize_params.py
Normal file
15
src/opencode_ai/types/session_summarize_params.py
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing_extensions import Required, Annotated, TypedDict
|
||||
|
||||
from .._utils import PropertyInfo
|
||||
|
||||
__all__ = ["SessionSummarizeParams"]
|
||||
|
||||
|
||||
class SessionSummarizeParams(TypedDict, total=False):
    """Parameters for summarizing a session; both keys are required."""

    # Serialized as ``modelID``.
    model_id: Required[Annotated[str, PropertyInfo(alias="modelID")]]

    # Serialized as ``providerID``.
    provider_id: Required[Annotated[str, PropertyInfo(alias="providerID")]]
|
||||
7
src/opencode_ai/types/session_summarize_response.py
Normal file
7
src/opencode_ai/types/session_summarize_response.py
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
__all__ = ["SessionSummarizeResponse"]
|
||||
|
||||
# The summarize endpoint returns a bare boolean (presumably success/failure — confirm).
SessionSummarizeResponse: TypeAlias = bool
|
||||
4
src/opencode_ai/types/shared/__init__.py
Normal file
4
src/opencode_ai/types/shared/__init__.py
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from .unknown_error import UnknownError as UnknownError
|
||||
from .provider_auth_error import ProviderAuthError as ProviderAuthError
|
||||
21
src/opencode_ai/types/shared/provider_auth_error.py
Normal file
21
src/opencode_ai/types/shared/provider_auth_error.py
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing_extensions import Literal
|
||||
|
||||
from pydantic import Field as FieldInfo
|
||||
|
||||
from ..._models import BaseModel
|
||||
|
||||
__all__ = ["ProviderAuthError", "Data"]
|
||||
|
||||
|
||||
class Data(BaseModel):
    """Payload of a ``ProviderAuthError``."""

    # Human-readable error message.
    message: str

    # Provider the auth failure relates to; wire name is ``providerID``.
    provider_id: str = FieldInfo(alias="providerID")


class ProviderAuthError(BaseModel):
    """Error raised when authentication with a provider fails."""

    data: Data

    # Discriminator identifying this error variant.
    name: Literal["ProviderAuthError"]
|
||||
17
src/opencode_ai/types/shared/unknown_error.py
Normal file
17
src/opencode_ai/types/shared/unknown_error.py
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing_extensions import Literal
|
||||
|
||||
from ..._models import BaseModel
|
||||
|
||||
__all__ = ["UnknownError", "Data"]
|
||||
|
||||
|
||||
class Data(BaseModel):
    """Payload of an ``UnknownError``."""

    # Human-readable error message.
    message: str


class UnknownError(BaseModel):
    """Catch-all error variant."""

    data: Data

    # Discriminator identifying this error variant.
    name: Literal["UnknownError"]
|
||||
22
src/opencode_ai/types/source_url_part.py
Normal file
22
src/opencode_ai/types/source_url_part.py
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import Dict, Optional
|
||||
from typing_extensions import Literal
|
||||
|
||||
from pydantic import Field as FieldInfo
|
||||
|
||||
from .._models import BaseModel
|
||||
|
||||
__all__ = ["SourceURLPart"]
|
||||
|
||||
|
||||
class SourceURLPart(BaseModel):
    """A message part referencing an external source by URL."""

    # Identifier of the source; wire name is ``sourceId`` (lowercase "d",
    # unlike the ``...ID`` aliases used elsewhere in these types).
    source_id: str = FieldInfo(alias="sourceId")

    # Discriminator identifying this part variant.
    type: Literal["source-url"]

    # The source URL.
    url: str

    # Arbitrary provider-specific metadata; wire name is ``providerMetadata``.
    provider_metadata: Optional[Dict[str, object]] = FieldInfo(alias="providerMetadata", default=None)

    # Optional display title for the source.
    title: Optional[str] = None
|
||||
22
src/opencode_ai/types/source_url_part_param.py
Normal file
22
src/opencode_ai/types/source_url_part_param.py
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Dict
|
||||
from typing_extensions import Literal, Required, Annotated, TypedDict
|
||||
|
||||
from .._utils import PropertyInfo
|
||||
|
||||
__all__ = ["SourceURLPartParam"]
|
||||
|
||||
|
||||
class SourceURLPartParam(TypedDict, total=False):
    """Request-side (TypedDict) form of a source-URL message part."""

    # Serialized as ``sourceId``.
    source_id: Required[Annotated[str, PropertyInfo(alias="sourceId")]]

    # Discriminator identifying this part variant.
    type: Required[Literal["source-url"]]

    # The source URL.
    url: Required[str]

    # Arbitrary provider-specific metadata; serialized as ``providerMetadata``.
    provider_metadata: Annotated[Dict[str, object], PropertyInfo(alias="providerMetadata")]

    # Optional display title for the source.
    title: str
|
||||
11
src/opencode_ai/types/step_start_part.py
Normal file
11
src/opencode_ai/types/step_start_part.py
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing_extensions import Literal
|
||||
|
||||
from .._models import BaseModel
|
||||
|
||||
__all__ = ["StepStartPart"]
|
||||
|
||||
|
||||
class StepStartPart(BaseModel):
    """Marker part indicating the start of a step; carries no payload."""

    # Discriminator identifying this part variant.
    type: Literal["step-start"]
|
||||
11
src/opencode_ai/types/step_start_part_param.py
Normal file
11
src/opencode_ai/types/step_start_part_param.py
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing_extensions import Literal, Required, TypedDict
|
||||
|
||||
__all__ = ["StepStartPartParam"]
|
||||
|
||||
|
||||
class StepStartPartParam(TypedDict, total=False):
    """Request-side (TypedDict) form of a step-start marker part."""

    # Discriminator identifying this part variant.
    type: Required[Literal["step-start"]]
|
||||
13
src/opencode_ai/types/text_part.py
Normal file
13
src/opencode_ai/types/text_part.py
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing_extensions import Literal
|
||||
|
||||
from .._models import BaseModel
|
||||
|
||||
__all__ = ["TextPart"]
|
||||
|
||||
|
||||
class TextPart(BaseModel):
    """A plain-text message part."""

    # The text content.
    text: str

    # Discriminator identifying this part variant.
    type: Literal["text"]
|
||||
13
src/opencode_ai/types/text_part_param.py
Normal file
13
src/opencode_ai/types/text_part_param.py
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing_extensions import Literal, Required, TypedDict
|
||||
|
||||
__all__ = ["TextPartParam"]
|
||||
|
||||
|
||||
class TextPartParam(TypedDict, total=False):
    """Request-side (TypedDict) form of a plain-text message part."""

    # The text content.
    text: Required[str]

    # Discriminator identifying this part variant.
    type: Required[Literal["text"]]
|
||||
22
src/opencode_ai/types/tool_call.py
Normal file
22
src/opencode_ai/types/tool_call.py
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import Optional
|
||||
from typing_extensions import Literal
|
||||
|
||||
from pydantic import Field as FieldInfo
|
||||
|
||||
from .._models import BaseModel
|
||||
|
||||
__all__ = ["ToolCall"]
|
||||
|
||||
|
||||
class ToolCall(BaseModel):
    """A fully-formed tool invocation (``state == "call"``).

    One of the three states of ``ToolInvocation``; ``state`` is the
    discriminator between this, ``ToolPartialCall``, and ``ToolResult``.
    """

    state: Literal["call"]

    # Wire name is ``toolCallId``.
    tool_call_id: str = FieldInfo(alias="toolCallId")

    # Wire name is ``toolName``.
    tool_name: str = FieldInfo(alias="toolName")

    # Tool arguments; schema is tool-specific, hence untyped.
    args: Optional[object] = None

    # Step number within the message, if any — confirm exact semantics.
    step: Optional[float] = None
|
||||
21
src/opencode_ai/types/tool_call_param.py
Normal file
21
src/opencode_ai/types/tool_call_param.py
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing_extensions import Literal, Required, Annotated, TypedDict
|
||||
|
||||
from .._utils import PropertyInfo
|
||||
|
||||
__all__ = ["ToolCallParam"]
|
||||
|
||||
|
||||
class ToolCallParam(TypedDict, total=False):
    """Request-side (TypedDict) form of a tool call (``state == "call"``)."""

    state: Required[Literal["call"]]

    # Serialized as ``toolCallId``.
    tool_call_id: Required[Annotated[str, PropertyInfo(alias="toolCallId")]]

    # Serialized as ``toolName``.
    tool_name: Required[Annotated[str, PropertyInfo(alias="toolName")]]

    # Tool arguments; schema is tool-specific, hence untyped.
    args: object

    # Step number within the message, if any — confirm exact semantics.
    step: float
|
||||
22
src/opencode_ai/types/tool_invocation_part.py
Normal file
22
src/opencode_ai/types/tool_invocation_part.py
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import Union
|
||||
from typing_extensions import Literal, Annotated, TypeAlias
|
||||
|
||||
from pydantic import Field as FieldInfo
|
||||
|
||||
from .._utils import PropertyInfo
|
||||
from .._models import BaseModel
|
||||
from .tool_call import ToolCall
|
||||
from .tool_result import ToolResult
|
||||
from .tool_partial_call import ToolPartialCall
|
||||
|
||||
__all__ = ["ToolInvocationPart", "ToolInvocation"]
|
||||
|
||||
# Tagged union of the three invocation states, discriminated by the
# ``state`` field ("call" / "partial-call" / "result").
ToolInvocation: TypeAlias = Annotated[Union[ToolCall, ToolPartialCall, ToolResult], PropertyInfo(discriminator="state")]


class ToolInvocationPart(BaseModel):
    """A message part wrapping a tool invocation in one of its states."""

    # Wire name is ``toolInvocation``.
    tool_invocation: ToolInvocation = FieldInfo(alias="toolInvocation")

    # Discriminator identifying this part variant.
    type: Literal["tool-invocation"]
|
||||
21
src/opencode_ai/types/tool_invocation_part_param.py
Normal file
21
src/opencode_ai/types/tool_invocation_part_param.py
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Union
|
||||
from typing_extensions import Literal, Required, Annotated, TypeAlias, TypedDict
|
||||
|
||||
from .._utils import PropertyInfo
|
||||
from .tool_call_param import ToolCallParam
|
||||
from .tool_result_param import ToolResultParam
|
||||
from .tool_partial_call_param import ToolPartialCallParam
|
||||
|
||||
__all__ = ["ToolInvocationPartParam", "ToolInvocation"]
|
||||
|
||||
# Union of the three invocation states; untagged on the request side
# (no discriminator annotation, unlike the response-side alias).
ToolInvocation: TypeAlias = Union[ToolCallParam, ToolPartialCallParam, ToolResultParam]


class ToolInvocationPartParam(TypedDict, total=False):
    """Request-side (TypedDict) form of a tool-invocation message part."""

    # Serialized as ``toolInvocation``.
    tool_invocation: Required[Annotated[ToolInvocation, PropertyInfo(alias="toolInvocation")]]

    # Discriminator identifying this part variant.
    type: Required[Literal["tool-invocation"]]
|
||||
22
src/opencode_ai/types/tool_partial_call.py
Normal file
22
src/opencode_ai/types/tool_partial_call.py
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import Optional
|
||||
from typing_extensions import Literal
|
||||
|
||||
from pydantic import Field as FieldInfo
|
||||
|
||||
from .._models import BaseModel
|
||||
|
||||
__all__ = ["ToolPartialCall"]
|
||||
|
||||
|
||||
class ToolPartialCall(BaseModel):
    """An in-progress tool invocation (``state == "partial-call"``).

    Same shape as ``ToolCall``; only the ``state`` discriminator differs.
    """

    state: Literal["partial-call"]

    # Wire name is ``toolCallId``.
    tool_call_id: str = FieldInfo(alias="toolCallId")

    # Wire name is ``toolName``.
    tool_name: str = FieldInfo(alias="toolName")

    # Tool arguments accumulated so far; schema is tool-specific, hence untyped.
    args: Optional[object] = None

    # Step number within the message, if any — confirm exact semantics.
    step: Optional[float] = None
|
||||
21
src/opencode_ai/types/tool_partial_call_param.py
Normal file
21
src/opencode_ai/types/tool_partial_call_param.py
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing_extensions import Literal, Required, Annotated, TypedDict
|
||||
|
||||
from .._utils import PropertyInfo
|
||||
|
||||
__all__ = ["ToolPartialCallParam"]
|
||||
|
||||
|
||||
class ToolPartialCallParam(TypedDict, total=False):
    """Request-side (TypedDict) form of an in-progress tool invocation."""

    state: Required[Literal["partial-call"]]

    # Serialized as ``toolCallId``.
    tool_call_id: Required[Annotated[str, PropertyInfo(alias="toolCallId")]]

    # Serialized as ``toolName``.
    tool_name: Required[Annotated[str, PropertyInfo(alias="toolName")]]

    # Tool arguments; schema is tool-specific, hence untyped.
    args: object

    # Step number within the message, if any — confirm exact semantics.
    step: float
|
||||
24
src/opencode_ai/types/tool_result.py
Normal file
24
src/opencode_ai/types/tool_result.py
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from typing import Optional
|
||||
from typing_extensions import Literal
|
||||
|
||||
from pydantic import Field as FieldInfo
|
||||
|
||||
from .._models import BaseModel
|
||||
|
||||
__all__ = ["ToolResult"]
|
||||
|
||||
|
||||
class ToolResult(BaseModel):
    """A completed tool invocation with its output (``state == "result"``)."""

    # The tool's output, as a string.
    result: str

    state: Literal["result"]

    # Wire name is ``toolCallId``.
    tool_call_id: str = FieldInfo(alias="toolCallId")

    # Wire name is ``toolName``.
    tool_name: str = FieldInfo(alias="toolName")

    # Arguments the tool was called with; schema is tool-specific, hence untyped.
    args: Optional[object] = None

    # Step number within the message, if any — confirm exact semantics.
    step: Optional[float] = None
|
||||
23
src/opencode_ai/types/tool_result_param.py
Normal file
23
src/opencode_ai/types/tool_result_param.py
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing_extensions import Literal, Required, Annotated, TypedDict
|
||||
|
||||
from .._utils import PropertyInfo
|
||||
|
||||
__all__ = ["ToolResultParam"]
|
||||
|
||||
|
||||
class ToolResultParam(TypedDict, total=False):
    """Request-side (TypedDict) form of a completed tool invocation."""

    # The tool's output, as a string.
    result: Required[str]

    state: Required[Literal["result"]]

    # Serialized as ``toolCallId``.
    tool_call_id: Required[Annotated[str, PropertyInfo(alias="toolCallId")]]

    # Serialized as ``toolName``.
    tool_name: Required[Annotated[str, PropertyInfo(alias="toolName")]]

    # Arguments the tool was called with; schema is tool-specific, hence untyped.
    args: object

    # Step number within the message, if any — confirm exact semantics.
    step: float
|
||||
Loading…
Add table
Add a link
Reference in a new issue