feat(api): api update

This commit is contained in:
stainless-app[bot] 2025-07-22 16:06:10 +00:00
parent 60df30faf5
commit 6e577d0bde
19 changed files with 171 additions and 521 deletions

View file

@ -1,4 +1,4 @@
configured_endpoints: 24 configured_endpoints: 24
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/opencode%2Fopencode-d10809ab68e48a338167e5504d69db2a0a80739adf6ecd3f065644a4139bc374.yml openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/opencode%2Fopencode-7f559270c87d0cbb6779d0edd57a2efcb4d60932674cea4444c40cd4e7b72569.yml
openapi_spec_hash: 4875565ef8df3446dbab11f450e04c51 openapi_spec_hash: 531acdc4f16185743ec56b3e07857cde
config_hash: 0032a76356d31c6b4c218b39fff635bb config_hash: 6d92d798d44906c9e43c6dee06615360

4
api.md
View file

@ -140,10 +140,10 @@ Methods:
Types: Types:
```python ```python
from opencode_ai.types import TuiOpenHelpResponse, TuiPromptResponse from opencode_ai.types import TuiAppendPromptResponse, TuiOpenHelpResponse
``` ```
Methods: Methods:
- <code title="post /tui/append-prompt">client.tui.<a href="./src/opencode_ai/resources/tui.py">append_prompt</a>(\*\*<a href="src/opencode_ai/types/tui_append_prompt_params.py">params</a>) -> <a href="./src/opencode_ai/types/tui_append_prompt_response.py">TuiAppendPromptResponse</a></code>
- <code title="post /tui/open-help">client.tui.<a href="./src/opencode_ai/resources/tui.py">open_help</a>() -> <a href="./src/opencode_ai/types/tui_open_help_response.py">TuiOpenHelpResponse</a></code> - <code title="post /tui/open-help">client.tui.<a href="./src/opencode_ai/resources/tui.py">open_help</a>() -> <a href="./src/opencode_ai/types/tui_open_help_response.py">TuiOpenHelpResponse</a></code>
- <code title="post /tui/prompt">client.tui.<a href="./src/opencode_ai/resources/tui.py">prompt</a>(\*\*<a href="src/opencode_ai/types/tui_prompt_params.py">params</a>) -> <a href="./src/opencode_ai/types/tui_prompt_response.py">TuiPromptResponse</a></code>

View file

@ -2,11 +2,9 @@
from __future__ import annotations from __future__ import annotations
from typing import Iterable
import httpx import httpx
from ..types import tui_prompt_params from ..types import tui_append_prompt_params
from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
from .._utils import maybe_transform, async_maybe_transform from .._utils import maybe_transform, async_maybe_transform
from .._compat import cached_property from .._compat import cached_property
@ -18,9 +16,8 @@ from .._response import (
async_to_streamed_response_wrapper, async_to_streamed_response_wrapper,
) )
from .._base_client import make_request_options from .._base_client import make_request_options
from ..types.part_param import PartParam
from ..types.tui_prompt_response import TuiPromptResponse
from ..types.tui_open_help_response import TuiOpenHelpResponse from ..types.tui_open_help_response import TuiOpenHelpResponse
from ..types.tui_append_prompt_response import TuiAppendPromptResponse
__all__ = ["TuiResource", "AsyncTuiResource"] __all__ = ["TuiResource", "AsyncTuiResource"]
@ -45,6 +42,38 @@ class TuiResource(SyncAPIResource):
""" """
return TuiResourceWithStreamingResponse(self) return TuiResourceWithStreamingResponse(self)
def append_prompt(
    self,
    *,
    text: str,
    # The arguments below let callers pass extra request parameters that the
    # generated kwargs do not cover; they override client-level defaults.
    extra_headers: Headers | None = None,
    extra_query: Query | None = None,
    extra_body: Body | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> TuiAppendPromptResponse:
    """
    Append prompt to the TUI

    Args:
      extra_headers: Send extra headers

      extra_query: Add additional query parameters to the request

      extra_body: Add additional JSON properties to the request

      timeout: Override the client-level default timeout for this request, in seconds
    """
    # Run the payload through the params TypedDict so aliasing/serialization
    # rules apply, then issue the POST and cast the JSON result.
    request_body = maybe_transform({"text": text}, tui_append_prompt_params.TuiAppendPromptParams)
    request_options = make_request_options(
        extra_headers=extra_headers,
        extra_query=extra_query,
        extra_body=extra_body,
        timeout=timeout,
    )
    return self._post(
        "/tui/append-prompt",
        body=request_body,
        options=request_options,
        cast_to=TuiAppendPromptResponse,
    )
def open_help( def open_help(
self, self,
*, *,
@ -64,45 +93,6 @@ class TuiResource(SyncAPIResource):
cast_to=TuiOpenHelpResponse, cast_to=TuiOpenHelpResponse,
) )
def prompt(
    self,
    *,
    parts: Iterable[PartParam],
    text: str,
    # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
    # The extra values given here take precedence over values defined on the client or passed to this method.
    extra_headers: Headers | None = None,
    extra_query: Query | None = None,
    extra_body: Body | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> TuiPromptResponse:
    """
    Send a prompt to the TUI

    Args:
      extra_headers: Send extra headers

      extra_query: Add additional query parameters to the request

      extra_body: Add additional JSON properties to the request

      timeout: Override the client-level default timeout for this request, in seconds
    """
    # maybe_transform applies the TuiPromptParams TypedDict's alias and
    # serialization rules to the payload before it is sent as the JSON body.
    return self._post(
        "/tui/prompt",
        body=maybe_transform(
            {
                "parts": parts,
                "text": text,
            },
            tui_prompt_params.TuiPromptParams,
        ),
        options=make_request_options(
            extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
        ),
        cast_to=TuiPromptResponse,
    )
class AsyncTuiResource(AsyncAPIResource): class AsyncTuiResource(AsyncAPIResource):
@cached_property @cached_property
@ -124,6 +114,38 @@ class AsyncTuiResource(AsyncAPIResource):
""" """
return AsyncTuiResourceWithStreamingResponse(self) return AsyncTuiResourceWithStreamingResponse(self)
async def append_prompt(
    self,
    *,
    text: str,
    # The arguments below let callers pass extra request parameters that the
    # generated kwargs do not cover; they override client-level defaults.
    extra_headers: Headers | None = None,
    extra_query: Query | None = None,
    extra_body: Body | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> TuiAppendPromptResponse:
    """
    Append prompt to the TUI

    Args:
      extra_headers: Send extra headers

      extra_query: Add additional query parameters to the request

      extra_body: Add additional JSON properties to the request

      timeout: Override the client-level default timeout for this request, in seconds
    """
    # Async variant: the body transform is awaited, then the POST is issued
    # and the JSON result cast to the response alias.
    request_body = await async_maybe_transform({"text": text}, tui_append_prompt_params.TuiAppendPromptParams)
    request_options = make_request_options(
        extra_headers=extra_headers,
        extra_query=extra_query,
        extra_body=extra_body,
        timeout=timeout,
    )
    return await self._post(
        "/tui/append-prompt",
        body=request_body,
        options=request_options,
        cast_to=TuiAppendPromptResponse,
    )
async def open_help( async def open_help(
self, self,
*, *,
@ -143,89 +165,50 @@ class AsyncTuiResource(AsyncAPIResource):
cast_to=TuiOpenHelpResponse, cast_to=TuiOpenHelpResponse,
) )
async def prompt(
    self,
    *,
    parts: Iterable[PartParam],
    text: str,
    # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
    # The extra values given here take precedence over values defined on the client or passed to this method.
    extra_headers: Headers | None = None,
    extra_query: Query | None = None,
    extra_body: Body | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> TuiPromptResponse:
    """
    Send a prompt to the TUI

    Args:
      extra_headers: Send extra headers

      extra_query: Add additional query parameters to the request

      extra_body: Add additional JSON properties to the request

      timeout: Override the client-level default timeout for this request, in seconds
    """
    # Async variant of TuiResource.prompt: the TypedDict transform is awaited
    # before the POST is issued.
    return await self._post(
        "/tui/prompt",
        body=await async_maybe_transform(
            {
                "parts": parts,
                "text": text,
            },
            tui_prompt_params.TuiPromptParams,
        ),
        options=make_request_options(
            extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
        ),
        cast_to=TuiPromptResponse,
    )
class TuiResourceWithRawResponse: class TuiResourceWithRawResponse:
def __init__(self, tui: TuiResource) -> None: def __init__(self, tui: TuiResource) -> None:
self._tui = tui self._tui = tui
self.append_prompt = to_raw_response_wrapper(
tui.append_prompt,
)
self.open_help = to_raw_response_wrapper( self.open_help = to_raw_response_wrapper(
tui.open_help, tui.open_help,
) )
self.prompt = to_raw_response_wrapper(
tui.prompt,
)
class AsyncTuiResourceWithRawResponse: class AsyncTuiResourceWithRawResponse:
def __init__(self, tui: AsyncTuiResource) -> None: def __init__(self, tui: AsyncTuiResource) -> None:
self._tui = tui self._tui = tui
self.append_prompt = async_to_raw_response_wrapper(
tui.append_prompt,
)
self.open_help = async_to_raw_response_wrapper( self.open_help = async_to_raw_response_wrapper(
tui.open_help, tui.open_help,
) )
self.prompt = async_to_raw_response_wrapper(
tui.prompt,
)
class TuiResourceWithStreamingResponse: class TuiResourceWithStreamingResponse:
def __init__(self, tui: TuiResource) -> None: def __init__(self, tui: TuiResource) -> None:
self._tui = tui self._tui = tui
self.append_prompt = to_streamed_response_wrapper(
tui.append_prompt,
)
self.open_help = to_streamed_response_wrapper( self.open_help = to_streamed_response_wrapper(
tui.open_help, tui.open_help,
) )
self.prompt = to_streamed_response_wrapper(
tui.prompt,
)
class AsyncTuiResourceWithStreamingResponse: class AsyncTuiResourceWithStreamingResponse:
def __init__(self, tui: AsyncTuiResource) -> None: def __init__(self, tui: AsyncTuiResource) -> None:
self._tui = tui self._tui = tui
self.append_prompt = async_to_streamed_response_wrapper(
tui.append_prompt,
)
self.open_help = async_to_streamed_response_wrapper( self.open_help = async_to_streamed_response_wrapper(
tui.open_help, tui.open_help,
) )
self.prompt = async_to_streamed_response_wrapper(
tui.prompt,
)

View file

@ -21,18 +21,14 @@ from .provider import Provider as Provider
from .file_part import FilePart as FilePart from .file_part import FilePart as FilePart
from .text_part import TextPart as TextPart from .text_part import TextPart as TextPart
from .tool_part import ToolPart as ToolPart from .tool_part import ToolPart as ToolPart
from .part_param import PartParam as PartParam
from .file_source import FileSource as FileSource from .file_source import FileSource as FileSource
from .mode_config import ModeConfig as ModeConfig from .mode_config import ModeConfig as ModeConfig
from .user_message import UserMessage as UserMessage from .user_message import UserMessage as UserMessage
from .snapshot_part import SnapshotPart as SnapshotPart from .snapshot_part import SnapshotPart as SnapshotPart
from .symbol_source import SymbolSource as SymbolSource from .symbol_source import SymbolSource as SymbolSource
from .app_log_params import AppLogParams as AppLogParams from .app_log_params import AppLogParams as AppLogParams
from .file_part_param import FilePartParam as FilePartParam
from .keybinds_config import KeybindsConfig as KeybindsConfig from .keybinds_config import KeybindsConfig as KeybindsConfig
from .step_start_part import StepStartPart as StepStartPart from .step_start_part import StepStartPart as StepStartPart
from .text_part_param import TextPartParam as TextPartParam
from .tool_part_param import ToolPartParam as ToolPartParam
from .app_log_response import AppLogResponse as AppLogResponse from .app_log_response import AppLogResponse as AppLogResponse
from .file_part_source import FilePartSource as FilePartSource from .file_part_source import FilePartSource as FilePartSource
from .file_read_params import FileReadParams as FileReadParams from .file_read_params import FileReadParams as FileReadParams
@ -45,7 +41,6 @@ from .assistant_message import AssistantMessage as AssistantMessage
from .file_source_param import FileSourceParam as FileSourceParam from .file_source_param import FileSourceParam as FileSourceParam
from .find_files_params import FindFilesParams as FindFilesParams from .find_files_params import FindFilesParams as FindFilesParams
from .mcp_remote_config import McpRemoteConfig as McpRemoteConfig from .mcp_remote_config import McpRemoteConfig as McpRemoteConfig
from .tui_prompt_params import TuiPromptParams as TuiPromptParams
from .app_modes_response import AppModesResponse as AppModesResponse from .app_modes_response import AppModesResponse as AppModesResponse
from .file_read_response import FileReadResponse as FileReadResponse from .file_read_response import FileReadResponse as FileReadResponse
from .find_text_response import FindTextResponse as FindTextResponse from .find_text_response import FindTextResponse as FindTextResponse
@ -56,9 +51,7 @@ from .find_files_response import FindFilesResponse as FindFilesResponse
from .find_symbols_params import FindSymbolsParams as FindSymbolsParams from .find_symbols_params import FindSymbolsParams as FindSymbolsParams
from .session_chat_params import SessionChatParams as SessionChatParams from .session_chat_params import SessionChatParams as SessionChatParams
from .session_init_params import SessionInitParams as SessionInitParams from .session_init_params import SessionInitParams as SessionInitParams
from .snapshot_part_param import SnapshotPartParam as SnapshotPartParam
from .symbol_source_param import SymbolSourceParam as SymbolSourceParam from .symbol_source_param import SymbolSourceParam as SymbolSourceParam
from .tui_prompt_response import TuiPromptResponse as TuiPromptResponse
from .file_status_response import FileStatusResponse as FileStatusResponse from .file_status_response import FileStatusResponse as FileStatusResponse
from .tool_state_completed import ToolStateCompleted as ToolStateCompleted from .tool_state_completed import ToolStateCompleted as ToolStateCompleted
from .file_part_input_param import FilePartInputParam as FilePartInputParam from .file_part_input_param import FilePartInputParam as FilePartInputParam
@ -66,19 +59,15 @@ from .file_part_source_text import FilePartSourceText as FilePartSourceText
from .find_symbols_response import FindSymbolsResponse as FindSymbolsResponse from .find_symbols_response import FindSymbolsResponse as FindSymbolsResponse
from .session_init_response import SessionInitResponse as SessionInitResponse from .session_init_response import SessionInitResponse as SessionInitResponse
from .session_list_response import SessionListResponse as SessionListResponse from .session_list_response import SessionListResponse as SessionListResponse
from .step_start_part_param import StepStartPartParam as StepStartPartParam
from .text_part_input_param import TextPartInputParam as TextPartInputParam from .text_part_input_param import TextPartInputParam as TextPartInputParam
from .app_providers_response import AppProvidersResponse as AppProvidersResponse from .app_providers_response import AppProvidersResponse as AppProvidersResponse
from .file_part_source_param import FilePartSourceParam as FilePartSourceParam from .file_part_source_param import FilePartSourceParam as FilePartSourceParam
from .session_abort_response import SessionAbortResponse as SessionAbortResponse from .session_abort_response import SessionAbortResponse as SessionAbortResponse
from .step_finish_part_param import StepFinishPartParam as StepFinishPartParam
from .tool_state_error_param import ToolStateErrorParam as ToolStateErrorParam
from .tui_open_help_response import TuiOpenHelpResponse as TuiOpenHelpResponse from .tui_open_help_response import TuiOpenHelpResponse as TuiOpenHelpResponse
from .session_delete_response import SessionDeleteResponse as SessionDeleteResponse from .session_delete_response import SessionDeleteResponse as SessionDeleteResponse
from .session_summarize_params import SessionSummarizeParams as SessionSummarizeParams from .session_summarize_params import SessionSummarizeParams as SessionSummarizeParams
from .tool_state_pending_param import ToolStatePendingParam as ToolStatePendingParam from .tui_append_prompt_params import TuiAppendPromptParams as TuiAppendPromptParams
from .tool_state_running_param import ToolStateRunningParam as ToolStateRunningParam
from .session_messages_response import SessionMessagesResponse as SessionMessagesResponse from .session_messages_response import SessionMessagesResponse as SessionMessagesResponse
from .session_summarize_response import SessionSummarizeResponse as SessionSummarizeResponse from .session_summarize_response import SessionSummarizeResponse as SessionSummarizeResponse
from .tool_state_completed_param import ToolStateCompletedParam as ToolStateCompletedParam from .tui_append_prompt_response import TuiAppendPromptResponse as TuiAppendPromptResponse
from .file_part_source_text_param import FilePartSourceTextParam as FilePartSourceTextParam from .file_part_source_text_param import FilePartSourceTextParam as FilePartSourceTextParam

View file

@ -45,6 +45,8 @@ __all__ = [
"EventSessionErrorPropertiesErrorMessageOutputLengthError", "EventSessionErrorPropertiesErrorMessageOutputLengthError",
"EventFileWatcherUpdated", "EventFileWatcherUpdated",
"EventFileWatcherUpdatedProperties", "EventFileWatcherUpdatedProperties",
"EventIdeInstalled",
"EventIdeInstalledProperties",
] ]
@ -214,6 +216,16 @@ class EventFileWatcherUpdated(BaseModel):
type: Literal["file.watcher.updated"] type: Literal["file.watcher.updated"]
class EventIdeInstalledProperties(BaseModel):
    # Free-form string from the server — presumably the name/identifier of the
    # installed IDE; confirm against the OpenAPI spec.
    ide: str


class EventIdeInstalled(BaseModel):
    properties: EventIdeInstalledProperties

    # Literal discriminator used to select this variant in the
    # type-discriminated event union.
    type: Literal["ide.installed"]
EventListResponse: TypeAlias = Annotated[ EventListResponse: TypeAlias = Annotated[
Union[ Union[
EventLspClientDiagnostics, EventLspClientDiagnostics,
@ -229,6 +241,7 @@ EventListResponse: TypeAlias = Annotated[
EventSessionIdle, EventSessionIdle,
EventSessionError, EventSessionError,
EventFileWatcherUpdated, EventFileWatcherUpdated,
EventIdeInstalled,
], ],
PropertyInfo(discriminator="type"), PropertyInfo(discriminator="type"),
] ]

View file

@ -1,28 +0,0 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing_extensions import Literal, Required, Annotated, TypedDict
from .._utils import PropertyInfo
from .file_part_source_param import FilePartSourceParam
__all__ = ["FilePartParam"]
class FilePartParam(TypedDict, total=False):
    id: Required[str]

    # Serialized on the wire as "messageID" via the PropertyInfo alias.
    message_id: Required[Annotated[str, PropertyInfo(alias="messageID")]]

    mime: Required[str]

    # Serialized on the wire as "sessionID".
    session_id: Required[Annotated[str, PropertyInfo(alias="sessionID")]]

    # Literal discriminator identifying this part variant.
    type: Required[Literal["file"]]

    url: Required[str]

    # Optional keys (total=False and no Required wrapper).
    filename: str

    source: FilePartSourceParam

View file

@ -1,19 +0,0 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing import Union
from typing_extensions import TypeAlias
from .file_part_param import FilePartParam
from .text_part_param import TextPartParam
from .tool_part_param import ToolPartParam
from .snapshot_part_param import SnapshotPartParam
from .step_start_part_param import StepStartPartParam
from .step_finish_part_param import StepFinishPartParam
__all__ = ["PartParam"]
# Union of every message-part variant; each member TypedDict carries a literal
# "type" key that distinguishes it on the wire.
PartParam: TypeAlias = Union[
    TextPartParam, FilePartParam, ToolPartParam, StepStartPartParam, StepFinishPartParam, SnapshotPartParam
]

View file

@ -1,21 +0,0 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing_extensions import Literal, Required, Annotated, TypedDict
from .._utils import PropertyInfo
__all__ = ["SnapshotPartParam"]
class SnapshotPartParam(TypedDict, total=False):
    id: Required[str]

    # Serialized on the wire as "messageID" via the PropertyInfo alias.
    message_id: Required[Annotated[str, PropertyInfo(alias="messageID")]]

    # Serialized on the wire as "sessionID".
    session_id: Required[Annotated[str, PropertyInfo(alias="sessionID")]]

    snapshot: Required[str]

    # Literal discriminator identifying this part variant.
    type: Required[Literal["snapshot"]]

View file

@ -1,39 +0,0 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing_extensions import Literal, Required, Annotated, TypedDict
from .._utils import PropertyInfo
__all__ = ["StepFinishPartParam", "Tokens", "TokensCache"]
class TokensCache(TypedDict, total=False):
    # Presumably cache read/write token counts — confirm against the API spec.
    read: Required[float]

    write: Required[float]


class Tokens(TypedDict, total=False):
    cache: Required[TokensCache]

    input: Required[float]

    output: Required[float]

    reasoning: Required[float]


class StepFinishPartParam(TypedDict, total=False):
    id: Required[str]

    cost: Required[float]

    # Serialized on the wire as "messageID" via the PropertyInfo alias.
    message_id: Required[Annotated[str, PropertyInfo(alias="messageID")]]

    # Serialized on the wire as "sessionID".
    session_id: Required[Annotated[str, PropertyInfo(alias="sessionID")]]

    tokens: Required[Tokens]

    # Literal discriminator identifying this part variant.
    type: Required[Literal["step-finish"]]

View file

@ -1,19 +0,0 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing_extensions import Literal, Required, Annotated, TypedDict
from .._utils import PropertyInfo
__all__ = ["StepStartPartParam"]
class StepStartPartParam(TypedDict, total=False):
    id: Required[str]

    # Serialized on the wire as "messageID" via the PropertyInfo alias.
    message_id: Required[Annotated[str, PropertyInfo(alias="messageID")]]

    # Serialized on the wire as "sessionID".
    session_id: Required[Annotated[str, PropertyInfo(alias="sessionID")]]

    # Literal discriminator identifying this part variant.
    type: Required[Literal["step-start"]]

View file

@ -1,31 +0,0 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing_extensions import Literal, Required, Annotated, TypedDict
from .._utils import PropertyInfo
__all__ = ["TextPartParam", "Time"]
class Time(TypedDict, total=False):
    start: Required[float]

    # Optional key (no Required wrapper).
    end: float


class TextPartParam(TypedDict, total=False):
    id: Required[str]

    # Serialized on the wire as "messageID" via the PropertyInfo alias.
    message_id: Required[Annotated[str, PropertyInfo(alias="messageID")]]

    # Serialized on the wire as "sessionID".
    session_id: Required[Annotated[str, PropertyInfo(alias="sessionID")]]

    text: Required[str]

    # Literal discriminator identifying this part variant.
    type: Required[Literal["text"]]

    # Optional keys (total=False and no Required wrapper).
    synthetic: bool

    time: Time

View file

@ -1,32 +0,0 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing import Union
from typing_extensions import Literal, Required, Annotated, TypeAlias, TypedDict
from .._utils import PropertyInfo
from .tool_state_error_param import ToolStateErrorParam
from .tool_state_pending_param import ToolStatePendingParam
from .tool_state_running_param import ToolStateRunningParam
from .tool_state_completed_param import ToolStateCompletedParam
__all__ = ["ToolPartParam", "State"]
# Union of the four tool lifecycle states; each member carries a literal
# "status" key that distinguishes it.
State: TypeAlias = Union[ToolStatePendingParam, ToolStateRunningParam, ToolStateCompletedParam, ToolStateErrorParam]


class ToolPartParam(TypedDict, total=False):
    id: Required[str]

    # Serialized on the wire as "callID" via the PropertyInfo alias.
    call_id: Required[Annotated[str, PropertyInfo(alias="callID")]]

    # Serialized on the wire as "messageID".
    message_id: Required[Annotated[str, PropertyInfo(alias="messageID")]]

    # Serialized on the wire as "sessionID".
    session_id: Required[Annotated[str, PropertyInfo(alias="sessionID")]]

    state: Required[State]

    tool: Required[str]

    # Literal discriminator identifying this part variant.
    type: Required[Literal["tool"]]

View file

@ -1,28 +0,0 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing import Dict
from typing_extensions import Literal, Required, TypedDict
__all__ = ["ToolStateCompletedParam", "Time"]
class Time(TypedDict, total=False):
    end: Required[float]

    start: Required[float]


class ToolStateCompletedParam(TypedDict, total=False):
    input: Required[Dict[str, object]]

    metadata: Required[Dict[str, object]]

    output: Required[str]

    # Literal status discriminator for the completed tool state.
    status: Required[Literal["completed"]]

    time: Required[Time]

    title: Required[str]

View file

@ -1,24 +0,0 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing import Dict
from typing_extensions import Literal, Required, TypedDict
__all__ = ["ToolStateErrorParam", "Time"]
class Time(TypedDict, total=False):
    end: Required[float]

    start: Required[float]


class ToolStateErrorParam(TypedDict, total=False):
    error: Required[str]

    input: Required[Dict[str, object]]

    # Literal status discriminator for the errored tool state.
    status: Required[Literal["error"]]

    time: Required[Time]

View file

@ -1,11 +0,0 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing_extensions import Literal, Required, TypedDict
__all__ = ["ToolStatePendingParam"]
class ToolStatePendingParam(TypedDict, total=False):
    # Literal status discriminator for the pending tool state; the only key
    # this variant carries.
    status: Required[Literal["pending"]]

View file

@ -1,24 +0,0 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing import Dict
from typing_extensions import Literal, Required, TypedDict
__all__ = ["ToolStateRunningParam", "Time"]
class Time(TypedDict, total=False):
    start: Required[float]


class ToolStateRunningParam(TypedDict, total=False):
    # Literal status discriminator for the running tool state.
    status: Required[Literal["running"]]

    time: Required[Time]

    # Optional keys (total=False and no Required wrapper).
    input: object

    metadata: Dict[str, object]

    title: str

View file

@ -2,15 +2,10 @@
from __future__ import annotations from __future__ import annotations
from typing import Iterable
from typing_extensions import Required, TypedDict from typing_extensions import Required, TypedDict
from .part_param import PartParam __all__ = ["TuiAppendPromptParams"]
__all__ = ["TuiPromptParams"]
class TuiPromptParams(TypedDict, total=False): class TuiAppendPromptParams(TypedDict, total=False):
parts: Required[Iterable[PartParam]]
text: Required[str] text: Required[str]

View file

@ -2,6 +2,6 @@
from typing_extensions import TypeAlias from typing_extensions import TypeAlias
__all__ = ["TuiPromptResponse"] __all__ = ["TuiAppendPromptResponse"]
TuiPromptResponse: TypeAlias = bool TuiAppendPromptResponse: TypeAlias = bool

View file

@ -9,7 +9,7 @@ import pytest
from opencode_ai import Opencode, AsyncOpencode from opencode_ai import Opencode, AsyncOpencode
from tests.utils import assert_matches_type from tests.utils import assert_matches_type
from opencode_ai.types import TuiPromptResponse, TuiOpenHelpResponse from opencode_ai.types import TuiOpenHelpResponse, TuiAppendPromptResponse
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@ -17,6 +17,40 @@ base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
class TestTui: class TestTui:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
@pytest.mark.skip()
@parametrize
def test_method_append_prompt(self, client: Opencode) -> None:
    # Happy-path call: result must match the TuiAppendPromptResponse type.
    tui = client.tui.append_prompt(
        text="text",
    )
    assert_matches_type(TuiAppendPromptResponse, tui, path=["response"])

@pytest.mark.skip()
@parametrize
def test_raw_response_append_prompt(self, client: Opencode) -> None:
    # Raw-response accessor: verifies headers and that parse() yields the typed value.
    response = client.tui.with_raw_response.append_prompt(
        text="text",
    )

    assert response.is_closed is True
    assert response.http_request.headers.get("X-Stainless-Lang") == "python"
    tui = response.parse()
    assert_matches_type(TuiAppendPromptResponse, tui, path=["response"])

@pytest.mark.skip()
@parametrize
def test_streaming_response_append_prompt(self, client: Opencode) -> None:
    # Streaming accessor: response stays open inside the context manager and
    # is closed after exit.
    with client.tui.with_streaming_response.append_prompt(
        text="text",
    ) as response:
        assert not response.is_closed
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"

        tui = response.parse()
        assert_matches_type(TuiAppendPromptResponse, tui, path=["response"])

    assert cast(Any, response.is_closed) is True
@pytest.mark.skip() @pytest.mark.skip()
@parametrize @parametrize
def test_method_open_help(self, client: Opencode) -> None: def test_method_open_help(self, client: Opencode) -> None:
@ -45,73 +79,46 @@ class TestTui:
assert cast(Any, response.is_closed) is True assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
def test_method_prompt(self, client: Opencode) -> None:
    # Happy-path call with a single text part; result must match TuiPromptResponse.
    tui = client.tui.prompt(
        parts=[
            {
                "id": "id",
                "message_id": "messageID",
                "session_id": "sessionID",
                "text": "text",
                "type": "text",
            }
        ],
        text="text",
    )
    assert_matches_type(TuiPromptResponse, tui, path=["response"])

@pytest.mark.skip()
@parametrize
def test_raw_response_prompt(self, client: Opencode) -> None:
    # Raw-response accessor: verifies headers and that parse() yields the typed value.
    response = client.tui.with_raw_response.prompt(
        parts=[
            {
                "id": "id",
                "message_id": "messageID",
                "session_id": "sessionID",
                "text": "text",
                "type": "text",
            }
        ],
        text="text",
    )

    assert response.is_closed is True
    assert response.http_request.headers.get("X-Stainless-Lang") == "python"
    tui = response.parse()
    assert_matches_type(TuiPromptResponse, tui, path=["response"])

@pytest.mark.skip()
@parametrize
def test_streaming_response_prompt(self, client: Opencode) -> None:
    # Streaming accessor: response stays open inside the context manager and
    # is closed after exit.
    with client.tui.with_streaming_response.prompt(
        parts=[
            {
                "id": "id",
                "message_id": "messageID",
                "session_id": "sessionID",
                "text": "text",
                "type": "text",
            }
        ],
        text="text",
    ) as response:
        assert not response.is_closed
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"

        tui = response.parse()
        assert_matches_type(TuiPromptResponse, tui, path=["response"])

    assert cast(Any, response.is_closed) is True
class TestAsyncTui: class TestAsyncTui:
parametrize = pytest.mark.parametrize( parametrize = pytest.mark.parametrize(
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
) )
@pytest.mark.skip()
@parametrize
async def test_method_append_prompt(self, async_client: AsyncOpencode) -> None:
    # Async happy-path call: result must match the TuiAppendPromptResponse type.
    tui = await async_client.tui.append_prompt(
        text="text",
    )
    assert_matches_type(TuiAppendPromptResponse, tui, path=["response"])

@pytest.mark.skip()
@parametrize
async def test_raw_response_append_prompt(self, async_client: AsyncOpencode) -> None:
    # Async raw-response accessor: parse() is awaited here, unlike the sync test.
    response = await async_client.tui.with_raw_response.append_prompt(
        text="text",
    )

    assert response.is_closed is True
    assert response.http_request.headers.get("X-Stainless-Lang") == "python"
    tui = await response.parse()
    assert_matches_type(TuiAppendPromptResponse, tui, path=["response"])

@pytest.mark.skip()
@parametrize
async def test_streaming_response_append_prompt(self, async_client: AsyncOpencode) -> None:
    # Async streaming accessor: response stays open inside the context manager
    # and is closed after exit.
    async with async_client.tui.with_streaming_response.append_prompt(
        text="text",
    ) as response:
        assert not response.is_closed
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"

        tui = await response.parse()
        assert_matches_type(TuiAppendPromptResponse, tui, path=["response"])

    assert cast(Any, response.is_closed) is True
@pytest.mark.skip() @pytest.mark.skip()
@parametrize @parametrize
async def test_method_open_help(self, async_client: AsyncOpencode) -> None: async def test_method_open_help(self, async_client: AsyncOpencode) -> None:
@ -139,64 +146,3 @@ class TestAsyncTui:
assert_matches_type(TuiOpenHelpResponse, tui, path=["response"]) assert_matches_type(TuiOpenHelpResponse, tui, path=["response"])
assert cast(Any, response.is_closed) is True assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
async def test_method_prompt(self, async_client: AsyncOpencode) -> None:
    # Async happy-path call with a single text part; result must match TuiPromptResponse.
    tui = await async_client.tui.prompt(
        parts=[
            {
                "id": "id",
                "message_id": "messageID",
                "session_id": "sessionID",
                "text": "text",
                "type": "text",
            }
        ],
        text="text",
    )
    assert_matches_type(TuiPromptResponse, tui, path=["response"])

@pytest.mark.skip()
@parametrize
async def test_raw_response_prompt(self, async_client: AsyncOpencode) -> None:
    # Async raw-response accessor: parse() is awaited here, unlike the sync test.
    response = await async_client.tui.with_raw_response.prompt(
        parts=[
            {
                "id": "id",
                "message_id": "messageID",
                "session_id": "sessionID",
                "text": "text",
                "type": "text",
            }
        ],
        text="text",
    )

    assert response.is_closed is True
    assert response.http_request.headers.get("X-Stainless-Lang") == "python"
    tui = await response.parse()
    assert_matches_type(TuiPromptResponse, tui, path=["response"])

@pytest.mark.skip()
@parametrize
async def test_streaming_response_prompt(self, async_client: AsyncOpencode) -> None:
    # Async streaming accessor: response stays open inside the context manager
    # and is closed after exit.
    async with async_client.tui.with_streaming_response.prompt(
        parts=[
            {
                "id": "id",
                "message_id": "messageID",
                "session_id": "sessionID",
                "text": "text",
                "type": "text",
            }
        ],
        text="text",
    ) as response:
        assert not response.is_closed
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"

        tui = await response.parse()
        assert_matches_type(TuiPromptResponse, tui, path=["response"])

    assert cast(Any, response.is_closed) is True