mirror of
https://github.com/anomalyco/opencode-sdk-python.git
synced 2026-04-28 04:29:50 +00:00
feat(api): api update
This commit is contained in:
parent
c75c9633c9
commit
ff6b72a6da
6 changed files with 121 additions and 5 deletions
|
|
@ -1,4 +1,4 @@
|
|||
configured_endpoints: 23
|
||||
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/opencode%2Fopencode-04eaffcca7fcec3eba3c34ba4e91ba830867173c552015a0abfd65e25084d9b5.yml
|
||||
openapi_spec_hash: 4dfbcc2ce25451592f610e372ecad0cb
|
||||
configured_endpoints: 24
|
||||
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/opencode%2Fopencode-d10809ab68e48a338167e5504d69db2a0a80739adf6ecd3f065644a4139bc374.yml
|
||||
openapi_spec_hash: 4875565ef8df3446dbab11f450e04c51
|
||||
config_hash: 0032a76356d31c6b4c218b39fff635bb
|
||||
|
|
|
|||
3
api.md
3
api.md
|
|
@ -140,9 +140,10 @@ Methods:
|
|||
Types:
|
||||
|
||||
```python
|
||||
from opencode_ai.types import TuiPromptResponse
|
||||
from opencode_ai.types import TuiOpenHelpResponse, TuiPromptResponse
|
||||
```
|
||||
|
||||
Methods:
|
||||
|
||||
- <code title="post /tui/open-help">client.tui.<a href="./src/opencode_ai/resources/tui.py">open_help</a>() -> <a href="./src/opencode_ai/types/tui_open_help_response.py">TuiOpenHelpResponse</a></code>
|
||||
- <code title="post /tui/prompt">client.tui.<a href="./src/opencode_ai/resources/tui.py">prompt</a>(\*\*<a href="src/opencode_ai/types/tui_prompt_params.py">params</a>) -> <a href="./src/opencode_ai/types/tui_prompt_response.py">TuiPromptResponse</a></code>
|
||||
|
|
|
|||
|
|
@ -20,6 +20,7 @@ from .._response import (
|
|||
from .._base_client import make_request_options
|
||||
from ..types.part_param import PartParam
|
||||
from ..types.tui_prompt_response import TuiPromptResponse
|
||||
from ..types.tui_open_help_response import TuiOpenHelpResponse
|
||||
|
||||
__all__ = ["TuiResource", "AsyncTuiResource"]
|
||||
|
||||
|
|
@ -44,6 +45,25 @@ class TuiResource(SyncAPIResource):
|
|||
"""
|
||||
return TuiResourceWithStreamingResponse(self)
|
||||
|
||||
def open_help(
    self,
    *,
    # Per-request overrides: values given here take precedence over any
    # defaults configured on the client or passed to this method.
    extra_headers: Headers | None = None,
    extra_query: Query | None = None,
    extra_body: Body | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> TuiOpenHelpResponse:
    """Open the help dialog"""
    request_options = make_request_options(
        extra_headers=extra_headers,
        extra_query=extra_query,
        extra_body=extra_body,
        timeout=timeout,
    )
    return self._post(
        "/tui/open-help",
        options=request_options,
        cast_to=TuiOpenHelpResponse,
    )
|
||||
|
||||
def prompt(
|
||||
self,
|
||||
*,
|
||||
|
|
@ -104,6 +124,25 @@ class AsyncTuiResource(AsyncAPIResource):
|
|||
"""
|
||||
return AsyncTuiResourceWithStreamingResponse(self)
|
||||
|
||||
async def open_help(
    self,
    *,
    # Per-request overrides: values given here take precedence over any
    # defaults configured on the client or passed to this method.
    extra_headers: Headers | None = None,
    extra_query: Query | None = None,
    extra_body: Body | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> TuiOpenHelpResponse:
    """Open the help dialog"""
    request_options = make_request_options(
        extra_headers=extra_headers,
        extra_query=extra_query,
        extra_body=extra_body,
        timeout=timeout,
    )
    return await self._post(
        "/tui/open-help",
        options=request_options,
        cast_to=TuiOpenHelpResponse,
    )
|
||||
|
||||
async def prompt(
|
||||
self,
|
||||
*,
|
||||
|
|
@ -148,6 +187,9 @@ class TuiResourceWithRawResponse:
|
|||
def __init__(self, tui: TuiResource) -> None:
    self._tui = tui

    # Re-expose each TUI endpoint wrapped so that calling it returns the
    # raw HTTP response instead of the parsed response type.
    self.open_help = to_raw_response_wrapper(tui.open_help)
    self.prompt = to_raw_response_wrapper(tui.prompt)
|
||||
|
|
@ -157,6 +199,9 @@ class AsyncTuiResourceWithRawResponse:
|
|||
def __init__(self, tui: AsyncTuiResource) -> None:
    self._tui = tui

    # Re-expose each TUI endpoint wrapped so that awaiting it returns the
    # raw HTTP response instead of the parsed response type.
    self.open_help = async_to_raw_response_wrapper(tui.open_help)
    self.prompt = async_to_raw_response_wrapper(tui.prompt)
|
||||
|
|
@ -166,6 +211,9 @@ class TuiResourceWithStreamingResponse:
|
|||
def __init__(self, tui: TuiResource) -> None:
    self._tui = tui

    # Re-expose each TUI endpoint wrapped so that calling it yields a
    # streamed-response context manager rather than a parsed value.
    self.open_help = to_streamed_response_wrapper(tui.open_help)
    self.prompt = to_streamed_response_wrapper(tui.prompt)
|
||||
|
|
@ -175,6 +223,9 @@ class AsyncTuiResourceWithStreamingResponse:
|
|||
def __init__(self, tui: AsyncTuiResource) -> None:
    self._tui = tui

    # Re-expose each TUI endpoint wrapped so that calling it yields an
    # async streamed-response context manager rather than a parsed value.
    self.open_help = async_to_streamed_response_wrapper(tui.open_help)
    self.prompt = async_to_streamed_response_wrapper(tui.prompt)
|
||||
|
|
|
|||
|
|
@ -73,6 +73,7 @@ from .file_part_source_param import FilePartSourceParam as FilePartSourceParam
|
|||
from .session_abort_response import SessionAbortResponse as SessionAbortResponse
|
||||
from .step_finish_part_param import StepFinishPartParam as StepFinishPartParam
|
||||
from .tool_state_error_param import ToolStateErrorParam as ToolStateErrorParam
|
||||
from .tui_open_help_response import TuiOpenHelpResponse as TuiOpenHelpResponse
|
||||
from .session_delete_response import SessionDeleteResponse as SessionDeleteResponse
|
||||
from .session_summarize_params import SessionSummarizeParams as SessionSummarizeParams
|
||||
from .tool_state_pending_param import ToolStatePendingParam as ToolStatePendingParam
|
||||
|
|
|
|||
7
src/opencode_ai/types/tui_open_help_response.py
Normal file
7
src/opencode_ai/types/tui_open_help_response.py
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing_extensions import TypeAlias

__all__ = ["TuiOpenHelpResponse"]

# POST /tui/open-help returns a bare boolean acknowledgement, so the
# response type is simply an alias of ``bool``.
TuiOpenHelpResponse: TypeAlias = bool
|
||||
|
|
@ -9,7 +9,7 @@ import pytest
|
|||
|
||||
from opencode_ai import Opencode, AsyncOpencode
|
||||
from tests.utils import assert_matches_type
|
||||
from opencode_ai.types import TuiPromptResponse
|
||||
from opencode_ai.types import TuiPromptResponse, TuiOpenHelpResponse
|
||||
|
||||
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
|
||||
|
||||
|
|
@ -17,6 +17,34 @@ base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
|
|||
class TestTui:
|
||||
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
|
||||
|
||||
@pytest.mark.skip()
@parametrize
def test_method_open_help(self, client: Opencode) -> None:
    # Happy path: the endpoint call parses into TuiOpenHelpResponse.
    result = client.tui.open_help()
    assert_matches_type(TuiOpenHelpResponse, result, path=["response"])
|
||||
|
||||
@pytest.mark.skip()
@parametrize
def test_raw_response_open_help(self, client: Opencode) -> None:
    http_response = client.tui.with_raw_response.open_help()

    # The raw response should already be consumed and carry SDK headers.
    assert http_response.is_closed is True
    assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"
    parsed = http_response.parse()
    assert_matches_type(TuiOpenHelpResponse, parsed, path=["response"])
|
||||
|
||||
@pytest.mark.skip()
@parametrize
def test_streaming_response_open_help(self, client: Opencode) -> None:
    with client.tui.with_streaming_response.open_help() as streamed:
        # Inside the context the body is still open for streaming.
        assert not streamed.is_closed
        assert streamed.http_request.headers.get("X-Stainless-Lang") == "python"

        parsed = streamed.parse()
        assert_matches_type(TuiOpenHelpResponse, parsed, path=["response"])

    # Leaving the context manager must close the response.
    assert cast(Any, streamed.is_closed) is True
|
||||
|
||||
@pytest.mark.skip()
|
||||
@parametrize
|
||||
def test_method_prompt(self, client: Opencode) -> None:
|
||||
|
|
@ -84,6 +112,34 @@ class TestAsyncTui:
|
|||
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
|
||||
)
|
||||
|
||||
@pytest.mark.skip()
@parametrize
async def test_method_open_help(self, async_client: AsyncOpencode) -> None:
    # Happy path: the awaited call parses into TuiOpenHelpResponse.
    result = await async_client.tui.open_help()
    assert_matches_type(TuiOpenHelpResponse, result, path=["response"])
|
||||
|
||||
@pytest.mark.skip()
@parametrize
async def test_raw_response_open_help(self, async_client: AsyncOpencode) -> None:
    http_response = await async_client.tui.with_raw_response.open_help()

    # The raw response should already be consumed and carry SDK headers.
    assert http_response.is_closed is True
    assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"
    parsed = await http_response.parse()
    assert_matches_type(TuiOpenHelpResponse, parsed, path=["response"])
|
||||
|
||||
@pytest.mark.skip()
@parametrize
async def test_streaming_response_open_help(self, async_client: AsyncOpencode) -> None:
    async with async_client.tui.with_streaming_response.open_help() as streamed:
        # Inside the context the body is still open for streaming.
        assert not streamed.is_closed
        assert streamed.http_request.headers.get("X-Stainless-Lang") == "python"

        parsed = await streamed.parse()
        assert_matches_type(TuiOpenHelpResponse, parsed, path=["response"])

    # Leaving the context manager must close the response.
    assert cast(Any, streamed.is_closed) is True
|
||||
|
||||
@pytest.mark.skip()
|
||||
@parametrize
|
||||
async def test_method_prompt(self, async_client: AsyncOpencode) -> None:
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue