From 81dc9145e044aff113827b1bd7bc54f9aeb1db8e Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Thu, 30 Oct 2025 02:10:23 +0000
Subject: [PATCH] fix(client): close streams without requiring full consumption

---
 src/opencode_ai/_streaming.py | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)

diff --git a/src/opencode_ai/_streaming.py b/src/opencode_ai/_streaming.py
index 34499b5..5dc9d33 100644
--- a/src/opencode_ai/_streaming.py
+++ b/src/opencode_ai/_streaming.py
@@ -57,9 +57,8 @@ class Stream(Generic[_T]):
         for sse in iterator:
             yield process_data(data=sse.json(), cast_to=cast_to, response=response)
 
-        # Ensure the entire stream is consumed
-        for _sse in iterator:
-            ...
+        # As we might not fully consume the response stream, we need to close it explicitly
+        response.close()
 
     def __enter__(self) -> Self:
         return self
@@ -121,9 +120,8 @@ class AsyncStream(Generic[_T]):
         async for sse in iterator:
             yield process_data(data=sse.json(), cast_to=cast_to, response=response)
 
-        # Ensure the entire stream is consumed
-        async for _sse in iterator:
-            ...
+        # As we might not fully consume the response stream, we need to close it explicitly
+        await response.aclose()
 
     async def __aenter__(self) -> Self:
         return self