refactor(providers): extract http_status module and rename handle_status_openai_compat (#8620)
Some checks are pending
Canary / Prepare Version (push) Waiting to run
Canary / build-cli (push) Blocked by required conditions
Canary / Upload Install Script (push) Blocked by required conditions
Canary / bundle-desktop (push) Blocked by required conditions
Canary / bundle-desktop-intel (push) Blocked by required conditions
Canary / bundle-desktop-linux (push) Blocked by required conditions
Canary / bundle-desktop-windows (push) Blocked by required conditions
Canary / Release (push) Blocked by required conditions
Unused Dependencies / machete (push) Waiting to run
CI / changes (push) Waiting to run
CI / Check Rust Code Format (push) Blocked by required conditions
CI / Build and Test Rust Project (push) Blocked by required conditions
CI / Build Rust Project on Windows (push) Waiting to run
CI / Check MSRV (push) Blocked by required conditions
CI / Lint Rust Code (push) Blocked by required conditions
CI / Check Generated Schemas are Up-to-Date (push) Blocked by required conditions
CI / Test and Lint Electron Desktop App (push) Blocked by required conditions
Goose 2 CI / Lint & Format (push) Waiting to run
Goose 2 CI / Unit Tests (push) Waiting to run
Goose 2 CI / Desktop Build & E2E (push) Waiting to run
Goose 2 CI / Rust Lint (push) Waiting to run
Live Provider Tests / check-fork (push) Waiting to run
Live Provider Tests / changes (push) Blocked by required conditions
Live Provider Tests / Build Binary (push) Blocked by required conditions
Live Provider Tests / Smoke Tests (push) Blocked by required conditions
Live Provider Tests / Smoke Tests (Code Execution) (push) Blocked by required conditions
Live Provider Tests / Compaction Tests (push) Blocked by required conditions
Live Provider Tests / goose server HTTP integration tests (push) Blocked by required conditions
Publish Docker Image / docker (push) Waiting to run
Scorecard supply-chain security / Scorecard analysis (push) Waiting to run

Signed-off-by: DaeHee Lee <lee111dae11@proton.me>
This commit is contained in:
이대희 2026-04-22 17:33:26 +09:00 committed by GitHub
parent 015b0d92d3
commit d18bb6e512
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
14 changed files with 149 additions and 129 deletions

View file

@ -15,7 +15,7 @@ use super::formats::anthropic::{
create_request, response_to_streaming_message, thinking_type, ThinkingType,
};
use super::inventory::{config_secret_value, serialize_string_map, InventoryIdentityInput};
use super::openai_compatible::handle_status_openai_compat;
use super::openai_compatible::handle_status;
use super::openai_compatible::map_http_error_to_provider_error;
use super::retry::ProviderRetry;
use crate::config::declarative_providers::DeclarativeProviderConfig;
@ -322,7 +322,7 @@ impl Provider for AnthropicProvider {
request = request.header(key, value)?;
}
let resp = request.response_post(&payload).await?;
handle_status_openai_compat(resp).await
handle_status(resp).await
})
.await
.inspect_err(|e| {

View file

@ -5,7 +5,7 @@ use crate::providers::api_client::AuthProvider;
use crate::providers::base::{ConfigKey, MessageStream, Provider, ProviderDef, ProviderMetadata};
use crate::providers::errors::ProviderError;
use crate::providers::formats::openai_responses::responses_api_to_streaming_message;
use crate::providers::openai_compatible::handle_status_openai_compat;
use crate::providers::openai_compatible::handle_status;
use crate::providers::retry::ProviderRetry;
use crate::session_context::SESSION_ID_HEADER;
use anyhow::{anyhow, Result};
@ -928,7 +928,7 @@ impl ChatGptCodexProvider {
.await
.map_err(|e| ProviderError::RequestFailed(e.to_string()))?;
handle_status_openai_compat(response).await
handle_status(response).await
}
}

View file

@ -22,7 +22,7 @@ use super::formats::openai_responses::{
};
use super::oauth;
use super::openai_compatible::{
handle_response_openai_compat, handle_status_openai_compat, map_http_error_to_provider_error,
handle_response_openai_compat, handle_status, map_http_error_to_provider_error,
stream_openai_compat,
};
use super::retry::ProviderRetry;
@ -396,7 +396,7 @@ impl Provider for DatabricksProvider {
.api_client
.response_post(Some(session_id), &path, &payload_clone)
.await?;
handle_status_openai_compat(resp).await
handle_status(resp).await
})
.await
.inspect_err(|e| {

View file

@ -1,7 +1,7 @@
use crate::config::paths::Paths;
use crate::providers::api_client::{ApiClient, AuthMethod};
use crate::providers::oauth_device_flow::{run_device_flow, DeviceFlowConfig, RequestEncoding};
use crate::providers::openai_compatible::{handle_status_openai_compat, stream_openai_compat};
use crate::providers::openai_compatible::{handle_status, stream_openai_compat};
use anyhow::{anyhow, Context, Result};
use async_trait::async_trait;
use axum::http;
@ -434,7 +434,7 @@ impl Provider for GithubCopilotProvider {
.with_retry(|| async {
let mut payload_clone = payload.clone();
let resp = self.post(Some(session_id), &mut payload_clone).await?;
handle_status_openai_compat(resp).await
handle_status(resp).await
})
.await
.inspect_err(|e| {

View file

@ -1,7 +1,7 @@
use super::api_client::{ApiClient, AuthMethod};
use super::base::MessageStream;
use super::errors::ProviderError;
use super::openai_compatible::handle_status_openai_compat;
use super::openai_compatible::handle_status;
use super::retry::ProviderRetry;
use super::utils::RequestLog;
use crate::conversation::message::Message;
@ -101,7 +101,7 @@ impl GoogleProvider {
.api_client
.response_post(session_id, &path, payload)
.await?;
handle_status_openai_compat(response).await
handle_status(response).await
}
}

View file

@ -0,0 +1,115 @@
//! Format-agnostic HTTP status → `ProviderError` mapping.
//!
//! Used by providers regardless of their wire format (OpenAI, Anthropic,
//! Google, etc.). Parses both `{"error":{"message":"..."}}` and
//! `{"message":"..."}` error shapes.
use reqwest::{Response, StatusCode};
use serde_json::Value;
use super::errors::ProviderError;
/// Heuristically decide whether a provider error message reports a
/// context-window overflow. Matching is case-insensitive against known
/// phrasings used by different provider backends.
fn check_context_length_exceeded(text: &str) -> bool {
    const OVERFLOW_MARKERS: [&str; 12] = [
        "too long",
        "context length",
        "context_length_exceeded",
        "reduce the length",
        "token count",
        "exceeds",
        "exceed context limit",
        "input length",
        "max_tokens",
        "decrease input length",
        "context limit",
        "maximum prompt length",
    ];
    // Lowercase once, then scan for any known marker substring.
    let haystack = text.to_lowercase();
    OVERFLOW_MARKERS
        .iter()
        .any(|marker| haystack.contains(marker))
}
pub fn map_http_error_to_provider_error(
status: StatusCode,
payload: Option<Value>,
) -> ProviderError {
let extract_message = || -> String {
payload
.as_ref()
.and_then(|p| {
p.get("error")
.and_then(|e| e.get("message"))
.or_else(|| p.get("message"))
.and_then(|m| m.as_str())
.map(String::from)
})
.unwrap_or_else(|| payload.as_ref().map(|p| p.to_string()).unwrap_or_default())
};
let error = match status {
StatusCode::OK => unreachable!("Should not call this function with OK status"),
StatusCode::UNAUTHORIZED | StatusCode::FORBIDDEN => ProviderError::Authentication(format!(
"Authentication failed. Status: {}. Response: {}",
status,
extract_message()
)),
StatusCode::NOT_FOUND => {
ProviderError::RequestFailed(format!("Resource not found (404): {}", extract_message()))
}
StatusCode::PAYMENT_REQUIRED => ProviderError::CreditsExhausted {
details: extract_message(),
top_up_url: None,
},
StatusCode::PAYLOAD_TOO_LARGE => ProviderError::ContextLengthExceeded(extract_message()),
StatusCode::BAD_REQUEST => {
let payload_str = extract_message();
if check_context_length_exceeded(&payload_str) {
ProviderError::ContextLengthExceeded(payload_str)
} else {
ProviderError::RequestFailed(format!("Bad request (400): {}", payload_str))
}
}
StatusCode::TOO_MANY_REQUESTS => ProviderError::RateLimitExceeded {
details: extract_message(),
retry_delay: None,
},
_ if status.is_server_error() => {
ProviderError::ServerError(format!("Server error ({}): {}", status, extract_message()))
}
_ => ProviderError::RequestFailed(format!(
"Request failed with status {}: {}",
status,
extract_message()
)),
};
if !status.is_success() {
tracing::warn!(
"Provider request failed with status: {}. Payload: {:?}. Returning error: {:?}",
status,
payload,
error
);
}
error
}
/// Pass a successful response through untouched; otherwise consume the body,
/// try to parse it as JSON, and map the status + payload to a `ProviderError`.
pub async fn handle_status(response: Response) -> Result<Response, ProviderError> {
    match response.status() {
        // Fast path: 2xx responses are returned to the caller as-is.
        code if code.is_success() => Ok(response),
        code => {
            // A body that fails to read or parse degrades to `None` — the
            // mapper still produces a status-based error in that case.
            let body = response.text().await.unwrap_or_default();
            let payload = serde_json::from_str::<Value>(&body).ok();
            Err(map_http_error_to_provider_error(code, payload))
        }
    }
}
/// Validate the HTTP status via [`handle_status`], then decode the body as
/// JSON. A non-JSON body on an otherwise successful response becomes a
/// `ProviderError::RequestFailed`.
pub async fn handle_response(response: Response) -> Result<Value, ProviderError> {
    let ok_response = handle_status(response).await?;
    match ok_response.json::<Value>().await {
        Ok(value) => Ok(value),
        Err(e) => Err(ProviderError::RequestFailed(format!(
            "Response body is not valid JSON: {}",
            e
        ))),
    }
}

View file

@ -22,7 +22,7 @@ use super::formats::anthropic::{create_request, response_to_streaming_message};
use super::oauth_device_flow::{
refresh_device_flow_token, run_device_flow, DeviceFlowConfig, DeviceFlowTokens, RequestEncoding,
};
use super::openai_compatible::handle_status_openai_compat;
use super::openai_compatible::handle_status;
use super::retry::ProviderRetry;
use super::utils::RequestLog;
use crate::conversation::message::Message;
@ -403,7 +403,7 @@ impl Provider for KimiCodeProvider {
let response = self
.with_retry(|| async {
let resp = self.post(Some(session_id), &payload).await?;
handle_status_openai_compat(resp).await
handle_status(resp).await
})
.await
.inspect_err(|e| {
@ -454,7 +454,7 @@ impl Provider for KimiCodeProvider {
.send()
.await
.map_err(|e| ProviderError::RequestFailed(e.to_string()))?;
let resp = handle_status_openai_compat(resp).await?;
let resp = handle_status(resp).await?;
let parsed: ModelsResp = resp.json().await.map_err(|e| {
ProviderError::RequestFailed(format!("/v1/models body is not valid JSON: {}", e))

View file

@ -28,6 +28,7 @@ pub mod gemini_cli;
pub mod gemini_oauth;
pub mod githubcopilot;
pub mod google;
pub mod http_status;
mod init;
pub mod inventory;
pub mod kimicode;

View file

@ -1,7 +1,7 @@
use super::api_client::{ApiClient, AuthMethod};
use super::base::{ConfigKey, MessageStream, Provider, ProviderDef, ProviderMetadata};
use super::errors::ProviderError;
use super::openai_compatible::{handle_status_openai_compat, stream_openai_compat};
use super::openai_compatible::{handle_status, stream_openai_compat};
use super::retry::ProviderRetry;
use super::utils::{ImageFormat, RequestLog};
use crate::conversation::message::Message;
@ -191,7 +191,7 @@ impl Provider for NanoGptProvider {
.api_client
.response_post(Some(session_id), "chat/completions", &payload)
.await?;
handle_status_openai_compat(resp).await
handle_status(resp).await
})
.await
.inspect_err(|e| {

View file

@ -2,7 +2,7 @@ use super::api_client::{ApiClient, AuthMethod};
use super::base::{ConfigKey, MessageStream, Provider, ProviderDef, ProviderMetadata};
use super::errors::ProviderError;
use super::inventory::InventoryIdentityInput;
use super::openai_compatible::handle_status_openai_compat;
use super::openai_compatible::handle_status;
use super::retry::{ProviderRetry, RetryConfig};
use super::utils::{ImageFormat, RequestLog};
use crate::config::declarative_providers::DeclarativeProviderConfig;
@ -324,7 +324,7 @@ impl Provider for OllamaProvider {
.api_client
.response_post(Some(session_id), "v1/chat/completions", &payload)
.await?;
handle_status_openai_compat(resp).await
handle_status(resp).await
})
.await
.inspect_err(|e| {

View file

@ -9,7 +9,7 @@ use super::formats::openai_responses::{
};
use super::inventory::{config_secret_value, InventoryIdentityInput};
use super::openai_compatible::{
handle_response_openai_compat, handle_status_openai_compat, stream_openai_compat,
handle_response_openai_compat, handle_status, stream_openai_compat,
};
use super::retry::ProviderRetry;
use super::utils::ImageFormat;
@ -579,7 +579,7 @@ impl Provider for OpenAiProvider {
&payload_clone,
)
.await?;
handle_status_openai_compat(resp).await
handle_status(resp).await
})
.await
.inspect_err(|e| {
@ -644,7 +644,7 @@ impl Provider for OpenAiProvider {
.api_client
.response_post(Some(session_id), &self.base_path, &payload)
.await?;
handle_status_openai_compat(resp).await
handle_status(resp).await
})
.await
.inspect_err(|e| {

View file

@ -1,7 +1,9 @@
use anyhow::Error;
use async_stream::try_stream;
use futures::TryStreamExt;
use reqwest::{Response, StatusCode};
use reqwest::Response;
#[cfg(test)]
use reqwest::StatusCode;
use serde_json::Value;
use tokio::pin;
use tokio_stream::StreamExt;
@ -117,7 +119,7 @@ impl Provider for OpenAiCompatibleProvider {
.api_client
.response_post(Some(session_id), &completions_path, &payload)
.await?;
handle_status_openai_compat(resp).await
handle_status(resp).await
})
.await
.inspect_err(|e| {
@ -128,110 +130,12 @@ impl Provider for OpenAiCompatibleProvider {
}
}
fn check_context_length_exceeded(text: &str) -> bool {
let check_phrases = [
"too long",
"context length",
"context_length_exceeded",
"reduce the length",
"token count",
"exceeds",
"exceed context limit",
"input length",
"max_tokens",
"decrease input length",
"context limit",
"maximum prompt length",
];
let text_lower = text.to_lowercase();
check_phrases
.iter()
.any(|phrase| text_lower.contains(phrase))
}
// Re-exported from the dedicated `http_status` module — these helpers are
// format-agnostic and used across all provider families.
pub use super::http_status::{handle_response, handle_status, map_http_error_to_provider_error};
pub fn map_http_error_to_provider_error(
status: StatusCode,
payload: Option<Value>,
) -> ProviderError {
let extract_message = || -> String {
payload
.as_ref()
.and_then(|p| {
p.get("error")
.and_then(|e| e.get("message"))
.or_else(|| p.get("message"))
.and_then(|m| m.as_str())
.map(String::from)
})
.unwrap_or_else(|| payload.as_ref().map(|p| p.to_string()).unwrap_or_default())
};
let error = match status {
StatusCode::OK => unreachable!("Should not call this function with OK status"),
StatusCode::UNAUTHORIZED | StatusCode::FORBIDDEN => ProviderError::Authentication(format!(
"Authentication failed. Status: {}. Response: {}",
status,
extract_message()
)),
StatusCode::NOT_FOUND => {
ProviderError::RequestFailed(format!("Resource not found (404): {}", extract_message()))
}
StatusCode::PAYMENT_REQUIRED => ProviderError::CreditsExhausted {
details: extract_message(),
top_up_url: None,
},
StatusCode::PAYLOAD_TOO_LARGE => ProviderError::ContextLengthExceeded(extract_message()),
StatusCode::BAD_REQUEST => {
let payload_str = extract_message();
if check_context_length_exceeded(&payload_str) {
ProviderError::ContextLengthExceeded(payload_str)
} else {
ProviderError::RequestFailed(format!("Bad request (400): {}", payload_str))
}
}
StatusCode::TOO_MANY_REQUESTS => ProviderError::RateLimitExceeded {
details: extract_message(),
retry_delay: None,
},
_ if status.is_server_error() => {
ProviderError::ServerError(format!("Server error ({}): {}", status, extract_message()))
}
_ => ProviderError::RequestFailed(format!(
"Request failed with status {}: {}",
status,
extract_message()
)),
};
if !status.is_success() {
tracing::warn!(
"Provider request failed with status: {}. Payload: {:?}. Returning error: {:?}",
status,
payload,
error
);
}
error
}
pub async fn handle_status_openai_compat(response: Response) -> Result<Response, ProviderError> {
let status = response.status();
if !status.is_success() {
let body = response.text().await.unwrap_or_default();
let payload = serde_json::from_str::<Value>(&body).ok();
return Err(map_http_error_to_provider_error(status, payload));
}
Ok(response)
}
pub async fn handle_response_openai_compat(response: Response) -> Result<Value, ProviderError> {
let response = handle_status_openai_compat(response).await?;
response.json::<Value>().await.map_err(|e| {
ProviderError::RequestFailed(format!("Response body is not valid JSON: {}", e))
})
}
// Legacy alias kept for callers that haven't migrated their import path yet.
pub use super::http_status::handle_response as handle_response_openai_compat;
pub fn stream_openai_compat(
response: Response,

View file

@ -6,7 +6,7 @@ use serde_json::{json, Value};
use super::api_client::{ApiClient, AuthMethod};
use super::base::{ConfigKey, MessageStream, Provider, ProviderDef, ProviderMetadata};
use super::errors::ProviderError;
use super::openai_compatible::{handle_status_openai_compat, stream_openai_compat};
use super::openai_compatible::{handle_status, stream_openai_compat};
use super::retry::ProviderRetry;
use super::utils::{ImageFormat, RequestLog};
use crate::conversation::message::Message;
@ -291,7 +291,7 @@ impl Provider for OpenRouterProvider {
.api_client
.response_post(Some(session_id), "api/v1/chat/completions", &payload)
.await?;
handle_status_openai_compat(resp).await
handle_status(resp).await
})
.await
.inspect_err(|e| {

View file

@ -2,7 +2,7 @@ use super::api_client::{ApiClient, AuthMethod};
use super::base::{ConfigKey, MessageStream, Provider, ProviderDef, ProviderMetadata};
use super::errors::ProviderError;
use super::openai_compatible::{
handle_response_openai_compat, handle_status_openai_compat, map_http_error_to_provider_error,
handle_response_openai_compat, handle_status, map_http_error_to_provider_error,
stream_openai_compat,
};
use super::retry::ProviderRetry;
@ -155,7 +155,7 @@ impl Provider for TetrateProvider {
.api_client
.response_post(Some(session_id), "v1/chat/completions", &payload)
.await?;
let resp = handle_status_openai_compat(resp)
let resp = handle_status(resp)
.await
.map_err(Self::enrich_credits_error)?;