feat: improve error clarity for LLM provider failures (#506)

Replace generic "An unexpected error occurred" messages with descriptive,
user-friendly error messages when LLM operations fail. Errors like invalid
API keys, wrong model names, and rate limits now surface clearly in the UI.

Adds error classification utility, global FastAPI exception handlers, and
frontend getApiErrorMessage() helper. Bumps version to 1.7.2.
This commit is contained in:
Luis Novo 2026-02-16 16:15:46 -03:00
parent b1101305f6
commit 20e18fdd0d
22 changed files with 480 additions and 186 deletions

View file

@@ -0,0 +1,90 @@
"""
Error classification utility for LLM provider errors.
Maps raw exceptions from AI providers/Esperanto/LangChain to user-friendly
error messages and appropriate exception types.
"""
from loguru import logger
from open_notebook.exceptions import (
AuthenticationError,
ConfigurationError,
ExternalServiceError,
NetworkError,
OpenNotebookError,
RateLimitError,
)
# Classification rules: (keywords, exception_class, user_message or None to pass through)
# Rules are checked in order and the FIRST keyword hit wins, so more specific
# categories (auth, rate limit) are listed before broad ones (5xx availability).
# Keywords are matched as lowercase substrings of "<exception type name>: <message>".
_CLASSIFICATION_RULES: list[tuple[list[str], type[OpenNotebookError], str | None]] = [
    # Authentication errors
    (
        ["authentication", "unauthorized", "invalid api key", "invalid_api_key", "401"],
        AuthenticationError,
        "Authentication failed. Please check your API key in Settings -> Credentials.",
    ),
    # Rate limit errors
    (
        ["rate limit", "rate_limit", "429", "too many requests", "quota exceeded"],
        RateLimitError,
        "Rate limit exceeded. Please wait a moment and try again.",
    ),
    # Model not found (pass through original message — the provider's text
    # usually names the bad model, which is more useful than a generic string)
    (
        ["model not found", "does not exist", "model_not_found"],
        ConfigurationError,
        None,
    ),
    # Configuration errors from provision.py (pass through — those messages
    # are already written to be user-facing)
    (
        ["no model configured", "please go to settings"],
        ConfigurationError,
        None,
    ),
    # Network errors ("connecterror"/"timeoutexception" match the exception
    # CLASS name, e.g. httpx.ConnectError, not just the message text)
    (
        ["connecterror", "timeoutexception", "connection refused", "connection error", "timed out", "timeout"],
        NetworkError,
        "Could not connect to the AI provider. Please check your network connection and provider URL.",
    ),
    # Context length errors
    (
        ["context length", "token limit", "maximum context", "context_length_exceeded", "max_tokens"],
        ExternalServiceError,
        "Content too large for the selected model. Try using a smaller selection or a model with a larger context window.",
    ),
    # Provider availability errors (transient 5xx / overload conditions)
    (
        ["500", "502", "503", "service unavailable", "overloaded", "internal server error"],
        ExternalServiceError,
        "The AI provider is temporarily unavailable. Please try again in a few minutes.",
    ),
]
def classify_error(exception: BaseException) -> tuple[type[OpenNotebookError], str]:
    """
    Classify a raw exception into a user-friendly error type and message.

    Args:
        exception: Any exception from LLM providers/Esperanto/LangChain

    Returns:
        Tuple of (exception_class, user_friendly_message)
    """
    # Search both the exception class name and its message text so rules can
    # key off either (e.g. "connecterror" matches httpx.ConnectError by name).
    haystack = f"{type(exception).__name__.lower()}: {str(exception).lower()}"

    # First rule with any matching keyword wins; rule order encodes priority.
    for keywords, exc_class, canned_message in _CLASSIFICATION_RULES:
        if any(keyword in haystack for keyword in keywords):
            # None means "pass through the provider's original message".
            return exc_class, (str(exception) if canned_message is None else canned_message)

    # Unclassified error - log for future improvement
    logger.warning(
        f"Unclassified LLM error ({type(exception).__name__}): {exception}"
    )
    return ExternalServiceError, f"AI service error: {exception}"