feat: deepseek api support (#118)

## Summary

* add native DeepSeek provider support via the shared OpenAI-compatible
provider base
* allow `deepseek/...` model prefixes in config validation
* add `DEEPSEEK_API_KEY` and `DEEPSEEK_BASE_URL` settings
* add DeepSeek entries to `.env.example` and `config/env.example`
* implement `DeepSeekProvider` and register it in provider dependencies
* add a DeepSeek request builder with DeepSeek-specific thinking payload
handling
* preserve Anthropic thinking blocks as `reasoning_content` for
DeepSeek-compatible continuation flows
* update `claude-pick` to discover DeepSeek models from the DeepSeek API
* document DeepSeek usage in `README.md`
* add tests for config validation, provider dependency wiring, request
building, and streaming behavior

## Motivation

DeepSeek exposes an OpenAI-compatible API and can be used directly
without routing through OpenRouter. This lets users spend their existing
DeepSeek balance through the proxy while keeping the same Claude Code
workflow and per-model provider mapping.

## Example

```dotenv
DEEPSEEK_API_KEY="sk-..."
DEEPSEEK_BASE_URL="https://api.deepseek.com"

MODEL_OPUS="deepseek/deepseek-reasoner"
MODEL_SONNET="deepseek/deepseek-chat"
MODEL_HAIKU="deepseek/deepseek-chat"
MODEL="deepseek/deepseek-chat"
```

---------

Co-authored-by: Alishahryar1 <alishahryar2@gmail.com>
This commit is contained in:
Pavel Yurchenko 2026-04-23 10:06:01 +10:00 committed by GitHub
parent c3f6dbe0bc
commit e719e4aed2
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
13 changed files with 410 additions and 14 deletions

View file

@@ -7,6 +7,7 @@ from config.settings import Settings
from config.settings import get_settings as _get_settings
from providers.base import BaseProvider, ProviderConfig
from providers.common import get_user_facing_error_message
from providers.deepseek import DEEPSEEK_BASE_URL, DeepSeekProvider
from providers.exceptions import AuthenticationError
from providers.llamacpp import LlamaCppProvider
from providers.lmstudio import LMStudioProvider
@@ -60,6 +61,24 @@ def _create_provider_for_type(provider_type: str, settings: Settings) -> BasePro
enable_thinking=settings.enable_thinking,
)
return OpenRouterProvider(config)
if provider_type == "deepseek":
if not settings.deepseek_api_key or not settings.deepseek_api_key.strip():
raise AuthenticationError(
"DEEPSEEK_API_KEY is not set. Add it to your .env file. "
"Get a key at https://platform.deepseek.com/api_keys"
)
config = ProviderConfig(
api_key=settings.deepseek_api_key,
base_url=DEEPSEEK_BASE_URL,
rate_limit=settings.provider_rate_limit,
rate_window=settings.provider_rate_window,
max_concurrency=settings.provider_max_concurrency,
http_read_timeout=settings.http_read_timeout,
http_write_timeout=settings.http_write_timeout,
http_connect_timeout=settings.http_connect_timeout,
enable_thinking=settings.enable_thinking,
)
return DeepSeekProvider(config)
if provider_type == "lmstudio":
config = ProviderConfig(
api_key="lm-studio",
@@ -87,12 +106,12 @@ def _create_provider_for_type(provider_type: str, settings: Settings) -> BasePro
)
return LlamaCppProvider(config)
logger.error(
"Unknown provider_type: '{}'. Supported: 'nvidia_nim', 'open_router', 'lmstudio', 'llamacpp'",
"Unknown provider_type: '{}'. Supported: 'nvidia_nim', 'open_router', 'deepseek', 'lmstudio', 'llamacpp'",
provider_type,
)
raise ValueError(
f"Unknown provider_type: '{provider_type}'. "
f"Supported: 'nvidia_nim', 'open_router', 'lmstudio', 'llamacpp'"
f"Supported: 'nvidia_nim', 'open_router', 'deepseek', 'lmstudio', 'llamacpp'"
)