feat: support grok,mistral,sambanova (#1321)
Some checks are pending
CodeQL Advanced / Analyze (actions) (push) Waiting to run
CodeQL Advanced / Analyze (javascript-typescript) (push) Waiting to run
CodeQL Advanced / Analyze (python) (push) Waiting to run
Pre-commit / pre-commit (push) Waiting to run
Test / Run Python Tests (push) Waiting to run

This commit is contained in:
Wendong-Fan 2026-02-21 06:15:01 +00:00 committed by GitHub
parent 3b050b979e
commit 5ca2caf557
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
12 changed files with 258 additions and 32 deletions

View file

@ -15,7 +15,7 @@
import logging
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel, Field, field_validator
from pydantic import BaseModel, Field
from app.component.error_format import normalize_error_to_openai_format
from app.component.model_validation import (
@ -23,7 +23,7 @@ from app.component.model_validation import (
ValidationStage,
validate_model_with_details,
)
from app.model.chat import PLATFORM_MAPPING
from app.model.model_platform import NormalizedModelPlatform
logger = logging.getLogger("model_controller")
@ -32,7 +32,9 @@ router = APIRouter()
class ValidateModelRequest(BaseModel):
model_platform: str = Field("OPENAI", description="Model platform")
model_platform: NormalizedModelPlatform = Field(
"OPENAI", description="Model platform"
)
model_type: str = Field("GPT_4O_MINI", description="Model type")
api_key: str | None = Field(None, description="API key")
url: str | None = Field(None, description="Model URL")
@ -46,11 +48,6 @@ class ValidateModelRequest(BaseModel):
False, description="Include detailed diagnostic information"
)
@field_validator("model_platform")
@classmethod
def map_model_platform(cls, v: str) -> str:
return PLATFORM_MAPPING.get(v, v)
class ValidateModelResponse(BaseModel):
is_valid: bool = Field(..., description="Is valid")

View file

@ -22,6 +22,10 @@ from camel.types import ModelType, RoleType
from pydantic import BaseModel, Field, field_validator
from app.model.enums import DEFAULT_SUMMARY_PROMPT, Status # noqa: F401
from app.model.model_platform import (
NormalizedModelPlatform,
NormalizedOptionalModelPlatform,
)
logger = logging.getLogger("chat_model")
@ -44,11 +48,6 @@ class QuestionAnalysisResult(BaseModel):
McpServers = dict[Literal["mcpServers"], dict[str, dict]]
PLATFORM_MAPPING = {
"z.ai": "openai-compatible-model",
"ModelArk": "openai-compatible-model",
}
class Chat(BaseModel):
task_id: str
@ -56,7 +55,7 @@ class Chat(BaseModel):
question: str
email: str
attaches: list[str] = []
model_platform: str
model_platform: NormalizedModelPlatform
model_type: str
api_key: str
# for cloud version, user don't need to set api_url
@ -79,11 +78,6 @@ class Chat(BaseModel):
# User identifier for user-specific skill configurations
user_id: str | None = None
@field_validator("model_platform")
@classmethod
def map_model_platform(cls, v: str) -> str:
return PLATFORM_MAPPING.get(v, v)
@field_validator("model_type")
@classmethod
def check_model_type(cls, model_type: str):
@ -166,7 +160,7 @@ class AgentModelConfig(BaseModel):
"""Optional per-agent model configuration
to override the default task model."""
model_platform: str | None = None
model_platform: NormalizedOptionalModelPlatform = None
model_type: str | None = None
api_key: str | None = None
api_url: str | None = None

View file

@ -0,0 +1,43 @@
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
from typing import Annotated, Final
from pydantic import BeforeValidator
PLATFORM_ALIAS_MAPPING: Final[dict[str, str]] = {
"z.ai": "openai-compatible-model",
"ModelArk": "openai-compatible-model",
"grok": "openai-compatible-model",
}
def normalize_model_platform(platform: str) -> str:
"""Normalize provider aliases to supported model platform names."""
return PLATFORM_ALIAS_MAPPING.get(platform, platform)
def normalize_optional_model_platform(platform: str | None) -> str | None:
"""Optional variant of normalize_model_platform."""
if platform is None:
return None
return normalize_model_platform(platform)
# Pydantic field type: rewrites known provider aliases (e.g. "grok") to their
# supported platform names before field validation runs.
NormalizedModelPlatform = Annotated[
    str, BeforeValidator(normalize_model_platform)
]
# Same normalization, but also accepts and preserves None for optional
# override fields (e.g. per-agent model configuration).
NormalizedOptionalModelPlatform = Annotated[
    str | None, BeforeValidator(normalize_optional_model_platform)
]

View file

@ -6,7 +6,7 @@ readme = "README.md"
requires-python = ">=3.11,<3.12"
dependencies = [
"pip>=23.0",
"camel-ai[eigent]==0.2.90a1",
"camel-ai[eigent]==0.2.90a2",
"fastapi>=0.115.12",
"fastapi-babel>=1.0.0",
"uvicorn[standard]>=0.34.2",

View file

@ -34,6 +34,31 @@ from app.controller.model_controller import (
class TestModelControllerEnhanced:
"""Test cases for enhanced model controller with detailed validation."""
def test_validate_model_request_maps_grok_alias(self):
    """The grok alias must normalize to openai-compatible-model."""
    request = ValidateModelRequest(
        model_platform="grok",
        model_type="grok-3",
        api_key="test_key",
    )
    assert request.model_platform == "openai-compatible-model"
def test_validate_model_request_keeps_supported_platforms_unchanged(self):
    """Natively supported camel-ai platforms pass through untouched."""
    cases = (
        ("mistral", "mistral-large-latest"),
        ("samba-nova", "Meta-Llama-3.1-8B-Instruct"),
    )
    for platform, model in cases:
        request = ValidateModelRequest(
            model_platform=platform,
            model_type=model,
            api_key="test_key",
        )
        assert request.model_platform == platform
@pytest.mark.asyncio
async def test_validate_model_with_diagnostics_success(self):
"""Test successful model validation with diagnostics enabled."""

View file

@ -11,9 +11,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
"""Unit tests for AgentModelConfig and per-agent model configuration."""
"""Unit tests for Chat and AgentModelConfig model configuration."""
from app.model.chat import AgentModelConfig, NewAgent
from app.model.chat import AgentModelConfig, Chat, NewAgent
class TestAgentModelConfig:
@ -118,3 +118,43 @@ class TestNewAgentWithModelConfig:
assert "custom_model_config" in data
assert data["custom_model_config"]["model_platform"] == "anthropic"
assert data["custom_model_config"]["model_type"] == "claude-3-sonnet"
class TestModelPlatformMapping:
    """Backend mapping of provider aliases onto supported platform names."""

    def _create_chat(self, model_platform: str) -> Chat:
        """Return a minimal valid Chat built for the given platform."""
        return Chat(
            task_id="task-1",
            project_id="project-1",
            question="test question",
            email="tester@example.com",
            model_platform=model_platform,
            model_type="gpt-4o",
            api_key="test-key",
            api_url="https://api.example.com/v1",
        )

    def test_chat_maps_grok_to_openai_compatible_model(self):
        """The grok alias is normalized while validating a Chat."""
        chat = self._create_chat("grok")
        assert chat.model_platform == "openai-compatible-model"

    def test_chat_keeps_supported_platforms_unchanged(self):
        """Native camel-ai platforms survive Chat validation unchanged."""
        for platform in ("mistral", "samba-nova"):
            assert self._create_chat(platform).model_platform == platform

    def test_agent_model_config_maps_grok_alias(self):
        """Per-agent overrides normalize the grok alias as well."""
        config = AgentModelConfig(model_platform="grok")
        assert config.model_platform == "openai-compatible-model"

    def test_agent_model_config_keeps_supported_platforms_unchanged(self):
        """Per-agent overrides leave native platforms as provided."""
        for platform in ("mistral", "samba-nova"):
            config = AgentModelConfig(model_platform=platform)
            assert config.model_platform == platform

View file

@ -0,0 +1,51 @@
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
from pydantic import BaseModel
from app.model.model_platform import (
NormalizedModelPlatform,
NormalizedOptionalModelPlatform,
normalize_model_platform,
normalize_optional_model_platform,
)
def test_normalize_model_platform_maps_known_aliases():
    """Every declared alias maps to the OpenAI-compatible adapter."""
    for alias in ("grok", "z.ai", "ModelArk"):
        assert normalize_model_platform(alias) == "openai-compatible-model"
def test_normalize_model_platform_keeps_non_alias_unchanged():
    """Platforms without an alias entry are returned verbatim."""
    for name in ("openai", "mistral"):
        assert normalize_model_platform(name) == name
def test_normalize_optional_model_platform_handles_none():
    """None input is accepted and returned as-is by the optional variant."""
    result = normalize_optional_model_platform(None)
    assert result is None
def test_normalized_model_platform_type_applies_in_pydantic_model():
    """The Annotated types run normalization during pydantic validation."""

    class _Model(BaseModel):
        model_platform: NormalizedModelPlatform
        optional_model_platform: NormalizedOptionalModelPlatform = None

    item = _Model(
        model_platform="grok",
        optional_model_platform="ModelArk",
    )
    assert item.model_platform == "openai-compatible-model"
    assert item.optional_model_platform == "openai-compatible-model"

48
backend/uv.lock generated
View file

@ -242,7 +242,7 @@ dev = [
[package.metadata]
requires-dist = [
{ name = "aiofiles", specifier = ">=24.1.0" },
{ name = "camel-ai", extras = ["eigent"], specifier = "==0.2.90a1" },
{ name = "camel-ai", extras = ["eigent"], specifier = "==0.2.90a2" },
{ name = "debugpy", specifier = ">=1.8.17" },
{ name = "fastapi", specifier = ">=0.115.12" },
{ name = "fastapi-babel", specifier = ">=1.0.0" },
@ -285,7 +285,7 @@ wheels = [
[[package]]
name = "camel-ai"
version = "0.2.90a1"
version = "0.2.90a2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "astor" },
@ -303,9 +303,9 @@ dependencies = [
{ name = "tiktoken" },
{ name = "websockets" },
]
sdist = { url = "https://files.pythonhosted.org/packages/85/cc/78345177dfffd532f21889bb4794f197e21ca79451a27243f0240db04840/camel_ai-0.2.90a1.tar.gz", hash = "sha256:0a84a7991a8679a83dcf1c6124d0a5ae953282526cf5a04a07bec8b7338436eb", size = 1156184, upload-time = "2026-02-12T22:32:31.727Z" }
sdist = { url = "https://files.pythonhosted.org/packages/4a/53/49d64b0d6f191f491a70ade6afeb9c866674adef7e0810f02fa7f9e839ae/camel_ai-0.2.90a2.tar.gz", hash = "sha256:7a85d2dca66fd7e51e81ec29c2e022116e12bb2957be876ac0e9142702ca410a", size = 1157046, upload-time = "2026-02-21T05:03:18.897Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/05/2c/926157452c27d1f93640a2293a7a0193212cdb4d1d34f62b98c4392491ce/camel_ai-0.2.90a1-py3-none-any.whl", hash = "sha256:2764de542c165d57b35836999500aeb2ba148077d494a168009fb7a4ddc64ca3", size = 1632784, upload-time = "2026-02-12T22:32:29.704Z" },
{ url = "https://files.pythonhosted.org/packages/d7/9b/6974d0af6adcc99f730e06f18b2c6fd394882692fededccd040e5d3f7994/camel_ai-0.2.90a2-py3-none-any.whl", hash = "sha256:4c1a6c874a4ec9a2774fcabca666833dc2ab9575ce439b2e132cbcef5b509aa2", size = 1633581, upload-time = "2026-02-21T05:03:16.444Z" },
]
[package.optional-dependencies]
@ -324,6 +324,7 @@ eigent = [
{ name = "markitdown", extra = ["all"] },
{ name = "mcp-server-fetch" },
{ name = "mcp-simple-arxiv" },
{ name = "mistralai" },
{ name = "numpy" },
{ name = "onnxruntime" },
{ name = "openpyxl" },
@ -631,6 +632,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/c1/8b/5fe2cc11fee489817272089c4203e679c63b570a5aaeb18d852ae3cbba6a/et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa", size = 18059, upload-time = "2024-10-25T17:25:39.051Z" },
]
[[package]]
name = "eval-type-backport"
version = "0.3.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/fb/a3/cafafb4558fd638aadfe4121dc6cefb8d743368c085acb2f521df0f3d9d7/eval_type_backport-0.3.1.tar.gz", hash = "sha256:57e993f7b5b69d271e37482e62f74e76a0276c82490cf8e4f0dffeb6b332d5ed", size = 9445, upload-time = "2025-12-02T11:51:42.987Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/cf/22/fdc2e30d43ff853720042fa15baa3e6122722be1a7950a98233ebb55cd71/eval_type_backport-0.3.1-py3-none-any.whl", hash = "sha256:279ab641905e9f11129f56a8a78f493518515b83402b860f6f06dd7c011fdfa8", size = 6063, upload-time = "2025-12-02T11:51:41.665Z" },
]
[[package]]
name = "exa-py"
version = "1.16.1"
@ -1136,6 +1146,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" },
]
[[package]]
name = "invoke"
version = "2.2.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/de/bd/b461d3424a24c80490313fd77feeb666ca4f6a28c7e72713e3d9095719b4/invoke-2.2.1.tar.gz", hash = "sha256:515bf49b4a48932b79b024590348da22f39c4942dff991ad1fb8b8baea1be707", size = 304762, upload-time = "2025-10-11T00:36:35.172Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/32/4b/b99e37f88336009971405cbb7630610322ed6fbfa31e1d7ab3fbf3049a2d/invoke-2.2.1-py3-none-any.whl", hash = "sha256:2413bc441b376e5cd3f55bb5d364f973ad8bdd7bf87e53c79de3c11bf3feecc8", size = 160287, upload-time = "2025-10-11T00:36:33.703Z" },
]
[[package]]
name = "isodate"
version = "0.7.2"
@ -1362,6 +1381,27 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/07/4e/6646a0004fc85b0c1df6e662db42f76fe5a0412179b7f65c066d7804370a/mcp_simple_arxiv-0.2.2-py3-none-any.whl", hash = "sha256:fcf607303c074ae5e88337b5bf3ea52cd781081f49ddf8fa0898eb3b8420dccb", size = 13686, upload-time = "2025-01-23T16:31:36.378Z" },
]
[[package]]
name = "mistralai"
version = "1.12.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "eval-type-backport" },
{ name = "httpx" },
{ name = "invoke" },
{ name = "opentelemetry-api" },
{ name = "opentelemetry-exporter-otlp-proto-http" },
{ name = "opentelemetry-sdk" },
{ name = "pydantic" },
{ name = "python-dateutil" },
{ name = "pyyaml" },
{ name = "typing-inspection" },
]
sdist = { url = "https://files.pythonhosted.org/packages/aa/12/c3476c53e907255b5f485f085ba50dd9a84b40fe662e9a888d6ded26fa7b/mistralai-1.12.4.tar.gz", hash = "sha256:e52b53bab58025dcd208eeac13e3c3df5778d4112eeca1f08124096c7738929f", size = 243129, upload-time = "2026-02-20T17:55:13.73Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c9/f9/98d825105c450b9c67c27026caa374112b7e466c18331601d02ca278a01b/mistralai-1.12.4-py3-none-any.whl", hash = "sha256:7b69fcbc306436491ad3377fbdead527c9f3a0ce145ec029bf04c6308ff2cca6", size = 509321, upload-time = "2026-02-20T17:55:15.27Z" },
]
[[package]]
name = "more-itertools"
version = "10.8.0"

View file

@ -7,7 +7,7 @@ requires-python = ">=3.12,<3.13"
dependencies = [
"alembic>=1.15.2",
"openai>=1.99.3,<2",
"camel-ai==0.2.90a1",
"camel-ai==0.2.90a2",
"pydantic[email]>=2.11.1",
"click>=8.1.8",
"fastapi>=0.115.12",

8
server/uv.lock generated
View file

@ -113,7 +113,7 @@ wheels = [
[[package]]
name = "camel-ai"
version = "0.2.90a1"
version = "0.2.90a2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "astor" },
@ -131,9 +131,9 @@ dependencies = [
{ name = "tiktoken" },
{ name = "websockets" },
]
sdist = { url = "https://files.pythonhosted.org/packages/85/cc/78345177dfffd532f21889bb4794f197e21ca79451a27243f0240db04840/camel_ai-0.2.90a1.tar.gz", hash = "sha256:0a84a7991a8679a83dcf1c6124d0a5ae953282526cf5a04a07bec8b7338436eb", size = 1156184, upload-time = "2026-02-12T22:32:31.727Z" }
sdist = { url = "https://files.pythonhosted.org/packages/4a/53/49d64b0d6f191f491a70ade6afeb9c866674adef7e0810f02fa7f9e839ae/camel_ai-0.2.90a2.tar.gz", hash = "sha256:7a85d2dca66fd7e51e81ec29c2e022116e12bb2957be876ac0e9142702ca410a", size = 1157046, upload-time = "2026-02-21T05:03:18.897Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/05/2c/926157452c27d1f93640a2293a7a0193212cdb4d1d34f62b98c4392491ce/camel_ai-0.2.90a1-py3-none-any.whl", hash = "sha256:2764de542c165d57b35836999500aeb2ba148077d494a168009fb7a4ddc64ca3", size = 1632784, upload-time = "2026-02-12T22:32:29.704Z" },
{ url = "https://files.pythonhosted.org/packages/d7/9b/6974d0af6adcc99f730e06f18b2c6fd394882692fededccd040e5d3f7994/camel_ai-0.2.90a2-py3-none-any.whl", hash = "sha256:4c1a6c874a4ec9a2774fcabca666833dc2ab9575ce439b2e132cbcef5b509aa2", size = 1633581, upload-time = "2026-02-21T05:03:16.444Z" },
]
[[package]]
@ -332,7 +332,7 @@ requires-dist = [
{ name = "alembic", specifier = ">=1.15.2" },
{ name = "arrow", specifier = ">=1.3.0" },
{ name = "bcrypt", specifier = "==4.0.1" },
{ name = "camel-ai", specifier = "==0.2.90a1" },
{ name = "camel-ai", specifier = "==0.2.90a2" },
{ name = "click", specifier = ">=8.1.8" },
{ name = "convert-case", specifier = ">=1.2.3" },
{ name = "cryptography", specifier = ">=45.0.4" },

View file

@ -105,6 +105,33 @@ export const INIT_PROVODERS: Provider[] = [
is_valid: false,
model_type: '',
},
{
id: 'samba-nova',
name: 'SambaNova',
apiKey: '',
apiHost: 'https://api.sambanova.ai/v1',
description: 'SambaNova model configuration.',
is_valid: false,
model_type: '',
},
{
id: 'grok',
name: 'Grok',
apiKey: '',
apiHost: 'https://api.x.ai/v1',
description: 'Grok model configuration.',
is_valid: false,
model_type: '',
},
{
id: 'mistral',
name: 'Mistral',
apiKey: '',
apiHost: 'https://api.mistral.ai',
description: 'Mistral model configuration.',
is_valid: false,
model_type: '',
},
{
id: 'aws-bedrock',
name: 'AWS Bedrock',

View file

@ -103,6 +103,12 @@ const DARK_FILL_MODELS = new Set([
'openai-compatible-model',
]);
const PROVIDER_AVATAR_URLS: Record<string, string> = {
'samba-nova': 'https://github.com/sambanova.png',
mistral: 'https://github.com/mistralai.png',
grok: 'https://github.com/xai-org.png',
};
export default function SettingModels() {
const {
modelType,
@ -1008,6 +1014,9 @@ export default function SettingModels() {
'z.ai': zaiImage,
moonshot: moonshotImage,
ModelArk: modelarkImage,
'samba-nova': PROVIDER_AVATAR_URLS['samba-nova'],
grok: PROVIDER_AVATAR_URLS.grok,
mistral: PROVIDER_AVATAR_URLS.mistral,
'aws-bedrock': bedrockImage,
azure: azureImage,
'openai-compatible-model': openaiImage, // Use OpenAI icon as fallback