Revert "feat: add model type autocomplete with api based suggestions and plat…" (#1352)

This commit is contained in:
Wendong-Fan 2026-02-23 18:02:46 +00:00 committed by GitHub
parent 73cea2f0ef
commit ddada3e642
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
23 changed files with 28 additions and 1081 deletions

View file

@ -50,6 +50,5 @@
"javascriptreact",
"typescript",
"typescriptreact"
],
"typescript.autoClosingTags": false
]
}

View file

@ -135,18 +135,11 @@ def agent_model(
)
model_platform_enum = None
# Anthropic SDK adds /v1 to every endpoint path internally, so a user-
# provided base URL must NOT include /v1 (unlike OpenAI-compatible APIs).
model_url = effective_config["api_url"]
if effective_config["model_platform"].lower() == "anthropic" and model_url:
stripped = model_url.rstrip("/")
if stripped.endswith("/v1"):
model_url = stripped[:-3]
model = ModelFactory.create(
model_platform=effective_config["model_platform"],
model_type=effective_config["model_type"],
api_key=effective_config["api_key"],
url=model_url,
url=effective_config["api_url"],
model_config_dict=model_config or None,
timeout=600, # 10 minutes
**init_params,

View file

@ -1,62 +0,0 @@
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
from camel.types import ModelType
# Maps platform name (lowercase) to model-name prefixes for filtering.
# An empty list means the platform accepts any model.
# Maps platform name (lowercase) to model-name prefixes for filtering.
# An empty list means the platform accepts any model.
MODEL_PREFIXES: dict[str, list[str]] = {
    "openai": ["gpt-", "o1", "o3", "o4", "chatgpt-"],
    "anthropic": ["claude-"],
    "gemini": ["gemini-"],
    "deepseek": ["deepseek"],
    "qwen": ["qwen"],
    "minimax": ["minimax"],
    "moonshot": ["moonshot", "kimi"],
    "azure": ["gpt-", "o1", "o3", "o4"],
    "openai-compatible-model": [],
    "openrouter": [],
    "bedrock": [],
    "ollama": [],
    "vllm": [],
    "sglang": [],
    "lmstudio": [],
    "modelark": [],
    "zai": [],
}


def get_model_type_suggestions(platform: str | None) -> list[str]:
    """Return CAMEL model names for the given platform, newest first.

    Filters by platform prefix when known; returns all models otherwise.

    Args:
        platform (str | None): Platform name (e.g. 'openai', 'anthropic').
            Case-insensitive. None or empty returns all models.

    Returns:
        list[str]: Model name strings, newest (by CAMEL enum order) first.
    """
    key = platform.lower() if platform else ""
    # CAMEL defines newer models later in the enum, so reverse for newest-first.
    newest_first = [member.value for member in ModelType][::-1]
    wanted = MODEL_PREFIXES.get(key)
    if not wanted:
        # Unknown platform, or a platform whose empty list accepts any model.
        return newest_first
    accepted = tuple(wanted)
    return [name for name in newest_first if name.lower().startswith(accepted)]

View file

@ -227,12 +227,6 @@ def create_agent(
raise ValueError(f"Invalid model_type: {model_type}")
if platform is None:
raise ValueError(f"Invalid model_platform: {model_platform}")
# Anthropic SDK adds /v1 to every endpoint path internally, so a user-
# provided base URL must NOT include /v1 (unlike OpenAI-compatible APIs).
if platform.lower() == "anthropic" and url:
stripped = url.rstrip("/")
if stripped.endswith("/v1"):
url = stripped[:-3]
model = ModelFactory.create(
model_platform=platform,
model_type=mtype,
@ -327,12 +321,6 @@ def validate_model_with_details(
# Stage 2: Model Creation
result.validation_stages[ValidationStage.MODEL_CREATION] = False
# Anthropic SDK adds /v1 to every endpoint path internally, so a user-
# provided base URL must NOT include /v1 (unlike OpenAI-compatible APIs).
if model_platform.lower() == "anthropic" and url:
stripped = url.rstrip("/")
if stripped.endswith("/v1"):
url = stripped[:-3]
try:
logger.debug(
"Creating model",

View file

@ -18,7 +18,6 @@ from fastapi import APIRouter, HTTPException
from pydantic import BaseModel, Field
from app.component.error_format import normalize_error_to_openai_format
from app.component.model_suggestions import get_model_type_suggestions
from app.component.model_validation import (
ValidationErrorType,
ValidationStage,
@ -290,28 +289,3 @@ async def validate_model(request: ValidateModelRequest):
},
},
)
class ModelTypeSuggestionRequest(BaseModel):
platform: str | None = Field(None, description="Model platform")
class ModelTypeSuggestionResponse(BaseModel):
model_types: list[str] = Field(
..., description="List of available model types"
)
@router.post("/model/types")
async def get_model_types(request: ModelTypeSuggestionRequest):
"""Return CAMEL model types for the given platform, newest first."""
try:
return ModelTypeSuggestionResponse(
model_types=get_model_type_suggestions(request.platform)
)
except Exception as e:
logger.error("Error getting model types: %s", e, exc_info=True)
raise HTTPException(
status_code=500,
detail={"message": f"Failed to get model types: {e}"},
)

View file

@ -341,10 +341,12 @@ async def step_solve(options: Chat, request: Request, task_lock: TaskLock):
extra={"project_id": options.project_id, "task_id": options.task_id},
)
logger.info("=" * 80)
logger.info(
"Step solve options: platform=%s type=%s",
options.model_platform,
options.model_type,
logger.debug(
"Step solve options",
extra={
"task_id": options.task_id,
"model_platform": options.model_platform,
},
)
while True:

View file

@ -1,178 +0,0 @@
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
from unittest.mock import MagicMock, patch
import pytest
from app.component.model_suggestions import (
MODEL_PREFIXES,
get_model_type_suggestions,
)
# Controlled model list (oldest → newest, matching CAMEL enum definition order)
_MOCK_MODELS = [
"gpt-3.5-turbo",
"gpt-4o",
"claude-3-opus",
"gemini-pro",
"deepseek-chat",
"qwen-turbo",
]
def _make_mock_enum(values: list[str]):
mocks = []
for v in values:
m = MagicMock()
m.value = v
mocks.append(m)
return mocks
@pytest.fixture(autouse=True)
def patch_model_type():
    """Replace the real CAMEL ModelType enum with the controlled mock list."""
    patcher = patch(
        "app.component.model_suggestions.ModelType",
        new=_make_mock_enum(_MOCK_MODELS),
    )
    patcher.start()
    try:
        yield
    finally:
        # Always undo the patch, even if the test body raises.
        patcher.stop()
# --- MODEL_PREFIXES table shape -------------------------------------------

def test_model_prefixes_contains_known_platforms():
    # Platforms with a curated prefix list must all appear in the table.
    assert "openai" in MODEL_PREFIXES
    assert "anthropic" in MODEL_PREFIXES
    assert "gemini" in MODEL_PREFIXES
    assert "deepseek" in MODEL_PREFIXES
    assert "qwen" in MODEL_PREFIXES


def test_model_prefixes_empty_list_for_open_platforms():
    # An empty prefix list is the "accept any model" marker.
    for platform in (
        "openai-compatible-model",
        "openrouter",
        "ollama",
        "vllm",
    ):
        assert MODEL_PREFIXES[platform] == [], (
            f"{platform} should have empty prefix list"
        )


# --- Basic return-value contract ------------------------------------------

def test_returns_list():
    assert isinstance(get_model_type_suggestions(None), list)


def test_all_entries_are_strings():
    result = get_model_type_suggestions(None)
    assert all(isinstance(m, str) for m in result)


def test_no_duplicates():
    result = get_model_type_suggestions(None)
    assert len(result) == len(set(result))


# --- "Return everything" paths (None / empty / unknown platform) ----------

def test_none_platform_returns_all_models():
    result = get_model_type_suggestions(None)
    assert set(result) == set(_MOCK_MODELS)


def test_empty_string_platform_returns_all_models():
    result = get_model_type_suggestions("")
    assert set(result) == set(_MOCK_MODELS)


def test_unknown_platform_returns_all_models():
    result = get_model_type_suggestions("totally-unknown-xyz")
    assert set(result) == set(_MOCK_MODELS)


def test_newest_first_ordering():
    # The function reverses CAMEL enum order so newer models come first.
    result = get_model_type_suggestions(None)
    assert result == list(reversed(_MOCK_MODELS))


# --- Per-platform prefix filtering ----------------------------------------

def test_openai_platform_includes_gpt_models():
    result = get_model_type_suggestions("openai")
    assert "gpt-3.5-turbo" in result
    assert "gpt-4o" in result


def test_openai_platform_excludes_other_models():
    result = get_model_type_suggestions("openai")
    assert "claude-3-opus" not in result
    assert "gemini-pro" not in result
    assert "deepseek-chat" not in result


def test_anthropic_platform_includes_claude_models():
    result = get_model_type_suggestions("anthropic")
    assert "claude-3-opus" in result


def test_anthropic_platform_excludes_other_models():
    result = get_model_type_suggestions("anthropic")
    assert "gpt-4o" not in result
    assert "gemini-pro" not in result


def test_gemini_platform_includes_gemini_models():
    result = get_model_type_suggestions("gemini")
    assert "gemini-pro" in result


def test_gemini_platform_excludes_other_models():
    result = get_model_type_suggestions("gemini")
    assert "gpt-4o" not in result


def test_deepseek_platform_includes_deepseek_models():
    result = get_model_type_suggestions("deepseek")
    assert "deepseek-chat" in result


def test_qwen_platform_includes_qwen_models():
    result = get_model_type_suggestions("qwen")
    assert "qwen-turbo" in result


def test_platform_lookup_is_case_insensitive():
    # Lookup lowercases the platform before consulting MODEL_PREFIXES.
    assert get_model_type_suggestions("OPENAI") == get_model_type_suggestions(
        "openai"
    )
    assert get_model_type_suggestions(
        "Anthropic"
    ) == get_model_type_suggestions("anthropic")


# --- Open platforms (empty prefix list) return everything -----------------

def test_openai_compatible_model_returns_all_models():
    result = get_model_type_suggestions("openai-compatible-model")
    assert set(result) == set(_MOCK_MODELS)


def test_ollama_returns_all_models():
    result = get_model_type_suggestions("ollama")
    assert set(result) == set(_MOCK_MODELS)


def test_vllm_returns_all_models():
    result = get_model_type_suggestions("vllm")
    assert set(result) == set(_MOCK_MODELS)


def test_filtered_result_is_subset_of_all():
    all_models = set(get_model_type_suggestions(None))
    openai_models = set(get_model_type_suggestions("openai"))
    assert openai_models.issubset(all_models)

View file

@ -1,115 +0,0 @@
// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
import { fetchPost } from '@/api/http';
import { Combobox, ComboboxOption } from '@/components/ui/combobox';
import { useEffect, useRef, useState } from 'react';
import { useTranslation } from 'react-i18next';
// Module-level cache so suggestions persist across component instances
// and don't re-fetch when navigating between tabs
const suggestionsCache: Record<string, ComboboxOption[]> = {};
interface ModelTypeComboboxProps {
platform: string;
value: string;
onValueChange: (value: string) => void;
placeholder?: string;
disabled?: boolean;
error?: string;
title?: string;
}
/**
 * Autocomplete input for model type names, backed by the `/model/types`
 * endpoint. Fetched suggestions are stored in the module-level
 * `suggestionsCache` (keyed by platform) so re-mounts and tab switches do
 * not re-fetch. Custom, non-suggested values are allowed via the Combobox.
 */
export function ModelTypeCombobox({
  platform,
  value,
  onValueChange,
  placeholder,
  disabled,
  error,
  title,
}: ModelTypeComboboxProps) {
  const { t } = useTranslation();
  const [options, setOptions] = useState<ComboboxOption[]>([]);
  const [loading, setLoading] = useState(false);
  // Track which cache key we last fetched to avoid redundant fetches
  const fetchedKeyRef = useRef<string>('');
  const cacheKey = platform;
  useEffect(() => {
    // If we already have cached results for this key, use them
    if (suggestionsCache[cacheKey]) {
      setOptions(suggestionsCache[cacheKey]);
      return;
    }
    // If we already fetched (or are fetching) this key, skip
    if (fetchedKeyRef.current === cacheKey) {
      return;
    }
    // Don't fetch if platform is empty
    if (!platform) {
      return;
    }
    // Mark as in-flight BEFORE the request so a re-render mid-fetch
    // does not fire a duplicate request for the same key.
    fetchedKeyRef.current = cacheKey;
    setLoading(true);
    fetchPost('/model/types', { platform })
      .then((res) => {
        // Only accept a well-formed { model_types: string[] } payload.
        if (res && res.model_types && Array.isArray(res.model_types)) {
          const opts: ComboboxOption[] = res.model_types.map(
            (type: string) => ({
              value: type,
              label: type,
            })
          );
          suggestionsCache[cacheKey] = opts;
          setOptions(opts);
        }
      })
      .catch((err) => {
        console.error('Failed to fetch model type suggestions:', err);
        // Reset so the next render can retry
        fetchedKeyRef.current = '';
        setOptions([]);
      })
      .finally(() => {
        setLoading(false);
      });
  }, [cacheKey]);
  return (
    <Combobox
      options={options}
      value={value}
      onValueChange={onValueChange}
      placeholder={placeholder}
      searchPlaceholder={t('setting.search-model-types')}
      emptyText={
        loading
          ? t('setting.loading-suggestions')
          : t('setting.no-model-types-found')
      }
      disabled={disabled}
      loading={loading}
      allowCustomValue={true}
      title={title}
      state={error ? 'error' : undefined}
      note={error}
    />
  );
}

View file

@ -1,212 +0,0 @@
// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
import { Check, ChevronDown, Loader2 } from 'lucide-react';
import * as React from 'react';
import {
Command,
CommandEmpty,
CommandGroup,
CommandInput,
CommandItem,
CommandList,
} from '@/components/ui/command';
import {
Popover,
PopoverContent,
PopoverTrigger,
} from '@/components/ui/popover';
import { cn } from '@/lib/utils';
export interface ComboboxOption {
value: string;
label: string;
}
export interface ComboboxProps {
options: ComboboxOption[];
value?: string;
onValueChange?: (value: string) => void;
placeholder?: string;
emptyText?: string;
searchPlaceholder?: string;
disabled?: boolean;
className?: string;
allowCustomValue?: boolean;
onOpenChange?: (open: boolean) => void;
loading?: boolean;
title?: string;
state?: 'default' | 'error' | 'success';
note?: string;
}
/**
 * Searchable select built from Popover + Command primitives, styled to
 * match the project's Select components. Supports a loading spinner,
 * error/success visual states with a note line, and (when
 * `allowCustomValue` is set) committing free-typed text as the value
 * when the popover closes without a matching option.
 */
export function Combobox({
  options,
  value,
  onValueChange,
  placeholder = 'Select option...',
  emptyText = 'No option found.',
  searchPlaceholder = 'Search...',
  disabled = false,
  className,
  allowCustomValue = false,
  onOpenChange,
  loading = false,
  title,
  state,
  note,
}: ComboboxProps) {
  const [open, setOpen] = React.useState(false);
  const [searchValue, setSearchValue] = React.useState('');
  const handleOpenChange = (newOpen: boolean) => {
    setOpen(newOpen);
    onOpenChange?.(newOpen);
    if (!newOpen) {
      // When closing, if allowCustomValue and user has typed something
      // that doesn't match an option, commit it as a custom value
      if (allowCustomValue && searchValue && searchValue !== value) {
        // Case-insensitive match so "GPT-4o" doesn't commit a duplicate
        // of an existing "gpt-4o" option.
        const matchesOption = options.some(
          (opt) => opt.value.toLowerCase() === searchValue.toLowerCase()
        );
        if (!matchesOption) {
          onValueChange?.(searchValue);
        }
      }
      // Reset search when closing
      setSearchValue('');
    }
  };
  const selectedOption = options.find((option) => option.value === value);
  const handleSelect = (selectedValue: string) => {
    onValueChange?.(selectedValue);
    setSearchValue('');
    setOpen(false);
  };
  // Border/background classes driven by the error/success/default state.
  const stateClasses =
    state === 'error'
      ? 'border-input-border-caution bg-input-bg-default'
      : state === 'success'
        ? 'border-input-border-success bg-input-bg-confirm'
        : 'border-input-border-default bg-input-bg-default';
  return (
    <div className={cn('w-full', disabled && 'cursor-not-allowed opacity-50')}>
      {title ? (
        <div className="mb-1.5 text-body-sm font-bold text-text-heading">
          {title}
        </div>
      ) : null}
      <Popover open={open} onOpenChange={handleOpenChange}>
        <PopoverTrigger asChild>
          <button
            role="combobox"
            aria-expanded={open}
            className={cn(
              // Match SelectTrigger styling
              'relative flex w-full items-center justify-between gap-2 rounded-lg border border-solid px-3 text-text-body outline-none transition-colors',
              'h-10 text-body-sm',
              'whitespace-nowrap [&>span]:line-clamp-1',
              // State-based colors
              stateClasses,
              // Interactive states (only when no error state)
              state !== 'error' && [
                'hover:border-input-border-hover hover:bg-input-bg-hover',
                'focus-visible:ring-0 data-[state=open]:bg-input-bg-input',
                'focus-within:border-input-border-focus',
              ],
              className
            )}
            disabled={disabled}
            type="button"
          >
            <span
              className={cn(
                'truncate text-left',
                !selectedOption && !value && 'text-input-label-default/50'
              )}
            >
              {/* Fall back to the raw value when it matches no option
                  (custom values), then to the placeholder. */}
              {selectedOption ? selectedOption.label : value || placeholder}
            </span>
            {loading ? (
              <Loader2 className="h-4 w-4 shrink-0 animate-spin text-icon-primary" />
            ) : (
              <ChevronDown className="h-4 w-4 shrink-0 text-icon-primary" />
            )}
          </button>
        </PopoverTrigger>
        <PopoverContent
          className={cn(
            // Match SelectContent styling
            'w-[var(--radix-popover-trigger-width)] rounded-lg border border-solid border-input-border-default bg-input-bg-default p-0 shadow-md backdrop-blur-md',
            'data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95',
            'data-[side=bottom]:slide-in-from-top-2 data-[side=top]:slide-in-from-bottom-2'
          )}
          side="bottom"
          align="start"
          sideOffset={4}
        >
          <Command shouldFilter={true}>
            <CommandInput
              placeholder={searchPlaceholder}
              value={searchValue}
              onValueChange={setSearchValue}
            />
            <CommandList>
              <CommandEmpty>{emptyText}</CommandEmpty>
              <CommandGroup>
                {options.map((option) => (
                  <CommandItem
                    key={option.value}
                    value={option.value}
                    onSelect={() => handleSelect(option.value)}
                    className="rounded-lg py-1.5 pl-2 pr-8 text-sm hover:bg-menutabs-fill-hover"
                  >
                    {/* Check mark is rendered for every row but only made
                        visible for the currently selected value. */}
                    <Check
                      className={cn(
                        'mr-2 h-4 w-4',
                        value === option.value ? 'opacity-100' : 'opacity-0'
                      )}
                    />
                    {option.label}
                  </CommandItem>
                ))}
              </CommandGroup>
            </CommandList>
          </Command>
        </PopoverContent>
      </Popover>
      {note ? (
        <div
          className={cn(
            'mt-1.5 !text-body-xs',
            state === 'error'
              ? 'text-text-caution'
              : state === 'success'
                ? 'text-text-success'
                : 'text-text-label'
          )}
        >
          {note}
        </div>
      ) : null}
    </div>
  );
}

View file

@ -172,9 +172,6 @@
"api-key-setting": "إعداد مفتاح API",
"api-host-setting": "إعداد مضيف API",
"model-type-setting": "إعداد نوع النموذج",
"search-model-types": "البحث في أنواع النماذج...",
"no-model-types-found": "لم يتم العثور على أنواع النماذج.",
"loading-suggestions": "تحميل الاقتراحات...",
"please-select": "يرجى الاختيار",
"configuring": "جارٍ التكوين...",

View file

@ -234,9 +234,6 @@
"api-key-setting": "API-Schlüssel-Einstellung",
"api-host-setting": "API-Host-Einstellung",
"model-type-setting": "Modelltyp-Einstellung",
"search-model-types": "Modelltypen suchen...",
"no-model-types-found": "Keine Modelltypen gefunden.",
"loading-suggestions": "Vorschläge werden geladen...",
"please-select": "Bitte auswählen",
"configuring": "Wird konfiguriert...",

View file

@ -200,9 +200,6 @@
"api-key-setting": "API Key Setting",
"api-host-setting": "API Host Setting",
"model-type-setting": "Model Type Setting",
"search-model-types": "Search model types...",
"no-model-types-found": "No model types found.",
"loading-suggestions": "Loading suggestions...",
"please-select": "Please select",
"configuring": "Configuring...",

View file

@ -234,9 +234,6 @@
"api-key-setting": "Configuración de clave API",
"api-host-setting": "Configuración de host API",
"model-type-setting": "Configuración de tipo de modelo",
"search-model-types": "Buscar tipos de modelos...",
"no-model-types-found": "No se encontraron tipos de modelos.",
"loading-suggestions": "Cargando sugerencias...",
"please-select": "Por favor seleccione",
"configuring": "Configurando...",

View file

@ -210,9 +210,6 @@
"api-key-setting": "Paramètre de clé API",
"api-host-setting": "Paramètre d'hôte API",
"model-type-setting": "Paramètre de type de modèle",
"search-model-types": "Rechercher des types de modèles...",
"no-model-types-found": "Aucun type de modèle trouvé.",
"loading-suggestions": "Chargement des suggestions...",
"please-select": "Veuillez sélectionner",
"configuring": "Configuration...",

View file

@ -234,9 +234,6 @@
"api-key-setting": "Impostazione chiave API",
"api-host-setting": "Impostazione host API",
"model-type-setting": "Impostazione tipo di modello",
"search-model-types": "Cerca tipi di modello...",
"no-model-types-found": "Nessun tipo di modello trovato.",
"loading-suggestions": "Caricamento suggerimenti...",
"please-select": "Seleziona",
"configuring": "Configurazione in corso...",

View file

@ -235,9 +235,6 @@
"api-key-setting": "APIキー設定",
"api-host-setting": "APIホスト設定",
"model-type-setting": "モデルタイプ設定",
"search-model-types": "モデルタイプを検索...",
"no-model-types-found": "モデルタイプが見つかりません。",
"loading-suggestions": "候補を読み込み中...",
"please-select": "選択してください",
"configuring": "設定中...",

View file

@ -235,9 +235,6 @@
"api-key-setting": "API 키 설정",
"api-host-setting": "API 호스트 설정",
"model-type-setting": "모델 타입 설정",
"search-model-types": "모델 유형 검색...",
"no-model-types-found": "모델 유형을 찾을 수 없습니다.",
"loading-suggestions": "제안 로딩 중...",
"please-select": "선택하세요",
"configuring": "구성 중...",

View file

@ -234,9 +234,6 @@
"api-key-setting": "Настройка API-ключа",
"api-host-setting": "Настройка API-хоста",
"model-type-setting": "Настройка типа модели",
"search-model-types": "Поиск типов моделей...",
"no-model-types-found": "Типы моделей не найдены.",
"loading-suggestions": "Загрузка предложений...",
"please-select": "Пожалуйста, выберите",
"configuring": "Конфигурирование...",

View file

@ -191,9 +191,6 @@
"api-key-setting": "API 密钥设置",
"api-host-setting": "API Host 设置",
"model-type-setting": "模型类型设置",
"search-model-types": "搜索模型类型...",
"no-model-types-found": "未找到模型类型。",
"loading-suggestions": "正在加载建议...",
"please-select": "请选择",
"configuring": "正在配置...",

View file

@ -162,9 +162,6 @@
"api-key-setting": "API 金鑰設定",
"api-host-setting": "API 主機設定",
"model-type-setting": "模型類型設定",
"search-model-types": "搜尋模型類型...",
"no-model-types-found": "未找到模型類型。",
"loading-suggestions": "正在載入建議...",
"please-select": "請選擇",
"configuring": "正在配置...",

View file

@ -19,7 +19,6 @@ import {
proxyFetchPost,
proxyFetchPut,
} from '@/api/http';
import { ModelTypeCombobox } from '@/components/ModelTypeCombobox';
import { Button } from '@/components/ui/button';
import {
DropdownMenu,
@ -183,15 +182,6 @@ export default function SettingModels() {
{}
);
const [localTypes, setLocalTypes] = useState<Record<string, string>>({});
// Saved (persisted) model types — only updated after a successful Save, used for the display label
const [savedModelTypes, setSavedModelTypes] = useState<string[]>(() =>
INIT_PROVODERS.filter((p) => p.id !== 'local').map(
(p) => p.model_type ?? ''
)
);
const [savedLocalTypes, setSavedLocalTypes] = useState<
Record<string, string>
>({});
const [localProviderIds, setLocalProviderIds] = useState<
Record<string, number | undefined>
>({});
@ -278,14 +268,6 @@ export default function SettingModels() {
return fi;
})
);
setSavedModelTypes(
items.map((item) => {
const found = providerList.find(
(p: any) => p.provider_name === item.id
);
return found?.model_type ?? '';
})
);
// Handle local models - load all local providers per platform
const localProviders = providerList.filter((p: any) =>
LOCAL_PROVIDER_NAMES.includes(p.provider_name)
@ -314,7 +296,6 @@ export default function SettingModels() {
setLocalEndpoints(endpoints);
setLocalTypes(types);
setSavedLocalTypes(types);
setLocalProviderIds(providerIds);
// Fetch Ollama models if ollama endpoint is set
@ -376,7 +357,7 @@ export default function SettingModels() {
const preferredIdx = form.findIndex((f) => f.prefer);
if (preferredIdx !== -1) {
const item = items[preferredIdx];
const modelType = savedModelTypes[preferredIdx] || '';
const modelType = form[preferredIdx].model_type || '';
return `${t('setting.custom-model')} / ${item.name}${modelType ? ` (${modelType})` : ''}`;
}
@ -392,7 +373,7 @@ export default function SettingModels() {
: localPlatform === 'sglang'
? 'SGLang'
: 'LM Studio';
const modelType = savedLocalTypes[localPlatform] || '';
const modelType = localTypes[localPlatform] || '';
return `${t('setting.local-model')} / ${platformName}${modelType ? ` (${modelType})` : ''}`;
}
@ -633,14 +614,6 @@ export default function SettingModels() {
return fi;
})
);
setSavedModelTypes(
items.map((item) => {
const found = providerList.find(
(p: any) => p.provider_name === item.id
);
return found?.model_type ?? '';
})
);
// Check if this was a pending default model selection
if (
@ -845,10 +818,6 @@ export default function SettingModels() {
if (local) {
setLocalProviderIds((prev) => ({ ...prev, [localPlatform]: local.id }));
setLocalPrefer(local.prefer ?? false);
setSavedLocalTypes((prev) => ({
...prev,
[localPlatform]: currentType,
}));
// Check if this was a pending default model selection
if (
@ -1431,10 +1400,18 @@ export default function SettingModels() {
}}
/>
{/* Model Type Setting */}
<ModelTypeCombobox
platform={item.id}
<Input
id={`modelType-${item.id}`}
size="default"
title={t('setting.model-type-setting')}
state={errors[idx]?.model_type ? 'error' : 'default'}
note={errors[idx]?.model_type ?? undefined}
placeholder={`${t('setting.enter-your-model-type')} ${
item.name
} ${t('setting.model-type')}`}
value={form[idx].model_type}
onValueChange={(v) => {
onChange={(e) => {
const v = e.target.value;
setForm((f) =>
f.map((fi, i) => (i === idx ? { ...fi, model_type: v } : fi))
);
@ -1444,12 +1421,6 @@ export default function SettingModels() {
)
);
}}
placeholder={`${t('setting.enter-your-model-type')} ${
item.name
} ${t('setting.model-type')}`}
disabled={loading === idx}
error={errors[idx]?.model_type}
title={t('setting.model-type-setting')}
/>
{/* externalConfig render */}
{item.externalConfig &&
@ -1730,19 +1701,19 @@ export default function SettingModels() {
)}
</div>
) : (
<ModelTypeCombobox
platform={platform}
<Input
size="default"
title={t('setting.model-type')}
state={localInputError ? 'error' : 'default'}
placeholder={t('setting.enter-your-local-model-type')}
value={currentType}
onValueChange={(v) =>
onChange={(e) =>
setLocalTypes((prev) => ({
...prev,
[platform]: v,
[platform]: e.target.value,
}))
}
placeholder={t('setting.enter-your-local-model-type')}
disabled={!localEnabled}
error={localInputError ? localError || undefined : undefined}
title={t('setting.model-type')}
/>
)}
</div>

View file

@ -1,193 +0,0 @@
// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
import { render, screen } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import { Combobox } from '../../../src/components/ui/combobox';
vi.mock('lucide-react', () => ({
Check: () => <span data-testid="check-icon" />,
ChevronDown: () => <span data-testid="chevron-down" />,
Loader2: () => <span data-testid="loader2" />,
}));
vi.mock('@/components/ui/popover', () => ({
Popover: ({ children }: any) => <div>{children}</div>,
PopoverTrigger: ({ children }: any) => <>{children}</>,
PopoverContent: ({ children }: any) => (
<div data-testid="popover-content">{children}</div>
),
}));
vi.mock('@/components/ui/command', () => ({
Command: ({ children }: any) => <div>{children}</div>,
CommandInput: ({ placeholder, value, onValueChange }: any) => (
<input
data-testid="command-input"
placeholder={placeholder}
value={value}
onChange={(e) => onValueChange(e.target.value)}
/>
),
CommandList: ({ children }: any) => <div>{children}</div>,
CommandEmpty: ({ children }: any) => (
<div data-testid="command-empty">{children}</div>
),
CommandGroup: ({ children }: any) => <div>{children}</div>,
CommandItem: ({ children, onSelect }: any) => (
<div data-testid="command-item" onClick={onSelect}>
{children}
</div>
),
}));
const OPTIONS = [
{ value: 'gpt-4o', label: 'GPT-4o' },
{ value: 'claude-3-opus', label: 'Claude 3 Opus' },
];
// Rendering-level tests for the Combobox. Popover/Command/lucide are
// mocked above as inert pass-through elements, so these tests assert on
// what the component chooses to render, not on Radix behavior.
describe('Combobox', () => {
  const onValueChange = vi.fn();
  beforeEach(() => {
    vi.clearAllMocks();
  });
  afterEach(() => {
    vi.clearAllMocks();
  });
  it('shows placeholder when no value', () => {
    render(
      <Combobox
        options={OPTIONS}
        value=""
        onValueChange={onValueChange}
        placeholder="Pick a model"
      />
    );
    expect(screen.getByText('Pick a model')).toBeInTheDocument();
  });
  it('shows the raw value when it does not match any option', () => {
    // Custom (free-typed) values are displayed as-is in the trigger.
    render(
      <Combobox
        options={OPTIONS}
        value="custom-model"
        onValueChange={onValueChange}
      />
    );
    expect(screen.getByText('custom-model')).toBeInTheDocument();
  });
  it('shows the option label when value matches', () => {
    render(
      <Combobox
        options={OPTIONS}
        value="gpt-4o"
        onValueChange={onValueChange}
      />
    );
    // Label appears in both the trigger and the option list — check the trigger
    const trigger = screen.getByRole('combobox');
    expect(trigger).toHaveTextContent('GPT-4o');
  });
  it('shows spinner when loading=true', () => {
    render(
      <Combobox
        options={OPTIONS}
        value=""
        onValueChange={onValueChange}
        loading
      />
    );
    // Loading replaces the chevron with the spinner, never shows both.
    expect(screen.getByTestId('loader2')).toBeInTheDocument();
    expect(screen.queryByTestId('chevron-down')).not.toBeInTheDocument();
  });
  it('shows chevron when loading=false', () => {
    render(
      <Combobox
        options={OPTIONS}
        value=""
        onValueChange={onValueChange}
        loading={false}
      />
    );
    expect(screen.getByTestId('chevron-down')).toBeInTheDocument();
    expect(screen.queryByTestId('loader2')).not.toBeInTheDocument();
  });
  it('shows emptyText in CommandEmpty', () => {
    render(
      <Combobox
        options={[]}
        value=""
        onValueChange={onValueChange}
        emptyText="No results"
      />
    );
    expect(screen.getByTestId('command-empty')).toHaveTextContent('No results');
  });
  it('shows note text when provided', () => {
    render(
      <Combobox
        options={OPTIONS}
        value=""
        onValueChange={onValueChange}
        note="Invalid model"
      />
    );
    expect(screen.getByText('Invalid model')).toBeInTheDocument();
  });
  it('does not render note element when note is not provided', () => {
    const { container } = render(
      <Combobox options={OPTIONS} value="" onValueChange={onValueChange} />
    );
    expect(container.querySelector('[class*="mt-1"]')).not.toBeInTheDocument();
  });
  it('renders all options', () => {
    render(
      <Combobox options={OPTIONS} value="" onValueChange={onValueChange} />
    );
    const items = screen.getAllByTestId('command-item');
    expect(items).toHaveLength(OPTIONS.length);
  });
  it('calls onValueChange when an option is selected', async () => {
    render(
      <Combobox options={OPTIONS} value="" onValueChange={onValueChange} />
    );
    const items = screen.getAllByTestId('command-item');
    await userEvent.click(items[0]);
    expect(onValueChange).toHaveBeenCalledWith('gpt-4o');
  });
  it('renders title when provided', () => {
    render(
      <Combobox
        options={OPTIONS}
        value=""
        onValueChange={onValueChange}
        title="Model Type"
      />
    );
    expect(screen.getByText('Model Type')).toBeInTheDocument();
  });
});

View file

@ -1,187 +0,0 @@
// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
import { render, waitFor } from '@testing-library/react';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import { ModelTypeCombobox } from '../../../src/components/ModelTypeCombobox';
// Spy standing in for the HTTP layer; individual tests program its
// resolved/rejected value before rendering.
const mockFetchPost = vi.fn();
// vi.mock calls are hoisted by vitest, so mockFetchPost is referenced lazily
// through the arrow function rather than captured at factory-definition time.
vi.mock('@/api/http', () => ({
  fetchPost: (...args: any[]) => mockFetchPost(...args),
}));
// i18n stub: translation returns the key unchanged so assertions can match
// on raw translation keys.
vi.mock('react-i18next', () => ({
  useTranslation: () => ({
    t: (key: string) => key,
  }),
}));
// Capture props passed to Combobox so we can assert on them
let lastComboboxProps: any = {};
// Replace the real Combobox with a minimal probe that records its props and
// renders a few of them into data-testid'd divs for text-content assertions.
vi.mock('@/components/ui/combobox', () => ({
  Combobox: (props: any) => {
    lastComboboxProps = props;
    return (
      <div data-testid="combobox">
        <div data-testid="options-count">{props.options.length}</div>
        <div data-testid="loading">{String(props.loading)}</div>
        <div data-testid="value">{props.value}</div>
        {props.note && <div data-testid="note">{props.note}</div>}
        {props.state && <div data-testid="state">{props.state}</div>}
      </div>
    );
  },
}));
// Canonical successful /model/types response used across the suite.
const SUCCESS_RESPONSE = {
  model_types: ['gpt-4o', 'gpt-4-turbo', 'gpt-3.5-turbo'],
};
describe('ModelTypeCombobox', () => {
  const onValueChange = vi.fn();

  beforeEach(() => {
    vi.clearAllMocks();
    lastComboboxProps = {};
    // The component caches fetched model types at module level. Instead of
    // vi.resetModules(), each test below targets a distinct platform string
    // so cached entries from one test never leak into another.
  });

  afterEach(() => {
    vi.clearAllMocks();
  });

  it('calls fetchPost with the platform on mount', async () => {
    mockFetchPost.mockResolvedValue(SUCCESS_RESPONSE);
    render(
      <ModelTypeCombobox platform="openai" value="" onValueChange={onValueChange} />
    );
    await waitFor(() =>
      expect(mockFetchPost).toHaveBeenCalledWith('/model/types', {
        platform: 'openai',
      })
    );
  });

  it('populates options after successful fetch', async () => {
    mockFetchPost.mockResolvedValue(SUCCESS_RESPONSE);
    const view = render(
      <ModelTypeCombobox platform="anthropic" value="" onValueChange={onValueChange} />
    );
    // SUCCESS_RESPONSE carries three model types.
    await waitFor(() =>
      expect(view.getByTestId('options-count')).toHaveTextContent('3')
    );
  });

  it('shows loading=true while fetching', async () => {
    // Hold the fetch open manually so the loading state is observable.
    let settle: (v: any) => void;
    mockFetchPost.mockReturnValue(new Promise((r) => (settle = r)));
    const { getByTestId } = render(
      <ModelTypeCombobox platform="gemini" value="" onValueChange={onValueChange} />
    );
    expect(getByTestId('loading')).toHaveTextContent('true');
    settle!(SUCCESS_RESPONSE);
    await waitFor(() =>
      expect(getByTestId('loading')).toHaveTextContent('false')
    );
  });

  it('does not fetch when platform is empty', () => {
    render(
      <ModelTypeCombobox platform="" value="" onValueChange={onValueChange} />
    );
    expect(mockFetchPost).not.toHaveBeenCalled();
  });

  it('shows empty options and loading=false on fetch error', async () => {
    mockFetchPost.mockRejectedValue(new Error('network error'));
    const { getByTestId } = render(
      <ModelTypeCombobox platform="deepseek" value="" onValueChange={onValueChange} />
    );
    // A failed fetch must settle the spinner and leave the list empty.
    await waitFor(() => {
      expect(getByTestId('loading')).toHaveTextContent('false');
      expect(getByTestId('options-count')).toHaveTextContent('0');
    });
  });

  it('passes error prop as note and state=error to Combobox', () => {
    mockFetchPost.mockResolvedValue(SUCCESS_RESPONSE);
    const { getByTestId } = render(
      <ModelTypeCombobox
        platform="qwen"
        value=""
        onValueChange={onValueChange}
        error="Model type is required"
      />
    );
    expect(getByTestId('note')).toHaveTextContent('Model type is required');
    expect(getByTestId('state')).toHaveTextContent('error');
  });

  it('passes value through to Combobox', () => {
    mockFetchPost.mockResolvedValue(SUCCESS_RESPONSE);
    const { getByTestId } = render(
      <ModelTypeCombobox
        platform="azure"
        value="gpt-4o"
        onValueChange={onValueChange}
      />
    );
    expect(getByTestId('value')).toHaveTextContent('gpt-4o');
  });

  it('sets allowCustomValue=true on Combobox', () => {
    mockFetchPost.mockResolvedValue(SUCCESS_RESPONSE);
    render(
      <ModelTypeCombobox platform="moonshot" value="" onValueChange={onValueChange} />
    );
    // Asserted via the prop-capturing mock rather than rendered output.
    expect(lastComboboxProps.allowCustomValue).toBe(true);
  });
});