"use client"; import React, { useState, useEffect } from 'react'; import { motion, AnimatePresence } from 'framer-motion'; import { Button } from '@/components/ui/button'; import { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/card'; import { Input } from '@/components/ui/input'; import { Label } from '@/components/ui/label'; import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select'; import { Badge } from '@/components/ui/badge'; import { Dialog, DialogContent, DialogDescription, DialogHeader, DialogTitle } from '@/components/ui/dialog'; import { Plus, Trash2, Bot, AlertCircle, Edit3, Settings2, Eye, EyeOff, CheckCircle, Clock, AlertTriangle, RefreshCw, Loader2 } from 'lucide-react'; import { useLLMConfigs, CreateLLMConfig, UpdateLLMConfig, LLMConfig } from '@/hooks/use-llm-configs'; import { toast } from 'sonner'; import { Alert, AlertDescription } from '@/components/ui/alert'; const LLM_PROVIDERS = [ { value: 'OPENAI', label: 'OpenAI', example: 'gpt-4o, gpt-4, gpt-3.5-turbo', description: 'Most popular and versatile AI models' }, { value: 'ANTHROPIC', label: 'Anthropic', example: 'claude-3-5-sonnet-20241022, claude-3-opus-20240229', description: 'Constitutional AI with strong reasoning' }, { value: 'GROQ', label: 'Groq', example: 'llama3-70b-8192, mixtral-8x7b-32768', description: 'Ultra-fast inference speeds' }, { value: 'COHERE', label: 'Cohere', example: 'command-r-plus, command-r', description: 'Enterprise-focused language models' }, { value: 'HUGGINGFACE', label: 'HuggingFace', example: 'microsoft/DialoGPT-medium', description: 'Open source model hub' }, { value: 'AZURE_OPENAI', label: 'Azure OpenAI', example: 'gpt-4, gpt-35-turbo', description: 'Enterprise OpenAI through Azure' }, { value: 'GOOGLE', label: 'Google', example: 'gemini-pro, gemini-pro-vision', description: 'Google\'s Gemini AI models' }, { value: 'AWS_BEDROCK', label: 'AWS Bedrock', example: 'anthropic.claude-v2', description: 'AWS managed AI service' }, { value: 'OLLAMA', label: 'Ollama', example: 'llama2, codellama', description: 'Run models locally' }, { value: 'MISTRAL', label: 'Mistral', example: 'mistral-large-latest, mistral-medium', description: 'European AI excellence' }, { value: 'TOGETHER_AI', label: 'Together AI', example: 'togethercomputer/llama-2-70b-chat', description: 'Decentralized AI platform' }, { value: 'REPLICATE', label: 'Replicate', example: 'meta/llama-2-70b-chat', description: 'Run models via API' }, { value: 'CUSTOM', label: 'Custom Provider', example: 'your-custom-model', description: 'Your own model endpoint' }, ]; export function ModelConfigManager() { const { llmConfigs, loading, error, createLLMConfig, updateLLMConfig, deleteLLMConfig, refreshConfigs } = useLLMConfigs(); const [isAddingNew, setIsAddingNew] = useState(false); const [editingConfig, setEditingConfig] = useState(null); const [showApiKey, setShowApiKey] = useState>({}); const [formData, setFormData] = useState({ name: '', provider: '', custom_provider: '', model_name: '', api_key: '', api_base: '', litellm_params: {} }); const [isSubmitting, setIsSubmitting] = useState(false); // Populate form when editing useEffect(() => { if (editingConfig) { setFormData({ name: editingConfig.name, provider: editingConfig.provider, custom_provider: editingConfig.custom_provider || '', model_name: editingConfig.model_name, api_key: editingConfig.api_key, api_base: editingConfig.api_base || '', litellm_params: editingConfig.litellm_params || {} }); } }, 
[editingConfig]); const handleInputChange = (field: keyof CreateLLMConfig, value: string) => { setFormData(prev => ({ ...prev, [field]: value })); }; const handleSubmit = async (e: React.FormEvent) => { e.preventDefault(); if (!formData.name || !formData.provider || !formData.model_name || !formData.api_key) { toast.error('Please fill in all required fields'); return; } setIsSubmitting(true); let result; if (editingConfig) { // Update existing config result = await updateLLMConfig(editingConfig.id, formData); } else { // Create new config result = await createLLMConfig(formData); } setIsSubmitting(false); if (result) { setFormData({ name: '', provider: '', custom_provider: '', model_name: '', api_key: '', api_base: '', litellm_params: {} }); setIsAddingNew(false); setEditingConfig(null); } }; const handleDelete = async (id: number) => { if (confirm('Are you sure you want to delete this configuration? This action cannot be undone.')) { await deleteLLMConfig(id); } }; const toggleApiKeyVisibility = (configId: number) => { setShowApiKey(prev => ({ ...prev, [configId]: !prev[configId] })); }; const selectedProvider = LLM_PROVIDERS.find(p => p.value === formData.provider); const getProviderInfo = (providerValue: string) => { return LLM_PROVIDERS.find(p => p.value === providerValue); }; const maskApiKey = (apiKey: string) => { if (apiKey.length <= 8) return '*'.repeat(apiKey.length); return apiKey.substring(0, 4) + '*'.repeat(apiKey.length - 8) + apiKey.substring(apiKey.length - 4); }; return (
{/* Header */}

Model Configurations

Manage your LLM provider configurations and API settings.

{/* Error Alert */} {error && ( {error} )} {/* Loading State */} {loading && (
Loading configurations...
)} {/* Stats Overview */} {!loading && !error && (

{llmConfigs.length}

Total Configurations

{new Set(llmConfigs.map(c => c.provider)).size}

Unique Providers

Active

System Status

)} {/* Configuration Management */} {!loading && !error && (

Your Configurations

Manage and configure your LLM providers

{llmConfigs.length === 0 ? (

No Configurations Yet

Get started by adding your first LLM provider configuration to begin using the system.

) : (
{llmConfigs.map((config) => { const providerInfo = getProviderInfo(config.provider); return (
{/* Header */}

{config.name}

{config.provider}

{config.model_name}

{/* Provider Description */} {providerInfo && (

{providerInfo.description}

)} {/* Configuration Details */}
{showApiKey[config.id] ? config.api_key : maskApiKey(config.api_key) }
{config.api_base && (
{config.api_base}
)}
{/* Metadata */}
Created {new Date(config.created_at).toLocaleDateString()}
Active
{/* Actions */}
); })}
)}
)} {/* Add/Edit Configuration Dialog */} { if (!open) { setIsAddingNew(false); setEditingConfig(null); setFormData({ name: '', provider: '', custom_provider: '', model_name: '', api_key: '', api_base: '', litellm_params: {} }); } }}> {editingConfig ? : } {editingConfig ? 'Edit LLM Configuration' : 'Add New LLM Configuration'} {editingConfig ? 'Update your language model provider configuration' : 'Configure a new language model provider for your AI assistant' }
handleInputChange('name', e.target.value)} required />
{formData.provider === 'CUSTOM' && (
handleInputChange('custom_provider', e.target.value)} required />
)}
handleInputChange('model_name', e.target.value)} required /> {selectedProvider && (

Examples: {selectedProvider.example}

)}
handleInputChange('api_key', e.target.value)} required />
handleInputChange('api_base', e.target.value)} />
); }
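
// A sketch of the configuration shape this component assumes, inferred from the
// fields it reads above (id, name, provider, custom_provider, model_name, api_key,
// api_base, litellm_params, created_at). The authoritative `LLMConfig` and
// `CreateLLMConfig` types live in '@/hooks/use-llm-configs' and may differ; this
// is an illustrative reference only, not a definition used by the code.
//
// interface LLMConfig {
//   id: number;
//   name: string;
//   provider: string;
//   custom_provider?: string;
//   model_name: string;
//   api_key: string;
//   api_base?: string;
//   litellm_params?: Record<string, unknown>;
//   created_at: string; // rendered via new Date(config.created_at).toLocaleDateString()
// }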