"use client"; import React, { useState } from 'react'; import { motion } from 'framer-motion'; import { Button } from '@/components/ui/button'; import { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/card'; import { Input } from '@/components/ui/input'; import { Label } from '@/components/ui/label'; import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select'; import { Badge } from '@/components/ui/badge'; import { Plus, Trash2, Bot, AlertCircle } from 'lucide-react'; import { useLLMConfigs, CreateLLMConfig } from '@/hooks/use-llm-configs'; import { toast } from 'sonner'; import { Alert, AlertDescription } from '@/components/ui/alert'; const LLM_PROVIDERS = [ { value: 'OPENAI', label: 'OpenAI', example: 'gpt-4o, gpt-4, gpt-3.5-turbo' }, { value: 'ANTHROPIC', label: 'Anthropic', example: 'claude-3-5-sonnet-20241022, claude-3-opus-20240229' }, { value: 'GROQ', label: 'Groq', example: 'llama3-70b-8192, mixtral-8x7b-32768' }, { value: 'COHERE', label: 'Cohere', example: 'command-r-plus, command-r' }, { value: 'HUGGINGFACE', label: 'HuggingFace', example: 'microsoft/DialoGPT-medium' }, { value: 'AZURE_OPENAI', label: 'Azure OpenAI', example: 'gpt-4, gpt-35-turbo' }, { value: 'GOOGLE', label: 'Google', example: 'gemini-pro, gemini-pro-vision' }, { value: 'AWS_BEDROCK', label: 'AWS Bedrock', example: 'anthropic.claude-v2' }, { value: 'OLLAMA', label: 'Ollama', example: 'llama2, codellama' }, { value: 'MISTRAL', label: 'Mistral', example: 'mistral-large-latest, mistral-medium' }, { value: 'TOGETHER_AI', label: 'Together AI', example: 'togethercomputer/llama-2-70b-chat' }, { value: 'REPLICATE', label: 'Replicate', example: 'meta/llama-2-70b-chat' }, { value: 'CUSTOM', label: 'Custom Provider', example: 'your-custom-model' }, ]; export function AddProviderStep() { const { llmConfigs, createLLMConfig, deleteLLMConfig } = useLLMConfigs(); const [isAddingNew, setIsAddingNew] = useState(false); const [formData, setFormData] = useState({ name: '', provider: '', custom_provider: '', model_name: '', api_key: '', api_base: '', litellm_params: {} }); const [isSubmitting, setIsSubmitting] = useState(false); const handleInputChange = (field: keyof CreateLLMConfig, value: string) => { setFormData(prev => ({ ...prev, [field]: value })); }; const handleSubmit = async (e: React.FormEvent) => { e.preventDefault(); if (!formData.name || !formData.provider || !formData.model_name || !formData.api_key) { toast.error('Please fill in all required fields'); return; } setIsSubmitting(true); const result = await createLLMConfig(formData); setIsSubmitting(false); if (result) { setFormData({ name: '', provider: '', custom_provider: '', model_name: '', api_key: '', api_base: '', litellm_params: {} }); setIsAddingNew(false); } }; const selectedProvider = LLM_PROVIDERS.find(p => p.value === formData.provider); return (
      {/* Info Alert */}
      <Alert>
        <AlertCircle className="h-4 w-4" />
        <AlertDescription>
          Add at least one LLM provider to continue. You can configure multiple providers and
          choose specific roles for each one in the next step.
        </AlertDescription>
      </Alert>

      {/* Existing Configurations */}
      {llmConfigs.length > 0 && (
        <Card>
          <CardHeader>
            <CardTitle>Your LLM Configurations</CardTitle>
          </CardHeader>
          <CardContent className="space-y-3">
            {llmConfigs.map((config) => (
              <div key={config.id} className="flex items-center justify-between rounded-lg border p-3">
                <div>
                  <span className="font-medium">{config.name}</span>{' '}
                  <Badge variant="secondary">{config.provider}</Badge>
                  <p className="text-sm text-muted-foreground">
                    Model: {config.model_name} {config.api_base && ` • Base: ${config.api_base}`}
                  </p>
                </div>
                <Button variant="ghost" size="icon" onClick={() => deleteLLMConfig(config.id)}>
                  <Trash2 className="h-4 w-4" />
                </Button>
              </div>
            ))}
          </CardContent>
        </Card>
      )}

      {/* Add New Provider */}
      {!isAddingNew ? (
        <Card className="cursor-pointer" onClick={() => setIsAddingNew(true)}>
          <CardContent className="flex flex-col items-center gap-2 p-6 text-center">
            <Bot className="h-8 w-8 text-muted-foreground" />
            <CardTitle>Add LLM Provider</CardTitle>
            <CardDescription>Configure your first model provider to get started</CardDescription>
            <Button variant="outline" size="icon">
              <Plus className="h-4 w-4" />
            </Button>
          </CardContent>
        </Card>
      ) : (
        <motion.div initial={{ opacity: 0, y: 10 }} animate={{ opacity: 1, y: 0 }}>
          <Card>
            <CardHeader>
              <CardTitle>Add New LLM Provider</CardTitle>
              <CardDescription>Configure a new language model provider for your AI assistant</CardDescription>
            </CardHeader>
            <CardContent>
              <form onSubmit={handleSubmit} className="space-y-4">
                <Label htmlFor="name">Name *</Label>
                <Input id="name" value={formData.name} onChange={(e) => handleInputChange('name', e.target.value)} required />

                <Label>Provider *</Label>
                <Select value={formData.provider} onValueChange={(value) => handleInputChange('provider', value)}>
                  <SelectTrigger>
                    <SelectValue placeholder="Select a provider" />
                  </SelectTrigger>
                  <SelectContent>
                    {LLM_PROVIDERS.map((p) => (
                      <SelectItem key={p.value} value={p.value}>{p.label}</SelectItem>
                    ))}
                  </SelectContent>
                </Select>

                {formData.provider === 'CUSTOM' && (
                  <>
                    <Label htmlFor="custom_provider">Custom Provider *</Label>
                    <Input id="custom_provider" value={formData.custom_provider} onChange={(e) => handleInputChange('custom_provider', e.target.value)} required />
                  </>
                )}

                <Label htmlFor="model_name">Model Name *</Label>
                <Input id="model_name" value={formData.model_name} onChange={(e) => handleInputChange('model_name', e.target.value)} required />
                {selectedProvider && (
                  <p className="text-sm text-muted-foreground">Examples: {selectedProvider.example}</p>
                )}

                <Label htmlFor="api_key">API Key *</Label>
                <Input id="api_key" type="password" value={formData.api_key} onChange={(e) => handleInputChange('api_key', e.target.value)} required />

                <Label htmlFor="api_base">API Base URL (optional)</Label>
                <Input id="api_base" value={formData.api_base} onChange={(e) => handleInputChange('api_base', e.target.value)} />

                <div className="flex gap-2">
                  <Button type="submit" disabled={isSubmitting}>
                    {isSubmitting ? 'Adding...' : 'Add Provider'}
                  </Button>
                  <Button type="button" variant="outline" onClick={() => setIsAddingNew(false)}>
                    Cancel
                  </Button>
                </div>
              </form>
            </CardContent>
          </Card>
        </motion.div>
      )}
    </div>
  );
}
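
// For orientation: a minimal sketch of the shapes this component assumes from
// '@/hooks/use-llm-configs', inferred from how the hook's return values are used
// above. This is not the actual contract exported by the hook; field names and
// signatures (especially `id` and the return type of createLLMConfig) may differ.
//
//   interface LLMConfig {
//     id: number;            // assumed: used as the list key and passed to deleteLLMConfig
//     name: string;
//     provider: string;
//     model_name: string;
//     api_base?: string;
//   }
//
//   interface CreateLLMConfig {
//     name: string;
//     provider: string;
//     custom_provider?: string;
//     model_name: string;
//     api_key: string;
//     api_base?: string;
//     litellm_params?: Record<string, unknown>;
//   }
//
//   function useLLMConfigs(): {
//     llmConfigs: LLMConfig[];
//     createLLMConfig: (config: CreateLLMConfig) => Promise<LLMConfig | null>; // truthy result => success
//     deleteLLMConfig: (id: number) => Promise<void>;
//   };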