// ============================================================================
// NOTE(review): this file appears to be extraction-damaged, not author-written
// this way. Two kinds of mangling are visible and MUST be repaired from version
// control rather than by hand-editing this text:
//   1. All newlines were collapsed (a `//` line comment inside the first giant
//      line is followed by more code, which only makes sense if line breaks
//      were lost).
//   2. Every angle-bracketed span was stripped: all JSX tags are missing from
//      the render tree below, and TypeScript generic arguments are gone —
//      `type FormValues = z.infer` is presumably `z.infer<typeof formSchema>`,
//      `reduce>` is presumably `reduce<Record<string, ModelInfo[]>>`, and
//      `categoryLabels: Record` is presumably `Record<string, string>` —
//      TODO confirm against the original file.
// The code below is preserved byte-for-byte; comments describe what the
// surviving tokens establish.
// ----------------------------------------------------------------------------
// Purpose: admin settings form for AI features. Lets the user toggle AI,
// choose a provider ('openai' direct vs 'litellm' proxy), pick a model,
// optionally supply an API key / base URL, and toggle sending anonymized
// project descriptions. State is react-hook-form validated by the zod
// `formSchema`; persistence goes through tRPC (`settings.updateMultiple`).
//
// Props: `settings` — previously saved values as optional strings (booleans
// arrive as the strings 'true'/'false' and are compared with === 'true').
// The API key default is deliberately '' (never echoed back into the form).
// ============================================================================
'use client' import { useForm } from 'react-hook-form' import { zodResolver } from '@hookform/resolvers/zod' import { z } from 'zod' import { toast } from 'sonner' import { Cog, Loader2, Zap, AlertCircle, RefreshCw, SlidersHorizontal, Info } from 'lucide-react' import { trpc } from '@/lib/trpc/client' import { Button } from '@/components/ui/button' import { Input } from '@/components/ui/input' import { Switch } from '@/components/ui/switch' import { Alert, AlertDescription } from '@/components/ui/alert' import { Skeleton } from '@/components/ui/skeleton' import { Form, FormControl, FormDescription, FormField, FormItem, FormLabel, FormMessage, } from '@/components/ui/form' import { Select, SelectContent, SelectGroup, SelectItem, SelectLabel, SelectTrigger, SelectValue, } from '@/components/ui/select' const formSchema = z.object({ ai_enabled: z.boolean(), ai_provider: z.string(), ai_model: z.string(), ai_send_descriptions: z.boolean(), openai_api_key: z.string().optional(), openai_base_url: z.string().optional(), }) type FormValues = z.infer interface AISettingsFormProps { settings: { ai_enabled?: string ai_provider?: string ai_model?: string ai_send_descriptions?: string openai_api_key?: string openai_base_url?: string } } export function AISettingsForm({ settings }: AISettingsFormProps) { const utils = trpc.useUtils() const form = useForm({ resolver: zodResolver(formSchema), defaultValues: { ai_enabled: settings.ai_enabled === 'true', ai_provider: settings.ai_provider || 'openai', ai_model: settings.ai_model || 'gpt-4o', ai_send_descriptions: settings.ai_send_descriptions === 'true', openai_api_key: '', openai_base_url: settings.openai_base_url || '', }, }) const watchProvider = form.watch('ai_provider') const isLiteLLM = watchProvider === 'litellm' // Fetch available models from OpenAI API (skip for LiteLLM — no models.list support) const { data: modelsData, isLoading: modelsLoading, error: modelsError, refetch: refetchModels, } =
// Server state & mutations:
// - `listAIModels` query: 5-minute staleTime, no retry, disabled while the
//   LiteLLM provider is selected (proxy has no models.list endpoint).
// - `updateSettings`: toasts on success and invalidates the 'AI' settings
//   category so other consumers refetch.
// - `testConnection`: on a successful probe, refetches the model list (a newly
//   saved key may unlock it). Note `result.model || result.modelTested` —
//   presumably two server response shapes are tolerated; verify against the
//   tRPC router.
// - `onSubmit`: builds a key/value batch. The API key is only included when a
//   non-blank value was typed, so submitting the form never wipes the stored
//   key; the base URL is always written (trimmed, empty string clears it).
// - `groupedModels`: buckets fetched models by `category` for grouped display
//   (the reduce's generic argument was stripped — see header note).
trpc.settings.listAIModels.useQuery(undefined, { staleTime: 5 * 60 * 1000, // Cache for 5 minutes retry: false, enabled: !isLiteLLM, }) const updateSettings = trpc.settings.updateMultiple.useMutation({ onSuccess: () => { toast.success('AI settings saved successfully') utils.settings.getByCategory.invalidate({ category: 'AI' }) }, onError: (error) => { toast.error(`Failed to save settings: ${error.message}`) }, }) const testConnection = trpc.settings.testAIConnection.useMutation({ onSuccess: (result) => { if (result.success) { toast.success(`AI connection successful! Model: ${result.model || result.modelTested}`) // Refetch models after successful API key save/test refetchModels() } else { toast.error(`Connection failed: ${result.error}`) } }, onError: (error) => { toast.error(`Test failed: ${error.message}`) }, }) const onSubmit = (data: FormValues) => { const settingsToUpdate = [ { key: 'ai_enabled', value: String(data.ai_enabled) }, { key: 'ai_provider', value: data.ai_provider }, { key: 'ai_model', value: data.ai_model }, { key: 'ai_send_descriptions', value: String(data.ai_send_descriptions) }, ] // Only update API key if a new value was entered if (data.openai_api_key && data.openai_api_key.trim()) { settingsToUpdate.push({ key: 'openai_api_key', value: data.openai_api_key }) } // Save base URL (empty string clears it) settingsToUpdate.push({ key: 'openai_base_url', value: data.openai_base_url?.trim() || '' }) updateSettings.mutate({ settings: settingsToUpdate }) } // Group models by category for better display type ModelInfo = { id: string; name: string; isReasoning: boolean; category: string } const groupedModels = modelsData?.models?.reduce>( (acc, model) => { const category = model.category if (!acc[category]) acc[category] = [] acc[category].push(model) return acc }, {} ) const categoryLabels: Record = { 'gpt-5+': 'GPT-5+ Series (Latest)', 'gpt-4o': 'GPT-4o Series', 'gpt-4': 'GPT-4 Series', 'gpt-3.5': 'GPT-3.5 Series', reasoning: 'Reasoning Models (o1,
// `categoryOrder` fixes the display order of the model groups in the picker.
// Everything from `return (` onward is the render tree with ALL JSX tags
// stripped by extraction — only text children, expression fragments, and a few
// attributes survive. Comments below describe the apparent sections; none of
// this markup is recoverable from this text alone.
o3, o4)', other: 'Other Models', } const categoryOrder = ['gpt-5+', 'gpt-4o', 'gpt-4', 'gpt-3.5', 'reasoning', 'other'] return (
// Section: "Enable AI Features" toggle — presumably a FormField with a Switch
// (render-prop `(...)` opens here; closing `)} />` is visible below).
(
Enable AI Features Use AI to suggest optimal jury-project assignments
// Section: provider select (openai vs litellm) with a per-provider description,
// a LiteLLM info Alert (token limits stripped; proxy must be reachable), then
// the API key field (optional for LiteLLM; blank keeps the existing key) and
// the base URL field (LiteLLM proxy URL, or OpenAI-compatible base such as
// OpenRouter's https://openrouter.ai/api/v1).
)} /> ( AI Provider {field.value === 'litellm' ? 'Route AI calls through a LiteLLM proxy connected to your ChatGPT Plus/Pro subscription' : 'Direct OpenAI API access using your API key'} )} /> {isLiteLLM && ( LiteLLM Proxy Mode — AI calls will be routed through your LiteLLM proxy using your ChatGPT subscription. Token limits are automatically stripped (not supported by ChatGPT backend). Make sure your LiteLLM proxy is running and accessible. )} ( {isLiteLLM ? 'API Key (Optional)' : 'API Key'} {isLiteLLM ? 'LiteLLM proxy usually does not require an API key. Leave blank to use default.' : 'Your OpenAI API key. Leave blank to keep the existing key.'} )} /> ( {isLiteLLM ? 'LiteLLM Proxy URL' : 'API Base URL (Optional)'} {isLiteLLM ? ( <> URL of your LiteLLM proxy. Typically{' '} http://localhost:4000{' '} or your server address. ) : ( <> Custom base URL for OpenAI-compatible providers. Leave blank for OpenAI. Use https://openrouter.ai/api/v1 for OpenRouter. )} )} /> (
// Section: model picker label row — a refresh control is shown only for direct
// OpenAI when the model list loaded and manual entry is not forced.
Model {!isLiteLLM && modelsData?.success && !modelsData?.manualEntry && ( )}
// Model input branches: manual text entry (LiteLLM / manualEntry), a loading
// skeleton, an error fallback (alert text + manual entry), or the grouped
// Select built from `groupedModels` in `categoryOrder`.
{isLiteLLM || modelsData?.manualEntry ? ( field.onChange(e.target.value)} placeholder="chatgpt/gpt-5.2" /> ) : modelsLoading ? ( ) : modelsError || !modelsData?.success ? (
{modelsError?.message || modelsData?.error || 'Failed to load models. Save your API key first and test the connection.'} field.onChange(e.target.value)} placeholder="Enter model ID manually (e.g., gpt-4o)" />
// Model field description: LiteLLM gets chatgpt/-prefix examples; models whose
// id starts with 'o' are described as reasoning models; otherwise generic text.
) : ( )} {isLiteLLM ? ( <> Enter the model ID with the{' '} chatgpt/ prefix. Examples:{' '} chatgpt/gpt-5.2,{' '} chatgpt/gpt-5.2-codex ) : form.watch('ai_model')?.startsWith('o') ? ( Reasoning model - optimized for complex analysis tasks ) : ( 'OpenAI model to use for AI features' )}
// Section: "Send Project Descriptions" toggle — presumably another
// FormField + Switch, mirroring the enable-AI toggle above.
)} /> (
Send Project Descriptions Include anonymized project descriptions in AI requests for better matching
)} />
// End of form / component. Submit & test-connection buttons presumably lived
// in the stripped markup above — confirm against the original file.
) }