'use client' import { useEffect, useRef } from 'react' import { useForm } from 'react-hook-form' import { zodResolver } from '@hookform/resolvers/zod' import { z } from 'zod' import { toast } from 'sonner' import { Cog, Loader2, Zap, AlertCircle, RefreshCw, SlidersHorizontal, Info } from 'lucide-react' import { trpc } from '@/lib/trpc/client' import { Button } from '@/components/ui/button' import { Input } from '@/components/ui/input' import { Switch } from '@/components/ui/switch' import { Alert, AlertDescription } from '@/components/ui/alert' import { Skeleton } from '@/components/ui/skeleton' import { Form, FormControl, FormDescription, FormField, FormItem, FormLabel, FormMessage, } from '@/components/ui/form' import { Select, SelectContent, SelectGroup, SelectItem, SelectLabel, SelectTrigger, SelectValue, } from '@/components/ui/select' const formSchema = z.object({ ai_enabled: z.boolean(), ai_provider: z.string(), ai_model: z.string(), ai_send_descriptions: z.boolean(), openai_api_key: z.string().optional(), anthropic_api_key: z.string().optional(), openai_base_url: z.string().optional(), }) type FormValues = z.infer interface AISettingsFormProps { settings: { ai_enabled?: string ai_provider?: string ai_model?: string ai_send_descriptions?: string openai_api_key?: string anthropic_api_key?: string openai_base_url?: string } } export function AISettingsForm({ settings }: AISettingsFormProps) { const utils = trpc.useUtils() const form = useForm({ resolver: zodResolver(formSchema), defaultValues: { ai_enabled: settings.ai_enabled === 'true', ai_provider: settings.ai_provider || 'openai', ai_model: settings.ai_model || 'gpt-4o', ai_send_descriptions: settings.ai_send_descriptions === 'true', openai_api_key: '', anthropic_api_key: '', openai_base_url: settings.openai_base_url || '', }, }) const watchProvider = form.watch('ai_provider') const isLiteLLM = watchProvider === 'litellm' const isAnthropic = watchProvider === 'anthropic' const prevProviderRef = 
useRef(settings.ai_provider || 'openai') // Auto-reset model when provider changes useEffect(() => { if (watchProvider !== prevProviderRef.current) { prevProviderRef.current = watchProvider if (watchProvider === 'anthropic') { form.setValue('ai_model', 'claude-sonnet-4-5-20250514') } else if (watchProvider === 'openai') { form.setValue('ai_model', 'gpt-4o') } else if (watchProvider === 'litellm') { form.setValue('ai_model', '') } } }, [watchProvider, form]) // Fetch available models from OpenAI API (skip for LiteLLM — no models.list support) const { data: modelsData, isLoading: modelsLoading, error: modelsError, refetch: refetchModels, } = trpc.settings.listAIModels.useQuery(undefined, { staleTime: 5 * 60 * 1000, // Cache for 5 minutes retry: false, enabled: !isLiteLLM, }) const updateSettings = trpc.settings.updateMultiple.useMutation({ onSuccess: () => { toast.success('AI settings saved successfully') utils.settings.getByCategory.invalidate({ category: 'AI' }) }, onError: (error) => { toast.error(`Failed to save settings: ${error.message}`) }, }) const testConnection = trpc.settings.testAIConnection.useMutation({ onSuccess: (result) => { if (result.success) { toast.success(`AI connection successful! 
Model: ${result.model || result.modelTested}`) // Refetch models after successful API key save/test refetchModels() } else { toast.error(`Connection failed: ${result.error}`) } }, onError: (error) => { toast.error(`Test failed: ${error.message}`) }, }) const onSubmit = (data: FormValues) => { const settingsToUpdate = [ { key: 'ai_enabled', value: String(data.ai_enabled) }, { key: 'ai_provider', value: data.ai_provider }, { key: 'ai_model', value: data.ai_model }, { key: 'ai_send_descriptions', value: String(data.ai_send_descriptions) }, ] // Only update API key if a new value was entered if (data.openai_api_key && data.openai_api_key.trim()) { settingsToUpdate.push({ key: 'openai_api_key', value: data.openai_api_key }) } if (data.anthropic_api_key && data.anthropic_api_key.trim()) { settingsToUpdate.push({ key: 'anthropic_api_key', value: data.anthropic_api_key }) } // Save base URL (empty string clears it) settingsToUpdate.push({ key: 'openai_base_url', value: data.openai_base_url?.trim() || '' }) updateSettings.mutate({ settings: settingsToUpdate }) } // Group models by category for better display type ModelInfo = { id: string; name: string; isReasoning: boolean; category: string } const groupedModels = modelsData?.models?.reduce>( (acc, model) => { const category = model.category if (!acc[category]) acc[category] = [] acc[category].push(model) return acc }, {} ) const categoryLabels: Record = { 'claude-4.5': 'Claude 4.5 Series (Latest)', 'claude-4': 'Claude 4 Series', 'claude-3.5': 'Claude 3.5 Series', 'gpt-5+': 'GPT-5+ Series (Latest)', 'gpt-4o': 'GPT-4o Series', 'gpt-4': 'GPT-4 Series', 'gpt-3.5': 'GPT-3.5 Series', reasoning: 'Reasoning Models (o1, o3, o4)', other: 'Other Models', } const categoryOrder = ['claude-4.5', 'claude-4', 'claude-3.5', 'gpt-5+', 'gpt-4o', 'gpt-4', 'gpt-3.5', 'reasoning', 'other'] return (
// NOTE(review): the JSX below appears garbled by extraction — the element
// tags (FormField/FormItem/Select/Alert/etc.) are missing, leaving only
// text content, expressions, and render-prop fragments like `)} />`.
// Restore this markup from version control rather than hand-reconstructing
// it; it is left byte-identical here.
(
Enable AI Features Use AI to suggest optimal jury-project assignments
)} /> ( AI Provider {field.value === 'litellm' ? 'Route AI calls through a LiteLLM proxy connected to your ChatGPT Plus/Pro subscription' : field.value === 'anthropic' ? 'Direct Anthropic API access using Claude models' : 'Direct OpenAI API access using your API key'} )} /> {isLiteLLM && ( LiteLLM Proxy Mode — AI calls will be routed through your LiteLLM proxy using your ChatGPT subscription. Token limits are automatically stripped (not supported by ChatGPT backend). Make sure your LiteLLM proxy is running and accessible. )} {isAnthropic && ( Anthropic Claude Mode — AI calls use the Anthropic Messages API. Claude Opus models include extended thinking for deeper analysis. JSON responses are validated with automatic retry. )} {isAnthropic ? ( ( Anthropic API Key Your Anthropic API key. Leave blank to keep the existing key. )} /> ) : ( ( {isLiteLLM ? 'API Key (Optional)' : 'OpenAI API Key'} {isLiteLLM ? 'LiteLLM proxy usually does not require an API key. Leave blank to use default.' : 'Your OpenAI API key. Leave blank to keep the existing key.'} )} /> )} ( {isLiteLLM ? 'LiteLLM Proxy URL' : isAnthropic ? 'Anthropic Base URL (Optional)' : 'API Base URL (Optional)'} {isLiteLLM ? ( <> URL of your LiteLLM proxy. Typically{' '} http://localhost:4000{' '} or your server address. ) : isAnthropic ? ( <> Custom base URL for Anthropic API proxy or gateway. Leave blank for default Anthropic API. ) : ( <> Custom base URL for OpenAI-compatible providers. Leave blank for OpenAI. Use https://openrouter.ai/api/v1 for OpenRouter. )} )} /> (
Model {!isLiteLLM && modelsData?.success && !modelsData?.manualEntry && ( )}
{isAnthropic ? ( // Anthropic: fetch models from server (hardcoded list) modelsLoading ? ( ) : modelsData?.success && modelsData.models && modelsData.models.length > 0 ? ( ) : ( field.onChange(e.target.value)} placeholder="claude-sonnet-4-5-20250514" /> ) ) : isLiteLLM || modelsData?.manualEntry ? ( field.onChange(e.target.value)} placeholder="chatgpt/gpt-5.2" /> ) : modelsLoading ? ( ) : modelsError || !modelsData?.success ? (
{modelsError?.message || modelsData?.error || 'Failed to load models. Save your API key first and test the connection.'} field.onChange(e.target.value)} placeholder="Enter model ID manually (e.g., gpt-4o)" />
) : ( )} {isAnthropic ? ( form.watch('ai_model')?.includes('opus') ? ( Opus model — includes extended thinking for deeper analysis ) : ( 'Anthropic Claude model to use for AI features' ) ) : isLiteLLM ? ( <> Enter the model ID with the{' '} chatgpt/ prefix. Examples:{' '} chatgpt/gpt-5.2,{' '} chatgpt/gpt-5.2-codex ) : form.watch('ai_model')?.startsWith('o') ? ( Reasoning model - optimized for complex analysis tasks ) : ( 'OpenAI model to use for AI features' )}
)} /> (
Send Project Descriptions Include anonymized project descriptions in AI requests for better matching
)} />
) }