// MOPC-Portal/src/components/settings/ai-settings-form.tsx
'use client'
import { useForm } from 'react-hook-form'
import { zodResolver } from '@hookform/resolvers/zod'
import { z } from 'zod'
import { toast } from 'sonner'
import { Cog, Loader2, Zap, AlertCircle, RefreshCw, SlidersHorizontal, Info } from 'lucide-react'
import { trpc } from '@/lib/trpc/client'
import { Button } from '@/components/ui/button'
import { Input } from '@/components/ui/input'
import { Switch } from '@/components/ui/switch'
import { Alert, AlertDescription } from '@/components/ui/alert'
import { Skeleton } from '@/components/ui/skeleton'
import {
Form,
FormControl,
FormDescription,
FormField,
FormItem,
FormLabel,
FormMessage,
} from '@/components/ui/form'
import {
Select,
SelectContent,
SelectGroup,
SelectItem,
SelectLabel,
SelectTrigger,
SelectValue,
} from '@/components/ui/select'
// Client-side validation schema for the AI settings form.
// Settings are persisted server-side as plain strings; the boolean flags here
// are converted back to 'true'/'false' strings in onSubmit.
const formSchema = z.object({
  ai_enabled: z.boolean(),
  ai_provider: z.string(),
  ai_model: z.string(),
  ai_send_descriptions: z.boolean(),
  // Blank means "keep the currently stored key" (see onSubmit).
  openai_api_key: z.string().optional(),
  // Optional custom base URL. Blank clears the stored value; a non-blank
  // value must look like an http(s) URL so a typo is caught at save time
  // instead of failing later on the first AI request.
  openai_base_url: z
    .string()
    .trim()
    .refine((v) => v === '' || /^https?:\/\/\S+$/.test(v), {
      message: 'Must be a valid http(s) URL',
    })
    .optional(),
})
type FormValues = z.infer<typeof formSchema>
interface AISettingsFormProps {
  /**
   * Current AI settings as loaded from the server. All values are raw
   * strings as persisted in the settings store: the boolean flags
   * (`ai_enabled`, `ai_send_descriptions`) are the strings 'true'/'false',
   * and any field may be absent when it has never been saved.
   */
  settings: {
    // 'true' when AI features are enabled.
    ai_enabled?: string
    // Provider id: 'openai' or 'litellm'.
    ai_provider?: string
    // Model id, e.g. 'gpt-4o' or 'chatgpt/gpt-5.2'.
    ai_model?: string
    // 'true' when project descriptions are included in AI requests.
    ai_send_descriptions?: string
    // Presence indicates a key is stored; the value is never shown in
    // the form (the input placeholder masks it).
    openai_api_key?: string
    // Custom OpenAI-compatible base URL, or empty/absent for the default.
    openai_base_url?: string
  }
}
export function AISettingsForm({ settings }: AISettingsFormProps) {
const utils = trpc.useUtils()
const form = useForm<FormValues>({
resolver: zodResolver(formSchema),
defaultValues: {
ai_enabled: settings.ai_enabled === 'true',
ai_provider: settings.ai_provider || 'openai',
ai_model: settings.ai_model || 'gpt-4o',
ai_send_descriptions: settings.ai_send_descriptions === 'true',
openai_api_key: '',
openai_base_url: settings.openai_base_url || '',
},
})
const watchProvider = form.watch('ai_provider')
const isLiteLLM = watchProvider === 'litellm'
// Fetch available models from OpenAI API (skip for LiteLLM — no models.list support)
const {
data: modelsData,
isLoading: modelsLoading,
error: modelsError,
refetch: refetchModels,
} = trpc.settings.listAIModels.useQuery(undefined, {
staleTime: 5 * 60 * 1000, // Cache for 5 minutes
retry: false,
enabled: !isLiteLLM,
})
const updateSettings = trpc.settings.updateMultiple.useMutation({
onSuccess: () => {
toast.success('AI settings saved successfully')
utils.settings.getByCategory.invalidate({ category: 'AI' })
},
onError: (error) => {
toast.error(`Failed to save settings: ${error.message}`)
},
})
const testConnection = trpc.settings.testAIConnection.useMutation({
onSuccess: (result) => {
if (result.success) {
toast.success(`AI connection successful! Model: ${result.model || result.modelTested}`)
// Refetch models after successful API key save/test
refetchModels()
} else {
toast.error(`Connection failed: ${result.error}`)
}
},
onError: (error) => {
toast.error(`Test failed: ${error.message}`)
},
})
const onSubmit = (data: FormValues) => {
const settingsToUpdate = [
{ key: 'ai_enabled', value: String(data.ai_enabled) },
{ key: 'ai_provider', value: data.ai_provider },
{ key: 'ai_model', value: data.ai_model },
{ key: 'ai_send_descriptions', value: String(data.ai_send_descriptions) },
]
// Only update API key if a new value was entered
if (data.openai_api_key && data.openai_api_key.trim()) {
settingsToUpdate.push({ key: 'openai_api_key', value: data.openai_api_key })
}
// Save base URL (empty string clears it)
settingsToUpdate.push({ key: 'openai_base_url', value: data.openai_base_url?.trim() || '' })
updateSettings.mutate({ settings: settingsToUpdate })
}
// Group models by category for better display
type ModelInfo = { id: string; name: string; isReasoning: boolean; category: string }
const groupedModels = modelsData?.models?.reduce<Record<string, ModelInfo[]>>(
(acc, model) => {
const category = model.category
if (!acc[category]) acc[category] = []
acc[category].push(model)
return acc
},
{}
)
const categoryLabels: Record<string, string> = {
'gpt-5+': 'GPT-5+ Series (Latest)',
'gpt-4o': 'GPT-4o Series',
'gpt-4': 'GPT-4 Series',
'gpt-3.5': 'GPT-3.5 Series',
reasoning: 'Reasoning Models (o1, o3, o4)',
other: 'Other Models',
}
const categoryOrder = ['gpt-5+', 'gpt-4o', 'gpt-4', 'gpt-3.5', 'reasoning', 'other']
return (
<Form {...form}>
<form onSubmit={form.handleSubmit(onSubmit)} className="space-y-6">
<FormField
control={form.control}
name="ai_enabled"
render={({ field }) => (
<FormItem className="flex items-center justify-between rounded-lg border p-4">
<div className="space-y-0.5">
<FormLabel className="text-base">Enable AI Features</FormLabel>
<FormDescription>
Use AI to suggest optimal jury-project assignments
</FormDescription>
</div>
<FormControl>
<Switch
checked={field.value}
onCheckedChange={field.onChange}
/>
</FormControl>
</FormItem>
)}
/>
<FormField
control={form.control}
name="ai_provider"
render={({ field }) => (
<FormItem>
<FormLabel>AI Provider</FormLabel>
<Select onValueChange={field.onChange} defaultValue={field.value}>
<FormControl>
<SelectTrigger>
<SelectValue placeholder="Select provider" />
</SelectTrigger>
</FormControl>
<SelectContent>
<SelectItem value="openai">OpenAI (API Key)</SelectItem>
<SelectItem value="litellm">LiteLLM Proxy (ChatGPT Subscription)</SelectItem>
</SelectContent>
</Select>
<FormDescription>
{field.value === 'litellm'
? 'Route AI calls through a LiteLLM proxy connected to your ChatGPT Plus/Pro subscription'
: 'Direct OpenAI API access using your API key'}
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
{isLiteLLM && (
<Alert>
<Info className="h-4 w-4" />
<AlertDescription>
<strong>LiteLLM Proxy Mode</strong> AI calls will be routed through your LiteLLM proxy
using your ChatGPT subscription. Token limits are automatically stripped (not supported by ChatGPT backend).
Make sure your LiteLLM proxy is running and accessible.
</AlertDescription>
</Alert>
)}
<FormField
control={form.control}
name="openai_api_key"
render={({ field }) => (
<FormItem>
<FormLabel>{isLiteLLM ? 'API Key (Optional)' : 'API Key'}</FormLabel>
<FormControl>
<Input
type="password"
placeholder={isLiteLLM
? 'Optional — leave blank for default'
: (settings.openai_api_key ? '••••••••' : 'Enter API key')}
{...field}
/>
</FormControl>
<FormDescription>
{isLiteLLM
? 'LiteLLM proxy usually does not require an API key. Leave blank to use default.'
: 'Your OpenAI API key. Leave blank to keep the existing key.'}
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="openai_base_url"
render={({ field }) => (
<FormItem>
<FormLabel>{isLiteLLM ? 'LiteLLM Proxy URL' : 'API Base URL (Optional)'}</FormLabel>
<FormControl>
<Input
placeholder={isLiteLLM ? 'http://localhost:4000' : 'https://api.openai.com/v1'}
{...field}
/>
</FormControl>
<FormDescription>
{isLiteLLM ? (
<>
URL of your LiteLLM proxy. Typically{' '}
<code className="text-xs bg-muted px-1 rounded">http://localhost:4000</code>{' '}
or your server address.
</>
) : (
<>
Custom base URL for OpenAI-compatible providers. Leave blank for OpenAI.
Use <code className="text-xs bg-muted px-1 rounded">https://openrouter.ai/api/v1</code> for OpenRouter.
</>
)}
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="ai_model"
render={({ field }) => (
<FormItem>
<div className="flex items-center justify-between">
<FormLabel>Model</FormLabel>
{!isLiteLLM && modelsData?.success && !modelsData?.manualEntry && (
<Button
type="button"
variant="ghost"
size="sm"
onClick={() => refetchModels()}
className="h-6 px-2 text-xs"
>
<RefreshCw className="mr-1 h-3 w-3" />
Refresh
</Button>
)}
</div>
{isLiteLLM || modelsData?.manualEntry ? (
<Input
value={field.value}
onChange={(e) => field.onChange(e.target.value)}
placeholder="chatgpt/gpt-5.2"
/>
) : modelsLoading ? (
<Skeleton className="h-10 w-full" />
) : modelsError || !modelsData?.success ? (
<div className="space-y-2">
<Alert variant="destructive">
<AlertCircle className="h-4 w-4" />
<AlertDescription>
{modelsError?.message || modelsData?.error || 'Failed to load models. Save your API key first and test the connection.'}
</AlertDescription>
</Alert>
<Input
value={field.value}
onChange={(e) => field.onChange(e.target.value)}
placeholder="Enter model ID manually (e.g., gpt-4o)"
/>
</div>
) : (
<Select onValueChange={field.onChange} value={field.value}>
<FormControl>
<SelectTrigger>
<SelectValue placeholder="Select model" />
</SelectTrigger>
</FormControl>
<SelectContent>
{categoryOrder
.filter((cat) => groupedModels?.[cat]?.length)
.map((category) => (
<SelectGroup key={category}>
<SelectLabel className="text-xs font-semibold text-muted-foreground">
{categoryLabels[category] || category}
</SelectLabel>
{groupedModels?.[category]?.map((model) => (
<SelectItem key={model.id} value={model.id}>
<div className="flex items-center gap-2">
{model.isReasoning && (
<SlidersHorizontal className="h-3 w-3 text-purple-500" />
)}
<span>{model.name}</span>
</div>
</SelectItem>
))}
</SelectGroup>
))}
</SelectContent>
</Select>
)}
<FormDescription>
{isLiteLLM ? (
<>
Enter the model ID with the{' '}
<code className="text-xs bg-muted px-1 rounded">chatgpt/</code> prefix.
Examples:{' '}
<code className="text-xs bg-muted px-1 rounded">chatgpt/gpt-5.2</code>,{' '}
<code className="text-xs bg-muted px-1 rounded">chatgpt/gpt-5.2-codex</code>
</>
) : form.watch('ai_model')?.startsWith('o') ? (
<span className="flex items-center gap-1 text-purple-600">
<SlidersHorizontal className="h-3 w-3" />
Reasoning model - optimized for complex analysis tasks
</span>
) : (
'OpenAI model to use for AI features'
)}
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="ai_send_descriptions"
render={({ field }) => (
<FormItem className="flex items-center justify-between rounded-lg border p-4">
<div className="space-y-0.5">
<FormLabel className="text-base">Send Project Descriptions</FormLabel>
<FormDescription>
Include anonymized project descriptions in AI requests for better matching
</FormDescription>
</div>
<FormControl>
<Switch
checked={field.value}
onCheckedChange={field.onChange}
/>
</FormControl>
</FormItem>
)}
/>
<div className="flex gap-2">
<Button
type="submit"
disabled={updateSettings.isPending}
>
{updateSettings.isPending ? (
<>
<Loader2 className="mr-2 h-4 w-4 animate-spin" />
Saving...
</>
) : (
<>
<Cog className="mr-2 h-4 w-4" />
Save AI Settings
</>
)}
</Button>
<Button
type="button"
variant="outline"
onClick={() => testConnection.mutate()}
disabled={testConnection.isPending}
>
{testConnection.isPending ? (
<>
<Loader2 className="mr-2 h-4 w-4 animate-spin" />
Testing...
</>
) : (
<>
<Zap className="mr-2 h-4 w-4" />
Test Connection
</>
)}
</Button>
</div>
</form>
</Form>
)
}