Add LiteLLM proxy support for ChatGPT subscription AI access
All checks were successful
Build and Push Docker Image / build (push) Successful in 8m22s

- Add ai_provider setting: 'openai' (API key) or 'litellm' (ChatGPT subscription proxy)
- Auto-strip max_tokens/max_completion_tokens for chatgpt/ prefix models
  (ChatGPT subscription backend rejects token limit fields)
- LiteLLM mode: dummy API key when none configured, base URL required
- isOpenAIConfigured() checks base URL instead of API key for LiteLLM
- listAvailableModels() returns manualEntry flag for LiteLLM (no models.list)
- Settings UI: conditional fields, info banner, manual model input with
  chatgpt/ prefix examples when LiteLLM selected
- All 7 AI services work transparently via buildCompletionParams()

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Matt
2026-02-16 15:48:34 +01:00
parent 014bb15890
commit b2279067e2
3 changed files with 129 additions and 19 deletions

View File

@@ -4,7 +4,7 @@ import { useForm } from 'react-hook-form'
import { zodResolver } from '@hookform/resolvers/zod'
import { z } from 'zod'
import { toast } from 'sonner'
import { Cog, Loader2, Zap, AlertCircle, RefreshCw, SlidersHorizontal } from 'lucide-react'
import { Cog, Loader2, Zap, AlertCircle, RefreshCw, SlidersHorizontal, Info } from 'lucide-react'
import { trpc } from '@/lib/trpc/client'
import { Button } from '@/components/ui/button'
import { Input } from '@/components/ui/input'
@@ -67,7 +67,10 @@ export function AISettingsForm({ settings }: AISettingsFormProps) {
},
})
// Fetch available models from OpenAI API
const watchProvider = form.watch('ai_provider')
const isLiteLLM = watchProvider === 'litellm'
// Fetch available models from OpenAI API (skip for LiteLLM — no models.list support)
const {
data: modelsData,
isLoading: modelsLoading,
@@ -76,6 +79,7 @@ export function AISettingsForm({ settings }: AISettingsFormProps) {
} = trpc.settings.listAIModels.useQuery(undefined, {
staleTime: 5 * 60 * 1000, // Cache for 5 minutes
retry: false,
enabled: !isLiteLLM,
})
const updateSettings = trpc.settings.updateMultiple.useMutation({
@@ -182,32 +186,50 @@ export function AISettingsForm({ settings }: AISettingsFormProps) {
</SelectTrigger>
</FormControl>
<SelectContent>
<SelectItem value="openai">OpenAI</SelectItem>
<SelectItem value="openai">OpenAI (API Key)</SelectItem>
<SelectItem value="litellm">LiteLLM Proxy (ChatGPT Subscription)</SelectItem>
</SelectContent>
</Select>
<FormDescription>
AI provider for smart assignment suggestions
{field.value === 'litellm'
? 'Route AI calls through a LiteLLM proxy connected to your ChatGPT Plus/Pro subscription'
: 'Direct OpenAI API access using your API key'}
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
{isLiteLLM && (
<Alert>
<Info className="h-4 w-4" />
<AlertDescription>
<strong>LiteLLM Proxy Mode:</strong> AI calls will be routed through your LiteLLM proxy
using your ChatGPT subscription. Token limits are automatically stripped (not supported by ChatGPT backend).
Make sure your LiteLLM proxy is running and accessible.
</AlertDescription>
</Alert>
)}
<FormField
control={form.control}
name="openai_api_key"
render={({ field }) => (
<FormItem>
<FormLabel>API Key</FormLabel>
<FormLabel>{isLiteLLM ? 'API Key (Optional)' : 'API Key'}</FormLabel>
<FormControl>
<Input
type="password"
placeholder={settings.openai_api_key ? '••••••••' : 'Enter API key'}
placeholder={isLiteLLM
? 'Optional — leave blank for default'
: (settings.openai_api_key ? '••••••••' : 'Enter API key')}
{...field}
/>
</FormControl>
<FormDescription>
Your OpenAI API key. Leave blank to keep the existing key.
{isLiteLLM
? 'LiteLLM proxy usually does not require an API key. Leave blank to use default.'
: 'Your OpenAI API key. Leave blank to keep the existing key.'}
</FormDescription>
<FormMessage />
</FormItem>
@@ -219,16 +241,26 @@ export function AISettingsForm({ settings }: AISettingsFormProps) {
name="openai_base_url"
render={({ field }) => (
<FormItem>
<FormLabel>API Base URL (Optional)</FormLabel>
<FormLabel>{isLiteLLM ? 'LiteLLM Proxy URL' : 'API Base URL (Optional)'}</FormLabel>
<FormControl>
<Input
placeholder="https://api.openai.com/v1"
placeholder={isLiteLLM ? 'http://localhost:4000' : 'https://api.openai.com/v1'}
{...field}
/>
</FormControl>
<FormDescription>
Custom base URL for OpenAI-compatible providers. Leave blank for OpenAI.
Use <code className="text-xs bg-muted px-1 rounded">https://openrouter.ai/api/v1</code> for OpenRouter (access Claude, Gemini, Llama, etc.)
{isLiteLLM ? (
<>
URL of your LiteLLM proxy. Typically{' '}
<code className="text-xs bg-muted px-1 rounded">http://localhost:4000</code>{' '}
or your server address.
</>
) : (
<>
Custom base URL for OpenAI-compatible providers. Leave blank for OpenAI.
Use <code className="text-xs bg-muted px-1 rounded">https://openrouter.ai/api/v1</code> for OpenRouter.
</>
)}
</FormDescription>
<FormMessage />
</FormItem>
@@ -242,7 +274,7 @@ export function AISettingsForm({ settings }: AISettingsFormProps) {
<FormItem>
<div className="flex items-center justify-between">
<FormLabel>Model</FormLabel>
{modelsData?.success && (
{!isLiteLLM && modelsData?.success && !modelsData?.manualEntry && (
<Button
type="button"
variant="ghost"
@@ -256,7 +288,13 @@ export function AISettingsForm({ settings }: AISettingsFormProps) {
)}
</div>
{modelsLoading ? (
{isLiteLLM || modelsData?.manualEntry ? (
<Input
value={field.value}
onChange={(e) => field.onChange(e.target.value)}
placeholder="chatgpt/gpt-5.2"
/>
) : modelsLoading ? (
<Skeleton className="h-10 w-full" />
) : modelsError || !modelsData?.success ? (
<div className="space-y-2">
@@ -303,7 +341,15 @@ export function AISettingsForm({ settings }: AISettingsFormProps) {
</Select>
)}
<FormDescription>
{form.watch('ai_model')?.startsWith('o') ? (
{isLiteLLM ? (
<>
Enter the model ID with the{' '}
<code className="text-xs bg-muted px-1 rounded">chatgpt/</code> prefix.
Examples:{' '}
<code className="text-xs bg-muted px-1 rounded">chatgpt/gpt-5.2</code>,{' '}
<code className="text-xs bg-muted px-1 rounded">chatgpt/gpt-5.2-codex</code>
</>
) : form.watch('ai_model')?.startsWith('o') ? (
<span className="flex items-center gap-1 text-purple-600">
<SlidersHorizontal className="h-3 w-3" />
Reasoning model - optimized for complex analysis tasks