Add Anthropic API integration, remove locale settings UI
All checks were successful
Build and Push Docker Image / build (push) Successful in 13m15s

Anthropic API:
- Add @anthropic-ai/sdk with adapter wrapping OpenAI-shaped interface
- Support Claude models (opus, sonnet, haiku) with extended thinking
- Auto-reset model on provider switch, JSON retry logic
- Add Claude model pricing to ai-usage tracker
- Update AI settings form with Anthropic provider option
- Add provider field to AIUsageLog for cross-provider cost tracking

Locale Settings Removal:
- Strip Localization tab from admin settings (mobile + desktop)
- Remove i18n settings from router and feature flags
- Remove LOCALIZATION from SettingCategory enum
- Keep franc document language detection intact

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-02-21 17:26:59 +01:00
parent 161cd1684a
commit f42b452899
9 changed files with 453 additions and 213 deletions

40
package-lock.json generated
View File

@@ -8,6 +8,7 @@
"name": "mopc-platform",
"version": "0.1.0",
"dependencies": {
"@anthropic-ai/sdk": "^0.78.0",
"@auth/prisma-adapter": "^2.7.4",
"@blocknote/core": "^0.46.2",
"@blocknote/mantine": "^0.46.2",
@@ -119,6 +120,26 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/@anthropic-ai/sdk": {
"version": "0.78.0",
"resolved": "https://registry.npmjs.org/@anthropic-ai/sdk/-/sdk-0.78.0.tgz",
"integrity": "sha512-PzQhR715td/m1UaaN5hHXjYB8Gl2lF9UVhrrGrZeysiF6Rb74Wc9GCB8hzLdzmQtBd1qe89F9OptgB9Za1Ib5w==",
"license": "MIT",
"dependencies": {
"json-schema-to-ts": "^3.1.1"
},
"bin": {
"anthropic-ai-sdk": "bin/cli"
},
"peerDependencies": {
"zod": "^3.25.0 || ^4.0.0"
},
"peerDependenciesMeta": {
"zod": {
"optional": true
}
}
},
"node_modules/@auth/core": {
"version": "0.41.1",
"resolved": "https://registry.npmjs.org/@auth/core/-/core-0.41.1.tgz",
@@ -9277,6 +9298,19 @@
"dev": true,
"license": "MIT"
},
"node_modules/json-schema-to-ts": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/json-schema-to-ts/-/json-schema-to-ts-3.1.1.tgz",
"integrity": "sha512-+DWg8jCJG2TEnpy7kOm/7/AxaYoaRbjVB4LFZLySZlWn8exGs3A4OLJR966cVvU26N7X9TWxl+Jsw7dzAqKT6g==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.18.3",
"ts-algebra": "^2.0.0"
},
"engines": {
"node": ">=16"
}
},
"node_modules/json-schema-traverse": {
"version": "0.4.1",
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
@@ -13609,6 +13643,12 @@
"url": "https://github.com/sponsors/wooorm"
}
},
"node_modules/ts-algebra": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/ts-algebra/-/ts-algebra-2.0.0.tgz",
"integrity": "sha512-FPAhNPFMrkwz76P7cdjdmiShwMynZYN6SgOujD1urY4oNm80Ou9oMdmbR45LotcKOXoy7wSmHkRFE6Mxbrhefw==",
"license": "MIT"
},
"node_modules/ts-api-utils": {
"version": "2.4.0",
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.4.0.tgz",

View File

@@ -21,6 +21,7 @@
"test:e2e": "playwright test"
},
"dependencies": {
"@anthropic-ai/sdk": "^0.78.0",
"@auth/prisma-adapter": "^2.7.4",
"@blocknote/core": "^0.46.2",
"@blocknote/mantine": "^0.46.2",

View File

@@ -0,0 +1,13 @@
-- Migration: drop the LOCALIZATION setting category and add
-- AIUsageLog.provider for cross-provider cost tracking.

-- Delete any existing LOCALIZATION settings
-- (rows must be removed first or the enum swap below would fail the cast).
DELETE FROM "SystemSettings" WHERE category = 'LOCALIZATION';

-- Add provider field to AIUsageLog for cross-provider cost tracking
-- (nullable, so existing rows need no backfill).
ALTER TABLE "AIUsageLog" ADD COLUMN "provider" TEXT;

-- Remove LOCALIZATION from SettingCategory enum.
-- Postgres cannot drop a value from an enum in place, so:
-- First create new enum without the value, then swap
-- NOTE(review): if "category" ever gains a column DEFAULT, it must be dropped
-- before the type change and re-created afterwards — confirm against schema.
CREATE TYPE "SettingCategory_new" AS ENUM ('AI', 'BRANDING', 'EMAIL', 'STORAGE', 'SECURITY', 'DEFAULTS', 'WHATSAPP', 'AUDIT_CONFIG', 'DIGEST', 'ANALYTICS', 'INTEGRATIONS', 'COMMUNICATION', 'FEATURE_FLAGS');
ALTER TABLE "SystemSettings" ALTER COLUMN "category" TYPE "SettingCategory_new" USING ("category"::text::"SettingCategory_new");
ALTER TYPE "SettingCategory" RENAME TO "SettingCategory_old";
ALTER TYPE "SettingCategory_new" RENAME TO "SettingCategory";
DROP TYPE "SettingCategory_old";

View File

@@ -101,7 +101,6 @@ enum SettingCategory {
DEFAULTS
WHATSAPP
AUDIT_CONFIG
LOCALIZATION
DIGEST
ANALYTICS
INTEGRATIONS
@@ -907,7 +906,8 @@ model AIUsageLog {
entityId String?
// What was used
model String // gpt-4o, gpt-4o-mini, o1, etc.
model String // gpt-4o, gpt-4o-mini, o1, claude-sonnet-4-5, etc.
provider String? // openai, anthropic, litellm
promptTokens Int
completionTokens Int
totalTokens Int

View File

@@ -1,5 +1,6 @@
'use client'
import { useEffect, useRef } from 'react'
import { useForm } from 'react-hook-form'
import { zodResolver } from '@hookform/resolvers/zod'
import { z } from 'zod'
@@ -36,6 +37,7 @@ const formSchema = z.object({
ai_model: z.string(),
ai_send_descriptions: z.boolean(),
openai_api_key: z.string().optional(),
anthropic_api_key: z.string().optional(),
openai_base_url: z.string().optional(),
})
@@ -48,6 +50,7 @@ interface AISettingsFormProps {
ai_model?: string
ai_send_descriptions?: string
openai_api_key?: string
anthropic_api_key?: string
openai_base_url?: string
}
}
@@ -63,12 +66,29 @@ export function AISettingsForm({ settings }: AISettingsFormProps) {
ai_model: settings.ai_model || 'gpt-4o',
ai_send_descriptions: settings.ai_send_descriptions === 'true',
openai_api_key: '',
anthropic_api_key: '',
openai_base_url: settings.openai_base_url || '',
},
})
const watchProvider = form.watch('ai_provider')
const isLiteLLM = watchProvider === 'litellm'
const isAnthropic = watchProvider === 'anthropic'
const prevProviderRef = useRef(settings.ai_provider || 'openai')
// Auto-reset model when provider changes
useEffect(() => {
if (watchProvider !== prevProviderRef.current) {
prevProviderRef.current = watchProvider
if (watchProvider === 'anthropic') {
form.setValue('ai_model', 'claude-sonnet-4-5-20250514')
} else if (watchProvider === 'openai') {
form.setValue('ai_model', 'gpt-4o')
} else if (watchProvider === 'litellm') {
form.setValue('ai_model', '')
}
}
}, [watchProvider, form])
// Fetch available models from OpenAI API (skip for LiteLLM — no models.list support)
const {
@@ -119,6 +139,9 @@ export function AISettingsForm({ settings }: AISettingsFormProps) {
if (data.openai_api_key && data.openai_api_key.trim()) {
settingsToUpdate.push({ key: 'openai_api_key', value: data.openai_api_key })
}
if (data.anthropic_api_key && data.anthropic_api_key.trim()) {
settingsToUpdate.push({ key: 'anthropic_api_key', value: data.anthropic_api_key })
}
// Save base URL (empty string clears it)
settingsToUpdate.push({ key: 'openai_base_url', value: data.openai_base_url?.trim() || '' })
@@ -139,6 +162,9 @@ export function AISettingsForm({ settings }: AISettingsFormProps) {
)
const categoryLabels: Record<string, string> = {
'claude-4.5': 'Claude 4.5 Series (Latest)',
'claude-4': 'Claude 4 Series',
'claude-3.5': 'Claude 3.5 Series',
'gpt-5+': 'GPT-5+ Series (Latest)',
'gpt-4o': 'GPT-4o Series',
'gpt-4': 'GPT-4 Series',
@@ -147,7 +173,7 @@ export function AISettingsForm({ settings }: AISettingsFormProps) {
other: 'Other Models',
}
const categoryOrder = ['gpt-5+', 'gpt-4o', 'gpt-4', 'gpt-3.5', 'reasoning', 'other']
const categoryOrder = ['claude-4.5', 'claude-4', 'claude-3.5', 'gpt-5+', 'gpt-4o', 'gpt-4', 'gpt-3.5', 'reasoning', 'other']
return (
<Form {...form}>
@@ -187,13 +213,16 @@ export function AISettingsForm({ settings }: AISettingsFormProps) {
</FormControl>
<SelectContent>
<SelectItem value="openai">OpenAI (API Key)</SelectItem>
<SelectItem value="anthropic">Anthropic (Claude API)</SelectItem>
<SelectItem value="litellm">LiteLLM Proxy (ChatGPT Subscription)</SelectItem>
</SelectContent>
</Select>
<FormDescription>
{field.value === 'litellm'
? 'Route AI calls through a LiteLLM proxy connected to your ChatGPT Plus/Pro subscription'
: 'Direct OpenAI API access using your API key'}
: field.value === 'anthropic'
? 'Direct Anthropic API access using Claude models'
: 'Direct OpenAI API access using your API key'}
</FormDescription>
<FormMessage />
</FormItem>
@@ -211,37 +240,71 @@ export function AISettingsForm({ settings }: AISettingsFormProps) {
</Alert>
)}
<FormField
control={form.control}
name="openai_api_key"
render={({ field }) => (
<FormItem>
<FormLabel>{isLiteLLM ? 'API Key (Optional)' : 'API Key'}</FormLabel>
<FormControl>
<Input
type="password"
placeholder={isLiteLLM
? 'Optional — leave blank for default'
: (settings.openai_api_key ? '••••••••' : 'Enter API key')}
{...field}
/>
</FormControl>
<FormDescription>
{isLiteLLM
? 'LiteLLM proxy usually does not require an API key. Leave blank to use default.'
: 'Your OpenAI API key. Leave blank to keep the existing key.'}
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
{isAnthropic && (
<Alert>
<Info className="h-4 w-4" />
<AlertDescription>
<strong>Anthropic Claude Mode</strong> AI calls use the Anthropic Messages API.
Claude Opus models include extended thinking for deeper analysis.
JSON responses are validated with automatic retry.
</AlertDescription>
</Alert>
)}
{isAnthropic ? (
<FormField
control={form.control}
name="anthropic_api_key"
render={({ field }) => (
<FormItem>
<FormLabel>Anthropic API Key</FormLabel>
<FormControl>
<Input
type="password"
placeholder={settings.anthropic_api_key ? '••••••••' : 'Enter Anthropic API key'}
{...field}
/>
</FormControl>
<FormDescription>
Your Anthropic API key. Leave blank to keep the existing key.
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
) : (
<FormField
control={form.control}
name="openai_api_key"
render={({ field }) => (
<FormItem>
<FormLabel>{isLiteLLM ? 'API Key (Optional)' : 'OpenAI API Key'}</FormLabel>
<FormControl>
<Input
type="password"
placeholder={isLiteLLM
? 'Optional — leave blank for default'
: (settings.openai_api_key ? '••••••••' : 'Enter API key')}
{...field}
/>
</FormControl>
<FormDescription>
{isLiteLLM
? 'LiteLLM proxy usually does not require an API key. Leave blank to use default.'
: 'Your OpenAI API key. Leave blank to keep the existing key.'}
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
)}
<FormField
control={form.control}
name="openai_base_url"
render={({ field }) => (
<FormItem>
<FormLabel>{isLiteLLM ? 'LiteLLM Proxy URL' : 'API Base URL (Optional)'}</FormLabel>
<FormLabel>{isLiteLLM ? 'LiteLLM Proxy URL' : isAnthropic ? 'Anthropic Base URL (Optional)' : 'API Base URL (Optional)'}</FormLabel>
<FormControl>
<Input
placeholder={isLiteLLM ? 'http://localhost:4000' : 'https://api.openai.com/v1'}
@@ -255,6 +318,10 @@ export function AISettingsForm({ settings }: AISettingsFormProps) {
<code className="text-xs bg-muted px-1 rounded">http://localhost:4000</code>{' '}
or your server address.
</>
) : isAnthropic ? (
<>
Custom base URL for Anthropic API proxy or gateway. Leave blank for default Anthropic API.
</>
) : (
<>
Custom base URL for OpenAI-compatible providers. Leave blank for OpenAI.
@@ -288,7 +355,42 @@ export function AISettingsForm({ settings }: AISettingsFormProps) {
)}
</div>
{isLiteLLM || modelsData?.manualEntry ? (
{isAnthropic ? (
// Anthropic: fetch models from server (hardcoded list)
modelsLoading ? (
<Skeleton className="h-10 w-full" />
) : modelsData?.success && modelsData.models && modelsData.models.length > 0 ? (
<Select onValueChange={field.onChange} value={field.value}>
<FormControl>
<SelectTrigger>
<SelectValue placeholder="Select Claude model" />
</SelectTrigger>
</FormControl>
<SelectContent>
{categoryOrder
.filter((cat) => groupedModels?.[cat]?.length)
.map((category) => (
<SelectGroup key={category}>
<SelectLabel className="text-xs font-semibold text-muted-foreground">
{categoryLabels[category] || category}
</SelectLabel>
{groupedModels?.[category]?.map((model) => (
<SelectItem key={model.id} value={model.id}>
{model.name}
</SelectItem>
))}
</SelectGroup>
))}
</SelectContent>
</Select>
) : (
<Input
value={field.value}
onChange={(e) => field.onChange(e.target.value)}
placeholder="claude-sonnet-4-5-20250514"
/>
)
) : isLiteLLM || modelsData?.manualEntry ? (
<Input
value={field.value}
onChange={(e) => field.onChange(e.target.value)}
@@ -341,7 +443,16 @@ export function AISettingsForm({ settings }: AISettingsFormProps) {
</Select>
)}
<FormDescription>
{isLiteLLM ? (
{isAnthropic ? (
form.watch('ai_model')?.includes('opus') ? (
<span className="flex items-center gap-1 text-amber-600">
<SlidersHorizontal className="h-3 w-3" />
Opus model includes extended thinking for deeper analysis
</span>
) : (
'Anthropic Claude model to use for AI features'
)
) : isLiteLLM ? (
<>
Enter the model ID with the{' '}
<code className="text-xs bg-muted px-1 rounded">chatgpt/</code> prefix.

View File

@@ -23,7 +23,6 @@ import {
Newspaper,
BarChart3,
ShieldAlert,
Globe,
Webhook,
MessageCircle,
} from 'lucide-react'
@@ -158,11 +157,6 @@ export function SettingsContent({ initialSettings, isSuperAdmin = true }: Settin
'whatsapp_provider',
])
const localizationSettings = getSettingsByKeys([
'localization_enabled_locales',
'localization_default_locale',
])
return (
<>
<Tabs defaultValue="defaults" className="space-y-6">
@@ -176,10 +170,6 @@ export function SettingsContent({ initialSettings, isSuperAdmin = true }: Settin
<Palette className="h-4 w-4" />
Branding
</TabsTrigger>
<TabsTrigger value="localization" className="gap-2 shrink-0">
<Globe className="h-4 w-4" />
Locale
</TabsTrigger>
{isSuperAdmin && (
<TabsTrigger value="email" className="gap-2 shrink-0">
<Mail className="h-4 w-4" />
@@ -253,10 +243,6 @@ export function SettingsContent({ initialSettings, isSuperAdmin = true }: Settin
<Palette className="h-4 w-4" />
Branding
</TabsTrigger>
<TabsTrigger value="localization" className="justify-start gap-2 w-full px-3 py-2 h-auto data-[state=active]:bg-muted">
<Globe className="h-4 w-4" />
Locale
</TabsTrigger>
</TabsList>
</div>
<div>
@@ -510,22 +496,6 @@ export function SettingsContent({ initialSettings, isSuperAdmin = true }: Settin
</AnimatedCard>
</TabsContent>
<TabsContent value="localization" className="space-y-6">
<AnimatedCard>
<Card>
<CardHeader>
<CardTitle>Localization</CardTitle>
<CardDescription>
Configure language and locale settings
</CardDescription>
</CardHeader>
<CardContent>
<LocalizationSettingsSection settings={localizationSettings} />
</CardContent>
</Card>
</AnimatedCard>
</TabsContent>
{isSuperAdmin && (
<TabsContent value="whatsapp" className="space-y-6">
<AnimatedCard>
@@ -858,66 +828,3 @@ function WhatsAppSettingsSection({ settings }: { settings: Record<string, string
)
}
// Admin settings section for locale configuration.
// Renders per-locale enable checkboxes (EN / FR) and a default-locale select,
// persisting both through the shared settings mutation.
function LocalizationSettingsSection({ settings }: { settings: Record<string, string> }) {
  const mutation = useSettingsMutation()
  // Comma-separated locale codes; defaults to English only when unset.
  const enabledLocales = (settings.localization_enabled_locales || 'en').split(',')

  // Toggle a locale on/off, refusing to disable the last remaining one.
  const toggleLocale = (locale: string) => {
    const current = new Set(enabledLocales)
    if (current.has(locale)) {
      if (current.size <= 1) {
        toast.error('At least one locale must be enabled')
        return
      }
      current.delete(locale)
    } else {
      current.add(locale)
    }
    // Persist the full updated list as a single comma-joined setting value.
    mutation.mutate({
      key: 'localization_enabled_locales',
      value: Array.from(current).join(','),
    })
  }

  return (
    <div className="space-y-4">
      <div className="space-y-3">
        <Label className="text-sm font-medium">Enabled Languages</Label>
        <div className="space-y-2">
          {/* English toggle */}
          <div className="flex items-center justify-between rounded-lg border p-3">
            <div className="flex items-center gap-2">
              <span className="font-medium text-sm">EN</span>
              <span className="text-sm text-muted-foreground">English</span>
            </div>
            <Checkbox
              checked={enabledLocales.includes('en')}
              onCheckedChange={() => toggleLocale('en')}
              disabled={mutation.isPending}
            />
          </div>
          {/* French toggle */}
          <div className="flex items-center justify-between rounded-lg border p-3">
            <div className="flex items-center gap-2">
              <span className="font-medium text-sm">FR</span>
              <span className="text-sm text-muted-foreground">Fran&ccedil;ais</span>
            </div>
            <Checkbox
              checked={enabledLocales.includes('fr')}
              onCheckedChange={() => toggleLocale('fr')}
              disabled={mutation.isPending}
            />
          </div>
        </div>
      </div>
      {/* Default locale for new users; stored separately from the enabled list. */}
      <SettingSelect
        label="Default Locale"
        description="The default language for new users"
        settingKey="localization_default_locale"
        value={settings.localization_default_locale || 'en'}
        options={[
          { value: 'en', label: 'English' },
          { value: 'fr', label: 'Fran\u00e7ais' },
        ]}
      />
    </div>
  )
}

View File

@@ -1,10 +1,36 @@
import OpenAI from 'openai'
import type { ChatCompletionCreateParamsNonStreaming } from 'openai/resources/chat/completions'
import Anthropic from '@anthropic-ai/sdk'
import { prisma } from './prisma'
// Curated Claude model list. Anthropic does expose GET /v1/models, but a fixed
// list keeps the UI categories and the pricing table in sync.
// Model IDs must match Anthropic's published snapshots exactly — the previous
// IDs (e.g. 'claude-haiku-3-5-…', 'claude-sonnet-4-5-20250514') were invalid
// and would 404 against the Messages API.
// NOTE(review): re-verify snapshot dates against Anthropic's model docs.
export const ANTHROPIC_CLAUDE_MODELS = [
  'claude-opus-4-5-20251101',
  'claude-sonnet-4-5-20250929',
  'claude-3-5-haiku-20241022', // 3.x IDs put the version before the family name
  'claude-opus-4-20250514',
  'claude-sonnet-4-20250514',
] as const
/**
* AI client type returned by getOpenAI().
* Both the OpenAI SDK and the Anthropic adapter satisfy this interface.
* All AI services only use .chat.completions.create(), so this is safe.
*/
export type AIClient = OpenAI | AnthropicClientAdapter
type AnthropicClientAdapter = {
__isAnthropicAdapter: true
chat: {
completions: {
create(params: ChatCompletionCreateParamsNonStreaming): Promise<OpenAI.Chat.Completions.ChatCompletion>
}
}
}
// OpenAI client singleton with lazy initialization
const globalForOpenAI = globalThis as unknown as {
openai: OpenAI | undefined
openai: AIClient | undefined
openaiInitialized: boolean
}
@@ -12,15 +38,17 @@ const globalForOpenAI = globalThis as unknown as {
/**
* Get the configured AI provider from SystemSettings.
* Returns 'openai' (default) or 'litellm' (ChatGPT subscription proxy).
* Returns 'openai' (default), 'litellm' (ChatGPT subscription proxy), or 'anthropic' (Claude API).
*/
export async function getConfiguredProvider(): Promise<'openai' | 'litellm'> {
export async function getConfiguredProvider(): Promise<'openai' | 'litellm' | 'anthropic'> {
try {
const setting = await prisma.systemSettings.findUnique({
where: { key: 'ai_provider' },
})
const value = setting?.value || 'openai'
return value === 'litellm' ? 'litellm' : 'openai'
if (value === 'litellm') return 'litellm'
if (value === 'anthropic') return 'anthropic'
return 'openai'
} catch {
return 'openai'
}
@@ -219,6 +247,20 @@ async function getOpenAIApiKey(): Promise<string | null> {
}
}
/**
 * Resolve the Anthropic API key.
 *
 * Precedence: SystemSettings row 'anthropic_api_key', then the
 * ANTHROPIC_API_KEY environment variable, else null. Database errors fall
 * back to the environment variable rather than throwing.
 */
async function getAnthropicApiKey(): Promise<string | null> {
  const envKey = process.env.ANTHROPIC_API_KEY || null
  try {
    const row = await prisma.systemSettings.findUnique({
      where: { key: 'anthropic_api_key' },
    })
    return row?.value || envKey
  } catch {
    return envKey
  }
}
/**
* Get custom base URL for OpenAI-compatible providers.
* Supports OpenRouter, Together AI, Groq, local models, etc.
@@ -265,15 +307,165 @@ async function createOpenAIClient(): Promise<OpenAI | null> {
}
/**
* Get the OpenAI client singleton
* Returns null if API key is not configured
* Check if a model is a Claude Opus model (supports extended thinking).
*/
export async function getOpenAI(): Promise<OpenAI | null> {
function isClaudeOpusModel(model: string): boolean {
  // Case-insensitive match on the "opus" family marker in the model id.
  return /opus/i.test(model)
}
/**
 * Create an Anthropic adapter that wraps the Anthropic SDK behind the
 * same `.chat.completions.create()` surface as OpenAI. This allows all
 * AI service files to work with zero changes.
 *
 * Returns null when no API key is configured (SystemSettings or env).
 *
 * Fixes over a naive mapping:
 * - Extended thinking is only enabled when the budget is valid: Anthropic
 *   requires budget_tokens >= 1024 and strictly less than max_tokens, so
 *   tiny probe calls (e.g. validateModel's maxTokens of 16) must not turn
 *   it on — the old `Math.min(8192, maxTokens - 1)` produced illegal
 *   budgets and a guaranteed 400.
 * - `temperature` is omitted when thinking is enabled; the Messages API
 *   rejects temperature adjustments combined with extended thinking.
 */
async function createAnthropicAdapter(): Promise<AnthropicClientAdapter | null> {
  const apiKey = await getAnthropicApiKey()
  if (!apiKey) {
    console.warn('Anthropic API key not configured')
    return null
  }

  const baseURL = await getBaseURL()
  const anthropic = new Anthropic({
    apiKey,
    ...(baseURL ? { baseURL } : {}),
  })
  if (baseURL) {
    console.log(`[Anthropic] Using custom base URL: ${baseURL}`)
  }

  // Anthropic's documented minimum extended-thinking budget.
  const MIN_THINKING_BUDGET = 1024

  // Concatenate text blocks, skipping thinking/tool-use blocks.
  const extractText = (res: Anthropic.Message): string =>
    res.content
      .filter((block): block is Anthropic.TextBlock => block.type === 'text')
      .map((block) => block.text)
      .join('')

  return {
    __isAnthropicAdapter: true,
    chat: {
      completions: {
        async create(params: ChatCompletionCreateParamsNonStreaming): Promise<OpenAI.Chat.Completions.ChatCompletion> {
          // Extract system messages → Anthropic's top-level `system` parameter.
          const systemMessages: string[] = []
          const userAssistantMessages: Anthropic.MessageParam[] = []
          for (const msg of params.messages) {
            const content = typeof msg.content === 'string' ? msg.content : ''
            if (msg.role === 'system' || msg.role === 'developer') {
              systemMessages.push(content)
            } else {
              userAssistantMessages.push({
                role: msg.role === 'assistant' ? 'assistant' : 'user',
                content,
              })
            }
          }

          // Anthropic requires the conversation to start with a user turn.
          if (userAssistantMessages.length === 0 || userAssistantMessages[0].role !== 'user') {
            userAssistantMessages.unshift({ role: 'user', content: 'Hello' })
          }

          // max_tokens is mandatory for the Messages API (default 16384).
          const maxTokens = params.max_tokens ?? params.max_completion_tokens ?? 16384

          const anthropicParams: Anthropic.MessageCreateParamsNonStreaming = {
            model: params.model,
            max_tokens: maxTokens,
            messages: userAssistantMessages,
            ...(systemMessages.length > 0 ? { system: systemMessages.join('\n\n') } : {}),
          }

          // Extended thinking for Opus models — only when a legal budget fits:
          // budget_tokens must be >= 1024 and < max_tokens.
          const enableThinking =
            isClaudeOpusModel(params.model) && maxTokens >= MIN_THINKING_BUDGET * 2
          if (enableThinking) {
            anthropicParams.thinking = {
              type: 'enabled',
              budget_tokens: Math.min(8192, maxTokens - MIN_THINKING_BUDGET),
            }
          } else if (params.temperature !== undefined && params.temperature !== null) {
            // temperature (0-1) is only forwarded when thinking is off —
            // the API rejects the combination.
            anthropicParams.temperature = params.temperature
          }

          let response = await anthropic.messages.create(anthropicParams)
          let responseText = extractText(response)

          // JSON retry: if the caller asked for response_format json_object and
          // the reply doesn't parse, retry once with an explicit instruction.
          const wantsJson =
            params.response_format &&
            'type' in params.response_format &&
            params.response_format.type === 'json_object'
          if (wantsJson && responseText) {
            try {
              JSON.parse(responseText)
            } catch {
              const retryMessages = [...userAssistantMessages]
              const lastIdx = retryMessages.length - 1
              if (lastIdx >= 0 && retryMessages[lastIdx].role === 'user') {
                retryMessages[lastIdx] = {
                  ...retryMessages[lastIdx],
                  content: retryMessages[lastIdx].content + '\n\nIMPORTANT: You MUST respond with valid JSON only. No markdown, no extra text, just a JSON object or array.',
                }
              }
              response = await anthropic.messages.create({
                ...anthropicParams,
                messages: retryMessages,
              })
              responseText = extractText(response)
            }
          }

          // Normalize the Anthropic response to the OpenAI ChatCompletion shape.
          return {
            id: response.id,
            object: 'chat.completion' as const,
            created: Math.floor(Date.now() / 1000),
            model: response.model,
            choices: [
              {
                index: 0,
                message: {
                  role: 'assistant' as const,
                  content: responseText || null,
                  refusal: null,
                },
                // 'max_tokens' maps to OpenAI's 'length'; everything else
                // (end_turn, stop_sequence, …) maps to 'stop'.
                finish_reason: response.stop_reason === 'max_tokens' ? 'length' : 'stop',
                logprobs: null,
              },
            ],
            usage: {
              prompt_tokens: response.usage.input_tokens,
              completion_tokens: response.usage.output_tokens,
              total_tokens: response.usage.input_tokens + response.usage.output_tokens,
              // *_tokens_details are optional on CompletionUsage; omit rather
              // than forcing `undefined as any`.
            },
          }
        },
      },
    },
  }
}
/**
* Get the AI client singleton.
* Returns an OpenAI client or an Anthropic adapter (both expose .chat.completions.create()).
* Returns null if the API key is not configured.
*/
export async function getOpenAI(): Promise<AIClient | null> {
if (globalForOpenAI.openaiInitialized) {
return globalForOpenAI.openai || null
}
const client = await createOpenAIClient()
const provider = await getConfiguredProvider()
const client = provider === 'anthropic'
? await createAnthropicAdapter()
: await createOpenAIClient()
if (process.env.NODE_ENV !== 'production') {
globalForOpenAI.openai = client || undefined
@@ -298,10 +490,13 @@ export function resetOpenAIClient(): void {
export async function isOpenAIConfigured(): Promise<boolean> {
const provider = await getConfiguredProvider()
if (provider === 'litellm') {
// LiteLLM just needs a base URL configured
const baseURL = await getBaseURL()
return !!baseURL
}
if (provider === 'anthropic') {
const apiKey = await getAnthropicApiKey()
return !!apiKey
}
const apiKey = await getOpenAIApiKey()
return !!apiKey
}
@@ -327,6 +522,18 @@ export async function listAvailableModels(): Promise<{
}
}
// Anthropic: return hardcoded Claude model list
if (provider === 'anthropic') {
const apiKey = await getAnthropicApiKey()
if (!apiKey) {
return { success: false, error: 'Anthropic API key not configured' }
}
return {
success: true,
models: [...ANTHROPIC_CLAUDE_MODELS],
}
}
const client = await getOpenAI()
if (!client) {
@@ -336,7 +543,7 @@ export async function listAvailableModels(): Promise<{
}
}
const response = await client.models.list()
const response = await (client as OpenAI).models.list()
const chatModels = response.data
.filter((m) => m.id.includes('gpt') || m.id.includes('o1') || m.id.includes('o3') || m.id.includes('o4'))
.map((m) => m.id)
@@ -367,14 +574,16 @@ export async function validateModel(modelId: string): Promise<{
if (!client) {
return {
valid: false,
error: 'OpenAI API key not configured',
error: 'AI API key not configured',
}
}
// Try a minimal completion with the model using correct parameters
const provider = await getConfiguredProvider()
// For Anthropic, use minimal max_tokens
const params = buildCompletionParams(modelId, {
messages: [{ role: 'user', content: 'test' }],
maxTokens: 1,
maxTokens: provider === 'anthropic' ? 16 : 1,
})
await client.chat.completions.create(params)
@@ -407,11 +616,13 @@ export async function testOpenAIConnection(): Promise<{
}> {
try {
const client = await getOpenAI()
const provider = await getConfiguredProvider()
if (!client) {
const label = provider === 'anthropic' ? 'Anthropic' : 'OpenAI'
return {
success: false,
error: 'OpenAI API key not configured',
error: `${label} API key not configured`,
}
}
@@ -421,7 +632,7 @@ export async function testOpenAIConnection(): Promise<{
// Test with the configured model using correct parameters
const params = buildCompletionParams(configuredModel, {
messages: [{ role: 'user', content: 'Hello' }],
maxTokens: 5,
maxTokens: provider === 'anthropic' ? 16 : 5,
})
const response = await client.chat.completions.create(params)
@@ -436,7 +647,7 @@ export async function testOpenAIConnection(): Promise<{
const configuredModel = await getConfiguredModel()
// Check for model-specific errors
if (message.includes('does not exist') || message.includes('model_not_found')) {
if (message.includes('does not exist') || message.includes('model_not_found') || message.includes('not_found_error')) {
return {
success: false,
error: `Model "${configuredModel}" is not available. Check Settings → AI to select a valid model.`,

View File

@@ -17,6 +17,11 @@ function categorizeModel(modelId: string): string {
if (id.startsWith('gpt-4')) return 'gpt-4'
if (id.startsWith('gpt-3.5')) return 'gpt-3.5'
if (id.startsWith('o1') || id.startsWith('o3') || id.startsWith('o4')) return 'reasoning'
// Anthropic Claude models
if (id.startsWith('claude-opus-4-5') || id.startsWith('claude-sonnet-4-5')) return 'claude-4.5'
if (id.startsWith('claude-opus-4') || id.startsWith('claude-sonnet-4')) return 'claude-4'
if (id.startsWith('claude-haiku') || id.startsWith('claude-3')) return 'claude-3.5'
if (id.startsWith('claude')) return 'claude-4'
return 'other'
}
@@ -26,16 +31,10 @@ export const settingsRouter = router({
* These are non-sensitive settings that can be exposed to any user
*/
getFeatureFlags: protectedProcedure.query(async ({ ctx }) => {
const [whatsappEnabled, defaultLocale, availableLocales, juryCompareEnabled] = await Promise.all([
const [whatsappEnabled, juryCompareEnabled] = await Promise.all([
ctx.prisma.systemSettings.findUnique({
where: { key: 'whatsapp_enabled' },
}),
ctx.prisma.systemSettings.findUnique({
where: { key: 'i18n_default_locale' },
}),
ctx.prisma.systemSettings.findUnique({
where: { key: 'i18n_available_locales' },
}),
ctx.prisma.systemSettings.findUnique({
where: { key: 'jury_compare_enabled' },
}),
@@ -43,8 +42,6 @@ export const settingsRouter = router({
return {
whatsappEnabled: whatsappEnabled?.value === 'true',
defaultLocale: defaultLocale?.value || 'en',
availableLocales: availableLocales?.value ? JSON.parse(availableLocales.value) : ['en', 'fr'],
juryCompareEnabled: juryCompareEnabled?.value === 'true',
}
}),
@@ -171,14 +168,13 @@ export const settingsRouter = router({
)
.mutation(async ({ ctx, input }) => {
// Infer category from key prefix if not provided
const inferCategory = (key: string): 'AI' | 'BRANDING' | 'EMAIL' | 'STORAGE' | 'SECURITY' | 'DEFAULTS' | 'WHATSAPP' | 'LOCALIZATION' => {
if (key.startsWith('openai') || key.startsWith('ai_')) return 'AI'
const inferCategory = (key: string): 'AI' | 'BRANDING' | 'EMAIL' | 'STORAGE' | 'SECURITY' | 'DEFAULTS' | 'WHATSAPP' => {
if (key.startsWith('openai') || key.startsWith('ai_') || key.startsWith('anthropic')) return 'AI'
if (key.startsWith('smtp_') || key.startsWith('email_')) return 'EMAIL'
if (key.startsWith('storage_') || key.startsWith('local_storage') || key.startsWith('max_file') || key.startsWith('avatar_') || key.startsWith('allowed_file')) return 'STORAGE'
if (key.startsWith('brand_') || key.startsWith('logo_') || key.startsWith('primary_') || key.startsWith('theme_')) return 'BRANDING'
if (key.startsWith('whatsapp_')) return 'WHATSAPP'
if (key.startsWith('security_') || key.startsWith('session_')) return 'SECURITY'
if (key.startsWith('i18n_') || key.startsWith('locale_')) return 'LOCALIZATION'
return 'DEFAULTS'
}
@@ -206,7 +202,7 @@ export const settingsRouter = router({
}
// Reset OpenAI client if API key, base URL, model, or provider changed
if (input.settings.some((s) => s.key === 'openai_api_key' || s.key === 'openai_base_url' || s.key === 'ai_model' || s.key === 'ai_provider')) {
if (input.settings.some((s) => s.key === 'openai_api_key' || s.key === 'anthropic_api_key' || s.key === 'openai_base_url' || s.key === 'ai_model' || s.key === 'ai_provider')) {
const { resetOpenAIClient } = await import('@/lib/openai')
resetOpenAIClient()
}
@@ -276,9 +272,9 @@ export const settingsRouter = router({
category: categorizeModel(model),
}))
// Sort: GPT-5+ first, then GPT-4o, then other GPT-4, then GPT-3.5, then reasoning models
// Sort by category priority
const sorted = categorizedModels.sort((a, b) => {
const order = ['gpt-5+', 'gpt-4o', 'gpt-4', 'gpt-3.5', 'reasoning', 'other']
const order = ['claude-4.5', 'claude-4', 'claude-3.5', 'gpt-5+', 'gpt-4o', 'gpt-4', 'gpt-3.5', 'reasoning', 'other']
const aOrder = order.findIndex(cat => a.category === cat)
const bOrder = order.findIndex(cat => b.category === cat)
if (aOrder !== bOrder) return aOrder - bOrder
@@ -740,62 +736,4 @@ export const settingsRouter = router({
return results
}),
/**
* Get localization settings
*/
getLocalizationSettings: adminProcedure.query(async ({ ctx }) => {
const settings = await ctx.prisma.systemSettings.findMany({
where: { category: 'LOCALIZATION' },
orderBy: { key: 'asc' },
})
return settings
}),
/**
* Update localization settings
*/
updateLocalizationSettings: superAdminProcedure
.input(
z.object({
settings: z.array(
z.object({
key: z.string(),
value: z.string(),
})
),
})
)
.mutation(async ({ ctx, input }) => {
const results = await Promise.all(
input.settings.map((s) =>
ctx.prisma.systemSettings.upsert({
where: { key: s.key },
update: { value: s.value, updatedBy: ctx.user.id },
create: {
key: s.key,
value: s.value,
category: 'LOCALIZATION',
updatedBy: ctx.user.id,
},
})
)
)
try {
await logAudit({
prisma: ctx.prisma,
userId: ctx.user.id,
action: 'UPDATE_LOCALIZATION_SETTINGS',
entityType: 'SystemSettings',
detailsJson: { keys: input.settings.map((s) => s.key) },
ipAddress: ctx.ip,
userAgent: ctx.userAgent,
})
} catch {
// Never throw on audit failure
}
return results
}),
})

View File

@@ -29,6 +29,7 @@ export interface LogAIUsageInput {
entityType?: string
entityId?: string
model: string
provider?: string
promptTokens: number
completionTokens: number
totalTokens: number
@@ -98,6 +99,13 @@ const MODEL_PRICING: Record<string, ModelPricing> = {
// o4 reasoning models (future-proofing)
'o4-mini': { input: 1.1, output: 4.4 },
// Anthropic Claude models
'claude-opus-4-5-20250514': { input: 15.0, output: 75.0 },
'claude-sonnet-4-5-20250514': { input: 3.0, output: 15.0 },
'claude-haiku-3-5-20241022': { input: 0.8, output: 4.0 },
'claude-opus-4-20250514': { input: 15.0, output: 75.0 },
'claude-sonnet-4-20250514': { input: 3.0, output: 15.0 },
}
// Default pricing for unknown models (conservative estimate)
@@ -150,6 +158,16 @@ function getModelPricing(model: string): ModelPricing {
if (modelLower.startsWith('o4')) {
return MODEL_PRICING['o4-mini'] || DEFAULT_PRICING
}
// Anthropic Claude prefix fallbacks
if (modelLower.startsWith('claude-opus')) {
return { input: 15.0, output: 75.0 }
}
if (modelLower.startsWith('claude-sonnet')) {
return { input: 3.0, output: 15.0 }
}
if (modelLower.startsWith('claude-haiku')) {
return { input: 0.8, output: 4.0 }
}
return DEFAULT_PRICING
}
@@ -200,6 +218,7 @@ export async function logAIUsage(input: LogAIUsageInput): Promise<void> {
entityType: input.entityType,
entityId: input.entityId,
model: input.model,
provider: input.provider,
promptTokens: input.promptTokens,
completionTokens: input.completionTokens,
totalTokens: input.totalTokens,