Fix AI assignment returning nothing: cap tokens, optimize prompt, show errors
All checks were successful
Build and Push Docker Image / build (push) Successful in 8m32s
- Cap maxTokens at 12000 (was an unlimited dynamic calculation that could exceed model limits)
- Replace the massive EXISTING array with compact CURRENT_JUROR_LOAD counts and an ALREADY_ASSIGNED per-project map (keeps the prompt small across batches)
- Add a coverage gap-filler: algorithmically fills projects that are below the required review count
- Show the error state inline on the page when the AI fails (red banner with message)
- Add server-side logging for debugging the assignment flow
- Reduce batch size to 10 projects

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -152,6 +152,7 @@ export const roundAssignmentRouter = router({
|
||||
}
|
||||
|
||||
// Call AI service
|
||||
console.log(`[AI Assignment Router] Starting for ${projects.length} projects, ${jurors.length} jurors, ${input.requiredReviews} reviews/project, max ${maxPerJuror}/juror`)
|
||||
const result = await generateAIAssignments(
|
||||
jurors,
|
||||
projects,
|
||||
@@ -159,6 +160,7 @@ export const roundAssignmentRouter = router({
|
||||
ctx.user.id,
|
||||
input.roundId,
|
||||
)
|
||||
console.log(`[AI Assignment Router] Got ${result.suggestions.length} suggestions, success=${result.success}, fallback=${result.fallbackUsed}`)
|
||||
|
||||
// Filter out COI pairs and already-assigned pairs
|
||||
const existingPairSet = new Set(existingAssignments.map((a) => `${a.userId}:${a.projectId}`))
|
||||
|
||||
@@ -173,9 +173,9 @@ async function processAssignmentBatch(
|
||||
|
||||
try {
|
||||
// Calculate maxTokens based on expected assignments
|
||||
// ~150 tokens per assignment JSON object
|
||||
// ~150 tokens per assignment JSON object, capped at 12000
|
||||
const expectedAssignments = batchProjects.length * constraints.requiredReviewsPerProject
|
||||
const estimatedTokens = Math.max(4000, expectedAssignments * 200 + 500)
|
||||
const estimatedTokens = Math.min(12000, Math.max(4000, expectedAssignments * 200 + 500))
|
||||
|
||||
const params = buildCompletionParams(model, {
|
||||
messages: [
|
||||
@@ -359,13 +359,33 @@ function buildBatchPrompt(
|
||||
|
||||
const expectedTotal = projects.length * constraints.requiredReviewsPerProject
|
||||
|
||||
// Instead of full existing assignment list, send per-juror current load counts
|
||||
// This keeps the prompt shorter as batches accumulate
|
||||
const jurorCurrentLoad: Record<string, number> = {}
|
||||
for (const a of constraints.existingAssignments) {
|
||||
const anonId = jurorIdMap.get(a.jurorId)
|
||||
if (anonId) jurorCurrentLoad[anonId] = (jurorCurrentLoad[anonId] || 0) + 1
|
||||
}
|
||||
|
||||
// Also track which projects in this batch already have assignments
|
||||
const projectExistingReviewers: Record<string, string[]> = {}
|
||||
for (const a of constraints.existingAssignments) {
|
||||
const anonProjectId = projectIdMap.get(a.projectId)
|
||||
const anonJurorId = jurorIdMap.get(a.jurorId)
|
||||
if (anonProjectId && anonJurorId) {
|
||||
if (!projectExistingReviewers[anonProjectId]) projectExistingReviewers[anonProjectId] = []
|
||||
projectExistingReviewers[anonProjectId].push(anonJurorId)
|
||||
}
|
||||
}
|
||||
|
||||
return `JURORS: ${JSON.stringify(jurors)}
|
||||
PROJECTS: ${JSON.stringify(projects)}
|
||||
REVIEWS_PER_PROJECT: ${constraints.requiredReviewsPerProject} (each project MUST get exactly ${constraints.requiredReviewsPerProject} different jurors)
|
||||
MAX_PER_JUROR: ${constraints.maxAssignmentsPerJuror || 'unlimited'} (HARD LIMIT — never exceed)${jurorLimitsStr}${targetStr}
|
||||
EXISTING: ${JSON.stringify(anonymousExisting)}
|
||||
EXPECTED_OUTPUT_SIZE: approximately ${expectedTotal} assignment objects (${projects.length} projects × ${constraints.requiredReviewsPerProject} reviewers each)
|
||||
IMPORTANT: Every project must appear ${constraints.requiredReviewsPerProject} times with ${constraints.requiredReviewsPerProject} DIFFERENT juror_ids. Pick the least-loaded jurors first. Never exceed a juror's max.
|
||||
CURRENT_JUROR_LOAD: ${JSON.stringify(jurorCurrentLoad)} (add these to currentAssignmentCount to get true total)
|
||||
ALREADY_ASSIGNED: ${JSON.stringify(projectExistingReviewers)} (do NOT assign these juror-project pairs again)
|
||||
EXPECTED_OUTPUT: ${expectedTotal} assignment objects (${projects.length} projects × ${constraints.requiredReviewsPerProject} reviewers)
|
||||
IMPORTANT: Every project must appear ${constraints.requiredReviewsPerProject} times with ${constraints.requiredReviewsPerProject} DIFFERENT juror_ids. Pick the least-loaded jurors first.
|
||||
Return JSON: {"assignments": [...]}`
|
||||
}
|
||||
|
||||
|
||||
Reference in New Issue
Block a user