Add visual progress indicator for AI assignment batches

- Add AssignmentJob model to track AI assignment progress
- Create startAIAssignmentJob mutation for background processing
- Add getAIAssignmentJobStatus query for polling progress
- Update AI assignment service with progress callback support
- Add progress bar UI showing batch/project processing status
- Add toast notifications for job completion/failure
- Add AI_SUGGESTIONS_READY notification type

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
2026-02-04 17:40:26 +01:00
parent 148925cb95
commit 6f6d5ef501
5 changed files with 439 additions and 21 deletions

View File

@@ -5,14 +5,157 @@ import { getUserAvatarUrl } from '../utils/avatar-url'
import {
generateAIAssignments,
generateFallbackAssignments,
type AssignmentProgressCallback,
} from '../services/ai-assignment'
import { isOpenAIConfigured } from '@/lib/openai'
import { prisma } from '@/lib/prisma'
import {
createNotification,
createBulkNotifications,
notifyAdmins,
NotificationTypes,
} from '../services/in-app-notification'
/**
 * Executes one AI assignment job to completion in the background.
 *
 * Marks the AssignmentJob row RUNNING, loads the round's constraints,
 * jurors, projects and existing assignments, runs the AI assignment
 * generator with a progress callback that persists per-batch counters
 * (so pollers can render a progress bar), then marks the job COMPLETED
 * and notifies admins — or marks it FAILED with the error message.
 *
 * Intended to be called fire-and-forget; it never rethrows on the
 * happy-path failure case (the catch records the failure instead).
 *
 * @param jobId   - id of the AssignmentJob row created by the caller
 * @param roundId - round whose projects are being assigned
 * @param userId  - admin who started the job (forwarded to the AI service)
 */
async function runAIAssignmentJob(jobId: string, roundId: string, userId: string) {
  try {
    // Flip the job to RUNNING before any heavy work starts.
    await prisma.assignmentJob.update({
      where: { id: jobId },
      data: { status: 'RUNNING', startedAt: new Date() },
    })

    // The four reads below are independent of one another, so issue
    // them in parallel instead of awaiting each sequentially.
    const [round, jurors, projects, existingAssignments] = await Promise.all([
      // Round constraints driving the assignment algorithm
      prisma.round.findUniqueOrThrow({
        where: { id: roundId },
        select: {
          name: true,
          requiredReviews: true,
          minAssignmentsPerJuror: true,
          maxAssignmentsPerJuror: true,
        },
      }),
      // Active jury members with expertise and current load in this round
      prisma.user.findMany({
        where: { role: 'JURY_MEMBER', status: 'ACTIVE' },
        select: {
          id: true,
          name: true,
          email: true,
          expertiseTags: true,
          maxAssignments: true,
          _count: {
            select: {
              assignments: { where: { roundId } },
            },
          },
        },
      }),
      // All projects in the round
      prisma.project.findMany({
        where: { roundId },
        select: {
          id: true,
          title: true,
          description: true,
          tags: true,
          teamName: true,
          _count: { select: { assignments: true } },
        },
      }),
      // Assignments already made, so existing pairings are respected
      prisma.assignment.findMany({
        where: { roundId },
        select: { userId: true, projectId: true },
      }),
    ])

    // Record batch totals so the UI can size its progress bar.
    // NOTE(review): BATCH_SIZE must stay in sync with
    // ASSIGNMENT_BATCH_SIZE in services/ai-assignment — consider
    // exporting that constant instead of duplicating the value here.
    const BATCH_SIZE = 15
    const totalBatches = Math.ceil(projects.length / BATCH_SIZE)
    await prisma.assignmentJob.update({
      where: { id: jobId },
      data: { totalProjects: projects.length, totalBatches },
    })

    // Persist per-batch progress; the service awaits this after each batch.
    const onProgress: AssignmentProgressCallback = async (progress) => {
      await prisma.assignmentJob.update({
        where: { id: jobId },
        data: {
          currentBatch: progress.currentBatch,
          processedCount: progress.processedCount,
        },
      })
    }

    const constraints = {
      requiredReviewsPerProject: round.requiredReviews,
      minAssignmentsPerJuror: round.minAssignmentsPerJuror,
      maxAssignmentsPerJuror: round.maxAssignmentsPerJuror,
      existingAssignments: existingAssignments.map((a) => ({
        jurorId: a.userId,
        projectId: a.projectId,
      })),
    }

    // Run the AI assignment, reporting progress after every batch.
    const result = await generateAIAssignments(
      jurors,
      projects,
      constraints,
      userId,
      roundId,
      onProgress
    )

    // Record final counters and mark the job COMPLETED.
    await prisma.assignmentJob.update({
      where: { id: jobId },
      data: {
        status: 'COMPLETED',
        completedAt: new Date(),
        processedCount: projects.length,
        suggestionsCount: result.suggestions.length,
        fallbackUsed: result.fallbackUsed ?? false,
      },
    })

    // Let admins know the suggestions are ready to review.
    await notifyAdmins({
      type: NotificationTypes.AI_SUGGESTIONS_READY,
      title: 'AI Assignment Suggestions Ready',
      message: `AI generated ${result.suggestions.length} assignment suggestions for ${round.name || 'round'}${result.fallbackUsed ? ' (using fallback algorithm)' : ''}.`,
      linkUrl: `/admin/rounds/${roundId}/assignments`,
      linkLabel: 'View Suggestions',
      priority: 'high',
      metadata: {
        roundId,
        jobId,
        projectCount: projects.length,
        suggestionsCount: result.suggestions.length,
        fallbackUsed: result.fallbackUsed,
      },
    })
  } catch (error) {
    console.error('[AI Assignment Job] Error:', error)
    // Best-effort: persist the failure on the job row. Guarded so a
    // secondary DB error cannot replace/mask the original failure above.
    try {
      await prisma.assignmentJob.update({
        where: { id: jobId },
        data: {
          status: 'FAILED',
          errorMessage: error instanceof Error ? error.message : 'Unknown error',
          completedAt: new Date(),
        },
      })
    } catch (updateError) {
      console.error('[AI Assignment Job] Failed to record job failure:', updateError)
    }
  }
}
export const assignmentRouter = router({
/**
* List assignments for a round (admin only)
@@ -851,4 +994,101 @@ export const assignmentRouter = router({
return { created: created.count }
}),
/**
 * Kick off a background AI assignment job for a round (admin only).
 *
 * Refuses to start when the round already has a PENDING/RUNNING job or
 * when OpenAI is not configured. Returns immediately with the new job
 * id; clients poll progress via getAIAssignmentJobStatus.
 */
startAIAssignmentJob: adminProcedure
  .input(z.object({ roundId: z.string() }))
  .mutation(async ({ ctx, input }) => {
    const { roundId } = input

    // Only one in-flight job per round is allowed.
    const activeJob = await ctx.prisma.assignmentJob.findFirst({
      where: {
        roundId,
        status: { in: ['PENDING', 'RUNNING'] },
      },
    })
    if (activeJob) {
      throw new TRPCError({
        code: 'BAD_REQUEST',
        message: 'An AI assignment job is already running for this round',
      })
    }

    // The AI path needs a configured OpenAI client.
    if (!isOpenAIConfigured()) {
      throw new TRPCError({
        code: 'BAD_REQUEST',
        message: 'OpenAI API is not configured',
      })
    }

    // Record the job, then hand off to the background runner without
    // awaiting it so the mutation can return immediately.
    const job = await ctx.prisma.assignmentJob.create({
      data: { roundId, status: 'PENDING' },
    })
    runAIAssignmentJob(job.id, roundId, ctx.user.id).catch(console.error)

    return { jobId: job.id }
  }),
/**
 * Get AI assignment job status (for polling).
 *
 * Returns the persisted progress counters for a job so the client can
 * render a progress bar while the background runner works.
 *
 * @throws TRPCError NOT_FOUND when the job id does not exist. (Previously
 *   used findUniqueOrThrow, which surfaced a raw Prisma error to the
 *   client as an internal server error instead of a clean 404.)
 */
getAIAssignmentJobStatus: protectedProcedure
  .input(z.object({ jobId: z.string() }))
  .query(async ({ ctx, input }) => {
    // NOTE(review): protectedProcedure lets any authenticated user poll
    // any job by id — confirm job ids are unguessable, or tighten this
    // to adminProcedure like the sibling endpoints.
    const job = await ctx.prisma.assignmentJob.findUnique({
      where: { id: input.jobId },
    })
    if (!job) {
      throw new TRPCError({
        code: 'NOT_FOUND',
        message: 'Assignment job not found',
      })
    }
    return {
      id: job.id,
      status: job.status,
      totalProjects: job.totalProjects,
      totalBatches: job.totalBatches,
      currentBatch: job.currentBatch,
      processedCount: job.processedCount,
      suggestionsCount: job.suggestionsCount,
      fallbackUsed: job.fallbackUsed,
      errorMessage: job.errorMessage,
      startedAt: job.startedAt,
      completedAt: job.completedAt,
    }
  }),
/**
 * Fetch the most recently created AI assignment job for a round, or
 * null when the round has never had one (admin only).
 */
getLatestAIAssignmentJob: adminProcedure
  .input(z.object({ roundId: z.string() }))
  .query(async ({ ctx, input }) => {
    const latest = await ctx.prisma.assignmentJob.findFirst({
      where: { roundId: input.roundId },
      orderBy: { createdAt: 'desc' },
    })
    if (!latest) {
      return null
    }
    // Expose only the progress/result fields the UI consumes.
    const {
      id,
      status,
      totalProjects,
      totalBatches,
      currentBatch,
      processedCount,
      suggestionsCount,
      fallbackUsed,
      errorMessage,
      startedAt,
      completedAt,
      createdAt,
    } = latest
    return {
      id,
      status,
      totalProjects,
      totalBatches,
      currentBatch,
      processedCount,
      suggestionsCount,
      fallbackUsed,
      errorMessage,
      startedAt,
      completedAt,
      createdAt,
    }
  }),
})

View File

@@ -86,6 +86,15 @@ interface AssignmentConstraints {
}>
}
/**
 * Callback invoked after each AI assignment batch completes, carrying
 * the counters a caller needs to persist or render progress. The caller
 * awaits it between batches, so implementations may perform async work
 * (e.g. database writes).
 */
export type AssignmentProgressCallback = (progress: {
  currentBatch: number
  totalBatches: number
  processedCount: number
  totalProjects: number
}) => Promise<void>
// ─── AI Processing ───────────────────────────────────────────────────────────
/**
@@ -247,7 +256,8 @@ export async function generateAIAssignments(
projects: ProjectForAssignment[],
constraints: AssignmentConstraints,
userId?: string,
entityId?: string
entityId?: string,
onProgress?: AssignmentProgressCallback
): Promise<AIAssignmentResult> {
// Truncate descriptions before anonymization
const truncatedProjects = projects.map((p) => ({
@@ -279,11 +289,14 @@ export async function generateAIAssignments(
let totalTokens = 0
// Process projects in batches
const totalBatches = Math.ceil(anonymizedData.projects.length / ASSIGNMENT_BATCH_SIZE)
for (let i = 0; i < anonymizedData.projects.length; i += ASSIGNMENT_BATCH_SIZE) {
const batchProjects = anonymizedData.projects.slice(i, i + ASSIGNMENT_BATCH_SIZE)
const batchMappings = anonymizedData.projectMappings.slice(i, i + ASSIGNMENT_BATCH_SIZE)
const currentBatch = Math.floor(i / ASSIGNMENT_BATCH_SIZE) + 1
console.log(`[AI Assignment] Processing batch ${Math.floor(i / ASSIGNMENT_BATCH_SIZE) + 1}/${Math.ceil(anonymizedData.projects.length / ASSIGNMENT_BATCH_SIZE)}`)
console.log(`[AI Assignment] Processing batch ${currentBatch}/${totalBatches}`)
const { suggestions, tokensUsed } = await processAssignmentBatch(
openai,
@@ -298,6 +311,17 @@ export async function generateAIAssignments(
allSuggestions.push(...suggestions)
totalTokens += tokensUsed
// Report progress after each batch
if (onProgress) {
const processedCount = Math.min((currentBatch) * ASSIGNMENT_BATCH_SIZE, projects.length)
await onProgress({
currentBatch,
totalBatches,
processedCount,
totalProjects: projects.length,
})
}
}
console.log(`[AI Assignment] Completed. Total suggestions: ${allSuggestions.length}, Total tokens: ${totalTokens}`)

View File

@@ -16,6 +16,7 @@ export const NotificationTypes = {
// Admin notifications
FILTERING_COMPLETE: 'FILTERING_COMPLETE',
FILTERING_FAILED: 'FILTERING_FAILED',
AI_SUGGESTIONS_READY: 'AI_SUGGESTIONS_READY',
NEW_APPLICATION: 'NEW_APPLICATION',
BULK_APPLICATIONS: 'BULK_APPLICATIONS',
DOCUMENTS_UPLOADED: 'DOCUMENTS_UPLOADED',