Apply full refactor updates plus pipeline/email UX confirmations
All checks were successful
Build and Push Docker Image / build (push) Successful in 10m33s
All checks were successful
Build and Push Docker Image / build (push) Successful in 10m33s
This commit is contained in:
@@ -1,99 +1,99 @@
|
||||
import { router } from '../trpc'
|
||||
import { programRouter } from './program'
|
||||
import { projectRouter } from './project'
|
||||
import { userRouter } from './user'
|
||||
import { assignmentRouter } from './assignment'
|
||||
import { evaluationRouter } from './evaluation'
|
||||
import { fileRouter } from './file'
|
||||
import { exportRouter } from './export'
|
||||
import { auditRouter } from './audit'
|
||||
import { settingsRouter } from './settings'
|
||||
import { gracePeriodRouter } from './gracePeriod'
|
||||
// Phase 2 routers
|
||||
import { learningResourceRouter } from './learningResource'
|
||||
import { partnerRouter } from './partner'
|
||||
import { notionImportRouter } from './notion-import'
|
||||
import { typeformImportRouter } from './typeform-import'
|
||||
// Phase 2B routers
|
||||
import { tagRouter } from './tag'
|
||||
import { applicantRouter } from './applicant'
|
||||
import { liveVotingRouter } from './live-voting'
|
||||
import { analyticsRouter } from './analytics'
|
||||
// Storage routers
|
||||
import { avatarRouter } from './avatar'
|
||||
import { logoRouter } from './logo'
|
||||
// Applicant system routers
|
||||
import { applicationRouter } from './application'
|
||||
import { mentorRouter } from './mentor'
|
||||
import { filteringRouter } from './filtering'
|
||||
import { specialAwardRouter } from './specialAward'
|
||||
import { notificationRouter } from './notification'
|
||||
// Feature expansion routers
|
||||
import { messageRouter } from './message'
|
||||
import { webhookRouter } from './webhook'
|
||||
import { projectPoolRouter } from './project-pool'
|
||||
import { wizardTemplateRouter } from './wizard-template'
|
||||
import { dashboardRouter } from './dashboard'
|
||||
// Round redesign Phase 2 routers
|
||||
import { pipelineRouter } from './pipeline'
|
||||
import { stageRouter } from './stage'
|
||||
import { routingRouter } from './routing'
|
||||
import { stageFilteringRouter } from './stageFiltering'
|
||||
import { stageAssignmentRouter } from './stageAssignment'
|
||||
import { cohortRouter } from './cohort'
|
||||
import { liveRouter } from './live'
|
||||
import { decisionRouter } from './decision'
|
||||
import { awardRouter } from './award'
|
||||
|
||||
/**
|
||||
* Root tRPC router that combines all domain routers
|
||||
*/
|
||||
export const appRouter = router({
|
||||
program: programRouter,
|
||||
project: projectRouter,
|
||||
user: userRouter,
|
||||
assignment: assignmentRouter,
|
||||
evaluation: evaluationRouter,
|
||||
file: fileRouter,
|
||||
export: exportRouter,
|
||||
audit: auditRouter,
|
||||
settings: settingsRouter,
|
||||
gracePeriod: gracePeriodRouter,
|
||||
// Phase 2 routers
|
||||
learningResource: learningResourceRouter,
|
||||
partner: partnerRouter,
|
||||
notionImport: notionImportRouter,
|
||||
typeformImport: typeformImportRouter,
|
||||
// Phase 2B routers
|
||||
tag: tagRouter,
|
||||
applicant: applicantRouter,
|
||||
liveVoting: liveVotingRouter,
|
||||
analytics: analyticsRouter,
|
||||
// Storage routers
|
||||
avatar: avatarRouter,
|
||||
logo: logoRouter,
|
||||
// Applicant system routers
|
||||
application: applicationRouter,
|
||||
mentor: mentorRouter,
|
||||
filtering: filteringRouter,
|
||||
specialAward: specialAwardRouter,
|
||||
notification: notificationRouter,
|
||||
// Feature expansion routers
|
||||
message: messageRouter,
|
||||
webhook: webhookRouter,
|
||||
projectPool: projectPoolRouter,
|
||||
wizardTemplate: wizardTemplateRouter,
|
||||
dashboard: dashboardRouter,
|
||||
// Round redesign Phase 2 routers
|
||||
pipeline: pipelineRouter,
|
||||
stage: stageRouter,
|
||||
routing: routingRouter,
|
||||
stageFiltering: stageFilteringRouter,
|
||||
stageAssignment: stageAssignmentRouter,
|
||||
cohort: cohortRouter,
|
||||
live: liveRouter,
|
||||
decision: decisionRouter,
|
||||
award: awardRouter,
|
||||
})
|
||||
|
||||
export type AppRouter = typeof appRouter
|
||||
import { router } from '../trpc'
|
||||
import { programRouter } from './program'
|
||||
import { projectRouter } from './project'
|
||||
import { userRouter } from './user'
|
||||
import { assignmentRouter } from './assignment'
|
||||
import { evaluationRouter } from './evaluation'
|
||||
import { fileRouter } from './file'
|
||||
import { exportRouter } from './export'
|
||||
import { auditRouter } from './audit'
|
||||
import { settingsRouter } from './settings'
|
||||
import { gracePeriodRouter } from './gracePeriod'
|
||||
// Phase 2 routers
|
||||
import { learningResourceRouter } from './learningResource'
|
||||
import { partnerRouter } from './partner'
|
||||
import { notionImportRouter } from './notion-import'
|
||||
import { typeformImportRouter } from './typeform-import'
|
||||
// Phase 2B routers
|
||||
import { tagRouter } from './tag'
|
||||
import { applicantRouter } from './applicant'
|
||||
import { liveVotingRouter } from './live-voting'
|
||||
import { analyticsRouter } from './analytics'
|
||||
// Storage routers
|
||||
import { avatarRouter } from './avatar'
|
||||
import { logoRouter } from './logo'
|
||||
// Applicant system routers
|
||||
import { applicationRouter } from './application'
|
||||
import { mentorRouter } from './mentor'
|
||||
import { filteringRouter } from './filtering'
|
||||
import { specialAwardRouter } from './specialAward'
|
||||
import { notificationRouter } from './notification'
|
||||
// Feature expansion routers
|
||||
import { messageRouter } from './message'
|
||||
import { webhookRouter } from './webhook'
|
||||
import { projectPoolRouter } from './project-pool'
|
||||
import { wizardTemplateRouter } from './wizard-template'
|
||||
import { dashboardRouter } from './dashboard'
|
||||
// Round redesign Phase 2 routers
|
||||
import { pipelineRouter } from './pipeline'
|
||||
import { stageRouter } from './stage'
|
||||
import { routingRouter } from './routing'
|
||||
import { stageFilteringRouter } from './stageFiltering'
|
||||
import { stageAssignmentRouter } from './stageAssignment'
|
||||
import { cohortRouter } from './cohort'
|
||||
import { liveRouter } from './live'
|
||||
import { decisionRouter } from './decision'
|
||||
import { awardRouter } from './award'
|
||||
|
||||
/**
|
||||
* Root tRPC router that combines all domain routers
|
||||
*/
|
||||
export const appRouter = router({
|
||||
program: programRouter,
|
||||
project: projectRouter,
|
||||
user: userRouter,
|
||||
assignment: assignmentRouter,
|
||||
evaluation: evaluationRouter,
|
||||
file: fileRouter,
|
||||
export: exportRouter,
|
||||
audit: auditRouter,
|
||||
settings: settingsRouter,
|
||||
gracePeriod: gracePeriodRouter,
|
||||
// Phase 2 routers
|
||||
learningResource: learningResourceRouter,
|
||||
partner: partnerRouter,
|
||||
notionImport: notionImportRouter,
|
||||
typeformImport: typeformImportRouter,
|
||||
// Phase 2B routers
|
||||
tag: tagRouter,
|
||||
applicant: applicantRouter,
|
||||
liveVoting: liveVotingRouter,
|
||||
analytics: analyticsRouter,
|
||||
// Storage routers
|
||||
avatar: avatarRouter,
|
||||
logo: logoRouter,
|
||||
// Applicant system routers
|
||||
application: applicationRouter,
|
||||
mentor: mentorRouter,
|
||||
filtering: filteringRouter,
|
||||
specialAward: specialAwardRouter,
|
||||
notification: notificationRouter,
|
||||
// Feature expansion routers
|
||||
message: messageRouter,
|
||||
webhook: webhookRouter,
|
||||
projectPool: projectPoolRouter,
|
||||
wizardTemplate: wizardTemplateRouter,
|
||||
dashboard: dashboardRouter,
|
||||
// Round redesign Phase 2 routers
|
||||
pipeline: pipelineRouter,
|
||||
stage: stageRouter,
|
||||
routing: routingRouter,
|
||||
stageFiltering: stageFilteringRouter,
|
||||
stageAssignment: stageAssignmentRouter,
|
||||
cohort: cohortRouter,
|
||||
live: liveRouter,
|
||||
decision: decisionRouter,
|
||||
award: awardRouter,
|
||||
})
|
||||
|
||||
export type AppRouter = typeof appRouter
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,339 +1,339 @@
|
||||
import { z } from 'zod'
|
||||
import { router, adminProcedure, superAdminProcedure } from '../trpc'
|
||||
import { logAudit } from '../utils/audit'
|
||||
|
||||
export const auditRouter = router({
|
||||
/**
|
||||
* List audit logs with filtering and pagination
|
||||
*/
|
||||
list: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
userId: z.string().optional(),
|
||||
action: z.string().optional(),
|
||||
entityType: z.string().optional(),
|
||||
entityId: z.string().optional(),
|
||||
startDate: z.date().optional(),
|
||||
endDate: z.date().optional(),
|
||||
page: z.number().int().min(1).default(1),
|
||||
perPage: z.number().int().min(1).max(100).default(50),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
const { userId, action, entityType, entityId, startDate, endDate, page, perPage } = input
|
||||
const skip = (page - 1) * perPage
|
||||
|
||||
const where: Record<string, unknown> = {}
|
||||
|
||||
if (userId) where.userId = userId
|
||||
if (action) where.action = action
|
||||
if (entityType) where.entityType = entityType
|
||||
if (entityId) where.entityId = entityId
|
||||
if (startDate || endDate) {
|
||||
where.timestamp = {}
|
||||
if (startDate) (where.timestamp as Record<string, Date>).gte = startDate
|
||||
if (endDate) (where.timestamp as Record<string, Date>).lte = endDate
|
||||
}
|
||||
|
||||
const [logs, total] = await Promise.all([
|
||||
ctx.prisma.auditLog.findMany({
|
||||
where,
|
||||
skip,
|
||||
take: perPage,
|
||||
orderBy: { timestamp: 'desc' },
|
||||
include: {
|
||||
user: { select: { name: true, email: true } },
|
||||
},
|
||||
}),
|
||||
ctx.prisma.auditLog.count({ where }),
|
||||
])
|
||||
|
||||
return {
|
||||
logs,
|
||||
total,
|
||||
page,
|
||||
perPage,
|
||||
totalPages: Math.ceil(total / perPage),
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get audit logs for a specific entity
|
||||
*/
|
||||
getByEntity: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
entityType: z.string(),
|
||||
entityId: z.string(),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
return ctx.prisma.auditLog.findMany({
|
||||
where: {
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
},
|
||||
orderBy: { timestamp: 'desc' },
|
||||
include: {
|
||||
user: { select: { name: true, email: true } },
|
||||
},
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get audit logs for a specific user
|
||||
*/
|
||||
getByUser: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
userId: z.string(),
|
||||
limit: z.number().int().min(1).max(100).default(50),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
return ctx.prisma.auditLog.findMany({
|
||||
where: { userId: input.userId },
|
||||
orderBy: { timestamp: 'desc' },
|
||||
take: input.limit,
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get recent activity summary
|
||||
*/
|
||||
getRecentActivity: adminProcedure
|
||||
.input(z.object({ limit: z.number().int().min(1).max(50).default(20) }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
return ctx.prisma.auditLog.findMany({
|
||||
orderBy: { timestamp: 'desc' },
|
||||
take: input.limit,
|
||||
include: {
|
||||
user: { select: { name: true, email: true } },
|
||||
},
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get action statistics
|
||||
*/
|
||||
getStats: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
startDate: z.date().optional(),
|
||||
endDate: z.date().optional(),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
const where: Record<string, unknown> = {}
|
||||
|
||||
if (input.startDate || input.endDate) {
|
||||
where.timestamp = {}
|
||||
if (input.startDate) (where.timestamp as Record<string, Date>).gte = input.startDate
|
||||
if (input.endDate) (where.timestamp as Record<string, Date>).lte = input.endDate
|
||||
}
|
||||
|
||||
const [byAction, byEntity, byUser] = await Promise.all([
|
||||
ctx.prisma.auditLog.groupBy({
|
||||
by: ['action'],
|
||||
where,
|
||||
_count: true,
|
||||
orderBy: { _count: { action: 'desc' } },
|
||||
}),
|
||||
ctx.prisma.auditLog.groupBy({
|
||||
by: ['entityType'],
|
||||
where,
|
||||
_count: true,
|
||||
orderBy: { _count: { entityType: 'desc' } },
|
||||
}),
|
||||
ctx.prisma.auditLog.groupBy({
|
||||
by: ['userId'],
|
||||
where,
|
||||
_count: true,
|
||||
orderBy: { _count: { userId: 'desc' } },
|
||||
take: 10,
|
||||
}),
|
||||
])
|
||||
|
||||
// Get user names for top users
|
||||
const userIds = byUser
|
||||
.map((u) => u.userId)
|
||||
.filter((id): id is string => id !== null)
|
||||
|
||||
const users = await ctx.prisma.user.findMany({
|
||||
where: { id: { in: userIds } },
|
||||
select: { id: true, name: true, email: true },
|
||||
})
|
||||
|
||||
const userMap = new Map(users.map((u) => [u.id, u]))
|
||||
|
||||
return {
|
||||
byAction: byAction.map((a) => ({
|
||||
action: a.action,
|
||||
count: a._count,
|
||||
})),
|
||||
byEntity: byEntity.map((e) => ({
|
||||
entityType: e.entityType,
|
||||
count: e._count,
|
||||
})),
|
||||
byUser: byUser.map((u) => ({
|
||||
userId: u.userId,
|
||||
user: u.userId ? userMap.get(u.userId) : null,
|
||||
count: u._count,
|
||||
})),
|
||||
}
|
||||
}),
|
||||
|
||||
// =========================================================================
|
||||
// Anomaly Detection & Session Tracking (F14)
|
||||
// =========================================================================
|
||||
|
||||
/**
|
||||
* Detect anomalous activity patterns within a time window
|
||||
*/
|
||||
getAnomalies: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
timeWindowMinutes: z.number().int().min(1).max(1440).default(60),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
// Load anomaly rules from settings
|
||||
const rulesSetting = await ctx.prisma.systemSettings.findUnique({
|
||||
where: { key: 'audit_anomaly_rules' },
|
||||
})
|
||||
|
||||
const rules = rulesSetting?.value
|
||||
? JSON.parse(rulesSetting.value) as {
|
||||
rapid_changes_per_minute?: number
|
||||
bulk_operations_threshold?: number
|
||||
}
|
||||
: { rapid_changes_per_minute: 30, bulk_operations_threshold: 50 }
|
||||
|
||||
const rapidThreshold = rules.rapid_changes_per_minute || 30
|
||||
const bulkThreshold = rules.bulk_operations_threshold || 50
|
||||
|
||||
const windowStart = new Date()
|
||||
windowStart.setMinutes(windowStart.getMinutes() - input.timeWindowMinutes)
|
||||
|
||||
// Get action counts per user in the time window
|
||||
const userActivity = await ctx.prisma.auditLog.groupBy({
|
||||
by: ['userId'],
|
||||
where: {
|
||||
timestamp: { gte: windowStart },
|
||||
userId: { not: null },
|
||||
},
|
||||
_count: true,
|
||||
})
|
||||
|
||||
// Filter for users exceeding thresholds
|
||||
const suspiciousUserIds = userActivity
|
||||
.filter((u) => u._count >= bulkThreshold)
|
||||
.map((u) => u.userId)
|
||||
.filter((id): id is string => id !== null)
|
||||
|
||||
// Get user details
|
||||
const users = suspiciousUserIds.length > 0
|
||||
? await ctx.prisma.user.findMany({
|
||||
where: { id: { in: suspiciousUserIds } },
|
||||
select: { id: true, name: true, email: true, role: true },
|
||||
})
|
||||
: []
|
||||
|
||||
const userMap = new Map(users.map((u) => [u.id, u]))
|
||||
|
||||
const anomalies = userActivity
|
||||
.filter((u) => u._count >= bulkThreshold)
|
||||
.map((u) => ({
|
||||
userId: u.userId,
|
||||
user: u.userId ? userMap.get(u.userId) || null : null,
|
||||
actionCount: u._count,
|
||||
timeWindowMinutes: input.timeWindowMinutes,
|
||||
actionsPerMinute: u._count / input.timeWindowMinutes,
|
||||
isRapid: (u._count / input.timeWindowMinutes) >= rapidThreshold,
|
||||
isBulk: u._count >= bulkThreshold,
|
||||
}))
|
||||
.sort((a, b) => b.actionCount - a.actionCount)
|
||||
|
||||
return {
|
||||
anomalies,
|
||||
thresholds: {
|
||||
rapidChangesPerMinute: rapidThreshold,
|
||||
bulkOperationsThreshold: bulkThreshold,
|
||||
},
|
||||
timeWindow: {
|
||||
start: windowStart,
|
||||
end: new Date(),
|
||||
minutes: input.timeWindowMinutes,
|
||||
},
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get all audit logs for a specific session
|
||||
*/
|
||||
getSessionTimeline: adminProcedure
|
||||
.input(z.object({ sessionId: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
const logs = await ctx.prisma.auditLog.findMany({
|
||||
where: { sessionId: input.sessionId },
|
||||
orderBy: { timestamp: 'asc' },
|
||||
include: {
|
||||
user: { select: { id: true, name: true, email: true } },
|
||||
},
|
||||
})
|
||||
|
||||
return logs
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get current audit retention configuration
|
||||
*/
|
||||
getRetentionConfig: adminProcedure.query(async ({ ctx }) => {
|
||||
const setting = await ctx.prisma.systemSettings.findUnique({
|
||||
where: { key: 'audit_retention_days' },
|
||||
})
|
||||
|
||||
return {
|
||||
retentionDays: setting?.value ? parseInt(setting.value, 10) : 365,
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Update audit retention configuration (super admin only)
|
||||
*/
|
||||
updateRetentionConfig: superAdminProcedure
|
||||
.input(z.object({ retentionDays: z.number().int().min(30) }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const setting = await ctx.prisma.systemSettings.upsert({
|
||||
where: { key: 'audit_retention_days' },
|
||||
update: {
|
||||
value: input.retentionDays.toString(),
|
||||
updatedBy: ctx.user.id,
|
||||
},
|
||||
create: {
|
||||
key: 'audit_retention_days',
|
||||
value: input.retentionDays.toString(),
|
||||
category: 'AUDIT_CONFIG',
|
||||
updatedBy: ctx.user.id,
|
||||
},
|
||||
})
|
||||
|
||||
// Audit log
|
||||
try {
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'UPDATE_RETENTION_CONFIG',
|
||||
entityType: 'SystemSettings',
|
||||
entityId: setting.id,
|
||||
detailsJson: { retentionDays: input.retentionDays },
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
} catch {
|
||||
// Never throw on audit failure
|
||||
}
|
||||
|
||||
return { retentionDays: input.retentionDays }
|
||||
}),
|
||||
})
|
||||
import { z } from 'zod'
|
||||
import { router, adminProcedure, superAdminProcedure } from '../trpc'
|
||||
import { logAudit } from '../utils/audit'
|
||||
|
||||
export const auditRouter = router({
|
||||
/**
|
||||
* List audit logs with filtering and pagination
|
||||
*/
|
||||
list: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
userId: z.string().optional(),
|
||||
action: z.string().optional(),
|
||||
entityType: z.string().optional(),
|
||||
entityId: z.string().optional(),
|
||||
startDate: z.date().optional(),
|
||||
endDate: z.date().optional(),
|
||||
page: z.number().int().min(1).default(1),
|
||||
perPage: z.number().int().min(1).max(100).default(50),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
const { userId, action, entityType, entityId, startDate, endDate, page, perPage } = input
|
||||
const skip = (page - 1) * perPage
|
||||
|
||||
const where: Record<string, unknown> = {}
|
||||
|
||||
if (userId) where.userId = userId
|
||||
if (action) where.action = action
|
||||
if (entityType) where.entityType = entityType
|
||||
if (entityId) where.entityId = entityId
|
||||
if (startDate || endDate) {
|
||||
where.timestamp = {}
|
||||
if (startDate) (where.timestamp as Record<string, Date>).gte = startDate
|
||||
if (endDate) (where.timestamp as Record<string, Date>).lte = endDate
|
||||
}
|
||||
|
||||
const [logs, total] = await Promise.all([
|
||||
ctx.prisma.auditLog.findMany({
|
||||
where,
|
||||
skip,
|
||||
take: perPage,
|
||||
orderBy: { timestamp: 'desc' },
|
||||
include: {
|
||||
user: { select: { name: true, email: true } },
|
||||
},
|
||||
}),
|
||||
ctx.prisma.auditLog.count({ where }),
|
||||
])
|
||||
|
||||
return {
|
||||
logs,
|
||||
total,
|
||||
page,
|
||||
perPage,
|
||||
totalPages: Math.ceil(total / perPage),
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get audit logs for a specific entity
|
||||
*/
|
||||
getByEntity: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
entityType: z.string(),
|
||||
entityId: z.string(),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
return ctx.prisma.auditLog.findMany({
|
||||
where: {
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
},
|
||||
orderBy: { timestamp: 'desc' },
|
||||
include: {
|
||||
user: { select: { name: true, email: true } },
|
||||
},
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get audit logs for a specific user
|
||||
*/
|
||||
getByUser: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
userId: z.string(),
|
||||
limit: z.number().int().min(1).max(100).default(50),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
return ctx.prisma.auditLog.findMany({
|
||||
where: { userId: input.userId },
|
||||
orderBy: { timestamp: 'desc' },
|
||||
take: input.limit,
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get recent activity summary
|
||||
*/
|
||||
getRecentActivity: adminProcedure
|
||||
.input(z.object({ limit: z.number().int().min(1).max(50).default(20) }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
return ctx.prisma.auditLog.findMany({
|
||||
orderBy: { timestamp: 'desc' },
|
||||
take: input.limit,
|
||||
include: {
|
||||
user: { select: { name: true, email: true } },
|
||||
},
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get action statistics
|
||||
*/
|
||||
getStats: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
startDate: z.date().optional(),
|
||||
endDate: z.date().optional(),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
const where: Record<string, unknown> = {}
|
||||
|
||||
if (input.startDate || input.endDate) {
|
||||
where.timestamp = {}
|
||||
if (input.startDate) (where.timestamp as Record<string, Date>).gte = input.startDate
|
||||
if (input.endDate) (where.timestamp as Record<string, Date>).lte = input.endDate
|
||||
}
|
||||
|
||||
const [byAction, byEntity, byUser] = await Promise.all([
|
||||
ctx.prisma.auditLog.groupBy({
|
||||
by: ['action'],
|
||||
where,
|
||||
_count: true,
|
||||
orderBy: { _count: { action: 'desc' } },
|
||||
}),
|
||||
ctx.prisma.auditLog.groupBy({
|
||||
by: ['entityType'],
|
||||
where,
|
||||
_count: true,
|
||||
orderBy: { _count: { entityType: 'desc' } },
|
||||
}),
|
||||
ctx.prisma.auditLog.groupBy({
|
||||
by: ['userId'],
|
||||
where,
|
||||
_count: true,
|
||||
orderBy: { _count: { userId: 'desc' } },
|
||||
take: 10,
|
||||
}),
|
||||
])
|
||||
|
||||
// Get user names for top users
|
||||
const userIds = byUser
|
||||
.map((u) => u.userId)
|
||||
.filter((id): id is string => id !== null)
|
||||
|
||||
const users = await ctx.prisma.user.findMany({
|
||||
where: { id: { in: userIds } },
|
||||
select: { id: true, name: true, email: true },
|
||||
})
|
||||
|
||||
const userMap = new Map(users.map((u) => [u.id, u]))
|
||||
|
||||
return {
|
||||
byAction: byAction.map((a) => ({
|
||||
action: a.action,
|
||||
count: a._count,
|
||||
})),
|
||||
byEntity: byEntity.map((e) => ({
|
||||
entityType: e.entityType,
|
||||
count: e._count,
|
||||
})),
|
||||
byUser: byUser.map((u) => ({
|
||||
userId: u.userId,
|
||||
user: u.userId ? userMap.get(u.userId) : null,
|
||||
count: u._count,
|
||||
})),
|
||||
}
|
||||
}),
|
||||
|
||||
// =========================================================================
|
||||
// Anomaly Detection & Session Tracking (F14)
|
||||
// =========================================================================
|
||||
|
||||
/**
|
||||
* Detect anomalous activity patterns within a time window
|
||||
*/
|
||||
getAnomalies: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
timeWindowMinutes: z.number().int().min(1).max(1440).default(60),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
// Load anomaly rules from settings
|
||||
const rulesSetting = await ctx.prisma.systemSettings.findUnique({
|
||||
where: { key: 'audit_anomaly_rules' },
|
||||
})
|
||||
|
||||
const rules = rulesSetting?.value
|
||||
? JSON.parse(rulesSetting.value) as {
|
||||
rapid_changes_per_minute?: number
|
||||
bulk_operations_threshold?: number
|
||||
}
|
||||
: { rapid_changes_per_minute: 30, bulk_operations_threshold: 50 }
|
||||
|
||||
const rapidThreshold = rules.rapid_changes_per_minute || 30
|
||||
const bulkThreshold = rules.bulk_operations_threshold || 50
|
||||
|
||||
const windowStart = new Date()
|
||||
windowStart.setMinutes(windowStart.getMinutes() - input.timeWindowMinutes)
|
||||
|
||||
// Get action counts per user in the time window
|
||||
const userActivity = await ctx.prisma.auditLog.groupBy({
|
||||
by: ['userId'],
|
||||
where: {
|
||||
timestamp: { gte: windowStart },
|
||||
userId: { not: null },
|
||||
},
|
||||
_count: true,
|
||||
})
|
||||
|
||||
// Filter for users exceeding thresholds
|
||||
const suspiciousUserIds = userActivity
|
||||
.filter((u) => u._count >= bulkThreshold)
|
||||
.map((u) => u.userId)
|
||||
.filter((id): id is string => id !== null)
|
||||
|
||||
// Get user details
|
||||
const users = suspiciousUserIds.length > 0
|
||||
? await ctx.prisma.user.findMany({
|
||||
where: { id: { in: suspiciousUserIds } },
|
||||
select: { id: true, name: true, email: true, role: true },
|
||||
})
|
||||
: []
|
||||
|
||||
const userMap = new Map(users.map((u) => [u.id, u]))
|
||||
|
||||
const anomalies = userActivity
|
||||
.filter((u) => u._count >= bulkThreshold)
|
||||
.map((u) => ({
|
||||
userId: u.userId,
|
||||
user: u.userId ? userMap.get(u.userId) || null : null,
|
||||
actionCount: u._count,
|
||||
timeWindowMinutes: input.timeWindowMinutes,
|
||||
actionsPerMinute: u._count / input.timeWindowMinutes,
|
||||
isRapid: (u._count / input.timeWindowMinutes) >= rapidThreshold,
|
||||
isBulk: u._count >= bulkThreshold,
|
||||
}))
|
||||
.sort((a, b) => b.actionCount - a.actionCount)
|
||||
|
||||
return {
|
||||
anomalies,
|
||||
thresholds: {
|
||||
rapidChangesPerMinute: rapidThreshold,
|
||||
bulkOperationsThreshold: bulkThreshold,
|
||||
},
|
||||
timeWindow: {
|
||||
start: windowStart,
|
||||
end: new Date(),
|
||||
minutes: input.timeWindowMinutes,
|
||||
},
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get all audit logs for a specific session
|
||||
*/
|
||||
getSessionTimeline: adminProcedure
|
||||
.input(z.object({ sessionId: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
const logs = await ctx.prisma.auditLog.findMany({
|
||||
where: { sessionId: input.sessionId },
|
||||
orderBy: { timestamp: 'asc' },
|
||||
include: {
|
||||
user: { select: { id: true, name: true, email: true } },
|
||||
},
|
||||
})
|
||||
|
||||
return logs
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get current audit retention configuration
|
||||
*/
|
||||
getRetentionConfig: adminProcedure.query(async ({ ctx }) => {
|
||||
const setting = await ctx.prisma.systemSettings.findUnique({
|
||||
where: { key: 'audit_retention_days' },
|
||||
})
|
||||
|
||||
return {
|
||||
retentionDays: setting?.value ? parseInt(setting.value, 10) : 365,
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Update audit retention configuration (super admin only)
|
||||
*/
|
||||
updateRetentionConfig: superAdminProcedure
|
||||
.input(z.object({ retentionDays: z.number().int().min(30) }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const setting = await ctx.prisma.systemSettings.upsert({
|
||||
where: { key: 'audit_retention_days' },
|
||||
update: {
|
||||
value: input.retentionDays.toString(),
|
||||
updatedBy: ctx.user.id,
|
||||
},
|
||||
create: {
|
||||
key: 'audit_retention_days',
|
||||
value: input.retentionDays.toString(),
|
||||
category: 'AUDIT_CONFIG',
|
||||
updatedBy: ctx.user.id,
|
||||
},
|
||||
})
|
||||
|
||||
// Audit log
|
||||
try {
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'UPDATE_RETENTION_CONFIG',
|
||||
entityType: 'SystemSettings',
|
||||
entityId: setting.id,
|
||||
detailsJson: { retentionDays: input.retentionDays },
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
} catch {
|
||||
// Never throw on audit failure
|
||||
}
|
||||
|
||||
return { retentionDays: input.retentionDays }
|
||||
}),
|
||||
})
|
||||
|
||||
@@ -1,111 +1,111 @@
|
||||
import { z } from 'zod'
|
||||
import { router, protectedProcedure } from '../trpc'
|
||||
import { generateAvatarKey, type StorageProviderType } from '@/lib/storage'
|
||||
import {
|
||||
getImageUploadUrl,
|
||||
confirmImageUpload,
|
||||
getImageUrl,
|
||||
deleteImage,
|
||||
type ImageUploadConfig,
|
||||
} from '../utils/image-upload'
|
||||
|
||||
type AvatarSelect = {
|
||||
profileImageKey: string | null
|
||||
profileImageProvider: string | null
|
||||
}
|
||||
|
||||
const avatarConfig: ImageUploadConfig<AvatarSelect> = {
|
||||
label: 'avatar',
|
||||
generateKey: generateAvatarKey,
|
||||
findCurrent: (prisma, entityId) =>
|
||||
prisma.user.findUnique({
|
||||
where: { id: entityId },
|
||||
select: { profileImageKey: true, profileImageProvider: true },
|
||||
}),
|
||||
getImageKey: (record) => record.profileImageKey,
|
||||
getProviderType: (record) =>
|
||||
(record.profileImageProvider as StorageProviderType) || 's3',
|
||||
setImage: (prisma, entityId, key, providerType) =>
|
||||
prisma.user.update({
|
||||
where: { id: entityId },
|
||||
data: { profileImageKey: key, profileImageProvider: providerType },
|
||||
}),
|
||||
clearImage: (prisma, entityId) =>
|
||||
prisma.user.update({
|
||||
where: { id: entityId },
|
||||
data: { profileImageKey: null, profileImageProvider: null },
|
||||
}),
|
||||
auditEntityType: 'User',
|
||||
auditFieldName: 'profileImageKey',
|
||||
}
|
||||
|
||||
export const avatarRouter = router({
|
||||
/**
|
||||
* Get a pre-signed URL for uploading an avatar
|
||||
*/
|
||||
getUploadUrl: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
fileName: z.string(),
|
||||
contentType: z.string(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
return getImageUploadUrl(
|
||||
ctx.user.id,
|
||||
input.fileName,
|
||||
input.contentType,
|
||||
generateAvatarKey
|
||||
)
|
||||
}),
|
||||
|
||||
/**
|
||||
* Confirm avatar upload and update user profile
|
||||
*/
|
||||
confirmUpload: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
key: z.string(),
|
||||
providerType: z.enum(['s3', 'local']),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const userId = ctx.user.id
|
||||
|
||||
await confirmImageUpload(ctx.prisma, avatarConfig, userId, input.key, input.providerType, {
|
||||
userId: ctx.user.id,
|
||||
ip: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
// Return the updated user fields to match original API contract
|
||||
const user = await ctx.prisma.user.findUnique({
|
||||
where: { id: userId },
|
||||
select: {
|
||||
id: true,
|
||||
profileImageKey: true,
|
||||
profileImageProvider: true,
|
||||
},
|
||||
})
|
||||
|
||||
return user
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get the current user's avatar URL
|
||||
*/
|
||||
getUrl: protectedProcedure.query(async ({ ctx }) => {
|
||||
return getImageUrl(ctx.prisma, avatarConfig, ctx.user.id)
|
||||
}),
|
||||
|
||||
/**
|
||||
* Delete the current user's avatar
|
||||
*/
|
||||
delete: protectedProcedure.mutation(async ({ ctx }) => {
|
||||
return deleteImage(ctx.prisma, avatarConfig, ctx.user.id, {
|
||||
userId: ctx.user.id,
|
||||
ip: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
}),
|
||||
})
|
||||
import { z } from 'zod'
|
||||
import { router, protectedProcedure } from '../trpc'
|
||||
import { generateAvatarKey, type StorageProviderType } from '@/lib/storage'
|
||||
import {
|
||||
getImageUploadUrl,
|
||||
confirmImageUpload,
|
||||
getImageUrl,
|
||||
deleteImage,
|
||||
type ImageUploadConfig,
|
||||
} from '../utils/image-upload'
|
||||
|
||||
type AvatarSelect = {
|
||||
profileImageKey: string | null
|
||||
profileImageProvider: string | null
|
||||
}
|
||||
|
||||
const avatarConfig: ImageUploadConfig<AvatarSelect> = {
|
||||
label: 'avatar',
|
||||
generateKey: generateAvatarKey,
|
||||
findCurrent: (prisma, entityId) =>
|
||||
prisma.user.findUnique({
|
||||
where: { id: entityId },
|
||||
select: { profileImageKey: true, profileImageProvider: true },
|
||||
}),
|
||||
getImageKey: (record) => record.profileImageKey,
|
||||
getProviderType: (record) =>
|
||||
(record.profileImageProvider as StorageProviderType) || 's3',
|
||||
setImage: (prisma, entityId, key, providerType) =>
|
||||
prisma.user.update({
|
||||
where: { id: entityId },
|
||||
data: { profileImageKey: key, profileImageProvider: providerType },
|
||||
}),
|
||||
clearImage: (prisma, entityId) =>
|
||||
prisma.user.update({
|
||||
where: { id: entityId },
|
||||
data: { profileImageKey: null, profileImageProvider: null },
|
||||
}),
|
||||
auditEntityType: 'User',
|
||||
auditFieldName: 'profileImageKey',
|
||||
}
|
||||
|
||||
export const avatarRouter = router({
|
||||
/**
|
||||
* Get a pre-signed URL for uploading an avatar
|
||||
*/
|
||||
getUploadUrl: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
fileName: z.string(),
|
||||
contentType: z.string(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
return getImageUploadUrl(
|
||||
ctx.user.id,
|
||||
input.fileName,
|
||||
input.contentType,
|
||||
generateAvatarKey
|
||||
)
|
||||
}),
|
||||
|
||||
/**
|
||||
* Confirm avatar upload and update user profile
|
||||
*/
|
||||
confirmUpload: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
key: z.string(),
|
||||
providerType: z.enum(['s3', 'local']),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const userId = ctx.user.id
|
||||
|
||||
await confirmImageUpload(ctx.prisma, avatarConfig, userId, input.key, input.providerType, {
|
||||
userId: ctx.user.id,
|
||||
ip: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
// Return the updated user fields to match original API contract
|
||||
const user = await ctx.prisma.user.findUnique({
|
||||
where: { id: userId },
|
||||
select: {
|
||||
id: true,
|
||||
profileImageKey: true,
|
||||
profileImageProvider: true,
|
||||
},
|
||||
})
|
||||
|
||||
return user
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get the current user's avatar URL
|
||||
*/
|
||||
getUrl: protectedProcedure.query(async ({ ctx }) => {
|
||||
return getImageUrl(ctx.prisma, avatarConfig, ctx.user.id)
|
||||
}),
|
||||
|
||||
/**
|
||||
* Delete the current user's avatar
|
||||
*/
|
||||
delete: protectedProcedure.mutation(async ({ ctx }) => {
|
||||
return deleteImage(ctx.prisma, avatarConfig, ctx.user.id, {
|
||||
userId: ctx.user.id,
|
||||
ip: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
}),
|
||||
})
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,331 +1,331 @@
|
||||
import { z } from 'zod'
|
||||
import { TRPCError } from '@trpc/server'
|
||||
import { router, protectedProcedure, adminProcedure } from '../trpc'
|
||||
import { logAudit } from '@/server/utils/audit'
|
||||
|
||||
export const cohortRouter = router({
|
||||
/**
|
||||
* Create a new cohort within a stage
|
||||
*/
|
||||
create: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
stageId: z.string(),
|
||||
name: z.string().min(1).max(255),
|
||||
votingMode: z.enum(['simple', 'criteria', 'ranked']).default('simple'),
|
||||
windowOpenAt: z.date().optional(),
|
||||
windowCloseAt: z.date().optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
// Verify stage exists and is of a type that supports cohorts
|
||||
const stage = await ctx.prisma.stage.findUniqueOrThrow({
|
||||
where: { id: input.stageId },
|
||||
})
|
||||
|
||||
if (stage.stageType !== 'LIVE_FINAL' && stage.stageType !== 'SELECTION') {
|
||||
throw new TRPCError({
|
||||
code: 'BAD_REQUEST',
|
||||
message: 'Cohorts can only be created in LIVE_FINAL or SELECTION stages',
|
||||
})
|
||||
}
|
||||
|
||||
// Validate window dates
|
||||
if (input.windowOpenAt && input.windowCloseAt) {
|
||||
if (input.windowCloseAt <= input.windowOpenAt) {
|
||||
throw new TRPCError({
|
||||
code: 'BAD_REQUEST',
|
||||
message: 'Window close date must be after open date',
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const cohort = await ctx.prisma.$transaction(async (tx) => {
|
||||
const created = await tx.cohort.create({
|
||||
data: {
|
||||
stageId: input.stageId,
|
||||
name: input.name,
|
||||
votingMode: input.votingMode,
|
||||
windowOpenAt: input.windowOpenAt ?? null,
|
||||
windowCloseAt: input.windowCloseAt ?? null,
|
||||
},
|
||||
})
|
||||
|
||||
await logAudit({
|
||||
prisma: tx,
|
||||
userId: ctx.user.id,
|
||||
action: 'CREATE',
|
||||
entityType: 'Cohort',
|
||||
entityId: created.id,
|
||||
detailsJson: {
|
||||
stageId: input.stageId,
|
||||
name: input.name,
|
||||
votingMode: input.votingMode,
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return created
|
||||
})
|
||||
|
||||
return cohort
|
||||
}),
|
||||
|
||||
/**
|
||||
* Assign projects to a cohort
|
||||
*/
|
||||
assignProjects: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
cohortId: z.string(),
|
||||
projectIds: z.array(z.string()).min(1).max(200),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
// Verify cohort exists
|
||||
const cohort = await ctx.prisma.cohort.findUniqueOrThrow({
|
||||
where: { id: input.cohortId },
|
||||
})
|
||||
|
||||
if (cohort.isOpen) {
|
||||
throw new TRPCError({
|
||||
code: 'PRECONDITION_FAILED',
|
||||
message: 'Cannot modify projects while voting is open',
|
||||
})
|
||||
}
|
||||
|
||||
// Get current max sortOrder
|
||||
const maxOrder = await ctx.prisma.cohortProject.aggregate({
|
||||
where: { cohortId: input.cohortId },
|
||||
_max: { sortOrder: true },
|
||||
})
|
||||
let nextOrder = (maxOrder._max.sortOrder ?? -1) + 1
|
||||
|
||||
// Create cohort project entries (skip duplicates)
|
||||
const created = await ctx.prisma.cohortProject.createMany({
|
||||
data: input.projectIds.map((projectId) => ({
|
||||
cohortId: input.cohortId,
|
||||
projectId,
|
||||
sortOrder: nextOrder++,
|
||||
})),
|
||||
skipDuplicates: true,
|
||||
})
|
||||
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'COHORT_PROJECTS_ASSIGNED',
|
||||
entityType: 'Cohort',
|
||||
entityId: input.cohortId,
|
||||
detailsJson: {
|
||||
projectCount: created.count,
|
||||
requested: input.projectIds.length,
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return { assigned: created.count, requested: input.projectIds.length }
|
||||
}),
|
||||
|
||||
/**
|
||||
* Open voting for a cohort
|
||||
*/
|
||||
openVoting: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
cohortId: z.string(),
|
||||
durationMinutes: z.number().int().min(1).max(1440).optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const cohort = await ctx.prisma.cohort.findUniqueOrThrow({
|
||||
where: { id: input.cohortId },
|
||||
include: { _count: { select: { projects: true } } },
|
||||
})
|
||||
|
||||
if (cohort.isOpen) {
|
||||
throw new TRPCError({
|
||||
code: 'CONFLICT',
|
||||
message: 'Voting is already open for this cohort',
|
||||
})
|
||||
}
|
||||
|
||||
if (cohort._count.projects === 0) {
|
||||
throw new TRPCError({
|
||||
code: 'PRECONDITION_FAILED',
|
||||
message: 'Cohort must have at least one project before opening voting',
|
||||
})
|
||||
}
|
||||
|
||||
const now = new Date()
|
||||
const closeAt = input.durationMinutes
|
||||
? new Date(now.getTime() + input.durationMinutes * 60 * 1000)
|
||||
: cohort.windowCloseAt
|
||||
|
||||
const updated = await ctx.prisma.$transaction(async (tx) => {
|
||||
const result = await tx.cohort.update({
|
||||
where: { id: input.cohortId },
|
||||
data: {
|
||||
isOpen: true,
|
||||
windowOpenAt: now,
|
||||
windowCloseAt: closeAt,
|
||||
},
|
||||
})
|
||||
|
||||
await logAudit({
|
||||
prisma: tx,
|
||||
userId: ctx.user.id,
|
||||
action: 'COHORT_VOTING_OPENED',
|
||||
entityType: 'Cohort',
|
||||
entityId: input.cohortId,
|
||||
detailsJson: {
|
||||
openedAt: now.toISOString(),
|
||||
closesAt: closeAt?.toISOString() ?? null,
|
||||
projectCount: cohort._count.projects,
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return result
|
||||
})
|
||||
|
||||
return updated
|
||||
}),
|
||||
|
||||
/**
|
||||
* Close voting for a cohort
|
||||
*/
|
||||
closeVoting: adminProcedure
|
||||
.input(z.object({ cohortId: z.string() }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const cohort = await ctx.prisma.cohort.findUniqueOrThrow({
|
||||
where: { id: input.cohortId },
|
||||
})
|
||||
|
||||
if (!cohort.isOpen) {
|
||||
throw new TRPCError({
|
||||
code: 'PRECONDITION_FAILED',
|
||||
message: 'Voting is not currently open for this cohort',
|
||||
})
|
||||
}
|
||||
|
||||
const now = new Date()
|
||||
|
||||
const updated = await ctx.prisma.$transaction(async (tx) => {
|
||||
const result = await tx.cohort.update({
|
||||
where: { id: input.cohortId },
|
||||
data: {
|
||||
isOpen: false,
|
||||
windowCloseAt: now,
|
||||
},
|
||||
})
|
||||
|
||||
await logAudit({
|
||||
prisma: tx,
|
||||
userId: ctx.user.id,
|
||||
action: 'COHORT_VOTING_CLOSED',
|
||||
entityType: 'Cohort',
|
||||
entityId: input.cohortId,
|
||||
detailsJson: {
|
||||
closedAt: now.toISOString(),
|
||||
wasOpenSince: cohort.windowOpenAt?.toISOString(),
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return result
|
||||
})
|
||||
|
||||
return updated
|
||||
}),
|
||||
|
||||
/**
|
||||
* List cohorts for a stage
|
||||
*/
|
||||
list: protectedProcedure
|
||||
.input(z.object({ stageId: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
return ctx.prisma.cohort.findMany({
|
||||
where: { stageId: input.stageId },
|
||||
orderBy: { createdAt: 'asc' },
|
||||
include: {
|
||||
_count: { select: { projects: true } },
|
||||
},
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get cohort with projects and vote summary
|
||||
*/
|
||||
get: protectedProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
const cohort = await ctx.prisma.cohort.findUniqueOrThrow({
|
||||
where: { id: input.id },
|
||||
include: {
|
||||
stage: {
|
||||
select: {
|
||||
id: true,
|
||||
name: true,
|
||||
stageType: true,
|
||||
track: {
|
||||
select: {
|
||||
id: true,
|
||||
name: true,
|
||||
pipeline: { select: { id: true, name: true } },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
projects: {
|
||||
orderBy: { sortOrder: 'asc' },
|
||||
include: {
|
||||
project: {
|
||||
select: {
|
||||
id: true,
|
||||
title: true,
|
||||
teamName: true,
|
||||
tags: true,
|
||||
description: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
// Get vote counts per project in the cohort's stage session
|
||||
const projectIds = cohort.projects.map((p) => p.projectId)
|
||||
const voteSummary =
|
||||
projectIds.length > 0
|
||||
? await ctx.prisma.liveVote.groupBy({
|
||||
by: ['projectId'],
|
||||
where: {
|
||||
projectId: { in: projectIds },
|
||||
session: { stageId: cohort.stage.id },
|
||||
},
|
||||
_count: true,
|
||||
_avg: { score: true },
|
||||
})
|
||||
: []
|
||||
|
||||
const voteMap = new Map(
|
||||
voteSummary.map((v) => [
|
||||
v.projectId,
|
||||
{ voteCount: v._count, avgScore: v._avg?.score ?? 0 },
|
||||
])
|
||||
)
|
||||
|
||||
return {
|
||||
...cohort,
|
||||
projects: cohort.projects.map((cp) => ({
|
||||
...cp,
|
||||
votes: voteMap.get(cp.projectId) ?? { voteCount: 0, avgScore: 0 },
|
||||
})),
|
||||
}
|
||||
}),
|
||||
})
|
||||
import { z } from 'zod'
|
||||
import { TRPCError } from '@trpc/server'
|
||||
import { router, protectedProcedure, adminProcedure } from '../trpc'
|
||||
import { logAudit } from '@/server/utils/audit'
|
||||
|
||||
export const cohortRouter = router({
|
||||
/**
|
||||
* Create a new cohort within a stage
|
||||
*/
|
||||
create: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
stageId: z.string(),
|
||||
name: z.string().min(1).max(255),
|
||||
votingMode: z.enum(['simple', 'criteria', 'ranked']).default('simple'),
|
||||
windowOpenAt: z.date().optional(),
|
||||
windowCloseAt: z.date().optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
// Verify stage exists and is of a type that supports cohorts
|
||||
const stage = await ctx.prisma.stage.findUniqueOrThrow({
|
||||
where: { id: input.stageId },
|
||||
})
|
||||
|
||||
if (stage.stageType !== 'LIVE_FINAL' && stage.stageType !== 'SELECTION') {
|
||||
throw new TRPCError({
|
||||
code: 'BAD_REQUEST',
|
||||
message: 'Cohorts can only be created in LIVE_FINAL or SELECTION stages',
|
||||
})
|
||||
}
|
||||
|
||||
// Validate window dates
|
||||
if (input.windowOpenAt && input.windowCloseAt) {
|
||||
if (input.windowCloseAt <= input.windowOpenAt) {
|
||||
throw new TRPCError({
|
||||
code: 'BAD_REQUEST',
|
||||
message: 'Window close date must be after open date',
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const cohort = await ctx.prisma.$transaction(async (tx) => {
|
||||
const created = await tx.cohort.create({
|
||||
data: {
|
||||
stageId: input.stageId,
|
||||
name: input.name,
|
||||
votingMode: input.votingMode,
|
||||
windowOpenAt: input.windowOpenAt ?? null,
|
||||
windowCloseAt: input.windowCloseAt ?? null,
|
||||
},
|
||||
})
|
||||
|
||||
await logAudit({
|
||||
prisma: tx,
|
||||
userId: ctx.user.id,
|
||||
action: 'CREATE',
|
||||
entityType: 'Cohort',
|
||||
entityId: created.id,
|
||||
detailsJson: {
|
||||
stageId: input.stageId,
|
||||
name: input.name,
|
||||
votingMode: input.votingMode,
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return created
|
||||
})
|
||||
|
||||
return cohort
|
||||
}),
|
||||
|
||||
/**
|
||||
* Assign projects to a cohort
|
||||
*/
|
||||
assignProjects: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
cohortId: z.string(),
|
||||
projectIds: z.array(z.string()).min(1).max(200),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
// Verify cohort exists
|
||||
const cohort = await ctx.prisma.cohort.findUniqueOrThrow({
|
||||
where: { id: input.cohortId },
|
||||
})
|
||||
|
||||
if (cohort.isOpen) {
|
||||
throw new TRPCError({
|
||||
code: 'PRECONDITION_FAILED',
|
||||
message: 'Cannot modify projects while voting is open',
|
||||
})
|
||||
}
|
||||
|
||||
// Get current max sortOrder
|
||||
const maxOrder = await ctx.prisma.cohortProject.aggregate({
|
||||
where: { cohortId: input.cohortId },
|
||||
_max: { sortOrder: true },
|
||||
})
|
||||
let nextOrder = (maxOrder._max.sortOrder ?? -1) + 1
|
||||
|
||||
// Create cohort project entries (skip duplicates)
|
||||
const created = await ctx.prisma.cohortProject.createMany({
|
||||
data: input.projectIds.map((projectId) => ({
|
||||
cohortId: input.cohortId,
|
||||
projectId,
|
||||
sortOrder: nextOrder++,
|
||||
})),
|
||||
skipDuplicates: true,
|
||||
})
|
||||
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'COHORT_PROJECTS_ASSIGNED',
|
||||
entityType: 'Cohort',
|
||||
entityId: input.cohortId,
|
||||
detailsJson: {
|
||||
projectCount: created.count,
|
||||
requested: input.projectIds.length,
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return { assigned: created.count, requested: input.projectIds.length }
|
||||
}),
|
||||
|
||||
/**
|
||||
* Open voting for a cohort
|
||||
*/
|
||||
openVoting: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
cohortId: z.string(),
|
||||
durationMinutes: z.number().int().min(1).max(1440).optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const cohort = await ctx.prisma.cohort.findUniqueOrThrow({
|
||||
where: { id: input.cohortId },
|
||||
include: { _count: { select: { projects: true } } },
|
||||
})
|
||||
|
||||
if (cohort.isOpen) {
|
||||
throw new TRPCError({
|
||||
code: 'CONFLICT',
|
||||
message: 'Voting is already open for this cohort',
|
||||
})
|
||||
}
|
||||
|
||||
if (cohort._count.projects === 0) {
|
||||
throw new TRPCError({
|
||||
code: 'PRECONDITION_FAILED',
|
||||
message: 'Cohort must have at least one project before opening voting',
|
||||
})
|
||||
}
|
||||
|
||||
const now = new Date()
|
||||
const closeAt = input.durationMinutes
|
||||
? new Date(now.getTime() + input.durationMinutes * 60 * 1000)
|
||||
: cohort.windowCloseAt
|
||||
|
||||
const updated = await ctx.prisma.$transaction(async (tx) => {
|
||||
const result = await tx.cohort.update({
|
||||
where: { id: input.cohortId },
|
||||
data: {
|
||||
isOpen: true,
|
||||
windowOpenAt: now,
|
||||
windowCloseAt: closeAt,
|
||||
},
|
||||
})
|
||||
|
||||
await logAudit({
|
||||
prisma: tx,
|
||||
userId: ctx.user.id,
|
||||
action: 'COHORT_VOTING_OPENED',
|
||||
entityType: 'Cohort',
|
||||
entityId: input.cohortId,
|
||||
detailsJson: {
|
||||
openedAt: now.toISOString(),
|
||||
closesAt: closeAt?.toISOString() ?? null,
|
||||
projectCount: cohort._count.projects,
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return result
|
||||
})
|
||||
|
||||
return updated
|
||||
}),
|
||||
|
||||
/**
|
||||
* Close voting for a cohort
|
||||
*/
|
||||
closeVoting: adminProcedure
|
||||
.input(z.object({ cohortId: z.string() }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const cohort = await ctx.prisma.cohort.findUniqueOrThrow({
|
||||
where: { id: input.cohortId },
|
||||
})
|
||||
|
||||
if (!cohort.isOpen) {
|
||||
throw new TRPCError({
|
||||
code: 'PRECONDITION_FAILED',
|
||||
message: 'Voting is not currently open for this cohort',
|
||||
})
|
||||
}
|
||||
|
||||
const now = new Date()
|
||||
|
||||
const updated = await ctx.prisma.$transaction(async (tx) => {
|
||||
const result = await tx.cohort.update({
|
||||
where: { id: input.cohortId },
|
||||
data: {
|
||||
isOpen: false,
|
||||
windowCloseAt: now,
|
||||
},
|
||||
})
|
||||
|
||||
await logAudit({
|
||||
prisma: tx,
|
||||
userId: ctx.user.id,
|
||||
action: 'COHORT_VOTING_CLOSED',
|
||||
entityType: 'Cohort',
|
||||
entityId: input.cohortId,
|
||||
detailsJson: {
|
||||
closedAt: now.toISOString(),
|
||||
wasOpenSince: cohort.windowOpenAt?.toISOString(),
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return result
|
||||
})
|
||||
|
||||
return updated
|
||||
}),
|
||||
|
||||
/**
|
||||
* List cohorts for a stage
|
||||
*/
|
||||
list: protectedProcedure
|
||||
.input(z.object({ stageId: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
return ctx.prisma.cohort.findMany({
|
||||
where: { stageId: input.stageId },
|
||||
orderBy: { createdAt: 'asc' },
|
||||
include: {
|
||||
_count: { select: { projects: true } },
|
||||
},
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get cohort with projects and vote summary
|
||||
*/
|
||||
get: protectedProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
const cohort = await ctx.prisma.cohort.findUniqueOrThrow({
|
||||
where: { id: input.id },
|
||||
include: {
|
||||
stage: {
|
||||
select: {
|
||||
id: true,
|
||||
name: true,
|
||||
stageType: true,
|
||||
track: {
|
||||
select: {
|
||||
id: true,
|
||||
name: true,
|
||||
pipeline: { select: { id: true, name: true } },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
projects: {
|
||||
orderBy: { sortOrder: 'asc' },
|
||||
include: {
|
||||
project: {
|
||||
select: {
|
||||
id: true,
|
||||
title: true,
|
||||
teamName: true,
|
||||
tags: true,
|
||||
description: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
// Get vote counts per project in the cohort's stage session
|
||||
const projectIds = cohort.projects.map((p) => p.projectId)
|
||||
const voteSummary =
|
||||
projectIds.length > 0
|
||||
? await ctx.prisma.liveVote.groupBy({
|
||||
by: ['projectId'],
|
||||
where: {
|
||||
projectId: { in: projectIds },
|
||||
session: { stageId: cohort.stage.id },
|
||||
},
|
||||
_count: true,
|
||||
_avg: { score: true },
|
||||
})
|
||||
: []
|
||||
|
||||
const voteMap = new Map(
|
||||
voteSummary.map((v) => [
|
||||
v.projectId,
|
||||
{ voteCount: v._count, avgScore: v._avg?.score ?? 0 },
|
||||
])
|
||||
)
|
||||
|
||||
return {
|
||||
...cohort,
|
||||
projects: cohort.projects.map((cp) => ({
|
||||
...cp,
|
||||
votes: voteMap.get(cp.projectId) ?? { voteCount: 0, avgScore: 0 },
|
||||
})),
|
||||
}
|
||||
}),
|
||||
})
|
||||
|
||||
@@ -1,187 +1,187 @@
|
||||
import { z } from 'zod'
|
||||
import { router, adminProcedure } from '../trpc'
|
||||
|
||||
export const dashboardRouter = router({
|
||||
/**
|
||||
* Get all dashboard stats in a single query batch.
|
||||
* Replaces the 16 parallel Prisma queries that were previously
|
||||
* run during SSR, which blocked the event loop and caused 503s.
|
||||
*/
|
||||
getStats: adminProcedure
|
||||
.input(z.object({ editionId: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
const { editionId } = input
|
||||
|
||||
const edition = await ctx.prisma.program.findUnique({
|
||||
where: { id: editionId },
|
||||
select: { name: true, year: true },
|
||||
})
|
||||
|
||||
if (!edition) return null
|
||||
|
||||
const sevenDaysAgo = new Date(Date.now() - 7 * 24 * 60 * 60 * 1000)
|
||||
|
||||
const [
|
||||
activeStageCount,
|
||||
totalStageCount,
|
||||
projectCount,
|
||||
newProjectsThisWeek,
|
||||
totalJurors,
|
||||
activeJurors,
|
||||
evaluationStats,
|
||||
totalAssignments,
|
||||
recentStages,
|
||||
latestProjects,
|
||||
categoryBreakdown,
|
||||
oceanIssueBreakdown,
|
||||
recentActivity,
|
||||
pendingCOIs,
|
||||
draftStages,
|
||||
unassignedProjects,
|
||||
] = await Promise.all([
|
||||
ctx.prisma.stage.count({
|
||||
where: { track: { pipeline: { programId: editionId } }, status: 'STAGE_ACTIVE' },
|
||||
}),
|
||||
ctx.prisma.stage.count({
|
||||
where: { track: { pipeline: { programId: editionId } } },
|
||||
}),
|
||||
ctx.prisma.project.count({
|
||||
where: { programId: editionId },
|
||||
}),
|
||||
ctx.prisma.project.count({
|
||||
where: {
|
||||
programId: editionId,
|
||||
createdAt: { gte: sevenDaysAgo },
|
||||
},
|
||||
}),
|
||||
ctx.prisma.user.count({
|
||||
where: {
|
||||
role: 'JURY_MEMBER',
|
||||
status: { in: ['ACTIVE', 'INVITED', 'NONE'] },
|
||||
assignments: { some: { stage: { track: { pipeline: { programId: editionId } } } } },
|
||||
},
|
||||
}),
|
||||
ctx.prisma.user.count({
|
||||
where: {
|
||||
role: 'JURY_MEMBER',
|
||||
status: 'ACTIVE',
|
||||
assignments: { some: { stage: { track: { pipeline: { programId: editionId } } } } },
|
||||
},
|
||||
}),
|
||||
ctx.prisma.evaluation.groupBy({
|
||||
by: ['status'],
|
||||
where: { assignment: { stage: { track: { pipeline: { programId: editionId } } } } },
|
||||
_count: true,
|
||||
}),
|
||||
ctx.prisma.assignment.count({
|
||||
where: { stage: { track: { pipeline: { programId: editionId } } } },
|
||||
}),
|
||||
ctx.prisma.stage.findMany({
|
||||
where: { track: { pipeline: { programId: editionId } } },
|
||||
orderBy: { createdAt: 'desc' },
|
||||
take: 5,
|
||||
select: {
|
||||
id: true,
|
||||
name: true,
|
||||
status: true,
|
||||
stageType: true,
|
||||
windowOpenAt: true,
|
||||
windowCloseAt: true,
|
||||
_count: {
|
||||
select: {
|
||||
projectStageStates: true,
|
||||
assignments: true,
|
||||
},
|
||||
},
|
||||
assignments: {
|
||||
select: {
|
||||
evaluation: { select: { status: true } },
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
ctx.prisma.project.findMany({
|
||||
where: { programId: editionId },
|
||||
orderBy: { createdAt: 'desc' },
|
||||
take: 8,
|
||||
select: {
|
||||
id: true,
|
||||
title: true,
|
||||
teamName: true,
|
||||
country: true,
|
||||
competitionCategory: true,
|
||||
oceanIssue: true,
|
||||
logoKey: true,
|
||||
createdAt: true,
|
||||
submittedAt: true,
|
||||
status: true,
|
||||
},
|
||||
}),
|
||||
ctx.prisma.project.groupBy({
|
||||
by: ['competitionCategory'],
|
||||
where: { programId: editionId },
|
||||
_count: true,
|
||||
}),
|
||||
ctx.prisma.project.groupBy({
|
||||
by: ['oceanIssue'],
|
||||
where: { programId: editionId },
|
||||
_count: true,
|
||||
}),
|
||||
ctx.prisma.auditLog.findMany({
|
||||
where: {
|
||||
timestamp: { gte: sevenDaysAgo },
|
||||
},
|
||||
orderBy: { timestamp: 'desc' },
|
||||
take: 8,
|
||||
select: {
|
||||
id: true,
|
||||
action: true,
|
||||
entityType: true,
|
||||
timestamp: true,
|
||||
user: { select: { name: true } },
|
||||
},
|
||||
}),
|
||||
ctx.prisma.conflictOfInterest.count({
|
||||
where: {
|
||||
hasConflict: true,
|
||||
reviewedAt: null,
|
||||
assignment: { stage: { track: { pipeline: { programId: editionId } } } },
|
||||
},
|
||||
}),
|
||||
ctx.prisma.stage.count({
|
||||
where: { track: { pipeline: { programId: editionId } }, status: 'STAGE_DRAFT' },
|
||||
}),
|
||||
ctx.prisma.project.count({
|
||||
where: {
|
||||
programId: editionId,
|
||||
projectStageStates: {
|
||||
some: {
|
||||
stage: { status: 'STAGE_ACTIVE' },
|
||||
},
|
||||
},
|
||||
assignments: { none: {} },
|
||||
},
|
||||
}),
|
||||
])
|
||||
|
||||
return {
|
||||
edition,
|
||||
activeStageCount,
|
||||
totalStageCount,
|
||||
projectCount,
|
||||
newProjectsThisWeek,
|
||||
totalJurors,
|
||||
activeJurors,
|
||||
evaluationStats,
|
||||
totalAssignments,
|
||||
recentStages,
|
||||
latestProjects,
|
||||
categoryBreakdown,
|
||||
oceanIssueBreakdown,
|
||||
recentActivity,
|
||||
pendingCOIs,
|
||||
draftStages,
|
||||
unassignedProjects,
|
||||
}
|
||||
}),
|
||||
})
|
||||
import { z } from 'zod'
|
||||
import { router, adminProcedure } from '../trpc'
|
||||
|
||||
export const dashboardRouter = router({
|
||||
/**
|
||||
* Get all dashboard stats in a single query batch.
|
||||
* Replaces the 16 parallel Prisma queries that were previously
|
||||
* run during SSR, which blocked the event loop and caused 503s.
|
||||
*/
|
||||
getStats: adminProcedure
|
||||
.input(z.object({ editionId: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
const { editionId } = input
|
||||
|
||||
const edition = await ctx.prisma.program.findUnique({
|
||||
where: { id: editionId },
|
||||
select: { name: true, year: true },
|
||||
})
|
||||
|
||||
if (!edition) return null
|
||||
|
||||
const sevenDaysAgo = new Date(Date.now() - 7 * 24 * 60 * 60 * 1000)
|
||||
|
||||
const [
|
||||
activeStageCount,
|
||||
totalStageCount,
|
||||
projectCount,
|
||||
newProjectsThisWeek,
|
||||
totalJurors,
|
||||
activeJurors,
|
||||
evaluationStats,
|
||||
totalAssignments,
|
||||
recentStages,
|
||||
latestProjects,
|
||||
categoryBreakdown,
|
||||
oceanIssueBreakdown,
|
||||
recentActivity,
|
||||
pendingCOIs,
|
||||
draftStages,
|
||||
unassignedProjects,
|
||||
] = await Promise.all([
|
||||
ctx.prisma.stage.count({
|
||||
where: { track: { pipeline: { programId: editionId } }, status: 'STAGE_ACTIVE' },
|
||||
}),
|
||||
ctx.prisma.stage.count({
|
||||
where: { track: { pipeline: { programId: editionId } } },
|
||||
}),
|
||||
ctx.prisma.project.count({
|
||||
where: { programId: editionId },
|
||||
}),
|
||||
ctx.prisma.project.count({
|
||||
where: {
|
||||
programId: editionId,
|
||||
createdAt: { gte: sevenDaysAgo },
|
||||
},
|
||||
}),
|
||||
ctx.prisma.user.count({
|
||||
where: {
|
||||
role: 'JURY_MEMBER',
|
||||
status: { in: ['ACTIVE', 'INVITED', 'NONE'] },
|
||||
assignments: { some: { stage: { track: { pipeline: { programId: editionId } } } } },
|
||||
},
|
||||
}),
|
||||
ctx.prisma.user.count({
|
||||
where: {
|
||||
role: 'JURY_MEMBER',
|
||||
status: 'ACTIVE',
|
||||
assignments: { some: { stage: { track: { pipeline: { programId: editionId } } } } },
|
||||
},
|
||||
}),
|
||||
ctx.prisma.evaluation.groupBy({
|
||||
by: ['status'],
|
||||
where: { assignment: { stage: { track: { pipeline: { programId: editionId } } } } },
|
||||
_count: true,
|
||||
}),
|
||||
ctx.prisma.assignment.count({
|
||||
where: { stage: { track: { pipeline: { programId: editionId } } } },
|
||||
}),
|
||||
ctx.prisma.stage.findMany({
|
||||
where: { track: { pipeline: { programId: editionId } } },
|
||||
orderBy: { createdAt: 'desc' },
|
||||
take: 5,
|
||||
select: {
|
||||
id: true,
|
||||
name: true,
|
||||
status: true,
|
||||
stageType: true,
|
||||
windowOpenAt: true,
|
||||
windowCloseAt: true,
|
||||
_count: {
|
||||
select: {
|
||||
projectStageStates: true,
|
||||
assignments: true,
|
||||
},
|
||||
},
|
||||
assignments: {
|
||||
select: {
|
||||
evaluation: { select: { status: true } },
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
ctx.prisma.project.findMany({
|
||||
where: { programId: editionId },
|
||||
orderBy: { createdAt: 'desc' },
|
||||
take: 8,
|
||||
select: {
|
||||
id: true,
|
||||
title: true,
|
||||
teamName: true,
|
||||
country: true,
|
||||
competitionCategory: true,
|
||||
oceanIssue: true,
|
||||
logoKey: true,
|
||||
createdAt: true,
|
||||
submittedAt: true,
|
||||
status: true,
|
||||
},
|
||||
}),
|
||||
ctx.prisma.project.groupBy({
|
||||
by: ['competitionCategory'],
|
||||
where: { programId: editionId },
|
||||
_count: true,
|
||||
}),
|
||||
ctx.prisma.project.groupBy({
|
||||
by: ['oceanIssue'],
|
||||
where: { programId: editionId },
|
||||
_count: true,
|
||||
}),
|
||||
ctx.prisma.auditLog.findMany({
|
||||
where: {
|
||||
timestamp: { gte: sevenDaysAgo },
|
||||
},
|
||||
orderBy: { timestamp: 'desc' },
|
||||
take: 8,
|
||||
select: {
|
||||
id: true,
|
||||
action: true,
|
||||
entityType: true,
|
||||
timestamp: true,
|
||||
user: { select: { name: true } },
|
||||
},
|
||||
}),
|
||||
ctx.prisma.conflictOfInterest.count({
|
||||
where: {
|
||||
hasConflict: true,
|
||||
reviewedAt: null,
|
||||
assignment: { stage: { track: { pipeline: { programId: editionId } } } },
|
||||
},
|
||||
}),
|
||||
ctx.prisma.stage.count({
|
||||
where: { track: { pipeline: { programId: editionId } }, status: 'STAGE_DRAFT' },
|
||||
}),
|
||||
ctx.prisma.project.count({
|
||||
where: {
|
||||
programId: editionId,
|
||||
projectStageStates: {
|
||||
some: {
|
||||
stage: { status: 'STAGE_ACTIVE' },
|
||||
},
|
||||
},
|
||||
assignments: { none: {} },
|
||||
},
|
||||
}),
|
||||
])
|
||||
|
||||
return {
|
||||
edition,
|
||||
activeStageCount,
|
||||
totalStageCount,
|
||||
projectCount,
|
||||
newProjectsThisWeek,
|
||||
totalJurors,
|
||||
activeJurors,
|
||||
evaluationStats,
|
||||
totalAssignments,
|
||||
recentStages,
|
||||
latestProjects,
|
||||
categoryBreakdown,
|
||||
oceanIssueBreakdown,
|
||||
recentActivity,
|
||||
pendingCOIs,
|
||||
draftStages,
|
||||
unassignedProjects,
|
||||
}
|
||||
}),
|
||||
})
|
||||
|
||||
@@ -1,353 +1,353 @@
|
||||
import { z } from 'zod'
|
||||
import { TRPCError } from '@trpc/server'
|
||||
import { Prisma, FilteringOutcome } from '@prisma/client'
|
||||
import { router, protectedProcedure, adminProcedure } from '../trpc'
|
||||
import { logAudit } from '@/server/utils/audit'
|
||||
|
||||
export const decisionRouter = router({
|
||||
/**
|
||||
* Override a project's stage state or filtering result
|
||||
*/
|
||||
override: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
entityType: z.enum([
|
||||
'ProjectStageState',
|
||||
'FilteringResult',
|
||||
'AwardEligibility',
|
||||
]),
|
||||
entityId: z.string(),
|
||||
newValue: z.record(z.unknown()),
|
||||
reasonCode: z.enum([
|
||||
'DATA_CORRECTION',
|
||||
'POLICY_EXCEPTION',
|
||||
'JURY_CONFLICT',
|
||||
'SPONSOR_DECISION',
|
||||
'ADMIN_DISCRETION',
|
||||
]),
|
||||
reasonText: z.string().max(2000).optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
let previousValue: Record<string, unknown> = {}
|
||||
|
||||
// Fetch current value based on entity type
|
||||
switch (input.entityType) {
|
||||
case 'ProjectStageState': {
|
||||
const pss = await ctx.prisma.projectStageState.findUniqueOrThrow({
|
||||
where: { id: input.entityId },
|
||||
})
|
||||
previousValue = {
|
||||
state: pss.state,
|
||||
metadataJson: pss.metadataJson,
|
||||
}
|
||||
|
||||
// Validate the new state
|
||||
const newState = input.newValue.state as string | undefined
|
||||
if (
|
||||
newState &&
|
||||
!['PENDING', 'IN_PROGRESS', 'PASSED', 'REJECTED', 'ROUTED', 'COMPLETED', 'WITHDRAWN'].includes(newState)
|
||||
) {
|
||||
throw new TRPCError({
|
||||
code: 'BAD_REQUEST',
|
||||
message: `Invalid state: ${newState}`,
|
||||
})
|
||||
}
|
||||
|
||||
await ctx.prisma.$transaction(async (tx) => {
|
||||
await tx.projectStageState.update({
|
||||
where: { id: input.entityId },
|
||||
data: {
|
||||
state: (newState as Prisma.EnumProjectStageStateValueFieldUpdateOperationsInput['set']) ?? pss.state,
|
||||
metadataJson: {
|
||||
...(pss.metadataJson as Record<string, unknown> ?? {}),
|
||||
lastOverride: {
|
||||
by: ctx.user.id,
|
||||
at: new Date().toISOString(),
|
||||
reason: input.reasonCode,
|
||||
},
|
||||
} as Prisma.InputJsonValue,
|
||||
},
|
||||
})
|
||||
|
||||
await tx.overrideAction.create({
|
||||
data: {
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
previousValue: previousValue as Prisma.InputJsonValue,
|
||||
newValueJson: input.newValue as Prisma.InputJsonValue,
|
||||
reasonCode: input.reasonCode,
|
||||
reasonText: input.reasonText ?? null,
|
||||
actorId: ctx.user.id,
|
||||
},
|
||||
})
|
||||
|
||||
await tx.decisionAuditLog.create({
|
||||
data: {
|
||||
eventType: 'override.applied',
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
actorId: ctx.user.id,
|
||||
detailsJson: {
|
||||
previousValue,
|
||||
newValue: input.newValue,
|
||||
reasonCode: input.reasonCode,
|
||||
reasonText: input.reasonText,
|
||||
} as Prisma.InputJsonValue,
|
||||
snapshotJson: previousValue as Prisma.InputJsonValue,
|
||||
},
|
||||
})
|
||||
|
||||
await logAudit({
|
||||
prisma: tx,
|
||||
userId: ctx.user.id,
|
||||
action: 'DECISION_OVERRIDE',
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
detailsJson: {
|
||||
reasonCode: input.reasonCode,
|
||||
reasonText: input.reasonText,
|
||||
previousState: previousValue.state,
|
||||
newState: input.newValue.state,
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
})
|
||||
break
|
||||
}
|
||||
|
||||
case 'FilteringResult': {
|
||||
const fr = await ctx.prisma.filteringResult.findUniqueOrThrow({
|
||||
where: { id: input.entityId },
|
||||
})
|
||||
previousValue = {
|
||||
outcome: fr.outcome,
|
||||
aiScreeningJson: fr.aiScreeningJson,
|
||||
}
|
||||
|
||||
const newOutcome = input.newValue.outcome as string | undefined
|
||||
|
||||
await ctx.prisma.$transaction(async (tx) => {
|
||||
if (newOutcome) {
|
||||
await tx.filteringResult.update({
|
||||
where: { id: input.entityId },
|
||||
data: { finalOutcome: newOutcome as FilteringOutcome },
|
||||
})
|
||||
}
|
||||
|
||||
await tx.overrideAction.create({
|
||||
data: {
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
previousValue: previousValue as Prisma.InputJsonValue,
|
||||
newValueJson: input.newValue as Prisma.InputJsonValue,
|
||||
reasonCode: input.reasonCode,
|
||||
reasonText: input.reasonText ?? null,
|
||||
actorId: ctx.user.id,
|
||||
},
|
||||
})
|
||||
|
||||
await tx.decisionAuditLog.create({
|
||||
data: {
|
||||
eventType: 'override.applied',
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
actorId: ctx.user.id,
|
||||
detailsJson: {
|
||||
previousValue,
|
||||
newValue: input.newValue,
|
||||
reasonCode: input.reasonCode,
|
||||
} as Prisma.InputJsonValue,
|
||||
},
|
||||
})
|
||||
|
||||
await logAudit({
|
||||
prisma: tx,
|
||||
userId: ctx.user.id,
|
||||
action: 'DECISION_OVERRIDE',
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
detailsJson: {
|
||||
reasonCode: input.reasonCode,
|
||||
previousOutcome: (previousValue as Record<string, unknown>).outcome,
|
||||
newOutcome,
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
})
|
||||
break
|
||||
}
|
||||
|
||||
case 'AwardEligibility': {
|
||||
const ae = await ctx.prisma.awardEligibility.findUniqueOrThrow({
|
||||
where: { id: input.entityId },
|
||||
})
|
||||
previousValue = {
|
||||
eligible: ae.eligible,
|
||||
method: ae.method,
|
||||
}
|
||||
|
||||
const newEligible = input.newValue.eligible as boolean | undefined
|
||||
|
||||
await ctx.prisma.$transaction(async (tx) => {
|
||||
if (newEligible !== undefined) {
|
||||
await tx.awardEligibility.update({
|
||||
where: { id: input.entityId },
|
||||
data: {
|
||||
eligible: newEligible,
|
||||
method: 'MANUAL',
|
||||
overriddenBy: ctx.user.id,
|
||||
overriddenAt: new Date(),
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
await tx.overrideAction.create({
|
||||
data: {
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
previousValue: previousValue as Prisma.InputJsonValue,
|
||||
newValueJson: input.newValue as Prisma.InputJsonValue,
|
||||
reasonCode: input.reasonCode,
|
||||
reasonText: input.reasonText ?? null,
|
||||
actorId: ctx.user.id,
|
||||
},
|
||||
})
|
||||
|
||||
await tx.decisionAuditLog.create({
|
||||
data: {
|
||||
eventType: 'override.applied',
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
actorId: ctx.user.id,
|
||||
detailsJson: {
|
||||
previousValue,
|
||||
newValue: input.newValue,
|
||||
reasonCode: input.reasonCode,
|
||||
} as Prisma.InputJsonValue,
|
||||
},
|
||||
})
|
||||
|
||||
await logAudit({
|
||||
prisma: tx,
|
||||
userId: ctx.user.id,
|
||||
action: 'DECISION_OVERRIDE',
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
detailsJson: {
|
||||
reasonCode: input.reasonCode,
|
||||
previousEligible: previousValue.eligible,
|
||||
newEligible,
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
})
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return { success: true, entityType: input.entityType, entityId: input.entityId }
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get the full decision audit timeline for an entity
|
||||
*/
|
||||
auditTimeline: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
entityType: z.string(),
|
||||
entityId: z.string(),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
const [decisionLogs, overrideActions] = await Promise.all([
|
||||
ctx.prisma.decisionAuditLog.findMany({
|
||||
where: {
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
},
|
||||
orderBy: { createdAt: 'desc' },
|
||||
}),
|
||||
ctx.prisma.overrideAction.findMany({
|
||||
where: {
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
},
|
||||
orderBy: { createdAt: 'desc' },
|
||||
}),
|
||||
])
|
||||
|
||||
// Merge and sort by timestamp
|
||||
const timeline = [
|
||||
...decisionLogs.map((dl) => ({
|
||||
type: 'decision' as const,
|
||||
id: dl.id,
|
||||
eventType: dl.eventType,
|
||||
actorId: dl.actorId,
|
||||
details: dl.detailsJson,
|
||||
snapshot: dl.snapshotJson,
|
||||
createdAt: dl.createdAt,
|
||||
})),
|
||||
...overrideActions.map((oa) => ({
|
||||
type: 'override' as const,
|
||||
id: oa.id,
|
||||
eventType: `override.${oa.reasonCode}`,
|
||||
actorId: oa.actorId,
|
||||
details: {
|
||||
previousValue: oa.previousValue,
|
||||
newValue: oa.newValueJson,
|
||||
reasonCode: oa.reasonCode,
|
||||
reasonText: oa.reasonText,
|
||||
},
|
||||
snapshot: null,
|
||||
createdAt: oa.createdAt,
|
||||
})),
|
||||
].sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime())
|
||||
|
||||
return { entityType: input.entityType, entityId: input.entityId, timeline }
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get override actions (paginated, admin only)
|
||||
*/
|
||||
getOverrides: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
entityType: z.string().optional(),
|
||||
reasonCode: z
|
||||
.enum([
|
||||
'DATA_CORRECTION',
|
||||
'POLICY_EXCEPTION',
|
||||
'JURY_CONFLICT',
|
||||
'SPONSOR_DECISION',
|
||||
'ADMIN_DISCRETION',
|
||||
])
|
||||
.optional(),
|
||||
cursor: z.string().optional(),
|
||||
limit: z.number().int().min(1).max(100).default(50),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
const where: Prisma.OverrideActionWhereInput = {}
|
||||
if (input.entityType) where.entityType = input.entityType
|
||||
if (input.reasonCode) where.reasonCode = input.reasonCode
|
||||
|
||||
const items = await ctx.prisma.overrideAction.findMany({
|
||||
where,
|
||||
take: input.limit + 1,
|
||||
cursor: input.cursor ? { id: input.cursor } : undefined,
|
||||
orderBy: { createdAt: 'desc' },
|
||||
})
|
||||
|
||||
let nextCursor: string | undefined
|
||||
if (items.length > input.limit) {
|
||||
const next = items.pop()
|
||||
nextCursor = next?.id
|
||||
}
|
||||
|
||||
return { items, nextCursor }
|
||||
}),
|
||||
})
|
||||
import { z } from 'zod'
|
||||
import { TRPCError } from '@trpc/server'
|
||||
import { Prisma, FilteringOutcome } from '@prisma/client'
|
||||
import { router, protectedProcedure, adminProcedure } from '../trpc'
|
||||
import { logAudit } from '@/server/utils/audit'
|
||||
|
||||
export const decisionRouter = router({
|
||||
/**
|
||||
* Override a project's stage state or filtering result
|
||||
*/
|
||||
override: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
entityType: z.enum([
|
||||
'ProjectStageState',
|
||||
'FilteringResult',
|
||||
'AwardEligibility',
|
||||
]),
|
||||
entityId: z.string(),
|
||||
newValue: z.record(z.unknown()),
|
||||
reasonCode: z.enum([
|
||||
'DATA_CORRECTION',
|
||||
'POLICY_EXCEPTION',
|
||||
'JURY_CONFLICT',
|
||||
'SPONSOR_DECISION',
|
||||
'ADMIN_DISCRETION',
|
||||
]),
|
||||
reasonText: z.string().max(2000).optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
let previousValue: Record<string, unknown> = {}
|
||||
|
||||
// Fetch current value based on entity type
|
||||
switch (input.entityType) {
|
||||
case 'ProjectStageState': {
|
||||
const pss = await ctx.prisma.projectStageState.findUniqueOrThrow({
|
||||
where: { id: input.entityId },
|
||||
})
|
||||
previousValue = {
|
||||
state: pss.state,
|
||||
metadataJson: pss.metadataJson,
|
||||
}
|
||||
|
||||
// Validate the new state
|
||||
const newState = input.newValue.state as string | undefined
|
||||
if (
|
||||
newState &&
|
||||
!['PENDING', 'IN_PROGRESS', 'PASSED', 'REJECTED', 'ROUTED', 'COMPLETED', 'WITHDRAWN'].includes(newState)
|
||||
) {
|
||||
throw new TRPCError({
|
||||
code: 'BAD_REQUEST',
|
||||
message: `Invalid state: ${newState}`,
|
||||
})
|
||||
}
|
||||
|
||||
await ctx.prisma.$transaction(async (tx) => {
|
||||
await tx.projectStageState.update({
|
||||
where: { id: input.entityId },
|
||||
data: {
|
||||
state: (newState as Prisma.EnumProjectStageStateValueFieldUpdateOperationsInput['set']) ?? pss.state,
|
||||
metadataJson: {
|
||||
...(pss.metadataJson as Record<string, unknown> ?? {}),
|
||||
lastOverride: {
|
||||
by: ctx.user.id,
|
||||
at: new Date().toISOString(),
|
||||
reason: input.reasonCode,
|
||||
},
|
||||
} as Prisma.InputJsonValue,
|
||||
},
|
||||
})
|
||||
|
||||
await tx.overrideAction.create({
|
||||
data: {
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
previousValue: previousValue as Prisma.InputJsonValue,
|
||||
newValueJson: input.newValue as Prisma.InputJsonValue,
|
||||
reasonCode: input.reasonCode,
|
||||
reasonText: input.reasonText ?? null,
|
||||
actorId: ctx.user.id,
|
||||
},
|
||||
})
|
||||
|
||||
await tx.decisionAuditLog.create({
|
||||
data: {
|
||||
eventType: 'override.applied',
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
actorId: ctx.user.id,
|
||||
detailsJson: {
|
||||
previousValue,
|
||||
newValue: input.newValue,
|
||||
reasonCode: input.reasonCode,
|
||||
reasonText: input.reasonText,
|
||||
} as Prisma.InputJsonValue,
|
||||
snapshotJson: previousValue as Prisma.InputJsonValue,
|
||||
},
|
||||
})
|
||||
|
||||
await logAudit({
|
||||
prisma: tx,
|
||||
userId: ctx.user.id,
|
||||
action: 'DECISION_OVERRIDE',
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
detailsJson: {
|
||||
reasonCode: input.reasonCode,
|
||||
reasonText: input.reasonText,
|
||||
previousState: previousValue.state,
|
||||
newState: input.newValue.state,
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
})
|
||||
break
|
||||
}
|
||||
|
||||
case 'FilteringResult': {
|
||||
const fr = await ctx.prisma.filteringResult.findUniqueOrThrow({
|
||||
where: { id: input.entityId },
|
||||
})
|
||||
previousValue = {
|
||||
outcome: fr.outcome,
|
||||
aiScreeningJson: fr.aiScreeningJson,
|
||||
}
|
||||
|
||||
const newOutcome = input.newValue.outcome as string | undefined
|
||||
|
||||
await ctx.prisma.$transaction(async (tx) => {
|
||||
if (newOutcome) {
|
||||
await tx.filteringResult.update({
|
||||
where: { id: input.entityId },
|
||||
data: { finalOutcome: newOutcome as FilteringOutcome },
|
||||
})
|
||||
}
|
||||
|
||||
await tx.overrideAction.create({
|
||||
data: {
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
previousValue: previousValue as Prisma.InputJsonValue,
|
||||
newValueJson: input.newValue as Prisma.InputJsonValue,
|
||||
reasonCode: input.reasonCode,
|
||||
reasonText: input.reasonText ?? null,
|
||||
actorId: ctx.user.id,
|
||||
},
|
||||
})
|
||||
|
||||
await tx.decisionAuditLog.create({
|
||||
data: {
|
||||
eventType: 'override.applied',
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
actorId: ctx.user.id,
|
||||
detailsJson: {
|
||||
previousValue,
|
||||
newValue: input.newValue,
|
||||
reasonCode: input.reasonCode,
|
||||
} as Prisma.InputJsonValue,
|
||||
},
|
||||
})
|
||||
|
||||
await logAudit({
|
||||
prisma: tx,
|
||||
userId: ctx.user.id,
|
||||
action: 'DECISION_OVERRIDE',
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
detailsJson: {
|
||||
reasonCode: input.reasonCode,
|
||||
previousOutcome: (previousValue as Record<string, unknown>).outcome,
|
||||
newOutcome,
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
})
|
||||
break
|
||||
}
|
||||
|
||||
case 'AwardEligibility': {
|
||||
const ae = await ctx.prisma.awardEligibility.findUniqueOrThrow({
|
||||
where: { id: input.entityId },
|
||||
})
|
||||
previousValue = {
|
||||
eligible: ae.eligible,
|
||||
method: ae.method,
|
||||
}
|
||||
|
||||
const newEligible = input.newValue.eligible as boolean | undefined
|
||||
|
||||
await ctx.prisma.$transaction(async (tx) => {
|
||||
if (newEligible !== undefined) {
|
||||
await tx.awardEligibility.update({
|
||||
where: { id: input.entityId },
|
||||
data: {
|
||||
eligible: newEligible,
|
||||
method: 'MANUAL',
|
||||
overriddenBy: ctx.user.id,
|
||||
overriddenAt: new Date(),
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
await tx.overrideAction.create({
|
||||
data: {
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
previousValue: previousValue as Prisma.InputJsonValue,
|
||||
newValueJson: input.newValue as Prisma.InputJsonValue,
|
||||
reasonCode: input.reasonCode,
|
||||
reasonText: input.reasonText ?? null,
|
||||
actorId: ctx.user.id,
|
||||
},
|
||||
})
|
||||
|
||||
await tx.decisionAuditLog.create({
|
||||
data: {
|
||||
eventType: 'override.applied',
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
actorId: ctx.user.id,
|
||||
detailsJson: {
|
||||
previousValue,
|
||||
newValue: input.newValue,
|
||||
reasonCode: input.reasonCode,
|
||||
} as Prisma.InputJsonValue,
|
||||
},
|
||||
})
|
||||
|
||||
await logAudit({
|
||||
prisma: tx,
|
||||
userId: ctx.user.id,
|
||||
action: 'DECISION_OVERRIDE',
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
detailsJson: {
|
||||
reasonCode: input.reasonCode,
|
||||
previousEligible: previousValue.eligible,
|
||||
newEligible,
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
})
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return { success: true, entityType: input.entityType, entityId: input.entityId }
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get the full decision audit timeline for an entity
|
||||
*/
|
||||
auditTimeline: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
entityType: z.string(),
|
||||
entityId: z.string(),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
const [decisionLogs, overrideActions] = await Promise.all([
|
||||
ctx.prisma.decisionAuditLog.findMany({
|
||||
where: {
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
},
|
||||
orderBy: { createdAt: 'desc' },
|
||||
}),
|
||||
ctx.prisma.overrideAction.findMany({
|
||||
where: {
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
},
|
||||
orderBy: { createdAt: 'desc' },
|
||||
}),
|
||||
])
|
||||
|
||||
// Merge and sort by timestamp
|
||||
const timeline = [
|
||||
...decisionLogs.map((dl) => ({
|
||||
type: 'decision' as const,
|
||||
id: dl.id,
|
||||
eventType: dl.eventType,
|
||||
actorId: dl.actorId,
|
||||
details: dl.detailsJson,
|
||||
snapshot: dl.snapshotJson,
|
||||
createdAt: dl.createdAt,
|
||||
})),
|
||||
...overrideActions.map((oa) => ({
|
||||
type: 'override' as const,
|
||||
id: oa.id,
|
||||
eventType: `override.${oa.reasonCode}`,
|
||||
actorId: oa.actorId,
|
||||
details: {
|
||||
previousValue: oa.previousValue,
|
||||
newValue: oa.newValueJson,
|
||||
reasonCode: oa.reasonCode,
|
||||
reasonText: oa.reasonText,
|
||||
},
|
||||
snapshot: null,
|
||||
createdAt: oa.createdAt,
|
||||
})),
|
||||
].sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime())
|
||||
|
||||
return { entityType: input.entityType, entityId: input.entityId, timeline }
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get override actions (paginated, admin only)
|
||||
*/
|
||||
getOverrides: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
entityType: z.string().optional(),
|
||||
reasonCode: z
|
||||
.enum([
|
||||
'DATA_CORRECTION',
|
||||
'POLICY_EXCEPTION',
|
||||
'JURY_CONFLICT',
|
||||
'SPONSOR_DECISION',
|
||||
'ADMIN_DISCRETION',
|
||||
])
|
||||
.optional(),
|
||||
cursor: z.string().optional(),
|
||||
limit: z.number().int().min(1).max(100).default(50),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
const where: Prisma.OverrideActionWhereInput = {}
|
||||
if (input.entityType) where.entityType = input.entityType
|
||||
if (input.reasonCode) where.reasonCode = input.reasonCode
|
||||
|
||||
const items = await ctx.prisma.overrideAction.findMany({
|
||||
where,
|
||||
take: input.limit + 1,
|
||||
cursor: input.cursor ? { id: input.cursor } : undefined,
|
||||
orderBy: { createdAt: 'desc' },
|
||||
})
|
||||
|
||||
let nextCursor: string | undefined
|
||||
if (items.length > input.limit) {
|
||||
const next = items.pop()
|
||||
nextCursor = next?.id
|
||||
}
|
||||
|
||||
return { items, nextCursor }
|
||||
}),
|
||||
})
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,205 +1,205 @@
|
||||
import { z } from 'zod'
|
||||
import { router, adminProcedure } from '../trpc'
|
||||
import { logAudit } from '../utils/audit'
|
||||
|
||||
export const gracePeriodRouter = router({
|
||||
/**
|
||||
* Grant a grace period to a juror
|
||||
*/
|
||||
grant: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
stageId: z.string(),
|
||||
userId: z.string(),
|
||||
projectId: z.string().optional(),
|
||||
extendedUntil: z.date(),
|
||||
reason: z.string().optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const gracePeriod = await ctx.prisma.gracePeriod.create({
|
||||
data: {
|
||||
...input,
|
||||
grantedById: ctx.user.id,
|
||||
},
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'GRANT_GRACE_PERIOD',
|
||||
entityType: 'GracePeriod',
|
||||
entityId: gracePeriod.id,
|
||||
detailsJson: {
|
||||
stageId: input.stageId,
|
||||
userId: input.userId,
|
||||
projectId: input.projectId,
|
||||
extendedUntil: input.extendedUntil.toISOString(),
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return gracePeriod
|
||||
}),
|
||||
|
||||
/**
|
||||
* List grace periods for a stage
|
||||
*/
|
||||
listByStage: adminProcedure
|
||||
.input(z.object({ stageId: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
return ctx.prisma.gracePeriod.findMany({
|
||||
where: { stageId: input.stageId },
|
||||
include: {
|
||||
user: { select: { id: true, name: true, email: true } },
|
||||
grantedBy: { select: { id: true, name: true } },
|
||||
},
|
||||
orderBy: { createdAt: 'desc' },
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* List active grace periods for a stage
|
||||
*/
|
||||
listActiveByStage: adminProcedure
|
||||
.input(z.object({ stageId: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
return ctx.prisma.gracePeriod.findMany({
|
||||
where: {
|
||||
stageId: input.stageId,
|
||||
extendedUntil: { gte: new Date() },
|
||||
},
|
||||
include: {
|
||||
user: { select: { id: true, name: true, email: true } },
|
||||
grantedBy: { select: { id: true, name: true } },
|
||||
},
|
||||
orderBy: { extendedUntil: 'asc' },
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get grace periods for a specific user in a stage
|
||||
*/
|
||||
getByUser: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
stageId: z.string(),
|
||||
userId: z.string(),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
return ctx.prisma.gracePeriod.findMany({
|
||||
where: {
|
||||
stageId: input.stageId,
|
||||
userId: input.userId,
|
||||
},
|
||||
orderBy: { createdAt: 'desc' },
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* Update a grace period
|
||||
*/
|
||||
update: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
extendedUntil: z.date().optional(),
|
||||
reason: z.string().optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const { id, ...data } = input
|
||||
|
||||
const gracePeriod = await ctx.prisma.gracePeriod.update({
|
||||
where: { id },
|
||||
data,
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'UPDATE_GRACE_PERIOD',
|
||||
entityType: 'GracePeriod',
|
||||
entityId: id,
|
||||
detailsJson: data,
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return gracePeriod
|
||||
}),
|
||||
|
||||
/**
|
||||
* Revoke a grace period
|
||||
*/
|
||||
revoke: adminProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const gracePeriod = await ctx.prisma.gracePeriod.delete({
|
||||
where: { id: input.id },
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'REVOKE_GRACE_PERIOD',
|
||||
entityType: 'GracePeriod',
|
||||
entityId: input.id,
|
||||
detailsJson: {
|
||||
userId: gracePeriod.userId,
|
||||
stageId: gracePeriod.stageId,
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return gracePeriod
|
||||
}),
|
||||
|
||||
/**
|
||||
* Bulk grant grace periods
|
||||
*/
|
||||
bulkGrant: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
stageId: z.string(),
|
||||
userIds: z.array(z.string()),
|
||||
extendedUntil: z.date(),
|
||||
reason: z.string().optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const created = await ctx.prisma.gracePeriod.createMany({
|
||||
data: input.userIds.map((userId) => ({
|
||||
stageId: input.stageId,
|
||||
userId,
|
||||
extendedUntil: input.extendedUntil,
|
||||
reason: input.reason,
|
||||
grantedById: ctx.user.id,
|
||||
})),
|
||||
skipDuplicates: true,
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'BULK_GRANT_GRACE_PERIOD',
|
||||
entityType: 'GracePeriod',
|
||||
detailsJson: {
|
||||
stageId: input.stageId,
|
||||
userCount: input.userIds.length,
|
||||
created: created.count,
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return { created: created.count }
|
||||
}),
|
||||
})
|
||||
import { z } from 'zod'
|
||||
import { router, adminProcedure } from '../trpc'
|
||||
import { logAudit } from '../utils/audit'
|
||||
|
||||
export const gracePeriodRouter = router({
|
||||
/**
|
||||
* Grant a grace period to a juror
|
||||
*/
|
||||
grant: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
stageId: z.string(),
|
||||
userId: z.string(),
|
||||
projectId: z.string().optional(),
|
||||
extendedUntil: z.date(),
|
||||
reason: z.string().optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const gracePeriod = await ctx.prisma.gracePeriod.create({
|
||||
data: {
|
||||
...input,
|
||||
grantedById: ctx.user.id,
|
||||
},
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'GRANT_GRACE_PERIOD',
|
||||
entityType: 'GracePeriod',
|
||||
entityId: gracePeriod.id,
|
||||
detailsJson: {
|
||||
stageId: input.stageId,
|
||||
userId: input.userId,
|
||||
projectId: input.projectId,
|
||||
extendedUntil: input.extendedUntil.toISOString(),
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return gracePeriod
|
||||
}),
|
||||
|
||||
/**
|
||||
* List grace periods for a stage
|
||||
*/
|
||||
listByStage: adminProcedure
|
||||
.input(z.object({ stageId: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
return ctx.prisma.gracePeriod.findMany({
|
||||
where: { stageId: input.stageId },
|
||||
include: {
|
||||
user: { select: { id: true, name: true, email: true } },
|
||||
grantedBy: { select: { id: true, name: true } },
|
||||
},
|
||||
orderBy: { createdAt: 'desc' },
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* List active grace periods for a stage
|
||||
*/
|
||||
listActiveByStage: adminProcedure
|
||||
.input(z.object({ stageId: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
return ctx.prisma.gracePeriod.findMany({
|
||||
where: {
|
||||
stageId: input.stageId,
|
||||
extendedUntil: { gte: new Date() },
|
||||
},
|
||||
include: {
|
||||
user: { select: { id: true, name: true, email: true } },
|
||||
grantedBy: { select: { id: true, name: true } },
|
||||
},
|
||||
orderBy: { extendedUntil: 'asc' },
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get grace periods for a specific user in a stage
|
||||
*/
|
||||
getByUser: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
stageId: z.string(),
|
||||
userId: z.string(),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
return ctx.prisma.gracePeriod.findMany({
|
||||
where: {
|
||||
stageId: input.stageId,
|
||||
userId: input.userId,
|
||||
},
|
||||
orderBy: { createdAt: 'desc' },
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* Update a grace period
|
||||
*/
|
||||
update: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
extendedUntil: z.date().optional(),
|
||||
reason: z.string().optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const { id, ...data } = input
|
||||
|
||||
const gracePeriod = await ctx.prisma.gracePeriod.update({
|
||||
where: { id },
|
||||
data,
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'UPDATE_GRACE_PERIOD',
|
||||
entityType: 'GracePeriod',
|
||||
entityId: id,
|
||||
detailsJson: data,
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return gracePeriod
|
||||
}),
|
||||
|
||||
/**
|
||||
* Revoke a grace period
|
||||
*/
|
||||
revoke: adminProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const gracePeriod = await ctx.prisma.gracePeriod.delete({
|
||||
where: { id: input.id },
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'REVOKE_GRACE_PERIOD',
|
||||
entityType: 'GracePeriod',
|
||||
entityId: input.id,
|
||||
detailsJson: {
|
||||
userId: gracePeriod.userId,
|
||||
stageId: gracePeriod.stageId,
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return gracePeriod
|
||||
}),
|
||||
|
||||
/**
|
||||
* Bulk grant grace periods
|
||||
*/
|
||||
bulkGrant: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
stageId: z.string(),
|
||||
userIds: z.array(z.string()),
|
||||
extendedUntil: z.date(),
|
||||
reason: z.string().optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const created = await ctx.prisma.gracePeriod.createMany({
|
||||
data: input.userIds.map((userId) => ({
|
||||
stageId: input.stageId,
|
||||
userId,
|
||||
extendedUntil: input.extendedUntil,
|
||||
reason: input.reason,
|
||||
grantedById: ctx.user.id,
|
||||
})),
|
||||
skipDuplicates: true,
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'BULK_GRANT_GRACE_PERIOD',
|
||||
entityType: 'GracePeriod',
|
||||
detailsJson: {
|
||||
stageId: input.stageId,
|
||||
userCount: input.userIds.length,
|
||||
created: created.count,
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return { created: created.count }
|
||||
}),
|
||||
})
|
||||
|
||||
@@ -1,493 +1,493 @@
|
||||
import { z } from 'zod'
|
||||
import { TRPCError } from '@trpc/server'
|
||||
import {
|
||||
router,
|
||||
protectedProcedure,
|
||||
adminProcedure,
|
||||
} from '../trpc'
|
||||
import { getPresignedUrl } from '@/lib/minio'
|
||||
import { logAudit } from '../utils/audit'
|
||||
|
||||
// Bucket for learning resources
|
||||
export const LEARNING_BUCKET = 'mopc-learning'
|
||||
|
||||
export const learningResourceRouter = router({
|
||||
/**
|
||||
* List all resources (admin view)
|
||||
*/
|
||||
list: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
programId: z.string().optional(),
|
||||
resourceType: z.enum(['PDF', 'VIDEO', 'DOCUMENT', 'LINK', 'OTHER']).optional(),
|
||||
cohortLevel: z.enum(['ALL', 'SEMIFINALIST', 'FINALIST']).optional(),
|
||||
isPublished: z.boolean().optional(),
|
||||
page: z.number().int().min(1).default(1),
|
||||
perPage: z.number().int().min(1).max(100).default(20),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
const where: Record<string, unknown> = {}
|
||||
|
||||
if (input.programId !== undefined) {
|
||||
where.programId = input.programId
|
||||
}
|
||||
if (input.resourceType) {
|
||||
where.resourceType = input.resourceType
|
||||
}
|
||||
if (input.cohortLevel) {
|
||||
where.cohortLevel = input.cohortLevel
|
||||
}
|
||||
if (input.isPublished !== undefined) {
|
||||
where.isPublished = input.isPublished
|
||||
}
|
||||
|
||||
const [data, total] = await Promise.all([
|
||||
ctx.prisma.learningResource.findMany({
|
||||
where,
|
||||
include: {
|
||||
program: { select: { id: true, name: true, year: true } },
|
||||
createdBy: { select: { id: true, name: true, email: true } },
|
||||
_count: { select: { accessLogs: true } },
|
||||
},
|
||||
orderBy: [{ sortOrder: 'asc' }, { createdAt: 'desc' }],
|
||||
skip: (input.page - 1) * input.perPage,
|
||||
take: input.perPage,
|
||||
}),
|
||||
ctx.prisma.learningResource.count({ where }),
|
||||
])
|
||||
|
||||
return {
|
||||
data,
|
||||
total,
|
||||
page: input.page,
|
||||
perPage: input.perPage,
|
||||
totalPages: Math.ceil(total / input.perPage),
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get resources accessible to the current user (jury view)
|
||||
*/
|
||||
myResources: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
programId: z.string().optional(),
|
||||
resourceType: z.enum(['PDF', 'VIDEO', 'DOCUMENT', 'LINK', 'OTHER']).optional(),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
// Determine user's cohort level based on their assignments
|
||||
const assignments = await ctx.prisma.assignment.findMany({
|
||||
where: { userId: ctx.user.id },
|
||||
include: {
|
||||
project: {
|
||||
select: {
|
||||
status: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
// Determine highest cohort level
|
||||
let userCohortLevel: 'ALL' | 'SEMIFINALIST' | 'FINALIST' = 'ALL'
|
||||
for (const assignment of assignments) {
|
||||
const projectStatus = assignment.project.status
|
||||
if (projectStatus === 'FINALIST') {
|
||||
userCohortLevel = 'FINALIST'
|
||||
break
|
||||
}
|
||||
if (projectStatus === 'SEMIFINALIST') {
|
||||
userCohortLevel = 'SEMIFINALIST'
|
||||
}
|
||||
}
|
||||
|
||||
// Build query based on cohort level
|
||||
const cohortLevels = ['ALL']
|
||||
if (userCohortLevel === 'SEMIFINALIST' || userCohortLevel === 'FINALIST') {
|
||||
cohortLevels.push('SEMIFINALIST')
|
||||
}
|
||||
if (userCohortLevel === 'FINALIST') {
|
||||
cohortLevels.push('FINALIST')
|
||||
}
|
||||
|
||||
const where: Record<string, unknown> = {
|
||||
isPublished: true,
|
||||
cohortLevel: { in: cohortLevels },
|
||||
}
|
||||
|
||||
if (input.programId) {
|
||||
where.OR = [{ programId: input.programId }, { programId: null }]
|
||||
}
|
||||
if (input.resourceType) {
|
||||
where.resourceType = input.resourceType
|
||||
}
|
||||
|
||||
const resources = await ctx.prisma.learningResource.findMany({
|
||||
where,
|
||||
orderBy: [{ sortOrder: 'asc' }, { createdAt: 'desc' }],
|
||||
})
|
||||
|
||||
return {
|
||||
resources,
|
||||
userCohortLevel,
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get a single resource by ID
|
||||
*/
|
||||
get: protectedProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
const resource = await ctx.prisma.learningResource.findUniqueOrThrow({
|
||||
where: { id: input.id },
|
||||
include: {
|
||||
program: { select: { id: true, name: true, year: true } },
|
||||
createdBy: { select: { id: true, name: true, email: true } },
|
||||
},
|
||||
})
|
||||
|
||||
// Check access for non-admins
|
||||
if (ctx.user.role === 'JURY_MEMBER') {
|
||||
if (!resource.isPublished) {
|
||||
throw new TRPCError({
|
||||
code: 'FORBIDDEN',
|
||||
message: 'This resource is not available',
|
||||
})
|
||||
}
|
||||
|
||||
// Check cohort level access
|
||||
const assignments = await ctx.prisma.assignment.findMany({
|
||||
where: { userId: ctx.user.id },
|
||||
include: {
|
||||
project: {
|
||||
select: {
|
||||
status: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
let userCohortLevel: 'ALL' | 'SEMIFINALIST' | 'FINALIST' = 'ALL'
|
||||
for (const assignment of assignments) {
|
||||
const projectStatus = assignment.project.status
|
||||
if (projectStatus === 'FINALIST') {
|
||||
userCohortLevel = 'FINALIST'
|
||||
break
|
||||
}
|
||||
if (projectStatus === 'SEMIFINALIST') {
|
||||
userCohortLevel = 'SEMIFINALIST'
|
||||
}
|
||||
}
|
||||
|
||||
const accessibleLevels = ['ALL']
|
||||
if (userCohortLevel === 'SEMIFINALIST' || userCohortLevel === 'FINALIST') {
|
||||
accessibleLevels.push('SEMIFINALIST')
|
||||
}
|
||||
if (userCohortLevel === 'FINALIST') {
|
||||
accessibleLevels.push('FINALIST')
|
||||
}
|
||||
|
||||
if (!accessibleLevels.includes(resource.cohortLevel)) {
|
||||
throw new TRPCError({
|
||||
code: 'FORBIDDEN',
|
||||
message: 'You do not have access to this resource',
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return resource
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get download URL for a resource file
|
||||
* Checks cohort level access for non-admin users
|
||||
*/
|
||||
getDownloadUrl: protectedProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
const resource = await ctx.prisma.learningResource.findUniqueOrThrow({
|
||||
where: { id: input.id },
|
||||
})
|
||||
|
||||
if (!resource.bucket || !resource.objectKey) {
|
||||
throw new TRPCError({
|
||||
code: 'BAD_REQUEST',
|
||||
message: 'This resource does not have a file',
|
||||
})
|
||||
}
|
||||
|
||||
// Check access for non-admins
|
||||
const isAdmin = ['SUPER_ADMIN', 'PROGRAM_ADMIN'].includes(ctx.user.role)
|
||||
if (!isAdmin) {
|
||||
if (!resource.isPublished) {
|
||||
throw new TRPCError({
|
||||
code: 'FORBIDDEN',
|
||||
message: 'This resource is not available',
|
||||
})
|
||||
}
|
||||
|
||||
// Check cohort level access
|
||||
const assignments = await ctx.prisma.assignment.findMany({
|
||||
where: { userId: ctx.user.id },
|
||||
include: {
|
||||
project: {
|
||||
select: {
|
||||
status: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
let userCohortLevel: 'ALL' | 'SEMIFINALIST' | 'FINALIST' = 'ALL'
|
||||
for (const assignment of assignments) {
|
||||
const projectStatus = assignment.project.status
|
||||
if (projectStatus === 'FINALIST') {
|
||||
userCohortLevel = 'FINALIST'
|
||||
break
|
||||
}
|
||||
if (projectStatus === 'SEMIFINALIST') {
|
||||
userCohortLevel = 'SEMIFINALIST'
|
||||
}
|
||||
}
|
||||
|
||||
const accessibleLevels = ['ALL']
|
||||
if (userCohortLevel === 'SEMIFINALIST' || userCohortLevel === 'FINALIST') {
|
||||
accessibleLevels.push('SEMIFINALIST')
|
||||
}
|
||||
if (userCohortLevel === 'FINALIST') {
|
||||
accessibleLevels.push('FINALIST')
|
||||
}
|
||||
|
||||
if (!accessibleLevels.includes(resource.cohortLevel)) {
|
||||
throw new TRPCError({
|
||||
code: 'FORBIDDEN',
|
||||
message: 'You do not have access to this resource',
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Log access
|
||||
await ctx.prisma.resourceAccess.create({
|
||||
data: {
|
||||
resourceId: resource.id,
|
||||
userId: ctx.user.id,
|
||||
ipAddress: ctx.ip,
|
||||
},
|
||||
})
|
||||
|
||||
const url = await getPresignedUrl(resource.bucket, resource.objectKey, 'GET', 900)
|
||||
return { url }
|
||||
}),
|
||||
|
||||
/**
|
||||
* Create a new resource (admin only)
|
||||
*/
|
||||
create: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
programId: z.string().nullable(),
|
||||
title: z.string().min(1).max(255),
|
||||
description: z.string().optional(),
|
||||
contentJson: z.any().optional(), // BlockNote document structure
|
||||
resourceType: z.enum(['PDF', 'VIDEO', 'DOCUMENT', 'LINK', 'OTHER']),
|
||||
cohortLevel: z.enum(['ALL', 'SEMIFINALIST', 'FINALIST']).default('ALL'),
|
||||
externalUrl: z.string().url().optional(),
|
||||
sortOrder: z.number().int().default(0),
|
||||
isPublished: z.boolean().default(false),
|
||||
// File info (set after upload)
|
||||
fileName: z.string().optional(),
|
||||
mimeType: z.string().optional(),
|
||||
size: z.number().int().optional(),
|
||||
bucket: z.string().optional(),
|
||||
objectKey: z.string().optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const resource = await ctx.prisma.learningResource.create({
|
||||
data: {
|
||||
...input,
|
||||
createdById: ctx.user.id,
|
||||
},
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'CREATE',
|
||||
entityType: 'LearningResource',
|
||||
entityId: resource.id,
|
||||
detailsJson: { title: input.title, resourceType: input.resourceType },
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return resource
|
||||
}),
|
||||
|
||||
/**
|
||||
* Update a resource (admin only)
|
||||
*/
|
||||
update: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
title: z.string().min(1).max(255).optional(),
|
||||
description: z.string().optional(),
|
||||
contentJson: z.any().optional(), // BlockNote document structure
|
||||
resourceType: z.enum(['PDF', 'VIDEO', 'DOCUMENT', 'LINK', 'OTHER']).optional(),
|
||||
cohortLevel: z.enum(['ALL', 'SEMIFINALIST', 'FINALIST']).optional(),
|
||||
externalUrl: z.string().url().optional().nullable(),
|
||||
sortOrder: z.number().int().optional(),
|
||||
isPublished: z.boolean().optional(),
|
||||
// File info (set after upload)
|
||||
fileName: z.string().optional(),
|
||||
mimeType: z.string().optional(),
|
||||
size: z.number().int().optional(),
|
||||
bucket: z.string().optional(),
|
||||
objectKey: z.string().optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const { id, ...data } = input
|
||||
|
||||
const resource = await ctx.prisma.learningResource.update({
|
||||
where: { id },
|
||||
data,
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'UPDATE',
|
||||
entityType: 'LearningResource',
|
||||
entityId: id,
|
||||
detailsJson: data,
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return resource
|
||||
}),
|
||||
|
||||
/**
|
||||
* Delete a resource (admin only)
|
||||
*/
|
||||
delete: adminProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const resource = await ctx.prisma.learningResource.delete({
|
||||
where: { id: input.id },
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'DELETE',
|
||||
entityType: 'LearningResource',
|
||||
entityId: input.id,
|
||||
detailsJson: { title: resource.title },
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return resource
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get upload URL for a resource file (admin only)
|
||||
*/
|
||||
getUploadUrl: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
fileName: z.string(),
|
||||
mimeType: z.string(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ input }) => {
|
||||
const timestamp = Date.now()
|
||||
const sanitizedName = input.fileName.replace(/[^a-zA-Z0-9.-]/g, '_')
|
||||
const objectKey = `resources/${timestamp}-${sanitizedName}`
|
||||
|
||||
const url = await getPresignedUrl(LEARNING_BUCKET, objectKey, 'PUT', 3600)
|
||||
|
||||
return {
|
||||
url,
|
||||
bucket: LEARNING_BUCKET,
|
||||
objectKey,
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get access statistics for a resource (admin only)
|
||||
*/
|
||||
getStats: adminProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
const [totalViews, uniqueUsers, recentAccess] = await Promise.all([
|
||||
ctx.prisma.resourceAccess.count({
|
||||
where: { resourceId: input.id },
|
||||
}),
|
||||
ctx.prisma.resourceAccess.groupBy({
|
||||
by: ['userId'],
|
||||
where: { resourceId: input.id },
|
||||
}),
|
||||
ctx.prisma.resourceAccess.findMany({
|
||||
where: { resourceId: input.id },
|
||||
include: {
|
||||
user: { select: { id: true, name: true, email: true } },
|
||||
},
|
||||
orderBy: { accessedAt: 'desc' },
|
||||
take: 10,
|
||||
}),
|
||||
])
|
||||
|
||||
return {
|
||||
totalViews,
|
||||
uniqueUsers: uniqueUsers.length,
|
||||
recentAccess,
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Reorder resources (admin only)
|
||||
*/
|
||||
reorder: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
items: z.array(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
sortOrder: z.number().int(),
|
||||
})
|
||||
),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
await ctx.prisma.$transaction(
|
||||
input.items.map((item) =>
|
||||
ctx.prisma.learningResource.update({
|
||||
where: { id: item.id },
|
||||
data: { sortOrder: item.sortOrder },
|
||||
})
|
||||
)
|
||||
)
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'REORDER',
|
||||
entityType: 'LearningResource',
|
||||
detailsJson: { count: input.items.length },
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return { success: true }
|
||||
}),
|
||||
})
|
||||
import { z } from 'zod'
|
||||
import { TRPCError } from '@trpc/server'
|
||||
import {
|
||||
router,
|
||||
protectedProcedure,
|
||||
adminProcedure,
|
||||
} from '../trpc'
|
||||
import { getPresignedUrl } from '@/lib/minio'
|
||||
import { logAudit } from '../utils/audit'
|
||||
|
||||
// Bucket for learning resources
|
||||
export const LEARNING_BUCKET = 'mopc-learning'
|
||||
|
||||
export const learningResourceRouter = router({
|
||||
/**
|
||||
* List all resources (admin view)
|
||||
*/
|
||||
list: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
programId: z.string().optional(),
|
||||
resourceType: z.enum(['PDF', 'VIDEO', 'DOCUMENT', 'LINK', 'OTHER']).optional(),
|
||||
cohortLevel: z.enum(['ALL', 'SEMIFINALIST', 'FINALIST']).optional(),
|
||||
isPublished: z.boolean().optional(),
|
||||
page: z.number().int().min(1).default(1),
|
||||
perPage: z.number().int().min(1).max(100).default(20),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
const where: Record<string, unknown> = {}
|
||||
|
||||
if (input.programId !== undefined) {
|
||||
where.programId = input.programId
|
||||
}
|
||||
if (input.resourceType) {
|
||||
where.resourceType = input.resourceType
|
||||
}
|
||||
if (input.cohortLevel) {
|
||||
where.cohortLevel = input.cohortLevel
|
||||
}
|
||||
if (input.isPublished !== undefined) {
|
||||
where.isPublished = input.isPublished
|
||||
}
|
||||
|
||||
const [data, total] = await Promise.all([
|
||||
ctx.prisma.learningResource.findMany({
|
||||
where,
|
||||
include: {
|
||||
program: { select: { id: true, name: true, year: true } },
|
||||
createdBy: { select: { id: true, name: true, email: true } },
|
||||
_count: { select: { accessLogs: true } },
|
||||
},
|
||||
orderBy: [{ sortOrder: 'asc' }, { createdAt: 'desc' }],
|
||||
skip: (input.page - 1) * input.perPage,
|
||||
take: input.perPage,
|
||||
}),
|
||||
ctx.prisma.learningResource.count({ where }),
|
||||
])
|
||||
|
||||
return {
|
||||
data,
|
||||
total,
|
||||
page: input.page,
|
||||
perPage: input.perPage,
|
||||
totalPages: Math.ceil(total / input.perPage),
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get resources accessible to the current user (jury view)
|
||||
*/
|
||||
myResources: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
programId: z.string().optional(),
|
||||
resourceType: z.enum(['PDF', 'VIDEO', 'DOCUMENT', 'LINK', 'OTHER']).optional(),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
// Determine user's cohort level based on their assignments
|
||||
const assignments = await ctx.prisma.assignment.findMany({
|
||||
where: { userId: ctx.user.id },
|
||||
include: {
|
||||
project: {
|
||||
select: {
|
||||
status: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
// Determine highest cohort level
|
||||
let userCohortLevel: 'ALL' | 'SEMIFINALIST' | 'FINALIST' = 'ALL'
|
||||
for (const assignment of assignments) {
|
||||
const projectStatus = assignment.project.status
|
||||
if (projectStatus === 'FINALIST') {
|
||||
userCohortLevel = 'FINALIST'
|
||||
break
|
||||
}
|
||||
if (projectStatus === 'SEMIFINALIST') {
|
||||
userCohortLevel = 'SEMIFINALIST'
|
||||
}
|
||||
}
|
||||
|
||||
// Build query based on cohort level
|
||||
const cohortLevels = ['ALL']
|
||||
if (userCohortLevel === 'SEMIFINALIST' || userCohortLevel === 'FINALIST') {
|
||||
cohortLevels.push('SEMIFINALIST')
|
||||
}
|
||||
if (userCohortLevel === 'FINALIST') {
|
||||
cohortLevels.push('FINALIST')
|
||||
}
|
||||
|
||||
const where: Record<string, unknown> = {
|
||||
isPublished: true,
|
||||
cohortLevel: { in: cohortLevels },
|
||||
}
|
||||
|
||||
if (input.programId) {
|
||||
where.OR = [{ programId: input.programId }, { programId: null }]
|
||||
}
|
||||
if (input.resourceType) {
|
||||
where.resourceType = input.resourceType
|
||||
}
|
||||
|
||||
const resources = await ctx.prisma.learningResource.findMany({
|
||||
where,
|
||||
orderBy: [{ sortOrder: 'asc' }, { createdAt: 'desc' }],
|
||||
})
|
||||
|
||||
return {
|
||||
resources,
|
||||
userCohortLevel,
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get a single resource by ID
|
||||
*/
|
||||
get: protectedProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
const resource = await ctx.prisma.learningResource.findUniqueOrThrow({
|
||||
where: { id: input.id },
|
||||
include: {
|
||||
program: { select: { id: true, name: true, year: true } },
|
||||
createdBy: { select: { id: true, name: true, email: true } },
|
||||
},
|
||||
})
|
||||
|
||||
// Check access for non-admins
|
||||
if (ctx.user.role === 'JURY_MEMBER') {
|
||||
if (!resource.isPublished) {
|
||||
throw new TRPCError({
|
||||
code: 'FORBIDDEN',
|
||||
message: 'This resource is not available',
|
||||
})
|
||||
}
|
||||
|
||||
// Check cohort level access
|
||||
const assignments = await ctx.prisma.assignment.findMany({
|
||||
where: { userId: ctx.user.id },
|
||||
include: {
|
||||
project: {
|
||||
select: {
|
||||
status: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
let userCohortLevel: 'ALL' | 'SEMIFINALIST' | 'FINALIST' = 'ALL'
|
||||
for (const assignment of assignments) {
|
||||
const projectStatus = assignment.project.status
|
||||
if (projectStatus === 'FINALIST') {
|
||||
userCohortLevel = 'FINALIST'
|
||||
break
|
||||
}
|
||||
if (projectStatus === 'SEMIFINALIST') {
|
||||
userCohortLevel = 'SEMIFINALIST'
|
||||
}
|
||||
}
|
||||
|
||||
const accessibleLevels = ['ALL']
|
||||
if (userCohortLevel === 'SEMIFINALIST' || userCohortLevel === 'FINALIST') {
|
||||
accessibleLevels.push('SEMIFINALIST')
|
||||
}
|
||||
if (userCohortLevel === 'FINALIST') {
|
||||
accessibleLevels.push('FINALIST')
|
||||
}
|
||||
|
||||
if (!accessibleLevels.includes(resource.cohortLevel)) {
|
||||
throw new TRPCError({
|
||||
code: 'FORBIDDEN',
|
||||
message: 'You do not have access to this resource',
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return resource
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get download URL for a resource file
|
||||
* Checks cohort level access for non-admin users
|
||||
*/
|
||||
getDownloadUrl: protectedProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
const resource = await ctx.prisma.learningResource.findUniqueOrThrow({
|
||||
where: { id: input.id },
|
||||
})
|
||||
|
||||
if (!resource.bucket || !resource.objectKey) {
|
||||
throw new TRPCError({
|
||||
code: 'BAD_REQUEST',
|
||||
message: 'This resource does not have a file',
|
||||
})
|
||||
}
|
||||
|
||||
// Check access for non-admins
|
||||
const isAdmin = ['SUPER_ADMIN', 'PROGRAM_ADMIN'].includes(ctx.user.role)
|
||||
if (!isAdmin) {
|
||||
if (!resource.isPublished) {
|
||||
throw new TRPCError({
|
||||
code: 'FORBIDDEN',
|
||||
message: 'This resource is not available',
|
||||
})
|
||||
}
|
||||
|
||||
// Check cohort level access
|
||||
const assignments = await ctx.prisma.assignment.findMany({
|
||||
where: { userId: ctx.user.id },
|
||||
include: {
|
||||
project: {
|
||||
select: {
|
||||
status: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
let userCohortLevel: 'ALL' | 'SEMIFINALIST' | 'FINALIST' = 'ALL'
|
||||
for (const assignment of assignments) {
|
||||
const projectStatus = assignment.project.status
|
||||
if (projectStatus === 'FINALIST') {
|
||||
userCohortLevel = 'FINALIST'
|
||||
break
|
||||
}
|
||||
if (projectStatus === 'SEMIFINALIST') {
|
||||
userCohortLevel = 'SEMIFINALIST'
|
||||
}
|
||||
}
|
||||
|
||||
const accessibleLevels = ['ALL']
|
||||
if (userCohortLevel === 'SEMIFINALIST' || userCohortLevel === 'FINALIST') {
|
||||
accessibleLevels.push('SEMIFINALIST')
|
||||
}
|
||||
if (userCohortLevel === 'FINALIST') {
|
||||
accessibleLevels.push('FINALIST')
|
||||
}
|
||||
|
||||
if (!accessibleLevels.includes(resource.cohortLevel)) {
|
||||
throw new TRPCError({
|
||||
code: 'FORBIDDEN',
|
||||
message: 'You do not have access to this resource',
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Log access
|
||||
await ctx.prisma.resourceAccess.create({
|
||||
data: {
|
||||
resourceId: resource.id,
|
||||
userId: ctx.user.id,
|
||||
ipAddress: ctx.ip,
|
||||
},
|
||||
})
|
||||
|
||||
const url = await getPresignedUrl(resource.bucket, resource.objectKey, 'GET', 900)
|
||||
return { url }
|
||||
}),
|
||||
|
||||
/**
|
||||
* Create a new resource (admin only)
|
||||
*/
|
||||
create: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
programId: z.string().nullable(),
|
||||
title: z.string().min(1).max(255),
|
||||
description: z.string().optional(),
|
||||
contentJson: z.any().optional(), // BlockNote document structure
|
||||
resourceType: z.enum(['PDF', 'VIDEO', 'DOCUMENT', 'LINK', 'OTHER']),
|
||||
cohortLevel: z.enum(['ALL', 'SEMIFINALIST', 'FINALIST']).default('ALL'),
|
||||
externalUrl: z.string().url().optional(),
|
||||
sortOrder: z.number().int().default(0),
|
||||
isPublished: z.boolean().default(false),
|
||||
// File info (set after upload)
|
||||
fileName: z.string().optional(),
|
||||
mimeType: z.string().optional(),
|
||||
size: z.number().int().optional(),
|
||||
bucket: z.string().optional(),
|
||||
objectKey: z.string().optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const resource = await ctx.prisma.learningResource.create({
|
||||
data: {
|
||||
...input,
|
||||
createdById: ctx.user.id,
|
||||
},
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'CREATE',
|
||||
entityType: 'LearningResource',
|
||||
entityId: resource.id,
|
||||
detailsJson: { title: input.title, resourceType: input.resourceType },
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return resource
|
||||
}),
|
||||
|
||||
/**
|
||||
* Update a resource (admin only)
|
||||
*/
|
||||
update: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
title: z.string().min(1).max(255).optional(),
|
||||
description: z.string().optional(),
|
||||
contentJson: z.any().optional(), // BlockNote document structure
|
||||
resourceType: z.enum(['PDF', 'VIDEO', 'DOCUMENT', 'LINK', 'OTHER']).optional(),
|
||||
cohortLevel: z.enum(['ALL', 'SEMIFINALIST', 'FINALIST']).optional(),
|
||||
externalUrl: z.string().url().optional().nullable(),
|
||||
sortOrder: z.number().int().optional(),
|
||||
isPublished: z.boolean().optional(),
|
||||
// File info (set after upload)
|
||||
fileName: z.string().optional(),
|
||||
mimeType: z.string().optional(),
|
||||
size: z.number().int().optional(),
|
||||
bucket: z.string().optional(),
|
||||
objectKey: z.string().optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const { id, ...data } = input
|
||||
|
||||
const resource = await ctx.prisma.learningResource.update({
|
||||
where: { id },
|
||||
data,
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'UPDATE',
|
||||
entityType: 'LearningResource',
|
||||
entityId: id,
|
||||
detailsJson: data,
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return resource
|
||||
}),
|
||||
|
||||
/**
|
||||
* Delete a resource (admin only)
|
||||
*/
|
||||
delete: adminProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const resource = await ctx.prisma.learningResource.delete({
|
||||
where: { id: input.id },
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'DELETE',
|
||||
entityType: 'LearningResource',
|
||||
entityId: input.id,
|
||||
detailsJson: { title: resource.title },
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return resource
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get upload URL for a resource file (admin only)
|
||||
*/
|
||||
getUploadUrl: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
fileName: z.string(),
|
||||
mimeType: z.string(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ input }) => {
|
||||
const timestamp = Date.now()
|
||||
const sanitizedName = input.fileName.replace(/[^a-zA-Z0-9.-]/g, '_')
|
||||
const objectKey = `resources/${timestamp}-${sanitizedName}`
|
||||
|
||||
const url = await getPresignedUrl(LEARNING_BUCKET, objectKey, 'PUT', 3600)
|
||||
|
||||
return {
|
||||
url,
|
||||
bucket: LEARNING_BUCKET,
|
||||
objectKey,
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get access statistics for a resource (admin only)
|
||||
*/
|
||||
getStats: adminProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
const [totalViews, uniqueUsers, recentAccess] = await Promise.all([
|
||||
ctx.prisma.resourceAccess.count({
|
||||
where: { resourceId: input.id },
|
||||
}),
|
||||
ctx.prisma.resourceAccess.groupBy({
|
||||
by: ['userId'],
|
||||
where: { resourceId: input.id },
|
||||
}),
|
||||
ctx.prisma.resourceAccess.findMany({
|
||||
where: { resourceId: input.id },
|
||||
include: {
|
||||
user: { select: { id: true, name: true, email: true } },
|
||||
},
|
||||
orderBy: { accessedAt: 'desc' },
|
||||
take: 10,
|
||||
}),
|
||||
])
|
||||
|
||||
return {
|
||||
totalViews,
|
||||
uniqueUsers: uniqueUsers.length,
|
||||
recentAccess,
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Reorder resources (admin only)
|
||||
*/
|
||||
reorder: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
items: z.array(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
sortOrder: z.number().int(),
|
||||
})
|
||||
),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
await ctx.prisma.$transaction(
|
||||
input.items.map((item) =>
|
||||
ctx.prisma.learningResource.update({
|
||||
where: { id: item.id },
|
||||
data: { sortOrder: item.sortOrder },
|
||||
})
|
||||
)
|
||||
)
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'REORDER',
|
||||
entityType: 'LearningResource',
|
||||
detailsJson: { count: input.items.length },
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return { success: true }
|
||||
}),
|
||||
})
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,133 +1,133 @@
|
||||
import { z } from 'zod'
|
||||
import { TRPCError } from '@trpc/server'
|
||||
import { router, adminProcedure } from '../trpc'
|
||||
import { generateLogoKey, type StorageProviderType } from '@/lib/storage'
|
||||
import {
|
||||
getImageUploadUrl,
|
||||
confirmImageUpload,
|
||||
getImageUrl,
|
||||
deleteImage,
|
||||
type ImageUploadConfig,
|
||||
} from '../utils/image-upload'
|
||||
|
||||
type LogoSelect = {
|
||||
logoKey: string | null
|
||||
logoProvider: string | null
|
||||
}
|
||||
|
||||
const logoConfig: ImageUploadConfig<LogoSelect> = {
|
||||
label: 'logo',
|
||||
generateKey: generateLogoKey,
|
||||
findCurrent: (prisma, entityId) =>
|
||||
prisma.project.findUnique({
|
||||
where: { id: entityId },
|
||||
select: { logoKey: true, logoProvider: true },
|
||||
}),
|
||||
getImageKey: (record) => record.logoKey,
|
||||
getProviderType: (record) =>
|
||||
(record.logoProvider as StorageProviderType) || 's3',
|
||||
setImage: (prisma, entityId, key, providerType) =>
|
||||
prisma.project.update({
|
||||
where: { id: entityId },
|
||||
data: { logoKey: key, logoProvider: providerType },
|
||||
}),
|
||||
clearImage: (prisma, entityId) =>
|
||||
prisma.project.update({
|
||||
where: { id: entityId },
|
||||
data: { logoKey: null, logoProvider: null },
|
||||
}),
|
||||
auditEntityType: 'Project',
|
||||
auditFieldName: 'logoKey',
|
||||
}
|
||||
|
||||
export const logoRouter = router({
|
||||
/**
|
||||
* Get a pre-signed URL for uploading a project logo
|
||||
*/
|
||||
getUploadUrl: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
projectId: z.string(),
|
||||
fileName: z.string(),
|
||||
contentType: z.string(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
// Verify project exists
|
||||
const project = await ctx.prisma.project.findUnique({
|
||||
where: { id: input.projectId },
|
||||
select: { id: true },
|
||||
})
|
||||
|
||||
if (!project) {
|
||||
throw new TRPCError({ code: 'NOT_FOUND', message: 'Project not found' })
|
||||
}
|
||||
|
||||
return getImageUploadUrl(
|
||||
input.projectId,
|
||||
input.fileName,
|
||||
input.contentType,
|
||||
generateLogoKey
|
||||
)
|
||||
}),
|
||||
|
||||
/**
|
||||
* Confirm logo upload and update project
|
||||
*/
|
||||
confirmUpload: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
projectId: z.string(),
|
||||
key: z.string(),
|
||||
providerType: z.enum(['s3', 'local']),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
await confirmImageUpload(
|
||||
ctx.prisma,
|
||||
logoConfig,
|
||||
input.projectId,
|
||||
input.key,
|
||||
input.providerType,
|
||||
{
|
||||
userId: ctx.user.id,
|
||||
ip: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
}
|
||||
)
|
||||
|
||||
// Return the updated project fields to match original API contract
|
||||
const project = await ctx.prisma.project.findUnique({
|
||||
where: { id: input.projectId },
|
||||
select: {
|
||||
id: true,
|
||||
logoKey: true,
|
||||
logoProvider: true,
|
||||
},
|
||||
})
|
||||
|
||||
return project
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get a project's logo URL
|
||||
*/
|
||||
getUrl: adminProcedure
|
||||
.input(z.object({ projectId: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
return getImageUrl(ctx.prisma, logoConfig, input.projectId)
|
||||
}),
|
||||
|
||||
/**
|
||||
* Delete a project's logo
|
||||
*/
|
||||
delete: adminProcedure
|
||||
.input(z.object({ projectId: z.string() }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
return deleteImage(ctx.prisma, logoConfig, input.projectId, {
|
||||
userId: ctx.user.id,
|
||||
ip: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
}),
|
||||
})
|
||||
import { z } from 'zod'
|
||||
import { TRPCError } from '@trpc/server'
|
||||
import { router, adminProcedure } from '../trpc'
|
||||
import { generateLogoKey, type StorageProviderType } from '@/lib/storage'
|
||||
import {
|
||||
getImageUploadUrl,
|
||||
confirmImageUpload,
|
||||
getImageUrl,
|
||||
deleteImage,
|
||||
type ImageUploadConfig,
|
||||
} from '../utils/image-upload'
|
||||
|
||||
/**
 * Shape of the project fields the logo workflow reads/writes.
 * Both columns are nullable: a project with no logo has both set to null.
 */
type LogoSelect = {
  logoKey: string | null
  logoProvider: string | null
}

/**
 * Adapter wiring the generic image-upload helpers (confirmImageUpload,
 * getImageUrl, deleteImage) to the Project.logoKey / Project.logoProvider
 * columns. Each callback tells the shared helpers how to load, persist,
 * and clear the logo for a given project id.
 */
const logoConfig: ImageUploadConfig<LogoSelect> = {
  // Used by the shared helpers in log/error messages.
  label: 'logo',
  // Storage-key generator for new uploads (from '@/lib/storage').
  generateKey: generateLogoKey,
  // Fetch the current logo columns for a project (null if project missing).
  findCurrent: (prisma, entityId) =>
    prisma.project.findUnique({
      where: { id: entityId },
      select: { logoKey: true, logoProvider: true },
    }),
  getImageKey: (record) => record.logoKey,
  // Defaults to 's3' when logoProvider is null (or empty — '||' also
  // treats '' as unset, which appears intentional here).
  getProviderType: (record) =>
    (record.logoProvider as StorageProviderType) || 's3',
  // Persist a newly confirmed upload.
  setImage: (prisma, entityId, key, providerType) =>
    prisma.project.update({
      where: { id: entityId },
      data: { logoKey: key, logoProvider: providerType },
    }),
  // Remove the logo reference (the helpers handle storage deletion).
  clearImage: (prisma, entityId) =>
    prisma.project.update({
      where: { id: entityId },
      data: { logoKey: null, logoProvider: null },
    }),
  // Audit-log attribution for mutations performed through this config.
  auditEntityType: 'Project',
  auditFieldName: 'logoKey',
}
|
||||
|
||||
export const logoRouter = router({
|
||||
/**
|
||||
* Get a pre-signed URL for uploading a project logo
|
||||
*/
|
||||
getUploadUrl: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
projectId: z.string(),
|
||||
fileName: z.string(),
|
||||
contentType: z.string(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
// Verify project exists
|
||||
const project = await ctx.prisma.project.findUnique({
|
||||
where: { id: input.projectId },
|
||||
select: { id: true },
|
||||
})
|
||||
|
||||
if (!project) {
|
||||
throw new TRPCError({ code: 'NOT_FOUND', message: 'Project not found' })
|
||||
}
|
||||
|
||||
return getImageUploadUrl(
|
||||
input.projectId,
|
||||
input.fileName,
|
||||
input.contentType,
|
||||
generateLogoKey
|
||||
)
|
||||
}),
|
||||
|
||||
/**
|
||||
* Confirm logo upload and update project
|
||||
*/
|
||||
confirmUpload: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
projectId: z.string(),
|
||||
key: z.string(),
|
||||
providerType: z.enum(['s3', 'local']),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
await confirmImageUpload(
|
||||
ctx.prisma,
|
||||
logoConfig,
|
||||
input.projectId,
|
||||
input.key,
|
||||
input.providerType,
|
||||
{
|
||||
userId: ctx.user.id,
|
||||
ip: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
}
|
||||
)
|
||||
|
||||
// Return the updated project fields to match original API contract
|
||||
const project = await ctx.prisma.project.findUnique({
|
||||
where: { id: input.projectId },
|
||||
select: {
|
||||
id: true,
|
||||
logoKey: true,
|
||||
logoProvider: true,
|
||||
},
|
||||
})
|
||||
|
||||
return project
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get a project's logo URL
|
||||
*/
|
||||
getUrl: adminProcedure
|
||||
.input(z.object({ projectId: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
return getImageUrl(ctx.prisma, logoConfig, input.projectId)
|
||||
}),
|
||||
|
||||
/**
|
||||
* Delete a project's logo
|
||||
*/
|
||||
delete: adminProcedure
|
||||
.input(z.object({ projectId: z.string() }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
return deleteImage(ctx.prisma, logoConfig, input.projectId, {
|
||||
userId: ctx.user.id,
|
||||
ip: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
}),
|
||||
})
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,405 +1,405 @@
|
||||
import { z } from 'zod'
|
||||
import { TRPCError } from '@trpc/server'
|
||||
import { router, protectedProcedure, adminProcedure } from '../trpc'
|
||||
import { logAudit } from '@/server/utils/audit'
|
||||
import { sendStyledNotificationEmail } from '@/lib/email'
|
||||
|
||||
/**
 * tRPC router for ad-hoc admin messaging and message templates.
 *
 * Sending is admin-only; reading (inbox, markRead, unread count) is
 * available to any authenticated user. Template CRUD is admin-only.
 */
export const messageRouter = router({
  /**
   * Send a message to recipients.
   * Resolves recipient list based on recipientType and delivers via specified channels.
   *
   * Flow: resolve recipients -> create Message + MessageRecipient rows ->
   * (if not scheduled) send EMAIL channel immediately -> best-effort audit log.
   * Throws BAD_REQUEST when no recipients match.
   */
  send: adminProcedure
    .input(
      z.object({
        recipientType: z.enum(['USER', 'ROLE', 'STAGE_JURY', 'PROGRAM_TEAM', 'ALL']),
        // Free-form JSON interpreted by resolveRecipients (userId / role / stageId / programId).
        recipientFilter: z.any().optional(),
        stageId: z.string().optional(),
        subject: z.string().min(1).max(500),
        body: z.string().min(1),
        deliveryChannels: z.array(z.string()).min(1),
        // ISO datetime; when present the message is stored for later delivery
        // (no immediate email send happens here).
        scheduledAt: z.string().datetime().optional(),
        templateId: z.string().optional(),
      })
    )
    .mutation(async ({ ctx, input }) => {
      // Resolve recipients based on type
      const recipientUserIds = await resolveRecipients(
        ctx.prisma,
        input.recipientType,
        input.recipientFilter,
        input.stageId
      )

      if (recipientUserIds.length === 0) {
        throw new TRPCError({
          code: 'BAD_REQUEST',
          message: 'No recipients found for the given criteria',
        })
      }

      const isScheduled = !!input.scheduledAt
      const now = new Date()

      // Create message with one MessageRecipient row per (user, channel) pair.
      const message = await ctx.prisma.message.create({
        data: {
          senderId: ctx.user.id,
          recipientType: input.recipientType,
          recipientFilter: input.recipientFilter ?? undefined,
          stageId: input.stageId,
          templateId: input.templateId,
          subject: input.subject,
          body: input.body,
          deliveryChannels: input.deliveryChannels,
          scheduledAt: input.scheduledAt ? new Date(input.scheduledAt) : undefined,
          // sentAt stays unset for scheduled messages; set now for immediate sends.
          sentAt: isScheduled ? undefined : now,
          recipients: {
            create: recipientUserIds.flatMap((userId) =>
              input.deliveryChannels.map((channel) => ({
                userId,
                channel,
              }))
            ),
          },
        },
        include: {
          recipients: true,
        },
      })

      // If not scheduled, deliver immediately for EMAIL channel.
      // Emails are sent sequentially; individual failures are logged and
      // do not fail the mutation (best-effort delivery).
      if (!isScheduled && input.deliveryChannels.includes('EMAIL')) {
        const users = await ctx.prisma.user.findMany({
          where: { id: { in: recipientUserIds } },
          select: { id: true, name: true, email: true },
        })

        const baseUrl = process.env.NEXTAUTH_URL || 'https://monaco-opc.com'

        for (const user of users) {
          try {
            await sendStyledNotificationEmail(
              user.email,
              user.name || '',
              'MESSAGE',
              {
                name: user.name || undefined,
                title: input.subject,
                message: input.body,
                linkUrl: `${baseUrl}/messages`,
              }
            )
          } catch (error) {
            console.error(`[Message] Failed to send email to ${user.email}:`, error)
          }
        }
      }

      // Audit logging is best-effort: a logging failure never fails the send.
      try {
        await logAudit({
          prisma: ctx.prisma,
          userId: ctx.user.id,
          action: 'SEND_MESSAGE',
          entityType: 'Message',
          entityId: message.id,
          detailsJson: {
            recipientType: input.recipientType,
            recipientCount: recipientUserIds.length,
            channels: input.deliveryChannels,
            scheduled: isScheduled,
          },
        })
      } catch {}

      return {
        ...message,
        recipientCount: recipientUserIds.length,
      }
    }),

  /**
   * Get the current user's inbox (messages sent to them).
   * Paginated; ordered newest-first by the parent message's creation time.
   * NOTE(review): a user receiving a message on multiple channels has one
   * MessageRecipient row per channel, so the same message can appear more
   * than once here — confirm whether the UI de-duplicates.
   */
  inbox: protectedProcedure
    .input(
      z.object({
        page: z.number().int().min(1).default(1),
        pageSize: z.number().int().min(1).max(100).default(20),
      }).optional()
    )
    .query(async ({ ctx, input }) => {
      const page = input?.page ?? 1
      const pageSize = input?.pageSize ?? 20
      const skip = (page - 1) * pageSize

      // Fetch page items and total count in parallel.
      const [items, total] = await Promise.all([
        ctx.prisma.messageRecipient.findMany({
          where: { userId: ctx.user.id },
          include: {
            message: {
              include: {
                sender: {
                  select: { id: true, name: true, email: true },
                },
              },
            },
          },
          orderBy: { message: { createdAt: 'desc' } },
          skip,
          take: pageSize,
        }),
        ctx.prisma.messageRecipient.count({
          where: { userId: ctx.user.id },
        }),
      ])

      return {
        items,
        total,
        page,
        pageSize,
        totalPages: Math.ceil(total / pageSize),
      }
    }),

  /**
   * Mark a message as read.
   * `id` is the MessageRecipient row id; ownership is enforced (NOT_FOUND
   * rather than FORBIDDEN, to avoid leaking existence of others' messages).
   */
  markRead: protectedProcedure
    .input(z.object({ id: z.string() }))
    .mutation(async ({ ctx, input }) => {
      const recipient = await ctx.prisma.messageRecipient.findUnique({
        where: { id: input.id },
      })

      if (!recipient || recipient.userId !== ctx.user.id) {
        throw new TRPCError({
          code: 'NOT_FOUND',
          message: 'Message not found',
        })
      }

      return ctx.prisma.messageRecipient.update({
        where: { id: input.id },
        data: {
          isRead: true,
          readAt: new Date(),
        },
      })
    }),

  /**
   * Get unread message count for the current user.
   */
  getUnreadCount: protectedProcedure.query(async ({ ctx }) => {
    const count = await ctx.prisma.messageRecipient.count({
      where: {
        userId: ctx.user.id,
        isRead: false,
      },
    })
    return { count }
  }),

  // =========================================================================
  // Template procedures
  // =========================================================================

  /**
   * List all message templates.
   * Defaults to active templates only; pass activeOnly=false to include
   * soft-deleted ones.
   */
  listTemplates: adminProcedure
    .input(
      z.object({
        category: z.string().optional(),
        activeOnly: z.boolean().default(true),
      }).optional()
    )
    .query(async ({ ctx, input }) => {
      return ctx.prisma.messageTemplate.findMany({
        where: {
          ...(input?.category ? { category: input.category } : {}),
          // Treat missing input (or activeOnly !== false) as "active only".
          ...(input?.activeOnly !== false ? { isActive: true } : {}),
        },
        orderBy: { createdAt: 'desc' },
      })
    }),

  /**
   * Create a message template.
   * Audit logging is best-effort (failure does not fail the create).
   */
  createTemplate: adminProcedure
    .input(
      z.object({
        name: z.string().min(1).max(200),
        category: z.string().min(1).max(100),
        subject: z.string().min(1).max(500),
        body: z.string().min(1),
        // Free-form JSON describing available substitution variables.
        variables: z.any().optional(),
      })
    )
    .mutation(async ({ ctx, input }) => {
      const template = await ctx.prisma.messageTemplate.create({
        data: {
          name: input.name,
          category: input.category,
          subject: input.subject,
          body: input.body,
          variables: input.variables ?? undefined,
          createdBy: ctx.user.id,
        },
      })

      try {
        await logAudit({
          prisma: ctx.prisma,
          userId: ctx.user.id,
          action: 'CREATE_MESSAGE_TEMPLATE',
          entityType: 'MessageTemplate',
          entityId: template.id,
          detailsJson: { name: input.name, category: input.category },
        })
      } catch {}

      return template
    }),

  /**
   * Update a message template.
   * Only fields explicitly provided are written; omitted fields are untouched.
   */
  updateTemplate: adminProcedure
    .input(
      z.object({
        id: z.string(),
        name: z.string().min(1).max(200).optional(),
        category: z.string().min(1).max(100).optional(),
        subject: z.string().min(1).max(500).optional(),
        body: z.string().min(1).optional(),
        variables: z.any().optional(),
        isActive: z.boolean().optional(),
      })
    )
    .mutation(async ({ ctx, input }) => {
      const { id, ...data } = input

      const template = await ctx.prisma.messageTemplate.update({
        where: { id },
        data: {
          // Spread each field conditionally so `undefined` never overwrites.
          ...(data.name !== undefined ? { name: data.name } : {}),
          ...(data.category !== undefined ? { category: data.category } : {}),
          ...(data.subject !== undefined ? { subject: data.subject } : {}),
          ...(data.body !== undefined ? { body: data.body } : {}),
          ...(data.variables !== undefined ? { variables: data.variables } : {}),
          ...(data.isActive !== undefined ? { isActive: data.isActive } : {}),
        },
      })

      try {
        await logAudit({
          prisma: ctx.prisma,
          userId: ctx.user.id,
          action: 'UPDATE_MESSAGE_TEMPLATE',
          entityType: 'MessageTemplate',
          entityId: id,
          detailsJson: { updatedFields: Object.keys(data) },
        })
      } catch {}

      return template
    }),

  /**
   * Soft-delete a message template (set isActive=false).
   * The row is kept so existing messages referencing it remain valid.
   */
  deleteTemplate: adminProcedure
    .input(z.object({ id: z.string() }))
    .mutation(async ({ ctx, input }) => {
      const template = await ctx.prisma.messageTemplate.update({
        where: { id: input.id },
        data: { isActive: false },
      })

      try {
        await logAudit({
          prisma: ctx.prisma,
          userId: ctx.user.id,
          action: 'DELETE_MESSAGE_TEMPLATE',
          entityType: 'MessageTemplate',
          entityId: input.id,
        })
      } catch {}

      return template
    }),
})
|
||||
|
||||
// =============================================================================
|
||||
// Helper: Resolve recipient user IDs based on recipientType
|
||||
// =============================================================================
|
||||
|
||||
type PrismaClient = Parameters<Parameters<typeof adminProcedure.mutation>[0]>[0]['ctx']['prisma']
|
||||
|
||||
async function resolveRecipients(
|
||||
prisma: PrismaClient,
|
||||
recipientType: string,
|
||||
recipientFilter: unknown,
|
||||
stageId?: string
|
||||
): Promise<string[]> {
|
||||
const filter = recipientFilter as Record<string, unknown> | undefined
|
||||
|
||||
switch (recipientType) {
|
||||
case 'USER': {
|
||||
const userId = filter?.userId as string
|
||||
if (!userId) return []
|
||||
const user = await prisma.user.findUnique({
|
||||
where: { id: userId },
|
||||
select: { id: true },
|
||||
})
|
||||
return user ? [user.id] : []
|
||||
}
|
||||
|
||||
case 'ROLE': {
|
||||
const role = filter?.role as string
|
||||
if (!role) return []
|
||||
const users = await prisma.user.findMany({
|
||||
where: { role: role as any, status: 'ACTIVE' },
|
||||
select: { id: true },
|
||||
})
|
||||
return users.map((u) => u.id)
|
||||
}
|
||||
|
||||
case 'STAGE_JURY': {
|
||||
const targetStageId = stageId || (filter?.stageId as string)
|
||||
if (!targetStageId) return []
|
||||
const assignments = await prisma.assignment.findMany({
|
||||
where: { stageId: targetStageId },
|
||||
select: { userId: true },
|
||||
distinct: ['userId'],
|
||||
})
|
||||
return assignments.map((a) => a.userId)
|
||||
}
|
||||
|
||||
case 'PROGRAM_TEAM': {
|
||||
const programId = filter?.programId as string
|
||||
if (!programId) return []
|
||||
const projects = await prisma.project.findMany({
|
||||
where: { programId },
|
||||
select: { submittedByUserId: true },
|
||||
})
|
||||
const ids = new Set(projects.map((p) => p.submittedByUserId).filter(Boolean) as string[])
|
||||
return [...ids]
|
||||
}
|
||||
|
||||
case 'ALL': {
|
||||
const users = await prisma.user.findMany({
|
||||
where: { status: 'ACTIVE' },
|
||||
select: { id: true },
|
||||
})
|
||||
return users.map((u) => u.id)
|
||||
}
|
||||
|
||||
default:
|
||||
return []
|
||||
}
|
||||
}
|
||||
import { z } from 'zod'
|
||||
import { TRPCError } from '@trpc/server'
|
||||
import { router, protectedProcedure, adminProcedure } from '../trpc'
|
||||
import { logAudit } from '@/server/utils/audit'
|
||||
import { sendStyledNotificationEmail } from '@/lib/email'
|
||||
|
||||
export const messageRouter = router({
|
||||
/**
|
||||
* Send a message to recipients.
|
||||
* Resolves recipient list based on recipientType and delivers via specified channels.
|
||||
*/
|
||||
send: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
recipientType: z.enum(['USER', 'ROLE', 'STAGE_JURY', 'PROGRAM_TEAM', 'ALL']),
|
||||
recipientFilter: z.any().optional(),
|
||||
stageId: z.string().optional(),
|
||||
subject: z.string().min(1).max(500),
|
||||
body: z.string().min(1),
|
||||
deliveryChannels: z.array(z.string()).min(1),
|
||||
scheduledAt: z.string().datetime().optional(),
|
||||
templateId: z.string().optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
// Resolve recipients based on type
|
||||
const recipientUserIds = await resolveRecipients(
|
||||
ctx.prisma,
|
||||
input.recipientType,
|
||||
input.recipientFilter,
|
||||
input.stageId
|
||||
)
|
||||
|
||||
if (recipientUserIds.length === 0) {
|
||||
throw new TRPCError({
|
||||
code: 'BAD_REQUEST',
|
||||
message: 'No recipients found for the given criteria',
|
||||
})
|
||||
}
|
||||
|
||||
const isScheduled = !!input.scheduledAt
|
||||
const now = new Date()
|
||||
|
||||
// Create message
|
||||
const message = await ctx.prisma.message.create({
|
||||
data: {
|
||||
senderId: ctx.user.id,
|
||||
recipientType: input.recipientType,
|
||||
recipientFilter: input.recipientFilter ?? undefined,
|
||||
stageId: input.stageId,
|
||||
templateId: input.templateId,
|
||||
subject: input.subject,
|
||||
body: input.body,
|
||||
deliveryChannels: input.deliveryChannels,
|
||||
scheduledAt: input.scheduledAt ? new Date(input.scheduledAt) : undefined,
|
||||
sentAt: isScheduled ? undefined : now,
|
||||
recipients: {
|
||||
create: recipientUserIds.flatMap((userId) =>
|
||||
input.deliveryChannels.map((channel) => ({
|
||||
userId,
|
||||
channel,
|
||||
}))
|
||||
),
|
||||
},
|
||||
},
|
||||
include: {
|
||||
recipients: true,
|
||||
},
|
||||
})
|
||||
|
||||
// If not scheduled, deliver immediately for EMAIL channel
|
||||
if (!isScheduled && input.deliveryChannels.includes('EMAIL')) {
|
||||
const users = await ctx.prisma.user.findMany({
|
||||
where: { id: { in: recipientUserIds } },
|
||||
select: { id: true, name: true, email: true },
|
||||
})
|
||||
|
||||
const baseUrl = process.env.NEXTAUTH_URL || 'https://monaco-opc.com'
|
||||
|
||||
for (const user of users) {
|
||||
try {
|
||||
await sendStyledNotificationEmail(
|
||||
user.email,
|
||||
user.name || '',
|
||||
'MESSAGE',
|
||||
{
|
||||
name: user.name || undefined,
|
||||
title: input.subject,
|
||||
message: input.body,
|
||||
linkUrl: `${baseUrl}/messages`,
|
||||
}
|
||||
)
|
||||
} catch (error) {
|
||||
console.error(`[Message] Failed to send email to ${user.email}:`, error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'SEND_MESSAGE',
|
||||
entityType: 'Message',
|
||||
entityId: message.id,
|
||||
detailsJson: {
|
||||
recipientType: input.recipientType,
|
||||
recipientCount: recipientUserIds.length,
|
||||
channels: input.deliveryChannels,
|
||||
scheduled: isScheduled,
|
||||
},
|
||||
})
|
||||
} catch {}
|
||||
|
||||
return {
|
||||
...message,
|
||||
recipientCount: recipientUserIds.length,
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get the current user's inbox (messages sent to them).
|
||||
*/
|
||||
inbox: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
page: z.number().int().min(1).default(1),
|
||||
pageSize: z.number().int().min(1).max(100).default(20),
|
||||
}).optional()
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
const page = input?.page ?? 1
|
||||
const pageSize = input?.pageSize ?? 20
|
||||
const skip = (page - 1) * pageSize
|
||||
|
||||
const [items, total] = await Promise.all([
|
||||
ctx.prisma.messageRecipient.findMany({
|
||||
where: { userId: ctx.user.id },
|
||||
include: {
|
||||
message: {
|
||||
include: {
|
||||
sender: {
|
||||
select: { id: true, name: true, email: true },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
orderBy: { message: { createdAt: 'desc' } },
|
||||
skip,
|
||||
take: pageSize,
|
||||
}),
|
||||
ctx.prisma.messageRecipient.count({
|
||||
where: { userId: ctx.user.id },
|
||||
}),
|
||||
])
|
||||
|
||||
return {
|
||||
items,
|
||||
total,
|
||||
page,
|
||||
pageSize,
|
||||
totalPages: Math.ceil(total / pageSize),
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Mark a message as read.
|
||||
*/
|
||||
markRead: protectedProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const recipient = await ctx.prisma.messageRecipient.findUnique({
|
||||
where: { id: input.id },
|
||||
})
|
||||
|
||||
if (!recipient || recipient.userId !== ctx.user.id) {
|
||||
throw new TRPCError({
|
||||
code: 'NOT_FOUND',
|
||||
message: 'Message not found',
|
||||
})
|
||||
}
|
||||
|
||||
return ctx.prisma.messageRecipient.update({
|
||||
where: { id: input.id },
|
||||
data: {
|
||||
isRead: true,
|
||||
readAt: new Date(),
|
||||
},
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get unread message count for the current user.
|
||||
*/
|
||||
getUnreadCount: protectedProcedure.query(async ({ ctx }) => {
|
||||
const count = await ctx.prisma.messageRecipient.count({
|
||||
where: {
|
||||
userId: ctx.user.id,
|
||||
isRead: false,
|
||||
},
|
||||
})
|
||||
return { count }
|
||||
}),
|
||||
|
||||
// =========================================================================
|
||||
// Template procedures
|
||||
// =========================================================================
|
||||
|
||||
/**
|
||||
* List all message templates.
|
||||
*/
|
||||
listTemplates: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
category: z.string().optional(),
|
||||
activeOnly: z.boolean().default(true),
|
||||
}).optional()
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
return ctx.prisma.messageTemplate.findMany({
|
||||
where: {
|
||||
...(input?.category ? { category: input.category } : {}),
|
||||
...(input?.activeOnly !== false ? { isActive: true } : {}),
|
||||
},
|
||||
orderBy: { createdAt: 'desc' },
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* Create a message template.
|
||||
*/
|
||||
createTemplate: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
name: z.string().min(1).max(200),
|
||||
category: z.string().min(1).max(100),
|
||||
subject: z.string().min(1).max(500),
|
||||
body: z.string().min(1),
|
||||
variables: z.any().optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const template = await ctx.prisma.messageTemplate.create({
|
||||
data: {
|
||||
name: input.name,
|
||||
category: input.category,
|
||||
subject: input.subject,
|
||||
body: input.body,
|
||||
variables: input.variables ?? undefined,
|
||||
createdBy: ctx.user.id,
|
||||
},
|
||||
})
|
||||
|
||||
try {
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'CREATE_MESSAGE_TEMPLATE',
|
||||
entityType: 'MessageTemplate',
|
||||
entityId: template.id,
|
||||
detailsJson: { name: input.name, category: input.category },
|
||||
})
|
||||
} catch {}
|
||||
|
||||
return template
|
||||
}),
|
||||
|
||||
/**
|
||||
* Update a message template.
|
||||
*/
|
||||
updateTemplate: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
name: z.string().min(1).max(200).optional(),
|
||||
category: z.string().min(1).max(100).optional(),
|
||||
subject: z.string().min(1).max(500).optional(),
|
||||
body: z.string().min(1).optional(),
|
||||
variables: z.any().optional(),
|
||||
isActive: z.boolean().optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const { id, ...data } = input
|
||||
|
||||
const template = await ctx.prisma.messageTemplate.update({
|
||||
where: { id },
|
||||
data: {
|
||||
...(data.name !== undefined ? { name: data.name } : {}),
|
||||
...(data.category !== undefined ? { category: data.category } : {}),
|
||||
...(data.subject !== undefined ? { subject: data.subject } : {}),
|
||||
...(data.body !== undefined ? { body: data.body } : {}),
|
||||
...(data.variables !== undefined ? { variables: data.variables } : {}),
|
||||
...(data.isActive !== undefined ? { isActive: data.isActive } : {}),
|
||||
},
|
||||
})
|
||||
|
||||
try {
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'UPDATE_MESSAGE_TEMPLATE',
|
||||
entityType: 'MessageTemplate',
|
||||
entityId: id,
|
||||
detailsJson: { updatedFields: Object.keys(data) },
|
||||
})
|
||||
} catch {}
|
||||
|
||||
return template
|
||||
}),
|
||||
|
||||
/**
|
||||
* Soft-delete a message template (set isActive=false).
|
||||
*/
|
||||
deleteTemplate: adminProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const template = await ctx.prisma.messageTemplate.update({
|
||||
where: { id: input.id },
|
||||
data: { isActive: false },
|
||||
})
|
||||
|
||||
try {
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'DELETE_MESSAGE_TEMPLATE',
|
||||
entityType: 'MessageTemplate',
|
||||
entityId: input.id,
|
||||
})
|
||||
} catch {}
|
||||
|
||||
return template
|
||||
}),
|
||||
})
|
||||
|
||||
// =============================================================================
|
||||
// Helper: Resolve recipient user IDs based on recipientType
|
||||
// =============================================================================
|
||||
|
||||
type PrismaClient = Parameters<Parameters<typeof adminProcedure.mutation>[0]>[0]['ctx']['prisma']
|
||||
|
||||
async function resolveRecipients(
|
||||
prisma: PrismaClient,
|
||||
recipientType: string,
|
||||
recipientFilter: unknown,
|
||||
stageId?: string
|
||||
): Promise<string[]> {
|
||||
const filter = recipientFilter as Record<string, unknown> | undefined
|
||||
|
||||
switch (recipientType) {
|
||||
case 'USER': {
|
||||
const userId = filter?.userId as string
|
||||
if (!userId) return []
|
||||
const user = await prisma.user.findUnique({
|
||||
where: { id: userId },
|
||||
select: { id: true },
|
||||
})
|
||||
return user ? [user.id] : []
|
||||
}
|
||||
|
||||
case 'ROLE': {
|
||||
const role = filter?.role as string
|
||||
if (!role) return []
|
||||
const users = await prisma.user.findMany({
|
||||
where: { role: role as any, status: 'ACTIVE' },
|
||||
select: { id: true },
|
||||
})
|
||||
return users.map((u) => u.id)
|
||||
}
|
||||
|
||||
case 'STAGE_JURY': {
|
||||
const targetStageId = stageId || (filter?.stageId as string)
|
||||
if (!targetStageId) return []
|
||||
const assignments = await prisma.assignment.findMany({
|
||||
where: { stageId: targetStageId },
|
||||
select: { userId: true },
|
||||
distinct: ['userId'],
|
||||
})
|
||||
return assignments.map((a) => a.userId)
|
||||
}
|
||||
|
||||
case 'PROGRAM_TEAM': {
|
||||
const programId = filter?.programId as string
|
||||
if (!programId) return []
|
||||
const projects = await prisma.project.findMany({
|
||||
where: { programId },
|
||||
select: { submittedByUserId: true },
|
||||
})
|
||||
const ids = new Set(projects.map((p) => p.submittedByUserId).filter(Boolean) as string[])
|
||||
return [...ids]
|
||||
}
|
||||
|
||||
case 'ALL': {
|
||||
const users = await prisma.user.findMany({
|
||||
where: { status: 'ACTIVE' },
|
||||
select: { id: true },
|
||||
})
|
||||
return users.map((u) => u.id)
|
||||
}
|
||||
|
||||
default:
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,381 +1,381 @@
|
||||
/**
|
||||
* Notification Router
|
||||
*
|
||||
* Handles in-app notification CRUD operations for users.
|
||||
*/
|
||||
|
||||
import { z } from 'zod'
|
||||
import { router, protectedProcedure, adminProcedure } from '../trpc'
|
||||
import {
|
||||
markNotificationAsRead,
|
||||
markAllNotificationsAsRead,
|
||||
getUnreadCount,
|
||||
deleteExpiredNotifications,
|
||||
deleteOldNotifications,
|
||||
NotificationIcons,
|
||||
NotificationPriorities,
|
||||
} from '../services/in-app-notification'
|
||||
import { sendStyledNotificationEmail, NOTIFICATION_EMAIL_TEMPLATES } from '@/lib/email'
|
||||
|
||||
export const notificationRouter = router({
|
||||
/**
|
||||
* List notifications for the current user
|
||||
*/
|
||||
list: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
unreadOnly: z.boolean().default(false),
|
||||
limit: z.number().int().min(1).max(100).default(50),
|
||||
cursor: z.string().optional(), // For infinite scroll pagination
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
const { unreadOnly, limit, cursor } = input
|
||||
const userId = ctx.user.id
|
||||
|
||||
const where = {
|
||||
userId,
|
||||
...(unreadOnly && { isRead: false }),
|
||||
// Don't show expired notifications
|
||||
OR: [{ expiresAt: null }, { expiresAt: { gt: new Date() } }],
|
||||
}
|
||||
|
||||
const notifications = await ctx.prisma.inAppNotification.findMany({
|
||||
where,
|
||||
take: limit + 1, // Fetch one extra to check if there are more
|
||||
orderBy: { createdAt: 'desc' },
|
||||
...(cursor && {
|
||||
cursor: { id: cursor },
|
||||
skip: 1, // Skip the cursor item
|
||||
}),
|
||||
})
|
||||
|
||||
let nextCursor: string | undefined
|
||||
if (notifications.length > limit) {
|
||||
const nextItem = notifications.pop()
|
||||
nextCursor = nextItem?.id
|
||||
}
|
||||
|
||||
return {
|
||||
notifications,
|
||||
nextCursor,
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get unread notification count for the current user
|
||||
*/
|
||||
getUnreadCount: protectedProcedure.query(async ({ ctx }) => {
|
||||
return getUnreadCount(ctx.user.id)
|
||||
}),
|
||||
|
||||
/**
|
||||
* Check if there are any urgent unread notifications
|
||||
*/
|
||||
hasUrgent: protectedProcedure.query(async ({ ctx }) => {
|
||||
const count = await ctx.prisma.inAppNotification.count({
|
||||
where: {
|
||||
userId: ctx.user.id,
|
||||
isRead: false,
|
||||
priority: 'urgent',
|
||||
OR: [{ expiresAt: null }, { expiresAt: { gt: new Date() } }],
|
||||
},
|
||||
})
|
||||
return count > 0
|
||||
}),
|
||||
|
||||
/**
|
||||
* Mark a single notification as read
|
||||
*/
|
||||
markAsRead: protectedProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
await markNotificationAsRead(input.id, ctx.user.id)
|
||||
return { success: true }
|
||||
}),
|
||||
|
||||
/**
|
||||
* Mark multiple notifications as read by IDs
|
||||
*/
|
||||
markBatchAsRead: protectedProcedure
|
||||
.input(z.object({ ids: z.array(z.string()).min(1).max(50) }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
await ctx.prisma.inAppNotification.updateMany({
|
||||
where: {
|
||||
id: { in: input.ids },
|
||||
userId: ctx.user.id,
|
||||
isRead: false,
|
||||
},
|
||||
data: { isRead: true, readAt: new Date() },
|
||||
})
|
||||
return { success: true }
|
||||
}),
|
||||
|
||||
/**
|
||||
* Mark all notifications as read for the current user
|
||||
*/
|
||||
markAllAsRead: protectedProcedure.mutation(async ({ ctx }) => {
|
||||
await markAllNotificationsAsRead(ctx.user.id)
|
||||
return { success: true }
|
||||
}),
|
||||
|
||||
/**
|
||||
* Delete a notification (user can only delete their own)
|
||||
*/
|
||||
delete: protectedProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
await ctx.prisma.inAppNotification.deleteMany({
|
||||
where: {
|
||||
id: input.id,
|
||||
userId: ctx.user.id, // Ensure user can only delete their own
|
||||
},
|
||||
})
|
||||
return { success: true }
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get notification email settings (admin only)
|
||||
*/
|
||||
getEmailSettings: adminProcedure.query(async ({ ctx }) => {
|
||||
return ctx.prisma.notificationEmailSetting.findMany({
|
||||
orderBy: [{ category: 'asc' }, { label: 'asc' }],
|
||||
include: {
|
||||
updatedBy: { select: { name: true, email: true } },
|
||||
},
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* Update a notification email setting (admin only)
|
||||
*/
|
||||
updateEmailSetting: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
notificationType: z.string(),
|
||||
sendEmail: z.boolean(),
|
||||
emailSubject: z.string().optional(),
|
||||
emailTemplate: z.string().optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const { notificationType, sendEmail, emailSubject, emailTemplate } = input
|
||||
|
||||
return ctx.prisma.notificationEmailSetting.upsert({
|
||||
where: { notificationType },
|
||||
update: {
|
||||
sendEmail,
|
||||
emailSubject,
|
||||
emailTemplate,
|
||||
updatedById: ctx.user.id,
|
||||
},
|
||||
create: {
|
||||
notificationType,
|
||||
category: 'custom',
|
||||
label: notificationType,
|
||||
sendEmail,
|
||||
emailSubject,
|
||||
emailTemplate,
|
||||
updatedById: ctx.user.id,
|
||||
},
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* Delete expired notifications (admin cleanup)
|
||||
*/
|
||||
deleteExpired: adminProcedure.mutation(async () => {
|
||||
const count = await deleteExpiredNotifications()
|
||||
return { deletedCount: count }
|
||||
}),
|
||||
|
||||
/**
|
||||
* Delete old read notifications (admin cleanup)
|
||||
*/
|
||||
deleteOld: adminProcedure
|
||||
.input(z.object({ olderThanDays: z.number().int().min(1).max(365).default(30) }))
|
||||
.mutation(async ({ input }) => {
|
||||
const count = await deleteOldNotifications(input.olderThanDays)
|
||||
return { deletedCount: count }
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get notification icon and priority mappings (for UI)
|
||||
*/
|
||||
getMappings: protectedProcedure.query(() => {
|
||||
return {
|
||||
icons: NotificationIcons,
|
||||
priorities: NotificationPriorities,
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Admin: Get notification statistics
|
||||
*/
|
||||
getStats: adminProcedure.query(async ({ ctx }) => {
|
||||
const [total, unread, byType, byPriority] = await Promise.all([
|
||||
ctx.prisma.inAppNotification.count(),
|
||||
ctx.prisma.inAppNotification.count({ where: { isRead: false } }),
|
||||
ctx.prisma.inAppNotification.groupBy({
|
||||
by: ['type'],
|
||||
_count: true,
|
||||
orderBy: { _count: { type: 'desc' } },
|
||||
take: 10,
|
||||
}),
|
||||
ctx.prisma.inAppNotification.groupBy({
|
||||
by: ['priority'],
|
||||
_count: true,
|
||||
}),
|
||||
])
|
||||
|
||||
return {
|
||||
total,
|
||||
unread,
|
||||
readRate: total > 0 ? ((total - unread) / total) * 100 : 0,
|
||||
byType: byType.map((t) => ({ type: t.type, count: t._count })),
|
||||
byPriority: byPriority.map((p) => ({ priority: p.priority, count: p._count })),
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Send a test notification email to the current admin
|
||||
*/
|
||||
sendTestEmail: adminProcedure
|
||||
.input(z.object({ notificationType: z.string() }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const { notificationType } = input
|
||||
|
||||
// Check if this notification type has a styled template
|
||||
const hasStyledTemplate = notificationType in NOTIFICATION_EMAIL_TEMPLATES
|
||||
|
||||
// Get setting for label
|
||||
const setting = await ctx.prisma.notificationEmailSetting.findUnique({
|
||||
where: { notificationType },
|
||||
})
|
||||
|
||||
// Sample data for test emails based on category
|
||||
const sampleData: Record<string, Record<string, unknown>> = {
|
||||
// Team notifications
|
||||
ADVANCED_SEMIFINAL: {
|
||||
projectName: 'Ocean Cleanup Initiative',
|
||||
programName: 'Monaco Ocean Protection Challenge 2026',
|
||||
nextSteps: 'Prepare your presentation for the semi-final round.',
|
||||
},
|
||||
ADVANCED_FINAL: {
|
||||
projectName: 'Ocean Cleanup Initiative',
|
||||
programName: 'Monaco Ocean Protection Challenge 2026',
|
||||
nextSteps: 'Get ready for the final presentation in Monaco.',
|
||||
},
|
||||
MENTOR_ASSIGNED: {
|
||||
projectName: 'Ocean Cleanup Initiative',
|
||||
mentorName: 'Dr. Marine Expert',
|
||||
mentorBio: 'Expert in marine conservation with 20 years of experience.',
|
||||
},
|
||||
NOT_SELECTED: {
|
||||
projectName: 'Ocean Cleanup Initiative',
|
||||
roundName: 'Semi-Final Round',
|
||||
},
|
||||
WINNER_ANNOUNCEMENT: {
|
||||
projectName: 'Ocean Cleanup Initiative',
|
||||
awardName: 'Grand Prize',
|
||||
prizeDetails: '€50,000 and mentorship program',
|
||||
},
|
||||
|
||||
// Jury notifications
|
||||
ASSIGNED_TO_PROJECT: {
|
||||
projectName: 'Ocean Cleanup Initiative',
|
||||
roundName: 'Semi-Final Round',
|
||||
deadline: 'Friday, March 15, 2026',
|
||||
},
|
||||
BATCH_ASSIGNED: {
|
||||
projectCount: 5,
|
||||
roundName: 'Semi-Final Round',
|
||||
deadline: 'Friday, March 15, 2026',
|
||||
},
|
||||
ROUND_NOW_OPEN: {
|
||||
roundName: 'Semi-Final Round',
|
||||
projectCount: 12,
|
||||
deadline: 'Friday, March 15, 2026',
|
||||
},
|
||||
REMINDER_24H: {
|
||||
pendingCount: 3,
|
||||
roundName: 'Semi-Final Round',
|
||||
deadline: 'Tomorrow at 5:00 PM',
|
||||
},
|
||||
REMINDER_1H: {
|
||||
pendingCount: 2,
|
||||
roundName: 'Semi-Final Round',
|
||||
deadline: 'Today at 5:00 PM',
|
||||
},
|
||||
AWARD_VOTING_OPEN: {
|
||||
awardName: 'Innovation Award',
|
||||
finalistCount: 6,
|
||||
deadline: 'Friday, March 15, 2026',
|
||||
},
|
||||
|
||||
// Mentor notifications
|
||||
MENTEE_ASSIGNED: {
|
||||
projectName: 'Ocean Cleanup Initiative',
|
||||
teamLeadName: 'John Smith',
|
||||
teamLeadEmail: 'john@example.com',
|
||||
},
|
||||
MENTEE_ADVANCED: {
|
||||
projectName: 'Ocean Cleanup Initiative',
|
||||
roundName: 'Semi-Final Round',
|
||||
nextRoundName: 'Final Round',
|
||||
},
|
||||
MENTEE_WON: {
|
||||
projectName: 'Ocean Cleanup Initiative',
|
||||
awardName: 'Innovation Award',
|
||||
},
|
||||
|
||||
// Admin notifications
|
||||
NEW_APPLICATION: {
|
||||
projectName: 'New Ocean Project',
|
||||
applicantName: 'Jane Doe',
|
||||
applicantEmail: 'jane@example.com',
|
||||
programName: 'Monaco Ocean Protection Challenge 2026',
|
||||
},
|
||||
FILTERING_COMPLETE: {
|
||||
roundName: 'Initial Review',
|
||||
passedCount: 45,
|
||||
flaggedCount: 12,
|
||||
filteredCount: 8,
|
||||
},
|
||||
FILTERING_FAILED: {
|
||||
roundName: 'Initial Review',
|
||||
error: 'Connection timeout',
|
||||
},
|
||||
}
|
||||
|
||||
const metadata = sampleData[notificationType] || {}
|
||||
const label = setting?.label || notificationType
|
||||
|
||||
try {
|
||||
await sendStyledNotificationEmail(
|
||||
ctx.user.email,
|
||||
ctx.user.name || 'Admin',
|
||||
notificationType,
|
||||
{
|
||||
title: `[TEST] ${label}`,
|
||||
message: `This is a test email for the "${label}" notification type.`,
|
||||
linkUrl: `${process.env.NEXTAUTH_URL || 'https://portal.monaco-opc.com'}/admin/settings`,
|
||||
linkLabel: 'Back to Settings',
|
||||
metadata,
|
||||
}
|
||||
)
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: `Test email sent to ${ctx.user.email}`,
|
||||
hasStyledTemplate,
|
||||
}
|
||||
} catch (error) {
|
||||
return {
|
||||
success: false,
|
||||
message: error instanceof Error ? error.message : 'Failed to send test email',
|
||||
hasStyledTemplate,
|
||||
}
|
||||
}
|
||||
}),
|
||||
})
|
||||
/**
|
||||
* Notification Router
|
||||
*
|
||||
* Handles in-app notification CRUD operations for users.
|
||||
*/
|
||||
|
||||
import { z } from 'zod'
|
||||
import { router, protectedProcedure, adminProcedure } from '../trpc'
|
||||
import {
|
||||
markNotificationAsRead,
|
||||
markAllNotificationsAsRead,
|
||||
getUnreadCount,
|
||||
deleteExpiredNotifications,
|
||||
deleteOldNotifications,
|
||||
NotificationIcons,
|
||||
NotificationPriorities,
|
||||
} from '../services/in-app-notification'
|
||||
import { sendStyledNotificationEmail, NOTIFICATION_EMAIL_TEMPLATES } from '@/lib/email'
|
||||
|
||||
export const notificationRouter = router({
|
||||
/**
|
||||
* List notifications for the current user
|
||||
*/
|
||||
list: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
unreadOnly: z.boolean().default(false),
|
||||
limit: z.number().int().min(1).max(100).default(50),
|
||||
cursor: z.string().optional(), // For infinite scroll pagination
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
const { unreadOnly, limit, cursor } = input
|
||||
const userId = ctx.user.id
|
||||
|
||||
const where = {
|
||||
userId,
|
||||
...(unreadOnly && { isRead: false }),
|
||||
// Don't show expired notifications
|
||||
OR: [{ expiresAt: null }, { expiresAt: { gt: new Date() } }],
|
||||
}
|
||||
|
||||
const notifications = await ctx.prisma.inAppNotification.findMany({
|
||||
where,
|
||||
take: limit + 1, // Fetch one extra to check if there are more
|
||||
orderBy: { createdAt: 'desc' },
|
||||
...(cursor && {
|
||||
cursor: { id: cursor },
|
||||
skip: 1, // Skip the cursor item
|
||||
}),
|
||||
})
|
||||
|
||||
let nextCursor: string | undefined
|
||||
if (notifications.length > limit) {
|
||||
const nextItem = notifications.pop()
|
||||
nextCursor = nextItem?.id
|
||||
}
|
||||
|
||||
return {
|
||||
notifications,
|
||||
nextCursor,
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get unread notification count for the current user
|
||||
*/
|
||||
getUnreadCount: protectedProcedure.query(async ({ ctx }) => {
|
||||
return getUnreadCount(ctx.user.id)
|
||||
}),
|
||||
|
||||
/**
|
||||
* Check if there are any urgent unread notifications
|
||||
*/
|
||||
hasUrgent: protectedProcedure.query(async ({ ctx }) => {
|
||||
const count = await ctx.prisma.inAppNotification.count({
|
||||
where: {
|
||||
userId: ctx.user.id,
|
||||
isRead: false,
|
||||
priority: 'urgent',
|
||||
OR: [{ expiresAt: null }, { expiresAt: { gt: new Date() } }],
|
||||
},
|
||||
})
|
||||
return count > 0
|
||||
}),
|
||||
|
||||
/**
|
||||
* Mark a single notification as read
|
||||
*/
|
||||
markAsRead: protectedProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
await markNotificationAsRead(input.id, ctx.user.id)
|
||||
return { success: true }
|
||||
}),
|
||||
|
||||
/**
|
||||
* Mark multiple notifications as read by IDs
|
||||
*/
|
||||
markBatchAsRead: protectedProcedure
|
||||
.input(z.object({ ids: z.array(z.string()).min(1).max(50) }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
await ctx.prisma.inAppNotification.updateMany({
|
||||
where: {
|
||||
id: { in: input.ids },
|
||||
userId: ctx.user.id,
|
||||
isRead: false,
|
||||
},
|
||||
data: { isRead: true, readAt: new Date() },
|
||||
})
|
||||
return { success: true }
|
||||
}),
|
||||
|
||||
/**
|
||||
* Mark all notifications as read for the current user
|
||||
*/
|
||||
markAllAsRead: protectedProcedure.mutation(async ({ ctx }) => {
|
||||
await markAllNotificationsAsRead(ctx.user.id)
|
||||
return { success: true }
|
||||
}),
|
||||
|
||||
/**
|
||||
* Delete a notification (user can only delete their own)
|
||||
*/
|
||||
delete: protectedProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
await ctx.prisma.inAppNotification.deleteMany({
|
||||
where: {
|
||||
id: input.id,
|
||||
userId: ctx.user.id, // Ensure user can only delete their own
|
||||
},
|
||||
})
|
||||
return { success: true }
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get notification email settings (admin only)
|
||||
*/
|
||||
getEmailSettings: adminProcedure.query(async ({ ctx }) => {
|
||||
return ctx.prisma.notificationEmailSetting.findMany({
|
||||
orderBy: [{ category: 'asc' }, { label: 'asc' }],
|
||||
include: {
|
||||
updatedBy: { select: { name: true, email: true } },
|
||||
},
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* Update a notification email setting (admin only)
|
||||
*/
|
||||
updateEmailSetting: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
notificationType: z.string(),
|
||||
sendEmail: z.boolean(),
|
||||
emailSubject: z.string().optional(),
|
||||
emailTemplate: z.string().optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const { notificationType, sendEmail, emailSubject, emailTemplate } = input
|
||||
|
||||
return ctx.prisma.notificationEmailSetting.upsert({
|
||||
where: { notificationType },
|
||||
update: {
|
||||
sendEmail,
|
||||
emailSubject,
|
||||
emailTemplate,
|
||||
updatedById: ctx.user.id,
|
||||
},
|
||||
create: {
|
||||
notificationType,
|
||||
category: 'custom',
|
||||
label: notificationType,
|
||||
sendEmail,
|
||||
emailSubject,
|
||||
emailTemplate,
|
||||
updatedById: ctx.user.id,
|
||||
},
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* Delete expired notifications (admin cleanup)
|
||||
*/
|
||||
deleteExpired: adminProcedure.mutation(async () => {
|
||||
const count = await deleteExpiredNotifications()
|
||||
return { deletedCount: count }
|
||||
}),
|
||||
|
||||
/**
|
||||
* Delete old read notifications (admin cleanup)
|
||||
*/
|
||||
deleteOld: adminProcedure
|
||||
.input(z.object({ olderThanDays: z.number().int().min(1).max(365).default(30) }))
|
||||
.mutation(async ({ input }) => {
|
||||
const count = await deleteOldNotifications(input.olderThanDays)
|
||||
return { deletedCount: count }
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get notification icon and priority mappings (for UI)
|
||||
*/
|
||||
getMappings: protectedProcedure.query(() => {
|
||||
return {
|
||||
icons: NotificationIcons,
|
||||
priorities: NotificationPriorities,
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Admin: Get notification statistics
|
||||
*/
|
||||
getStats: adminProcedure.query(async ({ ctx }) => {
|
||||
const [total, unread, byType, byPriority] = await Promise.all([
|
||||
ctx.prisma.inAppNotification.count(),
|
||||
ctx.prisma.inAppNotification.count({ where: { isRead: false } }),
|
||||
ctx.prisma.inAppNotification.groupBy({
|
||||
by: ['type'],
|
||||
_count: true,
|
||||
orderBy: { _count: { type: 'desc' } },
|
||||
take: 10,
|
||||
}),
|
||||
ctx.prisma.inAppNotification.groupBy({
|
||||
by: ['priority'],
|
||||
_count: true,
|
||||
}),
|
||||
])
|
||||
|
||||
return {
|
||||
total,
|
||||
unread,
|
||||
readRate: total > 0 ? ((total - unread) / total) * 100 : 0,
|
||||
byType: byType.map((t) => ({ type: t.type, count: t._count })),
|
||||
byPriority: byPriority.map((p) => ({ priority: p.priority, count: p._count })),
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Send a test notification email to the current admin
|
||||
*/
|
||||
sendTestEmail: adminProcedure
|
||||
.input(z.object({ notificationType: z.string() }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const { notificationType } = input
|
||||
|
||||
// Check if this notification type has a styled template
|
||||
const hasStyledTemplate = notificationType in NOTIFICATION_EMAIL_TEMPLATES
|
||||
|
||||
// Get setting for label
|
||||
const setting = await ctx.prisma.notificationEmailSetting.findUnique({
|
||||
where: { notificationType },
|
||||
})
|
||||
|
||||
// Sample data for test emails based on category
|
||||
const sampleData: Record<string, Record<string, unknown>> = {
|
||||
// Team notifications
|
||||
ADVANCED_SEMIFINAL: {
|
||||
projectName: 'Ocean Cleanup Initiative',
|
||||
programName: 'Monaco Ocean Protection Challenge 2026',
|
||||
nextSteps: 'Prepare your presentation for the semi-final round.',
|
||||
},
|
||||
ADVANCED_FINAL: {
|
||||
projectName: 'Ocean Cleanup Initiative',
|
||||
programName: 'Monaco Ocean Protection Challenge 2026',
|
||||
nextSteps: 'Get ready for the final presentation in Monaco.',
|
||||
},
|
||||
MENTOR_ASSIGNED: {
|
||||
projectName: 'Ocean Cleanup Initiative',
|
||||
mentorName: 'Dr. Marine Expert',
|
||||
mentorBio: 'Expert in marine conservation with 20 years of experience.',
|
||||
},
|
||||
NOT_SELECTED: {
|
||||
projectName: 'Ocean Cleanup Initiative',
|
||||
roundName: 'Semi-Final Round',
|
||||
},
|
||||
WINNER_ANNOUNCEMENT: {
|
||||
projectName: 'Ocean Cleanup Initiative',
|
||||
awardName: 'Grand Prize',
|
||||
prizeDetails: '€50,000 and mentorship program',
|
||||
},
|
||||
|
||||
// Jury notifications
|
||||
ASSIGNED_TO_PROJECT: {
|
||||
projectName: 'Ocean Cleanup Initiative',
|
||||
roundName: 'Semi-Final Round',
|
||||
deadline: 'Friday, March 15, 2026',
|
||||
},
|
||||
BATCH_ASSIGNED: {
|
||||
projectCount: 5,
|
||||
roundName: 'Semi-Final Round',
|
||||
deadline: 'Friday, March 15, 2026',
|
||||
},
|
||||
ROUND_NOW_OPEN: {
|
||||
roundName: 'Semi-Final Round',
|
||||
projectCount: 12,
|
||||
deadline: 'Friday, March 15, 2026',
|
||||
},
|
||||
REMINDER_24H: {
|
||||
pendingCount: 3,
|
||||
roundName: 'Semi-Final Round',
|
||||
deadline: 'Tomorrow at 5:00 PM',
|
||||
},
|
||||
REMINDER_1H: {
|
||||
pendingCount: 2,
|
||||
roundName: 'Semi-Final Round',
|
||||
deadline: 'Today at 5:00 PM',
|
||||
},
|
||||
AWARD_VOTING_OPEN: {
|
||||
awardName: 'Innovation Award',
|
||||
finalistCount: 6,
|
||||
deadline: 'Friday, March 15, 2026',
|
||||
},
|
||||
|
||||
// Mentor notifications
|
||||
MENTEE_ASSIGNED: {
|
||||
projectName: 'Ocean Cleanup Initiative',
|
||||
teamLeadName: 'John Smith',
|
||||
teamLeadEmail: 'john@example.com',
|
||||
},
|
||||
MENTEE_ADVANCED: {
|
||||
projectName: 'Ocean Cleanup Initiative',
|
||||
roundName: 'Semi-Final Round',
|
||||
nextRoundName: 'Final Round',
|
||||
},
|
||||
MENTEE_WON: {
|
||||
projectName: 'Ocean Cleanup Initiative',
|
||||
awardName: 'Innovation Award',
|
||||
},
|
||||
|
||||
// Admin notifications
|
||||
NEW_APPLICATION: {
|
||||
projectName: 'New Ocean Project',
|
||||
applicantName: 'Jane Doe',
|
||||
applicantEmail: 'jane@example.com',
|
||||
programName: 'Monaco Ocean Protection Challenge 2026',
|
||||
},
|
||||
FILTERING_COMPLETE: {
|
||||
roundName: 'Initial Review',
|
||||
passedCount: 45,
|
||||
flaggedCount: 12,
|
||||
filteredCount: 8,
|
||||
},
|
||||
FILTERING_FAILED: {
|
||||
roundName: 'Initial Review',
|
||||
error: 'Connection timeout',
|
||||
},
|
||||
}
|
||||
|
||||
const metadata = sampleData[notificationType] || {}
|
||||
const label = setting?.label || notificationType
|
||||
|
||||
try {
|
||||
await sendStyledNotificationEmail(
|
||||
ctx.user.email,
|
||||
ctx.user.name || 'Admin',
|
||||
notificationType,
|
||||
{
|
||||
title: `[TEST] ${label}`,
|
||||
message: `This is a test email for the "${label}" notification type.`,
|
||||
linkUrl: `${process.env.NEXTAUTH_URL || 'https://portal.monaco-opc.com'}/admin/settings`,
|
||||
linkLabel: 'Back to Settings',
|
||||
metadata,
|
||||
}
|
||||
)
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: `Test email sent to ${ctx.user.email}`,
|
||||
hasStyledTemplate,
|
||||
}
|
||||
} catch (error) {
|
||||
return {
|
||||
success: false,
|
||||
message: error instanceof Error ? error.message : 'Failed to send test email',
|
||||
hasStyledTemplate,
|
||||
}
|
||||
}
|
||||
}),
|
||||
})
|
||||
|
||||
@@ -1,239 +1,239 @@
|
||||
import { z } from 'zod'
|
||||
import { TRPCError } from '@trpc/server'
|
||||
import { Prisma } from '@prisma/client'
|
||||
import { router, adminProcedure } from '../trpc'
|
||||
import {
|
||||
testNotionConnection,
|
||||
getNotionDatabaseSchema,
|
||||
queryNotionDatabase,
|
||||
} from '@/lib/notion'
|
||||
import { normalizeCountryToCode } from '@/lib/countries'
|
||||
|
||||
export const notionImportRouter = router({
|
||||
/**
|
||||
* Test connection to Notion API
|
||||
*/
|
||||
testConnection: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
apiKey: z.string().min(1),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ input }) => {
|
||||
return testNotionConnection(input.apiKey)
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get database schema (properties) for mapping
|
||||
*/
|
||||
getDatabaseSchema: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
apiKey: z.string().min(1),
|
||||
databaseId: z.string().min(1),
|
||||
})
|
||||
)
|
||||
.query(async ({ input }) => {
|
||||
try {
|
||||
return await getNotionDatabaseSchema(input.apiKey, input.databaseId)
|
||||
} catch (error) {
|
||||
throw new TRPCError({
|
||||
code: 'BAD_REQUEST',
|
||||
message:
|
||||
error instanceof Error
|
||||
? error.message
|
||||
: 'Failed to fetch database schema',
|
||||
})
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Preview data from Notion database
|
||||
*/
|
||||
previewData: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
apiKey: z.string().min(1),
|
||||
databaseId: z.string().min(1),
|
||||
limit: z.number().int().min(1).max(10).default(5),
|
||||
})
|
||||
)
|
||||
.query(async ({ input }) => {
|
||||
try {
|
||||
const records = await queryNotionDatabase(
|
||||
input.apiKey,
|
||||
input.databaseId,
|
||||
input.limit
|
||||
)
|
||||
return { records, count: records.length }
|
||||
} catch (error) {
|
||||
throw new TRPCError({
|
||||
code: 'BAD_REQUEST',
|
||||
message:
|
||||
error instanceof Error
|
||||
? error.message
|
||||
: 'Failed to fetch data from Notion',
|
||||
})
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Import projects from Notion database
|
||||
*/
|
||||
importProjects: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
apiKey: z.string().min(1),
|
||||
databaseId: z.string().min(1),
|
||||
programId: z.string(),
|
||||
mappings: z.object({
|
||||
title: z.string(),
|
||||
teamName: z.string().optional(),
|
||||
description: z.string().optional(),
|
||||
tags: z.string().optional(),
|
||||
country: z.string().optional(),
|
||||
}),
|
||||
includeUnmappedInMetadata: z.boolean().default(true),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
await ctx.prisma.program.findUniqueOrThrow({
|
||||
where: { id: input.programId },
|
||||
})
|
||||
|
||||
// Fetch all records from Notion
|
||||
const records = await queryNotionDatabase(input.apiKey, input.databaseId)
|
||||
|
||||
if (records.length === 0) {
|
||||
return { imported: 0, skipped: 0, errors: [] }
|
||||
}
|
||||
|
||||
const results = {
|
||||
imported: 0,
|
||||
skipped: 0,
|
||||
errors: [] as Array<{ recordId: string; error: string }>,
|
||||
}
|
||||
|
||||
// Process each record
|
||||
for (const record of records) {
|
||||
try {
|
||||
// Get mapped values
|
||||
const title = getPropertyValue(record.properties, input.mappings.title)
|
||||
|
||||
if (!title || typeof title !== 'string' || !title.trim()) {
|
||||
results.errors.push({
|
||||
recordId: record.id,
|
||||
error: 'Missing or invalid title',
|
||||
})
|
||||
results.skipped++
|
||||
continue
|
||||
}
|
||||
|
||||
const teamName = input.mappings.teamName
|
||||
? getPropertyValue(record.properties, input.mappings.teamName)
|
||||
: null
|
||||
|
||||
const description = input.mappings.description
|
||||
? getPropertyValue(record.properties, input.mappings.description)
|
||||
: null
|
||||
|
||||
let tags: string[] = []
|
||||
if (input.mappings.tags) {
|
||||
const tagsValue = getPropertyValue(record.properties, input.mappings.tags)
|
||||
if (Array.isArray(tagsValue)) {
|
||||
tags = tagsValue.filter((t): t is string => typeof t === 'string')
|
||||
} else if (typeof tagsValue === 'string') {
|
||||
tags = tagsValue.split(',').map((t) => t.trim()).filter(Boolean)
|
||||
}
|
||||
}
|
||||
|
||||
// Get country and normalize to ISO code
|
||||
let country: string | null = null
|
||||
if (input.mappings.country) {
|
||||
const countryValue = getPropertyValue(record.properties, input.mappings.country)
|
||||
if (typeof countryValue === 'string') {
|
||||
country = normalizeCountryToCode(countryValue)
|
||||
}
|
||||
}
|
||||
|
||||
// Build metadata from unmapped columns
|
||||
let metadataJson: Record<string, unknown> | null = null
|
||||
if (input.includeUnmappedInMetadata) {
|
||||
const mappedKeys = new Set([
|
||||
input.mappings.title,
|
||||
input.mappings.teamName,
|
||||
input.mappings.description,
|
||||
input.mappings.tags,
|
||||
input.mappings.country,
|
||||
].filter(Boolean))
|
||||
|
||||
metadataJson = {}
|
||||
for (const [key, value] of Object.entries(record.properties)) {
|
||||
if (!mappedKeys.has(key) && value !== null && value !== undefined) {
|
||||
metadataJson[key] = value
|
||||
}
|
||||
}
|
||||
|
||||
if (Object.keys(metadataJson).length === 0) {
|
||||
metadataJson = null
|
||||
}
|
||||
}
|
||||
|
||||
// Create project
|
||||
await ctx.prisma.project.create({
|
||||
data: {
|
||||
programId: input.programId,
|
||||
status: 'SUBMITTED',
|
||||
title: title.trim(),
|
||||
teamName: typeof teamName === 'string' ? teamName.trim() : null,
|
||||
description: typeof description === 'string' ? description : null,
|
||||
tags,
|
||||
country,
|
||||
metadataJson: metadataJson as Prisma.InputJsonValue ?? undefined,
|
||||
externalIdsJson: {
|
||||
notionPageId: record.id,
|
||||
notionDatabaseId: input.databaseId,
|
||||
} as Prisma.InputJsonValue,
|
||||
},
|
||||
})
|
||||
|
||||
results.imported++
|
||||
} catch (error) {
|
||||
results.errors.push({
|
||||
recordId: record.id,
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
})
|
||||
results.skipped++
|
||||
}
|
||||
}
|
||||
|
||||
// Audit log
|
||||
await ctx.prisma.auditLog.create({
|
||||
data: {
|
||||
userId: ctx.user.id,
|
||||
action: 'IMPORT',
|
||||
entityType: 'Project',
|
||||
detailsJson: {
|
||||
source: 'notion',
|
||||
databaseId: input.databaseId,
|
||||
imported: results.imported,
|
||||
skipped: results.skipped,
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
},
|
||||
})
|
||||
|
||||
return results
|
||||
}),
|
||||
})
|
||||
|
||||
/**
|
||||
* Helper to get a property value from a record
|
||||
*/
|
||||
function getPropertyValue(
|
||||
properties: Record<string, unknown>,
|
||||
propertyName: string
|
||||
): unknown {
|
||||
return properties[propertyName] ?? null
|
||||
}
|
||||
import { z } from 'zod'
|
||||
import { TRPCError } from '@trpc/server'
|
||||
import { Prisma } from '@prisma/client'
|
||||
import { router, adminProcedure } from '../trpc'
|
||||
import {
|
||||
testNotionConnection,
|
||||
getNotionDatabaseSchema,
|
||||
queryNotionDatabase,
|
||||
} from '@/lib/notion'
|
||||
import { normalizeCountryToCode } from '@/lib/countries'
|
||||
|
||||
export const notionImportRouter = router({
|
||||
/**
|
||||
* Test connection to Notion API
|
||||
*/
|
||||
testConnection: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
apiKey: z.string().min(1),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ input }) => {
|
||||
return testNotionConnection(input.apiKey)
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get database schema (properties) for mapping
|
||||
*/
|
||||
getDatabaseSchema: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
apiKey: z.string().min(1),
|
||||
databaseId: z.string().min(1),
|
||||
})
|
||||
)
|
||||
.query(async ({ input }) => {
|
||||
try {
|
||||
return await getNotionDatabaseSchema(input.apiKey, input.databaseId)
|
||||
} catch (error) {
|
||||
throw new TRPCError({
|
||||
code: 'BAD_REQUEST',
|
||||
message:
|
||||
error instanceof Error
|
||||
? error.message
|
||||
: 'Failed to fetch database schema',
|
||||
})
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Preview data from Notion database
|
||||
*/
|
||||
previewData: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
apiKey: z.string().min(1),
|
||||
databaseId: z.string().min(1),
|
||||
limit: z.number().int().min(1).max(10).default(5),
|
||||
})
|
||||
)
|
||||
.query(async ({ input }) => {
|
||||
try {
|
||||
const records = await queryNotionDatabase(
|
||||
input.apiKey,
|
||||
input.databaseId,
|
||||
input.limit
|
||||
)
|
||||
return { records, count: records.length }
|
||||
} catch (error) {
|
||||
throw new TRPCError({
|
||||
code: 'BAD_REQUEST',
|
||||
message:
|
||||
error instanceof Error
|
||||
? error.message
|
||||
: 'Failed to fetch data from Notion',
|
||||
})
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Import projects from Notion database
|
||||
*/
|
||||
importProjects: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
apiKey: z.string().min(1),
|
||||
databaseId: z.string().min(1),
|
||||
programId: z.string(),
|
||||
mappings: z.object({
|
||||
title: z.string(),
|
||||
teamName: z.string().optional(),
|
||||
description: z.string().optional(),
|
||||
tags: z.string().optional(),
|
||||
country: z.string().optional(),
|
||||
}),
|
||||
includeUnmappedInMetadata: z.boolean().default(true),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
await ctx.prisma.program.findUniqueOrThrow({
|
||||
where: { id: input.programId },
|
||||
})
|
||||
|
||||
// Fetch all records from Notion
|
||||
const records = await queryNotionDatabase(input.apiKey, input.databaseId)
|
||||
|
||||
if (records.length === 0) {
|
||||
return { imported: 0, skipped: 0, errors: [] }
|
||||
}
|
||||
|
||||
const results = {
|
||||
imported: 0,
|
||||
skipped: 0,
|
||||
errors: [] as Array<{ recordId: string; error: string }>,
|
||||
}
|
||||
|
||||
// Process each record
|
||||
for (const record of records) {
|
||||
try {
|
||||
// Get mapped values
|
||||
const title = getPropertyValue(record.properties, input.mappings.title)
|
||||
|
||||
if (!title || typeof title !== 'string' || !title.trim()) {
|
||||
results.errors.push({
|
||||
recordId: record.id,
|
||||
error: 'Missing or invalid title',
|
||||
})
|
||||
results.skipped++
|
||||
continue
|
||||
}
|
||||
|
||||
const teamName = input.mappings.teamName
|
||||
? getPropertyValue(record.properties, input.mappings.teamName)
|
||||
: null
|
||||
|
||||
const description = input.mappings.description
|
||||
? getPropertyValue(record.properties, input.mappings.description)
|
||||
: null
|
||||
|
||||
let tags: string[] = []
|
||||
if (input.mappings.tags) {
|
||||
const tagsValue = getPropertyValue(record.properties, input.mappings.tags)
|
||||
if (Array.isArray(tagsValue)) {
|
||||
tags = tagsValue.filter((t): t is string => typeof t === 'string')
|
||||
} else if (typeof tagsValue === 'string') {
|
||||
tags = tagsValue.split(',').map((t) => t.trim()).filter(Boolean)
|
||||
}
|
||||
}
|
||||
|
||||
// Get country and normalize to ISO code
|
||||
let country: string | null = null
|
||||
if (input.mappings.country) {
|
||||
const countryValue = getPropertyValue(record.properties, input.mappings.country)
|
||||
if (typeof countryValue === 'string') {
|
||||
country = normalizeCountryToCode(countryValue)
|
||||
}
|
||||
}
|
||||
|
||||
// Build metadata from unmapped columns
|
||||
let metadataJson: Record<string, unknown> | null = null
|
||||
if (input.includeUnmappedInMetadata) {
|
||||
const mappedKeys = new Set([
|
||||
input.mappings.title,
|
||||
input.mappings.teamName,
|
||||
input.mappings.description,
|
||||
input.mappings.tags,
|
||||
input.mappings.country,
|
||||
].filter(Boolean))
|
||||
|
||||
metadataJson = {}
|
||||
for (const [key, value] of Object.entries(record.properties)) {
|
||||
if (!mappedKeys.has(key) && value !== null && value !== undefined) {
|
||||
metadataJson[key] = value
|
||||
}
|
||||
}
|
||||
|
||||
if (Object.keys(metadataJson).length === 0) {
|
||||
metadataJson = null
|
||||
}
|
||||
}
|
||||
|
||||
// Create project
|
||||
await ctx.prisma.project.create({
|
||||
data: {
|
||||
programId: input.programId,
|
||||
status: 'SUBMITTED',
|
||||
title: title.trim(),
|
||||
teamName: typeof teamName === 'string' ? teamName.trim() : null,
|
||||
description: typeof description === 'string' ? description : null,
|
||||
tags,
|
||||
country,
|
||||
metadataJson: metadataJson as Prisma.InputJsonValue ?? undefined,
|
||||
externalIdsJson: {
|
||||
notionPageId: record.id,
|
||||
notionDatabaseId: input.databaseId,
|
||||
} as Prisma.InputJsonValue,
|
||||
},
|
||||
})
|
||||
|
||||
results.imported++
|
||||
} catch (error) {
|
||||
results.errors.push({
|
||||
recordId: record.id,
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
})
|
||||
results.skipped++
|
||||
}
|
||||
}
|
||||
|
||||
// Audit log
|
||||
await ctx.prisma.auditLog.create({
|
||||
data: {
|
||||
userId: ctx.user.id,
|
||||
action: 'IMPORT',
|
||||
entityType: 'Project',
|
||||
detailsJson: {
|
||||
source: 'notion',
|
||||
databaseId: input.databaseId,
|
||||
imported: results.imported,
|
||||
skipped: results.skipped,
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
},
|
||||
})
|
||||
|
||||
return results
|
||||
}),
|
||||
})
|
||||
|
||||
/**
|
||||
* Helper to get a property value from a record
|
||||
*/
|
||||
function getPropertyValue(
|
||||
properties: Record<string, unknown>,
|
||||
propertyName: string
|
||||
): unknown {
|
||||
return properties[propertyName] ?? null
|
||||
}
|
||||
|
||||
@@ -1,351 +1,351 @@
|
||||
import { z } from 'zod'
|
||||
import { router, protectedProcedure, adminProcedure } from '../trpc'
|
||||
import { getPresignedUrl } from '@/lib/minio'
|
||||
import { logAudit } from '../utils/audit'
|
||||
|
||||
// Bucket for partner logos
|
||||
export const PARTNER_BUCKET = 'mopc-partners'
|
||||
|
||||
export const partnerRouter = router({
|
||||
/**
|
||||
* List all partners (admin view)
|
||||
*/
|
||||
list: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
programId: z.string().optional(),
|
||||
partnerType: z.enum(['SPONSOR', 'PARTNER', 'SUPPORTER', 'MEDIA', 'OTHER']).optional(),
|
||||
visibility: z.enum(['ADMIN_ONLY', 'JURY_VISIBLE', 'PUBLIC']).optional(),
|
||||
isActive: z.boolean().optional(),
|
||||
page: z.number().int().min(1).default(1),
|
||||
perPage: z.number().int().min(1).max(100).default(50),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
const where: Record<string, unknown> = {}
|
||||
|
||||
if (input.programId !== undefined) {
|
||||
where.programId = input.programId
|
||||
}
|
||||
if (input.partnerType) {
|
||||
where.partnerType = input.partnerType
|
||||
}
|
||||
if (input.visibility) {
|
||||
where.visibility = input.visibility
|
||||
}
|
||||
if (input.isActive !== undefined) {
|
||||
where.isActive = input.isActive
|
||||
}
|
||||
|
||||
const [data, total] = await Promise.all([
|
||||
ctx.prisma.partner.findMany({
|
||||
where,
|
||||
include: {
|
||||
program: { select: { id: true, name: true, year: true } },
|
||||
},
|
||||
orderBy: [{ sortOrder: 'asc' }, { name: 'asc' }],
|
||||
skip: (input.page - 1) * input.perPage,
|
||||
take: input.perPage,
|
||||
}),
|
||||
ctx.prisma.partner.count({ where }),
|
||||
])
|
||||
|
||||
return {
|
||||
data,
|
||||
total,
|
||||
page: input.page,
|
||||
perPage: input.perPage,
|
||||
totalPages: Math.ceil(total / input.perPage),
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get partners visible to jury members
|
||||
*/
|
||||
getJuryVisible: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
programId: z.string().optional(),
|
||||
partnerType: z.enum(['SPONSOR', 'PARTNER', 'SUPPORTER', 'MEDIA', 'OTHER']).optional(),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
const where: Record<string, unknown> = {
|
||||
isActive: true,
|
||||
visibility: { in: ['JURY_VISIBLE', 'PUBLIC'] },
|
||||
}
|
||||
|
||||
if (input.programId) {
|
||||
where.OR = [{ programId: input.programId }, { programId: null }]
|
||||
}
|
||||
if (input.partnerType) {
|
||||
where.partnerType = input.partnerType
|
||||
}
|
||||
|
||||
return ctx.prisma.partner.findMany({
|
||||
where,
|
||||
orderBy: [{ sortOrder: 'asc' }, { name: 'asc' }],
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get public partners (for public website)
|
||||
*/
|
||||
getPublic: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
programId: z.string().optional(),
|
||||
partnerType: z.enum(['SPONSOR', 'PARTNER', 'SUPPORTER', 'MEDIA', 'OTHER']).optional(),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
const where: Record<string, unknown> = {
|
||||
isActive: true,
|
||||
visibility: 'PUBLIC',
|
||||
}
|
||||
|
||||
if (input.programId) {
|
||||
where.OR = [{ programId: input.programId }, { programId: null }]
|
||||
}
|
||||
if (input.partnerType) {
|
||||
where.partnerType = input.partnerType
|
||||
}
|
||||
|
||||
return ctx.prisma.partner.findMany({
|
||||
where,
|
||||
orderBy: [{ sortOrder: 'asc' }, { name: 'asc' }],
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get a single partner by ID
|
||||
*/
|
||||
get: adminProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
return ctx.prisma.partner.findUniqueOrThrow({
|
||||
where: { id: input.id },
|
||||
include: {
|
||||
program: { select: { id: true, name: true, year: true } },
|
||||
},
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get logo URL for a partner
|
||||
*/
|
||||
getLogoUrl: protectedProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
const partner = await ctx.prisma.partner.findUniqueOrThrow({
|
||||
where: { id: input.id },
|
||||
})
|
||||
|
||||
if (!partner.logoBucket || !partner.logoObjectKey) {
|
||||
return { url: null }
|
||||
}
|
||||
|
||||
const url = await getPresignedUrl(partner.logoBucket, partner.logoObjectKey, 'GET', 900)
|
||||
return { url }
|
||||
}),
|
||||
|
||||
/**
|
||||
* Create a new partner (admin only)
|
||||
*/
|
||||
create: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
programId: z.string().nullable(),
|
||||
name: z.string().min(1).max(255),
|
||||
description: z.string().optional(),
|
||||
website: z.string().url().optional(),
|
||||
partnerType: z.enum(['SPONSOR', 'PARTNER', 'SUPPORTER', 'MEDIA', 'OTHER']).default('PARTNER'),
|
||||
visibility: z.enum(['ADMIN_ONLY', 'JURY_VISIBLE', 'PUBLIC']).default('ADMIN_ONLY'),
|
||||
sortOrder: z.number().int().default(0),
|
||||
isActive: z.boolean().default(true),
|
||||
// Logo info (set after upload)
|
||||
logoFileName: z.string().optional(),
|
||||
logoBucket: z.string().optional(),
|
||||
logoObjectKey: z.string().optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const partner = await ctx.prisma.partner.create({
|
||||
data: input,
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'CREATE',
|
||||
entityType: 'Partner',
|
||||
entityId: partner.id,
|
||||
detailsJson: { name: input.name, partnerType: input.partnerType },
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return partner
|
||||
}),
|
||||
|
||||
/**
|
||||
* Update a partner (admin only)
|
||||
*/
|
||||
update: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
name: z.string().min(1).max(255).optional(),
|
||||
description: z.string().optional().nullable(),
|
||||
website: z.string().url().optional().nullable(),
|
||||
partnerType: z.enum(['SPONSOR', 'PARTNER', 'SUPPORTER', 'MEDIA', 'OTHER']).optional(),
|
||||
visibility: z.enum(['ADMIN_ONLY', 'JURY_VISIBLE', 'PUBLIC']).optional(),
|
||||
sortOrder: z.number().int().optional(),
|
||||
isActive: z.boolean().optional(),
|
||||
// Logo info
|
||||
logoFileName: z.string().optional().nullable(),
|
||||
logoBucket: z.string().optional().nullable(),
|
||||
logoObjectKey: z.string().optional().nullable(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const { id, ...data } = input
|
||||
|
||||
const partner = await ctx.prisma.partner.update({
|
||||
where: { id },
|
||||
data,
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'UPDATE',
|
||||
entityType: 'Partner',
|
||||
entityId: id,
|
||||
detailsJson: data,
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return partner
|
||||
}),
|
||||
|
||||
/**
|
||||
* Delete a partner (admin only)
|
||||
*/
|
||||
delete: adminProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const partner = await ctx.prisma.partner.delete({
|
||||
where: { id: input.id },
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'DELETE',
|
||||
entityType: 'Partner',
|
||||
entityId: input.id,
|
||||
detailsJson: { name: partner.name },
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return partner
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get upload URL for a partner logo (admin only)
|
||||
*/
|
||||
getUploadUrl: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
fileName: z.string(),
|
||||
mimeType: z.string(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ input }) => {
|
||||
const timestamp = Date.now()
|
||||
const sanitizedName = input.fileName.replace(/[^a-zA-Z0-9.-]/g, '_')
|
||||
const objectKey = `logos/${timestamp}-${sanitizedName}`
|
||||
|
||||
const url = await getPresignedUrl(PARTNER_BUCKET, objectKey, 'PUT', 3600)
|
||||
|
||||
return {
|
||||
url,
|
||||
bucket: PARTNER_BUCKET,
|
||||
objectKey,
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Reorder partners (admin only)
|
||||
*/
|
||||
reorder: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
items: z.array(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
sortOrder: z.number().int(),
|
||||
})
|
||||
),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
await ctx.prisma.$transaction(
|
||||
input.items.map((item) =>
|
||||
ctx.prisma.partner.update({
|
||||
where: { id: item.id },
|
||||
data: { sortOrder: item.sortOrder },
|
||||
})
|
||||
)
|
||||
)
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'REORDER',
|
||||
entityType: 'Partner',
|
||||
detailsJson: { count: input.items.length },
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return { success: true }
|
||||
}),
|
||||
|
||||
/**
|
||||
* Bulk update visibility (admin only)
|
||||
*/
|
||||
bulkUpdateVisibility: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
ids: z.array(z.string()),
|
||||
visibility: z.enum(['ADMIN_ONLY', 'JURY_VISIBLE', 'PUBLIC']),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
await ctx.prisma.partner.updateMany({
|
||||
where: { id: { in: input.ids } },
|
||||
data: { visibility: input.visibility },
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'BULK_UPDATE',
|
||||
entityType: 'Partner',
|
||||
detailsJson: { ids: input.ids, visibility: input.visibility },
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return { updated: input.ids.length }
|
||||
}),
|
||||
})
|
||||
import { z } from 'zod'
|
||||
import { router, protectedProcedure, adminProcedure } from '../trpc'
|
||||
import { getPresignedUrl } from '@/lib/minio'
|
||||
import { logAudit } from '../utils/audit'
|
||||
|
||||
// Bucket for partner logos
|
||||
export const PARTNER_BUCKET = 'mopc-partners'
|
||||
|
||||
export const partnerRouter = router({
|
||||
/**
|
||||
* List all partners (admin view)
|
||||
*/
|
||||
list: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
programId: z.string().optional(),
|
||||
partnerType: z.enum(['SPONSOR', 'PARTNER', 'SUPPORTER', 'MEDIA', 'OTHER']).optional(),
|
||||
visibility: z.enum(['ADMIN_ONLY', 'JURY_VISIBLE', 'PUBLIC']).optional(),
|
||||
isActive: z.boolean().optional(),
|
||||
page: z.number().int().min(1).default(1),
|
||||
perPage: z.number().int().min(1).max(100).default(50),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
const where: Record<string, unknown> = {}
|
||||
|
||||
if (input.programId !== undefined) {
|
||||
where.programId = input.programId
|
||||
}
|
||||
if (input.partnerType) {
|
||||
where.partnerType = input.partnerType
|
||||
}
|
||||
if (input.visibility) {
|
||||
where.visibility = input.visibility
|
||||
}
|
||||
if (input.isActive !== undefined) {
|
||||
where.isActive = input.isActive
|
||||
}
|
||||
|
||||
const [data, total] = await Promise.all([
|
||||
ctx.prisma.partner.findMany({
|
||||
where,
|
||||
include: {
|
||||
program: { select: { id: true, name: true, year: true } },
|
||||
},
|
||||
orderBy: [{ sortOrder: 'asc' }, { name: 'asc' }],
|
||||
skip: (input.page - 1) * input.perPage,
|
||||
take: input.perPage,
|
||||
}),
|
||||
ctx.prisma.partner.count({ where }),
|
||||
])
|
||||
|
||||
return {
|
||||
data,
|
||||
total,
|
||||
page: input.page,
|
||||
perPage: input.perPage,
|
||||
totalPages: Math.ceil(total / input.perPage),
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get partners visible to jury members
|
||||
*/
|
||||
getJuryVisible: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
programId: z.string().optional(),
|
||||
partnerType: z.enum(['SPONSOR', 'PARTNER', 'SUPPORTER', 'MEDIA', 'OTHER']).optional(),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
const where: Record<string, unknown> = {
|
||||
isActive: true,
|
||||
visibility: { in: ['JURY_VISIBLE', 'PUBLIC'] },
|
||||
}
|
||||
|
||||
if (input.programId) {
|
||||
where.OR = [{ programId: input.programId }, { programId: null }]
|
||||
}
|
||||
if (input.partnerType) {
|
||||
where.partnerType = input.partnerType
|
||||
}
|
||||
|
||||
return ctx.prisma.partner.findMany({
|
||||
where,
|
||||
orderBy: [{ sortOrder: 'asc' }, { name: 'asc' }],
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get public partners (for public website)
|
||||
*/
|
||||
getPublic: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
programId: z.string().optional(),
|
||||
partnerType: z.enum(['SPONSOR', 'PARTNER', 'SUPPORTER', 'MEDIA', 'OTHER']).optional(),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
const where: Record<string, unknown> = {
|
||||
isActive: true,
|
||||
visibility: 'PUBLIC',
|
||||
}
|
||||
|
||||
if (input.programId) {
|
||||
where.OR = [{ programId: input.programId }, { programId: null }]
|
||||
}
|
||||
if (input.partnerType) {
|
||||
where.partnerType = input.partnerType
|
||||
}
|
||||
|
||||
return ctx.prisma.partner.findMany({
|
||||
where,
|
||||
orderBy: [{ sortOrder: 'asc' }, { name: 'asc' }],
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get a single partner by ID
|
||||
*/
|
||||
get: adminProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
return ctx.prisma.partner.findUniqueOrThrow({
|
||||
where: { id: input.id },
|
||||
include: {
|
||||
program: { select: { id: true, name: true, year: true } },
|
||||
},
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get logo URL for a partner
|
||||
*/
|
||||
getLogoUrl: protectedProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
const partner = await ctx.prisma.partner.findUniqueOrThrow({
|
||||
where: { id: input.id },
|
||||
})
|
||||
|
||||
if (!partner.logoBucket || !partner.logoObjectKey) {
|
||||
return { url: null }
|
||||
}
|
||||
|
||||
const url = await getPresignedUrl(partner.logoBucket, partner.logoObjectKey, 'GET', 900)
|
||||
return { url }
|
||||
}),
|
||||
|
||||
/**
|
||||
* Create a new partner (admin only)
|
||||
*/
|
||||
create: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
programId: z.string().nullable(),
|
||||
name: z.string().min(1).max(255),
|
||||
description: z.string().optional(),
|
||||
website: z.string().url().optional(),
|
||||
partnerType: z.enum(['SPONSOR', 'PARTNER', 'SUPPORTER', 'MEDIA', 'OTHER']).default('PARTNER'),
|
||||
visibility: z.enum(['ADMIN_ONLY', 'JURY_VISIBLE', 'PUBLIC']).default('ADMIN_ONLY'),
|
||||
sortOrder: z.number().int().default(0),
|
||||
isActive: z.boolean().default(true),
|
||||
// Logo info (set after upload)
|
||||
logoFileName: z.string().optional(),
|
||||
logoBucket: z.string().optional(),
|
||||
logoObjectKey: z.string().optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const partner = await ctx.prisma.partner.create({
|
||||
data: input,
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'CREATE',
|
||||
entityType: 'Partner',
|
||||
entityId: partner.id,
|
||||
detailsJson: { name: input.name, partnerType: input.partnerType },
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return partner
|
||||
}),
|
||||
|
||||
/**
|
||||
* Update a partner (admin only)
|
||||
*/
|
||||
update: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
name: z.string().min(1).max(255).optional(),
|
||||
description: z.string().optional().nullable(),
|
||||
website: z.string().url().optional().nullable(),
|
||||
partnerType: z.enum(['SPONSOR', 'PARTNER', 'SUPPORTER', 'MEDIA', 'OTHER']).optional(),
|
||||
visibility: z.enum(['ADMIN_ONLY', 'JURY_VISIBLE', 'PUBLIC']).optional(),
|
||||
sortOrder: z.number().int().optional(),
|
||||
isActive: z.boolean().optional(),
|
||||
// Logo info
|
||||
logoFileName: z.string().optional().nullable(),
|
||||
logoBucket: z.string().optional().nullable(),
|
||||
logoObjectKey: z.string().optional().nullable(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const { id, ...data } = input
|
||||
|
||||
const partner = await ctx.prisma.partner.update({
|
||||
where: { id },
|
||||
data,
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'UPDATE',
|
||||
entityType: 'Partner',
|
||||
entityId: id,
|
||||
detailsJson: data,
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return partner
|
||||
}),
|
||||
|
||||
/**
|
||||
* Delete a partner (admin only)
|
||||
*/
|
||||
delete: adminProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const partner = await ctx.prisma.partner.delete({
|
||||
where: { id: input.id },
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'DELETE',
|
||||
entityType: 'Partner',
|
||||
entityId: input.id,
|
||||
detailsJson: { name: partner.name },
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return partner
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get upload URL for a partner logo (admin only)
|
||||
*/
|
||||
getUploadUrl: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
fileName: z.string(),
|
||||
mimeType: z.string(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ input }) => {
|
||||
const timestamp = Date.now()
|
||||
const sanitizedName = input.fileName.replace(/[^a-zA-Z0-9.-]/g, '_')
|
||||
const objectKey = `logos/${timestamp}-${sanitizedName}`
|
||||
|
||||
const url = await getPresignedUrl(PARTNER_BUCKET, objectKey, 'PUT', 3600)
|
||||
|
||||
return {
|
||||
url,
|
||||
bucket: PARTNER_BUCKET,
|
||||
objectKey,
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Reorder partners (admin only)
|
||||
*/
|
||||
reorder: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
items: z.array(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
sortOrder: z.number().int(),
|
||||
})
|
||||
),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
await ctx.prisma.$transaction(
|
||||
input.items.map((item) =>
|
||||
ctx.prisma.partner.update({
|
||||
where: { id: item.id },
|
||||
data: { sortOrder: item.sortOrder },
|
||||
})
|
||||
)
|
||||
)
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'REORDER',
|
||||
entityType: 'Partner',
|
||||
detailsJson: { count: input.items.length },
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return { success: true }
|
||||
}),
|
||||
|
||||
/**
|
||||
* Bulk update visibility (admin only)
|
||||
*/
|
||||
bulkUpdateVisibility: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
ids: z.array(z.string()),
|
||||
visibility: z.enum(['ADMIN_ONLY', 'JURY_VISIBLE', 'PUBLIC']),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
await ctx.prisma.partner.updateMany({
|
||||
where: { id: { in: input.ids } },
|
||||
data: { visibility: input.visibility },
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'BULK_UPDATE',
|
||||
entityType: 'Partner',
|
||||
detailsJson: { ids: input.ids, visibility: input.visibility },
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return { updated: input.ids.length }
|
||||
}),
|
||||
})
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,299 +1,299 @@
|
||||
import { z } from 'zod'
|
||||
import type { Prisma } from '@prisma/client'
|
||||
import { router, protectedProcedure, adminProcedure } from '../trpc'
|
||||
import { logAudit } from '../utils/audit'
|
||||
import { wizardConfigSchema } from '@/types/wizard-config'
|
||||
import { parseWizardConfig } from '@/lib/wizard-config'
|
||||
|
||||
export const programRouter = router({
|
||||
/**
|
||||
* List all programs with optional filtering.
|
||||
* When includeStages is true, returns stages nested under
|
||||
* pipelines -> tracks -> stages, flattened as `stages` for convenience.
|
||||
*/
|
||||
list: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
status: z.enum(['DRAFT', 'ACTIVE', 'ARCHIVED']).optional(),
|
||||
includeStages: z.boolean().optional(),
|
||||
}).optional()
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
const includeStages = input?.includeStages || false
|
||||
|
||||
const programs = await ctx.prisma.program.findMany({
|
||||
where: input?.status ? { status: input.status } : undefined,
|
||||
orderBy: { year: 'desc' },
|
||||
include: includeStages
|
||||
? {
|
||||
pipelines: {
|
||||
include: {
|
||||
tracks: {
|
||||
include: {
|
||||
stages: {
|
||||
orderBy: { sortOrder: 'asc' },
|
||||
include: {
|
||||
_count: {
|
||||
select: { assignments: true, projectStageStates: true },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
: undefined,
|
||||
})
|
||||
|
||||
// Flatten stages into a rounds-compatible shape for backward compatibility
|
||||
return programs.map((p) => ({
|
||||
...p,
|
||||
// Provide a flat `stages` array for convenience
|
||||
stages: (p as any).pipelines?.flatMap((pipeline: any) =>
|
||||
pipeline.tracks?.flatMap((track: any) =>
|
||||
(track.stages || []).map((stage: any) => ({
|
||||
...stage,
|
||||
pipelineName: pipeline.name,
|
||||
trackName: track.name,
|
||||
// Backward-compatible _count shape
|
||||
_count: {
|
||||
projects: stage._count?.projectStageStates || 0,
|
||||
assignments: stage._count?.assignments || 0,
|
||||
},
|
||||
}))
|
||||
) || []
|
||||
) || [],
|
||||
// Legacy alias
|
||||
rounds: (p as any).pipelines?.flatMap((pipeline: any) =>
|
||||
pipeline.tracks?.flatMap((track: any) =>
|
||||
(track.stages || []).map((stage: any) => ({
|
||||
id: stage.id,
|
||||
name: stage.name,
|
||||
status: stage.status === 'STAGE_ACTIVE' ? 'ACTIVE'
|
||||
: stage.status === 'STAGE_CLOSED' ? 'CLOSED'
|
||||
: stage.status,
|
||||
votingEndAt: stage.windowCloseAt,
|
||||
_count: {
|
||||
projects: stage._count?.projectStageStates || 0,
|
||||
assignments: stage._count?.assignments || 0,
|
||||
},
|
||||
}))
|
||||
) || []
|
||||
) || [],
|
||||
}))
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get a single program with its stages (via pipelines)
|
||||
*/
|
||||
get: protectedProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
const program = await ctx.prisma.program.findUniqueOrThrow({
|
||||
where: { id: input.id },
|
||||
include: {
|
||||
pipelines: {
|
||||
include: {
|
||||
tracks: {
|
||||
include: {
|
||||
stages: {
|
||||
orderBy: { sortOrder: 'asc' },
|
||||
include: {
|
||||
_count: {
|
||||
select: { assignments: true, projectStageStates: true },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
// Flatten stages for convenience
|
||||
const stages = (program as any).pipelines?.flatMap((pipeline: any) =>
|
||||
pipeline.tracks?.flatMap((track: any) =>
|
||||
(track.stages || []).map((stage: any) => ({
|
||||
...stage,
|
||||
_count: {
|
||||
projects: stage._count?.projectStageStates || 0,
|
||||
assignments: stage._count?.assignments || 0,
|
||||
},
|
||||
}))
|
||||
) || []
|
||||
) || []
|
||||
|
||||
return {
|
||||
...program,
|
||||
stages,
|
||||
// Legacy alias
|
||||
rounds: stages.map((s: any) => ({
|
||||
id: s.id,
|
||||
name: s.name,
|
||||
status: s.status === 'STAGE_ACTIVE' ? 'ACTIVE'
|
||||
: s.status === 'STAGE_CLOSED' ? 'CLOSED'
|
||||
: s.status,
|
||||
votingEndAt: s.windowCloseAt,
|
||||
_count: s._count,
|
||||
})),
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Create a new program (admin only)
|
||||
*/
|
||||
create: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
name: z.string().min(1).max(255),
|
||||
year: z.number().int().min(2020).max(2100),
|
||||
description: z.string().optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const program = await ctx.prisma.program.create({
|
||||
data: input,
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'CREATE',
|
||||
entityType: 'Program',
|
||||
entityId: program.id,
|
||||
detailsJson: input,
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return program
|
||||
}),
|
||||
|
||||
/**
|
||||
* Update a program (admin only)
|
||||
*/
|
||||
update: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
name: z.string().min(1).max(255).optional(),
|
||||
slug: z.string().min(1).max(100).optional(),
|
||||
status: z.enum(['DRAFT', 'ACTIVE', 'ARCHIVED']).optional(),
|
||||
description: z.string().optional(),
|
||||
settingsJson: z.record(z.any()).optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const { id, ...data } = input
|
||||
|
||||
const program = await ctx.prisma.program.update({
|
||||
where: { id },
|
||||
data,
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'UPDATE',
|
||||
entityType: 'Program',
|
||||
entityId: id,
|
||||
detailsJson: data,
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return program
|
||||
}),
|
||||
|
||||
/**
|
||||
* Delete a program (admin only)
|
||||
* Note: This will cascade delete all rounds, projects, etc.
|
||||
*/
|
||||
delete: adminProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const program = await ctx.prisma.program.delete({
|
||||
where: { id: input.id },
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'DELETE',
|
||||
entityType: 'Program',
|
||||
entityId: input.id,
|
||||
detailsJson: { name: program.name, year: program.year },
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return program
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get wizard config for a program (parsed from settingsJson)
|
||||
*/
|
||||
getWizardConfig: protectedProcedure
|
||||
.input(z.object({ programId: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
const program = await ctx.prisma.program.findUniqueOrThrow({
|
||||
where: { id: input.programId },
|
||||
select: { settingsJson: true },
|
||||
})
|
||||
return parseWizardConfig(program.settingsJson)
|
||||
}),
|
||||
|
||||
/**
|
||||
* Update wizard config for a program (admin only)
|
||||
*/
|
||||
updateWizardConfig: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
programId: z.string(),
|
||||
wizardConfig: wizardConfigSchema,
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const program = await ctx.prisma.program.findUniqueOrThrow({
|
||||
where: { id: input.programId },
|
||||
select: { settingsJson: true },
|
||||
})
|
||||
|
||||
const currentSettings = (program.settingsJson || {}) as Record<string, unknown>
|
||||
|
||||
const updatedSettings = {
|
||||
...currentSettings,
|
||||
wizardConfig: input.wizardConfig,
|
||||
}
|
||||
|
||||
await ctx.prisma.program.update({
|
||||
where: { id: input.programId },
|
||||
data: {
|
||||
settingsJson: updatedSettings as Prisma.InputJsonValue,
|
||||
},
|
||||
})
|
||||
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'UPDATE',
|
||||
entityType: 'Program',
|
||||
entityId: input.programId,
|
||||
detailsJson: {
|
||||
field: 'wizardConfig',
|
||||
stepsEnabled: input.wizardConfig.steps.filter((s) => s.enabled).length,
|
||||
totalSteps: input.wizardConfig.steps.length,
|
||||
customFieldsCount: input.wizardConfig.customFields?.length ?? 0,
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return { success: true }
|
||||
}),
|
||||
})
|
||||
import { z } from 'zod'
|
||||
import type { Prisma } from '@prisma/client'
|
||||
import { router, protectedProcedure, adminProcedure } from '../trpc'
|
||||
import { logAudit } from '../utils/audit'
|
||||
import { wizardConfigSchema } from '@/types/wizard-config'
|
||||
import { parseWizardConfig } from '@/lib/wizard-config'
|
||||
|
||||
export const programRouter = router({
|
||||
/**
|
||||
* List all programs with optional filtering.
|
||||
* When includeStages is true, returns stages nested under
|
||||
* pipelines -> tracks -> stages, flattened as `stages` for convenience.
|
||||
*/
|
||||
list: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
status: z.enum(['DRAFT', 'ACTIVE', 'ARCHIVED']).optional(),
|
||||
includeStages: z.boolean().optional(),
|
||||
}).optional()
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
const includeStages = input?.includeStages || false
|
||||
|
||||
const programs = await ctx.prisma.program.findMany({
|
||||
where: input?.status ? { status: input.status } : undefined,
|
||||
orderBy: { year: 'desc' },
|
||||
include: includeStages
|
||||
? {
|
||||
pipelines: {
|
||||
include: {
|
||||
tracks: {
|
||||
include: {
|
||||
stages: {
|
||||
orderBy: { sortOrder: 'asc' },
|
||||
include: {
|
||||
_count: {
|
||||
select: { assignments: true, projectStageStates: true },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
: undefined,
|
||||
})
|
||||
|
||||
// Flatten stages into a rounds-compatible shape for backward compatibility
|
||||
return programs.map((p) => ({
|
||||
...p,
|
||||
// Provide a flat `stages` array for convenience
|
||||
stages: (p as any).pipelines?.flatMap((pipeline: any) =>
|
||||
pipeline.tracks?.flatMap((track: any) =>
|
||||
(track.stages || []).map((stage: any) => ({
|
||||
...stage,
|
||||
pipelineName: pipeline.name,
|
||||
trackName: track.name,
|
||||
// Backward-compatible _count shape
|
||||
_count: {
|
||||
projects: stage._count?.projectStageStates || 0,
|
||||
assignments: stage._count?.assignments || 0,
|
||||
},
|
||||
}))
|
||||
) || []
|
||||
) || [],
|
||||
// Legacy alias
|
||||
rounds: (p as any).pipelines?.flatMap((pipeline: any) =>
|
||||
pipeline.tracks?.flatMap((track: any) =>
|
||||
(track.stages || []).map((stage: any) => ({
|
||||
id: stage.id,
|
||||
name: stage.name,
|
||||
status: stage.status === 'STAGE_ACTIVE' ? 'ACTIVE'
|
||||
: stage.status === 'STAGE_CLOSED' ? 'CLOSED'
|
||||
: stage.status,
|
||||
votingEndAt: stage.windowCloseAt,
|
||||
_count: {
|
||||
projects: stage._count?.projectStageStates || 0,
|
||||
assignments: stage._count?.assignments || 0,
|
||||
},
|
||||
}))
|
||||
) || []
|
||||
) || [],
|
||||
}))
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get a single program with its stages (via pipelines)
|
||||
*/
|
||||
get: protectedProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
const program = await ctx.prisma.program.findUniqueOrThrow({
|
||||
where: { id: input.id },
|
||||
include: {
|
||||
pipelines: {
|
||||
include: {
|
||||
tracks: {
|
||||
include: {
|
||||
stages: {
|
||||
orderBy: { sortOrder: 'asc' },
|
||||
include: {
|
||||
_count: {
|
||||
select: { assignments: true, projectStageStates: true },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
// Flatten stages for convenience
|
||||
const stages = (program as any).pipelines?.flatMap((pipeline: any) =>
|
||||
pipeline.tracks?.flatMap((track: any) =>
|
||||
(track.stages || []).map((stage: any) => ({
|
||||
...stage,
|
||||
_count: {
|
||||
projects: stage._count?.projectStageStates || 0,
|
||||
assignments: stage._count?.assignments || 0,
|
||||
},
|
||||
}))
|
||||
) || []
|
||||
) || []
|
||||
|
||||
return {
|
||||
...program,
|
||||
stages,
|
||||
// Legacy alias
|
||||
rounds: stages.map((s: any) => ({
|
||||
id: s.id,
|
||||
name: s.name,
|
||||
status: s.status === 'STAGE_ACTIVE' ? 'ACTIVE'
|
||||
: s.status === 'STAGE_CLOSED' ? 'CLOSED'
|
||||
: s.status,
|
||||
votingEndAt: s.windowCloseAt,
|
||||
_count: s._count,
|
||||
})),
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Create a new program (admin only)
|
||||
*/
|
||||
create: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
name: z.string().min(1).max(255),
|
||||
year: z.number().int().min(2020).max(2100),
|
||||
description: z.string().optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const program = await ctx.prisma.program.create({
|
||||
data: input,
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'CREATE',
|
||||
entityType: 'Program',
|
||||
entityId: program.id,
|
||||
detailsJson: input,
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return program
|
||||
}),
|
||||
|
||||
/**
|
||||
* Update a program (admin only)
|
||||
*/
|
||||
update: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
name: z.string().min(1).max(255).optional(),
|
||||
slug: z.string().min(1).max(100).optional(),
|
||||
status: z.enum(['DRAFT', 'ACTIVE', 'ARCHIVED']).optional(),
|
||||
description: z.string().optional(),
|
||||
settingsJson: z.record(z.any()).optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const { id, ...data } = input
|
||||
|
||||
const program = await ctx.prisma.program.update({
|
||||
where: { id },
|
||||
data,
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'UPDATE',
|
||||
entityType: 'Program',
|
||||
entityId: id,
|
||||
detailsJson: data,
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return program
|
||||
}),
|
||||
|
||||
/**
|
||||
* Delete a program (admin only)
|
||||
* Note: This will cascade delete all rounds, projects, etc.
|
||||
*/
|
||||
delete: adminProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const program = await ctx.prisma.program.delete({
|
||||
where: { id: input.id },
|
||||
})
|
||||
|
||||
// Audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'DELETE',
|
||||
entityType: 'Program',
|
||||
entityId: input.id,
|
||||
detailsJson: { name: program.name, year: program.year },
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return program
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get wizard config for a program (parsed from settingsJson)
|
||||
*/
|
||||
getWizardConfig: protectedProcedure
|
||||
.input(z.object({ programId: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
const program = await ctx.prisma.program.findUniqueOrThrow({
|
||||
where: { id: input.programId },
|
||||
select: { settingsJson: true },
|
||||
})
|
||||
return parseWizardConfig(program.settingsJson)
|
||||
}),
|
||||
|
||||
/**
|
||||
* Update wizard config for a program (admin only)
|
||||
*/
|
||||
updateWizardConfig: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
programId: z.string(),
|
||||
wizardConfig: wizardConfigSchema,
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const program = await ctx.prisma.program.findUniqueOrThrow({
|
||||
where: { id: input.programId },
|
||||
select: { settingsJson: true },
|
||||
})
|
||||
|
||||
const currentSettings = (program.settingsJson || {}) as Record<string, unknown>
|
||||
|
||||
const updatedSettings = {
|
||||
...currentSettings,
|
||||
wizardConfig: input.wizardConfig,
|
||||
}
|
||||
|
||||
await ctx.prisma.program.update({
|
||||
where: { id: input.programId },
|
||||
data: {
|
||||
settingsJson: updatedSettings as Prisma.InputJsonValue,
|
||||
},
|
||||
})
|
||||
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'UPDATE',
|
||||
entityType: 'Program',
|
||||
entityId: input.programId,
|
||||
detailsJson: {
|
||||
field: 'wizardConfig',
|
||||
stepsEnabled: input.wizardConfig.steps.filter((s) => s.enabled).length,
|
||||
totalSteps: input.wizardConfig.steps.length,
|
||||
customFieldsCount: input.wizardConfig.customFields?.length ?? 0,
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return { success: true }
|
||||
}),
|
||||
})
|
||||
|
||||
@@ -1,203 +1,203 @@
|
||||
import { z } from 'zod'
|
||||
import { TRPCError } from '@trpc/server'
|
||||
import { router, adminProcedure } from '../trpc'
|
||||
import { logAudit } from '../utils/audit'
|
||||
|
||||
/**
|
||||
* Project Pool Router
|
||||
*
|
||||
* Manages the pool of unassigned projects (projects not yet assigned to any stage).
|
||||
* Provides procedures for listing unassigned projects and bulk assigning them to stages.
|
||||
*/
|
||||
export const projectPoolRouter = router({
|
||||
/**
|
||||
* List unassigned projects with filtering and pagination
|
||||
* Projects not assigned to any stage
|
||||
*/
|
||||
listUnassigned: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
programId: z.string(), // Required - must specify which program
|
||||
competitionCategory: z
|
||||
.enum(['STARTUP', 'BUSINESS_CONCEPT'])
|
||||
.optional(),
|
||||
search: z.string().optional(), // Search in title, teamName, description
|
||||
page: z.number().int().min(1).default(1),
|
||||
perPage: z.number().int().min(1).max(200).default(20),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
const { programId, competitionCategory, search, page, perPage } = input
|
||||
const skip = (page - 1) * perPage
|
||||
|
||||
// Build where clause
|
||||
const where: Record<string, unknown> = {
|
||||
programId,
|
||||
stageStates: { none: {} }, // Only unassigned projects (not in any stage)
|
||||
}
|
||||
|
||||
// Filter by competition category
|
||||
if (competitionCategory) {
|
||||
where.competitionCategory = competitionCategory
|
||||
}
|
||||
|
||||
// Search in title, teamName, description
|
||||
if (search) {
|
||||
where.OR = [
|
||||
{ title: { contains: search, mode: 'insensitive' } },
|
||||
{ teamName: { contains: search, mode: 'insensitive' } },
|
||||
{ description: { contains: search, mode: 'insensitive' } },
|
||||
]
|
||||
}
|
||||
|
||||
// Execute queries in parallel
|
||||
const [projects, total] = await Promise.all([
|
||||
ctx.prisma.project.findMany({
|
||||
where,
|
||||
skip,
|
||||
take: perPage,
|
||||
orderBy: { createdAt: 'desc' },
|
||||
select: {
|
||||
id: true,
|
||||
title: true,
|
||||
teamName: true,
|
||||
description: true,
|
||||
competitionCategory: true,
|
||||
oceanIssue: true,
|
||||
country: true,
|
||||
status: true,
|
||||
submittedAt: true,
|
||||
createdAt: true,
|
||||
tags: true,
|
||||
wantsMentorship: true,
|
||||
programId: true,
|
||||
_count: {
|
||||
select: {
|
||||
files: true,
|
||||
teamMembers: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
ctx.prisma.project.count({ where }),
|
||||
])
|
||||
|
||||
return {
|
||||
projects,
|
||||
total,
|
||||
page,
|
||||
perPage,
|
||||
totalPages: Math.ceil(total / perPage),
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Bulk assign projects to a stage
|
||||
*
|
||||
* Validates that:
|
||||
* - All projects exist
|
||||
* - Stage exists
|
||||
*
|
||||
* Creates:
|
||||
* - ProjectStageState entries for each project
|
||||
* - Project.status updated to 'ASSIGNED'
|
||||
* - ProjectStatusHistory records for each project
|
||||
* - Audit log
|
||||
*/
|
||||
assignToStage: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
projectIds: z.array(z.string()).min(1).max(200), // Max 200 projects at once
|
||||
stageId: z.string(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const { projectIds, stageId } = input
|
||||
|
||||
// Step 1: Fetch all projects to validate
|
||||
const projects = await ctx.prisma.project.findMany({
|
||||
where: {
|
||||
id: { in: projectIds },
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
title: true,
|
||||
programId: true,
|
||||
},
|
||||
})
|
||||
|
||||
// Validate all projects were found
|
||||
if (projects.length !== projectIds.length) {
|
||||
const foundIds = new Set(projects.map((p) => p.id))
|
||||
const missingIds = projectIds.filter((id) => !foundIds.has(id))
|
||||
throw new TRPCError({
|
||||
code: 'BAD_REQUEST',
|
||||
message: `Some projects were not found: ${missingIds.join(', ')}`,
|
||||
})
|
||||
}
|
||||
|
||||
// Verify stage exists and get its trackId
|
||||
const stage = await ctx.prisma.stage.findUniqueOrThrow({
|
||||
where: { id: stageId },
|
||||
select: { id: true, trackId: true },
|
||||
})
|
||||
|
||||
// Step 2: Perform bulk assignment in a transaction
|
||||
const result = await ctx.prisma.$transaction(async (tx) => {
|
||||
// Create ProjectStageState entries for each project (skip existing)
|
||||
const stageStateData = projectIds.map((projectId) => ({
|
||||
projectId,
|
||||
stageId,
|
||||
trackId: stage.trackId,
|
||||
state: 'PENDING' as const,
|
||||
}))
|
||||
|
||||
await tx.projectStageState.createMany({
|
||||
data: stageStateData,
|
||||
skipDuplicates: true,
|
||||
})
|
||||
|
||||
// Update project statuses
|
||||
const updatedProjects = await tx.project.updateMany({
|
||||
where: {
|
||||
id: { in: projectIds },
|
||||
},
|
||||
data: {
|
||||
status: 'ASSIGNED',
|
||||
},
|
||||
})
|
||||
|
||||
// Create status history records for each project
|
||||
await tx.projectStatusHistory.createMany({
|
||||
data: projectIds.map((projectId) => ({
|
||||
projectId,
|
||||
status: 'ASSIGNED',
|
||||
changedBy: ctx.user?.id,
|
||||
})),
|
||||
})
|
||||
|
||||
// Create audit log
|
||||
await logAudit({
|
||||
prisma: tx,
|
||||
userId: ctx.user?.id,
|
||||
action: 'BULK_ASSIGN_TO_STAGE',
|
||||
entityType: 'Project',
|
||||
detailsJson: {
|
||||
stageId,
|
||||
projectCount: projectIds.length,
|
||||
projectIds,
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return updatedProjects
|
||||
})
|
||||
|
||||
return {
|
||||
success: true,
|
||||
assignedCount: result.count,
|
||||
stageId,
|
||||
}
|
||||
}),
|
||||
})
|
||||
import { z } from 'zod'
|
||||
import { TRPCError } from '@trpc/server'
|
||||
import { router, adminProcedure } from '../trpc'
|
||||
import { logAudit } from '../utils/audit'
|
||||
|
||||
/**
|
||||
* Project Pool Router
|
||||
*
|
||||
* Manages the pool of unassigned projects (projects not yet assigned to any stage).
|
||||
* Provides procedures for listing unassigned projects and bulk assigning them to stages.
|
||||
*/
|
||||
export const projectPoolRouter = router({
|
||||
/**
|
||||
* List unassigned projects with filtering and pagination
|
||||
* Projects not assigned to any stage
|
||||
*/
|
||||
listUnassigned: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
programId: z.string(), // Required - must specify which program
|
||||
competitionCategory: z
|
||||
.enum(['STARTUP', 'BUSINESS_CONCEPT'])
|
||||
.optional(),
|
||||
search: z.string().optional(), // Search in title, teamName, description
|
||||
page: z.number().int().min(1).default(1),
|
||||
perPage: z.number().int().min(1).max(200).default(20),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
const { programId, competitionCategory, search, page, perPage } = input
|
||||
const skip = (page - 1) * perPage
|
||||
|
||||
// Build where clause
|
||||
const where: Record<string, unknown> = {
|
||||
programId,
|
||||
stageStates: { none: {} }, // Only unassigned projects (not in any stage)
|
||||
}
|
||||
|
||||
// Filter by competition category
|
||||
if (competitionCategory) {
|
||||
where.competitionCategory = competitionCategory
|
||||
}
|
||||
|
||||
// Search in title, teamName, description
|
||||
if (search) {
|
||||
where.OR = [
|
||||
{ title: { contains: search, mode: 'insensitive' } },
|
||||
{ teamName: { contains: search, mode: 'insensitive' } },
|
||||
{ description: { contains: search, mode: 'insensitive' } },
|
||||
]
|
||||
}
|
||||
|
||||
// Execute queries in parallel
|
||||
const [projects, total] = await Promise.all([
|
||||
ctx.prisma.project.findMany({
|
||||
where,
|
||||
skip,
|
||||
take: perPage,
|
||||
orderBy: { createdAt: 'desc' },
|
||||
select: {
|
||||
id: true,
|
||||
title: true,
|
||||
teamName: true,
|
||||
description: true,
|
||||
competitionCategory: true,
|
||||
oceanIssue: true,
|
||||
country: true,
|
||||
status: true,
|
||||
submittedAt: true,
|
||||
createdAt: true,
|
||||
tags: true,
|
||||
wantsMentorship: true,
|
||||
programId: true,
|
||||
_count: {
|
||||
select: {
|
||||
files: true,
|
||||
teamMembers: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
ctx.prisma.project.count({ where }),
|
||||
])
|
||||
|
||||
return {
|
||||
projects,
|
||||
total,
|
||||
page,
|
||||
perPage,
|
||||
totalPages: Math.ceil(total / perPage),
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Bulk assign projects to a stage
|
||||
*
|
||||
* Validates that:
|
||||
* - All projects exist
|
||||
* - Stage exists
|
||||
*
|
||||
* Creates:
|
||||
* - ProjectStageState entries for each project
|
||||
* - Project.status updated to 'ASSIGNED'
|
||||
* - ProjectStatusHistory records for each project
|
||||
* - Audit log
|
||||
*/
|
||||
assignToStage: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
projectIds: z.array(z.string()).min(1).max(200), // Max 200 projects at once
|
||||
stageId: z.string(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const { projectIds, stageId } = input
|
||||
|
||||
// Step 1: Fetch all projects to validate
|
||||
const projects = await ctx.prisma.project.findMany({
|
||||
where: {
|
||||
id: { in: projectIds },
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
title: true,
|
||||
programId: true,
|
||||
},
|
||||
})
|
||||
|
||||
// Validate all projects were found
|
||||
if (projects.length !== projectIds.length) {
|
||||
const foundIds = new Set(projects.map((p) => p.id))
|
||||
const missingIds = projectIds.filter((id) => !foundIds.has(id))
|
||||
throw new TRPCError({
|
||||
code: 'BAD_REQUEST',
|
||||
message: `Some projects were not found: ${missingIds.join(', ')}`,
|
||||
})
|
||||
}
|
||||
|
||||
// Verify stage exists and get its trackId
|
||||
const stage = await ctx.prisma.stage.findUniqueOrThrow({
|
||||
where: { id: stageId },
|
||||
select: { id: true, trackId: true },
|
||||
})
|
||||
|
||||
// Step 2: Perform bulk assignment in a transaction
|
||||
const result = await ctx.prisma.$transaction(async (tx) => {
|
||||
// Create ProjectStageState entries for each project (skip existing)
|
||||
const stageStateData = projectIds.map((projectId) => ({
|
||||
projectId,
|
||||
stageId,
|
||||
trackId: stage.trackId,
|
||||
state: 'PENDING' as const,
|
||||
}))
|
||||
|
||||
await tx.projectStageState.createMany({
|
||||
data: stageStateData,
|
||||
skipDuplicates: true,
|
||||
})
|
||||
|
||||
// Update project statuses
|
||||
const updatedProjects = await tx.project.updateMany({
|
||||
where: {
|
||||
id: { in: projectIds },
|
||||
},
|
||||
data: {
|
||||
status: 'ASSIGNED',
|
||||
},
|
||||
})
|
||||
|
||||
// Create status history records for each project
|
||||
await tx.projectStatusHistory.createMany({
|
||||
data: projectIds.map((projectId) => ({
|
||||
projectId,
|
||||
status: 'ASSIGNED',
|
||||
changedBy: ctx.user?.id,
|
||||
})),
|
||||
})
|
||||
|
||||
// Create audit log
|
||||
await logAudit({
|
||||
prisma: tx,
|
||||
userId: ctx.user?.id,
|
||||
action: 'BULK_ASSIGN_TO_STAGE',
|
||||
entityType: 'Project',
|
||||
detailsJson: {
|
||||
stageId,
|
||||
projectCount: projectIds.length,
|
||||
projectIds,
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return updatedProjects
|
||||
})
|
||||
|
||||
return {
|
||||
success: true,
|
||||
assignedCount: result.count,
|
||||
stageId,
|
||||
}
|
||||
}),
|
||||
})
|
||||
|
||||
@@ -13,6 +13,7 @@ import { logAudit } from '../utils/audit'
|
||||
import { sendInvitationEmail } from '@/lib/email'
|
||||
|
||||
const INVITE_TOKEN_EXPIRY_MS = 7 * 24 * 60 * 60 * 1000 // 7 days
|
||||
const STATUSES_WITH_TEAM_NOTIFICATIONS = ['SEMIFINALIST', 'FINALIST', 'REJECTED'] as const
|
||||
|
||||
// Valid project status transitions
|
||||
const VALID_PROJECT_TRANSITIONS: Record<string, string[]> = {
|
||||
@@ -245,6 +246,98 @@ export const projectRouter = router({
|
||||
return { ids: projects.map((p) => p.id) }
|
||||
}),
|
||||
|
||||
/**
|
||||
* Preview project-team recipients before bulk status update notifications.
|
||||
* Used by admin UI confirmation dialog to verify notification audience.
|
||||
*/
|
||||
previewStatusNotificationRecipients: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
ids: z.array(z.string()).min(1).max(10000),
|
||||
status: z.enum([
|
||||
'SUBMITTED',
|
||||
'ELIGIBLE',
|
||||
'ASSIGNED',
|
||||
'SEMIFINALIST',
|
||||
'FINALIST',
|
||||
'REJECTED',
|
||||
]),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
const statusTriggersNotification = STATUSES_WITH_TEAM_NOTIFICATIONS.includes(
|
||||
input.status as (typeof STATUSES_WITH_TEAM_NOTIFICATIONS)[number]
|
||||
)
|
||||
|
||||
if (!statusTriggersNotification) {
|
||||
return {
|
||||
status: input.status,
|
||||
statusTriggersNotification,
|
||||
totalProjects: 0,
|
||||
projectsWithRecipients: 0,
|
||||
totalRecipients: 0,
|
||||
projects: [] as Array<{
|
||||
id: string
|
||||
title: string
|
||||
recipientCount: number
|
||||
recipientsPreview: string[]
|
||||
hasMoreRecipients: boolean
|
||||
}>,
|
||||
}
|
||||
}
|
||||
|
||||
const projects = await ctx.prisma.project.findMany({
|
||||
where: { id: { in: input.ids } },
|
||||
select: {
|
||||
id: true,
|
||||
title: true,
|
||||
teamMembers: {
|
||||
select: {
|
||||
userId: true,
|
||||
user: {
|
||||
select: {
|
||||
email: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
orderBy: { title: 'asc' },
|
||||
})
|
||||
|
||||
const MAX_PREVIEW_RECIPIENTS_PER_PROJECT = 8
|
||||
|
||||
const mappedProjects = projects.map((project) => {
|
||||
const uniqueEmails = Array.from(
|
||||
new Set(
|
||||
project.teamMembers
|
||||
.map((member) => member.user?.email?.toLowerCase().trim() ?? '')
|
||||
.filter((email) => email.length > 0)
|
||||
)
|
||||
)
|
||||
|
||||
return {
|
||||
id: project.id,
|
||||
title: project.title,
|
||||
recipientCount: uniqueEmails.length,
|
||||
recipientsPreview: uniqueEmails.slice(0, MAX_PREVIEW_RECIPIENTS_PER_PROJECT),
|
||||
hasMoreRecipients: uniqueEmails.length > MAX_PREVIEW_RECIPIENTS_PER_PROJECT,
|
||||
}
|
||||
})
|
||||
|
||||
const projectsWithRecipients = mappedProjects.filter((p) => p.recipientCount > 0).length
|
||||
const totalRecipients = mappedProjects.reduce((sum, project) => sum + project.recipientCount, 0)
|
||||
|
||||
return {
|
||||
status: input.status,
|
||||
statusTriggersNotification,
|
||||
totalProjects: mappedProjects.length,
|
||||
projectsWithRecipients,
|
||||
totalRecipients,
|
||||
projects: mappedProjects,
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get filter options for the project list (distinct values)
|
||||
*/
|
||||
|
||||
@@ -1,291 +1,383 @@
|
||||
import { z } from 'zod'
|
||||
import { TRPCError } from '@trpc/server'
|
||||
import { Prisma } from '@prisma/client'
|
||||
import { router, adminProcedure } from '../trpc'
|
||||
import { logAudit } from '@/server/utils/audit'
|
||||
import {
|
||||
previewRouting,
|
||||
evaluateRoutingRules,
|
||||
executeRouting,
|
||||
} from '@/server/services/routing-engine'
|
||||
|
||||
export const routingRouter = router({
|
||||
/**
|
||||
* Preview routing: show where projects would land without executing.
|
||||
* Delegates to routing-engine service for proper predicate evaluation.
|
||||
*/
|
||||
preview: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
pipelineId: z.string(),
|
||||
projectIds: z.array(z.string()).min(1).max(500),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const results = await previewRouting(
|
||||
input.projectIds,
|
||||
input.pipelineId,
|
||||
ctx.prisma
|
||||
)
|
||||
|
||||
return {
|
||||
pipelineId: input.pipelineId,
|
||||
totalProjects: results.length,
|
||||
results: results.map((r) => ({
|
||||
projectId: r.projectId,
|
||||
projectTitle: r.projectTitle,
|
||||
matchedRuleId: r.matchedRule?.ruleId ?? null,
|
||||
matchedRuleName: r.matchedRule?.ruleName ?? null,
|
||||
targetTrackId: r.matchedRule?.destinationTrackId ?? null,
|
||||
targetTrackName: null as string | null,
|
||||
targetStageId: r.matchedRule?.destinationStageId ?? null,
|
||||
targetStageName: null as string | null,
|
||||
routingMode: r.matchedRule?.routingMode ?? null,
|
||||
reason: r.reason,
|
||||
})),
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Execute routing: evaluate rules and move projects into tracks/stages.
|
||||
* Delegates to routing-engine service which enforces PARALLEL/EXCLUSIVE/POST_MAIN modes.
|
||||
*/
|
||||
execute: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
pipelineId: z.string(),
|
||||
projectIds: z.array(z.string()).min(1).max(500),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
// Verify pipeline is ACTIVE
|
||||
const pipeline = await ctx.prisma.pipeline.findUniqueOrThrow({
|
||||
where: { id: input.pipelineId },
|
||||
})
|
||||
|
||||
if (pipeline.status !== 'ACTIVE') {
|
||||
throw new TRPCError({
|
||||
code: 'PRECONDITION_FAILED',
|
||||
message: 'Pipeline must be ACTIVE to route projects',
|
||||
})
|
||||
}
|
||||
|
||||
// Load projects to get their current active stage states
|
||||
const projects = await ctx.prisma.project.findMany({
|
||||
where: { id: { in: input.projectIds } },
|
||||
select: {
|
||||
id: true,
|
||||
title: true,
|
||||
projectStageStates: {
|
||||
where: { exitedAt: null },
|
||||
select: { stageId: true },
|
||||
take: 1,
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
if (projects.length === 0) {
|
||||
throw new TRPCError({
|
||||
code: 'NOT_FOUND',
|
||||
message: 'No matching projects found',
|
||||
})
|
||||
}
|
||||
|
||||
let routedCount = 0
|
||||
let skippedCount = 0
|
||||
const errors: Array<{ projectId: string; error: string }> = []
|
||||
|
||||
for (const project of projects) {
|
||||
const activePSS = project.projectStageStates[0]
|
||||
if (!activePSS) {
|
||||
skippedCount++
|
||||
continue
|
||||
}
|
||||
|
||||
// Evaluate routing rules using the service
|
||||
const matchedRule = await evaluateRoutingRules(
|
||||
project.id,
|
||||
activePSS.stageId,
|
||||
input.pipelineId,
|
||||
ctx.prisma
|
||||
)
|
||||
|
||||
if (!matchedRule) {
|
||||
skippedCount++
|
||||
continue
|
||||
}
|
||||
|
||||
// Execute routing using the service (handles PARALLEL/EXCLUSIVE/POST_MAIN)
|
||||
const result = await executeRouting(
|
||||
project.id,
|
||||
matchedRule,
|
||||
ctx.user.id,
|
||||
ctx.prisma
|
||||
)
|
||||
|
||||
if (result.success) {
|
||||
routedCount++
|
||||
} else {
|
||||
skippedCount++
|
||||
if (result.errors?.length) {
|
||||
errors.push({ projectId: project.id, error: result.errors[0] })
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Record batch-level audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'ROUTING_EXECUTED',
|
||||
entityType: 'Pipeline',
|
||||
entityId: input.pipelineId,
|
||||
detailsJson: {
|
||||
projectCount: projects.length,
|
||||
routedCount,
|
||||
skippedCount,
|
||||
errors: errors.length > 0 ? errors : undefined,
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return { routedCount, skippedCount, totalProjects: projects.length }
|
||||
}),
|
||||
|
||||
/**
|
||||
* List routing rules for a pipeline
|
||||
*/
|
||||
listRules: adminProcedure
|
||||
.input(z.object({ pipelineId: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
return ctx.prisma.routingRule.findMany({
|
||||
where: { pipelineId: input.pipelineId },
|
||||
orderBy: [{ isActive: 'desc' }, { priority: 'desc' }],
|
||||
include: {
|
||||
sourceTrack: { select: { id: true, name: true } },
|
||||
destinationTrack: { select: { id: true, name: true } },
|
||||
},
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* Create or update a routing rule
|
||||
*/
|
||||
import { z } from 'zod'
|
||||
import { TRPCError } from '@trpc/server'
|
||||
import { Prisma } from '@prisma/client'
|
||||
import { router, adminProcedure } from '../trpc'
|
||||
import { logAudit } from '@/server/utils/audit'
|
||||
import {
|
||||
previewRouting,
|
||||
evaluateRoutingRules,
|
||||
executeRouting,
|
||||
} from '@/server/services/routing-engine'
|
||||
|
||||
export const routingRouter = router({
|
||||
/**
|
||||
* Preview routing: show where projects would land without executing.
|
||||
* Delegates to routing-engine service for proper predicate evaluation.
|
||||
*/
|
||||
preview: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
pipelineId: z.string(),
|
||||
projectIds: z.array(z.string()).min(1).max(500),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const results = await previewRouting(
|
||||
input.projectIds,
|
||||
input.pipelineId,
|
||||
ctx.prisma
|
||||
)
|
||||
|
||||
return {
|
||||
pipelineId: input.pipelineId,
|
||||
totalProjects: results.length,
|
||||
results: results.map((r) => ({
|
||||
projectId: r.projectId,
|
||||
projectTitle: r.projectTitle,
|
||||
matchedRuleId: r.matchedRule?.ruleId ?? null,
|
||||
matchedRuleName: r.matchedRule?.ruleName ?? null,
|
||||
targetTrackId: r.matchedRule?.destinationTrackId ?? null,
|
||||
targetTrackName: null as string | null,
|
||||
targetStageId: r.matchedRule?.destinationStageId ?? null,
|
||||
targetStageName: null as string | null,
|
||||
routingMode: r.matchedRule?.routingMode ?? null,
|
||||
reason: r.reason,
|
||||
})),
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Execute routing: evaluate rules and move projects into tracks/stages.
|
||||
* Delegates to routing-engine service which enforces PARALLEL/EXCLUSIVE/POST_MAIN modes.
|
||||
*/
|
||||
execute: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
pipelineId: z.string(),
|
||||
projectIds: z.array(z.string()).min(1).max(500),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
// Verify pipeline is ACTIVE
|
||||
const pipeline = await ctx.prisma.pipeline.findUniqueOrThrow({
|
||||
where: { id: input.pipelineId },
|
||||
})
|
||||
|
||||
if (pipeline.status !== 'ACTIVE') {
|
||||
throw new TRPCError({
|
||||
code: 'PRECONDITION_FAILED',
|
||||
message: 'Pipeline must be ACTIVE to route projects',
|
||||
})
|
||||
}
|
||||
|
||||
// Load projects to get their current active stage states
|
||||
const projects = await ctx.prisma.project.findMany({
|
||||
where: { id: { in: input.projectIds } },
|
||||
select: {
|
||||
id: true,
|
||||
title: true,
|
||||
projectStageStates: {
|
||||
where: { exitedAt: null },
|
||||
select: { stageId: true },
|
||||
take: 1,
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
if (projects.length === 0) {
|
||||
throw new TRPCError({
|
||||
code: 'NOT_FOUND',
|
||||
message: 'No matching projects found',
|
||||
})
|
||||
}
|
||||
|
||||
let routedCount = 0
|
||||
let skippedCount = 0
|
||||
const errors: Array<{ projectId: string; error: string }> = []
|
||||
|
||||
for (const project of projects) {
|
||||
const activePSS = project.projectStageStates[0]
|
||||
if (!activePSS) {
|
||||
skippedCount++
|
||||
continue
|
||||
}
|
||||
|
||||
// Evaluate routing rules using the service
|
||||
const matchedRule = await evaluateRoutingRules(
|
||||
project.id,
|
||||
activePSS.stageId,
|
||||
input.pipelineId,
|
||||
ctx.prisma
|
||||
)
|
||||
|
||||
if (!matchedRule) {
|
||||
skippedCount++
|
||||
continue
|
||||
}
|
||||
|
||||
// Execute routing using the service (handles PARALLEL/EXCLUSIVE/POST_MAIN)
|
||||
const result = await executeRouting(
|
||||
project.id,
|
||||
matchedRule,
|
||||
ctx.user.id,
|
||||
ctx.prisma
|
||||
)
|
||||
|
||||
if (result.success) {
|
||||
routedCount++
|
||||
} else {
|
||||
skippedCount++
|
||||
if (result.errors?.length) {
|
||||
errors.push({ projectId: project.id, error: result.errors[0] })
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Record batch-level audit log
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'ROUTING_EXECUTED',
|
||||
entityType: 'Pipeline',
|
||||
entityId: input.pipelineId,
|
||||
detailsJson: {
|
||||
projectCount: projects.length,
|
||||
routedCount,
|
||||
skippedCount,
|
||||
errors: errors.length > 0 ? errors : undefined,
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return { routedCount, skippedCount, totalProjects: projects.length }
|
||||
}),
|
||||
|
||||
/**
|
||||
* List routing rules for a pipeline
|
||||
*/
|
||||
listRules: adminProcedure
|
||||
.input(z.object({ pipelineId: z.string() }))
|
||||
.query(async ({ ctx, input }) => {
|
||||
return ctx.prisma.routingRule.findMany({
|
||||
where: { pipelineId: input.pipelineId },
|
||||
orderBy: [{ isActive: 'desc' }, { priority: 'desc' }],
|
||||
include: {
|
||||
sourceTrack: { select: { id: true, name: true } },
|
||||
destinationTrack: { select: { id: true, name: true } },
|
||||
},
|
||||
})
|
||||
}),
|
||||
|
||||
/**
|
||||
* Create or update a routing rule
|
||||
*/
|
||||
upsertRule: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
id: z.string().optional(), // If provided, update existing
|
||||
pipelineId: z.string(),
|
||||
name: z.string().min(1).max(255),
|
||||
scope: z.enum(['global', 'track', 'stage']).default('global'),
|
||||
sourceTrackId: z.string().optional().nullable(),
|
||||
destinationTrackId: z.string(),
|
||||
destinationStageId: z.string().optional().nullable(),
|
||||
predicateJson: z.record(z.unknown()),
|
||||
priority: z.number().int().min(0).max(1000).default(0),
|
||||
isActive: z.boolean().default(true),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const { id, predicateJson, ...data } = input
|
||||
|
||||
// Verify destination track exists in this pipeline
|
||||
const destTrack = await ctx.prisma.track.findFirst({
|
||||
where: { id: input.destinationTrackId, pipelineId: input.pipelineId },
|
||||
})
|
||||
if (!destTrack) {
|
||||
throw new TRPCError({
|
||||
code: 'BAD_REQUEST',
|
||||
message: 'Destination track must belong to the same pipeline',
|
||||
})
|
||||
}
|
||||
|
||||
if (id) {
|
||||
// Update existing rule
|
||||
const rule = await ctx.prisma.$transaction(async (tx) => {
|
||||
const updated = await tx.routingRule.update({
|
||||
where: { id },
|
||||
data: {
|
||||
...data,
|
||||
predicateJson: predicateJson as Prisma.InputJsonValue,
|
||||
},
|
||||
})
|
||||
|
||||
await logAudit({
|
||||
prisma: tx,
|
||||
userId: ctx.user.id,
|
||||
action: 'UPDATE',
|
||||
entityType: 'RoutingRule',
|
||||
entityId: id,
|
||||
detailsJson: { name: input.name, priority: input.priority },
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return updated
|
||||
})
|
||||
|
||||
return rule
|
||||
} else {
|
||||
// Create new rule
|
||||
const rule = await ctx.prisma.$transaction(async (tx) => {
|
||||
const created = await tx.routingRule.create({
|
||||
data: {
|
||||
...data,
|
||||
predicateJson: predicateJson as Prisma.InputJsonValue,
|
||||
},
|
||||
})
|
||||
|
||||
await logAudit({
|
||||
prisma: tx,
|
||||
userId: ctx.user.id,
|
||||
action: 'CREATE',
|
||||
entityType: 'RoutingRule',
|
||||
entityId: created.id,
|
||||
detailsJson: { name: input.name, priority: input.priority },
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return created
|
||||
})
|
||||
|
||||
.input(
|
||||
z.object({
|
||||
id: z.string().optional(), // If provided, update existing
|
||||
pipelineId: z.string(),
|
||||
name: z.string().min(1).max(255),
|
||||
scope: z.enum(['global', 'track', 'stage']).default('global'),
|
||||
sourceTrackId: z.string().optional().nullable(),
|
||||
destinationTrackId: z.string(),
|
||||
destinationStageId: z.string().optional().nullable(),
|
||||
predicateJson: z.record(z.unknown()),
|
||||
priority: z.number().int().min(0).max(1000).default(0),
|
||||
isActive: z.boolean().default(true),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const { id, predicateJson, ...data } = input
|
||||
|
||||
// Verify destination track exists in this pipeline
|
||||
const destTrack = await ctx.prisma.track.findFirst({
|
||||
where: { id: input.destinationTrackId, pipelineId: input.pipelineId },
|
||||
})
|
||||
if (!destTrack) {
|
||||
throw new TRPCError({
|
||||
code: 'BAD_REQUEST',
|
||||
message: 'Destination track must belong to the same pipeline',
|
||||
})
|
||||
}
|
||||
|
||||
if (id) {
|
||||
// Update existing rule
|
||||
const rule = await ctx.prisma.$transaction(async (tx) => {
|
||||
const updated = await tx.routingRule.update({
|
||||
where: { id },
|
||||
data: {
|
||||
...data,
|
||||
predicateJson: predicateJson as Prisma.InputJsonValue,
|
||||
},
|
||||
})
|
||||
|
||||
await logAudit({
|
||||
prisma: tx,
|
||||
userId: ctx.user.id,
|
||||
action: 'UPDATE',
|
||||
entityType: 'RoutingRule',
|
||||
entityId: id,
|
||||
detailsJson: { name: input.name, priority: input.priority },
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return updated
|
||||
})
|
||||
|
||||
return rule
|
||||
} else {
|
||||
// Create new rule
|
||||
const rule = await ctx.prisma.$transaction(async (tx) => {
|
||||
const created = await tx.routingRule.create({
|
||||
data: {
|
||||
...data,
|
||||
predicateJson: predicateJson as Prisma.InputJsonValue,
|
||||
},
|
||||
})
|
||||
|
||||
await logAudit({
|
||||
prisma: tx,
|
||||
userId: ctx.user.id,
|
||||
action: 'CREATE',
|
||||
entityType: 'RoutingRule',
|
||||
entityId: created.id,
|
||||
detailsJson: { name: input.name, priority: input.priority },
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return created
|
||||
})
|
||||
|
||||
return rule
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Toggle a routing rule on/off
|
||||
* Delete a routing rule
|
||||
*/
|
||||
toggleRule: adminProcedure
|
||||
deleteRule: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
isActive: z.boolean(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const rule = await ctx.prisma.$transaction(async (tx) => {
|
||||
const updated = await tx.routingRule.update({
|
||||
const existing = await ctx.prisma.routingRule.findUniqueOrThrow({
|
||||
where: { id: input.id },
|
||||
select: { id: true, name: true, pipelineId: true },
|
||||
})
|
||||
|
||||
await ctx.prisma.$transaction(async (tx) => {
|
||||
await tx.routingRule.delete({
|
||||
where: { id: input.id },
|
||||
data: { isActive: input.isActive },
|
||||
})
|
||||
|
||||
await logAudit({
|
||||
prisma: tx,
|
||||
userId: ctx.user.id,
|
||||
action: input.isActive ? 'ROUTING_RULE_ENABLED' : 'ROUTING_RULE_DISABLED',
|
||||
action: 'DELETE',
|
||||
entityType: 'RoutingRule',
|
||||
entityId: input.id,
|
||||
detailsJson: { isActive: input.isActive, name: updated.name },
|
||||
detailsJson: { name: existing.name, pipelineId: existing.pipelineId },
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return updated
|
||||
})
|
||||
|
||||
return rule
|
||||
return { success: true }
|
||||
}),
|
||||
})
|
||||
|
||||
/**
|
||||
* Reorder routing rules by priority (highest first)
|
||||
*/
|
||||
reorderRules: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
pipelineId: z.string(),
|
||||
orderedIds: z.array(z.string()).min(1),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const rules = await ctx.prisma.routingRule.findMany({
|
||||
where: { pipelineId: input.pipelineId },
|
||||
select: { id: true },
|
||||
})
|
||||
const ruleIds = new Set(rules.map((rule) => rule.id))
|
||||
|
||||
for (const id of input.orderedIds) {
|
||||
if (!ruleIds.has(id)) {
|
||||
throw new TRPCError({
|
||||
code: 'BAD_REQUEST',
|
||||
message: `Routing rule ${id} does not belong to this pipeline`,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
await ctx.prisma.$transaction(async (tx) => {
|
||||
const maxPriority = input.orderedIds.length
|
||||
await Promise.all(
|
||||
input.orderedIds.map((id, index) =>
|
||||
tx.routingRule.update({
|
||||
where: { id },
|
||||
data: {
|
||||
priority: maxPriority - index,
|
||||
},
|
||||
})
|
||||
)
|
||||
)
|
||||
|
||||
await logAudit({
|
||||
prisma: tx,
|
||||
userId: ctx.user.id,
|
||||
action: 'UPDATE',
|
||||
entityType: 'Pipeline',
|
||||
entityId: input.pipelineId,
|
||||
detailsJson: {
|
||||
action: 'ROUTING_RULES_REORDERED',
|
||||
ruleCount: input.orderedIds.length,
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
})
|
||||
|
||||
return { success: true }
|
||||
}),
|
||||
|
||||
/**
|
||||
* Toggle a routing rule on/off
|
||||
*/
|
||||
toggleRule: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
isActive: z.boolean(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const rule = await ctx.prisma.$transaction(async (tx) => {
|
||||
const updated = await tx.routingRule.update({
|
||||
where: { id: input.id },
|
||||
data: { isActive: input.isActive },
|
||||
})
|
||||
|
||||
await logAudit({
|
||||
prisma: tx,
|
||||
userId: ctx.user.id,
|
||||
action: input.isActive ? 'ROUTING_RULE_ENABLED' : 'ROUTING_RULE_DISABLED',
|
||||
entityType: 'RoutingRule',
|
||||
entityId: input.id,
|
||||
detailsJson: { isActive: input.isActive, name: updated.name },
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return updated
|
||||
})
|
||||
|
||||
return rule
|
||||
}),
|
||||
})
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,257 +1,257 @@
|
||||
import { z } from 'zod'
|
||||
import { TRPCError } from '@trpc/server'
|
||||
import { Prisma } from '@prisma/client'
|
||||
import { router, adminProcedure } from '../trpc'
|
||||
import {
|
||||
testTypeformConnection,
|
||||
getTypeformSchema,
|
||||
getAllTypeformResponses,
|
||||
responseToObject,
|
||||
} from '@/lib/typeform'
|
||||
import { normalizeCountryToCode } from '@/lib/countries'
|
||||
|
||||
export const typeformImportRouter = router({
|
||||
/**
|
||||
* Test connection to Typeform API
|
||||
*/
|
||||
testConnection: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
apiKey: z.string().min(1),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ input }) => {
|
||||
return testTypeformConnection(input.apiKey)
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get form schema (questions/fields) for mapping
|
||||
*/
|
||||
getFormSchema: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
apiKey: z.string().min(1),
|
||||
formId: z.string().min(1),
|
||||
})
|
||||
)
|
||||
.query(async ({ input }) => {
|
||||
try {
|
||||
return await getTypeformSchema(input.apiKey, input.formId)
|
||||
} catch (error) {
|
||||
throw new TRPCError({
|
||||
code: 'BAD_REQUEST',
|
||||
message:
|
||||
error instanceof Error
|
||||
? error.message
|
||||
: 'Failed to fetch form schema',
|
||||
})
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Preview responses from Typeform
|
||||
*/
|
||||
previewResponses: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
apiKey: z.string().min(1),
|
||||
formId: z.string().min(1),
|
||||
limit: z.number().int().min(1).max(10).default(5),
|
||||
})
|
||||
)
|
||||
.query(async ({ input }) => {
|
||||
try {
|
||||
const schema = await getTypeformSchema(input.apiKey, input.formId)
|
||||
const responses = await getAllTypeformResponses(
|
||||
input.apiKey,
|
||||
input.formId,
|
||||
input.limit
|
||||
)
|
||||
|
||||
// Convert responses to flat objects for preview
|
||||
const records = responses.map((r) => responseToObject(r, schema.fields))
|
||||
|
||||
return {
|
||||
records,
|
||||
count: records.length,
|
||||
formTitle: schema.title,
|
||||
}
|
||||
} catch (error) {
|
||||
throw new TRPCError({
|
||||
code: 'BAD_REQUEST',
|
||||
message:
|
||||
error instanceof Error
|
||||
? error.message
|
||||
: 'Failed to fetch responses from Typeform',
|
||||
})
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Import projects from Typeform responses
|
||||
*/
|
||||
importProjects: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
apiKey: z.string().min(1),
|
||||
formId: z.string().min(1),
|
||||
programId: z.string(),
|
||||
mappings: z.object({
|
||||
title: z.string(),
|
||||
teamName: z.string().optional(),
|
||||
description: z.string().optional(),
|
||||
tags: z.string().optional(),
|
||||
email: z.string().optional(),
|
||||
country: z.string().optional(),
|
||||
}),
|
||||
includeUnmappedInMetadata: z.boolean().default(true),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
await ctx.prisma.program.findUniqueOrThrow({
|
||||
where: { id: input.programId },
|
||||
})
|
||||
|
||||
// Fetch form schema and all responses
|
||||
const schema = await getTypeformSchema(input.apiKey, input.formId)
|
||||
const responses = await getAllTypeformResponses(input.apiKey, input.formId)
|
||||
|
||||
if (responses.length === 0) {
|
||||
return { imported: 0, skipped: 0, errors: [] }
|
||||
}
|
||||
|
||||
const results = {
|
||||
imported: 0,
|
||||
skipped: 0,
|
||||
errors: [] as Array<{ responseId: string; error: string }>,
|
||||
}
|
||||
|
||||
// Process each response
|
||||
for (const response of responses) {
|
||||
try {
|
||||
const record = responseToObject(response, schema.fields)
|
||||
|
||||
// Get mapped values
|
||||
const title = record[input.mappings.title]
|
||||
|
||||
if (!title || typeof title !== 'string' || !title.trim()) {
|
||||
results.errors.push({
|
||||
responseId: response.response_id,
|
||||
error: 'Missing or invalid title',
|
||||
})
|
||||
results.skipped++
|
||||
continue
|
||||
}
|
||||
|
||||
const teamName = input.mappings.teamName
|
||||
? record[input.mappings.teamName]
|
||||
: null
|
||||
|
||||
const description = input.mappings.description
|
||||
? record[input.mappings.description]
|
||||
: null
|
||||
|
||||
let tags: string[] = []
|
||||
if (input.mappings.tags) {
|
||||
const tagsValue = record[input.mappings.tags]
|
||||
if (Array.isArray(tagsValue)) {
|
||||
tags = tagsValue.filter((t): t is string => typeof t === 'string')
|
||||
} else if (typeof tagsValue === 'string') {
|
||||
tags = tagsValue.split(',').map((t) => t.trim()).filter(Boolean)
|
||||
}
|
||||
}
|
||||
|
||||
// Get country and normalize to ISO code
|
||||
let country: string | null = null
|
||||
if (input.mappings.country) {
|
||||
const countryValue = record[input.mappings.country]
|
||||
if (typeof countryValue === 'string') {
|
||||
country = normalizeCountryToCode(countryValue)
|
||||
}
|
||||
}
|
||||
|
||||
// Build metadata from unmapped columns
|
||||
let metadataJson: Record<string, unknown> | null = null
|
||||
if (input.includeUnmappedInMetadata) {
|
||||
const mappedKeys = new Set([
|
||||
input.mappings.title,
|
||||
input.mappings.teamName,
|
||||
input.mappings.description,
|
||||
input.mappings.tags,
|
||||
input.mappings.email,
|
||||
input.mappings.country,
|
||||
'_response_id',
|
||||
'_submitted_at',
|
||||
].filter(Boolean))
|
||||
|
||||
metadataJson = {}
|
||||
for (const [key, value] of Object.entries(record)) {
|
||||
if (!mappedKeys.has(key) && value !== null && value !== undefined) {
|
||||
metadataJson[key] = value
|
||||
}
|
||||
}
|
||||
|
||||
// Add submission email if mapped
|
||||
if (input.mappings.email) {
|
||||
const email = record[input.mappings.email]
|
||||
if (email) {
|
||||
metadataJson._submissionEmail = email
|
||||
}
|
||||
}
|
||||
|
||||
// Add submission timestamp
|
||||
metadataJson._submittedAt = response.submitted_at
|
||||
|
||||
if (Object.keys(metadataJson).length === 0) {
|
||||
metadataJson = null
|
||||
}
|
||||
}
|
||||
|
||||
// Create project
|
||||
await ctx.prisma.project.create({
|
||||
data: {
|
||||
programId: input.programId,
|
||||
status: 'SUBMITTED',
|
||||
title: String(title).trim(),
|
||||
teamName: typeof teamName === 'string' ? teamName.trim() : null,
|
||||
description: typeof description === 'string' ? description : null,
|
||||
tags,
|
||||
country,
|
||||
metadataJson: metadataJson as Prisma.InputJsonValue ?? undefined,
|
||||
externalIdsJson: {
|
||||
typeformResponseId: response.response_id,
|
||||
typeformFormId: input.formId,
|
||||
} as Prisma.InputJsonValue,
|
||||
},
|
||||
})
|
||||
|
||||
results.imported++
|
||||
} catch (error) {
|
||||
results.errors.push({
|
||||
responseId: response.response_id,
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
})
|
||||
results.skipped++
|
||||
}
|
||||
}
|
||||
|
||||
// Audit log
|
||||
await ctx.prisma.auditLog.create({
|
||||
data: {
|
||||
userId: ctx.user.id,
|
||||
action: 'IMPORT',
|
||||
entityType: 'Project',
|
||||
detailsJson: {
|
||||
source: 'typeform',
|
||||
formId: input.formId,
|
||||
imported: results.imported,
|
||||
skipped: results.skipped,
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
},
|
||||
})
|
||||
|
||||
return results
|
||||
}),
|
||||
})
|
||||
import { z } from 'zod'
|
||||
import { TRPCError } from '@trpc/server'
|
||||
import { Prisma } from '@prisma/client'
|
||||
import { router, adminProcedure } from '../trpc'
|
||||
import {
|
||||
testTypeformConnection,
|
||||
getTypeformSchema,
|
||||
getAllTypeformResponses,
|
||||
responseToObject,
|
||||
} from '@/lib/typeform'
|
||||
import { normalizeCountryToCode } from '@/lib/countries'
|
||||
|
||||
export const typeformImportRouter = router({
|
||||
/**
|
||||
* Test connection to Typeform API
|
||||
*/
|
||||
testConnection: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
apiKey: z.string().min(1),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ input }) => {
|
||||
return testTypeformConnection(input.apiKey)
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get form schema (questions/fields) for mapping
|
||||
*/
|
||||
getFormSchema: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
apiKey: z.string().min(1),
|
||||
formId: z.string().min(1),
|
||||
})
|
||||
)
|
||||
.query(async ({ input }) => {
|
||||
try {
|
||||
return await getTypeformSchema(input.apiKey, input.formId)
|
||||
} catch (error) {
|
||||
throw new TRPCError({
|
||||
code: 'BAD_REQUEST',
|
||||
message:
|
||||
error instanceof Error
|
||||
? error.message
|
||||
: 'Failed to fetch form schema',
|
||||
})
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Preview responses from Typeform
|
||||
*/
|
||||
previewResponses: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
apiKey: z.string().min(1),
|
||||
formId: z.string().min(1),
|
||||
limit: z.number().int().min(1).max(10).default(5),
|
||||
})
|
||||
)
|
||||
.query(async ({ input }) => {
|
||||
try {
|
||||
const schema = await getTypeformSchema(input.apiKey, input.formId)
|
||||
const responses = await getAllTypeformResponses(
|
||||
input.apiKey,
|
||||
input.formId,
|
||||
input.limit
|
||||
)
|
||||
|
||||
// Convert responses to flat objects for preview
|
||||
const records = responses.map((r) => responseToObject(r, schema.fields))
|
||||
|
||||
return {
|
||||
records,
|
||||
count: records.length,
|
||||
formTitle: schema.title,
|
||||
}
|
||||
} catch (error) {
|
||||
throw new TRPCError({
|
||||
code: 'BAD_REQUEST',
|
||||
message:
|
||||
error instanceof Error
|
||||
? error.message
|
||||
: 'Failed to fetch responses from Typeform',
|
||||
})
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Import projects from Typeform responses
|
||||
*/
|
||||
importProjects: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
apiKey: z.string().min(1),
|
||||
formId: z.string().min(1),
|
||||
programId: z.string(),
|
||||
mappings: z.object({
|
||||
title: z.string(),
|
||||
teamName: z.string().optional(),
|
||||
description: z.string().optional(),
|
||||
tags: z.string().optional(),
|
||||
email: z.string().optional(),
|
||||
country: z.string().optional(),
|
||||
}),
|
||||
includeUnmappedInMetadata: z.boolean().default(true),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
await ctx.prisma.program.findUniqueOrThrow({
|
||||
where: { id: input.programId },
|
||||
})
|
||||
|
||||
// Fetch form schema and all responses
|
||||
const schema = await getTypeformSchema(input.apiKey, input.formId)
|
||||
const responses = await getAllTypeformResponses(input.apiKey, input.formId)
|
||||
|
||||
if (responses.length === 0) {
|
||||
return { imported: 0, skipped: 0, errors: [] }
|
||||
}
|
||||
|
||||
const results = {
|
||||
imported: 0,
|
||||
skipped: 0,
|
||||
errors: [] as Array<{ responseId: string; error: string }>,
|
||||
}
|
||||
|
||||
// Process each response
|
||||
for (const response of responses) {
|
||||
try {
|
||||
const record = responseToObject(response, schema.fields)
|
||||
|
||||
// Get mapped values
|
||||
const title = record[input.mappings.title]
|
||||
|
||||
if (!title || typeof title !== 'string' || !title.trim()) {
|
||||
results.errors.push({
|
||||
responseId: response.response_id,
|
||||
error: 'Missing or invalid title',
|
||||
})
|
||||
results.skipped++
|
||||
continue
|
||||
}
|
||||
|
||||
const teamName = input.mappings.teamName
|
||||
? record[input.mappings.teamName]
|
||||
: null
|
||||
|
||||
const description = input.mappings.description
|
||||
? record[input.mappings.description]
|
||||
: null
|
||||
|
||||
let tags: string[] = []
|
||||
if (input.mappings.tags) {
|
||||
const tagsValue = record[input.mappings.tags]
|
||||
if (Array.isArray(tagsValue)) {
|
||||
tags = tagsValue.filter((t): t is string => typeof t === 'string')
|
||||
} else if (typeof tagsValue === 'string') {
|
||||
tags = tagsValue.split(',').map((t) => t.trim()).filter(Boolean)
|
||||
}
|
||||
}
|
||||
|
||||
// Get country and normalize to ISO code
|
||||
let country: string | null = null
|
||||
if (input.mappings.country) {
|
||||
const countryValue = record[input.mappings.country]
|
||||
if (typeof countryValue === 'string') {
|
||||
country = normalizeCountryToCode(countryValue)
|
||||
}
|
||||
}
|
||||
|
||||
// Build metadata from unmapped columns
|
||||
let metadataJson: Record<string, unknown> | null = null
|
||||
if (input.includeUnmappedInMetadata) {
|
||||
const mappedKeys = new Set([
|
||||
input.mappings.title,
|
||||
input.mappings.teamName,
|
||||
input.mappings.description,
|
||||
input.mappings.tags,
|
||||
input.mappings.email,
|
||||
input.mappings.country,
|
||||
'_response_id',
|
||||
'_submitted_at',
|
||||
].filter(Boolean))
|
||||
|
||||
metadataJson = {}
|
||||
for (const [key, value] of Object.entries(record)) {
|
||||
if (!mappedKeys.has(key) && value !== null && value !== undefined) {
|
||||
metadataJson[key] = value
|
||||
}
|
||||
}
|
||||
|
||||
// Add submission email if mapped
|
||||
if (input.mappings.email) {
|
||||
const email = record[input.mappings.email]
|
||||
if (email) {
|
||||
metadataJson._submissionEmail = email
|
||||
}
|
||||
}
|
||||
|
||||
// Add submission timestamp
|
||||
metadataJson._submittedAt = response.submitted_at
|
||||
|
||||
if (Object.keys(metadataJson).length === 0) {
|
||||
metadataJson = null
|
||||
}
|
||||
}
|
||||
|
||||
// Create project
|
||||
await ctx.prisma.project.create({
|
||||
data: {
|
||||
programId: input.programId,
|
||||
status: 'SUBMITTED',
|
||||
title: String(title).trim(),
|
||||
teamName: typeof teamName === 'string' ? teamName.trim() : null,
|
||||
description: typeof description === 'string' ? description : null,
|
||||
tags,
|
||||
country,
|
||||
metadataJson: metadataJson as Prisma.InputJsonValue ?? undefined,
|
||||
externalIdsJson: {
|
||||
typeformResponseId: response.response_id,
|
||||
typeformFormId: input.formId,
|
||||
} as Prisma.InputJsonValue,
|
||||
},
|
||||
})
|
||||
|
||||
results.imported++
|
||||
} catch (error) {
|
||||
results.errors.push({
|
||||
responseId: response.response_id,
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
})
|
||||
results.skipped++
|
||||
}
|
||||
}
|
||||
|
||||
// Audit log
|
||||
await ctx.prisma.auditLog.create({
|
||||
data: {
|
||||
userId: ctx.user.id,
|
||||
action: 'IMPORT',
|
||||
entityType: 'Project',
|
||||
detailsJson: {
|
||||
source: 'typeform',
|
||||
formId: input.formId,
|
||||
imported: results.imported,
|
||||
skipped: results.skipped,
|
||||
},
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
},
|
||||
})
|
||||
|
||||
return results
|
||||
}),
|
||||
})
|
||||
|
||||
@@ -87,6 +87,7 @@ export const userRouter = router({
|
||||
updateProfile: protectedProcedure
|
||||
.input(
|
||||
z.object({
|
||||
email: z.string().email().optional(),
|
||||
name: z.string().min(1).max(255).optional(),
|
||||
bio: z.string().max(1000).optional(),
|
||||
phoneNumber: z.string().max(20).optional().nullable(),
|
||||
@@ -98,7 +99,34 @@ export const userRouter = router({
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const { bio, expertiseTags, availabilityJson, preferredWorkload, digestFrequency, ...directFields } = input
|
||||
const {
|
||||
bio,
|
||||
expertiseTags,
|
||||
availabilityJson,
|
||||
preferredWorkload,
|
||||
digestFrequency,
|
||||
email,
|
||||
...directFields
|
||||
} = input
|
||||
|
||||
const normalizedEmail = email?.toLowerCase().trim()
|
||||
|
||||
if (normalizedEmail !== undefined) {
|
||||
const existing = await ctx.prisma.user.findFirst({
|
||||
where: {
|
||||
email: normalizedEmail,
|
||||
NOT: { id: ctx.user.id },
|
||||
},
|
||||
select: { id: true },
|
||||
})
|
||||
|
||||
if (existing) {
|
||||
throw new TRPCError({
|
||||
code: 'CONFLICT',
|
||||
message: 'Another account already uses this email address',
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// If bio is provided, merge it into metadataJson
|
||||
let metadataJson: Prisma.InputJsonValue | undefined
|
||||
@@ -115,6 +143,7 @@ export const userRouter = router({
|
||||
where: { id: ctx.user.id },
|
||||
data: {
|
||||
...directFields,
|
||||
...(normalizedEmail !== undefined && { email: normalizedEmail }),
|
||||
...(metadataJson !== undefined && { metadataJson }),
|
||||
...(expertiseTags !== undefined && { expertiseTags }),
|
||||
...(digestFrequency !== undefined && { digestFrequency }),
|
||||
@@ -258,6 +287,46 @@ export const userRouter = router({
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* List all invitable user IDs for current filters (not paginated)
|
||||
*/
|
||||
listInvitableIds: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
role: z.enum(['SUPER_ADMIN', 'PROGRAM_ADMIN', 'AWARD_MASTER', 'JURY_MEMBER', 'MENTOR', 'OBSERVER']).optional(),
|
||||
roles: z.array(z.enum(['SUPER_ADMIN', 'PROGRAM_ADMIN', 'AWARD_MASTER', 'JURY_MEMBER', 'MENTOR', 'OBSERVER'])).optional(),
|
||||
search: z.string().optional(),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
const where: Record<string, unknown> = {
|
||||
status: { in: ['NONE', 'INVITED'] },
|
||||
}
|
||||
|
||||
if (input.roles && input.roles.length > 0) {
|
||||
where.role = { in: input.roles }
|
||||
} else if (input.role) {
|
||||
where.role = input.role
|
||||
}
|
||||
|
||||
if (input.search) {
|
||||
where.OR = [
|
||||
{ email: { contains: input.search, mode: 'insensitive' } },
|
||||
{ name: { contains: input.search, mode: 'insensitive' } },
|
||||
]
|
||||
}
|
||||
|
||||
const users = await ctx.prisma.user.findMany({
|
||||
where,
|
||||
select: { id: true },
|
||||
})
|
||||
|
||||
return {
|
||||
userIds: users.map((u) => u.id),
|
||||
total: users.length,
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get a single user (admin only)
|
||||
*/
|
||||
@@ -347,6 +416,7 @@ export const userRouter = router({
|
||||
.input(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
email: z.string().email().optional(),
|
||||
name: z.string().optional().nullable(),
|
||||
role: z.enum(['SUPER_ADMIN', 'PROGRAM_ADMIN', 'AWARD_MASTER', 'JURY_MEMBER', 'MENTOR', 'OBSERVER']).optional(),
|
||||
status: z.enum(['NONE', 'INVITED', 'ACTIVE', 'SUSPENDED']).optional(),
|
||||
@@ -358,6 +428,7 @@ export const userRouter = router({
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const { id, ...data } = input
|
||||
const normalizedEmail = data.email?.toLowerCase().trim()
|
||||
|
||||
// Prevent changing super admin role
|
||||
const targetUser = await ctx.prisma.user.findUniqueOrThrow({
|
||||
@@ -393,10 +464,32 @@ export const userRouter = router({
|
||||
})
|
||||
}
|
||||
|
||||
if (normalizedEmail !== undefined) {
|
||||
const existing = await ctx.prisma.user.findFirst({
|
||||
where: {
|
||||
email: normalizedEmail,
|
||||
NOT: { id },
|
||||
},
|
||||
select: { id: true },
|
||||
})
|
||||
|
||||
if (existing) {
|
||||
throw new TRPCError({
|
||||
code: 'CONFLICT',
|
||||
message: 'Another user already uses this email address',
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const updateData = {
|
||||
...data,
|
||||
...(normalizedEmail !== undefined && { email: normalizedEmail }),
|
||||
}
|
||||
|
||||
const user = await ctx.prisma.$transaction(async (tx) => {
|
||||
const updated = await tx.user.update({
|
||||
where: { id },
|
||||
data,
|
||||
data: updateData,
|
||||
})
|
||||
|
||||
await logAudit({
|
||||
@@ -405,7 +498,7 @@ export const userRouter = router({
|
||||
action: 'UPDATE',
|
||||
entityType: 'User',
|
||||
entityId: id,
|
||||
detailsJson: data,
|
||||
detailsJson: updateData,
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
@@ -1,306 +1,306 @@
|
||||
import { z } from 'zod'
|
||||
import { router, superAdminProcedure } from '../trpc'
|
||||
import { logAudit } from '@/server/utils/audit'
|
||||
import {
|
||||
generateWebhookSecret,
|
||||
deliverWebhook,
|
||||
} from '@/server/services/webhook-dispatcher'
|
||||
|
||||
export const WEBHOOK_EVENTS = [
|
||||
'evaluation.submitted',
|
||||
'evaluation.updated',
|
||||
'project.created',
|
||||
'project.statusChanged',
|
||||
'round.activated',
|
||||
'round.closed',
|
||||
'stage.activated',
|
||||
'stage.closed',
|
||||
'assignment.created',
|
||||
'assignment.completed',
|
||||
'user.invited',
|
||||
'user.activated',
|
||||
] as const
|
||||
|
||||
export const webhookRouter = router({
|
||||
/**
|
||||
* List all webhooks with delivery stats.
|
||||
*/
|
||||
list: superAdminProcedure.query(async ({ ctx }) => {
|
||||
const webhooks = await ctx.prisma.webhook.findMany({
|
||||
include: {
|
||||
_count: {
|
||||
select: { deliveries: true },
|
||||
},
|
||||
createdBy: {
|
||||
select: { id: true, name: true, email: true },
|
||||
},
|
||||
},
|
||||
orderBy: { createdAt: 'desc' },
|
||||
})
|
||||
|
||||
// Compute recent delivery stats for each webhook
|
||||
const now = new Date()
|
||||
const twentyFourHoursAgo = new Date(now.getTime() - 24 * 60 * 60 * 1000)
|
||||
|
||||
const stats = await Promise.all(
|
||||
webhooks.map(async (wh) => {
|
||||
const [delivered, failed] = await Promise.all([
|
||||
ctx.prisma.webhookDelivery.count({
|
||||
where: {
|
||||
webhookId: wh.id,
|
||||
status: 'DELIVERED',
|
||||
createdAt: { gte: twentyFourHoursAgo },
|
||||
},
|
||||
}),
|
||||
ctx.prisma.webhookDelivery.count({
|
||||
where: {
|
||||
webhookId: wh.id,
|
||||
status: 'FAILED',
|
||||
createdAt: { gte: twentyFourHoursAgo },
|
||||
},
|
||||
}),
|
||||
])
|
||||
return {
|
||||
...wh,
|
||||
recentDelivered: delivered,
|
||||
recentFailed: failed,
|
||||
}
|
||||
})
|
||||
)
|
||||
|
||||
return stats
|
||||
}),
|
||||
|
||||
/**
|
||||
* Create a new webhook.
|
||||
*/
|
||||
create: superAdminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
name: z.string().min(1).max(200),
|
||||
url: z.string().url(),
|
||||
events: z.array(z.string()).min(1),
|
||||
headers: z.any().optional(),
|
||||
maxRetries: z.number().int().min(0).max(10).default(3),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const secret = generateWebhookSecret()
|
||||
|
||||
const webhook = await ctx.prisma.webhook.create({
|
||||
data: {
|
||||
name: input.name,
|
||||
url: input.url,
|
||||
secret,
|
||||
events: input.events,
|
||||
headers: input.headers ?? undefined,
|
||||
maxRetries: input.maxRetries,
|
||||
createdById: ctx.user.id,
|
||||
},
|
||||
})
|
||||
|
||||
try {
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'CREATE_WEBHOOK',
|
||||
entityType: 'Webhook',
|
||||
entityId: webhook.id,
|
||||
detailsJson: { name: input.name, url: input.url, events: input.events },
|
||||
})
|
||||
} catch {}
|
||||
|
||||
return webhook
|
||||
}),
|
||||
|
||||
/**
|
||||
* Update a webhook.
|
||||
*/
|
||||
update: superAdminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
name: z.string().min(1).max(200).optional(),
|
||||
url: z.string().url().optional(),
|
||||
events: z.array(z.string()).min(1).optional(),
|
||||
headers: z.any().optional(),
|
||||
isActive: z.boolean().optional(),
|
||||
maxRetries: z.number().int().min(0).max(10).optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const { id, ...data } = input
|
||||
|
||||
const webhook = await ctx.prisma.webhook.update({
|
||||
where: { id },
|
||||
data: {
|
||||
...(data.name !== undefined ? { name: data.name } : {}),
|
||||
...(data.url !== undefined ? { url: data.url } : {}),
|
||||
...(data.events !== undefined ? { events: data.events } : {}),
|
||||
...(data.headers !== undefined ? { headers: data.headers } : {}),
|
||||
...(data.isActive !== undefined ? { isActive: data.isActive } : {}),
|
||||
...(data.maxRetries !== undefined ? { maxRetries: data.maxRetries } : {}),
|
||||
},
|
||||
})
|
||||
|
||||
try {
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'UPDATE_WEBHOOK',
|
||||
entityType: 'Webhook',
|
||||
entityId: id,
|
||||
detailsJson: { updatedFields: Object.keys(data) },
|
||||
})
|
||||
} catch {}
|
||||
|
||||
return webhook
|
||||
}),
|
||||
|
||||
/**
|
||||
* Delete a webhook and its delivery history.
|
||||
*/
|
||||
delete: superAdminProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
// Cascade delete is defined in schema, so just delete the webhook
|
||||
await ctx.prisma.webhook.delete({
|
||||
where: { id: input.id },
|
||||
})
|
||||
|
||||
try {
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'DELETE_WEBHOOK',
|
||||
entityType: 'Webhook',
|
||||
entityId: input.id,
|
||||
})
|
||||
} catch {}
|
||||
|
||||
return { success: true }
|
||||
}),
|
||||
|
||||
  /**
   * Send a test payload to a webhook.
   *
   * Creates a PENDING delivery row carrying a synthetic `test` event,
   * dispatches it via `deliverWebhook`, then re-reads the delivery row so
   * the caller sees the status/attempts written by the dispatcher.
   */
  test: superAdminProcedure
    .input(z.object({ id: z.string() }))
    .mutation(async ({ ctx, input }) => {
      const webhook = await ctx.prisma.webhook.findUnique({
        where: { id: input.id },
      })

      if (!webhook) {
        throw new Error('Webhook not found')
      }

      // Synthetic payload; references only the webhook itself, no user data.
      const testPayload = {
        event: 'test',
        timestamp: new Date().toISOString(),
        data: {
          message: 'This is a test webhook delivery from MOPC Platform.',
          webhookId: webhook.id,
          webhookName: webhook.name,
        },
      }

      // Persist the delivery first so the dispatcher has a row to track
      // attempts and status against.
      const delivery = await ctx.prisma.webhookDelivery.create({
        data: {
          webhookId: webhook.id,
          event: 'test',
          payload: testPayload,
          status: 'PENDING',
          attempts: 0,
        },
      })

      await deliverWebhook(delivery.id)

      // Fetch updated delivery to get the result
      const result = await ctx.prisma.webhookDelivery.findUnique({
        where: { id: delivery.id },
      })

      // Best-effort audit logging; never fail the mutation over it.
      try {
        await logAudit({
          prisma: ctx.prisma,
          userId: ctx.user.id,
          action: 'TEST_WEBHOOK',
          entityType: 'Webhook',
          entityId: input.id,
          detailsJson: { deliveryStatus: result?.status },
        })
      } catch {}

      return result
    }),
|
||||
|
||||
/**
|
||||
* Get paginated delivery log for a webhook.
|
||||
*/
|
||||
getDeliveryLog: superAdminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
webhookId: z.string(),
|
||||
page: z.number().int().min(1).default(1),
|
||||
pageSize: z.number().int().min(1).max(100).default(20),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
const skip = (input.page - 1) * input.pageSize
|
||||
|
||||
const [items, total] = await Promise.all([
|
||||
ctx.prisma.webhookDelivery.findMany({
|
||||
where: { webhookId: input.webhookId },
|
||||
orderBy: { createdAt: 'desc' },
|
||||
skip,
|
||||
take: input.pageSize,
|
||||
}),
|
||||
ctx.prisma.webhookDelivery.count({
|
||||
where: { webhookId: input.webhookId },
|
||||
}),
|
||||
])
|
||||
|
||||
return {
|
||||
items,
|
||||
total,
|
||||
page: input.page,
|
||||
pageSize: input.pageSize,
|
||||
totalPages: Math.ceil(total / input.pageSize),
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Regenerate the HMAC secret for a webhook.
|
||||
*/
|
||||
regenerateSecret: superAdminProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const newSecret = generateWebhookSecret()
|
||||
|
||||
const webhook = await ctx.prisma.webhook.update({
|
||||
where: { id: input.id },
|
||||
data: { secret: newSecret },
|
||||
})
|
||||
|
||||
try {
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'REGENERATE_WEBHOOK_SECRET',
|
||||
entityType: 'Webhook',
|
||||
entityId: input.id,
|
||||
})
|
||||
} catch {}
|
||||
|
||||
return webhook
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get available webhook events.
|
||||
*/
|
||||
getAvailableEvents: superAdminProcedure.query(() => {
|
||||
return WEBHOOK_EVENTS
|
||||
}),
|
||||
})
|
||||
import { z } from 'zod'
|
||||
import { router, superAdminProcedure } from '../trpc'
|
||||
import { logAudit } from '@/server/utils/audit'
|
||||
import {
|
||||
generateWebhookSecret,
|
||||
deliverWebhook,
|
||||
} from '@/server/services/webhook-dispatcher'
|
||||
|
||||
/**
 * Canonical list of event names a webhook may subscribe to.
 * `as const` preserves the literal types so a union type can be derived.
 * NOTE(review): the create/update inputs validate `events` only as
 * `z.array(z.string())` — presumably values are expected to come from this
 * list; confirm before tightening validation.
 */
export const WEBHOOK_EVENTS = [
  'evaluation.submitted',
  'evaluation.updated',
  'project.created',
  'project.statusChanged',
  'round.activated',
  'round.closed',
  'stage.activated',
  'stage.closed',
  'assignment.created',
  'assignment.completed',
  'user.invited',
  'user.activated',
] as const
|
||||
|
||||
export const webhookRouter = router({
|
||||
/**
|
||||
* List all webhooks with delivery stats.
|
||||
*/
|
||||
list: superAdminProcedure.query(async ({ ctx }) => {
|
||||
const webhooks = await ctx.prisma.webhook.findMany({
|
||||
include: {
|
||||
_count: {
|
||||
select: { deliveries: true },
|
||||
},
|
||||
createdBy: {
|
||||
select: { id: true, name: true, email: true },
|
||||
},
|
||||
},
|
||||
orderBy: { createdAt: 'desc' },
|
||||
})
|
||||
|
||||
// Compute recent delivery stats for each webhook
|
||||
const now = new Date()
|
||||
const twentyFourHoursAgo = new Date(now.getTime() - 24 * 60 * 60 * 1000)
|
||||
|
||||
const stats = await Promise.all(
|
||||
webhooks.map(async (wh) => {
|
||||
const [delivered, failed] = await Promise.all([
|
||||
ctx.prisma.webhookDelivery.count({
|
||||
where: {
|
||||
webhookId: wh.id,
|
||||
status: 'DELIVERED',
|
||||
createdAt: { gte: twentyFourHoursAgo },
|
||||
},
|
||||
}),
|
||||
ctx.prisma.webhookDelivery.count({
|
||||
where: {
|
||||
webhookId: wh.id,
|
||||
status: 'FAILED',
|
||||
createdAt: { gte: twentyFourHoursAgo },
|
||||
},
|
||||
}),
|
||||
])
|
||||
return {
|
||||
...wh,
|
||||
recentDelivered: delivered,
|
||||
recentFailed: failed,
|
||||
}
|
||||
})
|
||||
)
|
||||
|
||||
return stats
|
||||
}),
|
||||
|
||||
/**
|
||||
* Create a new webhook.
|
||||
*/
|
||||
create: superAdminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
name: z.string().min(1).max(200),
|
||||
url: z.string().url(),
|
||||
events: z.array(z.string()).min(1),
|
||||
headers: z.any().optional(),
|
||||
maxRetries: z.number().int().min(0).max(10).default(3),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const secret = generateWebhookSecret()
|
||||
|
||||
const webhook = await ctx.prisma.webhook.create({
|
||||
data: {
|
||||
name: input.name,
|
||||
url: input.url,
|
||||
secret,
|
||||
events: input.events,
|
||||
headers: input.headers ?? undefined,
|
||||
maxRetries: input.maxRetries,
|
||||
createdById: ctx.user.id,
|
||||
},
|
||||
})
|
||||
|
||||
try {
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'CREATE_WEBHOOK',
|
||||
entityType: 'Webhook',
|
||||
entityId: webhook.id,
|
||||
detailsJson: { name: input.name, url: input.url, events: input.events },
|
||||
})
|
||||
} catch {}
|
||||
|
||||
return webhook
|
||||
}),
|
||||
|
||||
/**
|
||||
* Update a webhook.
|
||||
*/
|
||||
update: superAdminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
name: z.string().min(1).max(200).optional(),
|
||||
url: z.string().url().optional(),
|
||||
events: z.array(z.string()).min(1).optional(),
|
||||
headers: z.any().optional(),
|
||||
isActive: z.boolean().optional(),
|
||||
maxRetries: z.number().int().min(0).max(10).optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const { id, ...data } = input
|
||||
|
||||
const webhook = await ctx.prisma.webhook.update({
|
||||
where: { id },
|
||||
data: {
|
||||
...(data.name !== undefined ? { name: data.name } : {}),
|
||||
...(data.url !== undefined ? { url: data.url } : {}),
|
||||
...(data.events !== undefined ? { events: data.events } : {}),
|
||||
...(data.headers !== undefined ? { headers: data.headers } : {}),
|
||||
...(data.isActive !== undefined ? { isActive: data.isActive } : {}),
|
||||
...(data.maxRetries !== undefined ? { maxRetries: data.maxRetries } : {}),
|
||||
},
|
||||
})
|
||||
|
||||
try {
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'UPDATE_WEBHOOK',
|
||||
entityType: 'Webhook',
|
||||
entityId: id,
|
||||
detailsJson: { updatedFields: Object.keys(data) },
|
||||
})
|
||||
} catch {}
|
||||
|
||||
return webhook
|
||||
}),
|
||||
|
||||
/**
|
||||
* Delete a webhook and its delivery history.
|
||||
*/
|
||||
delete: superAdminProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
// Cascade delete is defined in schema, so just delete the webhook
|
||||
await ctx.prisma.webhook.delete({
|
||||
where: { id: input.id },
|
||||
})
|
||||
|
||||
try {
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'DELETE_WEBHOOK',
|
||||
entityType: 'Webhook',
|
||||
entityId: input.id,
|
||||
})
|
||||
} catch {}
|
||||
|
||||
return { success: true }
|
||||
}),
|
||||
|
||||
/**
|
||||
* Send a test payload to a webhook.
|
||||
*/
|
||||
test: superAdminProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const webhook = await ctx.prisma.webhook.findUnique({
|
||||
where: { id: input.id },
|
||||
})
|
||||
|
||||
if (!webhook) {
|
||||
throw new Error('Webhook not found')
|
||||
}
|
||||
|
||||
const testPayload = {
|
||||
event: 'test',
|
||||
timestamp: new Date().toISOString(),
|
||||
data: {
|
||||
message: 'This is a test webhook delivery from MOPC Platform.',
|
||||
webhookId: webhook.id,
|
||||
webhookName: webhook.name,
|
||||
},
|
||||
}
|
||||
|
||||
const delivery = await ctx.prisma.webhookDelivery.create({
|
||||
data: {
|
||||
webhookId: webhook.id,
|
||||
event: 'test',
|
||||
payload: testPayload,
|
||||
status: 'PENDING',
|
||||
attempts: 0,
|
||||
},
|
||||
})
|
||||
|
||||
await deliverWebhook(delivery.id)
|
||||
|
||||
// Fetch updated delivery to get the result
|
||||
const result = await ctx.prisma.webhookDelivery.findUnique({
|
||||
where: { id: delivery.id },
|
||||
})
|
||||
|
||||
try {
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'TEST_WEBHOOK',
|
||||
entityType: 'Webhook',
|
||||
entityId: input.id,
|
||||
detailsJson: { deliveryStatus: result?.status },
|
||||
})
|
||||
} catch {}
|
||||
|
||||
return result
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get paginated delivery log for a webhook.
|
||||
*/
|
||||
getDeliveryLog: superAdminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
webhookId: z.string(),
|
||||
page: z.number().int().min(1).default(1),
|
||||
pageSize: z.number().int().min(1).max(100).default(20),
|
||||
})
|
||||
)
|
||||
.query(async ({ ctx, input }) => {
|
||||
const skip = (input.page - 1) * input.pageSize
|
||||
|
||||
const [items, total] = await Promise.all([
|
||||
ctx.prisma.webhookDelivery.findMany({
|
||||
where: { webhookId: input.webhookId },
|
||||
orderBy: { createdAt: 'desc' },
|
||||
skip,
|
||||
take: input.pageSize,
|
||||
}),
|
||||
ctx.prisma.webhookDelivery.count({
|
||||
where: { webhookId: input.webhookId },
|
||||
}),
|
||||
])
|
||||
|
||||
return {
|
||||
items,
|
||||
total,
|
||||
page: input.page,
|
||||
pageSize: input.pageSize,
|
||||
totalPages: Math.ceil(total / input.pageSize),
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Regenerate the HMAC secret for a webhook.
|
||||
*/
|
||||
regenerateSecret: superAdminProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const newSecret = generateWebhookSecret()
|
||||
|
||||
const webhook = await ctx.prisma.webhook.update({
|
||||
where: { id: input.id },
|
||||
data: { secret: newSecret },
|
||||
})
|
||||
|
||||
try {
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'REGENERATE_WEBHOOK_SECRET',
|
||||
entityType: 'Webhook',
|
||||
entityId: input.id,
|
||||
})
|
||||
} catch {}
|
||||
|
||||
return webhook
|
||||
}),
|
||||
|
||||
/**
|
||||
* Get available webhook events.
|
||||
*/
|
||||
getAvailableEvents: superAdminProcedure.query(() => {
|
||||
return WEBHOOK_EVENTS
|
||||
}),
|
||||
})
|
||||
|
||||
@@ -1,76 +1,76 @@
|
||||
import { z } from 'zod'
|
||||
import type { Prisma } from '@prisma/client'
|
||||
import { router, adminProcedure } from '../trpc'
|
||||
import { wizardConfigSchema } from '@/types/wizard-config'
|
||||
import { logAudit } from '../utils/audit'
|
||||
|
||||
/**
 * tRPC router for wizard templates (admin only).
 * Templates are either global or scoped to a single program; create and
 * delete both write audit entries with the request IP / user agent.
 */
export const wizardTemplateRouter = router({
  /**
   * List templates visible in a program context: all global templates,
   * plus templates scoped to `programId` when one is provided.
   */
  list: adminProcedure
    .input(z.object({ programId: z.string().optional() }).optional())
    .query(async ({ ctx, input }) => {
      return ctx.prisma.wizardTemplate.findMany({
        where: {
          OR: [
            { isGlobal: true },
            // Only add the program filter when a programId was supplied.
            ...(input?.programId ? [{ programId: input.programId }] : []),
          ],
        },
        orderBy: { createdAt: 'desc' },
        include: { creator: { select: { name: true } } },
      })
    }),

  /**
   * Create a template from a validated wizard config and audit the action.
   */
  create: adminProcedure
    .input(
      z.object({
        name: z.string().min(1).max(100),
        description: z.string().max(500).optional(),
        config: wizardConfigSchema,
        isGlobal: z.boolean().default(false),
        programId: z.string().optional(),
      })
    )
    .mutation(async ({ ctx, input }) => {
      const template = await ctx.prisma.wizardTemplate.create({
        data: {
          name: input.name,
          description: input.description,
          // Prisma JSON column: cast the zod-validated config to InputJsonValue.
          config: input.config as unknown as Prisma.InputJsonValue,
          isGlobal: input.isGlobal,
          programId: input.programId,
          createdBy: ctx.user.id,
        },
      })

      await logAudit({
        prisma: ctx.prisma,
        userId: ctx.user.id,
        action: 'CREATE',
        entityType: 'WizardTemplate',
        entityId: template.id,
        detailsJson: { name: input.name },
        ipAddress: ctx.ip,
        userAgent: ctx.userAgent,
      })

      return template
    }),

  /**
   * Delete a template by id and audit the action.
   */
  delete: adminProcedure
    .input(z.object({ id: z.string() }))
    .mutation(async ({ ctx, input }) => {
      await ctx.prisma.wizardTemplate.delete({ where: { id: input.id } })

      await logAudit({
        prisma: ctx.prisma,
        userId: ctx.user.id,
        action: 'DELETE',
        entityType: 'WizardTemplate',
        entityId: input.id,
        ipAddress: ctx.ip,
        userAgent: ctx.userAgent,
      })

      return { success: true }
    }),
})
|
||||
import { z } from 'zod'
|
||||
import type { Prisma } from '@prisma/client'
|
||||
import { router, adminProcedure } from '../trpc'
|
||||
import { wizardConfigSchema } from '@/types/wizard-config'
|
||||
import { logAudit } from '../utils/audit'
|
||||
|
||||
export const wizardTemplateRouter = router({
|
||||
list: adminProcedure
|
||||
.input(z.object({ programId: z.string().optional() }).optional())
|
||||
.query(async ({ ctx, input }) => {
|
||||
return ctx.prisma.wizardTemplate.findMany({
|
||||
where: {
|
||||
OR: [
|
||||
{ isGlobal: true },
|
||||
...(input?.programId ? [{ programId: input.programId }] : []),
|
||||
],
|
||||
},
|
||||
orderBy: { createdAt: 'desc' },
|
||||
include: { creator: { select: { name: true } } },
|
||||
})
|
||||
}),
|
||||
|
||||
create: adminProcedure
|
||||
.input(
|
||||
z.object({
|
||||
name: z.string().min(1).max(100),
|
||||
description: z.string().max(500).optional(),
|
||||
config: wizardConfigSchema,
|
||||
isGlobal: z.boolean().default(false),
|
||||
programId: z.string().optional(),
|
||||
})
|
||||
)
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const template = await ctx.prisma.wizardTemplate.create({
|
||||
data: {
|
||||
name: input.name,
|
||||
description: input.description,
|
||||
config: input.config as unknown as Prisma.InputJsonValue,
|
||||
isGlobal: input.isGlobal,
|
||||
programId: input.programId,
|
||||
createdBy: ctx.user.id,
|
||||
},
|
||||
})
|
||||
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'CREATE',
|
||||
entityType: 'WizardTemplate',
|
||||
entityId: template.id,
|
||||
detailsJson: { name: input.name },
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return template
|
||||
}),
|
||||
|
||||
delete: adminProcedure
|
||||
.input(z.object({ id: z.string() }))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
await ctx.prisma.wizardTemplate.delete({ where: { id: input.id } })
|
||||
|
||||
await logAudit({
|
||||
prisma: ctx.prisma,
|
||||
userId: ctx.user.id,
|
||||
action: 'DELETE',
|
||||
entityType: 'WizardTemplate',
|
||||
entityId: input.id,
|
||||
ipAddress: ctx.ip,
|
||||
userAgent: ctx.userAgent,
|
||||
})
|
||||
|
||||
return { success: true }
|
||||
}),
|
||||
})
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,342 +1,342 @@
|
||||
/**
|
||||
* AI-Powered Award Eligibility Service
|
||||
*
|
||||
* Determines project eligibility for special awards using:
|
||||
* - Deterministic field matching (tags, country, category)
|
||||
* - AI interpretation of plain-language criteria
|
||||
*
|
||||
* GDPR Compliance:
|
||||
* - All project data is anonymized before AI processing
|
||||
* - IDs replaced with sequential identifiers
|
||||
* - No personal information sent to OpenAI
|
||||
*/
|
||||
|
||||
import { getOpenAI, getConfiguredModel, buildCompletionParams } from '@/lib/openai'
|
||||
import { logAIUsage, extractTokenUsage } from '@/server/utils/ai-usage'
|
||||
import { classifyAIError, createParseError, logAIError } from './ai-errors'
|
||||
import {
|
||||
anonymizeProjectsForAI,
|
||||
validateAnonymizedProjects,
|
||||
toProjectWithRelations,
|
||||
type AnonymizedProjectForAI,
|
||||
type ProjectAIMapping,
|
||||
} from './anonymization'
|
||||
import type { SubmissionSource } from '@prisma/client'
|
||||
|
||||
// ─── Constants ───────────────────────────────────────────────────────────────
|
||||
|
||||
// Number of projects sent to the model per request; bounds prompt size and
// limits the blast radius of a single failed request.
const BATCH_SIZE = 20

// Optimized system prompt — kept terse to minimize prompt tokens.
const AI_ELIGIBILITY_SYSTEM_PROMPT = `Award eligibility evaluator. Evaluate projects against criteria, return JSON.
Format: {"evaluations": [{project_id, eligible: bool, confidence: 0-1, reasoning: str}]}
Be objective. Base evaluation only on provided data. No personal identifiers in reasoning.`
|
||||
|
||||
// ─── Types ──────────────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * One deterministic matching rule used by `applyAutoTagRules`.
 * All string comparisons are case-insensitive.
 */
export type AutoTagRule = {
  // Project field the rule inspects (see getFieldValue).
  field: 'competitionCategory' | 'country' | 'geographicZone' | 'tags' | 'oceanIssue'
  // 'equals' — exact match; 'contains' — substring match; 'in' — membership
  // of the field value in a list (value must be string[] for 'in').
  operator: 'equals' | 'contains' | 'in'
  value: string | string[]
}

/** Outcome of an eligibility check for a single project. */
export interface EligibilityResult {
  projectId: string
  eligible: boolean
  // For AI results: model-reported confidence in [0, 1] (per the system
  // prompt format). NOTE(review): semantics for AUTO results are set by
  // callers — confirm against usage.
  confidence: number
  reasoning: string
  // AUTO = deterministic rule match, AI = model interpretation of criteria.
  method: 'AUTO' | 'AI'
}

/**
 * Minimal projection of a project used for eligibility evaluation.
 * Optional/nullable fields may be absent depending on the caller's query.
 */
interface ProjectForEligibility {
  id: string
  title: string
  description?: string | null
  competitionCategory?: string | null
  country?: string | null
  geographicZone?: string | null
  tags: string[]
  oceanIssue?: string | null
  institution?: string | null
  foundedAt?: Date | null
  wantsMentorship?: boolean
  submissionSource?: SubmissionSource
  submittedAt?: Date | null
  // Prisma relation counts, when included by the caller.
  _count?: {
    teamMembers?: number
    files?: number
  }
  files?: Array<{ fileType: string | null }>
}
|
||||
|
||||
// ─── Auto Tag Rules ─────────────────────────────────────────────────────────
|
||||
|
||||
export function applyAutoTagRules(
|
||||
rules: AutoTagRule[],
|
||||
projects: ProjectForEligibility[]
|
||||
): Map<string, boolean> {
|
||||
const results = new Map<string, boolean>()
|
||||
|
||||
for (const project of projects) {
|
||||
const matches = rules.every((rule) => {
|
||||
const fieldValue = getFieldValue(project, rule.field)
|
||||
|
||||
switch (rule.operator) {
|
||||
case 'equals':
|
||||
return String(fieldValue).toLowerCase() === String(rule.value).toLowerCase()
|
||||
case 'contains':
|
||||
if (Array.isArray(fieldValue)) {
|
||||
return fieldValue.some((v) =>
|
||||
String(v).toLowerCase().includes(String(rule.value).toLowerCase())
|
||||
)
|
||||
}
|
||||
return String(fieldValue || '').toLowerCase().includes(String(rule.value).toLowerCase())
|
||||
case 'in':
|
||||
if (Array.isArray(rule.value)) {
|
||||
return rule.value.some((v) =>
|
||||
String(v).toLowerCase() === String(fieldValue).toLowerCase()
|
||||
)
|
||||
}
|
||||
return false
|
||||
default:
|
||||
return false
|
||||
}
|
||||
})
|
||||
|
||||
results.set(project.id, matches)
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
function getFieldValue(
|
||||
project: ProjectForEligibility,
|
||||
field: AutoTagRule['field']
|
||||
): unknown {
|
||||
switch (field) {
|
||||
case 'competitionCategory':
|
||||
return project.competitionCategory
|
||||
case 'country':
|
||||
return project.country
|
||||
case 'geographicZone':
|
||||
return project.geographicZone
|
||||
case 'tags':
|
||||
return project.tags
|
||||
case 'oceanIssue':
|
||||
return project.oceanIssue
|
||||
default:
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
// ─── AI Criteria Interpretation ─────────────────────────────────────────────
|
||||
|
||||
/**
 * Process one batch of anonymized projects through the AI model.
 *
 * Sends the criteria plus the anonymized batch, parses the JSON response,
 * and maps anonymous ids back to real project ids via `mappings`.
 *
 * Error handling: a JSON parse failure (SyntaxError) is absorbed — the
 * failure is logged and every project in the batch is returned as
 * ineligible / "requires manual review". Any other error is rethrown to
 * the caller. Note that the SUCCESS usage row is written before parsing,
 * so a parse failure produces both a SUCCESS and an ERROR usage row.
 *
 * @param openai       Initialized OpenAI client.
 * @param model        Model identifier to use.
 * @param criteriaText Plain-language award criteria.
 * @param anonymized   Anonymized batch (no PII).
 * @param mappings     Anonymous-id → real-id mapping for this batch.
 * @param userId       Optional user id for usage logging.
 * @param entityId     Optional award id for usage logging.
 * @returns Eligibility results for the batch plus total tokens consumed.
 */
async function processEligibilityBatch(
  openai: NonNullable<Awaited<ReturnType<typeof getOpenAI>>>,
  model: string,
  criteriaText: string,
  anonymized: AnonymizedProjectForAI[],
  mappings: ProjectAIMapping[],
  userId?: string,
  entityId?: string
): Promise<{
  results: EligibilityResult[]
  tokensUsed: number
}> {
  const results: EligibilityResult[] = []
  let tokensUsed = 0

  const userPrompt = `CRITERIA: ${criteriaText}
PROJECTS: ${JSON.stringify(anonymized)}
Evaluate eligibility for each project.`

  try {
    const params = buildCompletionParams(model, {
      messages: [
        { role: 'system', content: AI_ELIGIBILITY_SYSTEM_PROMPT },
        { role: 'user', content: userPrompt },
      ],
      jsonMode: true,
      temperature: 0.3,
      maxTokens: 4000,
    })

    const response = await openai.chat.completions.create(params)
    const usage = extractTokenUsage(response)
    tokensUsed = usage.totalTokens

    // Log usage (written before parsing — see function doc).
    await logAIUsage({
      userId,
      action: 'AWARD_ELIGIBILITY',
      entityType: 'Award',
      entityId,
      model,
      promptTokens: usage.promptTokens,
      completionTokens: usage.completionTokens,
      totalTokens: usage.totalTokens,
      batchSize: anonymized.length,
      itemsProcessed: anonymized.length,
      status: 'SUCCESS',
    })

    const content = response.choices[0]?.message?.content
    if (!content) {
      throw new Error('Empty response from AI')
    }

    // Shape dictated by AI_ELIGIBILITY_SYSTEM_PROMPT's JSON format.
    const parsed = JSON.parse(content) as {
      evaluations: Array<{
        project_id: string
        eligible: boolean
        confidence: number
        reasoning: string
      }>
    }

    // Map results back to real IDs; evaluations with an unknown anonymous
    // id are silently dropped.
    for (const eval_ of parsed.evaluations || []) {
      const mapping = mappings.find((m) => m.anonymousId === eval_.project_id)
      if (mapping) {
        results.push({
          projectId: mapping.realId,
          eligible: eval_.eligible,
          confidence: eval_.confidence,
          reasoning: eval_.reasoning,
          method: 'AI',
        })
      }
    }

  } catch (error) {
    if (error instanceof SyntaxError) {
      // Malformed JSON from the model: log, then degrade gracefully.
      const parseError = createParseError(error.message)
      logAIError('AwardEligibility', 'batch processing', parseError)

      await logAIUsage({
        userId,
        action: 'AWARD_ELIGIBILITY',
        entityType: 'Award',
        entityId,
        model,
        promptTokens: 0,
        completionTokens: 0,
        totalTokens: tokensUsed,
        batchSize: anonymized.length,
        itemsProcessed: 0,
        status: 'ERROR',
        errorMessage: parseError.message,
      })

      // Flag all for manual review
      for (const mapping of mappings) {
        results.push({
          projectId: mapping.realId,
          eligible: false,
          confidence: 0,
          reasoning: 'AI response parse error — requires manual review',
          method: 'AI',
        })
      }
    } else {
      // Non-parse errors (network, API, etc.) are the caller's problem.
      throw error
    }
  }

  return { results, tokensUsed }
}
|
||||
|
||||
/**
 * Interpret plain-language award criteria against a list of projects.
 *
 * Pipeline: anonymize projects (GDPR), validate no PII remains, then send
 * them to the model in batches of BATCH_SIZE via processEligibilityBatch.
 * Batches run sequentially, not in parallel.
 *
 * Degraded modes (never throws):
 * - OpenAI unconfigured → every project returned as "manual review".
 * - PII validation failure or any batch error → the failed attempt is
 *   logged and EVERY project is returned as an AI-error result.
 *   NOTE(review): the outer catch discards results from batches that had
 *   already completed successfully — confirm whether partial results
 *   should be kept instead.
 *
 * @param criteriaText Plain-language criteria to evaluate against.
 * @param projects     Projects to evaluate.
 * @param userId       Optional user id for usage logging.
 * @param awardId      Optional award id for usage logging.
 * @returns One EligibilityResult per project.
 */
export async function aiInterpretCriteria(
  criteriaText: string,
  projects: ProjectForEligibility[],
  userId?: string,
  awardId?: string
): Promise<EligibilityResult[]> {
  const results: EligibilityResult[] = []

  try {
    const openai = await getOpenAI()
    if (!openai) {
      console.warn('[AI Eligibility] OpenAI not configured')
      return projects.map((p) => ({
        projectId: p.id,
        eligible: false,
        confidence: 0,
        reasoning: 'AI unavailable — requires manual eligibility review',
        method: 'AI' as const,
      }))
    }

    const model = await getConfiguredModel()
    console.log(`[AI Eligibility] Using model: ${model} for ${projects.length} projects`)

    // Convert and anonymize projects
    const projectsWithRelations = projects.map(toProjectWithRelations)
    const { anonymized, mappings } = anonymizeProjectsForAI(projectsWithRelations, 'ELIGIBILITY')

    // Validate anonymization; throwing here is caught by the outer catch
    // and turns into "AI error" results for all projects.
    if (!validateAnonymizedProjects(anonymized)) {
      console.error('[AI Eligibility] Anonymization validation failed')
      throw new Error('GDPR compliance check failed: PII detected in anonymized data')
    }

    let totalTokens = 0

    // Process in batches; anonymized and mappings are index-aligned, so
    // slicing both with the same bounds keeps ids paired.
    for (let i = 0; i < anonymized.length; i += BATCH_SIZE) {
      const batchAnon = anonymized.slice(i, i + BATCH_SIZE)
      const batchMappings = mappings.slice(i, i + BATCH_SIZE)

      console.log(`[AI Eligibility] Processing batch ${Math.floor(i / BATCH_SIZE) + 1}/${Math.ceil(anonymized.length / BATCH_SIZE)}`)

      const { results: batchResults, tokensUsed } = await processEligibilityBatch(
        openai,
        model,
        criteriaText,
        batchAnon,
        batchMappings,
        userId,
        awardId
      )

      results.push(...batchResults)
      totalTokens += tokensUsed
    }

    console.log(`[AI Eligibility] Completed. Total tokens: ${totalTokens}`)

  } catch (error) {
    const classified = classifyAIError(error)
    logAIError('AwardEligibility', 'aiInterpretCriteria', classified)

    // Log failed attempt
    await logAIUsage({
      userId,
      action: 'AWARD_ELIGIBILITY',
      entityType: 'Award',
      entityId: awardId,
      model: 'unknown',
      promptTokens: 0,
      completionTokens: 0,
      totalTokens: 0,
      batchSize: projects.length,
      itemsProcessed: 0,
      status: 'ERROR',
      errorMessage: classified.message,
    })

    // Return all as needing manual review
    return projects.map((p) => ({
      projectId: p.id,
      eligible: false,
      confidence: 0,
      reasoning: `AI error: ${classified.message}`,
      method: 'AI' as const,
    }))
  }

  return results
}
|
||||
/**
|
||||
* AI-Powered Award Eligibility Service
|
||||
*
|
||||
* Determines project eligibility for special awards using:
|
||||
* - Deterministic field matching (tags, country, category)
|
||||
* - AI interpretation of plain-language criteria
|
||||
*
|
||||
* GDPR Compliance:
|
||||
* - All project data is anonymized before AI processing
|
||||
* - IDs replaced with sequential identifiers
|
||||
* - No personal information sent to OpenAI
|
||||
*/
|
||||
|
||||
import { getOpenAI, getConfiguredModel, buildCompletionParams } from '@/lib/openai'
|
||||
import { logAIUsage, extractTokenUsage } from '@/server/utils/ai-usage'
|
||||
import { classifyAIError, createParseError, logAIError } from './ai-errors'
|
||||
import {
|
||||
anonymizeProjectsForAI,
|
||||
validateAnonymizedProjects,
|
||||
toProjectWithRelations,
|
||||
type AnonymizedProjectForAI,
|
||||
type ProjectAIMapping,
|
||||
} from './anonymization'
|
||||
import type { SubmissionSource } from '@prisma/client'
|
||||
|
||||
// ─── Constants ───────────────────────────────────────────────────────────────
|
||||
|
||||
// Number of projects sent to the model per request; bounds prompt size and
// limits the blast radius of a single failed request.
const BATCH_SIZE = 20

// Optimized system prompt — kept terse to minimize prompt tokens.
const AI_ELIGIBILITY_SYSTEM_PROMPT = `Award eligibility evaluator. Evaluate projects against criteria, return JSON.
Format: {"evaluations": [{project_id, eligible: bool, confidence: 0-1, reasoning: str}]}
Be objective. Base evaluation only on provided data. No personal identifiers in reasoning.`
|
||||
|
||||
// ─── Types ──────────────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * One deterministic matching rule used by `applyAutoTagRules`.
 * All string comparisons are case-insensitive.
 */
export type AutoTagRule = {
  // Project field the rule inspects (see getFieldValue).
  field: 'competitionCategory' | 'country' | 'geographicZone' | 'tags' | 'oceanIssue'
  // 'equals' — exact match; 'contains' — substring match; 'in' — membership
  // of the field value in a list (value must be string[] for 'in').
  operator: 'equals' | 'contains' | 'in'
  value: string | string[]
}

/** Outcome of an eligibility check for a single project. */
export interface EligibilityResult {
  projectId: string
  eligible: boolean
  // For AI results: model-reported confidence in [0, 1] (per the system
  // prompt format). NOTE(review): semantics for AUTO results are set by
  // callers — confirm against usage.
  confidence: number
  reasoning: string
  // AUTO = deterministic rule match, AI = model interpretation of criteria.
  method: 'AUTO' | 'AI'
}

/**
 * Minimal projection of a project used for eligibility evaluation.
 * Optional/nullable fields may be absent depending on the caller's query.
 */
interface ProjectForEligibility {
  id: string
  title: string
  description?: string | null
  competitionCategory?: string | null
  country?: string | null
  geographicZone?: string | null
  tags: string[]
  oceanIssue?: string | null
  institution?: string | null
  foundedAt?: Date | null
  wantsMentorship?: boolean
  submissionSource?: SubmissionSource
  submittedAt?: Date | null
  // Prisma relation counts, when included by the caller.
  _count?: {
    teamMembers?: number
    files?: number
  }
  files?: Array<{ fileType: string | null }>
}
|
||||
|
||||
// ─── Auto Tag Rules ─────────────────────────────────────────────────────────
|
||||
|
||||
export function applyAutoTagRules(
|
||||
rules: AutoTagRule[],
|
||||
projects: ProjectForEligibility[]
|
||||
): Map<string, boolean> {
|
||||
const results = new Map<string, boolean>()
|
||||
|
||||
for (const project of projects) {
|
||||
const matches = rules.every((rule) => {
|
||||
const fieldValue = getFieldValue(project, rule.field)
|
||||
|
||||
switch (rule.operator) {
|
||||
case 'equals':
|
||||
return String(fieldValue).toLowerCase() === String(rule.value).toLowerCase()
|
||||
case 'contains':
|
||||
if (Array.isArray(fieldValue)) {
|
||||
return fieldValue.some((v) =>
|
||||
String(v).toLowerCase().includes(String(rule.value).toLowerCase())
|
||||
)
|
||||
}
|
||||
return String(fieldValue || '').toLowerCase().includes(String(rule.value).toLowerCase())
|
||||
case 'in':
|
||||
if (Array.isArray(rule.value)) {
|
||||
return rule.value.some((v) =>
|
||||
String(v).toLowerCase() === String(fieldValue).toLowerCase()
|
||||
)
|
||||
}
|
||||
return false
|
||||
default:
|
||||
return false
|
||||
}
|
||||
})
|
||||
|
||||
results.set(project.id, matches)
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
function getFieldValue(
|
||||
project: ProjectForEligibility,
|
||||
field: AutoTagRule['field']
|
||||
): unknown {
|
||||
switch (field) {
|
||||
case 'competitionCategory':
|
||||
return project.competitionCategory
|
||||
case 'country':
|
||||
return project.country
|
||||
case 'geographicZone':
|
||||
return project.geographicZone
|
||||
case 'tags':
|
||||
return project.tags
|
||||
case 'oceanIssue':
|
||||
return project.oceanIssue
|
||||
default:
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
// ─── AI Criteria Interpretation ─────────────────────────────────────────────
|
||||
|
||||
/**
 * Process a single batch of anonymized projects through AI eligibility
 * evaluation.
 *
 * Sends the criteria text plus the anonymized batch to OpenAI (JSON mode),
 * records token usage, then maps the model's per-project verdicts back to
 * real project ids via `mappings`.
 *
 * Error behavior: a JSON parse failure is logged and every project in the
 * batch is returned as ineligible (confidence 0) flagged for manual review;
 * any other error is re-thrown for the caller to handle.
 *
 * @param openai       Configured OpenAI client (non-null).
 * @param model        Model identifier for the completion.
 * @param criteriaText Award criteria as free text.
 * @param anonymized   GDPR-safe project payloads for this batch.
 * @param mappings     anonymousId -> realId mapping for this batch.
 * @param userId       Optional actor id for usage logging.
 * @param entityId     Optional award id for usage logging.
 * @returns Eligibility results for the batch plus tokens consumed.
 */
async function processEligibilityBatch(
  openai: NonNullable<Awaited<ReturnType<typeof getOpenAI>>>,
  model: string,
  criteriaText: string,
  anonymized: AnonymizedProjectForAI[],
  mappings: ProjectAIMapping[],
  userId?: string,
  entityId?: string
): Promise<{
  results: EligibilityResult[]
  tokensUsed: number
}> {
  const results: EligibilityResult[] = []
  let tokensUsed = 0

  // Template interior is intentionally unindented — it is the literal prompt text.
  const userPrompt = `CRITERIA: ${criteriaText}
PROJECTS: ${JSON.stringify(anonymized)}
Evaluate eligibility for each project.`

  try {
    const params = buildCompletionParams(model, {
      messages: [
        { role: 'system', content: AI_ELIGIBILITY_SYSTEM_PROMPT },
        { role: 'user', content: userPrompt },
      ],
      jsonMode: true,
      temperature: 0.3,
      maxTokens: 4000,
    })

    const response = await openai.chat.completions.create(params)
    const usage = extractTokenUsage(response)
    tokensUsed = usage.totalTokens

    // Log usage before parsing — tokens were consumed regardless of
    // whether the response turns out to be valid JSON.
    await logAIUsage({
      userId,
      action: 'AWARD_ELIGIBILITY',
      entityType: 'Award',
      entityId,
      model,
      promptTokens: usage.promptTokens,
      completionTokens: usage.completionTokens,
      totalTokens: usage.totalTokens,
      batchSize: anonymized.length,
      itemsProcessed: anonymized.length,
      status: 'SUCCESS',
    })

    const content = response.choices[0]?.message?.content
    if (!content) {
      throw new Error('Empty response from AI')
    }

    // Expected response shape under jsonMode.
    const parsed = JSON.parse(content) as {
      evaluations: Array<{
        project_id: string
        eligible: boolean
        confidence: number
        reasoning: string
      }>
    }

    // Map results back to real IDs; verdicts whose anonymous id has no
    // mapping are silently dropped.
    for (const eval_ of parsed.evaluations || []) {
      const mapping = mappings.find((m) => m.anonymousId === eval_.project_id)
      if (mapping) {
        results.push({
          projectId: mapping.realId,
          eligible: eval_.eligible,
          confidence: eval_.confidence,
          reasoning: eval_.reasoning,
          method: 'AI',
        })
      }
    }

  } catch (error) {
    if (error instanceof SyntaxError) {
      // Malformed JSON from the model: log the failed attempt, then flag
      // the entire batch for manual review instead of failing the run.
      const parseError = createParseError(error.message)
      logAIError('AwardEligibility', 'batch processing', parseError)

      await logAIUsage({
        userId,
        action: 'AWARD_ELIGIBILITY',
        entityType: 'Award',
        entityId,
        model,
        promptTokens: 0,
        completionTokens: 0,
        totalTokens: tokensUsed,
        batchSize: anonymized.length,
        itemsProcessed: 0,
        status: 'ERROR',
        errorMessage: parseError.message,
      })

      // Flag all for manual review
      for (const mapping of mappings) {
        results.push({
          projectId: mapping.realId,
          eligible: false,
          confidence: 0,
          reasoning: 'AI response parse error — requires manual review',
          method: 'AI',
        })
      }
    } else {
      // Non-parse failures (API/network/etc.) bubble up to the caller.
      throw error
    }
  }

  return { results, tokensUsed }
}
|
||||
|
||||
/**
 * Evaluate award eligibility for a list of projects against free-text
 * criteria using OpenAI.
 *
 * Projects are converted, anonymized, and the anonymization validated
 * (GDPR) before any data is sent to the model; work is then split into
 * batches of BATCH_SIZE.
 *
 * This function does not throw: if OpenAI is unavailable or any step
 * fails, every project is returned as ineligible with confidence 0 and a
 * reasoning string requesting manual review.
 *
 * @param criteriaText Award criteria as free text.
 * @param projects     Candidate projects to evaluate.
 * @param userId       Optional actor id for usage logging.
 * @param awardId      Optional award id for usage logging.
 */
export async function aiInterpretCriteria(
  criteriaText: string,
  projects: ProjectForEligibility[],
  userId?: string,
  awardId?: string
): Promise<EligibilityResult[]> {
  const results: EligibilityResult[] = []

  try {
    const openai = await getOpenAI()
    if (!openai) {
      // No API key configured: degrade gracefully to "manual review".
      console.warn('[AI Eligibility] OpenAI not configured')
      return projects.map((p) => ({
        projectId: p.id,
        eligible: false,
        confidence: 0,
        reasoning: 'AI unavailable — requires manual eligibility review',
        method: 'AI' as const,
      }))
    }

    const model = await getConfiguredModel()
    console.log(`[AI Eligibility] Using model: ${model} for ${projects.length} projects`)

    // Convert and anonymize projects
    const projectsWithRelations = projects.map(toProjectWithRelations)
    const { anonymized, mappings } = anonymizeProjectsForAI(projectsWithRelations, 'ELIGIBILITY')

    // Validate anonymization: abort the whole run rather than leak PII.
    if (!validateAnonymizedProjects(anonymized)) {
      console.error('[AI Eligibility] Anonymization validation failed')
      throw new Error('GDPR compliance check failed: PII detected in anonymized data')
    }

    let totalTokens = 0

    // Process in batches; anonymized[i] and mappings[i] are kept aligned
    // by slicing both arrays with the same bounds.
    for (let i = 0; i < anonymized.length; i += BATCH_SIZE) {
      const batchAnon = anonymized.slice(i, i + BATCH_SIZE)
      const batchMappings = mappings.slice(i, i + BATCH_SIZE)

      console.log(`[AI Eligibility] Processing batch ${Math.floor(i / BATCH_SIZE) + 1}/${Math.ceil(anonymized.length / BATCH_SIZE)}`)

      const { results: batchResults, tokensUsed } = await processEligibilityBatch(
        openai,
        model,
        criteriaText,
        batchAnon,
        batchMappings,
        userId,
        awardId
      )

      results.push(...batchResults)
      totalTokens += tokensUsed
    }

    console.log(`[AI Eligibility] Completed. Total tokens: ${totalTokens}`)

  } catch (error) {
    const classified = classifyAIError(error)
    logAIError('AwardEligibility', 'aiInterpretCriteria', classified)

    // Log failed attempt
    await logAIUsage({
      userId,
      action: 'AWARD_ELIGIBILITY',
      entityType: 'Award',
      entityId: awardId,
      model: 'unknown',
      promptTokens: 0,
      completionTokens: 0,
      totalTokens: 0,
      batchSize: projects.length,
      itemsProcessed: 0,
      status: 'ERROR',
      errorMessage: classified.message,
    })

    // Return all as needing manual review (partial batch results from
    // before the failure are discarded in favor of a uniform answer).
    return projects.map((p) => ({
      projectId: p.id,
      eligible: false,
      confidence: 0,
      reasoning: `AI error: ${classified.message}`,
      method: 'AI' as const,
    }))
  }

  return results
}
|
||||
|
||||
@@ -1,404 +1,404 @@
|
||||
/**
|
||||
* AI-Powered Evaluation Summary Service
|
||||
*
|
||||
* Generates AI summaries of jury evaluations for a project in a given round.
|
||||
* Combines OpenAI analysis with server-side scoring pattern calculations.
|
||||
*
|
||||
* GDPR Compliance:
|
||||
* - All evaluation data is anonymized before AI processing
|
||||
* - No juror names, emails, or identifiers are sent to OpenAI
|
||||
* - Only scores, feedback text, and binary decisions are included
|
||||
*/
|
||||
|
||||
import { TRPCError } from '@trpc/server'
|
||||
import { getOpenAI, getConfiguredModel, buildCompletionParams, AI_MODELS } from '@/lib/openai'
|
||||
import { logAIUsage, extractTokenUsage } from '@/server/utils/ai-usage'
|
||||
import { classifyAIError, createParseError, logAIError } from './ai-errors'
|
||||
import { sanitizeText } from './anonymization'
|
||||
import type { PrismaClient, Prisma } from '@prisma/client'
|
||||
|
||||
// ─── Types ──────────────────────────────────────────────────────────────────

// Evaluation row as selected from Prisma, including the juror identity.
// The identity is stripped by anonymizeEvaluations before any AI call.
interface EvaluationForSummary {
  id: string
  // Per-criterion scores keyed by criterion id (see computeScoringPatterns).
  criterionScoresJson: Record<string, number> | null
  globalScore: number | null
  binaryDecision: boolean | null
  feedbackText: string | null
  assignment: {
    user: {
      id: string
      name: string | null
      email: string
    }
  }
}

// Identity-free projection of an evaluation — the only shape sent to OpenAI.
interface AnonymizedEvaluation {
  criterionScores: Record<string, number> | null
  globalScore: number | null
  binaryDecision: boolean | null
  feedbackText: string | null
}

// One criterion from the evaluation form's criteriaJson.
interface CriterionDef {
  id: string
  label: string
}

// Structure the model is instructed to return (see buildSummaryPrompt).
interface AIResponsePayload {
  overallAssessment: string
  strengths: string[]
  weaknesses: string[]
  themes: Array<{
    theme: string
    sentiment: 'positive' | 'negative' | 'mixed'
    // Number of evaluators mentioning this theme, per the prompt contract.
    frequency: number
  }>
  recommendation: string
}

// Server-side statistics computed without AI (computeScoringPatterns).
interface ScoringPatterns {
  averageGlobalScore: number | null
  // 0..1 agreement measure; 1.0 means identical global scores.
  consensus: number
  // Average score per criterion, keyed by the criterion's label.
  criterionAverages: Record<string, number>
  evaluatorCount: number
}

// Result returned by generateSummary: AI analysis merged with the
// computed scoring patterns, plus persistence metadata.
export interface EvaluationSummaryResult {
  id: string
  projectId: string
  stageId: string
  summaryJson: AIResponsePayload & { scoringPatterns: ScoringPatterns }
  generatedAt: Date
  model: string
  tokensUsed: number
}
|
||||
|
||||
// ─── Anonymization ──────────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Strip juror names/emails from evaluations, keeping only scores and feedback.
|
||||
*/
|
||||
export function anonymizeEvaluations(
|
||||
evaluations: EvaluationForSummary[]
|
||||
): AnonymizedEvaluation[] {
|
||||
return evaluations.map((ev) => ({
|
||||
criterionScores: ev.criterionScoresJson as Record<string, number> | null,
|
||||
globalScore: ev.globalScore,
|
||||
binaryDecision: ev.binaryDecision,
|
||||
feedbackText: ev.feedbackText ? sanitizeText(ev.feedbackText) : null,
|
||||
}))
|
||||
}
|
||||
|
||||
// ─── Prompt Building ────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Build the OpenAI prompt for evaluation summary generation.
 *
 * The project title is sanitized here; evaluations are expected to be
 * already anonymized (see anonymizeEvaluations). The prompt instructs the
 * model to answer with a JSON object matching AIResponsePayload.
 *
 * @param anonymizedEvaluations Identity-free evaluation payloads.
 * @param projectTitle          Project title (sanitized before interpolation).
 * @param criteriaLabels        Human-readable criterion labels.
 * @returns The full user-message prompt text.
 */
export function buildSummaryPrompt(
  anonymizedEvaluations: AnonymizedEvaluation[],
  projectTitle: string,
  criteriaLabels: string[]
): string {
  const sanitizedTitle = sanitizeText(projectTitle)

  // Template interior is intentionally unindented — it is the literal prompt text.
  return `You are analyzing jury evaluations for a project competition.

PROJECT: "${sanitizedTitle}"

EVALUATION CRITERIA: ${criteriaLabels.join(', ')}

EVALUATIONS (${anonymizedEvaluations.length} total):
${JSON.stringify(anonymizedEvaluations, null, 2)}

Analyze these evaluations and return a JSON object with this exact structure:
{
"overallAssessment": "A 2-3 sentence summary of how the project was evaluated overall",
"strengths": ["strength 1", "strength 2", ...],
"weaknesses": ["weakness 1", "weakness 2", ...],
"themes": [
{ "theme": "theme name", "sentiment": "positive" | "negative" | "mixed", "frequency": <number of evaluators mentioning this> }
],
"recommendation": "A brief recommendation based on the evaluation consensus"
}

Guidelines:
- Base your analysis only on the provided evaluation data
- Identify common themes across evaluator feedback
- Note areas of agreement and disagreement
- Keep the assessment objective and balanced
- Do not include any personal identifiers`
}
|
||||
|
||||
// ─── Scoring Patterns (Server-Side) ─────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Compute scoring patterns from evaluations without AI.
|
||||
*/
|
||||
export function computeScoringPatterns(
|
||||
evaluations: EvaluationForSummary[],
|
||||
criteriaLabels: CriterionDef[]
|
||||
): ScoringPatterns {
|
||||
const globalScores = evaluations
|
||||
.map((e) => e.globalScore)
|
||||
.filter((s): s is number => s !== null)
|
||||
|
||||
// Average global score
|
||||
const averageGlobalScore =
|
||||
globalScores.length > 0
|
||||
? globalScores.reduce((a, b) => a + b, 0) / globalScores.length
|
||||
: null
|
||||
|
||||
// Consensus: 1 - normalized standard deviation (1.0 = full consensus)
|
||||
let consensus = 1
|
||||
if (globalScores.length > 1 && averageGlobalScore !== null) {
|
||||
const variance =
|
||||
globalScores.reduce(
|
||||
(sum, score) => sum + Math.pow(score - averageGlobalScore, 2),
|
||||
0
|
||||
) / globalScores.length
|
||||
const stdDev = Math.sqrt(variance)
|
||||
// Normalize by the scoring scale (1-10, so max possible std dev is ~4.5)
|
||||
consensus = Math.max(0, 1 - stdDev / 4.5)
|
||||
}
|
||||
|
||||
// Criterion averages
|
||||
const criterionAverages: Record<string, number> = {}
|
||||
for (const criterion of criteriaLabels) {
|
||||
const scores: number[] = []
|
||||
for (const ev of evaluations) {
|
||||
const criterionScores = ev.criterionScoresJson as Record<string, number> | null
|
||||
if (criterionScores && criterionScores[criterion.id] !== undefined) {
|
||||
scores.push(criterionScores[criterion.id])
|
||||
}
|
||||
}
|
||||
if (scores.length > 0) {
|
||||
criterionAverages[criterion.label] =
|
||||
scores.reduce((a, b) => a + b, 0) / scores.length
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
averageGlobalScore,
|
||||
consensus: Math.round(consensus * 100) / 100,
|
||||
criterionAverages,
|
||||
evaluatorCount: evaluations.length,
|
||||
}
|
||||
}
|
||||
|
||||
// ─── Main Orchestrator ──────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Generate an AI-powered evaluation summary for a project in a round.
 *
 * Flow: load the project and its SUBMITTED evaluations for the stage,
 * anonymize them (GDPR), ask OpenAI for a qualitative summary, compute
 * scoring statistics server-side, then upsert the merged result into
 * evaluationSummary and log token usage.
 *
 * @throws TRPCError NOT_FOUND             when the project does not exist
 * @throws TRPCError BAD_REQUEST           when no submitted evaluations exist
 * @throws TRPCError PRECONDITION_FAILED   when OpenAI is not configured
 * @throws TRPCError INTERNAL_SERVER_ERROR when the AI call fails or its
 *         response cannot be parsed as JSON
 */
export async function generateSummary({
  projectId,
  stageId,
  userId,
  prisma,
}: {
  projectId: string
  stageId: string
  userId: string
  prisma: PrismaClient
}): Promise<EvaluationSummaryResult> {
  // 1. Fetch project with evaluations and form criteria
  const project = await prisma.project.findUnique({
    where: { id: projectId },
    select: {
      id: true,
      title: true,
    },
  })

  if (!project) {
    throw new TRPCError({ code: 'NOT_FOUND', message: 'Project not found' })
  }

  // Fetch submitted evaluations for this project in this stage. The juror
  // identity is selected here but stripped by anonymizeEvaluations before
  // anything is sent to OpenAI.
  const evaluations = await prisma.evaluation.findMany({
    where: {
      status: 'SUBMITTED',
      assignment: {
        projectId,
        stageId,
      },
    },
    select: {
      id: true,
      criterionScoresJson: true,
      globalScore: true,
      binaryDecision: true,
      feedbackText: true,
      assignment: {
        select: {
          user: {
            select: { id: true, name: true, email: true },
          },
        },
      },
    },
  })

  if (evaluations.length === 0) {
    throw new TRPCError({
      code: 'BAD_REQUEST',
      message: 'No submitted evaluations found for this project in this stage',
    })
  }

  // Get evaluation form criteria for this stage; an absent/inactive form
  // yields an empty criteria list rather than an error.
  const form = await prisma.evaluationForm.findFirst({
    where: { stageId, isActive: true },
    select: { criteriaJson: true },
  })

  const criteria: CriterionDef[] = form?.criteriaJson
    ? (form.criteriaJson as unknown as CriterionDef[])
    : []
  const criteriaLabels = criteria.map((c) => c.label)

  // 2. Anonymize evaluations
  const typedEvaluations = evaluations as unknown as EvaluationForSummary[]
  const anonymized = anonymizeEvaluations(typedEvaluations)

  // 3. Build prompt and call OpenAI
  const openai = await getOpenAI()
  if (!openai) {
    throw new TRPCError({
      code: 'PRECONDITION_FAILED',
      message: 'OpenAI is not configured. Please set up your API key in Settings.',
    })
  }

  const model = await getConfiguredModel(AI_MODELS.QUICK)
  const prompt = buildSummaryPrompt(anonymized, project.title, criteriaLabels)

  let aiResponse: AIResponsePayload
  let tokensUsed = 0

  try {
    const params = buildCompletionParams(model, {
      messages: [
        { role: 'user', content: prompt },
      ],
      jsonMode: true,
      temperature: 0.3,
      maxTokens: 2000,
    })

    const response = await openai.chat.completions.create(params)
    const usage = extractTokenUsage(response)
    tokensUsed = usage.totalTokens

    const content = response.choices[0]?.message?.content
    if (!content) {
      throw new Error('Empty response from AI')
    }

    aiResponse = JSON.parse(content) as AIResponsePayload
  } catch (error) {
    if (error instanceof SyntaxError) {
      // Model returned malformed JSON: log the failed attempt (tokens were
      // still consumed) and surface a retryable error to the client.
      const parseError = createParseError(error.message)
      logAIError('EvaluationSummary', 'generateSummary', parseError)

      await logAIUsage({
        userId,
        action: 'EVALUATION_SUMMARY',
        entityType: 'Project',
        entityId: projectId,
        model,
        promptTokens: 0,
        completionTokens: 0,
        totalTokens: tokensUsed,
        itemsProcessed: 0,
        status: 'ERROR',
        errorMessage: parseError.message,
      })

      throw new TRPCError({
        code: 'INTERNAL_SERVER_ERROR',
        message: 'Failed to parse AI response. Please try again.',
      })
    }

    // Any other failure: classify, log, and rethrow with its message.
    const classified = classifyAIError(error)
    logAIError('EvaluationSummary', 'generateSummary', classified)

    await logAIUsage({
      userId,
      action: 'EVALUATION_SUMMARY',
      entityType: 'Project',
      entityId: projectId,
      model,
      promptTokens: 0,
      completionTokens: 0,
      totalTokens: 0,
      itemsProcessed: 0,
      status: 'ERROR',
      errorMessage: classified.message,
    })

    throw new TRPCError({
      code: 'INTERNAL_SERVER_ERROR',
      message: classified.message,
    })
  }

  // 4. Compute scoring patterns (server-side, no AI)
  const scoringPatterns = computeScoringPatterns(typedEvaluations, criteria)

  // 5. Merge AI analysis with computed statistics and upsert — one summary
  // per (projectId, stageId); regeneration overwrites the previous one.
  const summaryJson = {
    ...aiResponse,
    scoringPatterns,
  }

  const summaryJsonValue = summaryJson as unknown as Prisma.InputJsonValue

  const summary = await prisma.evaluationSummary.upsert({
    where: {
      projectId_stageId: { projectId, stageId },
    },
    create: {
      projectId,
      stageId,
      summaryJson: summaryJsonValue,
      generatedById: userId,
      model,
      tokensUsed,
    },
    update: {
      summaryJson: summaryJsonValue,
      generatedAt: new Date(),
      generatedById: userId,
      model,
      tokensUsed,
    },
  })

  // 6. Log AI usage
  await logAIUsage({
    userId,
    action: 'EVALUATION_SUMMARY',
    entityType: 'Project',
    entityId: projectId,
    model,
    promptTokens: 0, // Detailed breakdown not always available
    completionTokens: 0,
    totalTokens: tokensUsed,
    itemsProcessed: evaluations.length,
    status: 'SUCCESS',
  })

  return {
    id: summary.id,
    projectId: summary.projectId,
    stageId: summary.stageId,
    summaryJson: summaryJson as AIResponsePayload & { scoringPatterns: ScoringPatterns },
    generatedAt: summary.generatedAt,
    model: summary.model,
    tokensUsed: summary.tokensUsed,
  }
}
|
||||
/**
|
||||
* AI-Powered Evaluation Summary Service
|
||||
*
|
||||
* Generates AI summaries of jury evaluations for a project in a given round.
|
||||
* Combines OpenAI analysis with server-side scoring pattern calculations.
|
||||
*
|
||||
* GDPR Compliance:
|
||||
* - All evaluation data is anonymized before AI processing
|
||||
* - No juror names, emails, or identifiers are sent to OpenAI
|
||||
* - Only scores, feedback text, and binary decisions are included
|
||||
*/
|
||||
|
||||
import { TRPCError } from '@trpc/server'
|
||||
import { getOpenAI, getConfiguredModel, buildCompletionParams, AI_MODELS } from '@/lib/openai'
|
||||
import { logAIUsage, extractTokenUsage } from '@/server/utils/ai-usage'
|
||||
import { classifyAIError, createParseError, logAIError } from './ai-errors'
|
||||
import { sanitizeText } from './anonymization'
|
||||
import type { PrismaClient, Prisma } from '@prisma/client'
|
||||
|
||||
// ─── Types ──────────────────────────────────────────────────────────────────

// Evaluation row as selected from Prisma, including the juror identity.
// The identity is stripped by anonymizeEvaluations before any AI call.
interface EvaluationForSummary {
  id: string
  // Per-criterion scores keyed by criterion id (see computeScoringPatterns).
  criterionScoresJson: Record<string, number> | null
  globalScore: number | null
  binaryDecision: boolean | null
  feedbackText: string | null
  assignment: {
    user: {
      id: string
      name: string | null
      email: string
    }
  }
}

// Identity-free projection of an evaluation — the only shape sent to OpenAI.
interface AnonymizedEvaluation {
  criterionScores: Record<string, number> | null
  globalScore: number | null
  binaryDecision: boolean | null
  feedbackText: string | null
}

// One criterion from the evaluation form's criteriaJson.
interface CriterionDef {
  id: string
  label: string
}

// Structure the model is instructed to return (see buildSummaryPrompt).
interface AIResponsePayload {
  overallAssessment: string
  strengths: string[]
  weaknesses: string[]
  themes: Array<{
    theme: string
    sentiment: 'positive' | 'negative' | 'mixed'
    // Number of evaluators mentioning this theme, per the prompt contract.
    frequency: number
  }>
  recommendation: string
}

// Server-side statistics computed without AI (computeScoringPatterns).
interface ScoringPatterns {
  averageGlobalScore: number | null
  // 0..1 agreement measure; 1.0 means identical global scores.
  consensus: number
  // Average score per criterion, keyed by the criterion's label.
  criterionAverages: Record<string, number>
  evaluatorCount: number
}

// Result returned by generateSummary: AI analysis merged with the
// computed scoring patterns, plus persistence metadata.
export interface EvaluationSummaryResult {
  id: string
  projectId: string
  stageId: string
  summaryJson: AIResponsePayload & { scoringPatterns: ScoringPatterns }
  generatedAt: Date
  model: string
  tokensUsed: number
}
|
||||
|
||||
// ─── Anonymization ──────────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Strip juror names/emails from evaluations, keeping only scores and feedback.
|
||||
*/
|
||||
export function anonymizeEvaluations(
|
||||
evaluations: EvaluationForSummary[]
|
||||
): AnonymizedEvaluation[] {
|
||||
return evaluations.map((ev) => ({
|
||||
criterionScores: ev.criterionScoresJson as Record<string, number> | null,
|
||||
globalScore: ev.globalScore,
|
||||
binaryDecision: ev.binaryDecision,
|
||||
feedbackText: ev.feedbackText ? sanitizeText(ev.feedbackText) : null,
|
||||
}))
|
||||
}
|
||||
|
||||
// ─── Prompt Building ────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Build the OpenAI prompt for evaluation summary generation.
 *
 * The project title is sanitized here; evaluations are expected to be
 * already anonymized (see anonymizeEvaluations). The prompt instructs the
 * model to answer with a JSON object matching AIResponsePayload.
 *
 * @param anonymizedEvaluations Identity-free evaluation payloads.
 * @param projectTitle          Project title (sanitized before interpolation).
 * @param criteriaLabels        Human-readable criterion labels.
 * @returns The full user-message prompt text.
 */
export function buildSummaryPrompt(
  anonymizedEvaluations: AnonymizedEvaluation[],
  projectTitle: string,
  criteriaLabels: string[]
): string {
  const sanitizedTitle = sanitizeText(projectTitle)

  // Template interior is intentionally unindented — it is the literal prompt text.
  return `You are analyzing jury evaluations for a project competition.

PROJECT: "${sanitizedTitle}"

EVALUATION CRITERIA: ${criteriaLabels.join(', ')}

EVALUATIONS (${anonymizedEvaluations.length} total):
${JSON.stringify(anonymizedEvaluations, null, 2)}

Analyze these evaluations and return a JSON object with this exact structure:
{
"overallAssessment": "A 2-3 sentence summary of how the project was evaluated overall",
"strengths": ["strength 1", "strength 2", ...],
"weaknesses": ["weakness 1", "weakness 2", ...],
"themes": [
{ "theme": "theme name", "sentiment": "positive" | "negative" | "mixed", "frequency": <number of evaluators mentioning this> }
],
"recommendation": "A brief recommendation based on the evaluation consensus"
}

Guidelines:
- Base your analysis only on the provided evaluation data
- Identify common themes across evaluator feedback
- Note areas of agreement and disagreement
- Keep the assessment objective and balanced
- Do not include any personal identifiers`
}
|
||||
|
||||
// ─── Scoring Patterns (Server-Side) ─────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Compute scoring patterns from evaluations without AI.
|
||||
*/
|
||||
export function computeScoringPatterns(
|
||||
evaluations: EvaluationForSummary[],
|
||||
criteriaLabels: CriterionDef[]
|
||||
): ScoringPatterns {
|
||||
const globalScores = evaluations
|
||||
.map((e) => e.globalScore)
|
||||
.filter((s): s is number => s !== null)
|
||||
|
||||
// Average global score
|
||||
const averageGlobalScore =
|
||||
globalScores.length > 0
|
||||
? globalScores.reduce((a, b) => a + b, 0) / globalScores.length
|
||||
: null
|
||||
|
||||
// Consensus: 1 - normalized standard deviation (1.0 = full consensus)
|
||||
let consensus = 1
|
||||
if (globalScores.length > 1 && averageGlobalScore !== null) {
|
||||
const variance =
|
||||
globalScores.reduce(
|
||||
(sum, score) => sum + Math.pow(score - averageGlobalScore, 2),
|
||||
0
|
||||
) / globalScores.length
|
||||
const stdDev = Math.sqrt(variance)
|
||||
// Normalize by the scoring scale (1-10, so max possible std dev is ~4.5)
|
||||
consensus = Math.max(0, 1 - stdDev / 4.5)
|
||||
}
|
||||
|
||||
// Criterion averages
|
||||
const criterionAverages: Record<string, number> = {}
|
||||
for (const criterion of criteriaLabels) {
|
||||
const scores: number[] = []
|
||||
for (const ev of evaluations) {
|
||||
const criterionScores = ev.criterionScoresJson as Record<string, number> | null
|
||||
if (criterionScores && criterionScores[criterion.id] !== undefined) {
|
||||
scores.push(criterionScores[criterion.id])
|
||||
}
|
||||
}
|
||||
if (scores.length > 0) {
|
||||
criterionAverages[criterion.label] =
|
||||
scores.reduce((a, b) => a + b, 0) / scores.length
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
averageGlobalScore,
|
||||
consensus: Math.round(consensus * 100) / 100,
|
||||
criterionAverages,
|
||||
evaluatorCount: evaluations.length,
|
||||
}
|
||||
}
|
||||
|
||||
// ─── Main Orchestrator ──────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Generate an AI-powered evaluation summary for a project in a round.
 *
 * Flow: load the project and its SUBMITTED evaluations for the stage,
 * anonymize them (GDPR), ask OpenAI for a qualitative summary, compute
 * scoring statistics server-side, then upsert the merged result into
 * evaluationSummary and log token usage.
 *
 * @throws TRPCError NOT_FOUND             when the project does not exist
 * @throws TRPCError BAD_REQUEST           when no submitted evaluations exist
 * @throws TRPCError PRECONDITION_FAILED   when OpenAI is not configured
 * @throws TRPCError INTERNAL_SERVER_ERROR when the AI call fails or its
 *         response cannot be parsed as JSON
 */
export async function generateSummary({
  projectId,
  stageId,
  userId,
  prisma,
}: {
  projectId: string
  stageId: string
  userId: string
  prisma: PrismaClient
}): Promise<EvaluationSummaryResult> {
  // 1. Fetch project with evaluations and form criteria
  const project = await prisma.project.findUnique({
    where: { id: projectId },
    select: {
      id: true,
      title: true,
    },
  })

  if (!project) {
    throw new TRPCError({ code: 'NOT_FOUND', message: 'Project not found' })
  }

  // Fetch submitted evaluations for this project in this stage. The juror
  // identity is selected here but stripped by anonymizeEvaluations before
  // anything is sent to OpenAI.
  const evaluations = await prisma.evaluation.findMany({
    where: {
      status: 'SUBMITTED',
      assignment: {
        projectId,
        stageId,
      },
    },
    select: {
      id: true,
      criterionScoresJson: true,
      globalScore: true,
      binaryDecision: true,
      feedbackText: true,
      assignment: {
        select: {
          user: {
            select: { id: true, name: true, email: true },
          },
        },
      },
    },
  })

  if (evaluations.length === 0) {
    throw new TRPCError({
      code: 'BAD_REQUEST',
      message: 'No submitted evaluations found for this project in this stage',
    })
  }

  // Get evaluation form criteria for this stage; an absent/inactive form
  // yields an empty criteria list rather than an error.
  const form = await prisma.evaluationForm.findFirst({
    where: { stageId, isActive: true },
    select: { criteriaJson: true },
  })

  const criteria: CriterionDef[] = form?.criteriaJson
    ? (form.criteriaJson as unknown as CriterionDef[])
    : []
  const criteriaLabels = criteria.map((c) => c.label)

  // 2. Anonymize evaluations
  const typedEvaluations = evaluations as unknown as EvaluationForSummary[]
  const anonymized = anonymizeEvaluations(typedEvaluations)

  // 3. Build prompt and call OpenAI
  const openai = await getOpenAI()
  if (!openai) {
    throw new TRPCError({
      code: 'PRECONDITION_FAILED',
      message: 'OpenAI is not configured. Please set up your API key in Settings.',
    })
  }

  const model = await getConfiguredModel(AI_MODELS.QUICK)
  const prompt = buildSummaryPrompt(anonymized, project.title, criteriaLabels)

  let aiResponse: AIResponsePayload
  let tokensUsed = 0

  try {
    const params = buildCompletionParams(model, {
      messages: [
        { role: 'user', content: prompt },
      ],
      jsonMode: true,
      temperature: 0.3,
      maxTokens: 2000,
    })

    const response = await openai.chat.completions.create(params)
    const usage = extractTokenUsage(response)
    tokensUsed = usage.totalTokens

    const content = response.choices[0]?.message?.content
    if (!content) {
      throw new Error('Empty response from AI')
    }

    aiResponse = JSON.parse(content) as AIResponsePayload
  } catch (error) {
    if (error instanceof SyntaxError) {
      // Model returned malformed JSON: log the failed attempt (tokens were
      // still consumed) and surface a retryable error to the client.
      const parseError = createParseError(error.message)
      logAIError('EvaluationSummary', 'generateSummary', parseError)

      await logAIUsage({
        userId,
        action: 'EVALUATION_SUMMARY',
        entityType: 'Project',
        entityId: projectId,
        model,
        promptTokens: 0,
        completionTokens: 0,
        totalTokens: tokensUsed,
        itemsProcessed: 0,
        status: 'ERROR',
        errorMessage: parseError.message,
      })

      throw new TRPCError({
        code: 'INTERNAL_SERVER_ERROR',
        message: 'Failed to parse AI response. Please try again.',
      })
    }

    // Any other failure: classify, log, and rethrow with its message.
    const classified = classifyAIError(error)
    logAIError('EvaluationSummary', 'generateSummary', classified)

    await logAIUsage({
      userId,
      action: 'EVALUATION_SUMMARY',
      entityType: 'Project',
      entityId: projectId,
      model,
      promptTokens: 0,
      completionTokens: 0,
      totalTokens: 0,
      itemsProcessed: 0,
      status: 'ERROR',
      errorMessage: classified.message,
    })

    throw new TRPCError({
      code: 'INTERNAL_SERVER_ERROR',
      message: classified.message,
    })
  }

  // 4. Compute scoring patterns (server-side, no AI)
  const scoringPatterns = computeScoringPatterns(typedEvaluations, criteria)

  // 5. Merge AI analysis with computed statistics and upsert — one summary
  // per (projectId, stageId); regeneration overwrites the previous one.
  const summaryJson = {
    ...aiResponse,
    scoringPatterns,
  }

  const summaryJsonValue = summaryJson as unknown as Prisma.InputJsonValue

  const summary = await prisma.evaluationSummary.upsert({
    where: {
      projectId_stageId: { projectId, stageId },
    },
    create: {
      projectId,
      stageId,
      summaryJson: summaryJsonValue,
      generatedById: userId,
      model,
      tokensUsed,
    },
    update: {
      summaryJson: summaryJsonValue,
      generatedAt: new Date(),
      generatedById: userId,
      model,
      tokensUsed,
    },
  })

  // 6. Log AI usage
  await logAIUsage({
    userId,
    action: 'EVALUATION_SUMMARY',
    entityType: 'Project',
    entityId: projectId,
    model,
    promptTokens: 0, // Detailed breakdown not always available
    completionTokens: 0,
    totalTokens: tokensUsed,
    itemsProcessed: evaluations.length,
    status: 'SUCCESS',
  })

  return {
    id: summary.id,
    projectId: summary.projectId,
    stageId: summary.stageId,
    summaryJson: summaryJson as AIResponsePayload & { scoringPatterns: ScoringPatterns },
    generatedAt: summary.generatedAt,
    model: summary.model,
    tokensUsed: summary.tokensUsed,
  }
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,422 +1,422 @@
|
||||
/**
|
||||
* AI-Powered Project Tagging Service
|
||||
*
|
||||
* Analyzes projects and assigns expertise tags automatically.
|
||||
*
|
||||
* Features:
|
||||
* - Single project tagging (on-submit or manual)
|
||||
* - Batch tagging for rounds
|
||||
* - Confidence scores for each tag
|
||||
* - Additive only - never removes existing tags
|
||||
*
|
||||
* GDPR Compliance:
|
||||
* - All project data is anonymized before AI processing
|
||||
* - Only necessary fields sent to OpenAI
|
||||
* - No personal identifiers in prompts or responses
|
||||
*/
|
||||
|
||||
import { prisma } from '@/lib/prisma'
|
||||
import { getOpenAI, getConfiguredModel, buildCompletionParams } from '@/lib/openai'
|
||||
import { logAIUsage, extractTokenUsage } from '@/server/utils/ai-usage'
|
||||
import { classifyAIError, createParseError, logAIError } from './ai-errors'
|
||||
import {
|
||||
anonymizeProjectsForAI,
|
||||
validateAnonymizedProjects,
|
||||
toProjectWithRelations,
|
||||
type AnonymizedProjectForAI,
|
||||
} from './anonymization'
|
||||
|
||||
// ─── Types ──────────────────────────────────────────────────────────────────

/** A single expertise tag proposed by the AI for a project. */
export interface TagSuggestion {
  tagId: string // id of the matched ExpertiseTag row
  tagName: string // canonical tag name as stored (not as the AI spelled it)
  confidence: number // clamped to [0, 1] before being surfaced
  reasoning: string // brief model-provided justification for the tag
}

/** Outcome of tagging a single project. */
export interface TaggingResult {
  projectId: string
  suggestions: TagSuggestion[] // everything the AI proposed
  applied: TagSuggestion[] // subset actually persisted to the database
  tokensUsed: number // total OpenAI tokens consumed for this project
}

/** Shape of an active expertise tag as selected from the database. */
interface AvailableTag {
  id: string
  name: string
  category: string | null
  description: string | null
}

// ─── Constants ───────────────────────────────────────────────────────────────

// Suggestions below this confidence are never auto-applied.
const CONFIDENCE_THRESHOLD = 0.5
// Fallback per-project tag cap when the ai_tagging_max_tags setting is absent.
const DEFAULT_MAX_TAGS = 5
|
||||
|
||||
// System prompt optimized for tag suggestion
|
||||
const TAG_SUGGESTION_SYSTEM_PROMPT = `You are an expert at categorizing ocean conservation and sustainability projects.
|
||||
|
||||
Analyze the project and suggest the most relevant expertise tags from the provided list.
|
||||
Consider the project's focus areas, technology, methodology, and domain.
|
||||
|
||||
Return JSON with this format:
|
||||
{
|
||||
"suggestions": [
|
||||
{
|
||||
"tag_name": "exact tag name from list",
|
||||
"confidence": 0.0-1.0,
|
||||
"reasoning": "brief explanation why this tag fits"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
Rules:
|
||||
- Only suggest tags from the provided list (exact names)
|
||||
- Order by relevance (most relevant first)
|
||||
- Confidence should reflect how well the tag matches
|
||||
- Maximum 7 suggestions per project
|
||||
- Be conservative - only suggest tags that truly apply`
|
||||
|
||||
// ─── Helper Functions ────────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Get system settings for AI tagging
|
||||
*/
|
||||
export async function getTaggingSettings(): Promise<{
|
||||
enabled: boolean
|
||||
maxTags: number
|
||||
}> {
|
||||
const settings = await prisma.systemSettings.findMany({
|
||||
where: {
|
||||
key: {
|
||||
in: ['ai_tagging_enabled', 'ai_tagging_max_tags', 'ai_enabled'],
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const settingsMap = new Map(settings.map((s) => [s.key, s.value]))
|
||||
|
||||
// AI tagging is enabled if:
|
||||
// 1. ai_tagging_enabled is explicitly 'true', OR
|
||||
// 2. ai_tagging_enabled is not set but ai_enabled is 'true' (fall back to general AI setting)
|
||||
const taggingEnabled = settingsMap.get('ai_tagging_enabled')
|
||||
const aiEnabled = settingsMap.get('ai_enabled')
|
||||
|
||||
const enabled = taggingEnabled === 'true' || (taggingEnabled === undefined && aiEnabled === 'true')
|
||||
|
||||
return {
|
||||
enabled,
|
||||
maxTags: parseInt(settingsMap.get('ai_tagging_max_tags') || String(DEFAULT_MAX_TAGS)),
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all active expertise tags
|
||||
*/
|
||||
export async function getAvailableTags(): Promise<AvailableTag[]> {
|
||||
return prisma.expertiseTag.findMany({
|
||||
where: { isActive: true },
|
||||
select: {
|
||||
id: true,
|
||||
name: true,
|
||||
category: true,
|
||||
description: true,
|
||||
},
|
||||
orderBy: [{ category: 'asc' }, { sortOrder: 'asc' }],
|
||||
})
|
||||
}
|
||||
|
||||
// ─── AI Tagging Core ─────────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Call OpenAI to get tag suggestions for a project
|
||||
*/
|
||||
async function getAISuggestions(
|
||||
anonymizedProject: AnonymizedProjectForAI,
|
||||
availableTags: AvailableTag[],
|
||||
userId?: string
|
||||
): Promise<{ suggestions: TagSuggestion[]; tokensUsed: number }> {
|
||||
const openai = await getOpenAI()
|
||||
if (!openai) {
|
||||
console.warn('[AI Tagging] OpenAI not configured')
|
||||
return { suggestions: [], tokensUsed: 0 }
|
||||
}
|
||||
|
||||
const model = await getConfiguredModel()
|
||||
|
||||
// Build tag list for prompt
|
||||
const tagList = availableTags.map((t) => ({
|
||||
name: t.name,
|
||||
category: t.category,
|
||||
description: t.description,
|
||||
}))
|
||||
|
||||
const userPrompt = `PROJECT:
|
||||
${JSON.stringify(anonymizedProject, null, 2)}
|
||||
|
||||
AVAILABLE TAGS:
|
||||
${JSON.stringify(tagList, null, 2)}
|
||||
|
||||
Suggest relevant tags for this project.`
|
||||
|
||||
try {
|
||||
const params = buildCompletionParams(model, {
|
||||
messages: [
|
||||
{ role: 'system', content: TAG_SUGGESTION_SYSTEM_PROMPT },
|
||||
{ role: 'user', content: userPrompt },
|
||||
],
|
||||
jsonMode: true,
|
||||
temperature: 0.3,
|
||||
maxTokens: 2000,
|
||||
})
|
||||
|
||||
const response = await openai.chat.completions.create(params)
|
||||
const usage = extractTokenUsage(response)
|
||||
|
||||
// Log usage
|
||||
await logAIUsage({
|
||||
userId,
|
||||
action: 'PROJECT_TAGGING',
|
||||
entityType: 'Project',
|
||||
entityId: anonymizedProject.project_id,
|
||||
model,
|
||||
promptTokens: usage.promptTokens,
|
||||
completionTokens: usage.completionTokens,
|
||||
totalTokens: usage.totalTokens,
|
||||
batchSize: 1,
|
||||
itemsProcessed: 1,
|
||||
status: 'SUCCESS',
|
||||
})
|
||||
|
||||
const content = response.choices[0]?.message?.content
|
||||
if (!content) {
|
||||
throw new Error('Empty response from AI')
|
||||
}
|
||||
|
||||
const parsed = JSON.parse(content) as {
|
||||
suggestions: Array<{
|
||||
tag_name: string
|
||||
confidence: number
|
||||
reasoning: string
|
||||
}>
|
||||
}
|
||||
|
||||
// Map to TagSuggestion format, matching tag names to IDs
|
||||
const suggestions: TagSuggestion[] = []
|
||||
for (const s of parsed.suggestions || []) {
|
||||
const tag = availableTags.find(
|
||||
(t) => t.name.toLowerCase() === s.tag_name.toLowerCase()
|
||||
)
|
||||
if (tag) {
|
||||
suggestions.push({
|
||||
tagId: tag.id,
|
||||
tagName: tag.name,
|
||||
confidence: Math.max(0, Math.min(1, s.confidence)),
|
||||
reasoning: s.reasoning,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return { suggestions, tokensUsed: usage.totalTokens }
|
||||
} catch (error) {
|
||||
if (error instanceof SyntaxError) {
|
||||
const parseError = createParseError(error.message)
|
||||
logAIError('Tagging', 'getAISuggestions', parseError)
|
||||
}
|
||||
|
||||
await logAIUsage({
|
||||
userId,
|
||||
action: 'PROJECT_TAGGING',
|
||||
entityType: 'Project',
|
||||
entityId: anonymizedProject.project_id,
|
||||
model,
|
||||
promptTokens: 0,
|
||||
completionTokens: 0,
|
||||
totalTokens: 0,
|
||||
batchSize: 1,
|
||||
itemsProcessed: 0,
|
||||
status: 'ERROR',
|
||||
errorMessage: error instanceof Error ? error.message : 'Unknown error',
|
||||
})
|
||||
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
// ─── Public API ──────────────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Tag a single project with AI-suggested expertise tags
|
||||
*
|
||||
* Behavior:
|
||||
* - Only applies tags with confidence >= 0.5
|
||||
* - Additive only - never removes existing tags
|
||||
* - Respects maxTags setting
|
||||
*/
|
||||
export async function tagProject(
|
||||
projectId: string,
|
||||
userId?: string
|
||||
): Promise<TaggingResult> {
|
||||
const settings = await getTaggingSettings()
|
||||
if (!settings.enabled) {
|
||||
return {
|
||||
projectId,
|
||||
suggestions: [],
|
||||
applied: [],
|
||||
tokensUsed: 0,
|
||||
}
|
||||
}
|
||||
|
||||
// Fetch project with needed fields
|
||||
const project = await prisma.project.findUnique({
|
||||
where: { id: projectId },
|
||||
include: {
|
||||
projectTags: true,
|
||||
files: { select: { fileType: true } },
|
||||
_count: { select: { teamMembers: true, files: true } },
|
||||
},
|
||||
})
|
||||
|
||||
if (!project) {
|
||||
throw new Error(`Project not found: ${projectId}`)
|
||||
}
|
||||
|
||||
// Get available tags
|
||||
const availableTags = await getAvailableTags()
|
||||
if (availableTags.length === 0) {
|
||||
return {
|
||||
projectId,
|
||||
suggestions: [],
|
||||
applied: [],
|
||||
tokensUsed: 0,
|
||||
}
|
||||
}
|
||||
|
||||
// Anonymize project data
|
||||
const projectWithRelations = toProjectWithRelations(project)
|
||||
const { anonymized, mappings } = anonymizeProjectsForAI([projectWithRelations], 'FILTERING')
|
||||
|
||||
// Validate anonymization
|
||||
if (!validateAnonymizedProjects(anonymized)) {
|
||||
throw new Error('GDPR compliance check failed: PII detected in anonymized data')
|
||||
}
|
||||
|
||||
// Get AI suggestions
|
||||
const { suggestions, tokensUsed } = await getAISuggestions(
|
||||
anonymized[0],
|
||||
availableTags,
|
||||
userId
|
||||
)
|
||||
|
||||
// Filter by confidence threshold
|
||||
const validSuggestions = suggestions.filter(
|
||||
(s) => s.confidence >= CONFIDENCE_THRESHOLD
|
||||
)
|
||||
|
||||
// Get existing tag IDs to avoid duplicates
|
||||
const existingTagIds = new Set(project.projectTags.map((pt) => pt.tagId))
|
||||
|
||||
// Calculate how many more tags we can add
|
||||
const currentTagCount = project.projectTags.length
|
||||
const remainingSlots = Math.max(0, settings.maxTags - currentTagCount)
|
||||
|
||||
// Filter out existing tags and limit to remaining slots
|
||||
const newSuggestions = validSuggestions
|
||||
.filter((s) => !existingTagIds.has(s.tagId))
|
||||
.slice(0, remainingSlots)
|
||||
|
||||
// Apply new tags
|
||||
const applied: TagSuggestion[] = []
|
||||
for (const suggestion of newSuggestions) {
|
||||
try {
|
||||
await prisma.projectTag.create({
|
||||
data: {
|
||||
projectId,
|
||||
tagId: suggestion.tagId,
|
||||
confidence: suggestion.confidence,
|
||||
source: 'AI',
|
||||
},
|
||||
})
|
||||
applied.push(suggestion)
|
||||
} catch (error) {
|
||||
// Skip if tag already exists (race condition)
|
||||
console.warn(`[AI Tagging] Failed to apply tag ${suggestion.tagName}: ${error}`)
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
projectId,
|
||||
suggestions,
|
||||
applied,
|
||||
tokensUsed,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get tag suggestions for a project without applying them
|
||||
* Useful for preview/review before applying
|
||||
*/
|
||||
export async function getTagSuggestions(
|
||||
projectId: string,
|
||||
userId?: string
|
||||
): Promise<TagSuggestion[]> {
|
||||
// Fetch project
|
||||
const project = await prisma.project.findUnique({
|
||||
where: { id: projectId },
|
||||
include: {
|
||||
files: { select: { fileType: true } },
|
||||
_count: { select: { teamMembers: true, files: true } },
|
||||
},
|
||||
})
|
||||
|
||||
if (!project) {
|
||||
throw new Error(`Project not found: ${projectId}`)
|
||||
}
|
||||
|
||||
// Get available tags
|
||||
const availableTags = await getAvailableTags()
|
||||
if (availableTags.length === 0) {
|
||||
return []
|
||||
}
|
||||
|
||||
// Anonymize project data
|
||||
const projectWithRelations = toProjectWithRelations(project)
|
||||
const { anonymized } = anonymizeProjectsForAI([projectWithRelations], 'FILTERING')
|
||||
|
||||
// Validate anonymization
|
||||
if (!validateAnonymizedProjects(anonymized)) {
|
||||
throw new Error('GDPR compliance check failed')
|
||||
}
|
||||
|
||||
// Get AI suggestions
|
||||
const { suggestions } = await getAISuggestions(anonymized[0], availableTags, userId)
|
||||
|
||||
return suggestions
|
||||
}
|
||||
|
||||
/**
|
||||
* Manually add a tag to a project
|
||||
*/
|
||||
export async function addProjectTag(
|
||||
projectId: string,
|
||||
tagId: string
|
||||
): Promise<void> {
|
||||
await prisma.projectTag.upsert({
|
||||
where: { projectId_tagId: { projectId, tagId } },
|
||||
create: { projectId, tagId, source: 'MANUAL', confidence: 1.0 },
|
||||
update: { source: 'MANUAL', confidence: 1.0 },
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove a tag from a project
|
||||
*/
|
||||
export async function removeProjectTag(
|
||||
projectId: string,
|
||||
tagId: string
|
||||
): Promise<void> {
|
||||
await prisma.projectTag.deleteMany({
|
||||
where: { projectId, tagId },
|
||||
})
|
||||
}
|
||||
/**
|
||||
* AI-Powered Project Tagging Service
|
||||
*
|
||||
* Analyzes projects and assigns expertise tags automatically.
|
||||
*
|
||||
* Features:
|
||||
* - Single project tagging (on-submit or manual)
|
||||
* - Batch tagging for rounds
|
||||
* - Confidence scores for each tag
|
||||
* - Additive only - never removes existing tags
|
||||
*
|
||||
* GDPR Compliance:
|
||||
* - All project data is anonymized before AI processing
|
||||
* - Only necessary fields sent to OpenAI
|
||||
* - No personal identifiers in prompts or responses
|
||||
*/
|
||||
|
||||
import { prisma } from '@/lib/prisma'
|
||||
import { getOpenAI, getConfiguredModel, buildCompletionParams } from '@/lib/openai'
|
||||
import { logAIUsage, extractTokenUsage } from '@/server/utils/ai-usage'
|
||||
import { classifyAIError, createParseError, logAIError } from './ai-errors'
|
||||
import {
|
||||
anonymizeProjectsForAI,
|
||||
validateAnonymizedProjects,
|
||||
toProjectWithRelations,
|
||||
type AnonymizedProjectForAI,
|
||||
} from './anonymization'
|
||||
|
||||
// ─── Types ──────────────────────────────────────────────────────────────────
|
||||
|
||||
export interface TagSuggestion {
|
||||
tagId: string
|
||||
tagName: string
|
||||
confidence: number
|
||||
reasoning: string
|
||||
}
|
||||
|
||||
export interface TaggingResult {
|
||||
projectId: string
|
||||
suggestions: TagSuggestion[]
|
||||
applied: TagSuggestion[]
|
||||
tokensUsed: number
|
||||
}
|
||||
|
||||
interface AvailableTag {
|
||||
id: string
|
||||
name: string
|
||||
category: string | null
|
||||
description: string | null
|
||||
}
|
||||
|
||||
// ─── Constants ───────────────────────────────────────────────────────────────
|
||||
|
||||
const CONFIDENCE_THRESHOLD = 0.5
|
||||
const DEFAULT_MAX_TAGS = 5
|
||||
|
||||
// System prompt optimized for tag suggestion
|
||||
const TAG_SUGGESTION_SYSTEM_PROMPT = `You are an expert at categorizing ocean conservation and sustainability projects.
|
||||
|
||||
Analyze the project and suggest the most relevant expertise tags from the provided list.
|
||||
Consider the project's focus areas, technology, methodology, and domain.
|
||||
|
||||
Return JSON with this format:
|
||||
{
|
||||
"suggestions": [
|
||||
{
|
||||
"tag_name": "exact tag name from list",
|
||||
"confidence": 0.0-1.0,
|
||||
"reasoning": "brief explanation why this tag fits"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
Rules:
|
||||
- Only suggest tags from the provided list (exact names)
|
||||
- Order by relevance (most relevant first)
|
||||
- Confidence should reflect how well the tag matches
|
||||
- Maximum 7 suggestions per project
|
||||
- Be conservative - only suggest tags that truly apply`
|
||||
|
||||
// ─── Helper Functions ────────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Get system settings for AI tagging
|
||||
*/
|
||||
export async function getTaggingSettings(): Promise<{
|
||||
enabled: boolean
|
||||
maxTags: number
|
||||
}> {
|
||||
const settings = await prisma.systemSettings.findMany({
|
||||
where: {
|
||||
key: {
|
||||
in: ['ai_tagging_enabled', 'ai_tagging_max_tags', 'ai_enabled'],
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const settingsMap = new Map(settings.map((s) => [s.key, s.value]))
|
||||
|
||||
// AI tagging is enabled if:
|
||||
// 1. ai_tagging_enabled is explicitly 'true', OR
|
||||
// 2. ai_tagging_enabled is not set but ai_enabled is 'true' (fall back to general AI setting)
|
||||
const taggingEnabled = settingsMap.get('ai_tagging_enabled')
|
||||
const aiEnabled = settingsMap.get('ai_enabled')
|
||||
|
||||
const enabled = taggingEnabled === 'true' || (taggingEnabled === undefined && aiEnabled === 'true')
|
||||
|
||||
return {
|
||||
enabled,
|
||||
maxTags: parseInt(settingsMap.get('ai_tagging_max_tags') || String(DEFAULT_MAX_TAGS)),
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all active expertise tags
|
||||
*/
|
||||
export async function getAvailableTags(): Promise<AvailableTag[]> {
|
||||
return prisma.expertiseTag.findMany({
|
||||
where: { isActive: true },
|
||||
select: {
|
||||
id: true,
|
||||
name: true,
|
||||
category: true,
|
||||
description: true,
|
||||
},
|
||||
orderBy: [{ category: 'asc' }, { sortOrder: 'asc' }],
|
||||
})
|
||||
}
|
||||
|
||||
// ─── AI Tagging Core ─────────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Call OpenAI to get tag suggestions for a project
|
||||
*/
|
||||
async function getAISuggestions(
|
||||
anonymizedProject: AnonymizedProjectForAI,
|
||||
availableTags: AvailableTag[],
|
||||
userId?: string
|
||||
): Promise<{ suggestions: TagSuggestion[]; tokensUsed: number }> {
|
||||
const openai = await getOpenAI()
|
||||
if (!openai) {
|
||||
console.warn('[AI Tagging] OpenAI not configured')
|
||||
return { suggestions: [], tokensUsed: 0 }
|
||||
}
|
||||
|
||||
const model = await getConfiguredModel()
|
||||
|
||||
// Build tag list for prompt
|
||||
const tagList = availableTags.map((t) => ({
|
||||
name: t.name,
|
||||
category: t.category,
|
||||
description: t.description,
|
||||
}))
|
||||
|
||||
const userPrompt = `PROJECT:
|
||||
${JSON.stringify(anonymizedProject, null, 2)}
|
||||
|
||||
AVAILABLE TAGS:
|
||||
${JSON.stringify(tagList, null, 2)}
|
||||
|
||||
Suggest relevant tags for this project.`
|
||||
|
||||
try {
|
||||
const params = buildCompletionParams(model, {
|
||||
messages: [
|
||||
{ role: 'system', content: TAG_SUGGESTION_SYSTEM_PROMPT },
|
||||
{ role: 'user', content: userPrompt },
|
||||
],
|
||||
jsonMode: true,
|
||||
temperature: 0.3,
|
||||
maxTokens: 2000,
|
||||
})
|
||||
|
||||
const response = await openai.chat.completions.create(params)
|
||||
const usage = extractTokenUsage(response)
|
||||
|
||||
// Log usage
|
||||
await logAIUsage({
|
||||
userId,
|
||||
action: 'PROJECT_TAGGING',
|
||||
entityType: 'Project',
|
||||
entityId: anonymizedProject.project_id,
|
||||
model,
|
||||
promptTokens: usage.promptTokens,
|
||||
completionTokens: usage.completionTokens,
|
||||
totalTokens: usage.totalTokens,
|
||||
batchSize: 1,
|
||||
itemsProcessed: 1,
|
||||
status: 'SUCCESS',
|
||||
})
|
||||
|
||||
const content = response.choices[0]?.message?.content
|
||||
if (!content) {
|
||||
throw new Error('Empty response from AI')
|
||||
}
|
||||
|
||||
const parsed = JSON.parse(content) as {
|
||||
suggestions: Array<{
|
||||
tag_name: string
|
||||
confidence: number
|
||||
reasoning: string
|
||||
}>
|
||||
}
|
||||
|
||||
// Map to TagSuggestion format, matching tag names to IDs
|
||||
const suggestions: TagSuggestion[] = []
|
||||
for (const s of parsed.suggestions || []) {
|
||||
const tag = availableTags.find(
|
||||
(t) => t.name.toLowerCase() === s.tag_name.toLowerCase()
|
||||
)
|
||||
if (tag) {
|
||||
suggestions.push({
|
||||
tagId: tag.id,
|
||||
tagName: tag.name,
|
||||
confidence: Math.max(0, Math.min(1, s.confidence)),
|
||||
reasoning: s.reasoning,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return { suggestions, tokensUsed: usage.totalTokens }
|
||||
} catch (error) {
|
||||
if (error instanceof SyntaxError) {
|
||||
const parseError = createParseError(error.message)
|
||||
logAIError('Tagging', 'getAISuggestions', parseError)
|
||||
}
|
||||
|
||||
await logAIUsage({
|
||||
userId,
|
||||
action: 'PROJECT_TAGGING',
|
||||
entityType: 'Project',
|
||||
entityId: anonymizedProject.project_id,
|
||||
model,
|
||||
promptTokens: 0,
|
||||
completionTokens: 0,
|
||||
totalTokens: 0,
|
||||
batchSize: 1,
|
||||
itemsProcessed: 0,
|
||||
status: 'ERROR',
|
||||
errorMessage: error instanceof Error ? error.message : 'Unknown error',
|
||||
})
|
||||
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
// ─── Public API ──────────────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Tag a single project with AI-suggested expertise tags
|
||||
*
|
||||
* Behavior:
|
||||
* - Only applies tags with confidence >= 0.5
|
||||
* - Additive only - never removes existing tags
|
||||
* - Respects maxTags setting
|
||||
*/
|
||||
export async function tagProject(
|
||||
projectId: string,
|
||||
userId?: string
|
||||
): Promise<TaggingResult> {
|
||||
const settings = await getTaggingSettings()
|
||||
if (!settings.enabled) {
|
||||
return {
|
||||
projectId,
|
||||
suggestions: [],
|
||||
applied: [],
|
||||
tokensUsed: 0,
|
||||
}
|
||||
}
|
||||
|
||||
// Fetch project with needed fields
|
||||
const project = await prisma.project.findUnique({
|
||||
where: { id: projectId },
|
||||
include: {
|
||||
projectTags: true,
|
||||
files: { select: { fileType: true } },
|
||||
_count: { select: { teamMembers: true, files: true } },
|
||||
},
|
||||
})
|
||||
|
||||
if (!project) {
|
||||
throw new Error(`Project not found: ${projectId}`)
|
||||
}
|
||||
|
||||
// Get available tags
|
||||
const availableTags = await getAvailableTags()
|
||||
if (availableTags.length === 0) {
|
||||
return {
|
||||
projectId,
|
||||
suggestions: [],
|
||||
applied: [],
|
||||
tokensUsed: 0,
|
||||
}
|
||||
}
|
||||
|
||||
// Anonymize project data
|
||||
const projectWithRelations = toProjectWithRelations(project)
|
||||
const { anonymized, mappings } = anonymizeProjectsForAI([projectWithRelations], 'FILTERING')
|
||||
|
||||
// Validate anonymization
|
||||
if (!validateAnonymizedProjects(anonymized)) {
|
||||
throw new Error('GDPR compliance check failed: PII detected in anonymized data')
|
||||
}
|
||||
|
||||
// Get AI suggestions
|
||||
const { suggestions, tokensUsed } = await getAISuggestions(
|
||||
anonymized[0],
|
||||
availableTags,
|
||||
userId
|
||||
)
|
||||
|
||||
// Filter by confidence threshold
|
||||
const validSuggestions = suggestions.filter(
|
||||
(s) => s.confidence >= CONFIDENCE_THRESHOLD
|
||||
)
|
||||
|
||||
// Get existing tag IDs to avoid duplicates
|
||||
const existingTagIds = new Set(project.projectTags.map((pt) => pt.tagId))
|
||||
|
||||
// Calculate how many more tags we can add
|
||||
const currentTagCount = project.projectTags.length
|
||||
const remainingSlots = Math.max(0, settings.maxTags - currentTagCount)
|
||||
|
||||
// Filter out existing tags and limit to remaining slots
|
||||
const newSuggestions = validSuggestions
|
||||
.filter((s) => !existingTagIds.has(s.tagId))
|
||||
.slice(0, remainingSlots)
|
||||
|
||||
// Apply new tags
|
||||
const applied: TagSuggestion[] = []
|
||||
for (const suggestion of newSuggestions) {
|
||||
try {
|
||||
await prisma.projectTag.create({
|
||||
data: {
|
||||
projectId,
|
||||
tagId: suggestion.tagId,
|
||||
confidence: suggestion.confidence,
|
||||
source: 'AI',
|
||||
},
|
||||
})
|
||||
applied.push(suggestion)
|
||||
} catch (error) {
|
||||
// Skip if tag already exists (race condition)
|
||||
console.warn(`[AI Tagging] Failed to apply tag ${suggestion.tagName}: ${error}`)
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
projectId,
|
||||
suggestions,
|
||||
applied,
|
||||
tokensUsed,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get tag suggestions for a project without applying them
|
||||
* Useful for preview/review before applying
|
||||
*/
|
||||
export async function getTagSuggestions(
|
||||
projectId: string,
|
||||
userId?: string
|
||||
): Promise<TagSuggestion[]> {
|
||||
// Fetch project
|
||||
const project = await prisma.project.findUnique({
|
||||
where: { id: projectId },
|
||||
include: {
|
||||
files: { select: { fileType: true } },
|
||||
_count: { select: { teamMembers: true, files: true } },
|
||||
},
|
||||
})
|
||||
|
||||
if (!project) {
|
||||
throw new Error(`Project not found: ${projectId}`)
|
||||
}
|
||||
|
||||
// Get available tags
|
||||
const availableTags = await getAvailableTags()
|
||||
if (availableTags.length === 0) {
|
||||
return []
|
||||
}
|
||||
|
||||
// Anonymize project data
|
||||
const projectWithRelations = toProjectWithRelations(project)
|
||||
const { anonymized } = anonymizeProjectsForAI([projectWithRelations], 'FILTERING')
|
||||
|
||||
// Validate anonymization
|
||||
if (!validateAnonymizedProjects(anonymized)) {
|
||||
throw new Error('GDPR compliance check failed')
|
||||
}
|
||||
|
||||
// Get AI suggestions
|
||||
const { suggestions } = await getAISuggestions(anonymized[0], availableTags, userId)
|
||||
|
||||
return suggestions
|
||||
}
|
||||
|
||||
/**
|
||||
* Manually add a tag to a project
|
||||
*/
|
||||
export async function addProjectTag(
|
||||
projectId: string,
|
||||
tagId: string
|
||||
): Promise<void> {
|
||||
await prisma.projectTag.upsert({
|
||||
where: { projectId_tagId: { projectId, tagId } },
|
||||
create: { projectId, tagId, source: 'MANUAL', confidence: 1.0 },
|
||||
update: { source: 'MANUAL', confidence: 1.0 },
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove a tag from a project
|
||||
*/
|
||||
export async function removeProjectTag(
|
||||
projectId: string,
|
||||
tagId: string
|
||||
): Promise<void> {
|
||||
await prisma.projectTag.deleteMany({
|
||||
where: { projectId, tagId },
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,184 +1,184 @@
|
||||
import { prisma } from '@/lib/prisma'
|
||||
import {
|
||||
applyAutoTagRules,
|
||||
aiInterpretCriteria,
|
||||
type AutoTagRule,
|
||||
} from './ai-award-eligibility'
|
||||
|
||||
// Number of projects sent to the AI per call; bounds request size and latency.
const BATCH_SIZE = 20
|
||||
|
||||
/**
|
||||
* Process eligibility for an award in the background.
|
||||
* Updates progress in the database as it goes so the frontend can poll.
|
||||
*/
|
||||
export async function processEligibilityJob(
|
||||
awardId: string,
|
||||
includeSubmitted: boolean,
|
||||
userId: string
|
||||
): Promise<void> {
|
||||
try {
|
||||
// Mark job as PROCESSING
|
||||
const award = await prisma.specialAward.findUniqueOrThrow({
|
||||
where: { id: awardId },
|
||||
include: { program: true },
|
||||
})
|
||||
|
||||
// Get projects
|
||||
const statusFilter = includeSubmitted
|
||||
? (['SUBMITTED', 'ELIGIBLE', 'ASSIGNED', 'SEMIFINALIST', 'FINALIST'] as const)
|
||||
: (['ELIGIBLE', 'ASSIGNED', 'SEMIFINALIST', 'FINALIST'] as const)
|
||||
|
||||
const projects = await prisma.project.findMany({
|
||||
where: {
|
||||
programId: award.programId,
|
||||
status: { in: [...statusFilter] },
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
title: true,
|
||||
description: true,
|
||||
competitionCategory: true,
|
||||
country: true,
|
||||
geographicZone: true,
|
||||
tags: true,
|
||||
oceanIssue: true,
|
||||
},
|
||||
})
|
||||
|
||||
if (projects.length === 0) {
|
||||
await prisma.specialAward.update({
|
||||
where: { id: awardId },
|
||||
data: {
|
||||
eligibilityJobStatus: 'COMPLETED',
|
||||
eligibilityJobTotal: 0,
|
||||
eligibilityJobDone: 0,
|
||||
},
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
await prisma.specialAward.update({
|
||||
where: { id: awardId },
|
||||
data: {
|
||||
eligibilityJobStatus: 'PROCESSING',
|
||||
eligibilityJobTotal: projects.length,
|
||||
eligibilityJobDone: 0,
|
||||
eligibilityJobError: null,
|
||||
eligibilityJobStarted: new Date(),
|
||||
},
|
||||
})
|
||||
|
||||
// Phase 1: Auto-tag rules (deterministic, fast)
|
||||
const autoTagRules = award.autoTagRulesJson as unknown as AutoTagRule[] | null
|
||||
let autoResults: Map<string, boolean> | undefined
|
||||
if (autoTagRules && Array.isArray(autoTagRules) && autoTagRules.length > 0) {
|
||||
autoResults = applyAutoTagRules(autoTagRules, projects)
|
||||
}
|
||||
|
||||
// Phase 2: AI interpretation (if criteria text exists AND AI eligibility is enabled)
|
||||
// Process in batches to avoid timeouts
|
||||
let aiResults: Map<string, { eligible: boolean; confidence: number; reasoning: string }> | undefined
|
||||
|
||||
if (award.criteriaText && award.useAiEligibility) {
|
||||
aiResults = new Map()
|
||||
|
||||
for (let i = 0; i < projects.length; i += BATCH_SIZE) {
|
||||
const batch = projects.slice(i, i + BATCH_SIZE)
|
||||
const aiEvals = await aiInterpretCriteria(award.criteriaText, batch)
|
||||
|
||||
for (const e of aiEvals) {
|
||||
aiResults.set(e.projectId, {
|
||||
eligible: e.eligible,
|
||||
confidence: e.confidence,
|
||||
reasoning: e.reasoning,
|
||||
})
|
||||
}
|
||||
|
||||
// Update progress
|
||||
await prisma.specialAward.update({
|
||||
where: { id: awardId },
|
||||
data: {
|
||||
eligibilityJobDone: Math.min(i + BATCH_SIZE, projects.length),
|
||||
},
|
||||
})
|
||||
}
|
||||
} else {
|
||||
// No AI needed, mark all as done
|
||||
await prisma.specialAward.update({
|
||||
where: { id: awardId },
|
||||
data: { eligibilityJobDone: projects.length },
|
||||
})
|
||||
}
|
||||
|
||||
// Combine results: auto-tag AND AI must agree (or just one if only one configured)
|
||||
const eligibilities = projects.map((project) => {
|
||||
const autoEligible = autoResults?.get(project.id) ?? true
|
||||
const aiEval = aiResults?.get(project.id)
|
||||
const aiEligible = aiEval?.eligible ?? true
|
||||
|
||||
const eligible = autoEligible && aiEligible
|
||||
const method = autoResults && aiResults ? 'AUTO' : autoResults ? 'AUTO' : 'MANUAL'
|
||||
|
||||
return {
|
||||
projectId: project.id,
|
||||
eligible,
|
||||
method,
|
||||
aiReasoningJson: aiEval
|
||||
? { confidence: aiEval.confidence, reasoning: aiEval.reasoning }
|
||||
: null,
|
||||
}
|
||||
})
|
||||
|
||||
// Upsert eligibilities
|
||||
await prisma.$transaction(
|
||||
eligibilities.map((e) =>
|
||||
prisma.awardEligibility.upsert({
|
||||
where: {
|
||||
awardId_projectId: {
|
||||
awardId,
|
||||
projectId: e.projectId,
|
||||
},
|
||||
},
|
||||
create: {
|
||||
awardId,
|
||||
projectId: e.projectId,
|
||||
eligible: e.eligible,
|
||||
method: e.method as 'AUTO' | 'MANUAL',
|
||||
aiReasoningJson: e.aiReasoningJson ?? undefined,
|
||||
},
|
||||
update: {
|
||||
eligible: e.eligible,
|
||||
method: e.method as 'AUTO' | 'MANUAL',
|
||||
aiReasoningJson: e.aiReasoningJson ?? undefined,
|
||||
overriddenBy: null,
|
||||
overriddenAt: null,
|
||||
},
|
||||
})
|
||||
)
|
||||
)
|
||||
|
||||
// Mark as completed
|
||||
await prisma.specialAward.update({
|
||||
where: { id: awardId },
|
||||
data: {
|
||||
eligibilityJobStatus: 'COMPLETED',
|
||||
eligibilityJobDone: projects.length,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
// Mark as failed
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
|
||||
try {
|
||||
await prisma.specialAward.update({
|
||||
where: { id: awardId },
|
||||
data: {
|
||||
eligibilityJobStatus: 'FAILED',
|
||||
eligibilityJobError: errorMessage,
|
||||
},
|
||||
})
|
||||
} catch {
|
||||
// If we can't even update the status, log and give up
|
||||
console.error('Failed to update eligibility job status:', error)
|
||||
}
|
||||
}
|
||||
}
|
||||
import { prisma } from '@/lib/prisma'
|
||||
import {
|
||||
applyAutoTagRules,
|
||||
aiInterpretCriteria,
|
||||
type AutoTagRule,
|
||||
} from './ai-award-eligibility'
|
||||
|
||||
const BATCH_SIZE = 20
|
||||
|
||||
/**
|
||||
* Process eligibility for an award in the background.
|
||||
* Updates progress in the database as it goes so the frontend can poll.
|
||||
*/
|
||||
export async function processEligibilityJob(
|
||||
awardId: string,
|
||||
includeSubmitted: boolean,
|
||||
userId: string
|
||||
): Promise<void> {
|
||||
try {
|
||||
// Mark job as PROCESSING
|
||||
const award = await prisma.specialAward.findUniqueOrThrow({
|
||||
where: { id: awardId },
|
||||
include: { program: true },
|
||||
})
|
||||
|
||||
// Get projects
|
||||
const statusFilter = includeSubmitted
|
||||
? (['SUBMITTED', 'ELIGIBLE', 'ASSIGNED', 'SEMIFINALIST', 'FINALIST'] as const)
|
||||
: (['ELIGIBLE', 'ASSIGNED', 'SEMIFINALIST', 'FINALIST'] as const)
|
||||
|
||||
const projects = await prisma.project.findMany({
|
||||
where: {
|
||||
programId: award.programId,
|
||||
status: { in: [...statusFilter] },
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
title: true,
|
||||
description: true,
|
||||
competitionCategory: true,
|
||||
country: true,
|
||||
geographicZone: true,
|
||||
tags: true,
|
||||
oceanIssue: true,
|
||||
},
|
||||
})
|
||||
|
||||
if (projects.length === 0) {
|
||||
await prisma.specialAward.update({
|
||||
where: { id: awardId },
|
||||
data: {
|
||||
eligibilityJobStatus: 'COMPLETED',
|
||||
eligibilityJobTotal: 0,
|
||||
eligibilityJobDone: 0,
|
||||
},
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
await prisma.specialAward.update({
|
||||
where: { id: awardId },
|
||||
data: {
|
||||
eligibilityJobStatus: 'PROCESSING',
|
||||
eligibilityJobTotal: projects.length,
|
||||
eligibilityJobDone: 0,
|
||||
eligibilityJobError: null,
|
||||
eligibilityJobStarted: new Date(),
|
||||
},
|
||||
})
|
||||
|
||||
// Phase 1: Auto-tag rules (deterministic, fast)
|
||||
const autoTagRules = award.autoTagRulesJson as unknown as AutoTagRule[] | null
|
||||
let autoResults: Map<string, boolean> | undefined
|
||||
if (autoTagRules && Array.isArray(autoTagRules) && autoTagRules.length > 0) {
|
||||
autoResults = applyAutoTagRules(autoTagRules, projects)
|
||||
}
|
||||
|
||||
// Phase 2: AI interpretation (if criteria text exists AND AI eligibility is enabled)
|
||||
// Process in batches to avoid timeouts
|
||||
let aiResults: Map<string, { eligible: boolean; confidence: number; reasoning: string }> | undefined
|
||||
|
||||
if (award.criteriaText && award.useAiEligibility) {
|
||||
aiResults = new Map()
|
||||
|
||||
for (let i = 0; i < projects.length; i += BATCH_SIZE) {
|
||||
const batch = projects.slice(i, i + BATCH_SIZE)
|
||||
const aiEvals = await aiInterpretCriteria(award.criteriaText, batch)
|
||||
|
||||
for (const e of aiEvals) {
|
||||
aiResults.set(e.projectId, {
|
||||
eligible: e.eligible,
|
||||
confidence: e.confidence,
|
||||
reasoning: e.reasoning,
|
||||
})
|
||||
}
|
||||
|
||||
// Update progress
|
||||
await prisma.specialAward.update({
|
||||
where: { id: awardId },
|
||||
data: {
|
||||
eligibilityJobDone: Math.min(i + BATCH_SIZE, projects.length),
|
||||
},
|
||||
})
|
||||
}
|
||||
} else {
|
||||
// No AI needed, mark all as done
|
||||
await prisma.specialAward.update({
|
||||
where: { id: awardId },
|
||||
data: { eligibilityJobDone: projects.length },
|
||||
})
|
||||
}
|
||||
|
||||
// Combine results: auto-tag AND AI must agree (or just one if only one configured)
|
||||
const eligibilities = projects.map((project) => {
|
||||
const autoEligible = autoResults?.get(project.id) ?? true
|
||||
const aiEval = aiResults?.get(project.id)
|
||||
const aiEligible = aiEval?.eligible ?? true
|
||||
|
||||
const eligible = autoEligible && aiEligible
|
||||
const method = autoResults && aiResults ? 'AUTO' : autoResults ? 'AUTO' : 'MANUAL'
|
||||
|
||||
return {
|
||||
projectId: project.id,
|
||||
eligible,
|
||||
method,
|
||||
aiReasoningJson: aiEval
|
||||
? { confidence: aiEval.confidence, reasoning: aiEval.reasoning }
|
||||
: null,
|
||||
}
|
||||
})
|
||||
|
||||
// Upsert eligibilities
|
||||
await prisma.$transaction(
|
||||
eligibilities.map((e) =>
|
||||
prisma.awardEligibility.upsert({
|
||||
where: {
|
||||
awardId_projectId: {
|
||||
awardId,
|
||||
projectId: e.projectId,
|
||||
},
|
||||
},
|
||||
create: {
|
||||
awardId,
|
||||
projectId: e.projectId,
|
||||
eligible: e.eligible,
|
||||
method: e.method as 'AUTO' | 'MANUAL',
|
||||
aiReasoningJson: e.aiReasoningJson ?? undefined,
|
||||
},
|
||||
update: {
|
||||
eligible: e.eligible,
|
||||
method: e.method as 'AUTO' | 'MANUAL',
|
||||
aiReasoningJson: e.aiReasoningJson ?? undefined,
|
||||
overriddenBy: null,
|
||||
overriddenAt: null,
|
||||
},
|
||||
})
|
||||
)
|
||||
)
|
||||
|
||||
// Mark as completed
|
||||
await prisma.specialAward.update({
|
||||
where: { id: awardId },
|
||||
data: {
|
||||
eligibilityJobStatus: 'COMPLETED',
|
||||
eligibilityJobDone: projects.length,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
// Mark as failed
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
|
||||
try {
|
||||
await prisma.specialAward.update({
|
||||
where: { id: awardId },
|
||||
data: {
|
||||
eligibilityJobStatus: 'FAILED',
|
||||
eligibilityJobError: errorMessage,
|
||||
},
|
||||
})
|
||||
} catch {
|
||||
// If we can't even update the status, log and give up
|
||||
console.error('Failed to update eligibility job status:', error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,276 +1,276 @@
|
||||
import { prisma } from '@/lib/prisma'
|
||||
import { sendStyledNotificationEmail } from '@/lib/email'
|
||||
|
||||
interface DigestResult {
|
||||
sent: number
|
||||
errors: number
|
||||
}
|
||||
|
||||
interface DigestSection {
|
||||
title: string
|
||||
items: string[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Process and send email digests for all opted-in users.
|
||||
* Called by cron endpoint.
|
||||
*/
|
||||
export async function processDigests(
|
||||
type: 'daily' | 'weekly'
|
||||
): Promise<DigestResult> {
|
||||
let sent = 0
|
||||
let errors = 0
|
||||
|
||||
// Check if digest feature is enabled
|
||||
const enabledSetting = await prisma.systemSettings.findUnique({
|
||||
where: { key: 'digest_enabled' },
|
||||
})
|
||||
if (enabledSetting?.value === 'false') {
|
||||
return { sent: 0, errors: 0 }
|
||||
}
|
||||
|
||||
// Find users who opted in for this digest frequency
|
||||
const users = await prisma.user.findMany({
|
||||
where: {
|
||||
digestFrequency: type,
|
||||
status: 'ACTIVE',
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
name: true,
|
||||
email: true,
|
||||
},
|
||||
})
|
||||
|
||||
if (users.length === 0) {
|
||||
return { sent: 0, errors: 0 }
|
||||
}
|
||||
|
||||
// Load enabled sections from settings
|
||||
const sectionsSetting = await prisma.systemSettings.findUnique({
|
||||
where: { key: 'digest_sections' },
|
||||
})
|
||||
const enabledSections: string[] = sectionsSetting?.value
|
||||
? JSON.parse(sectionsSetting.value)
|
||||
: ['pending_evaluations', 'upcoming_deadlines', 'new_assignments', 'unread_notifications']
|
||||
|
||||
const baseUrl = process.env.NEXTAUTH_URL || 'https://monaco-opc.com'
|
||||
|
||||
for (const user of users) {
|
||||
try {
|
||||
const content = await getDigestContent(user.id, enabledSections)
|
||||
|
||||
// Skip if there's nothing to report
|
||||
if (content.sections.length === 0) continue
|
||||
|
||||
// Build email body from sections
|
||||
const bodyParts: string[] = []
|
||||
for (const section of content.sections) {
|
||||
bodyParts.push(`**${section.title}**`)
|
||||
for (const item of section.items) {
|
||||
bodyParts.push(`- ${item}`)
|
||||
}
|
||||
bodyParts.push('')
|
||||
}
|
||||
|
||||
await sendStyledNotificationEmail(
|
||||
user.email,
|
||||
user.name || '',
|
||||
'DIGEST',
|
||||
{
|
||||
name: user.name || undefined,
|
||||
title: `Your ${type === 'daily' ? 'Daily' : 'Weekly'} Digest`,
|
||||
message: bodyParts.join('\n'),
|
||||
linkUrl: `${baseUrl}/dashboard`,
|
||||
metadata: {
|
||||
digestType: type,
|
||||
pendingEvaluations: content.pendingEvaluations,
|
||||
upcomingDeadlines: content.upcomingDeadlines,
|
||||
newAssignments: content.newAssignments,
|
||||
unreadNotifications: content.unreadNotifications,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
// Log the digest
|
||||
await prisma.digestLog.create({
|
||||
data: {
|
||||
userId: user.id,
|
||||
digestType: type,
|
||||
contentJson: {
|
||||
pendingEvaluations: content.pendingEvaluations,
|
||||
upcomingDeadlines: content.upcomingDeadlines,
|
||||
newAssignments: content.newAssignments,
|
||||
unreadNotifications: content.unreadNotifications,
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
sent++
|
||||
} catch (error) {
|
||||
console.error(
|
||||
`[Digest] Failed to send ${type} digest to ${user.email}:`,
|
||||
error
|
||||
)
|
||||
errors++
|
||||
}
|
||||
}
|
||||
|
||||
return { sent, errors }
|
||||
}
|
||||
|
||||
/**
|
||||
* Compile digest content for a single user.
|
||||
*/
|
||||
async function getDigestContent(
|
||||
userId: string,
|
||||
enabledSections: string[]
|
||||
): Promise<{
|
||||
sections: DigestSection[]
|
||||
pendingEvaluations: number
|
||||
upcomingDeadlines: number
|
||||
newAssignments: number
|
||||
unreadNotifications: number
|
||||
}> {
|
||||
const now = new Date()
|
||||
const sections: DigestSection[] = []
|
||||
let pendingEvaluations = 0
|
||||
let upcomingDeadlines = 0
|
||||
let newAssignments = 0
|
||||
let unreadNotifications = 0
|
||||
|
||||
// 1. Pending evaluations
|
||||
if (enabledSections.includes('pending_evaluations')) {
|
||||
const pendingAssignments = await prisma.assignment.findMany({
|
||||
where: {
|
||||
userId,
|
||||
isCompleted: false,
|
||||
stage: {
|
||||
status: 'STAGE_ACTIVE',
|
||||
windowCloseAt: { gt: now },
|
||||
},
|
||||
},
|
||||
include: {
|
||||
project: { select: { id: true, title: true } },
|
||||
stage: { select: { name: true, windowCloseAt: true } },
|
||||
},
|
||||
})
|
||||
|
||||
pendingEvaluations = pendingAssignments.length
|
||||
if (pendingAssignments.length > 0) {
|
||||
sections.push({
|
||||
title: `Pending Evaluations (${pendingAssignments.length})`,
|
||||
items: pendingAssignments.map(
|
||||
(a) =>
|
||||
`${a.project.title} - ${a.stage?.name ?? 'Unknown'}${
|
||||
a.stage?.windowCloseAt
|
||||
? ` (due ${a.stage.windowCloseAt.toLocaleDateString('en-US', {
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
})})`
|
||||
: ''
|
||||
}`
|
||||
),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Upcoming deadlines (stages closing within 7 days)
|
||||
if (enabledSections.includes('upcoming_deadlines')) {
|
||||
const sevenDaysFromNow = new Date(now.getTime() + 7 * 24 * 60 * 60 * 1000)
|
||||
const upcomingStages = await prisma.stage.findMany({
|
||||
where: {
|
||||
status: 'STAGE_ACTIVE',
|
||||
windowCloseAt: {
|
||||
gt: now,
|
||||
lte: sevenDaysFromNow,
|
||||
},
|
||||
assignments: {
|
||||
some: {
|
||||
userId,
|
||||
isCompleted: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
select: {
|
||||
name: true,
|
||||
windowCloseAt: true,
|
||||
},
|
||||
})
|
||||
|
||||
upcomingDeadlines = upcomingStages.length
|
||||
if (upcomingStages.length > 0) {
|
||||
sections.push({
|
||||
title: 'Upcoming Deadlines',
|
||||
items: upcomingStages.map(
|
||||
(s) =>
|
||||
`${s.name} - ${s.windowCloseAt?.toLocaleDateString('en-US', {
|
||||
weekday: 'short',
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
hour: '2-digit',
|
||||
minute: '2-digit',
|
||||
})}`
|
||||
),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// 3. New assignments since last digest
|
||||
if (enabledSections.includes('new_assignments')) {
|
||||
const lastDigest = await prisma.digestLog.findFirst({
|
||||
where: { userId },
|
||||
orderBy: { sentAt: 'desc' },
|
||||
select: { sentAt: true },
|
||||
})
|
||||
|
||||
const sinceDate = lastDigest?.sentAt || new Date(now.getTime() - 24 * 60 * 60 * 1000)
|
||||
|
||||
const recentAssignments = await prisma.assignment.findMany({
|
||||
where: {
|
||||
userId,
|
||||
createdAt: { gt: sinceDate },
|
||||
},
|
||||
include: {
|
||||
project: { select: { id: true, title: true } },
|
||||
stage: { select: { name: true } },
|
||||
},
|
||||
})
|
||||
|
||||
newAssignments = recentAssignments.length
|
||||
if (recentAssignments.length > 0) {
|
||||
sections.push({
|
||||
title: `New Assignments (${recentAssignments.length})`,
|
||||
items: recentAssignments.map(
|
||||
(a) => `${a.project.title} - ${a.stage?.name ?? 'Unknown'}`
|
||||
),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// 4. Unread notifications count
|
||||
if (enabledSections.includes('unread_notifications')) {
|
||||
const unreadCount = await prisma.inAppNotification.count({
|
||||
where: {
|
||||
userId,
|
||||
isRead: false,
|
||||
},
|
||||
})
|
||||
|
||||
unreadNotifications = unreadCount
|
||||
if (unreadCount > 0) {
|
||||
sections.push({
|
||||
title: 'Notifications',
|
||||
items: [`You have ${unreadCount} unread notification${unreadCount !== 1 ? 's' : ''}`],
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
sections,
|
||||
pendingEvaluations,
|
||||
upcomingDeadlines,
|
||||
newAssignments,
|
||||
unreadNotifications,
|
||||
}
|
||||
}
|
||||
import { prisma } from '@/lib/prisma'
|
||||
import { sendStyledNotificationEmail } from '@/lib/email'
|
||||
|
||||
interface DigestResult {
|
||||
sent: number
|
||||
errors: number
|
||||
}
|
||||
|
||||
interface DigestSection {
|
||||
title: string
|
||||
items: string[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Process and send email digests for all opted-in users.
|
||||
* Called by cron endpoint.
|
||||
*/
|
||||
export async function processDigests(
|
||||
type: 'daily' | 'weekly'
|
||||
): Promise<DigestResult> {
|
||||
let sent = 0
|
||||
let errors = 0
|
||||
|
||||
// Check if digest feature is enabled
|
||||
const enabledSetting = await prisma.systemSettings.findUnique({
|
||||
where: { key: 'digest_enabled' },
|
||||
})
|
||||
if (enabledSetting?.value === 'false') {
|
||||
return { sent: 0, errors: 0 }
|
||||
}
|
||||
|
||||
// Find users who opted in for this digest frequency
|
||||
const users = await prisma.user.findMany({
|
||||
where: {
|
||||
digestFrequency: type,
|
||||
status: 'ACTIVE',
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
name: true,
|
||||
email: true,
|
||||
},
|
||||
})
|
||||
|
||||
if (users.length === 0) {
|
||||
return { sent: 0, errors: 0 }
|
||||
}
|
||||
|
||||
// Load enabled sections from settings
|
||||
const sectionsSetting = await prisma.systemSettings.findUnique({
|
||||
where: { key: 'digest_sections' },
|
||||
})
|
||||
const enabledSections: string[] = sectionsSetting?.value
|
||||
? JSON.parse(sectionsSetting.value)
|
||||
: ['pending_evaluations', 'upcoming_deadlines', 'new_assignments', 'unread_notifications']
|
||||
|
||||
const baseUrl = process.env.NEXTAUTH_URL || 'https://monaco-opc.com'
|
||||
|
||||
for (const user of users) {
|
||||
try {
|
||||
const content = await getDigestContent(user.id, enabledSections)
|
||||
|
||||
// Skip if there's nothing to report
|
||||
if (content.sections.length === 0) continue
|
||||
|
||||
// Build email body from sections
|
||||
const bodyParts: string[] = []
|
||||
for (const section of content.sections) {
|
||||
bodyParts.push(`**${section.title}**`)
|
||||
for (const item of section.items) {
|
||||
bodyParts.push(`- ${item}`)
|
||||
}
|
||||
bodyParts.push('')
|
||||
}
|
||||
|
||||
await sendStyledNotificationEmail(
|
||||
user.email,
|
||||
user.name || '',
|
||||
'DIGEST',
|
||||
{
|
||||
name: user.name || undefined,
|
||||
title: `Your ${type === 'daily' ? 'Daily' : 'Weekly'} Digest`,
|
||||
message: bodyParts.join('\n'),
|
||||
linkUrl: `${baseUrl}/dashboard`,
|
||||
metadata: {
|
||||
digestType: type,
|
||||
pendingEvaluations: content.pendingEvaluations,
|
||||
upcomingDeadlines: content.upcomingDeadlines,
|
||||
newAssignments: content.newAssignments,
|
||||
unreadNotifications: content.unreadNotifications,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
// Log the digest
|
||||
await prisma.digestLog.create({
|
||||
data: {
|
||||
userId: user.id,
|
||||
digestType: type,
|
||||
contentJson: {
|
||||
pendingEvaluations: content.pendingEvaluations,
|
||||
upcomingDeadlines: content.upcomingDeadlines,
|
||||
newAssignments: content.newAssignments,
|
||||
unreadNotifications: content.unreadNotifications,
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
sent++
|
||||
} catch (error) {
|
||||
console.error(
|
||||
`[Digest] Failed to send ${type} digest to ${user.email}:`,
|
||||
error
|
||||
)
|
||||
errors++
|
||||
}
|
||||
}
|
||||
|
||||
return { sent, errors }
|
||||
}
|
||||
|
||||
/**
|
||||
* Compile digest content for a single user.
|
||||
*/
|
||||
async function getDigestContent(
|
||||
userId: string,
|
||||
enabledSections: string[]
|
||||
): Promise<{
|
||||
sections: DigestSection[]
|
||||
pendingEvaluations: number
|
||||
upcomingDeadlines: number
|
||||
newAssignments: number
|
||||
unreadNotifications: number
|
||||
}> {
|
||||
const now = new Date()
|
||||
const sections: DigestSection[] = []
|
||||
let pendingEvaluations = 0
|
||||
let upcomingDeadlines = 0
|
||||
let newAssignments = 0
|
||||
let unreadNotifications = 0
|
||||
|
||||
// 1. Pending evaluations
|
||||
if (enabledSections.includes('pending_evaluations')) {
|
||||
const pendingAssignments = await prisma.assignment.findMany({
|
||||
where: {
|
||||
userId,
|
||||
isCompleted: false,
|
||||
stage: {
|
||||
status: 'STAGE_ACTIVE',
|
||||
windowCloseAt: { gt: now },
|
||||
},
|
||||
},
|
||||
include: {
|
||||
project: { select: { id: true, title: true } },
|
||||
stage: { select: { name: true, windowCloseAt: true } },
|
||||
},
|
||||
})
|
||||
|
||||
pendingEvaluations = pendingAssignments.length
|
||||
if (pendingAssignments.length > 0) {
|
||||
sections.push({
|
||||
title: `Pending Evaluations (${pendingAssignments.length})`,
|
||||
items: pendingAssignments.map(
|
||||
(a) =>
|
||||
`${a.project.title} - ${a.stage?.name ?? 'Unknown'}${
|
||||
a.stage?.windowCloseAt
|
||||
? ` (due ${a.stage.windowCloseAt.toLocaleDateString('en-US', {
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
})})`
|
||||
: ''
|
||||
}`
|
||||
),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Upcoming deadlines (stages closing within 7 days)
|
||||
if (enabledSections.includes('upcoming_deadlines')) {
|
||||
const sevenDaysFromNow = new Date(now.getTime() + 7 * 24 * 60 * 60 * 1000)
|
||||
const upcomingStages = await prisma.stage.findMany({
|
||||
where: {
|
||||
status: 'STAGE_ACTIVE',
|
||||
windowCloseAt: {
|
||||
gt: now,
|
||||
lte: sevenDaysFromNow,
|
||||
},
|
||||
assignments: {
|
||||
some: {
|
||||
userId,
|
||||
isCompleted: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
select: {
|
||||
name: true,
|
||||
windowCloseAt: true,
|
||||
},
|
||||
})
|
||||
|
||||
upcomingDeadlines = upcomingStages.length
|
||||
if (upcomingStages.length > 0) {
|
||||
sections.push({
|
||||
title: 'Upcoming Deadlines',
|
||||
items: upcomingStages.map(
|
||||
(s) =>
|
||||
`${s.name} - ${s.windowCloseAt?.toLocaleDateString('en-US', {
|
||||
weekday: 'short',
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
hour: '2-digit',
|
||||
minute: '2-digit',
|
||||
})}`
|
||||
),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// 3. New assignments since last digest
|
||||
if (enabledSections.includes('new_assignments')) {
|
||||
const lastDigest = await prisma.digestLog.findFirst({
|
||||
where: { userId },
|
||||
orderBy: { sentAt: 'desc' },
|
||||
select: { sentAt: true },
|
||||
})
|
||||
|
||||
const sinceDate = lastDigest?.sentAt || new Date(now.getTime() - 24 * 60 * 60 * 1000)
|
||||
|
||||
const recentAssignments = await prisma.assignment.findMany({
|
||||
where: {
|
||||
userId,
|
||||
createdAt: { gt: sinceDate },
|
||||
},
|
||||
include: {
|
||||
project: { select: { id: true, title: true } },
|
||||
stage: { select: { name: true } },
|
||||
},
|
||||
})
|
||||
|
||||
newAssignments = recentAssignments.length
|
||||
if (recentAssignments.length > 0) {
|
||||
sections.push({
|
||||
title: `New Assignments (${recentAssignments.length})`,
|
||||
items: recentAssignments.map(
|
||||
(a) => `${a.project.title} - ${a.stage?.name ?? 'Unknown'}`
|
||||
),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// 4. Unread notifications count
|
||||
if (enabledSections.includes('unread_notifications')) {
|
||||
const unreadCount = await prisma.inAppNotification.count({
|
||||
where: {
|
||||
userId,
|
||||
isRead: false,
|
||||
},
|
||||
})
|
||||
|
||||
unreadNotifications = unreadCount
|
||||
if (unreadCount > 0) {
|
||||
sections.push({
|
||||
title: 'Notifications',
|
||||
items: [`You have ${unreadCount} unread notification${unreadCount !== 1 ? 's' : ''}`],
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
sections,
|
||||
pendingEvaluations,
|
||||
upcomingDeadlines,
|
||||
newAssignments,
|
||||
unreadNotifications,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,178 +1,178 @@
|
||||
import { prisma } from '@/lib/prisma'
|
||||
import { sendStyledNotificationEmail } from '@/lib/email'
|
||||
|
||||
const REMINDER_TYPES = [
|
||||
{ type: '3_DAYS', thresholdMs: 3 * 24 * 60 * 60 * 1000 },
|
||||
{ type: '24H', thresholdMs: 24 * 60 * 60 * 1000 },
|
||||
{ type: '1H', thresholdMs: 60 * 60 * 1000 },
|
||||
] as const
|
||||
|
||||
type ReminderType = (typeof REMINDER_TYPES)[number]['type']
|
||||
|
||||
interface ReminderResult {
|
||||
sent: number
|
||||
errors: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Find active stages with approaching deadlines and send reminders
|
||||
* to jurors who have incomplete assignments.
|
||||
*/
|
||||
export async function processEvaluationReminders(stageId?: string): Promise<ReminderResult> {
|
||||
const now = new Date()
|
||||
let totalSent = 0
|
||||
let totalErrors = 0
|
||||
|
||||
// Find active stages with window close dates in the future
|
||||
const stages = await prisma.stage.findMany({
|
||||
where: {
|
||||
status: 'STAGE_ACTIVE',
|
||||
windowCloseAt: { gt: now },
|
||||
windowOpenAt: { lte: now },
|
||||
...(stageId && { id: stageId }),
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
name: true,
|
||||
windowCloseAt: true,
|
||||
track: { select: { name: true } },
|
||||
},
|
||||
})
|
||||
|
||||
for (const stage of stages) {
|
||||
if (!stage.windowCloseAt) continue
|
||||
|
||||
const msUntilDeadline = stage.windowCloseAt.getTime() - now.getTime()
|
||||
|
||||
// Determine which reminder types should fire for this stage
|
||||
const applicableTypes = REMINDER_TYPES.filter(
|
||||
({ thresholdMs }) => msUntilDeadline <= thresholdMs
|
||||
)
|
||||
|
||||
if (applicableTypes.length === 0) continue
|
||||
|
||||
for (const { type } of applicableTypes) {
|
||||
const result = await sendRemindersForStage(stage, type, now)
|
||||
totalSent += result.sent
|
||||
totalErrors += result.errors
|
||||
}
|
||||
}
|
||||
|
||||
return { sent: totalSent, errors: totalErrors }
|
||||
}
|
||||
|
||||
async function sendRemindersForStage(
|
||||
stage: {
|
||||
id: string
|
||||
name: string
|
||||
windowCloseAt: Date | null
|
||||
track: { name: string }
|
||||
},
|
||||
type: ReminderType,
|
||||
now: Date
|
||||
): Promise<ReminderResult> {
|
||||
let sent = 0
|
||||
let errors = 0
|
||||
|
||||
if (!stage.windowCloseAt) return { sent, errors }
|
||||
|
||||
// Find jurors with incomplete assignments for this stage
|
||||
const incompleteAssignments = await prisma.assignment.findMany({
|
||||
where: {
|
||||
stageId: stage.id,
|
||||
isCompleted: false,
|
||||
},
|
||||
select: {
|
||||
userId: true,
|
||||
},
|
||||
})
|
||||
|
||||
// Get unique user IDs with incomplete work
|
||||
const userIds = [...new Set(incompleteAssignments.map((a) => a.userId))]
|
||||
|
||||
if (userIds.length === 0) return { sent, errors }
|
||||
|
||||
// Check which users already received this reminder type for this stage
|
||||
const existingReminders = await prisma.reminderLog.findMany({
|
||||
where: {
|
||||
stageId: stage.id,
|
||||
type,
|
||||
userId: { in: userIds },
|
||||
},
|
||||
select: { userId: true },
|
||||
})
|
||||
|
||||
const alreadySent = new Set(existingReminders.map((r) => r.userId))
|
||||
const usersToNotify = userIds.filter((id) => !alreadySent.has(id))
|
||||
|
||||
if (usersToNotify.length === 0) return { sent, errors }
|
||||
|
||||
// Get user details and their pending counts
|
||||
const users = await prisma.user.findMany({
|
||||
where: { id: { in: usersToNotify } },
|
||||
select: { id: true, name: true, email: true },
|
||||
})
|
||||
|
||||
const baseUrl = process.env.NEXTAUTH_URL || 'https://monaco-opc.com'
|
||||
const deadlineStr = stage.windowCloseAt.toLocaleDateString('en-US', {
|
||||
weekday: 'long',
|
||||
year: 'numeric',
|
||||
month: 'long',
|
||||
day: 'numeric',
|
||||
hour: '2-digit',
|
||||
minute: '2-digit',
|
||||
timeZoneName: 'short',
|
||||
})
|
||||
|
||||
// Map to get pending count per user
|
||||
const pendingCounts = new Map<string, number>()
|
||||
for (const a of incompleteAssignments) {
|
||||
pendingCounts.set(a.userId, (pendingCounts.get(a.userId) || 0) + 1)
|
||||
}
|
||||
|
||||
// Select email template type based on reminder type
|
||||
const emailTemplateType = type === '1H' ? 'REMINDER_1H' : 'REMINDER_24H'
|
||||
|
||||
for (const user of users) {
|
||||
const pendingCount = pendingCounts.get(user.id) || 0
|
||||
if (pendingCount === 0) continue
|
||||
|
||||
try {
|
||||
await sendStyledNotificationEmail(
|
||||
user.email,
|
||||
user.name || '',
|
||||
emailTemplateType,
|
||||
{
|
||||
name: user.name || undefined,
|
||||
title: `Evaluation Reminder - ${stage.name}`,
|
||||
message: `You have ${pendingCount} pending evaluation${pendingCount !== 1 ? 's' : ''} for ${stage.name}.`,
|
||||
linkUrl: `${baseUrl}/jury/stages/${stage.id}/assignments`,
|
||||
metadata: {
|
||||
pendingCount,
|
||||
stageName: stage.name,
|
||||
deadline: deadlineStr,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
// Log the sent reminder
|
||||
await prisma.reminderLog.create({
|
||||
data: {
|
||||
stageId: stage.id,
|
||||
userId: user.id,
|
||||
type,
|
||||
},
|
||||
})
|
||||
|
||||
sent++
|
||||
} catch (error) {
|
||||
console.error(
|
||||
`Failed to send ${type} reminder to ${user.email} for stage ${stage.name}:`,
|
||||
error
|
||||
)
|
||||
errors++
|
||||
}
|
||||
}
|
||||
|
||||
return { sent, errors }
|
||||
}
|
||||
import { prisma } from '@/lib/prisma'
|
||||
import { sendStyledNotificationEmail } from '@/lib/email'
|
||||
|
||||
const REMINDER_TYPES = [
|
||||
{ type: '3_DAYS', thresholdMs: 3 * 24 * 60 * 60 * 1000 },
|
||||
{ type: '24H', thresholdMs: 24 * 60 * 60 * 1000 },
|
||||
{ type: '1H', thresholdMs: 60 * 60 * 1000 },
|
||||
] as const
|
||||
|
||||
type ReminderType = (typeof REMINDER_TYPES)[number]['type']
|
||||
|
||||
interface ReminderResult {
|
||||
sent: number
|
||||
errors: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Find active stages with approaching deadlines and send reminders
|
||||
* to jurors who have incomplete assignments.
|
||||
*/
|
||||
export async function processEvaluationReminders(stageId?: string): Promise<ReminderResult> {
|
||||
const now = new Date()
|
||||
let totalSent = 0
|
||||
let totalErrors = 0
|
||||
|
||||
// Find active stages with window close dates in the future
|
||||
const stages = await prisma.stage.findMany({
|
||||
where: {
|
||||
status: 'STAGE_ACTIVE',
|
||||
windowCloseAt: { gt: now },
|
||||
windowOpenAt: { lte: now },
|
||||
...(stageId && { id: stageId }),
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
name: true,
|
||||
windowCloseAt: true,
|
||||
track: { select: { name: true } },
|
||||
},
|
||||
})
|
||||
|
||||
for (const stage of stages) {
|
||||
if (!stage.windowCloseAt) continue
|
||||
|
||||
const msUntilDeadline = stage.windowCloseAt.getTime() - now.getTime()
|
||||
|
||||
// Determine which reminder types should fire for this stage
|
||||
const applicableTypes = REMINDER_TYPES.filter(
|
||||
({ thresholdMs }) => msUntilDeadline <= thresholdMs
|
||||
)
|
||||
|
||||
if (applicableTypes.length === 0) continue
|
||||
|
||||
for (const { type } of applicableTypes) {
|
||||
const result = await sendRemindersForStage(stage, type, now)
|
||||
totalSent += result.sent
|
||||
totalErrors += result.errors
|
||||
}
|
||||
}
|
||||
|
||||
return { sent: totalSent, errors: totalErrors }
|
||||
}
|
||||
|
||||
async function sendRemindersForStage(
|
||||
stage: {
|
||||
id: string
|
||||
name: string
|
||||
windowCloseAt: Date | null
|
||||
track: { name: string }
|
||||
},
|
||||
type: ReminderType,
|
||||
now: Date
|
||||
): Promise<ReminderResult> {
|
||||
let sent = 0
|
||||
let errors = 0
|
||||
|
||||
if (!stage.windowCloseAt) return { sent, errors }
|
||||
|
||||
// Find jurors with incomplete assignments for this stage
|
||||
const incompleteAssignments = await prisma.assignment.findMany({
|
||||
where: {
|
||||
stageId: stage.id,
|
||||
isCompleted: false,
|
||||
},
|
||||
select: {
|
||||
userId: true,
|
||||
},
|
||||
})
|
||||
|
||||
// Get unique user IDs with incomplete work
|
||||
const userIds = [...new Set(incompleteAssignments.map((a) => a.userId))]
|
||||
|
||||
if (userIds.length === 0) return { sent, errors }
|
||||
|
||||
// Check which users already received this reminder type for this stage
|
||||
const existingReminders = await prisma.reminderLog.findMany({
|
||||
where: {
|
||||
stageId: stage.id,
|
||||
type,
|
||||
userId: { in: userIds },
|
||||
},
|
||||
select: { userId: true },
|
||||
})
|
||||
|
||||
const alreadySent = new Set(existingReminders.map((r) => r.userId))
|
||||
const usersToNotify = userIds.filter((id) => !alreadySent.has(id))
|
||||
|
||||
if (usersToNotify.length === 0) return { sent, errors }
|
||||
|
||||
// Get user details and their pending counts
|
||||
const users = await prisma.user.findMany({
|
||||
where: { id: { in: usersToNotify } },
|
||||
select: { id: true, name: true, email: true },
|
||||
})
|
||||
|
||||
const baseUrl = process.env.NEXTAUTH_URL || 'https://monaco-opc.com'
|
||||
const deadlineStr = stage.windowCloseAt.toLocaleDateString('en-US', {
|
||||
weekday: 'long',
|
||||
year: 'numeric',
|
||||
month: 'long',
|
||||
day: 'numeric',
|
||||
hour: '2-digit',
|
||||
minute: '2-digit',
|
||||
timeZoneName: 'short',
|
||||
})
|
||||
|
||||
// Map to get pending count per user
|
||||
const pendingCounts = new Map<string, number>()
|
||||
for (const a of incompleteAssignments) {
|
||||
pendingCounts.set(a.userId, (pendingCounts.get(a.userId) || 0) + 1)
|
||||
}
|
||||
|
||||
// Select email template type based on reminder type
|
||||
const emailTemplateType = type === '1H' ? 'REMINDER_1H' : 'REMINDER_24H'
|
||||
|
||||
for (const user of users) {
|
||||
const pendingCount = pendingCounts.get(user.id) || 0
|
||||
if (pendingCount === 0) continue
|
||||
|
||||
try {
|
||||
await sendStyledNotificationEmail(
|
||||
user.email,
|
||||
user.name || '',
|
||||
emailTemplateType,
|
||||
{
|
||||
name: user.name || undefined,
|
||||
title: `Evaluation Reminder - ${stage.name}`,
|
||||
message: `You have ${pendingCount} pending evaluation${pendingCount !== 1 ? 's' : ''} for ${stage.name}.`,
|
||||
linkUrl: `${baseUrl}/jury/stages/${stage.id}/assignments`,
|
||||
metadata: {
|
||||
pendingCount,
|
||||
stageName: stage.name,
|
||||
deadline: deadlineStr,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
// Log the sent reminder
|
||||
await prisma.reminderLog.create({
|
||||
data: {
|
||||
stageId: stage.id,
|
||||
userId: user.id,
|
||||
type,
|
||||
},
|
||||
})
|
||||
|
||||
sent++
|
||||
} catch (error) {
|
||||
console.error(
|
||||
`Failed to send ${type} reminder to ${user.email} for stage ${stage.name}:`,
|
||||
error
|
||||
)
|
||||
errors++
|
||||
}
|
||||
}
|
||||
|
||||
return { sent, errors }
|
||||
}
|
||||
|
||||
@@ -1,482 +1,482 @@
|
||||
/**
|
||||
* In-App Notification Service
|
||||
*
|
||||
* Creates and manages in-app notifications for users.
|
||||
* Optionally sends email notifications based on admin settings.
|
||||
*/
|
||||
|
||||
import { prisma } from '@/lib/prisma'
|
||||
import { sendStyledNotificationEmail } from '@/lib/email'
|
||||
|
||||
// Notification priority levels
|
||||
export type NotificationPriority = 'low' | 'normal' | 'high' | 'urgent'
|
||||
|
||||
// Notification type constants
|
||||
export const NotificationTypes = {
|
||||
// Admin notifications
|
||||
FILTERING_COMPLETE: 'FILTERING_COMPLETE',
|
||||
FILTERING_FAILED: 'FILTERING_FAILED',
|
||||
AI_SUGGESTIONS_READY: 'AI_SUGGESTIONS_READY',
|
||||
NEW_APPLICATION: 'NEW_APPLICATION',
|
||||
BULK_APPLICATIONS: 'BULK_APPLICATIONS',
|
||||
DOCUMENTS_UPLOADED: 'DOCUMENTS_UPLOADED',
|
||||
EVALUATION_MILESTONE: 'EVALUATION_MILESTONE',
|
||||
ALL_EVALUATIONS_DONE: 'ALL_EVALUATIONS_DONE',
|
||||
JURY_INACTIVE: 'JURY_INACTIVE',
|
||||
DEADLINE_24H: 'DEADLINE_24H',
|
||||
DEADLINE_1H: 'DEADLINE_1H',
|
||||
ROUND_AUTO_CLOSED: 'ROUND_AUTO_CLOSED',
|
||||
EXPORT_READY: 'EXPORT_READY',
|
||||
SYSTEM_ERROR: 'SYSTEM_ERROR',
|
||||
|
||||
// Jury notifications
|
||||
ASSIGNED_TO_PROJECT: 'ASSIGNED_TO_PROJECT',
|
||||
BATCH_ASSIGNED: 'BATCH_ASSIGNED',
|
||||
PROJECT_UPDATED: 'PROJECT_UPDATED',
|
||||
ROUND_NOW_OPEN: 'ROUND_NOW_OPEN',
|
||||
REMINDER_3_DAYS: 'REMINDER_3_DAYS',
|
||||
REMINDER_24H: 'REMINDER_24H',
|
||||
REMINDER_1H: 'REMINDER_1H',
|
||||
ROUND_EXTENDED: 'ROUND_EXTENDED',
|
||||
ROUND_CLOSED: 'ROUND_CLOSED',
|
||||
THANK_YOU: 'THANK_YOU',
|
||||
RESULTS_AVAILABLE: 'RESULTS_AVAILABLE',
|
||||
|
||||
// Jury - Award specific
|
||||
AWARD_JURY_SELECTED: 'AWARD_JURY_SELECTED',
|
||||
AWARD_VOTING_OPEN: 'AWARD_VOTING_OPEN',
|
||||
AWARD_REMINDER: 'AWARD_REMINDER',
|
||||
AWARD_RESULTS: 'AWARD_RESULTS',
|
||||
|
||||
// Mentor notifications
|
||||
MENTEE_ASSIGNED: 'MENTEE_ASSIGNED',
|
||||
MENTEE_BATCH_ASSIGNED: 'MENTEE_BATCH_ASSIGNED',
|
||||
MENTEE_INTRO: 'MENTEE_INTRO',
|
||||
MENTEE_UPLOADED_DOCS: 'MENTEE_UPLOADED_DOCS',
|
||||
MENTEE_UPDATED_PROJECT: 'MENTEE_UPDATED_PROJECT',
|
||||
MENTEE_ADVANCED: 'MENTEE_ADVANCED',
|
||||
MENTEE_FINALIST: 'MENTEE_FINALIST',
|
||||
MENTEE_WON: 'MENTEE_WON',
|
||||
MENTEE_ELIMINATED: 'MENTEE_ELIMINATED',
|
||||
MENTORSHIP_TIP: 'MENTORSHIP_TIP',
|
||||
NEW_RESOURCE: 'NEW_RESOURCE',
|
||||
|
||||
// Team/Applicant notifications
|
||||
APPLICATION_SUBMITTED: 'APPLICATION_SUBMITTED',
|
||||
APPLICATION_INCOMPLETE: 'APPLICATION_INCOMPLETE',
|
||||
TEAM_INVITE_RECEIVED: 'TEAM_INVITE_RECEIVED',
|
||||
TEAM_MEMBER_JOINED: 'TEAM_MEMBER_JOINED',
|
||||
TEAM_MEMBER_LEFT: 'TEAM_MEMBER_LEFT',
|
||||
DOCUMENTS_RECEIVED: 'DOCUMENTS_RECEIVED',
|
||||
REVIEW_IN_PROGRESS: 'REVIEW_IN_PROGRESS',
|
||||
ADVANCED_SEMIFINAL: 'ADVANCED_SEMIFINAL',
|
||||
ADVANCED_FINAL: 'ADVANCED_FINAL',
|
||||
MENTOR_ASSIGNED: 'MENTOR_ASSIGNED',
|
||||
MENTOR_MESSAGE: 'MENTOR_MESSAGE',
|
||||
NOT_SELECTED: 'NOT_SELECTED',
|
||||
FEEDBACK_AVAILABLE: 'FEEDBACK_AVAILABLE',
|
||||
EVENT_INVITATION: 'EVENT_INVITATION',
|
||||
WINNER_ANNOUNCEMENT: 'WINNER_ANNOUNCEMENT',
|
||||
SUBMISSION_RECEIVED: 'SUBMISSION_RECEIVED',
|
||||
CERTIFICATE_READY: 'CERTIFICATE_READY',
|
||||
PROGRAM_NEWSLETTER: 'PROGRAM_NEWSLETTER',
|
||||
|
||||
// Observer notifications
|
||||
ROUND_STARTED: 'ROUND_STARTED',
|
||||
ROUND_PROGRESS: 'ROUND_PROGRESS',
|
||||
ROUND_COMPLETED: 'ROUND_COMPLETED',
|
||||
FINALISTS_ANNOUNCED: 'FINALISTS_ANNOUNCED',
|
||||
WINNERS_ANNOUNCED: 'WINNERS_ANNOUNCED',
|
||||
REPORT_AVAILABLE: 'REPORT_AVAILABLE',
|
||||
} as const
|
||||
|
||||
export type NotificationType = (typeof NotificationTypes)[keyof typeof NotificationTypes]
|
||||
|
||||
// Notification icons by type
// Maps a notification type to an icon name (values match lucide-style icon
// identifiers used by the UI). Types not listed here fall back to 'Bell' in
// createNotification / createBulkNotifications.
export const NotificationIcons: Record<string, string> = {
  [NotificationTypes.FILTERING_COMPLETE]: 'Brain',
  [NotificationTypes.FILTERING_FAILED]: 'AlertTriangle',
  [NotificationTypes.NEW_APPLICATION]: 'FileText',
  [NotificationTypes.BULK_APPLICATIONS]: 'Files',
  [NotificationTypes.DOCUMENTS_UPLOADED]: 'Upload',
  [NotificationTypes.ASSIGNED_TO_PROJECT]: 'ClipboardList',
  [NotificationTypes.ROUND_NOW_OPEN]: 'PlayCircle',
  [NotificationTypes.REMINDER_24H]: 'Clock',
  [NotificationTypes.REMINDER_1H]: 'AlertCircle',
  [NotificationTypes.ROUND_CLOSED]: 'Lock',
  [NotificationTypes.MENTEE_ASSIGNED]: 'Users',
  [NotificationTypes.MENTEE_ADVANCED]: 'TrendingUp',
  [NotificationTypes.MENTEE_WON]: 'Trophy',
  [NotificationTypes.APPLICATION_SUBMITTED]: 'CheckCircle',
  [NotificationTypes.SUBMISSION_RECEIVED]: 'Inbox',
  [NotificationTypes.ADVANCED_SEMIFINAL]: 'TrendingUp',
  [NotificationTypes.ADVANCED_FINAL]: 'Star',
  [NotificationTypes.MENTOR_ASSIGNED]: 'GraduationCap',
  [NotificationTypes.WINNER_ANNOUNCEMENT]: 'Trophy',
  [NotificationTypes.AWARD_VOTING_OPEN]: 'Vote',
  [NotificationTypes.AWARD_RESULTS]: 'Trophy',
}
|
||||
|
||||
// Priority by notification type
// Types not listed here fall back to 'normal' (see createNotification /
// createBulkNotifications). 'urgent' is reserved for failures and
// last-hour deadlines.
export const NotificationPriorities: Record<string, NotificationPriority> = {
  [NotificationTypes.FILTERING_COMPLETE]: 'high',
  [NotificationTypes.FILTERING_FAILED]: 'urgent',
  [NotificationTypes.DEADLINE_1H]: 'urgent',
  [NotificationTypes.REMINDER_1H]: 'urgent',
  [NotificationTypes.SYSTEM_ERROR]: 'urgent',
  [NotificationTypes.ASSIGNED_TO_PROJECT]: 'high',
  [NotificationTypes.ROUND_NOW_OPEN]: 'high',
  [NotificationTypes.DEADLINE_24H]: 'high',
  [NotificationTypes.REMINDER_24H]: 'high',
  [NotificationTypes.MENTEE_ASSIGNED]: 'high',
  [NotificationTypes.APPLICATION_SUBMITTED]: 'high',
  [NotificationTypes.ADVANCED_SEMIFINAL]: 'high',
  [NotificationTypes.ADVANCED_FINAL]: 'high',
  [NotificationTypes.WINNER_ANNOUNCEMENT]: 'high',
  [NotificationTypes.AWARD_VOTING_OPEN]: 'high',
}
|
||||
|
||||
// Input for createNotification. `icon` and `priority` fall back to the
// per-type defaults in NotificationIcons / NotificationPriorities when omitted.
interface CreateNotificationParams {
  userId: string
  type: string
  title: string
  message: string
  linkUrl?: string // in-app destination for the notification's link
  linkLabel?: string // label shown for linkUrl
  icon?: string // icon name; defaults by type, else 'Bell'
  priority?: NotificationPriority // defaults by type, else 'normal'
  metadata?: Record<string, unknown> // extra payload for templates/grouping
  groupKey?: string // unread notifications sharing a key within 1h are batched
  expiresAt?: Date // eligible for deleteExpiredNotifications after this time
}
|
||||
|
||||
/**
|
||||
* Create a single in-app notification
|
||||
*/
|
||||
export async function createNotification(
|
||||
params: CreateNotificationParams
|
||||
): Promise<void> {
|
||||
const {
|
||||
userId,
|
||||
type,
|
||||
title,
|
||||
message,
|
||||
linkUrl,
|
||||
linkLabel,
|
||||
icon,
|
||||
priority,
|
||||
metadata,
|
||||
groupKey,
|
||||
expiresAt,
|
||||
} = params
|
||||
|
||||
// Determine icon and priority if not provided
|
||||
const finalIcon = icon || NotificationIcons[type] || 'Bell'
|
||||
const finalPriority = priority || NotificationPriorities[type] || 'normal'
|
||||
|
||||
// Check for existing notification with same groupKey (for batching)
|
||||
if (groupKey) {
|
||||
const existingNotification = await prisma.inAppNotification.findFirst({
|
||||
where: {
|
||||
userId,
|
||||
groupKey,
|
||||
isRead: false,
|
||||
createdAt: {
|
||||
gte: new Date(Date.now() - 60 * 60 * 1000), // Within last hour
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
if (existingNotification) {
|
||||
// Update existing notification instead of creating new one
|
||||
const existingMeta = existingNotification.metadata as Record<string, unknown> || {}
|
||||
const currentCount = (existingMeta.count as number) || 1
|
||||
await prisma.inAppNotification.update({
|
||||
where: { id: existingNotification.id },
|
||||
data: {
|
||||
message,
|
||||
metadata: { ...existingMeta, ...metadata, count: currentCount + 1 },
|
||||
createdAt: new Date(), // Bump to top
|
||||
},
|
||||
})
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Create the in-app notification
|
||||
await prisma.inAppNotification.create({
|
||||
data: {
|
||||
userId,
|
||||
type,
|
||||
title,
|
||||
message,
|
||||
linkUrl,
|
||||
linkLabel,
|
||||
icon: finalIcon,
|
||||
priority: finalPriority,
|
||||
metadata: metadata as object | undefined,
|
||||
groupKey,
|
||||
expiresAt,
|
||||
},
|
||||
})
|
||||
|
||||
// Check if we should also send an email
|
||||
await maybeSendEmail(userId, type, title, message, linkUrl, metadata)
|
||||
}
|
||||
|
||||
/**
|
||||
* Create notifications for multiple users
|
||||
*/
|
||||
export async function createBulkNotifications(params: {
|
||||
userIds: string[]
|
||||
type: string
|
||||
title: string
|
||||
message: string
|
||||
linkUrl?: string
|
||||
linkLabel?: string
|
||||
icon?: string
|
||||
priority?: NotificationPriority
|
||||
metadata?: Record<string, unknown>
|
||||
}): Promise<void> {
|
||||
const {
|
||||
userIds,
|
||||
type,
|
||||
title,
|
||||
message,
|
||||
linkUrl,
|
||||
linkLabel,
|
||||
icon,
|
||||
priority,
|
||||
metadata,
|
||||
} = params
|
||||
|
||||
const finalIcon = icon || NotificationIcons[type] || 'Bell'
|
||||
const finalPriority = priority || NotificationPriorities[type] || 'normal'
|
||||
|
||||
// Create notifications in bulk
|
||||
await prisma.inAppNotification.createMany({
|
||||
data: userIds.map((userId) => ({
|
||||
userId,
|
||||
type,
|
||||
title,
|
||||
message,
|
||||
linkUrl,
|
||||
linkLabel,
|
||||
icon: finalIcon,
|
||||
priority: finalPriority,
|
||||
metadata: metadata as object | undefined,
|
||||
})),
|
||||
})
|
||||
|
||||
// Check email settings and send emails
|
||||
for (const userId of userIds) {
|
||||
await maybeSendEmail(userId, type, title, message, linkUrl, metadata)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Notify all admin users
|
||||
*/
|
||||
export async function notifyAdmins(params: {
|
||||
type: string
|
||||
title: string
|
||||
message: string
|
||||
linkUrl?: string
|
||||
linkLabel?: string
|
||||
icon?: string
|
||||
priority?: NotificationPriority
|
||||
metadata?: Record<string, unknown>
|
||||
}): Promise<void> {
|
||||
const admins = await prisma.user.findMany({
|
||||
where: {
|
||||
role: { in: ['SUPER_ADMIN', 'PROGRAM_ADMIN'] },
|
||||
status: 'ACTIVE',
|
||||
},
|
||||
select: { id: true },
|
||||
})
|
||||
|
||||
if (admins.length === 0) return
|
||||
|
||||
await createBulkNotifications({
|
||||
...params,
|
||||
userIds: admins.map((a) => a.id),
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Notify all jury members for a specific stage
|
||||
*/
|
||||
export async function notifyStageJury(
|
||||
stageId: string,
|
||||
params: Omit<CreateNotificationParams, 'userId'>
|
||||
): Promise<void> {
|
||||
const assignments = await prisma.assignment.findMany({
|
||||
where: { stageId },
|
||||
select: { userId: true },
|
||||
distinct: ['userId'],
|
||||
})
|
||||
|
||||
if (assignments.length === 0) return
|
||||
|
||||
await createBulkNotifications({
|
||||
...params,
|
||||
userIds: assignments.map((a) => a.userId),
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Notify team members of a project
|
||||
*/
|
||||
export async function notifyProjectTeam(
|
||||
projectId: string,
|
||||
params: Omit<CreateNotificationParams, 'userId'>
|
||||
): Promise<void> {
|
||||
const teamMembers = await prisma.teamMember.findMany({
|
||||
where: { projectId },
|
||||
include: { user: { select: { id: true } } },
|
||||
})
|
||||
|
||||
const userIds = teamMembers
|
||||
.filter((tm) => tm.user)
|
||||
.map((tm) => tm.user!.id)
|
||||
|
||||
if (userIds.length === 0) return
|
||||
|
||||
await createBulkNotifications({
|
||||
...params,
|
||||
userIds,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Notify assigned mentors of a project
|
||||
*/
|
||||
export async function notifyProjectMentors(
|
||||
projectId: string,
|
||||
params: Omit<CreateNotificationParams, 'userId'>
|
||||
): Promise<void> {
|
||||
const mentorAssignments = await prisma.mentorAssignment.findMany({
|
||||
where: { projectId },
|
||||
select: { mentorId: true },
|
||||
})
|
||||
|
||||
if (mentorAssignments.length === 0) return
|
||||
|
||||
await createBulkNotifications({
|
||||
...params,
|
||||
userIds: mentorAssignments.map((ma) => ma.mentorId),
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Check email settings and send email if enabled
|
||||
*/
|
||||
async function maybeSendEmail(
|
||||
userId: string,
|
||||
type: string,
|
||||
title: string,
|
||||
message: string,
|
||||
linkUrl?: string,
|
||||
metadata?: Record<string, unknown>
|
||||
): Promise<void> {
|
||||
try {
|
||||
// Check if email is enabled for this notification type
|
||||
const emailSetting = await prisma.notificationEmailSetting.findUnique({
|
||||
where: { notificationType: type },
|
||||
})
|
||||
|
||||
// If no setting exists, don't send email by default
|
||||
if (!emailSetting || !emailSetting.sendEmail) {
|
||||
return
|
||||
}
|
||||
|
||||
// Check user's notification preference
|
||||
const user = await prisma.user.findUnique({
|
||||
where: { id: userId },
|
||||
select: { email: true, name: true, notificationPreference: true },
|
||||
})
|
||||
|
||||
if (!user || user.notificationPreference === 'NONE') {
|
||||
return
|
||||
}
|
||||
|
||||
// Send styled email with full context
|
||||
// The styled template will use metadata for rich content
|
||||
// Subject can be overridden by admin settings
|
||||
await sendStyledNotificationEmail(
|
||||
user.email,
|
||||
user.name || 'User',
|
||||
type,
|
||||
{
|
||||
title,
|
||||
message,
|
||||
linkUrl,
|
||||
metadata,
|
||||
},
|
||||
emailSetting.emailSubject || undefined
|
||||
)
|
||||
} catch (error) {
|
||||
// Log but don't fail the notification creation
|
||||
console.error('[Notification] Failed to send email:', error)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Mark a notification as read
|
||||
*/
|
||||
export async function markNotificationAsRead(
|
||||
notificationId: string,
|
||||
userId: string
|
||||
): Promise<void> {
|
||||
await prisma.inAppNotification.updateMany({
|
||||
where: { id: notificationId, userId },
|
||||
data: { isRead: true, readAt: new Date() },
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Mark all notifications as read for a user
|
||||
*/
|
||||
export async function markAllNotificationsAsRead(userId: string): Promise<void> {
|
||||
await prisma.inAppNotification.updateMany({
|
||||
where: { userId, isRead: false },
|
||||
data: { isRead: true, readAt: new Date() },
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Get unread notification count for a user
|
||||
*/
|
||||
export async function getUnreadCount(userId: string): Promise<number> {
|
||||
return prisma.inAppNotification.count({
|
||||
where: { userId, isRead: false },
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete expired notifications
|
||||
*/
|
||||
export async function deleteExpiredNotifications(): Promise<number> {
|
||||
const result = await prisma.inAppNotification.deleteMany({
|
||||
where: {
|
||||
expiresAt: { lt: new Date() },
|
||||
},
|
||||
})
|
||||
return result.count
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete old read notifications (cleanup job)
|
||||
*/
|
||||
export async function deleteOldNotifications(olderThanDays: number): Promise<number> {
|
||||
const cutoffDate = new Date()
|
||||
cutoffDate.setDate(cutoffDate.getDate() - olderThanDays)
|
||||
|
||||
const result = await prisma.inAppNotification.deleteMany({
|
||||
where: {
|
||||
isRead: true,
|
||||
createdAt: { lt: cutoffDate },
|
||||
},
|
||||
})
|
||||
return result.count
|
||||
}
|
||||
/**
|
||||
* In-App Notification Service
|
||||
*
|
||||
* Creates and manages in-app notifications for users.
|
||||
* Optionally sends email notifications based on admin settings.
|
||||
*/
|
||||
|
||||
import { prisma } from '@/lib/prisma'
|
||||
import { sendStyledNotificationEmail } from '@/lib/email'
|
||||
|
||||
// Notification priority levels
|
||||
export type NotificationPriority = 'low' | 'normal' | 'high' | 'urgent'
|
||||
|
||||
// Notification type constants
|
||||
export const NotificationTypes = {
|
||||
// Admin notifications
|
||||
FILTERING_COMPLETE: 'FILTERING_COMPLETE',
|
||||
FILTERING_FAILED: 'FILTERING_FAILED',
|
||||
AI_SUGGESTIONS_READY: 'AI_SUGGESTIONS_READY',
|
||||
NEW_APPLICATION: 'NEW_APPLICATION',
|
||||
BULK_APPLICATIONS: 'BULK_APPLICATIONS',
|
||||
DOCUMENTS_UPLOADED: 'DOCUMENTS_UPLOADED',
|
||||
EVALUATION_MILESTONE: 'EVALUATION_MILESTONE',
|
||||
ALL_EVALUATIONS_DONE: 'ALL_EVALUATIONS_DONE',
|
||||
JURY_INACTIVE: 'JURY_INACTIVE',
|
||||
DEADLINE_24H: 'DEADLINE_24H',
|
||||
DEADLINE_1H: 'DEADLINE_1H',
|
||||
ROUND_AUTO_CLOSED: 'ROUND_AUTO_CLOSED',
|
||||
EXPORT_READY: 'EXPORT_READY',
|
||||
SYSTEM_ERROR: 'SYSTEM_ERROR',
|
||||
|
||||
// Jury notifications
|
||||
ASSIGNED_TO_PROJECT: 'ASSIGNED_TO_PROJECT',
|
||||
BATCH_ASSIGNED: 'BATCH_ASSIGNED',
|
||||
PROJECT_UPDATED: 'PROJECT_UPDATED',
|
||||
ROUND_NOW_OPEN: 'ROUND_NOW_OPEN',
|
||||
REMINDER_3_DAYS: 'REMINDER_3_DAYS',
|
||||
REMINDER_24H: 'REMINDER_24H',
|
||||
REMINDER_1H: 'REMINDER_1H',
|
||||
ROUND_EXTENDED: 'ROUND_EXTENDED',
|
||||
ROUND_CLOSED: 'ROUND_CLOSED',
|
||||
THANK_YOU: 'THANK_YOU',
|
||||
RESULTS_AVAILABLE: 'RESULTS_AVAILABLE',
|
||||
|
||||
// Jury - Award specific
|
||||
AWARD_JURY_SELECTED: 'AWARD_JURY_SELECTED',
|
||||
AWARD_VOTING_OPEN: 'AWARD_VOTING_OPEN',
|
||||
AWARD_REMINDER: 'AWARD_REMINDER',
|
||||
AWARD_RESULTS: 'AWARD_RESULTS',
|
||||
|
||||
// Mentor notifications
|
||||
MENTEE_ASSIGNED: 'MENTEE_ASSIGNED',
|
||||
MENTEE_BATCH_ASSIGNED: 'MENTEE_BATCH_ASSIGNED',
|
||||
MENTEE_INTRO: 'MENTEE_INTRO',
|
||||
MENTEE_UPLOADED_DOCS: 'MENTEE_UPLOADED_DOCS',
|
||||
MENTEE_UPDATED_PROJECT: 'MENTEE_UPDATED_PROJECT',
|
||||
MENTEE_ADVANCED: 'MENTEE_ADVANCED',
|
||||
MENTEE_FINALIST: 'MENTEE_FINALIST',
|
||||
MENTEE_WON: 'MENTEE_WON',
|
||||
MENTEE_ELIMINATED: 'MENTEE_ELIMINATED',
|
||||
MENTORSHIP_TIP: 'MENTORSHIP_TIP',
|
||||
NEW_RESOURCE: 'NEW_RESOURCE',
|
||||
|
||||
// Team/Applicant notifications
|
||||
APPLICATION_SUBMITTED: 'APPLICATION_SUBMITTED',
|
||||
APPLICATION_INCOMPLETE: 'APPLICATION_INCOMPLETE',
|
||||
TEAM_INVITE_RECEIVED: 'TEAM_INVITE_RECEIVED',
|
||||
TEAM_MEMBER_JOINED: 'TEAM_MEMBER_JOINED',
|
||||
TEAM_MEMBER_LEFT: 'TEAM_MEMBER_LEFT',
|
||||
DOCUMENTS_RECEIVED: 'DOCUMENTS_RECEIVED',
|
||||
REVIEW_IN_PROGRESS: 'REVIEW_IN_PROGRESS',
|
||||
ADVANCED_SEMIFINAL: 'ADVANCED_SEMIFINAL',
|
||||
ADVANCED_FINAL: 'ADVANCED_FINAL',
|
||||
MENTOR_ASSIGNED: 'MENTOR_ASSIGNED',
|
||||
MENTOR_MESSAGE: 'MENTOR_MESSAGE',
|
||||
NOT_SELECTED: 'NOT_SELECTED',
|
||||
FEEDBACK_AVAILABLE: 'FEEDBACK_AVAILABLE',
|
||||
EVENT_INVITATION: 'EVENT_INVITATION',
|
||||
WINNER_ANNOUNCEMENT: 'WINNER_ANNOUNCEMENT',
|
||||
SUBMISSION_RECEIVED: 'SUBMISSION_RECEIVED',
|
||||
CERTIFICATE_READY: 'CERTIFICATE_READY',
|
||||
PROGRAM_NEWSLETTER: 'PROGRAM_NEWSLETTER',
|
||||
|
||||
// Observer notifications
|
||||
ROUND_STARTED: 'ROUND_STARTED',
|
||||
ROUND_PROGRESS: 'ROUND_PROGRESS',
|
||||
ROUND_COMPLETED: 'ROUND_COMPLETED',
|
||||
FINALISTS_ANNOUNCED: 'FINALISTS_ANNOUNCED',
|
||||
WINNERS_ANNOUNCED: 'WINNERS_ANNOUNCED',
|
||||
REPORT_AVAILABLE: 'REPORT_AVAILABLE',
|
||||
} as const
|
||||
|
||||
export type NotificationType = (typeof NotificationTypes)[keyof typeof NotificationTypes]
|
||||
|
||||
// Notification icons by type
// Maps a notification type to an icon name (values match lucide-style icon
// identifiers used by the UI). Types not listed here fall back to 'Bell' in
// createNotification / createBulkNotifications.
export const NotificationIcons: Record<string, string> = {
  [NotificationTypes.FILTERING_COMPLETE]: 'Brain',
  [NotificationTypes.FILTERING_FAILED]: 'AlertTriangle',
  [NotificationTypes.NEW_APPLICATION]: 'FileText',
  [NotificationTypes.BULK_APPLICATIONS]: 'Files',
  [NotificationTypes.DOCUMENTS_UPLOADED]: 'Upload',
  [NotificationTypes.ASSIGNED_TO_PROJECT]: 'ClipboardList',
  [NotificationTypes.ROUND_NOW_OPEN]: 'PlayCircle',
  [NotificationTypes.REMINDER_24H]: 'Clock',
  [NotificationTypes.REMINDER_1H]: 'AlertCircle',
  [NotificationTypes.ROUND_CLOSED]: 'Lock',
  [NotificationTypes.MENTEE_ASSIGNED]: 'Users',
  [NotificationTypes.MENTEE_ADVANCED]: 'TrendingUp',
  [NotificationTypes.MENTEE_WON]: 'Trophy',
  [NotificationTypes.APPLICATION_SUBMITTED]: 'CheckCircle',
  [NotificationTypes.SUBMISSION_RECEIVED]: 'Inbox',
  [NotificationTypes.ADVANCED_SEMIFINAL]: 'TrendingUp',
  [NotificationTypes.ADVANCED_FINAL]: 'Star',
  [NotificationTypes.MENTOR_ASSIGNED]: 'GraduationCap',
  [NotificationTypes.WINNER_ANNOUNCEMENT]: 'Trophy',
  [NotificationTypes.AWARD_VOTING_OPEN]: 'Vote',
  [NotificationTypes.AWARD_RESULTS]: 'Trophy',
}
|
||||
|
||||
// Priority by notification type
// Types not listed here fall back to 'normal' (see createNotification /
// createBulkNotifications). 'urgent' is reserved for failures and
// last-hour deadlines.
export const NotificationPriorities: Record<string, NotificationPriority> = {
  [NotificationTypes.FILTERING_COMPLETE]: 'high',
  [NotificationTypes.FILTERING_FAILED]: 'urgent',
  [NotificationTypes.DEADLINE_1H]: 'urgent',
  [NotificationTypes.REMINDER_1H]: 'urgent',
  [NotificationTypes.SYSTEM_ERROR]: 'urgent',
  [NotificationTypes.ASSIGNED_TO_PROJECT]: 'high',
  [NotificationTypes.ROUND_NOW_OPEN]: 'high',
  [NotificationTypes.DEADLINE_24H]: 'high',
  [NotificationTypes.REMINDER_24H]: 'high',
  [NotificationTypes.MENTEE_ASSIGNED]: 'high',
  [NotificationTypes.APPLICATION_SUBMITTED]: 'high',
  [NotificationTypes.ADVANCED_SEMIFINAL]: 'high',
  [NotificationTypes.ADVANCED_FINAL]: 'high',
  [NotificationTypes.WINNER_ANNOUNCEMENT]: 'high',
  [NotificationTypes.AWARD_VOTING_OPEN]: 'high',
}
|
||||
|
||||
// Input for createNotification. `icon` and `priority` fall back to the
// per-type defaults in NotificationIcons / NotificationPriorities when omitted.
interface CreateNotificationParams {
  userId: string
  type: string
  title: string
  message: string
  linkUrl?: string // in-app destination for the notification's link
  linkLabel?: string // label shown for linkUrl
  icon?: string // icon name; defaults by type, else 'Bell'
  priority?: NotificationPriority // defaults by type, else 'normal'
  metadata?: Record<string, unknown> // extra payload for templates/grouping
  groupKey?: string // unread notifications sharing a key within 1h are batched
  expiresAt?: Date // eligible for deleteExpiredNotifications after this time
}
|
||||
|
||||
/**
|
||||
* Create a single in-app notification
|
||||
*/
|
||||
export async function createNotification(
|
||||
params: CreateNotificationParams
|
||||
): Promise<void> {
|
||||
const {
|
||||
userId,
|
||||
type,
|
||||
title,
|
||||
message,
|
||||
linkUrl,
|
||||
linkLabel,
|
||||
icon,
|
||||
priority,
|
||||
metadata,
|
||||
groupKey,
|
||||
expiresAt,
|
||||
} = params
|
||||
|
||||
// Determine icon and priority if not provided
|
||||
const finalIcon = icon || NotificationIcons[type] || 'Bell'
|
||||
const finalPriority = priority || NotificationPriorities[type] || 'normal'
|
||||
|
||||
// Check for existing notification with same groupKey (for batching)
|
||||
if (groupKey) {
|
||||
const existingNotification = await prisma.inAppNotification.findFirst({
|
||||
where: {
|
||||
userId,
|
||||
groupKey,
|
||||
isRead: false,
|
||||
createdAt: {
|
||||
gte: new Date(Date.now() - 60 * 60 * 1000), // Within last hour
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
if (existingNotification) {
|
||||
// Update existing notification instead of creating new one
|
||||
const existingMeta = existingNotification.metadata as Record<string, unknown> || {}
|
||||
const currentCount = (existingMeta.count as number) || 1
|
||||
await prisma.inAppNotification.update({
|
||||
where: { id: existingNotification.id },
|
||||
data: {
|
||||
message,
|
||||
metadata: { ...existingMeta, ...metadata, count: currentCount + 1 },
|
||||
createdAt: new Date(), // Bump to top
|
||||
},
|
||||
})
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Create the in-app notification
|
||||
await prisma.inAppNotification.create({
|
||||
data: {
|
||||
userId,
|
||||
type,
|
||||
title,
|
||||
message,
|
||||
linkUrl,
|
||||
linkLabel,
|
||||
icon: finalIcon,
|
||||
priority: finalPriority,
|
||||
metadata: metadata as object | undefined,
|
||||
groupKey,
|
||||
expiresAt,
|
||||
},
|
||||
})
|
||||
|
||||
// Check if we should also send an email
|
||||
await maybeSendEmail(userId, type, title, message, linkUrl, metadata)
|
||||
}
|
||||
|
||||
/**
|
||||
* Create notifications for multiple users
|
||||
*/
|
||||
export async function createBulkNotifications(params: {
|
||||
userIds: string[]
|
||||
type: string
|
||||
title: string
|
||||
message: string
|
||||
linkUrl?: string
|
||||
linkLabel?: string
|
||||
icon?: string
|
||||
priority?: NotificationPriority
|
||||
metadata?: Record<string, unknown>
|
||||
}): Promise<void> {
|
||||
const {
|
||||
userIds,
|
||||
type,
|
||||
title,
|
||||
message,
|
||||
linkUrl,
|
||||
linkLabel,
|
||||
icon,
|
||||
priority,
|
||||
metadata,
|
||||
} = params
|
||||
|
||||
const finalIcon = icon || NotificationIcons[type] || 'Bell'
|
||||
const finalPriority = priority || NotificationPriorities[type] || 'normal'
|
||||
|
||||
// Create notifications in bulk
|
||||
await prisma.inAppNotification.createMany({
|
||||
data: userIds.map((userId) => ({
|
||||
userId,
|
||||
type,
|
||||
title,
|
||||
message,
|
||||
linkUrl,
|
||||
linkLabel,
|
||||
icon: finalIcon,
|
||||
priority: finalPriority,
|
||||
metadata: metadata as object | undefined,
|
||||
})),
|
||||
})
|
||||
|
||||
// Check email settings and send emails
|
||||
for (const userId of userIds) {
|
||||
await maybeSendEmail(userId, type, title, message, linkUrl, metadata)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Notify all admin users
|
||||
*/
|
||||
export async function notifyAdmins(params: {
|
||||
type: string
|
||||
title: string
|
||||
message: string
|
||||
linkUrl?: string
|
||||
linkLabel?: string
|
||||
icon?: string
|
||||
priority?: NotificationPriority
|
||||
metadata?: Record<string, unknown>
|
||||
}): Promise<void> {
|
||||
const admins = await prisma.user.findMany({
|
||||
where: {
|
||||
role: { in: ['SUPER_ADMIN', 'PROGRAM_ADMIN'] },
|
||||
status: 'ACTIVE',
|
||||
},
|
||||
select: { id: true },
|
||||
})
|
||||
|
||||
if (admins.length === 0) return
|
||||
|
||||
await createBulkNotifications({
|
||||
...params,
|
||||
userIds: admins.map((a) => a.id),
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Notify all jury members for a specific stage
|
||||
*/
|
||||
export async function notifyStageJury(
|
||||
stageId: string,
|
||||
params: Omit<CreateNotificationParams, 'userId'>
|
||||
): Promise<void> {
|
||||
const assignments = await prisma.assignment.findMany({
|
||||
where: { stageId },
|
||||
select: { userId: true },
|
||||
distinct: ['userId'],
|
||||
})
|
||||
|
||||
if (assignments.length === 0) return
|
||||
|
||||
await createBulkNotifications({
|
||||
...params,
|
||||
userIds: assignments.map((a) => a.userId),
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Notify team members of a project
|
||||
*/
|
||||
export async function notifyProjectTeam(
|
||||
projectId: string,
|
||||
params: Omit<CreateNotificationParams, 'userId'>
|
||||
): Promise<void> {
|
||||
const teamMembers = await prisma.teamMember.findMany({
|
||||
where: { projectId },
|
||||
include: { user: { select: { id: true } } },
|
||||
})
|
||||
|
||||
const userIds = teamMembers
|
||||
.filter((tm) => tm.user)
|
||||
.map((tm) => tm.user!.id)
|
||||
|
||||
if (userIds.length === 0) return
|
||||
|
||||
await createBulkNotifications({
|
||||
...params,
|
||||
userIds,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Notify assigned mentors of a project
|
||||
*/
|
||||
export async function notifyProjectMentors(
|
||||
projectId: string,
|
||||
params: Omit<CreateNotificationParams, 'userId'>
|
||||
): Promise<void> {
|
||||
const mentorAssignments = await prisma.mentorAssignment.findMany({
|
||||
where: { projectId },
|
||||
select: { mentorId: true },
|
||||
})
|
||||
|
||||
if (mentorAssignments.length === 0) return
|
||||
|
||||
await createBulkNotifications({
|
||||
...params,
|
||||
userIds: mentorAssignments.map((ma) => ma.mentorId),
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Check email settings and send email if enabled
|
||||
*/
|
||||
async function maybeSendEmail(
|
||||
userId: string,
|
||||
type: string,
|
||||
title: string,
|
||||
message: string,
|
||||
linkUrl?: string,
|
||||
metadata?: Record<string, unknown>
|
||||
): Promise<void> {
|
||||
try {
|
||||
// Check if email is enabled for this notification type
|
||||
const emailSetting = await prisma.notificationEmailSetting.findUnique({
|
||||
where: { notificationType: type },
|
||||
})
|
||||
|
||||
// If no setting exists, don't send email by default
|
||||
if (!emailSetting || !emailSetting.sendEmail) {
|
||||
return
|
||||
}
|
||||
|
||||
// Check user's notification preference
|
||||
const user = await prisma.user.findUnique({
|
||||
where: { id: userId },
|
||||
select: { email: true, name: true, notificationPreference: true },
|
||||
})
|
||||
|
||||
if (!user || user.notificationPreference === 'NONE') {
|
||||
return
|
||||
}
|
||||
|
||||
// Send styled email with full context
|
||||
// The styled template will use metadata for rich content
|
||||
// Subject can be overridden by admin settings
|
||||
await sendStyledNotificationEmail(
|
||||
user.email,
|
||||
user.name || 'User',
|
||||
type,
|
||||
{
|
||||
title,
|
||||
message,
|
||||
linkUrl,
|
||||
metadata,
|
||||
},
|
||||
emailSetting.emailSubject || undefined
|
||||
)
|
||||
} catch (error) {
|
||||
// Log but don't fail the notification creation
|
||||
console.error('[Notification] Failed to send email:', error)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Mark a notification as read
|
||||
*/
|
||||
export async function markNotificationAsRead(
|
||||
notificationId: string,
|
||||
userId: string
|
||||
): Promise<void> {
|
||||
await prisma.inAppNotification.updateMany({
|
||||
where: { id: notificationId, userId },
|
||||
data: { isRead: true, readAt: new Date() },
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Mark all notifications as read for a user
|
||||
*/
|
||||
export async function markAllNotificationsAsRead(userId: string): Promise<void> {
|
||||
await prisma.inAppNotification.updateMany({
|
||||
where: { userId, isRead: false },
|
||||
data: { isRead: true, readAt: new Date() },
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Get unread notification count for a user
|
||||
*/
|
||||
export async function getUnreadCount(userId: string): Promise<number> {
|
||||
return prisma.inAppNotification.count({
|
||||
where: { userId, isRead: false },
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete expired notifications
|
||||
*/
|
||||
export async function deleteExpiredNotifications(): Promise<number> {
|
||||
const result = await prisma.inAppNotification.deleteMany({
|
||||
where: {
|
||||
expiresAt: { lt: new Date() },
|
||||
},
|
||||
})
|
||||
return result.count
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete old read notifications (cleanup job)
|
||||
*/
|
||||
export async function deleteOldNotifications(olderThanDays: number): Promise<number> {
|
||||
const cutoffDate = new Date()
|
||||
cutoffDate.setDate(cutoffDate.getDate() - olderThanDays)
|
||||
|
||||
const result = await prisma.inAppNotification.deleteMany({
|
||||
where: {
|
||||
isRead: true,
|
||||
createdAt: { lt: cutoffDate },
|
||||
},
|
||||
})
|
||||
return result.count
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,486 +1,486 @@
|
||||
/**
|
||||
* AI-Powered Mentor Matching Service
|
||||
*
|
||||
* Matches mentors to projects based on expertise alignment.
|
||||
*
|
||||
* Optimization:
|
||||
* - Batched processing (15 projects per batch)
|
||||
* - Token tracking and cost logging
|
||||
* - Fallback to algorithmic matching
|
||||
*
|
||||
* GDPR Compliance:
|
||||
* - All data anonymized before AI processing
|
||||
* - No personal information sent to OpenAI
|
||||
*/
|
||||
|
||||
import { PrismaClient, OceanIssue, CompetitionCategory } from '@prisma/client'
|
||||
import { getOpenAI, getConfiguredModel, buildCompletionParams } from '@/lib/openai'
|
||||
import { logAIUsage, extractTokenUsage } from '@/server/utils/ai-usage'
|
||||
import { classifyAIError, createParseError, logAIError } from './ai-errors'
|
||||
|
||||
// ─── Constants ───────────────────────────────────────────────────────────────

// Projects sent to the model per request; bounds prompt size while
// amortizing the fixed system-prompt token cost across projects.
const MENTOR_BATCH_SIZE = 15

// Optimized system prompt
// Deliberately terse to minimize token usage; the model is instructed to
// respond in the JSON shape described on the second line.
const MENTOR_MATCHING_SYSTEM_PROMPT = `Match mentors to projects by expertise. Return JSON.
Format for each project: {"matches": [{project_id, mentor_matches: [{mentor_index, confidence_score: 0-1, expertise_match_score: 0-1, reasoning: str}]}]}
Rank by suitability. Consider expertise alignment and availability.`
|
||||
|
||||
// ─── Types ───────────────────────────────────────────────────────────────────

// Project view used for matching. Only non-identifying fields (category,
// issue, tags, truncated description) are forwarded to the AI, anonymized.
interface ProjectInfo {
  id: string
  title: string
  description: string | null
  oceanIssue: OceanIssue | null
  competitionCategory: CompetitionCategory | null
  tags: string[]
}
|
||||
|
||||
// Mentor candidate with current workload. name/email are kept for local
// use only and are never included in the anonymized AI payload.
interface MentorInfo {
  id: string
  name: string | null
  email: string
  expertiseTags: string[]
  currentAssignments: number
  maxAssignments: number | null // null = unlimited capacity
}
|
||||
|
||||
// One ranked mentor suggestion for a project, resolved back from the AI's
// anonymized mentor_index to a real mentor id. Scores are in [0, 1].
interface MentorMatch {
  mentorId: string
  confidenceScore: number
  expertiseMatchScore: number
  reasoning: string
}
|
||||
|
||||
// ─── Batched AI Matching ─────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Process a batch of projects for mentor matching
|
||||
*/
|
||||
async function processMatchingBatch(
|
||||
openai: NonNullable<Awaited<ReturnType<typeof getOpenAI>>>,
|
||||
model: string,
|
||||
projects: ProjectInfo[],
|
||||
mentors: MentorInfo[],
|
||||
limit: number,
|
||||
userId?: string
|
||||
): Promise<{
|
||||
results: Map<string, MentorMatch[]>
|
||||
tokensUsed: number
|
||||
}> {
|
||||
const results = new Map<string, MentorMatch[]>()
|
||||
let tokensUsed = 0
|
||||
|
||||
// Anonymize project data
|
||||
const anonymizedProjects = projects.map((p, index) => ({
|
||||
project_id: `P${index + 1}`,
|
||||
real_id: p.id,
|
||||
description: p.description?.slice(0, 350) || 'No description',
|
||||
category: p.competitionCategory,
|
||||
oceanIssue: p.oceanIssue,
|
||||
tags: p.tags,
|
||||
}))
|
||||
|
||||
// Anonymize mentor data
|
||||
const anonymizedMentors = mentors.map((m, index) => ({
|
||||
index,
|
||||
expertise: m.expertiseTags,
|
||||
availability: m.maxAssignments
|
||||
? `${m.currentAssignments}/${m.maxAssignments}`
|
||||
: 'unlimited',
|
||||
}))
|
||||
|
||||
const userPrompt = `PROJECTS:
|
||||
${anonymizedProjects.map(p => `${p.project_id}: Category=${p.category || 'N/A'}, Issue=${p.oceanIssue || 'N/A'}, Tags=[${p.tags.join(', ')}], Desc=${p.description.slice(0, 200)}`).join('\n')}
|
||||
|
||||
MENTORS:
|
||||
${anonymizedMentors.map(m => `${m.index}: Expertise=[${m.expertise.join(', ')}], Availability=${m.availability}`).join('\n')}
|
||||
|
||||
For each project, rank top ${limit} mentors.`
|
||||
|
||||
try {
|
||||
const params = buildCompletionParams(model, {
|
||||
messages: [
|
||||
{ role: 'system', content: MENTOR_MATCHING_SYSTEM_PROMPT },
|
||||
{ role: 'user', content: userPrompt },
|
||||
],
|
||||
jsonMode: true,
|
||||
temperature: 0.3,
|
||||
maxTokens: 4000,
|
||||
})
|
||||
|
||||
const response = await openai.chat.completions.create(params)
|
||||
const usage = extractTokenUsage(response)
|
||||
tokensUsed = usage.totalTokens
|
||||
|
||||
// Log usage
|
||||
await logAIUsage({
|
||||
userId,
|
||||
action: 'MENTOR_MATCHING',
|
||||
entityType: 'Project',
|
||||
model,
|
||||
promptTokens: usage.promptTokens,
|
||||
completionTokens: usage.completionTokens,
|
||||
totalTokens: usage.totalTokens,
|
||||
batchSize: projects.length,
|
||||
itemsProcessed: projects.length,
|
||||
status: 'SUCCESS',
|
||||
})
|
||||
|
||||
const content = response.choices[0]?.message?.content
|
||||
if (!content) {
|
||||
throw new Error('No response from AI')
|
||||
}
|
||||
|
||||
const parsed = JSON.parse(content) as {
|
||||
matches: Array<{
|
||||
project_id: string
|
||||
mentor_matches: Array<{
|
||||
mentor_index: number
|
||||
confidence_score: number
|
||||
expertise_match_score: number
|
||||
reasoning: string
|
||||
}>
|
||||
}>
|
||||
}
|
||||
|
||||
// Map results back to real IDs
|
||||
for (const projectMatch of parsed.matches || []) {
|
||||
const project = anonymizedProjects.find(p => p.project_id === projectMatch.project_id)
|
||||
if (!project) continue
|
||||
|
||||
const mentorMatches: MentorMatch[] = []
|
||||
for (const match of projectMatch.mentor_matches || []) {
|
||||
if (match.mentor_index >= 0 && match.mentor_index < mentors.length) {
|
||||
mentorMatches.push({
|
||||
mentorId: mentors[match.mentor_index].id,
|
||||
confidenceScore: Math.min(1, Math.max(0, match.confidence_score)),
|
||||
expertiseMatchScore: Math.min(1, Math.max(0, match.expertise_match_score)),
|
||||
reasoning: match.reasoning,
|
||||
})
|
||||
}
|
||||
}
|
||||
results.set(project.real_id, mentorMatches)
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
if (error instanceof SyntaxError) {
|
||||
const parseError = createParseError(error.message)
|
||||
logAIError('MentorMatching', 'batch processing', parseError)
|
||||
|
||||
await logAIUsage({
|
||||
userId,
|
||||
action: 'MENTOR_MATCHING',
|
||||
entityType: 'Project',
|
||||
model,
|
||||
promptTokens: 0,
|
||||
completionTokens: 0,
|
||||
totalTokens: tokensUsed,
|
||||
batchSize: projects.length,
|
||||
itemsProcessed: 0,
|
||||
status: 'ERROR',
|
||||
errorMessage: parseError.message,
|
||||
})
|
||||
|
||||
// Return empty results for batch (will fall back to algorithm)
|
||||
for (const project of projects) {
|
||||
results.set(project.id, [])
|
||||
}
|
||||
} else {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
return { results, tokensUsed }
|
||||
}
|
||||
|
||||
/**
|
||||
* Get AI-suggested mentor matches for multiple projects (batched)
|
||||
*/
|
||||
export async function getAIMentorSuggestionsBatch(
|
||||
prisma: PrismaClient,
|
||||
projectIds: string[],
|
||||
limit: number = 5,
|
||||
userId?: string
|
||||
): Promise<Map<string, MentorMatch[]>> {
|
||||
const allResults = new Map<string, MentorMatch[]>()
|
||||
|
||||
// Get projects
|
||||
const projects = await prisma.project.findMany({
|
||||
where: { id: { in: projectIds } },
|
||||
select: {
|
||||
id: true,
|
||||
title: true,
|
||||
description: true,
|
||||
oceanIssue: true,
|
||||
competitionCategory: true,
|
||||
tags: true,
|
||||
},
|
||||
})
|
||||
|
||||
if (projects.length === 0) {
|
||||
return allResults
|
||||
}
|
||||
|
||||
// Get available mentors
|
||||
const mentors = await prisma.user.findMany({
|
||||
where: {
|
||||
OR: [
|
||||
{ expertiseTags: { isEmpty: false } },
|
||||
{ role: 'JURY_MEMBER' },
|
||||
],
|
||||
status: 'ACTIVE',
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
name: true,
|
||||
email: true,
|
||||
expertiseTags: true,
|
||||
maxAssignments: true,
|
||||
mentorAssignments: {
|
||||
select: { id: true },
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
// Filter mentors who haven't reached max assignments
|
||||
const availableMentors: MentorInfo[] = mentors
|
||||
.filter((m) => {
|
||||
const currentAssignments = m.mentorAssignments.length
|
||||
return !m.maxAssignments || currentAssignments < m.maxAssignments
|
||||
})
|
||||
.map((m) => ({
|
||||
id: m.id,
|
||||
name: m.name,
|
||||
email: m.email,
|
||||
expertiseTags: m.expertiseTags,
|
||||
currentAssignments: m.mentorAssignments.length,
|
||||
maxAssignments: m.maxAssignments,
|
||||
}))
|
||||
|
||||
if (availableMentors.length === 0) {
|
||||
return allResults
|
||||
}
|
||||
|
||||
// Try AI matching
|
||||
try {
|
||||
const openai = await getOpenAI()
|
||||
if (!openai) {
|
||||
console.log('[Mentor Matching] OpenAI not configured, using algorithm')
|
||||
return getAlgorithmicMatchesBatch(projects, availableMentors, limit)
|
||||
}
|
||||
|
||||
const model = await getConfiguredModel()
|
||||
console.log(`[Mentor Matching] Using model: ${model} for ${projects.length} projects in batches of ${MENTOR_BATCH_SIZE}`)
|
||||
|
||||
let totalTokens = 0
|
||||
|
||||
// Process in batches
|
||||
for (let i = 0; i < projects.length; i += MENTOR_BATCH_SIZE) {
|
||||
const batchProjects = projects.slice(i, i + MENTOR_BATCH_SIZE)
|
||||
|
||||
console.log(`[Mentor Matching] Processing batch ${Math.floor(i / MENTOR_BATCH_SIZE) + 1}/${Math.ceil(projects.length / MENTOR_BATCH_SIZE)}`)
|
||||
|
||||
const { results, tokensUsed } = await processMatchingBatch(
|
||||
openai,
|
||||
model,
|
||||
batchProjects,
|
||||
availableMentors,
|
||||
limit,
|
||||
userId
|
||||
)
|
||||
|
||||
totalTokens += tokensUsed
|
||||
|
||||
// Merge results
|
||||
for (const [projectId, matches] of results) {
|
||||
allResults.set(projectId, matches)
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`[Mentor Matching] Completed. Total tokens: ${totalTokens}`)
|
||||
|
||||
// Fill in any missing projects with algorithmic fallback
|
||||
for (const project of projects) {
|
||||
if (!allResults.has(project.id) || allResults.get(project.id)?.length === 0) {
|
||||
const fallbackMatches = getAlgorithmicMatches(project, availableMentors, limit)
|
||||
allResults.set(project.id, fallbackMatches)
|
||||
}
|
||||
}
|
||||
|
||||
return allResults
|
||||
|
||||
} catch (error) {
|
||||
const classified = classifyAIError(error)
|
||||
logAIError('MentorMatching', 'getAIMentorSuggestionsBatch', classified)
|
||||
|
||||
// Log failed attempt
|
||||
await logAIUsage({
|
||||
userId,
|
||||
action: 'MENTOR_MATCHING',
|
||||
entityType: 'Project',
|
||||
model: 'unknown',
|
||||
promptTokens: 0,
|
||||
completionTokens: 0,
|
||||
totalTokens: 0,
|
||||
batchSize: projects.length,
|
||||
itemsProcessed: 0,
|
||||
status: 'ERROR',
|
||||
errorMessage: classified.message,
|
||||
})
|
||||
|
||||
console.error('[Mentor Matching] AI failed, using algorithm:', classified.message)
|
||||
return getAlgorithmicMatchesBatch(projects, availableMentors, limit)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get AI-suggested mentor matches for a single project
|
||||
*/
|
||||
export async function getAIMentorSuggestions(
|
||||
prisma: PrismaClient,
|
||||
projectId: string,
|
||||
limit: number = 5,
|
||||
userId?: string
|
||||
): Promise<MentorMatch[]> {
|
||||
const results = await getAIMentorSuggestionsBatch(prisma, [projectId], limit, userId)
|
||||
return results.get(projectId) || []
|
||||
}
|
||||
|
||||
// ─── Algorithmic Fallback ────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Algorithmic fallback for multiple projects
|
||||
*/
|
||||
function getAlgorithmicMatchesBatch(
|
||||
projects: ProjectInfo[],
|
||||
mentors: MentorInfo[],
|
||||
limit: number
|
||||
): Map<string, MentorMatch[]> {
|
||||
const results = new Map<string, MentorMatch[]>()
|
||||
|
||||
for (const project of projects) {
|
||||
results.set(project.id, getAlgorithmicMatches(project, mentors, limit))
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
/**
|
||||
* Algorithmic fallback for mentor matching
|
||||
*/
|
||||
function getAlgorithmicMatches(
|
||||
project: ProjectInfo,
|
||||
mentors: MentorInfo[],
|
||||
limit: number
|
||||
): MentorMatch[] {
|
||||
// Build keyword set from project
|
||||
const projectKeywords = new Set<string>()
|
||||
|
||||
if (project.oceanIssue) {
|
||||
projectKeywords.add(project.oceanIssue.toLowerCase().replace(/_/g, ' '))
|
||||
}
|
||||
|
||||
if (project.competitionCategory) {
|
||||
projectKeywords.add(project.competitionCategory.toLowerCase().replace(/_/g, ' '))
|
||||
}
|
||||
|
||||
project.tags.forEach((tag) => {
|
||||
tag.toLowerCase().split(/\s+/).forEach((word) => {
|
||||
if (word.length > 3) projectKeywords.add(word)
|
||||
})
|
||||
})
|
||||
|
||||
if (project.description) {
|
||||
const words = project.description.toLowerCase().split(/\s+/)
|
||||
words.forEach((word) => {
|
||||
if (word.length > 4) projectKeywords.add(word.replace(/[^a-z]/g, ''))
|
||||
})
|
||||
}
|
||||
|
||||
// Score each mentor
|
||||
const scored = mentors.map((mentor) => {
|
||||
const mentorKeywords = new Set<string>()
|
||||
mentor.expertiseTags.forEach((tag) => {
|
||||
tag.toLowerCase().split(/\s+/).forEach((word) => {
|
||||
if (word.length > 2) mentorKeywords.add(word)
|
||||
})
|
||||
})
|
||||
|
||||
// Calculate overlap
|
||||
let matchCount = 0
|
||||
projectKeywords.forEach((keyword) => {
|
||||
mentorKeywords.forEach((mentorKeyword) => {
|
||||
if (keyword.includes(mentorKeyword) || mentorKeyword.includes(keyword)) {
|
||||
matchCount++
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
const expertiseMatchScore = mentorKeywords.size > 0
|
||||
? Math.min(1, matchCount / mentorKeywords.size)
|
||||
: 0
|
||||
|
||||
// Factor in availability
|
||||
const availabilityScore = mentor.maxAssignments
|
||||
? 1 - (mentor.currentAssignments / mentor.maxAssignments)
|
||||
: 1
|
||||
|
||||
const confidenceScore = (expertiseMatchScore * 0.7 + availabilityScore * 0.3)
|
||||
|
||||
return {
|
||||
mentorId: mentor.id,
|
||||
confidenceScore: Math.round(confidenceScore * 100) / 100,
|
||||
expertiseMatchScore: Math.round(expertiseMatchScore * 100) / 100,
|
||||
reasoning: `Matched ${matchCount} keyword(s). Availability: ${availabilityScore > 0.5 ? 'Good' : 'Limited'}.`,
|
||||
}
|
||||
})
|
||||
|
||||
// Sort by confidence and return top matches
|
||||
return scored
|
||||
.sort((a, b) => b.confidenceScore - a.confidenceScore)
|
||||
.slice(0, limit)
|
||||
}
|
||||
|
||||
/**
|
||||
* Round-robin assignment for load balancing
|
||||
*/
|
||||
export async function getRoundRobinMentor(
|
||||
prisma: PrismaClient,
|
||||
excludeMentorIds: string[] = []
|
||||
): Promise<string | null> {
|
||||
const mentors = await prisma.user.findMany({
|
||||
where: {
|
||||
OR: [
|
||||
{ expertiseTags: { isEmpty: false } },
|
||||
{ role: 'JURY_MEMBER' },
|
||||
],
|
||||
status: 'ACTIVE',
|
||||
id: { notIn: excludeMentorIds },
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
maxAssignments: true,
|
||||
mentorAssignments: {
|
||||
select: { id: true },
|
||||
},
|
||||
},
|
||||
orderBy: {
|
||||
mentorAssignments: {
|
||||
_count: 'asc',
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
// Find mentor with fewest assignments who hasn't reached max
|
||||
for (const mentor of mentors) {
|
||||
const currentCount = mentor.mentorAssignments.length
|
||||
if (!mentor.maxAssignments || currentCount < mentor.maxAssignments) {
|
||||
return mentor.id
|
||||
}
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
/**
|
||||
* AI-Powered Mentor Matching Service
|
||||
*
|
||||
* Matches mentors to projects based on expertise alignment.
|
||||
*
|
||||
* Optimization:
|
||||
* - Batched processing (15 projects per batch)
|
||||
* - Token tracking and cost logging
|
||||
* - Fallback to algorithmic matching
|
||||
*
|
||||
* GDPR Compliance:
|
||||
* - All data anonymized before AI processing
|
||||
* - No personal information sent to OpenAI
|
||||
*/
|
||||
|
||||
import { PrismaClient, OceanIssue, CompetitionCategory } from '@prisma/client'
|
||||
import { getOpenAI, getConfiguredModel, buildCompletionParams } from '@/lib/openai'
|
||||
import { logAIUsage, extractTokenUsage } from '@/server/utils/ai-usage'
|
||||
import { classifyAIError, createParseError, logAIError } from './ai-errors'
|
||||
|
||||
// ─── Constants ───────────────────────────────────────────────────────────────
|
||||
|
||||
const MENTOR_BATCH_SIZE = 15
|
||||
|
||||
// Optimized system prompt
|
||||
const MENTOR_MATCHING_SYSTEM_PROMPT = `Match mentors to projects by expertise. Return JSON.
|
||||
Format for each project: {"matches": [{project_id, mentor_matches: [{mentor_index, confidence_score: 0-1, expertise_match_score: 0-1, reasoning: str}]}]}
|
||||
Rank by suitability. Consider expertise alignment and availability.`
|
||||
|
||||
// ─── Types ───────────────────────────────────────────────────────────────────
|
||||
|
||||
interface ProjectInfo {
|
||||
id: string
|
||||
title: string
|
||||
description: string | null
|
||||
oceanIssue: OceanIssue | null
|
||||
competitionCategory: CompetitionCategory | null
|
||||
tags: string[]
|
||||
}
|
||||
|
||||
interface MentorInfo {
|
||||
id: string
|
||||
name: string | null
|
||||
email: string
|
||||
expertiseTags: string[]
|
||||
currentAssignments: number
|
||||
maxAssignments: number | null
|
||||
}
|
||||
|
||||
interface MentorMatch {
|
||||
mentorId: string
|
||||
confidenceScore: number
|
||||
expertiseMatchScore: number
|
||||
reasoning: string
|
||||
}
|
||||
|
||||
// ─── Batched AI Matching ─────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Process a batch of projects for mentor matching
|
||||
*/
|
||||
async function processMatchingBatch(
|
||||
openai: NonNullable<Awaited<ReturnType<typeof getOpenAI>>>,
|
||||
model: string,
|
||||
projects: ProjectInfo[],
|
||||
mentors: MentorInfo[],
|
||||
limit: number,
|
||||
userId?: string
|
||||
): Promise<{
|
||||
results: Map<string, MentorMatch[]>
|
||||
tokensUsed: number
|
||||
}> {
|
||||
const results = new Map<string, MentorMatch[]>()
|
||||
let tokensUsed = 0
|
||||
|
||||
// Anonymize project data
|
||||
const anonymizedProjects = projects.map((p, index) => ({
|
||||
project_id: `P${index + 1}`,
|
||||
real_id: p.id,
|
||||
description: p.description?.slice(0, 350) || 'No description',
|
||||
category: p.competitionCategory,
|
||||
oceanIssue: p.oceanIssue,
|
||||
tags: p.tags,
|
||||
}))
|
||||
|
||||
// Anonymize mentor data
|
||||
const anonymizedMentors = mentors.map((m, index) => ({
|
||||
index,
|
||||
expertise: m.expertiseTags,
|
||||
availability: m.maxAssignments
|
||||
? `${m.currentAssignments}/${m.maxAssignments}`
|
||||
: 'unlimited',
|
||||
}))
|
||||
|
||||
const userPrompt = `PROJECTS:
|
||||
${anonymizedProjects.map(p => `${p.project_id}: Category=${p.category || 'N/A'}, Issue=${p.oceanIssue || 'N/A'}, Tags=[${p.tags.join(', ')}], Desc=${p.description.slice(0, 200)}`).join('\n')}
|
||||
|
||||
MENTORS:
|
||||
${anonymizedMentors.map(m => `${m.index}: Expertise=[${m.expertise.join(', ')}], Availability=${m.availability}`).join('\n')}
|
||||
|
||||
For each project, rank top ${limit} mentors.`
|
||||
|
||||
try {
|
||||
const params = buildCompletionParams(model, {
|
||||
messages: [
|
||||
{ role: 'system', content: MENTOR_MATCHING_SYSTEM_PROMPT },
|
||||
{ role: 'user', content: userPrompt },
|
||||
],
|
||||
jsonMode: true,
|
||||
temperature: 0.3,
|
||||
maxTokens: 4000,
|
||||
})
|
||||
|
||||
const response = await openai.chat.completions.create(params)
|
||||
const usage = extractTokenUsage(response)
|
||||
tokensUsed = usage.totalTokens
|
||||
|
||||
// Log usage
|
||||
await logAIUsage({
|
||||
userId,
|
||||
action: 'MENTOR_MATCHING',
|
||||
entityType: 'Project',
|
||||
model,
|
||||
promptTokens: usage.promptTokens,
|
||||
completionTokens: usage.completionTokens,
|
||||
totalTokens: usage.totalTokens,
|
||||
batchSize: projects.length,
|
||||
itemsProcessed: projects.length,
|
||||
status: 'SUCCESS',
|
||||
})
|
||||
|
||||
const content = response.choices[0]?.message?.content
|
||||
if (!content) {
|
||||
throw new Error('No response from AI')
|
||||
}
|
||||
|
||||
const parsed = JSON.parse(content) as {
|
||||
matches: Array<{
|
||||
project_id: string
|
||||
mentor_matches: Array<{
|
||||
mentor_index: number
|
||||
confidence_score: number
|
||||
expertise_match_score: number
|
||||
reasoning: string
|
||||
}>
|
||||
}>
|
||||
}
|
||||
|
||||
// Map results back to real IDs
|
||||
for (const projectMatch of parsed.matches || []) {
|
||||
const project = anonymizedProjects.find(p => p.project_id === projectMatch.project_id)
|
||||
if (!project) continue
|
||||
|
||||
const mentorMatches: MentorMatch[] = []
|
||||
for (const match of projectMatch.mentor_matches || []) {
|
||||
if (match.mentor_index >= 0 && match.mentor_index < mentors.length) {
|
||||
mentorMatches.push({
|
||||
mentorId: mentors[match.mentor_index].id,
|
||||
confidenceScore: Math.min(1, Math.max(0, match.confidence_score)),
|
||||
expertiseMatchScore: Math.min(1, Math.max(0, match.expertise_match_score)),
|
||||
reasoning: match.reasoning,
|
||||
})
|
||||
}
|
||||
}
|
||||
results.set(project.real_id, mentorMatches)
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
if (error instanceof SyntaxError) {
|
||||
const parseError = createParseError(error.message)
|
||||
logAIError('MentorMatching', 'batch processing', parseError)
|
||||
|
||||
await logAIUsage({
|
||||
userId,
|
||||
action: 'MENTOR_MATCHING',
|
||||
entityType: 'Project',
|
||||
model,
|
||||
promptTokens: 0,
|
||||
completionTokens: 0,
|
||||
totalTokens: tokensUsed,
|
||||
batchSize: projects.length,
|
||||
itemsProcessed: 0,
|
||||
status: 'ERROR',
|
||||
errorMessage: parseError.message,
|
||||
})
|
||||
|
||||
// Return empty results for batch (will fall back to algorithm)
|
||||
for (const project of projects) {
|
||||
results.set(project.id, [])
|
||||
}
|
||||
} else {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
return { results, tokensUsed }
|
||||
}
|
||||
|
||||
/**
|
||||
* Get AI-suggested mentor matches for multiple projects (batched)
|
||||
*/
|
||||
export async function getAIMentorSuggestionsBatch(
|
||||
prisma: PrismaClient,
|
||||
projectIds: string[],
|
||||
limit: number = 5,
|
||||
userId?: string
|
||||
): Promise<Map<string, MentorMatch[]>> {
|
||||
const allResults = new Map<string, MentorMatch[]>()
|
||||
|
||||
// Get projects
|
||||
const projects = await prisma.project.findMany({
|
||||
where: { id: { in: projectIds } },
|
||||
select: {
|
||||
id: true,
|
||||
title: true,
|
||||
description: true,
|
||||
oceanIssue: true,
|
||||
competitionCategory: true,
|
||||
tags: true,
|
||||
},
|
||||
})
|
||||
|
||||
if (projects.length === 0) {
|
||||
return allResults
|
||||
}
|
||||
|
||||
// Get available mentors
|
||||
const mentors = await prisma.user.findMany({
|
||||
where: {
|
||||
OR: [
|
||||
{ expertiseTags: { isEmpty: false } },
|
||||
{ role: 'JURY_MEMBER' },
|
||||
],
|
||||
status: 'ACTIVE',
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
name: true,
|
||||
email: true,
|
||||
expertiseTags: true,
|
||||
maxAssignments: true,
|
||||
mentorAssignments: {
|
||||
select: { id: true },
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
// Filter mentors who haven't reached max assignments
|
||||
const availableMentors: MentorInfo[] = mentors
|
||||
.filter((m) => {
|
||||
const currentAssignments = m.mentorAssignments.length
|
||||
return !m.maxAssignments || currentAssignments < m.maxAssignments
|
||||
})
|
||||
.map((m) => ({
|
||||
id: m.id,
|
||||
name: m.name,
|
||||
email: m.email,
|
||||
expertiseTags: m.expertiseTags,
|
||||
currentAssignments: m.mentorAssignments.length,
|
||||
maxAssignments: m.maxAssignments,
|
||||
}))
|
||||
|
||||
if (availableMentors.length === 0) {
|
||||
return allResults
|
||||
}
|
||||
|
||||
// Try AI matching
|
||||
try {
|
||||
const openai = await getOpenAI()
|
||||
if (!openai) {
|
||||
console.log('[Mentor Matching] OpenAI not configured, using algorithm')
|
||||
return getAlgorithmicMatchesBatch(projects, availableMentors, limit)
|
||||
}
|
||||
|
||||
const model = await getConfiguredModel()
|
||||
console.log(`[Mentor Matching] Using model: ${model} for ${projects.length} projects in batches of ${MENTOR_BATCH_SIZE}`)
|
||||
|
||||
let totalTokens = 0
|
||||
|
||||
// Process in batches
|
||||
for (let i = 0; i < projects.length; i += MENTOR_BATCH_SIZE) {
|
||||
const batchProjects = projects.slice(i, i + MENTOR_BATCH_SIZE)
|
||||
|
||||
console.log(`[Mentor Matching] Processing batch ${Math.floor(i / MENTOR_BATCH_SIZE) + 1}/${Math.ceil(projects.length / MENTOR_BATCH_SIZE)}`)
|
||||
|
||||
const { results, tokensUsed } = await processMatchingBatch(
|
||||
openai,
|
||||
model,
|
||||
batchProjects,
|
||||
availableMentors,
|
||||
limit,
|
||||
userId
|
||||
)
|
||||
|
||||
totalTokens += tokensUsed
|
||||
|
||||
// Merge results
|
||||
for (const [projectId, matches] of results) {
|
||||
allResults.set(projectId, matches)
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`[Mentor Matching] Completed. Total tokens: ${totalTokens}`)
|
||||
|
||||
// Fill in any missing projects with algorithmic fallback
|
||||
for (const project of projects) {
|
||||
if (!allResults.has(project.id) || allResults.get(project.id)?.length === 0) {
|
||||
const fallbackMatches = getAlgorithmicMatches(project, availableMentors, limit)
|
||||
allResults.set(project.id, fallbackMatches)
|
||||
}
|
||||
}
|
||||
|
||||
return allResults
|
||||
|
||||
} catch (error) {
|
||||
const classified = classifyAIError(error)
|
||||
logAIError('MentorMatching', 'getAIMentorSuggestionsBatch', classified)
|
||||
|
||||
// Log failed attempt
|
||||
await logAIUsage({
|
||||
userId,
|
||||
action: 'MENTOR_MATCHING',
|
||||
entityType: 'Project',
|
||||
model: 'unknown',
|
||||
promptTokens: 0,
|
||||
completionTokens: 0,
|
||||
totalTokens: 0,
|
||||
batchSize: projects.length,
|
||||
itemsProcessed: 0,
|
||||
status: 'ERROR',
|
||||
errorMessage: classified.message,
|
||||
})
|
||||
|
||||
console.error('[Mentor Matching] AI failed, using algorithm:', classified.message)
|
||||
return getAlgorithmicMatchesBatch(projects, availableMentors, limit)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get AI-suggested mentor matches for a single project
|
||||
*/
|
||||
export async function getAIMentorSuggestions(
|
||||
prisma: PrismaClient,
|
||||
projectId: string,
|
||||
limit: number = 5,
|
||||
userId?: string
|
||||
): Promise<MentorMatch[]> {
|
||||
const results = await getAIMentorSuggestionsBatch(prisma, [projectId], limit, userId)
|
||||
return results.get(projectId) || []
|
||||
}
|
||||
|
||||
// ─── Algorithmic Fallback ────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Algorithmic fallback for multiple projects
|
||||
*/
|
||||
function getAlgorithmicMatchesBatch(
|
||||
projects: ProjectInfo[],
|
||||
mentors: MentorInfo[],
|
||||
limit: number
|
||||
): Map<string, MentorMatch[]> {
|
||||
const results = new Map<string, MentorMatch[]>()
|
||||
|
||||
for (const project of projects) {
|
||||
results.set(project.id, getAlgorithmicMatches(project, mentors, limit))
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
/**
|
||||
* Algorithmic fallback for mentor matching
|
||||
*/
|
||||
function getAlgorithmicMatches(
|
||||
project: ProjectInfo,
|
||||
mentors: MentorInfo[],
|
||||
limit: number
|
||||
): MentorMatch[] {
|
||||
// Build keyword set from project
|
||||
const projectKeywords = new Set<string>()
|
||||
|
||||
if (project.oceanIssue) {
|
||||
projectKeywords.add(project.oceanIssue.toLowerCase().replace(/_/g, ' '))
|
||||
}
|
||||
|
||||
if (project.competitionCategory) {
|
||||
projectKeywords.add(project.competitionCategory.toLowerCase().replace(/_/g, ' '))
|
||||
}
|
||||
|
||||
project.tags.forEach((tag) => {
|
||||
tag.toLowerCase().split(/\s+/).forEach((word) => {
|
||||
if (word.length > 3) projectKeywords.add(word)
|
||||
})
|
||||
})
|
||||
|
||||
if (project.description) {
|
||||
const words = project.description.toLowerCase().split(/\s+/)
|
||||
words.forEach((word) => {
|
||||
if (word.length > 4) projectKeywords.add(word.replace(/[^a-z]/g, ''))
|
||||
})
|
||||
}
|
||||
|
||||
// Score each mentor
|
||||
const scored = mentors.map((mentor) => {
|
||||
const mentorKeywords = new Set<string>()
|
||||
mentor.expertiseTags.forEach((tag) => {
|
||||
tag.toLowerCase().split(/\s+/).forEach((word) => {
|
||||
if (word.length > 2) mentorKeywords.add(word)
|
||||
})
|
||||
})
|
||||
|
||||
// Calculate overlap
|
||||
let matchCount = 0
|
||||
projectKeywords.forEach((keyword) => {
|
||||
mentorKeywords.forEach((mentorKeyword) => {
|
||||
if (keyword.includes(mentorKeyword) || mentorKeyword.includes(keyword)) {
|
||||
matchCount++
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
const expertiseMatchScore = mentorKeywords.size > 0
|
||||
? Math.min(1, matchCount / mentorKeywords.size)
|
||||
: 0
|
||||
|
||||
// Factor in availability
|
||||
const availabilityScore = mentor.maxAssignments
|
||||
? 1 - (mentor.currentAssignments / mentor.maxAssignments)
|
||||
: 1
|
||||
|
||||
const confidenceScore = (expertiseMatchScore * 0.7 + availabilityScore * 0.3)
|
||||
|
||||
return {
|
||||
mentorId: mentor.id,
|
||||
confidenceScore: Math.round(confidenceScore * 100) / 100,
|
||||
expertiseMatchScore: Math.round(expertiseMatchScore * 100) / 100,
|
||||
reasoning: `Matched ${matchCount} keyword(s). Availability: ${availabilityScore > 0.5 ? 'Good' : 'Limited'}.`,
|
||||
}
|
||||
})
|
||||
|
||||
// Sort by confidence and return top matches
|
||||
return scored
|
||||
.sort((a, b) => b.confidenceScore - a.confidenceScore)
|
||||
.slice(0, limit)
|
||||
}
|
||||
|
||||
/**
|
||||
* Round-robin assignment for load balancing
|
||||
*/
|
||||
export async function getRoundRobinMentor(
|
||||
prisma: PrismaClient,
|
||||
excludeMentorIds: string[] = []
|
||||
): Promise<string | null> {
|
||||
const mentors = await prisma.user.findMany({
|
||||
where: {
|
||||
OR: [
|
||||
{ expertiseTags: { isEmpty: false } },
|
||||
{ role: 'JURY_MEMBER' },
|
||||
],
|
||||
status: 'ACTIVE',
|
||||
id: { notIn: excludeMentorIds },
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
maxAssignments: true,
|
||||
mentorAssignments: {
|
||||
select: { id: true },
|
||||
},
|
||||
},
|
||||
orderBy: {
|
||||
mentorAssignments: {
|
||||
_count: 'asc',
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
// Find mentor with fewest assignments who hasn't reached max
|
||||
for (const mentor of mentors) {
|
||||
const currentCount = mentor.mentorAssignments.length
|
||||
if (!mentor.maxAssignments || currentCount < mentor.maxAssignments) {
|
||||
return mentor.id
|
||||
}
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,464 +1,464 @@
|
||||
/**
|
||||
* Stage Engine Service
|
||||
*
|
||||
* State machine service for managing project transitions between stages in
|
||||
* the pipeline. Handles validation of transitions (guard evaluation, window
|
||||
* constraints, PSS existence) and atomic execution with full audit logging.
|
||||
*
|
||||
* Key invariants:
|
||||
* - A project can only be in one active PSS per track/stage combination
|
||||
* - Transitions must follow defined StageTransition records
|
||||
* - Guard conditions (guardJson) on transitions are evaluated before execution
|
||||
* - All transitions are logged in DecisionAuditLog and AuditLog
|
||||
*/
|
||||
|
||||
import type { PrismaClient, ProjectStageStateValue, Prisma } from '@prisma/client'
|
||||
import { logAudit } from '@/server/utils/audit'
|
||||
|
||||
// ─── Types ──────────────────────────────────────────────────────────────────
|
||||
|
||||
export interface TransitionValidationResult {
|
||||
valid: boolean
|
||||
errors: string[]
|
||||
}
|
||||
|
||||
export interface TransitionExecutionResult {
|
||||
success: boolean
|
||||
projectStageState: {
|
||||
id: string
|
||||
projectId: string
|
||||
trackId: string
|
||||
stageId: string
|
||||
state: ProjectStageStateValue
|
||||
} | null
|
||||
errors?: string[]
|
||||
}
|
||||
|
||||
export interface BatchTransitionResult {
|
||||
succeeded: string[]
|
||||
failed: Array<{ projectId: string; errors: string[] }>
|
||||
total: number
|
||||
}
|
||||
|
||||
interface GuardCondition {
|
||||
field: string
|
||||
operator: 'eq' | 'neq' | 'in' | 'contains' | 'gt' | 'lt' | 'exists'
|
||||
value: unknown
|
||||
}
|
||||
|
||||
interface GuardConfig {
|
||||
conditions?: GuardCondition[]
|
||||
logic?: 'AND' | 'OR'
|
||||
requireAllEvaluationsComplete?: boolean
|
||||
requireMinScore?: number
|
||||
}
|
||||
|
||||
// ─── Constants ──────────────────────────────────────────────────────────────
|
||||
|
||||
const BATCH_SIZE = 50
|
||||
|
||||
// ─── Guard Evaluation ───────────────────────────────────────────────────────
|
||||
|
||||
function evaluateGuardCondition(
|
||||
condition: GuardCondition,
|
||||
context: Record<string, unknown>
|
||||
): boolean {
|
||||
const fieldValue = context[condition.field]
|
||||
|
||||
switch (condition.operator) {
|
||||
case 'eq':
|
||||
return fieldValue === condition.value
|
||||
case 'neq':
|
||||
return fieldValue !== condition.value
|
||||
case 'in': {
|
||||
if (!Array.isArray(condition.value)) return false
|
||||
return condition.value.includes(fieldValue)
|
||||
}
|
||||
case 'contains': {
|
||||
if (typeof fieldValue === 'string' && typeof condition.value === 'string') {
|
||||
return fieldValue.toLowerCase().includes(condition.value.toLowerCase())
|
||||
}
|
||||
if (Array.isArray(fieldValue)) {
|
||||
return fieldValue.includes(condition.value)
|
||||
}
|
||||
return false
|
||||
}
|
||||
case 'gt':
|
||||
return Number(fieldValue) > Number(condition.value)
|
||||
case 'lt':
|
||||
return Number(fieldValue) < Number(condition.value)
|
||||
case 'exists':
|
||||
return fieldValue !== null && fieldValue !== undefined
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
function evaluateGuard(
|
||||
guardJson: Prisma.JsonValue | null | undefined,
|
||||
context: Record<string, unknown>
|
||||
): { passed: boolean; failedConditions: string[] } {
|
||||
if (!guardJson || typeof guardJson !== 'object') {
|
||||
return { passed: true, failedConditions: [] }
|
||||
}
|
||||
|
||||
const guard = guardJson as unknown as GuardConfig
|
||||
const conditions = guard.conditions ?? []
|
||||
|
||||
if (conditions.length === 0) {
|
||||
return { passed: true, failedConditions: [] }
|
||||
}
|
||||
|
||||
const failedConditions: string[] = []
|
||||
const results = conditions.map((condition) => {
|
||||
const result = evaluateGuardCondition(condition, context)
|
||||
if (!result) {
|
||||
failedConditions.push(
|
||||
`Guard failed: ${condition.field} ${condition.operator} ${JSON.stringify(condition.value)}`
|
||||
)
|
||||
}
|
||||
return result
|
||||
})
|
||||
|
||||
const logic = guard.logic ?? 'AND'
|
||||
const passed = logic === 'AND'
|
||||
? results.every(Boolean)
|
||||
: results.some(Boolean)
|
||||
|
||||
return { passed, failedConditions: passed ? [] : failedConditions }
|
||||
}
|
||||
|
||||
// ─── Validate Transition ────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Validate whether a project can transition from one stage to another.
 * Read-only: performs no writes and accumulates all errors rather than
 * failing fast, except where a missing record makes later checks meaningless.
 *
 * Checks:
 * 1. Source PSS (ProjectStageState) exists and is not already exited
 * 2. A StageTransition record exists for fromStage -> toStage
 * 3. Destination stage exists and is not archived
 *    NOTE(review): an earlier doc claimed DRAFT stages are also rejected,
 *    but only 'STAGE_ARCHIVED' is checked below — confirm whether DRAFT
 *    should block transitions too.
 * 4. Voting/evaluation window constraints on the destination stage
 * 5. Guard conditions (guardJson) on the transition
 *
 * @param projectId   Project attempting the transition.
 * @param fromStageId Stage the project is currently in.
 * @param toStageId   Stage the project wants to enter.
 * @param prisma      Prisma client (or transaction client, hence `| any`).
 * @returns valid=true iff no errors were accumulated.
 */
export async function validateTransition(
  projectId: string,
  fromStageId: string,
  toStageId: string,
  prisma: PrismaClient | any
): Promise<TransitionValidationResult> {
  const errors: string[] = []

  // 1. Check source PSS exists and is active (no exitedAt).
  // NOTE(review): this lookup is not filtered by trackId — if a project can
  // hold the same stage on multiple tracks this may match another track's
  // PSS; confirm against the schema's uniqueness constraints.
  const sourcePSS = await prisma.projectStageState.findFirst({
    where: {
      projectId,
      stageId: fromStageId,
      exitedAt: null,
    },
  })

  if (!sourcePSS) {
    errors.push(
      `Project ${projectId} has no active state in stage ${fromStageId}`
    )
  }

  // 2. Check StageTransition record exists (compound unique key).
  const transition = await prisma.stageTransition.findUnique({
    where: {
      fromStageId_toStageId: {
        fromStageId,
        toStageId,
      },
    },
  })

  if (!transition) {
    // Without a transition record none of the remaining checks apply.
    errors.push(
      `No transition defined from stage ${fromStageId} to stage ${toStageId}`
    )
    return { valid: false, errors }
  }

  // 3. Check destination stage exists and is not archived.
  const destStage = await prisma.stage.findUnique({
    where: { id: toStageId },
  })

  if (!destStage) {
    errors.push(`Destination stage ${toStageId} not found`)
    return { valid: false, errors }
  }

  if (destStage.status === 'STAGE_ARCHIVED') {
    errors.push(`Destination stage "${destStage.name}" is archived`)
  }

  // 4. Check window constraints on destination stage (both bounds optional).
  const now = new Date()
  if (destStage.windowOpenAt && now < destStage.windowOpenAt) {
    errors.push(
      `Destination stage "${destStage.name}" window has not opened yet (opens ${destStage.windowOpenAt.toISOString()})`
    )
  }
  if (destStage.windowCloseAt && now > destStage.windowCloseAt) {
    errors.push(
      `Destination stage "${destStage.name}" window has already closed (closed ${destStage.windowCloseAt.toISOString()})`
    )
  }

  // 5. Evaluate guard conditions — only when the transition defines a guard
  // and we found an active source PSS (the guard context needs its state).
  if (transition.guardJson && sourcePSS) {
    // Build context from the project and its current state for guard evaluation.
    const project = await prisma.project.findUnique({
      where: { id: projectId },
      include: {
        assignments: {
          where: { stageId: fromStageId },
          include: { evaluation: true },
        },
      },
    })

    // Evaluations attached to this stage's assignments; nulls dropped.
    const evaluations = project?.assignments
      ?.map((a: any) => a.evaluation)
      .filter(Boolean) ?? []
    const submittedEvaluations = evaluations.filter(
      (e: any) => e.status === 'SUBMITTED'
    )
    // Mean of submitted globalScores (missing scores count as 0); 0 when none.
    const avgScore =
      submittedEvaluations.length > 0
        ? submittedEvaluations.reduce(
            (sum: number, e: any) => sum + (e.globalScore ?? 0),
            0
          ) / submittedEvaluations.length
        : 0

    // Keys below are the fields guard conditions may reference.
    const guardContext: Record<string, unknown> = {
      state: sourcePSS?.state,
      evaluationCount: evaluations.length,
      submittedEvaluationCount: submittedEvaluations.length,
      averageScore: avgScore,
      status: project?.status,
      country: project?.country,
      competitionCategory: project?.competitionCategory,
      tags: project?.tags ?? [],
    }

    const guardResult = evaluateGuard(transition.guardJson, guardContext)
    if (!guardResult.passed) {
      errors.push(...guardResult.failedConditions)
    }
  }

  return { valid: errors.length === 0, errors }
}
|
||||
|
||||
// ─── Execute Transition ─────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Execute a stage transition for a single project atomically.
 * Within a single Prisma transaction:
 * 1. Sets exitedAt on the source PSS (and promotes PENDING/IN_PROGRESS
 *    to COMPLETED)
 * 2. Creates or updates the destination PSS with the new state
 * 3. Logs the transition in DecisionAuditLog
 * 4. Logs the transition in AuditLog via logAudit
 *
 * Does not re-validate: callers are expected to run validateTransition
 * first. Never throws — failures are returned as { success: false, errors }.
 *
 * @param projectId   Project being moved.
 * @param trackId     Track the destination PSS belongs to.
 * @param fromStageId Stage being exited.
 * @param toStageId   Stage being entered.
 * @param newState    State to record on the destination PSS.
 * @param actorId     User performing the transition (for audit logs).
 * @param prisma      Prisma client (or transaction client, hence `| any`).
 */
export async function executeTransition(
  projectId: string,
  trackId: string,
  fromStageId: string,
  toStageId: string,
  newState: ProjectStageStateValue,
  actorId: string,
  prisma: PrismaClient | any
): Promise<TransitionExecutionResult> {
  try {
    const result = await prisma.$transaction(async (tx: any) => {
      const now = new Date()

      // 1. Exit the source PSS.
      // NOTE(review): like validateTransition, this lookup is not filtered
      // by trackId — confirm a project cannot be active in the same stage
      // on two tracks.
      const sourcePSS = await tx.projectStageState.findFirst({
        where: {
          projectId,
          stageId: fromStageId,
          exitedAt: null,
        },
      })

      if (sourcePSS) {
        // Missing source PSS is tolerated here (validation should have
        // caught it); when present, stamp the exit and finalize its state.
        await tx.projectStageState.update({
          where: { id: sourcePSS.id },
          data: {
            exitedAt: now,
            // In-flight states are promoted to COMPLETED on exit; terminal
            // states (e.g. REJECTED) are preserved as-is.
            state: sourcePSS.state === 'PENDING' || sourcePSS.state === 'IN_PROGRESS'
              ? 'COMPLETED'
              : sourcePSS.state,
          },
        })
      }

      // 2. Create or update destination PSS (manual upsert on the compound
      // unique key). Re-entering a previously exited stage resets enteredAt
      // and clears exitedAt.
      const existingDestPSS = await tx.projectStageState.findUnique({
        where: {
          projectId_trackId_stageId: {
            projectId,
            trackId,
            stageId: toStageId,
          },
        },
      })

      let destPSS
      if (existingDestPSS) {
        destPSS = await tx.projectStageState.update({
          where: { id: existingDestPSS.id },
          data: {
            state: newState,
            enteredAt: now,
            exitedAt: null,
          },
        })
      } else {
        destPSS = await tx.projectStageState.create({
          data: {
            projectId,
            trackId,
            stageId: toStageId,
            state: newState,
            enteredAt: now,
          },
        })
      }

      // 3. Log in DecisionAuditLog: what changed plus a snapshot of the
      // records involved, for later reconstruction.
      await tx.decisionAuditLog.create({
        data: {
          eventType: 'stage.transitioned',
          entityType: 'ProjectStageState',
          entityId: destPSS.id,
          actorId,
          detailsJson: {
            projectId,
            trackId,
            fromStageId,
            toStageId,
            previousState: sourcePSS?.state ?? null,
            newState,
          },
          snapshotJson: {
            sourcePSSId: sourcePSS?.id ?? null,
            destPSSId: destPSS.id,
            timestamp: now.toISOString(),
          },
        },
      })

      // 4. Audit log. The original note says logAudit never throws — if it
      // ever did, the whole transaction (steps 1-3) would roll back.
      await logAudit({
        prisma: tx,
        userId: actorId,
        action: 'STAGE_TRANSITION',
        entityType: 'ProjectStageState',
        entityId: destPSS.id,
        detailsJson: {
          projectId,
          fromStageId,
          toStageId,
          newState,
        },
      })

      return destPSS
    })

    // Expose only a plain snapshot of the destination PSS to callers.
    return {
      success: true,
      projectStageState: {
        id: result.id,
        projectId: result.projectId,
        trackId: result.trackId,
        stageId: result.stageId,
        state: result.state,
      },
    }
  } catch (error) {
    // Transaction rolled back; surface the failure as a result object so
    // batch callers can record it without try/catch.
    console.error('[StageEngine] Transition execution failed:', error)
    return {
      success: false,
      projectStageState: null,
      errors: [
        error instanceof Error
          ? error.message
          : 'Unknown error during transition execution',
      ],
    }
  }
}
|
||||
|
||||
// ─── Batch Transition ───────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Execute transitions for multiple projects in batches of 50.
|
||||
* Each project is processed independently so a failure in one does not
|
||||
* block others.
|
||||
*/
|
||||
export async function executeBatchTransition(
|
||||
projectIds: string[],
|
||||
trackId: string,
|
||||
fromStageId: string,
|
||||
toStageId: string,
|
||||
newState: ProjectStageStateValue,
|
||||
actorId: string,
|
||||
prisma: PrismaClient | any
|
||||
): Promise<BatchTransitionResult> {
|
||||
const succeeded: string[] = []
|
||||
const failed: Array<{ projectId: string; errors: string[] }> = []
|
||||
|
||||
// Process in batches
|
||||
for (let i = 0; i < projectIds.length; i += BATCH_SIZE) {
|
||||
const batch = projectIds.slice(i, i + BATCH_SIZE)
|
||||
|
||||
const batchPromises = batch.map(async (projectId) => {
|
||||
// Validate first
|
||||
const validation = await validateTransition(
|
||||
projectId,
|
||||
fromStageId,
|
||||
toStageId,
|
||||
prisma
|
||||
)
|
||||
|
||||
if (!validation.valid) {
|
||||
failed.push({ projectId, errors: validation.errors })
|
||||
return
|
||||
}
|
||||
|
||||
// Execute transition
|
||||
const result = await executeTransition(
|
||||
projectId,
|
||||
trackId,
|
||||
fromStageId,
|
||||
toStageId,
|
||||
newState,
|
||||
actorId,
|
||||
prisma
|
||||
)
|
||||
|
||||
if (result.success) {
|
||||
succeeded.push(projectId)
|
||||
} else {
|
||||
failed.push({
|
||||
projectId,
|
||||
errors: result.errors ?? ['Transition execution failed'],
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
await Promise.all(batchPromises)
|
||||
}
|
||||
|
||||
return {
|
||||
succeeded,
|
||||
failed,
|
||||
total: projectIds.length,
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Stage Engine Service
|
||||
*
|
||||
* State machine service for managing project transitions between stages in
|
||||
* the pipeline. Handles validation of transitions (guard evaluation, window
|
||||
* constraints, PSS existence) and atomic execution with full audit logging.
|
||||
*
|
||||
* Key invariants:
|
||||
* - A project can only be in one active PSS per track/stage combination
|
||||
* - Transitions must follow defined StageTransition records
|
||||
* - Guard conditions (guardJson) on transitions are evaluated before execution
|
||||
* - All transitions are logged in DecisionAuditLog and AuditLog
|
||||
*/
|
||||
|
||||
import type { PrismaClient, ProjectStageStateValue, Prisma } from '@prisma/client'
|
||||
import { logAudit } from '@/server/utils/audit'
|
||||
|
||||
// ─── Types ──────────────────────────────────────────────────────────────────
|
||||
|
||||
/** Result of validateTransition. `valid` is true iff `errors` is empty. */
export interface TransitionValidationResult {
  valid: boolean
  errors: string[]
}

/**
 * Result of executeTransition. On success, `projectStageState` is a plain
 * snapshot of the destination PSS; on failure it is null and `errors`
 * explains why.
 */
export interface TransitionExecutionResult {
  success: boolean
  projectStageState: {
    id: string
    projectId: string
    trackId: string
    stageId: string
    state: ProjectStageStateValue
  } | null
  errors?: string[]
}

/**
 * Aggregate outcome of executeBatchTransition. Every input project id ends
 * up in exactly one of `succeeded` or `failed`; `total` is the input count.
 */
export interface BatchTransitionResult {
  succeeded: string[]
  failed: Array<{ projectId: string; errors: string[] }>
  total: number
}
|
||||
|
||||
interface GuardCondition {
|
||||
field: string
|
||||
operator: 'eq' | 'neq' | 'in' | 'contains' | 'gt' | 'lt' | 'exists'
|
||||
value: unknown
|
||||
}
|
||||
|
||||
interface GuardConfig {
|
||||
conditions?: GuardCondition[]
|
||||
logic?: 'AND' | 'OR'
|
||||
requireAllEvaluationsComplete?: boolean
|
||||
requireMinScore?: number
|
||||
}
|
||||
|
||||
// ─── Constants ──────────────────────────────────────────────────────────────
|
||||
|
||||
const BATCH_SIZE = 50
|
||||
|
||||
// ─── Guard Evaluation ───────────────────────────────────────────────────────
|
||||
|
||||
function evaluateGuardCondition(
|
||||
condition: GuardCondition,
|
||||
context: Record<string, unknown>
|
||||
): boolean {
|
||||
const fieldValue = context[condition.field]
|
||||
|
||||
switch (condition.operator) {
|
||||
case 'eq':
|
||||
return fieldValue === condition.value
|
||||
case 'neq':
|
||||
return fieldValue !== condition.value
|
||||
case 'in': {
|
||||
if (!Array.isArray(condition.value)) return false
|
||||
return condition.value.includes(fieldValue)
|
||||
}
|
||||
case 'contains': {
|
||||
if (typeof fieldValue === 'string' && typeof condition.value === 'string') {
|
||||
return fieldValue.toLowerCase().includes(condition.value.toLowerCase())
|
||||
}
|
||||
if (Array.isArray(fieldValue)) {
|
||||
return fieldValue.includes(condition.value)
|
||||
}
|
||||
return false
|
||||
}
|
||||
case 'gt':
|
||||
return Number(fieldValue) > Number(condition.value)
|
||||
case 'lt':
|
||||
return Number(fieldValue) < Number(condition.value)
|
||||
case 'exists':
|
||||
return fieldValue !== null && fieldValue !== undefined
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
function evaluateGuard(
|
||||
guardJson: Prisma.JsonValue | null | undefined,
|
||||
context: Record<string, unknown>
|
||||
): { passed: boolean; failedConditions: string[] } {
|
||||
if (!guardJson || typeof guardJson !== 'object') {
|
||||
return { passed: true, failedConditions: [] }
|
||||
}
|
||||
|
||||
const guard = guardJson as unknown as GuardConfig
|
||||
const conditions = guard.conditions ?? []
|
||||
|
||||
if (conditions.length === 0) {
|
||||
return { passed: true, failedConditions: [] }
|
||||
}
|
||||
|
||||
const failedConditions: string[] = []
|
||||
const results = conditions.map((condition) => {
|
||||
const result = evaluateGuardCondition(condition, context)
|
||||
if (!result) {
|
||||
failedConditions.push(
|
||||
`Guard failed: ${condition.field} ${condition.operator} ${JSON.stringify(condition.value)}`
|
||||
)
|
||||
}
|
||||
return result
|
||||
})
|
||||
|
||||
const logic = guard.logic ?? 'AND'
|
||||
const passed = logic === 'AND'
|
||||
? results.every(Boolean)
|
||||
: results.some(Boolean)
|
||||
|
||||
return { passed, failedConditions: passed ? [] : failedConditions }
|
||||
}
|
||||
|
||||
// ─── Validate Transition ────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Validate whether a project can transition from one stage to another.
|
||||
* Checks:
|
||||
* 1. Source PSS exists and is not already exited
|
||||
* 2. A StageTransition record exists for fromStage -> toStage
|
||||
* 3. Destination stage is active (not DRAFT or ARCHIVED)
|
||||
* 4. Voting/evaluation window constraints on the destination stage
|
||||
* 5. Guard conditions on the transition
|
||||
*/
|
||||
export async function validateTransition(
|
||||
projectId: string,
|
||||
fromStageId: string,
|
||||
toStageId: string,
|
||||
prisma: PrismaClient | any
|
||||
): Promise<TransitionValidationResult> {
|
||||
const errors: string[] = []
|
||||
|
||||
// 1. Check source PSS exists and is active (no exitedAt)
|
||||
const sourcePSS = await prisma.projectStageState.findFirst({
|
||||
where: {
|
||||
projectId,
|
||||
stageId: fromStageId,
|
||||
exitedAt: null,
|
||||
},
|
||||
})
|
||||
|
||||
if (!sourcePSS) {
|
||||
errors.push(
|
||||
`Project ${projectId} has no active state in stage ${fromStageId}`
|
||||
)
|
||||
}
|
||||
|
||||
// 2. Check StageTransition record exists
|
||||
const transition = await prisma.stageTransition.findUnique({
|
||||
where: {
|
||||
fromStageId_toStageId: {
|
||||
fromStageId,
|
||||
toStageId,
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
if (!transition) {
|
||||
errors.push(
|
||||
`No transition defined from stage ${fromStageId} to stage ${toStageId}`
|
||||
)
|
||||
return { valid: false, errors }
|
||||
}
|
||||
|
||||
// 3. Check destination stage is active
|
||||
const destStage = await prisma.stage.findUnique({
|
||||
where: { id: toStageId },
|
||||
})
|
||||
|
||||
if (!destStage) {
|
||||
errors.push(`Destination stage ${toStageId} not found`)
|
||||
return { valid: false, errors }
|
||||
}
|
||||
|
||||
if (destStage.status === 'STAGE_ARCHIVED') {
|
||||
errors.push(`Destination stage "${destStage.name}" is archived`)
|
||||
}
|
||||
|
||||
// 4. Check window constraints on destination stage
|
||||
const now = new Date()
|
||||
if (destStage.windowOpenAt && now < destStage.windowOpenAt) {
|
||||
errors.push(
|
||||
`Destination stage "${destStage.name}" window has not opened yet (opens ${destStage.windowOpenAt.toISOString()})`
|
||||
)
|
||||
}
|
||||
if (destStage.windowCloseAt && now > destStage.windowCloseAt) {
|
||||
errors.push(
|
||||
`Destination stage "${destStage.name}" window has already closed (closed ${destStage.windowCloseAt.toISOString()})`
|
||||
)
|
||||
}
|
||||
|
||||
// 5. Evaluate guard conditions
|
||||
if (transition.guardJson && sourcePSS) {
|
||||
// Build context from the project and its current state for guard evaluation
|
||||
const project = await prisma.project.findUnique({
|
||||
where: { id: projectId },
|
||||
include: {
|
||||
assignments: {
|
||||
where: { stageId: fromStageId },
|
||||
include: { evaluation: true },
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const evaluations = project?.assignments
|
||||
?.map((a: any) => a.evaluation)
|
||||
.filter(Boolean) ?? []
|
||||
const submittedEvaluations = evaluations.filter(
|
||||
(e: any) => e.status === 'SUBMITTED'
|
||||
)
|
||||
const avgScore =
|
||||
submittedEvaluations.length > 0
|
||||
? submittedEvaluations.reduce(
|
||||
(sum: number, e: any) => sum + (e.globalScore ?? 0),
|
||||
0
|
||||
) / submittedEvaluations.length
|
||||
: 0
|
||||
|
||||
const guardContext: Record<string, unknown> = {
|
||||
state: sourcePSS?.state,
|
||||
evaluationCount: evaluations.length,
|
||||
submittedEvaluationCount: submittedEvaluations.length,
|
||||
averageScore: avgScore,
|
||||
status: project?.status,
|
||||
country: project?.country,
|
||||
competitionCategory: project?.competitionCategory,
|
||||
tags: project?.tags ?? [],
|
||||
}
|
||||
|
||||
const guardResult = evaluateGuard(transition.guardJson, guardContext)
|
||||
if (!guardResult.passed) {
|
||||
errors.push(...guardResult.failedConditions)
|
||||
}
|
||||
}
|
||||
|
||||
return { valid: errors.length === 0, errors }
|
||||
}
|
||||
|
||||
// ─── Execute Transition ─────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Execute a stage transition for a single project atomically.
|
||||
* Within a transaction:
|
||||
* 1. Sets exitedAt on the source PSS
|
||||
* 2. Creates or updates the destination PSS with the new state
|
||||
* 3. Logs the transition in DecisionAuditLog
|
||||
* 4. Logs the transition in AuditLog
|
||||
*/
|
||||
export async function executeTransition(
|
||||
projectId: string,
|
||||
trackId: string,
|
||||
fromStageId: string,
|
||||
toStageId: string,
|
||||
newState: ProjectStageStateValue,
|
||||
actorId: string,
|
||||
prisma: PrismaClient | any
|
||||
): Promise<TransitionExecutionResult> {
|
||||
try {
|
||||
const result = await prisma.$transaction(async (tx: any) => {
|
||||
const now = new Date()
|
||||
|
||||
// 1. Exit the source PSS
|
||||
const sourcePSS = await tx.projectStageState.findFirst({
|
||||
where: {
|
||||
projectId,
|
||||
stageId: fromStageId,
|
||||
exitedAt: null,
|
||||
},
|
||||
})
|
||||
|
||||
if (sourcePSS) {
|
||||
await tx.projectStageState.update({
|
||||
where: { id: sourcePSS.id },
|
||||
data: {
|
||||
exitedAt: now,
|
||||
state: sourcePSS.state === 'PENDING' || sourcePSS.state === 'IN_PROGRESS'
|
||||
? 'COMPLETED'
|
||||
: sourcePSS.state,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
// 2. Create or update destination PSS
|
||||
const existingDestPSS = await tx.projectStageState.findUnique({
|
||||
where: {
|
||||
projectId_trackId_stageId: {
|
||||
projectId,
|
||||
trackId,
|
||||
stageId: toStageId,
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
let destPSS
|
||||
if (existingDestPSS) {
|
||||
destPSS = await tx.projectStageState.update({
|
||||
where: { id: existingDestPSS.id },
|
||||
data: {
|
||||
state: newState,
|
||||
enteredAt: now,
|
||||
exitedAt: null,
|
||||
},
|
||||
})
|
||||
} else {
|
||||
destPSS = await tx.projectStageState.create({
|
||||
data: {
|
||||
projectId,
|
||||
trackId,
|
||||
stageId: toStageId,
|
||||
state: newState,
|
||||
enteredAt: now,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
// 3. Log in DecisionAuditLog
|
||||
await tx.decisionAuditLog.create({
|
||||
data: {
|
||||
eventType: 'stage.transitioned',
|
||||
entityType: 'ProjectStageState',
|
||||
entityId: destPSS.id,
|
||||
actorId,
|
||||
detailsJson: {
|
||||
projectId,
|
||||
trackId,
|
||||
fromStageId,
|
||||
toStageId,
|
||||
previousState: sourcePSS?.state ?? null,
|
||||
newState,
|
||||
},
|
||||
snapshotJson: {
|
||||
sourcePSSId: sourcePSS?.id ?? null,
|
||||
destPSSId: destPSS.id,
|
||||
timestamp: now.toISOString(),
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
// 4. Audit log (never throws)
|
||||
await logAudit({
|
||||
prisma: tx,
|
||||
userId: actorId,
|
||||
action: 'STAGE_TRANSITION',
|
||||
entityType: 'ProjectStageState',
|
||||
entityId: destPSS.id,
|
||||
detailsJson: {
|
||||
projectId,
|
||||
fromStageId,
|
||||
toStageId,
|
||||
newState,
|
||||
},
|
||||
})
|
||||
|
||||
return destPSS
|
||||
})
|
||||
|
||||
return {
|
||||
success: true,
|
||||
projectStageState: {
|
||||
id: result.id,
|
||||
projectId: result.projectId,
|
||||
trackId: result.trackId,
|
||||
stageId: result.stageId,
|
||||
state: result.state,
|
||||
},
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('[StageEngine] Transition execution failed:', error)
|
||||
return {
|
||||
success: false,
|
||||
projectStageState: null,
|
||||
errors: [
|
||||
error instanceof Error
|
||||
? error.message
|
||||
: 'Unknown error during transition execution',
|
||||
],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ─── Batch Transition ───────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Execute transitions for multiple projects in batches of 50.
|
||||
* Each project is processed independently so a failure in one does not
|
||||
* block others.
|
||||
*/
|
||||
export async function executeBatchTransition(
|
||||
projectIds: string[],
|
||||
trackId: string,
|
||||
fromStageId: string,
|
||||
toStageId: string,
|
||||
newState: ProjectStageStateValue,
|
||||
actorId: string,
|
||||
prisma: PrismaClient | any
|
||||
): Promise<BatchTransitionResult> {
|
||||
const succeeded: string[] = []
|
||||
const failed: Array<{ projectId: string; errors: string[] }> = []
|
||||
|
||||
// Process in batches
|
||||
for (let i = 0; i < projectIds.length; i += BATCH_SIZE) {
|
||||
const batch = projectIds.slice(i, i + BATCH_SIZE)
|
||||
|
||||
const batchPromises = batch.map(async (projectId) => {
|
||||
// Validate first
|
||||
const validation = await validateTransition(
|
||||
projectId,
|
||||
fromStageId,
|
||||
toStageId,
|
||||
prisma
|
||||
)
|
||||
|
||||
if (!validation.valid) {
|
||||
failed.push({ projectId, errors: validation.errors })
|
||||
return
|
||||
}
|
||||
|
||||
// Execute transition
|
||||
const result = await executeTransition(
|
||||
projectId,
|
||||
trackId,
|
||||
fromStageId,
|
||||
toStageId,
|
||||
newState,
|
||||
actorId,
|
||||
prisma
|
||||
)
|
||||
|
||||
if (result.success) {
|
||||
succeeded.push(projectId)
|
||||
} else {
|
||||
failed.push({
|
||||
projectId,
|
||||
errors: result.errors ?? ['Transition execution failed'],
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
await Promise.all(batchPromises)
|
||||
}
|
||||
|
||||
return {
|
||||
succeeded,
|
||||
failed,
|
||||
total: projectIds.length,
|
||||
}
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,174 +1,174 @@
|
||||
import crypto from 'crypto'
|
||||
import { Prisma } from '@prisma/client'
|
||||
import { prisma } from '@/lib/prisma'
|
||||
|
||||
/**
 * Dispatch a webhook event to all active webhooks subscribed to this event.
 *
 * For each matching webhook a WebhookDelivery row is created in PENDING
 * state, then actual delivery is kicked off in the background
 * (fire-and-forget). The returned count therefore reflects deliveries
 * *created*, not deliveries *completed*.
 *
 * @param event   Event name matched against each webhook's `events` list.
 * @param payload JSON-serializable payload stored with each delivery.
 * @returns Number of delivery records successfully created.
 */
export async function dispatchWebhookEvent(
  event: string,
  payload: Record<string, unknown>
): Promise<number> {
  const webhooks = await prisma.webhook.findMany({
    where: {
      isActive: true,
      events: { has: event },
    },
  })

  if (webhooks.length === 0) return 0

  let deliveryCount = 0

  for (const webhook of webhooks) {
    try {
      const delivery = await prisma.webhookDelivery.create({
        data: {
          webhookId: webhook.id,
          event,
          payload: payload as Prisma.InputJsonValue,
          status: 'PENDING',
          attempts: 0,
        },
      })

      // Attempt delivery asynchronously (don't block the caller).
      // Rejections are logged only; deliverWebhook records the delivery's
      // outcome on the row itself.
      deliverWebhook(delivery.id).catch((err) => {
        console.error(`[Webhook] Background delivery failed for ${delivery.id}:`, err)
      })

      deliveryCount++
    } catch (error) {
      // A failed create for one webhook must not prevent the others.
      console.error(`[Webhook] Failed to create delivery for webhook ${webhook.id}:`, error)
    }
  }

  return deliveryCount
}
|
||||
|
||||
/**
|
||||
* Attempt to deliver a single webhook.
|
||||
*/
|
||||
export async function deliverWebhook(deliveryId: string): Promise<void> {
|
||||
const delivery = await prisma.webhookDelivery.findUnique({
|
||||
where: { id: deliveryId },
|
||||
include: { webhook: true },
|
||||
})
|
||||
|
||||
if (!delivery || !delivery.webhook) {
|
||||
console.error(`[Webhook] Delivery ${deliveryId} not found`)
|
||||
return
|
||||
}
|
||||
|
||||
const { webhook } = delivery
|
||||
const payloadStr = JSON.stringify(delivery.payload)
|
||||
|
||||
// Sign payload with HMAC-SHA256
|
||||
const signature = crypto
|
||||
.createHmac('sha256', webhook.secret)
|
||||
.update(payloadStr)
|
||||
.digest('hex')
|
||||
|
||||
// Build headers
|
||||
const headers: Record<string, string> = {
|
||||
'Content-Type': 'application/json',
|
||||
'X-Webhook-Signature': `sha256=${signature}`,
|
||||
'X-Webhook-Event': delivery.event,
|
||||
'X-Webhook-Delivery': delivery.id,
|
||||
}
|
||||
|
||||
// Merge custom headers from webhook config
|
||||
if (webhook.headers && typeof webhook.headers === 'object') {
|
||||
const customHeaders = webhook.headers as Record<string, string>
|
||||
for (const [key, value] of Object.entries(customHeaders)) {
|
||||
if (typeof value === 'string') {
|
||||
headers[key] = value
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const controller = new AbortController()
|
||||
const timeout = setTimeout(() => controller.abort(), 30000) // 30s timeout
|
||||
|
||||
const response = await fetch(webhook.url, {
|
||||
method: 'POST',
|
||||
headers,
|
||||
body: payloadStr,
|
||||
signal: controller.signal,
|
||||
})
|
||||
|
||||
clearTimeout(timeout)
|
||||
|
||||
const responseBody = await response.text().catch(() => '')
|
||||
|
||||
await prisma.webhookDelivery.update({
|
||||
where: { id: deliveryId },
|
||||
data: {
|
||||
status: response.ok ? 'DELIVERED' : 'FAILED',
|
||||
responseStatus: response.status,
|
||||
responseBody: responseBody.slice(0, 4000), // Truncate long responses
|
||||
attempts: delivery.attempts + 1,
|
||||
lastAttemptAt: new Date(),
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
|
||||
|
||||
await prisma.webhookDelivery.update({
|
||||
where: { id: deliveryId },
|
||||
data: {
|
||||
status: 'FAILED',
|
||||
responseBody: errorMessage.slice(0, 4000),
|
||||
attempts: delivery.attempts + 1,
|
||||
lastAttemptAt: new Date(),
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Retry all failed webhook deliveries that haven't exceeded their webhook's
 * maxRetries. Called by cron.
 *
 * Deliveries are retried one at a time (sequential awaits). Note that
 * deliverWebhook records transport failures on the row itself rather than
 * throwing, so `retried` counts retry *attempts made*, not successful
 * deliveries; `errors` counts retries where deliverWebhook itself rejected
 * (e.g. a database error).
 *
 * NOTE(review): all FAILED rows are fetched and the attempts/maxRetries
 * filter is applied in memory — if this table grows large, consider
 * pushing the filter into the query.
 *
 * @returns Counts of retries attempted and retries that threw.
 */
export async function retryFailedDeliveries(): Promise<{
  retried: number
  errors: number
}> {
  let retried = 0
  let errors = 0

  const failedDeliveries = await prisma.webhookDelivery.findMany({
    where: {
      status: 'FAILED',
    },
    include: {
      webhook: {
        select: { maxRetries: true, isActive: true },
      },
    },
  })

  for (const delivery of failedDeliveries) {
    // Skip if webhook is inactive or max retries exceeded
    if (!delivery.webhook.isActive) continue
    if (delivery.attempts >= delivery.webhook.maxRetries) continue

    try {
      await deliverWebhook(delivery.id)
      retried++
    } catch (error) {
      console.error(`[Webhook] Retry failed for delivery ${delivery.id}:`, error)
      errors++
    }
  }

  return { retried, errors }
}
|
||||
|
||||
/**
|
||||
* Generate a random HMAC secret for webhook signing.
|
||||
*/
|
||||
export function generateWebhookSecret(): string {
|
||||
return crypto.randomBytes(32).toString('hex')
|
||||
}
|
||||
import crypto from 'crypto'
|
||||
import { Prisma } from '@prisma/client'
|
||||
import { prisma } from '@/lib/prisma'
|
||||
|
||||
/**
|
||||
* Dispatch a webhook event to all active webhooks subscribed to this event.
|
||||
*/
|
||||
export async function dispatchWebhookEvent(
|
||||
event: string,
|
||||
payload: Record<string, unknown>
|
||||
): Promise<number> {
|
||||
const webhooks = await prisma.webhook.findMany({
|
||||
where: {
|
||||
isActive: true,
|
||||
events: { has: event },
|
||||
},
|
||||
})
|
||||
|
||||
if (webhooks.length === 0) return 0
|
||||
|
||||
let deliveryCount = 0
|
||||
|
||||
for (const webhook of webhooks) {
|
||||
try {
|
||||
const delivery = await prisma.webhookDelivery.create({
|
||||
data: {
|
||||
webhookId: webhook.id,
|
||||
event,
|
||||
payload: payload as Prisma.InputJsonValue,
|
||||
status: 'PENDING',
|
||||
attempts: 0,
|
||||
},
|
||||
})
|
||||
|
||||
// Attempt delivery asynchronously (don't block the caller)
|
||||
deliverWebhook(delivery.id).catch((err) => {
|
||||
console.error(`[Webhook] Background delivery failed for ${delivery.id}:`, err)
|
||||
})
|
||||
|
||||
deliveryCount++
|
||||
} catch (error) {
|
||||
console.error(`[Webhook] Failed to create delivery for webhook ${webhook.id}:`, error)
|
||||
}
|
||||
}
|
||||
|
||||
return deliveryCount
|
||||
}
|
||||
|
||||
/**
|
||||
* Attempt to deliver a single webhook.
|
||||
*/
|
||||
export async function deliverWebhook(deliveryId: string): Promise<void> {
|
||||
const delivery = await prisma.webhookDelivery.findUnique({
|
||||
where: { id: deliveryId },
|
||||
include: { webhook: true },
|
||||
})
|
||||
|
||||
if (!delivery || !delivery.webhook) {
|
||||
console.error(`[Webhook] Delivery ${deliveryId} not found`)
|
||||
return
|
||||
}
|
||||
|
||||
const { webhook } = delivery
|
||||
const payloadStr = JSON.stringify(delivery.payload)
|
||||
|
||||
// Sign payload with HMAC-SHA256
|
||||
const signature = crypto
|
||||
.createHmac('sha256', webhook.secret)
|
||||
.update(payloadStr)
|
||||
.digest('hex')
|
||||
|
||||
// Build headers
|
||||
const headers: Record<string, string> = {
|
||||
'Content-Type': 'application/json',
|
||||
'X-Webhook-Signature': `sha256=${signature}`,
|
||||
'X-Webhook-Event': delivery.event,
|
||||
'X-Webhook-Delivery': delivery.id,
|
||||
}
|
||||
|
||||
// Merge custom headers from webhook config
|
||||
if (webhook.headers && typeof webhook.headers === 'object') {
|
||||
const customHeaders = webhook.headers as Record<string, string>
|
||||
for (const [key, value] of Object.entries(customHeaders)) {
|
||||
if (typeof value === 'string') {
|
||||
headers[key] = value
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const controller = new AbortController()
|
||||
const timeout = setTimeout(() => controller.abort(), 30000) // 30s timeout
|
||||
|
||||
const response = await fetch(webhook.url, {
|
||||
method: 'POST',
|
||||
headers,
|
||||
body: payloadStr,
|
||||
signal: controller.signal,
|
||||
})
|
||||
|
||||
clearTimeout(timeout)
|
||||
|
||||
const responseBody = await response.text().catch(() => '')
|
||||
|
||||
await prisma.webhookDelivery.update({
|
||||
where: { id: deliveryId },
|
||||
data: {
|
||||
status: response.ok ? 'DELIVERED' : 'FAILED',
|
||||
responseStatus: response.status,
|
||||
responseBody: responseBody.slice(0, 4000), // Truncate long responses
|
||||
attempts: delivery.attempts + 1,
|
||||
lastAttemptAt: new Date(),
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
|
||||
|
||||
await prisma.webhookDelivery.update({
|
||||
where: { id: deliveryId },
|
||||
data: {
|
||||
status: 'FAILED',
|
||||
responseBody: errorMessage.slice(0, 4000),
|
||||
attempts: delivery.attempts + 1,
|
||||
lastAttemptAt: new Date(),
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Retry all failed webhook deliveries that haven't exceeded max retries.
|
||||
* Called by cron.
|
||||
*/
|
||||
export async function retryFailedDeliveries(): Promise<{
|
||||
retried: number
|
||||
errors: number
|
||||
}> {
|
||||
let retried = 0
|
||||
let errors = 0
|
||||
|
||||
const failedDeliveries = await prisma.webhookDelivery.findMany({
|
||||
where: {
|
||||
status: 'FAILED',
|
||||
},
|
||||
include: {
|
||||
webhook: {
|
||||
select: { maxRetries: true, isActive: true },
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
for (const delivery of failedDeliveries) {
|
||||
// Skip if webhook is inactive or max retries exceeded
|
||||
if (!delivery.webhook.isActive) continue
|
||||
if (delivery.attempts >= delivery.webhook.maxRetries) continue
|
||||
|
||||
try {
|
||||
await deliverWebhook(delivery.id)
|
||||
retried++
|
||||
} catch (error) {
|
||||
console.error(`[Webhook] Retry failed for delivery ${delivery.id}:`, error)
|
||||
errors++
|
||||
}
|
||||
}
|
||||
|
||||
return { retried, errors }
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a random HMAC secret for webhook signing.
|
||||
*/
|
||||
export function generateWebhookSecret(): string {
|
||||
return crypto.randomBytes(32).toString('hex')
|
||||
}
|
||||
|
||||
@@ -1,159 +1,159 @@
|
||||
import { initTRPC, TRPCError } from '@trpc/server'
|
||||
import superjson from 'superjson'
|
||||
import { ZodError } from 'zod'
|
||||
import type { Context } from './context'
|
||||
import type { UserRole } from '@prisma/client'
|
||||
|
||||
/**
|
||||
* Initialize tRPC with context type and configuration
|
||||
*/
|
||||
const t = initTRPC.context<Context>().create({
|
||||
transformer: superjson,
|
||||
errorFormatter({ shape, error }) {
|
||||
return {
|
||||
...shape,
|
||||
data: {
|
||||
...shape.data,
|
||||
zodError:
|
||||
error.cause instanceof ZodError ? error.cause.flatten() : null,
|
||||
},
|
||||
}
|
||||
},
|
||||
})
|
||||
|
||||
/**
 * Export reusable router and procedure helpers.
 * All routers/procedures in the app must be built from this single `t`
 * instance so they share the transformer and error formatter above.
 */
export const router = t.router
export const publicProcedure = t.procedure
export const middleware = t.middleware
export const createCallerFactory = t.createCallerFactory
|
||||
|
||||
// =============================================================================
|
||||
// Middleware
|
||||
// =============================================================================
|
||||
|
||||
/**
|
||||
* Middleware to require authenticated user
|
||||
*/
|
||||
const isAuthenticated = middleware(async ({ ctx, next }) => {
|
||||
if (!ctx.session?.user) {
|
||||
throw new TRPCError({
|
||||
code: 'UNAUTHORIZED',
|
||||
message: 'You must be logged in to perform this action',
|
||||
})
|
||||
}
|
||||
|
||||
return next({
|
||||
ctx: {
|
||||
...ctx,
|
||||
user: ctx.session.user,
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
/**
|
||||
* Middleware to require specific role(s)
|
||||
*/
|
||||
const hasRole = (...roles: UserRole[]) =>
|
||||
middleware(async ({ ctx, next }) => {
|
||||
if (!ctx.session?.user) {
|
||||
throw new TRPCError({
|
||||
code: 'UNAUTHORIZED',
|
||||
message: 'You must be logged in to perform this action',
|
||||
})
|
||||
}
|
||||
|
||||
if (!roles.includes(ctx.session.user.role)) {
|
||||
throw new TRPCError({
|
||||
code: 'FORBIDDEN',
|
||||
message: 'You do not have permission to perform this action',
|
||||
})
|
||||
}
|
||||
|
||||
return next({
|
||||
ctx: {
|
||||
...ctx,
|
||||
user: ctx.session.user,
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
/**
|
||||
* Middleware for audit logging
|
||||
*/
|
||||
const withAuditLog = middleware(async ({ ctx, next, path }) => {
|
||||
const result = await next()
|
||||
|
||||
// Log successful mutations
|
||||
if (result.ok && path.includes('.')) {
|
||||
const [, action] = path.split('.')
|
||||
const mutationActions = ['create', 'update', 'delete', 'import', 'submit', 'grant', 'revoke']
|
||||
|
||||
if (mutationActions.some((a) => action?.toLowerCase().includes(a))) {
|
||||
// Audit logging would happen here
|
||||
// We'll implement this in the audit service
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
})
|
||||
|
||||
// =============================================================================
// Procedure Types
// =============================================================================

/**
 * Protected procedure - requires authenticated user
 */
export const protectedProcedure = t.procedure.use(isAuthenticated)

/**
 * Admin procedure - requires SUPER_ADMIN or PROGRAM_ADMIN role
 */
export const adminProcedure = t.procedure.use(
  hasRole('SUPER_ADMIN', 'PROGRAM_ADMIN')
)

/**
 * Super admin procedure - requires SUPER_ADMIN role
 */
export const superAdminProcedure = t.procedure.use(hasRole('SUPER_ADMIN'))

/**
 * Jury procedure - requires JURY_MEMBER role
 * NOTE(review): unlike mentor/observer/awardMaster procedures below, admin
 * roles are NOT granted access here — confirm the exclusion is intentional.
 */
export const juryProcedure = t.procedure.use(hasRole('JURY_MEMBER'))

/**
 * Mentor procedure - requires MENTOR role (or admin)
 */
export const mentorProcedure = t.procedure.use(
  hasRole('SUPER_ADMIN', 'PROGRAM_ADMIN', 'MENTOR')
)

/**
 * Observer procedure - requires OBSERVER role (read-only access, or admin)
 */
export const observerProcedure = t.procedure.use(
  hasRole('SUPER_ADMIN', 'PROGRAM_ADMIN', 'OBSERVER')
)

/**
 * Award master procedure - requires AWARD_MASTER role (or admin)
 */
export const awardMasterProcedure = t.procedure.use(
  hasRole('SUPER_ADMIN', 'PROGRAM_ADMIN', 'AWARD_MASTER')
)

/**
 * Audience procedure - requires any authenticated user
 * (same guard as protectedProcedure; kept as a separate export for intent)
 */
export const audienceProcedure = t.procedure.use(isAuthenticated)

/**
 * Protected procedure with audit logging
 */
export const auditedProcedure = t.procedure
  .use(isAuthenticated)
  .use(withAuditLog)
|
||||
import { initTRPC, TRPCError } from '@trpc/server'
|
||||
import superjson from 'superjson'
|
||||
import { ZodError } from 'zod'
|
||||
import type { Context } from './context'
|
||||
import type { UserRole } from '@prisma/client'
|
||||
|
||||
/**
|
||||
* Initialize tRPC with context type and configuration
|
||||
*/
|
||||
const t = initTRPC.context<Context>().create({
|
||||
transformer: superjson,
|
||||
errorFormatter({ shape, error }) {
|
||||
return {
|
||||
...shape,
|
||||
data: {
|
||||
...shape.data,
|
||||
zodError:
|
||||
error.cause instanceof ZodError ? error.cause.flatten() : null,
|
||||
},
|
||||
}
|
||||
},
|
||||
})
|
||||
|
||||
/**
 * Export reusable router and procedure helpers.
 * All routers/procedures in the app must be built from this single `t`
 * instance so they share the transformer and error formatter above.
 */
export const router = t.router
export const publicProcedure = t.procedure
export const middleware = t.middleware
export const createCallerFactory = t.createCallerFactory
|
||||
|
||||
// =============================================================================
|
||||
// Middleware
|
||||
// =============================================================================
|
||||
|
||||
/**
|
||||
* Middleware to require authenticated user
|
||||
*/
|
||||
const isAuthenticated = middleware(async ({ ctx, next }) => {
|
||||
if (!ctx.session?.user) {
|
||||
throw new TRPCError({
|
||||
code: 'UNAUTHORIZED',
|
||||
message: 'You must be logged in to perform this action',
|
||||
})
|
||||
}
|
||||
|
||||
return next({
|
||||
ctx: {
|
||||
...ctx,
|
||||
user: ctx.session.user,
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
/**
|
||||
* Middleware to require specific role(s)
|
||||
*/
|
||||
const hasRole = (...roles: UserRole[]) =>
|
||||
middleware(async ({ ctx, next }) => {
|
||||
if (!ctx.session?.user) {
|
||||
throw new TRPCError({
|
||||
code: 'UNAUTHORIZED',
|
||||
message: 'You must be logged in to perform this action',
|
||||
})
|
||||
}
|
||||
|
||||
if (!roles.includes(ctx.session.user.role)) {
|
||||
throw new TRPCError({
|
||||
code: 'FORBIDDEN',
|
||||
message: 'You do not have permission to perform this action',
|
||||
})
|
||||
}
|
||||
|
||||
return next({
|
||||
ctx: {
|
||||
...ctx,
|
||||
user: ctx.session.user,
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
/**
|
||||
* Middleware for audit logging
|
||||
*/
|
||||
const withAuditLog = middleware(async ({ ctx, next, path }) => {
|
||||
const result = await next()
|
||||
|
||||
// Log successful mutations
|
||||
if (result.ok && path.includes('.')) {
|
||||
const [, action] = path.split('.')
|
||||
const mutationActions = ['create', 'update', 'delete', 'import', 'submit', 'grant', 'revoke']
|
||||
|
||||
if (mutationActions.some((a) => action?.toLowerCase().includes(a))) {
|
||||
// Audit logging would happen here
|
||||
// We'll implement this in the audit service
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
})
|
||||
|
||||
// =============================================================================
// Procedure Types
// =============================================================================

/**
 * Protected procedure - requires authenticated user
 */
export const protectedProcedure = t.procedure.use(isAuthenticated)

/**
 * Admin procedure - requires SUPER_ADMIN or PROGRAM_ADMIN role
 */
export const adminProcedure = t.procedure.use(
  hasRole('SUPER_ADMIN', 'PROGRAM_ADMIN')
)

/**
 * Super admin procedure - requires SUPER_ADMIN role
 */
export const superAdminProcedure = t.procedure.use(hasRole('SUPER_ADMIN'))

/**
 * Jury procedure - requires JURY_MEMBER role
 * NOTE(review): unlike mentor/observer/awardMaster procedures below, admin
 * roles are NOT granted access here — confirm the exclusion is intentional.
 */
export const juryProcedure = t.procedure.use(hasRole('JURY_MEMBER'))

/**
 * Mentor procedure - requires MENTOR role (or admin)
 */
export const mentorProcedure = t.procedure.use(
  hasRole('SUPER_ADMIN', 'PROGRAM_ADMIN', 'MENTOR')
)

/**
 * Observer procedure - requires OBSERVER role (read-only access, or admin)
 */
export const observerProcedure = t.procedure.use(
  hasRole('SUPER_ADMIN', 'PROGRAM_ADMIN', 'OBSERVER')
)

/**
 * Award master procedure - requires AWARD_MASTER role (or admin)
 */
export const awardMasterProcedure = t.procedure.use(
  hasRole('SUPER_ADMIN', 'PROGRAM_ADMIN', 'AWARD_MASTER')
)

/**
 * Audience procedure - requires any authenticated user
 * (same guard as protectedProcedure; kept as a separate export for intent)
 */
export const audienceProcedure = t.procedure.use(isAuthenticated)

/**
 * Protected procedure with audit logging
 */
export const auditedProcedure = t.procedure
  .use(isAuthenticated)
  .use(withAuditLog)
|
||||
|
||||
@@ -1,42 +1,42 @@
|
||||
import { prisma as globalPrisma } from '@/lib/prisma'
|
||||
import type { Prisma, PrismaClient } from '@prisma/client'
|
||||
|
||||
/**
 * Minimal Prisma-like client that supports `auditLog.create`.
 * Satisfied by both the full PrismaClient and interactive-transaction
 * clients, so audit writes can join an enclosing transaction.
 */
type AuditPrismaClient = Pick<PrismaClient, 'auditLog'>
|
||||
|
||||
/**
|
||||
* Shared utility for creating audit log entries.
|
||||
* Wrapped in try-catch so audit failures never break the calling operation.
|
||||
*
|
||||
* @param input.prisma - Optional Prisma client instance. When omitted the global
|
||||
* singleton is used. Pass `ctx.prisma` from tRPC handlers so audit writes
|
||||
* participate in the same transaction when applicable.
|
||||
*/
|
||||
export async function logAudit(input: {
|
||||
prisma?: AuditPrismaClient
|
||||
userId?: string | null
|
||||
action: string
|
||||
entityType: string
|
||||
entityId?: string
|
||||
detailsJson?: Record<string, unknown>
|
||||
ipAddress?: string
|
||||
userAgent?: string
|
||||
}): Promise<void> {
|
||||
try {
|
||||
const db = input.prisma ?? globalPrisma
|
||||
await db.auditLog.create({
|
||||
data: {
|
||||
userId: input.userId ?? null,
|
||||
action: input.action,
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
detailsJson: input.detailsJson as Prisma.InputJsonValue ?? undefined,
|
||||
ipAddress: input.ipAddress,
|
||||
userAgent: input.userAgent,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
// Never break the calling operation on audit failure
|
||||
console.error('[Audit] Failed to create audit log entry:', error)
|
||||
}
|
||||
}
|
||||
import { prisma as globalPrisma } from '@/lib/prisma'
|
||||
import type { Prisma, PrismaClient } from '@prisma/client'
|
||||
|
||||
/**
 * Minimal Prisma-like client that supports `auditLog.create`.
 * Satisfied by both the full PrismaClient and interactive-transaction
 * clients, so audit writes can join an enclosing transaction.
 */
type AuditPrismaClient = Pick<PrismaClient, 'auditLog'>
|
||||
|
||||
/**
|
||||
* Shared utility for creating audit log entries.
|
||||
* Wrapped in try-catch so audit failures never break the calling operation.
|
||||
*
|
||||
* @param input.prisma - Optional Prisma client instance. When omitted the global
|
||||
* singleton is used. Pass `ctx.prisma` from tRPC handlers so audit writes
|
||||
* participate in the same transaction when applicable.
|
||||
*/
|
||||
export async function logAudit(input: {
|
||||
prisma?: AuditPrismaClient
|
||||
userId?: string | null
|
||||
action: string
|
||||
entityType: string
|
||||
entityId?: string
|
||||
detailsJson?: Record<string, unknown>
|
||||
ipAddress?: string
|
||||
userAgent?: string
|
||||
}): Promise<void> {
|
||||
try {
|
||||
const db = input.prisma ?? globalPrisma
|
||||
await db.auditLog.create({
|
||||
data: {
|
||||
userId: input.userId ?? null,
|
||||
action: input.action,
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
detailsJson: input.detailsJson as Prisma.InputJsonValue ?? undefined,
|
||||
ipAddress: input.ipAddress,
|
||||
userAgent: input.userAgent,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
// Never break the calling operation on audit failure
|
||||
console.error('[Audit] Failed to create audit log entry:', error)
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user