Add visual progress indicator for AI assignment batches
Build and Push Docker Image / build (push) Successful in 9m13s Details

- Add AssignmentJob model to track AI assignment progress
- Create startAIAssignmentJob mutation for background processing
- Add getAIAssignmentJobStatus query for polling progress
- Update AI assignment service with progress callback support
- Add progress bar UI showing batch/project processing status
- Add toast notifications for job completion/failure
- Add AI_SUGGESTIONS_READY notification type

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
Matt 2026-02-04 17:40:26 +01:00
parent 148925cb95
commit 6f6d5ef501
5 changed files with 439 additions and 21 deletions

View File

@ -374,6 +374,7 @@ model Round {
filteringRules FilteringRule[] filteringRules FilteringRule[]
filteringResults FilteringResult[] filteringResults FilteringResult[]
filteringJobs FilteringJob[] filteringJobs FilteringJob[]
assignmentJobs AssignmentJob[]
@@index([programId]) @@index([programId])
@@index([status]) @@index([status])
@ -1092,6 +1093,38 @@ enum FilteringJobStatus {
FAILED FAILED
} }
// Tracks progress of long-running AI assignment jobs
model AssignmentJob {
id String @id @default(cuid())
roundId String
// Lifecycle state; transitions PENDING -> RUNNING -> COMPLETED | FAILED
status AssignmentJobStatus @default(PENDING)
// Progress counters, updated by the background job after each batch
totalProjects Int @default(0)
totalBatches Int @default(0)
currentBatch Int @default(0)
processedCount Int @default(0)
// Number of suggestions produced once the job completes
suggestionsCount Int @default(0)
// Populated only when status is FAILED
errorMessage String? @db.Text
startedAt DateTime?
completedAt DateTime?
// True when the non-AI fallback algorithm produced the suggestions
fallbackUsed Boolean @default(false)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
// Relations
round Round @relation(fields: [roundId], references: [id], onDelete: Cascade)
@@index([roundId])
@@index([status])
}
// Lifecycle states for an AssignmentJob
enum AssignmentJobStatus {
// Job row created, background runner not started yet
PENDING
// Background runner is processing batches
RUNNING
// Finished successfully; suggestionsCount is final
COMPLETED
// Aborted with an error; see AssignmentJob.errorMessage
FAILED
}
// ============================================================================= // =============================================================================
// SPECIAL AWARDS SYSTEM // SPECIAL AWARDS SYSTEM
// ============================================================================= // =============================================================================

View File

@ -1,6 +1,6 @@
'use client' 'use client'
import { Suspense, use, useState } from 'react' import { Suspense, use, useState, useEffect } from 'react'
import Link from 'next/link' import Link from 'next/link'
import { trpc } from '@/lib/trpc/client' import { trpc } from '@/lib/trpc/client'
import { import {
@ -77,23 +77,47 @@ function AssignmentManagementContent({ roundId }: { roundId: string }) {
const [selectedJuror, setSelectedJuror] = useState<string>('') const [selectedJuror, setSelectedJuror] = useState<string>('')
const [selectedProject, setSelectedProject] = useState<string>('') const [selectedProject, setSelectedProject] = useState<string>('')
const [useAI, setUseAI] = useState(false) const [useAI, setUseAI] = useState(false)
const [activeJobId, setActiveJobId] = useState<string | null>(null)
const { data: round, isLoading: loadingRound } = trpc.round.get.useQuery({ id: roundId }) const { data: round, isLoading: loadingRound } = trpc.round.get.useQuery({ id: roundId })
const { data: assignments, isLoading: loadingAssignments } = trpc.assignment.listByRound.useQuery({ roundId }) const { data: assignments, isLoading: loadingAssignments } = trpc.assignment.listByRound.useQuery({ roundId })
const { data: stats, isLoading: loadingStats } = trpc.assignment.getStats.useQuery({ roundId }) const { data: stats, isLoading: loadingStats } = trpc.assignment.getStats.useQuery({ roundId })
const { data: isAIAvailable } = trpc.assignment.isAIAvailable.useQuery() const { data: isAIAvailable } = trpc.assignment.isAIAvailable.useQuery()
// AI Assignment job queries
const { data: latestJob, refetch: refetchLatestJob } = trpc.assignment.getLatestAIAssignmentJob.useQuery(
{ roundId },
{ enabled: useAI }
)
// Poll for job status when there's an active job
const { data: jobStatus } = trpc.assignment.getAIAssignmentJobStatus.useQuery(
{ jobId: activeJobId! },
{
enabled: !!activeJobId,
refetchInterval: activeJobId ? 2000 : false,
}
)
// Start AI assignment job mutation
const startAIJob = trpc.assignment.startAIAssignmentJob.useMutation()
const isAIJobRunning = jobStatus?.status === 'RUNNING' || jobStatus?.status === 'PENDING'
const aiJobProgressPercent = jobStatus?.totalBatches
? Math.round((jobStatus.currentBatch / jobStatus.totalBatches) * 100)
: 0
// Algorithmic suggestions (default) // Algorithmic suggestions (default)
const { data: algorithmicSuggestions, isLoading: loadingAlgorithmic, refetch: refetchAlgorithmic } = trpc.assignment.getSuggestions.useQuery( const { data: algorithmicSuggestions, isLoading: loadingAlgorithmic, refetch: refetchAlgorithmic } = trpc.assignment.getSuggestions.useQuery(
{ roundId }, { roundId },
{ enabled: !!round && !useAI } { enabled: !!round && !useAI }
) )
// AI-powered suggestions (expensive - disable auto refetch) // AI-powered suggestions (expensive - only used after job completes)
const { data: aiSuggestionsRaw, isLoading: loadingAI, refetch: refetchAI } = trpc.assignment.getAISuggestions.useQuery( const { data: aiSuggestionsRaw, isLoading: loadingAI, refetch: refetchAI } = trpc.assignment.getAISuggestions.useQuery(
{ roundId, useAI: true }, { roundId, useAI: true },
{ {
enabled: !!round && useAI, enabled: !!round && useAI && !isAIJobRunning,
staleTime: Infinity, // Never consider stale (only refetch manually) staleTime: Infinity, // Never consider stale (only refetch manually)
refetchOnWindowFocus: false, refetchOnWindowFocus: false,
refetchOnReconnect: false, refetchOnReconnect: false,
@ -101,6 +125,41 @@ function AssignmentManagementContent({ roundId }: { roundId: string }) {
} }
) )
// Set active job from latest job on load
useEffect(() => {
if (latestJob && (latestJob.status === 'RUNNING' || latestJob.status === 'PENDING')) {
setActiveJobId(latestJob.id)
}
}, [latestJob])
// Handle job completion
useEffect(() => {
if (jobStatus?.status === 'COMPLETED') {
toast.success(
`AI Assignment complete: ${jobStatus.suggestionsCount} suggestions generated${jobStatus.fallbackUsed ? ' (using fallback algorithm)' : ''}`
)
setActiveJobId(null)
refetchLatestJob()
refetchAI()
} else if (jobStatus?.status === 'FAILED') {
toast.error(`AI Assignment failed: ${jobStatus.errorMessage || 'Unknown error'}`)
setActiveJobId(null)
refetchLatestJob()
}
}, [jobStatus?.status, jobStatus?.suggestionsCount, jobStatus?.fallbackUsed, jobStatus?.errorMessage, refetchLatestJob, refetchAI])
const handleStartAIJob = async () => {
try {
const result = await startAIJob.mutateAsync({ roundId })
setActiveJobId(result.jobId)
toast.info('AI Assignment job started. Progress will update automatically.')
} catch (error) {
toast.error(
error instanceof Error ? error.message : 'Failed to start AI assignment'
)
}
}
// Normalize AI suggestions to match algorithmic format // Normalize AI suggestions to match algorithmic format
const aiSuggestions = aiSuggestionsRaw?.suggestions?.map((s) => ({ const aiSuggestions = aiSuggestionsRaw?.suggestions?.map((s) => ({
userId: s.jurorId, userId: s.jurorId,
@ -113,7 +172,7 @@ function AssignmentManagementContent({ roundId }: { roundId: string }) {
// Use the appropriate suggestions based on mode // Use the appropriate suggestions based on mode
const suggestions = useAI ? aiSuggestions : (algorithmicSuggestions ?? []) const suggestions = useAI ? aiSuggestions : (algorithmicSuggestions ?? [])
const loadingSuggestions = useAI ? loadingAI : loadingAlgorithmic const loadingSuggestions = useAI ? (loadingAI || isAIJobRunning) : loadingAlgorithmic
const refetchSuggestions = useAI ? refetchAI : refetchAlgorithmic const refetchSuggestions = useAI ? refetchAI : refetchAlgorithmic
// Get available jurors for manual assignment // Get available jurors for manual assignment
@ -483,31 +542,92 @@ function AssignmentManagementContent({ roundId }: { roundId: string }) {
variant={useAI ? 'default' : 'outline'} variant={useAI ? 'default' : 'outline'}
size="sm" size="sm"
onClick={() => { onClick={() => {
setUseAI(!useAI) if (!useAI) {
setSelectedSuggestions(new Set()) setUseAI(true)
setSelectedSuggestions(new Set())
// Start AI job if no suggestions yet
if (!aiSuggestionsRaw?.suggestions?.length && !isAIJobRunning) {
handleStartAIJob()
}
} else {
setUseAI(false)
setSelectedSuggestions(new Set())
}
}} }}
disabled={!isAIAvailable && !useAI} disabled={(!isAIAvailable && !useAI) || isAIJobRunning}
title={!isAIAvailable ? 'OpenAI API key not configured' : undefined} title={!isAIAvailable ? 'OpenAI API key not configured' : undefined}
> >
<Sparkles className={`mr-2 h-4 w-4 ${useAI ? 'text-amber-300' : ''}`} /> <Sparkles className={`mr-2 h-4 w-4 ${useAI ? 'text-amber-300' : ''}`} />
{useAI ? 'AI Mode' : 'Use AI'} {useAI ? 'AI Mode' : 'Use AI'}
</Button> </Button>
<Button {useAI && !isAIJobRunning && (
variant="outline" <Button
size="sm" variant="outline"
onClick={() => refetchSuggestions()} size="sm"
disabled={loadingSuggestions} onClick={handleStartAIJob}
> disabled={startAIJob.isPending}
<RefreshCw title="Run AI analysis again"
className={`mr-2 h-4 w-4 ${loadingSuggestions ? 'animate-spin' : ''}`} >
/> {startAIJob.isPending ? (
Refresh <Loader2 className="mr-2 h-4 w-4 animate-spin" />
</Button> ) : (
<RefreshCw className="mr-2 h-4 w-4" />
)}
Re-analyze
</Button>
)}
{!useAI && (
<Button
variant="outline"
size="sm"
onClick={() => refetchSuggestions()}
disabled={loadingSuggestions}
>
<RefreshCw
className={`mr-2 h-4 w-4 ${loadingSuggestions ? 'animate-spin' : ''}`}
/>
Refresh
</Button>
)}
</div> </div>
</div> </div>
</CardHeader> </CardHeader>
<CardContent> <CardContent>
{loadingSuggestions ? ( {/* AI Job Progress Indicator */}
{isAIJobRunning && jobStatus && (
<div className="mb-4 p-4 rounded-lg bg-blue-50 dark:bg-blue-950/20 border border-blue-200 dark:border-blue-900">
<div className="space-y-3">
<div className="flex items-center gap-3">
<Loader2 className="h-5 w-5 animate-spin text-blue-600" />
<div className="flex-1">
<p className="font-medium text-blue-900 dark:text-blue-100">
AI Assignment Analysis in Progress
</p>
<p className="text-sm text-blue-700 dark:text-blue-300">
Processing {jobStatus.totalProjects} projects in {jobStatus.totalBatches} batches
</p>
</div>
<Badge variant="outline" className="border-blue-300 text-blue-700">
<Clock className="mr-1 h-3 w-3" />
Batch {jobStatus.currentBatch} of {jobStatus.totalBatches}
</Badge>
</div>
<div className="space-y-1">
<div className="flex justify-between text-sm">
<span className="text-blue-700 dark:text-blue-300">
{jobStatus.processedCount} of {jobStatus.totalProjects} projects processed
</span>
<span className="font-medium text-blue-900 dark:text-blue-100">
{aiJobProgressPercent}%
</span>
</div>
<Progress value={aiJobProgressPercent} className="h-2" />
</div>
</div>
</div>
)}
{loadingSuggestions && !isAIJobRunning ? (
<div className="flex items-center justify-center py-8"> <div className="flex items-center justify-center py-8">
<Loader2 className="h-8 w-8 animate-spin text-muted-foreground" /> <Loader2 className="h-8 w-8 animate-spin text-muted-foreground" />
</div> </div>

View File

@ -5,14 +5,157 @@ import { getUserAvatarUrl } from '../utils/avatar-url'
import { import {
generateAIAssignments, generateAIAssignments,
generateFallbackAssignments, generateFallbackAssignments,
type AssignmentProgressCallback,
} from '../services/ai-assignment' } from '../services/ai-assignment'
import { isOpenAIConfigured } from '@/lib/openai' import { isOpenAIConfigured } from '@/lib/openai'
import { prisma } from '@/lib/prisma'
import { import {
createNotification, createNotification,
createBulkNotifications, createBulkNotifications,
notifyAdmins,
NotificationTypes, NotificationTypes,
} from '../services/in-app-notification' } from '../services/in-app-notification'
// Background job execution function.
/**
 * Runs an AI assignment job to completion, persisting progress on the
 * AssignmentJob row so the UI can poll it via getAIAssignmentJobStatus.
 *
 * Invoked fire-and-forget from the startAIAssignmentJob mutation, so it must
 * never leave an unhandled rejection: every failure path is caught and
 * recorded (or at worst logged).
 *
 * @param jobId  - id of the AssignmentJob row created by the mutation
 * @param roundId - round whose projects/jurors are analyzed
 * @param userId - id of the admin who started the job (forwarded for auditing)
 */
async function runAIAssignmentJob(jobId: string, roundId: string, userId: string) {
  try {
    // Mark the job as running before any heavy work starts
    await prisma.assignmentJob.update({
      where: { id: jobId },
      data: { status: 'RUNNING', startedAt: new Date() },
    })

    // Round constraints that bound the assignment algorithm
    const round = await prisma.round.findUniqueOrThrow({
      where: { id: roundId },
      select: {
        name: true,
        requiredReviews: true,
        minAssignmentsPerJuror: true,
        maxAssignmentsPerJuror: true,
      },
    })

    // All active jury members with their expertise and current load in this round
    const jurors = await prisma.user.findMany({
      where: { role: 'JURY_MEMBER', status: 'ACTIVE' },
      select: {
        id: true,
        name: true,
        email: true,
        expertiseTags: true,
        maxAssignments: true,
        _count: {
          select: {
            assignments: { where: { roundId } },
          },
        },
      },
    })

    // All projects in the round
    const projects = await prisma.project.findMany({
      where: { roundId },
      select: {
        id: true,
        title: true,
        description: true,
        tags: true,
        teamName: true,
        _count: { select: { assignments: true } },
      },
    })

    // Existing assignments so the AI does not duplicate them
    const existingAssignments = await prisma.assignment.findMany({
      where: { roundId },
      select: { userId: true, projectId: true },
    })

    // Batch bookkeeping for the progress bar.
    // NOTE(review): BATCH_SIZE must match ASSIGNMENT_BATCH_SIZE in the
    // ai-assignment service, otherwise totalBatches reported to the user
    // drifts from the batches actually processed — consider exporting the
    // constant from that module instead of duplicating the value here.
    const BATCH_SIZE = 15
    const totalBatches = Math.ceil(projects.length / BATCH_SIZE)
    await prisma.assignmentJob.update({
      where: { id: jobId },
      data: { totalProjects: projects.length, totalBatches },
    })

    // Persist per-batch progress so the polling query can report it
    const onProgress: AssignmentProgressCallback = async (progress) => {
      await prisma.assignmentJob.update({
        where: { id: jobId },
        data: {
          currentBatch: progress.currentBatch,
          processedCount: progress.processedCount,
        },
      })
    }

    const constraints = {
      requiredReviewsPerProject: round.requiredReviews,
      minAssignmentsPerJuror: round.minAssignmentsPerJuror,
      maxAssignmentsPerJuror: round.maxAssignmentsPerJuror,
      existingAssignments: existingAssignments.map((a) => ({
        jurorId: a.userId,
        projectId: a.projectId,
      })),
    }

    // Execute AI assignment with progress callback
    const result = await generateAIAssignments(
      jurors,
      projects,
      constraints,
      userId,
      roundId,
      onProgress
    )

    // Mark job as completed
    await prisma.assignmentJob.update({
      where: { id: jobId },
      data: {
        status: 'COMPLETED',
        completedAt: new Date(),
        processedCount: projects.length,
        suggestionsCount: result.suggestions.length,
        fallbackUsed: result.fallbackUsed ?? false,
      },
    })

    // Notify admins that AI assignment is complete.
    // A notification failure must NOT flip an already-COMPLETED job to FAILED,
    // so it gets its own try/catch instead of falling through to the outer handler.
    try {
      await notifyAdmins({
        type: NotificationTypes.AI_SUGGESTIONS_READY,
        title: 'AI Assignment Suggestions Ready',
        message: `AI generated ${result.suggestions.length} assignment suggestions for ${round.name || 'round'}${result.fallbackUsed ? ' (using fallback algorithm)' : ''}.`,
        linkUrl: `/admin/rounds/${roundId}/assignments`,
        linkLabel: 'View Suggestions',
        priority: 'high',
        metadata: {
          roundId,
          jobId,
          projectCount: projects.length,
          suggestionsCount: result.suggestions.length,
          fallbackUsed: result.fallbackUsed,
        },
      })
    } catch (notifyError) {
      console.error('[AI Assignment Job] Failed to notify admins:', notifyError)
    }
  } catch (error) {
    console.error('[AI Assignment Job] Error:', error)
    // Best-effort failure marking: if this DB write itself rejects we only log,
    // to avoid an unhandled rejection in this fire-and-forget context.
    try {
      await prisma.assignmentJob.update({
        where: { id: jobId },
        data: {
          status: 'FAILED',
          errorMessage: error instanceof Error ? error.message : 'Unknown error',
          completedAt: new Date(),
        },
      })
    } catch (updateError) {
      console.error('[AI Assignment Job] Failed to record job failure:', updateError)
    }
  }
}
export const assignmentRouter = router({ export const assignmentRouter = router({
/** /**
* List assignments for a round (admin only) * List assignments for a round (admin only)
@ -851,4 +994,101 @@ export const assignmentRouter = router({
return { created: created.count } return { created: created.count }
}), }),
/**
 * Start an AI assignment job for a round (admin only).
 *
 * Creates an AssignmentJob row and kicks off background processing without
 * awaiting it; the client polls getAIAssignmentJobStatus for progress.
 * Rejects when a job for the round is already in flight or when the OpenAI
 * API is not configured.
 */
startAIAssignmentJob: adminProcedure
  .input(z.object({ roundId: z.string() }))
  .mutation(async ({ ctx, input }) => {
    const { roundId } = input

    // Reject if a job for this round is already in flight.
    // NOTE(review): findFirst-then-create is not race-free under concurrent
    // requests; a partial unique index would close the gap — confirm whether
    // double-submits are a realistic concern here.
    const inFlight = await ctx.prisma.assignmentJob.findFirst({
      where: {
        roundId,
        status: { in: ['PENDING', 'RUNNING'] },
      },
    })
    if (inFlight) {
      throw new TRPCError({
        code: 'BAD_REQUEST',
        message: 'An AI assignment job is already running for this round',
      })
    }

    // Verify AI is available
    if (!isOpenAIConfigured()) {
      throw new TRPCError({
        code: 'BAD_REQUEST',
        message: 'OpenAI API is not configured',
      })
    }

    // Create the tracking row, then launch the worker fire-and-forget
    const { id: jobId } = await ctx.prisma.assignmentJob.create({
      data: {
        roundId,
        status: 'PENDING',
      },
    })
    void runAIAssignmentJob(jobId, roundId, ctx.user.id).catch(console.error)

    return { jobId }
  }),
/**
 * Get AI assignment job status (for polling).
 *
 * Returns the job's lifecycle status plus progress counters for the UI
 * progress bar. Throws NOT_FOUND for an unknown jobId instead of letting a
 * Prisma findUniqueOrThrow error surface as an internal server error.
 *
 * NOTE(review): this is protectedProcedure while the sibling job endpoints
 * are adminProcedure, so any signed-in user can poll any job by id —
 * confirm whether that broader access is intentional.
 */
getAIAssignmentJobStatus: protectedProcedure
  .input(z.object({ jobId: z.string() }))
  .query(async ({ ctx, input }) => {
    const job = await ctx.prisma.assignmentJob.findUnique({
      where: { id: input.jobId },
    })
    if (!job) {
      throw new TRPCError({
        code: 'NOT_FOUND',
        message: 'Assignment job not found',
      })
    }
    return {
      id: job.id,
      status: job.status,
      totalProjects: job.totalProjects,
      totalBatches: job.totalBatches,
      currentBatch: job.currentBatch,
      processedCount: job.processedCount,
      suggestionsCount: job.suggestionsCount,
      fallbackUsed: job.fallbackUsed,
      errorMessage: job.errorMessage,
      startedAt: job.startedAt,
      completedAt: job.completedAt,
    }
  }),
/**
 * Get the latest AI assignment job for a round (admin only).
 *
 * Returns the most recently created job's status and progress fields, or
 * null when the round has never had an AI assignment job.
 */
getLatestAIAssignmentJob: adminProcedure
  .input(z.object({ roundId: z.string() }))
  .query(async ({ ctx, input }) => {
    const job = await ctx.prisma.assignmentJob.findFirst({
      where: { roundId: input.roundId },
      orderBy: { createdAt: 'desc' },
    })
    if (!job) {
      return null
    }
    // Expose only the fields the client needs, in a stable shape
    const {
      id,
      status,
      totalProjects,
      totalBatches,
      currentBatch,
      processedCount,
      suggestionsCount,
      fallbackUsed,
      errorMessage,
      startedAt,
      completedAt,
      createdAt,
    } = job
    return {
      id,
      status,
      totalProjects,
      totalBatches,
      currentBatch,
      processedCount,
      suggestionsCount,
      fallbackUsed,
      errorMessage,
      startedAt,
      completedAt,
      createdAt,
    }
  }),
}) })

View File

@ -86,6 +86,15 @@ interface AssignmentConstraints {
}> }>
} }
/**
 * Callback invoked after each processed batch so callers can persist
 * progress (e.g. onto an AssignmentJob row). Awaited between batches.
 *
 * A call-signature-only interface is idiomatically a function type alias;
 * behavior for all consumers (assignment and invocation) is unchanged.
 */
export type AssignmentProgressCallback = (progress: {
  currentBatch: number
  totalBatches: number
  processedCount: number
  totalProjects: number
}) => Promise<void>
// ─── AI Processing ─────────────────────────────────────────────────────────── // ─── AI Processing ───────────────────────────────────────────────────────────
/** /**
@ -247,7 +256,8 @@ export async function generateAIAssignments(
projects: ProjectForAssignment[], projects: ProjectForAssignment[],
constraints: AssignmentConstraints, constraints: AssignmentConstraints,
userId?: string, userId?: string,
entityId?: string entityId?: string,
onProgress?: AssignmentProgressCallback
): Promise<AIAssignmentResult> { ): Promise<AIAssignmentResult> {
// Truncate descriptions before anonymization // Truncate descriptions before anonymization
const truncatedProjects = projects.map((p) => ({ const truncatedProjects = projects.map((p) => ({
@ -279,11 +289,14 @@ export async function generateAIAssignments(
let totalTokens = 0 let totalTokens = 0
// Process projects in batches // Process projects in batches
const totalBatches = Math.ceil(anonymizedData.projects.length / ASSIGNMENT_BATCH_SIZE)
for (let i = 0; i < anonymizedData.projects.length; i += ASSIGNMENT_BATCH_SIZE) { for (let i = 0; i < anonymizedData.projects.length; i += ASSIGNMENT_BATCH_SIZE) {
const batchProjects = anonymizedData.projects.slice(i, i + ASSIGNMENT_BATCH_SIZE) const batchProjects = anonymizedData.projects.slice(i, i + ASSIGNMENT_BATCH_SIZE)
const batchMappings = anonymizedData.projectMappings.slice(i, i + ASSIGNMENT_BATCH_SIZE) const batchMappings = anonymizedData.projectMappings.slice(i, i + ASSIGNMENT_BATCH_SIZE)
const currentBatch = Math.floor(i / ASSIGNMENT_BATCH_SIZE) + 1
console.log(`[AI Assignment] Processing batch ${Math.floor(i / ASSIGNMENT_BATCH_SIZE) + 1}/${Math.ceil(anonymizedData.projects.length / ASSIGNMENT_BATCH_SIZE)}`) console.log(`[AI Assignment] Processing batch ${currentBatch}/${totalBatches}`)
const { suggestions, tokensUsed } = await processAssignmentBatch( const { suggestions, tokensUsed } = await processAssignmentBatch(
openai, openai,
@ -298,6 +311,17 @@ export async function generateAIAssignments(
allSuggestions.push(...suggestions) allSuggestions.push(...suggestions)
totalTokens += tokensUsed totalTokens += tokensUsed
// Report progress after each batch
if (onProgress) {
const processedCount = Math.min((currentBatch) * ASSIGNMENT_BATCH_SIZE, projects.length)
await onProgress({
currentBatch,
totalBatches,
processedCount,
totalProjects: projects.length,
})
}
} }
console.log(`[AI Assignment] Completed. Total suggestions: ${allSuggestions.length}, Total tokens: ${totalTokens}`) console.log(`[AI Assignment] Completed. Total suggestions: ${allSuggestions.length}, Total tokens: ${totalTokens}`)

View File

@ -16,6 +16,7 @@ export const NotificationTypes = {
// Admin notifications // Admin notifications
FILTERING_COMPLETE: 'FILTERING_COMPLETE', FILTERING_COMPLETE: 'FILTERING_COMPLETE',
FILTERING_FAILED: 'FILTERING_FAILED', FILTERING_FAILED: 'FILTERING_FAILED',
AI_SUGGESTIONS_READY: 'AI_SUGGESTIONS_READY',
NEW_APPLICATION: 'NEW_APPLICATION', NEW_APPLICATION: 'NEW_APPLICATION',
BULK_APPLICATIONS: 'BULK_APPLICATIONS', BULK_APPLICATIONS: 'BULK_APPLICATIONS',
DOCUMENTS_UPLOADED: 'DOCUMENTS_UPLOADED', DOCUMENTS_UPLOADED: 'DOCUMENTS_UPLOADED',