Use admin-configured AI model and add GPT-5/o-series options
Build and Push Docker Image / build (push): successful in 4m6s

- Add getConfiguredModel() that reads ai_model from SystemSettings
- AI assignment and mentor matching now use the admin-selected model
- Remove duplicate OpenAI client in mentor-matching (use shared singleton)
- Add GPT-5, GPT-5 Mini, o3, o3 Mini, o4 Mini to model dropdown

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
Matt 2026-01-30 16:24:46 +01:00
parent 8c598ba3ee
commit bfcfd84008
4 changed files with 29 additions and 20 deletions

View File

@@ -162,6 +162,11 @@ export function AISettingsForm({ settings }: AISettingsFormProps) {
   <SelectContent>
     <SelectItem value="gpt-4o">GPT-4o (Recommended)</SelectItem>
     <SelectItem value="gpt-4o-mini">GPT-4o Mini</SelectItem>
+    <SelectItem value="gpt-5">GPT-5</SelectItem>
+    <SelectItem value="gpt-5-mini">GPT-5 Mini</SelectItem>
+    <SelectItem value="o3">o3</SelectItem>
+    <SelectItem value="o3-mini">o3 Mini</SelectItem>
+    <SelectItem value="o4-mini">o4 Mini</SelectItem>
     <SelectItem value="gpt-4-turbo">GPT-4 Turbo</SelectItem>
     <SelectItem value="gpt-4">GPT-4</SelectItem>
   </SelectContent>
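This commit doesn't show how the selected value is saved, but getConfiguredModel() below reads a SystemSettings row whose key is 'ai_model' and returns its value, so the form's save path presumably writes something along these lines (a sketch under that assumption; selectedModel is a hypothetical variable, not code from this diff):

// Hypothetical save handler: persist the admin's choice under the key that getConfiguredModel() reads.
await prisma.systemSettings.upsert({
  where: { key: 'ai_model' },
  update: { value: selectedModel },
  create: { key: 'ai_model', value: selectedModel },
})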

View File

@@ -107,3 +107,18 @@ export const AI_MODELS = {
   ASSIGNMENT: 'gpt-4o', // Best for complex reasoning
   QUICK: 'gpt-4o-mini', // Faster, cheaper for simple tasks
 } as const
+
+/**
+ * Get the admin-configured AI model from SystemSettings.
+ * Falls back to the provided default if not configured.
+ */
+export async function getConfiguredModel(fallback: string = AI_MODELS.ASSIGNMENT): Promise<string> {
+  try {
+    const setting = await prisma.systemSettings.findUnique({
+      where: { key: 'ai_model' },
+    })
+    return setting?.value || process.env.OPENAI_MODEL || fallback
+  } catch {
+    return process.env.OPENAI_MODEL || fallback
+  }
+}
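The resolution order is: the SystemSettings value, then the OPENAI_MODEL environment variable, then the caller-supplied fallback. A caller doing cheap, quick work could therefore opt into the lighter default like this (a minimal sketch assuming the exports shown above; quickCompletion is only an illustrative name):

import { getOpenAI, getConfiguredModel, AI_MODELS } from '@/lib/openai'

// Illustrative caller: admin-selected model wins, otherwise OPENAI_MODEL, otherwise gpt-4o-mini.
async function quickCompletion(prompt: string) {
  const openai = await getOpenAI()
  if (!openai) throw new Error('OpenAI client not available')
  const model = await getConfiguredModel(AI_MODELS.QUICK)
  return openai.chat.completions.create({
    model,
    messages: [{ role: 'user', content: prompt }],
  })
}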

View File

@@ -5,7 +5,7 @@
  * to generate optimal assignment suggestions.
  */
 
-import { getOpenAI, AI_MODELS } from '@/lib/openai'
+import { getOpenAI, getConfiguredModel } from '@/lib/openai'
 import {
   anonymizeForAI,
   deanonymizeResults,
@@ -153,8 +153,10 @@ async function callAIForAssignments(
   // Build the user prompt
   const userPrompt = buildAssignmentPrompt(anonymizedData, constraints)
 
+  const model = await getConfiguredModel()
+
   const response = await openai.chat.completions.create({
-    model: AI_MODELS.ASSIGNMENT,
+    model,
     messages: [
       { role: 'system', content: ASSIGNMENT_SYSTEM_PROMPT },
       { role: 'user', content: userPrompt },
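One follow-up worth checking, not addressed by this commit: the newly selectable o-series and GPT-5 models reject some Chat Completions parameters that the GPT-4 family accepts (non-default temperature, and max_tokens, which they replace with max_completion_tokens). If the rest of this call passes such options, a guard roughly like the sketch below may be needed; the isReasoningModel heuristic and the token limit are hypothetical, not code from this repo:

// Hypothetical guard: only send sampling/length parameters the selected model family accepts.
const isReasoningModel = /^(o\d|gpt-5)/.test(model)
const response = await openai.chat.completions.create({
  model,
  messages: [
    { role: 'system', content: ASSIGNMENT_SYSTEM_PROMPT },
    { role: 'user', content: userPrompt },
  ],
  ...(isReasoningModel
    ? { max_completion_tokens: 4096 }
    : { temperature: 0.2, max_tokens: 4096 }),
})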

View File

@@ -1,20 +1,5 @@
 import { PrismaClient, OceanIssue, CompetitionCategory } from '@prisma/client'
-import OpenAI from 'openai'
-
-// Lazy initialization to avoid errors when API key is not set
-let openaiClient: OpenAI | null = null
-
-function getOpenAIClient(): OpenAI | null {
-  if (!process.env.OPENAI_API_KEY) {
-    return null
-  }
-  if (!openaiClient) {
-    openaiClient = new OpenAI({
-      apiKey: process.env.OPENAI_API_KEY,
-    })
-  }
-  return openaiClient
-}
+import { getOpenAI, getConfiguredModel } from '@/lib/openai'
 
 interface ProjectInfo {
   id: string
@@ -170,13 +155,15 @@ Respond in JSON format:
   ]
 }`
 
-  const openai = getOpenAIClient()
+  const openai = await getOpenAI()
   if (!openai) {
     throw new Error('OpenAI client not available')
   }
 
+  const model = await getConfiguredModel()
+
   const response = await openai.chat.completions.create({
-    model: 'gpt-4o-mini',
+    model,
     messages: [
       {
         role: 'system',
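The shared getOpenAI() in '@/lib/openai' isn't part of this diff; judging from the null check above and the duplicate it replaces, it presumably follows the same lazy-singleton pattern. A sketch of the assumed shape, not the actual file (it may also be declared async; awaiting a non-promise value is harmless, so the call site works either way):

import OpenAI from 'openai'

// Assumed shape of the shared client: created lazily, null when no API key is configured.
let client: OpenAI | null = null

export function getOpenAI(): OpenAI | null {
  if (!process.env.OPENAI_API_KEY) return null
  if (!client) client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY })
  return client
}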