diff --git a/src/components/settings/ai-settings-form.tsx b/src/components/settings/ai-settings-form.tsx
index 9a30036..fb24720 100644
--- a/src/components/settings/ai-settings-form.tsx
+++ b/src/components/settings/ai-settings-form.tsx
@@ -162,6 +162,11 @@ export function AISettingsForm({ settings }: AISettingsFormProps) {
GPT-4o (Recommended)
GPT-4o Mini
+ GPT-5
+ GPT-5 Mini
+ o3
+ o3 Mini
+ o4 Mini
GPT-4 Turbo
GPT-4
diff --git a/src/lib/openai.ts b/src/lib/openai.ts
index b195168..8682a50 100644
--- a/src/lib/openai.ts
+++ b/src/lib/openai.ts
@@ -107,3 +107,18 @@ export const AI_MODELS = {
ASSIGNMENT: 'gpt-4o', // Best for complex reasoning
QUICK: 'gpt-4o-mini', // Faster, cheaper for simple tasks
} as const
+
+/**
+ * Get the admin-configured AI model from SystemSettings.
+ * Falls back to the OPENAI_MODEL env var, then the provided default, if not configured.
+ */
+export async function getConfiguredModel(fallback: string = AI_MODELS.ASSIGNMENT): Promise<string> {
+ try {
+ const setting = await prisma.systemSettings.findUnique({
+ where: { key: 'ai_model' },
+ })
+ return setting?.value || process.env.OPENAI_MODEL || fallback
+ } catch {
+ return process.env.OPENAI_MODEL || fallback
+ }
+}
diff --git a/src/server/services/ai-assignment.ts b/src/server/services/ai-assignment.ts
index 9ad8932..d140de0 100644
--- a/src/server/services/ai-assignment.ts
+++ b/src/server/services/ai-assignment.ts
@@ -5,7 +5,7 @@
* to generate optimal assignment suggestions.
*/
-import { getOpenAI, AI_MODELS } from '@/lib/openai'
+import { getOpenAI, getConfiguredModel } from '@/lib/openai'
import {
anonymizeForAI,
deanonymizeResults,
@@ -153,8 +153,10 @@ async function callAIForAssignments(
// Build the user prompt
const userPrompt = buildAssignmentPrompt(anonymizedData, constraints)
+ const model = await getConfiguredModel()
+
const response = await openai.chat.completions.create({
- model: AI_MODELS.ASSIGNMENT,
+ model,
messages: [
{ role: 'system', content: ASSIGNMENT_SYSTEM_PROMPT },
{ role: 'user', content: userPrompt },
diff --git a/src/server/services/mentor-matching.ts b/src/server/services/mentor-matching.ts
index ae3bd67..99f48b1 100644
--- a/src/server/services/mentor-matching.ts
+++ b/src/server/services/mentor-matching.ts
@@ -1,20 +1,5 @@
import { PrismaClient, OceanIssue, CompetitionCategory } from '@prisma/client'
-import OpenAI from 'openai'
-
-// Lazy initialization to avoid errors when API key is not set
-let openaiClient: OpenAI | null = null
-
-function getOpenAIClient(): OpenAI | null {
- if (!process.env.OPENAI_API_KEY) {
- return null
- }
- if (!openaiClient) {
- openaiClient = new OpenAI({
- apiKey: process.env.OPENAI_API_KEY,
- })
- }
- return openaiClient
-}
+import { getOpenAI, getConfiguredModel } from '@/lib/openai'
interface ProjectInfo {
id: string
@@ -170,13 +155,15 @@ Respond in JSON format:
]
}`
- const openai = getOpenAIClient()
+ const openai = await getOpenAI()
if (!openai) {
throw new Error('OpenAI client not available')
}
+ const model = await getConfiguredModel()
+
const response = await openai.chat.completions.create({
- model: 'gpt-4o-mini',
+ model,
messages: [
{
role: 'system',