) => {
- const hasKeyField = fieldKey === 'openaiApiKey' ? 'hasOpenaiKey' : 'hasAnthropicKey'
+ const hasKeyField = fieldKey === 'openaiApiKey' ? 'hasOpenaiKey' : fieldKey === 'minimaxApiKey' ? 'hasMinimaxKey' : 'hasAnthropicKey'
const hasKey = d[hasKeyField]
const masked = d[fieldKey] as string | null
if (hasKey && masked) setSavedMasked(masked)
@@ -305,7 +311,7 @@ function ModelSelector({
onToast,
}: {
models: { value: string; label: string; description: string }[]
- settingKey: 'anthropicModel' | 'openaiModel'
+ settingKey: 'anthropicModel' | 'openaiModel' | 'minimaxModel'
defaultValue: string
onToast: (t: Toast) => void
}) {
@@ -495,7 +501,7 @@ function CodexCliStatusBox() {
)
}
-function ProviderToggle({ value, onChange }: { value: 'anthropic' | 'openai'; onChange: (v: 'anthropic' | 'openai') => void }) {
+function ProviderToggle({ value, onChange }: { value: 'anthropic' | 'openai' | 'minimax'; onChange: (v: 'anthropic' | 'openai' | 'minimax') => void }) {
return (
+
)
}
function ApiKeySection({ onToast }: { onToast: (t: Toast) => void }) {
- const [provider, setProvider] = useState<'anthropic' | 'openai' | null>(null)
+ const [provider, setProvider] = useState<'anthropic' | 'openai' | 'minimax' | null>(null)
useEffect(() => {
fetch('/api/settings')
.then((r) => r.json())
.then((d: { provider?: string }) => {
- setProvider(d.provider === 'openai' ? 'openai' : 'anthropic')
+ setProvider(d.provider === 'openai' ? 'openai' : d.provider === 'minimax' ? 'minimax' : 'anthropic')
})
.catch(() => setProvider('anthropic'))
}, [])
- async function handleProviderChange(newProvider: 'anthropic' | 'openai') {
+ async function handleProviderChange(newProvider: 'anthropic' | 'openai' | 'minimax') {
const prev = provider
setProvider(newProvider)
+ const labels: Record<string, string> = { anthropic: 'Anthropic', openai: 'OpenAI', minimax: 'MiniMax' }
try {
const res = await fetch('/api/settings', {
method: 'POST',
@@ -544,7 +561,7 @@ function ApiKeySection({ onToast }: { onToast: (t: Toast) => void }) {
body: JSON.stringify({ provider: newProvider }),
})
if (!res.ok) throw new Error('Failed to save provider')
- onToast({ type: 'success', message: `Switched to ${newProvider === 'openai' ? 'OpenAI' : 'Anthropic'}` })
+ onToast({ type: 'success', message: `Switched to ${labels[newProvider]}` })
} catch {
setProvider(prev) // revert on failure
onToast({ type: 'error', message: 'Failed to save provider preference' })
@@ -598,7 +615,7 @@ function ApiKeySection({ onToast }: { onToast: (t: Toast) => void }) {
>
- ) : (
+ ) : provider === 'openai' ? (
<>
@@ -622,6 +639,27 @@ function ApiKeySection({ onToast }: { onToast: (t: Toast) => void }) {
>
+ ) : (
+
+
+
+
+
MiniMax M2.7 supports 1M context window — great for large batch categorization
+
+
)}
Keys are stored in plaintext in your local SQLite database (prisma/dev.db). Do not expose the database file.
@@ -756,7 +794,7 @@ function DangerZoneSection({ onToast }: { onToast: (t: Toast) => void }) {
const TECH_STACK = [
{ label: 'Next.js 15', color: 'bg-zinc-800 text-zinc-300 border-zinc-700' },
{ label: 'Prisma + SQLite', color: 'bg-zinc-800 text-zinc-300 border-zinc-700' },
- { label: 'Anthropic / OpenAI', color: 'bg-blue-500/10 text-blue-300 border-blue-500/20' },
+ { label: 'Anthropic / OpenAI / MiniMax', color: 'bg-blue-500/10 text-blue-300 border-blue-500/20' },
{ label: 'React Flow', color: 'bg-zinc-800 text-zinc-300 border-zinc-700' },
{ label: 'Tailwind CSS', color: 'bg-cyan-500/10 text-cyan-300 border-cyan-500/20' },
]
diff --git a/lib/ai-client.ts b/lib/ai-client.ts
index ca8135b..affff04 100644
--- a/lib/ai-client.ts
+++ b/lib/ai-client.ts
@@ -2,6 +2,7 @@ import Anthropic from '@anthropic-ai/sdk'
import OpenAI from 'openai'
import { resolveAnthropicClient } from './claude-cli-auth'
import { resolveOpenAIClient } from './openai-auth'
+import { resolveMiniMaxClient } from './minimax-auth'
import { getProvider } from './settings'
export interface AIContentBlock {
@@ -20,7 +21,7 @@ export interface AIResponse {
}
export interface AIClient {
- provider: 'anthropic' | 'openai'
+ provider: 'anthropic' | 'openai' | 'minimax'
createMessage(params: {
model: string
max_tokens: number
@@ -99,12 +100,54 @@ export class OpenAIAIClient implements AIClient {
}
}
+// Wrap MiniMax via the OpenAI-compatible SDK (same chat.completions surface as OpenAI).
+export class MiniMaxAIClient implements AIClient {
+  provider = 'minimax' as const
+  constructor(private sdk: OpenAI) {}
+
+  /** Translate provider-neutral messages to OpenAI chat format and call MiniMax. */
+  async createMessage(params: { model: string; max_tokens: number; messages: AIMessage[] }): Promise<AIResponse> {
+    const messages: OpenAI.ChatCompletionMessageParam[] = params.messages.map((m): OpenAI.ChatCompletionMessageParam => {
+      if (typeof m.content === 'string') {
+        if (m.role === 'assistant') return { role: 'assistant' as const, content: m.content }
+        return { role: 'user' as const, content: m.content }
+      }
+      const parts: OpenAI.ChatCompletionContentPart[] = m.content.map(b => {
+        if (b.type === 'image' && b.source) {
+          return {
+            type: 'image_url' as const,
+            image_url: { url: `data:${b.source.media_type};base64,${b.source.data}` },
+          }
+        }
+        return { type: 'text' as const, text: b.text ?? '' }
+      })
+      // Assistant messages only accept text parts in the chat API, so drop image parts there.
+      if (m.role === 'assistant') return { role: 'assistant' as const, content: parts.filter((p): p is OpenAI.ChatCompletionContentPartText => p.type === 'text') }
+      return { role: 'user' as const, content: parts }
+    })
+
+    const completion = await this.sdk.chat.completions.create({
+      model: params.model,
+      max_tokens: params.max_tokens,
+      messages,
+    })
+
+    let text = completion.choices[0]?.message?.content ?? ''
+    // Strip <think>…</think> reasoning blocks MiniMax may emit (anchored on the OPENING tag
+    text = text.replace(/<think>[\s\S]*?<\/think>\s*/g, '') // so answer text before a tag is never deleted)
+    return { text }
+  }
+}
+
export async function resolveAIClient(options: {
overrideKey?: string
dbKey?: string
} = {}): Promise {
const provider = await getProvider()
+ if (provider === 'minimax') {
+ const client = resolveMiniMaxClient(options)
+ return new MiniMaxAIClient(client)
+ }
+
if (provider === 'openai') {
const client = resolveOpenAIClient(options)
return new OpenAIAIClient(client)
diff --git a/lib/minimax-auth.ts b/lib/minimax-auth.ts
new file mode 100644
index 0000000..d0a8527
--- /dev/null
+++ b/lib/minimax-auth.ts
@@ -0,0 +1,33 @@
+import OpenAI from 'openai'
+
+/**
+ * Build an OpenAI SDK client pointed at MiniMax's OpenAI-compatible API.
+ *
+ * Endpoint defaults to https://api.minimax.io/v1 (overridable via MINIMAX_BASE_URL).
+ * Key resolution order:
+ *   1. Override key (from request body)
+ *   2. DB-saved key
+ *   3. MINIMAX_API_KEY env var
+ *   4. Explicit custom base URL (proxy) with a placeholder key
+ */
+export function resolveMiniMaxClient(options: {
+  overrideKey?: string
+  dbKey?: string
+  baseURL?: string
+} = {}): OpenAI {
+  const baseURL = options.baseURL ?? process.env.MINIMAX_BASE_URL ?? 'https://api.minimax.io/v1'
+
+  // First non-empty (after trim) key wins, mirroring the documented priority.
+  const apiKey =
+    options.overrideKey?.trim() ||
+    options.dbKey?.trim() ||
+    process.env.MINIMAX_API_KEY?.trim()
+
+  if (apiKey) return new OpenAI({ apiKey, baseURL })
+
+  // An explicitly supplied base URL implies an auth-handling proxy; the SDK
+  // still requires some key string, so pass a placeholder.
+  if (options.baseURL) return new OpenAI({ apiKey: 'proxy', baseURL })
+
+  throw new Error('No MiniMax API key found. Add your key in Settings, or set MINIMAX_API_KEY.')
+}
diff --git a/lib/settings.ts b/lib/settings.ts
index f1d7810..7da5eb8 100644
--- a/lib/settings.ts
+++ b/lib/settings.ts
@@ -4,12 +4,15 @@ import prisma from '@/lib/db'
let _cachedModel: string | null = null
let _modelCacheExpiry = 0
-let _cachedProvider: 'anthropic' | 'openai' | null = null
+let _cachedProvider: 'anthropic' | 'openai' | 'minimax' | null = null
let _providerCacheExpiry = 0
let _cachedOpenAIModel: string | null = null
let _openAIModelCacheExpiry = 0
+let _cachedMiniMaxModel: string | null = null
+let _miniMaxModelCacheExpiry = 0
+
const CACHE_TTL = 5 * 60 * 1000
/**
@@ -26,10 +29,11 @@ export async function getAnthropicModel(): Promise {
/**
* Get the active AI provider (cached for 5 minutes).
*/
-export async function getProvider(): Promise<'anthropic' | 'openai'> {
+export async function getProvider(): Promise<'anthropic' | 'openai' | 'minimax'> {
if (_cachedProvider && Date.now() < _providerCacheExpiry) return _cachedProvider
const setting = await prisma.setting.findUnique({ where: { key: 'aiProvider' } })
- _cachedProvider = setting?.value === 'openai' ? 'openai' : 'anthropic'
+ const val = setting?.value
+ _cachedProvider = val === 'openai' ? 'openai' : val === 'minimax' ? 'minimax' : 'anthropic'
_providerCacheExpiry = Date.now() + CACHE_TTL
return _cachedProvider
}
@@ -45,11 +49,23 @@ export async function getOpenAIModel(): Promise {
return _cachedOpenAIModel
}
+/**
+ * Get the configured MiniMax model from settings (cached for 5 minutes).
+ */
+export async function getMiniMaxModel(): Promise<string> {
+  if (_cachedMiniMaxModel && Date.now() < _miniMaxModelCacheExpiry) return _cachedMiniMaxModel
+  const setting = await prisma.setting.findUnique({ where: { key: 'minimaxModel' } })
+  _cachedMiniMaxModel = setting?.value ?? 'MiniMax-M2.7'
+  _miniMaxModelCacheExpiry = Date.now() + CACHE_TTL
+  return _cachedMiniMaxModel
+}
+
/**
* Get the model for the currently active provider.
*/
export async function getActiveModel(): Promise {
const provider = await getProvider()
+ if (provider === 'minimax') return getMiniMaxModel()
return provider === 'openai' ? getOpenAIModel() : getAnthropicModel()
}
@@ -63,4 +79,6 @@ export function invalidateSettingsCache(): void {
_providerCacheExpiry = 0
_cachedOpenAIModel = null
_openAIModelCacheExpiry = 0
+ _cachedMiniMaxModel = null
+ _miniMaxModelCacheExpiry = 0
}
diff --git a/package.json b/package.json
index 69404bb..104ac87 100644
--- a/package.json
+++ b/package.json
@@ -12,6 +12,7 @@
"build": "next build",
"start": "next start",
"lint": "eslint",
+ "test": "vitest run",
"siftly": "tsx cli/siftly.ts"
},
"dependencies": {
@@ -46,6 +47,7 @@
"prisma": "^7.4.2",
"tailwindcss": "^4",
"tsx": "^4.21.0",
- "typescript": "^5"
+ "typescript": "^5",
+ "vitest": "^4.1.1"
}
}
diff --git a/vitest.config.ts b/vitest.config.ts
new file mode 100644
index 0000000..e99a7d4
--- /dev/null
+++ b/vitest.config.ts
@@ -0,0 +1,16 @@
+import { defineConfig } from 'vitest/config'
+import { resolve } from 'path'
+
+export default defineConfig({
+  test: {
+    globals: true,
+    environment: 'node',
+  },
+  resolve: {
+    // Mirror tsconfig's "@/..." path alias so test imports like '@/lib/db'
+    // resolve exactly as they do in the Next.js build.
+    alias: {
+      '@': resolve(__dirname, '.'),
+    },
+  },
+})