Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Original file line number Diff line number Diff line change
Expand Up @@ -638,35 +638,13 @@ Important Notes:
&vision_model.provider,
)?;

let custom_request_body = vision_model
.custom_request_body
.clone()
.map(|body| {
serde_json::from_str(&body).map_err(|e| {
BitFunError::parse(format!(
"Failed to parse custom request body for model {}: {}",
vision_model.name, e
))
})
})
.transpose()?;

// Vision models cannot set max_tokens (e.g., glm-4v doesn't support this parameter)
let model_config = ModelConfig {
name: vision_model.name.clone(),
model: vision_model.model_name.clone(),
api_key: vision_model.api_key.clone(),
base_url: vision_model.base_url.clone(),
format: vision_model.provider.clone(),
context_window: vision_model.context_window.unwrap_or(128000),
max_tokens: None,
enable_thinking_process: false,
support_preserved_thinking: false,
custom_headers: vision_model.custom_headers.clone(),
custom_headers_mode: vision_model.custom_headers_mode.clone(),
skip_ssl_verify: vision_model.skip_ssl_verify,
custom_request_body,
};
// and should never use the thinking process.
let mut model_config = ModelConfig::try_from(vision_model.clone())
.map_err(|e| BitFunError::parse(format!("Config conversion failed for vision model {}: {}", vision_model.name, e)))?;
model_config.max_tokens = None;
model_config.enable_thinking_process = false;
model_config.support_preserved_thinking = false;

let ai_client = Arc::new(AIClient::new(model_config));

Expand Down
12 changes: 6 additions & 6 deletions src/crates/core/src/infrastructure/ai/client.rs
Original file line number Diff line number Diff line change
Expand Up @@ -449,10 +449,10 @@ impl AIClient {
extra_body: Option<serde_json::Value>,
max_tries: usize,
) -> Result<StreamResponse> {
let url = self.config.base_url.clone();
let url = self.config.request_url.clone();
debug!(
"OpenAI config: model={}, base_url={}, max_tries={}",
self.config.model, self.config.base_url, max_tries
"OpenAI config: model={}, request_url={}, max_tries={}",
self.config.model, self.config.request_url, max_tries
);

// Use OpenAI message converter
Expand Down Expand Up @@ -582,10 +582,10 @@ impl AIClient {
extra_body: Option<serde_json::Value>,
max_tries: usize,
) -> Result<StreamResponse> {
let url = self.config.base_url.clone();
let url = self.config.request_url.clone();
debug!(
"Anthropic config: model={}, base_url={}, max_tries={}",
self.config.model, self.config.base_url, max_tries
"Anthropic config: model={}, request_url={}, max_tries={}",
self.config.model, self.config.request_url, max_tries
);

// Use Anthropic message converter
Expand Down
7 changes: 7 additions & 0 deletions src/crates/core/src/service/config/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -697,6 +697,12 @@ pub struct AIModelConfig {
pub provider: String,
pub model_name: String,
pub base_url: String,

/// Computed actual request URL (auto-derived from base_url + provider format).
/// Stored by the frontend when config is saved; falls back to base_url if absent.
#[serde(default)]
pub request_url: Option<String>,

pub api_key: String,
/// Context window size (total token limit for input + output).
pub context_window: Option<u32>,
Expand Down Expand Up @@ -1108,6 +1114,7 @@ impl Default for AIModelConfig {
provider: String::new(),
model_name: String::new(),
base_url: String::new(),
request_url: None,
api_key: String::new(),
context_window: None,
max_tokens: None,
Expand Down
10 changes: 10 additions & 0 deletions src/crates/core/src/util/types/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,9 @@ use serde::{Deserialize, Serialize};
pub struct AIConfig {
pub name: String,
pub base_url: String,
/// Actual request URL
/// Falls back to base_url when absent
pub request_url: String,
pub api_key: String,
pub model: String,
pub format: String,
Expand Down Expand Up @@ -38,9 +41,16 @@ impl TryFrom<AIModelConfig> for AIConfig {
None
};

// Use stored request_url if present, otherwise fall back to base_url (legacy configs)
let request_url = other
.request_url
.filter(|u| !u.is_empty())
.unwrap_or_else(|| other.base_url.clone());

Ok(AIConfig {
name: other.name.clone(),
base_url: other.base_url.clone(),
request_url,
api_key: other.api_key.clone(),
model: other.model_name.clone(),
format: other.provider.clone(),
Expand Down
29 changes: 29 additions & 0 deletions src/web-ui/src/infrastructure/config/components/AIModelConfig.scss
Original file line number Diff line number Diff line change
Expand Up @@ -692,6 +692,35 @@
}
}

&__resolved-url {
display: flex;
flex-direction: column;
gap: 2px;
margin-top: 6px;

.resolved-url__label {
font-size: $font-size-xs;
color: var(--color-text-secondary);
}

.resolved-url__value {
display: block;
font-size: $font-size-xs;
font-family: $font-family-mono;
color: var(--color-text-primary);
background: var(--element-bg-subtle, rgba(255, 255, 255, 0.05));
border: 1px solid var(--border-base, rgba(255, 255, 255, 0.1));
border-radius: var(--size-radius-sm, 4px);
padding: 3px 8px;
word-break: break-all;
}

.resolved-url__hint {
font-size: 11px;
color: var(--color-text-muted);
}
}

&__form-actions {
display: flex;
gap: $size-gap-3;
Expand Down
77 changes: 69 additions & 8 deletions src/web-ui/src/infrastructure/config/components/AIModelConfig.tsx
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@


import React, { useState, useEffect, useMemo } from 'react';
import { useTranslation } from 'react-i18next';
import { Plus, Edit2, Trash2, Wifi, Loader, Search, ChevronDown, ChevronUp, AlertTriangle, X, Settings, ArrowLeft, ExternalLink } from 'lucide-react';
Expand All @@ -21,6 +19,28 @@ import './AIModelConfig.scss';

const log = createLogger('AIModelConfig');

/**
 * Compute the actual request URL from a base URL and provider format.
 *
 * Rules:
 * - Base URL ending with '#' → strip the '#' and use the remainder as-is
 *   (explicit force-override: no path is appended).
 * - provider 'openai'    → append '/chat/completions' unless the path already ends with it.
 * - provider 'anthropic' → append '/v1/messages' unless the path already ends with it.
 * - any other provider   → use the trimmed base URL as-is.
 *
 * @param baseUrl  user-entered base URL; surrounding whitespace and trailing
 *                 slashes are ignored
 * @param provider provider format id ('openai', 'anthropic', ...)
 * @returns the resolved request URL; empty string when baseUrl is blank
 */
function resolveRequestUrl(baseUrl: string, provider: string): string {
  const trimmed = baseUrl.trim().replace(/\/+$/, '');
  // '#' suffix is the force-override marker: use the URL verbatim (minus the '#').
  if (trimmed.endsWith('#')) {
    return trimmed.slice(0, -1).replace(/\/+$/, '');
  }
  // Blank input: return '' rather than a bare path like '/chat/completions'.
  if (trimmed === '') {
    return '';
  }
  if (provider === 'openai') {
    // Anchor the check at a path-segment boundary so that e.g.
    // 'https://x/forchat/completions' still gets the suffix appended.
    return trimmed.endsWith('/chat/completions') ? trimmed : `${trimmed}/chat/completions`;
  }
  if (provider === 'anthropic') {
    return trimmed.endsWith('/v1/messages') ? trimmed : `${trimmed}/v1/messages`;
  }
  return trimmed;
}

const AIModelConfig: React.FC = () => {
const { t } = useTranslation('settings/ai-model');
const { t: tDefault } = useTranslation('settings/default-model');
Expand Down Expand Up @@ -166,6 +186,7 @@ const AIModelConfig: React.FC = () => {
setEditingConfig({
name: defaultModel ? `${providerName} - ${defaultModel}` : '',
base_url: template.baseUrl,
request_url: resolveRequestUrl(template.baseUrl, template.format),
api_key: '',
model_name: defaultModel,
provider: template.format,
Expand All @@ -187,7 +208,8 @@ const AIModelConfig: React.FC = () => {
setSelectedProviderId(null);
setEditingConfig({
name: '',
base_url: 'https://open.bigmodel.cn/api/paas/v4/chat/completions',
base_url: 'https://open.bigmodel.cn/api/paas/v4',
request_url: resolveRequestUrl('https://open.bigmodel.cn/api/paas/v4', 'openai'),
api_key: '',
model_name: '',
provider: 'openai',
Expand Down Expand Up @@ -239,6 +261,7 @@ const AIModelConfig: React.FC = () => {
id: editingConfig.id || `model_${Date.now()}`,
name: editingConfig.name,
base_url: editingConfig.base_url,
request_url: editingConfig.request_url || resolveRequestUrl(editingConfig.base_url, editingConfig.provider || 'openai'),
api_key: editingConfig.api_key || '',
model_name: editingConfig.model_name || 'search-api',
provider: editingConfig.provider || 'openai',
Expand Down Expand Up @@ -638,10 +661,12 @@ const AIModelConfig: React.FC = () => {
value={editingConfig.base_url || currentTemplate.baseUrl}
onChange={(value) => {
const selectedOption = currentTemplate.baseUrlOptions!.find(opt => opt.url === value);
const newProvider = selectedOption?.format || editingConfig.provider || 'openai';
setEditingConfig(prev => ({
...prev,
base_url: value as string,
provider: selectedOption?.format || prev?.provider
request_url: resolveRequestUrl(value as string, newProvider),
provider: newProvider
}));
}}
placeholder={t('form.baseUrl')}
Expand All @@ -655,19 +680,38 @@ const AIModelConfig: React.FC = () => {
<input
type="url"
value={editingConfig.base_url || ''}
onChange={(e) => setEditingConfig(prev => ({ ...prev, base_url: e.target.value }))}
onChange={(e) => setEditingConfig(prev => ({
...prev,
base_url: e.target.value,
request_url: resolveRequestUrl(e.target.value, prev?.provider || 'openai')
}))}
onFocus={(e) => e.target.select()}
placeholder={currentTemplate.baseUrl}
/>
)}
{editingConfig.base_url && (
<div className="bitfun-ai-model-config__resolved-url">
<span className="resolved-url__label">{t('form.resolvedUrlLabel')}</span>
<code className="resolved-url__value">
{resolveRequestUrl(editingConfig.base_url, editingConfig.provider || 'openai')}
</code>
{!(currentTemplate.baseUrlOptions && currentTemplate.baseUrlOptions.length > 0) && (
<small className="resolved-url__hint">{t('form.forceUrlHint')}</small>
)}
</div>
)}
</div>


<div className="bitfun-ai-model-config__form-field">
<label>{t('form.provider')}</label>
<Select
value={editingConfig.provider || 'openai'}
onChange={(value) => setEditingConfig(prev => ({ ...prev, provider: value as string }))}
onChange={(value) => setEditingConfig(prev => ({
...prev,
provider: value as string,
request_url: resolveRequestUrl(prev?.base_url || '', value as string)
}))}
placeholder={t('form.providerPlaceholder')}
options={[
{ label: 'OpenAI', value: 'openai' },
Expand Down Expand Up @@ -747,19 +791,22 @@ const AIModelConfig: React.FC = () => {
case 'general_chat':
defaultCapabilities = ['text_chat', 'function_calling'];
updates.base_url = 'https://open.bigmodel.cn/api/paas/v4/chat/completions';
updates.request_url = resolveRequestUrl(updates.base_url, prev?.provider || 'openai');
break;
case 'multimodal':
defaultCapabilities = ['text_chat', 'image_understanding', 'function_calling'];
updates.base_url = 'https://open.bigmodel.cn/api/paas/v4/chat/completions';
updates.request_url = resolveRequestUrl(updates.base_url, prev?.provider || 'openai');
break;
case 'image_generation':
defaultCapabilities = ['image_generation'];
updates.base_url = 'https://open.bigmodel.cn/api/paas/v4/images/generations';
updates.request_url = resolveRequestUrl(updates.base_url, prev?.provider || 'openai');
break;
case 'search_enhanced':
defaultCapabilities = ['search'];

updates.base_url = 'https://open.bigmodel.cn/api/paas/v4/web_search';
updates.request_url = resolveRequestUrl(updates.base_url, 'openai');
updates.model_name = 'search-api';
updates.provider = 'openai';
updates.context_window = 128000;
Expand All @@ -768,6 +815,7 @@ const AIModelConfig: React.FC = () => {
case 'speech_recognition':
defaultCapabilities = ['speech_recognition'];
updates.base_url = 'https://open.bigmodel.cn/api/paas/v4/chat/completions';
updates.request_url = resolveRequestUrl(updates.base_url, prev?.provider || 'openai');
break;
}
updates.capabilities = defaultCapabilities;
Expand Down Expand Up @@ -805,7 +853,11 @@ const AIModelConfig: React.FC = () => {
<input
type="url"
value={editingConfig.base_url || ''}
onChange={(e) => setEditingConfig(prev => ({ ...prev, base_url: e.target.value }))}
onChange={(e) => setEditingConfig(prev => ({
...prev,
base_url: e.target.value,
request_url: resolveRequestUrl(e.target.value, prev?.provider || 'openai')
}))}
onFocus={(e) => e.target.select()}
placeholder={
editingConfig.category === 'search_enhanced'
Expand All @@ -818,6 +870,15 @@ const AIModelConfig: React.FC = () => {
{t('form.searchApiHint')}
</small>
)}
{editingConfig.base_url && (
<div className="bitfun-ai-model-config__resolved-url">
<span className="resolved-url__label">{t('form.resolvedUrlLabel')}</span>
<code className="resolved-url__value">
{resolveRequestUrl(editingConfig.base_url, editingConfig.provider || 'openai')}
</code>
<small className="resolved-url__hint">{t('form.forceUrlHint')}</small>
</div>
)}
</div>


Expand Down
Loading