feat(ai): 更新 AI 服务配置和 API 网关设置

- 移除 AI_API_URL 配置项,统一直接使用托管的 OpenAI 兼容网关地址
- 添加强制调用托管 OpenAI 兼容网关的配置选项
- 更新 BaseURL 初始值逻辑,支持 OpenAI 和 Ollama 两种提供商
- 修改 API 密钥认证错误提示信息,增加本地代理配置说明
- 更新模型加载和聊天请求的 API 调用基础 URL 配置
- 添加 OpenAI API 网关 Nginx 代理配置文件
- 配置 HTTPS 重定向和 SSL 安全设置
- 设置 CORS 跨域资源共享策略,支持 websoft.top 域名访问
This commit is contained in:
2026-02-28 00:17:20 +08:00
parent 2afd831d11
commit 096e78da4c
3 changed files with 99 additions and 9 deletions

View File

@@ -8,7 +8,7 @@
type OpenAIChatMessage,
type OpenAIModel
} from '@/api/ai/openai';
import { AI_API_URL, OLLAMA_API_URL } from '@/config/setting';
import { OLLAMA_API_URL } from '@/config/setting';
import {
listOllamaModels,
ollamaChat,
@@ -21,8 +21,11 @@
type Provider = 'openai' | 'ollama';
const provider = ref<Provider>('ollama');
// Default to Ollama native API when provider is ollama.
const baseURL = ref(provider.value === OLLAMA_API_URL);
// Force direct calls to the hosted OpenAI-compatible gateway (no local /ai-proxy).
const DIRECT_OPENAI_BASE_URL = 'https://ai-api.websoft.top/api/v1';
const baseURL = ref<string>(
provider.value === 'openai' ? DIRECT_OPENAI_BASE_URL : OLLAMA_API_URL
);
const apiKey = ref<string>('');
const modelLoading = ref(false);
@@ -60,10 +63,10 @@
try {
if (provider.value === 'openai') {
if (!baseURL.value.trim()) {
baseURL.value = AI_API_URL;
baseURL.value = DIRECT_OPENAI_BASE_URL;
}
const res = await listModels({
baseURL: baseURL.value.trim() || AI_API_URL,
baseURL: baseURL.value.trim() || DIRECT_OPENAI_BASE_URL,
apiKey: apiKey.value.trim() || undefined
});
models.value = res.data ?? [];
@@ -93,7 +96,7 @@
String(errorText.value).includes('401')
) {
message.error(
'未认证(401):请填写 API Key,或在本地用 AI_API_KEY 通过 /ai-proxy 注入'
'未认证(401):请填写 API Key(如需走本地代理注入,可将 BaseURL 改为 /ai-proxy)'
);
} else {
message.error('加载模型列表失败');
@@ -137,7 +140,7 @@
temperature: temperature.value
},
{
baseURL: baseURL.value.trim() || AI_API_URL,
baseURL: baseURL.value.trim() || DIRECT_OPENAI_BASE_URL,
apiKey: apiKey.value.trim() || undefined,
signal: controller.signal,
onDelta: (t) => {
@@ -153,7 +156,7 @@
temperature: temperature.value
},
{
baseURL: baseURL.value.trim() || AI_API_URL,
baseURL: baseURL.value.trim() || DIRECT_OPENAI_BASE_URL,
apiKey: apiKey.value.trim() || undefined,
signal: controller.signal
}
@@ -231,7 +234,7 @@
models.value = [];
modelId.value = '';
errorText.value = '';
baseURL.value = p === 'openai' ? AI_API_URL : OLLAMA_API_URL;
baseURL.value = p === 'openai' ? DIRECT_OPENAI_BASE_URL : OLLAMA_API_URL;
loadModels();
}
);