fix(config): 更新AI代理配置为本地Ollama服务

- 将AI代理目标从远程服务器改为本地127.0.0.1:11434
- 更新Nginx配置中的代理地址和Host头部设置
- 修改AI_API_URL默认值指向本地Ollama服务
- 调整AI视图组件中的基础URL配置逻辑
- 更新环境变量示例文件中的默认API地址
- 修正Vite开发服务器代理配置指向本地服务
This commit is contained in:
2026-02-28 01:42:18 +08:00
parent 91708315f3
commit ac9712819a
5 changed files with 14 additions and 12 deletions

View File

@@ -20,7 +20,7 @@ export const FILE_SERVER =
export const AI_API_URL =
import.meta.env.VITE_AI_API_URL ||
// Prefer same-origin reverse proxy during local development to avoid CORS.
(import.meta.env.DEV ? '/ai-proxy' : 'https://ai-api.websoft.top/api/v1');
(import.meta.env.DEV ? '/ai-proxy' : 'http://127.0.0.1:11434/api/v1');
// Ollama native API endpoint (usually http://host:11434).
// Note: browsers cannot call http from an https site (mixed-content); prefer same-origin proxy.

View File

@@ -12,7 +12,9 @@
// Hardcode endpoint to avoid going through mp.websoft.top `/proxy`.
// The API methods append `/api/*` paths.
const BASE_URL = 'https://ai-api.websoft.top';
const BASE_URL = import.meta.env.PROD
? 'http://127.0.0.1:11434'
: 'https://ai-api.websoft.top';
const modelLoading = ref(false);
const models = ref<Array<{ id: string; name?: string }>>([]);