fix(config): 更新AI代理配置为本地Ollama服务
- 将AI代理目标从远程服务器改为本地127.0.0.1:11434 - 更新Nginx配置中的代理地址和Host头部设置 - 修改AI_API_URL默认值指向本地Ollama服务 - 调整AI视图组件中的基础URL配置逻辑 - 更新环境变量示例文件中的默认API地址 - 修正Vite开发服务器代理配置指向本地服务
This commit is contained in:
@@ -9,12 +9,12 @@ VITE_FILE_SERVER=https://your-file-server.com
|
|||||||
# AI 网关(OpenAI兼容)
|
# AI 网关(OpenAI兼容)
|
||||||
# - 开发环境推荐走同源反代:VITE_AI_API_URL=/ai-proxy(配合 vite.config.ts)
|
# - 开发环境推荐走同源反代:VITE_AI_API_URL=/ai-proxy(配合 vite.config.ts)
|
||||||
# - 生产环境可直连(需 AI 服务允许 CORS),或在 Nginx 里配置 /ai-proxy 反代
|
# - 生产环境可直连(需 AI 服务允许 CORS),或在 Nginx 里配置 /ai-proxy 反代
|
||||||
VITE_AI_API_URL=https://ai-api.websoft.top/api/v1
|
VITE_AI_API_URL=http://127.0.0.1:11434/api/v1
|
||||||
|
|
||||||
# Ollama 原生接口(默认端口 11434)
|
# Ollama 原生接口(默认端口 11434)
|
||||||
# - 开发环境推荐走同源反代:VITE_OLLAMA_API_URL=/proxy(配合 vite.config.ts)
|
# - 开发环境推荐走同源反代:VITE_OLLAMA_API_URL=/proxy(配合 vite.config.ts)
|
||||||
# - 生产环境不要直接用 http(会混合内容被拦截),建议 Nginx 反代成同源 https
|
# - 生产环境不要直接用 http(会混合内容被拦截),建议 Nginx 反代成同源 https
|
||||||
VITE_OLLAMA_API_URL=http://47.119.165.234:11434
|
VITE_OLLAMA_API_URL=http://127.0.0.1:11434
|
||||||
|
|
||||||
# 仅用于本地开发反代注入(vite.config.ts 会读取并注入到 /ai-proxy 请求头)
|
# 仅用于本地开发反代注入(vite.config.ts 会读取并注入到 /ai-proxy 请求头)
|
||||||
# 不要加 VITE_ 前缀,避免被打包到前端产物里
|
# 不要加 VITE_ 前缀,避免被打包到前端产物里
|
||||||
|
|||||||
@@ -6,7 +6,7 @@
|
|||||||
|
|
||||||
项目已在 `vite.config.ts` 配置(默认目标可通过 `AI_PROXY_TARGET` 调整):
|
项目已在 `vite.config.ts` 配置(默认目标可通过 `AI_PROXY_TARGET` 调整):
|
||||||
|
|
||||||
- `/ai-proxy/*` -> `https://ai-api.websoft.top/api/v1/*`
|
- `/ai-proxy/*` -> `http://127.0.0.1:11434/api/v1/*`
|
||||||
|
|
||||||
配合 `.env.development`:
|
配合 `.env.development`:
|
||||||
|
|
||||||
@@ -20,7 +20,7 @@ VITE_AI_API_URL=/ai-proxy
|
|||||||
|
|
||||||
```nginx
|
```nginx
|
||||||
location /ai-proxy/ {
|
location /ai-proxy/ {
|
||||||
proxy_pass https://ai-api.websoft.top/api/v1/;
|
proxy_pass http://127.0.0.1:11434/api/v1/;
|
||||||
proxy_http_version 1.1;
|
proxy_http_version 1.1;
|
||||||
proxy_set_header Host ai-api.websoft.top;
|
proxy_set_header Host 127.0.0.1;
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
@@ -48,9 +48,9 @@ VITE_AI_API_URL=/ai-proxy
|
|||||||
|
|
||||||
```nginx
|
```nginx
|
||||||
location /proxy/ {
|
location /proxy/ {
|
||||||
proxy_pass http://47.119.165.234:11434/;
|
proxy_pass http://127.0.0.1:11434/;
|
||||||
proxy_http_version 1.1;
|
proxy_http_version 1.1;
|
||||||
proxy_set_header Host 47.119.165.234;
|
proxy_set_header Host 127.0.0.1;
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
proxy_set_header X-Forwarded-Proto $scheme;
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
|||||||
@@ -20,7 +20,7 @@ export const FILE_SERVER =
|
|||||||
export const AI_API_URL =
|
export const AI_API_URL =
|
||||||
import.meta.env.VITE_AI_API_URL ||
|
import.meta.env.VITE_AI_API_URL ||
|
||||||
// Prefer same-origin reverse proxy during local development to avoid CORS.
|
// Prefer same-origin reverse proxy during local development to avoid CORS.
|
||||||
(import.meta.env.DEV ? '/ai-proxy' : 'https://ai-api.websoft.top/api/v1');
|
(import.meta.env.DEV ? '/ai-proxy' : 'http://127.0.0.1:11434/api/v1');
|
||||||
|
|
||||||
// Ollama native API endpoint (usually http://host:11434).
|
// Ollama native API endpoint (usually http://host:11434).
|
||||||
// Note: browsers cannot call http from an https site (mixed-content); prefer same-origin proxy.
|
// Note: browsers cannot call http from an https site (mixed-content); prefer same-origin proxy.
|
||||||
|
|||||||
@@ -12,7 +12,9 @@
|
|||||||
|
|
||||||
// Hardcode endpoint to avoid going through mp.websoft.top `/proxy`.
|
// Hardcode endpoint to avoid going through mp.websoft.top `/proxy`.
|
||||||
// The API methods append `/api/*` paths.
|
// The API methods append `/api/*` paths.
|
||||||
const BASE_URL = 'https://ai-api.websoft.top';
|
const BASE_URL = import.meta.env.PROD
|
||||||
|
? 'http://127.0.0.1:11434'
|
||||||
|
: 'https://ai-api.websoft.top';
|
||||||
|
|
||||||
const modelLoading = ref(false);
|
const modelLoading = ref(false);
|
||||||
const models = ref<Array<{ id: string; name?: string }>>([]);
|
const models = ref<Array<{ id: string; name?: string }>>([]);
|
||||||
|
|||||||
@@ -101,7 +101,7 @@ export default defineConfig(({ command, mode }) => {
|
|||||||
// GET /ai-proxy/models -> https://ai.websoft.top/api/v1/models
|
// GET /ai-proxy/models -> http://127.0.0.1:11434/api/v1/models
|
||||||
// POST /ai-proxy/chat/completions -> https://ai.websoft.top/api/v1/chat/completions
|
// POST /ai-proxy/chat/completions -> http://127.0.0.1:11434/api/v1/chat/completions
|
||||||
'/ai-proxy': {
|
'/ai-proxy': {
|
||||||
target: env.AI_PROXY_TARGET || 'https://ai-api.websoft.top',
|
target: env.AI_PROXY_TARGET || 'http://127.0.0.1:11434',
|
||||||
changeOrigin: true,
|
changeOrigin: true,
|
||||||
secure: false,
|
secure: false,
|
||||||
rewrite: (path) =>
|
rewrite: (path) =>
|
||||||
@@ -124,10 +124,10 @@ export default defineConfig(({ command, mode }) => {
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
// Ollama native API reverse proxy (dev only).
|
// Ollama native API reverse proxy (dev only).
|
||||||
// GET /proxy/api/tags -> http://47.119.165.234:11434/api/tags
|
// GET /proxy/api/tags -> http://127.0.0.1:11434/api/tags
|
||||||
// POST /proxy/api/chat -> http://47.119.165.234:11434/api/chat
|
// POST /proxy/api/chat -> http://127.0.0.1:11434/api/chat
|
||||||
'/proxy': {
|
'/proxy': {
|
||||||
target: 'http://47.119.165.234:11434',
|
target: 'http://127.0.0.1:11434',
|
||||||
changeOrigin: true,
|
changeOrigin: true,
|
||||||
secure: false,
|
secure: false,
|
||||||
rewrite: (path) => path.replace(/^\/proxy/, '')
|
rewrite: (path) => path.replace(/^\/proxy/, '')
|
||||||
|
|||||||
Reference in New Issue
Block a user