From 096e78da4c47454c0829be3415fcda351f0a85ba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E5=BF=A0=E6=9E=97?= <170083662@qq.com> Date: Sat, 28 Feb 2026 00:17:20 +0800 Subject: [PATCH] =?UTF-8?q?feat(ai):=20=E6=9B=B4=E6=96=B0=20AI=20=E6=9C=8D?= =?UTF-8?q?=E5=8A=A1=E9=85=8D=E7=BD=AE=E5=92=8C=20API=20=E7=BD=91=E5=85=B3?= =?UTF-8?q?=E8=AE=BE=E7=BD=AE?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - 移除 AI_API_URL 配置项,统一使用直接 OpenAI 兼容网关地址 - 添加强制调用托管 OpenAI 兼容网关的配置选项 - 更新 BaseURL 初始值逻辑,支持 OpenAI 和 Ollama 两种提供商 - 修改 API 密钥认证错误提示信息,增加本地代理配置说明 - 更新模型加载和聊天请求的 API 调用基础 URL 配置 - 添加 OpenAI API 网关 Nginx 代理配置文件 - 配置 HTTPS 重定向和 SSL 安全设置 - 设置 CORS 跨域资源共享策略,支持 websoft.top 域名访问 --- proxy.conf | 52 ++++++++++++++++++++++++++++++++++++++++++ proxy_.conf | 35 ++++++++++++++++++++++++++++ src/views/ai/index.vue | 21 +++++++++-------- 3 files changed, 99 insertions(+), 9 deletions(-) create mode 100644 proxy.conf create mode 100644 proxy_.conf diff --git a/proxy.conf b/proxy.conf new file mode 100644 index 0000000..81c0da3 --- /dev/null +++ b/proxy.conf @@ -0,0 +1,52 @@ +server { + listen 80 ; + listen 443 ssl ; + server_name ai-api.websoft.top; + index index.php index.html index.htm default.php default.htm default.html; + access_log /www/sites/ai-api.websoft.top/log/access.log main; + error_log /www/sites/ai-api.websoft.top/log/error.log; + location ~ ^/(\.user.ini|\.htaccess|\.git|\.env|\.svn|\.project|LICENSE|README.md) { + return 404; + } + location ^~ /.well-known/acme-challenge { + allow all; + root /usr/share/nginx/html; + } + if ( $uri ~ "^/\.well-known/.*\.(php|jsp|py|js|css|lua|ts|go|zip|tar\.gz|rar|7z|sql|bak)$" ) { + return 403; + } + + http2 on; + if ($scheme = http) { + return 301 https://$host$request_uri; + } + ssl_certificate /www/sites/ai-api.websoft.top/ssl/fullchain.pem; + ssl_certificate_key /www/sites/ai-api.websoft.top/ssl/privkey.pem; + ssl_protocols TLSv1.3 TLSv1.2; + ssl_ciphers 
ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA256:!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK:!KRB5:!SRP:!CAMELLIA:!SEED;
+    ssl_prefer_server_ciphers off;
+    ssl_session_cache shared:SSL:10m;
+    ssl_session_timeout 10m;
+    error_page 497 https://$host$request_uri;
+    proxy_set_header X-Forwarded-Proto https;
+    add_header Strict-Transport-Security "max-age=31536000; includeSubDomains";
+    # CORS: allow any *.websoft.top (and websoft.top) Origin; echo Origin for credentialed requests.
+    # Empty $cors_credentials suppresses the Allow-Credentials header for other origins.
+    set $cors_origin "*";
+    set $cors_credentials "";
+    if ($http_origin ~* '^https?://([a-z0-9-]+\.)*websoft\.top(?::[0-9]+)?$') {
+        set $cors_origin $http_origin;
+        set $cors_credentials "true";
+    }
+    add_header Access-Control-Allow-Origin $cors_origin always;
+    add_header Access-Control-Allow-Credentials $cors_credentials always;
+    add_header Access-Control-Allow-Methods "GET,POST,PUT,PATCH,DELETE,OPTIONS" always;
+    add_header Access-Control-Allow-Headers "$http_access_control_request_headers" always;
+    add_header Vary "Origin" always;
+    if ($request_method = OPTIONS) {
+        # Preflight short-circuit. NB: add_header is NOT valid inside a server-level
+        # "if" (context is "if in location" only); the server-level CORS headers above still apply.
+        return 204;
+    }
+    include /www/sites/ai-api.websoft.top/proxy/*.conf;
+}
diff --git a/proxy_.conf b/proxy_.conf
new file mode 100644
index 0000000..f3600fa
--- /dev/null
+++ b/proxy_.conf
@@ -0,0 +1,35 @@
+server {
+    listen 80 ;
+    listen 443 ssl ;
+    server_name ai-api.websoft.top;
+    index index.php index.html index.htm default.php default.htm default.html;
+    access_log /www/sites/ai-api.websoft.top/log/access.log main;
+    error_log /www/sites/ai-api.websoft.top/log/error.log;
+    location ~ ^/(\.user.ini|\.htaccess|\.git|\.env|\.svn|\.project|LICENSE|README.md) {
+ return 404; + } + location ^~ /.well-known/acme-challenge { + allow all; + root /usr/share/nginx/html; + } + if ( $uri ~ "^/\.well-known/.*\.(php|jsp|py|js|css|lua|ts|go|zip|tar\.gz|rar|7z|sql|bak)$" ) { + return 403; + } + + http2 on; + if ($scheme = http) { + return 301 https://$host$request_uri; + } + ssl_certificate /www/sites/ai-api.websoft.top/ssl/fullchain.pem; + ssl_certificate_key /www/sites/ai-api.websoft.top/ssl/privkey.pem; + ssl_protocols TLSv1.3 TLSv1.2; + ssl_ciphers ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA256:!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK:!KRB5:!SRP:!CAMELLIA:!SEED; + ssl_prefer_server_ciphers off; + ssl_session_cache shared:SSL:10m; + ssl_session_timeout 10m; + error_page 497 https://$host$request_uri; + proxy_set_header X-Forwarded-Proto https; + add_header Strict-Transport-Security "max-age=31536000; includeSubDomains"; + add_header Access-Control-Allow-Origin "*" always; + include /www/sites/ai-api.websoft.top/proxy/*.conf; +} diff --git a/src/views/ai/index.vue b/src/views/ai/index.vue index ad07990..49c282a 100644 --- a/src/views/ai/index.vue +++ b/src/views/ai/index.vue @@ -8,7 +8,7 @@ type OpenAIChatMessage, type OpenAIModel } from '@/api/ai/openai'; - import { AI_API_URL, OLLAMA_API_URL } from '@/config/setting'; + import { OLLAMA_API_URL } from '@/config/setting'; import { listOllamaModels, ollamaChat, @@ -21,8 +21,11 @@ type Provider = 'openai' | 'ollama'; const provider = ref('ollama'); - // Default to Ollama native API when provider is ollama. - const baseURL = ref(provider.value === OLLAMA_API_URL); + // Force direct calls to the hosted OpenAI-compatible gateway (no local /ai-proxy). 
+ const DIRECT_OPENAI_BASE_URL = 'https://ai-api.websoft.top/api/v1'; + const baseURL = ref( + provider.value === 'openai' ? DIRECT_OPENAI_BASE_URL : OLLAMA_API_URL + ); const apiKey = ref(''); const modelLoading = ref(false); @@ -60,10 +63,10 @@ try { if (provider.value === 'openai') { if (!baseURL.value.trim()) { - baseURL.value = AI_API_URL; + baseURL.value = DIRECT_OPENAI_BASE_URL; } const res = await listModels({ - baseURL: baseURL.value.trim() || AI_API_URL, + baseURL: baseURL.value.trim() || DIRECT_OPENAI_BASE_URL, apiKey: apiKey.value.trim() || undefined }); models.value = res.data ?? []; @@ -93,7 +96,7 @@ String(errorText.value).includes('401') ) { message.error( - '未认证(401):请填写 API Key,或在本地用 AI_API_KEY 通过 /ai-proxy 注入' + '未认证(401):请填写 API Key(如需走本地代理注入,可将 BaseURL 改为 /ai-proxy)' ); } else { message.error('加载模型列表失败'); @@ -137,7 +140,7 @@ temperature: temperature.value }, { - baseURL: baseURL.value.trim() || AI_API_URL, + baseURL: baseURL.value.trim() || DIRECT_OPENAI_BASE_URL, apiKey: apiKey.value.trim() || undefined, signal: controller.signal, onDelta: (t) => { @@ -153,7 +156,7 @@ temperature: temperature.value }, { - baseURL: baseURL.value.trim() || AI_API_URL, + baseURL: baseURL.value.trim() || DIRECT_OPENAI_BASE_URL, apiKey: apiKey.value.trim() || undefined, signal: controller.signal } @@ -231,7 +234,7 @@ models.value = []; modelId.value = ''; errorText.value = ''; - baseURL.value = p === 'openai' ? AI_API_URL : OLLAMA_API_URL; + baseURL.value = p === 'openai' ? DIRECT_OPENAI_BASE_URL : OLLAMA_API_URL; loadModels(); } );