refactor(ai): 移除 OpenAI 支持并优化 Ollama 集成

- 移除了 OpenAI 相关的 API 导入和类型定义
- 删除了 provider 切换逻辑和相关的条件分支代码
- 简化了模型列表加载逻辑,仅保留 Ollama 模型获取
- 移除了聊天消息发送中的 provider 分支判断
- 删除了历史消息 JSON 计算属性
- 移除了 API Key 输入字段和相关错误处理
- 统一使用 Ollama API 类型定义替代 OpenAI 类型
- 移除了 provider 监听器和切换功能组件
This commit is contained in:
2026-02-28 00:38:19 +08:00
parent 096e78da4c
commit cc01095107

View File

@@ -1,13 +1,6 @@
<script setup lang="ts"> <script setup lang="ts">
import { computed, onBeforeUnmount, ref, watch } from 'vue'; import { computed, onBeforeUnmount, ref } from 'vue';
import { message } from 'ant-design-vue'; import { message } from 'ant-design-vue';
import {
chatCompletions,
chatCompletionsStream,
listModels,
type OpenAIChatMessage,
type OpenAIModel
} from '@/api/ai/openai';
import { OLLAMA_API_URL } from '@/config/setting'; import { OLLAMA_API_URL } from '@/config/setting';
import { import {
listOllamaModels, listOllamaModels,
@@ -16,20 +9,13 @@
type OllamaChatMessage type OllamaChatMessage
} from '@/api/ai/ollama'; } from '@/api/ai/ollama';
type Msg = OpenAIChatMessage; type Msg = OllamaChatMessage;
type Provider = 'openai' | 'ollama'; // Only keep Ollama native API mode.
const provider = ref<Provider>('ollama'); const baseURL = ref<string>(OLLAMA_API_URL);
// Force direct calls to the hosted OpenAI-compatible gateway (no local /ai-proxy).
const DIRECT_OPENAI_BASE_URL = 'https://ai-api.websoft.top/api/v1';
const baseURL = ref<string>(
provider.value === 'openai' ? DIRECT_OPENAI_BASE_URL : OLLAMA_API_URL
);
const apiKey = ref<string>('');
const modelLoading = ref(false); const modelLoading = ref(false);
const models = ref<OpenAIModel[]>([]); const models = ref<Array<{ id: string; name?: string }>>([]);
const modelId = ref<string>(''); const modelId = ref<string>('');
const systemPrompt = ref<string>('你是一个有帮助的助手。'); const systemPrompt = ref<string>('你是一个有帮助的助手。');
@@ -43,7 +29,6 @@
const errorText = ref<string>(''); const errorText = ref<string>('');
const history = ref<Msg[]>([]); const history = ref<Msg[]>([]);
const historyJson = computed(() => JSON.stringify(history.value, null, 2));
const canSend = computed(() => { const canSend = computed(() => {
return !!modelId.value && !!userPrompt.value.trim() && !sending.value; return !!modelId.value && !!userPrompt.value.trim() && !sending.value;
@@ -61,46 +46,22 @@
modelLoading.value = true; modelLoading.value = true;
errorText.value = ''; errorText.value = '';
try { try {
if (provider.value === 'openai') { if (!baseURL.value.trim()) {
if (!baseURL.value.trim()) { baseURL.value = OLLAMA_API_URL;
baseURL.value = DIRECT_OPENAI_BASE_URL; }
} const res = await listOllamaModels({
const res = await listModels({ baseURL: baseURL.value.trim() || OLLAMA_API_URL
baseURL: baseURL.value.trim() || DIRECT_OPENAI_BASE_URL, });
apiKey: apiKey.value.trim() || undefined models.value = (res.models ?? []).map((m) => ({
}); id: m.name,
models.value = res.data ?? []; name: m.name
if (!modelId.value && models.value.length) { }));
modelId.value = models.value[0].id; if (!modelId.value && models.value.length) {
} modelId.value = models.value[0].id;
} else {
if (!baseURL.value.trim()) {
baseURL.value = OLLAMA_API_URL;
}
const res = await listOllamaModels({
baseURL: baseURL.value.trim() || OLLAMA_API_URL
});
models.value = (res.models ?? []).map((m) => ({
id: m.name,
name: m.name,
object: 'model'
}));
if (!modelId.value && models.value.length) {
modelId.value = models.value[0].id;
}
} }
} catch (e: any) { } catch (e: any) {
errorText.value = e?.message ?? String(e); errorText.value = e?.message ?? String(e);
if ( message.error('加载模型列表失败');
provider.value === 'openai' &&
String(errorText.value).includes('401')
) {
message.error(
'未认证(401):请填写 API Key;如需走本地代理注入,可将 BaseURL 改为 /ai-proxy'
);
} else {
message.error('加载模型列表失败');
}
} finally { } finally {
modelLoading.value = false; modelLoading.value = false;
} }
@@ -131,72 +92,34 @@
abortController.value = controller; abortController.value = controller;
try { try {
if (provider.value === 'openai') { if (stream.value) {
if (stream.value) { await ollamaChatStream(
await chatCompletionsStream( {
{ model: modelId.value,
model: modelId.value, messages,
messages, options: { temperature: temperature.value }
temperature: temperature.value },
}, {
{ baseURL: baseURL.value.trim() || OLLAMA_API_URL,
baseURL: baseURL.value.trim() || DIRECT_OPENAI_BASE_URL, signal: controller.signal,
apiKey: apiKey.value.trim() || undefined, onDelta: (t) => {
signal: controller.signal, assistantText.value += t;
onDelta: (t) => {
assistantText.value += t;
}
} }
); }
} else { );
const res = await chatCompletions(
{
model: modelId.value,
messages,
temperature: temperature.value
},
{
baseURL: baseURL.value.trim() || DIRECT_OPENAI_BASE_URL,
apiKey: apiKey.value.trim() || undefined,
signal: controller.signal
}
);
assistantText.value = res.choices?.[0]?.message?.content ?? '';
}
} else { } else {
const ollamaMessages: OllamaChatMessage[] = messages.map((m) => ({ const res = await ollamaChat(
role: m.role as any, {
content: m.content model: modelId.value,
})); messages,
if (stream.value) { options: { temperature: temperature.value }
await ollamaChatStream( },
{ {
model: modelId.value, baseURL: baseURL.value.trim() || OLLAMA_API_URL,
messages: ollamaMessages, signal: controller.signal
options: { temperature: temperature.value } }
}, );
{ assistantText.value = res.message?.content ?? '';
baseURL: baseURL.value.trim() || OLLAMA_API_URL,
signal: controller.signal,
onDelta: (t) => {
assistantText.value += t;
}
}
);
} else {
const res = await ollamaChat(
{
model: modelId.value,
messages: ollamaMessages,
options: { temperature: temperature.value }
},
{
baseURL: baseURL.value.trim() || OLLAMA_API_URL,
signal: controller.signal
}
);
assistantText.value = res.message?.content ?? '';
}
} }
history.value = [ history.value = [
@@ -226,19 +149,6 @@
// Load once for convenience; if the gateway blocks CORS you can still paste output from curl. // Load once for convenience; if the gateway blocks CORS you can still paste output from curl.
loadModels(); loadModels();
watch(
() => provider.value,
(p) => {
stop();
clearChat();
models.value = [];
modelId.value = '';
errorText.value = '';
baseURL.value = p === 'openai' ? DIRECT_OPENAI_BASE_URL : OLLAMA_API_URL;
loadModels();
}
);
onBeforeUnmount(() => { onBeforeUnmount(() => {
stop(); stop();
}); });
@@ -256,28 +166,11 @@
/> />
<a-row :gutter="12"> <a-row :gutter="12">
<a-col :xs="24" :md="6">
<a-select
v-model:value="provider"
:options="[
{ label: 'OpenAI兼容(/v1)', value: 'openai' },
{ label: 'Ollama原生(/api)', value: 'ollama' }
]"
style="width: 100%"
/>
</a-col>
<a-col :xs="24" :md="12"> <a-col :xs="24" :md="12">
<a-input <a-input
v-model:value="baseURL" v-model:value="baseURL"
addon-before="BaseURL" addon-before="BaseURL"
placeholder="https://ai-api.websoft.top/api/v1" placeholder="http://localhost:11434"
/>
</a-col>
<a-col :xs="24" :md="12" v-if="provider === 'openai'">
<a-input-password
v-model:value="apiKey"
addon-before="API Key"
placeholder="可选(不建议在前端保存)"
/> />
</a-col> </a-col>
</a-row> </a-row>