feat(ai): 添加AI模块文档和重构前端AI组件

- 新增 docs/ai/README.md 包含完整的AI模块配置、建表、API文档
- 重构 src/views/ai/index.vue 组件,移除硬编码BASE_URL和多余参数
- 添加 src/api/ai/backend.ts 统一的AI后端API接口实现
- 集成模型列表、流式对话、非流式对话等功能
- 实现SSE流式响应处理和鉴权头自动携带
- 移除历史消息存储和温度参数等冗余功能
This commit is contained in:
2026-02-28 11:02:40 +08:00
parent 5b4f5c393e
commit d079a28ffc
3 changed files with 281 additions and 68 deletions

View File

@@ -2,41 +2,26 @@
import { computed, onBeforeUnmount, ref } from 'vue';
import { message } from 'ant-design-vue';
import {
listOllamaModels,
ollamaChat,
ollamaChatStream,
type OllamaChatMessage
} from '@/api/ai/ollama';
type Msg = OllamaChatMessage;
// Hardcode endpoint to avoid going through mp.websoft.top `/proxy`.
// The API methods append `/api/*` paths.
//
// IMPORTANT: do not use `127.0.0.1` in browser production builds:
// it points to the visitor's machine, not your server.
// If you want to use server-local Ollama (`127.0.0.1:11434`), put it behind an HTTPS reverse proxy
// (e.g. `https://ai-api.websoft.top` or same-origin `/proxy`).
const BASE_URL = 'https://ai-api.websoft.top';
aiChat,
aiChatStream,
aiListModels,
normalizeModels
} from '@/api/ai/backend';
const modelLoading = ref(false);
const models = ref<Array<{ id: string; name?: string }>>([]);
const modelId = ref<string>('');
const systemPrompt = ref<string>('你是一个有帮助的助手。');
const userPrompt = ref<string>('你好,介绍一下你能做什么。');
const temperature = ref<number>(0.7);
const stream = ref<boolean>(true);
const sending = ref(false);
const assistantText = ref<string>('');
const errorText = ref<string>('');
const history = ref<Msg[]>([]);
const canSend = computed(() => {
return !!modelId.value && !!userPrompt.value.trim() && !sending.value;
return !!userPrompt.value.trim() && !sending.value;
});
const abortController = ref<AbortController | null>(null);
@@ -51,13 +36,14 @@
modelLoading.value = true;
errorText.value = '';
try {
const res = await listOllamaModels({
baseURL: BASE_URL
});
models.value = (res.models ?? []).map((m) => ({
id: m.name,
name: m.name
}));
const res = await aiListModels();
const ms = normalizeModels(res);
models.value = ms
.map((m) => ({
id: String(m.name ?? m.id ?? ''),
name: String(m.name ?? m.id ?? '')
}))
.filter((m) => !!m.id);
if (!modelId.value && models.value.length) {
modelId.value = models.value[0].id;
}
@@ -70,7 +56,6 @@
};
const clearChat = () => {
history.value = [];
assistantText.value = '';
errorText.value = '';
};
@@ -82,27 +67,16 @@
assistantText.value = '';
errorText.value = '';
const system: Msg = { role: 'system', content: systemPrompt.value.trim() };
const user: Msg = { role: 'user', content: userPrompt.value.trim() };
const messages: Msg[] = [
...(system.content ? [system] : []),
...history.value,
user
];
const prompt = userPrompt.value.trim();
const controller = new AbortController();
abortController.value = controller;
try {
if (stream.value) {
await ollamaChatStream(
await aiChatStream(
{ prompt },
{
model: modelId.value,
messages,
options: { temperature: temperature.value }
},
{
baseURL: BASE_URL,
signal: controller.signal,
onDelta: (t) => {
assistantText.value += t;
@@ -110,30 +84,15 @@
}
);
} else {
const res = await ollamaChat(
{
model: modelId.value,
messages,
options: { temperature: temperature.value }
},
{
baseURL: BASE_URL,
signal: controller.signal
}
);
assistantText.value = res.message?.content ?? '';
// axios 已在拦截器里自动带上 token/tenant
assistantText.value = await aiChat({ prompt });
}
history.value = [
...messages,
{ role: 'assistant', content: assistantText.value }
];
userPrompt.value = '';
} catch (e: any) {
// Abort is expected when clicking "Stop".
if (e?.name !== 'AbortError') {
errorText.value = e?.message ?? String(e);
message.error('请求失败(可能是 CORS 或鉴权问题');
message.error('请求失败(可能是后端未启动 / 鉴权 / 租户头缺失');
}
} finally {
sending.value = false;
@@ -184,13 +143,6 @@
>
<span>流式</span>
<a-switch v-model:checked="stream" />
<span>温度</span>
<a-input-number
v-model:value="temperature"
:min="0"
:max="2"
:step="0.1"
/>
</a-space>
</a-col>
</a-row>