feat(chat): 基于 RAG 的笔记内容 AI 问答功能

实现类似 Google NotebookLM 的效果:笔记生成后自动向量化,
用户可针对笔记内容进行 LLM 问答。

### 后端
- 新增 VectorStoreManager(ChromaDB),按标题/转录分块建立向量索引
- 新增 chat_service.py RAG 问答:检索相关片段 → 构建 prompt → 调用 LLM
- 新增 /chat/index, /chat/ask, /chat/status API 端点
- 笔记生成完成后自动建立向量索引

### 前端
- 使用 @ant-design/x Bubble.List + Sender 组件构建聊天面板
- 新增 chatStore(Zustand + persist)持久化聊天记录
- MarkdownViewer 右侧嵌入 ChatPanel,通过"AI 问答"按钮切换
- 首次打开自动检查/触发索引,支持重新索引

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
huangjianwu
2026-03-23 14:38:39 +08:00
parent 1cd8c33983
commit efadbc267d
13 changed files with 730 additions and 2 deletions

View File

@@ -0,0 +1,43 @@
import { create } from 'zustand'
import { persist } from 'zustand/middleware'
import type { ChatSource } from '@/services/chat'
/**
 * One message in a note's Q&A conversation.
 * Assistant messages may carry the RAG source snippets used to answer.
 */
export interface ChatMessage {
  /** Who produced the message. */
  role: 'user' | 'assistant'
  /** Rendered message text. */
  content: string
  /** Retrieved note fragments backing an assistant answer, if any. */
  sources?: ChatSource[]
}
/**
 * Persisted chat store shape: per-task message history plus accessors.
 * Keys of `chatHistory` are task IDs (one conversation per generated note).
 */
interface ChatState {
  /** taskId -> ordered message list for that note's conversation. */
  chatHistory: Record<string, ChatMessage[]>
  /** Append a message to the given task's conversation. */
  addMessage: (taskId: string, msg: ChatMessage) => void
  /** Remove the entire conversation for the given task. */
  clearChat: (taskId: string) => void
  /** Read the conversation for a task; empty array when none exists. */
  getMessages: (taskId: string) => ChatMessage[]
}
/**
 * Zustand store holding per-note chat history, persisted to localStorage
 * under the key 'bilinote-chat-storage' so conversations survive reloads.
 */
export const useChatStore = create<ChatState>()(
  persist(
    (set, get) => ({
      chatHistory: {},

      // Append immutably: clone the map and extend (or start) the task's list.
      addMessage: (taskId, msg) => {
        set(prev => {
          const existing = prev.chatHistory[taskId] ?? []
          return {
            chatHistory: {
              ...prev.chatHistory,
              [taskId]: [...existing, msg],
            },
          }
        })
      },

      // Drop the task's conversation by copying the map without its key.
      clearChat: (taskId) => {
        set(prev => {
          const remaining = { ...prev.chatHistory }
          delete remaining[taskId]
          return { chatHistory: remaining }
        })
      },

      // Missing task id yields an empty conversation rather than undefined.
      getMessages: (taskId) => get().chatHistory[taskId] ?? [],
    }),
    {
      name: 'bilinote-chat-storage',
    },
  ),
)