mirror of
https://github.com/DrizzleTime/Foxel.git
synced 2026-05-12 02:20:28 +08:00
Compare commits
10 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e7cf8dbdb8 | ||
|
|
e7eafdee97 | ||
|
|
051b49d3f6 | ||
|
|
b059b0eb44 | ||
|
|
59ad2cb622 | ||
|
|
6b2ada0b42 | ||
|
|
a727e77341 | ||
|
|
4638356a45 | ||
|
|
e51344b43e | ||
|
|
b7685db0e8 |
@@ -11,10 +11,11 @@ from domain.processors import api as processors
|
|||||||
from domain.share import api as share
|
from domain.share import api as share
|
||||||
from domain.tasks import api as tasks
|
from domain.tasks import api as tasks
|
||||||
from domain.ai import api as ai
|
from domain.ai import api as ai
|
||||||
|
from domain.agent import api as agent
|
||||||
from domain.virtual_fs import api as virtual_fs
|
from domain.virtual_fs import api as virtual_fs
|
||||||
from domain.virtual_fs.mapping import s3_api, webdav_api
|
from domain.virtual_fs.mapping import s3_api, webdav_api
|
||||||
from domain.virtual_fs.search import search_api
|
from domain.virtual_fs.search import search_api
|
||||||
from domain.audit import router as audit
|
from domain.audit import api as audit
|
||||||
|
|
||||||
|
|
||||||
def include_routers(app: FastAPI):
|
def include_routers(app: FastAPI):
|
||||||
@@ -30,9 +31,10 @@ def include_routers(app: FastAPI):
|
|||||||
app.include_router(backup.router)
|
app.include_router(backup.router)
|
||||||
app.include_router(ai.router_vector_db)
|
app.include_router(ai.router_vector_db)
|
||||||
app.include_router(ai.router_ai)
|
app.include_router(ai.router_ai)
|
||||||
|
app.include_router(agent.router)
|
||||||
app.include_router(plugins.router)
|
app.include_router(plugins.router)
|
||||||
app.include_router(webdav_api.router)
|
app.include_router(webdav_api.router)
|
||||||
app.include_router(s3_api.router)
|
app.include_router(s3_api.router)
|
||||||
app.include_router(offline_downloads.router)
|
app.include_router(offline_downloads.router)
|
||||||
app.include_router(email.router)
|
app.include_router(email.router)
|
||||||
app.include_router(audit)
|
app.include_router(audit.router)
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
from tortoise import Tortoise
|
from tortoise import Tortoise
|
||||||
|
|
||||||
from domain.adapters.registry import runtime_registry
|
from domain.adapters import runtime_registry
|
||||||
|
|
||||||
TORTOISE_ORM = {
|
TORTOISE_ORM = {
|
||||||
"connections": {"default": "sqlite://data/db/db.sqlite3"},
|
"connections": {"default": "sqlite://data/db/db.sqlite3"},
|
||||||
|
|||||||
7
domain/__init__.py
Normal file
7
domain/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
"""
|
||||||
|
domain:业务域层
|
||||||
|
|
||||||
|
约定:跨包只从各子包 `__init__.py` 导入公开 API。
|
||||||
|
"""
|
||||||
|
|
||||||
|
__all__: list[str] = []
|
||||||
@@ -1 +1,24 @@
|
|||||||
|
from .providers import BaseAdapter
|
||||||
|
from .registry import (
|
||||||
|
RuntimeRegistry,
|
||||||
|
discover_adapters,
|
||||||
|
get_config_schema,
|
||||||
|
get_config_schemas,
|
||||||
|
normalize_adapter_type,
|
||||||
|
runtime_registry,
|
||||||
|
)
|
||||||
|
from .service import AdapterService
|
||||||
|
from .types import AdapterCreate, AdapterOut
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"BaseAdapter",
|
||||||
|
"RuntimeRegistry",
|
||||||
|
"discover_adapters",
|
||||||
|
"get_config_schema",
|
||||||
|
"get_config_schemas",
|
||||||
|
"normalize_adapter_type",
|
||||||
|
"runtime_registry",
|
||||||
|
"AdapterService",
|
||||||
|
"AdapterCreate",
|
||||||
|
"AdapterOut",
|
||||||
|
]
|
||||||
|
|||||||
@@ -4,10 +4,9 @@ from fastapi import APIRouter, Depends, Request
|
|||||||
|
|
||||||
from api.response import success
|
from api.response import success
|
||||||
from domain.audit import AuditAction, audit
|
from domain.audit import AuditAction, audit
|
||||||
from domain.adapters.service import AdapterService
|
from domain.auth import User, get_current_active_user
|
||||||
from domain.adapters.types import AdapterCreate
|
from .service import AdapterService
|
||||||
from domain.auth.service import get_current_active_user
|
from .types import AdapterCreate
|
||||||
from domain.auth.types import User
|
|
||||||
|
|
||||||
router = APIRouter(prefix="/api/adapters", tags=["adapters"])
|
router = APIRouter(prefix="/api/adapters", tags=["adapters"])
|
||||||
|
|
||||||
|
|||||||
@@ -1,11 +1,26 @@
|
|||||||
from typing import List, Dict, Tuple, AsyncIterator
|
from typing import List, Dict, Tuple, AsyncIterator
|
||||||
|
import asyncio
|
||||||
|
import base64
|
||||||
import io
|
import io
|
||||||
import os
|
import os
|
||||||
|
import struct
|
||||||
from models import StorageAdapter
|
from models import StorageAdapter
|
||||||
from telethon import TelegramClient
|
from telethon import TelegramClient
|
||||||
|
from telethon.crypto import AuthKey
|
||||||
from telethon.sessions import StringSession
|
from telethon.sessions import StringSession
|
||||||
|
from telethon.tl import types
|
||||||
import socks
|
import socks
|
||||||
|
|
||||||
|
_SESSION_LOCKS: Dict[str, asyncio.Lock] = {}
|
||||||
|
|
||||||
|
|
||||||
|
def _get_session_lock(session_string: str) -> asyncio.Lock:
|
||||||
|
lock = _SESSION_LOCKS.get(session_string)
|
||||||
|
if lock is None:
|
||||||
|
lock = asyncio.Lock()
|
||||||
|
_SESSION_LOCKS[session_string] = lock
|
||||||
|
return lock
|
||||||
|
|
||||||
# 适配器类型标识
|
# 适配器类型标识
|
||||||
ADAPTER_TYPE = "telegram"
|
ADAPTER_TYPE = "telegram"
|
||||||
|
|
||||||
@@ -54,9 +69,93 @@ class TelegramAdapter:
|
|||||||
if not all([self.api_id, self.api_hash, self.session_string, self.chat_id]):
|
if not all([self.api_id, self.api_hash, self.session_string, self.chat_id]):
|
||||||
raise ValueError("Telegram 适配器需要 api_id, api_hash, session_string 和 chat_id")
|
raise ValueError("Telegram 适配器需要 api_id, api_hash, session_string 和 chat_id")
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _parse_legacy_session_string(value: str) -> StringSession:
|
||||||
|
"""
|
||||||
|
兼容旧版 session_string 格式:
|
||||||
|
- version(1B char) + base64(data)
|
||||||
|
- data: dc_id(1B) + ip_len(2B) + ip(ASCII, ip_len bytes) + port(2B) + auth_key(256B)
|
||||||
|
"""
|
||||||
|
s = (value or "").strip()
|
||||||
|
if not s:
|
||||||
|
raise ValueError("session_string 为空")
|
||||||
|
|
||||||
|
body = s[1:] if s.startswith("1") else s
|
||||||
|
raw = base64.urlsafe_b64decode(body)
|
||||||
|
if len(raw) < 1 + 2 + 2 + 256:
|
||||||
|
raise ValueError("legacy session 数据长度不足")
|
||||||
|
|
||||||
|
dc_id = raw[0]
|
||||||
|
ip_len = struct.unpack(">H", raw[1:3])[0]
|
||||||
|
expected_len = 1 + 2 + ip_len + 2 + 256
|
||||||
|
if len(raw) != expected_len:
|
||||||
|
raise ValueError("legacy session 数据长度不匹配")
|
||||||
|
|
||||||
|
ip_start = 3
|
||||||
|
ip_end = ip_start + ip_len
|
||||||
|
ip = raw[ip_start:ip_end].decode("utf-8")
|
||||||
|
port = struct.unpack(">H", raw[ip_end : ip_end + 2])[0]
|
||||||
|
key = raw[ip_end + 2 : ip_end + 2 + 256]
|
||||||
|
|
||||||
|
sess = StringSession()
|
||||||
|
sess.set_dc(dc_id, ip, port)
|
||||||
|
sess.auth_key = AuthKey(key)
|
||||||
|
return sess
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _pick_photo_thumb(thumbs: list | None):
|
||||||
|
if not thumbs:
|
||||||
|
return None
|
||||||
|
|
||||||
|
cached = []
|
||||||
|
others = []
|
||||||
|
for t in thumbs:
|
||||||
|
if isinstance(t, (types.PhotoCachedSize, types.PhotoStrippedSize)):
|
||||||
|
cached.append(t)
|
||||||
|
elif isinstance(t, (types.PhotoSize, types.PhotoSizeProgressive)):
|
||||||
|
if not isinstance(t, types.PhotoSizeEmpty):
|
||||||
|
others.append(t)
|
||||||
|
|
||||||
|
if cached:
|
||||||
|
cached.sort(key=lambda x: len(getattr(x, "bytes", b"") or b""))
|
||||||
|
return cached[-1]
|
||||||
|
|
||||||
|
if others:
|
||||||
|
def _sz(x):
|
||||||
|
if isinstance(x, types.PhotoSizeProgressive):
|
||||||
|
return max(x.sizes or [0])
|
||||||
|
return int(getattr(x, "size", 0) or 0)
|
||||||
|
|
||||||
|
others.sort(key=_sz)
|
||||||
|
return others[-1]
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _build_session(self) -> StringSession:
|
||||||
|
s = (self.session_string or "").strip()
|
||||||
|
if not s:
|
||||||
|
raise ValueError("Telegram 适配器 session_string 为空")
|
||||||
|
|
||||||
|
try:
|
||||||
|
return StringSession(s)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# 少数工具可能去掉了 version 前缀,这里做一次兼容
|
||||||
|
if not s.startswith("1"):
|
||||||
|
try:
|
||||||
|
return StringSession("1" + s)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
try:
|
||||||
|
return self._parse_legacy_session_string(s)
|
||||||
|
except Exception as exc:
|
||||||
|
raise ValueError("Telegram session_string 无效,请使用 Telethon StringSession 重新生成") from exc
|
||||||
|
|
||||||
def _get_client(self) -> TelegramClient:
|
def _get_client(self) -> TelegramClient:
|
||||||
"""创建一个新的 TelegramClient 实例"""
|
"""创建一个新的 TelegramClient 实例"""
|
||||||
return TelegramClient(StringSession(self.session_string), self.api_id, self.api_hash, proxy=self.proxy)
|
return TelegramClient(self._build_session(), self.api_id, self.api_hash, proxy=self.proxy)
|
||||||
|
|
||||||
def get_effective_root(self, sub_path: str | None) -> str:
|
def get_effective_root(self, sub_path: str | None) -> str:
|
||||||
return ""
|
return ""
|
||||||
@@ -198,6 +297,41 @@ class TelegramAdapter:
|
|||||||
async def mkdir(self, root: str, rel: str):
|
async def mkdir(self, root: str, rel: str):
|
||||||
raise NotImplementedError("Telegram 适配器不支持创建目录。")
|
raise NotImplementedError("Telegram 适配器不支持创建目录。")
|
||||||
|
|
||||||
|
async def get_thumbnail(self, root: str, rel: str, size: str = "medium"):
|
||||||
|
try:
|
||||||
|
message_id_str, _ = rel.split('_', 1)
|
||||||
|
message_id = int(message_id_str)
|
||||||
|
except (ValueError, IndexError):
|
||||||
|
return None
|
||||||
|
|
||||||
|
client = self._get_client()
|
||||||
|
try:
|
||||||
|
await client.connect()
|
||||||
|
message = await client.get_messages(self.chat_id, ids=message_id)
|
||||||
|
if not message:
|
||||||
|
return None
|
||||||
|
|
||||||
|
doc = message.document or message.video
|
||||||
|
thumbs = None
|
||||||
|
if doc and getattr(doc, "thumbs", None):
|
||||||
|
thumbs = list(doc.thumbs or [])
|
||||||
|
elif message.photo and getattr(message.photo, "sizes", None):
|
||||||
|
thumbs = list(message.photo.sizes or [])
|
||||||
|
|
||||||
|
thumb = self._pick_photo_thumb(thumbs)
|
||||||
|
if not thumb:
|
||||||
|
return None
|
||||||
|
|
||||||
|
result = await client.download_media(message, bytes, thumb=thumb)
|
||||||
|
if isinstance(result, (bytes, bytearray)):
|
||||||
|
return bytes(result)
|
||||||
|
return None
|
||||||
|
except Exception:
|
||||||
|
return None
|
||||||
|
finally:
|
||||||
|
if client.is_connected():
|
||||||
|
await client.disconnect()
|
||||||
|
|
||||||
async def delete(self, root: str, rel: str):
|
async def delete(self, root: str, rel: str):
|
||||||
"""删除一个文件 (即一条消息)"""
|
"""删除一个文件 (即一条消息)"""
|
||||||
try:
|
try:
|
||||||
@@ -236,6 +370,8 @@ class TelegramAdapter:
|
|||||||
raise HTTPException(status_code=400, detail=f"无效的文件路径格式: {rel}")
|
raise HTTPException(status_code=400, detail=f"无效的文件路径格式: {rel}")
|
||||||
|
|
||||||
client = self._get_client()
|
client = self._get_client()
|
||||||
|
lock = _get_session_lock(self.session_string)
|
||||||
|
await lock.acquire()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
await client.connect()
|
await client.connect()
|
||||||
@@ -273,7 +409,6 @@ class TelegramAdapter:
|
|||||||
headers = {
|
headers = {
|
||||||
"Accept-Ranges": "bytes",
|
"Accept-Ranges": "bytes",
|
||||||
"Content-Type": mime_type,
|
"Content-Type": mime_type,
|
||||||
"Content-Length": str(file_size),
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if range_header:
|
if range_header:
|
||||||
@@ -285,7 +420,6 @@ class TelegramAdapter:
|
|||||||
if start >= file_size or end >= file_size or start > end:
|
if start >= file_size or end >= file_size or start > end:
|
||||||
raise HTTPException(status_code=416, detail="Requested Range Not Satisfiable")
|
raise HTTPException(status_code=416, detail="Requested Range Not Satisfiable")
|
||||||
status = 206
|
status = 206
|
||||||
headers["Content-Length"] = str(end - start + 1)
|
|
||||||
headers["Content-Range"] = f"bytes {start}-{end}/{file_size}"
|
headers["Content-Range"] = f"bytes {start}-{end}/{file_size}"
|
||||||
except ValueError:
|
except ValueError:
|
||||||
raise HTTPException(status_code=400, detail="Invalid Range header")
|
raise HTTPException(status_code=400, detail="Invalid Range header")
|
||||||
@@ -304,18 +438,28 @@ class TelegramAdapter:
|
|||||||
if downloaded >= limit:
|
if downloaded >= limit:
|
||||||
break
|
break
|
||||||
finally:
|
finally:
|
||||||
if client.is_connected():
|
try:
|
||||||
await client.disconnect()
|
if client.is_connected():
|
||||||
|
await client.disconnect()
|
||||||
|
finally:
|
||||||
|
lock.release()
|
||||||
|
|
||||||
return StreamingResponse(iterator(), status_code=status, headers=headers)
|
return StreamingResponse(iterator(), status_code=status, headers=headers)
|
||||||
|
|
||||||
|
except HTTPException:
|
||||||
|
if client.is_connected():
|
||||||
|
await client.disconnect()
|
||||||
|
lock.release()
|
||||||
|
raise
|
||||||
except FileNotFoundError as e:
|
except FileNotFoundError as e:
|
||||||
if client.is_connected():
|
if client.is_connected():
|
||||||
await client.disconnect()
|
await client.disconnect()
|
||||||
|
lock.release()
|
||||||
raise HTTPException(status_code=404, detail=str(e))
|
raise HTTPException(status_code=404, detail=str(e))
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
if client.is_connected():
|
if client.is_connected():
|
||||||
await client.disconnect()
|
await client.disconnect()
|
||||||
|
lock.release()
|
||||||
raise HTTPException(status_code=500, detail=f"Streaming failed: {str(e)}")
|
raise HTTPException(status_code=500, detail=f"Streaming failed: {str(e)}")
|
||||||
|
|
||||||
async def stat_file(self, root: str, rel: str):
|
async def stat_file(self, root: str, rel: str):
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ from importlib import import_module
|
|||||||
from typing import Callable, Dict
|
from typing import Callable, Dict
|
||||||
|
|
||||||
from models import StorageAdapter
|
from models import StorageAdapter
|
||||||
from domain.adapters.providers.base import BaseAdapter
|
from .providers.base import BaseAdapter
|
||||||
|
|
||||||
AdapterFactory = Callable[[StorageAdapter], BaseAdapter]
|
AdapterFactory = Callable[[StorageAdapter], BaseAdapter]
|
||||||
|
|
||||||
@@ -21,7 +21,7 @@ def normalize_adapter_type(value: str | None) -> str | None:
|
|||||||
|
|
||||||
def discover_adapters():
|
def discover_adapters():
|
||||||
"""扫描 domain.adapters.providers 包, 自动注册适配器类型、工厂与配置 schema。"""
|
"""扫描 domain.adapters.providers 包, 自动注册适配器类型、工厂与配置 schema。"""
|
||||||
from domain.adapters import providers as adapters_pkg
|
from . import providers as adapters_pkg
|
||||||
|
|
||||||
TYPE_MAP.clear()
|
TYPE_MAP.clear()
|
||||||
CONFIG_SCHEMAS.clear()
|
CONFIG_SCHEMAS.clear()
|
||||||
|
|||||||
@@ -2,13 +2,13 @@ from typing import Optional
|
|||||||
|
|
||||||
from fastapi import HTTPException
|
from fastapi import HTTPException
|
||||||
|
|
||||||
from domain.adapters.registry import (
|
from domain.auth import User
|
||||||
|
from .registry import (
|
||||||
get_config_schemas,
|
get_config_schemas,
|
||||||
normalize_adapter_type,
|
normalize_adapter_type,
|
||||||
runtime_registry,
|
runtime_registry,
|
||||||
)
|
)
|
||||||
from domain.adapters.types import AdapterCreate, AdapterOut
|
from .types import AdapterCreate, AdapterOut
|
||||||
from domain.auth.types import User
|
|
||||||
from models import StorageAdapter
|
from models import StorageAdapter
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
9
domain/agent/__init__.py
Normal file
9
domain/agent/__init__.py
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
from .service import AgentService
|
||||||
|
from .types import AgentChatContext, AgentChatRequest, PendingToolCall
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"AgentService",
|
||||||
|
"AgentChatContext",
|
||||||
|
"AgentChatRequest",
|
||||||
|
"PendingToolCall",
|
||||||
|
]
|
||||||
38
domain/agent/api.py
Normal file
38
domain/agent/api.py
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
from typing import Annotated
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends, Request
|
||||||
|
from fastapi.responses import StreamingResponse
|
||||||
|
|
||||||
|
from api.response import success
|
||||||
|
from domain.audit import AuditAction, audit
|
||||||
|
from domain.auth import User, get_current_active_user
|
||||||
|
from .service import AgentService
|
||||||
|
from .types import AgentChatRequest
|
||||||
|
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/api/agent", tags=["agent"])
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/chat")
|
||||||
|
@audit(action=AuditAction.CREATE, description="Agent 对话", body_fields=["auto_execute"])
|
||||||
|
async def chat(
|
||||||
|
request: Request,
|
||||||
|
payload: AgentChatRequest,
|
||||||
|
current_user: Annotated[User, Depends(get_current_active_user)],
|
||||||
|
):
|
||||||
|
data = await AgentService.chat(payload, current_user)
|
||||||
|
return success(data)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/chat/stream")
|
||||||
|
@audit(action=AuditAction.CREATE, description="Agent 对话(SSE)", body_fields=["auto_execute"])
|
||||||
|
async def chat_stream(
|
||||||
|
request: Request,
|
||||||
|
payload: AgentChatRequest,
|
||||||
|
current_user: Annotated[User, Depends(get_current_active_user)],
|
||||||
|
):
|
||||||
|
return StreamingResponse(
|
||||||
|
AgentService.chat_stream(payload, current_user),
|
||||||
|
media_type="text/event-stream",
|
||||||
|
headers={"Cache-Control": "no-cache"},
|
||||||
|
)
|
||||||
470
domain/agent/service.py
Normal file
470
domain/agent/service.py
Normal file
@@ -0,0 +1,470 @@
|
|||||||
|
import asyncio
|
||||||
|
import json
|
||||||
|
import uuid
|
||||||
|
from typing import Any, Dict, List, Optional, Tuple
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
from fastapi import HTTPException
|
||||||
|
|
||||||
|
from domain.ai import AIProviderService, MissingModelError, chat_completion, chat_completion_stream
|
||||||
|
from domain.auth import User
|
||||||
|
from .tools import get_tool, openai_tools, tool_result_to_content
|
||||||
|
from .types import AgentChatRequest, PendingToolCall
|
||||||
|
|
||||||
|
|
||||||
|
def _normalize_path(p: Optional[str]) -> Optional[str]:
|
||||||
|
if not p:
|
||||||
|
return None
|
||||||
|
s = str(p).strip()
|
||||||
|
if not s:
|
||||||
|
return None
|
||||||
|
s = s.replace("\\", "/")
|
||||||
|
if not s.startswith("/"):
|
||||||
|
s = "/" + s
|
||||||
|
s = s.rstrip("/") or "/"
|
||||||
|
return s
|
||||||
|
|
||||||
|
|
||||||
|
def _build_system_prompt(current_path: Optional[str]) -> str:
|
||||||
|
lines = [
|
||||||
|
"你是 Foxel 的 AI 助手。",
|
||||||
|
"你可以通过工具对文件/目录进行查询、读写、移动、复制、删除,以及运行处理器(processor)。",
|
||||||
|
"",
|
||||||
|
"可用工具:",
|
||||||
|
"- vfs_list_dir:浏览目录(列出 entries + pagination)。",
|
||||||
|
"- vfs_stat:查看文件/目录信息。",
|
||||||
|
"- vfs_read_text:读取文本文件内容(不支持二进制)。",
|
||||||
|
"- vfs_search:搜索文件(vector/filename)。",
|
||||||
|
"- vfs_write_text:写入文本文件内容(覆盖)。",
|
||||||
|
"- vfs_mkdir:创建目录。",
|
||||||
|
"- vfs_delete:删除文件或目录。",
|
||||||
|
"- vfs_move:移动路径。",
|
||||||
|
"- vfs_copy:复制路径。",
|
||||||
|
"- vfs_rename:重命名路径。",
|
||||||
|
"- processors_list:获取可用处理器列表(含 type/name/config_schema/produces_file/supports_directory)。",
|
||||||
|
"- processors_run:运行处理器处理文件或目录(会返回 task_id 或 task_ids)。",
|
||||||
|
"",
|
||||||
|
"规则:",
|
||||||
|
"1) 读操作(vfs_list_dir/vfs_stat/vfs_read_text/vfs_search)可直接调用工具。",
|
||||||
|
"2) 写/改/删操作(vfs_write_text/vfs_mkdir/vfs_delete/vfs_move/vfs_copy/vfs_rename/processors_run)默认需要用户确认;只有在开启自动执行时才应直接执行。",
|
||||||
|
"3) 用户未给出明确路径时先追问;若提供了“当前文件管理目录”,可以基于它把相对描述补全为绝对路径(以 / 开头)。",
|
||||||
|
"4) 修改文件内容:先读取(vfs_read_text)→给出改动点→确认后再写入(vfs_write_text)。",
|
||||||
|
"5) processors_run 返回任务 id 后,说明任务已提交,可在任务队列查看进度。",
|
||||||
|
"6) 回答保持简洁中文。",
|
||||||
|
]
|
||||||
|
if current_path:
|
||||||
|
lines.append("")
|
||||||
|
lines.append(f"当前文件管理目录:{current_path}")
|
||||||
|
return "\n".join(lines)
|
||||||
|
|
||||||
|
|
||||||
|
def _ensure_tool_call_ids(message: Dict[str, Any]) -> Dict[str, Any]:
|
||||||
|
tool_calls = message.get("tool_calls")
|
||||||
|
if not isinstance(tool_calls, list):
|
||||||
|
return message
|
||||||
|
|
||||||
|
changed = False
|
||||||
|
for idx, call in enumerate(tool_calls):
|
||||||
|
if not isinstance(call, dict):
|
||||||
|
continue
|
||||||
|
call_id = call.get("id")
|
||||||
|
if isinstance(call_id, str) and call_id.strip():
|
||||||
|
continue
|
||||||
|
call["id"] = f"call_{idx}"
|
||||||
|
changed = True
|
||||||
|
|
||||||
|
if changed:
|
||||||
|
message["tool_calls"] = tool_calls
|
||||||
|
return message
|
||||||
|
|
||||||
|
|
||||||
|
def _extract_pending(tool_call: Dict[str, Any], requires_confirmation: bool) -> PendingToolCall:
|
||||||
|
call_id = str(tool_call.get("id") or "")
|
||||||
|
fn = tool_call.get("function") or {}
|
||||||
|
name = str((fn.get("name") if isinstance(fn, dict) else None) or "")
|
||||||
|
raw_args = fn.get("arguments") if isinstance(fn, dict) else None
|
||||||
|
arguments: Dict[str, Any] = {}
|
||||||
|
if isinstance(raw_args, str) and raw_args.strip():
|
||||||
|
try:
|
||||||
|
parsed = json.loads(raw_args)
|
||||||
|
if isinstance(parsed, dict):
|
||||||
|
arguments = parsed
|
||||||
|
except json.JSONDecodeError:
|
||||||
|
arguments = {}
|
||||||
|
return PendingToolCall(
|
||||||
|
id=call_id,
|
||||||
|
name=name,
|
||||||
|
arguments=arguments,
|
||||||
|
requires_confirmation=requires_confirmation,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _find_last_assistant_tool_calls(messages: List[Dict[str, Any]]) -> Tuple[int, Dict[str, Any]]:
|
||||||
|
for idx in range(len(messages) - 1, -1, -1):
|
||||||
|
msg = messages[idx]
|
||||||
|
if not isinstance(msg, dict):
|
||||||
|
continue
|
||||||
|
if msg.get("role") != "assistant":
|
||||||
|
continue
|
||||||
|
tool_calls = msg.get("tool_calls")
|
||||||
|
if isinstance(tool_calls, list) and tool_calls:
|
||||||
|
return idx, msg
|
||||||
|
raise HTTPException(status_code=400, detail="没有可确认的待执行操作")
|
||||||
|
|
||||||
|
|
||||||
|
def _existing_tool_result_ids(messages: List[Dict[str, Any]]) -> set[str]:
|
||||||
|
ids: set[str] = set()
|
||||||
|
for msg in messages:
|
||||||
|
if not isinstance(msg, dict):
|
||||||
|
continue
|
||||||
|
if msg.get("role") != "tool":
|
||||||
|
continue
|
||||||
|
tool_call_id = msg.get("tool_call_id")
|
||||||
|
if isinstance(tool_call_id, str) and tool_call_id.strip():
|
||||||
|
ids.add(tool_call_id)
|
||||||
|
return ids
|
||||||
|
|
||||||
|
|
||||||
|
async def _choose_chat_ability() -> str:
|
||||||
|
tools_model = await AIProviderService.get_default_model("tools")
|
||||||
|
return "tools" if tools_model else "chat"
|
||||||
|
|
||||||
|
|
||||||
|
def _sse(event: str, data: Any) -> bytes:
|
||||||
|
payload = json.dumps(data, ensure_ascii=False, separators=(",", ":"))
|
||||||
|
return f"event: {event}\ndata: {payload}\n\n".encode("utf-8")
|
||||||
|
|
||||||
|
|
||||||
|
def _format_exc(exc: BaseException) -> str:
|
||||||
|
text = str(exc)
|
||||||
|
return text if text else exc.__class__.__name__
|
||||||
|
|
||||||
|
|
||||||
|
class AgentService:
|
||||||
|
@classmethod
|
||||||
|
async def chat(cls, req: AgentChatRequest, user: Optional[User]) -> Dict[str, Any]:
|
||||||
|
history: List[Dict[str, Any]] = list(req.messages or [])
|
||||||
|
current_path = _normalize_path(req.context.current_path if req.context else None)
|
||||||
|
|
||||||
|
system_prompt = _build_system_prompt(current_path)
|
||||||
|
internal_messages: List[Dict[str, Any]] = [{"role": "system", "content": system_prompt}] + history
|
||||||
|
|
||||||
|
new_messages: List[Dict[str, Any]] = []
|
||||||
|
pending: List[PendingToolCall] = []
|
||||||
|
|
||||||
|
approved_ids = {i for i in (req.approved_tool_call_ids or []) if isinstance(i, str) and i.strip()}
|
||||||
|
rejected_ids = {i for i in (req.rejected_tool_call_ids or []) if isinstance(i, str) and i.strip()}
|
||||||
|
|
||||||
|
if approved_ids or rejected_ids:
|
||||||
|
_, last_call_msg = _find_last_assistant_tool_calls(internal_messages)
|
||||||
|
last_call_msg = _ensure_tool_call_ids(last_call_msg)
|
||||||
|
tool_calls = last_call_msg.get("tool_calls") or []
|
||||||
|
call_map: Dict[str, Dict[str, Any]] = {
|
||||||
|
str(c.get("id")): c
|
||||||
|
for c in tool_calls
|
||||||
|
if isinstance(c, dict) and isinstance(c.get("id"), str)
|
||||||
|
}
|
||||||
|
|
||||||
|
existing_ids = _existing_tool_result_ids(internal_messages)
|
||||||
|
for call_id in approved_ids | rejected_ids:
|
||||||
|
if call_id in existing_ids:
|
||||||
|
continue
|
||||||
|
tool_call = call_map.get(call_id)
|
||||||
|
if not tool_call:
|
||||||
|
continue
|
||||||
|
fn = tool_call.get("function") or {}
|
||||||
|
name = fn.get("name") if isinstance(fn, dict) else None
|
||||||
|
args_raw = fn.get("arguments") if isinstance(fn, dict) else None
|
||||||
|
args: Dict[str, Any] = {}
|
||||||
|
if isinstance(args_raw, str) and args_raw.strip():
|
||||||
|
try:
|
||||||
|
parsed = json.loads(args_raw)
|
||||||
|
if isinstance(parsed, dict):
|
||||||
|
args = parsed
|
||||||
|
except json.JSONDecodeError:
|
||||||
|
args = {}
|
||||||
|
|
||||||
|
spec = get_tool(str(name or ""))
|
||||||
|
if call_id in rejected_ids:
|
||||||
|
content = tool_result_to_content({"canceled": True, "reason": "user_rejected"})
|
||||||
|
tool_msg = {"role": "tool", "tool_call_id": call_id, "content": content}
|
||||||
|
internal_messages.append(tool_msg)
|
||||||
|
new_messages.append(tool_msg)
|
||||||
|
continue
|
||||||
|
|
||||||
|
if not spec:
|
||||||
|
content = tool_result_to_content({"error": f"unknown_tool: {name}"})
|
||||||
|
tool_msg = {"role": "tool", "tool_call_id": call_id, "content": content}
|
||||||
|
internal_messages.append(tool_msg)
|
||||||
|
new_messages.append(tool_msg)
|
||||||
|
continue
|
||||||
|
|
||||||
|
try:
|
||||||
|
result = await spec.handler(args)
|
||||||
|
content = tool_result_to_content(result)
|
||||||
|
except Exception as exc: # noqa: BLE001
|
||||||
|
content = tool_result_to_content({"error": str(exc)})
|
||||||
|
tool_msg = {"role": "tool", "tool_call_id": call_id, "content": content}
|
||||||
|
internal_messages.append(tool_msg)
|
||||||
|
new_messages.append(tool_msg)
|
||||||
|
|
||||||
|
tools_schema = openai_tools()
|
||||||
|
ability = await _choose_chat_ability()
|
||||||
|
max_loops = 4
|
||||||
|
|
||||||
|
for _ in range(max_loops):
|
||||||
|
try:
|
||||||
|
assistant = await chat_completion(
|
||||||
|
internal_messages,
|
||||||
|
ability=ability,
|
||||||
|
tools=tools_schema,
|
||||||
|
tool_choice="auto",
|
||||||
|
timeout=60.0,
|
||||||
|
)
|
||||||
|
except MissingModelError as exc:
|
||||||
|
raise HTTPException(status_code=400, detail=str(exc)) from exc
|
||||||
|
except httpx.HTTPStatusError as exc:
|
||||||
|
raise HTTPException(status_code=502, detail=f"对话请求失败: {exc}") from exc
|
||||||
|
except httpx.RequestError as exc:
|
||||||
|
raise HTTPException(status_code=502, detail=f"对话请求异常: {exc}") from exc
|
||||||
|
|
||||||
|
assistant = _ensure_tool_call_ids(assistant)
|
||||||
|
internal_messages.append(assistant)
|
||||||
|
new_messages.append(assistant)
|
||||||
|
|
||||||
|
tool_calls = assistant.get("tool_calls")
|
||||||
|
if not isinstance(tool_calls, list) or not tool_calls:
|
||||||
|
break
|
||||||
|
|
||||||
|
pending = []
|
||||||
|
for call in tool_calls:
|
||||||
|
if not isinstance(call, dict):
|
||||||
|
continue
|
||||||
|
call_id = str(call.get("id") or "")
|
||||||
|
fn = call.get("function") or {}
|
||||||
|
name = fn.get("name") if isinstance(fn, dict) else None
|
||||||
|
args_raw = fn.get("arguments") if isinstance(fn, dict) else None
|
||||||
|
args: Dict[str, Any] = {}
|
||||||
|
if isinstance(args_raw, str) and args_raw.strip():
|
||||||
|
try:
|
||||||
|
parsed = json.loads(args_raw)
|
||||||
|
if isinstance(parsed, dict):
|
||||||
|
args = parsed
|
||||||
|
except json.JSONDecodeError:
|
||||||
|
args = {}
|
||||||
|
|
||||||
|
spec = get_tool(str(name or ""))
|
||||||
|
if not spec:
|
||||||
|
content = tool_result_to_content({"error": f"unknown_tool: {name}"})
|
||||||
|
tool_msg = {"role": "tool", "tool_call_id": call_id, "content": content}
|
||||||
|
internal_messages.append(tool_msg)
|
||||||
|
new_messages.append(tool_msg)
|
||||||
|
continue
|
||||||
|
|
||||||
|
if spec.requires_confirmation and not req.auto_execute:
|
||||||
|
pending.append(_extract_pending(call, True))
|
||||||
|
continue
|
||||||
|
|
||||||
|
try:
|
||||||
|
result = await spec.handler(args)
|
||||||
|
content = tool_result_to_content(result)
|
||||||
|
except Exception as exc: # noqa: BLE001
|
||||||
|
content = tool_result_to_content({"error": str(exc)})
|
||||||
|
tool_msg = {"role": "tool", "tool_call_id": call_id, "content": content}
|
||||||
|
internal_messages.append(tool_msg)
|
||||||
|
new_messages.append(tool_msg)
|
||||||
|
|
||||||
|
if pending:
|
||||||
|
break
|
||||||
|
|
||||||
|
payload: Dict[str, Any] = {"messages": new_messages}
|
||||||
|
if pending:
|
||||||
|
payload["pending_tool_calls"] = [p.model_dump() for p in pending]
|
||||||
|
return payload
|
||||||
|
|
||||||
|
    @classmethod
    async def chat_stream(cls, req: AgentChatRequest, user: Optional[User]):
        """Stream one agent chat turn as SSE events.

        Emits assistant_start/delta/end, tool_start/tool_end, a "pending"
        event when dangerous tools await user confirmation, and a final
        "done" payload with the messages produced this turn.
        ``user`` is accepted for interface symmetry; it is not read here.
        """
        history: List[Dict[str, Any]] = list(req.messages or [])
        current_path = _normalize_path(req.context.current_path if req.context else None)

        # The system prompt is rebuilt per request so it reflects the
        # caller's current browsing location.
        system_prompt = _build_system_prompt(current_path)
        internal_messages: List[Dict[str, Any]] = [{"role": "system", "content": system_prompt}] + history

        # Messages created during this turn only (what the client gets back).
        new_messages: List[Dict[str, Any]] = []
        pending: List[PendingToolCall] = []

        # Sanitize confirmation ids: keep non-empty strings only.
        approved_ids = {i for i in (req.approved_tool_call_ids or []) if isinstance(i, str) and i.strip()}
        rejected_ids = {i for i in (req.rejected_tool_call_ids or []) if isinstance(i, str) and i.strip()}

        try:
            # Phase 1: settle previously pending tool calls the user has now
            # approved or rejected.
            if approved_ids or rejected_ids:
                _, last_call_msg = _find_last_assistant_tool_calls(internal_messages)
                last_call_msg = _ensure_tool_call_ids(last_call_msg)
                tool_calls = last_call_msg.get("tool_calls") or []
                call_map: Dict[str, Dict[str, Any]] = {
                    str(c.get("id")): c
                    for c in tool_calls
                    if isinstance(c, dict) and isinstance(c.get("id"), str)
                }

                existing_ids = _existing_tool_result_ids(internal_messages)
                for call_id in approved_ids | rejected_ids:
                    # Skip calls that already have a recorded tool result.
                    if call_id in existing_ids:
                        continue
                    tool_call = call_map.get(call_id)
                    if not tool_call:
                        continue
                    fn = tool_call.get("function") or {}
                    name = fn.get("name") if isinstance(fn, dict) else None
                    args_raw = fn.get("arguments") if isinstance(fn, dict) else None
                    args: Dict[str, Any] = {}
                    # Arguments arrive as a JSON string; tolerate malformed JSON.
                    if isinstance(args_raw, str) and args_raw.strip():
                        try:
                            parsed = json.loads(args_raw)
                            if isinstance(parsed, dict):
                                args = parsed
                        except json.JSONDecodeError:
                            args = {}

                    spec = get_tool(str(name or ""))
                    if call_id in rejected_ids:
                        # Record an explicit cancellation so the model sees the
                        # user declined this action.
                        content = tool_result_to_content({"canceled": True, "reason": "user_rejected"})
                        tool_msg = {"role": "tool", "tool_call_id": call_id, "content": content}
                        internal_messages.append(tool_msg)
                        new_messages.append(tool_msg)
                        yield _sse("tool_end", {"tool_call_id": call_id, "name": str(name or ""), "message": tool_msg})
                        continue

                    if not spec:
                        content = tool_result_to_content({"error": f"unknown_tool: {name}"})
                        tool_msg = {"role": "tool", "tool_call_id": call_id, "content": content}
                        internal_messages.append(tool_msg)
                        new_messages.append(tool_msg)
                        yield _sse("tool_end", {"tool_call_id": call_id, "name": str(name or ""), "message": tool_msg})
                        continue

                    yield _sse("tool_start", {"tool_call_id": call_id, "name": spec.name})
                    try:
                        result = await spec.handler(args)
                        content = tool_result_to_content(result)
                    except Exception as exc:  # noqa: BLE001
                        # Tool failures are reported to the model, never raised.
                        content = tool_result_to_content({"error": str(exc)})
                    tool_msg = {"role": "tool", "tool_call_id": call_id, "content": content}
                    internal_messages.append(tool_msg)
                    new_messages.append(tool_msg)
                    yield _sse("tool_end", {"tool_call_id": call_id, "name": spec.name, "message": tool_msg})

            tools_schema = openai_tools()
            ability = await _choose_chat_ability()
            # Cap on model <-> tool round trips per request.
            max_loops = 4

            # Phase 2: assistant/tool loop.
            for _ in range(max_loops):
                assistant_event_id = uuid.uuid4().hex
                yield _sse("assistant_start", {"id": assistant_event_id})

                assistant_message: Dict[str, Any] | None = None
                try:
                    async for event in chat_completion_stream(
                        internal_messages,
                        ability=ability,
                        tools=tools_schema,
                        tool_choice="auto",
                        timeout=60.0,
                    ):
                        if event.get("type") == "delta":
                            delta = event.get("delta")
                            if isinstance(delta, str) and delta:
                                yield _sse("assistant_delta", {"id": assistant_event_id, "delta": delta})
                        elif event.get("type") == "message":
                            msg = event.get("message")
                            if isinstance(msg, dict):
                                assistant_message = msg
                except MissingModelError as exc:
                    raise HTTPException(status_code=400, detail=_format_exc(exc)) from exc
                except httpx.HTTPStatusError as exc:
                    raise HTTPException(status_code=502, detail=f"对话请求失败: {_format_exc(exc)}") from exc
                except httpx.RequestError as exc:
                    raise HTTPException(status_code=502, detail=f"对话请求异常: {_format_exc(exc)}") from exc

                if not assistant_message:
                    assistant_message = {"role": "assistant", "content": ""}

                assistant_message = _ensure_tool_call_ids(assistant_message)
                internal_messages.append(assistant_message)
                new_messages.append(assistant_message)
                yield _sse("assistant_end", {"id": assistant_event_id, "message": assistant_message})

                tool_calls = assistant_message.get("tool_calls")
                if not isinstance(tool_calls, list) or not tool_calls:
                    # No tool use requested: the turn is complete.
                    break

                pending = []
                for call in tool_calls:
                    if not isinstance(call, dict):
                        continue
                    call_id = str(call.get("id") or "")
                    fn = call.get("function") or {}
                    name = fn.get("name") if isinstance(fn, dict) else None
                    args_raw = fn.get("arguments") if isinstance(fn, dict) else None
                    args: Dict[str, Any] = {}
                    if isinstance(args_raw, str) and args_raw.strip():
                        try:
                            parsed = json.loads(args_raw)
                            if isinstance(parsed, dict):
                                args = parsed
                        except json.JSONDecodeError:
                            args = {}

                    spec = get_tool(str(name or ""))
                    if not spec:
                        content = tool_result_to_content({"error": f"unknown_tool: {name}"})
                        tool_msg = {"role": "tool", "tool_call_id": call_id, "content": content}
                        internal_messages.append(tool_msg)
                        new_messages.append(tool_msg)
                        yield _sse("tool_end", {"tool_call_id": call_id, "name": str(name or ""), "message": tool_msg})
                        continue

                    # Destructive tools wait for user confirmation unless the
                    # client explicitly opted into auto-execution.
                    if spec.requires_confirmation and not req.auto_execute:
                        pending.append(_extract_pending(call, True))
                        continue

                    yield _sse("tool_start", {"tool_call_id": call_id, "name": spec.name})
                    try:
                        result = await spec.handler(args)
                        content = tool_result_to_content(result)
                    except Exception as exc:  # noqa: BLE001
                        content = tool_result_to_content({"error": str(exc)})
                    tool_msg = {"role": "tool", "tool_call_id": call_id, "content": content}
                    internal_messages.append(tool_msg)
                    new_messages.append(tool_msg)
                    yield _sse("tool_end", {"tool_call_id": call_id, "name": spec.name, "message": tool_msg})

                if pending:
                    yield _sse("pending", {"pending_tool_calls": [p.model_dump() for p in pending]})
                    break

            payload: Dict[str, Any] = {"messages": new_messages}
            if pending:
                payload["pending_tool_calls"] = [p.model_dump() for p in pending]
            yield _sse("done", payload)

        except asyncio.CancelledError:
            # Client disconnected mid-stream; nothing more to send.
            return
        except HTTPException as exc:
            # Surface HTTP-level failures as a final assistant message so the
            # client still receives a well-formed "done" event.
            detail = exc.detail
            content = detail if isinstance(detail, str) else str(detail)
            if not content.strip():
                content = f"请求失败({exc.status_code})"
            new_messages.append({"role": "assistant", "content": content})
            payload: Dict[str, Any] = {"messages": new_messages}
            if pending:
                payload["pending_tool_calls"] = [p.model_dump() for p in pending]
            yield _sse("done", payload)
            return
        except Exception as exc:  # noqa: BLE001
            new_messages.append({"role": "assistant", "content": f"服务端异常: {_format_exc(exc)}"})
            payload: Dict[str, Any] = {"messages": new_messages}
            if pending:
                payload["pending_tool_calls"] = [p.model_dump() for p in pending]
            yield _sse("done", payload)
            return
|
||||||
412
domain/agent/tools.py
Normal file
412
domain/agent/tools.py
Normal file
@@ -0,0 +1,412 @@
|
|||||||
|
import json
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Any, Awaitable, Callable, Dict, List, Optional
|
||||||
|
|
||||||
|
from domain.processors import ProcessDirectoryRequest, ProcessRequest, ProcessorService
|
||||||
|
from domain.virtual_fs import VirtualFSService
|
||||||
|
from domain.virtual_fs.search import VirtualFSSearchService
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class ToolSpec:
    """Declaration of one agent-callable tool."""

    name: str
    description: str
    # JSON-schema "parameters" object advertised to the model.
    parameters: Dict[str, Any]
    # True for mutating/destructive tools that need user confirmation.
    requires_confirmation: bool
    # Async callable invoked with the parsed arguments dict.
    handler: Callable[[Dict[str, Any]], Awaitable[Any]]
|
||||||
|
|
||||||
|
|
||||||
|
async def _processors_list(_: Dict[str, Any]) -> Dict[str, Any]:
    """Tool handler: list the available processors (arguments are ignored)."""
    available = ProcessorService.list_processors()
    return {"processors": available}
|
||||||
|
|
||||||
|
|
||||||
|
async def _processors_run(args: Dict[str, Any]) -> Dict[str, Any]:
    """Tool handler: run a processor over a file or a directory.

    Directory mode is used only when the target is a directory AND the
    caller supplied ``max_depth`` or ``suffix``; otherwise single-file
    mode applies. Returns the service result tagged with its mode.
    """
    target = str(args.get("path") or "")
    proc_type = str(args.get("processor_type") or "")

    cfg = args.get("config")
    if not isinstance(cfg, dict):
        cfg = {}

    save_target = args.get("save_to")
    save_target = str(save_target) if isinstance(save_target, str) and save_target.strip() else None

    # Depth is best-effort: anything that is not int-coercible becomes None.
    depth: Optional[int] = None
    raw_depth = args.get("max_depth")
    if raw_depth is not None:
        try:
            depth = int(raw_depth)
        except (TypeError, ValueError):
            depth = None

    raw_suffix = args.get("suffix")
    out_suffix = str(raw_suffix) if isinstance(raw_suffix, str) and raw_suffix.strip() else None

    raw_overwrite = args.get("overwrite")
    overwrite_flag = bool(raw_overwrite) if raw_overwrite is not None else None

    is_directory = await VirtualFSService.path_is_directory(target)
    if is_directory and (depth is not None or out_suffix is not None):
        # Directory batch mode defaults to overwrite=True when unspecified.
        dir_req = ProcessDirectoryRequest(
            path=target,
            processor_type=proc_type,
            config=cfg,
            overwrite=True if overwrite_flag is None else overwrite_flag,
            max_depth=depth,
            suffix=out_suffix,
        )
        outcome = await ProcessorService.process_directory(dir_req)
        return {"mode": "directory", **outcome}

    # Single-file mode defaults to overwrite=False when unspecified.
    file_req = ProcessRequest(
        path=target,
        processor_type=proc_type,
        config=cfg,
        save_to=save_target,
        overwrite=False if overwrite_flag is None else overwrite_flag,
    )
    outcome = await ProcessorService.process_file(file_req)
    return {"mode": "file", **outcome}
|
||||||
|
|
||||||
|
|
||||||
|
def _normalize_vfs_path(value: Any) -> str:
|
||||||
|
s = str(value or "").strip().replace("\\", "/")
|
||||||
|
if not s:
|
||||||
|
return ""
|
||||||
|
if not s.startswith("/"):
|
||||||
|
s = "/" + s
|
||||||
|
s = s.rstrip("/") or "/"
|
||||||
|
return s
|
||||||
|
|
||||||
|
|
||||||
|
def _require_vfs_path(value: Any, field: str) -> str:
    """Normalize *value* and raise ValueError("missing_<field>") if empty."""
    normalized = _normalize_vfs_path(value)
    if normalized:
        return normalized
    raise ValueError(f"missing_{field}")
|
||||||
|
|
||||||
|
|
||||||
|
async def _vfs_list_dir(args: Dict[str, Any]) -> Dict[str, Any]:
    """Tool handler: paginated, sorted directory listing (defaults to root)."""
    target = _normalize_vfs_path(args.get("path") or "/") or "/"
    return await VirtualFSService.list_directory(
        target,
        int(args.get("page") or 1),
        int(args.get("page_size") or 50),
        str(args.get("sort_by") or "name"),
        str(args.get("sort_order") or "asc"),
    )
|
||||||
|
|
||||||
|
|
||||||
|
async def _vfs_stat(args: Dict[str, Any]) -> Any:
    """Tool handler: stat a file or directory (path is required)."""
    return await VirtualFSService.stat(_require_vfs_path(args.get("path"), "path"))
|
||||||
|
|
||||||
|
|
||||||
|
async def _vfs_read_text(args: Dict[str, Any]) -> Dict[str, Any]:
    """Tool handler: read a text file, decoding and clipping to max_chars.

    Bytes that fail to decode are reported as an error result rather than
    raised, so the model can react to binary files gracefully.
    """
    target = _require_vfs_path(args.get("path"), "path")
    codec = str(args.get("encoding") or "utf-8")
    limit = int(args.get("max_chars") or 8000)

    raw = await VirtualFSService.read_file(target)
    if isinstance(raw, (bytes, bytearray)):
        try:
            text = bytes(raw).decode(codec)
        except UnicodeDecodeError:
            return {"error": "binary_or_invalid_text", "path": target}
    elif isinstance(raw, str):
        text = raw
    else:
        text = str(raw)

    total = len(text)
    clipped = total > limit
    snippet = text[:limit] if clipped else text
    return {
        "path": target,
        "encoding": codec,
        "content": snippet,
        "truncated": clipped,
        "length": total,
    }
|
||||||
|
|
||||||
|
|
||||||
|
async def _vfs_write_text(args: Dict[str, Any]) -> Dict[str, Any]:
    """Tool handler: encode text and overwrite the target file (never root)."""
    target = _require_vfs_path(args.get("path"), "path")
    if target == "/":
        raise ValueError("invalid_path")
    codec = str(args.get("encoding") or "utf-8")
    payload = str(args.get("content") or "").encode(codec)
    await VirtualFSService.write_file(target, payload)
    return {"written": True, "path": target, "encoding": codec, "bytes": len(payload)}
|
||||||
|
|
||||||
|
|
||||||
|
async def _vfs_mkdir(args: Dict[str, Any]) -> Dict[str, Any]:
    """Tool handler: create a directory at the required path."""
    return await VirtualFSService.mkdir(_require_vfs_path(args.get("path"), "path"))
|
||||||
|
|
||||||
|
|
||||||
|
async def _vfs_delete(args: Dict[str, Any]) -> Dict[str, Any]:
    """Tool handler: delete a file or directory (recursion is adapter-defined)."""
    return await VirtualFSService.delete(_require_vfs_path(args.get("path"), "path"))
|
||||||
|
|
||||||
|
|
||||||
|
async def _vfs_move(args: Dict[str, Any]) -> Dict[str, Any]:
    """Tool handler: move src to dst; root is never a valid endpoint."""
    source = _require_vfs_path(args.get("src"), "src")
    target = _require_vfs_path(args.get("dst"), "dst")
    if "/" in (source, target):
        raise ValueError("invalid_path")
    return await VirtualFSService.move(source, target, bool(args.get("overwrite") or False))
|
||||||
|
|
||||||
|
|
||||||
|
async def _vfs_copy(args: Dict[str, Any]) -> Dict[str, Any]:
    """Tool handler: copy src to dst; root is never a valid endpoint."""
    source = _require_vfs_path(args.get("src"), "src")
    target = _require_vfs_path(args.get("dst"), "dst")
    if "/" in (source, target):
        raise ValueError("invalid_path")
    return await VirtualFSService.copy(source, target, bool(args.get("overwrite") or False))
|
||||||
|
|
||||||
|
|
||||||
|
async def _vfs_rename(args: Dict[str, Any]) -> Dict[str, Any]:
    """Tool handler: rename src to dst; root is never a valid endpoint."""
    source = _require_vfs_path(args.get("src"), "src")
    target = _require_vfs_path(args.get("dst"), "dst")
    if "/" in (source, target):
        raise ValueError("invalid_path")
    return await VirtualFSService.rename(source, target, bool(args.get("overwrite") or False))
|
||||||
|
|
||||||
|
|
||||||
|
async def _vfs_search(args: Dict[str, Any]) -> Dict[str, Any]:
    """Tool handler: vector or filename search; a non-empty query is required."""
    query = str(args.get("q") or "").strip()
    if not query:
        raise ValueError("missing_q")
    return await VirtualFSSearchService.search(
        query,
        int(args.get("top_k") or 10),
        str(args.get("mode") or "vector"),
        int(args.get("page") or 1),
        int(args.get("page_size") or 10),
    )
|
||||||
|
|
||||||
|
|
||||||
|
# Registry of every tool the agent may call. Keys match ToolSpec.name.
# requires_confirmation=True marks mutating/destructive operations that
# need explicit user approval unless the request enables auto_execute.
TOOLS: Dict[str, ToolSpec] = {
    # --- processor tools ---
    "processors_list": ToolSpec(
        name="processors_list",
        description="获取可用处理器列表(type/name/config_schema 等)。",
        parameters={
            "type": "object",
            "properties": {},
            "additionalProperties": False,
        },
        requires_confirmation=False,
        handler=_processors_list,
    ),
    "processors_run": ToolSpec(
        name="processors_run",
        description=(
            "运行处理器处理文件或目录。"
            " 对目录可选 max_depth/suffix;对文件可选 overwrite/save_to。"
            " 返回任务 id(去任务队列查看进度)。"
        ),
        parameters={
            "type": "object",
            "properties": {
                "path": {"type": "string", "description": "文件或目录路径(绝对路径,如 /foo/bar)"},
                "processor_type": {"type": "string", "description": "处理器类型(例如 image_watermark)"},
                "config": {"type": "object", "description": "处理器配置,按 processors_list 返回的 config_schema 填写"},
                "overwrite": {"type": "boolean", "description": "是否覆盖原文件/目录内文件"},
                "save_to": {"type": "string", "description": "保存到指定路径(仅文件模式,且 overwrite=false 时使用)"},
                "max_depth": {"type": "integer", "description": "目录遍历深度(仅目录模式)"},
                "suffix": {"type": "string", "description": "目录批处理时的输出后缀(仅 produces_file 且 overwrite=false)"},
            },
            "required": ["path", "processor_type"],
        },
        requires_confirmation=True,
        handler=_processors_run,
    ),
    # --- read-only virtual-filesystem tools ---
    "vfs_list_dir": ToolSpec(
        name="vfs_list_dir",
        description="浏览目录(列出 entries + pagination)。",
        parameters={
            "type": "object",
            "properties": {
                "path": {"type": "string", "description": "目录路径(绝对路径,如 /foo/bar)"},
                "page": {"type": "integer", "description": "页码(从 1 开始)"},
                "page_size": {"type": "integer", "description": "每页条数"},
                "sort_by": {"type": "string", "description": "排序字段:name/size/mtime"},
                "sort_order": {"type": "string", "description": "排序顺序:asc/desc"},
            },
            "required": ["path"],
            "additionalProperties": False,
        },
        requires_confirmation=False,
        handler=_vfs_list_dir,
    ),
    "vfs_stat": ToolSpec(
        name="vfs_stat",
        description="查看文件/目录信息(size/mtime/is_dir/has_thumbnail/vector_index 等)。",
        parameters={
            "type": "object",
            "properties": {
                "path": {"type": "string", "description": "路径(绝对路径,如 /foo/bar.txt)"},
            },
            "required": ["path"],
            "additionalProperties": False,
        },
        requires_confirmation=False,
        handler=_vfs_stat,
    ),
    "vfs_read_text": ToolSpec(
        name="vfs_read_text",
        description="读取文本文件内容(解码失败视为二进制,返回 error)。",
        parameters={
            "type": "object",
            "properties": {
                "path": {"type": "string", "description": "文件路径(绝对路径,如 /foo/bar.md)"},
                "encoding": {"type": "string", "description": "文本编码(默认 utf-8)"},
                "max_chars": {"type": "integer", "description": "最多返回的字符数(默认 8000)"},
            },
            "required": ["path"],
            "additionalProperties": False,
        },
        requires_confirmation=False,
        handler=_vfs_read_text,
    ),
    # --- mutating virtual-filesystem tools (all require confirmation) ---
    "vfs_write_text": ToolSpec(
        name="vfs_write_text",
        description="写入文本文件内容(会覆盖目标文件)。",
        parameters={
            "type": "object",
            "properties": {
                "path": {"type": "string", "description": "文件路径(绝对路径,如 /foo/bar.md)"},
                "content": {"type": "string", "description": "要写入的文本内容"},
                "encoding": {"type": "string", "description": "文本编码(默认 utf-8)"},
            },
            "required": ["path", "content"],
            "additionalProperties": False,
        },
        requires_confirmation=True,
        handler=_vfs_write_text,
    ),
    "vfs_mkdir": ToolSpec(
        name="vfs_mkdir",
        description="创建目录。",
        parameters={
            "type": "object",
            "properties": {
                "path": {"type": "string", "description": "目录路径(绝对路径,如 /foo/bar)"},
            },
            "required": ["path"],
            "additionalProperties": False,
        },
        requires_confirmation=True,
        handler=_vfs_mkdir,
    ),
    "vfs_delete": ToolSpec(
        name="vfs_delete",
        description="删除文件或目录(由底层适配器决定是否递归)。",
        parameters={
            "type": "object",
            "properties": {
                "path": {"type": "string", "description": "路径(绝对路径,如 /foo/bar 或 /foo/bar.txt)"},
            },
            "required": ["path"],
            "additionalProperties": False,
        },
        requires_confirmation=True,
        handler=_vfs_delete,
    ),
    "vfs_move": ToolSpec(
        name="vfs_move",
        description="移动路径(可能进入任务队列)。",
        parameters={
            "type": "object",
            "properties": {
                "src": {"type": "string", "description": "源路径(绝对路径)"},
                "dst": {"type": "string", "description": "目标路径(绝对路径)"},
                "overwrite": {"type": "boolean", "description": "是否允许覆盖已存在目标(默认 false)"},
            },
            "required": ["src", "dst"],
            "additionalProperties": False,
        },
        requires_confirmation=True,
        handler=_vfs_move,
    ),
    "vfs_copy": ToolSpec(
        name="vfs_copy",
        description="复制路径(可能进入任务队列)。",
        parameters={
            "type": "object",
            "properties": {
                "src": {"type": "string", "description": "源路径(绝对路径)"},
                "dst": {"type": "string", "description": "目标路径(绝对路径)"},
                "overwrite": {"type": "boolean", "description": "是否覆盖已存在目标(默认 false)"},
            },
            "required": ["src", "dst"],
            "additionalProperties": False,
        },
        requires_confirmation=True,
        handler=_vfs_copy,
    ),
    "vfs_rename": ToolSpec(
        name="vfs_rename",
        description="重命名路径(本质是同目录 move)。",
        parameters={
            "type": "object",
            "properties": {
                "src": {"type": "string", "description": "源路径(绝对路径)"},
                "dst": {"type": "string", "description": "目标路径(绝对路径)"},
                "overwrite": {"type": "boolean", "description": "是否允许覆盖已存在目标(默认 false)"},
            },
            "required": ["src", "dst"],
            "additionalProperties": False,
        },
        requires_confirmation=True,
        handler=_vfs_rename,
    ),
    # --- search ---
    "vfs_search": ToolSpec(
        name="vfs_search",
        description="搜索文件(mode=vector 或 filename)。",
        parameters={
            "type": "object",
            "properties": {
                "q": {"type": "string", "description": "搜索关键词"},
                "mode": {"type": "string", "description": "搜索模式:vector/filename(默认 vector)"},
                "top_k": {"type": "integer", "description": "返回数量(vector 模式使用,默认 10)"},
                "page": {"type": "integer", "description": "页码(filename 模式使用,默认 1)"},
                "page_size": {"type": "integer", "description": "分页大小(filename 模式使用,默认 10)"},
            },
            "required": ["q"],
            "additionalProperties": False,
        },
        requires_confirmation=False,
        handler=_vfs_search,
    ),
}
|
||||||
|
|
||||||
|
|
||||||
|
def get_tool(name: str) -> Optional[ToolSpec]:
    """Look up a registered tool by name; returns None when unknown."""
    return TOOLS.get(name)
|
||||||
|
|
||||||
|
|
||||||
|
def openai_tools() -> List[Dict[str, Any]]:
    """Render every registered ToolSpec as an OpenAI function-tool entry."""
    return [
        {
            "type": "function",
            "function": {
                "name": spec.name,
                "description": spec.description,
                "parameters": spec.parameters,
            },
        }
        for spec in TOOLS.values()
    ]
|
||||||
|
|
||||||
|
|
||||||
|
def tool_result_to_content(result: Any) -> str:
    """Serialize a tool result into the string content of a tool message.

    Strings pass through unchanged and None becomes "". Anything else is
    JSON encoded; values json cannot encode fall back to
    ``{"result": str(result)}``.
    """
    if result is None:
        return ""
    if isinstance(result, str):
        return result
    try:
        return json.dumps(result, ensure_ascii=False)
    except (TypeError, ValueError):
        # json.dumps raises TypeError for unsupported types but ValueError
        # for circular references; catch both so a bad tool result never
        # crashes the chat loop.
        return json.dumps({"result": str(result)}, ensure_ascii=False)
|
||||||
23
domain/agent/types.py
Normal file
23
domain/agent/types.py
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
|
||||||
|
class AgentChatContext(BaseModel):
    """Optional client-side context sent with an agent chat request."""

    # Path the user is currently browsing, if any.
    current_path: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
class AgentChatRequest(BaseModel):
    """Request body for the agent chat endpoints."""

    # Conversation history in OpenAI chat-message format.
    messages: List[Dict[str, Any]] = Field(default_factory=list)
    # When True, confirmation-requiring tools execute without asking.
    auto_execute: bool = False
    # Ids of previously pending tool calls the user approved.
    approved_tool_call_ids: List[str] = Field(default_factory=list)
    # Ids of previously pending tool calls the user rejected.
    rejected_tool_call_ids: List[str] = Field(default_factory=list)
    # Optional client context (e.g. current browsing path).
    context: Optional[AgentChatContext] = None
|
||||||
|
|
||||||
|
|
||||||
|
class PendingToolCall(BaseModel):
    """A tool call awaiting explicit user approval or rejection."""

    # Tool-call id as assigned by the model (or synthesized).
    id: str
    # Name of the tool the model asked to invoke.
    name: str
    # Parsed arguments the tool would be called with.
    arguments: Dict[str, Any] = Field(default_factory=dict)
    # Whether this call needs confirmation (True for pending entries).
    requires_confirmation: bool = True
|
||||||
|
|
||||||
@@ -1,28 +1,61 @@
|
|||||||
from .api import router_ai, router_vector_db
|
from .inference import (
|
||||||
|
MissingModelError,
|
||||||
|
chat_completion,
|
||||||
|
chat_completion_stream,
|
||||||
|
describe_image_base64,
|
||||||
|
get_text_embedding,
|
||||||
|
provider_service,
|
||||||
|
rerank_texts,
|
||||||
|
)
|
||||||
from .service import (
|
from .service import (
|
||||||
AIProviderService,
|
AIProviderService,
|
||||||
|
FILE_COLLECTION_NAME,
|
||||||
|
VECTOR_COLLECTION_NAME,
|
||||||
|
DEFAULT_VECTOR_DIMENSION,
|
||||||
VectorDBConfigManager,
|
VectorDBConfigManager,
|
||||||
VectorDBService,
|
VectorDBService,
|
||||||
DEFAULT_VECTOR_DIMENSION,
|
|
||||||
ABILITIES,
|
|
||||||
normalize_capabilities,
|
|
||||||
)
|
)
|
||||||
from .types import (
|
from .types import (
|
||||||
|
ABILITIES,
|
||||||
AIDefaultsUpdate,
|
AIDefaultsUpdate,
|
||||||
AIModelCreate,
|
AIModelCreate,
|
||||||
AIModelUpdate,
|
AIModelUpdate,
|
||||||
AIProviderCreate,
|
AIProviderCreate,
|
||||||
AIProviderUpdate,
|
AIProviderUpdate,
|
||||||
VectorDBConfigPayload,
|
VectorDBConfigPayload,
|
||||||
|
normalize_capabilities,
|
||||||
|
)
|
||||||
|
from .vector_providers import (
|
||||||
|
BaseVectorProvider,
|
||||||
|
MilvusLiteProvider,
|
||||||
|
MilvusServerProvider,
|
||||||
|
QdrantProvider,
|
||||||
|
get_provider_class,
|
||||||
|
get_provider_entry,
|
||||||
|
list_providers,
|
||||||
)
|
)
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
"router_ai",
|
"MissingModelError",
|
||||||
"router_vector_db",
|
"chat_completion",
|
||||||
|
"chat_completion_stream",
|
||||||
|
"describe_image_base64",
|
||||||
|
"get_text_embedding",
|
||||||
|
"provider_service",
|
||||||
|
"rerank_texts",
|
||||||
"AIProviderService",
|
"AIProviderService",
|
||||||
"VectorDBService",
|
"VectorDBService",
|
||||||
"VectorDBConfigManager",
|
"VectorDBConfigManager",
|
||||||
"DEFAULT_VECTOR_DIMENSION",
|
"DEFAULT_VECTOR_DIMENSION",
|
||||||
|
"VECTOR_COLLECTION_NAME",
|
||||||
|
"FILE_COLLECTION_NAME",
|
||||||
|
"BaseVectorProvider",
|
||||||
|
"MilvusLiteProvider",
|
||||||
|
"MilvusServerProvider",
|
||||||
|
"QdrantProvider",
|
||||||
|
"list_providers",
|
||||||
|
"get_provider_entry",
|
||||||
|
"get_provider_class",
|
||||||
"ABILITIES",
|
"ABILITIES",
|
||||||
"normalize_capabilities",
|
"normalize_capabilities",
|
||||||
"AIDefaultsUpdate",
|
"AIDefaultsUpdate",
|
||||||
|
|||||||
@@ -5,8 +5,9 @@ from fastapi import APIRouter, Depends, HTTPException, Path, Request
|
|||||||
|
|
||||||
from api.response import success
|
from api.response import success
|
||||||
from domain.audit import AuditAction, audit
|
from domain.audit import AuditAction, audit
|
||||||
from domain.ai.service import AIProviderService, VectorDBConfigManager, VectorDBService
|
from domain.auth import User, get_current_active_user
|
||||||
from domain.ai.types import (
|
from .service import AIProviderService, VectorDBConfigManager, VectorDBService
|
||||||
|
from .types import (
|
||||||
AIDefaultsUpdate,
|
AIDefaultsUpdate,
|
||||||
AIModelCreate,
|
AIModelCreate,
|
||||||
AIModelUpdate,
|
AIModelUpdate,
|
||||||
@@ -14,9 +15,7 @@ from domain.ai.types import (
|
|||||||
AIProviderUpdate,
|
AIProviderUpdate,
|
||||||
VectorDBConfigPayload,
|
VectorDBConfigPayload,
|
||||||
)
|
)
|
||||||
from domain.ai.vector_providers import get_provider_class, get_provider_entry, list_providers
|
from .vector_providers import get_provider_class, get_provider_entry, list_providers
|
||||||
from domain.auth.service import get_current_active_user
|
|
||||||
from domain.auth.types import User
|
|
||||||
|
|
||||||
router_ai = APIRouter(prefix="/api/ai", tags=["ai"])
|
router_ai = APIRouter(prefix="/api/ai", tags=["ai"])
|
||||||
router_vector_db = APIRouter(prefix="/api/vector-db", tags=["vector-db"])
|
router_vector_db = APIRouter(prefix="/api/vector-db", tags=["vector-db"])
|
||||||
|
|||||||
@@ -1,8 +1,10 @@
|
|||||||
|
import json
|
||||||
|
|
||||||
import httpx
|
import httpx
|
||||||
from typing import List, Sequence, Tuple
|
from typing import Any, AsyncIterator, Dict, List, Sequence, Tuple
|
||||||
|
|
||||||
from models.database import AIModel, AIProvider
|
from models.database import AIModel, AIProvider
|
||||||
from domain.ai.service import AIProviderService
|
from .service import AIProviderService
|
||||||
|
|
||||||
|
|
||||||
provider_service = AIProviderService
|
provider_service = AIProviderService
|
||||||
@@ -243,3 +245,195 @@ async def _rerank_with_gemini(
|
|||||||
except (TypeError, ValueError):
|
except (TypeError, ValueError):
|
||||||
scores.append(0.0)
|
scores.append(0.0)
|
||||||
return scores
|
return scores
|
||||||
|
|
||||||
|
|
||||||
|
async def chat_completion(
    messages: List[Dict[str, Any]],
    *,
    ability: str = "chat",
    tools: List[Dict[str, Any]] | None = None,
    tool_choice: Any | None = None,
    temperature: float | None = None,
    timeout: float = 60.0,
) -> Dict[str, Any]:
    """Run one non-streaming chat completion with the configured model.

    Raises MissingModelError when no model is configured for *ability* or
    when the provider is not OpenAI-compatible.
    """
    model, provider = await _require_model(ability)
    if provider.api_format != "openai":
        raise MissingModelError("当前仅支持 OpenAI 兼容接口的对话模型。")
    reply = await _chat_with_openai(
        provider,
        model,
        messages,
        tools=tools,
        tool_choice=tool_choice,
        temperature=temperature,
        timeout=timeout,
    )
    return reply
|
||||||
|
|
||||||
|
|
||||||
|
async def _chat_with_openai(
    provider: AIProvider,
    model: AIModel,
    messages: List[Dict[str, Any]],
    *,
    tools: List[Dict[str, Any]] | None,
    tool_choice: Any | None,
    temperature: float | None,
    timeout: float,
) -> Dict[str, Any]:
    """POST one /chat/completions request and return the first choice's message.

    Raises RuntimeError when the response carries no choices or a malformed
    message; HTTP errors propagate as httpx exceptions.
    """
    endpoint = _openai_endpoint(provider, "/chat/completions")
    body: Dict[str, Any] = {"model": model.name, "messages": messages}
    if tools:
        body["tools"] = tools
        body["tool_choice"] = tool_choice or "auto"
    if temperature is not None:
        body["temperature"] = float(temperature)

    async with httpx.AsyncClient(timeout=timeout) as client:
        resp = await client.post(endpoint, headers=_openai_headers(provider), json=body)
        resp.raise_for_status()
        parsed = resp.json()

    candidates = parsed.get("choices") or []
    if not candidates:
        raise RuntimeError("对话接口返回为空")
    first = candidates[0].get("message")
    if not isinstance(first, dict):
        raise RuntimeError("对话接口返回格式异常")
    return first
|
||||||
|
|
||||||
|
|
||||||
|
async def chat_completion_stream(
    messages: List[Dict[str, Any]],
    *,
    ability: str = "chat",
    tools: List[Dict[str, Any]] | None = None,
    tool_choice: Any | None = None,
    temperature: float | None = None,
    timeout: float = 60.0,
) -> AsyncIterator[Dict[str, Any]]:
    """Stream chat-completion events from the configured OpenAI-format model.

    Raises MissingModelError when no model is configured for *ability* or
    the provider is not OpenAI-compatible; otherwise delegates to the
    OpenAI streaming implementation and re-yields its events.
    """
    model, provider = await _require_model(ability)
    if provider.api_format != "openai":
        raise MissingModelError("当前仅支持 OpenAI 兼容接口的对话模型。")
    upstream = _chat_stream_with_openai(
        provider,
        model,
        messages,
        tools=tools,
        tool_choice=tool_choice,
        temperature=temperature,
        timeout=timeout,
    )
    async for item in upstream:
        yield item
|
||||||
|
|
||||||
|
|
||||||
|
async def _chat_stream_with_openai(
|
||||||
|
provider: AIProvider,
|
||||||
|
model: AIModel,
|
||||||
|
messages: List[Dict[str, Any]],
|
||||||
|
*,
|
||||||
|
tools: List[Dict[str, Any]] | None,
|
||||||
|
tool_choice: Any | None,
|
||||||
|
temperature: float | None,
|
||||||
|
timeout: float,
|
||||||
|
) -> AsyncIterator[Dict[str, Any]]:
|
||||||
|
url = _openai_endpoint(provider, "/chat/completions")
|
||||||
|
payload: Dict[str, Any] = {
|
||||||
|
"model": model.name,
|
||||||
|
"messages": messages,
|
||||||
|
"stream": True,
|
||||||
|
}
|
||||||
|
if tools:
|
||||||
|
payload["tools"] = tools
|
||||||
|
payload["tool_choice"] = tool_choice or "auto"
|
||||||
|
if temperature is not None:
|
||||||
|
payload["temperature"] = float(temperature)
|
||||||
|
|
||||||
|
content_parts: List[str] = []
|
||||||
|
tool_call_map: Dict[int, Dict[str, Any]] = {}
|
||||||
|
role = "assistant"
|
||||||
|
finish_reason: str | None = None
|
||||||
|
|
||||||
|
async with httpx.AsyncClient(timeout=timeout) as client:
|
||||||
|
async with client.stream("POST", url, headers=_openai_headers(provider), json=payload) as response:
|
||||||
|
response.raise_for_status()
|
||||||
|
async for line in response.aiter_lines():
|
||||||
|
if not line:
|
||||||
|
continue
|
||||||
|
if not line.startswith("data:"):
|
||||||
|
continue
|
||||||
|
data = line[5:].strip()
|
||||||
|
if not data:
|
||||||
|
continue
|
||||||
|
if data == "[DONE]":
|
||||||
|
break
|
||||||
|
try:
|
||||||
|
chunk = json.loads(data)
|
||||||
|
except json.JSONDecodeError:
|
||||||
|
continue
|
||||||
|
|
||||||
|
choices = chunk.get("choices") or []
|
||||||
|
if not choices:
|
||||||
|
continue
|
||||||
|
choice = choices[0] if isinstance(choices[0], dict) else {}
|
||||||
|
delta = choice.get("delta") if isinstance(choice, dict) else None
|
||||||
|
delta = delta if isinstance(delta, dict) else {}
|
||||||
|
|
||||||
|
if isinstance(delta.get("role"), str):
|
||||||
|
role = delta["role"]
|
||||||
|
|
||||||
|
delta_content = delta.get("content")
|
||||||
|
if isinstance(delta_content, str) and delta_content:
|
||||||
|
content_parts.append(delta_content)
|
||||||
|
yield {"type": "delta", "delta": delta_content}
|
||||||
|
|
||||||
|
delta_tool_calls = delta.get("tool_calls")
|
||||||
|
if isinstance(delta_tool_calls, list):
|
||||||
|
for item in delta_tool_calls:
|
||||||
|
if not isinstance(item, dict):
|
||||||
|
continue
|
||||||
|
idx = item.get("index")
|
||||||
|
if not isinstance(idx, int):
|
||||||
|
continue
|
||||||
|
entry = tool_call_map.setdefault(
|
||||||
|
idx,
|
||||||
|
{"id": None, "type": None, "function": {"name": None, "arguments": ""}},
|
||||||
|
)
|
||||||
|
if isinstance(item.get("id"), str) and item["id"].strip():
|
||||||
|
entry["id"] = item["id"]
|
||||||
|
if isinstance(item.get("type"), str) and item["type"].strip():
|
||||||
|
entry["type"] = item["type"]
|
||||||
|
fn = item.get("function")
|
||||||
|
if isinstance(fn, dict):
|
||||||
|
if isinstance(fn.get("name"), str) and fn["name"].strip():
|
||||||
|
entry["function"]["name"] = fn["name"]
|
||||||
|
args_part = fn.get("arguments")
|
||||||
|
if isinstance(args_part, str) and args_part:
|
||||||
|
entry["function"]["arguments"] += args_part
|
||||||
|
|
||||||
|
fr = choice.get("finish_reason") if isinstance(choice, dict) else None
|
||||||
|
if isinstance(fr, str) and fr:
|
||||||
|
finish_reason = fr
|
||||||
|
|
||||||
|
content = "".join(content_parts)
|
||||||
|
message: Dict[str, Any] = {"role": role, "content": content}
|
||||||
|
if tool_call_map:
|
||||||
|
tool_calls: List[Dict[str, Any]] = []
|
||||||
|
for idx in sorted(tool_call_map.keys()):
|
||||||
|
item = tool_call_map[idx]
|
||||||
|
fn = item.get("function") if isinstance(item.get("function"), dict) else {}
|
||||||
|
call_id = item.get("id") if isinstance(item.get("id"), str) and item.get("id") else f"call_{idx}"
|
||||||
|
call_type = item.get("type") if isinstance(item.get("type"), str) and item.get("type") else "function"
|
||||||
|
tool_calls.append({
|
||||||
|
"id": call_id,
|
||||||
|
"type": call_type,
|
||||||
|
"function": {
|
||||||
|
"name": fn.get("name") or "",
|
||||||
|
"arguments": fn.get("arguments") or "",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
message["tool_calls"] = tool_calls
|
||||||
|
|
||||||
|
yield {"type": "message", "message": message, "finish_reason": finish_reason}
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ import httpx
|
|||||||
from tortoise.exceptions import DoesNotExist
|
from tortoise.exceptions import DoesNotExist
|
||||||
from tortoise.transactions import in_transaction
|
from tortoise.transactions import in_transaction
|
||||||
|
|
||||||
from domain.config.service import ConfigService
|
from domain.config import ConfigService
|
||||||
from models.database import AIDefaultModel, AIModel, AIProvider
|
from models.database import AIDefaultModel, AIModel, AIProvider
|
||||||
|
|
||||||
from .types import ABILITIES, normalize_capabilities
|
from .types import ABILITIES, normalize_capabilities
|
||||||
@@ -140,7 +140,7 @@ def serialize_provider(provider: AIProvider) -> Dict[str, Any]:
|
|||||||
"provider_type": provider.provider_type,
|
"provider_type": provider.provider_type,
|
||||||
"api_format": provider.api_format,
|
"api_format": provider.api_format,
|
||||||
"base_url": provider.base_url,
|
"base_url": provider.base_url,
|
||||||
"api_key": provider.api_key,
|
"has_api_key": bool(provider.api_key),
|
||||||
"logo_url": provider.logo_url,
|
"logo_url": provider.logo_url,
|
||||||
"extra_config": provider.extra_config or {},
|
"extra_config": provider.extra_config or {},
|
||||||
"created_at": provider.created_at,
|
"created_at": provider.created_at,
|
||||||
|
|||||||
@@ -1,5 +1,4 @@
|
|||||||
from domain.audit.decorator import audit
|
from .decorator import audit
|
||||||
from domain.audit.types import AuditAction
|
from .types import AuditAction
|
||||||
from domain.audit.api import router
|
|
||||||
|
|
||||||
__all__ = ["audit", "AuditAction", "router"]
|
__all__ = ["audit", "AuditAction"]
|
||||||
|
|||||||
@@ -4,10 +4,9 @@ from typing import Annotated, Optional
|
|||||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||||
|
|
||||||
from api import response
|
from api import response
|
||||||
from domain.audit.service import AuditService
|
from domain.auth import User, get_current_active_user
|
||||||
from domain.audit.types import AuditAction
|
from .service import AuditService
|
||||||
from domain.auth.service import get_current_active_user
|
from .types import AuditAction
|
||||||
from domain.auth.types import User
|
|
||||||
|
|
||||||
CurrentUser = Annotated[User, Depends(get_current_active_user)]
|
CurrentUser = Annotated[User, Depends(get_current_active_user)]
|
||||||
|
|
||||||
|
|||||||
@@ -7,11 +7,11 @@ import jwt
|
|||||||
from fastapi import Request
|
from fastapi import Request
|
||||||
from jwt.exceptions import InvalidTokenError
|
from jwt.exceptions import InvalidTokenError
|
||||||
|
|
||||||
from domain.audit.service import AuditService
|
from domain.auth import ALGORITHM
|
||||||
from domain.audit.types import AuditAction
|
from domain.config import ConfigService
|
||||||
from domain.auth.service import ALGORITHM
|
|
||||||
from domain.config.service import ConfigService
|
|
||||||
from models.database import UserAccount
|
from models.database import UserAccount
|
||||||
|
from .service import AuditService
|
||||||
|
from .types import AuditAction
|
||||||
|
|
||||||
|
|
||||||
def _extract_request(bound_args: Mapping[str, Any]) -> Request | None:
|
def _extract_request(bound_args: Mapping[str, Any]) -> Request | None:
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ from typing import Any, Dict, Optional
|
|||||||
|
|
||||||
from models.database import AuditLog
|
from models.database import AuditLog
|
||||||
|
|
||||||
from domain.audit.types import AuditAction
|
from .types import AuditAction
|
||||||
|
|
||||||
|
|
||||||
class AuditService:
|
class AuditService:
|
||||||
|
|||||||
49
domain/auth/__init__.py
Normal file
49
domain/auth/__init__.py
Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
from .service import (
|
||||||
|
ALGORITHM,
|
||||||
|
AuthService,
|
||||||
|
authenticate_user_db,
|
||||||
|
create_access_token,
|
||||||
|
get_current_active_user,
|
||||||
|
get_current_user,
|
||||||
|
get_password_hash,
|
||||||
|
has_users,
|
||||||
|
register_user,
|
||||||
|
request_password_reset,
|
||||||
|
reset_password_with_token,
|
||||||
|
verify_password,
|
||||||
|
verify_password_reset_token,
|
||||||
|
)
|
||||||
|
from .types import (
|
||||||
|
PasswordResetConfirm,
|
||||||
|
PasswordResetRequest,
|
||||||
|
RegisterRequest,
|
||||||
|
Token,
|
||||||
|
TokenData,
|
||||||
|
UpdateMeRequest,
|
||||||
|
User,
|
||||||
|
UserInDB,
|
||||||
|
)
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"ALGORITHM",
|
||||||
|
"AuthService",
|
||||||
|
"authenticate_user_db",
|
||||||
|
"create_access_token",
|
||||||
|
"get_current_active_user",
|
||||||
|
"get_current_user",
|
||||||
|
"get_password_hash",
|
||||||
|
"has_users",
|
||||||
|
"register_user",
|
||||||
|
"request_password_reset",
|
||||||
|
"reset_password_with_token",
|
||||||
|
"verify_password",
|
||||||
|
"verify_password_reset_token",
|
||||||
|
"PasswordResetConfirm",
|
||||||
|
"PasswordResetRequest",
|
||||||
|
"RegisterRequest",
|
||||||
|
"Token",
|
||||||
|
"TokenData",
|
||||||
|
"UpdateMeRequest",
|
||||||
|
"User",
|
||||||
|
"UserInDB",
|
||||||
|
]
|
||||||
@@ -5,8 +5,8 @@ from fastapi.security import OAuth2PasswordRequestForm
|
|||||||
|
|
||||||
from api.response import success
|
from api.response import success
|
||||||
from domain.audit import AuditAction, audit
|
from domain.audit import AuditAction, audit
|
||||||
from domain.auth.service import AuthService, get_current_active_user
|
from .service import AuthService, get_current_active_user
|
||||||
from domain.auth.types import (
|
from .types import (
|
||||||
PasswordResetConfirm,
|
PasswordResetConfirm,
|
||||||
PasswordResetRequest,
|
PasswordResetRequest,
|
||||||
RegisterRequest,
|
RegisterRequest,
|
||||||
|
|||||||
@@ -11,7 +11,9 @@ from fastapi import Depends, HTTPException, status
|
|||||||
from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm
|
from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm
|
||||||
from jwt.exceptions import InvalidTokenError
|
from jwt.exceptions import InvalidTokenError
|
||||||
|
|
||||||
from domain.auth.types import (
|
from domain.config import ConfigService
|
||||||
|
from models.database import UserAccount
|
||||||
|
from .types import (
|
||||||
PasswordResetConfirm,
|
PasswordResetConfirm,
|
||||||
PasswordResetRequest,
|
PasswordResetRequest,
|
||||||
RegisterRequest,
|
RegisterRequest,
|
||||||
@@ -21,8 +23,6 @@ from domain.auth.types import (
|
|||||||
User,
|
User,
|
||||||
UserInDB,
|
UserInDB,
|
||||||
)
|
)
|
||||||
from models.database import UserAccount
|
|
||||||
from domain.config.service import ConfigService
|
|
||||||
|
|
||||||
ALGORITHM = "HS256"
|
ALGORITHM = "HS256"
|
||||||
ACCESS_TOKEN_EXPIRE_MINUTES = 60 * 24 * 365
|
ACCESS_TOKEN_EXPIRE_MINUTES = 60 * 24 * 365
|
||||||
@@ -324,7 +324,7 @@ class AuthService:
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
async def _send_password_reset_email(cls, user: UserAccount, token: str) -> None:
|
async def _send_password_reset_email(cls, user: UserAccount, token: str) -> None:
|
||||||
from domain.email.service import EmailService
|
from domain.email import EmailService
|
||||||
|
|
||||||
app_domain = await ConfigService.get("APP_DOMAIN", None)
|
app_domain = await ConfigService.get("APP_DOMAIN", None)
|
||||||
base_url = (app_domain or "http://localhost:5173").rstrip("/")
|
base_url = (app_domain or "http://localhost:5173").rstrip("/")
|
||||||
|
|||||||
@@ -1 +1,7 @@
|
|||||||
|
from .service import BackupService
|
||||||
|
from .types import BackupData
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"BackupService",
|
||||||
|
"BackupData",
|
||||||
|
]
|
||||||
|
|||||||
@@ -4,8 +4,8 @@ from fastapi import APIRouter, Depends, File, Request, UploadFile
|
|||||||
from fastapi.responses import JSONResponse
|
from fastapi.responses import JSONResponse
|
||||||
|
|
||||||
from domain.audit import AuditAction, audit
|
from domain.audit import AuditAction, audit
|
||||||
from domain.auth.service import get_current_active_user
|
from domain.auth import get_current_active_user
|
||||||
from domain.backup.service import BackupService
|
from .service import BackupService
|
||||||
|
|
||||||
router = APIRouter(
|
router = APIRouter(
|
||||||
prefix="/api/backup",
|
prefix="/api/backup",
|
||||||
|
|||||||
@@ -4,8 +4,8 @@ from datetime import datetime
|
|||||||
from fastapi import HTTPException
|
from fastapi import HTTPException
|
||||||
from tortoise.transactions import in_transaction
|
from tortoise.transactions import in_transaction
|
||||||
|
|
||||||
from domain.backup.types import BackupData
|
from domain.config import VERSION
|
||||||
from domain.config.service import VERSION
|
from .types import BackupData
|
||||||
from models.database import (
|
from models.database import (
|
||||||
AIDefaultModel,
|
AIDefaultModel,
|
||||||
AIModel,
|
AIModel,
|
||||||
|
|||||||
10
domain/config/__init__.py
Normal file
10
domain/config/__init__.py
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
from .service import ConfigService, VERSION
|
||||||
|
from .types import ConfigItem, LatestVersionInfo, SystemStatus
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"ConfigService",
|
||||||
|
"VERSION",
|
||||||
|
"ConfigItem",
|
||||||
|
"LatestVersionInfo",
|
||||||
|
"SystemStatus",
|
||||||
|
]
|
||||||
@@ -4,10 +4,9 @@ from fastapi import APIRouter, Depends, Form, Request
|
|||||||
|
|
||||||
from api.response import success
|
from api.response import success
|
||||||
from domain.audit import AuditAction, audit
|
from domain.audit import AuditAction, audit
|
||||||
from domain.auth.service import get_current_active_user
|
from domain.auth import User, get_current_active_user
|
||||||
from domain.auth.types import User
|
from .service import ConfigService
|
||||||
from domain.config.service import ConfigService
|
from .types import ConfigItem
|
||||||
from domain.config.types import ConfigItem
|
|
||||||
|
|
||||||
router = APIRouter(prefix="/api/config", tags=["config"])
|
router = APIRouter(prefix="/api/config", tags=["config"])
|
||||||
|
|
||||||
|
|||||||
@@ -5,12 +5,12 @@ from typing import Any, Dict, Optional
|
|||||||
import httpx
|
import httpx
|
||||||
from dotenv import load_dotenv
|
from dotenv import load_dotenv
|
||||||
|
|
||||||
from domain.config.types import LatestVersionInfo, SystemStatus
|
from .types import LatestVersionInfo, SystemStatus
|
||||||
from models.database import Configuration, UserAccount
|
from models.database import Configuration, UserAccount
|
||||||
|
|
||||||
load_dotenv(dotenv_path=".env")
|
load_dotenv(dotenv_path=".env")
|
||||||
|
|
||||||
VERSION = "v1.6.0"
|
VERSION = "v1.7.0"
|
||||||
|
|
||||||
|
|
||||||
class ConfigService:
|
class ConfigService:
|
||||||
|
|||||||
20
domain/email/__init__.py
Normal file
20
domain/email/__init__.py
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
from .service import EmailService, EmailTemplateRenderer
|
||||||
|
from .types import (
|
||||||
|
EmailConfig,
|
||||||
|
EmailSecurity,
|
||||||
|
EmailSendPayload,
|
||||||
|
EmailTemplatePreviewPayload,
|
||||||
|
EmailTemplateUpdate,
|
||||||
|
EmailTestRequest,
|
||||||
|
)
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"EmailService",
|
||||||
|
"EmailTemplateRenderer",
|
||||||
|
"EmailConfig",
|
||||||
|
"EmailSecurity",
|
||||||
|
"EmailSendPayload",
|
||||||
|
"EmailTemplatePreviewPayload",
|
||||||
|
"EmailTemplateUpdate",
|
||||||
|
"EmailTestRequest",
|
||||||
|
]
|
||||||
@@ -2,10 +2,9 @@ from fastapi import APIRouter, Depends, HTTPException, Request
|
|||||||
|
|
||||||
from api.response import success
|
from api.response import success
|
||||||
from domain.audit import AuditAction, audit
|
from domain.audit import AuditAction, audit
|
||||||
from domain.auth.service import get_current_active_user
|
from domain.auth import User, get_current_active_user
|
||||||
from domain.auth.types import User
|
from .service import EmailService, EmailTemplateRenderer
|
||||||
from domain.email.service import EmailService, EmailTemplateRenderer
|
from .types import (
|
||||||
from domain.email.types import (
|
|
||||||
EmailTemplatePreviewPayload,
|
EmailTemplatePreviewPayload,
|
||||||
EmailTemplateUpdate,
|
EmailTemplateUpdate,
|
||||||
EmailTestRequest,
|
EmailTestRequest,
|
||||||
|
|||||||
@@ -7,8 +7,8 @@ from pathlib import Path
|
|||||||
from string import Template
|
from string import Template
|
||||||
from typing import Any, Dict, List, Optional
|
from typing import Any, Dict, List, Optional
|
||||||
|
|
||||||
from domain.config.service import ConfigService
|
from domain.config import ConfigService
|
||||||
from domain.email.types import EmailConfig, EmailSecurity, EmailSendPayload
|
from .types import EmailConfig, EmailSecurity, EmailSendPayload
|
||||||
|
|
||||||
|
|
||||||
class EmailTemplateRenderer:
|
class EmailTemplateRenderer:
|
||||||
@@ -104,7 +104,7 @@ class EmailService:
|
|||||||
template: str,
|
template: str,
|
||||||
context: Optional[Dict[str, Any]] = None,
|
context: Optional[Dict[str, Any]] = None,
|
||||||
):
|
):
|
||||||
from domain.tasks.task_queue import TaskProgress, task_queue_service
|
from domain.tasks import TaskProgress, task_queue_service
|
||||||
|
|
||||||
payload = EmailSendPayload(
|
payload = EmailSendPayload(
|
||||||
recipients=recipients,
|
recipients=recipients,
|
||||||
@@ -126,7 +126,7 @@ class EmailService:
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
async def send_from_task(cls, task_id: str, data: Dict[str, Any]):
|
async def send_from_task(cls, task_id: str, data: Dict[str, Any]):
|
||||||
from domain.tasks.task_queue import TaskProgress, task_queue_service
|
from domain.tasks import TaskProgress, task_queue_service
|
||||||
|
|
||||||
payload = EmailSendPayload(**data)
|
payload = EmailSendPayload(**data)
|
||||||
|
|
||||||
|
|||||||
7
domain/offline_downloads/__init__.py
Normal file
7
domain/offline_downloads/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
from .service import OfflineDownloadService
|
||||||
|
from .types import OfflineDownloadCreate
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"OfflineDownloadService",
|
||||||
|
"OfflineDownloadCreate",
|
||||||
|
]
|
||||||
@@ -4,10 +4,9 @@ from fastapi import APIRouter, Depends, Request
|
|||||||
|
|
||||||
from api.response import success
|
from api.response import success
|
||||||
from domain.audit import AuditAction, audit
|
from domain.audit import AuditAction, audit
|
||||||
from domain.auth.service import get_current_active_user
|
from domain.auth import User, get_current_active_user
|
||||||
from domain.auth.types import User
|
from .service import OfflineDownloadService
|
||||||
from domain.offline_downloads.service import OfflineDownloadService
|
from .types import OfflineDownloadCreate
|
||||||
from domain.offline_downloads.types import OfflineDownloadCreate
|
|
||||||
|
|
||||||
CurrentUser = Annotated[User, Depends(get_current_active_user)]
|
CurrentUser = Annotated[User, Depends(get_current_active_user)]
|
||||||
|
|
||||||
|
|||||||
@@ -7,11 +7,10 @@ import aiofiles
|
|||||||
import aiohttp
|
import aiohttp
|
||||||
from fastapi import Depends, HTTPException
|
from fastapi import Depends, HTTPException
|
||||||
|
|
||||||
from domain.auth.service import get_current_active_user
|
from domain.auth import User, get_current_active_user
|
||||||
from domain.auth.types import User
|
from domain.tasks import Task, TaskProgress, task_queue_service
|
||||||
from domain.offline_downloads.types import OfflineDownloadCreate
|
from domain.virtual_fs import VirtualFSService
|
||||||
from domain.virtual_fs.service import VirtualFSService
|
from .types import OfflineDownloadCreate
|
||||||
from domain.tasks.task_queue import Task, TaskProgress, task_queue_service
|
|
||||||
|
|
||||||
|
|
||||||
class OfflineDownloadService:
|
class OfflineDownloadService:
|
||||||
|
|||||||
@@ -4,9 +4,9 @@ Foxel 插件系统
|
|||||||
提供 .foxpkg 插件包的安装、管理和运行时加载功能。
|
提供 .foxpkg 插件包的安装、管理和运行时加载功能。
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from domain.plugins.loader import PluginLoader, PluginLoadError
|
from .loader import PluginLoadError, PluginLoader
|
||||||
from domain.plugins.service import PluginService
|
from .service import PluginService
|
||||||
from domain.plugins.startup import init_plugins, load_installed_plugins
|
from .startup import init_plugins, load_installed_plugins
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
"PluginLoader",
|
"PluginLoader",
|
||||||
|
|||||||
@@ -8,8 +8,8 @@ from fastapi import APIRouter, File, Request, UploadFile
|
|||||||
from fastapi.responses import FileResponse
|
from fastapi.responses import FileResponse
|
||||||
|
|
||||||
from domain.audit import AuditAction, audit
|
from domain.audit import AuditAction, audit
|
||||||
from domain.plugins.service import PluginService
|
from .service import PluginService
|
||||||
from domain.plugins.types import (
|
from .types import (
|
||||||
PluginInstallResult,
|
PluginInstallResult,
|
||||||
PluginOut,
|
PluginOut,
|
||||||
)
|
)
|
||||||
@@ -67,10 +67,12 @@ async def delete_plugin(request: Request, key_or_id: str):
|
|||||||
async def get_bundle(request: Request, key_or_id: str):
|
async def get_bundle(request: Request, key_or_id: str):
|
||||||
"""获取插件前端 bundle"""
|
"""获取插件前端 bundle"""
|
||||||
path = await PluginService.get_bundle_path(key_or_id)
|
path = await PluginService.get_bundle_path(key_or_id)
|
||||||
|
v = (request.query_params.get("v") or "").strip()
|
||||||
|
cache_control = "public, max-age=31536000, immutable" if v else "no-cache"
|
||||||
return FileResponse(
|
return FileResponse(
|
||||||
path,
|
path,
|
||||||
media_type="application/javascript",
|
media_type="application/javascript",
|
||||||
headers={"Cache-Control": "no-store"},
|
headers={"Cache-Control": cache_control},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -20,7 +20,7 @@ from typing import Any, Dict, List, Optional, Tuple
|
|||||||
|
|
||||||
from fastapi import APIRouter
|
from fastapi import APIRouter
|
||||||
|
|
||||||
from domain.plugins.types import (
|
from .types import (
|
||||||
ManifestProcessorConfig,
|
ManifestProcessorConfig,
|
||||||
ManifestRouteConfig,
|
ManifestRouteConfig,
|
||||||
PluginManifest,
|
PluginManifest,
|
||||||
@@ -344,7 +344,7 @@ class PluginLoader:
|
|||||||
supported_exts = getattr(module, "SUPPORTED_EXTS", [])
|
supported_exts = getattr(module, "SUPPORTED_EXTS", [])
|
||||||
|
|
||||||
# 注册到处理器注册表
|
# 注册到处理器注册表
|
||||||
from domain.processors.registry import CONFIG_SCHEMAS, TYPE_MAP
|
from domain.processors import CONFIG_SCHEMAS, TYPE_MAP
|
||||||
|
|
||||||
processor_type = processor_config.type
|
processor_type = processor_config.type
|
||||||
TYPE_MAP[processor_type] = factory
|
TYPE_MAP[processor_type] = factory
|
||||||
@@ -401,7 +401,7 @@ class PluginLoader:
|
|||||||
"""
|
"""
|
||||||
# 卸载处理器
|
# 卸载处理器
|
||||||
if manifest and manifest.backend and manifest.backend.processors:
|
if manifest and manifest.backend and manifest.backend.processors:
|
||||||
from domain.processors.registry import CONFIG_SCHEMAS, TYPE_MAP
|
from domain.processors import CONFIG_SCHEMAS, TYPE_MAP
|
||||||
|
|
||||||
for proc_config in manifest.backend.processors:
|
for proc_config in manifest.backend.processors:
|
||||||
proc_type = proc_config.type
|
proc_type = proc_config.type
|
||||||
|
|||||||
@@ -12,8 +12,8 @@ from typing import List, Optional, Union
|
|||||||
|
|
||||||
from fastapi import HTTPException
|
from fastapi import HTTPException
|
||||||
|
|
||||||
from domain.plugins.loader import PluginLoadError, PluginLoader
|
from .loader import PluginLoadError, PluginLoader
|
||||||
from domain.plugins.types import (
|
from .types import (
|
||||||
PluginInstallResult,
|
PluginInstallResult,
|
||||||
PluginManifest,
|
PluginManifest,
|
||||||
PluginOut,
|
PluginOut,
|
||||||
|
|||||||
@@ -7,8 +7,8 @@
|
|||||||
import logging
|
import logging
|
||||||
from typing import TYPE_CHECKING, List, Tuple
|
from typing import TYPE_CHECKING, List, Tuple
|
||||||
|
|
||||||
from domain.plugins.loader import PluginLoadError, PluginLoader
|
from .loader import PluginLoadError, PluginLoader
|
||||||
from domain.plugins.types import PluginManifest
|
from .types import PluginManifest
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from fastapi import FastAPI
|
from fastapi import FastAPI
|
||||||
@@ -113,4 +113,3 @@ async def init_plugins(app: "FastAPI") -> None:
|
|||||||
logger.warning(f" - {error}")
|
logger.warning(f" - {error}")
|
||||||
else:
|
else:
|
||||||
logger.info(f"插件加载完成,共 {loaded_count} 个插件")
|
logger.info(f"插件加载完成,共 {loaded_count} 个插件")
|
||||||
|
|
||||||
|
|||||||
35
domain/processors/__init__.py
Normal file
35
domain/processors/__init__.py
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
from .base import BaseProcessor
|
||||||
|
from .registry import (
|
||||||
|
CONFIG_SCHEMAS,
|
||||||
|
TYPE_MAP,
|
||||||
|
get_config_schema,
|
||||||
|
get_config_schemas,
|
||||||
|
get_last_discovery_errors,
|
||||||
|
get_module_path,
|
||||||
|
reload_processors,
|
||||||
|
)
|
||||||
|
from .service import (
|
||||||
|
ProcessorService,
|
||||||
|
get_processor,
|
||||||
|
list_processors,
|
||||||
|
reload_processor_modules,
|
||||||
|
)
|
||||||
|
from .types import ProcessDirectoryRequest, ProcessRequest, UpdateSourceRequest
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"BaseProcessor",
|
||||||
|
"CONFIG_SCHEMAS",
|
||||||
|
"TYPE_MAP",
|
||||||
|
"get_config_schema",
|
||||||
|
"get_config_schemas",
|
||||||
|
"get_last_discovery_errors",
|
||||||
|
"get_module_path",
|
||||||
|
"reload_processors",
|
||||||
|
"ProcessorService",
|
||||||
|
"get_processor",
|
||||||
|
"list_processors",
|
||||||
|
"reload_processor_modules",
|
||||||
|
"ProcessDirectoryRequest",
|
||||||
|
"ProcessRequest",
|
||||||
|
"UpdateSourceRequest",
|
||||||
|
]
|
||||||
@@ -4,10 +4,9 @@ from fastapi import APIRouter, Body, Depends, Request
|
|||||||
|
|
||||||
from api.response import success
|
from api.response import success
|
||||||
from domain.audit import AuditAction, audit
|
from domain.audit import AuditAction, audit
|
||||||
from domain.auth.service import get_current_active_user
|
from domain.auth import User, get_current_active_user
|
||||||
from domain.auth.types import User
|
from .service import ProcessorService
|
||||||
from domain.processors.service import ProcessorService
|
from .types import (
|
||||||
from domain.processors.types import (
|
|
||||||
ProcessDirectoryRequest,
|
ProcessDirectoryRequest,
|
||||||
ProcessRequest,
|
ProcessRequest,
|
||||||
UpdateSourceRequest,
|
UpdateSourceRequest,
|
||||||
|
|||||||
@@ -8,12 +8,14 @@ from fastapi.responses import Response
|
|||||||
from PIL import Image
|
from PIL import Image
|
||||||
|
|
||||||
from ..base import BaseProcessor
|
from ..base import BaseProcessor
|
||||||
from domain.ai.inference import describe_image_base64, get_text_embedding, provider_service
|
from domain.ai import (
|
||||||
from domain.ai.service import (
|
|
||||||
VectorDBService,
|
|
||||||
DEFAULT_VECTOR_DIMENSION,
|
DEFAULT_VECTOR_DIMENSION,
|
||||||
VECTOR_COLLECTION_NAME,
|
|
||||||
FILE_COLLECTION_NAME,
|
FILE_COLLECTION_NAME,
|
||||||
|
VECTOR_COLLECTION_NAME,
|
||||||
|
VectorDBService,
|
||||||
|
describe_image_base64,
|
||||||
|
get_text_embedding,
|
||||||
|
provider_service,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ from pathlib import Path
|
|||||||
from types import ModuleType
|
from types import ModuleType
|
||||||
from typing import Callable, Dict, Optional
|
from typing import Callable, Dict, Optional
|
||||||
|
|
||||||
from domain.processors.base import BaseProcessor
|
from .base import BaseProcessor
|
||||||
|
|
||||||
ProcessorFactory = Callable[[], BaseProcessor]
|
ProcessorFactory = Callable[[], BaseProcessor]
|
||||||
TYPE_MAP: Dict[str, ProcessorFactory] = {}
|
TYPE_MAP: Dict[str, ProcessorFactory] = {}
|
||||||
@@ -16,7 +16,7 @@ LAST_DISCOVERY_ERRORS: list[str] = []
|
|||||||
|
|
||||||
def discover_processors(force_reload: bool = False) -> list[str]:
|
def discover_processors(force_reload: bool = False) -> list[str]:
|
||||||
"""扫描并缓存可用的处理器模块。"""
|
"""扫描并缓存可用的处理器模块。"""
|
||||||
from domain.processors import builtin as processors_pkg
|
from . import builtin as processors_pkg
|
||||||
|
|
||||||
TYPE_MAP.clear()
|
TYPE_MAP.clear()
|
||||||
CONFIG_SCHEMAS.clear()
|
CONFIG_SCHEMAS.clear()
|
||||||
|
|||||||
@@ -3,20 +3,20 @@ from typing import List, Tuple
|
|||||||
|
|
||||||
from fastapi import HTTPException
|
from fastapi import HTTPException
|
||||||
from fastapi.concurrency import run_in_threadpool
|
from fastapi.concurrency import run_in_threadpool
|
||||||
from domain.processors.registry import (
|
from domain.tasks import task_queue_service
|
||||||
|
from domain.virtual_fs import VirtualFSService
|
||||||
|
from .registry import (
|
||||||
get,
|
get,
|
||||||
get_config_schema,
|
get_config_schema,
|
||||||
get_config_schemas,
|
get_config_schemas,
|
||||||
get_module_path,
|
get_module_path,
|
||||||
reload_processors,
|
reload_processors,
|
||||||
)
|
)
|
||||||
from domain.processors.types import (
|
from .types import (
|
||||||
ProcessDirectoryRequest,
|
ProcessDirectoryRequest,
|
||||||
ProcessRequest,
|
ProcessRequest,
|
||||||
UpdateSourceRequest,
|
UpdateSourceRequest,
|
||||||
)
|
)
|
||||||
from domain.virtual_fs.service import VirtualFSService
|
|
||||||
from domain.tasks.task_queue import task_queue_service
|
|
||||||
|
|
||||||
|
|
||||||
class ProcessorService:
|
class ProcessorService:
|
||||||
|
|||||||
1
domain/repositories/__init__.py
Normal file
1
domain/repositories/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
__all__: list[str] = []
|
||||||
10
domain/share/__init__.py
Normal file
10
domain/share/__init__.py
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
from .service import ShareService
|
||||||
|
from .types import ShareCreate, ShareInfo, ShareInfoWithPassword, SharePassword
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"ShareService",
|
||||||
|
"ShareCreate",
|
||||||
|
"ShareInfo",
|
||||||
|
"ShareInfoWithPassword",
|
||||||
|
"SharePassword",
|
||||||
|
]
|
||||||
@@ -4,10 +4,9 @@ from fastapi import APIRouter, Depends, Request
|
|||||||
|
|
||||||
from api.response import success
|
from api.response import success
|
||||||
from domain.audit import AuditAction, audit
|
from domain.audit import AuditAction, audit
|
||||||
from domain.auth.service import get_current_active_user
|
from domain.auth import User, get_current_active_user
|
||||||
from domain.auth.types import User
|
from .service import ShareService
|
||||||
from domain.share.service import ShareService
|
from .types import (
|
||||||
from domain.share.types import (
|
|
||||||
ShareCreate,
|
ShareCreate,
|
||||||
ShareInfo,
|
ShareInfo,
|
||||||
ShareInfoWithPassword,
|
ShareInfoWithPassword,
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ import bcrypt
|
|||||||
from fastapi import HTTPException, status
|
from fastapi import HTTPException, status
|
||||||
from fastapi.responses import Response
|
from fastapi.responses import Response
|
||||||
|
|
||||||
from domain.virtual_fs.service import VirtualFSService
|
from domain.virtual_fs import VirtualFSService
|
||||||
from models.database import ShareLink, UserAccount
|
from models.database import ShareLink, UserAccount
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
24
domain/tasks/__init__.py
Normal file
24
domain/tasks/__init__.py
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
from .service import TaskService
|
||||||
|
from .task_queue import Task, TaskProgress, TaskStatus, task_queue_service
|
||||||
|
from .types import (
|
||||||
|
AutomationTaskBase,
|
||||||
|
AutomationTaskCreate,
|
||||||
|
AutomationTaskRead,
|
||||||
|
AutomationTaskUpdate,
|
||||||
|
TaskQueueSettings,
|
||||||
|
TaskQueueSettingsResponse,
|
||||||
|
)
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"TaskService",
|
||||||
|
"Task",
|
||||||
|
"TaskProgress",
|
||||||
|
"TaskStatus",
|
||||||
|
"task_queue_service",
|
||||||
|
"AutomationTaskBase",
|
||||||
|
"AutomationTaskCreate",
|
||||||
|
"AutomationTaskRead",
|
||||||
|
"AutomationTaskUpdate",
|
||||||
|
"TaskQueueSettings",
|
||||||
|
"TaskQueueSettingsResponse",
|
||||||
|
]
|
||||||
@@ -2,9 +2,9 @@ from fastapi import APIRouter, Depends, Request
|
|||||||
|
|
||||||
from api.response import success
|
from api.response import success
|
||||||
from domain.audit import AuditAction, audit
|
from domain.audit import AuditAction, audit
|
||||||
from domain.auth.service import get_current_active_user
|
from domain.auth import get_current_active_user
|
||||||
from domain.tasks.service import TaskService
|
from .service import TaskService
|
||||||
from domain.tasks.types import (
|
from .types import (
|
||||||
AutomationTaskCreate,
|
AutomationTaskCreate,
|
||||||
AutomationTaskUpdate,
|
AutomationTaskUpdate,
|
||||||
TaskQueueSettings,
|
TaskQueueSettings,
|
||||||
|
|||||||
@@ -3,17 +3,16 @@ from typing import Annotated, Any, Dict, Optional
|
|||||||
|
|
||||||
from fastapi import Depends, HTTPException
|
from fastapi import Depends, HTTPException
|
||||||
|
|
||||||
from domain.auth.service import get_current_active_user
|
from domain.auth import User, get_current_active_user
|
||||||
from domain.auth.types import User
|
from domain.config import ConfigService
|
||||||
from domain.config.service import ConfigService
|
from .task_queue import task_queue_service
|
||||||
from domain.tasks.types import (
|
from .types import (
|
||||||
AutomationTaskCreate,
|
AutomationTaskCreate,
|
||||||
AutomationTaskUpdate,
|
AutomationTaskUpdate,
|
||||||
TaskQueueSettings,
|
TaskQueueSettings,
|
||||||
TaskQueueSettingsResponse,
|
TaskQueueSettingsResponse,
|
||||||
)
|
)
|
||||||
from models.database import AutomationTask
|
from models.database import AutomationTask
|
||||||
from domain.tasks.task_queue import task_queue_service
|
|
||||||
|
|
||||||
|
|
||||||
class TaskService:
|
class TaskService:
|
||||||
|
|||||||
@@ -74,7 +74,7 @@ class TaskQueueService:
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
# Local import to avoid circular dependency during module load.
|
# Local import to avoid circular dependency during module load.
|
||||||
from domain.virtual_fs.service import VirtualFSService
|
from domain.virtual_fs import VirtualFSService
|
||||||
|
|
||||||
if task.name == "process_file":
|
if task.name == "process_file":
|
||||||
params = task.task_info
|
params = task.task_info
|
||||||
@@ -88,7 +88,7 @@ class TaskQueueService:
|
|||||||
task.result = result
|
task.result = result
|
||||||
elif task.name == "automation_task" or self._is_processor_task(task.name):
|
elif task.name == "automation_task" or self._is_processor_task(task.name):
|
||||||
from models.database import AutomationTask
|
from models.database import AutomationTask
|
||||||
from domain.processors.service import get_processor
|
from domain.processors import get_processor
|
||||||
|
|
||||||
params = task.task_info
|
params = task.task_info
|
||||||
auto_task = await AutomationTask.get(id=params["task_id"])
|
auto_task = await AutomationTask.get(id=params["task_id"])
|
||||||
@@ -116,7 +116,7 @@ class TaskQueueService:
|
|||||||
await VirtualFSService.write_file(save_to, result)
|
await VirtualFSService.write_file(save_to, result)
|
||||||
task.result = "Automation task completed"
|
task.result = "Automation task completed"
|
||||||
elif task.name == "offline_http_download":
|
elif task.name == "offline_http_download":
|
||||||
from domain.offline_downloads.service import OfflineDownloadService
|
from domain.offline_downloads import OfflineDownloadService
|
||||||
|
|
||||||
result_path = await OfflineDownloadService.run_http_download(task)
|
result_path = await OfflineDownloadService.run_http_download(task)
|
||||||
task.result = {"path": result_path}
|
task.result = {"path": result_path}
|
||||||
@@ -124,7 +124,7 @@ class TaskQueueService:
|
|||||||
result = await VirtualFSService.run_cross_mount_transfer_task(task)
|
result = await VirtualFSService.run_cross_mount_transfer_task(task)
|
||||||
task.result = result
|
task.result = result
|
||||||
elif task.name == "send_email":
|
elif task.name == "send_email":
|
||||||
from domain.email.service import EmailService
|
from domain.email import EmailService
|
||||||
await EmailService.send_from_task(task.id, task.task_info)
|
await EmailService.send_from_task(task.id, task.task_info)
|
||||||
task.result = "Email sent"
|
task.result = "Email sent"
|
||||||
else:
|
else:
|
||||||
@@ -141,7 +141,7 @@ class TaskQueueService:
|
|||||||
|
|
||||||
def _is_processor_task(self, task_name: str) -> bool:
|
def _is_processor_task(self, task_name: str) -> bool:
|
||||||
try:
|
try:
|
||||||
from domain.processors.service import get_processor
|
from domain.processors import get_processor
|
||||||
|
|
||||||
return get_processor(task_name) is not None
|
return get_processor(task_name) is not None
|
||||||
except Exception:
|
except Exception:
|
||||||
@@ -180,7 +180,7 @@ class TaskQueueService:
|
|||||||
|
|
||||||
async def start_worker(self, concurrency: int | None = None):
|
async def start_worker(self, concurrency: int | None = None):
|
||||||
if concurrency is None:
|
if concurrency is None:
|
||||||
from domain.config.service import ConfigService
|
from domain.config import ConfigService
|
||||||
|
|
||||||
stored_value = await ConfigService.get("TASK_QUEUE_CONCURRENCY", self._concurrency)
|
stored_value = await ConfigService.get("TASK_QUEUE_CONCURRENCY", self._concurrency)
|
||||||
try:
|
try:
|
||||||
|
|||||||
11
domain/virtual_fs/__init__.py
Normal file
11
domain/virtual_fs/__init__.py
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
from .service import VirtualFSService
|
||||||
|
from .types import DirListing, MkdirRequest, MoveRequest, SearchResultItem, VfsEntry
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"VirtualFSService",
|
||||||
|
"DirListing",
|
||||||
|
"MkdirRequest",
|
||||||
|
"MoveRequest",
|
||||||
|
"SearchResultItem",
|
||||||
|
"VfsEntry",
|
||||||
|
]
|
||||||
@@ -4,10 +4,9 @@ from fastapi import APIRouter, Depends, File, Query, Request, UploadFile
|
|||||||
|
|
||||||
from api.response import success
|
from api.response import success
|
||||||
from domain.audit import AuditAction, audit
|
from domain.audit import AuditAction, audit
|
||||||
from domain.auth.service import get_current_active_user
|
from domain.auth import User, get_current_active_user
|
||||||
from domain.auth.types import User
|
from .service import VirtualFSService
|
||||||
from domain.virtual_fs.service import VirtualFSService
|
from .types import MkdirRequest, MoveRequest
|
||||||
from domain.virtual_fs.types import MkdirRequest, MoveRequest
|
|
||||||
|
|
||||||
router = APIRouter(prefix="/api/fs", tags=["virtual-fs"])
|
router = APIRouter(prefix="/api/fs", tags=["virtual-fs"])
|
||||||
|
|
||||||
|
|||||||
@@ -4,8 +4,8 @@ from typing import Any, AsyncIterator, Union
|
|||||||
from fastapi import HTTPException
|
from fastapi import HTTPException
|
||||||
from fastapi.responses import Response
|
from fastapi.responses import Response
|
||||||
|
|
||||||
from domain.tasks.service import TaskService
|
from domain.tasks import TaskService
|
||||||
from domain.virtual_fs.thumbnail import is_raw_filename, raw_bytes_to_jpeg
|
from .thumbnail import is_raw_filename, raw_bytes_to_jpeg
|
||||||
|
|
||||||
from .listing import VirtualFSListingMixin
|
from .listing import VirtualFSListingMixin
|
||||||
|
|
||||||
|
|||||||
@@ -3,9 +3,9 @@ from typing import Any, Dict, List, Tuple
|
|||||||
from fastapi import HTTPException
|
from fastapi import HTTPException
|
||||||
|
|
||||||
from api.response import page
|
from api.response import page
|
||||||
from domain.adapters.registry import runtime_registry
|
from domain.adapters import runtime_registry
|
||||||
from domain.ai.service import VectorDBService, VECTOR_COLLECTION_NAME, FILE_COLLECTION_NAME
|
from domain.ai import FILE_COLLECTION_NAME, VECTOR_COLLECTION_NAME, VectorDBService
|
||||||
from domain.virtual_fs.thumbnail import is_image_filename, is_video_filename
|
from .thumbnail import is_image_filename, is_video_filename
|
||||||
from models import StorageAdapter
|
from models import StorageAdapter
|
||||||
|
|
||||||
from .resolver import VirtualFSResolverMixin
|
from .resolver import VirtualFSResolverMixin
|
||||||
|
|||||||
@@ -0,0 +1 @@
|
|||||||
|
__all__: list[str] = []
|
||||||
|
|||||||
@@ -15,8 +15,8 @@ from fastapi import APIRouter, Request, Response
|
|||||||
from fastapi import HTTPException
|
from fastapi import HTTPException
|
||||||
|
|
||||||
from domain.audit import AuditAction, audit
|
from domain.audit import AuditAction, audit
|
||||||
from domain.config.service import ConfigService
|
from domain.config import ConfigService
|
||||||
from domain.virtual_fs.service import VirtualFSService
|
from domain.virtual_fs import VirtualFSService
|
||||||
|
|
||||||
|
|
||||||
router = APIRouter(prefix="/s3", tags=["s3"])
|
router = APIRouter(prefix="/s3", tags=["s3"])
|
||||||
|
|||||||
@@ -9,10 +9,9 @@ from fastapi import APIRouter, Request, Response, HTTPException, Depends
|
|||||||
import xml.etree.ElementTree as ET
|
import xml.etree.ElementTree as ET
|
||||||
|
|
||||||
from domain.audit import AuditAction, audit
|
from domain.audit import AuditAction, audit
|
||||||
from domain.auth.service import AuthService
|
from domain.auth import AuthService, User, UserInDB
|
||||||
from domain.auth.types import User, UserInDB
|
from domain.config import ConfigService
|
||||||
from domain.virtual_fs.service import VirtualFSService
|
from domain.virtual_fs import VirtualFSService
|
||||||
from domain.config.service import ConfigService
|
|
||||||
|
|
||||||
|
|
||||||
_WEBDAV_ENABLED_KEY = "WEBDAV_MAPPING_ENABLED"
|
_WEBDAV_ENABLED_KEY = "WEBDAV_MAPPING_ENABLED"
|
||||||
@@ -172,12 +171,32 @@ async def propfind(
|
|||||||
ctype = None if is_dir else (mimetypes.guess_type(name)[0] or "application/octet-stream")
|
ctype = None if is_dir else (mimetypes.guess_type(name)[0] or "application/octet-stream")
|
||||||
responses.append(_build_prop_response(full_path, name, is_dir, size, mtime, ctype))
|
responses.append(_build_prop_response(full_path, name, is_dir, size, mtime, ctype))
|
||||||
except FileNotFoundError:
|
except FileNotFoundError:
|
||||||
raise HTTPException(404, detail="Not found")
|
st = None
|
||||||
|
except HTTPException as e:
|
||||||
|
if e.status_code != 404:
|
||||||
|
raise
|
||||||
|
st = None
|
||||||
|
|
||||||
|
if st is None:
|
||||||
|
is_mount_root = False
|
||||||
|
try:
|
||||||
|
_, rel = await VirtualFSService.resolve_adapter_by_path(full_path)
|
||||||
|
is_mount_root = rel == ""
|
||||||
|
except HTTPException:
|
||||||
|
is_mount_root = False
|
||||||
|
|
||||||
|
if not is_mount_root and full_path != "/":
|
||||||
|
listing_probe = await VirtualFSService.list_virtual_dir(full_path, page_num=1, page_size=1)
|
||||||
|
if not (listing_probe.get("items") or []):
|
||||||
|
raise HTTPException(404, detail="Not found")
|
||||||
|
|
||||||
|
name = "/" if full_path == "/" else (full_path.rstrip("/").rsplit("/", 1)[-1] or "/")
|
||||||
|
responses.append(_build_prop_response(full_path, name, True, None, 0, None))
|
||||||
|
|
||||||
if depth in ("1", "infinity"):
|
if depth in ("1", "infinity"):
|
||||||
try:
|
try:
|
||||||
listing = await VirtualFSService.list_virtual_dir(full_path, page_num=1, page_size=1000)
|
listing = await VirtualFSService.list_virtual_dir(full_path, page_num=1, page_size=1000)
|
||||||
for ent in listing["items"]:
|
for ent in (listing.get("items") or []):
|
||||||
is_dir = bool(ent.get("is_dir"))
|
is_dir = bool(ent.get("is_dir"))
|
||||||
name = ent.get("name")
|
name = ent.get("name")
|
||||||
child_path = full_path.rstrip("/") + "/" + name
|
child_path = full_path.rstrip("/") + "/" + name
|
||||||
|
|||||||
@@ -16,7 +16,7 @@ class VirtualFSProcessingMixin(VirtualFSTransferMixin):
|
|||||||
save_to: str | None = None,
|
save_to: str | None = None,
|
||||||
overwrite: bool = False,
|
overwrite: bool = False,
|
||||||
) -> Any:
|
) -> Any:
|
||||||
from domain.processors.service import get_processor
|
from domain.processors import get_processor
|
||||||
|
|
||||||
processor = get_processor(processor_type)
|
processor = get_processor(processor_type)
|
||||||
if not processor:
|
if not processor:
|
||||||
|
|||||||
@@ -3,7 +3,7 @@ from typing import Tuple
|
|||||||
from fastapi import HTTPException
|
from fastapi import HTTPException
|
||||||
from fastapi.responses import Response
|
from fastapi.responses import Response
|
||||||
|
|
||||||
from domain.adapters.registry import runtime_registry
|
from domain.adapters import runtime_registry
|
||||||
from models import StorageAdapter
|
from models import StorageAdapter
|
||||||
|
|
||||||
from .common import VirtualFSCommonMixin
|
from .common import VirtualFSCommonMixin
|
||||||
|
|||||||
@@ -4,8 +4,8 @@ import re
|
|||||||
from fastapi import HTTPException, UploadFile
|
from fastapi import HTTPException, UploadFile
|
||||||
from fastapi.responses import Response
|
from fastapi.responses import Response
|
||||||
|
|
||||||
from domain.config.service import ConfigService
|
from domain.config import ConfigService
|
||||||
from domain.virtual_fs.thumbnail import (
|
from .thumbnail import (
|
||||||
get_or_create_thumb,
|
get_or_create_thumb,
|
||||||
is_image_filename,
|
is_image_filename,
|
||||||
is_raw_filename,
|
is_raw_filename,
|
||||||
|
|||||||
@@ -0,0 +1,3 @@
|
|||||||
|
from .search_service import VirtualFSSearchService
|
||||||
|
|
||||||
|
__all__ = ["VirtualFSSearchService"]
|
||||||
|
|||||||
@@ -1,9 +1,8 @@
|
|||||||
from fastapi import APIRouter, Depends, Query
|
from fastapi import APIRouter, Depends, Query
|
||||||
|
|
||||||
from api.response import success
|
from api.response import success
|
||||||
from domain.auth.service import get_current_active_user
|
from domain.auth import User, get_current_active_user
|
||||||
from domain.auth.types import User
|
from .search_service import VirtualFSSearchService
|
||||||
from domain.virtual_fs.search.search_service import VirtualFSSearchService
|
|
||||||
|
|
||||||
router = APIRouter(prefix="/api/fs/search", tags=["search"])
|
router = APIRouter(prefix="/api/fs/search", tags=["search"])
|
||||||
|
|
||||||
|
|||||||
@@ -1,8 +1,7 @@
|
|||||||
from typing import Any, Dict, List, Tuple
|
from typing import Any, Dict, List, Tuple
|
||||||
|
|
||||||
from domain.virtual_fs.types import SearchResultItem
|
from domain.ai import FILE_COLLECTION_NAME, VECTOR_COLLECTION_NAME, VectorDBService, get_text_embedding
|
||||||
from domain.ai.inference import get_text_embedding
|
from ..types import SearchResultItem
|
||||||
from domain.ai.service import VectorDBService, VECTOR_COLLECTION_NAME, FILE_COLLECTION_NAME
|
|
||||||
|
|
||||||
|
|
||||||
def _normalize_result(raw: Dict[str, Any], source: str, fallback_score: float = 0.0) -> SearchResultItem:
|
def _normalize_result(raw: Dict[str, Any], source: str, fallback_score: float = 0.0) -> SearchResultItem:
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ import time
|
|||||||
|
|
||||||
from fastapi import HTTPException
|
from fastapi import HTTPException
|
||||||
|
|
||||||
from domain.config.service import ConfigService
|
from domain.config import ConfigService
|
||||||
|
|
||||||
from .processing import VirtualFSProcessingMixin
|
from .processing import VirtualFSProcessingMixin
|
||||||
|
|
||||||
|
|||||||
@@ -273,7 +273,7 @@ class VirtualFSTransferMixin(VirtualFSFileOpsMixin):
|
|||||||
"overwrite": overwrite,
|
"overwrite": overwrite,
|
||||||
}
|
}
|
||||||
|
|
||||||
from domain.tasks.task_queue import task_queue_service
|
from domain.tasks import task_queue_service
|
||||||
|
|
||||||
task = await task_queue_service.add_task("cross_mount_transfer", payload)
|
task = await task_queue_service.add_task("cross_mount_transfer", payload)
|
||||||
return {
|
return {
|
||||||
@@ -286,7 +286,7 @@ class VirtualFSTransferMixin(VirtualFSFileOpsMixin):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
async def run_cross_mount_transfer_task(cls, task: "Task") -> Dict[str, Any]:
|
async def run_cross_mount_transfer_task(cls, task: "Task") -> Dict[str, Any]:
|
||||||
from domain.tasks.task_queue import task_queue_service
|
from domain.tasks import task_queue_service
|
||||||
|
|
||||||
params = task.task_info or {}
|
params = task.task_info or {}
|
||||||
operation = params.get("operation")
|
operation = params.get("operation")
|
||||||
|
|||||||
51
main.py
51
main.py
@@ -2,15 +2,16 @@ import os
|
|||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from contextlib import asynccontextmanager
|
from contextlib import asynccontextmanager
|
||||||
|
|
||||||
from domain.config.service import ConfigService, VERSION
|
from domain.adapters import runtime_registry
|
||||||
from domain.adapters.registry import runtime_registry
|
from domain.config import ConfigService, VERSION
|
||||||
from db.session import close_db, init_db
|
from db.session import close_db, init_db
|
||||||
from api.routers import include_routers
|
from api.routers import include_routers
|
||||||
from fastapi.middleware.cors import CORSMiddleware
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
from fastapi.staticfiles import StaticFiles
|
from fastapi.staticfiles import StaticFiles
|
||||||
from fastapi.responses import FileResponse
|
from fastapi.responses import FileResponse
|
||||||
from fastapi import FastAPI, HTTPException, Request
|
from fastapi import FastAPI, HTTPException
|
||||||
from fastapi.exceptions import RequestValidationError
|
from fastapi.exceptions import RequestValidationError
|
||||||
|
from starlette.exceptions import HTTPException as StarletteHTTPException
|
||||||
from middleware.exception_handler import (
|
from middleware.exception_handler import (
|
||||||
global_exception_handler,
|
global_exception_handler,
|
||||||
http_exception_handler,
|
http_exception_handler,
|
||||||
@@ -19,34 +20,45 @@ from middleware.exception_handler import (
|
|||||||
)
|
)
|
||||||
import httpx
|
import httpx
|
||||||
from dotenv import load_dotenv
|
from dotenv import load_dotenv
|
||||||
from domain.tasks.task_queue import task_queue_service
|
from domain.tasks import task_queue_service
|
||||||
|
|
||||||
load_dotenv()
|
load_dotenv()
|
||||||
|
|
||||||
|
|
||||||
class SPAStaticFiles(StaticFiles):
|
class SPAStaticFiles(StaticFiles):
|
||||||
async def get_response(self, path, scope):
|
async def get_response(self, path, scope):
|
||||||
response = await super().get_response(path, scope)
|
try:
|
||||||
if response.status_code == 404:
|
response = await super().get_response(path, scope)
|
||||||
return await super().get_response("index.html", scope)
|
except StarletteHTTPException as exc:
|
||||||
|
if exc.status_code != 404:
|
||||||
|
raise
|
||||||
|
if self._should_spa_fallback(scope):
|
||||||
|
return FileResponse(INDEX_FILE)
|
||||||
|
raise
|
||||||
|
|
||||||
|
if response.status_code == 404 and self._should_spa_fallback(scope):
|
||||||
|
return FileResponse(INDEX_FILE)
|
||||||
return response
|
return response
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _should_spa_fallback(scope) -> bool:
|
||||||
|
return (
|
||||||
|
scope.get("method") == "GET"
|
||||||
|
and _request_accepts_html(scope)
|
||||||
|
and not (scope.get("path") or "").startswith(SPA_EXCLUDE_PREFIXES)
|
||||||
|
and INDEX_FILE.exists()
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
INDEX_FILE = Path("web/dist/index.html")
|
INDEX_FILE = Path("web/dist/index.html")
|
||||||
SPA_EXCLUDE_PREFIXES = ("/api", "/docs", "/openapi.json", "/webdav", "/s3")
|
SPA_EXCLUDE_PREFIXES = ("/api", "/docs", "/openapi.json", "/webdav", "/s3")
|
||||||
|
|
||||||
|
|
||||||
async def spa_fallback_middleware(request: Request, call_next):
|
def _request_accepts_html(scope) -> bool:
|
||||||
response = await call_next(request)
|
for k, v in scope.get("headers") or []:
|
||||||
if (
|
if k == b"accept":
|
||||||
response.status_code == 404
|
return "text/html" in v.decode("latin-1")
|
||||||
and request.method == "GET"
|
return False
|
||||||
and "text/html" in request.headers.get("accept", "")
|
|
||||||
and not request.url.path.startswith(SPA_EXCLUDE_PREFIXES)
|
|
||||||
and INDEX_FILE.exists()
|
|
||||||
):
|
|
||||||
return FileResponse(INDEX_FILE)
|
|
||||||
return response
|
|
||||||
|
|
||||||
|
|
||||||
@asynccontextmanager
|
@asynccontextmanager
|
||||||
@@ -59,7 +71,7 @@ async def lifespan(app: FastAPI):
|
|||||||
await task_queue_service.start_worker()
|
await task_queue_service.start_worker()
|
||||||
|
|
||||||
# 加载已安装的插件
|
# 加载已安装的插件
|
||||||
from domain.plugins.startup import init_plugins
|
from domain.plugins import init_plugins
|
||||||
await init_plugins(app)
|
await init_plugins(app)
|
||||||
|
|
||||||
# 在所有路由加载完成后,挂载静态文件服务(放在最后以避免覆盖 API 路由)
|
# 在所有路由加载完成后,挂载静态文件服务(放在最后以避免覆盖 API 路由)
|
||||||
@@ -78,7 +90,6 @@ def create_app() -> FastAPI:
|
|||||||
description="A highly extensible private cloud storage solution for individuals and teams",
|
description="A highly extensible private cloud storage solution for individuals and teams",
|
||||||
lifespan=lifespan,
|
lifespan=lifespan,
|
||||||
)
|
)
|
||||||
app.middleware("http")(spa_fallback_middleware)
|
|
||||||
include_routers(app)
|
include_routers(app)
|
||||||
app.add_exception_handler(HTTPException, http_exception_handler)
|
app.add_exception_handler(HTTPException, http_exception_handler)
|
||||||
app.add_exception_handler(RequestValidationError, validation_exception_handler)
|
app.add_exception_handler(RequestValidationError, validation_exception_handler)
|
||||||
|
|||||||
@@ -13,8 +13,8 @@ PROJECT_ROOT = Path(__file__).resolve().parents[1]
|
|||||||
if str(PROJECT_ROOT) not in sys.path:
|
if str(PROJECT_ROOT) not in sys.path:
|
||||||
sys.path.insert(0, str(PROJECT_ROOT))
|
sys.path.insert(0, str(PROJECT_ROOT))
|
||||||
|
|
||||||
from domain.auth.service import get_password_hash
|
from domain.config import VERSION
|
||||||
from domain.config.service import VERSION
|
from domain.auth import get_password_hash
|
||||||
|
|
||||||
|
|
||||||
def _project_root() -> Path:
|
def _project_root() -> Path:
|
||||||
|
|||||||
121
web/src/api/agent.ts
Normal file
121
web/src/api/agent.ts
Normal file
@@ -0,0 +1,121 @@
|
|||||||
|
import request, { API_BASE_URL } from './client';
|
||||||
|
|
||||||
|
export type AgentChatMessage = Record<string, any>;
|
||||||
|
|
||||||
|
export interface AgentChatContext {
|
||||||
|
current_path?: string | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AgentChatRequest {
|
||||||
|
messages: AgentChatMessage[];
|
||||||
|
auto_execute?: boolean;
|
||||||
|
approved_tool_call_ids?: string[];
|
||||||
|
rejected_tool_call_ids?: string[];
|
||||||
|
context?: AgentChatContext;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface PendingToolCall {
|
||||||
|
id: string;
|
||||||
|
name: string;
|
||||||
|
arguments: Record<string, any>;
|
||||||
|
requires_confirmation: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AgentChatResponse {
|
||||||
|
messages: AgentChatMessage[];
|
||||||
|
pending_tool_calls?: PendingToolCall[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export type AgentSseEvent =
|
||||||
|
| { event: 'assistant_start'; data: { id: string } }
|
||||||
|
| { event: 'assistant_delta'; data: { id: string; delta: string } }
|
||||||
|
| { event: 'assistant_end'; data: { id: string; message: AgentChatMessage } }
|
||||||
|
| { event: 'tool_start'; data: { tool_call_id: string; name: string } }
|
||||||
|
| { event: 'tool_end'; data: { tool_call_id: string; name: string; message: AgentChatMessage } }
|
||||||
|
| { event: 'pending'; data: { pending_tool_calls: PendingToolCall[] } }
|
||||||
|
| { event: 'done'; data: AgentChatResponse };
|
||||||
|
|
||||||
|
export const agentApi = {
|
||||||
|
chat: (payload: AgentChatRequest) =>
|
||||||
|
request<AgentChatResponse>('/agent/chat', {
|
||||||
|
method: 'POST',
|
||||||
|
json: payload,
|
||||||
|
}),
|
||||||
|
chatStream: async (
|
||||||
|
payload: AgentChatRequest,
|
||||||
|
onEvent: (evt: AgentSseEvent) => void,
|
||||||
|
options?: { signal?: AbortSignal }
|
||||||
|
) => {
|
||||||
|
const headers: Record<string, string> = {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
'Accept': 'text/event-stream',
|
||||||
|
};
|
||||||
|
const token = localStorage.getItem('token');
|
||||||
|
if (token) headers['Authorization'] = `Bearer ${token}`;
|
||||||
|
|
||||||
|
const resp = await fetch(`${API_BASE_URL}/agent/chat/stream`, {
|
||||||
|
method: 'POST',
|
||||||
|
headers,
|
||||||
|
body: JSON.stringify(payload),
|
||||||
|
signal: options?.signal,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!resp.ok) {
|
||||||
|
let errMsg = resp.statusText;
|
||||||
|
try {
|
||||||
|
const data = await resp.json();
|
||||||
|
if (Array.isArray((data as any)?.detail)) {
|
||||||
|
errMsg = (data as any).detail.map((e: any) => e.msg || JSON.stringify(e)).join('; ');
|
||||||
|
} else {
|
||||||
|
errMsg = (typeof (data as any)?.detail === 'string') ? (data as any).detail : JSON.stringify(data);
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
try {
|
||||||
|
errMsg = await resp.text();
|
||||||
|
} catch { void 0; }
|
||||||
|
}
|
||||||
|
throw new Error(errMsg || `Request failed: ${resp.status}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const reader = resp.body?.getReader();
|
||||||
|
if (!reader) throw new Error('Stream not supported');
|
||||||
|
|
||||||
|
const decoder = new TextDecoder();
|
||||||
|
let buffer = '';
|
||||||
|
|
||||||
|
const flush = (raw: string) => {
|
||||||
|
const lines = raw.split(/\r?\n/);
|
||||||
|
let eventName = 'message';
|
||||||
|
const dataLines: string[] = [];
|
||||||
|
for (const line of lines) {
|
||||||
|
if (line.startsWith('event:')) {
|
||||||
|
eventName = line.slice(6).trim();
|
||||||
|
} else if (line.startsWith('data:')) {
|
||||||
|
dataLines.push(line.slice(5).trimStart());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const dataStr = dataLines.join('\n').trim();
|
||||||
|
if (!eventName || !dataStr) return;
|
||||||
|
try {
|
||||||
|
const data = JSON.parse(dataStr);
|
||||||
|
onEvent({ event: eventName as any, data } as any);
|
||||||
|
} catch {
|
||||||
|
// ignore parse error
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
while (true) {
|
||||||
|
const { value, done } = await reader.read();
|
||||||
|
if (done) break;
|
||||||
|
buffer += decoder.decode(value, { stream: true });
|
||||||
|
while (true) {
|
||||||
|
const idx = buffer.indexOf('\n\n');
|
||||||
|
if (idx === -1) break;
|
||||||
|
const chunk = buffer.slice(0, idx);
|
||||||
|
buffer = buffer.slice(idx + 2);
|
||||||
|
if (chunk.trim()) flush(chunk);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (buffer.trim()) flush(buffer);
|
||||||
|
},
|
||||||
|
};
|
||||||
@@ -13,8 +13,9 @@ export interface AIProviderPayload {
|
|||||||
extra_config?: Record<string, unknown> | null;
|
extra_config?: Record<string, unknown> | null;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface AIProvider extends Omit<AIProviderPayload, 'extra_config'> {
|
export interface AIProvider extends Omit<AIProviderPayload, 'extra_config' | 'api_key'> {
|
||||||
id: number;
|
id: number;
|
||||||
|
has_api_key: boolean;
|
||||||
extra_config: Record<string, unknown>;
|
extra_config: Record<string, unknown>;
|
||||||
created_at: string;
|
created_at: string;
|
||||||
updated_at: string;
|
updated_at: string;
|
||||||
|
|||||||
@@ -7,10 +7,22 @@ export interface PluginAppHostProps extends AppComponentProps {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function buildPluginFrameUrl(params: Record<string, string>): string {
|
function buildPluginFrameUrl(params: Record<string, string>): string {
|
||||||
const qs = new URLSearchParams(params);
|
const qs = new URLSearchParams();
|
||||||
|
Object.entries(params).forEach(([k, v]) => {
|
||||||
|
if (typeof v !== 'string') return;
|
||||||
|
const value = v.trim();
|
||||||
|
if (!value) return;
|
||||||
|
qs.set(k, value);
|
||||||
|
});
|
||||||
return `/plugin-frame.html?${qs.toString()}`;
|
return `/plugin-frame.html?${qs.toString()}`;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function getPluginStylePaths(plugin: PluginItem): string[] {
|
||||||
|
const styles = (plugin.manifest as any)?.frontend?.styles as unknown;
|
||||||
|
if (!Array.isArray(styles)) return [];
|
||||||
|
return styles.filter((s) => typeof s === 'string' && s.trim().length > 0);
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* 插件宿主组件 - 文件打开模式
|
* 插件宿主组件 - 文件打开模式
|
||||||
* 使用 iframe 隔离渲染与样式,避免插件污染宿主 DOM/CSS。
|
* 使用 iframe 隔离渲染与样式,避免插件污染宿主 DOM/CSS。
|
||||||
@@ -19,6 +31,7 @@ function buildPluginFrameUrl(params: Record<string, string>): string {
|
|||||||
export const PluginAppHost: React.FC<PluginAppHostProps> = ({
|
export const PluginAppHost: React.FC<PluginAppHostProps> = ({
|
||||||
plugin,
|
plugin,
|
||||||
filePath,
|
filePath,
|
||||||
|
entry,
|
||||||
onRequestClose,
|
onRequestClose,
|
||||||
}) => {
|
}) => {
|
||||||
const iframeRef = useRef<HTMLIFrameElement>(null);
|
const iframeRef = useRef<HTMLIFrameElement>(null);
|
||||||
@@ -29,10 +42,13 @@ export const PluginAppHost: React.FC<PluginAppHostProps> = ({
|
|||||||
() =>
|
() =>
|
||||||
buildPluginFrameUrl({
|
buildPluginFrameUrl({
|
||||||
pluginKey: plugin.key,
|
pluginKey: plugin.key,
|
||||||
|
pluginVersion: plugin.version || '',
|
||||||
|
pluginStyles: JSON.stringify(getPluginStylePaths(plugin)),
|
||||||
mode: 'file',
|
mode: 'file',
|
||||||
filePath,
|
filePath,
|
||||||
|
entry: JSON.stringify(entry),
|
||||||
}),
|
}),
|
||||||
[plugin.key, filePath]
|
[plugin, filePath, entry]
|
||||||
);
|
);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
@@ -78,9 +94,11 @@ export const PluginAppOpenHost: React.FC<PluginAppOpenHostProps> = ({ plugin, on
|
|||||||
() =>
|
() =>
|
||||||
buildPluginFrameUrl({
|
buildPluginFrameUrl({
|
||||||
pluginKey: plugin.key,
|
pluginKey: plugin.key,
|
||||||
|
pluginVersion: plugin.version || '',
|
||||||
|
pluginStyles: JSON.stringify(getPluginStylePaths(plugin)),
|
||||||
mode: 'app',
|
mode: 'app',
|
||||||
}),
|
}),
|
||||||
[plugin.key]
|
[plugin]
|
||||||
);
|
);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
|
|||||||
932
web/src/components/AiAgentWidget.tsx
Normal file
932
web/src/components/AiAgentWidget.tsx
Normal file
@@ -0,0 +1,932 @@
|
|||||||
|
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react';
|
||||||
|
import { Avatar, Button, Divider, Drawer, Flex, Input, List, Space, Switch, Tag, Typography, message, theme } from 'antd';
|
||||||
|
import { RobotOutlined, SendOutlined, FolderOpenOutlined, DeleteOutlined, ToolOutlined, DownOutlined, UpOutlined, CodeOutlined, CopyOutlined, LoadingOutlined } from '@ant-design/icons';
|
||||||
|
import ReactMarkdown from 'react-markdown';
|
||||||
|
import PathSelectorModal from './PathSelectorModal';
|
||||||
|
import { agentApi, type AgentChatMessage, type PendingToolCall } from '../api/agent';
|
||||||
|
import { useI18n } from '../i18n';
|
||||||
|
import '../styles/ai-agent.css';
|
||||||
|
|
||||||
|
const { Text, Paragraph } = Typography;
|
||||||
|
|
||||||
|
function normalizePath(p?: string | null): string | null {
|
||||||
|
if (!p) return null;
|
||||||
|
const s = ('/' + p).replace(/\/+/, '/').replace(/\\/g, '/').replace(/\/+$/, '') || '/';
|
||||||
|
return s;
|
||||||
|
}
|
||||||
|
|
||||||
|
function extractTextContent(content: any): string {
|
||||||
|
if (content == null) return '';
|
||||||
|
if (typeof content === 'string') return content;
|
||||||
|
if (Array.isArray(content)) {
|
||||||
|
const parts: string[] = [];
|
||||||
|
for (const item of content) {
|
||||||
|
if (typeof item === 'string') {
|
||||||
|
if (item.trim()) parts.push(item);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const text = typeof item?.text === 'string' ? item.text : '';
|
||||||
|
if (text.trim()) parts.push(text);
|
||||||
|
}
|
||||||
|
return parts.join('\n');
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
return JSON.stringify(content, null, 2);
|
||||||
|
} catch {
|
||||||
|
return String(content);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function tryParseJson<T = any>(raw: string): T | null {
|
||||||
|
if (typeof raw !== 'string') return null;
|
||||||
|
const s = raw.trim();
|
||||||
|
if (!s) return null;
|
||||||
|
try {
|
||||||
|
return JSON.parse(s) as T;
|
||||||
|
} catch {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function shortId(id: string, keep: number = 6): string {
|
||||||
|
const s = String(id || '');
|
||||||
|
if (s.length <= keep * 2 + 3) return s;
|
||||||
|
return `${s.slice(0, keep)}…${s.slice(-keep)}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface AiAgentWidgetProps {
|
||||||
|
currentPath?: string | null;
|
||||||
|
open: boolean;
|
||||||
|
onOpenChange(open: boolean): void;
|
||||||
|
}
|
||||||
|
|
||||||
|
const AiAgentWidget = memo(function AiAgentWidget({ currentPath, open, onOpenChange }: AiAgentWidgetProps) {
|
||||||
|
const { t } = useI18n();
|
||||||
|
const { token } = theme.useToken();
|
||||||
|
const [autoExecute, setAutoExecute] = useState(false);
|
||||||
|
const [input, setInput] = useState('');
|
||||||
|
const [loading, setLoading] = useState(false);
|
||||||
|
const [messages, setMessages] = useState<AgentChatMessage[]>([]);
|
||||||
|
const [pending, setPending] = useState<PendingToolCall[]>([]);
|
||||||
|
const [pathModalOpen, setPathModalOpen] = useState(false);
|
||||||
|
const [expandedTools, setExpandedTools] = useState<Record<string, boolean>>({});
|
||||||
|
const [expandedRaw, setExpandedRaw] = useState<Record<string, boolean>>({});
|
||||||
|
const [runningTools, setRunningTools] = useState<Record<string, string>>({});
|
||||||
|
const scrollRef = useRef<HTMLDivElement | null>(null);
|
||||||
|
const streamControllerRef = useRef<AbortController | null>(null);
|
||||||
|
const streamSeqRef = useRef(0);
|
||||||
|
const baseMessagesRef = useRef<AgentChatMessage[]>([]);
|
||||||
|
const assistantIndexRef = useRef<Record<string, number>>({});
|
||||||
|
const toolNameByIdRef = useRef<Record<string, string>>({});
|
||||||
|
|
||||||
|
const effectivePath = useMemo(() => normalizePath(currentPath), [currentPath]);
|
||||||
|
|
||||||
|
const scrollToBottom = useCallback(() => {
|
||||||
|
const el = scrollRef.current;
|
||||||
|
if (!el) return;
|
||||||
|
el.scrollTop = el.scrollHeight;
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (!open) return;
|
||||||
|
const t = window.setTimeout(scrollToBottom, 0);
|
||||||
|
return () => window.clearTimeout(t);
|
||||||
|
}, [messages, open, pending, scrollToBottom]);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
return () => {
|
||||||
|
streamControllerRef.current?.abort();
|
||||||
|
};
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
const toolCallsById = useMemo(() => {
|
||||||
|
const map = new Map<string, { name: string; args: Record<string, any> }>();
|
||||||
|
for (const msg of messages) {
|
||||||
|
if (!msg || typeof msg !== 'object') continue;
|
||||||
|
if (msg.role !== 'assistant') continue;
|
||||||
|
const toolCalls = (msg as any).tool_calls;
|
||||||
|
if (!Array.isArray(toolCalls)) continue;
|
||||||
|
for (const call of toolCalls) {
|
||||||
|
const id = typeof call?.id === 'string' ? call.id : '';
|
||||||
|
const fn = call?.function;
|
||||||
|
const name = typeof fn?.name === 'string' ? fn.name : '';
|
||||||
|
const rawArgs = typeof fn?.arguments === 'string' ? fn.arguments : '';
|
||||||
|
if (!id || !name) continue;
|
||||||
|
const parsedArgs = tryParseJson<Record<string, any>>(rawArgs) || {};
|
||||||
|
map.set(id, { name, args: parsedArgs });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return map;
|
||||||
|
}, [messages]);
|
||||||
|
|
||||||
|
const runStream = useCallback(async (payload: Partial<Parameters<typeof agentApi.chat>[0]> & { messages: AgentChatMessage[] }) => {
|
||||||
|
streamControllerRef.current?.abort();
|
||||||
|
const controller = new AbortController();
|
||||||
|
streamControllerRef.current = controller;
|
||||||
|
streamSeqRef.current += 1;
|
||||||
|
const seq = streamSeqRef.current;
|
||||||
|
|
||||||
|
baseMessagesRef.current = payload.messages;
|
||||||
|
assistantIndexRef.current = {};
|
||||||
|
|
||||||
|
setLoading(true);
|
||||||
|
const approvedIds = payload.approved_tool_call_ids || [];
|
||||||
|
if (Array.isArray(approvedIds) && approvedIds.length > 0) {
|
||||||
|
const preRunning: Record<string, string> = {};
|
||||||
|
approvedIds.forEach((id) => {
|
||||||
|
if (typeof id === 'string' && id.trim()) preRunning[id] = '';
|
||||||
|
});
|
||||||
|
setRunningTools(preRunning);
|
||||||
|
} else {
|
||||||
|
setRunningTools({});
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
await agentApi.chatStream(
|
||||||
|
{
|
||||||
|
messages: payload.messages,
|
||||||
|
auto_execute: autoExecute,
|
||||||
|
context: effectivePath ? { current_path: effectivePath } : undefined,
|
||||||
|
approved_tool_call_ids: payload.approved_tool_call_ids,
|
||||||
|
rejected_tool_call_ids: payload.rejected_tool_call_ids,
|
||||||
|
},
|
||||||
|
(evt) => {
|
||||||
|
if (seq !== streamSeqRef.current) return;
|
||||||
|
switch (evt.event) {
|
||||||
|
case 'assistant_start': {
|
||||||
|
const id = String((evt.data as any)?.id || '');
|
||||||
|
if (!id) return;
|
||||||
|
setMessages((prev) => {
|
||||||
|
const idx = prev.length;
|
||||||
|
assistantIndexRef.current[id] = idx;
|
||||||
|
return [...prev, { role: 'assistant', content: '' }];
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
case 'assistant_delta': {
|
||||||
|
const id = String((evt.data as any)?.id || '');
|
||||||
|
const delta = String((evt.data as any)?.delta || '');
|
||||||
|
if (!id || !delta) return;
|
||||||
|
setMessages((prev) => {
|
||||||
|
const idx = assistantIndexRef.current[id];
|
||||||
|
if (idx === undefined || idx < 0 || idx >= prev.length) return prev;
|
||||||
|
const cur = prev[idx] as any;
|
||||||
|
const curContent = typeof cur?.content === 'string' ? cur.content : extractTextContent(cur?.content);
|
||||||
|
const next = prev.slice();
|
||||||
|
next[idx] = { ...cur, content: (curContent || '') + delta };
|
||||||
|
return next;
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
case 'assistant_end': {
|
||||||
|
const id = String((evt.data as any)?.id || '');
|
||||||
|
const msg = (evt.data as any)?.message;
|
||||||
|
if (!id || !msg || typeof msg !== 'object') return;
|
||||||
|
setMessages((prev) => {
|
||||||
|
const idx = assistantIndexRef.current[id];
|
||||||
|
if (idx === undefined || idx < 0 || idx >= prev.length) return prev;
|
||||||
|
const next = prev.slice();
|
||||||
|
next[idx] = msg;
|
||||||
|
return next;
|
||||||
|
});
|
||||||
|
delete assistantIndexRef.current[id];
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
case 'tool_start': {
|
||||||
|
const toolCallId = String((evt.data as any)?.tool_call_id || '');
|
||||||
|
const name = String((evt.data as any)?.name || '');
|
||||||
|
if (!toolCallId) return;
|
||||||
|
if (name) toolNameByIdRef.current[toolCallId] = name;
|
||||||
|
setRunningTools((prev) => ({ ...prev, [toolCallId]: name || prev[toolCallId] || '' }));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
case 'tool_end': {
|
||||||
|
const toolCallId = String((evt.data as any)?.tool_call_id || '');
|
||||||
|
const name = String((evt.data as any)?.name || '');
|
||||||
|
const msg = (evt.data as any)?.message;
|
||||||
|
if (toolCallId && name) toolNameByIdRef.current[toolCallId] = name;
|
||||||
|
if (toolCallId) {
|
||||||
|
setRunningTools((prev) => {
|
||||||
|
const next = { ...prev };
|
||||||
|
delete next[toolCallId];
|
||||||
|
return next;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
if (msg && typeof msg === 'object') {
|
||||||
|
setMessages((prev) => [...prev, msg]);
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
case 'pending': {
|
||||||
|
const items = Array.isArray((evt.data as any)?.pending_tool_calls) ? (evt.data as any).pending_tool_calls : [];
|
||||||
|
setPending(items);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
case 'done': {
|
||||||
|
const base = baseMessagesRef.current || [];
|
||||||
|
const newMessages = Array.isArray((evt.data as any)?.messages) ? (evt.data as any).messages : [];
|
||||||
|
const nextPending = Array.isArray((evt.data as any)?.pending_tool_calls) ? (evt.data as any).pending_tool_calls : [];
|
||||||
|
setMessages([...base, ...newMessages]);
|
||||||
|
setPending(nextPending);
|
||||||
|
setRunningTools({});
|
||||||
|
assistantIndexRef.current = {};
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{ signal: controller.signal }
|
||||||
|
);
|
||||||
|
} catch (err: any) {
|
||||||
|
if (controller.signal.aborted) return;
|
||||||
|
message.error(err?.message || t('Operation failed'));
|
||||||
|
} finally {
|
||||||
|
if (seq === streamSeqRef.current) {
|
||||||
|
setLoading(false);
|
||||||
|
if (controller.signal.aborted) {
|
||||||
|
setRunningTools({});
|
||||||
|
assistantIndexRef.current = {};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}, [autoExecute, effectivePath, t]);
|
||||||
|
|
||||||
|
const handleSend = useCallback(async () => {
|
||||||
|
const text = input.trim();
|
||||||
|
if (!text) return;
|
||||||
|
if (pending.length > 0) {
|
||||||
|
message.warning(t('Please confirm pending actions first'));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const nextUserMsg: AgentChatMessage = { role: 'user', content: text };
|
||||||
|
setInput('');
|
||||||
|
const base = [...messages, nextUserMsg];
|
||||||
|
setMessages(base);
|
||||||
|
setPending([]);
|
||||||
|
await runStream({ messages: base });
|
||||||
|
}, [input, messages, pending.length, runStream, t]);
|
||||||
|
|
||||||
|
const clearChat = useCallback(() => {
|
||||||
|
streamControllerRef.current?.abort();
|
||||||
|
setMessages([]);
|
||||||
|
setPending([]);
|
||||||
|
setExpandedTools({});
|
||||||
|
setExpandedRaw({});
|
||||||
|
setRunningTools({});
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
const approveOne = useCallback(async (id: string) => {
|
||||||
|
await runStream({ messages, approved_tool_call_ids: [id] });
|
||||||
|
}, [messages, runStream]);
|
||||||
|
|
||||||
|
const rejectOne = useCallback(async (id: string) => {
|
||||||
|
await runStream({ messages, rejected_tool_call_ids: [id] });
|
||||||
|
}, [messages, runStream]);
|
||||||
|
|
||||||
|
const approveAll = useCallback(async () => {
|
||||||
|
const ids = pending.map((p) => p.id).filter(Boolean);
|
||||||
|
if (ids.length === 0) return;
|
||||||
|
await runStream({ messages, approved_tool_call_ids: ids });
|
||||||
|
}, [messages, pending, runStream]);
|
||||||
|
|
||||||
|
const rejectAll = useCallback(async () => {
|
||||||
|
const ids = pending.map((p) => p.id).filter(Boolean);
|
||||||
|
if (ids.length === 0) return;
|
||||||
|
await runStream({ messages, rejected_tool_call_ids: ids });
|
||||||
|
}, [messages, pending, runStream]);
|
||||||
|
|
||||||
|
const handlePathSelected = useCallback((path: string) => {
|
||||||
|
const p = normalizePath(path) || '/';
|
||||||
|
setInput((prev) => (prev.trim() ? `${prev.trim()} ${p}` : p));
|
||||||
|
setPathModalOpen(false);
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
const messageItems = useMemo(() => {
|
||||||
|
return messages.filter((m) => {
|
||||||
|
if (!m || typeof m !== 'object') return false;
|
||||||
|
const role = typeof (m as any).role === 'string' ? String((m as any).role) : '';
|
||||||
|
if (!role || role === 'system') return false;
|
||||||
|
if (role === 'assistant') {
|
||||||
|
const text = extractTextContent((m as any).content);
|
||||||
|
return !!text.trim();
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
});
|
||||||
|
}, [messages]);
|
||||||
|
|
||||||
|
const runningToolEntries = useMemo(() => Object.entries(runningTools).filter(([id]) => !!id), [runningTools]);
|
||||||
|
const runningToolCount = runningToolEntries.length;
|
||||||
|
|
||||||
|
const copyToClipboard = useCallback(async (raw: string) => {
|
||||||
|
try {
|
||||||
|
await navigator.clipboard.writeText(raw);
|
||||||
|
message.success(t('Copied'));
|
||||||
|
} catch (err: any) {
|
||||||
|
message.error(err?.message || t('Operation failed'));
|
||||||
|
}
|
||||||
|
}, [t]);
|
||||||
|
|
||||||
|
const renderToolResultSummary = useCallback((toolName: string, rawContent: string, toolArgs?: Record<string, any> | null) => {
|
||||||
|
const data = tryParseJson<Record<string, any>>(rawContent);
|
||||||
|
if (!data) return '';
|
||||||
|
|
||||||
|
if (data.canceled) return t('Canceled');
|
||||||
|
if (data.error) return `${t('Error')}: ${String(data.error)}`;
|
||||||
|
|
||||||
|
if (toolName === 'processors_list') {
|
||||||
|
const processors = Array.isArray(data.processors) ? data.processors : [];
|
||||||
|
return `${t('Processors')}: ${processors.length}`;
|
||||||
|
}
|
||||||
|
if (toolName === 'processors_run') {
|
||||||
|
const ctx = (() => {
|
||||||
|
const processorType = typeof toolArgs?.processor_type === 'string' ? toolArgs.processor_type.trim() : '';
|
||||||
|
const path = typeof toolArgs?.path === 'string' ? toolArgs.path.trim() : '';
|
||||||
|
const parts = [processorType, path].filter(Boolean);
|
||||||
|
return parts.length ? parts.join(' · ') : '';
|
||||||
|
})();
|
||||||
|
if (typeof data.task_id === 'string') {
|
||||||
|
return ctx ? `${t('Task submitted')}: ${ctx} · ${shortId(data.task_id)}` : `${t('Task submitted')}: ${shortId(data.task_id)}`;
|
||||||
|
}
|
||||||
|
const taskIds = Array.isArray(data.task_ids) ? data.task_ids : [];
|
||||||
|
const scheduled = typeof data.scheduled === 'number' ? data.scheduled : taskIds.length;
|
||||||
|
if (scheduled) return ctx ? `${t('Tasks submitted')}: ${ctx} · ${scheduled}` : `${t('Tasks submitted')}: ${scheduled}`;
|
||||||
|
return t('Task submitted');
|
||||||
|
}
|
||||||
|
if (toolName === 'vfs_list_dir') {
|
||||||
|
const path = typeof data.path === 'string' ? data.path : '';
|
||||||
|
const entries = Array.isArray(data.entries) ? data.entries : [];
|
||||||
|
const names = entries
|
||||||
|
.map((it: any) => String(it?.name || '').trim())
|
||||||
|
.filter(Boolean)
|
||||||
|
.slice(0, 3);
|
||||||
|
const head = `${t('Directory')}: ${path || '/'}`;
|
||||||
|
const tail = `${entries.length} ${t('items')}`;
|
||||||
|
const sample = names.length ? ` · ${names.join(', ')}` : '';
|
||||||
|
return `${head} · ${tail}${sample}`;
|
||||||
|
}
|
||||||
|
if (toolName === 'vfs_search') {
|
||||||
|
const query = typeof data.query === 'string' ? data.query : '';
|
||||||
|
const items = Array.isArray(data.items) ? data.items : [];
|
||||||
|
return `${t('Search')}: ${query || '-'} · ${items.length} ${t('results')}`;
|
||||||
|
}
|
||||||
|
if (toolName === 'vfs_stat') {
|
||||||
|
const isDir = Boolean(data.is_dir);
|
||||||
|
const path = typeof data.path === 'string' ? data.path : '';
|
||||||
|
return `${t('Info')}: ${path || '-'} · ${isDir ? t('Folder') : t('File')}`;
|
||||||
|
}
|
||||||
|
if (toolName === 'vfs_read_text') {
|
||||||
|
const path = typeof data.path === 'string' ? data.path : '';
|
||||||
|
const length = typeof data.length === 'number' ? data.length : undefined;
|
||||||
|
const truncated = Boolean(data.truncated);
|
||||||
|
const tail = length != null ? ` · ${length} ${t('chars')}${truncated ? `(${t('Truncated')})` : ''}` : '';
|
||||||
|
return `${t('Read')}: ${path || '-'}${tail}`;
|
||||||
|
}
|
||||||
|
if (toolName === 'vfs_write_text') {
|
||||||
|
const path = typeof data.path === 'string' ? data.path : '';
|
||||||
|
const bytes = typeof data.bytes === 'number' ? data.bytes : undefined;
|
||||||
|
return `${t('Write')}: ${path || '-'}${bytes != null ? ` · ${bytes} bytes` : ''}`;
|
||||||
|
}
|
||||||
|
if (toolName === 'vfs_mkdir') {
|
||||||
|
const path = typeof data.path === 'string' ? data.path : '';
|
||||||
|
return `${t('Created')}: ${path || '-'}`;
|
||||||
|
}
|
||||||
|
if (toolName === 'vfs_delete') {
|
||||||
|
const path = typeof data.path === 'string' ? data.path : '';
|
||||||
|
return `${t('Deleted')}: ${path || '-'}`;
|
||||||
|
}
|
||||||
|
if (toolName === 'vfs_move') {
|
||||||
|
const src = typeof data.src === 'string' ? data.src : '';
|
||||||
|
const dst = typeof data.dst === 'string' ? data.dst : '';
|
||||||
|
return `${t('Moved')}: ${src || '-'} → ${dst || '-'}`;
|
||||||
|
}
|
||||||
|
if (toolName === 'vfs_copy') {
|
||||||
|
const src = typeof data.src === 'string' ? data.src : '';
|
||||||
|
const dst = typeof data.dst === 'string' ? data.dst : '';
|
||||||
|
return `${t('Copied')}: ${src || '-'} → ${dst || '-'}`;
|
||||||
|
}
|
||||||
|
if (toolName === 'vfs_rename') {
|
||||||
|
const src = typeof data.src === 'string' ? data.src : '';
|
||||||
|
const dst = typeof data.dst === 'string' ? data.dst : '';
|
||||||
|
return `${t('Renamed')}: ${src || '-'} → ${dst || '-'}`;
|
||||||
|
}
|
||||||
|
return '';
|
||||||
|
}, [t]);
|
||||||
|
|
||||||
|
const renderToolDetails = useCallback((toolKey: string, toolName: string, rawContent: string) => {
|
||||||
|
const data = tryParseJson<Record<string, any>>(rawContent);
|
||||||
|
const showRaw = !!expandedRaw[toolKey];
|
||||||
|
const toggleRaw = () => setExpandedRaw((prev) => ({ ...prev, [toolKey]: !prev[toolKey] }));
|
||||||
|
|
||||||
|
const rawJson = (() => {
|
||||||
|
if (!rawContent?.trim()) return '';
|
||||||
|
const parsed = tryParseJson<any>(rawContent);
|
||||||
|
if (!parsed) return rawContent;
|
||||||
|
try {
|
||||||
|
return JSON.stringify(parsed, null, 2);
|
||||||
|
} catch {
|
||||||
|
return rawContent;
|
||||||
|
}
|
||||||
|
})();
|
||||||
|
|
||||||
|
const header = (
|
||||||
|
<Space size={10} wrap>
|
||||||
|
<Button
|
||||||
|
type="text"
|
||||||
|
size="small"
|
||||||
|
icon={<CodeOutlined />}
|
||||||
|
onClick={(e) => { e.stopPropagation(); toggleRaw(); }}
|
||||||
|
>
|
||||||
|
{t('Raw JSON')}
|
||||||
|
</Button>
|
||||||
|
{showRaw && (
|
||||||
|
<Button
|
||||||
|
type="text"
|
||||||
|
size="small"
|
||||||
|
icon={<CopyOutlined />}
|
||||||
|
onClick={(e) => { e.stopPropagation(); void copyToClipboard(rawJson); }}
|
||||||
|
>
|
||||||
|
{t('Copy')}
|
||||||
|
</Button>
|
||||||
|
)}
|
||||||
|
</Space>
|
||||||
|
);
|
||||||
|
|
||||||
|
if (toolName === 'processors_list') {
|
||||||
|
const processors = Array.isArray(data?.processors) ? data!.processors : [];
|
||||||
|
return (
|
||||||
|
<div className="fx-agent-tool-details">
|
||||||
|
{header}
|
||||||
|
<Divider style={{ margin: '10px 0' }} />
|
||||||
|
<List
|
||||||
|
size="small"
|
||||||
|
dataSource={processors}
|
||||||
|
locale={{ emptyText: t('No results') }}
|
||||||
|
renderItem={(item: any) => (
|
||||||
|
<List.Item>
|
||||||
|
<Space size={10} wrap>
|
||||||
|
<Text code style={{ fontVariantNumeric: 'tabular-nums' }}>{String(item?.type || '')}</Text>
|
||||||
|
<Text>{String(item?.name || '')}</Text>
|
||||||
|
</Space>
|
||||||
|
</List.Item>
|
||||||
|
)}
|
||||||
|
style={{ background: 'transparent' }}
|
||||||
|
/>
|
||||||
|
{showRaw && (
|
||||||
|
<>
|
||||||
|
<Divider style={{ margin: '10px 0' }} />
|
||||||
|
<pre className="fx-agent-pre">{rawJson}</pre>
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (toolName === 'vfs_list_dir') {
|
||||||
|
const path = typeof data?.path === 'string' ? data!.path : '/';
|
||||||
|
const entries = Array.isArray(data?.entries) ? data!.entries : [];
|
||||||
|
const pagination = data?.pagination && typeof data.pagination === 'object' ? data.pagination : null;
|
||||||
|
return (
|
||||||
|
<div className="fx-agent-tool-details">
|
||||||
|
{header}
|
||||||
|
<Divider style={{ margin: '10px 0' }} />
|
||||||
|
<Space direction="vertical" size={6} style={{ width: '100%' }}>
|
||||||
|
<Text type="secondary" style={{ fontSize: 12 }}>{t('Directory')}: {path}</Text>
|
||||||
|
{pagination?.total != null ? (
|
||||||
|
<Text type="secondary" style={{ fontSize: 12 }}>
|
||||||
|
{t('Total')}: {String(pagination.total)}
|
||||||
|
</Text>
|
||||||
|
) : null}
|
||||||
|
</Space>
|
||||||
|
<Divider style={{ margin: '10px 0' }} />
|
||||||
|
<List
|
||||||
|
size="small"
|
||||||
|
dataSource={entries}
|
||||||
|
locale={{ emptyText: t('No results') }}
|
||||||
|
renderItem={(item: any) => {
|
||||||
|
const name = String(item?.name || '');
|
||||||
|
const type = String(item?.type || (item?.is_dir ? 'dir' : 'file'));
|
||||||
|
return (
|
||||||
|
<List.Item>
|
||||||
|
<Space size={10} wrap style={{ width: '100%', justifyContent: 'space-between' }}>
|
||||||
|
<Space size={10} wrap>
|
||||||
|
<Text code style={{ fontVariantNumeric: 'tabular-nums' }}>{type}</Text>
|
||||||
|
<Text>{name}</Text>
|
||||||
|
</Space>
|
||||||
|
{!item?.is_dir && typeof item?.size === 'number' ? (
|
||||||
|
<Text type="secondary" style={{ fontSize: 12 }}>{item.size} bytes</Text>
|
||||||
|
) : null}
|
||||||
|
</Space>
|
||||||
|
</List.Item>
|
||||||
|
);
|
||||||
|
}}
|
||||||
|
style={{ background: 'transparent' }}
|
||||||
|
/>
|
||||||
|
{showRaw && (
|
||||||
|
<>
|
||||||
|
<Divider style={{ margin: '10px 0' }} />
|
||||||
|
<pre className="fx-agent-pre">{rawJson}</pre>
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (toolName === 'vfs_search') {
|
||||||
|
const query = typeof data?.query === 'string' ? data!.query : '';
|
||||||
|
const mode = typeof data?.mode === 'string' ? data!.mode : '';
|
||||||
|
const items = Array.isArray(data?.items) ? data!.items : [];
|
||||||
|
const pagination = data?.pagination && typeof data.pagination === 'object' ? data.pagination : null;
|
||||||
|
return (
|
||||||
|
<div className="fx-agent-tool-details">
|
||||||
|
{header}
|
||||||
|
<Divider style={{ margin: '10px 0' }} />
|
||||||
|
<Space direction="vertical" size={6} style={{ width: '100%' }}>
|
||||||
|
<Text type="secondary" style={{ fontSize: 12 }}>{t('Search')}: {query || '-'}</Text>
|
||||||
|
<Text type="secondary" style={{ fontSize: 12 }}>{t('Mode')}: {mode || '-'}</Text>
|
||||||
|
{pagination?.has_more != null ? (
|
||||||
|
<Text type="secondary" style={{ fontSize: 12 }}>
|
||||||
|
{t('Page')}: {String(pagination.page)} · {t('Has more')}: {String(Boolean(pagination.has_more))}
|
||||||
|
</Text>
|
||||||
|
) : null}
|
||||||
|
</Space>
|
||||||
|
<Divider style={{ margin: '10px 0' }} />
|
||||||
|
<List
|
||||||
|
size="small"
|
||||||
|
dataSource={items}
|
||||||
|
locale={{ emptyText: t('No results') }}
|
||||||
|
renderItem={(item: any) => {
|
||||||
|
const type = String(item?.source_type || item?.mime || '');
|
||||||
|
const path = String(item?.path || '');
|
||||||
|
const score = item?.score != null ? Number(item.score) : null;
|
||||||
|
return (
|
||||||
|
<List.Item>
|
||||||
|
<Space size={10} wrap style={{ width: '100%', justifyContent: 'space-between' }}>
|
||||||
|
<Space size={10} wrap>
|
||||||
|
{type ? <Text code style={{ fontVariantNumeric: 'tabular-nums' }}>{type}</Text> : null}
|
||||||
|
<Text>{path}</Text>
|
||||||
|
</Space>
|
||||||
|
{score != null && !Number.isNaN(score) ? (
|
||||||
|
<Text type="secondary" style={{ fontSize: 12 }}>{score.toFixed(3)}</Text>
|
||||||
|
) : null}
|
||||||
|
</Space>
|
||||||
|
</List.Item>
|
||||||
|
);
|
||||||
|
}}
|
||||||
|
style={{ background: 'transparent' }}
|
||||||
|
/>
|
||||||
|
{showRaw && (
|
||||||
|
<>
|
||||||
|
<Divider style={{ margin: '10px 0' }} />
|
||||||
|
<pre className="fx-agent-pre">{rawJson}</pre>
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (toolName === 'vfs_read_text') {
|
||||||
|
const path = typeof data?.path === 'string' ? data!.path : '';
|
||||||
|
const content = typeof data?.content === 'string' ? data!.content : '';
|
||||||
|
return (
|
||||||
|
<div className="fx-agent-tool-details">
|
||||||
|
{header}
|
||||||
|
<Divider style={{ margin: '10px 0' }} />
|
||||||
|
<Text type="secondary" style={{ fontSize: 12 }}>{t('File')}: {path || '-'}</Text>
|
||||||
|
<pre className="fx-agent-pre" style={{ marginTop: 10 }}>{content || ''}</pre>
|
||||||
|
{showRaw && (
|
||||||
|
<>
|
||||||
|
<Divider style={{ margin: '10px 0' }} />
|
||||||
|
<pre className="fx-agent-pre">{rawJson}</pre>
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="fx-agent-tool-details">
|
||||||
|
{header}
|
||||||
|
<Divider style={{ margin: '10px 0' }} />
|
||||||
|
{showRaw ? (
|
||||||
|
<pre className="fx-agent-pre">{rawJson}</pre>
|
||||||
|
) : (
|
||||||
|
<Paragraph style={{ marginBottom: 0, whiteSpace: 'pre-wrap' }}>
|
||||||
|
{extractTextContent(data ?? rawContent) || <Text type="secondary">{t('No content')}</Text>}
|
||||||
|
</Paragraph>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}, [copyToClipboard, expandedRaw, t]);
|
||||||
|
|
||||||
|
const renderToolArgsSummary = useCallback((toolName: string, args?: Record<string, any> | null) => {
|
||||||
|
const a = args || {};
|
||||||
|
if (toolName === 'processors_run') {
|
||||||
|
const path = typeof a.path === 'string' ? a.path : '';
|
||||||
|
return path ? `${t('Path')}: ${path}` : '';
|
||||||
|
}
|
||||||
|
if (toolName === 'vfs_read_text' || toolName === 'vfs_list_dir' || toolName === 'vfs_stat' || toolName === 'vfs_delete' || toolName === 'vfs_mkdir') {
|
||||||
|
const path = typeof a.path === 'string' ? a.path : '';
|
||||||
|
return path ? `${t('Path')}: ${path}` : '';
|
||||||
|
}
|
||||||
|
if (toolName === 'vfs_search') {
|
||||||
|
const query = typeof a.query === 'string' ? a.query : '';
|
||||||
|
return query ? `${t('Search')}: ${query}` : '';
|
||||||
|
}
|
||||||
|
if (toolName === 'vfs_write_text') {
|
||||||
|
const path = typeof a.path === 'string' ? a.path : '';
|
||||||
|
return path ? `${t('Path')}: ${path}` : '';
|
||||||
|
}
|
||||||
|
if (toolName === 'vfs_move' || toolName === 'vfs_copy' || toolName === 'vfs_rename') {
|
||||||
|
const src = typeof a.src === 'string' ? a.src : '';
|
||||||
|
const dst = typeof a.dst === 'string' ? a.dst : '';
|
||||||
|
if (src && dst) return `${src} → ${dst}`;
|
||||||
|
if (src) return src;
|
||||||
|
if (dst) return dst;
|
||||||
|
return '';
|
||||||
|
}
|
||||||
|
return '';
|
||||||
|
}, [t]);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<>
|
||||||
|
<Drawer
|
||||||
|
title={t('AI Agent')}
|
||||||
|
open={open}
|
||||||
|
onClose={() => { streamControllerRef.current?.abort(); onOpenChange(false); }}
|
||||||
|
width={520}
|
||||||
|
mask={false}
|
||||||
|
destroyOnHidden
|
||||||
|
styles={{
|
||||||
|
body: {
|
||||||
|
padding: 8,
|
||||||
|
background: token.colorBgContainer,
|
||||||
|
},
|
||||||
|
}}
|
||||||
|
extra={
|
||||||
|
<Space align="center">
|
||||||
|
<Text type="secondary">{t('Auto execute')}</Text>
|
||||||
|
<Switch size="small" checked={autoExecute} onChange={setAutoExecute} />
|
||||||
|
<Button
|
||||||
|
type="text"
|
||||||
|
size="small"
|
||||||
|
icon={<DeleteOutlined />}
|
||||||
|
onClick={clearChat}
|
||||||
|
disabled={loading || messageItems.length === 0}
|
||||||
|
>
|
||||||
|
{t('Clear')}
|
||||||
|
</Button>
|
||||||
|
</Space>
|
||||||
|
}
|
||||||
|
>
|
||||||
|
<Flex vertical gap={0} style={{ height: '100%' }} className="fx-agent-container">
|
||||||
|
<div
|
||||||
|
ref={scrollRef}
|
||||||
|
className="fx-agent-chat-scroll"
|
||||||
|
>
|
||||||
|
{messageItems.length === 0 ? (
|
||||||
|
<div className="fx-agent-empty">
|
||||||
|
<Avatar size={36} icon={<RobotOutlined />} style={{ background: token.colorPrimary }} />
|
||||||
|
<div style={{ marginTop: 8 }}>
|
||||||
|
<Text type="secondary">{t('Start a conversation')}</Text>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
<div className="fx-agent-messages">
|
||||||
|
{messageItems.map((m, idx) => {
|
||||||
|
const role = String((m as any).role);
|
||||||
|
const isUser = role === 'user';
|
||||||
|
const isTool = role === 'tool';
|
||||||
|
const toolCallId = typeof (m as any).tool_call_id === 'string' ? String((m as any).tool_call_id) : '';
|
||||||
|
const toolInfo = toolCallId ? toolCallsById.get(toolCallId) : null;
|
||||||
|
const toolName = toolInfo?.name || (toolCallId ? toolNameByIdRef.current[toolCallId] : '') || '';
|
||||||
|
const msgKey = toolCallId ? `tool:${toolCallId}` : `${role}:${idx}`;
|
||||||
|
|
||||||
|
if (isTool) {
|
||||||
|
const rawContent = extractTextContent((m as any).content);
|
||||||
|
const expanded = !!expandedTools[msgKey];
|
||||||
|
const summary = toolName ? renderToolResultSummary(toolName, rawContent, toolInfo?.args || null) : '';
|
||||||
|
return (
|
||||||
|
<div key={msgKey} className="fx-agent-msg fx-agent-msg-tool">
|
||||||
|
<div className="fx-agent-tool-block">
|
||||||
|
<div className="fx-agent-tool-bar">
|
||||||
|
<Space size={6} wrap className="fx-agent-tool-pills">
|
||||||
|
<Tag className="fx-agent-pill" bordered={false} icon={<ToolOutlined />}>
|
||||||
|
{t('MCP Tool')}
|
||||||
|
</Tag>
|
||||||
|
<Tag className="fx-agent-pill fx-agent-pill-strong" bordered={false} icon={<CodeOutlined />}>
|
||||||
|
{toolName || t('Tool')}
|
||||||
|
</Tag>
|
||||||
|
</Space>
|
||||||
|
<Button
|
||||||
|
type="text"
|
||||||
|
size="small"
|
||||||
|
icon={expanded ? <UpOutlined /> : <DownOutlined />}
|
||||||
|
onClick={() => setExpandedTools((prev) => ({ ...prev, [msgKey]: !prev[msgKey] }))}
|
||||||
|
>
|
||||||
|
{expanded ? t('Collapse') : t('Expand')}
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
{summary ? (
|
||||||
|
<div className="fx-agent-tool-summary-line">
|
||||||
|
<Text type="secondary">{summary}</Text>
|
||||||
|
</div>
|
||||||
|
) : null}
|
||||||
|
{expanded && (
|
||||||
|
<div className="fx-agent-tool-expanded">
|
||||||
|
{toolInfo?.args && Object.keys(toolInfo.args).length > 0 && (
|
||||||
|
<div style={{ marginBottom: 10 }}>
|
||||||
|
<Text type="secondary" style={{ fontSize: 12 }}>{t('Arguments')}</Text>
|
||||||
|
<pre className="fx-agent-pre fx-agent-pre-compact">
|
||||||
|
{JSON.stringify(toolInfo.args, null, 2)}
|
||||||
|
</pre>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
{renderToolDetails(msgKey, toolName || t('Tool'), rawContent)}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const text = extractTextContent((m as any).content);
|
||||||
|
if (isUser) {
|
||||||
|
return (
|
||||||
|
<div key={msgKey} className="fx-agent-msg fx-agent-msg-user">
|
||||||
|
<div className="fx-agent-user-block fx-agent-content">
|
||||||
|
{text.trim() ? <div className="fx-agent-text">{text}</div> : <Text type="secondary">{t('No content')}</Text>}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div key={msgKey} className="fx-agent-msg fx-agent-msg-assistant">
|
||||||
|
<div className="fx-agent-assistant-block fx-agent-content">
|
||||||
|
{text.trim() ? (
|
||||||
|
<div className="fx-agent-md">
|
||||||
|
<ReactMarkdown>{text}</ReactMarkdown>
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
<Text type="secondary">{t('No content')}</Text>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
})}
|
||||||
|
{runningToolCount > 0 && (
|
||||||
|
<div className="fx-agent-running">
|
||||||
|
<LoadingOutlined spin />
|
||||||
|
<Text type="secondary">{t('Calling tools')}</Text>
|
||||||
|
<Space size={6} wrap>
|
||||||
|
{runningToolEntries.slice(0, 2).map(([id, name]) => (
|
||||||
|
<Tag key={id} bordered={false} color="blue">
|
||||||
|
{(name || t('Tool'))} #{shortId(id, 4)}
|
||||||
|
</Tag>
|
||||||
|
))}
|
||||||
|
{runningToolCount > 2 && (
|
||||||
|
<Text type="secondary">+{runningToolCount - 2}</Text>
|
||||||
|
)}
|
||||||
|
</Space>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
{pending.length > 0 && (
|
||||||
|
<div className="fx-agent-pending-group">
|
||||||
|
<div className="fx-agent-pending-head">
|
||||||
|
<Space size={8} wrap>
|
||||||
|
<Tag className="fx-agent-pill fx-agent-pill-warn" bordered={false}>
|
||||||
|
{t('Pending actions')}
|
||||||
|
</Tag>
|
||||||
|
<Text type="secondary">{pending.length}</Text>
|
||||||
|
</Space>
|
||||||
|
<Space size={6}>
|
||||||
|
<Button size="small" type="primary" onClick={approveAll} loading={loading}>
|
||||||
|
{t('Execute all')}
|
||||||
|
</Button>
|
||||||
|
<Button size="small" onClick={rejectAll} disabled={loading}>
|
||||||
|
{t('Cancel all')}
|
||||||
|
</Button>
|
||||||
|
</Space>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="fx-agent-pending-list">
|
||||||
|
{pending.map((p) => {
|
||||||
|
const args = p.arguments || {};
|
||||||
|
const key = `pending:${p.id}`;
|
||||||
|
const expanded = !!expandedTools[key];
|
||||||
|
const running = Object.prototype.hasOwnProperty.call(runningTools, p.id);
|
||||||
|
const summary = renderToolArgsSummary(p.name, args);
|
||||||
|
return (
|
||||||
|
<div key={p.id} className="fx-agent-tool-block fx-agent-pending-item">
|
||||||
|
<div className="fx-agent-tool-bar">
|
||||||
|
<Space size={6} wrap className="fx-agent-tool-pills">
|
||||||
|
<Tag className="fx-agent-pill" bordered={false} icon={<ToolOutlined />}>
|
||||||
|
{t('MCP Tool')}
|
||||||
|
</Tag>
|
||||||
|
<Tag className="fx-agent-pill fx-agent-pill-strong" bordered={false} icon={<CodeOutlined />}>
|
||||||
|
{p.name}
|
||||||
|
</Tag>
|
||||||
|
{running ? <LoadingOutlined spin style={{ color: token.colorPrimary }} /> : null}
|
||||||
|
</Space>
|
||||||
|
<Space size={6}>
|
||||||
|
<Button
|
||||||
|
size="small"
|
||||||
|
type="primary"
|
||||||
|
onClick={() => void approveOne(p.id)}
|
||||||
|
loading={loading && running}
|
||||||
|
disabled={loading && !running}
|
||||||
|
>
|
||||||
|
{t('Execute')}
|
||||||
|
</Button>
|
||||||
|
<Button
|
||||||
|
size="small"
|
||||||
|
onClick={() => void rejectOne(p.id)}
|
||||||
|
disabled={loading && !running}
|
||||||
|
>
|
||||||
|
{t('Cancel')}
|
||||||
|
</Button>
|
||||||
|
<Button
|
||||||
|
type="text"
|
||||||
|
size="small"
|
||||||
|
icon={expanded ? <UpOutlined /> : <DownOutlined />}
|
||||||
|
onClick={() => setExpandedTools((prev) => ({ ...prev, [key]: !prev[key] }))}
|
||||||
|
/>
|
||||||
|
</Space>
|
||||||
|
</div>
|
||||||
|
{summary ? (
|
||||||
|
<div className="fx-agent-tool-summary-line">
|
||||||
|
<Text type="secondary">{summary}</Text>
|
||||||
|
</div>
|
||||||
|
) : null}
|
||||||
|
{expanded && (
|
||||||
|
<div className="fx-agent-tool-expanded">
|
||||||
|
<Text type="secondary" style={{ fontSize: 12 }}>{t('Arguments')}</Text>
|
||||||
|
<pre className="fx-agent-pre">
|
||||||
|
{JSON.stringify(args, null, 2)}
|
||||||
|
</pre>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
})}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="fx-agent-composer">
|
||||||
|
<Flex vertical gap={8}>
|
||||||
|
<Space wrap>
|
||||||
|
<Button size="small" icon={<FolderOpenOutlined />} onClick={() => setPathModalOpen(true)} disabled={loading}>
|
||||||
|
{t('Select Path')}
|
||||||
|
</Button>
|
||||||
|
{effectivePath && (
|
||||||
|
<Tag bordered={false} color="blue">{t('Current')}: {effectivePath}</Tag>
|
||||||
|
)}
|
||||||
|
</Space>
|
||||||
|
|
||||||
|
<Input.TextArea
|
||||||
|
value={input}
|
||||||
|
onChange={(e) => setInput(e.target.value)}
|
||||||
|
placeholder={t('Type a message')}
|
||||||
|
autoSize={{ minRows: 2, maxRows: 6 }}
|
||||||
|
disabled={loading || pending.length > 0}
|
||||||
|
variant="borderless"
|
||||||
|
onPressEnter={(e) => {
|
||||||
|
if (e.shiftKey) return;
|
||||||
|
e.preventDefault();
|
||||||
|
void handleSend();
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
<div style={{ display: 'flex', justifyContent: 'flex-end' }}>
|
||||||
|
<Button
|
||||||
|
type="primary"
|
||||||
|
size="small"
|
||||||
|
icon={<SendOutlined />}
|
||||||
|
onClick={handleSend}
|
||||||
|
loading={loading}
|
||||||
|
disabled={loading || pending.length > 0 || !input.trim()}
|
||||||
|
>
|
||||||
|
{t('Send')}
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
</Flex>
|
||||||
|
</div>
|
||||||
|
</Flex>
|
||||||
|
</Drawer>
|
||||||
|
|
||||||
|
<PathSelectorModal
|
||||||
|
open={pathModalOpen}
|
||||||
|
mode="any"
|
||||||
|
initialPath={effectivePath || '/'}
|
||||||
|
onOk={handlePathSelected}
|
||||||
|
onCancel={() => setPathModalOpen(false)}
|
||||||
|
/>
|
||||||
|
</>
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
export default AiAgentWidget;
|
||||||
@@ -690,5 +690,40 @@
|
|||||||
"App \"{key}\" not found.": "App \"{key}\" not found.",
|
"App \"{key}\" not found.": "App \"{key}\" not found.",
|
||||||
"Open with {app}": "Open with {app}",
|
"Open with {app}": "Open with {app}",
|
||||||
"Set as default for .{ext}": "Set as default for .{ext}",
|
"Set as default for .{ext}": "Set as default for .{ext}",
|
||||||
"Advanced tokens must be valid JSON": "Advanced tokens must be valid JSON"
|
"AI Agent": "AI Agent",
|
||||||
|
"Auto execute": "Auto execute",
|
||||||
|
"Start a conversation": "Start a conversation",
|
||||||
|
"No content": "No content",
|
||||||
|
"Pending actions": "Pending actions",
|
||||||
|
"Execute": "Execute",
|
||||||
|
"Execute all": "Execute all",
|
||||||
|
"Cancel all": "Cancel all",
|
||||||
|
"Type a message": "Type a message",
|
||||||
|
"Send": "Send",
|
||||||
|
"Please confirm pending actions first": "Please confirm pending actions first",
|
||||||
|
"You": "You",
|
||||||
|
"Tool": "Tool",
|
||||||
|
"MCP Tool": "MCP Tool",
|
||||||
|
"Arguments": "Arguments",
|
||||||
|
"Raw JSON": "Raw JSON",
|
||||||
|
"Collapse": "Collapse",
|
||||||
|
"Copied": "Copied",
|
||||||
|
"Canceled": "Canceled",
|
||||||
|
"Tasks submitted": "Tasks submitted",
|
||||||
|
"Calling tools": "Calling tools",
|
||||||
|
"Advanced tokens must be valid JSON": "Advanced tokens must be valid JSON",
|
||||||
|
"Search": "Search",
|
||||||
|
"Total": "Total",
|
||||||
|
"Mode": "Mode",
|
||||||
|
"Has more": "Has more",
|
||||||
|
"Page": "Page",
|
||||||
|
"results": "results",
|
||||||
|
"chars": "chars",
|
||||||
|
"Truncated": "Truncated",
|
||||||
|
"Write": "Write",
|
||||||
|
"Read": "Read",
|
||||||
|
"Created": "Created",
|
||||||
|
"Moved": "Moved",
|
||||||
|
"Renamed": "Renamed",
|
||||||
|
"Info": "Info"
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -683,5 +683,40 @@
|
|||||||
"App \"{key}\" not found.": "应用 \"{key}\" 不存在。",
|
"App \"{key}\" not found.": "应用 \"{key}\" 不存在。",
|
||||||
"Open with {app}": "使用 {app} 打开",
|
"Open with {app}": "使用 {app} 打开",
|
||||||
"Set as default for .{ext}": "设为该类型(.{ext})默认应用",
|
"Set as default for .{ext}": "设为该类型(.{ext})默认应用",
|
||||||
"Advanced tokens must be valid JSON": "高级 Token 需为合法 JSON"
|
"AI Agent": "AI 助手",
|
||||||
|
"Auto execute": "自动执行",
|
||||||
|
"Start a conversation": "开始对话",
|
||||||
|
"No content": "无内容",
|
||||||
|
"Pending actions": "待确认操作",
|
||||||
|
"Execute": "执行",
|
||||||
|
"Execute all": "全部执行",
|
||||||
|
"Cancel all": "全部取消",
|
||||||
|
"Type a message": "输入消息",
|
||||||
|
"Send": "发送",
|
||||||
|
"Please confirm pending actions first": "请先确认待执行操作",
|
||||||
|
"You": "你",
|
||||||
|
"Tool": "工具",
|
||||||
|
"MCP Tool": "MCP 工具",
|
||||||
|
"Arguments": "参数",
|
||||||
|
"Raw JSON": "原始 JSON",
|
||||||
|
"Collapse": "收起",
|
||||||
|
"Copied": "已复制",
|
||||||
|
"Canceled": "已取消",
|
||||||
|
"Tasks submitted": "已提交任务",
|
||||||
|
"Calling tools": "正在调用工具",
|
||||||
|
"Advanced tokens must be valid JSON": "高级 Token 需为合法 JSON",
|
||||||
|
"Search": "搜索",
|
||||||
|
"Total": "总计",
|
||||||
|
"Mode": "模式",
|
||||||
|
"Has more": "更多",
|
||||||
|
"Page": "页",
|
||||||
|
"results": "条结果",
|
||||||
|
"chars": "字符",
|
||||||
|
"Truncated": "已截断",
|
||||||
|
"Write": "写入",
|
||||||
|
"Read": "读取",
|
||||||
|
"Created": "已创建",
|
||||||
|
"Moved": "已移动",
|
||||||
|
"Renamed": "已重命名",
|
||||||
|
"Info": "信息"
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
import { Layout, Button, Dropdown, theme, Flex, Avatar, Typography } from 'antd';
|
import { Layout, Button, Dropdown, theme, Flex, Avatar, Typography, Tooltip } from 'antd';
|
||||||
import { SearchOutlined, MenuUnfoldOutlined, LogoutOutlined, UserOutlined } from '@ant-design/icons';
|
import { SearchOutlined, MenuUnfoldOutlined, LogoutOutlined, UserOutlined, RobotOutlined } from '@ant-design/icons';
|
||||||
import { memo, useState } from 'react';
|
import { memo, useState } from 'react';
|
||||||
import SearchDialog from './SearchDialog.tsx';
|
import SearchDialog from './SearchDialog.tsx';
|
||||||
import { authApi } from '../api/auth.ts';
|
import { authApi } from '../api/auth.ts';
|
||||||
@@ -14,9 +14,10 @@ const { Header } = Layout;
|
|||||||
export interface TopHeaderProps {
|
export interface TopHeaderProps {
|
||||||
collapsed: boolean;
|
collapsed: boolean;
|
||||||
onToggle(): void;
|
onToggle(): void;
|
||||||
|
onOpenAiAgent(): void;
|
||||||
}
|
}
|
||||||
|
|
||||||
const TopHeader = memo(function TopHeader({ collapsed, onToggle }: TopHeaderProps) {
|
const TopHeader = memo(function TopHeader({ collapsed, onToggle, onOpenAiAgent }: TopHeaderProps) {
|
||||||
const { token } = theme.useToken();
|
const { token } = theme.useToken();
|
||||||
const [searchOpen, setSearchOpen] = useState(false);
|
const [searchOpen, setSearchOpen] = useState(false);
|
||||||
const navigate = useNavigate();
|
const navigate = useNavigate();
|
||||||
@@ -50,6 +51,15 @@ const TopHeader = memo(function TopHeader({ collapsed, onToggle }: TopHeaderProp
|
|||||||
</Button>
|
</Button>
|
||||||
<SearchDialog open={searchOpen} onClose={() => setSearchOpen(false)} />
|
<SearchDialog open={searchOpen} onClose={() => setSearchOpen(false)} />
|
||||||
<Flex style={{ marginLeft: 'auto' }} align="center" gap={12}>
|
<Flex style={{ marginLeft: 'auto' }} align="center" gap={12}>
|
||||||
|
<Tooltip title={t('AI Agent')}>
|
||||||
|
<Button
|
||||||
|
type="text"
|
||||||
|
icon={<RobotOutlined />}
|
||||||
|
aria-label={t('AI Agent')}
|
||||||
|
onClick={onOpenAiAgent}
|
||||||
|
style={{ paddingInline: 8, height: 40 }}
|
||||||
|
/>
|
||||||
|
</Tooltip>
|
||||||
<LanguageSwitcher />
|
<LanguageSwitcher />
|
||||||
<Dropdown
|
<Dropdown
|
||||||
menu={{
|
menu={{
|
||||||
|
|||||||
@@ -295,7 +295,7 @@ export default function AiSettingsTab() {
|
|||||||
identifier: existing.identifier,
|
identifier: existing.identifier,
|
||||||
api_format: existing.api_format,
|
api_format: existing.api_format,
|
||||||
base_url: existing.base_url ?? undefined,
|
base_url: existing.base_url ?? undefined,
|
||||||
api_key: existing.api_key ?? undefined,
|
api_key: '',
|
||||||
logo_url: existing.logo_url ?? undefined,
|
logo_url: existing.logo_url ?? undefined,
|
||||||
provider_type: existing.provider_type ?? undefined,
|
provider_type: existing.provider_type ?? undefined,
|
||||||
});
|
});
|
||||||
@@ -345,10 +345,12 @@ export default function AiSettingsTab() {
|
|||||||
identifier: (values.identifier || '').trim(),
|
identifier: (values.identifier || '').trim(),
|
||||||
api_format: values.api_format,
|
api_format: values.api_format,
|
||||||
base_url: trimmedBaseUrl ? trimmedBaseUrl : null,
|
base_url: trimmedBaseUrl ? trimmedBaseUrl : null,
|
||||||
api_key: trimmedApiKey ? trimmedApiKey : null,
|
|
||||||
logo_url: trimmedLogoUrl ? trimmedLogoUrl : null,
|
logo_url: trimmedLogoUrl ? trimmedLogoUrl : null,
|
||||||
provider_type: trimmedProviderType ? trimmedProviderType : null,
|
provider_type: trimmedProviderType ? trimmedProviderType : null,
|
||||||
};
|
};
|
||||||
|
if (trimmedApiKey) {
|
||||||
|
payload.api_key = trimmedApiKey;
|
||||||
|
}
|
||||||
try {
|
try {
|
||||||
if (providerModal.editing) {
|
if (providerModal.editing) {
|
||||||
await updateProvider(providerModal.editing.id, payload);
|
await updateProvider(providerModal.editing.id, payload);
|
||||||
@@ -1080,8 +1082,28 @@ export default function AiSettingsTab() {
|
|||||||
<Form.Item name="base_url" label={t('Base URL')} rules={[{ required: true, message: t('Enter base url') }]}>
|
<Form.Item name="base_url" label={t('Base URL')} rules={[{ required: true, message: t('Enter base url') }]}>
|
||||||
<Input placeholder="https://" />
|
<Input placeholder="https://" />
|
||||||
</Form.Item>
|
</Form.Item>
|
||||||
<Form.Item name="api_key" label={t('API Key')}>
|
<Form.Item
|
||||||
<Input placeholder={t('Optional, can also be provided per request')} />
|
name="api_key"
|
||||||
|
label={(
|
||||||
|
<Space size={8}>
|
||||||
|
{t('API Key')}
|
||||||
|
{providerModal.editing ? (
|
||||||
|
<Tag color={providerModal.editing.has_api_key ? 'green' : 'default'}>
|
||||||
|
{providerModal.editing.has_api_key ? '已设置' : '未设置'}
|
||||||
|
</Tag>
|
||||||
|
) : null}
|
||||||
|
</Space>
|
||||||
|
)}
|
||||||
|
>
|
||||||
|
<Input.Password
|
||||||
|
placeholder={
|
||||||
|
providerModal.editing
|
||||||
|
? '留空不更新,填写将覆盖'
|
||||||
|
: t('Optional, can also be provided per request')
|
||||||
|
}
|
||||||
|
autoComplete="new-password"
|
||||||
|
visibilityToggle={false}
|
||||||
|
/>
|
||||||
</Form.Item>
|
</Form.Item>
|
||||||
<Form.Item name="logo_url" label={t('Logo URL')}>
|
<Form.Item name="logo_url" label={t('Logo URL')}>
|
||||||
<Input placeholder="https://" />
|
<Input placeholder="https://" />
|
||||||
|
|||||||
@@ -10,6 +10,15 @@ import { vfsApi, type VfsEntry } from './api/vfs';
|
|||||||
|
|
||||||
type FrameMode = 'file' | 'app';
|
type FrameMode = 'file' | 'app';
|
||||||
|
|
||||||
|
type FrameQuery = {
|
||||||
|
pluginKey: string;
|
||||||
|
mode: FrameMode;
|
||||||
|
filePath: string;
|
||||||
|
pluginVersion: string;
|
||||||
|
pluginStyles: string[] | null;
|
||||||
|
entry: VfsEntry | null;
|
||||||
|
};
|
||||||
|
|
||||||
function renderStatus(text: string, isError: boolean = false) {
|
function renderStatus(text: string, isError: boolean = false) {
|
||||||
const root = document.getElementById('root');
|
const root = document.getElementById('root');
|
||||||
if (!root) return;
|
if (!root) return;
|
||||||
@@ -31,12 +40,55 @@ function renderStatus(text: string, isError: boolean = false) {
|
|||||||
root.appendChild(el);
|
root.appendChild(el);
|
||||||
}
|
}
|
||||||
|
|
||||||
function getQuery() {
|
function scheduleStatus(text: string, delayMs: number) {
|
||||||
|
let canceled = false;
|
||||||
|
const t = window.setTimeout(() => {
|
||||||
|
if (canceled) return;
|
||||||
|
renderStatus(text);
|
||||||
|
}, delayMs);
|
||||||
|
|
||||||
|
return () => {
|
||||||
|
canceled = true;
|
||||||
|
window.clearTimeout(t);
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function tryParseJson<T = unknown>(raw: string): T | null {
|
||||||
|
try {
|
||||||
|
return JSON.parse(raw) as T;
|
||||||
|
} catch {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function getQuery(): FrameQuery {
|
||||||
const params = new URLSearchParams(window.location.search);
|
const params = new URLSearchParams(window.location.search);
|
||||||
const pluginKey = (params.get('pluginKey') || '').trim();
|
const pluginKey = (params.get('pluginKey') || '').trim();
|
||||||
const mode = (params.get('mode') || 'file') as FrameMode;
|
const mode = (params.get('mode') || 'file') as FrameMode;
|
||||||
const filePath = (params.get('filePath') || '').trim();
|
const filePath = (params.get('filePath') || '').trim();
|
||||||
return { pluginKey, mode, filePath };
|
const pluginVersion = (params.get('pluginVersion') || '').trim();
|
||||||
|
|
||||||
|
const rawStyles = (params.get('pluginStyles') || '').trim();
|
||||||
|
const parsedStyles = rawStyles ? tryParseJson<unknown>(rawStyles) : null;
|
||||||
|
const pluginStyles = Array.isArray(parsedStyles)
|
||||||
|
? parsedStyles.filter((s) => typeof s === 'string' && s.trim().length > 0)
|
||||||
|
: null;
|
||||||
|
|
||||||
|
const rawEntry = (params.get('entry') || '').trim();
|
||||||
|
const parsedEntry = rawEntry ? tryParseJson<any>(rawEntry) : null;
|
||||||
|
const entry: VfsEntry | null =
|
||||||
|
parsedEntry && typeof parsedEntry === 'object' && typeof parsedEntry.name === 'string'
|
||||||
|
? {
|
||||||
|
name: String(parsedEntry.name),
|
||||||
|
is_dir: Boolean(parsedEntry.is_dir),
|
||||||
|
size: Number(parsedEntry.size || 0),
|
||||||
|
mtime: Number(parsedEntry.mtime || 0),
|
||||||
|
type: typeof parsedEntry.type === 'string' ? parsedEntry.type : undefined,
|
||||||
|
has_thumbnail: Boolean(parsedEntry.has_thumbnail),
|
||||||
|
}
|
||||||
|
: null;
|
||||||
|
|
||||||
|
return { pluginKey, mode, filePath, pluginVersion, pluginStyles, entry };
|
||||||
}
|
}
|
||||||
|
|
||||||
function postToParent(data: any) {
|
function postToParent(data: any) {
|
||||||
@@ -45,6 +97,51 @@ function postToParent(data: any) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type TempLinkCache = {
|
||||||
|
url: string;
|
||||||
|
fetchedAt: number;
|
||||||
|
expiresIn: number;
|
||||||
|
};
|
||||||
|
|
||||||
|
const TEMP_LINK_CACHE_PREFIX = 'foxel:tempLink:';
|
||||||
|
const TEMP_LINK_DEFAULT_EXPIRES_IN = 3600;
|
||||||
|
|
||||||
|
function getTempLinkCacheKey(filePath: string) {
|
||||||
|
return `${TEMP_LINK_CACHE_PREFIX}${filePath}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
function readTempLinkCache(filePath: string): TempLinkCache | null {
|
||||||
|
try {
|
||||||
|
const raw = sessionStorage.getItem(getTempLinkCacheKey(filePath));
|
||||||
|
if (!raw) return null;
|
||||||
|
const parsed = JSON.parse(raw) as TempLinkCache;
|
||||||
|
if (!parsed || typeof parsed.url !== 'string') return null;
|
||||||
|
if (!parsed.fetchedAt || !parsed.expiresIn) return null;
|
||||||
|
if (Date.now() - parsed.fetchedAt >= parsed.expiresIn * 1000 - 10_000) return null;
|
||||||
|
return parsed;
|
||||||
|
} catch {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function writeTempLinkCache(filePath: string, item: TempLinkCache) {
|
||||||
|
try {
|
||||||
|
sessionStorage.setItem(getTempLinkCacheKey(filePath), JSON.stringify(item));
|
||||||
|
} catch {
|
||||||
|
void 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function getTempLinkUrl(filePath: string, expiresIn: number = TEMP_LINK_DEFAULT_EXPIRES_IN) {
|
||||||
|
const cached = readTempLinkCache(filePath);
|
||||||
|
if (cached) return cached.url;
|
||||||
|
|
||||||
|
const tokenData = await vfsApi.getTempLinkToken(filePath, expiresIn);
|
||||||
|
const url = typeof tokenData?.url === 'string' && tokenData.url.trim() ? tokenData.url : vfsApi.getTempPublicUrl(tokenData.token);
|
||||||
|
writeTempLinkCache(filePath, { url, fetchedAt: Date.now(), expiresIn });
|
||||||
|
return url;
|
||||||
|
}
|
||||||
|
|
||||||
function createHostApi(pluginKey: string): FoxelHostApi {
|
function createHostApi(pluginKey: string): FoxelHostApi {
|
||||||
const showMessage: FoxelHostApi['showMessage'] = (type, content) => {
|
const showMessage: FoxelHostApi['showMessage'] = (type, content) => {
|
||||||
const antd = window.__FOXEL_EXTERNALS__?.antd;
|
const antd = window.__FOXEL_EXTERNALS__?.antd;
|
||||||
@@ -76,8 +173,7 @@ function createHostApi(pluginKey: string): FoxelHostApi {
|
|||||||
callApi: async <T = unknown>(path: string, options?: RequestInit & { json?: unknown }) =>
|
callApi: async <T = unknown>(path: string, options?: RequestInit & { json?: unknown }) =>
|
||||||
request<T>(path, options),
|
request<T>(path, options),
|
||||||
getTempLink: async (filePath: string) => {
|
getTempLink: async (filePath: string) => {
|
||||||
const token = await vfsApi.getTempLinkToken(filePath);
|
return await getTempLinkUrl(filePath);
|
||||||
return vfsApi.getTempPublicUrl(token.token);
|
|
||||||
},
|
},
|
||||||
getStreamUrl: (filePath: string) => vfsApi.streamUrl(filePath),
|
getStreamUrl: (filePath: string) => vfsApi.streamUrl(filePath),
|
||||||
};
|
};
|
||||||
@@ -89,28 +185,28 @@ function getPluginStylePaths(plugin: PluginItem): string[] {
|
|||||||
return styles.filter((s) => typeof s === 'string' && s.trim().length > 0);
|
return styles.filter((s) => typeof s === 'string' && s.trim().length > 0);
|
||||||
}
|
}
|
||||||
|
|
||||||
async function loadPluginStyles(pluginKey: string, plugin: PluginItem) {
|
function withVersion(url: string, version?: string | null): string {
|
||||||
const stylePaths = getPluginStylePaths(plugin);
|
const v = typeof version === 'string' ? version.trim() : '';
|
||||||
if (stylePaths.length === 0) return;
|
if (!v) return url;
|
||||||
|
const u = new URL(url, window.location.origin);
|
||||||
const tasks = stylePaths.map(
|
u.searchParams.set('v', v);
|
||||||
(p) =>
|
return u.pathname + u.search;
|
||||||
new Promise<void>((resolve) => {
|
|
||||||
const href = `/api/plugins/${pluginKey}/assets/${p.replace(/^\/+/, '')}`;
|
|
||||||
const link = document.createElement('link');
|
|
||||||
link.rel = 'stylesheet';
|
|
||||||
link.href = href;
|
|
||||||
link.onload = () => resolve();
|
|
||||||
link.onerror = () => resolve();
|
|
||||||
document.head.appendChild(link);
|
|
||||||
})
|
|
||||||
);
|
|
||||||
|
|
||||||
await Promise.all(tasks);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async function loadPluginBundle(pluginKey: string): Promise<RegisteredPlugin> {
|
function injectPluginStyles(pluginKey: string, stylePaths: string[], version?: string | null) {
|
||||||
const url = `/api/plugins/${pluginKey}/bundle.js`;
|
if (stylePaths.length === 0) return;
|
||||||
|
|
||||||
|
stylePaths.forEach((p) => {
|
||||||
|
const href = withVersion(`/api/plugins/${pluginKey}/assets/${p.replace(/^\/+/, '')}`, version);
|
||||||
|
const link = document.createElement('link');
|
||||||
|
link.rel = 'stylesheet';
|
||||||
|
link.href = href;
|
||||||
|
document.head.appendChild(link);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async function loadPluginBundle(pluginKey: string, version?: string | null): Promise<RegisteredPlugin> {
|
||||||
|
const url = withVersion(`/api/plugins/${pluginKey}/bundle.js`, version);
|
||||||
|
|
||||||
return new Promise<RegisteredPlugin>((resolve, reject) => {
|
return new Promise<RegisteredPlugin>((resolve, reject) => {
|
||||||
let done = false;
|
let done = false;
|
||||||
@@ -140,24 +236,43 @@ async function loadPluginBundle(pluginKey: string): Promise<RegisteredPlugin> {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
async function buildFileContext(filePath: string) {
|
function isLikelyImage(pathOrName: string) {
|
||||||
const stat = (await vfsApi.stat(filePath)) as any;
|
return /\.(jpg|jpeg|png|gif|bmp|webp|svg)$/i.test(pathOrName);
|
||||||
const name =
|
}
|
||||||
typeof stat?.name === 'string' && stat.name.trim().length > 0
|
|
||||||
? stat.name
|
|
||||||
: filePath.replace(/\\/g, '/').split('/').filter(Boolean).pop() || 'unknown';
|
|
||||||
|
|
||||||
const entry: VfsEntry = {
|
function preloadImage(url: string) {
|
||||||
name,
|
const img = new Image();
|
||||||
is_dir: Boolean(stat?.is_dir),
|
img.decoding = 'async';
|
||||||
size: Number(stat?.size || 0),
|
img.src = url;
|
||||||
mtime: Number(stat?.mtime || 0),
|
}
|
||||||
type: typeof stat?.type === 'string' ? stat.type : undefined,
|
|
||||||
has_thumbnail: Boolean(stat?.has_thumbnail),
|
|
||||||
};
|
|
||||||
|
|
||||||
const token = await vfsApi.getTempLinkToken(filePath);
|
async function buildFileContext(filePath: string, entryOverride: VfsEntry | null) {
|
||||||
const downloadUrl = vfsApi.getTempPublicUrl(token.token);
|
const entryPromise = entryOverride
|
||||||
|
? Promise.resolve(entryOverride)
|
||||||
|
: (async () => {
|
||||||
|
const stat = (await vfsApi.stat(filePath)) as any;
|
||||||
|
const name =
|
||||||
|
typeof stat?.name === 'string' && stat.name.trim().length > 0
|
||||||
|
? stat.name
|
||||||
|
: filePath.replace(/\\/g, '/').split('/').filter(Boolean).pop() || 'unknown';
|
||||||
|
|
||||||
|
const entry: VfsEntry = {
|
||||||
|
name,
|
||||||
|
is_dir: Boolean(stat?.is_dir),
|
||||||
|
size: Number(stat?.size || 0),
|
||||||
|
mtime: Number(stat?.mtime || 0),
|
||||||
|
type: typeof stat?.type === 'string' ? stat.type : undefined,
|
||||||
|
has_thumbnail: Boolean(stat?.has_thumbnail),
|
||||||
|
};
|
||||||
|
return entry;
|
||||||
|
})();
|
||||||
|
|
||||||
|
const downloadUrlPromise = getTempLinkUrl(filePath);
|
||||||
|
if (isLikelyImage(filePath)) {
|
||||||
|
downloadUrlPromise.then(preloadImage).catch(() => void 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
const [entry, downloadUrl] = await Promise.all([entryPromise, downloadUrlPromise]);
|
||||||
const streamUrl = vfsApi.streamUrl(filePath);
|
const streamUrl = vfsApi.streamUrl(filePath);
|
||||||
|
|
||||||
return { entry, urls: { downloadUrl, streamUrl } };
|
return { entry, urls: { downloadUrl, streamUrl } };
|
||||||
@@ -166,7 +281,7 @@ async function buildFileContext(filePath: string) {
|
|||||||
async function main() {
|
async function main() {
|
||||||
initExternals();
|
initExternals();
|
||||||
|
|
||||||
const { pluginKey, mode, filePath } = getQuery();
|
const { pluginKey, mode, filePath, pluginVersion, pluginStyles, entry } = getQuery();
|
||||||
if (!pluginKey) {
|
if (!pluginKey) {
|
||||||
renderStatus('Missing pluginKey in query string', true);
|
renderStatus('Missing pluginKey in query string', true);
|
||||||
return;
|
return;
|
||||||
@@ -178,34 +293,37 @@ async function main() {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
renderStatus('Loading plugin...');
|
const cancelLoading = scheduleStatus('Loading plugin...', 200);
|
||||||
|
|
||||||
let plugin: PluginItem;
|
|
||||||
try {
|
|
||||||
plugin = await pluginsApi.get(pluginKey);
|
|
||||||
} catch (e) {
|
|
||||||
const msg = e instanceof Error ? e.message : String(e);
|
|
||||||
renderStatus(`Failed to load plugin info: ${msg}`, true);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
await loadPluginStyles(pluginKey, plugin);
|
|
||||||
} catch {
|
|
||||||
// ignore
|
|
||||||
}
|
|
||||||
|
|
||||||
let registered: RegisteredPlugin;
|
|
||||||
try {
|
|
||||||
registered = await loadPluginBundle(pluginKey);
|
|
||||||
} catch (e) {
|
|
||||||
const msg = e instanceof Error ? e.message : String(e);
|
|
||||||
renderStatus(`Failed to load plugin bundle: ${msg}`, true);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const host = createHostApi(pluginKey);
|
const host = createHostApi(pluginKey);
|
||||||
|
|
||||||
|
const pluginPromise = (async () => {
|
||||||
|
if (pluginVersion && pluginStyles) {
|
||||||
|
injectPluginStyles(pluginKey, pluginStyles, pluginVersion);
|
||||||
|
return await loadPluginBundle(pluginKey, pluginVersion);
|
||||||
|
}
|
||||||
|
|
||||||
|
const plugin: PluginItem = await pluginsApi.get(pluginKey);
|
||||||
|
const resolvedVersion = plugin.version || '';
|
||||||
|
injectPluginStyles(pluginKey, getPluginStylePaths(plugin), resolvedVersion);
|
||||||
|
return await loadPluginBundle(pluginKey, resolvedVersion);
|
||||||
|
})();
|
||||||
|
|
||||||
|
const ctxPromise = mode === 'file' ? buildFileContext(filePath, entry) : Promise.resolve(null);
|
||||||
|
|
||||||
|
let registered: RegisteredPlugin;
|
||||||
|
let ctx: Awaited<ReturnType<typeof buildFileContext>> | null;
|
||||||
|
try {
|
||||||
|
[registered, ctx] = await Promise.all([pluginPromise, ctxPromise]);
|
||||||
|
} catch (e) {
|
||||||
|
const msg = e instanceof Error ? e.message : String(e);
|
||||||
|
cancelLoading();
|
||||||
|
renderStatus(`Failed to load plugin: ${msg}`, true);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
cancelLoading();
|
||||||
|
|
||||||
let cleanup: (() => void) | null = null;
|
let cleanup: (() => void) | null = null;
|
||||||
const mountError = async () => {
|
const mountError = async () => {
|
||||||
try {
|
try {
|
||||||
@@ -224,8 +342,11 @@ async function main() {
|
|||||||
throw new Error('Missing filePath in query string');
|
throw new Error('Missing filePath in query string');
|
||||||
}
|
}
|
||||||
|
|
||||||
const { entry, urls } = await buildFileContext(filePath);
|
if (!ctx) {
|
||||||
const ret = await registered.mount(root, { filePath, entry, urls, host });
|
throw new Error('Missing file context');
|
||||||
|
}
|
||||||
|
|
||||||
|
const ret = await registered.mount(root, { filePath, entry: ctx.entry, urls: ctx.urls, host });
|
||||||
if (typeof ret === 'function') cleanup = ret;
|
if (typeof ret === 'function') cleanup = ret;
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
const msg = e instanceof Error ? e.message : String(e);
|
const msg = e instanceof Error ? e.message : String(e);
|
||||||
|
|||||||
@@ -16,6 +16,7 @@ import BackupPage from '../pages/SystemSettingsPage/BackupPage.tsx';
|
|||||||
import PluginsPage from '../pages/PluginsPage.tsx';
|
import PluginsPage from '../pages/PluginsPage.tsx';
|
||||||
import { AppWindowsProvider, useAppWindows } from '../contexts/AppWindowsContext';
|
import { AppWindowsProvider, useAppWindows } from '../contexts/AppWindowsContext';
|
||||||
import { AppWindowsLayer } from '../apps/AppWindowsLayer';
|
import { AppWindowsLayer } from '../apps/AppWindowsLayer';
|
||||||
|
import AiAgentWidget from '../components/AiAgentWidget';
|
||||||
|
|
||||||
const ShellBody = memo(function ShellBody() {
|
const ShellBody = memo(function ShellBody() {
|
||||||
const params = useParams<{ navKey?: string; '*': string }>();
|
const params = useParams<{ navKey?: string; '*': string }>();
|
||||||
@@ -24,11 +25,13 @@ const ShellBody = memo(function ShellBody() {
|
|||||||
const navigate = useNavigate();
|
const navigate = useNavigate();
|
||||||
const COLLAPSED_KEY = 'layout.siderCollapsed';
|
const COLLAPSED_KEY = 'layout.siderCollapsed';
|
||||||
const [collapsed, setCollapsed] = useState(() => localStorage.getItem(COLLAPSED_KEY) === '1');
|
const [collapsed, setCollapsed] = useState(() => localStorage.getItem(COLLAPSED_KEY) === '1');
|
||||||
|
const [agentOpen, setAgentOpen] = useState(false);
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
localStorage.setItem(COLLAPSED_KEY, collapsed ? '1' : '0');
|
localStorage.setItem(COLLAPSED_KEY, collapsed ? '1' : '0');
|
||||||
}, [collapsed]);
|
}, [collapsed]);
|
||||||
const { windows, closeWindow, toggleMax, bringToFront, updateWindow } = useAppWindows();
|
const { windows, closeWindow, toggleMax, bringToFront, updateWindow } = useAppWindows();
|
||||||
const settingsTab = navKey === 'settings' ? (subPath.split('/')[0] || undefined) : undefined;
|
const settingsTab = navKey === 'settings' ? (subPath.split('/')[0] || undefined) : undefined;
|
||||||
|
const agentCurrentPath = navKey === 'files' ? ('/' + subPath).replace(/\/+/g, '/').replace(/\/+$/, '') || '/' : null;
|
||||||
return (
|
return (
|
||||||
<Layout style={{ minHeight: '100vh', background: 'var(--ant-color-bg-layout)' }}>
|
<Layout style={{ minHeight: '100vh', background: 'var(--ant-color-bg-layout)' }}>
|
||||||
<SideNav
|
<SideNav
|
||||||
@@ -44,7 +47,7 @@ const ShellBody = memo(function ShellBody() {
|
|||||||
}}
|
}}
|
||||||
/>
|
/>
|
||||||
<Layout style={{ background: 'var(--ant-color-bg-layout)' }}>
|
<Layout style={{ background: 'var(--ant-color-bg-layout)' }}>
|
||||||
<TopHeader collapsed={collapsed} onToggle={() => setCollapsed(c => !c)} />
|
<TopHeader collapsed={collapsed} onToggle={() => setCollapsed(c => !c)} onOpenAiAgent={() => setAgentOpen(true)} />
|
||||||
<Layout.Content style={{ padding: 16, background: 'var(--ant-color-bg-layout)' }}>
|
<Layout.Content style={{ padding: 16, background: 'var(--ant-color-bg-layout)' }}>
|
||||||
<div style={{ minHeight: 'calc(100vh - 56px - 32px)', background: 'var(--ant-color-bg-layout)' }}>
|
<div style={{ minHeight: 'calc(100vh - 56px - 32px)', background: 'var(--ant-color-bg-layout)' }}>
|
||||||
<Flex vertical gap={16}>
|
<Flex vertical gap={16}>
|
||||||
@@ -76,6 +79,7 @@ const ShellBody = memo(function ShellBody() {
|
|||||||
onBringToFront={bringToFront}
|
onBringToFront={bringToFront}
|
||||||
onUpdateWindow={updateWindow}
|
onUpdateWindow={updateWindow}
|
||||||
/>
|
/>
|
||||||
|
<AiAgentWidget currentPath={agentCurrentPath} open={agentOpen} onOpenChange={setAgentOpen} />
|
||||||
</Layout>
|
</Layout>
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|||||||
244
web/src/styles/ai-agent.css
Normal file
244
web/src/styles/ai-agent.css
Normal file
@@ -0,0 +1,244 @@
|
|||||||
|
.fx-agent-container {
|
||||||
|
height: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-chat-scroll {
|
||||||
|
flex: 1;
|
||||||
|
overflow-y: auto;
|
||||||
|
padding: 0;
|
||||||
|
border-radius: 0;
|
||||||
|
background: transparent;
|
||||||
|
border: 0;
|
||||||
|
box-shadow: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-empty {
|
||||||
|
height: 100%;
|
||||||
|
min-height: 240px;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-messages {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 14px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-msg {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-msg-user {
|
||||||
|
align-items: flex-end;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-msg-assistant {
|
||||||
|
align-items: flex-start;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-msg-tool {
|
||||||
|
align-items: stretch;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-user-block {
|
||||||
|
max-width: 85%;
|
||||||
|
padding: 10px 12px;
|
||||||
|
border-radius: 12px;
|
||||||
|
border: 1px solid var(--ant-color-border-secondary);
|
||||||
|
background: var(--ant-color-fill-quaternary);
|
||||||
|
box-shadow: 0 1px 0 rgba(0, 0, 0, 0.03);
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-assistant-block {
|
||||||
|
max-width: 100%;
|
||||||
|
padding: 2px 2px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-tool-block {
|
||||||
|
width: 100%;
|
||||||
|
padding: 10px 12px;
|
||||||
|
border-radius: 12px;
|
||||||
|
border: 1px solid var(--ant-color-border-secondary);
|
||||||
|
background: var(--ant-color-bg-container);
|
||||||
|
box-shadow: 0 1px 0 rgba(0, 0, 0, 0.03);
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-tool-bar {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: space-between;
|
||||||
|
gap: 12px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-content {
|
||||||
|
font-size: 13px;
|
||||||
|
line-height: 1.75;
|
||||||
|
word-break: break-word;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-tool-pills .ant-tag {
|
||||||
|
margin-inline-end: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-pill {
|
||||||
|
border-radius: 999px;
|
||||||
|
padding-inline: 10px;
|
||||||
|
padding-block: 2px;
|
||||||
|
border: 0;
|
||||||
|
background: rgba(0, 0, 0, 0.04);
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-pill-strong {
|
||||||
|
background: var(--ant-color-primary-bg);
|
||||||
|
color: var(--ant-color-primary);
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-pill-warn {
|
||||||
|
background: var(--ant-color-warning-bg);
|
||||||
|
color: var(--ant-color-warning);
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-tool-summary-line {
|
||||||
|
margin-top: 6px;
|
||||||
|
font-size: 12px;
|
||||||
|
line-height: 1.6;
|
||||||
|
color: var(--ant-color-text-tertiary);
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-tool-expanded {
|
||||||
|
margin-top: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-text {
|
||||||
|
white-space: pre-wrap;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-md {
|
||||||
|
white-space: normal;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-md p {
|
||||||
|
margin: 0 0 0.5em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-md p:last-child {
|
||||||
|
margin-bottom: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-md ul,
|
||||||
|
.fx-agent-md ol {
|
||||||
|
margin: 0 0 0.5em;
|
||||||
|
padding-left: 1.2em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-md code {
|
||||||
|
padding: 1px 6px;
|
||||||
|
border-radius: 6px;
|
||||||
|
background: rgba(0, 0, 0, 0.04);
|
||||||
|
border: 1px solid var(--ant-color-border-secondary);
|
||||||
|
font-family: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace;
|
||||||
|
font-size: 11px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-md pre {
|
||||||
|
margin: 0 0 0.5em;
|
||||||
|
padding: 8px 10px;
|
||||||
|
border-radius: 10px;
|
||||||
|
background: var(--ant-color-bg-container);
|
||||||
|
border: 1px solid var(--ant-color-border-secondary);
|
||||||
|
overflow: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-md pre code {
|
||||||
|
display: block;
|
||||||
|
padding: 0;
|
||||||
|
border: 0;
|
||||||
|
background: transparent;
|
||||||
|
font-size: 11px;
|
||||||
|
line-height: 1.55;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-md blockquote {
|
||||||
|
margin: 0 0 0.65em;
|
||||||
|
padding: 0 0 0 10px;
|
||||||
|
border-left: 3px solid var(--ant-color-border);
|
||||||
|
color: var(--ant-color-text-tertiary);
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-md a {
|
||||||
|
color: var(--ant-color-primary);
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-tool-details {
|
||||||
|
padding: 8px;
|
||||||
|
border-radius: 10px;
|
||||||
|
background: rgba(0, 0, 0, 0.02);
|
||||||
|
border: 1px solid var(--ant-color-border-secondary);
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-pre {
|
||||||
|
margin: 8px 0 0;
|
||||||
|
padding: 8px 10px;
|
||||||
|
border-radius: 10px;
|
||||||
|
background: var(--ant-color-bg-container);
|
||||||
|
border: 1px solid var(--ant-color-border-secondary);
|
||||||
|
font-size: 11px;
|
||||||
|
line-height: 1.5;
|
||||||
|
white-space: pre;
|
||||||
|
overflow: auto;
|
||||||
|
max-height: 260px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-pre.fx-agent-pre-compact {
|
||||||
|
max-height: 200px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-pending-group {
|
||||||
|
margin-top: 6px;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-pending-head {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: space-between;
|
||||||
|
gap: 10px;
|
||||||
|
padding: 8px 10px;
|
||||||
|
border-radius: 12px;
|
||||||
|
border: 1px solid var(--ant-color-border-secondary);
|
||||||
|
background: rgba(0, 0, 0, 0.02);
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-pending-list {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-composer {
|
||||||
|
padding: 8px 0 0;
|
||||||
|
background: transparent;
|
||||||
|
border-top: 1px solid var(--ant-color-border-secondary);
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-composer .ant-input {
|
||||||
|
font-size: 12px;
|
||||||
|
line-height: 1.6;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fx-agent-running {
|
||||||
|
margin-top: 4px;
|
||||||
|
padding: 6px 8px;
|
||||||
|
border-radius: 10px;
|
||||||
|
background: rgba(0, 0, 0, 0.03);
|
||||||
|
border: 1px dashed var(--ant-color-border-secondary);
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 10px;
|
||||||
|
}
|
||||||
Reference in New Issue
Block a user