Compare commits

..

16 Commits

Author SHA1 Message Date
shiyu
f4c18f991f chore: update version to v1.7.4 2026-01-19 16:50:36 +08:00
shiyu
58c2cdd440 feat: enforce simultaneous username and password requirement for alist and openlist adapters 2026-01-19 15:58:12 +08:00
dependabot[bot]
7d861ca5f7 chore(deps): bump pyasn1 in the uv group across 1 directory (#92)
Bumps the uv group with 1 update in the / directory: [pyasn1](https://github.com/pyasn1/pyasn1).


Updates `pyasn1` from 0.6.1 to 0.6.2
- [Release notes](https://github.com/pyasn1/pyasn1/releases)
- [Changelog](https://github.com/pyasn1/pyasn1/blob/main/CHANGES.rst)
- [Commits](https://github.com/pyasn1/pyasn1/compare/v0.6.1...v0.6.2)

---
updated-dependencies:
- dependency-name: pyasn1
  dependency-version: 0.6.2
  dependency-type: indirect
  dependency-group: uv
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-01-19 10:06:53 +08:00
shiyu
52bac11760 feat: add create file functionality with modal and context menu integration 2026-01-18 21:31:01 +08:00
shiyu
c441d8776f feat: enhance backup functionality with section selection and import mode options 2026-01-18 21:01:59 +08:00
shiyu
45e0194465 chore: update version to v1.7.3 2026-01-18 18:18:51 +08:00
shiyu
540065f195 feat: implement write_upload_file method for various adapters to handle file uploads 2026-01-18 18:14:04 +08:00
shiyu
4f86e2da4d feat: enhance file upload handling and response normalization in virtual file system 2026-01-18 15:14:25 +08:00
shiyu
31d347d24f feat: add support for filename in public file access and update temp link generation 2026-01-16 20:55:03 +08:00
shiyu
7a9a20509c feat: update system prompt to adjust response language based on user input 2026-01-16 16:29:16 +08:00
shiyu
373b6410c2 feat: add time tool with offset support and update localization for weekday 2026-01-16 15:46:42 +08:00
shiyu
d6eb6e1605 feat: replace Drawer with Modal in AiAgentWidget and enhance styles for better UI 2026-01-16 15:05:53 +08:00
shiyu
1d66fb56c8 feat: update logo.svg 2026-01-16 14:52:53 +08:00
shiyu
bb9589fa62 chore: update version to v1.7.2 in service configuration 2026-01-16 11:20:19 +08:00
shiyu
ab89451b2d feat: implement cron-based automation task scheduling and update task configuration 2026-01-15 15:04:10 +08:00
shiyu
3e1b75d81a feat: add notices feature with modal and API integration 2026-01-14 22:01:29 +08:00
45 changed files with 1926 additions and 480 deletions

View File

@@ -81,8 +81,9 @@ class AListApiAdapterBase:
raise ValueError(f"{product_name} requires base_url http/https") raise ValueError(f"{product_name} requires base_url http/https")
self.username: str = str(cfg.get("username") or "") self.username: str = str(cfg.get("username") or "")
self.password: str = str(cfg.get("password") or "") self.password: str = str(cfg.get("password") or "")
if not self.username or not self.password: if (self.username and not self.password) or (self.password and not self.username):
raise ValueError(f"{product_name} requires username and password") raise ValueError(f"{product_name} requires both username and password")
self.use_auth: bool = bool(self.username and self.password)
self.timeout: float = float(cfg.get("timeout", 30)) self.timeout: float = float(cfg.get("timeout", 30))
self.root_path: str = _normalize_fs_path(str(cfg.get("root") or "/")) self.root_path: str = _normalize_fs_path(str(cfg.get("root") or "/"))
@@ -98,6 +99,8 @@ class AListApiAdapterBase:
return base return base
async def _ensure_token(self) -> str: async def _ensure_token(self) -> str:
if not self.use_auth:
return ""
if self._token: if self._token:
return self._token return self._token
async with self._login_lock: async with self._login_lock:
@@ -137,12 +140,14 @@ class AListApiAdapterBase:
) -> Any: ) -> Any:
token = await self._ensure_token() token = await self._ensure_token()
url = self.base_url + endpoint url = self.base_url + endpoint
req_headers: Dict[str, str] = {"Authorization": token} req_headers: Dict[str, str] = {}
if token:
req_headers["Authorization"] = token
if headers: if headers:
req_headers.update(headers) req_headers.update(headers)
async with httpx.AsyncClient(timeout=self.timeout, follow_redirects=True) as client: async with httpx.AsyncClient(timeout=self.timeout, follow_redirects=True) as client:
resp = await client.request(method, url, json=json, headers=req_headers, files=files) resp = await client.request(method, url, json=json, headers=req_headers, files=files)
if resp.status_code == 401 and retry: if resp.status_code == 401 and retry and self.use_auth:
self._token = None self._token = None
return await self._api_json(method, endpoint, json=json, headers=headers, retry=False, files=files) return await self._api_json(method, endpoint, json=json, headers=headers, retry=False, files=files)
resp.raise_for_status() resp.raise_for_status()
@@ -153,7 +158,7 @@ class AListApiAdapterBase:
code = payload.get("code") code = payload.get("code")
if code in (0, 200): if code in (0, 200):
return payload.get("data") return payload.get("data")
if code in (401, 403) and retry: if code in (401, 403) and retry and self.use_auth:
self._token = None self._token = None
return await self._api_json(method, endpoint, json=json, headers=headers, retry=False, files=files) return await self._api_json(method, endpoint, json=json, headers=headers, retry=False, files=files)
if code == 404: if code == 404:
@@ -349,10 +354,9 @@ class AListApiAdapterBase:
async def _upload_file(self, full_path: str, file_path: Path) -> Any: async def _upload_file(self, full_path: str, file_path: Path) -> Any:
token = await self._ensure_token() token = await self._ensure_token()
headers = { headers = {"File-Path": quote(full_path, safe="/")}
"Authorization": token, if token:
"File-Path": quote(full_path, safe="/"), headers["Authorization"] = token
}
with file_path.open("rb") as f: with file_path.open("rb") as f:
files = {"file": (file_path.name, f, "application/octet-stream")} files = {"file": (file_path.name, f, "application/octet-stream")}
async with httpx.AsyncClient(timeout=self.timeout, follow_redirects=True) as client: async with httpx.AsyncClient(timeout=self.timeout, follow_redirects=True) as client:
@@ -381,6 +385,30 @@ class AListApiAdapterBase:
except Exception: except Exception:
pass pass
async def write_upload_file(self, root: str, rel: str, file_obj, filename: str | None, file_size: int | None = None, content_type: str | None = None):
full_path = _join_fs_path(root, rel)
token = await self._ensure_token()
headers = {"File-Path": quote(full_path, safe="/")}
if token:
headers["Authorization"] = token
name = filename or Path(rel).name or "file"
mime = content_type or "application/octet-stream"
files = {"file": (name, file_obj, mime)}
async with httpx.AsyncClient(timeout=self.timeout, follow_redirects=True) as client:
resp = await client.put(self.base_url + "/api/fs/form", headers=headers, files=files)
resp.raise_for_status()
payload = resp.json()
if not isinstance(payload, dict):
raise HTTPException(502, detail=f"{self.product_name} upload: invalid response")
code = payload.get("code")
if code not in (0, 200):
msg = payload.get("message") or payload.get("msg") or ""
raise HTTPException(502, detail=f"{self.product_name} upload failed: {msg}")
data = payload.get("data")
if isinstance(data, dict) and file_size is not None and "size" not in data:
data["size"] = file_size
return data
async def write_file_stream(self, root: str, rel: str, data_iter: AsyncIterator[bytes]): async def write_file_stream(self, root: str, rel: str, data_iter: AsyncIterator[bytes]):
full_path = _join_fs_path(root, rel) full_path = _join_fs_path(root, rel)
suffix = Path(rel).suffix suffix = Path(rel).suffix
@@ -479,8 +507,8 @@ ADAPTER_TYPES = {"alist": AListAdapter, "openlist": OpenListAdapter}
CONFIG_SCHEMA = [ CONFIG_SCHEMA = [
{"key": "base_url", "label": "基础地址", "type": "string", "required": True, "placeholder": "http://127.0.0.1:5244"}, {"key": "base_url", "label": "基础地址", "type": "string", "required": True, "placeholder": "http://127.0.0.1:5244"},
{"key": "username", "label": "用户名", "type": "string", "required": True}, {"key": "username", "label": "用户名", "type": "string", "required": False, "placeholder": "留空则匿名访问"},
{"key": "password", "label": "密码", "type": "password", "required": True}, {"key": "password", "label": "密码", "type": "password", "required": False, "placeholder": "留空则匿名访问"},
{"key": "root", "label": "根目录", "type": "string", "required": False, "default": "/"}, {"key": "root", "label": "根目录", "type": "string", "required": False, "default": "/"},
{"key": "timeout", "label": "超时(秒)", "type": "number", "required": False, "default": 30}, {"key": "timeout", "label": "超时(秒)", "type": "number", "required": False, "default": 30},
{"key": "enable_direct_download_307", "label": "启用 307 直链下载", "type": "boolean", "default": False}, {"key": "enable_direct_download_307", "label": "启用 307 直链下载", "type": "boolean", "default": False},

View File

@@ -250,6 +250,30 @@ class FoxelAdapter:
return True return True
raise HTTPException(502, detail="Foxel 写入失败") raise HTTPException(502, detail="Foxel 写入失败")
async def write_upload_file(self, root: str, rel: str, file_obj, filename: str | None, file_size: int | None = None, content_type: str | None = None):
rel = (rel or "").lstrip("/")
full_path = _join_fs_path(root, rel)
url = self.base_url + self._file_path(full_path)
name = filename or Path(rel).name or "file"
mime = content_type or "application/octet-stream"
for attempt in range(2):
try:
if callable(getattr(file_obj, "seek", None)):
file_obj.seek(0)
except Exception:
pass
token = await self._ensure_token()
headers = {"Authorization": f"Bearer {token}"}
files = {"file": (name, file_obj, mime)}
async with httpx.AsyncClient(timeout=self.timeout, follow_redirects=True) as client:
resp = await client.post(url, headers=headers, files=files)
if resp.status_code == 401 and attempt == 0:
self._token = None
continue
resp.raise_for_status()
return {"size": file_size or 0}
raise HTTPException(502, detail="Foxel 上传失败")
async def write_file_stream(self, root: str, rel: str, data_iter: AsyncIterator[bytes]): async def write_file_stream(self, root: str, rel: str, data_iter: AsyncIterator[bytes]):
rel = (rel or "").lstrip("/") rel = (rel or "").lstrip("/")
full_path = _join_fs_path(root, rel) full_path = _join_fs_path(root, rel)

View File

@@ -238,6 +238,39 @@ class FTPAdapter:
await asyncio.to_thread(_do_write) await asyncio.to_thread(_do_write)
async def write_upload_file(self, root: str, rel: str, file_obj, filename: str | None, file_size: int | None = None, content_type: str | None = None):
path = _join_remote(root, rel)
def _ensure_dirs(ftp: FTP, dir_path: str):
parts = [p for p in dir_path.strip("/").split("/") if p]
cur = "/"
for p in parts:
cur = _join_remote(cur, p)
try:
ftp.mkd(cur)
except Exception:
pass
def _do_upload():
ftp = self._connect()
try:
parent = "/" if "/" not in path.strip("/") else path.rsplit("/", 1)[0]
_ensure_dirs(ftp, parent)
try:
if callable(getattr(file_obj, "seek", None)):
file_obj.seek(0)
except Exception:
pass
ftp.storbinary("STOR " + path, file_obj)
finally:
try:
ftp.quit()
except Exception:
pass
await asyncio.to_thread(_do_upload)
return {"size": file_size or 0}
async def write_file_stream(self, root: str, rel: str, data_iter: AsyncIterator[bytes]): async def write_file_stream(self, root: str, rel: str, data_iter: AsyncIterator[bytes]):
# KISS: 聚合后一次性写入 # KISS: 聚合后一次性写入
buf = bytearray() buf = bytearray()

View File

@@ -114,6 +114,32 @@ class LocalAdapter:
if not pre_exists: if not pre_exists:
await asyncio.to_thread(_apply_mode, fp, DEFAULT_FILE_MODE) await asyncio.to_thread(_apply_mode, fp, DEFAULT_FILE_MODE)
async def write_upload_file(self, root: str, rel: str, file_obj, filename: str | None, file_size: int | None = None, content_type: str | None = None):
fp = _safe_join(root, rel)
pre_exists = fp.exists()
await asyncio.to_thread(os.makedirs, fp.parent, mode=DEFAULT_DIR_MODE, exist_ok=True)
def _copy():
try:
if callable(getattr(file_obj, "seek", None)):
file_obj.seek(0)
except Exception:
pass
with open(fp, "wb") as f:
shutil.copyfileobj(file_obj, f)
await asyncio.to_thread(_copy)
if not pre_exists:
await asyncio.to_thread(_apply_mode, fp, DEFAULT_FILE_MODE)
size = file_size
if size is None:
try:
size = fp.stat().st_size
except Exception:
size = 0
return {"size": int(size or 0)}
async def write_file_stream(self, root: str, rel: str, data_iter: AsyncIterator[bytes]): async def write_file_stream(self, root: str, rel: str, data_iter: AsyncIterator[bytes]):
fp = _safe_join(root, rel) fp = _safe_join(root, rel)
pre_exists = fp.exists() pre_exists = fp.exists()

View File

@@ -453,6 +453,159 @@ class QuarkAdapter:
yield data yield data
return await self.write_file_stream(root, rel, gen()) return await self.write_file_stream(root, rel, gen())
async def write_upload_file(self, root: str, rel: str, file_obj, filename: str | None, file_size: int | None = None, content_type: str | None = None):
if not rel or rel.endswith("/"):
raise HTTPException(400, detail="Invalid file path")
parent = rel.rsplit("/", 1)[0] if "/" in rel else ""
name = filename or rel.rsplit("/", 1)[-1]
base_fid = root or self.root_fid
parent_fid = await self._resolve_dir_fid_from(base_fid, parent)
md5 = hashlib.md5()
sha1 = hashlib.sha1()
total = 0
try:
if callable(getattr(file_obj, "seek", None)):
file_obj.seek(0)
except Exception:
pass
while True:
chunk = file_obj.read(1024 * 1024)
if not chunk:
break
total += len(chunk)
md5.update(chunk)
sha1.update(chunk)
md5_hex = md5.hexdigest()
sha1_hex = sha1.hexdigest()
# 预上传,拿到上传信息
pre_resp = await self._upload_pre(name, total, parent_fid)
pre_data = pre_resp.get("data", {})
# hash 秒传
hash_body = {"md5": md5_hex, "sha1": sha1_hex, "task_id": pre_data.get("task_id")}
hash_resp = await self._request("POST", "/file/update/hash", json=hash_body)
if (hash_resp.get("data") or {}).get("finish") is True:
self._invalidate_children_cache(parent_fid)
return {"size": total}
# 分片上传
part_size = int((pre_resp.get("metadata") or {}).get("part_size") or 0)
if part_size <= 0:
raise HTTPException(502, detail="Invalid part_size from Quark")
bucket = pre_data.get("bucket")
obj_key = pre_data.get("obj_key")
upload_id = pre_data.get("upload_id")
upload_url = pre_data.get("upload_url")
if not (bucket and obj_key and upload_id and upload_url):
raise HTTPException(502, detail="Upload pre missing fields")
try:
upload_host = upload_url.split("://", 1)[1]
except Exception:
upload_host = upload_url
base_url = f"https://{bucket}.{upload_host}/{obj_key}"
try:
if callable(getattr(file_obj, "seek", None)):
file_obj.seek(0)
except Exception:
pass
etags: List[str] = []
oss_ua = "aliyun-sdk-js/6.6.1 Chrome 98.0.4758.80 on Windows 10 64-bit"
async with httpx.AsyncClient(timeout=None, follow_redirects=True) as client:
part_number = 1
left = total
while left > 0:
sz = min(part_size, left)
data_bytes = file_obj.read(sz)
if len(data_bytes) != sz:
raise IOError("Failed to read part bytes")
now_str = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
auth_meta = (
"PUT\n\n"
f"{self._guess_mime(name)}\n"
f"{now_str}\n"
f"x-oss-date:{now_str}\n"
f"x-oss-user-agent:{oss_ua}\n"
f"/{bucket}/{obj_key}?partNumber={part_number}&uploadId={upload_id}"
)
auth_req_body = {"auth_info": pre_data.get("auth_info"), "auth_meta": auth_meta, "task_id": pre_data.get("task_id")}
auth_resp = await self._request("POST", "/file/upload/auth", json=auth_req_body)
auth_key = (auth_resp.get("data") or {}).get("auth_key")
if not auth_key:
raise HTTPException(502, detail="upload/auth missing auth_key")
put_headers = {
"Authorization": auth_key,
"Content-Type": self._guess_mime(name),
"Referer": REFERER + "/",
"x-oss-date": now_str,
"x-oss-user-agent": oss_ua,
}
put_url = f"{base_url}?partNumber={part_number}&uploadId={upload_id}"
put_resp = await client.put(put_url, headers=put_headers, content=data_bytes)
if put_resp.status_code != 200:
raise HTTPException(502, detail=f"Upload part failed status={put_resp.status_code} text={put_resp.text}")
etag = put_resp.headers.get("Etag", "")
etags.append(etag)
left -= sz
part_number += 1
parts_xml = [f"<Part>\n<PartNumber>{i+1}</PartNumber>\n<ETag>{etags[i]}</ETag>\n</Part>\n" for i in range(len(etags))]
body_xml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<CompleteMultipartUpload>\n" + "".join(parts_xml) + "</CompleteMultipartUpload>"
content_md5 = base64.b64encode(hashlib.md5(body_xml.encode("utf-8")).digest()).decode("ascii")
callback = pre_data.get("callback") or {}
try:
import json as _json
callback_b64 = base64.b64encode(_json.dumps(callback).encode("utf-8")).decode("ascii")
except Exception:
callback_b64 = ""
now_str = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
auth_meta_commit = (
"POST\n"
f"{content_md5}\n"
"application/xml\n"
f"{now_str}\n"
f"x-oss-callback:{callback_b64}\n"
f"x-oss-date:{now_str}\n"
f"x-oss-user-agent:{oss_ua}\n"
f"/{bucket}/{obj_key}?uploadId={upload_id}"
)
auth_commit_resp = await self._request("POST", "/file/upload/auth", json={"auth_info": pre_data.get("auth_info"), "auth_meta": auth_meta_commit, "task_id": pre_data.get("task_id")})
auth_key_commit = (auth_commit_resp.get("data") or {}).get("auth_key")
if not auth_key_commit:
raise HTTPException(502, detail="upload/auth(commit) missing auth_key")
async with httpx.AsyncClient(timeout=None, follow_redirects=True) as client:
commit_headers = {
"Authorization": auth_key_commit,
"Content-MD5": content_md5,
"Content-Type": "application/xml",
"Referer": REFERER + "/",
"x-oss-callback": callback_b64,
"x-oss-date": now_str,
"x-oss-user-agent": oss_ua,
}
commit_url = f"{base_url}?uploadId={upload_id}"
r = await client.post(commit_url, headers=commit_headers, content=body_xml.encode("utf-8"))
if r.status_code != 200:
raise HTTPException(502, detail=f"Upload commit failed status={r.status_code} text={r.text}")
await self._request("POST", "/file/upload/finish", json={"obj_key": obj_key, "task_id": pre_data.get("task_id")})
try:
await asyncio.sleep(1.0)
except Exception:
pass
self._invalidate_children_cache(parent_fid)
return {"size": total}
async def write_file_stream(self, root: str, rel: str, data_iter: AsyncIterator[bytes]): async def write_file_stream(self, root: str, rel: str, data_iter: AsyncIterator[bytes]):
if not rel or rel.endswith("/"): if not rel or rel.endswith("/"):
raise HTTPException(400, detail="Invalid file path") raise HTTPException(400, detail="Invalid file path")

View File

@@ -157,6 +157,41 @@ class SFTPAdapter:
await asyncio.to_thread(_do_write) await asyncio.to_thread(_do_write)
async def write_upload_file(self, root: str, rel: str, file_obj, filename: str | None, file_size: int | None = None, content_type: str | None = None):
path = _join_remote(root, rel)
def _ensure_dirs(sftp: paramiko.SFTPClient, dir_path: str):
parts = [p for p in dir_path.strip("/").split("/") if p]
cur = "/"
for p in parts:
cur = _join_remote(cur, p)
try:
sftp.mkdir(cur)
except IOError:
pass
def _do_upload():
sftp = self._connect()
try:
parent = "/" if "/" not in path.strip("/") else path.rsplit("/", 1)[0]
_ensure_dirs(sftp, parent)
try:
if callable(getattr(file_obj, "seek", None)):
file_obj.seek(0)
except Exception:
pass
with sftp.open(path, "wb") as f:
import shutil
shutil.copyfileobj(file_obj, f)
finally:
try:
sftp.close()
except Exception:
pass
await asyncio.to_thread(_do_upload)
return {"size": file_size or 0}
async def write_file_stream(self, root: str, rel: str, data_iter: AsyncIterator[bytes]): async def write_file_stream(self, root: str, rel: str, data_iter: AsyncIterator[bytes]):
buf = bytearray() buf = bytearray()
async for chunk in data_iter: async for chunk in data_iter:

View File

@@ -21,6 +21,30 @@ def _get_session_lock(session_string: str) -> asyncio.Lock:
_SESSION_LOCKS[session_string] = lock _SESSION_LOCKS[session_string] = lock
return lock return lock
class _NamedFile:
def __init__(self, file_obj, name: str):
self._file = file_obj
self.name = name
def read(self, *args, **kwargs):
return self._file.read(*args, **kwargs)
def seek(self, *args, **kwargs):
return self._file.seek(*args, **kwargs)
def tell(self):
return self._file.tell()
def seekable(self):
return self._file.seekable()
def close(self):
return self._file.close()
def __getattr__(self, name):
return getattr(self._file, name)
# 适配器类型标识 # 适配器类型标识
ADAPTER_TYPE = "telegram" ADAPTER_TYPE = "telegram"
@@ -263,7 +287,48 @@ class TelegramAdapter:
try: try:
await client.connect() await client.connect()
await client.send_file(self.chat_id, file_like, caption=file_like.name) sent = await client.send_file(self.chat_id, file_like, caption=file_like.name)
message = sent[0] if isinstance(sent, list) and sent else sent
actual_rel = rel
if message:
stored_name = file_like.name
file_meta = getattr(message, "file", None)
if file_meta and getattr(file_meta, "name", None):
stored_name = file_meta.name
if getattr(message, "id", None) is not None:
actual_rel = f"{message.id}_{stored_name}"
return {"rel": actual_rel, "size": len(data)}
finally:
if client.is_connected():
await client.disconnect()
async def write_upload_file(self, root: str, rel: str, file_obj, filename: str | None, file_size: int | None = None, content_type: str | None = None):
client = self._get_client()
name = filename or os.path.basename(rel) or "file"
file_like = _NamedFile(file_obj, name)
try:
await client.connect()
sent = await client.send_file(
self.chat_id,
file_like,
caption=file_like.name,
file_size=file_size,
mime_type=content_type,
)
message = sent[0] if isinstance(sent, list) and sent else sent
actual_rel = rel
size = file_size or 0
if message:
stored_name = file_like.name
file_meta = getattr(message, "file", None)
if file_meta and getattr(file_meta, "name", None):
stored_name = file_meta.name
if getattr(message, "id", None) is not None:
actual_rel = f"{message.id}_{stored_name}"
if file_meta and getattr(file_meta, "size", None):
size = int(file_meta.size)
return {"rel": actual_rel, "size": size}
finally: finally:
if client.is_connected(): if client.is_connected():
await client.disconnect() await client.disconnect()
@@ -273,8 +338,9 @@ class TelegramAdapter:
client = self._get_client() client = self._get_client()
filename = os.path.basename(rel) or "file" filename = os.path.basename(rel) or "file"
import tempfile import tempfile
temp_dir = tempfile.gettempdir() suffix = os.path.splitext(filename)[1]
temp_path = os.path.join(temp_dir, filename) with tempfile.NamedTemporaryFile(delete=False, suffix=suffix) as tf:
temp_path = tf.name
total_size = 0 total_size = 0
try: try:
@@ -285,14 +351,23 @@ class TelegramAdapter:
total_size += len(chunk) total_size += len(chunk)
await client.connect() await client.connect()
await client.send_file(self.chat_id, temp_path, caption=filename) sent = await client.send_file(self.chat_id, temp_path, caption=filename)
message = sent[0] if isinstance(sent, list) and sent else sent
actual_rel = rel
if message:
stored_name = filename
file_meta = getattr(message, "file", None)
if file_meta and getattr(file_meta, "name", None):
stored_name = file_meta.name
if getattr(message, "id", None) is not None:
actual_rel = f"{message.id}_{stored_name}"
finally: finally:
if os.path.exists(temp_path): if os.path.exists(temp_path):
os.remove(temp_path) os.remove(temp_path)
if client.is_connected(): if client.is_connected():
await client.disconnect() await client.disconnect()
return total_size return {"rel": actual_rel, "size": total_size}
async def mkdir(self, root: str, rel: str): async def mkdir(self, root: str, rel: str):
raise NotImplementedError("Telegram 适配器不支持创建目录。") raise NotImplementedError("Telegram 适配器不支持创建目录。")

View File

@@ -36,6 +36,11 @@ class AdapterService:
missing.append(k) missing.append(k)
if missing: if missing:
raise HTTPException(400, detail="缺少必填配置字段: " + ", ".join(missing)) raise HTTPException(400, detail="缺少必填配置字段: " + ", ".join(missing))
if adapter_type in ("alist", "openlist"):
username = out.get("username")
password = out.get("password")
if (username and not password) or (password and not username):
raise HTTPException(400, detail="用户名和密码必须同时填写或同时留空")
return out return out
@classmethod @classmethod

View File

@@ -31,6 +31,7 @@ def _build_system_prompt(current_path: Optional[str]) -> str:
"你可以通过工具对文件/目录进行查询、读写、移动、复制、删除以及运行处理器processor", "你可以通过工具对文件/目录进行查询、读写、移动、复制、删除以及运行处理器processor",
"", "",
"可用工具:", "可用工具:",
"- time获取服务器当前时间精确到秒英文星期支持 year/month/day/hour/minute/second 偏移。",
"- vfs_list_dir浏览目录列出 entries + pagination", "- vfs_list_dir浏览目录列出 entries + pagination",
"- vfs_stat查看文件/目录信息。", "- vfs_stat查看文件/目录信息。",
"- vfs_read_text读取文本文件内容不支持二进制", "- vfs_read_text读取文本文件内容不支持二进制",
@@ -50,7 +51,7 @@ def _build_system_prompt(current_path: Optional[str]) -> str:
"3) 用户未给出明确路径时先追问;若提供了“当前文件管理目录”,可以基于它把相对描述补全为绝对路径(以 / 开头)。", "3) 用户未给出明确路径时先追问;若提供了“当前文件管理目录”,可以基于它把相对描述补全为绝对路径(以 / 开头)。",
"4) 修改文件内容先读取vfs_read_text→给出改动点→确认后再写入vfs_write_text", "4) 修改文件内容先读取vfs_read_text→给出改动点→确认后再写入vfs_write_text",
"5) processors_run 返回任务 id 后,说明任务已提交,可在任务队列查看进度。", "5) processors_run 返回任务 id 后,说明任务已提交,可在任务队列查看进度。",
"6) 回答保持简洁中文", "6) 回答语言跟随用户;用户用英文则用英文,用户用中文则用中文。回答尽量简洁",
] ]
if current_path: if current_path:
lines.append("") lines.append("")

View File

@@ -1,5 +1,7 @@
import calendar
import json import json
from dataclasses import dataclass from dataclasses import dataclass
from datetime import datetime, timedelta
from typing import Any, Awaitable, Callable, Dict, List, Optional from typing import Any, Awaitable, Callable, Dict, List, Optional
from domain.processors import ProcessDirectoryRequest, ProcessRequest, ProcessorService from domain.processors import ProcessDirectoryRequest, ProcessRequest, ProcessorService
@@ -16,6 +18,68 @@ class ToolSpec:
handler: Callable[[Dict[str, Any]], Awaitable[Any]] handler: Callable[[Dict[str, Any]], Awaitable[Any]]
def _parse_offset(args: Dict[str, Any], key: str) -> int:
value = args.get(key)
if value is None:
return 0
try:
return int(value)
except (TypeError, ValueError):
return 0
def _add_months(dt: datetime, months: int) -> datetime:
if months == 0:
return dt
total = dt.year * 12 + (dt.month - 1) + months
year = total // 12
month = total % 12 + 1
last_day = calendar.monthrange(year, month)[1]
day = min(dt.day, last_day)
return dt.replace(year=year, month=month, day=day)
async def _time(args: Dict[str, Any]) -> Dict[str, Any]:
now = datetime.now()
year_offset = _parse_offset(args, "year")
month_offset = _parse_offset(args, "month")
day_offset = _parse_offset(args, "day")
hour_offset = _parse_offset(args, "hour")
minute_offset = _parse_offset(args, "minute")
second_offset = _parse_offset(args, "second")
dt = _add_months(now, year_offset * 12 + month_offset)
dt = dt + timedelta(days=day_offset, hours=hour_offset, minutes=minute_offset, seconds=second_offset)
weekday_names = [
"Monday",
"Tuesday",
"Wednesday",
"Thursday",
"Friday",
"Saturday",
"Sunday",
]
weekday = weekday_names[dt.weekday()]
dt_str = dt.strftime("%Y-%m-%d %H:%M:%S")
return {
"ok": True,
"summary": f"{dt_str} · {weekday}",
"data": {
"datetime": dt_str,
"weekday": weekday,
"offset": {
"year": year_offset,
"month": month_offset,
"day": day_offset,
"hour": hour_offset,
"minute": minute_offset,
"second": second_offset,
},
},
}
async def _processors_list(_: Dict[str, Any]) -> Dict[str, Any]: async def _processors_list(_: Dict[str, Any]) -> Dict[str, Any]:
return {"processors": ProcessorService.list_processors()} return {"processors": ProcessorService.list_processors()}
@@ -188,6 +252,27 @@ async def _vfs_search(args: Dict[str, Any]) -> Dict[str, Any]:
TOOLS: Dict[str, ToolSpec] = { TOOLS: Dict[str, ToolSpec] = {
"time": ToolSpec(
name="time",
description=(
"获取服务器当前时间(精确到秒,含英文星期)。"
" 支持 year/month/day/hour/minute/second 偏移(可为负数)。"
),
parameters={
"type": "object",
"properties": {
"year": {"type": "integer", "description": "年偏移(可为负数)"},
"month": {"type": "integer", "description": "月偏移(可为负数)"},
"day": {"type": "integer", "description": "日偏移(可为负数)"},
"hour": {"type": "integer", "description": "时偏移(可为负数)"},
"minute": {"type": "integer", "description": "分偏移(可为负数)"},
"second": {"type": "integer", "description": "秒偏移(可为负数)"},
},
"additionalProperties": False,
},
requires_confirmation=False,
handler=_time,
),
"processors_list": ToolSpec( "processors_list": ToolSpec(
name="processors_list", name="processors_list",
description="获取可用处理器列表type/name/config_schema 等)。", description="获取可用处理器列表type/name/config_schema 等)。",
@@ -401,12 +486,138 @@ def openai_tools() -> List[Dict[str, Any]]:
return out return out
def tool_result_to_content(result: Any) -> str: def _stringify_value(value: Any) -> str:
if result is None: if value is None:
return "" return ""
if isinstance(result, str): if isinstance(value, bool):
return result return "true" if value else "false"
if isinstance(value, (int, float)):
return str(value)
if isinstance(value, str):
return value
try: try:
return json.dumps(result, ensure_ascii=False) return json.dumps(value, ensure_ascii=False)
except TypeError: except TypeError:
return json.dumps({"result": str(result)}, ensure_ascii=False) return str(value)
def _list_to_view_items(items: List[Any]) -> List[Any]:
normalized: List[Any] = []
for item in items:
if isinstance(item, dict):
normalized.append({str(k): _stringify_value(v) for k, v in item.items()})
else:
normalized.append(_stringify_value(item))
return normalized
def _dict_to_kv_items(data: Dict[str, Any]) -> List[Dict[str, str]]:
return [{"key": str(k), "value": _stringify_value(v)} for k, v in data.items()]
def _first_list_field(data: Dict[str, Any]) -> tuple[Optional[str], Optional[List[Any]]]:
for key, value in data.items():
if isinstance(value, list):
return str(key), value
return None, None
def _build_view(data: Any) -> Dict[str, Any]:
if data is None:
return {"type": "kv", "items": []}
if isinstance(data, str):
return {"type": "text", "text": data}
if isinstance(data, list):
return {"type": "list", "items": _list_to_view_items(data)}
if isinstance(data, dict):
content = data.get("content")
if isinstance(content, str):
meta = {k: _stringify_value(v) for k, v in data.items() if k != "content"}
view: Dict[str, Any] = {"type": "text", "text": content}
if meta:
view["meta"] = meta
return view
list_key, list_val = _first_list_field(data)
if list_key and isinstance(list_val, list):
meta = {k: _stringify_value(v) for k, v in data.items() if k != list_key}
view = {"type": "list", "title": list_key, "items": _list_to_view_items(list_val)}
if meta:
view["meta"] = meta
return view
return {"type": "kv", "items": _dict_to_kv_items(data)}
return {"type": "text", "text": _stringify_value(data)}
def _build_summary(view: Dict[str, Any]) -> str:
view_type = str(view.get("type") or "")
if view_type == "text":
text = view.get("text")
size = len(text) if isinstance(text, str) else 0
return f"chars: {size}" if size else "text"
if view_type == "list":
items = view.get("items")
count = len(items) if isinstance(items, list) else 0
title = str(view.get("title") or "items")
return f"{title}: {count}"
if view_type == "kv":
items = view.get("items")
count = len(items) if isinstance(items, list) else 0
return f"fields: {count}"
if view_type == "error":
return str(view.get("message") or "error")
return ""
def _build_error_payload(code: str, message: str, detail: Any = None) -> Dict[str, Any]:
summary = "Canceled" if code == "canceled" else message or "error"
view = {"type": "error", "message": summary}
payload: Dict[str, Any] = {
"ok": False,
"summary": summary,
"view": view,
"error": {
"code": code,
"message": message,
},
}
if detail is not None:
payload["error"]["detail"] = detail
return payload
def _normalize_tool_result(result: Any) -> Dict[str, Any]:
    """Normalize any raw tool return value into the standard envelope.

    The envelope always carries ``ok``, ``summary`` and ``view``; successful
    raw values additionally keep the original under ``data``, failures under
    ``error`` (see ``_build_error_payload``).

    Fix: the ``ok is False`` branch previously used ``setdefault``, so an
    explicit ``"view": None`` or ``"summary": ""`` in the payload survived
    untouched, while the success branch replaced them.  Both branches now
    treat a ``None`` view and a falsy summary as missing.
    """
    if isinstance(result, dict) and "ok" in result:
        payload = dict(result)
        if payload.get("ok") is False:
            error = payload.get("error")
            message = _stringify_value(error.get("message") if isinstance(error, dict) else error)
            if not payload.get("summary"):
                payload["summary"] = message or "error"
            if payload.get("view") is None:
                payload["view"] = {"type": "error", "message": payload["summary"]}
            return payload
        data = payload.get("data")
        if payload.get("view") is None:
            payload["view"] = _build_view(data)
        if not payload.get("summary"):
            payload["summary"] = _build_summary(payload["view"])
        return payload
    if isinstance(result, dict) and result.get("canceled"):
        reason = _stringify_value(result.get("reason") or "canceled")
        return _build_error_payload("canceled", reason, detail=result)
    if isinstance(result, dict) and "error" in result:
        error = result.get("error")
        message = _stringify_value(error.get("message") if isinstance(error, dict) else error)
        return _build_error_payload("error", message, detail=error)
    view = _build_view(result)
    return {"ok": True, "summary": _build_summary(view), "view": view, "data": result}
def tool_result_to_content(result: Any) -> str:
    """Serialize a tool result into a JSON string for the model context.

    The result is first normalized into the standard envelope; non-JSON
    values are stringified via ``default=str``.  If serialization still
    fails (e.g. non-string dict keys, which ``default`` does not cover),
    a minimal error envelope is returned instead.
    """
    normalized = _normalize_tool_result(result)
    try:
        return json.dumps(normalized, ensure_ascii=False, default=str)
    except TypeError:
        fallback = {
            "ok": False,
            "summary": "error",
            "view": {"type": "error", "message": "error"},
        }
        return json.dumps(fallback, ensure_ascii=False)

View File

@@ -1,6 +1,6 @@
import datetime import datetime
from fastapi import APIRouter, Depends, File, Request, UploadFile from fastapi import APIRouter, Depends, File, Form, Query, Request, UploadFile
from fastapi.responses import JSONResponse from fastapi.responses import JSONResponse
from domain.audit import AuditAction, audit from domain.audit import AuditAction, audit
@@ -16,8 +16,10 @@ router = APIRouter(
@router.get("/export", summary="导出全站数据") @router.get("/export", summary="导出全站数据")
@audit(action=AuditAction.DOWNLOAD, description="导出备份") @audit(action=AuditAction.DOWNLOAD, description="导出备份")
async def export_backup(request: Request): async def export_backup(
data = await BackupService.export_data() request: Request, sections: list[str] | None = Query(default=None)
):
data = await BackupService.export_data(sections=sections)
timestamp = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S") timestamp = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
headers = {"Content-Disposition": f"attachment; filename=foxel_backup_{timestamp}.json"} headers = {"Content-Disposition": f"attachment; filename=foxel_backup_{timestamp}.json"}
return JSONResponse(content=data.model_dump(), headers=headers) return JSONResponse(content=data.model_dump(), headers=headers)
@@ -25,6 +27,10 @@ async def export_backup(request: Request):
@router.post("/import", summary="导入数据") @router.post("/import", summary="导入数据")
@audit(action=AuditAction.UPLOAD, description="导入备份") @audit(action=AuditAction.UPLOAD, description="导入备份")
async def import_backup(request: Request, file: UploadFile = File(...)): async def import_backup(
await BackupService.import_from_bytes(file.filename, await file.read()) request: Request,
file: UploadFile = File(...),
mode: str = Form("replace"),
):
await BackupService.import_from_bytes(file.filename, await file.read(), mode=mode)
return {"message": "数据导入成功。"} return {"message": "数据导入成功。"}

View File

@@ -20,18 +20,64 @@ from models.database import (
class BackupService: class BackupService:
ALL_SECTIONS = (
"storage_adapters",
"user_accounts",
"automation_tasks",
"share_links",
"configurations",
"ai_providers",
"ai_models",
"ai_default_models",
"plugins",
)
@classmethod @classmethod
async def export_data(cls) -> BackupData: async def export_data(cls, sections: list[str] | None = None) -> BackupData:
sections = cls._normalize_sections(sections)
section_set = set(sections)
async with in_transaction(): async with in_transaction():
adapters = await StorageAdapter.all().values() adapters = (
users = await UserAccount.all().values() await StorageAdapter.all().values()
tasks = await AutomationTask.all().values() if "storage_adapters" in section_set
shares = await ShareLink.all().values() else []
configs = await Configuration.all().values() )
providers = await AIProvider.all().values() users = (
models = await AIModel.all().values() await UserAccount.all().values()
default_models = await AIDefaultModel.all().values() if "user_accounts" in section_set
plugins = await Plugin.all().values() else []
)
tasks = (
await AutomationTask.all().values()
if "automation_tasks" in section_set
else []
)
shares = (
await ShareLink.all().values()
if "share_links" in section_set
else []
)
configs = (
await Configuration.all().values()
if "configurations" in section_set
else []
)
providers = (
await AIProvider.all().values()
if "ai_providers" in section_set
else []
)
models = (
await AIModel.all().values() if "ai_models" in section_set else []
)
default_models = (
await AIDefaultModel.all().values()
if "ai_default_models" in section_set
else []
)
plugins = (
await Plugin.all().values() if "plugins" in section_set else []
)
share_links = cls._serialize_datetime_fields( share_links = cls._serialize_datetime_fields(
shares, ["created_at", "expires_at"] shares, ["created_at", "expires_at"]
@@ -51,6 +97,7 @@ class BackupService:
return BackupData( return BackupData(
version=VERSION, version=VERSION,
sections=sections,
storage_adapters=list(adapters), storage_adapters=list(adapters),
user_accounts=list(users), user_accounts=list(users),
automation_tasks=list(tasks), automation_tasks=list(tasks),
@@ -63,106 +110,195 @@ class BackupService:
) )
@classmethod @classmethod
async def import_from_bytes(cls, filename: str, content: bytes) -> None: async def import_from_bytes(
cls, filename: str, content: bytes, mode: str = "replace"
) -> None:
if not filename.endswith(".json"): if not filename.endswith(".json"):
raise HTTPException(status_code=400, detail="无效的文件类型, 请上传 .json 文件") raise HTTPException(status_code=400, detail="无效的文件类型, 请上传 .json 文件")
try: try:
raw_data = json.loads(content) raw_data = json.loads(content)
except Exception: except Exception:
raise HTTPException(status_code=400, detail="无法解析JSON文件") raise HTTPException(status_code=400, detail="无法解析JSON文件")
await cls.import_data(BackupData(**raw_data)) await cls.import_data(BackupData(**raw_data), mode=mode)
@classmethod @classmethod
async def import_data(cls, payload: BackupData) -> None: async def import_data(cls, payload: BackupData, mode: str = "replace") -> None:
sections = cls._normalize_sections(payload.sections)
if mode not in {"replace", "merge"}:
raise HTTPException(status_code=400, detail="无效的导入模式")
share_links = (
cls._parse_datetime_fields(payload.share_links, ["created_at", "expires_at"])
if payload.share_links
else []
)
ai_providers = (
cls._parse_datetime_fields(payload.ai_providers, ["created_at", "updated_at"])
if payload.ai_providers
else []
)
ai_models = (
cls._parse_datetime_fields(payload.ai_models, ["created_at", "updated_at"])
if payload.ai_models
else []
)
ai_default_models = (
cls._parse_datetime_fields(
payload.ai_default_models, ["created_at", "updated_at"]
)
if payload.ai_default_models
else []
)
plugins = (
cls._parse_datetime_fields(payload.plugins, ["created_at", "updated_at"])
if payload.plugins
else []
)
async with in_transaction() as conn: async with in_transaction() as conn:
await ShareLink.all().using_db(conn).delete() if mode == "replace":
await AutomationTask.all().using_db(conn).delete() if "share_links" in sections:
await StorageAdapter.all().using_db(conn).delete() await ShareLink.all().using_db(conn).delete()
await UserAccount.all().using_db(conn).delete() if "automation_tasks" in sections:
await Configuration.all().using_db(conn).delete() await AutomationTask.all().using_db(conn).delete()
await AIDefaultModel.all().using_db(conn).delete() if "storage_adapters" in sections:
await AIModel.all().using_db(conn).delete() await StorageAdapter.all().using_db(conn).delete()
await AIProvider.all().using_db(conn).delete() if "user_accounts" in sections:
await Plugin.all().using_db(conn).delete() await UserAccount.all().using_db(conn).delete()
if "configurations" in sections:
await Configuration.all().using_db(conn).delete()
if "ai_default_models" in sections:
await AIDefaultModel.all().using_db(conn).delete()
if "ai_models" in sections:
await AIModel.all().using_db(conn).delete()
if "ai_providers" in sections:
await AIProvider.all().using_db(conn).delete()
if "plugins" in sections:
await Plugin.all().using_db(conn).delete()
if payload.configurations: if "configurations" in sections and payload.configurations:
await Configuration.bulk_create( if mode == "merge":
[Configuration(**config) for config in payload.configurations], await cls._merge_records(
using_db=conn, Configuration, payload.configurations, conn
) )
else:
await Configuration.bulk_create(
[Configuration(**config) for config in payload.configurations],
using_db=conn,
)
if payload.user_accounts: if "user_accounts" in sections and payload.user_accounts:
await UserAccount.bulk_create( if mode == "merge":
[UserAccount(**user) for user in payload.user_accounts], await cls._merge_records(UserAccount, payload.user_accounts, conn)
using_db=conn, else:
) await UserAccount.bulk_create(
[UserAccount(**user) for user in payload.user_accounts],
using_db=conn,
)
if payload.storage_adapters: if "storage_adapters" in sections and payload.storage_adapters:
await StorageAdapter.bulk_create( if mode == "merge":
[StorageAdapter(**adapter) for adapter in payload.storage_adapters], await cls._merge_records(
using_db=conn, StorageAdapter, payload.storage_adapters, conn
) )
else:
await StorageAdapter.bulk_create(
[StorageAdapter(**adapter) for adapter in payload.storage_adapters],
using_db=conn,
)
if payload.automation_tasks: if "automation_tasks" in sections and payload.automation_tasks:
await AutomationTask.bulk_create( if mode == "merge":
[AutomationTask(**task) for task in payload.automation_tasks], await cls._merge_records(
using_db=conn, AutomationTask, payload.automation_tasks, conn
) )
else:
await AutomationTask.bulk_create(
[AutomationTask(**task) for task in payload.automation_tasks],
using_db=conn,
)
if payload.share_links: if "share_links" in sections and share_links:
await ShareLink.bulk_create( if mode == "merge":
[ await cls._merge_records(ShareLink, share_links, conn)
ShareLink(**share) else:
for share in cls._parse_datetime_fields( await ShareLink.bulk_create(
payload.share_links, ["created_at", "expires_at"] [ShareLink(**share) for share in share_links],
) using_db=conn,
], )
using_db=conn,
)
if payload.ai_providers: if "ai_providers" in sections and ai_providers:
await AIProvider.bulk_create( if mode == "merge":
[ await cls._merge_records(AIProvider, ai_providers, conn)
AIProvider(**item) else:
for item in cls._parse_datetime_fields( await AIProvider.bulk_create(
payload.ai_providers, ["created_at", "updated_at"] [AIProvider(**item) for item in ai_providers],
) using_db=conn,
], )
using_db=conn,
)
if payload.ai_models: if "ai_models" in sections and ai_models:
await AIModel.bulk_create( if mode == "merge":
[ await cls._merge_records(AIModel, ai_models, conn)
AIModel(**item) else:
for item in cls._parse_datetime_fields( await AIModel.bulk_create(
payload.ai_models, ["created_at", "updated_at"] [AIModel(**item) for item in ai_models],
) using_db=conn,
], )
using_db=conn,
)
if payload.ai_default_models: if "ai_default_models" in sections and ai_default_models:
await AIDefaultModel.bulk_create( if mode == "merge":
[ await cls._merge_records(
AIDefaultModel(**item) AIDefaultModel, ai_default_models, conn
for item in cls._parse_datetime_fields( )
payload.ai_default_models, ["created_at", "updated_at"] else:
) await AIDefaultModel.bulk_create(
], [AIDefaultModel(**item) for item in ai_default_models],
using_db=conn, using_db=conn,
) )
if payload.plugins: if "plugins" in sections and plugins:
await Plugin.bulk_create( if mode == "merge":
[ await cls._merge_records(Plugin, plugins, conn)
Plugin(**item) else:
for item in cls._parse_datetime_fields( await Plugin.bulk_create(
payload.plugins, ["created_at", "updated_at"] [Plugin(**item) for item in plugins],
) using_db=conn,
], )
using_db=conn,
) @classmethod
def _normalize_sections(cls, sections: list[str] | None) -> list[str]:
if not sections:
return list(cls.ALL_SECTIONS)
normalized = [item for item in sections if item]
invalid = [item for item in normalized if item not in cls.ALL_SECTIONS]
if invalid:
raise HTTPException(
status_code=400, detail=f"无效的备份分区: {', '.join(invalid)}"
)
result: list[str] = []
seen = set()
for item in normalized:
if item in seen:
continue
seen.add(item)
result.append(item)
return result
@staticmethod
async def _merge_records(model, records: list[dict], using_db) -> None:
for record in records:
data = dict(record)
record_id = data.pop("id", None)
if record_id is None:
await model.create(using_db=using_db, **data)
continue
updated = (
await model.filter(id=record_id)
.using_db(using_db)
.update(**data)
)
if updated == 0:
await model.create(using_db=using_db, id=record_id, **data)
@staticmethod @staticmethod
def _serialize_datetime_fields( def _serialize_datetime_fields(

View File

@@ -5,6 +5,7 @@ from pydantic import BaseModel, Field
class BackupData(BaseModel): class BackupData(BaseModel):
version: str | None = None version: str | None = None
sections: list[str] = Field(default_factory=list)
storage_adapters: list[dict[str, Any]] = Field(default_factory=list) storage_adapters: list[dict[str, Any]] = Field(default_factory=list)
user_accounts: list[dict[str, Any]] = Field(default_factory=list) user_accounts: list[dict[str, Any]] = Field(default_factory=list)
automation_tasks: list[dict[str, Any]] = Field(default_factory=list) automation_tasks: list[dict[str, Any]] = Field(default_factory=list)

View File

@@ -10,7 +10,7 @@ from models.database import Configuration, UserAccount
load_dotenv(dotenv_path=".env") load_dotenv(dotenv_path=".env")
VERSION = "v1.7.1" VERSION = "v1.7.4"
class ConfigService: class ConfigService:

View File

@@ -1,4 +1,5 @@
from .service import TaskService from .service import TaskService
from .scheduler import task_scheduler
from .task_queue import Task, TaskProgress, TaskStatus, task_queue_service from .task_queue import Task, TaskProgress, TaskStatus, task_queue_service
from .types import ( from .types import (
AutomationTaskBase, AutomationTaskBase,
@@ -15,6 +16,7 @@ __all__ = [
"TaskProgress", "TaskProgress",
"TaskStatus", "TaskStatus",
"task_queue_service", "task_queue_service",
"task_scheduler",
"AutomationTaskBase", "AutomationTaskBase",
"AutomationTaskCreate", "AutomationTaskCreate",
"AutomationTaskRead", "AutomationTaskRead",

View File

@@ -59,8 +59,7 @@ async def get_task_status(task_id: str, request: Request, current_user: CurrentU
body_fields=[ body_fields=[
"name", "name",
"event", "event",
"path_pattern", "trigger_config",
"filename_regex",
"processor_type", "processor_type",
"processor_config", "processor_config",
"enabled", "enabled",
@@ -93,8 +92,7 @@ async def list_tasks(request: Request, current_user: CurrentUser):
body_fields=[ body_fields=[
"name", "name",
"event", "event",
"path_pattern", "trigger_config",
"filename_regex",
"processor_type", "processor_type",
"processor_config", "processor_config",
"enabled", "enabled",

102
domain/tasks/scheduler.py Normal file
View File

@@ -0,0 +1,102 @@
import asyncio
import logging
from dataclasses import dataclass
from datetime import datetime

from croniter import croniter

from models.database import AutomationTask

from .task_queue import task_queue_service
@dataclass
class CronTaskItem:
    """In-memory schedule entry for one enabled cron automation task."""

    task_id: int  # AutomationTask primary key, passed along to the queued job
    processor_type: str  # processor to enqueue when the schedule fires
    path: str  # virtual-FS path the processor runs against
    cron: croniter  # iterator producing successive fire times
    next_run: datetime  # next pending fire time (naive local time)
class AutomationTaskScheduler:
    """Background scheduler for cron-triggered automation tasks.

    Loads enabled tasks with ``event == "cron"`` from the database into an
    in-memory list of ``CronTaskItem`` entries, polls once per second, and
    enqueues due tasks onto ``task_queue_service``.  ``refresh()`` asks the
    running loop to reload its snapshot after task definitions change.

    Fix: the polling loop previously had no error handling, so a single
    transient failure in ``_load_tasks`` (DB hiccup) or ``add_task`` killed
    the worker task silently and cron scheduling stopped for good.  Each
    iteration is now guarded and failures are logged.
    """

    def __init__(self):
        self._items: list[CronTaskItem] = []  # current schedule snapshot
        self._worker: asyncio.Task | None = None  # polling loop task
        self._reload_event = asyncio.Event()  # set -> reload on next tick
        self._stop_event = asyncio.Event()  # set -> exit the loop

    async def start(self) -> None:
        """Load cron tasks and start the polling loop (idempotent)."""
        if self._worker and not self._worker.done():
            return
        self._stop_event.clear()
        # A stale reload request from a previous run must not survive restart.
        self._reload_event.clear()
        await self._load_tasks()
        self._worker = asyncio.create_task(self._run_loop())

    async def stop(self) -> None:
        """Signal the loop to exit and wait for the worker to finish."""
        if not self._worker:
            return
        self._stop_event.set()
        self._reload_event.set()
        await self._worker
        self._worker = None

    def refresh(self) -> None:
        """Request a reload of task definitions on the next loop tick."""
        if self._worker and not self._worker.done():
            self._reload_event.set()

    async def _load_tasks(self) -> None:
        # Rebuild the schedule snapshot.  Entries with a missing/invalid cron
        # expression or path are skipped rather than failing the whole reload.
        tasks = await AutomationTask.filter(event="cron", enabled=True)
        items: list[CronTaskItem] = []
        now = datetime.now()
        for task in tasks:
            trigger = task.trigger_config or {}
            if not isinstance(trigger, dict):
                continue
            cron_expr = trigger.get("cron_expr")
            path = trigger.get("path")
            if not cron_expr or not path:
                continue
            cron = self._build_cron(cron_expr, now)
            if not cron:
                continue
            items.append(
                CronTaskItem(
                    task_id=task.id,
                    processor_type=task.processor_type,
                    path=path,
                    cron=cron,
                    next_run=cron.get_next(datetime),
                )
            )
        self._items = items

    def _build_cron(self, expr: str, base_time: datetime) -> "croniter | None":
        """Parse a 5-field cron expression (or 6-field seconds-first variant).

        Returns ``None`` for empty, malformed or unparseable expressions.
        """
        expr = str(expr or "").strip()
        if not expr:
            return None
        parts = [p for p in expr.split() if p]
        if len(parts) not in (5, 6):
            return None
        try:
            return croniter(expr, base_time, second_at_beginning=(len(parts) == 6))
        except Exception:
            return None

    async def _run_loop(self) -> None:
        # Poll once per second until stop is requested.  Errors while
        # reloading or enqueueing are logged and swallowed so one bad
        # iteration cannot terminate the scheduler.
        log = logging.getLogger(__name__)
        while not self._stop_event.is_set():
            try:
                if self._reload_event.is_set():
                    self._reload_event.clear()
                    await self._load_tasks()
                now = datetime.now()
                for item in list(self._items):
                    if item.next_run <= now:
                        await task_queue_service.add_task(
                            item.processor_type,
                            {"task_id": item.task_id, "path": item.path},
                        )
                        item.next_run = item.cron.get_next(datetime)
            except Exception:
                log.exception("automation cron scheduler iteration failed")
            await asyncio.sleep(1)
# Module-level singleton; TaskService calls .refresh() on it after cron task
# definitions are created, updated or deleted.
task_scheduler = AutomationTaskScheduler()

View File

@@ -5,6 +5,7 @@ from fastapi import Depends, HTTPException
from domain.auth import User, get_current_active_user from domain.auth import User, get_current_active_user
from domain.config import ConfigService from domain.config import ConfigService
from .scheduler import task_scheduler
from .task_queue import task_queue_service from .task_queue import task_queue_service
from .types import ( from .types import (
AutomationTaskCreate, AutomationTaskCreate,
@@ -46,6 +47,7 @@ class TaskService:
@classmethod @classmethod
async def create_task(cls, payload: AutomationTaskCreate, user: Optional[User]) -> AutomationTask: async def create_task(cls, payload: AutomationTaskCreate, user: Optional[User]) -> AutomationTask:
task = await AutomationTask.create(**payload.model_dump()) task = await AutomationTask.create(**payload.model_dump())
task_scheduler.refresh()
return task return task
@classmethod @classmethod
@@ -69,6 +71,7 @@ class TaskService:
for key, value in update_data.items(): for key, value in update_data.items():
setattr(task, key, value) setattr(task, key, value)
await task.save() await task.save()
task_scheduler.refresh()
return task return task
@classmethod @classmethod
@@ -76,6 +79,7 @@ class TaskService:
deleted_count = await AutomationTask.filter(id=task_id).delete() deleted_count = await AutomationTask.filter(id=task_id).delete()
if not deleted_count: if not deleted_count:
raise HTTPException(status_code=404, detail=f"Task {task_id} not found") raise HTTPException(status_code=404, detail=f"Task {task_id} not found")
task_scheduler.refresh()
@classmethod @classmethod
async def trigger_tasks(cls, event: str, path: str): async def trigger_tasks(cls, event: str, path: str):
@@ -86,11 +90,16 @@ class TaskService:
@classmethod @classmethod
def match(cls, task: AutomationTask, path: str) -> bool: def match(cls, task: AutomationTask, path: str) -> bool:
if task.path_pattern and not path.startswith(task.path_pattern): trigger_config = task.trigger_config or {}
if not isinstance(trigger_config, dict):
trigger_config = {}
path_prefix = trigger_config.get("path_prefix")
filename_regex = trigger_config.get("filename_regex")
if path_prefix and not path.startswith(path_prefix):
return False return False
if task.filename_regex: if filename_regex:
filename = path.split("/")[-1] filename = path.split("/")[-1]
if not re.match(task.filename_regex, filename): if not re.match(filename_regex, filename):
return False return False
return True return True

View File

@@ -88,32 +88,27 @@ class TaskQueueService:
task.result = result task.result = result
elif task.name == "automation_task" or self._is_processor_task(task.name): elif task.name == "automation_task" or self._is_processor_task(task.name):
from models.database import AutomationTask from models.database import AutomationTask
from domain.processors import get_processor
params = task.task_info params = task.task_info
auto_task = await AutomationTask.get(id=params["task_id"]) auto_task = await AutomationTask.get(id=params["task_id"])
path = params["path"] path = params["path"]
processor_type = auto_task.processor_type if task.name == "automation_task" else task.name processor_type = auto_task.processor_type
processor = get_processor(processor_type) config = auto_task.processor_config or {}
if not processor: save_to = config.get("save_to") if isinstance(config, dict) else None
raise ValueError(f"Processor {processor_type} not found for task {auto_task.id}") overwrite = bool(config.get("overwrite")) if isinstance(config, dict) else False
try:
if processor_type != auto_task.processor_type: if await VirtualFSService.path_is_directory(path):
processor_type = auto_task.processor_type overwrite = True
processor = get_processor(processor_type) except Exception:
if not processor: pass
raise ValueError(f"Processor {processor_type} not found for task {auto_task.id}") await VirtualFSService.process_file(
path=path,
requires_input_bytes = bool(getattr(processor, "requires_input_bytes", True)) processor_type=processor_type,
file_content = b"" config=config if isinstance(config, dict) else {},
if requires_input_bytes: save_to=save_to,
file_content = await VirtualFSService.read_file(path) overwrite=overwrite,
result = await processor.process(file_content, path, auto_task.processor_config) )
save_to = auto_task.processor_config.get("save_to")
if save_to and getattr(processor, "produces_file", False):
await VirtualFSService.write_file(save_to, result)
task.result = "Automation task completed" task.result = "Automation task completed"
elif task.name == "offline_http_download": elif task.name == "offline_http_download":
from domain.offline_downloads import OfflineDownloadService from domain.offline_downloads import OfflineDownloadService
@@ -129,7 +124,6 @@ class TaskQueueService:
task.result = "Email sent" task.result = "Email sent"
else: else:
raise ValueError(f"Unknown task name: {task.name}") raise ValueError(f"Unknown task name: {task.name}")
task.status = TaskStatus.SUCCESS task.status = TaskStatus.SUCCESS
except Exception as e: except Exception as e:

View File

@@ -6,8 +6,7 @@ from pydantic import BaseModel, Field
class AutomationTaskBase(BaseModel): class AutomationTaskBase(BaseModel):
name: str name: str
event: str event: str
path_pattern: Optional[str] = None trigger_config: Dict[str, Any] = {}
filename_regex: Optional[str] = None
processor_type: str processor_type: str
processor_config: Dict[str, Any] = {} processor_config: Dict[str, Any] = {}
enabled: bool = True enabled: bool = True
@@ -22,6 +21,7 @@ class AutomationTaskUpdate(AutomationTaskBase):
event: Optional[str] = None event: Optional[str] = None
processor_type: Optional[str] = None processor_type: Optional[str] = None
processor_config: Optional[Dict[str, Any]] = None processor_config: Optional[Dict[str, Any]] = None
trigger_config: Optional[Dict[str, Any]] = None
enabled: Optional[bool] = None enabled: Optional[bool] = None

View File

@@ -63,6 +63,16 @@ async def access_public_file(
return await VirtualFSService.access_public_file(token, request.headers.get("Range")) return await VirtualFSService.access_public_file(token, request.headers.get("Range"))
@router.get("/public/{token}/{filename}")
@audit(action=AuditAction.DOWNLOAD, description="访问临时链接文件")
async def access_public_file_with_name(
token: str,
filename: str,
request: Request,
):
return await VirtualFSService.access_public_file(token, request.headers.get("Range"))
@router.get("/stat/{full_path:path}") @router.get("/stat/{full_path:path}")
@audit(action=AuditAction.READ, description="查看文件信息") @audit(action=AuditAction.READ, description="查看文件信息")
async def get_file_stat( async def get_file_stat(

View File

@@ -11,6 +11,29 @@ from .listing import VirtualFSListingMixin
class VirtualFSFileOpsMixin(VirtualFSListingMixin): class VirtualFSFileOpsMixin(VirtualFSListingMixin):
@classmethod
def _normalize_written_result(
cls,
original_path: str,
adapter_model: Any,
result: Any,
size_hint: int,
) -> tuple[str, int]:
final_path = original_path
size = size_hint
if isinstance(result, dict):
rel_override = result.get("rel")
if isinstance(rel_override, str) and rel_override:
final_path = cls._build_absolute_path(adapter_model.path, rel_override)
else:
path_override = result.get("path")
if isinstance(path_override, str) and path_override:
final_path = cls._normalize_path(path_override)
size_val = result.get("size")
if isinstance(size_val, int):
size = size_val
return final_path, size
@classmethod @classmethod
async def read_file(cls, path: str) -> Union[bytes, Any]: async def read_file(cls, path: str) -> Union[bytes, Any]:
adapter_instance, _, root, rel = await cls.resolve_adapter_and_rel(path) adapter_instance, _, root, rel = await cls.resolve_adapter_and_rel(path)
@@ -21,16 +44,18 @@ class VirtualFSFileOpsMixin(VirtualFSListingMixin):
@classmethod @classmethod
async def write_file(cls, path: str, data: bytes): async def write_file(cls, path: str, data: bytes):
adapter_instance, _, root, rel = await cls.resolve_adapter_and_rel(path) adapter_instance, adapter_model, root, rel = await cls.resolve_adapter_and_rel(path)
if rel.endswith("/"): if rel.endswith("/"):
raise HTTPException(400, detail="Invalid file path") raise HTTPException(400, detail="Invalid file path")
write_func = await cls._ensure_method(adapter_instance, "write_file") write_func = await cls._ensure_method(adapter_instance, "write_file")
await write_func(root, rel, data) result = await write_func(root, rel, data)
await TaskService.trigger_tasks("file_written", path) final_path, size = cls._normalize_written_result(path, adapter_model, result, len(data))
await TaskService.trigger_tasks("file_written", final_path)
return {"path": final_path, "size": size}
@classmethod @classmethod
async def write_file_stream(cls, path: str, data_iter: AsyncIterator[bytes], overwrite: bool = True): async def write_file_stream(cls, path: str, data_iter: AsyncIterator[bytes], overwrite: bool = True):
adapter_instance, _, root, rel = await cls.resolve_adapter_and_rel(path) adapter_instance, adapter_model, root, rel = await cls.resolve_adapter_and_rel(path)
if rel.endswith("/"): if rel.endswith("/"):
raise HTTPException(400, detail="Invalid file path") raise HTTPException(400, detail="Invalid file path")
exists_func = getattr(adapter_instance, "exists", None) exists_func = getattr(adapter_instance, "exists", None)
@@ -46,18 +71,23 @@ class VirtualFSFileOpsMixin(VirtualFSListingMixin):
size = 0 size = 0
stream_func = getattr(adapter_instance, "write_file_stream", None) stream_func = getattr(adapter_instance, "write_file_stream", None)
if callable(stream_func): if callable(stream_func):
size = await stream_func(root, rel, data_iter) result = await stream_func(root, rel, data_iter)
if isinstance(result, dict):
size = int(result.get("size") or 0)
else:
size = int(result or 0)
else: else:
buf = bytearray() buf = bytearray()
async for chunk in data_iter: async for chunk in data_iter:
if chunk: if chunk:
buf.extend(chunk) buf.extend(chunk)
write_func = await cls._ensure_method(adapter_instance, "write_file") write_func = await cls._ensure_method(adapter_instance, "write_file")
await write_func(root, rel, bytes(buf)) result = await write_func(root, rel, bytes(buf))
size = len(buf) size = len(buf)
await TaskService.trigger_tasks("file_written", path) final_path, size = cls._normalize_written_result(path, adapter_model, result, size)
return size await TaskService.trigger_tasks("file_written", final_path)
return {"path": final_path, "size": size}
@classmethod @classmethod
async def make_dir(cls, path: str): async def make_dir(cls, path: str):

View File

@@ -225,7 +225,10 @@ class VirtualFSListingMixin(VirtualFSResolverMixin):
stat_func = getattr(adapter_instance, "stat_file", None) stat_func = getattr(adapter_instance, "stat_file", None)
if not callable(stat_func): if not callable(stat_func):
raise HTTPException(501, detail="Adapter does not implement stat_file") raise HTTPException(501, detail="Adapter does not implement stat_file")
info = await stat_func(root, rel) try:
info = await stat_func(root, rel)
except FileNotFoundError as exc:
raise HTTPException(404, detail=str(exc))
if isinstance(info, dict): if isinstance(info, dict):
info.setdefault("path", path) info.setdefault("path", path)

View File

@@ -1,10 +1,12 @@
import mimetypes import mimetypes
import re import re
from urllib.parse import quote
from fastapi import HTTPException, UploadFile from fastapi import HTTPException, UploadFile
from fastapi.responses import Response from fastapi.responses import Response
from domain.config import ConfigService from domain.config import ConfigService
from domain.tasks import TaskService
from .thumbnail import ( from .thumbnail import (
get_or_create_thumb, get_or_create_thumb,
is_image_filename, is_image_filename,
@@ -112,12 +114,14 @@ class VirtualFSRouteMixin(VirtualFSTempLinkMixin):
async def create_temp_link(cls, full_path: str, expires_in: int): async def create_temp_link(cls, full_path: str, expires_in: int):
full_path = cls._normalize_path(full_path) full_path = cls._normalize_path(full_path)
token = await cls.generate_temp_link_token(full_path, expires_in=expires_in) token = await cls.generate_temp_link_token(full_path, expires_in=expires_in)
filename = full_path.rstrip("/").split("/")[-1]
filename_part = f"/{quote(filename, safe='')}" if filename else ""
file_domain = await ConfigService.get("FILE_DOMAIN") file_domain = await ConfigService.get("FILE_DOMAIN")
if file_domain: if file_domain:
file_domain = file_domain.rstrip("/") file_domain = file_domain.rstrip("/")
url = f"{file_domain}/api/fs/public/{token}" url = f"{file_domain}/api/fs/public/{token}{filename_part}"
else: else:
url = f"/api/fs/public/{token}" url = f"/api/fs/public/{token}{filename_part}"
return {"token": token, "path": full_path, "url": url} return {"token": token, "path": full_path, "url": url}
@classmethod @classmethod
@@ -128,12 +132,17 @@ class VirtualFSRouteMixin(VirtualFSTempLinkMixin):
raise exc raise exc
try: try:
return await cls.stream_file(path, range_header) response = await cls.stream_file(path, range_header)
except FileNotFoundError: except FileNotFoundError:
raise HTTPException(404, detail="File not found via token") raise HTTPException(404, detail="File not found via token")
except Exception as exc: except Exception as exc:
raise HTTPException(500, detail=f"File access error: {exc}") raise HTTPException(500, detail=f"File access error: {exc}")
filename = path.rstrip("/").split("/")[-1]
if filename and not response.headers.get("Content-Disposition"):
response.headers["Content-Disposition"] = f"inline; filename*=UTF-8''{quote(filename, safe='')}"
return response
@classmethod @classmethod
async def stat(cls, full_path: str): async def stat(cls, full_path: str):
full_path = cls._normalize_path(full_path) full_path = cls._normalize_path(full_path)
@@ -142,8 +151,15 @@ class VirtualFSRouteMixin(VirtualFSTempLinkMixin):
@classmethod @classmethod
async def write_uploaded_file(cls, full_path: str, data: bytes): async def write_uploaded_file(cls, full_path: str, data: bytes):
full_path = cls._normalize_path(full_path) full_path = cls._normalize_path(full_path)
await cls.write_file(full_path, data) result = await cls.write_file(full_path, data)
return {"written": True, "path": full_path, "size": len(data)} path = full_path
size = len(data)
if isinstance(result, dict):
path = result.get("path") or path
size_val = result.get("size")
if isinstance(size_val, int):
size = size_val
return {"written": True, "path": path, "size": size}
@classmethod @classmethod
async def mkdir(cls, path: str): async def mkdir(cls, path: str):
@@ -201,7 +217,7 @@ class VirtualFSRouteMixin(VirtualFSTempLinkMixin):
full_path = cls._normalize_path(full_path) full_path = cls._normalize_path(full_path)
if full_path.endswith("/"): if full_path.endswith("/"):
raise HTTPException(400, detail="Path must be a file") raise HTTPException(400, detail="Path must be a file")
adapter, _m, root, rel = await cls.resolve_adapter_and_rel(full_path) adapter, adapter_model, root, rel = await cls.resolve_adapter_and_rel(full_path)
exists_func = getattr(adapter, "exists", None) exists_func = getattr(adapter, "exists", None)
if not overwrite and callable(exists_func): if not overwrite and callable(exists_func):
try: try:
@@ -212,6 +228,21 @@ class VirtualFSRouteMixin(VirtualFSTempLinkMixin):
except Exception: except Exception:
pass pass
upload_func = getattr(adapter, "write_upload_file", None)
if callable(upload_func):
try:
await file.seek(0)
except Exception:
pass
size_hint = getattr(file, "size", None)
if not isinstance(size_hint, int):
size_hint = None
filename = file.filename or (rel.rsplit("/", 1)[-1] if rel else "file")
result = await upload_func(root, rel, file.file, filename, size_hint, file.content_type)
final_path, size = cls._normalize_written_result(full_path, adapter_model, result, size_hint or 0)
await TaskService.trigger_tasks("file_written", final_path)
return {"uploaded": True, "path": final_path, "size": size, "overwrite": overwrite}
async def gen(): async def gen():
while True: while True:
chunk = await file.read(chunk_size) chunk = await file.read(chunk_size)
@@ -219,8 +250,17 @@ class VirtualFSRouteMixin(VirtualFSTempLinkMixin):
break break
yield chunk yield chunk
size = await cls.write_file_stream(full_path, gen(), overwrite=overwrite) result = await cls.write_file_stream(full_path, gen(), overwrite=overwrite)
return {"uploaded": True, "path": full_path, "size": size, "overwrite": overwrite} path = full_path
size = 0
if isinstance(result, dict):
path = result.get("path") or path
size_val = result.get("size")
if isinstance(size_val, int):
size = size_val
else:
size = int(result or 0)
return {"uploaded": True, "path": path, "size": size, "overwrite": overwrite}
@classmethod @classmethod
async def list_directory(cls, full_path: str, page_num: int, page_size: int, sort_by: str, sort_order: str): async def list_directory(cls, full_path: str, page_num: int, page_size: int, sort_by: str, sort_order: str):

View File

@@ -20,7 +20,7 @@ from middleware.exception_handler import (
) )
import httpx import httpx
from dotenv import load_dotenv from dotenv import load_dotenv
from domain.tasks import task_queue_service from domain.tasks import task_queue_service, task_scheduler
load_dotenv() load_dotenv()
@@ -73,6 +73,7 @@ async def lifespan(app: FastAPI):
# 加载已安装的插件 # 加载已安装的插件
from domain.plugins import init_plugins from domain.plugins import init_plugins
await init_plugins(app) await init_plugins(app)
await task_scheduler.start()
# 在所有路由加载完成后,挂载静态文件服务(放在最后以避免覆盖 API 路由) # 在所有路由加载完成后,挂载静态文件服务(放在最后以避免覆盖 API 路由)
app.mount("/", SPAStaticFiles(directory="web/dist", html=True, check_dir=False), name="static") app.mount("/", SPAStaticFiles(directory="web/dist", html=True, check_dir=False), name="static")
@@ -80,6 +81,7 @@ async def lifespan(app: FastAPI):
try: try:
yield yield
finally: finally:
await task_scheduler.stop()
await task_queue_service.stop_worker() await task_queue_service.stop_worker()
await close_db() await close_db()

View File

@@ -116,8 +116,7 @@ class AutomationTask(Model):
name = fields.CharField(max_length=100) name = fields.CharField(max_length=100)
event = fields.CharField(max_length=50) event = fields.CharField(max_length=50)
path_pattern = fields.CharField(max_length=1024, null=True) trigger_config = fields.JSONField(null=True)
filename_regex = fields.CharField(max_length=255, null=True)
processor_type = fields.CharField(max_length=100) processor_type = fields.CharField(max_length=100)
processor_config = fields.JSONField() processor_config = fields.JSONField()

View File

@@ -7,6 +7,7 @@ requires-python = ">=3.14"
dependencies = [ dependencies = [
"aioboto3>=15.5.0", "aioboto3>=15.5.0",
"bcrypt>=5.0.0", "bcrypt>=5.0.0",
"croniter>=6.0.0",
"fastapi>=0.127.0", "fastapi>=0.127.0",
"paramiko>=4.0.0", "paramiko>=4.0.0",
"pillow>=12.0.0", "pillow>=12.0.0",

21
uv.lock generated
View File

@@ -318,6 +318,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
] ]
[[package]]
name = "croniter"
version = "6.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "python-dateutil" },
{ name = "pytz" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ad/2f/44d1ae153a0e27be56be43465e5cb39b9650c781e001e7864389deb25090/croniter-6.0.0.tar.gz", hash = "sha256:37c504b313956114a983ece2c2b07790b1f1094fe9d81cc94739214748255577", size = 64481, upload-time = "2024-12-17T17:17:47.32Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/07/4b/290b4c3efd6417a8b0c284896de19b1d5855e6dbdb97d2a35e68fa42de85/croniter-6.0.0-py2.py3-none-any.whl", hash = "sha256:2f878c3856f17896979b2a4379ba1f09c83e374931ea15cc835c5dd2eee9b368", size = 25468, upload-time = "2024-12-17T17:17:45.359Z" },
]
[[package]] [[package]]
name = "cryptography" name = "cryptography"
version = "46.0.3" version = "46.0.3"
@@ -418,6 +431,7 @@ source = { virtual = "." }
dependencies = [ dependencies = [
{ name = "aioboto3" }, { name = "aioboto3" },
{ name = "bcrypt" }, { name = "bcrypt" },
{ name = "croniter" },
{ name = "fastapi" }, { name = "fastapi" },
{ name = "paramiko" }, { name = "paramiko" },
{ name = "pillow" }, { name = "pillow" },
@@ -437,6 +451,7 @@ dependencies = [
requires-dist = [ requires-dist = [
{ name = "aioboto3", specifier = ">=15.5.0" }, { name = "aioboto3", specifier = ">=15.5.0" },
{ name = "bcrypt", specifier = ">=5.0.0" }, { name = "bcrypt", specifier = ">=5.0.0" },
{ name = "croniter", specifier = ">=6.0.0" },
{ name = "fastapi", specifier = ">=0.127.0" }, { name = "fastapi", specifier = ">=0.127.0" },
{ name = "paramiko", specifier = ">=4.0.0" }, { name = "paramiko", specifier = ">=4.0.0" },
{ name = "pillow", specifier = ">=12.0.0" }, { name = "pillow", specifier = ">=12.0.0" },
@@ -883,11 +898,11 @@ sdist = { url = "https://files.pythonhosted.org/packages/44/66/2c17bae31c9066137
[[package]] [[package]]
name = "pyasn1" name = "pyasn1"
version = "0.6.1" version = "0.6.2"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322, upload-time = "2024-09-10T22:41:42.55Z" } sdist = { url = "https://files.pythonhosted.org/packages/fe/b6/6e630dff89739fcd427e3f72b3d905ce0acb85a45d4ec3e2678718a3487f/pyasn1-0.6.2.tar.gz", hash = "sha256:9b59a2b25ba7e4f8197db7686c09fb33e658b98339fadb826e9512629017833b", size = 146586, upload-time = "2026-01-16T18:04:18.534Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135, upload-time = "2024-09-11T16:00:36.122Z" }, { url = "https://files.pythonhosted.org/packages/44/b5/a96872e5184f354da9c84ae119971a0a4c221fe9b27a4d94bd43f2596727/pyasn1-0.6.2-py3-none-any.whl", hash = "sha256:1eb26d860996a18e9b6ed05e7aae0e9fc21619fcee6af91cca9bad4fbea224bf", size = 83371, upload-time = "2026-01-16T18:04:17.174Z" },
] ]
[[package]] [[package]]

File diff suppressed because one or more lines are too long

Before

Width:  |  Height:  |  Size: 7.6 KiB

After

Width:  |  Height:  |  Size: 3.7 KiB

View File

@@ -1,8 +1,11 @@
import request from './client'; import request from './client';
export const backupApi = { export const backupApi = {
export: async () => { export: async (sections?: string[]) => {
const response = await request('/backup/export', { const params = new URLSearchParams();
(sections || []).forEach((section) => params.append('sections', section));
const query = params.toString();
const response = await request(`/backup/export${query ? `?${query}` : ''}`, {
method: 'GET', method: 'GET',
rawResponse: true, rawResponse: true,
}) as Response; }) as Response;
@@ -27,12 +30,13 @@ export const backupApi = {
window.URL.revokeObjectURL(url); window.URL.revokeObjectURL(url);
}, },
import: async (file: File) => { import: async (file: File, mode: 'replace' | 'merge' = 'replace') => {
const formData = new FormData(); const formData = new FormData();
formData.append('file', file); formData.append('file', file);
formData.append('mode', mode);
return request('/backup/import', { return request('/backup/import', {
method: 'POST', method: 'POST',
body: formData, body: formData,
}); });
}, },
}; };

55
web/src/api/notices.ts Normal file
View File

@@ -0,0 +1,55 @@
export interface NoticeItem {
id: number;
title: string;
contentMd: string;
isPopup: boolean;
createdAt: number;
}
export interface GetNoticesResponse {
items: NoticeItem[];
page: number;
pageSize: number;
total: number;
}
export interface GetNoticesParams {
version: string;
page?: number;
}
const FOXEL_CORE_BASE = 'https://foxel.cc';
function normalizeVersion(version: string) {
return (version || '').trim().replace(/^v/i, '');
}
function extractErrorMessage(data: any) {
if (!data) return '';
if (typeof data === 'string') return data;
if (typeof data.detail === 'string') return data.detail;
if (typeof data.code === 'string') return data.code;
if (typeof data.message === 'string') return data.message;
if (typeof data.msg === 'string') return data.msg;
return '';
}
export const noticesApi = {
list: async (params: GetNoticesParams): Promise<GetNoticesResponse> => {
const url = new URL('/api/notices', FOXEL_CORE_BASE);
url.searchParams.set('version', normalizeVersion(params.version));
url.searchParams.set('page', String(params.page ?? 1));
const resp = await fetch(url.href);
if (!resp.ok) {
let msg = resp.statusText || `Request failed: ${resp.status}`;
try {
const data = await resp.json();
msg = extractErrorMessage(data) || msg;
} catch { void 0; }
throw new Error(msg);
}
return await resp.json();
},
};

View File

@@ -5,8 +5,7 @@ export interface AutomationTask {
id: number; id: number;
name: string; name: string;
event: string; event: string;
path_pattern?: string; trigger_config?: Record<string, any>;
filename_regex?: string;
processor_type: string; processor_type: string;
processor_config: Record<string, any>; processor_config: Record<string, any>;
enabled: boolean; enabled: boolean;

View File

@@ -1,8 +1,8 @@
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'; import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { Avatar, Button, Divider, Drawer, Flex, Input, List, Space, Switch, Tag, Typography, message, theme } from 'antd'; import { Avatar, Button, Divider, Flex, Input, List, Modal, Space, Switch, Tag, Typography, message, theme } from 'antd';
import { RobotOutlined, SendOutlined, FolderOpenOutlined, DeleteOutlined, ToolOutlined, DownOutlined, UpOutlined, CodeOutlined, CopyOutlined, LoadingOutlined } from '@ant-design/icons'; import { RobotOutlined, SendOutlined, DeleteOutlined, ToolOutlined, DownOutlined, UpOutlined, CodeOutlined, CopyOutlined, LoadingOutlined } from '@ant-design/icons';
import ReactMarkdown from 'react-markdown'; import ReactMarkdown from 'react-markdown';
import PathSelectorModal from './PathSelectorModal'; import type { TextAreaRef } from 'antd/es/input/TextArea';
import { agentApi, type AgentChatMessage, type PendingToolCall } from '../api/agent'; import { agentApi, type AgentChatMessage, type PendingToolCall } from '../api/agent';
import { useI18n } from '../i18n'; import { useI18n } from '../i18n';
import '../styles/ai-agent.css'; import '../styles/ai-agent.css';
@@ -54,6 +54,47 @@ function shortId(id: string, keep: number = 6): string {
return `${s.slice(0, keep)}${s.slice(-keep)}`; return `${s.slice(0, keep)}${s.slice(-keep)}`;
} }
function clampText(value: string, maxLen: number): string {
if (value.length <= maxLen) return value;
return `${value.slice(0, maxLen)}`;
}
function formatDisplayValue(value: any, maxLen: number = 120): string {
if (value == null) return '';
if (typeof value === 'string') return clampText(value, maxLen);
if (typeof value === 'number' || typeof value === 'boolean') return String(value);
try {
return clampText(JSON.stringify(value), maxLen);
} catch {
return clampText(String(value), maxLen);
}
}
function isPlainObject(value: any): value is Record<string, any> {
return !!value && typeof value === 'object' && !Array.isArray(value);
}
type ToolPayload = {
ok?: boolean;
summary?: string;
view?: {
type?: string;
title?: string;
meta?: Record<string, any>;
items?: any[];
text?: string;
message?: string;
};
data?: any;
error?: any;
};
function parseToolPayload(raw: string): ToolPayload | null {
const parsed = tryParseJson<ToolPayload>(raw);
if (!parsed || typeof parsed !== 'object') return null;
return parsed;
}
interface AiAgentWidgetProps { interface AiAgentWidgetProps {
currentPath?: string | null; currentPath?: string | null;
open: boolean; open: boolean;
@@ -68,11 +109,11 @@ const AiAgentWidget = memo(function AiAgentWidget({ currentPath, open, onOpenCha
const [loading, setLoading] = useState(false); const [loading, setLoading] = useState(false);
const [messages, setMessages] = useState<AgentChatMessage[]>([]); const [messages, setMessages] = useState<AgentChatMessage[]>([]);
const [pending, setPending] = useState<PendingToolCall[]>([]); const [pending, setPending] = useState<PendingToolCall[]>([]);
const [pathModalOpen, setPathModalOpen] = useState(false);
const [expandedTools, setExpandedTools] = useState<Record<string, boolean>>({}); const [expandedTools, setExpandedTools] = useState<Record<string, boolean>>({});
const [expandedRaw, setExpandedRaw] = useState<Record<string, boolean>>({}); const [expandedRaw, setExpandedRaw] = useState<Record<string, boolean>>({});
const [runningTools, setRunningTools] = useState<Record<string, string>>({}); const [runningTools, setRunningTools] = useState<Record<string, string>>({});
const scrollRef = useRef<HTMLDivElement | null>(null); const scrollRef = useRef<HTMLDivElement | null>(null);
const inputRef = useRef<TextAreaRef | null>(null);
const streamControllerRef = useRef<AbortController | null>(null); const streamControllerRef = useRef<AbortController | null>(null);
const streamSeqRef = useRef(0); const streamSeqRef = useRef(0);
const baseMessagesRef = useRef<AgentChatMessage[]>([]); const baseMessagesRef = useRef<AgentChatMessage[]>([]);
@@ -93,6 +134,14 @@ const AiAgentWidget = memo(function AiAgentWidget({ currentPath, open, onOpenCha
return () => window.clearTimeout(t); return () => window.clearTimeout(t);
}, [messages, open, pending, scrollToBottom]); }, [messages, open, pending, scrollToBottom]);
useEffect(() => {
if (!open || loading || pending.length > 0) return;
const t = window.setTimeout(() => {
inputRef.current?.focus();
}, 0);
return () => window.clearTimeout(t);
}, [open, loading, messages.length, pending.length]);
useEffect(() => { useEffect(() => {
return () => { return () => {
streamControllerRef.current?.abort(); streamControllerRef.current?.abort();
@@ -296,12 +345,6 @@ const AiAgentWidget = memo(function AiAgentWidget({ currentPath, open, onOpenCha
await runStream({ messages, rejected_tool_call_ids: ids }); await runStream({ messages, rejected_tool_call_ids: ids });
}, [messages, pending, runStream]); }, [messages, pending, runStream]);
const handlePathSelected = useCallback((path: string) => {
const p = normalizePath(path) || '/';
setInput((prev) => (prev.trim() ? `${prev.trim()} ${p}` : p));
setPathModalOpen(false);
}, []);
const messageItems = useMemo(() => { const messageItems = useMemo(() => {
return messages.filter((m) => { return messages.filter((m) => {
if (!m || typeof m !== 'object') return false; if (!m || typeof m !== 'object') return false;
@@ -327,94 +370,37 @@ const AiAgentWidget = memo(function AiAgentWidget({ currentPath, open, onOpenCha
} }
}, [t]); }, [t]);
const renderToolResultSummary = useCallback((toolName: string, rawContent: string, toolArgs?: Record<string, any> | null) => { const renderToolResultSummary = useCallback((rawContent: string) => {
const data = tryParseJson<Record<string, any>>(rawContent); const payload = parseToolPayload(rawContent);
if (!data) return ''; if (!payload) return '';
const summary = typeof payload.summary === 'string' ? payload.summary.trim() : '';
if (summary) return summary;
if (data.canceled) return t('Canceled'); if (payload.ok === false) {
if (data.error) return `${t('Error')}: ${String(data.error)}`; const message = typeof payload.error?.message === 'string' ? payload.error.message : '';
return message ? `${t('Error')}: ${message}` : t('Error');
}
if (toolName === 'processors_list') { const view = payload.view || {};
const processors = Array.isArray(data.processors) ? data.processors : []; const viewType = typeof view.type === 'string' ? view.type : '';
return `${t('Processors')}: ${processors.length}`; if (viewType === 'text') {
const text = typeof view.text === 'string' ? view.text : '';
return text ? `${text.length} ${t('chars')}` : '';
} }
if (toolName === 'processors_run') { if (viewType === 'list') {
const ctx = (() => { const items = Array.isArray(view.items) ? view.items : [];
const processorType = typeof toolArgs?.processor_type === 'string' ? toolArgs.processor_type.trim() : ''; return `${items.length} ${t('items')}`;
const path = typeof toolArgs?.path === 'string' ? toolArgs.path.trim() : '';
const parts = [processorType, path].filter(Boolean);
return parts.length ? parts.join(' · ') : '';
})();
if (typeof data.task_id === 'string') {
return ctx ? `${t('Task submitted')}: ${ctx} · ${shortId(data.task_id)}` : `${t('Task submitted')}: ${shortId(data.task_id)}`;
}
const taskIds = Array.isArray(data.task_ids) ? data.task_ids : [];
const scheduled = typeof data.scheduled === 'number' ? data.scheduled : taskIds.length;
if (scheduled) return ctx ? `${t('Tasks submitted')}: ${ctx} · ${scheduled}` : `${t('Tasks submitted')}: ${scheduled}`;
return t('Task submitted');
} }
if (toolName === 'vfs_list_dir') { if (viewType === 'kv') {
const path = typeof data.path === 'string' ? data.path : ''; const items = Array.isArray(view.items) ? view.items : [];
const entries = Array.isArray(data.entries) ? data.entries : []; return `${items.length} ${t('items')}`;
const names = entries
.map((it: any) => String(it?.name || '').trim())
.filter(Boolean)
.slice(0, 3);
const head = `${t('Directory')}: ${path || '/'}`;
const tail = `${entries.length} ${t('items')}`;
const sample = names.length ? ` · ${names.join(', ')}` : '';
return `${head} · ${tail}${sample}`;
}
if (toolName === 'vfs_search') {
const query = typeof data.query === 'string' ? data.query : '';
const items = Array.isArray(data.items) ? data.items : [];
return `${t('Search')}: ${query || '-'} · ${items.length} ${t('results')}`;
}
if (toolName === 'vfs_stat') {
const isDir = Boolean(data.is_dir);
const path = typeof data.path === 'string' ? data.path : '';
return `${t('Info')}: ${path || '-'} · ${isDir ? t('Folder') : t('File')}`;
}
if (toolName === 'vfs_read_text') {
const path = typeof data.path === 'string' ? data.path : '';
const length = typeof data.length === 'number' ? data.length : undefined;
const truncated = Boolean(data.truncated);
const tail = length != null ? ` · ${length} ${t('chars')}${truncated ? `(${t('Truncated')})` : ''}` : '';
return `${t('Read')}: ${path || '-'}${tail}`;
}
if (toolName === 'vfs_write_text') {
const path = typeof data.path === 'string' ? data.path : '';
const bytes = typeof data.bytes === 'number' ? data.bytes : undefined;
return `${t('Write')}: ${path || '-'}${bytes != null ? ` · ${bytes} bytes` : ''}`;
}
if (toolName === 'vfs_mkdir') {
const path = typeof data.path === 'string' ? data.path : '';
return `${t('Created')}: ${path || '-'}`;
}
if (toolName === 'vfs_delete') {
const path = typeof data.path === 'string' ? data.path : '';
return `${t('Deleted')}: ${path || '-'}`;
}
if (toolName === 'vfs_move') {
const src = typeof data.src === 'string' ? data.src : '';
const dst = typeof data.dst === 'string' ? data.dst : '';
return `${t('Moved')}: ${src || '-'}${dst || '-'}`;
}
if (toolName === 'vfs_copy') {
const src = typeof data.src === 'string' ? data.src : '';
const dst = typeof data.dst === 'string' ? data.dst : '';
return `${t('Copied')}: ${src || '-'}${dst || '-'}`;
}
if (toolName === 'vfs_rename') {
const src = typeof data.src === 'string' ? data.src : '';
const dst = typeof data.dst === 'string' ? data.dst : '';
return `${t('Renamed')}: ${src || '-'}${dst || '-'}`;
} }
return ''; return '';
}, [t]); }, [t]);
const renderToolDetails = useCallback((toolKey: string, toolName: string, rawContent: string) => { const renderToolDetails = useCallback((toolKey: string, rawContent: string) => {
const data = tryParseJson<Record<string, any>>(rawContent); const payload = parseToolPayload(rawContent);
const view = payload?.view;
const showRaw = !!expandedRaw[toolKey]; const showRaw = !!expandedRaw[toolKey];
const toggleRaw = () => setExpandedRaw((prev) => ({ ...prev, [toolKey]: !prev[toolKey] })); const toggleRaw = () => setExpandedRaw((prev) => ({ ...prev, [toolKey]: !prev[toolKey] }));
@@ -452,26 +438,40 @@ const AiAgentWidget = memo(function AiAgentWidget({ currentPath, open, onOpenCha
</Space> </Space>
); );
if (toolName === 'processors_list') { const viewType = typeof view?.type === 'string' ? view.type : '';
const processors = Array.isArray(data?.processors) ? data!.processors : []; const title = typeof view?.title === 'string' ? view.title : '';
const metaEntries = isPlainObject(view?.meta) ? Object.entries(view!.meta) : [];
const renderMeta = () => {
if (metaEntries.length === 0 && !title) return null;
return (
<>
<Space direction="vertical" size={6} style={{ width: '100%' }}>
{title ? (
<Text type="secondary" style={{ fontSize: 12 }}>{title}</Text>
) : null}
{metaEntries.slice(0, 6).map(([key, value]) => (
<Text key={key} type="secondary" style={{ fontSize: 12 }}>
{key}: {formatDisplayValue(value, 180) || '-'}
</Text>
))}
</Space>
<Divider style={{ margin: '10px 0' }} />
</>
);
};
if (viewType === 'error') {
const message = typeof view?.message === 'string'
? view.message
: (typeof payload?.error?.message === 'string' ? payload.error.message : t('Error'));
return ( return (
<div className="fx-agent-tool-details"> <div className="fx-agent-tool-details">
{header} {header}
<Divider style={{ margin: '10px 0' }} /> <Divider style={{ margin: '10px 0' }} />
<List <Paragraph style={{ marginBottom: 0, whiteSpace: 'pre-wrap' }}>
size="small" {message || t('Error')}
dataSource={processors} </Paragraph>
locale={{ emptyText: t('No results') }}
renderItem={(item: any) => (
<List.Item>
<Space size={10} wrap>
<Text code style={{ fontVariantNumeric: 'tabular-nums' }}>{String(item?.type || '')}</Text>
<Text>{String(item?.name || '')}</Text>
</Space>
</List.Item>
)}
style={{ background: 'transparent' }}
/>
{showRaw && ( {showRaw && (
<> <>
<Divider style={{ margin: '10px 0' }} /> <Divider style={{ margin: '10px 0' }} />
@@ -482,40 +482,43 @@ const AiAgentWidget = memo(function AiAgentWidget({ currentPath, open, onOpenCha
); );
} }
if (toolName === 'vfs_list_dir') { if (viewType === 'text') {
const path = typeof data?.path === 'string' ? data!.path : '/'; const text = typeof view?.text === 'string' ? view.text : '';
const entries = Array.isArray(data?.entries) ? data!.entries : [];
const pagination = data?.pagination && typeof data.pagination === 'object' ? data.pagination : null;
return ( return (
<div className="fx-agent-tool-details"> <div className="fx-agent-tool-details">
{header} {header}
<Divider style={{ margin: '10px 0' }} /> <Divider style={{ margin: '10px 0' }} />
<Space direction="vertical" size={6} style={{ width: '100%' }}> {renderMeta()}
<Text type="secondary" style={{ fontSize: 12 }}>{t('Directory')}: {path}</Text> <pre className="fx-agent-pre" style={{ marginTop: metaEntries.length || title ? 0 : 10 }}>{text || ''}</pre>
{pagination?.total != null ? ( {showRaw && (
<Text type="secondary" style={{ fontSize: 12 }}> <>
{t('Total')}: {String(pagination.total)} <Divider style={{ margin: '10px 0' }} />
</Text> <pre className="fx-agent-pre">{rawJson}</pre>
) : null} </>
</Space> )}
</div>
);
}
if (viewType === 'kv') {
const items = Array.isArray(view?.items) ? view!.items : [];
return (
<div className="fx-agent-tool-details">
{header}
<Divider style={{ margin: '10px 0' }} /> <Divider style={{ margin: '10px 0' }} />
{renderMeta()}
<List <List
size="small" size="small"
dataSource={entries} dataSource={items}
locale={{ emptyText: t('No results') }} locale={{ emptyText: t('No results') }}
renderItem={(item: any) => { renderItem={(item: any, idx) => {
const name = String(item?.name || ''); const key = typeof item?.key === 'string' ? item.key : (typeof item?.label === 'string' ? item.label : String(idx));
const type = String(item?.type || (item?.is_dir ? 'dir' : 'file')); const value = typeof item?.value === 'string' ? item.value : formatDisplayValue(item?.value, 200);
return ( return (
<List.Item> <List.Item>
<Space size={10} wrap style={{ width: '100%', justifyContent: 'space-between' }}> <Space size={10} wrap>
<Space size={10} wrap> <Text code style={{ fontVariantNumeric: 'tabular-nums' }}>{key || '-'}</Text>
<Text code style={{ fontVariantNumeric: 'tabular-nums' }}>{type}</Text> <Text>{value || '-'}</Text>
<Text>{name}</Text>
</Space>
{!item?.is_dir && typeof item?.size === 'number' ? (
<Text type="secondary" style={{ fontSize: 12 }}>{item.size} bytes</Text>
) : null}
</Space> </Space>
</List.Item> </List.Item>
); );
@@ -532,44 +535,40 @@ const AiAgentWidget = memo(function AiAgentWidget({ currentPath, open, onOpenCha
); );
} }
if (toolName === 'vfs_search') { if (viewType === 'list') {
const query = typeof data?.query === 'string' ? data!.query : ''; const items = Array.isArray(view?.items) ? view!.items : [];
const mode = typeof data?.mode === 'string' ? data!.mode : '';
const items = Array.isArray(data?.items) ? data!.items : [];
const pagination = data?.pagination && typeof data.pagination === 'object' ? data.pagination : null;
return ( return (
<div className="fx-agent-tool-details"> <div className="fx-agent-tool-details">
{header} {header}
<Divider style={{ margin: '10px 0' }} /> <Divider style={{ margin: '10px 0' }} />
<Space direction="vertical" size={6} style={{ width: '100%' }}> {renderMeta()}
<Text type="secondary" style={{ fontSize: 12 }}>{t('Search')}: {query || '-'}</Text>
<Text type="secondary" style={{ fontSize: 12 }}>{t('Mode')}: {mode || '-'}</Text>
{pagination?.has_more != null ? (
<Text type="secondary" style={{ fontSize: 12 }}>
{t('Page')}: {String(pagination.page)} · {t('Has more')}: {String(Boolean(pagination.has_more))}
</Text>
) : null}
</Space>
<Divider style={{ margin: '10px 0' }} />
<List <List
size="small" size="small"
dataSource={items} dataSource={items}
locale={{ emptyText: t('No results') }} locale={{ emptyText: t('No results') }}
renderItem={(item: any) => { renderItem={(item: any) => {
const type = String(item?.source_type || item?.mime || ''); if (isPlainObject(item)) {
const path = String(item?.path || ''); const entries = Object.entries(item);
const score = item?.score != null ? Number(item.score) : null; const shown = entries.slice(0, 4);
const extra = entries.length - shown.length;
return (
<List.Item>
<Space size={10} wrap style={{ width: '100%', justifyContent: 'space-between' }}>
<Space size={10} wrap>
{shown.map(([key, value]) => (
<Text key={key}>
<Text type="secondary">{key}</Text>: {formatDisplayValue(value, 160) || '-'}
</Text>
))}
{extra > 0 ? <Text type="secondary">+{extra}</Text> : null}
</Space>
</Space>
</List.Item>
);
}
return ( return (
<List.Item> <List.Item>
<Space size={10} wrap style={{ width: '100%', justifyContent: 'space-between' }}> <Text>{formatDisplayValue(item, 200) || '-'}</Text>
<Space size={10} wrap>
{type ? <Text code style={{ fontVariantNumeric: 'tabular-nums' }}>{type}</Text> : null}
<Text>{path}</Text>
</Space>
{score != null && !Number.isNaN(score) ? (
<Text type="secondary" style={{ fontSize: 12 }}>{score.toFixed(3)}</Text>
) : null}
</Space>
</List.Item> </List.Item>
); );
}} }}
@@ -585,25 +584,6 @@ const AiAgentWidget = memo(function AiAgentWidget({ currentPath, open, onOpenCha
); );
} }
if (toolName === 'vfs_read_text') {
const path = typeof data?.path === 'string' ? data!.path : '';
const content = typeof data?.content === 'string' ? data!.content : '';
return (
<div className="fx-agent-tool-details">
{header}
<Divider style={{ margin: '10px 0' }} />
<Text type="secondary" style={{ fontSize: 12 }}>{t('File')}: {path || '-'}</Text>
<pre className="fx-agent-pre" style={{ marginTop: 10 }}>{content || ''}</pre>
{showRaw && (
<>
<Divider style={{ margin: '10px 0' }} />
<pre className="fx-agent-pre">{rawJson}</pre>
</>
)}
</div>
);
}
return ( return (
<div className="fx-agent-tool-details"> <div className="fx-agent-tool-details">
{header} {header}
@@ -612,74 +592,62 @@ const AiAgentWidget = memo(function AiAgentWidget({ currentPath, open, onOpenCha
<pre className="fx-agent-pre">{rawJson}</pre> <pre className="fx-agent-pre">{rawJson}</pre>
) : ( ) : (
<Paragraph style={{ marginBottom: 0, whiteSpace: 'pre-wrap' }}> <Paragraph style={{ marginBottom: 0, whiteSpace: 'pre-wrap' }}>
{extractTextContent(data ?? rawContent) || <Text type="secondary">{t('No content')}</Text>} {extractTextContent(payload ?? rawContent) || <Text type="secondary">{t('No content')}</Text>}
</Paragraph> </Paragraph>
)} )}
</div> </div>
); );
}, [copyToClipboard, expandedRaw, t]); }, [copyToClipboard, expandedRaw, t]);
const renderToolArgsSummary = useCallback((toolName: string, args?: Record<string, any> | null) => { const renderToolArgsSummary = useCallback((args?: Record<string, any> | null) => {
const a = args || {}; const entries = Object.entries(args || {})
if (toolName === 'processors_run') { .filter(([, value]) => value != null && String(value).trim() !== '');
const path = typeof a.path === 'string' ? a.path : ''; if (entries.length === 0) return '';
return path ? `${t('Path')}: ${path}` : ''; return entries.slice(0, 2)
} .map(([key, value]) => `${key}: ${formatDisplayValue(value, 60)}`)
if (toolName === 'vfs_read_text' || toolName === 'vfs_list_dir' || toolName === 'vfs_stat' || toolName === 'vfs_delete' || toolName === 'vfs_mkdir') { .join(' · ');
const path = typeof a.path === 'string' ? a.path : ''; }, []);
return path ? `${t('Path')}: ${path}` : '';
}
if (toolName === 'vfs_search') {
const query = typeof a.query === 'string' ? a.query : '';
return query ? `${t('Search')}: ${query}` : '';
}
if (toolName === 'vfs_write_text') {
const path = typeof a.path === 'string' ? a.path : '';
return path ? `${t('Path')}: ${path}` : '';
}
if (toolName === 'vfs_move' || toolName === 'vfs_copy' || toolName === 'vfs_rename') {
const src = typeof a.src === 'string' ? a.src : '';
const dst = typeof a.dst === 'string' ? a.dst : '';
if (src && dst) return `${src}${dst}`;
if (src) return src;
if (dst) return dst;
return '';
}
return '';
}, [t]);
return ( return (
<> <>
<Drawer <Modal
title={t('AI Agent')} title={(
<Flex align="center" justify="space-between" gap={12} wrap>
<Text strong>{t('AI Agent')}</Text>
<Space align="center">
<Text type="secondary">{t('Auto execute')}</Text>
<Switch size="small" checked={autoExecute} onChange={setAutoExecute} />
<Button
type="text"
size="small"
icon={<DeleteOutlined />}
onClick={clearChat}
disabled={loading || messageItems.length === 0}
>
{t('Clear')}
</Button>
</Space>
</Flex>
)}
open={open} open={open}
onClose={() => { streamControllerRef.current?.abort(); onOpenChange(false); }} onCancel={() => { streamControllerRef.current?.abort(); onOpenChange(false); }}
width={520} width={720}
mask={false} centered
closable={false}
destroyOnHidden destroyOnHidden
footer={null}
styles={{ styles={{
body: { body: {
padding: 8, padding: 8,
background: token.colorBgContainer, background: token.colorBgContainer,
height: '70vh',
display: 'flex',
flexDirection: 'column',
overflow: 'hidden',
}, },
}} }}
extra={
<Space align="center">
<Text type="secondary">{t('Auto execute')}</Text>
<Switch size="small" checked={autoExecute} onChange={setAutoExecute} />
<Button
type="text"
size="small"
icon={<DeleteOutlined />}
onClick={clearChat}
disabled={loading || messageItems.length === 0}
>
{t('Clear')}
</Button>
</Space>
}
> >
<Flex vertical gap={0} style={{ height: '100%' }} className="fx-agent-container"> <Flex vertical gap={0} style={{ flex: 1, minHeight: 0 }} className="fx-agent-container">
<div <div
ref={scrollRef} ref={scrollRef}
className="fx-agent-chat-scroll" className="fx-agent-chat-scroll"
@@ -705,7 +673,7 @@ const AiAgentWidget = memo(function AiAgentWidget({ currentPath, open, onOpenCha
if (isTool) { if (isTool) {
const rawContent = extractTextContent((m as any).content); const rawContent = extractTextContent((m as any).content);
const expanded = !!expandedTools[msgKey]; const expanded = !!expandedTools[msgKey];
const summary = toolName ? renderToolResultSummary(toolName, rawContent, toolInfo?.args || null) : ''; const summary = rawContent ? renderToolResultSummary(rawContent) : '';
return ( return (
<div key={msgKey} className="fx-agent-msg fx-agent-msg-tool"> <div key={msgKey} className="fx-agent-msg fx-agent-msg-tool">
<div className="fx-agent-tool-block"> <div className="fx-agent-tool-block">
@@ -742,7 +710,7 @@ const AiAgentWidget = memo(function AiAgentWidget({ currentPath, open, onOpenCha
</pre> </pre>
</div> </div>
)} )}
{renderToolDetails(msgKey, toolName || t('Tool'), rawContent)} {renderToolDetails(msgKey, rawContent)}
</div> </div>
)} )}
</div> </div>
@@ -816,7 +784,7 @@ const AiAgentWidget = memo(function AiAgentWidget({ currentPath, open, onOpenCha
const key = `pending:${p.id}`; const key = `pending:${p.id}`;
const expanded = !!expandedTools[key]; const expanded = !!expandedTools[key];
const running = Object.prototype.hasOwnProperty.call(runningTools, p.id); const running = Object.prototype.hasOwnProperty.call(runningTools, p.id);
const summary = renderToolArgsSummary(p.name, args); const summary = renderToolArgsSummary(args);
return ( return (
<div key={p.id} className="fx-agent-tool-block fx-agent-pending-item"> <div key={p.id} className="fx-agent-tool-block fx-agent-pending-item">
<div className="fx-agent-tool-bar"> <div className="fx-agent-tool-bar">
@@ -880,19 +848,18 @@ const AiAgentWidget = memo(function AiAgentWidget({ currentPath, open, onOpenCha
<div className="fx-agent-composer"> <div className="fx-agent-composer">
<Flex vertical gap={8}> <Flex vertical gap={8}>
<Space wrap> <Space wrap>
<Button size="small" icon={<FolderOpenOutlined />} onClick={() => setPathModalOpen(true)} disabled={loading}>
{t('Select Path')}
</Button>
{effectivePath && ( {effectivePath && (
<Tag bordered={false} color="blue">{t('Current')}: {effectivePath}</Tag> <Tag bordered={false} color="blue">{t('Current')}: {effectivePath}</Tag>
)} )}
</Space> </Space>
<Input.TextArea <Input.TextArea
ref={inputRef}
value={input} value={input}
onChange={(e) => setInput(e.target.value)} onChange={(e) => setInput(e.target.value)}
placeholder={t('Type a message')} placeholder={t('Type a message')}
autoSize={{ minRows: 2, maxRows: 6 }} autoSize={{ minRows: 2, maxRows: 6 }}
autoFocus
disabled={loading || pending.length > 0} disabled={loading || pending.length > 0}
variant="borderless" variant="borderless"
onPressEnter={(e) => { onPressEnter={(e) => {
@@ -916,15 +883,7 @@ const AiAgentWidget = memo(function AiAgentWidget({ currentPath, open, onOpenCha
</Flex> </Flex>
</div> </div>
</Flex> </Flex>
</Drawer> </Modal>
<PathSelectorModal
open={pathModalOpen}
mode="any"
initialPath={effectivePath || '/'}
onOk={handlePathSelected}
onCancel={() => setPathModalOpen(false)}
/>
</> </>
); );
}); });

View File

@@ -0,0 +1,184 @@
import { memo, useEffect, useMemo, useState } from 'react';
import { Modal, List, Typography, theme, Flex, Button, Empty, message, Divider, Spin } from 'antd';
import ReactMarkdown from 'react-markdown';
import { format } from 'date-fns';
import { noticesApi, type NoticeItem } from '../api/notices';
import { useI18n } from '../i18n';
export interface NoticesModalProps {
open: boolean;
version: string;
onClose: () => void;
}
const NoticesModal = memo(function NoticesModal({ open, version, onClose }: NoticesModalProps) {
const { token } = theme.useToken();
const { t } = useI18n();
const [items, setItems] = useState<NoticeItem[]>([]);
const [page, setPage] = useState(1);
const [total, setTotal] = useState(0);
const [loading, setLoading] = useState(false);
const [loadingMore, setLoadingMore] = useState(false);
const [selectedId, setSelectedId] = useState<number | null>(null);
const selected = useMemo(() => items.find(i => i.id === selectedId) ?? null, [items, selectedId]);
const hasMore = items.length < total;
const loadPage = async (targetPage: number, mode: 'replace' | 'append') => {
if (mode === 'replace') setLoading(true);
else setLoadingMore(true);
try {
const resp = await noticesApi.list({ version, page: targetPage });
setPage(resp.page ?? targetPage);
setTotal(resp.total ?? 0);
setItems(prev => mode === 'replace' ? resp.items : [...prev, ...resp.items]);
if (mode === 'replace') {
setSelectedId(resp.items[0]?.id ?? null);
} else {
setSelectedId(prev => prev ?? resp.items[0]?.id ?? null);
}
} catch (e) {
if (e instanceof Error) {
message.error(e.message || t('Error'));
}
} finally {
setLoading(false);
setLoadingMore(false);
}
};
useEffect(() => {
if (!open) return;
setItems([]);
setPage(1);
setTotal(0);
setSelectedId(null);
loadPage(1, 'replace');
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [open, version]);
const formatTime = (ts: number) => {
try {
return format(new Date(ts), 'yyyy-MM-dd HH:mm');
} catch {
return '';
}
};
return (
<Modal
title={t('Notices')}
open={open}
onCancel={onClose}
footer={null}
width={980}
styles={{
body: {
padding: 0,
height: '70vh',
overflow: 'hidden',
},
}}
>
<Flex style={{ height: '70vh', minHeight: 0 }}>
<div style={{
width: 320,
minWidth: 280,
borderRight: `1px solid ${token.colorBorderSecondary}`,
display: 'flex',
flexDirection: 'column',
minHeight: 0,
}}>
<div style={{
padding: '10px 12px',
borderBottom: `1px solid ${token.colorBorderSecondary}`,
display: 'flex',
alignItems: 'center',
justifyContent: 'space-between',
gap: 12,
}}>
<Typography.Text type="secondary">{t('Total')}: {total}</Typography.Text>
<Typography.Text type="secondary">{items.length}/{total}</Typography.Text>
</div>
<List
size="small"
loading={loading && items.length === 0}
dataSource={items}
style={{ flex: 1, minHeight: 0, overflow: 'auto' }}
renderItem={(item) => {
const isSelected = item.id === selectedId;
return (
<List.Item
onClick={() => setSelectedId(item.id)}
style={{
cursor: 'pointer',
background: isSelected ? 'rgba(22,119,255,0.08)' : undefined,
borderInlineStart: isSelected ? `3px solid ${token.colorPrimary}` : '3px solid transparent',
paddingInlineStart: 10,
}}
>
<List.Item.Meta
title={<Typography.Text strong={isSelected}>{item.title}</Typography.Text>}
description={<Typography.Text type="secondary">{formatTime(item.createdAt)}</Typography.Text>}
/>
</List.Item>
);
}}
/>
<div style={{
padding: 12,
borderTop: `1px solid ${token.colorBorderSecondary}`,
}}>
<Button
block
loading={loadingMore}
disabled={!hasMore}
onClick={() => loadPage(page + 1, 'append')}
>
{t('Load more')}
</Button>
</div>
</div>
<div style={{ flex: 1, minWidth: 0, padding: 16, overflow: 'auto' }}>
{selected ? (
<>
<Typography.Title level={4} style={{ marginTop: 0, marginBottom: 6 }}>
{selected.title}
</Typography.Title>
<Typography.Text type="secondary">{formatTime(selected.createdAt)}</Typography.Text>
<Divider style={{ margin: '12px 0' }} />
{selected.contentMd?.trim() ? (
<div style={{ color: token.colorText, lineHeight: 1.7 }}>
<ReactMarkdown
components={{
a: ({ ...props }) => <a {...props} target="_blank" rel="noopener noreferrer" />,
ul: ({ ...props }) => <ul style={{ paddingLeft: 20, marginBottom: 12 }} {...props} />,
li: ({ ...props }) => <li style={{ marginBottom: 6 }} {...props} />,
p: ({ ...props }) => <p style={{ marginBottom: 12 }} {...props} />,
}}
>
{selected.contentMd}
</ReactMarkdown>
</div>
) : (
<Empty description={t('No content')} />
)}
</>
) : (
loading ? (
<div style={{ display: 'flex', justifyContent: 'center', paddingTop: 80 }}>
<Spin />
</div>
) : (
<Empty description={t('No notices')} />
)
)}
</div>
</Flex>
</Modal>
);
});
export default NoticesModal;

View File

@@ -97,6 +97,7 @@
"Home": "Home", "Home": "Home",
"File Manager": "File Manager", "File Manager": "File Manager",
"New Folder": "New Folder", "New Folder": "New Folder",
"New File": "New File",
"Upload": "Upload", "Upload": "Upload",
"Name": "Name", "Name": "Name",
"Size": "Size", "Size": "Size",
@@ -521,9 +522,12 @@
"Trigger Event": "Trigger Event", "Trigger Event": "Trigger Event",
"File Written": "File Written", "File Written": "File Written",
"File Deleted": "File Deleted", "File Deleted": "File Deleted",
"Scheduled": "Scheduled",
"Matching Rules": "Matching Rules", "Matching Rules": "Matching Rules",
"Path Prefix (optional)": "Path Prefix (optional)", "Path Prefix (optional)": "Path Prefix (optional)",
"Filename Regex (optional)": "Filename Regex (optional)", "Filename Regex (optional)": "Filename Regex (optional)",
"Schedule": "Schedule",
"Cron Expression": "Cron Expression",
"Action": "Action", "Action": "Action",
"Current Task Queue": "Current Task Queue", "Current Task Queue": "Current Task Queue",
"Params": "Params", "Params": "Params",
@@ -533,6 +537,7 @@
"This will delete all logs irreversibly.": "This will delete all logs irreversibly.", "This will delete all logs irreversibly.": "This will delete all logs irreversibly.",
"Cleared {count} logs": "Cleared {count} logs", "Cleared {count} logs": "Cleared {count} logs",
"Time": "Time", "Time": "Time",
"Weekday": "Weekday",
"Level": "Level", "Level": "Level",
"Source": "Source", "Source": "Source",
"Message": "Message", "Message": "Message",
@@ -552,10 +557,24 @@
"Export": "Export", "Export": "Export",
"Import": "Import", "Import": "Import",
"Export all data (adapters, users, tasks, shares) into a JSON file.": "Export all data (adapters, users, tasks, shares) into a JSON file.", "Export all data (adapters, users, tasks, shares) into a JSON file.": "Export all data (adapters, users, tasks, shares) into a JSON file.",
"Export selected data into a JSON file.": "Export selected data into a JSON file.",
"Keep your backup file safe.": "Keep your backup file safe.", "Keep your backup file safe.": "Keep your backup file safe.",
"Select backup sections": "Select backup sections",
"User Accounts": "User Accounts",
"Share Links": "Share Links",
"Configurations": "Configurations",
"AI Providers": "AI Providers",
"AI Models": "AI Models",
"AI Default Models": "AI Default Models",
"Plugin Data": "Plugins",
"Export Backup": "Export Backup", "Export Backup": "Export Backup",
"Restore data from a previously exported JSON file.": "Restore data from a previously exported JSON file.", "Restore data from a previously exported JSON file.": "Restore data from a previously exported JSON file.",
"Warning: This will clear and overwrite existing data.": "Warning: This will clear and overwrite existing data.", "Warning: This will clear and overwrite existing data.": "Warning: This will clear and overwrite existing data.",
"Import mode": "Import mode",
"Merge (upsert by ID)": "Merge (upsert by ID)",
"Replace (clear before import)": "Replace (clear before import)",
"Warning: This will clear data in the backup sections before importing.": "Warning: This will clear data in the backup sections before importing.",
"Warning: This will merge data in the backup sections and overwrite existing records with the same ID.": "Warning: This will merge data in the backup sections and overwrite existing records with the same ID.",
"Choose File and Restore": "Choose File and Restore", "Choose File and Restore": "Choose File and Restore",
"No files yet here": "No files yet here", "No files yet here": "No files yet here",
"This folder is empty": "This folder is empty", "This folder is empty": "This folder is empty",
@@ -693,6 +712,9 @@
"Open with {app}": "Open with {app}", "Open with {app}": "Open with {app}",
"Set as default for .{ext}": "Set as default for .{ext}", "Set as default for .{ext}": "Set as default for .{ext}",
"AI Agent": "AI Agent", "AI Agent": "AI Agent",
"Notices": "Notices",
"No notices": "No notices",
"Load more": "Load more",
"Auto execute": "Auto execute", "Auto execute": "Auto execute",
"Start a conversation": "Start a conversation", "Start a conversation": "Start a conversation",
"No content": "No content", "No content": "No content",

View File

@@ -116,6 +116,7 @@
"Home": "主页", "Home": "主页",
"File Manager": "文件管理", "File Manager": "文件管理",
"New Folder": "新建目录", "New Folder": "新建目录",
"New File": "新建文件",
"Upload": "上传", "Upload": "上传",
"Name": "名称", "Name": "名称",
"Size": "大小", "Size": "大小",
@@ -512,9 +513,12 @@
"Trigger Event": "触发事件", "Trigger Event": "触发事件",
"File Written": "文件写入", "File Written": "文件写入",
"File Deleted": "文件删除", "File Deleted": "文件删除",
"Scheduled": "定时任务",
"Matching Rules": "匹配规则", "Matching Rules": "匹配规则",
"Path Prefix (optional)": "路径前缀 (可选)", "Path Prefix (optional)": "路径前缀 (可选)",
"Filename Regex (optional)": "文件名正则 (可选)", "Filename Regex (optional)": "文件名正则 (可选)",
"Schedule": "定时设置",
"Cron Expression": "Cron 表达式",
"Action": "执行动作", "Action": "执行动作",
"Current Task Queue": "当前任务队列", "Current Task Queue": "当前任务队列",
"Params": "参数", "Params": "参数",
@@ -524,6 +528,7 @@
"This will delete all logs irreversibly.": "将删除全部日志且不可恢复", "This will delete all logs irreversibly.": "将删除全部日志且不可恢复",
"Cleared {count} logs": "成功清理 {count} 条日志", "Cleared {count} logs": "成功清理 {count} 条日志",
"Time": "时间", "Time": "时间",
"Weekday": "星期",
"Level": "级别", "Level": "级别",
"Source": "来源", "Source": "来源",
"Message": "消息", "Message": "消息",
@@ -543,10 +548,24 @@
"Export": "导出", "Export": "导出",
"Import": "恢复", "Import": "恢复",
"Export all data (adapters, users, tasks, shares) into a JSON file.": "点击按钮将所有数据(包括存储、用户、自动化任务和分享)导出为一个 JSON 文件。", "Export all data (adapters, users, tasks, shares) into a JSON file.": "点击按钮将所有数据(包括存储、用户、自动化任务和分享)导出为一个 JSON 文件。",
"Export selected data into a JSON file.": "导出选中的数据为一个 JSON 文件。",
"Keep your backup file safe.": "请妥善保管您的备份文件。", "Keep your backup file safe.": "请妥善保管您的备份文件。",
"Select backup sections": "选择备份内容",
"User Accounts": "账号",
"Share Links": "分享列表",
"Configurations": "配置",
"AI Providers": "AI 服务商",
"AI Models": "AI 模型",
"AI Default Models": "AI 默认模型",
"Plugin Data": "插件",
"Export Backup": "导出备份", "Export Backup": "导出备份",
"Restore data from a previously exported JSON file.": "从之前导出的JSON文件恢复数据。", "Restore data from a previously exported JSON file.": "从之前导出的JSON文件恢复数据。",
"Warning: This will clear and overwrite existing data.": "警告:此操作将清除并覆盖现有数据。", "Warning: This will clear and overwrite existing data.": "警告:此操作将清除并覆盖现有数据。",
"Import mode": "导入方式",
"Merge (upsert by ID)": "增量+覆盖(按 ID",
"Replace (clear before import)": "清空后导入",
"Warning: This will clear data in the backup sections before importing.": "警告:此操作会先清空备份中包含的分区数据,再导入。",
"Warning: This will merge data in the backup sections and overwrite existing records with the same ID.": "警告:此操作会合并备份中包含的分区数据,并按 ID 覆盖已存在记录。",
"Choose File and Restore": "选择文件并恢复", "Choose File and Restore": "选择文件并恢复",
"No files yet here": "这里还没有任何文件", "No files yet here": "这里还没有任何文件",
"This folder is empty": "此目录为空", "This folder is empty": "此目录为空",
@@ -646,7 +665,6 @@
"Created (newest)": "创建时间(最新)", "Created (newest)": "创建时间(最新)",
"Installed already": "已安装", "Installed already": "已安装",
"No results": "暂无结果", "No results": "暂无结果",
"Downloading": "下载中",
"Download and Install": "下载并安装", "Download and Install": "下载并安装",
"Loading apps": "加载应用中", "Loading apps": "加载应用中",
"Failed to load apps": "加载应用失败", "Failed to load apps": "加载应用失败",
@@ -695,6 +713,9 @@
"Open with {app}": "使用 {app} 打开", "Open with {app}": "使用 {app} 打开",
"Set as default for .{ext}": "设为该类型(.{ext})默认应用", "Set as default for .{ext}": "设为该类型(.{ext})默认应用",
"AI Agent": "AI 助手", "AI Agent": "AI 助手",
"Notices": "公告",
"No notices": "暂无公告",
"Load more": "加载更多",
"Auto execute": "自动执行", "Auto execute": "自动执行",
"Start a conversation": "开始对话", "Start a conversation": "开始对话",
"No content": "无内容", "No content": "无内容",

View File

@@ -1,5 +1,5 @@
import { Layout, Button, Dropdown, theme, Flex, Avatar, Typography, Tooltip } from 'antd'; import { Layout, Button, Dropdown, theme, Flex, Avatar, Typography, Tooltip } from 'antd';
import { SearchOutlined, MenuUnfoldOutlined, LogoutOutlined, UserOutlined, RobotOutlined } from '@ant-design/icons'; import { SearchOutlined, MenuUnfoldOutlined, LogoutOutlined, UserOutlined, RobotOutlined, BellOutlined } from '@ant-design/icons';
import { memo, useState } from 'react'; import { memo, useState } from 'react';
import SearchDialog from './SearchDialog.tsx'; import SearchDialog from './SearchDialog.tsx';
import { authApi } from '../api/auth.ts'; import { authApi } from '../api/auth.ts';
@@ -8,6 +8,8 @@ import { useI18n } from '../i18n';
import LanguageSwitcher from '../components/LanguageSwitcher'; import LanguageSwitcher from '../components/LanguageSwitcher';
import { useAuth } from '../contexts/AuthContext'; import { useAuth } from '../contexts/AuthContext';
import ProfileModal from '../components/ProfileModal'; import ProfileModal from '../components/ProfileModal';
import NoticesModal from '../components/NoticesModal';
import { useSystemStatus } from '../contexts/SystemContext';
const { Header } = Layout; const { Header } = Layout;
@@ -24,6 +26,8 @@ const TopHeader = memo(function TopHeader({ collapsed, onToggle, onOpenAiAgent }
const { t } = useI18n(); const { t } = useI18n();
const { user } = useAuth(); const { user } = useAuth();
const [profileOpen, setProfileOpen] = useState(false); const [profileOpen, setProfileOpen] = useState(false);
const [noticesOpen, setNoticesOpen] = useState(false);
const status = useSystemStatus();
const handleLogout = () => { const handleLogout = () => {
authApi.logout(); authApi.logout();
@@ -51,6 +55,15 @@ const TopHeader = memo(function TopHeader({ collapsed, onToggle, onOpenAiAgent }
</Button> </Button>
<SearchDialog open={searchOpen} onClose={() => setSearchOpen(false)} /> <SearchDialog open={searchOpen} onClose={() => setSearchOpen(false)} />
<Flex style={{ marginLeft: 'auto' }} align="center" gap={12}> <Flex style={{ marginLeft: 'auto' }} align="center" gap={12}>
<Tooltip title={t('Notices')}>
<Button
type="text"
icon={<BellOutlined />}
aria-label={t('Notices')}
onClick={() => setNoticesOpen(true)}
style={{ paddingInline: 8, height: 40 }}
/>
</Tooltip>
<Tooltip title={t('AI Agent')}> <Tooltip title={t('AI Agent')}>
<Button <Button
type="text" type="text"
@@ -81,6 +94,7 @@ const TopHeader = memo(function TopHeader({ collapsed, onToggle, onOpenAiAgent }
</Button> </Button>
</Dropdown> </Dropdown>
<ProfileModal open={profileOpen} onClose={() => setProfileOpen(false)} /> <ProfileModal open={profileOpen} onClose={() => setProfileOpen(false)} />
<NoticesModal open={noticesOpen} onClose={() => setNoticesOpen(false)} version={status?.version || ''} />
</Flex> </Flex>
</Header> </Header>
); );

View File

@@ -17,6 +17,7 @@ import { EmptyState } from './components/EmptyState';
import { ContextMenu } from './components/ContextMenu'; import { ContextMenu } from './components/ContextMenu';
import { DropzoneOverlay } from './components/DropzoneOverlay'; import { DropzoneOverlay } from './components/DropzoneOverlay';
import { CreateDirModal } from './components/Modals/CreateDirModal'; import { CreateDirModal } from './components/Modals/CreateDirModal';
import { CreateFileModal } from './components/Modals/CreateFileModal';
import { RenameModal } from './components/Modals/RenameModal'; import { RenameModal } from './components/Modals/RenameModal';
import { ProcessorModal } from './components/Modals/ProcessorModal'; import { ProcessorModal } from './components/Modals/ProcessorModal';
import UploadModal from './components/Modals/UploadModal'; import UploadModal from './components/Modals/UploadModal';
@@ -49,6 +50,7 @@ const FileExplorerPage = memo(function FileExplorerPage() {
// --- State for Modals --- // --- State for Modals ---
const [creatingDir, setCreatingDir] = useState(false); const [creatingDir, setCreatingDir] = useState(false);
const [creatingFile, setCreatingFile] = useState(false);
const [renaming, setRenaming] = useState<VfsEntry | null>(null); const [renaming, setRenaming] = useState<VfsEntry | null>(null);
const [sharingEntries, setSharingEntries] = useState<VfsEntry[]>([]); const [sharingEntries, setSharingEntries] = useState<VfsEntry[]>([]);
const [detailEntry, setDetailEntry] = useState<VfsEntry | null>(null); const [detailEntry, setDetailEntry] = useState<VfsEntry | null>(null);
@@ -138,7 +140,7 @@ const FileExplorerPage = memo(function FileExplorerPage() {
clearSearchSelection(); clearSearchSelection();
}, [clearSearchSelection, clearSelection]); }, [clearSearchSelection, clearSelection]);
const { doCreateDir: doCreateDirInCurrentDir } = useFileActions({ const { doCreateDir: doCreateDirInCurrentDir, doCreateFile: doCreateFileInCurrentDir } = useFileActions({
path, path,
refresh, refresh,
clearSelection, clearSelection,
@@ -343,6 +345,7 @@ const FileExplorerPage = memo(function FileExplorerPage() {
{/* --- Modals & Context Menus --- */} {/* --- Modals & Context Menus --- */}
<CreateDirModal open={creatingDir} onOk={(name) => { doCreateDirInCurrentDir(name); setCreatingDir(false); }} onCancel={() => setCreatingDir(false)} /> <CreateDirModal open={creatingDir} onOk={(name) => { doCreateDirInCurrentDir(name); setCreatingDir(false); }} onCancel={() => setCreatingDir(false)} />
<CreateFileModal open={creatingFile} onOk={(name) => { doCreateFileInCurrentDir(name); setCreatingFile(false); }} onCancel={() => setCreatingFile(false)} />
<RenameModal entry={renaming} onOk={(entry, newName) => { doRename(entry, newName); setRenaming(null); }} onCancel={() => setRenaming(null)} /> <RenameModal entry={renaming} onOk={(entry, newName) => { doRename(entry, newName); setRenaming(null); }} onCancel={() => setRenaming(null)} />
<FileDetailModal entry={detailEntry} loading={detailLoading} data={detailData} onClose={() => setDetailEntry(null)} /> <FileDetailModal entry={detailEntry} loading={detailLoading} data={detailData} onClose={() => setDetailEntry(null)} />
<MoveCopyModal <MoveCopyModal
@@ -422,6 +425,7 @@ const FileExplorerPage = memo(function FileExplorerPage() {
}} }}
onUploadFile={openFilePicker} onUploadFile={openFilePicker}
onUploadDirectory={openDirectoryPicker} onUploadDirectory={openDirectoryPicker}
onCreateFile={() => setCreatingFile(true)}
onCreateDir={() => setCreatingDir(true)} onCreateDir={() => setCreatingDir(true)}
onShare={doShare} onShare={doShare}
onGetDirectLink={doGetDirectLink} onGetDirectLink={doGetDirectLink}

View File

@@ -8,7 +8,7 @@ import { useI18n } from '../../../i18n';
import { import {
FolderFilled, AppstoreOutlined, AppstoreAddOutlined, DownloadOutlined, FolderFilled, AppstoreOutlined, AppstoreAddOutlined, DownloadOutlined,
EditOutlined, DeleteOutlined, InfoCircleOutlined, UploadOutlined, PlusOutlined, EditOutlined, DeleteOutlined, InfoCircleOutlined, UploadOutlined, PlusOutlined,
ShareAltOutlined, LinkOutlined, CopyOutlined, SwapOutlined ShareAltOutlined, LinkOutlined, CopyOutlined, SwapOutlined, FileAddOutlined
} from '@ant-design/icons'; } from '@ant-design/icons';
interface ContextMenuProps { interface ContextMenuProps {
@@ -28,6 +28,7 @@ interface ContextMenuProps {
onProcess: (entry: VfsEntry, processorType: string) => void; onProcess: (entry: VfsEntry, processorType: string) => void;
onUploadFile: () => void; onUploadFile: () => void;
onUploadDirectory: () => void; onUploadDirectory: () => void;
onCreateFile: () => void;
onCreateDir: () => void; onCreateDir: () => void;
onShare: (entries: VfsEntry[]) => void; onShare: (entries: VfsEntry[]) => void;
onGetDirectLink: (entry: VfsEntry) => void; onGetDirectLink: (entry: VfsEntry) => void;
@@ -70,6 +71,7 @@ export const ContextMenu: React.FC<ContextMenuProps> = (props) => {
{ key: 'upload-folder', label: t('Upload Folder'), onClick: actions.onUploadDirectory }, { key: 'upload-folder', label: t('Upload Folder'), onClick: actions.onUploadDirectory },
], ],
}, },
{ key: 'new-file', label: t('New File'), icon: <FileAddOutlined />, onClick: actions.onCreateFile },
{ key: 'mkdir', label: t('New Folder'), icon: <PlusOutlined />, onClick: actions.onCreateDir }, { key: 'mkdir', label: t('New Folder'), icon: <PlusOutlined />, onClick: actions.onCreateDir },
]; ];
} }

View File

@@ -0,0 +1,43 @@
import React, { useEffect, useState } from 'react';
import { Input, Modal } from 'antd';
import { useI18n } from '../../../../i18n';
interface CreateFileModalProps {
open: boolean;
onOk: (name: string) => void;
onCancel: () => void;
}
export const CreateFileModal: React.FC<CreateFileModalProps> = ({ open, onOk, onCancel }) => {
const [name, setName] = useState('');
const { t } = useI18n();
useEffect(() => {
if (open) {
setName('');
}
}, [open]);
const handleOk = () => {
onOk(name);
};
return (
<Modal
title={t('New File')}
open={open}
onOk={handleOk}
onCancel={onCancel}
okButtonProps={{ disabled: !name.trim() }}
destroyOnHidden
>
<Input
placeholder={t('Filename')}
value={name}
onChange={(e) => setName(e.target.value)}
onPressEnter={handleOk}
autoFocus
/>
</Modal>
);
};

View File

@@ -37,6 +37,20 @@ export function useFileActions({ path, refresh, clearSelection, onShare, onGetDi
} }
}, [path, refresh, t]); }, [path, refresh, t]);
const doCreateFile = useCallback(async (name: string) => {
if (!name.trim()) {
message.warning(t('Please input name'));
return;
}
try {
const fullPath = (path === '/' ? '' : path) + '/' + name.trim();
await vfsApi.uploadFile(fullPath, new Blob([]));
refresh();
} catch (e: any) {
message.error(e.message);
}
}, [path, refresh, t]);
const doDelete = useCallback(async (entries: VfsEntry[]) => { const doDelete = useCallback(async (entries: VfsEntry[]) => {
Modal.confirm({ Modal.confirm({
title: t('Confirm delete {name}?', { name: entries.length > 1 ? `${entries.length} ${t('items')}` : entries[0].name }), title: t('Confirm delete {name}?', { name: entries.length > 1 ? `${entries.length} ${t('items')}` : entries[0].name }),
@@ -193,6 +207,7 @@ export function useFileActions({ path, refresh, clearSelection, onShare, onGetDi
return { return {
doCreateDir, doCreateDir,
doCreateFile,
doDelete, doDelete,
doRename, doRename,
doDownload, doDownload,

View File

@@ -487,7 +487,7 @@ export function useUploader(path: string, onUploadComplete: () => void) {
const parentDir = task.targetPath.replace(/\/[^/]+$/, '') || '/'; const parentDir = task.targetPath.replace(/\/[^/]+$/, '') || '/';
try { try {
await ensureDirectoryTree(parentDir); await ensureDirectoryTree(parentDir);
await vfsApi.uploadStream(task.targetPath, task.file, shouldOverwrite, (loaded, total) => { const uploadResult = await vfsApi.uploadStream(task.targetPath, task.file, shouldOverwrite, (loaded, total) => {
mutateFiles((prev) => prev.map((f) => { mutateFiles((prev) => prev.map((f) => {
if (f.id !== task.id) return f; if (f.id !== task.id) return f;
const effectiveTotal = total > 0 ? total : f.size; const effectiveTotal = total > 0 ? total : f.size;
@@ -502,9 +502,20 @@ export function useUploader(path: string, onUploadComplete: () => void) {
})); }));
}); });
const link = await vfsApi.getTempLinkToken(task.targetPath, 60 * 60 * 24 * 365 * 10); const actualPath = uploadResult?.path || task.targetPath;
const finalSize = typeof uploadResult?.size === 'number' && uploadResult.size > 0
? uploadResult.size
: task.size;
const link = await vfsApi.getTempLinkToken(actualPath, 60 * 60 * 24 * 365 * 10);
const permanentLink = vfsApi.getTempPublicUrl(link.token); const permanentLink = vfsApi.getTempPublicUrl(link.token);
updateFile(task.id, { status: 'success', progress: 100, loadedBytes: task.size, permanentLink }); updateFile(task.id, {
status: 'success',
progress: 100,
loadedBytes: finalSize,
size: finalSize,
targetPath: actualPath,
permanentLink,
});
} catch (err: unknown) { } catch (err: unknown) {
const error = err instanceof Error ? err.message : t('Upload failed'); const error = err instanceof Error ? err.message : t('Upload failed');
updateFile(task.id, { status: 'error', error, progress: 0 }); updateFile(task.id, { status: 'error', error, progress: 0 });

View File

@@ -1,5 +1,5 @@
import { memo, useState } from 'react'; import { memo, useState } from 'react';
import { Button, Typography, Upload, message, Modal, Card } from 'antd'; import { Button, Typography, Upload, message, Modal, Card, Checkbox, Space, Radio } from 'antd';
import PageCard from '../../components/PageCard'; import PageCard from '../../components/PageCard';
import { UploadOutlined, DownloadOutlined } from '@ant-design/icons'; import { UploadOutlined, DownloadOutlined } from '@ant-design/icons';
import { backupApi } from '../../api/backup'; import { backupApi } from '../../api/backup';
@@ -7,14 +7,40 @@ import { useI18n } from '../../i18n';
const { Paragraph, Text } = Typography; const { Paragraph, Text } = Typography;
const BACKUP_SECTIONS = [
{ key: 'user_accounts', labelKey: 'User Accounts' },
{ key: 'storage_adapters', labelKey: 'Storage Adapters' },
{ key: 'automation_tasks', labelKey: 'Automation Tasks' },
{ key: 'share_links', labelKey: 'Share Links' },
{ key: 'configurations', labelKey: 'Configurations' },
{ key: 'ai_providers', labelKey: 'AI Providers' },
{ key: 'ai_models', labelKey: 'AI Models' },
{ key: 'ai_default_models', labelKey: 'AI Default Models' },
{ key: 'plugins', labelKey: 'Plugin Data' },
] as const;
type BackupSection = typeof BACKUP_SECTIONS[number]['key'];
const ALL_SECTION_KEYS = BACKUP_SECTIONS.map((section) => section.key) as BackupSection[];
const BackupPage = memo(function BackupPage() { const BackupPage = memo(function BackupPage() {
const [loading, setLoading] = useState(false); const [loading, setLoading] = useState(false);
const [selectedSections, setSelectedSections] = useState<BackupSection[]>(ALL_SECTION_KEYS);
const [importMode, setImportMode] = useState<'replace' | 'merge'>('replace');
const { t } = useI18n(); const { t } = useI18n();
const importWarning = importMode === 'replace'
? t('Warning: This will clear data in the backup sections before importing.')
: t('Warning: This will merge data in the backup sections and overwrite existing records with the same ID.');
const importWarningType = importMode === 'replace' ? 'danger' : 'warning';
const exportOptions = BACKUP_SECTIONS.map((section) => ({
label: t(section.labelKey),
value: section.key,
}));
const canExport = selectedSections.length > 0;
const handleExport = async () => { const handleExport = async () => {
setLoading(true); setLoading(true);
try { try {
await backupApi.export(); await backupApi.export(selectedSections);
message.success(t('Export started, check your downloads.')); message.success(t('Export started, check your downloads.'));
} catch (e: any) { } catch (e: any) {
message.error(e.message || t('Export failed')); message.error(e.message || t('Export failed'));
@@ -29,7 +55,9 @@ const BackupPage = memo(function BackupPage() {
content: ( content: (
<Typography> <Typography>
<Paragraph>{t('Are you sure to import from this file?')}</Paragraph> <Paragraph>{t('Are you sure to import from this file?')}</Paragraph>
<Paragraph strong>{t('Warning: This will overwrite all data including users (with passwords), settings, storages and tasks. Irreversible!')}</Paragraph> <Paragraph>
<Text strong type={importWarningType}>{importWarning}</Text>
</Paragraph>
</Typography> </Typography>
), ),
okText: t('Confirm Import'), okText: t('Confirm Import'),
@@ -38,7 +66,7 @@ const BackupPage = memo(function BackupPage() {
onOk: async () => { onOk: async () => {
setLoading(true); setLoading(true);
try { try {
const response = await backupApi.import(file); const response = await backupApi.import(file, importMode);
message.success(response.message || t('Import succeeded! The page will refresh.')); message.success(response.message || t('Import succeeded! The page will refresh.'));
setTimeout(() => window.location.reload(), 2000); setTimeout(() => window.location.reload(), 2000);
} catch (e: any) { } catch (e: any) {
@@ -57,13 +85,22 @@ const BackupPage = memo(function BackupPage() {
<div style={{ display: 'flex', gap: '16px' }}> <div style={{ display: 'flex', gap: '16px' }}>
<Card title={t('Export')} style={{ flex: 1 }}> <Card title={t('Export')} style={{ flex: 1 }}>
<Paragraph> <Paragraph>
{t('Export all data (adapters, users, tasks, shares) into a JSON file.')} {t('Export selected data into a JSON file.')}
<Text strong>{t('Keep your backup file safe.')}</Text> <Text strong>{t('Keep your backup file safe.')}</Text>
</Paragraph> </Paragraph>
<Space direction="vertical" size={8} style={{ width: '100%', marginBottom: 12 }}>
<Text>{t('Select backup sections')}</Text>
<Checkbox.Group
options={exportOptions}
value={selectedSections}
onChange={(values) => setSelectedSections(values as BackupSection[])}
/>
</Space>
<Button <Button
icon={<DownloadOutlined />} icon={<DownloadOutlined />}
onClick={handleExport} onClick={handleExport}
loading={loading} loading={loading}
disabled={!canExport}
> >
{t('Export Backup')} {t('Export Backup')}
</Button> </Button>
@@ -71,8 +108,22 @@ const BackupPage = memo(function BackupPage() {
<Card title={t('Import')} style={{ flex: 1 }}> <Card title={t('Import')} style={{ flex: 1 }}>
<Paragraph> <Paragraph>
{t('Restore data from a previously exported JSON file.')} {t('Restore data from a previously exported JSON file.')}
<Text strong type="danger">{t('Warning: This will clear and overwrite existing data.')}</Text>
</Paragraph> </Paragraph>
<Space direction="vertical" size={8} style={{ width: '100%', marginBottom: 12 }}>
<Text>{t('Import mode')}</Text>
<Radio.Group
optionType="button"
buttonStyle="solid"
value={importMode}
onChange={(event) => setImportMode(event.target.value)}
>
<Radio.Button value="merge">{t('Merge (upsert by ID)')}</Radio.Button>
<Radio.Button value="replace">{t('Replace (clear before import)')}</Radio.Button>
</Radio.Group>
<Text type={importWarningType}>
{importWarning}
</Text>
</Space>
<Upload <Upload
beforeUpload={handleImport} beforeUpload={handleImport}
showUploadList={false} showUploadList={false}

View File

@@ -15,7 +15,7 @@ const TasksPage = memo(function TasksPage() {
const [form] = Form.useForm(); const [form] = Form.useForm();
const [availableProcessors, setAvailableProcessors] = useState<ProcessorTypeMeta[]>([]); const [availableProcessors, setAvailableProcessors] = useState<ProcessorTypeMeta[]>([]);
const { t } = useI18n(); const { t } = useI18n();
const [pathPickerOpen, setPathPickerOpen] = useState(false); const [pathPickerField, setPathPickerField] = useState<'path_prefix' | 'cron_path' | null>(null);
const fetchList = useCallback(async () => { const fetchList = useCallback(async () => {
setLoading(true); setLoading(true);
@@ -42,7 +42,8 @@ const TasksPage = memo(function TasksPage() {
name: '', name: '',
event: 'file_written', event: 'file_written',
enabled: true, enabled: true,
processor_config: {} processor_config: {},
trigger_config: {}
}); });
setOpen(true); setOpen(true);
}; };
@@ -52,7 +53,8 @@ const TasksPage = memo(function TasksPage() {
form.resetFields(); form.resetFields();
form.setFieldsValue({ form.setFieldsValue({
...rec, ...rec,
processor_config: rec.processor_config || {} processor_config: rec.processor_config || {},
trigger_config: rec.trigger_config || {}
}); });
setOpen(true); setOpen(true);
}; };
@@ -60,7 +62,15 @@ const TasksPage = memo(function TasksPage() {
const submit = async () => { const submit = async () => {
try { try {
const values = await form.validateFields(); const values = await form.validateFields();
const body = { ...values }; const triggerConfig = { ...(values.trigger_config || {}) };
if (values.event === 'cron') {
delete triggerConfig.path_prefix;
delete triggerConfig.filename_regex;
} else {
delete triggerConfig.cron_expr;
delete triggerConfig.path;
}
const body = { ...values, trigger_config: triggerConfig };
setLoading(true); setLoading(true);
if (editing) { if (editing) {
await tasksApi.update(editing.id, body); await tasksApi.update(editing.id, body);
@@ -133,7 +143,10 @@ const TasksPage = memo(function TasksPage() {
const selectedProcessor = Form.useWatch('processor_type', form); const selectedProcessor = Form.useWatch('processor_type', form);
const currentProcessorMeta = availableProcessors.find(p => p.type === selectedProcessor); const currentProcessorMeta = availableProcessors.find(p => p.type === selectedProcessor);
const watchedPathPattern = Form.useWatch('path_pattern', form); const selectedEvent = Form.useWatch('event', form);
const watchedPathPrefix = Form.useWatch(['trigger_config', 'path_prefix'], form);
const watchedCronPath = Form.useWatch(['trigger_config', 'path'], form);
const isCron = selectedEvent === 'cron';
return ( return (
@@ -158,11 +171,11 @@ const TasksPage = memo(function TasksPage() {
title={editing ? `${t('Edit Task')}: ${editing.name}` : t('Create Automation Task')} title={editing ? `${t('Edit Task')}: ${editing.name}` : t('Create Automation Task')}
width={480} width={480}
open={open} open={open}
onClose={() => { setOpen(false); setEditing(null); }} onClose={() => { setOpen(false); setEditing(null); setPathPickerField(null); }}
destroyOnHidden destroyOnHidden
extra={ extra={
<Space> <Space>
<Button onClick={() => { setOpen(false); setEditing(null); }}>{t('Cancel')}</Button> <Button onClick={() => { setOpen(false); setEditing(null); setPathPickerField(null); }}>{t('Cancel')}</Button>
<Button type="primary" onClick={submit} loading={loading}>{t('Submit')}</Button> <Button type="primary" onClick={submit} loading={loading}>{t('Submit')}</Button>
</Space> </Space>
} }
@@ -174,19 +187,45 @@ const TasksPage = memo(function TasksPage() {
<Form.Item name="event" label={t('Trigger Event')} rules={[{ required: true }]}> <Form.Item name="event" label={t('Trigger Event')} rules={[{ required: true }]}>
<Select options={[ <Select options={[
{ value: 'file_written', label: t('File Written') }, { value: 'file_written', label: t('File Written') },
{ value: 'file_deleted', label: t('File Deleted') }, { value: 'file_deleted', label: t('File Deleted') },
{ value: 'cron', label: t('Scheduled') },
]} /> ]} />
</Form.Item> </Form.Item>
<Typography.Title level={5} style={{ marginTop: 8, fontSize: 14 }}>{t('Matching Rules')}</Typography.Title> {isCron ? (
<Form.Item name="path_pattern" label={t('Path Prefix (optional)')}> <>
<Input <Typography.Title level={5} style={{ marginTop: 8, fontSize: 14 }}>{t('Schedule')}</Typography.Title>
placeholder="/images/screenshots" <Form.Item
addonAfter={<Button size="small" onClick={() => setPathPickerOpen(true)}>{t('Select')}</Button>} name={['trigger_config', 'cron_expr']}
/> label={t('Cron Expression')}
</Form.Item> rules={[{ required: true }]}
<Form.Item name="filename_regex" label={t('Filename Regex (optional)')}> >
<Input placeholder=".*\.png$" /> <Input placeholder="*/5 * * * * *" />
</Form.Item> </Form.Item>
<Form.Item
name={['trigger_config', 'path']}
label={t('Target Path')}
rules={[{ required: true }]}
>
<Input
placeholder="/images"
addonAfter={<Button size="small" onClick={() => setPathPickerField('cron_path')}>{t('Select')}</Button>}
/>
</Form.Item>
</>
) : (
<>
<Typography.Title level={5} style={{ marginTop: 8, fontSize: 14 }}>{t('Matching Rules')}</Typography.Title>
<Form.Item name={['trigger_config', 'path_prefix']} label={t('Path Prefix (optional)')}>
<Input
placeholder="/images/screenshots"
addonAfter={<Button size="small" onClick={() => setPathPickerField('path_prefix')}>{t('Select')}</Button>}
/>
</Form.Item>
<Form.Item name={['trigger_config', 'filename_regex']} label={t('Filename Regex (optional)')}>
<Input placeholder=".*\\.png$" />
</Form.Item>
</>
)}
<Form.Item name="enabled" label={t('Enabled')} valuePropName="checked"> <Form.Item name="enabled" label={t('Enabled')} valuePropName="checked">
<Switch /> <Switch />
</Form.Item> </Form.Item>
@@ -205,11 +244,18 @@ const TasksPage = memo(function TasksPage() {
</Form> </Form>
</Drawer> </Drawer>
<PathSelectorModal <PathSelectorModal
open={pathPickerOpen} open={!!pathPickerField}
mode="directory" mode={pathPickerField === 'cron_path' ? 'any' : 'directory'}
initialPath={watchedPathPattern || '/'} initialPath={(pathPickerField === 'cron_path' ? watchedCronPath : watchedPathPrefix) || '/'}
onCancel={() => setPathPickerOpen(false)} onCancel={() => setPathPickerField(null)}
onOk={(p) => { form.setFieldsValue({ path_pattern: p }); setPathPickerOpen(false); }} onOk={(p) => {
if (pathPickerField === 'cron_path') {
form.setFieldValue(['trigger_config', 'path'], p);
} else if (pathPickerField === 'path_prefix') {
form.setFieldValue(['trigger_config', 'path_prefix'], p);
}
setPathPickerField(null);
}}
/> />
</PageCard> </PageCard>
); );

View File

@@ -5,7 +5,7 @@
.fx-agent-chat-scroll { .fx-agent-chat-scroll {
flex: 1; flex: 1;
overflow-y: auto; overflow-y: auto;
padding: 0; padding: 8px 4px 12px;
border-radius: 0; border-radius: 0;
background: transparent; background: transparent;
border: 0; border: 0;
@@ -54,8 +54,12 @@
} }
.fx-agent-assistant-block { .fx-agent-assistant-block {
max-width: 100%; max-width: 92%;
padding: 2px 2px; padding: 12px 14px;
border-radius: 14px;
border: 1px solid var(--ant-color-border-secondary);
background: var(--ant-color-bg-container);
box-shadow: 0 1px 6px rgba(0, 0, 0, 0.04);
} }
.fx-agent-tool-block { .fx-agent-tool-block {
@@ -75,9 +79,11 @@
} }
.fx-agent-content { .fx-agent-content {
font-size: 13px; font-size: 14px;
line-height: 1.75; line-height: 1.7;
word-break: break-word; word-break: break-word;
overflow-wrap: anywhere;
color: var(--ant-color-text);
} }
.fx-agent-tool-pills .ant-tag { .fx-agent-tool-pills .ant-tag {
@@ -122,19 +128,58 @@
} }
.fx-agent-md p { .fx-agent-md p {
margin: 0 0 0.5em; margin: 0 0 0.65em;
white-space: pre-wrap;
} }
.fx-agent-md p:last-child { .fx-agent-md p:last-child {
margin-bottom: 0; margin-bottom: 0;
} }
.fx-agent-md > :first-child {
margin-top: 0;
}
.fx-agent-md > :last-child {
margin-bottom: 0;
}
.fx-agent-md h1,
.fx-agent-md h2,
.fx-agent-md h3,
.fx-agent-md h4 {
margin: 0.9em 0 0.4em;
font-weight: 600;
line-height: 1.35;
}
.fx-agent-md h1 {
font-size: 18px;
}
.fx-agent-md h2 {
font-size: 16px;
}
.fx-agent-md h3 {
font-size: 15px;
}
.fx-agent-md h4 {
font-size: 14px;
}
.fx-agent-md ul, .fx-agent-md ul,
.fx-agent-md ol { .fx-agent-md ol {
margin: 0 0 0.5em; margin: 0 0 0.65em;
padding-left: 1.2em; padding-left: 1.2em;
} }
.fx-agent-md li {
margin-bottom: 0.35em;
white-space: pre-wrap;
}
.fx-agent-md code { .fx-agent-md code {
padding: 1px 6px; padding: 1px 6px;
border-radius: 6px; border-radius: 6px;
@@ -145,12 +190,13 @@
} }
.fx-agent-md pre { .fx-agent-md pre {
margin: 0 0 0.5em; margin: 0 0 0.65em;
padding: 8px 10px; padding: 10px 12px;
border-radius: 10px; border-radius: 10px;
background: var(--ant-color-bg-container); background: rgba(0, 0, 0, 0.03);
border: 1px solid var(--ant-color-border-secondary); border: 1px solid var(--ant-color-border-secondary);
overflow: auto; overflow: auto;
white-space: pre;
} }
.fx-agent-md pre code { .fx-agent-md pre code {
@@ -158,19 +204,54 @@
padding: 0; padding: 0;
border: 0; border: 0;
background: transparent; background: transparent;
font-size: 11px; font-size: 12px;
line-height: 1.55; line-height: 1.6;
} }
.fx-agent-md blockquote { .fx-agent-md blockquote {
margin: 0 0 0.65em; margin: 0 0 0.7em;
padding: 0 0 0 10px; padding: 6px 10px;
border-radius: 8px;
border-left: 3px solid var(--ant-color-border); border-left: 3px solid var(--ant-color-border);
background: rgba(0, 0, 0, 0.03);
color: var(--ant-color-text-tertiary); color: var(--ant-color-text-tertiary);
} }
.fx-agent-md a { .fx-agent-md a {
color: var(--ant-color-primary); color: var(--ant-color-primary);
text-decoration: underline;
}
.fx-agent-md hr {
margin: 0.8em 0;
border: 0;
border-top: 1px solid var(--ant-color-border-secondary);
}
.fx-agent-md img {
max-width: 100%;
border-radius: 8px;
border: 1px solid var(--ant-color-border-secondary);
}
.fx-agent-md table {
width: 100%;
border-collapse: collapse;
margin: 0 0 0.8em;
font-size: 13px;
}
.fx-agent-md th,
.fx-agent-md td {
padding: 6px 8px;
border: 1px solid var(--ant-color-border-secondary);
text-align: left;
vertical-align: top;
}
.fx-agent-md thead th {
background: rgba(0, 0, 0, 0.04);
font-weight: 600;
} }
.fx-agent-tool-details { .fx-agent-tool-details {
@@ -228,8 +309,8 @@
} }
.fx-agent-composer .ant-input { .fx-agent-composer .ant-input {
font-size: 12px; font-size: 14px;
line-height: 1.6; line-height: 1.7;
} }
.fx-agent-running { .fx-agent-running {