Compare commits

...

18 Commits
v2.2.0 ... main

Author SHA1 Message Date
shiyu
deddbdf585 feat: implement plugin frame cleanup on unload and enhance iframe handling 2026-05-06 23:30:20 +08:00
shiyu
bd24d7eeeb feat: add download locking and flood wait handling in TelegramAdapter 2026-05-06 23:00:10 +08:00
shiyu
93d5e5e313 feat: enhance TelegramAdapter with message caching and connection management 2026-05-06 22:12:35 +08:00
时雨
7b5f5e986e feat: add recent files backend APIs (#119) 2026-05-06 21:20:29 +08:00
shiyu
7741c1fe55 fix: handle native video thumbnail availability in get_or_create_thumb function 2026-05-04 13:51:53 +08:00
shiyu
c2015dd17c feat: enhance thumbnail handling and add native thumbnail support in VirtualFS 2026-05-03 23:51:39 +08:00
shiyu
ca500cbbf8 fix: handle FileNotFoundError in dav_get function and return 404 response 2026-05-03 23:51:39 +08:00
dependabot[bot]
d7aa3f1796 chore(deps): bump the uv group across 1 directory with 2 updates (#118)
Bumps the uv group with 2 updates in the / directory: [python-dotenv](https://github.com/theskumar/python-dotenv) and [python-multipart](https://github.com/Kludex/python-multipart).


Updates `python-dotenv` from 1.2.1 to 1.2.2
- [Release notes](https://github.com/theskumar/python-dotenv/releases)
- [Changelog](https://github.com/theskumar/python-dotenv/blob/main/CHANGELOG.md)
- [Commits](https://github.com/theskumar/python-dotenv/compare/v1.2.1...v1.2.2)

Updates `python-multipart` from 0.0.22 to 0.0.26
- [Release notes](https://github.com/Kludex/python-multipart/releases)
- [Changelog](https://github.com/Kludex/python-multipart/blob/main/CHANGELOG.md)
- [Commits](https://github.com/Kludex/python-multipart/compare/0.0.22...0.0.26)

---
updated-dependencies:
- dependency-name: python-dotenv
  dependency-version: 1.2.2
  dependency-type: direct:production
  dependency-group: uv
- dependency-name: python-multipart
  dependency-version: 0.0.26
  dependency-type: direct:production
  dependency-group: uv
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-05-03 13:58:32 +08:00
dependabot[bot]
460ce0c954 chore(deps): bump pillow in the uv group across 1 directory (#117)
Bumps the uv group with 1 update in the / directory: [pillow](https://github.com/python-pillow/Pillow).


Updates `pillow` from 12.1.1 to 12.2.0
- [Release notes](https://github.com/python-pillow/Pillow/releases)
- [Changelog](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst)
- [Commits](https://github.com/python-pillow/Pillow/compare/12.1.1...12.2.0)

---
updated-dependencies:
- dependency-name: pillow
  dependency-version: 12.2.0
  dependency-type: direct:production
  dependency-group: uv
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-05-03 13:54:14 +08:00
shiyu
873ef7aee5 chore: update version to v2.2.1 2026-05-03 08:28:39 +08:00
shiyu
dd2400c3ef fix: correct adapter type casing from "PikPak" to "pikpak" 2026-05-03 08:09:39 +08:00
shiyu
e0d6039a1a fix: correct adapter type casing from "pikpak" to "PikPak" 2026-05-03 07:56:02 +08:00
shiyu
676dacce41 feat: update PikPak adapter configuration and enhance token handling 2026-05-03 07:51:55 +08:00
shiyu
c514e17803 feat: add user_id to configuration and improve error handling in token refresh 2026-05-03 06:44:27 +08:00
shiyu
54821f78c6 feat: implement caching for adapter usage and display summary in AdaptersPage 2026-05-03 01:53:48 +08:00
shiyu
1f608974dc feat: enhance adapter usage tracking with new interface and display capacity usage in AdaptersPage 2026-05-02 22:47:22 +08:00
shiyu
a8737b883e feat: add adapter usage tracking and retrieval methods across various adapters 2026-05-02 21:55:35 +08:00
shiyu
dcc8aa139e feat: add LOCK and UNLOCK methods to WebDAV API and improve path handling in existing methods 2026-05-02 16:30:56 +08:00
31 changed files with 934 additions and 249 deletions

View File

@@ -19,12 +19,14 @@ from domain.audit import api as audit
from domain.permission import api as permission
from domain.user import api as user
from domain.role import api as role
from domain.recent_files import api as recent_files
def include_routers(app: FastAPI):
app.include_router(adapters.router)
app.include_router(search_api.router)
app.include_router(virtual_fs.router)
app.include_router(recent_files.router)
app.include_router(auth.router)
app.include_router(config.router)
app.include_router(processors.router)

View File

@@ -51,6 +51,29 @@ async def available_adapter_types(
return success(data)
@router.get("/usage")
@audit(action=AuditAction.READ, description="获取适配器容量使用情况")
@require_system_permission(AdapterPermission.LIST)
async def list_adapter_usages(
    request: Request,
    current_user: Annotated[User, Depends(get_current_active_user)]
):
    """Return capacity/usage figures for every configured storage adapter."""
    return success(await AdapterService.list_adapter_usages())
@router.get("/{adapter_id}/usage")
@audit(action=AuditAction.READ, description="获取单个适配器容量使用情况")
@require_system_permission(AdapterPermission.LIST)
async def get_adapter_usage(
    request: Request,
    adapter_id: int,
    current_user: Annotated[User, Depends(get_current_active_user)]
):
    """Return capacity/usage figures for one storage adapter, by its id."""
    return success(await AdapterService.get_adapter_usage(adapter_id))
@router.get("/{adapter_id}")
@audit(action=AuditAction.READ, description="获取适配器详情")
@require_system_permission(AdapterPermission.LIST)

View File

@@ -21,3 +21,8 @@ class BaseAdapter(Protocol):
async def stream_file(self, root: str, rel: str, range_header: str | None): ...
async def stat_file(self, root: str, rel: str): ...
def get_effective_root(self, sub_path: str | None) -> str: ...
@runtime_checkable
class UsageCapableAdapter(Protocol):
    """Structural type for adapters that can report storage capacity usage.

    Implementors return a mapping with used/total/free byte counts plus
    provenance metadata ("source", "scope"); see the concrete adapters.
    """

    async def get_usage(self, root: str) -> Dict: ...

View File

@@ -455,6 +455,23 @@ class DropboxAdapter:
return StreamingResponse(iterator(), status_code=resp.status_code, headers=out_headers, media_type=content_type)
async def get_usage(self, root: str):
    """Report account-level space usage via Dropbox's get_space_usage RPC.

    Returns a dict with used/total/free byte counts (None when the API
    omits a figure) plus "source"/"scope" provenance tags.
    """
    resp = await self._api_json("/users/get_space_usage", {})
    resp.raise_for_status()
    body = resp.json() or {}
    raw_total = (body.get("allocation") or {}).get("allocated")
    raw_used = body.get("used")
    total_bytes = None if raw_total is None else int(raw_total)
    used_bytes = None if raw_used is None else int(raw_used)
    free_bytes = None
    if total_bytes is not None and used_bytes is not None:
        free_bytes = total_bytes - used_bytes
    return {
        "used_bytes": used_bytes,
        "total_bytes": total_bytes,
        "free_bytes": free_bytes,
        "source": "dropbox",
        "scope": "account",
    }
ADAPTER_TYPE = "dropbox"
CONFIG_SCHEMA = [
@@ -468,4 +485,3 @@ CONFIG_SCHEMA = [
def ADAPTER_FACTORY(rec): return DropboxAdapter(rec)

View File

@@ -541,6 +541,22 @@ class GoogleDriveAdapter:
except Exception:
return None
async def get_usage(self, root: str):
    """Report drive-level quota via the Drive v3 'about' endpoint.

    Requests only the storageQuota field; missing limit/usage values map
    to None in the returned dict.
    """
    resp = await self._request("GET", "/about", params={"fields": "storageQuota"})
    resp.raise_for_status()
    quota = (resp.json() or {}).get("storageQuota") or {}
    raw_limit = quota.get("limit")
    raw_usage = quota.get("usage")
    total_bytes = None if raw_limit is None else int(raw_limit)
    used_bytes = None if raw_usage is None else int(raw_usage)
    free_bytes = None
    if total_bytes is not None and used_bytes is not None:
        free_bytes = total_bytes - used_bytes
    return {
        "used_bytes": used_bytes,
        "total_bytes": total_bytes,
        "free_bytes": free_bytes,
        "source": "googledrive",
        "scope": "drive",
    }
ADAPTER_TYPE = "googledrive"

View File

@@ -329,6 +329,29 @@ class LocalAdapter:
info["exif"] = exif
return info
async def get_usage(self, root: str):
    """Report usage for a local mount: bytes under root plus disk totals.

    The directory walk runs in a worker thread so large trees do not
    block the event loop; unreadable or vanished files are skipped.
    """
    base = Path(root).resolve()

    def _collect():
        consumed = 0
        for dirpath, _dirnames, filenames in os.walk(base):
            for name in filenames:
                try:
                    consumed += (Path(dirpath) / name).stat().st_size
                except OSError:
                    # File disappeared or is unreadable — ignore, same as before.
                    continue
        capacity = shutil.disk_usage(base)
        return {
            "used_bytes": consumed,
            "total_bytes": capacity.total,
            "free_bytes": capacity.free,
            "source": "local",
            "scope": "mount",
        }

    return await asyncio.to_thread(_collect)
ADAPTER_TYPE = "local"
CONFIG_SCHEMA = [

View File

@@ -443,6 +443,21 @@ class OneDriveAdapter:
resp.raise_for_status()
return self._format_item(resp.json())
async def get_usage(self, root: str):
    """Report drive quota from Microsoft Graph (/me/drive?$select=quota).

    Graph supplies used/total/remaining directly, so no subtraction is
    needed; absent values map to None.
    """
    resp = await self._request("GET", full_url=f"{MS_GRAPH_URL}/me/drive?$select=quota")
    resp.raise_for_status()
    quota = (resp.json() or {}).get("quota") or {}

    def _to_int(value):
        return None if value is None else int(value)

    return {
        "used_bytes": _to_int(quota.get("used")),
        "total_bytes": _to_int(quota.get("total")),
        "free_bytes": _to_int(quota.get("remaining")),
        "source": "onedrive",
        "scope": "drive",
    }
ADAPTER_TYPE = "onedrive"

View File

@@ -13,8 +13,9 @@ from models import StorageAdapter
from .base import BaseAdapter
API_BASE = "https://api-drive.mypikpak.net/drive/v1"
USER_BASE = "https://user.mypikpak.net/v1"
API_BASE = "https://api-drive.mypikpak.com/drive/v1"
USER_BASE = "https://user.mypikpak.com/v1"
TOKEN_REFRESH_BUFFER = 300
ANDROID_ALGORITHMS = [
"SOP04dGzk0TNO7t7t9ekDbAmx+eq0OI1ovEx",
@@ -62,11 +63,11 @@ PLATFORM_CONFIG = {
"android": {
"client_id": "YNxT9w7GMdWvEOKa",
"client_secret": "dbw2OtmVEeuUvIptb1Coyg",
"client_version": "1.53.2",
"client_version": "1.21.0",
"package_name": "com.pikcloud.pikpak",
"sdk_version": "2.0.6.206003",
"algorithms": ANDROID_ALGORITHMS,
"ua": None,
"ua": "ANDROID-com.pikcloud.pikpak/1.21.0",
},
"web": {
"client_id": "YUMx5nI8ZU8Ap8pm",
@@ -109,6 +110,13 @@ def _as_bool(value: Any, default: bool = False) -> bool:
return bool(value)
def _as_int(value: Any, default: int = 0) -> int:
try:
return int(value or default)
except Exception:
return default
def _root_payload(root: str | None) -> Tuple[str, str]:
raw = (root or "").strip()
if not raw:
@@ -158,9 +166,9 @@ class PikPakAdapter:
if not self.username or not self.password:
raise ValueError("PikPak adapter requires username and password")
self.platform = str(cfg.get("platform") or "web").strip().lower()
self.platform = str(cfg.get("platform") or "android").strip().lower()
if self.platform not in PLATFORM_CONFIG:
self.platform = "web"
self.platform = "android"
platform_cfg = PLATFORM_CONFIG[self.platform]
self.client_id = str(platform_cfg["client_id"])
@@ -170,10 +178,14 @@ class PikPakAdapter:
self.sdk_version = str(platform_cfg["sdk_version"])
self.algorithms = list(platform_cfg["algorithms"])
self.device_id = str(cfg.get("device_id") or "").strip() or _md5_text(self.username + self.password)
device_id = str(cfg.get("device_id") or "").strip()
if not device_id or device_id == _md5_text(self.username + self.password):
device_id = _md5_text(self.username)
self.device_id = device_id
self.user_id = str(cfg.get("user_id") or "").strip()
self.refresh_token = str(cfg.get("refresh_token") or "").strip()
self.access_token = str(cfg.get("access_token") or "").strip()
self.expires_at = _as_int(cfg.get("expires_at"), 0)
self.captcha_token = str(cfg.get("captcha_token") or "").strip()
self.root_id = str(cfg.get("root_id") or "").strip()
self.disable_media_link = _as_bool(cfg.get("disable_media_link"), True)
@@ -232,6 +244,18 @@ class PikPakAdapter:
path = m.group(1) if m else "/"
return f"{method.upper()}:{path}"
@staticmethod
def _full_action(method: str, url: str) -> str:
return f"{method.upper()}:{url}"
def _captcha_action(self, method: str, url: str, *, auth: bool) -> str:
    """Choose the captcha action form for a request.

    The unauthenticated sign-in endpoint is signed against the full URL;
    everything else uses the compact path-based action.
    """
    is_signin = url == f"{USER_BASE}/auth/signin"
    if is_signin and not auth:
        return self._full_action(method, url)
    return self._action(method, url)
def _has_valid_access_token(self) -> bool:
    """True when a token exists and outlives the refresh buffer window."""
    if not self.access_token:
        return False
    deadline = int(time.time()) + TOKEN_REFRESH_BUFFER
    return self.expires_at > deadline
def _download_headers(self) -> Dict[str, str]:
headers = {
"User-Agent": self.user_agent,
@@ -247,8 +271,11 @@ class PikPakAdapter:
changed = False
for key, value in (
("refresh_token", self.refresh_token),
("access_token", self.access_token),
("expires_at", self.expires_at),
("captcha_token", self.captcha_token),
("device_id", self.device_id),
("user_id", self.user_id),
):
if value and cfg.get(key) != value:
cfg[key] = value
@@ -260,40 +287,44 @@ class PikPakAdapter:
await self.record.save(update_fields=["config"])
async def _ensure_auth(self):
if self.access_token:
if self._has_valid_access_token():
return
async with self._auth_lock:
if self.access_token:
if self._has_valid_access_token():
return
if self.refresh_token:
try:
await self._refresh_access_token()
return
except Exception:
except Exception as e:
self.access_token = ""
if not self.username or not self.password:
raise
raise HTTPException(
502,
detail=f"PikPak refresh token failed, please update refresh_token or login manually: {e}",
)
await self._login()
async def _login(self):
url = f"{USER_BASE}/auth/signin"
if not self.captcha_token:
await self._refresh_captcha_token(self._action("POST", url), self._login_captcha_meta())
await self._refresh_captcha_token(self._full_action("POST", url), self._login_captcha_meta())
body = {
"captcha_token": self.captcha_token,
"client_id": self.client_id,
"client_secret": self.client_secret,
"grant_type": "password",
"username": self.username,
"password": self.password,
}
data = await self._raw_json("POST", url, json=body, params={"client_id": self.client_id}, auth=False)
data = await self._raw_json("POST", url, json=body, auth=False)
self.refresh_token = str(data.get("refresh_token") or "").strip()
self.access_token = str(data.get("access_token") or "").strip()
self.expires_at = int(time.time()) + _as_int(data.get("expires_in"), 0)
self.user_id = str(data.get("sub") or self.user_id).strip()
if not self.refresh_token or not self.access_token:
raise HTTPException(502, detail="PikPak login failed: missing token")
if self.platform == "android":
if self.platform == "android" and not PLATFORM_CONFIG[self.platform].get("ua"):
self.user_agent = self._build_android_user_agent()
await self._save_runtime_config()
@@ -305,21 +336,18 @@ class PikPakAdapter:
"grant_type": "refresh_token",
"refresh_token": self.refresh_token,
}
data = await self._raw_json("POST", url, json=body, params={"client_id": self.client_id}, auth=False)
data = await self._raw_json("POST", url, json=body, auth=False)
self.refresh_token = str(data.get("refresh_token") or "").strip()
self.access_token = str(data.get("access_token") or "").strip()
self.expires_at = int(time.time()) + _as_int(data.get("expires_in"), 0)
self.user_id = str(data.get("sub") or self.user_id).strip()
if not self.refresh_token or not self.access_token:
raise HTTPException(502, detail="PikPak refresh token failed: missing token")
if self.platform == "android":
if self.platform == "android" and not PLATFORM_CONFIG[self.platform].get("ua"):
self.user_agent = self._build_android_user_agent()
await self._save_runtime_config()
def _login_captcha_meta(self) -> Dict[str, str]:
if re.match(r"\w+([-+.]\w+)*@\w+([-.]\w+)*\.\w+([-.]\w+)*", self.username):
return {"email": self.username}
if 11 <= len(self.username) <= 18:
return {"phone_number": self.username}
return {"username": self.username}
async def _refresh_captcha_token(self, action: str, meta: Dict[str, str]):
@@ -332,7 +360,7 @@ class PikPakAdapter:
"meta": meta,
"redirect_uri": "xlaccsdk01://xbase.cloud/callback?state=harbor",
}
data = await self._raw_json("POST", url, json=body, params={"client_id": self.client_id}, auth=False)
data = await self._raw_json("POST", url, json=body, auth=False)
verify_url = str(data.get("url") or "").strip()
token = str(data.get("captcha_token") or "").strip()
if token and not verify_url:
@@ -437,9 +465,15 @@ class PikPakAdapter:
if self.user_id:
await self._refresh_captcha_token_after_login(method, url)
else:
await self._refresh_captcha_token(self._action(method, url), self._login_captcha_meta())
await self._refresh_captcha_token(
self._captcha_action(method, url, auth=auth),
self._login_captcha_meta(),
)
else:
await self._refresh_captcha_token(self._action(method, url), self._login_captcha_meta())
await self._refresh_captcha_token(
self._captcha_action(method, url, auth=auth),
self._login_captcha_meta(),
)
return await self._raw_json(
method,
url,
@@ -776,6 +810,21 @@ class PikPakAdapter:
return None
return resp.content
async def get_usage(self, root: str):
    """Report drive quota via PikPak's /about endpoint.

    Missing limit/usage figures map to None; free space is derived only
    when both are present.
    """
    data = await self._request("GET", "/about")
    quota = data.get("quota") or {}
    raw_limit = quota.get("limit")
    raw_usage = quota.get("usage")
    total_bytes = None if raw_limit is None else int(raw_limit)
    used_bytes = None if raw_usage is None else int(raw_usage)
    free_bytes = None
    if total_bytes is not None and used_bytes is not None:
        free_bytes = total_bytes - used_bytes
    return {
        "used_bytes": used_bytes,
        "total_bytes": total_bytes,
        "free_bytes": free_bytes,
        "source": "pikpak",
        "scope": "drive",
    }
async def mkdir(self, root: str, rel: str):
rel = (rel or "").strip("/")
if not rel:
@@ -861,8 +910,10 @@ ADAPTER_TYPE = "pikpak"
CONFIG_SCHEMA = [
{"key": "username", "label": "PikPak 账号", "type": "string", "required": True},
{"key": "password", "label": "PikPak 密码", "type": "password", "required": True},
{"key": "platform", "label": "平台", "type": "select", "required": False, "default": "web", "options": ["web", "android", "pc"]},
{"key": "platform", "label": "平台", "type": "select", "required": False, "default": "android", "options": ["android", "web", "pc"]},
{"key": "refresh_token", "label": "Refresh Token", "type": "password", "required": False},
{"key": "access_token", "label": "Access Token", "type": "password", "required": False},
{"key": "expires_at", "label": "Access Token 过期时间戳", "type": "number", "required": False},
{"key": "captcha_token", "label": "Captcha Token", "type": "password", "required": False},
{"key": "device_id", "label": "Device ID", "type": "string", "required": False},
{"key": "root_id", "label": "根目录 ID", "type": "string", "required": False, "default": ""},

View File

@@ -840,6 +840,23 @@ class QuarkAdapter:
async def copy(self, root: str, src_rel: str, dst_rel: str, overwrite: bool = False):
raise NotImplementedError("QuarkOpen does not support copy via open API")
async def get_usage(self, root: str):
    """Report account capacity via Quark's growth/info endpoint.

    The payload nests member-level figures under data.member; falls back
    to the top-level data object when that key is absent.
    """
    data = await self._request("GET", "/capacity/growth/info")
    payload = (data or {}).get("data") or {}
    if isinstance(payload.get("member"), dict):
        payload = payload["member"]
    # Explicit None check: 0 is a legitimate "nothing used" value and must
    # not fall through (via `or`) to the legacy "used_capacity" key.
    used = payload.get("use_capacity")
    if used is None:
        used = payload.get("used_capacity")
    total = payload.get("total_capacity")
    used_bytes = int(used) if used is not None else None
    total_bytes = int(total) if total is not None else None
    free_bytes = (
        total_bytes - used_bytes
        if total_bytes is not None and used_bytes is not None
        else None
    )
    return {
        "used_bytes": used_bytes,
        "total_bytes": total_bytes,
        "free_bytes": free_bytes,
        "source": "quark",
        "scope": "account",
    }
# -----------------
# STAT / EXISTS / 辅助
# -----------------

View File

@@ -1,26 +1,17 @@
from typing import List, Dict, Tuple, AsyncIterator
from typing import List, Dict, Tuple, AsyncIterator, Optional
import asyncio
import base64
import io
import os
import struct
import time
from models import StorageAdapter
from telethon import TelegramClient
from telethon import TelegramClient, errors, utils
from telethon.crypto import AuthKey
from telethon.sessions import StringSession
from telethon.tl import types
import socks
_SESSION_LOCKS: Dict[str, asyncio.Lock] = {}
def _get_session_lock(session_string: str) -> asyncio.Lock:
lock = _SESSION_LOCKS.get(session_string)
if lock is None:
lock = asyncio.Lock()
_SESSION_LOCKS[session_string] = lock
return lock
class _NamedFile:
def __init__(self, file_obj, name: str):
@@ -61,6 +52,10 @@ CONFIG_SCHEMA = [
class TelegramAdapter:
"""Telegram 存储适配器 (使用用户 Session)"""
native_video_thumbnail_only = True
_message_cache_ttl = 300
_message_cache_limit = 200
_download_chunk_size = 512 * 1024
def __init__(self, record: StorageAdapter):
self.record = record
@@ -93,6 +88,12 @@ class TelegramAdapter:
if not all([self.api_id, self.api_hash, self.session_string, self.chat_id]):
raise ValueError("Telegram 适配器需要 api_id, api_hash, session_string 和 chat_id")
self._client: TelegramClient | None = None
self._client_lock = asyncio.Lock()
self._download_lock = asyncio.Lock()
self._active_stream_message_id: int | None = None
self._message_cache: Dict[int, Tuple[float, object]] = {}
@staticmethod
def _parse_legacy_session_string(value: str) -> StringSession:
"""
@@ -132,29 +133,42 @@ class TelegramAdapter:
return None
cached = []
others = []
downloadable = []
for t in thumbs:
if isinstance(t, (types.PhotoCachedSize, types.PhotoStrippedSize)):
cached.append(t)
elif isinstance(t, (types.PhotoSize, types.PhotoSizeProgressive)):
if not isinstance(t, types.PhotoSizeEmpty):
others.append(t)
downloadable.append(t)
if cached:
cached.sort(key=lambda x: len(getattr(x, "bytes", b"") or b""))
return cached[-1]
if others:
if downloadable:
def _sz(x):
if isinstance(x, types.PhotoSizeProgressive):
return max(x.sizes or [0])
return int(getattr(x, "size", 0) or 0)
others.sort(key=_sz)
return others[-1]
downloadable.sort(key=_sz)
return downloadable[-1]
if cached:
cached.sort(key=lambda x: len(getattr(x, "bytes", b"") or b""))
return cached[-1]
return None
@staticmethod
def _get_message_thumbs(message) -> list:
doc = message.document or message.video
if doc and getattr(doc, "thumbs", None):
return list(doc.thumbs or [])
if message.photo and getattr(message.photo, "sizes", None):
return list(message.photo.sizes or [])
return []
@classmethod
def _message_has_thumbnail(cls, message) -> bool:
    """True when the message carries at least one usable thumbnail."""
    thumb = cls._pick_photo_thumb(cls._get_message_thumbs(message))
    return thumb is not None
def _build_session(self) -> StringSession:
s = (self.session_string or "").strip()
if not s:
@@ -181,6 +195,88 @@ class TelegramAdapter:
"""创建一个新的 TelegramClient 实例"""
return TelegramClient(self._build_session(), self.api_id, self.api_hash, proxy=self.proxy)
async def _get_connected_client(self) -> TelegramClient:
async with self._client_lock:
if self._client is None:
self._client = self._get_client()
if not self._client.is_connected():
await self._client.connect()
return self._client
async def _disconnect_shared_client(self):
if self._client and self._client.is_connected():
await self._client.disconnect()
def _clear_message_cache(self):
self._message_cache.clear()
async def _get_cached_message(self, message_id: int):
now = time.monotonic()
cached = self._message_cache.get(message_id)
if cached and cached[0] > now:
return cached[1]
client = await self._get_connected_client()
message = await client.get_messages(self.chat_id, ids=message_id)
if message:
if len(self._message_cache) >= self._message_cache_limit:
oldest_key = min(self._message_cache, key=lambda k: self._message_cache[k][0])
self._message_cache.pop(oldest_key, None)
self._message_cache[message_id] = (now + self._message_cache_ttl, message)
else:
self._message_cache.pop(message_id, None)
return message
@staticmethod
def _get_message_media(message):
return message.document or message.video or message.photo
@staticmethod
def _flood_wait_http_exception(exc: errors.FloodWaitError):
    """Translate a Telethon FloodWaitError into an HTTP 429 response.

    When the error exposes a positive wait time, it is surfaced both in
    the message and as a Retry-After header.
    """
    from fastapi import HTTPException
    seconds = int(getattr(exc, "seconds", 0) or 0)
    if seconds <= 0:
        return HTTPException(status_code=429, detail="Telegram 请求过于频繁,请稍后重试")
    return HTTPException(
        status_code=429,
        detail=f"Telegram 请求过于频繁,请等待 {seconds} 秒后重试",
        headers={"Retry-After": str(seconds)},
    )
@staticmethod
def _get_message_file_size(message, media) -> int:
file_meta = message.file
size = file_meta.size if file_meta and file_meta.size is not None else None
if size is None:
if hasattr(media, "size") and media.size is not None:
size = media.size
elif message.photo and getattr(message.photo, "sizes", None):
photo_size = message.photo.sizes[-1]
size = getattr(photo_size, "size", 0) or 0
else:
size = 0
return int(size or 0)
@staticmethod
def _get_message_mime_type(message, media) -> str:
file_meta = message.file
if file_meta and getattr(file_meta, "mime_type", None):
return file_meta.mime_type
if hasattr(media, "mime_type") and media.mime_type:
return media.mime_type
if message.photo:
return "image/jpeg"
return "application/octet-stream"
@staticmethod
def _parse_message_id(rel: str) -> int:
try:
message_id_str, _ = rel.split('_', 1)
return int(message_id_str)
except (ValueError, IndexError):
raise FileNotFoundError(f"无效的文件路径格式: {rel}")
def get_effective_root(self, sub_path: str | None) -> str:
return ""
@@ -229,6 +325,7 @@ class TelegramAdapter:
"size": size,
"mtime": int(message.date.timestamp()),
"type": "file",
"has_thumbnail": False,
})
finally:
if client.is_connected():
@@ -260,24 +357,69 @@ class TelegramAdapter:
return page_entries, total_count
async def read_file(self, root: str, rel: str) -> bytes:
try:
message_id_str, _ = rel.split('_', 1)
message_id = int(message_id_str)
except (ValueError, IndexError):
raise FileNotFoundError(f"无效的文件路径格式: {rel}")
message_id = self._parse_message_id(rel)
client = await self._get_connected_client()
message = await self._get_cached_message(message_id)
if not message or not self._get_message_media(message):
raise FileNotFoundError(f"在频道 {self.chat_id} 中未找到消息ID为 {message_id} 的文件")
client = self._get_client()
try:
await client.connect()
message = await client.get_messages(self.chat_id, ids=message_id)
if not message or not (message.document or message.video or message.photo):
raise FileNotFoundError(f"在频道 {self.chat_id} 中未找到消息ID为 {message_id} 的文件")
file_bytes = await client.download_media(message, file=bytes)
return file_bytes
finally:
if client.is_connected():
await client.disconnect()
async with self._download_lock:
file_bytes = await client.download_media(message, file=bytes)
return file_bytes
except errors.FloodWaitError as exc:
await self._disconnect_shared_client()
raise self._flood_wait_http_exception(exc)
async def read_file_range(self, root: str, rel: str, start: int, end: Optional[int] = None) -> bytes:
from fastapi import HTTPException
message_id = self._parse_message_id(rel)
client = await self._get_connected_client()
message = await self._get_cached_message(message_id)
if not message:
raise FileNotFoundError(f"在频道 {self.chat_id} 中未找到消息ID为 {message_id} 的文件")
media = self._get_message_media(message)
if not media:
raise FileNotFoundError(f"在频道 {self.chat_id} 中未找到消息ID为 {message_id} 的文件")
file_size = self._get_message_file_size(message, media)
if file_size > 0:
if start >= file_size:
raise HTTPException(status_code=416, detail="Requested Range Not Satisfiable")
if end is None or end >= file_size:
end = file_size - 1
elif end is None:
end = start
if end < start:
raise HTTPException(status_code=416, detail="Requested Range Not Satisfiable")
limit = end - start + 1
data = bytearray()
try:
async with self._download_lock:
async for chunk in client.iter_download(
media,
offset=start,
request_size=self._download_chunk_size,
chunk_size=self._download_chunk_size,
file_size=file_size or None,
):
if not chunk:
continue
need = limit - len(data)
if need <= 0:
break
data.extend(chunk[:need])
if len(data) >= limit:
break
return bytes(data)
except errors.FloodWaitError as exc:
await self._disconnect_shared_client()
raise self._flood_wait_http_exception(exc)
async def write_file(self, root: str, rel: str, data: bytes):
"""将字节数据作为文件上传"""
@@ -297,6 +439,7 @@ class TelegramAdapter:
stored_name = file_meta.name
if getattr(message, "id", None) is not None:
actual_rel = f"{message.id}_{stored_name}"
self._clear_message_cache()
return {"rel": actual_rel, "size": len(data)}
finally:
if client.is_connected():
@@ -326,6 +469,7 @@ class TelegramAdapter:
stored_name = file_meta.name
if getattr(message, "id", None) is not None:
actual_rel = f"{message.id}_{stored_name}"
self._clear_message_cache()
if file_meta and getattr(file_meta, "size", None):
size = int(file_meta.size)
return {"rel": actual_rel, "size": size}
@@ -361,6 +505,7 @@ class TelegramAdapter:
stored_name = file_meta.name
if getattr(message, "id", None) is not None:
actual_rel = f"{message.id}_{stored_name}"
self._clear_message_cache()
finally:
if os.path.exists(temp_path):
@@ -373,39 +518,7 @@ class TelegramAdapter:
raise NotImplementedError("Telegram 适配器不支持创建目录。")
async def get_thumbnail(self, root: str, rel: str, size: str = "medium"):
try:
message_id_str, _ = rel.split('_', 1)
message_id = int(message_id_str)
except (ValueError, IndexError):
return None
client = self._get_client()
try:
await client.connect()
message = await client.get_messages(self.chat_id, ids=message_id)
if not message:
return None
doc = message.document or message.video
thumbs = None
if doc and getattr(doc, "thumbs", None):
thumbs = list(doc.thumbs or [])
elif message.photo and getattr(message.photo, "sizes", None):
thumbs = list(message.photo.sizes or [])
thumb = self._pick_photo_thumb(thumbs)
if not thumb:
return None
result = await client.download_media(message, bytes, thumb=thumb)
if isinstance(result, (bytes, bytearray)):
return bytes(result)
return None
except Exception:
return None
finally:
if client.is_connected():
await client.disconnect()
return None
async def delete(self, root: str, rel: str):
"""删除一个文件 (即一条消息)"""
@@ -421,9 +534,12 @@ class TelegramAdapter:
result = await client.delete_messages(self.chat_id, [message_id])
if not result or not result[0].pts:
raise FileNotFoundError(f"{self.chat_id} 中删除消息 {message_id} 失败,可能消息不存在或无权限")
self._message_cache.pop(message_id, None)
finally:
if client.is_connected():
await client.disconnect()
if self._client is client:
self._client = None
async def move(self, root: str, src_rel: str, dst_rel: str):
raise NotImplementedError("Telegram 适配器不支持移动。")
@@ -439,43 +555,21 @@ class TelegramAdapter:
from fastapi import HTTPException
try:
message_id_str, _ = rel.split('_', 1)
message_id = int(message_id_str)
except (ValueError, IndexError):
message_id = self._parse_message_id(rel)
except FileNotFoundError:
raise HTTPException(status_code=400, detail=f"无效的文件路径格式: {rel}")
client = self._get_client()
lock = _get_session_lock(self.session_string)
await lock.acquire()
try:
await client.connect()
message = await client.get_messages(self.chat_id, ids=message_id)
media = message.document or message.video or message.photo
if not message or not media:
client = await self._get_connected_client()
message = await self._get_cached_message(message_id)
if not message:
raise FileNotFoundError(f"在频道 {self.chat_id} 中未找到消息ID为 {message_id} 的文件")
media = self._get_message_media(message)
if not media:
raise FileNotFoundError(f"在频道 {self.chat_id} 中未找到消息ID为 {message_id} 的文件")
file_meta = message.file
file_size = file_meta.size if file_meta and file_meta.size is not None else None
if file_size is None:
if hasattr(media, "size") and media.size is not None:
file_size = media.size
elif message.photo and getattr(message.photo, "sizes", None):
photo_size = message.photo.sizes[-1]
file_size = getattr(photo_size, "size", 0) or 0
else:
file_size = 0
mime_type = None
if file_meta and getattr(file_meta, "mime_type", None):
mime_type = file_meta.mime_type
if not mime_type:
if hasattr(media, "mime_type") and media.mime_type:
mime_type = media.mime_type
elif message.photo:
mime_type = "image/jpeg"
else:
mime_type = "application/octet-stream"
file_size = self._get_message_file_size(message, media)
mime_type = self._get_message_mime_type(message, media)
start = 0
end = file_size - 1
@@ -486,6 +580,10 @@ class TelegramAdapter:
"Content-Type": mime_type,
}
if file_size <= 0:
headers["Content-Length"] = "0"
return StreamingResponse(iter(()), status_code=status, headers=headers)
if range_header:
try:
range_val = range_header.strip().partition("=")[2]
@@ -499,42 +597,71 @@ class TelegramAdapter:
except ValueError:
raise HTTPException(status_code=400, detail="Invalid Range header")
headers["Content-Length"] = str(end - start + 1)
self._active_stream_message_id = message_id
async def iterator():
downloaded = 0
try:
limit = end - start + 1
downloaded = 0
async for chunk in client.iter_download(media, offset=start):
if downloaded + len(chunk) > limit:
yield chunk[:limit - downloaded]
break
yield chunk
downloaded += len(chunk)
if downloaded >= limit:
break
finally:
try:
if client.is_connected():
await client.disconnect()
finally:
lock.release()
if self._active_stream_message_id != message_id:
return
async with self._download_lock:
async for chunk in client.iter_download(
media,
offset=start,
request_size=self._download_chunk_size,
chunk_size=self._download_chunk_size,
file_size=file_size,
):
if self._active_stream_message_id != message_id:
return
if not chunk:
continue
remaining = limit - downloaded
if remaining <= 0:
break
data = chunk[:remaining]
downloaded += len(data)
yield data
if downloaded >= limit:
break
except errors.FloodWaitError as exc:
await self._disconnect_shared_client()
if downloaded == 0:
raise self._flood_wait_http_exception(exc)
seconds = int(getattr(exc, "seconds", 0) or 0)
print(f"Telegram streaming stopped by FloodWait after partial response, wait={seconds}s")
return
except Exception:
await self._disconnect_shared_client()
raise
return StreamingResponse(iterator(), status_code=status, headers=headers)
agen = iterator()
try:
first_chunk = await agen.__anext__()
except StopAsyncIteration:
first_chunk = b""
except HTTPException:
raise
async def response_iterator():
try:
if first_chunk:
yield first_chunk
async for chunk in agen:
yield chunk
finally:
await agen.aclose()
return StreamingResponse(response_iterator(), status_code=status, headers=headers)
except HTTPException:
if client.is_connected():
await client.disconnect()
lock.release()
raise
except FileNotFoundError as e:
if client.is_connected():
await client.disconnect()
lock.release()
raise HTTPException(status_code=404, detail=str(e))
except Exception as e:
if client.is_connected():
await client.disconnect()
lock.release()
await self._disconnect_shared_client()
raise HTTPException(status_code=500, detail=f"Streaming failed: {str(e)}")
async def stat_file(self, root: str, rel: str):
@@ -544,35 +671,21 @@ class TelegramAdapter:
except (ValueError, IndexError):
raise FileNotFoundError(f"无效的文件路径格式: {rel}")
client = self._get_client()
try:
await client.connect()
message = await client.get_messages(self.chat_id, ids=message_id)
media = message.document or message.video or message.photo
if not message or not media:
raise FileNotFoundError(f"在频道 {self.chat_id} 中未找到消息ID为 {message_id} 的文件")
message = await self._get_cached_message(message_id)
media = self._get_message_media(message) if message else None
if not message or not media:
raise FileNotFoundError(f"在频道 {self.chat_id} 中未找到消息ID为 {message_id} 的文件")
file_meta = message.file
size = file_meta.size if file_meta and file_meta.size is not None else None
if size is None:
if hasattr(media, "size") and media.size is not None:
size = media.size
elif message.photo and getattr(message.photo, "sizes", None):
photo_size = message.photo.sizes[-1]
size = getattr(photo_size, "size", 0) or 0
else:
size = 0
size = self._get_message_file_size(message, media)
return {
"name": rel,
"is_dir": False,
"size": size,
"mtime": int(message.date.timestamp()),
"type": "file",
}
finally:
if client.is_connected():
await client.disconnect()
return {
"name": rel,
"is_dir": False,
"size": size,
"mtime": int(message.date.timestamp()),
"type": "file",
"has_thumbnail": False,
}
def ADAPTER_FACTORY(rec: StorageAdapter) -> TelegramAdapter:
return TelegramAdapter(rec)

View File

@@ -1,3 +1,4 @@
import time
from typing import Optional
from fastapi import HTTPException
@@ -8,11 +9,34 @@ from .registry import (
normalize_adapter_type,
runtime_registry,
)
from .types import AdapterCreate, AdapterOut
from .types import AdapterCreate, AdapterOut, AdapterUsage
from .providers.base import UsageCapableAdapter
from models import StorageAdapter
class AdapterService:
_usage_cache_ttl = 3600
_usage_cache: dict[int, tuple[float, AdapterUsage]] = {}
@classmethod
def _get_cached_usage(cls, adapter_id: int) -> AdapterUsage | None:
cached = cls._usage_cache.get(adapter_id)
if not cached:
return None
expires_at, usage = cached
if expires_at <= time.time():
cls._usage_cache.pop(adapter_id, None)
return None
return usage
@classmethod
def _set_cached_usage(cls, usage: AdapterUsage):
cls._usage_cache[usage.id] = (time.time() + cls._usage_cache_ttl, usage)
@classmethod
def _clear_cached_usage(cls, adapter_id: int):
cls._usage_cache.pop(adapter_id, None)
@classmethod
def _validate_and_normalize_config(cls, adapter_type: str, cfg):
schemas = get_config_schemas()
@@ -85,6 +109,74 @@ class AdapterService:
raise HTTPException(404, detail="Not found")
return AdapterOut.model_validate(rec)
@classmethod
def _unsupported_usage(cls, rec: StorageAdapter, reason: str) -> AdapterUsage:
return AdapterUsage(
id=rec.id,
name=rec.name,
type=rec.type,
path=rec.path,
supported=False,
reason=reason,
)
@classmethod
async def get_adapter_usage(cls, adapter_id: int) -> AdapterUsage:
rec = await StorageAdapter.get_or_none(id=adapter_id)
if not rec:
raise HTTPException(404, detail="Not found")
return await cls._get_adapter_usage_for_record(rec)
@classmethod
async def _get_adapter_usage_for_record(cls, rec: StorageAdapter) -> AdapterUsage:
cached = cls._get_cached_usage(rec.id)
if cached:
return cached
if not rec.enabled:
return cls._unsupported_usage(rec, "adapter_disabled")
adapter = runtime_registry.get(rec.id)
if not adapter:
await runtime_registry.refresh()
adapter = runtime_registry.get(rec.id)
if not adapter:
return cls._unsupported_usage(rec, "adapter_unavailable")
if not isinstance(adapter, UsageCapableAdapter):
return cls._unsupported_usage(rec, "adapter_not_implemented")
root = adapter.get_effective_root(rec.sub_path)
try:
raw_usage = await adapter.get_usage(root)
except Exception as e:
return cls._unsupported_usage(rec, f"usage_failed: {e}")
if not isinstance(raw_usage, dict):
return cls._unsupported_usage(rec, "invalid_usage_response")
usage = AdapterUsage(
id=rec.id,
name=rec.name,
type=rec.type,
path=rec.path,
supported=True,
used_bytes=raw_usage.get("used_bytes"),
total_bytes=raw_usage.get("total_bytes"),
free_bytes=raw_usage.get("free_bytes"),
source=raw_usage.get("source") or rec.type,
scope=raw_usage.get("scope"),
)
cls._set_cached_usage(usage)
return usage
@classmethod
async def list_adapter_usages(cls):
adapters = await StorageAdapter.all()
result = []
for rec in adapters:
result.append(await cls._get_adapter_usage_for_record(rec))
return result
@classmethod
async def update_adapter(cls, adapter_id: int, data: AdapterCreate, current_user: Optional[User]):
rec = await StorageAdapter.get_or_none(id=adapter_id)
@@ -105,6 +197,7 @@ class AdapterService:
await rec.save()
await runtime_registry.upsert(rec)
cls._clear_cached_usage(adapter_id)
return AdapterOut.model_validate(rec)
@classmethod
@@ -113,4 +206,5 @@ class AdapterService:
if not deleted:
raise HTTPException(404, detail="Not found")
runtime_registry.remove(adapter_id)
cls._clear_cached_usage(adapter_id)
return {"deleted": True}

View File

@@ -48,3 +48,17 @@ class AdapterOut(AdapterBase):
class Config:
from_attributes = True
class AdapterUsage(BaseModel):
id: int
name: str
type: str
path: str
supported: bool
used_bytes: Optional[int] = None
total_bytes: Optional[int] = None
free_bytes: Optional[int] = None
source: Optional[str] = None
scope: Optional[str] = None
reason: Optional[str] = None

View File

@@ -10,7 +10,7 @@ from models.database import Configuration, UserAccount
load_dotenv(dotenv_path=".env")
VERSION = "v2.2.0"
VERSION = "v2.2.1"
class ConfigService:

View File

@@ -0,0 +1,3 @@
from .api import router
__all__ = ["router"]

View File

@@ -0,0 +1,44 @@
from typing import Annotated
from fastapi import APIRouter, Depends, Query, Request
from api.response import success
from domain.audit import AuditAction, audit
from domain.auth import User, get_current_active_user
from .service import RecentFilesService
from .types import RecordRecentFileRequest
router = APIRouter(prefix="/api/fs/recent", tags=["recent-files"])
@router.get("/")
@audit(action=AuditAction.READ, description="查看最近打开文件")
async def list_recent_files(
request: Request,
current_user: Annotated[User, Depends(get_current_active_user)],
limit: int = Query(20, ge=1, le=200, description="返回数量"),
):
data = await RecentFilesService.list_recent_files(current_user.id, limit)
return success(data)
@router.post("/")
@audit(action=AuditAction.CREATE, description="记录最近打开文件", body_fields=["path"])
async def record_recent_file(
request: Request,
body: RecordRecentFileRequest,
current_user: Annotated[User, Depends(get_current_active_user)],
):
data = await RecentFilesService.record_opened_file(current_user.id, body.path)
return success(data)
@router.delete("/")
@audit(action=AuditAction.DELETE, description="清空最近打开文件")
async def clear_recent_files(
request: Request,
current_user: Annotated[User, Depends(get_current_active_user)],
):
data = await RecentFilesService.clear_recent_files(current_user.id)
return success(data)

View File

@@ -0,0 +1,23 @@
from datetime import datetime, timezone
from models.database import RecentFile
class RecentFilesService:
@staticmethod
async def record_opened_file(user_id: int, path: str) -> dict:
item, created = await RecentFile.get_or_create(user_id=user_id, path=path)
if not created:
await RecentFile.filter(id=item.id).update(opened_at=datetime.now(timezone.utc))
await item.fetch_from_db()
return {"id": item.id, "path": item.path, "opened_at": item.opened_at.isoformat()}
@staticmethod
async def list_recent_files(user_id: int, limit: int) -> list[dict]:
items = await RecentFile.filter(user_id=user_id).order_by("-opened_at").limit(limit)
return [{"id": i.id, "path": i.path, "opened_at": i.opened_at.isoformat()} for i in items]
@staticmethod
async def clear_recent_files(user_id: int) -> dict:
deleted = await RecentFile.filter(user_id=user_id).delete()
return {"deleted": deleted}

View File

@@ -0,0 +1,11 @@
from pydantic import BaseModel, Field
class RecordRecentFileRequest(BaseModel):
path: str = Field(..., min_length=1, max_length=4096, description="文件完整路径")
class RecentFileItem(BaseModel):
id: int
path: str
opened_at: str

View File

@@ -89,6 +89,9 @@ class VirtualFSListingMixin(VirtualFSResolverMixin):
def annotate_entry(entry: Dict) -> None:
if not entry.get("is_dir"):
if entry.get("has_thumbnail") is not None:
entry["has_thumbnail"] = bool(entry.get("has_thumbnail"))
return
name = entry.get("name", "")
entry["has_thumbnail"] = bool(is_image_filename(name) or is_video_filename(name))
else:
@@ -273,7 +276,10 @@ class VirtualFSListingMixin(VirtualFSResolverMixin):
is_dir = False
rel_name = rel.rstrip("/").split("/")[-1] if rel else path.rstrip("/").split("/")[-1]
name_hint = str(info.get("name") or rel_name or "")
info["has_thumbnail"] = bool(not is_dir and (is_image_filename(name_hint) or is_video_filename(name_hint)))
if not is_dir and info.get("has_thumbnail") is not None:
info["has_thumbnail"] = bool(info.get("has_thumbnail"))
else:
info["has_thumbnail"] = bool(not is_dir and (is_image_filename(name_hint) or is_video_filename(name_hint)))
if verbose and not is_dir:
vector_index = await cls._gather_vector_index(path)
if vector_index is not None:

View File

@@ -1,6 +1,7 @@
import base64
import hashlib
import mimetypes
import uuid
from email.utils import formatdate
from urllib.parse import urlparse, unquote
from typing import Optional
@@ -43,6 +44,8 @@ def _dav_headers(extra: Optional[dict] = None) -> dict:
"MKCOL",
"MOVE",
"COPY",
"LOCK",
"UNLOCK",
]),
}
if extra:
@@ -157,17 +160,19 @@ def _normalize_fs_path(path: str) -> str:
return unquote(full)
@router.options("")
@router.options("/{path:path}")
@audit(action=AuditAction.READ, description="WebDAV: OPTIONS", user_kw="user")
async def options_root(_request: Request, path: str = "", _enabled: None = Depends(_ensure_webdav_enabled)):
return Response(status_code=200, headers=_dav_headers())
@router.api_route("", methods=["PROPFIND"])
@router.api_route("/{path:path}", methods=["PROPFIND"])
@audit(action=AuditAction.READ, description="WebDAV: PROPFIND", user_kw="user")
async def propfind(
request: Request,
path: str,
path: str = "",
_enabled: None = Depends(_ensure_webdav_enabled),
user: User = Depends(_get_basic_user),
):
@@ -247,7 +252,10 @@ async def dav_get(
if full_path != "/":
await PermissionService.require_path_permission(user.id, full_path, PathAction.READ)
range_header = request.headers.get("Range")
return await VirtualFSService.stream_file(full_path, range_header)
try:
return await VirtualFSService.stream_file(full_path, range_header)
except FileNotFoundError:
raise HTTPException(404, detail="Not found")
@router.head("/{path:path}")
@@ -280,29 +288,43 @@ async def dav_head(
return Response(status_code=200, headers=headers)
@router.api_route("", methods=["PUT"])
@router.api_route("/{path:path}", methods=["PUT"])
@audit(action=AuditAction.UPLOAD, description="WebDAV: PUT", user_kw="user")
async def dav_put(
path: str,
request: Request,
path: str = "",
_enabled: None = Depends(_ensure_webdav_enabled),
user: User = Depends(_get_basic_user),
):
full_path = _normalize_fs_path(path)
await PermissionService.require_path_permission(user.id, full_path, PathAction.WRITE)
existed = True
try:
await VirtualFSService.stat_file(full_path)
except FileNotFoundError:
existed = False
except HTTPException as exc:
if exc.status_code == 404:
existed = False
else:
raise
async def body_iter():
async for chunk in request.stream():
if chunk:
yield chunk
size = await VirtualFSService.write_file_stream(full_path, body_iter(), overwrite=True)
return Response(status_code=201, headers=_dav_headers({"Content-Length": "0"}))
await VirtualFSService.write_file_stream(full_path, body_iter(), overwrite=True)
return Response(status_code=204 if existed else 201, headers=_dav_headers({"Content-Length": "0"}))
@router.api_route("", methods=["DELETE"])
@router.api_route("/{path:path}", methods=["DELETE"])
@audit(action=AuditAction.DELETE, description="WebDAV: DELETE", user_kw="user")
async def dav_delete(
path: str,
_request: Request,
path: str = "",
_enabled: None = Depends(_ensure_webdav_enabled),
user: User = Depends(_get_basic_user),
):
@@ -312,6 +334,58 @@ async def dav_delete(
return Response(status_code=204, headers=_dav_headers())
@router.api_route("", methods=["LOCK"])
@router.api_route("/{path:path}", methods=["LOCK"])
@audit(action=AuditAction.UPDATE, description="WebDAV: LOCK", user_kw="user")
async def dav_lock(
path: str = "",
_request: Request = None,
_enabled: None = Depends(_ensure_webdav_enabled),
user: User = Depends(_get_basic_user),
):
full_path = _normalize_fs_path(path)
if full_path != "/":
await PermissionService.require_path_permission(user.id, full_path, PathAction.WRITE)
token = f"opaquelocktoken:{uuid.uuid4()}"
ns = "{DAV:}"
prop = ET.Element(ns + "prop")
lockdiscovery = ET.SubElement(prop, ns + "lockdiscovery")
activelock = ET.SubElement(lockdiscovery, ns + "activelock")
locktype = ET.SubElement(activelock, ns + "locktype")
ET.SubElement(locktype, ns + "write")
lockscope = ET.SubElement(activelock, ns + "lockscope")
ET.SubElement(lockscope, ns + "exclusive")
depth = ET.SubElement(activelock, ns + "depth")
depth.text = "Infinity"
locktoken = ET.SubElement(activelock, ns + "locktoken")
href = ET.SubElement(locktoken, ns + "href")
href.text = token
xml = ET.tostring(prop, encoding="utf-8", xml_declaration=True)
return Response(
content=xml,
status_code=200,
media_type='application/xml; charset="utf-8"',
headers=_dav_headers({"Lock-Token": f"<{token}>"}),
)
@router.api_route("", methods=["UNLOCK"])
@router.api_route("/{path:path}", methods=["UNLOCK"])
@audit(action=AuditAction.UPDATE, description="WebDAV: UNLOCK", user_kw="user")
async def dav_unlock(
path: str = "",
_request: Request = None,
_enabled: None = Depends(_ensure_webdav_enabled),
user: User = Depends(_get_basic_user),
):
full_path = _normalize_fs_path(path)
if full_path != "/":
await PermissionService.require_path_permission(user.id, full_path, PathAction.WRITE)
return Response(status_code=204, headers=_dav_headers())
@router.api_route("/{path:path}", methods=["MKCOL"])
@audit(action=AuditAction.CREATE, description="WebDAV: MKCOL", user_kw="user")
async def dav_mkcol(

View File

@@ -89,8 +89,17 @@ class VirtualFSRouteMixin(VirtualFSTempLinkMixin):
adapter, mount, root, rel = await cls.resolve_adapter_and_rel(full_path)
if not rel or rel.endswith("/"):
raise HTTPException(400, detail="Not a file")
if not (is_image_filename(rel) or is_video_filename(rel)):
raise HTTPException(404, detail="Not an image or video")
has_native_thumb = False
if callable(getattr(adapter, "get_thumbnail", None)):
stat_file = getattr(adapter, "stat_file", None)
if callable(stat_file):
try:
stat = await stat_file(root, rel)
has_native_thumb = bool(isinstance(stat, dict) and stat.get("has_thumbnail"))
except Exception:
has_native_thumb = False
if not (is_image_filename(rel) or is_video_filename(rel) or has_native_thumb):
raise HTTPException(404, detail="Not an image, video, or native thumbnail file")
data, mime, key = await get_or_create_thumb(adapter, mount.id, root, rel, w, h, fit) # type: ignore
headers = {
"Cache-Control": "public, max-age=3600",

View File

@@ -23,6 +23,7 @@ VIDEO_HEAD_FALLBACK_LIMIT = 4 * 1024 * 1024 # 4MB
VIDEO_THUMB_SEEK_SECONDS = (15, 10, 5, 3, 1, 0)
VIDEO_BLACK_FRAME_MEAN_THRESHOLD = 12.0
CACHE_ROOT = Path('data/.thumb_cache')
THUMB_CACHE_VERSION = "v2"
def is_image_filename(name: str) -> bool:
@@ -47,7 +48,7 @@ def is_video_filename(name: str) -> bool:
def _cache_key(adapter_id: int, rel: str, size: int, mtime: int, w: int, h: int, fit: str) -> str:
raw = f"{adapter_id}|{rel}|{size}|{mtime}|{w}x{h}|{fit}".encode()
raw = f"{THUMB_CACHE_VERSION}|{adapter_id}|{rel}|{size}|{mtime}|{w}x{h}|{fit}".encode()
return hashlib.sha1(raw).hexdigest()
@@ -385,8 +386,11 @@ async def get_or_create_thumb(adapter, adapter_id: int, root: str, rel: str, w:
stat = await adapter.stat_file(root, rel)
size = int(stat.get('size') or 0)
is_video = is_video_filename(rel)
if not is_video and size > MAX_IMAGE_SOURCE_SIZE:
raise HTTPException(400, detail="Image too large for thumbnail")
is_image = is_image_filename(rel)
get_thumb_impl = getattr(adapter, "get_thumbnail", None)
should_try_native_thumb = callable(get_thumb_impl) and (
is_image or is_video or bool(stat.get("has_thumbnail"))
)
key = _cache_key(adapter_id, rel, size, int(
stat.get('mtime', 0)), w, h, fit)
@@ -397,8 +401,7 @@ async def get_or_create_thumb(adapter, adapter_id: int, root: str, rel: str, w:
_ensure_cache_dir(path)
thumb_bytes, mime = None, None
get_thumb_impl = getattr(adapter, "get_thumbnail", None)
if callable(get_thumb_impl):
if should_try_native_thumb:
size_str = "large" if w > 400 else "medium" if w > 100 else "small"
native_thumb_bytes = await get_thumb_impl(root, rel, size_str)
@@ -406,15 +409,15 @@ async def get_or_create_thumb(adapter, adapter_id: int, root: str, rel: str, w:
try:
from PIL import Image
im = Image.open(io.BytesIO(native_thumb_bytes))
buf = io.BytesIO()
im.save(buf, 'WEBP', quality=85)
thumb_bytes = buf.getvalue()
mime = 'image/webp'
thumb_bytes, mime = _image_to_webp(im, w, h, fit)
except Exception as e:
print(
f"Failed to convert native thumbnail to WebP: {e}, falling back.")
thumb_bytes, mime = None, None
if is_video and getattr(adapter, "native_video_thumbnail_only", False) and not thumb_bytes:
raise HTTPException(404, detail="Native video thumbnail unavailable")
if not thumb_bytes:
if is_video:
async def _maybe_transcoding_thumb() -> Tuple[bytes, str] | None:
@@ -493,7 +496,9 @@ async def get_or_create_thumb(adapter, adapter_id: int, root: str, rel: str, w:
thumb_bytes, mime = retry_thumb, retry_mime
except Exception:
pass
else:
elif is_image:
if size > MAX_IMAGE_SOURCE_SIZE:
raise HTTPException(400, detail="Image too large for thumbnail")
read_data = await adapter.read_file(root, rel)
try:
thumb_bytes, mime = generate_thumb(
@@ -502,6 +507,8 @@ async def get_or_create_thumb(adapter, adapter_id: int, root: str, rel: str, w:
print(e)
raise HTTPException(
500, detail=f"Thumbnail generation failed: {e}")
else:
raise HTTPException(500, detail="Native thumbnail unavailable")
if thumb_bytes:
path.write_bytes(thumb_bytes)

View File

@@ -234,6 +234,19 @@ class ShareLink(Model):
table = "share_links"
class RecentFile(Model):
id = fields.IntField(pk=True)
user: fields.ForeignKeyRelation[UserAccount] = fields.ForeignKeyField(
"models.UserAccount", related_name="recent_files", on_delete=fields.CASCADE
)
path = fields.CharField(max_length=4096)
opened_at = fields.DatetimeField(auto_now=True)
class Meta:
table = "recent_files"
unique_together = (("user", "path"),)
class Plugin(Model):
id = fields.IntField(pk=True)
key = fields.CharField(max_length=100, unique=True) # 插件唯一标识

View File

@@ -11,13 +11,13 @@ dependencies = [
"fastapi>=0.127.0",
"mcp>=1.26.0",
"paramiko>=4.0.0",
"pillow>=12.0.0",
"pillow>=12.2.0",
"pydantic[email]>=2.12.5",
"pyjwt>=2.10.1",
"pymilvus[milvus-lite]>=2.6.5",
"pysocks>=1.7.1",
"python-dotenv>=1.2.1",
"python-multipart>=0.0.21",
"python-dotenv>=1.2.2",
"python-multipart>=0.0.26",
"qdrant-client>=1.16.2",
"setuptools<82",
"telethon>=1.42.0",

72
uv.lock generated
View File

@@ -469,13 +469,13 @@ requires-dist = [
{ name = "fastapi", specifier = ">=0.127.0" },
{ name = "mcp", specifier = ">=1.26.0" },
{ name = "paramiko", specifier = ">=4.0.0" },
{ name = "pillow", specifier = ">=12.0.0" },
{ name = "pillow", specifier = ">=12.2.0" },
{ name = "pydantic", extras = ["email"], specifier = ">=2.12.5" },
{ name = "pyjwt", specifier = ">=2.10.1" },
{ name = "pymilvus", extras = ["milvus-lite"], specifier = ">=2.6.5" },
{ name = "pysocks", specifier = ">=1.7.1" },
{ name = "python-dotenv", specifier = ">=1.2.1" },
{ name = "python-multipart", specifier = ">=0.0.21" },
{ name = "python-dotenv", specifier = ">=1.2.2" },
{ name = "python-multipart", specifier = ">=0.0.26" },
{ name = "qdrant-client", specifier = ">=1.16.2" },
{ name = "setuptools", specifier = "<82" },
{ name = "telethon", specifier = ">=1.42.0" },
@@ -872,35 +872,35 @@ wheels = [
[[package]]
name = "pillow"
version = "12.1.1"
version = "12.2.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/1f/42/5c74462b4fd957fcd7b13b04fb3205ff8349236ea74c7c375766d6c82288/pillow-12.1.1.tar.gz", hash = "sha256:9ad8fa5937ab05218e2b6a4cff30295ad35afd2f83ac592e68c0d871bb0fdbc4", size = 46980264, upload-time = "2026-02-11T04:23:07.146Z" }
sdist = { url = "https://files.pythonhosted.org/packages/8c/21/c2bcdd5906101a30244eaffc1b6e6ce71a31bd0742a01eb89e660ebfac2d/pillow-12.2.0.tar.gz", hash = "sha256:a830b1a40919539d07806aa58e1b114df53ddd43213d9c8b75847eee6c0182b5", size = 46987819, upload-time = "2026-04-01T14:46:17.687Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/03/d0/bebb3ffbf31c5a8e97241476c4cf8b9828954693ce6744b4a2326af3e16b/pillow-12.1.1-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:417423db963cb4be8bac3fc1204fe61610f6abeed1580a7a2cbb2fbda20f12af", size = 4062652, upload-time = "2026-02-11T04:21:53.19Z" },
{ url = "https://files.pythonhosted.org/packages/2d/c0/0e16fb0addda4851445c28f8350d8c512f09de27bbb0d6d0bbf8b6709605/pillow-12.1.1-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:b957b71c6b2387610f556a7eb0828afbe40b4a98036fc0d2acfa5a44a0c2036f", size = 4138823, upload-time = "2026-02-11T04:22:03.088Z" },
{ url = "https://files.pythonhosted.org/packages/6b/fb/6170ec655d6f6bb6630a013dd7cf7bc218423d7b5fa9071bf63dc32175ae/pillow-12.1.1-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:097690ba1f2efdeb165a20469d59d8bb03c55fb6621eb2041a060ae8ea3e9642", size = 3601143, upload-time = "2026-02-11T04:22:04.909Z" },
{ url = "https://files.pythonhosted.org/packages/59/04/dc5c3f297510ba9a6837cbb318b87dd2b8f73eb41a43cc63767f65cb599c/pillow-12.1.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:2815a87ab27848db0321fb78c7f0b2c8649dee134b7f2b80c6a45c6831d75ccd", size = 5266254, upload-time = "2026-02-11T04:22:07.656Z" },
{ url = "https://files.pythonhosted.org/packages/05/30/5db1236b0d6313f03ebf97f5e17cda9ca060f524b2fcc875149a8360b21c/pillow-12.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f7ed2c6543bad5a7d5530eb9e78c53132f93dfa44a28492db88b41cdab885202", size = 4657499, upload-time = "2026-02-11T04:22:09.613Z" },
{ url = "https://files.pythonhosted.org/packages/6f/18/008d2ca0eb612e81968e8be0bbae5051efba24d52debf930126d7eaacbba/pillow-12.1.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:652a2c9ccfb556235b2b501a3a7cf3742148cd22e04b5625c5fe057ea3e3191f", size = 6232137, upload-time = "2026-02-11T04:22:11.434Z" },
{ url = "https://files.pythonhosted.org/packages/70/f1/f14d5b8eeb4b2cd62b9f9f847eb6605f103df89ef619ac68f92f748614ea/pillow-12.1.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d6e4571eedf43af33d0fc233a382a76e849badbccdf1ac438841308652a08e1f", size = 8042721, upload-time = "2026-02-11T04:22:13.321Z" },
{ url = "https://files.pythonhosted.org/packages/5a/d6/17824509146e4babbdabf04d8171491fa9d776f7061ff6e727522df9bd03/pillow-12.1.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b574c51cf7d5d62e9be37ba446224b59a2da26dc4c1bb2ecbe936a4fb1a7cb7f", size = 6347798, upload-time = "2026-02-11T04:22:15.449Z" },
{ url = "https://files.pythonhosted.org/packages/d1/ee/c85a38a9ab92037a75615aba572c85ea51e605265036e00c5b67dfafbfe2/pillow-12.1.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a37691702ed687799de29a518d63d4682d9016932db66d4e90c345831b02fb4e", size = 7039315, upload-time = "2026-02-11T04:22:17.24Z" },
{ url = "https://files.pythonhosted.org/packages/ec/f3/bc8ccc6e08a148290d7523bde4d9a0d6c981db34631390dc6e6ec34cacf6/pillow-12.1.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f95c00d5d6700b2b890479664a06e754974848afaae5e21beb4d83c106923fd0", size = 6462360, upload-time = "2026-02-11T04:22:19.111Z" },
{ url = "https://files.pythonhosted.org/packages/f6/ab/69a42656adb1d0665ab051eec58a41f169ad295cf81ad45406963105408f/pillow-12.1.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:559b38da23606e68681337ad74622c4dbba02254fc9cb4488a305dd5975c7eeb", size = 7165438, upload-time = "2026-02-11T04:22:21.041Z" },
{ url = "https://files.pythonhosted.org/packages/02/46/81f7aa8941873f0f01d4b55cc543b0a3d03ec2ee30d617a0448bf6bd6dec/pillow-12.1.1-cp314-cp314-win32.whl", hash = "sha256:03edcc34d688572014ff223c125a3f77fb08091e4607e7745002fc214070b35f", size = 6431503, upload-time = "2026-02-11T04:22:22.833Z" },
{ url = "https://files.pythonhosted.org/packages/40/72/4c245f7d1044b67affc7f134a09ea619d4895333d35322b775b928180044/pillow-12.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:50480dcd74fa63b8e78235957d302d98d98d82ccbfac4c7e12108ba9ecbdba15", size = 7176748, upload-time = "2026-02-11T04:22:24.64Z" },
{ url = "https://files.pythonhosted.org/packages/e4/ad/8a87bdbe038c5c698736e3348af5c2194ffb872ea52f11894c95f9305435/pillow-12.1.1-cp314-cp314-win_arm64.whl", hash = "sha256:5cb1785d97b0c3d1d1a16bc1d710c4a0049daefc4935f3a8f31f827f4d3d2e7f", size = 2544314, upload-time = "2026-02-11T04:22:26.685Z" },
{ url = "https://files.pythonhosted.org/packages/6c/9d/efd18493f9de13b87ede7c47e69184b9e859e4427225ea962e32e56a49bc/pillow-12.1.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:1f90cff8aa76835cba5769f0b3121a22bd4eb9e6884cfe338216e557a9a548b8", size = 5268612, upload-time = "2026-02-11T04:22:29.884Z" },
{ url = "https://files.pythonhosted.org/packages/f8/f1/4f42eb2b388eb2ffc660dcb7f7b556c1015c53ebd5f7f754965ef997585b/pillow-12.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1f1be78ce9466a7ee64bfda57bdba0f7cc499d9794d518b854816c41bf0aa4e9", size = 4660567, upload-time = "2026-02-11T04:22:31.799Z" },
{ url = "https://files.pythonhosted.org/packages/01/54/df6ef130fa43e4b82e32624a7b821a2be1c5653a5fdad8469687a7db4e00/pillow-12.1.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:42fc1f4677106188ad9a55562bbade416f8b55456f522430fadab3cef7cd4e60", size = 6269951, upload-time = "2026-02-11T04:22:33.921Z" },
{ url = "https://files.pythonhosted.org/packages/a9/48/618752d06cc44bb4aae8ce0cd4e6426871929ed7b46215638088270d9b34/pillow-12.1.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:98edb152429ab62a1818039744d8fbb3ccab98a7c29fc3d5fcef158f3f1f68b7", size = 8074769, upload-time = "2026-02-11T04:22:35.877Z" },
{ url = "https://files.pythonhosted.org/packages/c3/bd/f1d71eb39a72fa088d938655afba3e00b38018d052752f435838961127d8/pillow-12.1.1-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d470ab1178551dd17fdba0fef463359c41aaa613cdcd7ff8373f54be629f9f8f", size = 6381358, upload-time = "2026-02-11T04:22:37.698Z" },
{ url = "https://files.pythonhosted.org/packages/64/ef/c784e20b96674ed36a5af839305f55616f8b4f8aa8eeccf8531a6e312243/pillow-12.1.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6408a7b064595afcab0a49393a413732a35788f2a5092fdc6266952ed67de586", size = 7068558, upload-time = "2026-02-11T04:22:39.597Z" },
{ url = "https://files.pythonhosted.org/packages/73/cb/8059688b74422ae61278202c4e1ad992e8a2e7375227be0a21c6b87ca8d5/pillow-12.1.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5d8c41325b382c07799a3682c1c258469ea2ff97103c53717b7893862d0c98ce", size = 6493028, upload-time = "2026-02-11T04:22:42.73Z" },
{ url = "https://files.pythonhosted.org/packages/c6/da/e3c008ed7d2dd1f905b15949325934510b9d1931e5df999bb15972756818/pillow-12.1.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c7697918b5be27424e9ce568193efd13d925c4481dd364e43f5dff72d33e10f8", size = 7191940, upload-time = "2026-02-11T04:22:44.543Z" },
{ url = "https://files.pythonhosted.org/packages/01/4a/9202e8d11714c1fc5951f2e1ef362f2d7fbc595e1f6717971d5dd750e969/pillow-12.1.1-cp314-cp314t-win32.whl", hash = "sha256:d2912fd8114fc5545aa3a4b5576512f64c55a03f3ebcca4c10194d593d43ea36", size = 6438736, upload-time = "2026-02-11T04:22:46.347Z" },
{ url = "https://files.pythonhosted.org/packages/f3/ca/cbce2327eb9885476b3957b2e82eb12c866a8b16ad77392864ad601022ce/pillow-12.1.1-cp314-cp314t-win_amd64.whl", hash = "sha256:4ceb838d4bd9dab43e06c363cab2eebf63846d6a4aeaea283bbdfd8f1a8ed58b", size = 7182894, upload-time = "2026-02-11T04:22:48.114Z" },
{ url = "https://files.pythonhosted.org/packages/ec/d2/de599c95ba0a973b94410477f8bf0b6f0b5e67360eb89bcb1ad365258beb/pillow-12.1.1-cp314-cp314t-win_arm64.whl", hash = "sha256:7b03048319bfc6170e93bd60728a1af51d3dd7704935feb228c4d4faab35d334", size = 2546446, upload-time = "2026-02-11T04:22:50.342Z" },
{ url = "https://files.pythonhosted.org/packages/bf/98/4595daa2365416a86cb0d495248a393dfc84e96d62ad080c8546256cb9c0/pillow-12.2.0-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:3adc9215e8be0448ed6e814966ecf3d9952f0ea40eb14e89a102b87f450660d8", size = 4100848, upload-time = "2026-04-01T14:44:48.48Z" },
{ url = "https://files.pythonhosted.org/packages/0b/79/40184d464cf89f6663e18dfcf7ca21aae2491fff1a16127681bf1fa9b8cf/pillow-12.2.0-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:6a9adfc6d24b10f89588096364cc726174118c62130c817c2837c60cf08a392b", size = 4176515, upload-time = "2026-04-01T14:44:51.353Z" },
{ url = "https://files.pythonhosted.org/packages/b0/63/703f86fd4c422a9cf722833670f4f71418fb116b2853ff7da722ea43f184/pillow-12.2.0-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:6a6e67ea2e6feda684ed370f9a1c52e7a243631c025ba42149a2cc5934dec295", size = 3640159, upload-time = "2026-04-01T14:44:53.588Z" },
{ url = "https://files.pythonhosted.org/packages/71/e0/fb22f797187d0be2270f83500aab851536101b254bfa1eae10795709d283/pillow-12.2.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:2bb4a8d594eacdfc59d9e5ad972aa8afdd48d584ffd5f13a937a664c3e7db0ed", size = 5312185, upload-time = "2026-04-01T14:44:56.039Z" },
{ url = "https://files.pythonhosted.org/packages/ba/8c/1a9e46228571de18f8e28f16fabdfc20212a5d019f3e3303452b3f0a580d/pillow-12.2.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:80b2da48193b2f33ed0c32c38140f9d3186583ce7d516526d462645fd98660ae", size = 4695386, upload-time = "2026-04-01T14:44:58.663Z" },
{ url = "https://files.pythonhosted.org/packages/70/62/98f6b7f0c88b9addd0e87c217ded307b36be024d4ff8869a812b241d1345/pillow-12.2.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22db17c68434de69d8ecfc2fe821569195c0c373b25cccb9cbdacf2c6e53c601", size = 6280384, upload-time = "2026-04-01T14:45:01.5Z" },
{ url = "https://files.pythonhosted.org/packages/5e/03/688747d2e91cfbe0e64f316cd2e8005698f76ada3130d0194664174fa5de/pillow-12.2.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7b14cc0106cd9aecda615dd6903840a058b4700fcb817687d0ee4fc8b6e389be", size = 8091599, upload-time = "2026-04-01T14:45:04.5Z" },
{ url = "https://files.pythonhosted.org/packages/f6/35/577e22b936fcdd66537329b33af0b4ccfefaeabd8aec04b266528cddb33c/pillow-12.2.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cbeb542b2ebc6fcdacabf8aca8c1a97c9b3ad3927d46b8723f9d4f033288a0f", size = 6396021, upload-time = "2026-04-01T14:45:07.117Z" },
{ url = "https://files.pythonhosted.org/packages/11/8d/d2532ad2a603ca2b93ad9f5135732124e57811d0168155852f37fbce2458/pillow-12.2.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4bfd07bc812fbd20395212969e41931001fd59eb55a60658b0e5710872e95286", size = 7083360, upload-time = "2026-04-01T14:45:09.763Z" },
{ url = "https://files.pythonhosted.org/packages/5e/26/d325f9f56c7e039034897e7380e9cc202b1e368bfd04d4cbe6a441f02885/pillow-12.2.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9aba9a17b623ef750a4d11b742cbafffeb48a869821252b30ee21b5e91392c50", size = 6507628, upload-time = "2026-04-01T14:45:12.378Z" },
{ url = "https://files.pythonhosted.org/packages/5f/f7/769d5632ffb0988f1c5e7660b3e731e30f7f8ec4318e94d0a5d674eb65a4/pillow-12.2.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:deede7c263feb25dba4e82ea23058a235dcc2fe1f6021025dc71f2b618e26104", size = 7209321, upload-time = "2026-04-01T14:45:15.122Z" },
{ url = "https://files.pythonhosted.org/packages/6a/7a/c253e3c645cd47f1aceea6a8bacdba9991bf45bb7dfe927f7c893e89c93c/pillow-12.2.0-cp314-cp314-win32.whl", hash = "sha256:632ff19b2778e43162304d50da0181ce24ac5bb8180122cbe1bf4673428328c7", size = 6479723, upload-time = "2026-04-01T14:45:17.797Z" },
{ url = "https://files.pythonhosted.org/packages/cd/8b/601e6566b957ca50e28725cb6c355c59c2c8609751efbecd980db44e0349/pillow-12.2.0-cp314-cp314-win_amd64.whl", hash = "sha256:4e6c62e9d237e9b65fac06857d511e90d8461a32adcc1b9065ea0c0fa3a28150", size = 7217400, upload-time = "2026-04-01T14:45:20.529Z" },
{ url = "https://files.pythonhosted.org/packages/d6/94/220e46c73065c3e2951bb91c11a1fb636c8c9ad427ac3ce7d7f3359b9b2f/pillow-12.2.0-cp314-cp314-win_arm64.whl", hash = "sha256:b1c1fbd8a5a1af3412a0810d060a78b5136ec0836c8a4ef9aa11807f2a22f4e1", size = 2554835, upload-time = "2026-04-01T14:45:23.162Z" },
{ url = "https://files.pythonhosted.org/packages/b6/ab/1b426a3974cb0e7da5c29ccff4807871d48110933a57207b5a676cccc155/pillow-12.2.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:57850958fe9c751670e49b2cecf6294acc99e562531f4bd317fa5ddee2068463", size = 5314225, upload-time = "2026-04-01T14:45:25.637Z" },
{ url = "https://files.pythonhosted.org/packages/19/1e/dce46f371be2438eecfee2a1960ee2a243bbe5e961890146d2dee1ff0f12/pillow-12.2.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:d5d38f1411c0ed9f97bcb49b7bd59b6b7c314e0e27420e34d99d844b9ce3b6f3", size = 4698541, upload-time = "2026-04-01T14:45:28.355Z" },
{ url = "https://files.pythonhosted.org/packages/55/c3/7fbecf70adb3a0c33b77a300dc52e424dc22ad8cdc06557a2e49523b703d/pillow-12.2.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5c0a9f29ca8e79f09de89293f82fc9b0270bb4af1d58bc98f540cc4aedf03166", size = 6322251, upload-time = "2026-04-01T14:45:30.924Z" },
{ url = "https://files.pythonhosted.org/packages/1c/3c/7fbc17cfb7e4fe0ef1642e0abc17fc6c94c9f7a16be41498e12e2ba60408/pillow-12.2.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1610dd6c61621ae1cf811bef44d77e149ce3f7b95afe66a4512f8c59f25d9ebe", size = 8127807, upload-time = "2026-04-01T14:45:33.908Z" },
{ url = "https://files.pythonhosted.org/packages/ff/c3/a8ae14d6defd2e448493ff512fae903b1e9bd40b72efb6ec55ce0048c8ce/pillow-12.2.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a34329707af4f73cf1782a36cd2289c0368880654a2c11f027bcee9052d35dd", size = 6433935, upload-time = "2026-04-01T14:45:36.623Z" },
{ url = "https://files.pythonhosted.org/packages/6e/32/2880fb3a074847ac159d8f902cb43278a61e85f681661e7419e6596803ed/pillow-12.2.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8e9c4f5b3c546fa3458a29ab22646c1c6c787ea8f5ef51300e5a60300736905e", size = 7116720, upload-time = "2026-04-01T14:45:39.258Z" },
{ url = "https://files.pythonhosted.org/packages/46/87/495cc9c30e0129501643f24d320076f4cc54f718341df18cc70ec94c44e1/pillow-12.2.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:fb043ee2f06b41473269765c2feae53fc2e2fbf96e5e22ca94fb5ad677856f06", size = 6540498, upload-time = "2026-04-01T14:45:41.879Z" },
{ url = "https://files.pythonhosted.org/packages/18/53/773f5edca692009d883a72211b60fdaf8871cbef075eaa9d577f0a2f989e/pillow-12.2.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f278f034eb75b4e8a13a54a876cc4a5ab39173d2cdd93a638e1b467fc545ac43", size = 7239413, upload-time = "2026-04-01T14:45:44.705Z" },
{ url = "https://files.pythonhosted.org/packages/c9/e4/4b64a97d71b2a83158134abbb2f5bd3f8a2ea691361282f010998f339ec7/pillow-12.2.0-cp314-cp314t-win32.whl", hash = "sha256:6bb77b2dcb06b20f9f4b4a8454caa581cd4dd0643a08bacf821216a16d9c8354", size = 6482084, upload-time = "2026-04-01T14:45:47.568Z" },
{ url = "https://files.pythonhosted.org/packages/ba/13/306d275efd3a3453f72114b7431c877d10b1154014c1ebbedd067770d629/pillow-12.2.0-cp314-cp314t-win_amd64.whl", hash = "sha256:6562ace0d3fb5f20ed7290f1f929cae41b25ae29528f2af1722966a0a02e2aa1", size = 7225152, upload-time = "2026-04-01T14:45:50.032Z" },
{ url = "https://files.pythonhosted.org/packages/ff/6e/cf826fae916b8658848d7b9f38d88da6396895c676e8086fc0988073aaf8/pillow-12.2.0-cp314-cp314t-win_arm64.whl", hash = "sha256:aa88ccfe4e32d362816319ed727a004423aab09c5cea43c01a4b435643fa34eb", size = 2556579, upload-time = "2026-04-01T14:45:52.529Z" },
]
[[package]]
@@ -1170,20 +1170,20 @@ wheels = [
[[package]]
name = "python-dotenv"
version = "1.2.1"
version = "1.2.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221, upload-time = "2025-10-26T15:12:10.434Z" }
sdist = { url = "https://files.pythonhosted.org/packages/82/ed/0301aeeac3e5353ef3d94b6ec08bbcabd04a72018415dcb29e588514bba8/python_dotenv-1.2.2.tar.gz", hash = "sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3", size = 50135, upload-time = "2026-03-01T16:00:26.196Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" },
{ url = "https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a", size = 22101, upload-time = "2026-03-01T16:00:25.09Z" },
]
[[package]]
name = "python-multipart"
version = "0.0.22"
version = "0.0.26"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/94/01/979e98d542a70714b0cb2b6728ed0b7c46792b695e3eaec3e20711271ca3/python_multipart-0.0.22.tar.gz", hash = "sha256:7340bef99a7e0032613f56dc36027b959fd3b30a787ed62d310e951f7c3a3a58", size = 37612, upload-time = "2026-01-25T10:15:56.219Z" }
sdist = { url = "https://files.pythonhosted.org/packages/88/71/b145a380824a960ebd60e1014256dbb7d2253f2316ff2d73dfd8928ec2c3/python_multipart-0.0.26.tar.gz", hash = "sha256:08fadc45918cd615e26846437f50c5d6d23304da32c341f289a617127b081f17", size = 43501, upload-time = "2026-04-10T14:09:59.473Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/1b/d0/397f9626e711ff749a95d96b7af99b9c566a9bb5129b8e4c10fc4d100304/python_multipart-0.0.22-py3-none-any.whl", hash = "sha256:2b2cd894c83d21bf49d702499531c7bafd057d730c201782048f7945d82de155", size = 24579, upload-time = "2026-01-25T10:15:54.811Z" },
{ url = "https://files.pythonhosted.org/packages/9a/22/f1925cdda983ab66fc8ec6ec8014b959262747e58bdca26a4e3d1da29d56/python_multipart-0.0.26-py3-none-any.whl", hash = "sha256:c0b169f8c4484c13b0dcf2ef0ec3a4adb255c4b7d18d8e420477d2b1dd03f185", size = 28847, upload-time = "2026-04-10T14:09:58.131Z" },
]
[[package]]

View File

@@ -10,6 +10,20 @@ export interface AdapterItem {
sub_path?: string | null;
}
/**
 * Capacity/usage snapshot for one storage adapter, as returned by the
 * `/adapters/usage` (list) and `/adapters/{id}/usage` (single) endpoints.
 */
export interface AdapterUsage {
id: number; // matches AdapterItem.id — used as the key of the usage map
name: string;
type: string;
path: string; // mount path, as shown in the adapters table
supported: boolean; // false when the backend cannot report usage for this adapter type
used_bytes?: number | null;
total_bytes?: number | null;
free_bytes?: number | null;
source?: string | null; // NOTE(review): semantics not visible here — confirm with backend API
scope?: string | null; // NOTE(review): semantics not visible here — confirm with backend API
reason?: string | null; // presumably why usage is unsupported/unavailable — TODO confirm
}
export interface AdapterTypeField {
key: string;
label: string;
@@ -31,4 +45,6 @@ export const adaptersApi = {
update: (id: number, payload: Omit<AdapterItem, 'id'>) => request<AdapterItem>(`/adapters/${id}`, { method: 'PUT', json: payload }),
remove: (id: number) => request<void>(`/adapters/${id}`, { method: 'DELETE' }),
available: () => request<AdapterTypeMeta[]>('/adapters/available'),
usage: () => request<AdapterUsage[]>('/adapters/usage'),
usageById: (id: number) => request<AdapterUsage>(`/adapters/${id}/usage`),
};

View File

@@ -71,7 +71,7 @@ async function request<T = any>(url: string, options: RequestOptions = {}): Prom
}
export { vfsApi, type VfsEntry, type DirListing } from './vfs';
export { adaptersApi, type AdapterItem, type AdapterTypeField, type AdapterTypeMeta } from './adapters';
export { adaptersApi, type AdapterItem, type AdapterTypeField, type AdapterTypeMeta, type AdapterUsage } from './adapters';
export { shareApi, type ShareInfo, type ShareInfoWithPassword } from './share';
export { offlineDownloadsApi, type OfflineDownloadTask, type OfflineDownloadCreate, type TaskProgress } from './offlineDownloads';
export default request;

View File

@@ -24,6 +24,16 @@ function getPluginStylePaths(plugin: PluginItem): string[] {
return styles.filter((s) => typeof s === 'string' && s.trim().length > 0);
}
/**
 * Ask the plugin running inside `iframe` to tear itself down, then point the
 * frame at about:blank so its document is discarded. Safe to call with null.
 */
function unloadPluginFrame(iframe: HTMLIFrameElement | null) {
  if (!iframe) return;
  const target = iframe.contentWindow;
  if (target) {
    try {
      // Same-origin notify; the plugin bootstrap listens for this message type.
      target.postMessage({ type: 'foxel-plugin:unload' }, window.location.origin);
    } catch {
      // Best-effort: the frame may already be detached.
    }
  }
  iframe.src = 'about:blank';
}
/**
* 插件宿主组件 - 文件打开模式
* 使用 iframe 隔离渲染与样式,避免插件污染宿主 DOM/CSS。
@@ -66,7 +76,10 @@ export const PluginAppHost: React.FC<PluginAppHostProps> = ({
};
window.addEventListener('message', onMessage);
return () => window.removeEventListener('message', onMessage);
return () => {
window.removeEventListener('message', onMessage);
unloadPluginFrame(iframeRef.current);
};
}, [plugin.key]);
return (
@@ -118,7 +131,10 @@ export const PluginAppOpenHost: React.FC<PluginAppOpenHostProps> = ({ plugin, on
};
window.addEventListener('message', onMessage);
return () => window.removeEventListener('message', onMessage);
return () => {
window.removeEventListener('message', onMessage);
unloadPluginFrame(iframeRef.current);
};
}, [plugin.key]);
return (

View File

@@ -514,6 +514,8 @@
"Unique name": "Unique name",
"Select adapter type": "Select adapter type",
"/ or /drive": "/ or /drive",
"Used Capacity": "Used Capacity",
"Capacity Usage": "Capacity Usage",
"Adapter Config": "Adapter Config",
"adapter.type.local": "Local Filesystem",
"adapter.type.foxel": "Foxel Node",

View File

@@ -513,6 +513,8 @@
"Unique name": "唯一名称",
"Select adapter type": "选择适配器类型",
"/ or /drive": "/或/drive",
"Used Capacity": "已使用容量",
"Capacity Usage": "容量使用",
"Adapter Config": "适配器配置",
"adapter.type.local": "本地文件系统",
"adapter.type.foxel": "Foxel 节点",

View File

@@ -1,12 +1,29 @@
import { memo, useState, useEffect, useCallback } from 'react';
import { Table, Button, Space, Drawer, Form, Input, Switch, message, Typography, Popconfirm, Select } from 'antd';
import PageCard from '../components/PageCard';
import { adaptersApi, type AdapterItem, type AdapterTypeMeta } from '../api/client';
import { adaptersApi, type AdapterItem, type AdapterTypeMeta, type AdapterUsage } from '../api/client';
import { useI18n } from '../i18n';
/**
 * Render a byte count as a human-readable size, e.g. 1536 -> "1.5 KB".
 * Returns '-' for missing, non-finite, or negative input. Values >= 10 in
 * their unit (and plain bytes) show no decimals; smaller values keep one.
 */
const formatBytes = (bytes?: number | null) => {
  // Also reject NaN/Infinity/negatives, which would otherwise flow through
  // Math.log and render as "NaN undefined".
  if (bytes === null || bytes === undefined || !Number.isFinite(bytes) || bytes < 0) return '-';
  if (bytes === 0) return '0 B';
  const units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB'];
  // Clamp to [0, PB]: fractional counts in (0, 1) would yield a negative index.
  const index = Math.min(Math.max(Math.floor(Math.log(bytes) / Math.log(1024)), 0), units.length - 1);
  const value = bytes / (1024 ** index);
  return `${value.toFixed(value >= 10 || index === 0 ? 0 : 1)} ${units[index]}`;
};
/**
 * Format an adapter's usage cell as "used / total", falling back to just the
 * used size when total is unknown, and '-' when usage is unsupported/missing.
 */
const formatUsage = (usage?: AdapterUsage) => {
  if (!usage?.supported) return '-';
  const { used_bytes, total_bytes } = usage;
  // '== null' intentionally matches both null and undefined.
  if (used_bytes == null) return '-';
  const usedText = formatBytes(used_bytes);
  return total_bytes == null ? usedText : `${usedText} / ${formatBytes(total_bytes)}`;
};
const AdaptersPage = memo(function AdaptersPage() {
const [loading, setLoading] = useState(false);
const [data, setData] = useState<AdapterItem[]>([]);
const [usageMap, setUsageMap] = useState<Record<number, AdapterUsage>>({});
const [open, setOpen] = useState(false);
const [editing, setEditing] = useState<AdapterItem | null>(null);
const [form] = Form.useForm();
@@ -16,12 +33,14 @@ const AdaptersPage = memo(function AdaptersPage() {
const fetchList = useCallback(async () => {
setLoading(true);
try {
const [list, types] = await Promise.all([
const [list, types, usages] = await Promise.all([
adaptersApi.list(),
adaptersApi.available()
adaptersApi.available(),
adaptersApi.usage()
]);
setData(list);
setAvailableTypes(types);
setUsageMap(Object.fromEntries(usages.map(item => [item.id, item])));
} catch (e: any) {
message.error(e.message || t('Load failed'));
} finally {
@@ -137,11 +156,47 @@ const AdaptersPage = memo(function AdaptersPage() {
return label === key ? type : label;
}, [t]);
// Aggregate used/total bytes across all adapters that report usage.
// hasUsed/hasTotal track whether at least one adapter contributed a number,
// so a list with no reporting adapters renders no summary at all.
const usageSummary = Object.values(usageMap).reduce(
(acc, usage) => {
if (!usage.supported) return acc;
if (usage.used_bytes !== null && usage.used_bytes !== undefined) {
acc.used += usage.used_bytes;
acc.hasUsed = true;
}
if (usage.total_bytes !== null && usage.total_bytes !== undefined) {
acc.total += usage.total_bytes;
acc.hasTotal = true;
}
return acc;
},
{ used: 0, total: 0, hasUsed: false, hasTotal: false }
);
// Card title: page name plus an optional "used / total" capacity summary,
// with '-' standing in for whichever side no adapter reported.
const pageTitle = (
<Space size={12} wrap>
<span>{t('Storage Adapters')}</span>
{(usageSummary.hasUsed || usageSummary.hasTotal) && (
<Typography.Text type="secondary" style={{ fontSize: 13, fontWeight: 400 }}>
{usageSummary.hasUsed ? formatBytes(usageSummary.used) : '-'}
{' / '}
{usageSummary.hasTotal ? formatBytes(usageSummary.total) : '-'}
</Typography.Text>
)}
</Space>
);
const columns = [
{ title: t('Name'), dataIndex: 'name' },
{ title: t('Type'), dataIndex: 'type', width: 140, render: (value: string) => renderTypeLabel(value) },
{ title: t('Mount Path'), dataIndex: 'path', width: 140, render: (v: string) => v || '-' },
{ title: t('Sub Path'), dataIndex: 'sub_path', width: 140, render: (v: string) => v || '-' },
{
title: t('Capacity Usage'),
width: 180,
render: (_: any, rec: AdapterItem) => {
return formatUsage(usageMap[rec.id]);
}
},
{
title: t('Enabled'),
dataIndex: 'enabled',
@@ -208,7 +263,7 @@ const AdaptersPage = memo(function AdaptersPage() {
return (
<PageCard
title={t('Storage Adapters')}
title={pageTitle}
extra={
<Space wrap>
<Button onClick={fetchList} loading={loading}>{t('Refresh')}</Button>

View File

@@ -364,12 +364,27 @@ async function main() {
await mountError();
window.addEventListener('beforeunload', () => {
// Run the plugin-supplied cleanup at most once; swallow errors so a
// misbehaving plugin cannot break host-driven unloading.
const runCleanup = () => {
try {
cleanup?.();
} catch {
void 0;
}
// Null out so a later 'beforeunload' cannot invoke cleanup a second time.
cleanup = null;
};
// The host (parent window) asks this frame to unload via postMessage with
// type 'foxel-plugin:unload' (sent by unloadPluginFrame on the host side).
window.addEventListener('message', (ev) => {
// Only accept same-origin messages that come from our parent frame.
if (ev.origin !== window.location.origin) return;
if (ev.source !== window.parent) return;
const data = ev.data as any;
if (!data || typeof data !== 'object') return;
if (data.type !== 'foxel-plugin:unload') return;
runCleanup();
// Clear the plugin's rendered DOM once its cleanup has run.
root.innerHTML = '';
});
// Also clean up on normal navigation/close of the iframe document.
window.addEventListener('beforeunload', () => {
runCleanup();
});
}