Compare commits

...

26 Commits
v2.2.0 ... main

Author SHA1 Message Date
dependabot[bot]
d5a24c69e1 chore(deps): bump python-multipart in the uv group across 1 directory (#121)
Bumps the uv group with 1 update in the / directory: [python-multipart](https://github.com/Kludex/python-multipart).


Updates `python-multipart` from 0.0.26 to 0.0.27
- [Release notes](https://github.com/Kludex/python-multipart/releases)
- [Changelog](https://github.com/Kludex/python-multipart/blob/main/CHANGELOG.md)
- [Commits](https://github.com/Kludex/python-multipart/compare/0.0.26...0.0.27)

---
updated-dependencies:
- dependency-name: python-multipart
  dependency-version: 0.0.27
  dependency-type: direct:production
  dependency-group: uv
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-05-10 22:12:43 +08:00
shiyu
e410c4982e feat: update version to v2.2.2 2026-05-10 12:27:00 +08:00
shiyu
70a57f6e88 feat: enhance release drafter workflow to include direct commits summary 2026-05-10 12:23:55 +08:00
shiyu
f89292e451 feat: implement cursor-based pagination across various components and APIs 2026-05-10 00:36:41 +08:00
shiyu
56b48b28a1 feat: add notices feature with API, database model, and UI integration 2026-05-09 21:40:15 +08:00
shiyu
a745c5975a feat: enhance SystemSettingsPage with improved config handling and state management 2026-05-08 22:06:27 +08:00
shiyu
19825c21d5 feat: add default file view mode configuration and UI support 2026-05-08 21:56:08 +08:00
shiyu
ee4de697fc feat: add file type categorization and size formatting in FileListView 2026-05-08 21:39:49 +08:00
shiyu
deddbdf585 feat: implement plugin frame cleanup on unload and enhance iframe handling 2026-05-06 23:30:20 +08:00
shiyu
bd24d7eeeb feat: add download locking and flood wait handling in TelegramAdapter 2026-05-06 23:00:10 +08:00
shiyu
93d5e5e313 feat: enhance TelegramAdapter with message caching and connection management 2026-05-06 22:12:35 +08:00
时雨
7b5f5e986e feat: add recent files backend APIs (#119) 2026-05-06 21:20:29 +08:00
shiyu
7741c1fe55 fix: handle native video thumbnail availability in get_or_create_thumb function 2026-05-04 13:51:53 +08:00
shiyu
c2015dd17c feat: enhance thumbnail handling and add native thumbnail support in VirtualFS 2026-05-03 23:51:39 +08:00
shiyu
ca500cbbf8 fix: handle FileNotFoundError in dav_get function and return 404 response 2026-05-03 23:51:39 +08:00
dependabot[bot]
d7aa3f1796 chore(deps): bump the uv group across 1 directory with 2 updates (#118)
Bumps the uv group with 2 updates in the / directory: [python-dotenv](https://github.com/theskumar/python-dotenv) and [python-multipart](https://github.com/Kludex/python-multipart).


Updates `python-dotenv` from 1.2.1 to 1.2.2
- [Release notes](https://github.com/theskumar/python-dotenv/releases)
- [Changelog](https://github.com/theskumar/python-dotenv/blob/main/CHANGELOG.md)
- [Commits](https://github.com/theskumar/python-dotenv/compare/v1.2.1...v1.2.2)

Updates `python-multipart` from 0.0.22 to 0.0.26
- [Release notes](https://github.com/Kludex/python-multipart/releases)
- [Changelog](https://github.com/Kludex/python-multipart/blob/main/CHANGELOG.md)
- [Commits](https://github.com/Kludex/python-multipart/compare/0.0.22...0.0.26)

---
updated-dependencies:
- dependency-name: python-dotenv
  dependency-version: 1.2.2
  dependency-type: direct:production
  dependency-group: uv
- dependency-name: python-multipart
  dependency-version: 0.0.26
  dependency-type: direct:production
  dependency-group: uv
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-05-03 13:58:32 +08:00
dependabot[bot]
460ce0c954 chore(deps): bump pillow in the uv group across 1 directory (#117)
Bumps the uv group with 1 update in the / directory: [pillow](https://github.com/python-pillow/Pillow).


Updates `pillow` from 12.1.1 to 12.2.0
- [Release notes](https://github.com/python-pillow/Pillow/releases)
- [Changelog](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst)
- [Commits](https://github.com/python-pillow/Pillow/compare/12.1.1...12.2.0)

---
updated-dependencies:
- dependency-name: pillow
  dependency-version: 12.2.0
  dependency-type: direct:production
  dependency-group: uv
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-05-03 13:54:14 +08:00
shiyu
873ef7aee5 chore: update version to v2.2.1 2026-05-03 08:28:39 +08:00
shiyu
dd2400c3ef fix: correct adapter type casing from "PikPak" to "pikpak" 2026-05-03 08:09:39 +08:00
shiyu
e0d6039a1a fix: correct adapter type casing from "pikpak" to "PikPak" 2026-05-03 07:56:02 +08:00
shiyu
676dacce41 feat: update PikPak adapter configuration and enhance token handling 2026-05-03 07:51:55 +08:00
shiyu
c514e17803 feat: add user_id to configuration and improve error handling in token refresh 2026-05-03 06:44:27 +08:00
shiyu
54821f78c6 feat: implement caching for adapter usage and display summary in AdaptersPage 2026-05-03 01:53:48 +08:00
shiyu
1f608974dc feat: enhance adapter usage tracking with new interface and display capacity usage in AdaptersPage 2026-05-02 22:47:22 +08:00
shiyu
a8737b883e feat: add adapter usage tracking and retrieval methods across various adapters 2026-05-02 21:55:35 +08:00
shiyu
dcc8aa139e feat: add LOCK and UNLOCK methods to WebDAV API and improve path handling in existing methods 2026-05-02 16:30:56 +08:00
51 changed files with 1769 additions and 456 deletions

View File

@@ -1,6 +1,9 @@
name: Release Drafter
on:
push:
branches:
- main
workflow_dispatch:
jobs:
@@ -10,8 +13,119 @@ jobs:
contents: write
pull-requests: write
steps:
- uses: release-drafter/release-drafter@v6
- id: drafter
uses: release-drafter/release-drafter@v6
with:
config-name: release-drafter.yml
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Add direct commits
if: steps.drafter.outputs.id != ''
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
RELEASE_ID: ${{ steps.drafter.outputs.id }}
HEAD_SHA: ${{ github.sha }}
run: |
set -euo pipefail
latest_tag="$(gh api "repos/${GITHUB_REPOSITORY}/releases/latest" --jq '.tag_name' 2>/dev/null || true)"
if [ -n "$latest_tag" ]; then
commits_json="$(gh api "repos/${GITHUB_REPOSITORY}/compare/${latest_tag}...${HEAD_SHA}" --jq '.commits')"
else
commits_json="$(gh api "repos/${GITHUB_REPOSITORY}/commits?sha=${HEAD_SHA}&per_page=100")"
fi
direct_commits="$(mktemp)"
printf '%s\n' "$commits_json" \
| jq -r '.[] | [.sha, (.commit.message | split("\n")[0]), (.author.login // .commit.author.name)] | @tsv' \
> "$direct_commits"
features=()
fixes=()
refactors=()
docs=()
maintenance=()
while IFS=$'\t' read -r sha subject author; do
if [[ -z "$sha" || "$subject" =~ ^Merge[[:space:]] ]]; then
continue
fi
prs="$(gh api \
-H "Accept: application/vnd.github+json" \
"repos/${GITHUB_REPOSITORY}/commits/${sha}/pulls" \
--jq 'length')"
if [ "$prs" -gt 0 ]; then
continue
fi
short_sha="${sha:0:7}"
line="- ${short_sha} ${subject} @${author}"
type="${subject%%:*}"
type="${type%%(*}"
type="${type%!}"
if [ "$type" = "feat" ]; then
features+=("$line")
elif [ "$type" = "fix" ]; then
fixes+=("$line")
elif [ "$type" = "refactor" ]; then
refactors+=("$line")
elif [ "$type" = "docs" ]; then
docs+=("$line")
elif [[ "$type" = "chore" || "$type" = "ci" || "$type" = "build" ]]; then
maintenance+=("$line")
fi
done < "$direct_commits"
direct_notes="$(mktemp)"
{
echo "## Direct Commits"
echo
if [ "${#features[@]}" -gt 0 ]; then
echo "### 🚀 Features"
printf '%s\n' "${features[@]}"
echo
fi
if [ "${#fixes[@]}" -gt 0 ]; then
echo "### 🐛 Bug Fixes"
printf '%s\n' "${fixes[@]}"
echo
fi
if [ "${#refactors[@]}" -gt 0 ]; then
echo "### 📦 Code Refactoring"
printf '%s\n' "${refactors[@]}"
echo
fi
if [ "${#docs[@]}" -gt 0 ]; then
echo "### 📄 Documentation"
printf '%s\n' "${docs[@]}"
echo
fi
if [ "${#maintenance[@]}" -gt 0 ]; then
echo "### 🧰 Maintenance"
printf '%s\n' "${maintenance[@]}"
echo
fi
} > "$direct_notes"
if [ "$(wc -l < "$direct_notes")" -le 2 ]; then
exit 0
fi
body="$(gh api "repos/${GITHUB_REPOSITORY}/releases/${RELEASE_ID}" --jq '.body')"
body_without_direct="$(printf '%s\n' "$body" | sed '/^## Direct Commits$/,$d')"
new_body="$(printf '%s\n\n%s\n' "$body_without_direct" "$(cat "$direct_notes")")"
gh api \
--method PATCH \
"repos/${GITHUB_REPOSITORY}/releases/${RELEASE_ID}" \
-f body="$new_body"

View File

@@ -9,7 +9,25 @@ def success(data: Any = None, msg: str = "ok", code: int = 0):
def page(items: list[Any], total: int, page: int, page_size: int):
"""统一分页数据结构。"""
pages = (total + page_size - 1) // page_size if page_size else 0
return {"items": items, "total": total, "page": page, "page_size": page_size, "pages": pages}
return {"items": items, "total": total, "page": page, "page_size": page_size, "pages": pages, "pagination_mode": "paged"}
def cursor_page(
items: list[Any],
page_size: int,
*,
cursor: str | None = None,
next_cursor: str | None = None,
):
"""无总数游标分页结构。"""
return {
"items": items,
"page_size": page_size,
"pagination_mode": "cursor",
"cursor": cursor,
"next_cursor": next_cursor,
"has_next": bool(next_cursor),
}
def error(msg: str, code: int = 1, data: Optional[Any] = None):

View File

@@ -6,6 +6,7 @@ from domain.backup import api as backup
from domain.config import api as config
from domain.email import api as email
from domain.offline_downloads import api as offline_downloads
from domain.notices import api as notices
from domain.plugins import api as plugins
from domain.processors import api as processors
from domain.share import api as share
@@ -19,12 +20,14 @@ from domain.audit import api as audit
from domain.permission import api as permission
from domain.user import api as user
from domain.role import api as role
from domain.recent_files import api as recent_files
def include_routers(app: FastAPI):
app.include_router(adapters.router)
app.include_router(search_api.router)
app.include_router(virtual_fs.router)
app.include_router(recent_files.router)
app.include_router(auth.router)
app.include_router(config.router)
app.include_router(processors.router)
@@ -39,6 +42,7 @@ def include_routers(app: FastAPI):
app.include_router(webdav_api.router)
app.include_router(s3_api.router)
app.include_router(offline_downloads.router)
app.include_router(notices.router)
app.include_router(email.router)
app.include_router(audit.router)
app.include_router(permission.router)

View File

@@ -51,6 +51,29 @@ async def available_adapter_types(
return success(data)
@router.get("/usage")
@audit(action=AuditAction.READ, description="获取适配器容量使用情况")
@require_system_permission(AdapterPermission.LIST)
async def list_adapter_usages(
request: Request,
current_user: Annotated[User, Depends(get_current_active_user)]
):
usages = await AdapterService.list_adapter_usages()
return success(usages)
@router.get("/{adapter_id}/usage")
@audit(action=AuditAction.READ, description="获取单个适配器容量使用情况")
@require_system_permission(AdapterPermission.LIST)
async def get_adapter_usage(
request: Request,
adapter_id: int,
current_user: Annotated[User, Depends(get_current_active_user)]
):
usage = await AdapterService.get_adapter_usage(adapter_id)
return success(usage)
@router.get("/{adapter_id}")
@audit(action=AuditAction.READ, description="获取适配器详情")
@require_system_permission(AdapterPermission.LIST)

View File

@@ -1,4 +1,4 @@
from typing import List, Dict, Protocol, runtime_checkable, Tuple, AsyncIterator
from typing import List, Dict, Protocol, runtime_checkable, Tuple, AsyncIterator, Any
from models import StorageAdapter
# 约定:任意新适配器模块需定义:
@@ -9,7 +9,7 @@ from models import StorageAdapter
@runtime_checkable
class BaseAdapter(Protocol):
record: StorageAdapter
async def list_dir(self, root: str, rel: str, page_num: int = 1, page_size: int = 50, sort_by: str = "name", sort_order: str = "asc") -> Tuple[List[Dict], int]: ...
async def list_dir(self, root: str, rel: str, page_num: int = 1, page_size: int = 50, sort_by: str = "name", sort_order: str = "asc", cursor: str | None = None) -> Tuple[List[Dict], int] | Dict[str, Any]: ...
async def read_file(self, root: str, rel: str) -> bytes: ...
async def write_file(self, root: str, rel: str, data: bytes): ...
async def write_file_stream(self, root: str, rel: str, data_iter: AsyncIterator[bytes]): ...
@@ -21,3 +21,8 @@ class BaseAdapter(Protocol):
async def stream_file(self, root: str, rel: str, range_header: str | None): ...
async def stat_file(self, root: str, rel: str): ...
def get_effective_root(self, sub_path: str | None) -> str: ...
@runtime_checkable
class UsageCapableAdapter(Protocol):
async def get_usage(self, root: str) -> Dict: ...

View File

@@ -455,6 +455,23 @@ class DropboxAdapter:
return StreamingResponse(iterator(), status_code=resp.status_code, headers=out_headers, media_type=content_type)
async def get_usage(self, root: str):
resp = await self._api_json("/users/get_space_usage", {})
resp.raise_for_status()
payload = resp.json() or {}
allocation = payload.get("allocation") or {}
allocated = allocation.get("allocated")
used = payload.get("used")
total = int(allocated) if allocated is not None else None
used_bytes = int(used) if used is not None else None
return {
"used_bytes": used_bytes,
"total_bytes": total,
"free_bytes": total - used_bytes if total is not None and used_bytes is not None else None,
"source": "dropbox",
"scope": "account",
}
ADAPTER_TYPE = "dropbox"
CONFIG_SCHEMA = [
@@ -468,4 +485,3 @@ CONFIG_SCHEMA = [
def ADAPTER_FACTORY(rec): return DropboxAdapter(rec)

View File

@@ -541,6 +541,22 @@ class GoogleDriveAdapter:
except Exception:
return None
async def get_usage(self, root: str):
resp = await self._request("GET", "/about", params={"fields": "storageQuota"})
resp.raise_for_status()
quota = (resp.json() or {}).get("storageQuota") or {}
limit = quota.get("limit")
usage = quota.get("usage")
total = int(limit) if limit is not None else None
used = int(usage) if usage is not None else None
return {
"used_bytes": used,
"total_bytes": total,
"free_bytes": total - used if total is not None and used is not None else None,
"source": "googledrive",
"scope": "drive",
}
ADAPTER_TYPE = "googledrive"

View File

@@ -329,6 +329,29 @@ class LocalAdapter:
info["exif"] = exif
return info
async def get_usage(self, root: str):
root_path = Path(root).resolve()
def _usage():
used = 0
for dirpath, dirnames, filenames in os.walk(root_path):
for filename in filenames:
fp = Path(dirpath) / filename
try:
used += fp.stat().st_size
except OSError:
continue
disk = shutil.disk_usage(root_path)
return {
"used_bytes": used,
"total_bytes": disk.total,
"free_bytes": disk.free,
"source": "local",
"scope": "mount",
}
return await asyncio.to_thread(_usage)
ADAPTER_TYPE = "local"
CONFIG_SCHEMA = [

View File

@@ -4,6 +4,7 @@ import httpx
from fastapi.responses import StreamingResponse, Response
from fastapi import HTTPException
from models import StorageAdapter
from api.response import cursor_page
MS_GRAPH_URL = "https://graph.microsoft.com/v1.0"
MS_OAUTH_URL = "https://login.microsoftonline.com/common/oauth2/v2.0/token"
@@ -114,65 +115,51 @@ class OneDriveAdapter:
"type": "dir" if is_dir else "file",
}
async def list_dir(self, root: str, rel: str, page_num: int = 1, page_size: int = 50, sort_by: str = "name", sort_order: str = "asc") -> Tuple[List[Dict], int]:
async def list_dir(
self,
root: str,
rel: str,
page_num: int = 1,
page_size: int = 50,
sort_by: str = "name",
sort_order: str = "asc",
cursor: str | None = None,
):
"""
列出目录内容。
由于 Graph API 不支持基于偏移($skip)的分页,此方法将获取所有项目,
Graph API 不提供目录总数,使用 nextLink 游标分页。
:param root: 根路径 (在此适配器中未使用,通过配置的 root 确定)。
:param rel: 相对路径。
:param page_num: 页码。
:param page_size: 每页大小。
:param sort_by: 排序字段
:param sort_order: 排序顺序
:return: 文件/目录列表和总数
:param cursor: Graph nextLink
:return: 游标分页结果。
"""
api_path = self._get_api_path(rel)
children_path = f"{api_path}:/children" if api_path else "/children"
all_items = []
params = {"$top": 999}
resp = await self._request("GET", api_path_segment=children_path, params=params)
if cursor:
resp = await self._request("GET", full_url=cursor)
else:
api_path = self._get_api_path(rel)
children_path = f"{api_path}:/children" if api_path else "/children"
resp = await self._request("GET", api_path_segment=children_path, params={"$top": page_size})
while True:
if resp.status_code == 404 and not all_items:
return [], 0
resp.raise_for_status()
if resp.status_code == 404:
return cursor_page([], page_size, cursor=cursor)
resp.raise_for_status()
try:
data = resp.json()
except Exception as e:
raise IOError(f"解析 Graph API 响应失败: {e}") from e
try:
data = resp.json()
except Exception as e:
raise IOError(f"解析 Graph API 响应失败: {e}") from e
all_items.extend(data.get("value", []))
next_link = data.get("@odata.nextLink")
if not next_link:
break
resp = await self._request("GET", full_url=next_link)
formatted_items = [self._format_item(item) for item in all_items]
# 排序
reverse = sort_order.lower() == "desc"
def get_sort_key(item):
key = (not item["is_dir"],)
sort_field = sort_by.lower()
if sort_field == "name":
key += (item["name"].lower(),)
elif sort_field == "size":
key += (item["size"],)
elif sort_field == "mtime":
key += (item["mtime"],)
else:
key += (item["name"].lower(),)
return key
formatted_items.sort(key=get_sort_key, reverse=reverse)
total_count = len(formatted_items)
start_idx = (page_num - 1) * page_size
end_idx = start_idx + page_size
return formatted_items[start_idx:end_idx], total_count
formatted_items = [self._format_item(item) for item in data.get("value", [])]
return cursor_page(
formatted_items,
page_size,
cursor=cursor,
next_cursor=data.get("@odata.nextLink"),
)
async def read_file(self, root: str, rel: str) -> bytes:
"""
@@ -443,6 +430,21 @@ class OneDriveAdapter:
resp.raise_for_status()
return self._format_item(resp.json())
async def get_usage(self, root: str):
resp = await self._request("GET", full_url=f"{MS_GRAPH_URL}/me/drive?$select=quota")
resp.raise_for_status()
quota = (resp.json() or {}).get("quota") or {}
used = quota.get("used")
total = quota.get("total")
remaining = quota.get("remaining")
return {
"used_bytes": int(used) if used is not None else None,
"total_bytes": int(total) if total is not None else None,
"free_bytes": int(remaining) if remaining is not None else None,
"source": "onedrive",
"scope": "drive",
}
ADAPTER_TYPE = "onedrive"

View File

@@ -13,8 +13,9 @@ from models import StorageAdapter
from .base import BaseAdapter
API_BASE = "https://api-drive.mypikpak.net/drive/v1"
USER_BASE = "https://user.mypikpak.net/v1"
API_BASE = "https://api-drive.mypikpak.com/drive/v1"
USER_BASE = "https://user.mypikpak.com/v1"
TOKEN_REFRESH_BUFFER = 300
ANDROID_ALGORITHMS = [
"SOP04dGzk0TNO7t7t9ekDbAmx+eq0OI1ovEx",
@@ -62,11 +63,11 @@ PLATFORM_CONFIG = {
"android": {
"client_id": "YNxT9w7GMdWvEOKa",
"client_secret": "dbw2OtmVEeuUvIptb1Coyg",
"client_version": "1.53.2",
"client_version": "1.21.0",
"package_name": "com.pikcloud.pikpak",
"sdk_version": "2.0.6.206003",
"algorithms": ANDROID_ALGORITHMS,
"ua": None,
"ua": "ANDROID-com.pikcloud.pikpak/1.21.0",
},
"web": {
"client_id": "YUMx5nI8ZU8Ap8pm",
@@ -109,6 +110,13 @@ def _as_bool(value: Any, default: bool = False) -> bool:
return bool(value)
def _as_int(value: Any, default: int = 0) -> int:
try:
return int(value or default)
except Exception:
return default
def _root_payload(root: str | None) -> Tuple[str, str]:
raw = (root or "").strip()
if not raw:
@@ -158,9 +166,9 @@ class PikPakAdapter:
if not self.username or not self.password:
raise ValueError("PikPak adapter requires username and password")
self.platform = str(cfg.get("platform") or "web").strip().lower()
self.platform = str(cfg.get("platform") or "android").strip().lower()
if self.platform not in PLATFORM_CONFIG:
self.platform = "web"
self.platform = "android"
platform_cfg = PLATFORM_CONFIG[self.platform]
self.client_id = str(platform_cfg["client_id"])
@@ -170,10 +178,14 @@ class PikPakAdapter:
self.sdk_version = str(platform_cfg["sdk_version"])
self.algorithms = list(platform_cfg["algorithms"])
self.device_id = str(cfg.get("device_id") or "").strip() or _md5_text(self.username + self.password)
device_id = str(cfg.get("device_id") or "").strip()
if not device_id or device_id == _md5_text(self.username + self.password):
device_id = _md5_text(self.username)
self.device_id = device_id
self.user_id = str(cfg.get("user_id") or "").strip()
self.refresh_token = str(cfg.get("refresh_token") or "").strip()
self.access_token = str(cfg.get("access_token") or "").strip()
self.expires_at = _as_int(cfg.get("expires_at"), 0)
self.captcha_token = str(cfg.get("captcha_token") or "").strip()
self.root_id = str(cfg.get("root_id") or "").strip()
self.disable_media_link = _as_bool(cfg.get("disable_media_link"), True)
@@ -232,6 +244,18 @@ class PikPakAdapter:
path = m.group(1) if m else "/"
return f"{method.upper()}:{path}"
@staticmethod
def _full_action(method: str, url: str) -> str:
return f"{method.upper()}:{url}"
def _captcha_action(self, method: str, url: str, *, auth: bool) -> str:
if not auth and url == f"{USER_BASE}/auth/signin":
return self._full_action(method, url)
return self._action(method, url)
def _has_valid_access_token(self) -> bool:
return bool(self.access_token and self.expires_at > int(time.time()) + TOKEN_REFRESH_BUFFER)
def _download_headers(self) -> Dict[str, str]:
headers = {
"User-Agent": self.user_agent,
@@ -247,8 +271,11 @@ class PikPakAdapter:
changed = False
for key, value in (
("refresh_token", self.refresh_token),
("access_token", self.access_token),
("expires_at", self.expires_at),
("captcha_token", self.captcha_token),
("device_id", self.device_id),
("user_id", self.user_id),
):
if value and cfg.get(key) != value:
cfg[key] = value
@@ -260,40 +287,44 @@ class PikPakAdapter:
await self.record.save(update_fields=["config"])
async def _ensure_auth(self):
if self.access_token:
if self._has_valid_access_token():
return
async with self._auth_lock:
if self.access_token:
if self._has_valid_access_token():
return
if self.refresh_token:
try:
await self._refresh_access_token()
return
except Exception:
except Exception as e:
self.access_token = ""
if not self.username or not self.password:
raise
raise HTTPException(
502,
detail=f"PikPak refresh token failed, please update refresh_token or login manually: {e}",
)
await self._login()
async def _login(self):
url = f"{USER_BASE}/auth/signin"
if not self.captcha_token:
await self._refresh_captcha_token(self._action("POST", url), self._login_captcha_meta())
await self._refresh_captcha_token(self._full_action("POST", url), self._login_captcha_meta())
body = {
"captcha_token": self.captcha_token,
"client_id": self.client_id,
"client_secret": self.client_secret,
"grant_type": "password",
"username": self.username,
"password": self.password,
}
data = await self._raw_json("POST", url, json=body, params={"client_id": self.client_id}, auth=False)
data = await self._raw_json("POST", url, json=body, auth=False)
self.refresh_token = str(data.get("refresh_token") or "").strip()
self.access_token = str(data.get("access_token") or "").strip()
self.expires_at = int(time.time()) + _as_int(data.get("expires_in"), 0)
self.user_id = str(data.get("sub") or self.user_id).strip()
if not self.refresh_token or not self.access_token:
raise HTTPException(502, detail="PikPak login failed: missing token")
if self.platform == "android":
if self.platform == "android" and not PLATFORM_CONFIG[self.platform].get("ua"):
self.user_agent = self._build_android_user_agent()
await self._save_runtime_config()
@@ -305,21 +336,18 @@ class PikPakAdapter:
"grant_type": "refresh_token",
"refresh_token": self.refresh_token,
}
data = await self._raw_json("POST", url, json=body, params={"client_id": self.client_id}, auth=False)
data = await self._raw_json("POST", url, json=body, auth=False)
self.refresh_token = str(data.get("refresh_token") or "").strip()
self.access_token = str(data.get("access_token") or "").strip()
self.expires_at = int(time.time()) + _as_int(data.get("expires_in"), 0)
self.user_id = str(data.get("sub") or self.user_id).strip()
if not self.refresh_token or not self.access_token:
raise HTTPException(502, detail="PikPak refresh token failed: missing token")
if self.platform == "android":
if self.platform == "android" and not PLATFORM_CONFIG[self.platform].get("ua"):
self.user_agent = self._build_android_user_agent()
await self._save_runtime_config()
def _login_captcha_meta(self) -> Dict[str, str]:
if re.match(r"\w+([-+.]\w+)*@\w+([-.]\w+)*\.\w+([-.]\w+)*", self.username):
return {"email": self.username}
if 11 <= len(self.username) <= 18:
return {"phone_number": self.username}
return {"username": self.username}
async def _refresh_captcha_token(self, action: str, meta: Dict[str, str]):
@@ -332,7 +360,7 @@ class PikPakAdapter:
"meta": meta,
"redirect_uri": "xlaccsdk01://xbase.cloud/callback?state=harbor",
}
data = await self._raw_json("POST", url, json=body, params={"client_id": self.client_id}, auth=False)
data = await self._raw_json("POST", url, json=body, auth=False)
verify_url = str(data.get("url") or "").strip()
token = str(data.get("captcha_token") or "").strip()
if token and not verify_url:
@@ -437,9 +465,15 @@ class PikPakAdapter:
if self.user_id:
await self._refresh_captcha_token_after_login(method, url)
else:
await self._refresh_captcha_token(self._action(method, url), self._login_captcha_meta())
await self._refresh_captcha_token(
self._captcha_action(method, url, auth=auth),
self._login_captcha_meta(),
)
else:
await self._refresh_captcha_token(self._action(method, url), self._login_captcha_meta())
await self._refresh_captcha_token(
self._captcha_action(method, url, auth=auth),
self._login_captcha_meta(),
)
return await self._raw_json(
method,
url,
@@ -776,6 +810,21 @@ class PikPakAdapter:
return None
return resp.content
async def get_usage(self, root: str):
data = await self._request("GET", "/about")
quota = data.get("quota") or {}
limit = quota.get("limit")
usage = quota.get("usage")
total = int(limit) if limit is not None else None
used = int(usage) if usage is not None else None
return {
"used_bytes": used,
"total_bytes": total,
"free_bytes": total - used if total is not None and used is not None else None,
"source": "pikpak",
"scope": "drive",
}
async def mkdir(self, root: str, rel: str):
rel = (rel or "").strip("/")
if not rel:
@@ -861,8 +910,10 @@ ADAPTER_TYPE = "pikpak"
CONFIG_SCHEMA = [
{"key": "username", "label": "PikPak 账号", "type": "string", "required": True},
{"key": "password", "label": "PikPak 密码", "type": "password", "required": True},
{"key": "platform", "label": "平台", "type": "select", "required": False, "default": "web", "options": ["web", "android", "pc"]},
{"key": "platform", "label": "平台", "type": "select", "required": False, "default": "android", "options": ["android", "web", "pc"]},
{"key": "refresh_token", "label": "Refresh Token", "type": "password", "required": False},
{"key": "access_token", "label": "Access Token", "type": "password", "required": False},
{"key": "expires_at", "label": "Access Token 过期时间戳", "type": "number", "required": False},
{"key": "captcha_token", "label": "Captcha Token", "type": "password", "required": False},
{"key": "device_id", "label": "Device ID", "type": "string", "required": False},
{"key": "root_id", "label": "根目录 ID", "type": "string", "required": False, "default": ""},

View File

@@ -840,6 +840,23 @@ class QuarkAdapter:
async def copy(self, root: str, src_rel: str, dst_rel: str, overwrite: bool = False):
raise NotImplementedError("QuarkOpen does not support copy via open API")
async def get_usage(self, root: str):
data = await self._request("GET", "/capacity/growth/info")
payload = (data or {}).get("data") or {}
if isinstance(payload.get("member"), dict):
payload = payload["member"]
used = payload.get("use_capacity") or payload.get("used_capacity")
total = payload.get("total_capacity")
used_bytes = int(used) if used is not None else None
total_bytes = int(total) if total is not None else None
return {
"used_bytes": used_bytes,
"total_bytes": total_bytes,
"free_bytes": total_bytes - used_bytes if total_bytes is not None and used_bytes is not None else None,
"source": "quark",
"scope": "account",
}
# -----------------
# STAT / EXISTS / 辅助
# -----------------

View File

@@ -1,26 +1,18 @@
from typing import List, Dict, Tuple, AsyncIterator
from typing import List, Dict, Tuple, AsyncIterator, Optional
import asyncio
import base64
import io
import os
import struct
import time
from models import StorageAdapter
from telethon import TelegramClient
from api.response import cursor_page
from telethon import TelegramClient, errors, utils
from telethon.crypto import AuthKey
from telethon.sessions import StringSession
from telethon.tl import types
import socks
_SESSION_LOCKS: Dict[str, asyncio.Lock] = {}
def _get_session_lock(session_string: str) -> asyncio.Lock:
lock = _SESSION_LOCKS.get(session_string)
if lock is None:
lock = asyncio.Lock()
_SESSION_LOCKS[session_string] = lock
return lock
class _NamedFile:
def __init__(self, file_obj, name: str):
@@ -61,6 +53,10 @@ CONFIG_SCHEMA = [
class TelegramAdapter:
"""Telegram 存储适配器 (使用用户 Session)"""
native_video_thumbnail_only = True
_message_cache_ttl = 300
_message_cache_limit = 200
_download_chunk_size = 512 * 1024
def __init__(self, record: StorageAdapter):
self.record = record
@@ -93,6 +89,12 @@ class TelegramAdapter:
if not all([self.api_id, self.api_hash, self.session_string, self.chat_id]):
raise ValueError("Telegram 适配器需要 api_id, api_hash, session_string 和 chat_id")
self._client: TelegramClient | None = None
self._client_lock = asyncio.Lock()
self._download_lock = asyncio.Lock()
self._active_stream_message_id: int | None = None
self._message_cache: Dict[int, Tuple[float, object]] = {}
@staticmethod
def _parse_legacy_session_string(value: str) -> StringSession:
"""
@@ -132,29 +134,42 @@ class TelegramAdapter:
return None
cached = []
others = []
downloadable = []
for t in thumbs:
if isinstance(t, (types.PhotoCachedSize, types.PhotoStrippedSize)):
cached.append(t)
elif isinstance(t, (types.PhotoSize, types.PhotoSizeProgressive)):
if not isinstance(t, types.PhotoSizeEmpty):
others.append(t)
downloadable.append(t)
if cached:
cached.sort(key=lambda x: len(getattr(x, "bytes", b"") or b""))
return cached[-1]
if others:
if downloadable:
def _sz(x):
if isinstance(x, types.PhotoSizeProgressive):
return max(x.sizes or [0])
return int(getattr(x, "size", 0) or 0)
others.sort(key=_sz)
return others[-1]
downloadable.sort(key=_sz)
return downloadable[-1]
if cached:
cached.sort(key=lambda x: len(getattr(x, "bytes", b"") or b""))
return cached[-1]
return None
@staticmethod
def _get_message_thumbs(message) -> list:
doc = message.document or message.video
if doc and getattr(doc, "thumbs", None):
return list(doc.thumbs or [])
if message.photo and getattr(message.photo, "sizes", None):
return list(message.photo.sizes or [])
return []
@classmethod
def _message_has_thumbnail(cls, message) -> bool:
return cls._pick_photo_thumb(cls._get_message_thumbs(message)) is not None
def _build_session(self) -> StringSession:
s = (self.session_string or "").strip()
if not s:
@@ -181,104 +196,230 @@ class TelegramAdapter:
"""创建一个新的 TelegramClient 实例"""
return TelegramClient(self._build_session(), self.api_id, self.api_hash, proxy=self.proxy)
def get_effective_root(self, sub_path: str | None) -> str:
return ""
async def _get_connected_client(self) -> TelegramClient:
async with self._client_lock:
if self._client is None:
self._client = self._get_client()
if not self._client.is_connected():
await self._client.connect()
return self._client
async def list_dir(self, root: str, rel: str, page_num: int = 1, page_size: int = 50, sort_by: str = "name", sort_order: str = "asc") -> Tuple[List[Dict], int]:
if rel:
return [], 0
async def _disconnect_shared_client(self):
if self._client and self._client.is_connected():
await self._client.disconnect()
client = self._get_client()
entries = []
try:
await client.connect()
messages = await client.get_messages(self.chat_id, limit=200)
for message in messages:
if not message:
continue
def _clear_message_cache(self):
    """Drop all cached messages (invoked after uploads so listings see fresh state)."""
    self._message_cache.clear()
media = message.document or message.video or message.photo
if not media:
continue
async def _get_cached_message(self, message_id: int):
    """Fetch a Telegram message by id through a small TTL cache.

    Cache entries are (expiry, message) tuples keyed by message id. On a
    miss (or expired entry) the message is re-fetched; a successful fetch
    is cached (evicting the soonest-expiring entry when the cache is full),
    while a missing message purges any stale entry. Returns the message or
    None.

    Fix: removed foreign statements interleaved from the superseded
    list_dir implementation that made this method incoherent.
    """
    now = time.monotonic()
    cached = self._message_cache.get(message_id)
    if cached and cached[0] > now:
        return cached[1]
    client = await self._get_connected_client()
    message = await client.get_messages(self.chat_id, ids=message_id)
    if message:
        if len(self._message_cache) >= self._message_cache_limit:
            # Evict the entry closest to expiry to cap memory use.
            oldest_key = min(self._message_cache, key=lambda k: self._message_cache[k][0])
            self._message_cache.pop(oldest_key, None)
        self._message_cache[message_id] = (now + self._message_cache_ttl, message)
    else:
        self._message_cache.pop(message_id, None)
    return message
filename = file_meta.name
if not filename:
if message.text and '.' in message.text and len(message.text) < 256 and '\n' not in message.text:
filename = message.text
else:
filename = f"unknown_{message.id}"
@staticmethod
def _get_message_media(message):
    """Return the message's media payload, preferring document, then video, then photo."""
    return message.document or message.video or message.photo
size = file_meta.size
if size is None:
# 兼容缺失 size 的情况
if hasattr(media, "size") and media.size is not None:
size = media.size
elif message.photo and getattr(message.photo, "sizes", None):
photo_size = message.photo.sizes[-1]
size = getattr(photo_size, "size", 0) or 0
else:
size = 0
@staticmethod
def _flood_wait_http_exception(exc: errors.FloodWaitError):
    """Translate a Telethon FloodWaitError into an HTTP 429 response.

    When Telegram reports a positive wait time it is surfaced both in the
    message and in a Retry-After header; otherwise a generic 429 is
    returned.

    Fix: removed foreign statements interleaved from the superseded
    list_dir implementation that made this method a syntax error.
    """
    from fastapi import HTTPException
    seconds = int(getattr(exc, "seconds", 0) or 0)
    if seconds > 0:
        return HTTPException(
            status_code=429,
            detail=f"Telegram 请求过于频繁,请等待 {seconds} 秒后重试",
            headers={"Retry-After": str(seconds)},
        )
    return HTTPException(status_code=429, detail="Telegram 请求过于频繁,请稍后重试")
# 排序
reverse = sort_order.lower() == "desc"
def get_sort_key(item):
key = (not item["is_dir"],)
sort_field = sort_by.lower()
if sort_field == "name":
key += (item["name"].lower(),)
elif sort_field == "size":
key += (item["size"],)
elif sort_field == "mtime":
key += (item["mtime"],)
@staticmethod
def _get_message_file_size(message, media) -> int:
    """Resolve the byte size of a message's media, tolerating missing metadata.

    Resolution order: message.file.size, then media.size, then the last
    photo size variant, else 0. Always returns a non-negative int.

    Fix: removed foreign sort-key statements interleaved from the
    superseded list_dir implementation.
    """
    file_meta = message.file
    size = file_meta.size if file_meta and file_meta.size is not None else None
    if size is None:
        if hasattr(media, "size") and media.size is not None:
            size = media.size
        elif message.photo and getattr(message.photo, "sizes", None):
            photo_size = message.photo.sizes[-1]
            size = getattr(photo_size, "size", 0) or 0
        else:
            size = 0
    return int(size or 0)
total_count = len(entries)
@staticmethod
def _get_message_mime_type(message, media) -> str:
    """Best-effort MIME type for a message's media.

    Prefers the file metadata, then the media object itself; photos
    default to image/jpeg and everything else to octet-stream.
    """
    file_meta = message.file
    meta_mime = getattr(file_meta, "mime_type", None) if file_meta else None
    if meta_mime:
        return meta_mime
    media_mime = getattr(media, "mime_type", None)
    if media_mime:
        return media_mime
    return "image/jpeg" if message.photo else "application/octet-stream"
# 分页
start_idx = (page_num - 1) * page_size
end_idx = start_idx + page_size
page_entries = entries[start_idx:end_idx]
return page_entries, total_count
async def read_file(self, root: str, rel: str) -> bytes:
@staticmethod
def _parse_message_id(rel: str) -> int:
    """Extract the Telegram message id from a '<id>_<name>' virtual path.

    Raises FileNotFoundError when the path does not match that format.

    Fix: removed a dead duplicate assignment (`message_id = int(...)`)
    left over before the return.
    """
    try:
        message_id_str, _ = rel.split('_', 1)
        return int(message_id_str)
    except (ValueError, IndexError):
        raise FileNotFoundError(f"无效的文件路径格式: {rel}")
def get_effective_root(self, sub_path: str | None) -> str:
return ""
async def list_dir(
    self,
    root: str,
    rel: str,
    page_num: int = 1,
    page_size: int = 50,
    sort_by: str = "name",
    sort_order: str = "asc",
    cursor: str | None = None,
):
    """List channel messages carrying media as virtual files (cursor paginated).

    Telegram has no directories, so any non-empty ``rel`` yields an empty
    page. ``cursor`` is the Telegram ``offset_id`` of the last message seen;
    batches are fetched until ``page_size`` entries are collected or the
    channel history is exhausted.

    Fixes: removed foreign read_file statements interleaved into the body,
    and restored the entry name to ``f"{message.id}_{filename}"`` — the
    computed ``filename`` was previously unused, and the '<id>_<name>'
    format is what _parse_message_id and the upload paths produce.
    """
    if rel:
        return cursor_page([], page_size, cursor=cursor)
    client = self._get_client()
    entries = []
    next_cursor = None
    try:
        await client.connect()
        offset_id = int(cursor) if cursor else 0
        # Fetch in batches of at least 50 but never more than 200 messages.
        batch_limit = min(max(page_size, 50), 200)
        while len(entries) < page_size:
            messages = await client.get_messages(self.chat_id, limit=batch_limit, offset_id=offset_id)
            if not messages:
                next_cursor = None
                break
            offset_id = messages[-1].id
            next_cursor = str(offset_id)
            for message in messages:
                if not message:
                    continue
                media = message.document or message.video or message.photo
                if not media:
                    continue
                file_meta = message.file
                if not file_meta:
                    continue
                filename = file_meta.name
                if not filename:
                    # Fall back to short single-line captions, else a placeholder.
                    if message.text and '.' in message.text and len(message.text) < 256 and '\n' not in message.text:
                        filename = message.text
                    else:
                        filename = f"unknown_{message.id}"
                size = file_meta.size
                if size is None:
                    # 兼容缺失 size 的情况
                    if hasattr(media, "size") and media.size is not None:
                        size = media.size
                    elif message.photo and getattr(message.photo, "sizes", None):
                        photo_size = message.photo.sizes[-1]
                        size = getattr(photo_size, "size", 0) or 0
                    else:
                        size = 0
                entries.append({
                    "name": f"{message.id}_{filename}",
                    "is_dir": False,
                    "size": size,
                    "mtime": int(message.date.timestamp()),
                    "type": "file",
                    "has_thumbnail": False,
                })
                if len(entries) >= page_size:
                    break
    finally:
        if client.is_connected():
            await client.disconnect()
    return cursor_page(entries, page_size, cursor=cursor, next_cursor=next_cursor)
async def read_file(self, root: str, rel: str) -> bytes:
    """Download the full media payload of message `rel` ('<id>_<name>') as bytes.

    Raises FileNotFoundError for bad paths or messages without media.
    A Telegram FloodWait is mapped to an HTTP 429 after dropping the
    shared client.
    """
    message_id = self._parse_message_id(rel)
    client = await self._get_connected_client()
    message = await self._get_cached_message(message_id)
    if not message or not self._get_message_media(message):
        raise FileNotFoundError(f"在频道 {self.chat_id} 中未找到消息ID为 {message_id} 的文件")
    try:
        # Serialize downloads through the shared lock.
        async with self._download_lock:
            file_bytes = await client.download_media(message, file=bytes)
            return file_bytes
    except errors.FloodWaitError as exc:
        await self._disconnect_shared_client()
        raise self._flood_wait_http_exception(exc)
async def read_file_range(self, root: str, rel: str, start: int, end: Optional[int] = None) -> bytes:
    """Download the byte range [start, end] of message `rel`'s media.

    When the file size is known the range is validated and clamped
    (HTTP 416 on an unsatisfiable range); when the size is unknown and
    `end` is None, only the single byte at `start` is fetched.
    FloodWait maps to HTTP 429.
    """
    from fastapi import HTTPException
    message_id = self._parse_message_id(rel)
    client = await self._get_connected_client()
    message = await self._get_cached_message(message_id)
    if not message:
        raise FileNotFoundError(f"在频道 {self.chat_id} 中未找到消息ID为 {message_id} 的文件")
    media = self._get_message_media(message)
    if not media:
        raise FileNotFoundError(f"在频道 {self.chat_id} 中未找到消息ID为 {message_id} 的文件")
    file_size = self._get_message_file_size(message, media)
    if file_size > 0:
        if start >= file_size:
            raise HTTPException(status_code=416, detail="Requested Range Not Satisfiable")
        if end is None or end >= file_size:
            # Clamp an open-ended or oversized range to the last byte.
            end = file_size - 1
    elif end is None:
        # Size unknown: read a single byte at `start`.
        end = start
    if end < start:
        raise HTTPException(status_code=416, detail="Requested Range Not Satisfiable")
    limit = end - start + 1
    data = bytearray()
    try:
        # Serialize downloads through the shared lock.
        async with self._download_lock:
            async for chunk in client.iter_download(
                media,
                offset=start,
                request_size=self._download_chunk_size,
                chunk_size=self._download_chunk_size,
                file_size=file_size or None,
            ):
                if not chunk:
                    continue
                need = limit - len(data)
                if need <= 0:
                    break
                # Trim the final chunk so we never exceed the requested range.
                data.extend(chunk[:need])
                if len(data) >= limit:
                    break
        return bytes(data)
    except errors.FloodWaitError as exc:
        await self._disconnect_shared_client()
        raise self._flood_wait_http_exception(exc)
async def write_file(self, root: str, rel: str, data: bytes):
"""将字节数据作为文件上传"""
client = self._get_client()
@@ -297,6 +438,7 @@ class TelegramAdapter:
stored_name = file_meta.name
if getattr(message, "id", None) is not None:
actual_rel = f"{message.id}_{stored_name}"
self._clear_message_cache()
return {"rel": actual_rel, "size": len(data)}
finally:
if client.is_connected():
@@ -326,6 +468,7 @@ class TelegramAdapter:
stored_name = file_meta.name
if getattr(message, "id", None) is not None:
actual_rel = f"{message.id}_{stored_name}"
self._clear_message_cache()
if file_meta and getattr(file_meta, "size", None):
size = int(file_meta.size)
return {"rel": actual_rel, "size": size}
@@ -361,6 +504,7 @@ class TelegramAdapter:
stored_name = file_meta.name
if getattr(message, "id", None) is not None:
actual_rel = f"{message.id}_{stored_name}"
self._clear_message_cache()
finally:
if os.path.exists(temp_path):
@@ -373,39 +517,7 @@ class TelegramAdapter:
raise NotImplementedError("Telegram 适配器不支持创建目录。")
async def get_thumbnail(self, root: str, rel: str, size: str = "medium"):
    """Download thumbnail bytes for message `rel` ('<id>_<name>').

    Returns None on any failure (bad path, missing message/thumbnail,
    or download error) — thumbnails are best-effort.

    Fixes: removed the unreachable trailing `return None` after the
    try/finally, and reused the existing _get_message_thumbs helper
    instead of duplicating its logic inline.
    """
    try:
        message_id_str, _ = rel.split('_', 1)
        message_id = int(message_id_str)
    except (ValueError, IndexError):
        return None
    client = self._get_client()
    try:
        await client.connect()
        message = await client.get_messages(self.chat_id, ids=message_id)
        if not message:
            return None
        thumb = self._pick_photo_thumb(self._get_message_thumbs(message))
        if not thumb:
            return None
        result = await client.download_media(message, bytes, thumb=thumb)
        if isinstance(result, (bytes, bytearray)):
            return bytes(result)
        return None
    except Exception:
        # Deliberate broad catch: a failed thumbnail must never break the listing.
        return None
    finally:
        if client.is_connected():
            await client.disconnect()
async def delete(self, root: str, rel: str):
"""删除一个文件 (即一条消息)"""
@@ -421,9 +533,12 @@ class TelegramAdapter:
result = await client.delete_messages(self.chat_id, [message_id])
if not result or not result[0].pts:
raise FileNotFoundError(f"{self.chat_id} 中删除消息 {message_id} 失败,可能消息不存在或无权限")
self._message_cache.pop(message_id, None)
finally:
if client.is_connected():
await client.disconnect()
if self._client is client:
self._client = None
async def move(self, root: str, src_rel: str, dst_rel: str):
    """Not supported: Telegram messages cannot be moved or renamed as files."""
    raise NotImplementedError("Telegram 适配器不支持移动。")
@@ -439,43 +554,21 @@ class TelegramAdapter:
from fastapi import HTTPException
try:
message_id_str, _ = rel.split('_', 1)
message_id = int(message_id_str)
except (ValueError, IndexError):
message_id = self._parse_message_id(rel)
except FileNotFoundError:
raise HTTPException(status_code=400, detail=f"无效的文件路径格式: {rel}")
client = self._get_client()
lock = _get_session_lock(self.session_string)
await lock.acquire()
try:
await client.connect()
message = await client.get_messages(self.chat_id, ids=message_id)
media = message.document or message.video or message.photo
if not message or not media:
client = await self._get_connected_client()
message = await self._get_cached_message(message_id)
if not message:
raise FileNotFoundError(f"在频道 {self.chat_id} 中未找到消息ID为 {message_id} 的文件")
media = self._get_message_media(message)
if not media:
raise FileNotFoundError(f"在频道 {self.chat_id} 中未找到消息ID为 {message_id} 的文件")
file_meta = message.file
file_size = file_meta.size if file_meta and file_meta.size is not None else None
if file_size is None:
if hasattr(media, "size") and media.size is not None:
file_size = media.size
elif message.photo and getattr(message.photo, "sizes", None):
photo_size = message.photo.sizes[-1]
file_size = getattr(photo_size, "size", 0) or 0
else:
file_size = 0
mime_type = None
if file_meta and getattr(file_meta, "mime_type", None):
mime_type = file_meta.mime_type
if not mime_type:
if hasattr(media, "mime_type") and media.mime_type:
mime_type = media.mime_type
elif message.photo:
mime_type = "image/jpeg"
else:
mime_type = "application/octet-stream"
file_size = self._get_message_file_size(message, media)
mime_type = self._get_message_mime_type(message, media)
start = 0
end = file_size - 1
@@ -486,6 +579,10 @@ class TelegramAdapter:
"Content-Type": mime_type,
}
if file_size <= 0:
headers["Content-Length"] = "0"
return StreamingResponse(iter(()), status_code=status, headers=headers)
if range_header:
try:
range_val = range_header.strip().partition("=")[2]
@@ -499,42 +596,71 @@ class TelegramAdapter:
except ValueError:
raise HTTPException(status_code=400, detail="Invalid Range header")
headers["Content-Length"] = str(end - start + 1)
self._active_stream_message_id = message_id
async def iterator():
downloaded = 0
try:
limit = end - start + 1
downloaded = 0
if self._active_stream_message_id != message_id:
return
async with self._download_lock:
async for chunk in client.iter_download(
media,
offset=start,
request_size=self._download_chunk_size,
chunk_size=self._download_chunk_size,
file_size=file_size,
):
if self._active_stream_message_id != message_id:
return
if not chunk:
continue
remaining = limit - downloaded
if remaining <= 0:
break
data = chunk[:remaining]
downloaded += len(data)
yield data
if downloaded >= limit:
break
except errors.FloodWaitError as exc:
await self._disconnect_shared_client()
if downloaded == 0:
raise self._flood_wait_http_exception(exc)
seconds = int(getattr(exc, "seconds", 0) or 0)
print(f"Telegram streaming stopped by FloodWait after partial response, wait={seconds}s")
return
except Exception:
await self._disconnect_shared_client()
raise
async for chunk in client.iter_download(media, offset=start):
if downloaded + len(chunk) > limit:
yield chunk[:limit - downloaded]
break
agen = iterator()
try:
first_chunk = await agen.__anext__()
except StopAsyncIteration:
first_chunk = b""
except HTTPException:
raise
async def response_iterator():
try:
if first_chunk:
yield first_chunk
async for chunk in agen:
yield chunk
downloaded += len(chunk)
if downloaded >= limit:
break
finally:
try:
if client.is_connected():
await client.disconnect()
finally:
lock.release()
await agen.aclose()
return StreamingResponse(iterator(), status_code=status, headers=headers)
return StreamingResponse(response_iterator(), status_code=status, headers=headers)
except HTTPException:
if client.is_connected():
await client.disconnect()
lock.release()
raise
except FileNotFoundError as e:
if client.is_connected():
await client.disconnect()
lock.release()
raise HTTPException(status_code=404, detail=str(e))
except Exception as e:
if client.is_connected():
await client.disconnect()
lock.release()
await self._disconnect_shared_client()
raise HTTPException(status_code=500, detail=f"Streaming failed: {str(e)}")
async def stat_file(self, root: str, rel: str):
@@ -544,35 +670,21 @@ class TelegramAdapter:
except (ValueError, IndexError):
raise FileNotFoundError(f"无效的文件路径格式: {rel}")
client = self._get_client()
try:
await client.connect()
message = await client.get_messages(self.chat_id, ids=message_id)
media = message.document or message.video or message.photo
if not message or not media:
raise FileNotFoundError(f"在频道 {self.chat_id} 中未找到消息ID为 {message_id} 的文件")
message = await self._get_cached_message(message_id)
media = self._get_message_media(message) if message else None
if not message or not media:
raise FileNotFoundError(f"在频道 {self.chat_id} 中未找到消息ID为 {message_id} 的文件")
file_meta = message.file
size = file_meta.size if file_meta and file_meta.size is not None else None
if size is None:
if hasattr(media, "size") and media.size is not None:
size = media.size
elif message.photo and getattr(message.photo, "sizes", None):
photo_size = message.photo.sizes[-1]
size = getattr(photo_size, "size", 0) or 0
else:
size = 0
size = self._get_message_file_size(message, media)
return {
"name": rel,
"is_dir": False,
"size": size,
"mtime": int(message.date.timestamp()),
"type": "file",
}
finally:
if client.is_connected():
await client.disconnect()
return {
"name": rel,
"is_dir": False,
"size": size,
"mtime": int(message.date.timestamp()),
"type": "file",
"has_thumbnail": False,
}
def ADAPTER_FACTORY(rec: StorageAdapter) -> TelegramAdapter:
    """Registry entry point: build a TelegramAdapter from its StorageAdapter record."""
    return TelegramAdapter(rec)

View File

@@ -1,3 +1,4 @@
import time
from typing import Optional
from fastapi import HTTPException
@@ -8,11 +9,34 @@ from .registry import (
normalize_adapter_type,
runtime_registry,
)
from .types import AdapterCreate, AdapterOut
from .types import AdapterCreate, AdapterOut, AdapterUsage
from .providers.base import UsageCapableAdapter
from models import StorageAdapter
class AdapterService:
_usage_cache_ttl = 3600
_usage_cache: dict[int, tuple[float, AdapterUsage]] = {}
@classmethod
def _get_cached_usage(cls, adapter_id: int) -> AdapterUsage | None:
    """Return the cached usage for an adapter, or None if absent/expired.

    Expired entries are evicted eagerly on access.
    """
    entry = cls._usage_cache.get(adapter_id)
    if entry is None:
        return None
    expires_at, usage = entry
    if expires_at > time.time():
        return usage
    cls._usage_cache.pop(adapter_id, None)
    return None
@classmethod
def _set_cached_usage(cls, usage: AdapterUsage):
    """Cache a usage result for _usage_cache_ttl seconds, keyed by adapter id."""
    cls._usage_cache[usage.id] = (time.time() + cls._usage_cache_ttl, usage)
@classmethod
def _clear_cached_usage(cls, adapter_id: int):
    """Invalidate an adapter's cached usage (called after update/delete)."""
    cls._usage_cache.pop(adapter_id, None)
@classmethod
def _validate_and_normalize_config(cls, adapter_type: str, cfg):
schemas = get_config_schemas()
@@ -85,6 +109,74 @@ class AdapterService:
raise HTTPException(404, detail="Not found")
return AdapterOut.model_validate(rec)
@classmethod
def _unsupported_usage(cls, rec: StorageAdapter, reason: str) -> AdapterUsage:
    """Build a supported=False usage result carrying a machine-readable reason."""
    return AdapterUsage(
        id=rec.id,
        name=rec.name,
        type=rec.type,
        path=rec.path,
        supported=False,
        reason=reason,
    )
@classmethod
async def get_adapter_usage(cls, adapter_id: int) -> AdapterUsage:
    """Usage report for one adapter by id; raises 404 when the record is missing."""
    rec = await StorageAdapter.get_or_none(id=adapter_id)
    if not rec:
        raise HTTPException(404, detail="Not found")
    return await cls._get_adapter_usage_for_record(rec)
@classmethod
async def _get_adapter_usage_for_record(cls, rec: StorageAdapter) -> AdapterUsage:
    """Compute (or serve from cache) the usage report for one adapter record.

    Returns an 'unsupported' result when the adapter is disabled,
    unavailable in the runtime registry, does not implement usage
    reporting, raises while reporting, or returns a malformed payload.
    """
    cached = cls._get_cached_usage(rec.id)
    if cached:
        return cached
    if not rec.enabled:
        return cls._unsupported_usage(rec, "adapter_disabled")
    adapter = runtime_registry.get(rec.id)
    if not adapter:
        # The registry may be stale (adapter added recently); retry once after refresh.
        await runtime_registry.refresh()
        adapter = runtime_registry.get(rec.id)
    if not adapter:
        return cls._unsupported_usage(rec, "adapter_unavailable")
    if not isinstance(adapter, UsageCapableAdapter):
        return cls._unsupported_usage(rec, "adapter_not_implemented")
    root = adapter.get_effective_root(rec.sub_path)
    try:
        raw_usage = await adapter.get_usage(root)
    except Exception as e:
        return cls._unsupported_usage(rec, f"usage_failed: {e}")
    if not isinstance(raw_usage, dict):
        return cls._unsupported_usage(rec, "invalid_usage_response")
    usage = AdapterUsage(
        id=rec.id,
        name=rec.name,
        type=rec.type,
        path=rec.path,
        supported=True,
        used_bytes=raw_usage.get("used_bytes"),
        total_bytes=raw_usage.get("total_bytes"),
        free_bytes=raw_usage.get("free_bytes"),
        source=raw_usage.get("source") or rec.type,
        scope=raw_usage.get("scope"),
    )
    # Only successful reports are cached; failures are recomputed on the next call.
    cls._set_cached_usage(usage)
    return usage
@classmethod
async def list_adapter_usages(cls):
    """Usage reports for every configured adapter, sequentially, in record order."""
    return [
        await cls._get_adapter_usage_for_record(record)
        for record in await StorageAdapter.all()
    ]
@classmethod
async def update_adapter(cls, adapter_id: int, data: AdapterCreate, current_user: Optional[User]):
rec = await StorageAdapter.get_or_none(id=adapter_id)
@@ -105,6 +197,7 @@ class AdapterService:
await rec.save()
await runtime_registry.upsert(rec)
cls._clear_cached_usage(adapter_id)
return AdapterOut.model_validate(rec)
@classmethod
@@ -113,4 +206,5 @@ class AdapterService:
if not deleted:
raise HTTPException(404, detail="Not found")
runtime_registry.remove(adapter_id)
cls._clear_cached_usage(adapter_id)
return {"deleted": True}

View File

@@ -48,3 +48,17 @@ class AdapterOut(AdapterBase):
class Config:
from_attributes = True
class AdapterUsage(BaseModel):
    """Storage usage report for one adapter.

    When supported is False the byte counters are absent and `reason`
    explains why.
    """
    id: int
    name: str
    type: str
    path: str
    supported: bool
    # Byte counters are optional: a backend may report only a subset of them.
    used_bytes: Optional[int] = None
    total_bytes: Optional[int] = None
    free_bytes: Optional[int] = None
    # Which backend/API produced the numbers (defaults to the adapter type).
    source: Optional[str] = None
    # What the numbers cover (presumably account vs. folder scope) — TODO confirm.
    scope: Optional[str] = None
    # Machine-readable explanation, set only when supported is False.
    reason: Optional[str] = None

View File

@@ -19,6 +19,7 @@ PUBLIC_CONFIG_KEYS = [
"THEME_BORDER_RADIUS",
"THEME_CUSTOM_TOKENS",
"THEME_CUSTOM_CSS",
"DEFAULT_FILE_VIEW_MODE",
]

View File

@@ -10,7 +10,7 @@ from models.database import Configuration, UserAccount
load_dotenv(dotenv_path=".env")
VERSION = "v2.2.0"
VERSION = "v2.2.2"
class ConfigService:

View File

@@ -0,0 +1,3 @@
from .service import NoticeService, notice_sync_service
__all__ = ["NoticeService", "notice_sync_service"]

36
domain/notices/api.py Normal file
View File

@@ -0,0 +1,36 @@
from typing import Annotated
from fastapi import APIRouter, Depends, Query
from api.response import success
from domain.auth import User, get_current_active_user
from .service import NoticeService
router = APIRouter(prefix="/api/notices", tags=["notices"])
@router.get("")
async def list_notices(
    current_user: Annotated[User, Depends(get_current_active_user)],
    page: int = Query(1, ge=1),
):
    """Paginated notice list (newest first).

    NOTE(review): returns the model dump directly instead of the success()
    envelope used by the sibling endpoints — confirm the frontend expects this.
    """
    data = await NoticeService.list_notices(page=page)
    return data.model_dump()
@router.get("/popup")
async def get_popup_notice(
    current_user: Annotated[User, Depends(get_current_active_user)],
):
    """Latest undismissed popup notice, or null when there is none."""
    item = await NoticeService.get_popup_notice()
    return success(item.model_dump() if item else None)
@router.post("/{notice_id}/dismiss")
async def dismiss_popup_notice(
    notice_id: int,
    current_user: Annotated[User, Depends(get_current_active_user)],
):
    """Permanently dismiss a popup notice so it is not shown again."""
    await NoticeService.dismiss_popup(notice_id)
    return success()

177
domain/notices/service.py Normal file
View File

@@ -0,0 +1,177 @@
import asyncio
import logging
from datetime import datetime, timezone
from typing import Any
import httpx
from domain.config import VERSION
from models.database import Notice
from .types import NoticeItem, NoticeListResponse
logger = logging.getLogger(__name__)
REMOTE_NOTICES_URL = "https://foxel.cc/api/notices"
SYNC_INTERVAL_SECONDS = 60 * 60 * 24
PAGE_SIZE = 20
def _normalize_version(version: str) -> str:
return (version or "").strip().removeprefix("v").removeprefix("V")
def _parse_remote_time(value: Any) -> datetime:
if isinstance(value, (int, float)):
timestamp = float(value)
if timestamp > 10_000_000_000:
timestamp = timestamp / 1000
return datetime.fromtimestamp(timestamp, timezone.utc)
if isinstance(value, str):
text = value.strip()
if not text:
return datetime.now(timezone.utc)
try:
if text.isdigit():
return _parse_remote_time(int(text))
return datetime.fromisoformat(text.replace("Z", "+00:00"))
except ValueError:
return datetime.now(timezone.utc)
return datetime.now(timezone.utc)
class NoticeService:
    """Serve notices from the local DB and sync them from the remote feed."""

    @classmethod
    async def list_notices(cls, page: int = 1, page_size: int = PAGE_SIZE) -> NoticeListResponse:
        """Newest-first page of notices; page is 1-based, page_size clamped to [1, 100]."""
        page = max(1, page)
        page_size = max(1, min(page_size, 100))
        query = Notice.all().order_by("-created_at", "-id")
        total = await query.count()
        notices = await query.offset((page - 1) * page_size).limit(page_size)
        return NoticeListResponse(
            items=[cls._to_item(item) for item in notices],
            page=page,
            pageSize=page_size,
            total=total,
        )

    @classmethod
    async def get_popup_notice(cls) -> NoticeItem | None:
        """Return the latest undismissed popup notice, or None."""
        notice = await Notice.filter(is_popup=True, popup_dismissed=False).order_by("-created_at", "-id").first()
        if not notice:
            return None
        return cls._to_item(notice)

    @classmethod
    async def dismiss_popup(cls, notice_id: int) -> None:
        """Mark a popup dismissed and clear its popup flag so it never pops again."""
        await Notice.filter(id=notice_id).update(popup_dismissed=True, is_popup=False)

    @classmethod
    async def sync_remote_notices(cls) -> None:
        """Upsert remote notices into the DB, preserving local dismissal state."""
        items = await cls._fetch_remote_notices()
        if not items:
            return
        popup_remote_ids: list[int] = []
        for raw in items:
            remote_id = raw.get("id")
            if remote_id is None:
                continue
            try:
                remote_id = int(remote_id)
            except (TypeError, ValueError):
                # Skip entries whose id is not an integer.
                continue
            is_popup = bool(raw.get("isPopup"))
            if is_popup:
                popup_remote_ids.append(remote_id)
            notice = await Notice.get_or_none(remote_id=remote_id)
            popup_dismissed = notice.popup_dismissed if notice else False
            await Notice.update_or_create(
                remote_id=remote_id,
                defaults={
                    "title": str(raw.get("title") or "")[:255],
                    "content_md": str(raw.get("contentMd") or ""),
                    # A locally dismissed notice never becomes a popup again.
                    "is_popup": is_popup and not popup_dismissed,
                    "created_at": _parse_remote_time(raw.get("createdAt")),
                },
            )
        await cls._keep_only_latest_popup(popup_remote_ids)

    @classmethod
    async def _keep_only_latest_popup(cls, popup_remote_ids: list[int]) -> None:
        """Leave at most one popup: the newest undismissed notice among the remote ids."""
        latest = await Notice.filter(remote_id__in=popup_remote_ids, popup_dismissed=False).order_by(
            "-created_at", "-id"
        ).first()
        if not latest:
            return
        await Notice.filter(is_popup=True).exclude(id=latest.id).update(is_popup=False)

    @classmethod
    async def _fetch_remote_notices(cls) -> list[dict[str, Any]]:
        """Fetch every page of notices from the remote feed, keeping dict items only."""
        results: list[dict[str, Any]] = []
        page = 1
        async with httpx.AsyncClient(timeout=15.0, follow_redirects=True) as client:
            while True:
                resp = await client.get(
                    REMOTE_NOTICES_URL,
                    params={"version": _normalize_version(VERSION), "page": page},
                )
                resp.raise_for_status()
                data = resp.json()
                items = data.get("items") if isinstance(data, dict) else None
                if not isinstance(items, list):
                    break
                results.extend(item for item in items if isinstance(item, dict))
                total = data.get("total", len(results)) if isinstance(data, dict) else len(results)
                page_size = data.get("pageSize") or data.get("page_size") or len(items)
                # Stop on an empty page, when the reported total is reached, or when paging is broken.
                if not items or len(results) >= int(total or 0) or page_size <= 0:
                    break
                page += 1
        return results

    @staticmethod
    def _to_item(notice: Notice) -> NoticeItem:
        """Map an ORM row to the API schema (createdAt in epoch milliseconds)."""
        return NoticeItem(
            id=notice.id,
            title=notice.title,
            contentMd=notice.content_md or "",
            isPopup=notice.is_popup and not notice.popup_dismissed,
            createdAt=int(notice.created_at.timestamp() * 1000),
        )
class NoticeSyncService:
    """Background task that periodically pulls remote notices into the local DB."""

    def __init__(self):
        # Single background task; None when the loop is not running.
        self._worker: asyncio.Task | None = None
        self._stop_event = asyncio.Event()

    async def start(self) -> None:
        """Start the sync loop; no-op if it is already running."""
        if self._worker and not self._worker.done():
            return
        self._stop_event.clear()
        self._worker = asyncio.create_task(self._run_loop())

    async def stop(self) -> None:
        """Signal the loop to stop and wait for the task to finish."""
        if not self._worker:
            return
        self._stop_event.set()
        await self._worker
        self._worker = None

    async def _run_loop(self) -> None:
        # Sync once immediately, then every SYNC_INTERVAL_SECONDS until stopped.
        while not self._stop_event.is_set():
            try:
                await NoticeService.sync_remote_notices()
            except Exception:
                logger.exception("Failed to sync notices")
            try:
                # wait_for doubles as an interruptible sleep: returns early on stop().
                await asyncio.wait_for(self._stop_event.wait(), timeout=SYNC_INTERVAL_SECONDS)
            except asyncio.TimeoutError:
                pass


# Module-level singleton used by the application lifecycle hooks.
notice_sync_service = NoticeSyncService()

16
domain/notices/types.py Normal file
View File

@@ -0,0 +1,16 @@
from pydantic import BaseModel
class NoticeItem(BaseModel):
    """API shape of a single notice (camelCase to match the frontend)."""
    id: int
    title: str
    contentMd: str
    isPopup: bool
    # Creation time in epoch milliseconds.
    createdAt: int
class NoticeListResponse(BaseModel):
    """Paginated notice listing: the current page plus paging metadata."""
    items: list[NoticeItem]
    page: int
    pageSize: int
    total: int

View File

@@ -0,0 +1,3 @@
from .api import router
__all__ = ["router"]

View File

@@ -0,0 +1,44 @@
from typing import Annotated
from fastapi import APIRouter, Depends, Query, Request
from api.response import success
from domain.audit import AuditAction, audit
from domain.auth import User, get_current_active_user
from .service import RecentFilesService
from .types import RecordRecentFileRequest
router = APIRouter(prefix="/api/fs/recent", tags=["recent-files"])
@router.get("/")
@audit(action=AuditAction.READ, description="查看最近打开文件")
async def list_recent_files(
    request: Request,
    current_user: Annotated[User, Depends(get_current_active_user)],
    limit: int = Query(20, ge=1, le=200, description="返回数量"),
):
    """Return the current user's most recently opened files (up to `limit`)."""
    data = await RecentFilesService.list_recent_files(current_user.id, limit)
    return success(data)
@router.post("/")
@audit(action=AuditAction.CREATE, description="记录最近打开文件", body_fields=["path"])
async def record_recent_file(
    request: Request,
    body: RecordRecentFileRequest,
    current_user: Annotated[User, Depends(get_current_active_user)],
):
    """Record (or refresh) a recently-opened-file entry for the current user."""
    data = await RecentFilesService.record_opened_file(current_user.id, body.path)
    return success(data)
@router.delete("/")
@audit(action=AuditAction.DELETE, description="清空最近打开文件")
async def clear_recent_files(
    request: Request,
    current_user: Annotated[User, Depends(get_current_active_user)],
):
    """Delete all of the current user's recently-opened-file entries."""
    data = await RecentFilesService.clear_recent_files(current_user.id)
    return success(data)

View File

@@ -0,0 +1,23 @@
from datetime import datetime, timezone
from models.database import RecentFile
class RecentFilesService:
    """Per-user 'recently opened files' bookkeeping backed by the RecentFile table."""

    @staticmethod
    async def record_opened_file(user_id: int, path: str) -> dict:
        """Insert the (user, path) row or refresh its opened_at to now (UTC)."""
        item, created = await RecentFile.get_or_create(user_id=user_id, path=path)
        if not created:
            # Bump the timestamp on repeat opens; re-read so the stored value is returned.
            await RecentFile.filter(id=item.id).update(opened_at=datetime.now(timezone.utc))
            await item.fetch_from_db()
        return {"id": item.id, "path": item.path, "opened_at": item.opened_at.isoformat()}

    @staticmethod
    async def list_recent_files(user_id: int, limit: int) -> list[dict]:
        """Return the user's entries, most recently opened first, capped at `limit`."""
        items = await RecentFile.filter(user_id=user_id).order_by("-opened_at").limit(limit)
        return [{"id": i.id, "path": i.path, "opened_at": i.opened_at.isoformat()} for i in items]

    @staticmethod
    async def clear_recent_files(user_id: int) -> dict:
        """Delete all of the user's rows; returns how many were removed."""
        deleted = await RecentFile.filter(user_id=user_id).delete()
        return {"deleted": deleted}

View File

@@ -0,0 +1,11 @@
from pydantic import BaseModel, Field
class RecordRecentFileRequest(BaseModel):
    """Request body for recording a recently opened file."""
    path: str = Field(..., min_length=1, max_length=4096, description="文件完整路径")
class RecentFileItem(BaseModel):
    """API shape of one recently-opened-file entry."""
    id: int
    path: str
    # ISO-8601 timestamp string of the last open.
    opened_at: str

View File

@@ -183,9 +183,10 @@ async def browse_fs(
page_size: int = Query(50, ge=1, le=500, description="每页条数"),
sort_by: str = Query("name", description="按字段排序: name, size, mtime"),
sort_order: str = Query("asc", description="排序顺序: asc, desc"),
cursor: str | None = Query(None, description="游标分页位置"),
):
data = await VirtualFSService.list_directory_with_permission(
full_path, current_user.id, page_num, page_size, sort_by, sort_order
full_path, current_user.id, page_num, page_size, sort_by, sort_order, cursor
)
return success(data)
@@ -211,9 +212,10 @@ async def root_listing(
page_size: int = Query(50, ge=1, le=500, description="每页条数"),
sort_by: str = Query("name", description="按字段排序: name, size, mtime"),
sort_order: str = Query("asc", description="排序顺序: asc, desc"),
cursor: str | None = Query(None, description="游标分页位置"),
):
# 根目录不需要权限检查,但需要过滤无权限的子目录
data = await VirtualFSService.list_directory_with_permission(
"/", current_user.id, page_num, page_size, sort_by, sort_order
"/", current_user.id, page_num, page_size, sort_by, sort_order, cursor
)
return success(data)

View File

@@ -57,6 +57,7 @@ class VirtualFSListingMixin(VirtualFSResolverMixin):
page_size: int = 50,
sort_by: str = "name",
sort_order: str = "asc",
cursor: str | None = None,
) -> Dict:
norm = cls._normalize_path(path).rstrip("/") or "/"
adapters = await StorageAdapter.filter(enabled=True)
@@ -89,6 +90,9 @@ class VirtualFSListingMixin(VirtualFSResolverMixin):
def annotate_entry(entry: Dict) -> None:
if not entry.get("is_dir"):
if entry.get("has_thumbnail") is not None:
entry["has_thumbnail"] = bool(entry.get("has_thumbnail"))
return
name = entry.get("name", "")
entry["has_thumbnail"] = bool(is_image_filename(name) or is_video_filename(name))
else:
@@ -116,12 +120,28 @@ class VirtualFSListingMixin(VirtualFSResolverMixin):
adapter_entries_for_merge: List[Dict] = []
adapter_entries_page: List[Dict] | None = None
adapter_total: int | None = None
adapter_listing: Dict[str, Any] | None = None
if adapter_model and adapter_instance:
list_dir = getattr(adapter_instance, "list_dir", None)
if callable(list_dir):
adapter_entries_page, adapter_total = await list_dir(
effective_root, rel, page_num, page_size, sort_by, sort_order
)
try:
parameters = inspect.signature(list_dir).parameters
except (TypeError, ValueError):
parameters = {}
if "cursor" in parameters:
raw_listing = await list_dir(
effective_root, rel, page_num, page_size, sort_by, sort_order, cursor=cursor
)
else:
raw_listing = await list_dir(
effective_root, rel, page_num, page_size, sort_by, sort_order
)
if isinstance(raw_listing, dict):
adapter_listing = raw_listing
adapter_entries_page = raw_listing.get("items", [])
adapter_total = raw_listing.get("total")
else:
adapter_entries_page, adapter_total = raw_listing
if rel:
parent_rel = cls._parent_rel(rel)
if rel:
@@ -186,6 +206,9 @@ class VirtualFSListingMixin(VirtualFSResolverMixin):
annotate_entry_list = adapter_entries_page or []
for ent in annotate_entry_list:
annotate_entry(ent)
if adapter_listing and adapter_listing.get("pagination_mode") == "cursor":
adapter_listing["items"] = annotate_entry_list
return adapter_listing
return page(adapter_entries_page, adapter_total, page_num, page_size)
@classmethod
@@ -273,7 +296,10 @@ class VirtualFSListingMixin(VirtualFSResolverMixin):
is_dir = False
rel_name = rel.rstrip("/").split("/")[-1] if rel else path.rstrip("/").split("/")[-1]
name_hint = str(info.get("name") or rel_name or "")
info["has_thumbnail"] = bool(not is_dir and (is_image_filename(name_hint) or is_video_filename(name_hint)))
if not is_dir and info.get("has_thumbnail") is not None:
info["has_thumbnail"] = bool(info.get("has_thumbnail"))
else:
info["has_thumbnail"] = bool(not is_dir and (is_image_filename(name_hint) or is_video_filename(name_hint)))
if verbose and not is_dir:
vector_index = await cls._gather_vector_index(path)
if vector_index is not None:
@@ -290,13 +316,14 @@ class VirtualFSListingMixin(VirtualFSResolverMixin):
page_size: int = 50,
sort_by: str = "name",
sort_order: str = "asc",
cursor: str | None = None,
) -> Dict:
"""
带权限过滤的目录列表
过滤掉用户没有读取权限的条目
"""
result = await cls.list_virtual_dir(path, page_num, page_size, sort_by, sort_order)
result = await cls.list_virtual_dir(path, page_num, page_size, sort_by, sort_order, cursor)
items = result.get("items", [])
if not items:
return result

View File

@@ -1,6 +1,7 @@
import base64
import hashlib
import mimetypes
import uuid
from email.utils import formatdate
from urllib.parse import urlparse, unquote
from typing import Optional
@@ -43,6 +44,8 @@ def _dav_headers(extra: Optional[dict] = None) -> dict:
"MKCOL",
"MOVE",
"COPY",
"LOCK",
"UNLOCK",
]),
}
if extra:
@@ -157,17 +160,19 @@ def _normalize_fs_path(path: str) -> str:
return unquote(full)
@router.options("")
@router.options("/{path:path}")
@audit(action=AuditAction.READ, description="WebDAV: OPTIONS", user_kw="user")
async def options_root(_request: Request, path: str = "", _enabled: None = Depends(_ensure_webdav_enabled)):
return Response(status_code=200, headers=_dav_headers())
@router.api_route("", methods=["PROPFIND"])
@router.api_route("/{path:path}", methods=["PROPFIND"])
@audit(action=AuditAction.READ, description="WebDAV: PROPFIND", user_kw="user")
async def propfind(
request: Request,
path: str,
path: str = "",
_enabled: None = Depends(_ensure_webdav_enabled),
user: User = Depends(_get_basic_user),
):
@@ -247,7 +252,10 @@ async def dav_get(
if full_path != "/":
await PermissionService.require_path_permission(user.id, full_path, PathAction.READ)
range_header = request.headers.get("Range")
return await VirtualFSService.stream_file(full_path, range_header)
try:
return await VirtualFSService.stream_file(full_path, range_header)
except FileNotFoundError:
raise HTTPException(404, detail="Not found")
@router.head("/{path:path}")
@@ -280,29 +288,43 @@ async def dav_head(
return Response(status_code=200, headers=headers)
@router.api_route("", methods=["PUT"])
@router.api_route("/{path:path}", methods=["PUT"])
@audit(action=AuditAction.UPLOAD, description="WebDAV: PUT", user_kw="user")
async def dav_put(
path: str,
request: Request,
path: str = "",
_enabled: None = Depends(_ensure_webdav_enabled),
user: User = Depends(_get_basic_user),
):
full_path = _normalize_fs_path(path)
await PermissionService.require_path_permission(user.id, full_path, PathAction.WRITE)
existed = True
try:
await VirtualFSService.stat_file(full_path)
except FileNotFoundError:
existed = False
except HTTPException as exc:
if exc.status_code == 404:
existed = False
else:
raise
async def body_iter():
async for chunk in request.stream():
if chunk:
yield chunk
size = await VirtualFSService.write_file_stream(full_path, body_iter(), overwrite=True)
return Response(status_code=201, headers=_dav_headers({"Content-Length": "0"}))
await VirtualFSService.write_file_stream(full_path, body_iter(), overwrite=True)
return Response(status_code=204 if existed else 201, headers=_dav_headers({"Content-Length": "0"}))
@router.api_route("", methods=["DELETE"])
@router.api_route("/{path:path}", methods=["DELETE"])
@audit(action=AuditAction.DELETE, description="WebDAV: DELETE", user_kw="user")
async def dav_delete(
path: str,
_request: Request,
path: str = "",
_enabled: None = Depends(_ensure_webdav_enabled),
user: User = Depends(_get_basic_user),
):
@@ -312,6 +334,58 @@ async def dav_delete(
return Response(status_code=204, headers=_dav_headers())
@router.api_route("", methods=["LOCK"])
@router.api_route("/{path:path}", methods=["LOCK"])
@audit(action=AuditAction.UPDATE, description="WebDAV: LOCK", user_kw="user")
async def dav_lock(
path: str = "",
_request: Request = None,
_enabled: None = Depends(_ensure_webdav_enabled),
user: User = Depends(_get_basic_user),
):
full_path = _normalize_fs_path(path)
if full_path != "/":
await PermissionService.require_path_permission(user.id, full_path, PathAction.WRITE)
token = f"opaquelocktoken:{uuid.uuid4()}"
ns = "{DAV:}"
prop = ET.Element(ns + "prop")
lockdiscovery = ET.SubElement(prop, ns + "lockdiscovery")
activelock = ET.SubElement(lockdiscovery, ns + "activelock")
locktype = ET.SubElement(activelock, ns + "locktype")
ET.SubElement(locktype, ns + "write")
lockscope = ET.SubElement(activelock, ns + "lockscope")
ET.SubElement(lockscope, ns + "exclusive")
depth = ET.SubElement(activelock, ns + "depth")
depth.text = "Infinity"
locktoken = ET.SubElement(activelock, ns + "locktoken")
href = ET.SubElement(locktoken, ns + "href")
href.text = token
xml = ET.tostring(prop, encoding="utf-8", xml_declaration=True)
return Response(
content=xml,
status_code=200,
media_type='application/xml; charset="utf-8"',
headers=_dav_headers({"Lock-Token": f"<{token}>"}),
)
@router.api_route("", methods=["UNLOCK"])
@router.api_route("/{path:path}", methods=["UNLOCK"])
@audit(action=AuditAction.UPDATE, description="WebDAV: UNLOCK", user_kw="user")
async def dav_unlock(
path: str = "",
_request: Request = None,
_enabled: None = Depends(_ensure_webdav_enabled),
user: User = Depends(_get_basic_user),
):
full_path = _normalize_fs_path(path)
if full_path != "/":
await PermissionService.require_path_permission(user.id, full_path, PathAction.WRITE)
return Response(status_code=204, headers=_dav_headers())
@router.api_route("/{path:path}", methods=["MKCOL"])
@audit(action=AuditAction.CREATE, description="WebDAV: MKCOL", user_kw="user")
async def dav_mkcol(

View File

@@ -89,8 +89,17 @@ class VirtualFSRouteMixin(VirtualFSTempLinkMixin):
adapter, mount, root, rel = await cls.resolve_adapter_and_rel(full_path)
if not rel or rel.endswith("/"):
raise HTTPException(400, detail="Not a file")
if not (is_image_filename(rel) or is_video_filename(rel)):
raise HTTPException(404, detail="Not an image or video")
has_native_thumb = False
if callable(getattr(adapter, "get_thumbnail", None)):
stat_file = getattr(adapter, "stat_file", None)
if callable(stat_file):
try:
stat = await stat_file(root, rel)
has_native_thumb = bool(isinstance(stat, dict) and stat.get("has_thumbnail"))
except Exception:
has_native_thumb = False
if not (is_image_filename(rel) or is_video_filename(rel) or has_native_thumb):
raise HTTPException(404, detail="Not an image, video, or native thumbnail file")
data, mime, key = await get_or_create_thumb(adapter, mount.id, root, rel, w, h, fit) # type: ignore
headers = {
"Cache-Control": "public, max-age=3600",
@@ -266,15 +275,30 @@ class VirtualFSRouteMixin(VirtualFSTempLinkMixin):
async def list_directory(cls, full_path: str, page_num: int, page_size: int, sort_by: str, sort_order: str):
full_path = cls._normalize_path(full_path)
result = await cls.list_virtual_dir(full_path, page_num, page_size, sort_by, sort_order)
pagination = {
"mode": result.get("pagination_mode", "paged"),
"page_size": result.get("page_size", page_size),
}
if pagination["mode"] == "cursor":
pagination.update(
{
"cursor": result.get("cursor"),
"next_cursor": result.get("next_cursor"),
"has_next": bool(result.get("has_next")),
}
)
else:
pagination.update(
{
"total": result["total"],
"page": result["page"],
"pages": result["pages"],
}
)
return {
"path": full_path,
"entries": result["items"],
"pagination": {
"total": result["total"],
"page": result["page"],
"page_size": result["page_size"],
"pages": result["pages"],
},
"pagination": pagination,
}
@classmethod

View File

@@ -26,9 +26,10 @@ class VirtualFSService(
page_size: int = 50,
sort_by: str = "name",
sort_order: str = "asc",
cursor: str | None = None,
):
"""列出目录内容"""
return await cls.list_virtual_dir(path, page_num, page_size, sort_by, sort_order)
return await cls.list_virtual_dir(path, page_num, page_size, sort_by, sort_order, cursor)
@classmethod
async def list_directory_with_permission(
@@ -39,19 +40,35 @@ class VirtualFSService(
page_size: int = 50,
sort_by: str = "name",
sort_order: str = "asc",
cursor: str | None = None,
):
"""列出目录内容(带权限过滤)"""
full_path = cls._normalize_path(path).rstrip("/") or "/"
result = await cls.list_virtual_dir_with_permission(
full_path, user_id, page_num, page_size, sort_by, sort_order
full_path, user_id, page_num, page_size, sort_by, sort_order, cursor
)
pagination = {
"mode": result.get("pagination_mode", "paged") if isinstance(result, dict) else "paged",
"page_size": result.get("page_size", page_size) if isinstance(result, dict) else page_size,
}
if pagination["mode"] == "cursor":
pagination.update(
{
"cursor": result.get("cursor") if isinstance(result, dict) else cursor,
"next_cursor": result.get("next_cursor") if isinstance(result, dict) else None,
"has_next": bool(result.get("has_next")) if isinstance(result, dict) else False,
}
)
else:
pagination.update(
{
"total": result.get("total", 0) if isinstance(result, dict) else 0,
"page": result.get("page", page_num) if isinstance(result, dict) else page_num,
"pages": result.get("pages", 0) if isinstance(result, dict) else 0,
}
)
return {
"path": full_path,
"entries": result.get("items", []) if isinstance(result, dict) else [],
"pagination": {
"total": result.get("total", 0) if isinstance(result, dict) else 0,
"page": result.get("page", page_num) if isinstance(result, dict) else page_num,
"page_size": result.get("page_size", page_size) if isinstance(result, dict) else page_size,
"pages": result.get("pages", 0) if isinstance(result, dict) else 0,
},
"pagination": pagination,
}

View File

@@ -23,6 +23,7 @@ VIDEO_HEAD_FALLBACK_LIMIT = 4 * 1024 * 1024 # 4MB
VIDEO_THUMB_SEEK_SECONDS = (15, 10, 5, 3, 1, 0)
VIDEO_BLACK_FRAME_MEAN_THRESHOLD = 12.0
CACHE_ROOT = Path('data/.thumb_cache')
THUMB_CACHE_VERSION = "v2"
def is_image_filename(name: str) -> bool:
@@ -47,7 +48,7 @@ def is_video_filename(name: str) -> bool:
def _cache_key(adapter_id: int, rel: str, size: int, mtime: int, w: int, h: int, fit: str) -> str:
raw = f"{adapter_id}|{rel}|{size}|{mtime}|{w}x{h}|{fit}".encode()
raw = f"{THUMB_CACHE_VERSION}|{adapter_id}|{rel}|{size}|{mtime}|{w}x{h}|{fit}".encode()
return hashlib.sha1(raw).hexdigest()
@@ -385,8 +386,11 @@ async def get_or_create_thumb(adapter, adapter_id: int, root: str, rel: str, w:
stat = await adapter.stat_file(root, rel)
size = int(stat.get('size') or 0)
is_video = is_video_filename(rel)
if not is_video and size > MAX_IMAGE_SOURCE_SIZE:
raise HTTPException(400, detail="Image too large for thumbnail")
is_image = is_image_filename(rel)
get_thumb_impl = getattr(adapter, "get_thumbnail", None)
should_try_native_thumb = callable(get_thumb_impl) and (
is_image or is_video or bool(stat.get("has_thumbnail"))
)
key = _cache_key(adapter_id, rel, size, int(
stat.get('mtime', 0)), w, h, fit)
@@ -397,8 +401,7 @@ async def get_or_create_thumb(adapter, adapter_id: int, root: str, rel: str, w:
_ensure_cache_dir(path)
thumb_bytes, mime = None, None
get_thumb_impl = getattr(adapter, "get_thumbnail", None)
if callable(get_thumb_impl):
if should_try_native_thumb:
size_str = "large" if w > 400 else "medium" if w > 100 else "small"
native_thumb_bytes = await get_thumb_impl(root, rel, size_str)
@@ -406,15 +409,15 @@ async def get_or_create_thumb(adapter, adapter_id: int, root: str, rel: str, w:
try:
from PIL import Image
im = Image.open(io.BytesIO(native_thumb_bytes))
buf = io.BytesIO()
im.save(buf, 'WEBP', quality=85)
thumb_bytes = buf.getvalue()
mime = 'image/webp'
thumb_bytes, mime = _image_to_webp(im, w, h, fit)
except Exception as e:
print(
f"Failed to convert native thumbnail to WebP: {e}, falling back.")
thumb_bytes, mime = None, None
if is_video and getattr(adapter, "native_video_thumbnail_only", False) and not thumb_bytes:
raise HTTPException(404, detail="Native video thumbnail unavailable")
if not thumb_bytes:
if is_video:
async def _maybe_transcoding_thumb() -> Tuple[bytes, str] | None:
@@ -493,7 +496,9 @@ async def get_or_create_thumb(adapter, adapter_id: int, root: str, rel: str, w:
thumb_bytes, mime = retry_thumb, retry_mime
except Exception:
pass
else:
elif is_image:
if size > MAX_IMAGE_SOURCE_SIZE:
raise HTTPException(400, detail="Image too large for thumbnail")
read_data = await adapter.read_file(root, rel)
try:
thumb_bytes, mime = generate_thumb(
@@ -502,6 +507,8 @@ async def get_or_create_thumb(adapter, adapter_id: int, root: str, rel: str, w:
print(e)
raise HTTPException(
500, detail=f"Thumbnail generation failed: {e}")
else:
raise HTTPException(500, detail="Native thumbnail unavailable")
if thumb_bytes:
path.write_bytes(thumb_bytes)

View File

@@ -23,6 +23,7 @@ import httpx
from dotenv import load_dotenv
from domain.tasks import task_queue_service, task_scheduler
from domain.role.service import RoleService
from domain.notices import notice_sync_service
load_dotenv()
@@ -77,6 +78,7 @@ async def lifespan(app: FastAPI):
from domain.plugins import init_plugins
await init_plugins(app)
await task_scheduler.start()
await notice_sync_service.start()
# 在所有路由加载完成后,挂载静态文件服务(放在最后以避免覆盖 API 路由)
app.mount("/", SPAStaticFiles(directory="web/dist", html=True, check_dir=False), name="static")
@@ -85,6 +87,7 @@ async def lifespan(app: FastAPI):
try:
yield
finally:
await notice_sync_service.stop()
await task_scheduler.stop()
await task_queue_service.stop_worker()
await close_db()

View File

@@ -234,6 +234,33 @@ class ShareLink(Model):
table = "share_links"
class RecentFile(Model):
id = fields.IntField(pk=True)
user: fields.ForeignKeyRelation[UserAccount] = fields.ForeignKeyField(
"models.UserAccount", related_name="recent_files", on_delete=fields.CASCADE
)
path = fields.CharField(max_length=4096)
opened_at = fields.DatetimeField(auto_now=True)
class Meta:
table = "recent_files"
unique_together = (("user", "path"),)
class Notice(Model):
id = fields.IntField(pk=True)
remote_id = fields.IntField(unique=True, index=True)
title = fields.CharField(max_length=255)
content_md = fields.TextField(null=True)
is_popup = fields.BooleanField(default=False)
popup_dismissed = fields.BooleanField(default=False)
created_at = fields.DatetimeField()
updated_at = fields.DatetimeField(auto_now=True)
class Meta:
table = "notices"
class Plugin(Model):
id = fields.IntField(pk=True)
key = fields.CharField(max_length=100, unique=True) # 插件唯一标识

View File

@@ -11,13 +11,13 @@ dependencies = [
"fastapi>=0.127.0",
"mcp>=1.26.0",
"paramiko>=4.0.0",
"pillow>=12.0.0",
"pillow>=12.2.0",
"pydantic[email]>=2.12.5",
"pyjwt>=2.10.1",
"pymilvus[milvus-lite]>=2.6.5",
"pysocks>=1.7.1",
"python-dotenv>=1.2.1",
"python-multipart>=0.0.21",
"python-dotenv>=1.2.2",
"python-multipart>=0.0.27",
"qdrant-client>=1.16.2",
"setuptools<82",
"telethon>=1.42.0",

72
uv.lock generated
View File

@@ -469,13 +469,13 @@ requires-dist = [
{ name = "fastapi", specifier = ">=0.127.0" },
{ name = "mcp", specifier = ">=1.26.0" },
{ name = "paramiko", specifier = ">=4.0.0" },
{ name = "pillow", specifier = ">=12.0.0" },
{ name = "pillow", specifier = ">=12.2.0" },
{ name = "pydantic", extras = ["email"], specifier = ">=2.12.5" },
{ name = "pyjwt", specifier = ">=2.10.1" },
{ name = "pymilvus", extras = ["milvus-lite"], specifier = ">=2.6.5" },
{ name = "pysocks", specifier = ">=1.7.1" },
{ name = "python-dotenv", specifier = ">=1.2.1" },
{ name = "python-multipart", specifier = ">=0.0.21" },
{ name = "python-dotenv", specifier = ">=1.2.2" },
{ name = "python-multipart", specifier = ">=0.0.27" },
{ name = "qdrant-client", specifier = ">=1.16.2" },
{ name = "setuptools", specifier = "<82" },
{ name = "telethon", specifier = ">=1.42.0" },
@@ -872,35 +872,35 @@ wheels = [
[[package]]
name = "pillow"
version = "12.1.1"
version = "12.2.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/1f/42/5c74462b4fd957fcd7b13b04fb3205ff8349236ea74c7c375766d6c82288/pillow-12.1.1.tar.gz", hash = "sha256:9ad8fa5937ab05218e2b6a4cff30295ad35afd2f83ac592e68c0d871bb0fdbc4", size = 46980264, upload-time = "2026-02-11T04:23:07.146Z" }
sdist = { url = "https://files.pythonhosted.org/packages/8c/21/c2bcdd5906101a30244eaffc1b6e6ce71a31bd0742a01eb89e660ebfac2d/pillow-12.2.0.tar.gz", hash = "sha256:a830b1a40919539d07806aa58e1b114df53ddd43213d9c8b75847eee6c0182b5", size = 46987819, upload-time = "2026-04-01T14:46:17.687Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/03/d0/bebb3ffbf31c5a8e97241476c4cf8b9828954693ce6744b4a2326af3e16b/pillow-12.1.1-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:417423db963cb4be8bac3fc1204fe61610f6abeed1580a7a2cbb2fbda20f12af", size = 4062652, upload-time = "2026-02-11T04:21:53.19Z" },
{ url = "https://files.pythonhosted.org/packages/2d/c0/0e16fb0addda4851445c28f8350d8c512f09de27bbb0d6d0bbf8b6709605/pillow-12.1.1-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:b957b71c6b2387610f556a7eb0828afbe40b4a98036fc0d2acfa5a44a0c2036f", size = 4138823, upload-time = "2026-02-11T04:22:03.088Z" },
{ url = "https://files.pythonhosted.org/packages/6b/fb/6170ec655d6f6bb6630a013dd7cf7bc218423d7b5fa9071bf63dc32175ae/pillow-12.1.1-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:097690ba1f2efdeb165a20469d59d8bb03c55fb6621eb2041a060ae8ea3e9642", size = 3601143, upload-time = "2026-02-11T04:22:04.909Z" },
{ url = "https://files.pythonhosted.org/packages/59/04/dc5c3f297510ba9a6837cbb318b87dd2b8f73eb41a43cc63767f65cb599c/pillow-12.1.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:2815a87ab27848db0321fb78c7f0b2c8649dee134b7f2b80c6a45c6831d75ccd", size = 5266254, upload-time = "2026-02-11T04:22:07.656Z" },
{ url = "https://files.pythonhosted.org/packages/05/30/5db1236b0d6313f03ebf97f5e17cda9ca060f524b2fcc875149a8360b21c/pillow-12.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f7ed2c6543bad5a7d5530eb9e78c53132f93dfa44a28492db88b41cdab885202", size = 4657499, upload-time = "2026-02-11T04:22:09.613Z" },
{ url = "https://files.pythonhosted.org/packages/6f/18/008d2ca0eb612e81968e8be0bbae5051efba24d52debf930126d7eaacbba/pillow-12.1.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:652a2c9ccfb556235b2b501a3a7cf3742148cd22e04b5625c5fe057ea3e3191f", size = 6232137, upload-time = "2026-02-11T04:22:11.434Z" },
{ url = "https://files.pythonhosted.org/packages/70/f1/f14d5b8eeb4b2cd62b9f9f847eb6605f103df89ef619ac68f92f748614ea/pillow-12.1.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d6e4571eedf43af33d0fc233a382a76e849badbccdf1ac438841308652a08e1f", size = 8042721, upload-time = "2026-02-11T04:22:13.321Z" },
{ url = "https://files.pythonhosted.org/packages/5a/d6/17824509146e4babbdabf04d8171491fa9d776f7061ff6e727522df9bd03/pillow-12.1.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b574c51cf7d5d62e9be37ba446224b59a2da26dc4c1bb2ecbe936a4fb1a7cb7f", size = 6347798, upload-time = "2026-02-11T04:22:15.449Z" },
{ url = "https://files.pythonhosted.org/packages/d1/ee/c85a38a9ab92037a75615aba572c85ea51e605265036e00c5b67dfafbfe2/pillow-12.1.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a37691702ed687799de29a518d63d4682d9016932db66d4e90c345831b02fb4e", size = 7039315, upload-time = "2026-02-11T04:22:17.24Z" },
{ url = "https://files.pythonhosted.org/packages/ec/f3/bc8ccc6e08a148290d7523bde4d9a0d6c981db34631390dc6e6ec34cacf6/pillow-12.1.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f95c00d5d6700b2b890479664a06e754974848afaae5e21beb4d83c106923fd0", size = 6462360, upload-time = "2026-02-11T04:22:19.111Z" },
{ url = "https://files.pythonhosted.org/packages/f6/ab/69a42656adb1d0665ab051eec58a41f169ad295cf81ad45406963105408f/pillow-12.1.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:559b38da23606e68681337ad74622c4dbba02254fc9cb4488a305dd5975c7eeb", size = 7165438, upload-time = "2026-02-11T04:22:21.041Z" },
{ url = "https://files.pythonhosted.org/packages/02/46/81f7aa8941873f0f01d4b55cc543b0a3d03ec2ee30d617a0448bf6bd6dec/pillow-12.1.1-cp314-cp314-win32.whl", hash = "sha256:03edcc34d688572014ff223c125a3f77fb08091e4607e7745002fc214070b35f", size = 6431503, upload-time = "2026-02-11T04:22:22.833Z" },
{ url = "https://files.pythonhosted.org/packages/40/72/4c245f7d1044b67affc7f134a09ea619d4895333d35322b775b928180044/pillow-12.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:50480dcd74fa63b8e78235957d302d98d98d82ccbfac4c7e12108ba9ecbdba15", size = 7176748, upload-time = "2026-02-11T04:22:24.64Z" },
{ url = "https://files.pythonhosted.org/packages/e4/ad/8a87bdbe038c5c698736e3348af5c2194ffb872ea52f11894c95f9305435/pillow-12.1.1-cp314-cp314-win_arm64.whl", hash = "sha256:5cb1785d97b0c3d1d1a16bc1d710c4a0049daefc4935f3a8f31f827f4d3d2e7f", size = 2544314, upload-time = "2026-02-11T04:22:26.685Z" },
{ url = "https://files.pythonhosted.org/packages/6c/9d/efd18493f9de13b87ede7c47e69184b9e859e4427225ea962e32e56a49bc/pillow-12.1.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:1f90cff8aa76835cba5769f0b3121a22bd4eb9e6884cfe338216e557a9a548b8", size = 5268612, upload-time = "2026-02-11T04:22:29.884Z" },
{ url = "https://files.pythonhosted.org/packages/f8/f1/4f42eb2b388eb2ffc660dcb7f7b556c1015c53ebd5f7f754965ef997585b/pillow-12.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1f1be78ce9466a7ee64bfda57bdba0f7cc499d9794d518b854816c41bf0aa4e9", size = 4660567, upload-time = "2026-02-11T04:22:31.799Z" },
{ url = "https://files.pythonhosted.org/packages/01/54/df6ef130fa43e4b82e32624a7b821a2be1c5653a5fdad8469687a7db4e00/pillow-12.1.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:42fc1f4677106188ad9a55562bbade416f8b55456f522430fadab3cef7cd4e60", size = 6269951, upload-time = "2026-02-11T04:22:33.921Z" },
{ url = "https://files.pythonhosted.org/packages/a9/48/618752d06cc44bb4aae8ce0cd4e6426871929ed7b46215638088270d9b34/pillow-12.1.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:98edb152429ab62a1818039744d8fbb3ccab98a7c29fc3d5fcef158f3f1f68b7", size = 8074769, upload-time = "2026-02-11T04:22:35.877Z" },
{ url = "https://files.pythonhosted.org/packages/c3/bd/f1d71eb39a72fa088d938655afba3e00b38018d052752f435838961127d8/pillow-12.1.1-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d470ab1178551dd17fdba0fef463359c41aaa613cdcd7ff8373f54be629f9f8f", size = 6381358, upload-time = "2026-02-11T04:22:37.698Z" },
{ url = "https://files.pythonhosted.org/packages/64/ef/c784e20b96674ed36a5af839305f55616f8b4f8aa8eeccf8531a6e312243/pillow-12.1.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6408a7b064595afcab0a49393a413732a35788f2a5092fdc6266952ed67de586", size = 7068558, upload-time = "2026-02-11T04:22:39.597Z" },
{ url = "https://files.pythonhosted.org/packages/73/cb/8059688b74422ae61278202c4e1ad992e8a2e7375227be0a21c6b87ca8d5/pillow-12.1.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5d8c41325b382c07799a3682c1c258469ea2ff97103c53717b7893862d0c98ce", size = 6493028, upload-time = "2026-02-11T04:22:42.73Z" },
{ url = "https://files.pythonhosted.org/packages/c6/da/e3c008ed7d2dd1f905b15949325934510b9d1931e5df999bb15972756818/pillow-12.1.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c7697918b5be27424e9ce568193efd13d925c4481dd364e43f5dff72d33e10f8", size = 7191940, upload-time = "2026-02-11T04:22:44.543Z" },
{ url = "https://files.pythonhosted.org/packages/01/4a/9202e8d11714c1fc5951f2e1ef362f2d7fbc595e1f6717971d5dd750e969/pillow-12.1.1-cp314-cp314t-win32.whl", hash = "sha256:d2912fd8114fc5545aa3a4b5576512f64c55a03f3ebcca4c10194d593d43ea36", size = 6438736, upload-time = "2026-02-11T04:22:46.347Z" },
{ url = "https://files.pythonhosted.org/packages/f3/ca/cbce2327eb9885476b3957b2e82eb12c866a8b16ad77392864ad601022ce/pillow-12.1.1-cp314-cp314t-win_amd64.whl", hash = "sha256:4ceb838d4bd9dab43e06c363cab2eebf63846d6a4aeaea283bbdfd8f1a8ed58b", size = 7182894, upload-time = "2026-02-11T04:22:48.114Z" },
{ url = "https://files.pythonhosted.org/packages/ec/d2/de599c95ba0a973b94410477f8bf0b6f0b5e67360eb89bcb1ad365258beb/pillow-12.1.1-cp314-cp314t-win_arm64.whl", hash = "sha256:7b03048319bfc6170e93bd60728a1af51d3dd7704935feb228c4d4faab35d334", size = 2546446, upload-time = "2026-02-11T04:22:50.342Z" },
{ url = "https://files.pythonhosted.org/packages/bf/98/4595daa2365416a86cb0d495248a393dfc84e96d62ad080c8546256cb9c0/pillow-12.2.0-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:3adc9215e8be0448ed6e814966ecf3d9952f0ea40eb14e89a102b87f450660d8", size = 4100848, upload-time = "2026-04-01T14:44:48.48Z" },
{ url = "https://files.pythonhosted.org/packages/0b/79/40184d464cf89f6663e18dfcf7ca21aae2491fff1a16127681bf1fa9b8cf/pillow-12.2.0-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:6a9adfc6d24b10f89588096364cc726174118c62130c817c2837c60cf08a392b", size = 4176515, upload-time = "2026-04-01T14:44:51.353Z" },
{ url = "https://files.pythonhosted.org/packages/b0/63/703f86fd4c422a9cf722833670f4f71418fb116b2853ff7da722ea43f184/pillow-12.2.0-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:6a6e67ea2e6feda684ed370f9a1c52e7a243631c025ba42149a2cc5934dec295", size = 3640159, upload-time = "2026-04-01T14:44:53.588Z" },
{ url = "https://files.pythonhosted.org/packages/71/e0/fb22f797187d0be2270f83500aab851536101b254bfa1eae10795709d283/pillow-12.2.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:2bb4a8d594eacdfc59d9e5ad972aa8afdd48d584ffd5f13a937a664c3e7db0ed", size = 5312185, upload-time = "2026-04-01T14:44:56.039Z" },
{ url = "https://files.pythonhosted.org/packages/ba/8c/1a9e46228571de18f8e28f16fabdfc20212a5d019f3e3303452b3f0a580d/pillow-12.2.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:80b2da48193b2f33ed0c32c38140f9d3186583ce7d516526d462645fd98660ae", size = 4695386, upload-time = "2026-04-01T14:44:58.663Z" },
{ url = "https://files.pythonhosted.org/packages/70/62/98f6b7f0c88b9addd0e87c217ded307b36be024d4ff8869a812b241d1345/pillow-12.2.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22db17c68434de69d8ecfc2fe821569195c0c373b25cccb9cbdacf2c6e53c601", size = 6280384, upload-time = "2026-04-01T14:45:01.5Z" },
{ url = "https://files.pythonhosted.org/packages/5e/03/688747d2e91cfbe0e64f316cd2e8005698f76ada3130d0194664174fa5de/pillow-12.2.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7b14cc0106cd9aecda615dd6903840a058b4700fcb817687d0ee4fc8b6e389be", size = 8091599, upload-time = "2026-04-01T14:45:04.5Z" },
{ url = "https://files.pythonhosted.org/packages/f6/35/577e22b936fcdd66537329b33af0b4ccfefaeabd8aec04b266528cddb33c/pillow-12.2.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cbeb542b2ebc6fcdacabf8aca8c1a97c9b3ad3927d46b8723f9d4f033288a0f", size = 6396021, upload-time = "2026-04-01T14:45:07.117Z" },
{ url = "https://files.pythonhosted.org/packages/11/8d/d2532ad2a603ca2b93ad9f5135732124e57811d0168155852f37fbce2458/pillow-12.2.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4bfd07bc812fbd20395212969e41931001fd59eb55a60658b0e5710872e95286", size = 7083360, upload-time = "2026-04-01T14:45:09.763Z" },
{ url = "https://files.pythonhosted.org/packages/5e/26/d325f9f56c7e039034897e7380e9cc202b1e368bfd04d4cbe6a441f02885/pillow-12.2.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9aba9a17b623ef750a4d11b742cbafffeb48a869821252b30ee21b5e91392c50", size = 6507628, upload-time = "2026-04-01T14:45:12.378Z" },
{ url = "https://files.pythonhosted.org/packages/5f/f7/769d5632ffb0988f1c5e7660b3e731e30f7f8ec4318e94d0a5d674eb65a4/pillow-12.2.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:deede7c263feb25dba4e82ea23058a235dcc2fe1f6021025dc71f2b618e26104", size = 7209321, upload-time = "2026-04-01T14:45:15.122Z" },
{ url = "https://files.pythonhosted.org/packages/6a/7a/c253e3c645cd47f1aceea6a8bacdba9991bf45bb7dfe927f7c893e89c93c/pillow-12.2.0-cp314-cp314-win32.whl", hash = "sha256:632ff19b2778e43162304d50da0181ce24ac5bb8180122cbe1bf4673428328c7", size = 6479723, upload-time = "2026-04-01T14:45:17.797Z" },
{ url = "https://files.pythonhosted.org/packages/cd/8b/601e6566b957ca50e28725cb6c355c59c2c8609751efbecd980db44e0349/pillow-12.2.0-cp314-cp314-win_amd64.whl", hash = "sha256:4e6c62e9d237e9b65fac06857d511e90d8461a32adcc1b9065ea0c0fa3a28150", size = 7217400, upload-time = "2026-04-01T14:45:20.529Z" },
{ url = "https://files.pythonhosted.org/packages/d6/94/220e46c73065c3e2951bb91c11a1fb636c8c9ad427ac3ce7d7f3359b9b2f/pillow-12.2.0-cp314-cp314-win_arm64.whl", hash = "sha256:b1c1fbd8a5a1af3412a0810d060a78b5136ec0836c8a4ef9aa11807f2a22f4e1", size = 2554835, upload-time = "2026-04-01T14:45:23.162Z" },
{ url = "https://files.pythonhosted.org/packages/b6/ab/1b426a3974cb0e7da5c29ccff4807871d48110933a57207b5a676cccc155/pillow-12.2.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:57850958fe9c751670e49b2cecf6294acc99e562531f4bd317fa5ddee2068463", size = 5314225, upload-time = "2026-04-01T14:45:25.637Z" },
{ url = "https://files.pythonhosted.org/packages/19/1e/dce46f371be2438eecfee2a1960ee2a243bbe5e961890146d2dee1ff0f12/pillow-12.2.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:d5d38f1411c0ed9f97bcb49b7bd59b6b7c314e0e27420e34d99d844b9ce3b6f3", size = 4698541, upload-time = "2026-04-01T14:45:28.355Z" },
{ url = "https://files.pythonhosted.org/packages/55/c3/7fbecf70adb3a0c33b77a300dc52e424dc22ad8cdc06557a2e49523b703d/pillow-12.2.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5c0a9f29ca8e79f09de89293f82fc9b0270bb4af1d58bc98f540cc4aedf03166", size = 6322251, upload-time = "2026-04-01T14:45:30.924Z" },
{ url = "https://files.pythonhosted.org/packages/1c/3c/7fbc17cfb7e4fe0ef1642e0abc17fc6c94c9f7a16be41498e12e2ba60408/pillow-12.2.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1610dd6c61621ae1cf811bef44d77e149ce3f7b95afe66a4512f8c59f25d9ebe", size = 8127807, upload-time = "2026-04-01T14:45:33.908Z" },
{ url = "https://files.pythonhosted.org/packages/ff/c3/a8ae14d6defd2e448493ff512fae903b1e9bd40b72efb6ec55ce0048c8ce/pillow-12.2.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a34329707af4f73cf1782a36cd2289c0368880654a2c11f027bcee9052d35dd", size = 6433935, upload-time = "2026-04-01T14:45:36.623Z" },
{ url = "https://files.pythonhosted.org/packages/6e/32/2880fb3a074847ac159d8f902cb43278a61e85f681661e7419e6596803ed/pillow-12.2.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8e9c4f5b3c546fa3458a29ab22646c1c6c787ea8f5ef51300e5a60300736905e", size = 7116720, upload-time = "2026-04-01T14:45:39.258Z" },
{ url = "https://files.pythonhosted.org/packages/46/87/495cc9c30e0129501643f24d320076f4cc54f718341df18cc70ec94c44e1/pillow-12.2.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:fb043ee2f06b41473269765c2feae53fc2e2fbf96e5e22ca94fb5ad677856f06", size = 6540498, upload-time = "2026-04-01T14:45:41.879Z" },
{ url = "https://files.pythonhosted.org/packages/18/53/773f5edca692009d883a72211b60fdaf8871cbef075eaa9d577f0a2f989e/pillow-12.2.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f278f034eb75b4e8a13a54a876cc4a5ab39173d2cdd93a638e1b467fc545ac43", size = 7239413, upload-time = "2026-04-01T14:45:44.705Z" },
{ url = "https://files.pythonhosted.org/packages/c9/e4/4b64a97d71b2a83158134abbb2f5bd3f8a2ea691361282f010998f339ec7/pillow-12.2.0-cp314-cp314t-win32.whl", hash = "sha256:6bb77b2dcb06b20f9f4b4a8454caa581cd4dd0643a08bacf821216a16d9c8354", size = 6482084, upload-time = "2026-04-01T14:45:47.568Z" },
{ url = "https://files.pythonhosted.org/packages/ba/13/306d275efd3a3453f72114b7431c877d10b1154014c1ebbedd067770d629/pillow-12.2.0-cp314-cp314t-win_amd64.whl", hash = "sha256:6562ace0d3fb5f20ed7290f1f929cae41b25ae29528f2af1722966a0a02e2aa1", size = 7225152, upload-time = "2026-04-01T14:45:50.032Z" },
{ url = "https://files.pythonhosted.org/packages/ff/6e/cf826fae916b8658848d7b9f38d88da6396895c676e8086fc0988073aaf8/pillow-12.2.0-cp314-cp314t-win_arm64.whl", hash = "sha256:aa88ccfe4e32d362816319ed727a004423aab09c5cea43c01a4b435643fa34eb", size = 2556579, upload-time = "2026-04-01T14:45:52.529Z" },
]
[[package]]
@@ -1170,20 +1170,20 @@ wheels = [
[[package]]
name = "python-dotenv"
version = "1.2.1"
version = "1.2.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221, upload-time = "2025-10-26T15:12:10.434Z" }
sdist = { url = "https://files.pythonhosted.org/packages/82/ed/0301aeeac3e5353ef3d94b6ec08bbcabd04a72018415dcb29e588514bba8/python_dotenv-1.2.2.tar.gz", hash = "sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3", size = 50135, upload-time = "2026-03-01T16:00:26.196Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" },
{ url = "https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a", size = 22101, upload-time = "2026-03-01T16:00:25.09Z" },
]
[[package]]
name = "python-multipart"
version = "0.0.22"
version = "0.0.27"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/94/01/979e98d542a70714b0cb2b6728ed0b7c46792b695e3eaec3e20711271ca3/python_multipart-0.0.22.tar.gz", hash = "sha256:7340bef99a7e0032613f56dc36027b959fd3b30a787ed62d310e951f7c3a3a58", size = 37612, upload-time = "2026-01-25T10:15:56.219Z" }
sdist = { url = "https://files.pythonhosted.org/packages/69/9b/f23807317a113dc36e74e75eb265a02dd1a4d9082abc3c1064acd22997c4/python_multipart-0.0.27.tar.gz", hash = "sha256:9870a6a8c5a20a5bf4f07c017bd1489006ff8836cff097b6933355ee2b49b602", size = 44043, upload-time = "2026-04-27T10:51:26.649Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/1b/d0/397f9626e711ff749a95d96b7af99b9c566a9bb5129b8e4c10fc4d100304/python_multipart-0.0.22-py3-none-any.whl", hash = "sha256:2b2cd894c83d21bf49d702499531c7bafd057d730c201782048f7945d82de155", size = 24579, upload-time = "2026-01-25T10:15:54.811Z" },
{ url = "https://files.pythonhosted.org/packages/99/78/4126abcbdbd3c559d43e0db7f7b9173fc6befe45d39a2856cc0b8ec2a5a6/python_multipart-0.0.27-py3-none-any.whl", hash = "sha256:6fccfad17a27334bd0193681b369f476eda3409f17381a2d65aa7df3f7275645", size = 29254, upload-time = "2026-04-27T10:51:24.997Z" },
]
[[package]]

View File

@@ -10,6 +10,20 @@ export interface AdapterItem {
sub_path?: string | null;
}
export interface AdapterUsage {
id: number;
name: string;
type: string;
path: string;
supported: boolean;
used_bytes?: number | null;
total_bytes?: number | null;
free_bytes?: number | null;
source?: string | null;
scope?: string | null;
reason?: string | null;
}
export interface AdapterTypeField {
key: string;
label: string;
@@ -31,4 +45,6 @@ export const adaptersApi = {
update: (id: number, payload: Omit<AdapterItem, 'id'>) => request<AdapterItem>(`/adapters/${id}`, { method: 'PUT', json: payload }),
remove: (id: number) => request<void>(`/adapters/${id}`, { method: 'DELETE' }),
available: () => request<AdapterTypeMeta[]>('/adapters/available'),
usage: () => request<AdapterUsage[]>('/adapters/usage'),
usageById: (id: number) => request<AdapterUsage>(`/adapters/${id}/usage`),
};

View File

@@ -71,7 +71,7 @@ async function request<T = any>(url: string, options: RequestOptions = {}): Prom
}
export { vfsApi, type VfsEntry, type DirListing } from './vfs';
export { adaptersApi, type AdapterItem, type AdapterTypeField, type AdapterTypeMeta } from './adapters';
export { adaptersApi, type AdapterItem, type AdapterTypeField, type AdapterTypeMeta, type AdapterUsage } from './adapters';
export { shareApi, type ShareInfo, type ShareInfoWithPassword } from './share';
export { offlineDownloadsApi, type OfflineDownloadTask, type OfflineDownloadCreate, type TaskProgress } from './offlineDownloads';
export default request;

View File

@@ -1,3 +1,5 @@
import request from './client';
export interface NoticeItem {
id: number;
title: string;
@@ -14,42 +16,17 @@ export interface GetNoticesResponse {
}
export interface GetNoticesParams {
version: string;
page?: number;
}
const FOXEL_CORE_BASE = 'https://foxel.cc';
function normalizeVersion(version: string) {
return (version || '').trim().replace(/^v/i, '');
}
function extractErrorMessage(data: any) {
if (!data) return '';
if (typeof data === 'string') return data;
if (typeof data.detail === 'string') return data.detail;
if (typeof data.code === 'string') return data.code;
if (typeof data.message === 'string') return data.message;
if (typeof data.msg === 'string') return data.msg;
return '';
}
export const noticesApi = {
list: async (params: GetNoticesParams): Promise<GetNoticesResponse> => {
const url = new URL('/api/notices', FOXEL_CORE_BASE);
url.searchParams.set('version', normalizeVersion(params.version));
url.searchParams.set('page', String(params.page ?? 1));
const resp = await fetch(url.href);
if (!resp.ok) {
let msg = resp.statusText || `Request failed: ${resp.status}`;
try {
const data = await resp.json();
msg = extractErrorMessage(data) || msg;
} catch { void 0; }
throw new Error(msg);
}
return await resp.json();
return await request<GetNoticesResponse>(`/notices?page=${params.page ?? 1}`);
},
getPopup: async (): Promise<NoticeItem | null> => {
return await request<NoticeItem | null>('/notices/popup');
},
dismiss: async (id: number): Promise<void> => {
await request(`/notices/${id}/dismiss`, { method: 'POST' });
},
};

View File

@@ -13,10 +13,14 @@ export interface DirListing {
path: string;
entries: VfsEntry[];
pagination?: {
total: number;
page: number;
mode?: 'paged' | 'cursor';
page_size: number;
pages: number;
total?: number;
page?: number;
pages?: number;
cursor?: string | null;
next_cursor?: string | null;
has_next?: boolean;
};
}
@@ -47,7 +51,7 @@ export interface SearchResponse {
}
export const vfsApi = {
list: (path: string, page: number = 1, pageSize: number = 50, sortBy: string = 'name', sortOrder: string = 'asc') => {
list: (path: string, page: number = 1, pageSize: number = 50, sortBy: string = 'name', sortOrder: string = 'asc', cursor?: string | null) => {
const cleaned = path.replace(/\\/g, '/');
const trimmed = cleaned === '/' ? '' : cleaned.replace(/^\/+/, '');
const params = new URLSearchParams({
@@ -56,6 +60,7 @@ export const vfsApi = {
sort_by: sortBy,
sort_order: sortOrder
});
if (cursor) params.set('cursor', cursor);
return request<DirListing>(`/fs/${encodeURI(trimmed)}?${params}`);
},
readFile: async (path: string) => {

View File

@@ -24,6 +24,16 @@ function getPluginStylePaths(plugin: PluginItem): string[] {
return styles.filter((s) => typeof s === 'string' && s.trim().length > 0);
}
function unloadPluginFrame(iframe: HTMLIFrameElement | null) {
if (!iframe) return;
try {
iframe.contentWindow?.postMessage({ type: 'foxel-plugin:unload' }, window.location.origin);
} catch {
void 0;
}
iframe.src = 'about:blank';
}
/**
* 插件宿主组件 - 文件打开模式
* 使用 iframe 隔离渲染与样式,避免插件污染宿主 DOM/CSS。
@@ -66,7 +76,10 @@ export const PluginAppHost: React.FC<PluginAppHostProps> = ({
};
window.addEventListener('message', onMessage);
return () => window.removeEventListener('message', onMessage);
return () => {
window.removeEventListener('message', onMessage);
unloadPluginFrame(iframeRef.current);
};
}, [plugin.key]);
return (
@@ -118,7 +131,10 @@ export const PluginAppOpenHost: React.FC<PluginAppOpenHostProps> = ({ plugin, on
};
window.addEventListener('message', onMessage);
return () => window.removeEventListener('message', onMessage);
return () => {
window.removeEventListener('message', onMessage);
unloadPluginFrame(iframeRef.current);
};
}, [plugin.key]);
return (

View File

@@ -7,11 +7,11 @@ import { useI18n } from '../i18n';
export interface NoticesModalProps {
open: boolean;
version: string;
onClose: () => void;
initialNotice?: NoticeItem | null;
}
const NoticesModal = memo(function NoticesModal({ open, version, onClose }: NoticesModalProps) {
const NoticesModal = memo(function NoticesModal({ open, onClose, initialNotice }: NoticesModalProps) {
const { token } = theme.useToken();
const { t } = useI18n();
const [items, setItems] = useState<NoticeItem[]>([]);
@@ -28,12 +28,15 @@ const NoticesModal = memo(function NoticesModal({ open, version, onClose }: Noti
if (mode === 'replace') setLoading(true);
else setLoadingMore(true);
try {
const resp = await noticesApi.list({ version, page: targetPage });
const resp = await noticesApi.list({ page: targetPage });
setPage(resp.page ?? targetPage);
setTotal(resp.total ?? 0);
setItems(prev => mode === 'replace' ? resp.items : [...prev, ...resp.items]);
const nextItems = mode === 'replace' && initialNotice && !resp.items.some(item => item.id === initialNotice.id)
? [initialNotice, ...resp.items]
: resp.items;
setItems(prev => mode === 'replace' ? nextItems : [...prev, ...resp.items]);
if (mode === 'replace') {
setSelectedId(resp.items[0]?.id ?? null);
setSelectedId(initialNotice?.id ?? resp.items[0]?.id ?? null);
} else {
setSelectedId(prev => prev ?? resp.items[0]?.id ?? null);
}
@@ -55,7 +58,7 @@ const NoticesModal = memo(function NoticesModal({ open, version, onClose }: Noti
setSelectedId(null);
loadPage(1, 'replace');
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [open, version]);
}, [open, initialNotice?.id]);
const formatTime = (ts: number) => {
try {
@@ -181,4 +184,3 @@ const NoticesModal = memo(function NoticesModal({ open, version, onClose }: Noti
});
export default NoticesModal;

View File

@@ -34,6 +34,7 @@
"English": "English",
"Default Language": "Default Language",
"Used when the user has not selected a language": "Used when the user has not selected a language",
"Default File View Mode": "Default File View Mode",
"Full Name": "Full Name",
"Email": "Email",
"Change Password": "Change Password",
@@ -239,6 +240,20 @@
"Type": "Type",
"Folder": "Folder",
"File": "File",
"Image": "Image",
"Video": "Video",
"Audio": "Audio",
"PDF": "PDF",
"Word": "Word",
"Spreadsheet": "Spreadsheet",
"Presentation": "Presentation",
"Archive": "Archive",
"Code": "Code",
"Markdown": "Markdown",
"Text": "Text",
"Font": "Font",
"Database": "Database",
"Config": "Config",
"Path": "Path",
"Path copied to clipboard": "Path copied to clipboard",
"Copy failed": "Copy failed",
@@ -514,6 +529,8 @@
"Unique name": "Unique name",
"Select adapter type": "Select adapter type",
"/ or /drive": "/ or /drive",
"Used Capacity": "Used Capacity",
"Capacity Usage": "Capacity Usage",
"Adapter Config": "Adapter Config",
"adapter.type.local": "Local Filesystem",
"adapter.type.foxel": "Foxel Node",

View File

@@ -57,6 +57,7 @@
"English": "English",
"Default Language": "默认语言",
"Used when the user has not selected a language": "用户未手动选择语言时使用",
"Default File View Mode": "默认文件展示方式",
"Full Name": "昵称",
"Email": "邮箱",
"Change Password": "修改密码",
@@ -258,6 +259,20 @@
"Type": "类型",
"Folder": "文件夹",
"File": "文件",
"Image": "图片",
"Video": "视频",
"Audio": "音频",
"PDF": "PDF",
"Word": "Word 文档",
"Spreadsheet": "表格",
"Presentation": "演示文稿",
"Archive": "压缩包",
"Code": "代码",
"Markdown": "Markdown",
"Text": "文本",
"Font": "字体",
"Database": "数据库",
"Config": "配置",
"Path": "路径",
"Path copied to clipboard": "路径已复制到剪贴板",
"Copy failed": "复制失败",
@@ -513,6 +528,8 @@
"Unique name": "唯一名称",
"Select adapter type": "选择适配器类型",
"/ or /drive": "/或/drive",
"Used Capacity": "已使用容量",
"Capacity Usage": "容量使用",
"Adapter Config": "适配器配置",
"adapter.type.local": "本地文件系统",
"adapter.type.foxel": "Foxel 节点",
@@ -773,7 +790,6 @@
"Users": "用户",
"Create User": "创建用户",
"Create Role": "创建角色",
"Edit": "编辑",
"Submit": "提交",
"Super Admin": "超级管理员",
"Disabled": "已禁用",
@@ -794,14 +810,11 @@
"Is Regex": "正则表达式",
"Priority": "优先级",
"Higher value = higher priority": "数值越大优先级越高",
"Permissions": "权限",
"System Permissions": "系统权限",
"Download and preview files": "下载和预览文件",
"Upload and modify files": "上传和修改文件",
"Delete files and folders": "删除文件和目录",
"Create share links": "创建分享链接",
"Share": "分享",
"Delete": "删除",
"permission.category.system": "系统",
"permission.category.adapter": "存储适配器"
}

View File

@@ -1,6 +1,6 @@
import { Layout, Button, Dropdown, theme, Flex, Avatar, Typography, Tooltip, Modal, QRCode } from 'antd';
import { SearchOutlined, MenuUnfoldOutlined, LogoutOutlined, UserOutlined, RobotOutlined, BellOutlined, QrcodeOutlined } from '@ant-design/icons';
import { memo, useMemo, useState } from 'react';
import { memo, useEffect, useMemo, useState } from 'react';
import SearchDialog from './SearchDialog.tsx';
import { authApi } from '../api/auth.ts';
import { useNavigate } from 'react-router';
@@ -9,8 +9,8 @@ import LanguageSwitcher from '../components/LanguageSwitcher';
import { useAuth } from '../contexts/AuthContext';
import ProfileModal from '../components/ProfileModal';
import NoticesModal from '../components/NoticesModal';
import { useSystemStatus } from '../contexts/SystemContext';
import useResponsive from '../hooks/useResponsive';
import { noticesApi, type NoticeItem } from '../api/notices';
const { Header } = Layout;
@@ -30,7 +30,8 @@ const TopHeader = memo(function TopHeader({ collapsed, onToggle, onOpenAiAgent,
const [profileOpen, setProfileOpen] = useState(false);
const [clientAuthOpen, setClientAuthOpen] = useState(false);
const [noticesOpen, setNoticesOpen] = useState(false);
const status = useSystemStatus();
const [popupNotice, setPopupNotice] = useState<NoticeItem | null>(null);
const [popupMode, setPopupMode] = useState(false);
const { isMobile } = useResponsive();
const clientAuthPayload = useMemo(() => JSON.stringify({
base_url: window.location.origin,
@@ -44,6 +45,35 @@ const TopHeader = memo(function TopHeader({ collapsed, onToggle, onOpenAiAgent,
const openProfile = () => setProfileOpen(true);
const openClientAuth = () => setClientAuthOpen(true);
const openNotices = () => {
setPopupMode(false);
setNoticesOpen(true);
};
const closeNotices = async () => {
const shouldDismiss = popupMode && popupNotice;
setNoticesOpen(false);
setPopupMode(false);
if (shouldDismiss) {
try {
await noticesApi.dismiss(popupNotice.id);
setPopupNotice(null);
} catch { void 0; }
}
};
useEffect(() => {
let cancelled = false;
if (!authToken) return;
noticesApi.getPopup().then((notice) => {
if (cancelled || !notice) return;
setPopupNotice(notice);
setPopupMode(true);
setNoticesOpen(true);
}).catch(() => void 0);
return () => {
cancelled = true;
};
}, [authToken]);
return (
<Header
@@ -84,7 +114,7 @@ const TopHeader = memo(function TopHeader({ collapsed, onToggle, onOpenAiAgent,
type="text"
icon={<BellOutlined />}
aria-label={t('Notices')}
onClick={() => setNoticesOpen(true)}
onClick={openNotices}
style={{ paddingInline: 8, height: 40 }}
/>
</Tooltip>
@@ -133,7 +163,7 @@ const TopHeader = memo(function TopHeader({ collapsed, onToggle, onOpenAiAgent,
<QRCode value={clientAuthPayload} size={220} />
</Flex>
</Modal>
<NoticesModal open={noticesOpen} onClose={() => setNoticesOpen(false)} version={status?.version || ''} />
<NoticesModal open={noticesOpen} onClose={closeNotices} initialNotice={popupMode ? popupNotice : null} />
</Flex>
</Header>
);

View File

@@ -1,12 +1,29 @@
import { memo, useState, useEffect, useCallback } from 'react';
import { Table, Button, Space, Drawer, Form, Input, Switch, message, Typography, Popconfirm, Select } from 'antd';
import PageCard from '../components/PageCard';
import { adaptersApi, type AdapterItem, type AdapterTypeMeta } from '../api/client';
import { adaptersApi, type AdapterItem, type AdapterTypeMeta, type AdapterUsage } from '../api/client';
import { useI18n } from '../i18n';
const formatBytes = (bytes?: number | null) => {
if (bytes === null || bytes === undefined) return '-';
if (bytes === 0) return '0 B';
const units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB'];
const index = Math.min(Math.floor(Math.log(bytes) / Math.log(1024)), units.length - 1);
const value = bytes / (1024 ** index);
return `${value.toFixed(value >= 10 || index === 0 ? 0 : 1)} ${units[index]}`;
};
const formatUsage = (usage?: AdapterUsage) => {
if (!usage?.supported || usage.used_bytes === null || usage.used_bytes === undefined) return '-';
const used = formatBytes(usage.used_bytes);
if (usage.total_bytes === null || usage.total_bytes === undefined) return used;
return `${used} / ${formatBytes(usage.total_bytes)}`;
};
const AdaptersPage = memo(function AdaptersPage() {
const [loading, setLoading] = useState(false);
const [data, setData] = useState<AdapterItem[]>([]);
const [usageMap, setUsageMap] = useState<Record<number, AdapterUsage>>({});
const [open, setOpen] = useState(false);
const [editing, setEditing] = useState<AdapterItem | null>(null);
const [form] = Form.useForm();
@@ -16,12 +33,14 @@ const AdaptersPage = memo(function AdaptersPage() {
const fetchList = useCallback(async () => {
setLoading(true);
try {
const [list, types] = await Promise.all([
const [list, types, usages] = await Promise.all([
adaptersApi.list(),
adaptersApi.available()
adaptersApi.available(),
adaptersApi.usage()
]);
setData(list);
setAvailableTypes(types);
setUsageMap(Object.fromEntries(usages.map(item => [item.id, item])));
} catch (e: any) {
message.error(e.message || t('Load failed'));
} finally {
@@ -137,11 +156,47 @@ const AdaptersPage = memo(function AdaptersPage() {
return label === key ? type : label;
}, [t]);
const usageSummary = Object.values(usageMap).reduce(
(acc, usage) => {
if (!usage.supported) return acc;
if (usage.used_bytes !== null && usage.used_bytes !== undefined) {
acc.used += usage.used_bytes;
acc.hasUsed = true;
}
if (usage.total_bytes !== null && usage.total_bytes !== undefined) {
acc.total += usage.total_bytes;
acc.hasTotal = true;
}
return acc;
},
{ used: 0, total: 0, hasUsed: false, hasTotal: false }
);
const pageTitle = (
<Space size={12} wrap>
<span>{t('Storage Adapters')}</span>
{(usageSummary.hasUsed || usageSummary.hasTotal) && (
<Typography.Text type="secondary" style={{ fontSize: 13, fontWeight: 400 }}>
{usageSummary.hasUsed ? formatBytes(usageSummary.used) : '-'}
{' / '}
{usageSummary.hasTotal ? formatBytes(usageSummary.total) : '-'}
</Typography.Text>
)}
</Space>
);
const columns = [
{ title: t('Name'), dataIndex: 'name' },
{ title: t('Type'), dataIndex: 'type', width: 140, render: (value: string) => renderTypeLabel(value) },
{ title: t('Mount Path'), dataIndex: 'path', width: 140, render: (v: string) => v || '-' },
{ title: t('Sub Path'), dataIndex: 'sub_path', width: 140, render: (v: string) => v || '-' },
{
title: t('Capacity Usage'),
width: 180,
render: (_: any, rec: AdapterItem) => {
return formatUsage(usageMap[rec.id]);
}
},
{
title: t('Enabled'),
dataIndex: 'enabled',
@@ -208,7 +263,7 @@ const AdaptersPage = memo(function AdaptersPage() {
return (
<PageCard
title={t('Storage Adapters')}
title={pageTitle}
extra={
<Space wrap>
<Button onClick={fetchList} loading={loading}>{t('Refresh')}</Button>

View File

@@ -1,6 +1,6 @@
import { memo, useCallback, useEffect, useRef, useState } from 'react';
import { useParams } from 'react-router';
import { theme, Pagination } from 'antd';
import { Button, Space, theme, Pagination } from 'antd';
import { useFileExplorer } from './hooks/useFileExplorer';
import { useFileSelection } from './hooks/useFileSelection';
import { useFileActions } from './hooks/useFileActions.tsx';
@@ -28,6 +28,7 @@ import { MoveCopyModal } from './components/Modals/MoveCopyModal';
import { SearchResultsView } from './components/SearchResultsView';
import type { ViewMode } from './types';
import { vfsApi, type VfsEntry } from '../../api/client';
import { getPublicConfig } from '../../api/config';
import { LoadingSkeleton } from './components/LoadingSkeleton';
import useResponsive from '../../hooks/useResponsive';
@@ -42,7 +43,7 @@ const FileExplorerPage = memo(function FileExplorerPage() {
const skeletonTimerRef = useRef<number | null>(null);
// --- Hooks ---
const { path, entries, loading, pagination, processorTypes, sortBy, sortOrder, load, navigateTo, goUp, handlePaginationChange, refresh, handleSortChange } = useFileExplorer(navKey);
const { path, entries, loading, pagination, processorTypes, sortBy, sortOrder, load, navigateTo, goUp, handlePaginationChange, refresh, handleSortChange, goCursorNext, goCursorPrev } = useFileExplorer(navKey);
const { selectedEntries, handleSelect, handleSelectRange, clearSelection, setSelectedEntries } = useFileSelection();
const { openFileWithDefaultApp, confirmOpenWithApp } = useAppWindows();
const { ctxMenu, blankCtxMenu, openContextMenu, openBlankContextMenu, openContextMenuAt, closeContextMenus } = useContextMenu();
@@ -106,6 +107,21 @@ const FileExplorerPage = memo(function FileExplorerPage() {
load(routePath, 1, pagination.pageSize, sortBy, sortOrder);
}, [routePath, navKey, load, pagination.pageSize, sortBy, sortOrder]);
useEffect(() => {
let mounted = true;
getPublicConfig()
.then((cfg) => {
if (!mounted || isMobile) return;
setViewMode(cfg.DEFAULT_FILE_VIEW_MODE === 'list' ? 'list' : 'grid');
})
.catch(() => {
if (mounted && !isMobile) setViewMode('grid');
});
return () => {
mounted = false;
};
}, [isMobile]);
useEffect(() => {
if (isMobile && viewMode !== 'grid') {
setViewMode('grid');
@@ -205,8 +221,10 @@ const FileExplorerPage = memo(function FileExplorerPage() {
}
return joined.startsWith('/') ? joined : `/${joined}`;
}, [entryBasePath]);
const showFsPagination = !isSearching && pagination.total > 0;
const showFsPagination = !isSearching && pagination.mode === 'paged' && pagination.total > 0;
const showCursorPagination = !isSearching && pagination.mode === 'cursor' && (pagination.cursorHistory.length > 0 || pagination.hasNext);
const shouldReserveBottomBar = showSearchPagination || showFsPagination;
const shouldReserveAnyBottomBar = shouldReserveBottomBar || showCursorPagination;
const handleDragEnter = (e: React.DragEvent) => {
e.preventDefault();
@@ -282,6 +300,7 @@ const FileExplorerPage = memo(function FileExplorerPage() {
viewMode={viewMode}
sortBy={sortBy}
sortOrder={sortOrder}
paginationMode={pagination.mode}
isMobile={isMobile}
onGoUp={goUp}
onNavigate={navigateTo}
@@ -309,7 +328,7 @@ const FileExplorerPage = memo(function FileExplorerPage() {
onChange={handleDirectoryInputChange}
/>
<div style={{ flex: 1, overflow: 'auto', minHeight: 0, paddingBottom: shouldReserveBottomBar ? '80px' : '0' }} onContextMenu={isMobile ? undefined : openBlankContextMenu}>
<div style={{ flex: 1, overflow: 'auto', minHeight: 0, paddingBottom: shouldReserveAnyBottomBar ? '80px' : '0' }} onContextMenu={isMobile ? undefined : openBlankContextMenu}>
{isSearching ? (
<SearchResultsView
viewMode={viewMode}
@@ -364,6 +383,19 @@ const FileExplorerPage = memo(function FileExplorerPage() {
</div>
)}
{showCursorPagination && (
<div style={{ position: 'absolute', bottom: 0, left: 0, right: 0, padding: '12px 16px', background: token.colorBgContainer, borderTop: `1px solid ${token.colorBorderSecondary}`, textAlign: 'center', zIndex: 10 }}>
<Space>
<Button size="small" onClick={goCursorPrev} disabled={pagination.cursorHistory.length === 0 || loading}>
</Button>
<Button size="small" type="primary" onClick={goCursorNext} disabled={!pagination.hasNext || loading}>
</Button>
</Space>
</div>
)}
{showSearchPagination && (
<div style={{ position: 'absolute', bottom: 0, left: 0, right: 0, padding: '12px 16px', background: token.colorBgContainer, borderTop: `1px solid ${token.colorBorderSecondary}`, textAlign: 'center', zIndex: 10 }}>
<Pagination

View File

@@ -19,6 +19,45 @@ interface FileListViewProps {
onContextMenu: (e: React.MouseEvent, entry: VfsEntry) => void;
}
const fileTypeGroups: Array<{ key: string; exts: string[] }> = [
{ key: 'Image', exts: ['png', 'jpg', 'jpeg', 'gif', 'webp', 'svg', 'bmp', 'ico', 'tiff'] },
{ key: 'Video', exts: ['mp4', 'avi', 'mov', 'wmv', 'flv', 'mkv', 'webm', 'm4v', '3gp'] },
{ key: 'Audio', exts: ['mp3', 'wav', 'flac', 'aac', 'ogg', 'wma', 'm4a'] },
{ key: 'PDF', exts: ['pdf'] },
{ key: 'Word', exts: ['doc', 'docx'] },
{ key: 'Spreadsheet', exts: ['xls', 'xlsx', 'csv'] },
{ key: 'Presentation', exts: ['ppt', 'pptx'] },
{ key: 'Archive', exts: ['zip', 'rar', '7z', 'tar', 'gz', 'bz2', 'xz'] },
{ key: 'Code', exts: ['js', 'jsx', 'ts', 'tsx', 'vue', 'html', 'htm', 'css', 'scss', 'sass', 'less', 'json', 'xml', 'yaml', 'yml', 'py', 'java', 'cpp', 'cc', 'cxx', 'c', 'h', 'hpp', 'hxx', 'php', 'rb', 'go', 'rs', 'rust', 'swift', 'kt', 'scala', 'clj', 'cljs', 'cs', 'vb', 'fs', 'pl', 'pm', 'r', 'lua', 'dart', 'elm'] },
{ key: 'Markdown', exts: ['md', 'markdown'] },
{ key: 'Text', exts: ['txt', 'log', 'ini', 'cfg', 'conf', 'sh', 'bash', 'zsh', 'fish', 'ps1', 'bat', 'cmd', 'dockerfile', 'makefile', 'gradle', 'cmake', 'gitignore', 'gitattributes', 'editorconfig', 'prettierrc'] },
{ key: 'Font', exts: ['ttf', 'otf', 'woff', 'woff2', 'eot'] },
{ key: 'Database', exts: ['db', 'sqlite', 'sql'] },
{ key: 'Config', exts: ['env', 'config', 'properties', 'toml'] },
];
const formatFileSize = (size: number) => {
if (!Number.isFinite(size) || size < 0) return '-';
const units = ['B', 'KB', 'MB', 'GB'];
let value = size;
let unitIndex = 0;
while (value >= 1024 && unitIndex < units.length - 1) {
value /= 1024;
unitIndex += 1;
}
if (unitIndex === 0) return `${value} ${units[unitIndex]}`;
return `${value.toFixed(2)} ${units[unitIndex]}`;
};
const getFileTypeLabel = (entry: VfsEntry, t: (key: string) => string) => {
if (entry.type === 'mount') return t('Mount Point');
if (entry.is_dir) return t('Folder');
const ext = entry.name.split('.').pop()?.toLowerCase() || '';
const group = fileTypeGroups.find(item => item.exts.includes(ext));
return t(group?.key || 'File');
};
export const FileListView: React.FC<FileListViewProps> = ({
entries,
selectedEntries,
@@ -63,7 +102,8 @@ export const FileListView: React.FC<FileListViewProps> = ({
</span>
)
},
{ title: t('Size'), dataIndex: 'size', width: 100, render: (v: number, r: VfsEntry) => r.is_dir ? '-' : v },
{ title: t('Type'), key: 'fileType', width: 110, render: (_: any, r: VfsEntry) => getFileTypeLabel(r, t) },
{ title: t('Size'), dataIndex: 'size', width: 120, render: (v: number, r: VfsEntry) => r.is_dir ? '-' : formatFileSize(v) },
{ title: t('Modified Time'), dataIndex: 'mtime', width: 160, render: (v: number) => v ? new Date(v * 1000).toLocaleString() : '-' },
{
title: t('Actions'),

View File

@@ -12,6 +12,7 @@ interface HeaderProps {
viewMode: ViewMode;
sortBy: string;
sortOrder: string;
paginationMode?: 'paged' | 'cursor';
isMobile?: boolean;
onGoUp: () => void;
onNavigate: (path: string) => void;
@@ -30,6 +31,7 @@ export const Header: React.FC<HeaderProps> = ({
viewMode,
sortBy,
sortOrder,
paginationMode = 'paged',
isMobile = false,
onGoUp,
onNavigate,
@@ -82,6 +84,7 @@ export const Header: React.FC<HeaderProps> = ({
setEditingPath(false);
setPathInputValue('');
};
const sortDisabled = paginationMode === 'cursor';
const renderBreadcrumb = () => {
if (editingPath) {
@@ -154,6 +157,7 @@ export const Header: React.FC<HeaderProps> = ({
{
key: 'sort',
label: t('Sort By') + `: ${t(sortBy === 'mtime' ? 'Modified Time' : sortBy === 'size' ? 'Size' : 'Name')}`,
disabled: sortDisabled,
children: [
{ key: 'sort-name', label: t('Name'), onClick: () => onSortChange('name', sortOrder) },
{ key: 'sort-size', label: t('Size'), onClick: () => onSortChange('size', sortOrder) },
@@ -164,6 +168,7 @@ export const Header: React.FC<HeaderProps> = ({
key: 'sort-order',
label: sortOrder === 'asc' ? t('Ascending') : t('Descending'),
icon: sortOrder === 'asc' ? <ArrowUpOutlined /> : <ArrowDownOutlined />,
disabled: sortDisabled,
onClick: () => onSortChange(sortBy, sortOrder === 'asc' ? 'desc' : 'asc'),
},
];
@@ -230,6 +235,7 @@ export const Header: React.FC<HeaderProps> = ({
<Select
size="small"
value={sortBy}
disabled={sortDisabled}
onChange={(val) => onSortChange(val, sortOrder)}
style={{ width: 112 }}
options={[
@@ -240,6 +246,7 @@ export const Header: React.FC<HeaderProps> = ({
/>
<Button
size="small"
disabled={sortDisabled}
icon={sortOrder === 'asc' ? <ArrowUpOutlined /> : <ArrowDownOutlined />}
onClick={() => onSortChange(sortBy, sortOrder === 'asc' ? 'desc' : 'asc')}
/>

View File

@@ -7,10 +7,14 @@ type ExplorerSnapshot = {
path: string;
entries: VfsEntry[];
pagination?: {
total: number;
page: number;
mode?: 'paged' | 'cursor';
page_size: number;
pages: number;
total?: number;
page?: number;
pages?: number;
cursor?: string | null;
next_cursor?: string | null;
has_next?: boolean;
};
sortBy: string;
sortOrder: string;
@@ -30,6 +34,11 @@ export function useFileExplorer(navKey: string) {
current: 1,
pageSize: 50,
total: 0,
mode: 'paged' as 'paged' | 'cursor',
cursor: null as string | null,
nextCursor: null as string | null,
cursorHistory: [] as (string | null)[],
hasNext: false,
showSizeChanger: true,
showQuickJumper: true,
showTotal: (total: number, range: [number, number]) => `${total} ${'items'} ${range[0]}-${range[1]}`,
@@ -38,23 +47,29 @@ export function useFileExplorer(navKey: string) {
const [sortBy, setSortBy] = useState('name');
const [sortOrder, setSortOrder] = useState('asc');
const load = useCallback(async (p: string, page: number = 1, pageSize: number = 50, sb = sortBy, so = sortOrder) => {
const load = useCallback(async (p: string, page: number = 1, pageSize: number = 50, sb = sortBy, so = sortOrder, cursor?: string | null, cursorHistory: (string | null)[] = []) => {
const canonical = p === '' ? '/' : (p.startsWith('/') ? p : '/' + p);
setLoading(true);
try {
// Load entries and processor types concurrently
const [res, processors] = await Promise.all([
vfsApi.list(canonical === '/' ? '' : canonical, page, pageSize, sb, so),
vfsApi.list(canonical === '/' ? '' : canonical, page, pageSize, sb, so, cursor),
processorsApi.list()
]);
setEntries(res.entries);
const resolvedPath = res.path || canonical;
setPath(resolvedPath);
const pageMode = res.pagination?.mode || 'paged';
setPagination(prev => ({
...prev,
current: res.pagination!.page,
pageSize: res.pagination!.page_size,
total: res.pagination!.total
mode: pageMode,
current: res.pagination?.page || page,
pageSize: res.pagination?.page_size || pageSize,
total: res.pagination?.total || 0,
cursor: res.pagination?.cursor || null,
nextCursor: res.pagination?.next_cursor || null,
hasNext: Boolean(res.pagination?.has_next),
cursorHistory: pageMode === 'cursor' ? cursorHistory : [],
}));
setProcessorTypes(processors);
if (typeof window !== 'undefined') {
@@ -94,8 +109,31 @@ export function useFileExplorer(navKey: string) {
load(path, page, pageSize, sortBy, sortOrder);
};
const goCursorNext = () => {
if (!pagination.nextCursor) return;
load(path, 1, pagination.pageSize, sortBy, sortOrder, pagination.nextCursor, [
...pagination.cursorHistory,
pagination.cursor,
]);
};
const goCursorPrev = () => {
if (pagination.cursorHistory.length === 0) return;
const nextHistory = pagination.cursorHistory.slice(0, -1);
const prevCursor = pagination.cursorHistory[pagination.cursorHistory.length - 1];
load(path, 1, pagination.pageSize, sortBy, sortOrder, prevCursor, nextHistory);
};
const refresh = () => {
load(path, pagination.current, pagination.pageSize, sortBy, sortOrder);
load(
path,
pagination.current,
pagination.pageSize,
sortBy,
sortOrder,
pagination.mode === 'cursor' ? pagination.cursor : null,
pagination.mode === 'cursor' ? pagination.cursorHistory : [],
);
}
const handleSortChange = (sb: string, so: string) => {
@@ -117,6 +155,8 @@ export function useFileExplorer(navKey: string) {
goUp,
handlePaginationChange,
refresh,
handleSortChange
handleSortChange,
goCursorNext,
goCursorPrev,
};
}

View File

@@ -43,6 +43,30 @@ const THEME_KEYS = {
CSS: 'THEME_CUSTOM_CSS',
};
// Fallback values used when a config key has no stored value.
// Order matters: the spread seeds defaults from APP_CONFIG_KEYS first, and
// the explicit entries below override any key of the same name.
// NOTE(review): THEME_KEYS entries use computed keys — assumed not to collide
// with the literal keys here; confirm against the THEME_KEYS declaration.
const CONFIG_DEFAULTS: Record<string, string> = {
  // Declarative app-level keys; missing defaults normalize to ''.
  ...Object.fromEntries(APP_CONFIG_KEYS.map(({ key, default: def }) => [key, def ?? ''])),
  // General application behavior.
  APP_DEFAULT_LANGUAGE: 'zh',
  AUTH_ALLOW_REGISTER: 'false',
  AUTH_DEFAULT_REGISTER_ROLE_ID: '',
  DEFAULT_FILE_VIEW_MODE: 'grid',
  // Theme settings (keys come from the THEME_KEYS map).
  [THEME_KEYS.MODE]: 'light',
  [THEME_KEYS.PRIMARY]: '#111111',
  [THEME_KEYS.RADIUS]: '10',
  [THEME_KEYS.TOKENS]: '',
  [THEME_KEYS.CSS]: '',
  // Protocol mappings; '1' means enabled.
  WEBDAV_MAPPING_ENABLED: '1',
  S3_MAPPING_ENABLED: '1',
  S3_MAPPING_BUCKET: 'foxel',
  S3_MAPPING_REGION: '',
  S3_MAPPING_BASE_PATH: '/',
  S3_MAPPING_ACCESS_KEY: '',
  S3_MAPPING_SECRET_KEY: '',
  // Email settings default to unset.
  EMAIL_CONFIG: '',
  EMAIL_PASSWORD_RESET_TEMPLATE: '',
};
/** Normalize an arbitrary config value to its string form; null/undefined become ''. */
const stringifyConfigValue = (value: unknown): string => (value == null ? '' : String(value));
export default function SystemSettingsPage({ tabKey, onTabNavigate }: SystemSettingsPageProps) {
const [loading, setLoading] = useState(false);
const [config, setConfigState] = useState<Record<string, string> | null>(null);
@@ -69,16 +93,21 @@ export default function SystemSettingsPage({ tabKey, onTabNavigate }: SystemSett
const handleSave = async (values: Record<string, unknown>): Promise<boolean> => {
setLoading(true);
try {
for (const [key, value] of Object.entries(values)) {
await setConfig(key, String(value ?? ''));
}
message.success(t('Saved successfully'));
const stringValues = Object.fromEntries(
Object.entries(values).map(([key, value]) => [key, String(value ?? '')]),
const currentConfig = config ?? {};
const changedValues = Object.fromEntries(
Object.entries(values)
.map(([key, value]) => [key, stringifyConfigValue(value)] as const)
.filter(([key, value]) => value !== (currentConfig[key] ?? CONFIG_DEFAULTS[key] ?? '')),
) as Record<string, string>;
setConfigState((prev) => ({ ...(prev ?? {}), ...stringValues }));
for (const [key, value] of Object.entries(changedValues)) {
await setConfig(key, value);
}
message.success(t('Saved successfully'));
setConfigState((prev) => ({ ...(prev ?? {}), ...changedValues }));
// trigger theme refresh if related keys changed
if (Object.keys(values).some(k => Object.values(THEME_KEYS).includes(k))) {
if (Object.keys(changedValues).some(k => Object.values(THEME_KEYS).includes(k))) {
await refreshTheme();
}
return true;

View File

@@ -54,6 +54,7 @@ export default function AppSettingsTab({
return {
...Object.fromEntries(configKeys.map(({ key, default: def }) => [key, config[key] ?? def ?? ''])),
APP_DEFAULT_LANGUAGE: normalizeLang(config.APP_DEFAULT_LANGUAGE, 'zh'),
DEFAULT_FILE_VIEW_MODE: config.DEFAULT_FILE_VIEW_MODE === 'list' ? 'list' : 'grid',
AUTH_ALLOW_REGISTER: allowRegister,
AUTH_DEFAULT_REGISTER_ROLE_ID: Number.isFinite(roleId) ? roleId : undefined,
};
@@ -70,6 +71,7 @@ export default function AppSettingsTab({
}
const defaultLanguage = normalizeLang(vals.APP_DEFAULT_LANGUAGE, 'zh');
payload.APP_DEFAULT_LANGUAGE = defaultLanguage;
payload.DEFAULT_FILE_VIEW_MODE = vals.DEFAULT_FILE_VIEW_MODE === 'list' ? 'list' : 'grid';
const allow = !!vals.AUTH_ALLOW_REGISTER;
payload.AUTH_ALLOW_REGISTER = allow ? 'true' : 'false';
if (allow) {
@@ -103,6 +105,19 @@ export default function AppSettingsTab({
/>
</Form.Item>
<Form.Item
name="DEFAULT_FILE_VIEW_MODE"
label={t('Default File View Mode')}
>
<Select
size="large"
options={[
{ value: 'grid', label: t('Grid') },
{ value: 'list', label: t('List') },
]}
/>
</Form.Item>
<Divider titlePlacement="left">{t('Registration Settings')}</Divider>
<Alert

View File

@@ -364,12 +364,27 @@ async function main() {
await mountError();
window.addEventListener('beforeunload', () => {
// Run the plugin-supplied cleanup hook at most once. Teardown failures are
// deliberately swallowed so an exploding plugin cannot break page unload;
// the hook is cleared afterwards so repeat triggers are no-ops.
const runCleanup = () => {
  try {
    if (typeof cleanup === 'function') cleanup();
  } catch {
    // best-effort teardown: ignore errors
  }
  cleanup = null;
};
// Listen for an explicit unload signal from the hosting shell. Only messages
// from the same origin AND from the parent window are honored, so arbitrary
// cross-origin frames cannot force a plugin teardown.
window.addEventListener('message', (ev) => {
  if (ev.origin !== window.location.origin) return;
  if (ev.source !== window.parent) return;
  const data = ev.data as any;
  if (!data || typeof data !== 'object') return;
  if (data.type !== 'foxel-plugin:unload') return;
  // Tear down the plugin and blank its mount point.
  runCleanup();
  root.innerHTML = '';
});
// Also clean up on real page unload (navigation/close); runCleanup is
// idempotent, so firing after a prior message-triggered unload is safe.
window.addEventListener('beforeunload', () => {
  runCleanup();
});
}