mirror of
https://github.com/DrizzleTime/Foxel.git
synced 2026-05-08 01:03:20 +08:00
Compare commits
26 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
873ef7aee5 | ||
|
|
dd2400c3ef | ||
|
|
e0d6039a1a | ||
|
|
676dacce41 | ||
|
|
c514e17803 | ||
|
|
54821f78c6 | ||
|
|
1f608974dc | ||
|
|
a8737b883e | ||
|
|
dcc8aa139e | ||
|
|
1c216a7516 | ||
|
|
d8425f1cdd | ||
|
|
e235845737 | ||
|
|
6981bb8444 | ||
|
|
1101273077 | ||
|
|
398dbcf8ae | ||
|
|
0609cf6971 | ||
|
|
93c4d7a748 | ||
|
|
a0fe35b6e9 | ||
|
|
3efc286ef8 | ||
|
|
013c14b767 | ||
|
|
da7db4ff2a | ||
|
|
4038a7292a | ||
|
|
f96a4dce11 | ||
|
|
ef1fe1cce8 | ||
|
|
a5d606387f | ||
|
|
e6402661d6 |
4
.gitignore
vendored
4
.gitignore
vendored
@@ -31,4 +31,6 @@ lerna-debug.log*
|
||||
*.ntvs*
|
||||
*.njsproj
|
||||
*.sln
|
||||
*.sw?
|
||||
*.sw?
|
||||
|
||||
/client
|
||||
@@ -51,6 +51,29 @@ async def available_adapter_types(
|
||||
return success(data)
|
||||
|
||||
|
||||
@router.get("/usage")
|
||||
@audit(action=AuditAction.READ, description="获取适配器容量使用情况")
|
||||
@require_system_permission(AdapterPermission.LIST)
|
||||
async def list_adapter_usages(
|
||||
request: Request,
|
||||
current_user: Annotated[User, Depends(get_current_active_user)]
|
||||
):
|
||||
usages = await AdapterService.list_adapter_usages()
|
||||
return success(usages)
|
||||
|
||||
|
||||
@router.get("/{adapter_id}/usage")
|
||||
@audit(action=AuditAction.READ, description="获取单个适配器容量使用情况")
|
||||
@require_system_permission(AdapterPermission.LIST)
|
||||
async def get_adapter_usage(
|
||||
request: Request,
|
||||
adapter_id: int,
|
||||
current_user: Annotated[User, Depends(get_current_active_user)]
|
||||
):
|
||||
usage = await AdapterService.get_adapter_usage(adapter_id)
|
||||
return success(usage)
|
||||
|
||||
|
||||
@router.get("/{adapter_id}")
|
||||
@audit(action=AuditAction.READ, description="获取适配器详情")
|
||||
@require_system_permission(AdapterPermission.LIST)
|
||||
|
||||
@@ -21,3 +21,8 @@ class BaseAdapter(Protocol):
|
||||
async def stream_file(self, root: str, rel: str, range_header: str | None): ...
|
||||
async def stat_file(self, root: str, rel: str): ...
|
||||
def get_effective_root(self, sub_path: str | None) -> str: ...
|
||||
|
||||
|
||||
@runtime_checkable
|
||||
class UsageCapableAdapter(Protocol):
|
||||
async def get_usage(self, root: str) -> Dict: ...
|
||||
|
||||
@@ -455,6 +455,23 @@ class DropboxAdapter:
|
||||
|
||||
return StreamingResponse(iterator(), status_code=resp.status_code, headers=out_headers, media_type=content_type)
|
||||
|
||||
async def get_usage(self, root: str):
|
||||
resp = await self._api_json("/users/get_space_usage", {})
|
||||
resp.raise_for_status()
|
||||
payload = resp.json() or {}
|
||||
allocation = payload.get("allocation") or {}
|
||||
allocated = allocation.get("allocated")
|
||||
used = payload.get("used")
|
||||
total = int(allocated) if allocated is not None else None
|
||||
used_bytes = int(used) if used is not None else None
|
||||
return {
|
||||
"used_bytes": used_bytes,
|
||||
"total_bytes": total,
|
||||
"free_bytes": total - used_bytes if total is not None and used_bytes is not None else None,
|
||||
"source": "dropbox",
|
||||
"scope": "account",
|
||||
}
|
||||
|
||||
|
||||
ADAPTER_TYPE = "dropbox"
|
||||
CONFIG_SCHEMA = [
|
||||
@@ -468,4 +485,3 @@ CONFIG_SCHEMA = [
|
||||
|
||||
|
||||
def ADAPTER_FACTORY(rec): return DropboxAdapter(rec)
|
||||
|
||||
|
||||
@@ -541,6 +541,22 @@ class GoogleDriveAdapter:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
async def get_usage(self, root: str):
|
||||
resp = await self._request("GET", "/about", params={"fields": "storageQuota"})
|
||||
resp.raise_for_status()
|
||||
quota = (resp.json() or {}).get("storageQuota") or {}
|
||||
limit = quota.get("limit")
|
||||
usage = quota.get("usage")
|
||||
total = int(limit) if limit is not None else None
|
||||
used = int(usage) if usage is not None else None
|
||||
return {
|
||||
"used_bytes": used,
|
||||
"total_bytes": total,
|
||||
"free_bytes": total - used if total is not None and used is not None else None,
|
||||
"source": "googledrive",
|
||||
"scope": "drive",
|
||||
}
|
||||
|
||||
|
||||
ADAPTER_TYPE = "googledrive"
|
||||
|
||||
|
||||
@@ -299,23 +299,23 @@ class LocalAdapter:
|
||||
|
||||
return StreamingResponse(iterator(), status_code=status, headers=headers, media_type=content_type)
|
||||
|
||||
async def stat_file(self, root: str, rel: str):
|
||||
async def stat_file(self, root: str, rel: str, include_metadata: bool = False):
|
||||
fp = _safe_join(root, rel)
|
||||
if not fp.exists():
|
||||
raise FileNotFoundError(rel)
|
||||
st = await asyncio.to_thread(fp.stat)
|
||||
is_dir = fp.is_dir()
|
||||
info = {
|
||||
"name": fp.name,
|
||||
"is_dir": fp.is_dir(),
|
||||
"is_dir": is_dir,
|
||||
"size": st.st_size,
|
||||
"mtime": int(st.st_mtime),
|
||||
"mode": stat.S_IMODE(st.st_mode),
|
||||
"type": "dir" if fp.is_dir() else "file",
|
||||
"type": "dir" if is_dir else "file",
|
||||
"path": str(fp),
|
||||
}
|
||||
# exif信息
|
||||
exif = None
|
||||
if not fp.is_dir():
|
||||
if include_metadata and not is_dir:
|
||||
exif = None
|
||||
mime, _ = mimetypes.guess_type(fp.name)
|
||||
if mime and mime.startswith("image/"):
|
||||
try:
|
||||
@@ -326,9 +326,32 @@ class LocalAdapter:
|
||||
exif = {str(k): str(v) for k, v in exif_data.items()}
|
||||
except Exception:
|
||||
exif = None
|
||||
info["exif"] = exif
|
||||
info["exif"] = exif
|
||||
return info
|
||||
|
||||
async def get_usage(self, root: str):
|
||||
root_path = Path(root).resolve()
|
||||
|
||||
def _usage():
|
||||
used = 0
|
||||
for dirpath, dirnames, filenames in os.walk(root_path):
|
||||
for filename in filenames:
|
||||
fp = Path(dirpath) / filename
|
||||
try:
|
||||
used += fp.stat().st_size
|
||||
except OSError:
|
||||
continue
|
||||
disk = shutil.disk_usage(root_path)
|
||||
return {
|
||||
"used_bytes": used,
|
||||
"total_bytes": disk.total,
|
||||
"free_bytes": disk.free,
|
||||
"source": "local",
|
||||
"scope": "mount",
|
||||
}
|
||||
|
||||
return await asyncio.to_thread(_usage)
|
||||
|
||||
|
||||
ADAPTER_TYPE = "local"
|
||||
CONFIG_SCHEMA = [
|
||||
|
||||
@@ -443,6 +443,21 @@ class OneDriveAdapter:
|
||||
resp.raise_for_status()
|
||||
return self._format_item(resp.json())
|
||||
|
||||
async def get_usage(self, root: str):
|
||||
resp = await self._request("GET", full_url=f"{MS_GRAPH_URL}/me/drive?$select=quota")
|
||||
resp.raise_for_status()
|
||||
quota = (resp.json() or {}).get("quota") or {}
|
||||
used = quota.get("used")
|
||||
total = quota.get("total")
|
||||
remaining = quota.get("remaining")
|
||||
return {
|
||||
"used_bytes": int(used) if used is not None else None,
|
||||
"total_bytes": int(total) if total is not None else None,
|
||||
"free_bytes": int(remaining) if remaining is not None else None,
|
||||
"source": "onedrive",
|
||||
"scope": "drive",
|
||||
}
|
||||
|
||||
|
||||
ADAPTER_TYPE = "onedrive"
|
||||
|
||||
|
||||
926
domain/adapters/providers/pikpak.py
Normal file
926
domain/adapters/providers/pikpak.py
Normal file
@@ -0,0 +1,926 @@
|
||||
import asyncio
|
||||
import hashlib
|
||||
import mimetypes
|
||||
import re
|
||||
import time
|
||||
from typing import Any, AsyncIterator, Dict, List, Optional, Tuple
|
||||
|
||||
import httpx
|
||||
from fastapi import HTTPException
|
||||
from fastapi.responses import Response, StreamingResponse
|
||||
|
||||
from models import StorageAdapter
|
||||
from .base import BaseAdapter
|
||||
|
||||
|
||||
API_BASE = "https://api-drive.mypikpak.com/drive/v1"
|
||||
USER_BASE = "https://user.mypikpak.com/v1"
|
||||
TOKEN_REFRESH_BUFFER = 300
|
||||
|
||||
ANDROID_ALGORITHMS = [
|
||||
"SOP04dGzk0TNO7t7t9ekDbAmx+eq0OI1ovEx",
|
||||
"nVBjhYiND4hZ2NCGyV5beamIr7k6ifAsAbl",
|
||||
"Ddjpt5B/Cit6EDq2a6cXgxY9lkEIOw4yC1GDF28KrA",
|
||||
"VVCogcmSNIVvgV6U+AochorydiSymi68YVNGiz",
|
||||
"u5ujk5sM62gpJOsB/1Gu/zsfgfZO",
|
||||
"dXYIiBOAHZgzSruaQ2Nhrqc2im",
|
||||
"z5jUTBSIpBN9g4qSJGlidNAutX6",
|
||||
"KJE2oveZ34du/g1tiimm",
|
||||
]
|
||||
|
||||
WEB_ALGORITHMS = [
|
||||
"C9qPpZLN8ucRTaTiUMWYS9cQvWOE",
|
||||
"+r6CQVxjzJV6LCV",
|
||||
"F",
|
||||
"pFJRC",
|
||||
"9WXYIDGrwTCz2OiVlgZa90qpECPD6olt",
|
||||
"/750aCr4lm/Sly/c",
|
||||
"RB+DT/gZCrbV",
|
||||
"",
|
||||
"CyLsf7hdkIRxRm215hl",
|
||||
"7xHvLi2tOYP0Y92b",
|
||||
"ZGTXXxu8E/MIWaEDB+Sm/",
|
||||
"1UI3",
|
||||
"E7fP5Pfijd+7K+t6Tg/NhuLq0eEUVChpJSkrKxpO",
|
||||
"ihtqpG6FMt65+Xk+tWUH2",
|
||||
"NhXXU9rg4XXdzo7u5o",
|
||||
]
|
||||
|
||||
PC_ALGORITHMS = [
|
||||
"KHBJ07an7ROXDoK7Db",
|
||||
"G6n399rSWkl7WcQmw5rpQInurc1DkLmLJqE",
|
||||
"JZD1A3M4x+jBFN62hkr7VDhkkZxb9g3rWqRZqFAAb",
|
||||
"fQnw/AmSlbbI91Ik15gpddGgyU7U",
|
||||
"/Dv9JdPYSj3sHiWjouR95NTQff",
|
||||
"yGx2zuTjbWENZqecNI+edrQgqmZKP",
|
||||
"ljrbSzdHLwbqcRn",
|
||||
"lSHAsqCkGDGxQqqwrVu",
|
||||
"TsWXI81fD1",
|
||||
"vk7hBjawK/rOSrSWajtbMk95nfgf3",
|
||||
]
|
||||
|
||||
PLATFORM_CONFIG = {
|
||||
"android": {
|
||||
"client_id": "YNxT9w7GMdWvEOKa",
|
||||
"client_secret": "dbw2OtmVEeuUvIptb1Coyg",
|
||||
"client_version": "1.21.0",
|
||||
"package_name": "com.pikcloud.pikpak",
|
||||
"sdk_version": "2.0.6.206003",
|
||||
"algorithms": ANDROID_ALGORITHMS,
|
||||
"ua": "ANDROID-com.pikcloud.pikpak/1.21.0",
|
||||
},
|
||||
"web": {
|
||||
"client_id": "YUMx5nI8ZU8Ap8pm",
|
||||
"client_secret": "dbw2OtmVEeuUvIptb1Coyg",
|
||||
"client_version": "2.0.0",
|
||||
"package_name": "mypikpak.com",
|
||||
"sdk_version": "8.0.3",
|
||||
"algorithms": WEB_ALGORITHMS,
|
||||
"ua": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
|
||||
"(KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36",
|
||||
},
|
||||
"pc": {
|
||||
"client_id": "YvtoWO6GNHiuCl7x",
|
||||
"client_secret": "1NIH5R1IEe2pAxZE3hv3uA",
|
||||
"client_version": "undefined",
|
||||
"package_name": "mypikpak.com",
|
||||
"sdk_version": "8.0.3",
|
||||
"algorithms": PC_ALGORITHMS,
|
||||
"ua": "MainWindow Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 "
|
||||
"(KHTML, like Gecko) PikPak/2.6.11.4955 Chrome/100.0.4896.160 Electron/18.3.15 Safari/537.36",
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def _md5_text(value: str) -> str:
|
||||
return hashlib.md5(value.encode("utf-8")).hexdigest()
|
||||
|
||||
|
||||
def _sha1_text(value: str) -> str:
|
||||
return hashlib.sha1(value.encode("utf-8")).hexdigest()
|
||||
|
||||
|
||||
def _as_bool(value: Any, default: bool = False) -> bool:
|
||||
if value is None:
|
||||
return default
|
||||
if isinstance(value, bool):
|
||||
return value
|
||||
if isinstance(value, str):
|
||||
return value.strip().lower() in {"1", "true", "yes", "on"}
|
||||
return bool(value)
|
||||
|
||||
|
||||
def _as_int(value: Any, default: int = 0) -> int:
|
||||
try:
|
||||
return int(value or default)
|
||||
except Exception:
|
||||
return default
|
||||
|
||||
|
||||
def _root_payload(root: str | None) -> Tuple[str, str]:
|
||||
raw = (root or "").strip()
|
||||
if not raw:
|
||||
return "", ""
|
||||
if "|" not in raw:
|
||||
return raw, ""
|
||||
root_id, sub_path = raw.split("|", 1)
|
||||
return root_id.strip(), sub_path.strip("/")
|
||||
|
||||
|
||||
def _split_parent_name(rel: str) -> Tuple[str, str]:
|
||||
rel = (rel or "").strip("/")
|
||||
if not rel:
|
||||
return "", ""
|
||||
if "/" not in rel:
|
||||
return "", rel
|
||||
parent, _, name = rel.rpartition("/")
|
||||
return parent, name
|
||||
|
||||
|
||||
def _parse_time(value: str | None) -> int:
|
||||
if not value:
|
||||
return 0
|
||||
text = str(value).strip()
|
||||
if not text:
|
||||
return 0
|
||||
try:
|
||||
from datetime import datetime, timezone
|
||||
|
||||
if text.endswith("Z"):
|
||||
text = text[:-1] + "+00:00"
|
||||
dt = datetime.fromisoformat(text)
|
||||
if dt.tzinfo is None:
|
||||
dt = dt.replace(tzinfo=timezone.utc)
|
||||
return int(dt.timestamp())
|
||||
except Exception:
|
||||
return 0
|
||||
|
||||
|
||||
class PikPakAdapter:
|
||||
def __init__(self, record: StorageAdapter):
|
||||
self.record = record
|
||||
cfg = record.config or {}
|
||||
|
||||
self.username = str(cfg.get("username") or "").strip()
|
||||
self.password = str(cfg.get("password") or "")
|
||||
if not self.username or not self.password:
|
||||
raise ValueError("PikPak adapter requires username and password")
|
||||
|
||||
self.platform = str(cfg.get("platform") or "android").strip().lower()
|
||||
if self.platform not in PLATFORM_CONFIG:
|
||||
self.platform = "android"
|
||||
platform_cfg = PLATFORM_CONFIG[self.platform]
|
||||
|
||||
self.client_id = str(platform_cfg["client_id"])
|
||||
self.client_secret = str(platform_cfg["client_secret"])
|
||||
self.client_version = str(platform_cfg["client_version"])
|
||||
self.package_name = str(platform_cfg["package_name"])
|
||||
self.sdk_version = str(platform_cfg["sdk_version"])
|
||||
self.algorithms = list(platform_cfg["algorithms"])
|
||||
|
||||
device_id = str(cfg.get("device_id") or "").strip()
|
||||
if not device_id or device_id == _md5_text(self.username + self.password):
|
||||
device_id = _md5_text(self.username)
|
||||
self.device_id = device_id
|
||||
self.user_id = str(cfg.get("user_id") or "").strip()
|
||||
self.refresh_token = str(cfg.get("refresh_token") or "").strip()
|
||||
self.access_token = str(cfg.get("access_token") or "").strip()
|
||||
self.expires_at = _as_int(cfg.get("expires_at"), 0)
|
||||
self.captcha_token = str(cfg.get("captcha_token") or "").strip()
|
||||
self.root_id = str(cfg.get("root_id") or "").strip()
|
||||
self.disable_media_link = _as_bool(cfg.get("disable_media_link"), True)
|
||||
self.enable_direct_download_307 = _as_bool(cfg.get("enable_direct_download_307"), False)
|
||||
self.timeout = float(cfg.get("timeout") or 30)
|
||||
|
||||
ua = platform_cfg.get("ua")
|
||||
self.user_agent = str(ua) if ua else self._build_android_user_agent()
|
||||
|
||||
self._auth_lock = asyncio.Lock()
|
||||
self._config_save_lock = asyncio.Lock()
|
||||
self._dir_id_cache: Dict[str, str] = {}
|
||||
self._children_cache: Dict[str, List[Dict[str, Any]]] = {}
|
||||
|
||||
def get_effective_root(self, sub_path: str | None) -> str:
|
||||
return f"{self.root_id}|{(sub_path or '').strip('/')}"
|
||||
|
||||
def _build_android_user_agent(self) -> str:
|
||||
device_sign = self._generate_device_sign(self.device_id, self.package_name)
|
||||
user_id = self.user_id
|
||||
return (
|
||||
f"ANDROID-{self.package_name}/{self.client_version} "
|
||||
"protocolVersion/200 accesstype/ "
|
||||
f"clientid/{self.client_id} "
|
||||
f"clientversion/{self.client_version} "
|
||||
"action_type/ networktype/WIFI sessionid/ "
|
||||
f"deviceid/{self.device_id} "
|
||||
"providername/NONE "
|
||||
f"devicesign/{device_sign} "
|
||||
"refresh_token/ "
|
||||
f"sdkversion/{self.sdk_version} "
|
||||
f"datetime/{int(time.time() * 1000)} "
|
||||
f"usrno/{user_id} "
|
||||
f"appname/android-{self.package_name} "
|
||||
"session_origin/ grant_type/ appid/ clientip/ "
|
||||
"devicename/Xiaomi_M2004j7ac osversion/13 platformversion/10 "
|
||||
"accessmode/ devicemodel/M2004J7AC "
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _generate_device_sign(device_id: str, package_name: str) -> str:
|
||||
sha1_str = _sha1_text(f"{device_id}{package_name}1appkey")
|
||||
md5_str = _md5_text(sha1_str)
|
||||
return f"div101.{device_id}{md5_str}"
|
||||
|
||||
def _captcha_sign(self) -> Tuple[str, str]:
|
||||
timestamp = str(int(time.time() * 1000))
|
||||
value = f"{self.client_id}{self.client_version}{self.package_name}{self.device_id}{timestamp}"
|
||||
for algorithm in self.algorithms:
|
||||
value = _md5_text(value + algorithm)
|
||||
return timestamp, "1." + value
|
||||
|
||||
@staticmethod
|
||||
def _action(method: str, url: str) -> str:
|
||||
m = re.search(r"://[^/]+((/[^/\s?#]+)*)", url)
|
||||
path = m.group(1) if m else "/"
|
||||
return f"{method.upper()}:{path}"
|
||||
|
||||
@staticmethod
|
||||
def _full_action(method: str, url: str) -> str:
|
||||
return f"{method.upper()}:{url}"
|
||||
|
||||
def _captcha_action(self, method: str, url: str, *, auth: bool) -> str:
|
||||
if not auth and url == f"{USER_BASE}/auth/signin":
|
||||
return self._full_action(method, url)
|
||||
return self._action(method, url)
|
||||
|
||||
def _has_valid_access_token(self) -> bool:
|
||||
return bool(self.access_token and self.expires_at > int(time.time()) + TOKEN_REFRESH_BUFFER)
|
||||
|
||||
def _download_headers(self) -> Dict[str, str]:
|
||||
headers = {
|
||||
"User-Agent": self.user_agent,
|
||||
"X-Device-ID": self.device_id,
|
||||
"X-Captcha-Token": self.captcha_token,
|
||||
}
|
||||
if self.access_token:
|
||||
headers["Authorization"] = f"Bearer {self.access_token}"
|
||||
return headers
|
||||
|
||||
async def _save_runtime_config(self):
|
||||
cfg = dict(self.record.config or {})
|
||||
changed = False
|
||||
for key, value in (
|
||||
("refresh_token", self.refresh_token),
|
||||
("access_token", self.access_token),
|
||||
("expires_at", self.expires_at),
|
||||
("captcha_token", self.captcha_token),
|
||||
("device_id", self.device_id),
|
||||
("user_id", self.user_id),
|
||||
):
|
||||
if value and cfg.get(key) != value:
|
||||
cfg[key] = value
|
||||
changed = True
|
||||
if not changed:
|
||||
return
|
||||
async with self._config_save_lock:
|
||||
self.record.config = cfg
|
||||
await self.record.save(update_fields=["config"])
|
||||
|
||||
async def _ensure_auth(self):
|
||||
if self._has_valid_access_token():
|
||||
return
|
||||
async with self._auth_lock:
|
||||
if self._has_valid_access_token():
|
||||
return
|
||||
if self.refresh_token:
|
||||
try:
|
||||
await self._refresh_access_token()
|
||||
return
|
||||
except Exception as e:
|
||||
self.access_token = ""
|
||||
raise HTTPException(
|
||||
502,
|
||||
detail=f"PikPak refresh token failed, please update refresh_token or login manually: {e}",
|
||||
)
|
||||
await self._login()
|
||||
|
||||
async def _login(self):
|
||||
url = f"{USER_BASE}/auth/signin"
|
||||
if not self.captcha_token:
|
||||
await self._refresh_captcha_token(self._full_action("POST", url), self._login_captcha_meta())
|
||||
|
||||
body = {
|
||||
"captcha_token": self.captcha_token,
|
||||
"client_id": self.client_id,
|
||||
"client_secret": self.client_secret,
|
||||
"grant_type": "password",
|
||||
"username": self.username,
|
||||
"password": self.password,
|
||||
}
|
||||
data = await self._raw_json("POST", url, json=body, auth=False)
|
||||
self.refresh_token = str(data.get("refresh_token") or "").strip()
|
||||
self.access_token = str(data.get("access_token") or "").strip()
|
||||
self.expires_at = int(time.time()) + _as_int(data.get("expires_in"), 0)
|
||||
self.user_id = str(data.get("sub") or self.user_id).strip()
|
||||
if not self.refresh_token or not self.access_token:
|
||||
raise HTTPException(502, detail="PikPak login failed: missing token")
|
||||
if self.platform == "android" and not PLATFORM_CONFIG[self.platform].get("ua"):
|
||||
self.user_agent = self._build_android_user_agent()
|
||||
await self._save_runtime_config()
|
||||
|
||||
async def _refresh_access_token(self):
|
||||
url = f"{USER_BASE}/auth/token"
|
||||
body = {
|
||||
"client_id": self.client_id,
|
||||
"client_secret": self.client_secret,
|
||||
"grant_type": "refresh_token",
|
||||
"refresh_token": self.refresh_token,
|
||||
}
|
||||
data = await self._raw_json("POST", url, json=body, auth=False)
|
||||
self.refresh_token = str(data.get("refresh_token") or "").strip()
|
||||
self.access_token = str(data.get("access_token") or "").strip()
|
||||
self.expires_at = int(time.time()) + _as_int(data.get("expires_in"), 0)
|
||||
self.user_id = str(data.get("sub") or self.user_id).strip()
|
||||
if not self.refresh_token or not self.access_token:
|
||||
raise HTTPException(502, detail="PikPak refresh token failed: missing token")
|
||||
if self.platform == "android" and not PLATFORM_CONFIG[self.platform].get("ua"):
|
||||
self.user_agent = self._build_android_user_agent()
|
||||
await self._save_runtime_config()
|
||||
|
||||
def _login_captcha_meta(self) -> Dict[str, str]:
|
||||
return {"username": self.username}
|
||||
|
||||
async def _refresh_captcha_token(self, action: str, meta: Dict[str, str]):
|
||||
url = f"{USER_BASE}/shield/captcha/init"
|
||||
body = {
|
||||
"action": action,
|
||||
"captcha_token": self.captcha_token,
|
||||
"client_id": self.client_id,
|
||||
"device_id": self.device_id,
|
||||
"meta": meta,
|
||||
"redirect_uri": "xlaccsdk01://xbase.cloud/callback?state=harbor",
|
||||
}
|
||||
data = await self._raw_json("POST", url, json=body, auth=False)
|
||||
verify_url = str(data.get("url") or "").strip()
|
||||
token = str(data.get("captcha_token") or "").strip()
|
||||
if token and not verify_url:
|
||||
self.captcha_token = token
|
||||
await self._save_runtime_config()
|
||||
if verify_url:
|
||||
raise HTTPException(
|
||||
400,
|
||||
detail=(
|
||||
"PikPak requires captcha verification. Open the URL, finish verification, "
|
||||
"then capture the fresh captcha_token from the successful verification request and paste it into the adapter config. URL: "
|
||||
f"{verify_url}"
|
||||
),
|
||||
)
|
||||
if not token:
|
||||
raise HTTPException(502, detail="PikPak captcha refresh failed: missing captcha_token")
|
||||
self.captcha_token = token
|
||||
await self._save_runtime_config()
|
||||
|
||||
async def _refresh_captcha_token_after_login(self, method: str, url: str):
|
||||
timestamp, sign = self._captcha_sign()
|
||||
meta = {
|
||||
"client_version": self.client_version,
|
||||
"package_name": self.package_name,
|
||||
"user_id": self.user_id,
|
||||
"timestamp": timestamp,
|
||||
"captcha_sign": sign,
|
||||
}
|
||||
await self._refresh_captcha_token(self._action(method, url), meta)
|
||||
|
||||
async def _raw_json(
|
||||
self,
|
||||
method: str,
|
||||
url: str,
|
||||
*,
|
||||
json: Any | None = None,
|
||||
params: Dict[str, Any] | None = None,
|
||||
auth: bool = True,
|
||||
retry_auth: bool = True,
|
||||
retry_captcha: bool = True,
|
||||
) -> Dict[str, Any]:
|
||||
if auth:
|
||||
await self._ensure_auth()
|
||||
|
||||
headers = {
|
||||
"User-Agent": self.user_agent,
|
||||
"X-Device-ID": self.device_id,
|
||||
"X-Captcha-Token": self.captcha_token,
|
||||
}
|
||||
if auth and self.access_token:
|
||||
headers["Authorization"] = f"Bearer {self.access_token}"
|
||||
|
||||
async with httpx.AsyncClient(timeout=self.timeout, follow_redirects=True) as client:
|
||||
resp = await client.request(method, url, headers=headers, params=params, json=json)
|
||||
|
||||
payload: Dict[str, Any] = {}
|
||||
try:
|
||||
parsed = resp.json()
|
||||
if isinstance(parsed, dict):
|
||||
payload = parsed
|
||||
except Exception:
|
||||
resp.raise_for_status()
|
||||
return {}
|
||||
|
||||
if auth and retry_auth and resp.status_code in {401, 403}:
|
||||
async with self._auth_lock:
|
||||
await self._refresh_access_token()
|
||||
return await self._raw_json(
|
||||
method,
|
||||
url,
|
||||
json=json,
|
||||
params=params,
|
||||
auth=auth,
|
||||
retry_auth=False,
|
||||
retry_captcha=retry_captcha,
|
||||
)
|
||||
|
||||
error_code = payload.get("error_code")
|
||||
error_msg = payload.get("error") or payload.get("error_description") or payload.get("message")
|
||||
try:
|
||||
code_int = int(error_code or 0)
|
||||
except Exception:
|
||||
code_int = 0
|
||||
has_error = code_int != 0 or bool(error_msg and resp.status_code >= 400)
|
||||
|
||||
if has_error:
|
||||
if auth and retry_auth and code_int in {4122, 4121, 16}:
|
||||
async with self._auth_lock:
|
||||
await self._refresh_access_token()
|
||||
return await self._raw_json(
|
||||
method,
|
||||
url,
|
||||
json=json,
|
||||
params=params,
|
||||
auth=auth,
|
||||
retry_auth=False,
|
||||
retry_captcha=retry_captcha,
|
||||
)
|
||||
if code_int == 4002 or error_msg == "captcha_invalid":
|
||||
if retry_captcha:
|
||||
if auth:
|
||||
if self.user_id:
|
||||
await self._refresh_captcha_token_after_login(method, url)
|
||||
else:
|
||||
await self._refresh_captcha_token(
|
||||
self._captcha_action(method, url, auth=auth),
|
||||
self._login_captcha_meta(),
|
||||
)
|
||||
else:
|
||||
await self._refresh_captcha_token(
|
||||
self._captcha_action(method, url, auth=auth),
|
||||
self._login_captcha_meta(),
|
||||
)
|
||||
return await self._raw_json(
|
||||
method,
|
||||
url,
|
||||
json=json,
|
||||
params=params,
|
||||
auth=auth,
|
||||
retry_auth=retry_auth,
|
||||
retry_captcha=False,
|
||||
)
|
||||
raise HTTPException(
|
||||
400,
|
||||
detail=(
|
||||
"PikPak captcha_invalid. Refresh the captcha token, then retry after solving the verification page."
|
||||
),
|
||||
)
|
||||
if auth and retry_captcha and code_int == 9:
|
||||
await self._refresh_captcha_token_after_login(method, url)
|
||||
return await self._raw_json(
|
||||
method,
|
||||
url,
|
||||
json=json,
|
||||
params=params,
|
||||
auth=auth,
|
||||
retry_auth=retry_auth,
|
||||
retry_captcha=False,
|
||||
)
|
||||
raise HTTPException(502, detail=f"PikPak error code={error_code} msg={error_msg}")
|
||||
|
||||
if resp.status_code >= 400:
|
||||
raise HTTPException(resp.status_code, detail=f"PikPak HTTP error: {payload or resp.text}")
|
||||
|
||||
return payload
|
||||
|
||||
async def _request(
|
||||
self,
|
||||
method: str,
|
||||
path_or_url: str,
|
||||
*,
|
||||
json: Any | None = None,
|
||||
params: Dict[str, Any] | None = None,
|
||||
) -> Dict[str, Any]:
|
||||
url = path_or_url if path_or_url.startswith("http") else API_BASE + path_or_url
|
||||
return await self._raw_json(method, url, json=json, params=params, auth=True)
|
||||
|
||||
def _map_file_item(self, it: Dict[str, Any]) -> Dict[str, Any]:
|
||||
is_dir = it.get("kind") == "drive#folder"
|
||||
size = 0
|
||||
if not is_dir:
|
||||
try:
|
||||
size = int(it.get("size") or 0)
|
||||
except Exception:
|
||||
size = 0
|
||||
return {
|
||||
"fid": it.get("id"),
|
||||
"id": it.get("id"),
|
||||
"name": it.get("name") or "",
|
||||
"is_dir": is_dir,
|
||||
"size": size,
|
||||
"ctime": _parse_time(it.get("created_time")),
|
||||
"mtime": _parse_time(it.get("modified_time")),
|
||||
"type": "dir" if is_dir else "file",
|
||||
"hash": it.get("hash") or "",
|
||||
"thumbnail_link": it.get("thumbnail_link") or "",
|
||||
"web_content_link": it.get("web_content_link") or "",
|
||||
"medias": it.get("medias") or [],
|
||||
}
|
||||
|
||||
async def _list_children(self, parent_id: str) -> List[Dict[str, Any]]:
|
||||
if parent_id in self._children_cache:
|
||||
return self._children_cache[parent_id]
|
||||
|
||||
items: List[Dict[str, Any]] = []
|
||||
page_token = ""
|
||||
while True:
|
||||
params = {
|
||||
"parent_id": parent_id,
|
||||
"thumbnail_size": "SIZE_LARGE",
|
||||
"with_audit": "true",
|
||||
"limit": "100",
|
||||
"filters": '{"phase":{"eq":"PHASE_TYPE_COMPLETE"},"trashed":{"eq":false}}',
|
||||
"page_token": page_token,
|
||||
}
|
||||
data = await self._request("GET", "/files", params=params)
|
||||
files = data.get("files") or []
|
||||
if isinstance(files, list):
|
||||
items.extend(self._map_file_item(x) for x in files if isinstance(x, dict))
|
||||
page_token = str(data.get("next_page_token") or "")
|
||||
if not page_token:
|
||||
break
|
||||
|
||||
self._children_cache[parent_id] = items
|
||||
return items
|
||||
|
||||
async def _resolve_root_id(self, root: str | None) -> str:
|
||||
root_id, sub_path = _root_payload(root)
|
||||
base_id = root_id or ""
|
||||
if not sub_path:
|
||||
return base_id
|
||||
return await self._resolve_dir_id_from(base_id, sub_path)
|
||||
|
||||
async def _resolve_dir_id_from(self, base_id: str, rel: str) -> str:
|
||||
rel = (rel or "").strip("/")
|
||||
cache_key = f"{base_id}:{rel}"
|
||||
if cache_key in self._dir_id_cache:
|
||||
return self._dir_id_cache[cache_key]
|
||||
if not rel:
|
||||
self._dir_id_cache[cache_key] = base_id
|
||||
return base_id
|
||||
|
||||
parent_id = base_id
|
||||
path_so_far: List[str] = []
|
||||
for seg in rel.split("/"):
|
||||
if not seg:
|
||||
continue
|
||||
path_so_far.append(seg)
|
||||
current_key = f"{base_id}:{'/'.join(path_so_far)}"
|
||||
cached = self._dir_id_cache.get(current_key)
|
||||
if cached is not None:
|
||||
parent_id = cached
|
||||
continue
|
||||
children = await self._list_children(parent_id)
|
||||
found = next((item for item in children if item["is_dir"] and item["name"] == seg), None)
|
||||
if not found:
|
||||
raise FileNotFoundError(rel)
|
||||
parent_id = str(found["fid"])
|
||||
self._dir_id_cache[current_key] = parent_id
|
||||
return parent_id
|
||||
|
||||
async def _find_child(self, parent_id: str, name: str) -> Optional[Dict[str, Any]]:
|
||||
children = await self._list_children(parent_id)
|
||||
return next((item for item in children if item.get("name") == name), None)
|
||||
|
||||
async def _resolve_obj(self, root: str, rel: str) -> Dict[str, Any]:
|
||||
rel = (rel or "").strip("/")
|
||||
base_id = await self._resolve_root_id(root)
|
||||
if not rel:
|
||||
return {"fid": base_id, "id": base_id, "name": "", "is_dir": True, "size": 0, "mtime": 0, "type": "dir"}
|
||||
if rel.endswith("/"):
|
||||
fid = await self._resolve_dir_id_from(base_id, rel.rstrip("/"))
|
||||
return {"fid": fid, "id": fid, "name": rel.rstrip("/").split("/")[-1], "is_dir": True, "size": 0, "mtime": 0, "type": "dir"}
|
||||
parent_rel, name = _split_parent_name(rel)
|
||||
parent_id = await self._resolve_dir_id_from(base_id, parent_rel)
|
||||
item = await self._find_child(parent_id, name)
|
||||
if not item:
|
||||
raise FileNotFoundError(rel)
|
||||
return item
|
||||
|
||||
async def _resolve_parent_and_obj(self, root: str, rel: str) -> Tuple[str, Dict[str, Any]]:
|
||||
base_id = await self._resolve_root_id(root)
|
||||
parent_rel, name = _split_parent_name(rel)
|
||||
parent_id = await self._resolve_dir_id_from(base_id, parent_rel)
|
||||
item = await self._find_child(parent_id, name)
|
||||
if not item:
|
||||
raise FileNotFoundError(rel)
|
||||
return parent_id, item
|
||||
|
||||
def _invalidate_children_cache(self, parent_id: str):
|
||||
self._children_cache.pop(parent_id, None)
|
||||
|
||||
def _clear_path_cache(self):
|
||||
self._dir_id_cache.clear()
|
||||
|
||||
async def list_dir(
|
||||
self,
|
||||
root: str,
|
||||
rel: str,
|
||||
page_num: int = 1,
|
||||
page_size: int = 50,
|
||||
sort_by: str = "name",
|
||||
sort_order: str = "asc",
|
||||
) -> Tuple[List[Dict], int]:
|
||||
base_id = await self._resolve_root_id(root)
|
||||
target_id = await self._resolve_dir_id_from(base_id, rel)
|
||||
items = list(await self._list_children(target_id))
|
||||
|
||||
reverse = sort_order.lower() == "desc"
|
||||
|
||||
def sort_key(item: Dict[str, Any]) -> Tuple:
|
||||
key = (not item.get("is_dir"),)
|
||||
field = sort_by.lower()
|
||||
if field == "size":
|
||||
key += (int(item.get("size") or 0),)
|
||||
elif field == "mtime":
|
||||
key += (int(item.get("mtime") or 0),)
|
||||
else:
|
||||
key += (str(item.get("name") or "").lower(),)
|
||||
return key
|
||||
|
||||
items.sort(key=sort_key, reverse=reverse)
|
||||
total = len(items)
|
||||
start = max(page_num - 1, 0) * page_size
|
||||
return items[start : start + page_size], total
|
||||
|
||||
async def stat_file(self, root: str, rel: str):
|
||||
return await self._resolve_obj(root, rel)
|
||||
|
||||
async def stat_path(self, root: str, rel: str):
|
||||
try:
|
||||
item = await self._resolve_obj(root, rel)
|
||||
return {"exists": True, "is_dir": bool(item.get("is_dir")), "path": rel, "fid": item.get("fid")}
|
||||
except FileNotFoundError:
|
||||
return {"exists": False, "is_dir": None, "path": rel}
|
||||
|
||||
async def exists(self, root: str, rel: str) -> bool:
|
||||
try:
|
||||
await self._resolve_obj(root, rel)
|
||||
return True
|
||||
except FileNotFoundError:
|
||||
return False
|
||||
|
||||
async def _get_remote_file(self, file_id: str) -> Dict[str, Any]:
|
||||
params = {"_magic": "2021", "usage": "FETCH", "thumbnail_size": "SIZE_LARGE"}
|
||||
if not self.disable_media_link:
|
||||
params["usage"] = "CACHE"
|
||||
return await self._request("GET", f"/files/{file_id}", params=params)
|
||||
|
||||
async def _get_download_url(self, item: Dict[str, Any]) -> str:
    """Resolve a direct download URL for *item*.

    Prefers the first media (transcode) link over the raw
    ``web_content_link`` when media links are enabled; raises
    HTTPException(502) when the API returns neither.
    """
    # Accept either our normalized "fid" key or the raw API "id".
    file_id = str(item.get("fid") or item.get("id") or "")
    if not file_id:
        raise FileNotFoundError(item.get("name") or "")
    data = await self._get_remote_file(file_id)
    url = str(data.get("web_content_link") or "").strip()
    medias = data.get("medias") or []
    # When media links are allowed, the first media entry's link wins.
    if not self.disable_media_link and isinstance(medias, list) and medias:
        first = medias[0]
        if isinstance(first, dict):
            # "link" may be absent or non-dict; collapse those cases to "".
            media_url = str(((first.get("link") or {}).get("url") if isinstance(first.get("link"), dict) else "") or "")
            if media_url:
                url = media_url
    if not url:
        raise HTTPException(502, detail="PikPak did not return download url")
    return url
|
||||
|
||||
async def read_file(self, root: str, rel: str) -> bytes:
    """Download the whole file at *rel* and return its bytes.

    Raises IsADirectoryError for directories and FileNotFoundError when
    the provider answers 404 for the resolved download URL.
    """
    item = await self._resolve_obj(root, rel)
    if item.get("is_dir"):
        raise IsADirectoryError(rel)
    url = await self._get_download_url(item)
    # timeout=None: whole-file downloads may legitimately take very long.
    async with httpx.AsyncClient(timeout=None, follow_redirects=True) as client:
        resp = await client.get(url, headers=self._download_headers())
        if resp.status_code == 404:
            raise FileNotFoundError(rel)
        resp.raise_for_status()
        return resp.content
|
||||
|
||||
async def read_file_range(self, root: str, rel: str, start: int, end: Optional[int] = None) -> bytes:
    """Download bytes [start, end] (inclusive; to EOF when *end* is None).

    Raises FileNotFoundError on 404 and HTTPException(416) when the
    provider reports the range as unsatisfiable.
    """
    item = await self._resolve_obj(root, rel)
    if item.get("is_dir"):
        raise IsADirectoryError(rel)
    url = await self._get_download_url(item)
    headers = self._download_headers()
    # HTTP Range is inclusive on both ends; open-ended when end is None.
    headers["Range"] = f"bytes={start}-" if end is None else f"bytes={start}-{end}"
    async with httpx.AsyncClient(timeout=self.timeout, follow_redirects=True) as client:
        resp = await client.get(url, headers=headers)
        if resp.status_code == 404:
            raise FileNotFoundError(rel)
        if resp.status_code == 416:
            raise HTTPException(416, detail="Requested Range Not Satisfiable")
        resp.raise_for_status()
        return resp.content
|
||||
|
||||
async def stream_file(self, root: str, rel: str, range_header: str | None):
    """Stream file bytes to the client, honoring an optional HTTP Range.

    Returns a StreamingResponse (200 for full content, 206 for partial)
    that proxies the provider's download URL chunk by chunk.
    """
    item = await self._resolve_obj(root, rel)
    if item.get("is_dir"):
        raise IsADirectoryError(rel)
    url = await self._get_download_url(item)
    # size may be 0/absent in remote metadata; 0 disables range clamping.
    file_size = int(item.get("size") or 0)
    mime, _ = mimetypes.guess_type(rel)
    content_type = mime or "application/octet-stream"

    # Parse "bytes=start-end"; either bound may be omitted.
    start = 0
    end = file_size - 1 if file_size > 0 else None
    status_code = 200
    if range_header and range_header.startswith("bytes="):
        status_code = 206
        part = range_header.split("=", 1)[1]
        s, _, e = part.partition("-")
        if s.strip():
            start = int(s)
        if e.strip():
            end = int(e)
        elif file_size > 0:
            end = file_size - 1
        if file_size > 0:
            # Clamp and validate the requested range against the known size.
            if start >= file_size:
                raise HTTPException(416, detail="Requested Range Not Satisfiable")
            if end is None or end >= file_size:
                end = file_size - 1
            if start > end:
                raise HTTPException(416, detail="Requested Range Not Satisfiable")

    resp_headers = {"Accept-Ranges": "bytes", "Content-Type": content_type}
    if file_size > 0:
        if status_code == 206 and end is not None:
            resp_headers["Content-Range"] = f"bytes {start}-{end}/{file_size}"
            resp_headers["Content-Length"] = str(end - start + 1)
        else:
            resp_headers["Content-Length"] = str(file_size)

    async def iterator():
        # Forward the (possibly ranged) request to the provider URL and
        # relay the body chunks; errors surface mid-stream.
        headers = self._download_headers()
        if status_code == 206 and end is not None:
            headers["Range"] = f"bytes={start}-{end}"
        async with httpx.AsyncClient(timeout=None, follow_redirects=True) as client:
            async with client.stream("GET", url, headers=headers) as resp:
                if resp.status_code == 404:
                    raise FileNotFoundError(rel)
                if resp.status_code == 416:
                    raise HTTPException(416, detail="Requested Range Not Satisfiable")
                resp.raise_for_status()
                async for chunk in resp.aiter_bytes():
                    if chunk:
                        yield chunk

    return StreamingResponse(iterator(), status_code=status_code, headers=resp_headers, media_type=content_type)
|
||||
|
||||
async def get_direct_download_response(self, root: str, rel: str):
    """Return a 307 redirect to the provider URL.

    Returns None when the 307 feature is disabled or *rel* is a
    directory, letting the caller fall back to proxied streaming.
    """
    if not self.enable_direct_download_307:
        return None
    entry = await self._resolve_obj(root, rel)
    if entry.get("is_dir"):
        return None
    location = await self._get_download_url(entry)
    return Response(status_code=307, headers={"Location": location})
|
||||
|
||||
async def get_thumbnail(self, root: str, rel: str, size: str = "medium"):
    """Fetch the provider-generated thumbnail bytes, or None when unavailable.

    NOTE(review): *size* is accepted for interface compatibility but not
    forwarded -- only the item's single thumbnail_link is used.
    """
    item = await self._resolve_obj(root, rel)
    url = str(item.get("thumbnail_link") or "").strip()
    if not url:
        return None
    async with httpx.AsyncClient(timeout=self.timeout, follow_redirects=True) as client:
        resp = await client.get(url, headers=self._download_headers())
        if resp.status_code >= 400:
            # Best-effort: any fetch failure is treated as "no thumbnail".
            return None
        return resp.content
|
||||
|
||||
async def get_usage(self, root: str):
    """Report account-level quota as used/total/free byte counts.

    Missing quota fields map to None; free is derived only when both
    used and total are known.
    """
    about = await self._request("GET", "/about")
    quota = about.get("quota") or {}
    raw_total = quota.get("limit")
    raw_used = quota.get("usage")
    total = None if raw_total is None else int(raw_total)
    used = None if raw_used is None else int(raw_used)
    free = total - used if total is not None and used is not None else None
    return {
        "used_bytes": used,
        "total_bytes": total,
        "free_bytes": free,
        "source": "pikpak",
        "scope": "drive",
    }
|
||||
|
||||
async def mkdir(self, root: str, rel: str):
    """Create directory *rel* under its parent.

    Rejects the root itself and empty leaf names with HTTP 400.
    """
    rel = (rel or "").strip("/")
    if not rel:
        raise HTTPException(400, detail="Cannot create root")
    parent_rel, name = _split_parent_name(rel)
    if not name:
        raise HTTPException(400, detail="Invalid directory name")
    base_id = await self._resolve_root_id(root)
    parent_id = await self._resolve_dir_id_from(base_id, parent_rel)
    payload = {"kind": "drive#folder", "parent_id": parent_id, "name": name}
    await self._request("POST", "/files", json=payload)
    # The parent's cached listing no longer reflects the new child.
    self._invalidate_children_cache(parent_id)
|
||||
|
||||
async def delete(self, root: str, rel: str):
    """Move *rel* into the PikPak trash and invalidate affected caches."""
    parent_id, entry = await self._resolve_parent_and_obj(root, rel)
    await self._request("POST", "/files:batchTrash", json={"ids": [entry["fid"]]})
    self._invalidate_children_cache(parent_id)
    if entry.get("is_dir"):
        # A removed directory invalidates every cached path beneath it.
        self._clear_path_cache()
|
||||
|
||||
async def move(self, root: str, src_rel: str, dst_rel: str):
    """Move and/or rename *src_rel* to *dst_rel*.

    PikPak has no single move+rename call, so this issues up to two
    requests: batchMove when the parent directory changes, then a PATCH
    rename when the leaf name changes.
    """
    src_parent_id, item = await self._resolve_parent_and_obj(root, src_rel)
    base_id = await self._resolve_root_id(root)
    dst_parent_rel, dst_name = _split_parent_name(dst_rel)
    dst_parent_id = await self._resolve_dir_id_from(base_id, dst_parent_rel)

    if src_parent_id != dst_parent_id:
        await self._request("POST", "/files:batchMove", json={"ids": [item["fid"]], "to": {"parent_id": dst_parent_id}})
        self._invalidate_children_cache(src_parent_id)
        self._invalidate_children_cache(dst_parent_id)

    if item.get("name") != dst_name:
        await self._request("PATCH", f"/files/{item['fid']}", json={"name": dst_name})
        self._invalidate_children_cache(dst_parent_id)

    if item.get("is_dir"):
        # Moving a directory can invalidate any cached descendant path.
        self._clear_path_cache()
|
||||
|
||||
async def rename(self, root: str, src_rel: str, dst_rel: str):
    """Rename is implemented as a move to the new relative path."""
    return await self.move(root, src_rel, dst_rel)
|
||||
|
||||
async def copy(self, root: str, src_rel: str, dst_rel: str, overwrite: bool = False):
    """Copy *src_rel* into *dst_rel*'s parent, then rename if names differ.

    NOTE(review): batchCopy does not return the new file id, so the copy
    is re-located heuristically by matching the source name and picking
    the newest candidate -- concurrent copies of the same name could
    confuse this. *overwrite* is accepted for interface parity but not
    enforced here.
    """
    src_parent_id, item = await self._resolve_parent_and_obj(root, src_rel)
    base_id = await self._resolve_root_id(root)
    dst_parent_rel, dst_name = _split_parent_name(dst_rel)
    dst_parent_id = await self._resolve_dir_id_from(base_id, dst_parent_rel)
    await self._request("POST", "/files:batchCopy", json={"ids": [item["fid"]], "to": {"parent_id": dst_parent_id}})
    self._invalidate_children_cache(dst_parent_id)

    if item.get("name") != dst_name:
        children = await self._list_children(dst_parent_id)
        # Candidates: same name as the source but a different file id.
        copied_candidates = [x for x in children if x.get("name") == item.get("name") and x.get("fid") != item.get("fid")]
        copied = None
        if copied_candidates:
            # Newest creation/modification time is assumed to be the copy.
            copied_candidates.sort(key=lambda x: (int(x.get("ctime") or 0), int(x.get("mtime") or 0)), reverse=True)
            copied = copied_candidates[0]
        if copied:
            await self._request("PATCH", f"/files/{copied['fid']}", json={"name": dst_name})
            self._invalidate_children_cache(dst_parent_id)
    if item.get("is_dir"):
        self._clear_path_cache()
    _ = src_parent_id
|
||||
|
||||
async def write_file(self, root: str, rel: str, data: bytes):
    """Not supported: PikPak uploads are not implemented in this adapter."""
    raise HTTPException(501, detail="PikPak upload not implemented")
|
||||
|
||||
async def write_file_stream(self, root: str, rel: str, data_iter: AsyncIterator[bytes]):
    """Not supported: streaming uploads are not implemented for PikPak."""
    raise HTTPException(501, detail="PikPak upload not implemented")
|
||||
|
||||
async def write_upload_file(
    self,
    root: str,
    rel: str,
    file_obj,
    filename: str | None,
    file_size: int | None = None,
    content_type: str | None = None,
):
    """Not supported: multipart uploads are not implemented for PikPak."""
    raise HTTPException(501, detail="PikPak upload not implemented")
|
||||
|
||||
|
||||
# Registry identifier under which this adapter implementation is exposed.
ADAPTER_TYPE = "pikpak"

# Form schema rendered by the admin UI when configuring a PikPak mount.
# Token/device fields are optional: they let an existing session be reused
# instead of performing a fresh credential login.
CONFIG_SCHEMA = [
    {"key": "username", "label": "PikPak 账号", "type": "string", "required": True},
    {"key": "password", "label": "PikPak 密码", "type": "password", "required": True},
    {"key": "platform", "label": "平台", "type": "select", "required": False, "default": "android", "options": ["android", "web", "pc"]},
    {"key": "refresh_token", "label": "Refresh Token", "type": "password", "required": False},
    {"key": "access_token", "label": "Access Token", "type": "password", "required": False},
    {"key": "expires_at", "label": "Access Token 过期时间戳", "type": "number", "required": False},
    {"key": "captcha_token", "label": "Captcha Token", "type": "password", "required": False},
    {"key": "device_id", "label": "Device ID", "type": "string", "required": False},
    {"key": "root_id", "label": "根目录 ID", "type": "string", "required": False, "default": ""},
    {"key": "disable_media_link", "label": "禁用媒体转码链接", "type": "boolean", "required": False, "default": True},
    {"key": "enable_direct_download_307", "label": "直链 307 跳转", "type": "boolean", "required": False, "default": False},
]
|
||||
|
||||
|
||||
def ADAPTER_FACTORY(rec: StorageAdapter) -> BaseAdapter:
    """Factory hook invoked by the adapter registry to build an instance."""
    return PikPakAdapter(rec)
|
||||
@@ -840,6 +840,23 @@ class QuarkAdapter:
|
||||
async def copy(self, root: str, src_rel: str, dst_rel: str, overwrite: bool = False):
    """Copy is not available through the Quark open API."""
    raise NotImplementedError("QuarkOpen does not support copy via open API")
|
||||
|
||||
async def get_usage(self, root: str):
    """Report account capacity from Quark's growth-info endpoint."""
    data = await self._request("GET", "/capacity/growth/info")
    payload = (data or {}).get("data") or {}
    member = payload.get("member")
    if isinstance(member, dict):
        # Some responses nest the capacity numbers under "member".
        payload = member
    raw_used = payload.get("use_capacity") or payload.get("used_capacity")
    raw_total = payload.get("total_capacity")
    used_bytes = None if raw_used is None else int(raw_used)
    total_bytes = None if raw_total is None else int(raw_total)
    if used_bytes is not None and total_bytes is not None:
        free_bytes = total_bytes - used_bytes
    else:
        free_bytes = None
    return {
        "used_bytes": used_bytes,
        "total_bytes": total_bytes,
        "free_bytes": free_bytes,
        "source": "quark",
        "scope": "account",
    }
|
||||
|
||||
# -----------------
|
||||
# STAT / EXISTS / 辅助
|
||||
# -----------------
|
||||
|
||||
@@ -376,7 +376,7 @@ class WebDAVAdapter:
|
||||
|
||||
return StreamingResponse(segmented_body(), status_code=status_code, headers=resp_headers, media_type=content_type)
|
||||
|
||||
async def stat_file(self, root: str, rel: str):
|
||||
async def stat_file(self, root: str, rel: str, include_metadata: bool = False):
|
||||
url = self._build_url(rel)
|
||||
async with self._client() as client:
|
||||
# PROPFIND 获取属性
|
||||
@@ -426,9 +426,8 @@ class WebDAVAdapter:
|
||||
info["mtime"] = 0
|
||||
elif info["mtime"] is None:
|
||||
info["mtime"] = 0
|
||||
# exif信息
|
||||
exif = None
|
||||
if not info["is_dir"]:
|
||||
if include_metadata and not info["is_dir"]:
|
||||
exif = None
|
||||
mime, _ = mimetypes.guess_type(info["name"])
|
||||
if mime and mime.startswith("image/"):
|
||||
try:
|
||||
@@ -442,7 +441,7 @@ class WebDAVAdapter:
|
||||
exif = {str(k): str(v) for k, v in exif_data.items()}
|
||||
except Exception:
|
||||
exif = None
|
||||
info["exif"] = exif
|
||||
info["exif"] = exif
|
||||
return info
|
||||
|
||||
async def exists(self, root: str, rel: str) -> bool:
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import time
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import HTTPException
|
||||
@@ -8,11 +9,34 @@ from .registry import (
|
||||
normalize_adapter_type,
|
||||
runtime_registry,
|
||||
)
|
||||
from .types import AdapterCreate, AdapterOut
|
||||
from .types import AdapterCreate, AdapterOut, AdapterUsage
|
||||
from .providers.base import UsageCapableAdapter
|
||||
from models import StorageAdapter
|
||||
|
||||
|
||||
class AdapterService:
|
||||
_usage_cache_ttl = 3600
|
||||
_usage_cache: dict[int, tuple[float, AdapterUsage]] = {}
|
||||
|
||||
@classmethod
def _get_cached_usage(cls, adapter_id: int) -> AdapterUsage | None:
    """Return a still-fresh cached usage entry; expire stale ones lazily."""
    entry = cls._usage_cache.get(adapter_id)
    if entry is None:
        return None
    expires_at, usage = entry
    if time.time() < expires_at:
        return usage
    # Expired: drop the stale entry so it is not served again.
    cls._usage_cache.pop(adapter_id, None)
    return None
|
||||
|
||||
@classmethod
def _set_cached_usage(cls, usage: AdapterUsage):
    """Cache *usage* under its adapter id with a fresh TTL deadline."""
    deadline = time.time() + cls._usage_cache_ttl
    cls._usage_cache[usage.id] = (deadline, usage)
|
||||
|
||||
@classmethod
|
||||
def _clear_cached_usage(cls, adapter_id: int):
|
||||
cls._usage_cache.pop(adapter_id, None)
|
||||
|
||||
@classmethod
|
||||
def _validate_and_normalize_config(cls, adapter_type: str, cfg):
|
||||
schemas = get_config_schemas()
|
||||
@@ -85,6 +109,74 @@ class AdapterService:
|
||||
raise HTTPException(404, detail="Not found")
|
||||
return AdapterOut.model_validate(rec)
|
||||
|
||||
@classmethod
def _unsupported_usage(cls, rec: StorageAdapter, reason: str) -> AdapterUsage:
    """Build an AdapterUsage marked unsupported, carrying *reason*.

    Byte-count fields are left at their None defaults.
    """
    return AdapterUsage(
        id=rec.id,
        name=rec.name,
        type=rec.type,
        path=rec.path,
        supported=False,
        reason=reason,
    )
|
||||
|
||||
@classmethod
async def get_adapter_usage(cls, adapter_id: int) -> AdapterUsage:
    """Return usage info for one adapter; HTTP 404 when it does not exist."""
    rec = await StorageAdapter.get_or_none(id=adapter_id)
    if not rec:
        raise HTTPException(404, detail="Not found")
    return await cls._get_adapter_usage_for_record(rec)
|
||||
|
||||
@classmethod
async def _get_adapter_usage_for_record(cls, rec: StorageAdapter) -> AdapterUsage:
    """Resolve (and cache) usage info for one adapter record.

    Never raises: every failure mode is reported as an unsupported
    AdapterUsage carrying a machine-readable *reason* string.
    """
    cached = cls._get_cached_usage(rec.id)
    if cached:
        return cached

    if not rec.enabled:
        return cls._unsupported_usage(rec, "adapter_disabled")

    adapter = runtime_registry.get(rec.id)
    if not adapter:
        # Registry may be stale (adapter created after startup); retry once.
        await runtime_registry.refresh()
        adapter = runtime_registry.get(rec.id)
    if not adapter:
        return cls._unsupported_usage(rec, "adapter_unavailable")
    if not isinstance(adapter, UsageCapableAdapter):
        return cls._unsupported_usage(rec, "adapter_not_implemented")

    root = adapter.get_effective_root(rec.sub_path)
    try:
        raw_usage = await adapter.get_usage(root)
    except Exception as e:
        return cls._unsupported_usage(rec, f"usage_failed: {e}")

    if not isinstance(raw_usage, dict):
        return cls._unsupported_usage(rec, "invalid_usage_response")

    usage = AdapterUsage(
        id=rec.id,
        name=rec.name,
        type=rec.type,
        path=rec.path,
        supported=True,
        used_bytes=raw_usage.get("used_bytes"),
        total_bytes=raw_usage.get("total_bytes"),
        free_bytes=raw_usage.get("free_bytes"),
        source=raw_usage.get("source") or rec.type,
        scope=raw_usage.get("scope"),
    )
    # Only successful lookups are cached; failures retry on the next call.
    cls._set_cached_usage(usage)
    return usage
|
||||
|
||||
@classmethod
async def list_adapter_usages(cls):
    """Collect usage info for every configured storage adapter.

    Per-record failures are absorbed by _get_adapter_usage_for_record,
    so one broken adapter cannot fail the whole listing.
    """
    records = await StorageAdapter.all()
    return [await cls._get_adapter_usage_for_record(rec) for rec in records]
|
||||
|
||||
@classmethod
|
||||
async def update_adapter(cls, adapter_id: int, data: AdapterCreate, current_user: Optional[User]):
|
||||
rec = await StorageAdapter.get_or_none(id=adapter_id)
|
||||
@@ -105,6 +197,7 @@ class AdapterService:
|
||||
await rec.save()
|
||||
|
||||
await runtime_registry.upsert(rec)
|
||||
cls._clear_cached_usage(adapter_id)
|
||||
return AdapterOut.model_validate(rec)
|
||||
|
||||
@classmethod
|
||||
@@ -113,4 +206,5 @@ class AdapterService:
|
||||
if not deleted:
|
||||
raise HTTPException(404, detail="Not found")
|
||||
runtime_registry.remove(adapter_id)
|
||||
cls._clear_cached_usage(adapter_id)
|
||||
return {"deleted": True}
|
||||
|
||||
@@ -48,3 +48,17 @@ class AdapterOut(AdapterBase):
|
||||
|
||||
class Config:
|
||||
from_attributes = True
|
||||
|
||||
|
||||
class AdapterUsage(BaseModel):
    """Capacity snapshot for one storage adapter.

    When *supported* is False, the byte fields stay None and *reason*
    explains why usage could not be collected.
    """
    id: int
    name: str
    type: str
    path: str
    supported: bool
    used_bytes: Optional[int] = None
    total_bytes: Optional[int] = None
    free_bytes: Optional[int] = None
    # Backend that reported the numbers (e.g. "pikpak", "quark").
    source: Optional[str] = None
    # Granularity of the numbers, e.g. "drive" or "account".
    scope: Optional[str] = None
    # Populated only when supported is False.
    reason: Optional[str] = None
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
from .service import AgentService
|
||||
from .types import AgentChatContext, AgentChatRequest, PendingToolCall
|
||||
from .types import AgentChatContext, AgentChatRequest, McpCall, PendingMcpCall
|
||||
|
||||
__all__ = [
|
||||
"AgentService",
|
||||
"AgentChatContext",
|
||||
"AgentChatRequest",
|
||||
"PendingToolCall",
|
||||
"McpCall",
|
||||
"PendingMcpCall",
|
||||
]
|
||||
|
||||
@@ -14,7 +14,7 @@ router = APIRouter(prefix="/api/agent", tags=["agent"])
|
||||
|
||||
|
||||
@router.post("/chat")
|
||||
@audit(action=AuditAction.CREATE, description="Agent 对话", body_fields=["auto_execute"])
|
||||
@audit(action=AuditAction.CREATE, description="Agent 对话", body_fields=["auto_execute", "approved_mcp_call_ids", "rejected_mcp_call_ids"])
|
||||
async def chat(
|
||||
request: Request,
|
||||
payload: AgentChatRequest,
|
||||
@@ -25,7 +25,7 @@ async def chat(
|
||||
|
||||
|
||||
@router.post("/chat/stream")
|
||||
@audit(action=AuditAction.CREATE, description="Agent 对话(SSE)", body_fields=["auto_execute"])
|
||||
@audit(action=AuditAction.CREATE, description="Agent 对话(SSE)", body_fields=["auto_execute", "approved_mcp_call_ids", "rejected_mcp_call_ids"])
|
||||
async def chat_stream(
|
||||
request: Request,
|
||||
payload: AgentChatRequest,
|
||||
|
||||
334
domain/agent/mcp.py
Normal file
334
domain/agent/mcp.py
Normal file
@@ -0,0 +1,334 @@
|
||||
import inspect
|
||||
import json
|
||||
from contextlib import asynccontextmanager
|
||||
from datetime import timedelta
|
||||
from typing import Annotated, Any, Literal
|
||||
from urllib.parse import quote, unquote
|
||||
|
||||
import httpx
|
||||
from mcp import ClientSession
|
||||
from mcp.client.streamable_http import streamablehttp_client
|
||||
from mcp.server.auth.provider import AccessToken
|
||||
from mcp.server.fastmcp import Context, FastMCP
|
||||
from mcp.server.fastmcp.server import AuthSettings
|
||||
from mcp.types import ToolAnnotations
|
||||
from pydantic import Field
|
||||
|
||||
from domain.auth import AuthService, User
|
||||
from domain.processors import ProcessorService
|
||||
|
||||
from .tools import get_tool, mcp_tool_descriptors
|
||||
from .tools.base import McpToolDescriptor, normalize_tool_result, tool_result_to_content
|
||||
|
||||
INTERNAL_MCP_BASE_URL = "http://127.0.0.1:8000/"
|
||||
CURRENT_PATH_HEADER = "x-foxel-current-path"
|
||||
|
||||
|
||||
def _normalize_path(path: str | None) -> str | None:
|
||||
if not path:
|
||||
return None
|
||||
value = str(path).strip().replace("\\", "/")
|
||||
if not value:
|
||||
return None
|
||||
if not value.startswith("/"):
|
||||
value = "/" + value
|
||||
return value.rstrip("/") or "/"
|
||||
|
||||
|
||||
def _header_current_path(ctx: Context | None) -> str | None:
    """Extract and normalize the current-path header from the MCP request."""
    if not ctx or not ctx.request_context:
        return None
    request = ctx.request_context.request
    if request is None:
        return None
    return _normalize_path(request.headers.get(CURRENT_PATH_HEADER))
|
||||
|
||||
|
||||
def _field_annotation(schema: dict[str, Any], required: bool) -> tuple[Any, Any]:
    """Map one JSON-schema property to a (type annotation, default) pair.

    Required fields default to inspect.Parameter.empty so the generated
    signature marks them mandatory; enum properties become Literal types;
    schema descriptions are attached via Annotated[..., Field(...)].
    """
    raw_type = schema.get("type")
    enum_values = schema.get("enum")
    description = str(schema.get("description") or "").strip() or None
    default = schema.get("default", inspect.Parameter.empty if required else None)

    annotation: Any
    if isinstance(enum_values, list) and enum_values:
        # Literal needs a tuple subscript built at runtime, hence __getitem__.
        annotation = Literal.__getitem__(tuple(enum_values))
    elif raw_type == "string":
        annotation = str
    elif raw_type == "integer":
        annotation = int
    elif raw_type == "number":
        annotation = float
    elif raw_type == "boolean":
        annotation = bool
    elif raw_type == "array":
        annotation = list[Any]
    elif raw_type == "object":
        annotation = dict[str, Any]
    else:
        annotation = Any

    if not required and default is None:
        # Optional without an explicit default: widen to "X | None".
        annotation = annotation | None

    if description:
        annotation = Annotated[annotation, Field(description=description)]
    return annotation, default
|
||||
|
||||
|
||||
def _build_tool_signature(descriptor: McpToolDescriptor) -> inspect.Signature:
    """Translate a tool's JSON input schema into a Python call signature."""
    schema = descriptor.input_schema if isinstance(descriptor.input_schema, dict) else {}
    raw_props = schema.get("properties")
    properties = raw_props if isinstance(raw_props, dict) else {}
    required = set(schema.get("required") or [])

    params: list[inspect.Parameter] = []
    for prop_name, raw_schema in properties.items():
        prop_schema = raw_schema if isinstance(raw_schema, dict) else {}
        annotation, default = _field_annotation(prop_schema, prop_name in required)
        param = inspect.Parameter(
            str(prop_name),
            inspect.Parameter.POSITIONAL_OR_KEYWORD,
            default=default,
            annotation=annotation,
        )
        params.append(param)
    return inspect.Signature(parameters=params, return_annotation=dict[str, Any])
|
||||
|
||||
|
||||
def _build_tool_wrapper(descriptor: McpToolDescriptor):
    """Create an async callable that dispatches to the named Foxel tool.

    The wrapper resolves the tool via get_tool at call time and converts
    every failure into a normalized error payload instead of raising.
    """
    async def wrapper(**kwargs: Any) -> dict[str, Any]:
        spec = get_tool(descriptor.name)
        if not spec:
            return normalize_tool_result({"error": f"unknown_tool: {descriptor.name}"})
        try:
            result = await spec.handler(kwargs)
            return normalize_tool_result(result)
        except Exception as exc:  # noqa: BLE001
            return normalize_tool_result({"error": str(exc)})

    # Surface the descriptor's identity and schema so FastMCP introspection
    # sees the tool's real name, docs, and parameter signature.
    wrapper.__name__ = descriptor.name
    wrapper.__doc__ = descriptor.description
    wrapper.__signature__ = _build_tool_signature(descriptor)
    return wrapper
|
||||
|
||||
|
||||
class FoxelMcpTokenVerifier:
    """Adapts Foxel's bearer-token auth to the MCP token-verifier interface."""

    async def verify_token(self, token: str) -> AccessToken | None:
        """Return an AccessToken for a valid, active user; None otherwise."""
        try:
            user = await AuthService.get_current_active_user(await AuthService.get_current_user(token))
        except Exception:  # noqa: BLE001
            # Any auth failure (expired, malformed, inactive user) -> reject.
            return None
        return AccessToken(token=token, client_id=user.username, scopes=[])
|
||||
|
||||
|
||||
# In-process MCP server instance; tools/resources/prompts register onto it.
MCP_SERVER = FastMCP(
    name="Foxel MCP",
    instructions="Foxel 内置 MCP 服务,提供文件系统、网页抓取、时间与处理器相关能力。",
    streamable_http_path="/",
    token_verifier=FoxelMcpTokenVerifier(),
    auth=AuthSettings(
        issuer_url="http://127.0.0.1:8000",
        resource_server_url=None,
        required_scopes=[],
    ),
)


# Register every declared Foxel tool with a generated async wrapper whose
# signature is derived from the tool's JSON input schema.
for descriptor in mcp_tool_descriptors():
    MCP_SERVER.add_tool(
        _build_tool_wrapper(descriptor),
        name=descriptor.name,
        description=descriptor.description,
        annotations=ToolAnnotations.model_validate(descriptor.annotations),
        meta=descriptor.meta,
        structured_output=False,
    )
|
||||
|
||||
|
||||
@MCP_SERVER.resource(
    "foxel://context/current-path",
    name="current_path",
    title="Current Path",
    description="返回当前请求上下文里的文件管理目录。",
    mime_type="application/json",
)
def current_path_resource() -> dict[str, Any]:
    # Always None here: resource reads carry no per-request header context.
    return {"current_path": None}


@MCP_SERVER.resource(
    "foxel://policy/tool-confirmation",
    name="tool_confirmation_policy",
    title="Tool Confirmation Policy",
    description="返回 Foxel agent 对工具审批的策略。",
    mime_type="application/json",
)
def tool_confirmation_policy_resource() -> dict[str, Any]:
    # Partition registered tools by their requires_confirmation flag.
    return {
        "read_tools": [tool.name for tool in mcp_tool_descriptors() if not tool.requires_confirmation],
        "write_tools": [tool.name for tool in mcp_tool_descriptors() if tool.requires_confirmation],
        "rule": "直接调用 MCP tool 时不额外审批;通过 agent 代表用户执行写操作时需要审批。",
    }


@MCP_SERVER.resource(
    "foxel://processors/index",
    name="processors_index",
    title="Processors Index",
    description="返回当前可用处理器列表。",
    mime_type="application/json",
)
def processors_index_resource() -> dict[str, Any]:
    # Expose the processor catalog so MCP clients can discover processors.
    return {"processors": ProcessorService.list_processors()}
|
||||
|
||||
|
||||
async def _tool_resource(tool_name: str, arguments: dict[str, Any]) -> dict[str, Any]:
    """Run tool *tool_name* for a resource read, mapping failures to error dicts."""
    spec = get_tool(tool_name)
    if not spec:
        return normalize_tool_result({"error": f"unknown_tool: {tool_name}"})
    try:
        raw = await spec.handler(arguments)
    except Exception as exc:  # noqa: BLE001
        return normalize_tool_result({"error": str(exc)})
    return normalize_tool_result(raw)
|
||||
|
||||
|
||||
@MCP_SERVER.resource(
    "foxel://vfs/stat/{path}",
    name="vfs_stat_resource",
    title="VFS Stat",
    description="读取指定路径的文件或目录元信息;path 需要 URL 编码。",
    mime_type="application/json",
)
async def vfs_stat_resource(path: str) -> dict[str, Any]:
    # URL-decoded path is re-rooted with a single leading slash.
    return await _tool_resource("vfs_stat", {"path": "/" + unquote(path).lstrip("/")})


@MCP_SERVER.resource(
    "foxel://vfs/text/{path}",
    name="vfs_text_resource",
    title="VFS Text",
    description="读取文本文件内容;path 需要 URL 编码。",
    mime_type="application/json",
)
async def vfs_text_resource(path: str) -> dict[str, Any]:
    # Delegates to the vfs_read_text tool with a normalized absolute path.
    return await _tool_resource("vfs_read_text", {"path": "/" + unquote(path).lstrip("/")})


@MCP_SERVER.resource(
    "foxel://vfs/dir/{path}",
    name="vfs_dir_resource",
    title="VFS Directory",
    description="列出目录内容;path 需要 URL 编码。",
    mime_type="application/json",
)
async def vfs_dir_resource(path: str) -> dict[str, Any]:
    # Delegates to the vfs_list_dir tool with a normalized absolute path.
    return await _tool_resource("vfs_list_dir", {"path": "/" + unquote(path).lstrip("/")})


@MCP_SERVER.resource(
    "foxel://vfs/search/{query}",
    name="vfs_search_resource",
    title="VFS Search",
    description="搜索文件;query 需要 URL 编码。",
    mime_type="application/json",
)
async def vfs_search_resource(query: str) -> dict[str, Any]:
    # The search query is decoded but otherwise passed through verbatim.
    return await _tool_resource("vfs_search", {"q": unquote(query)})
|
||||
|
||||
|
||||
# Prompt templates: each returns a single user message steering the agent
# toward the matching tool(s) for the task.
@MCP_SERVER.prompt(name="browse_path", title="Browse Path", description="生成浏览目录的推荐提示词。")
def browse_path_prompt(path: Annotated[str, Field(description="目标目录路径")]) -> list[dict[str, Any]]:
    return [{"role": "user", "content": f"请先浏览目录 `{path}`,总结结构与关键文件。必要时调用 vfs_list_dir 与 vfs_stat。"}]


@MCP_SERVER.prompt(name="inspect_file", title="Inspect File", description="生成查看文件的推荐提示词。")
def inspect_file_prompt(path: Annotated[str, Field(description="目标文件路径")]) -> list[dict[str, Any]]:
    return [{"role": "user", "content": f"请检查文件 `{path}` 的内容与用途。必要时调用 vfs_read_text。"}]


@MCP_SERVER.prompt(name="search_files", title="Search Files", description="生成搜索文件的推荐提示词。")
def search_files_prompt(query: Annotated[str, Field(description="搜索关键词")]) -> list[dict[str, Any]]:
    return [{"role": "user", "content": f"请搜索与 `{query}` 相关的文件,并按相关性总结。必要时调用 vfs_search。"}]


@MCP_SERVER.prompt(name="edit_file_safely", title="Edit File Safely", description="生成安全修改文件的推荐提示词。")
def edit_file_safely_prompt(path: Annotated[str, Field(description="目标文件路径")]) -> list[dict[str, Any]]:
    return [{"role": "user", "content": f"请先读取 `{path}`,解释拟修改点,再等待我确认后执行写入。"}]


@MCP_SERVER.prompt(name="run_processor", title="Run Processor", description="生成运行处理器的推荐提示词。")
def run_processor_prompt(
    path: Annotated[str, Field(description="目标文件或目录路径")],
    processor_type: Annotated[str, Field(description="处理器类型")],
) -> list[dict[str, Any]]:
    return [{"role": "user", "content": f"请检查 `{path}` 是否适合运行处理器 `{processor_type}`,确认参数后再执行 processors_run。"}]


@MCP_SERVER.prompt(name="fetch_web_page", title="Fetch Web Page", description="生成抓取网页的推荐提示词。")
def fetch_web_page_prompt(url: Annotated[str, Field(description="目标网址")]) -> list[dict[str, Any]]:
    return [{"role": "user", "content": f"请抓取网页 `{url}`,并总结标题、正文与关键链接。必要时调用 web_fetch。"}]
|
||||
|
||||
|
||||
# ASGI app exposing the MCP server over streamable HTTP (mounted in-process).
MCP_HTTP_APP = MCP_SERVER.streamable_http_app()
|
||||
|
||||
def loopback_httpx_client_factory(app):
    """Return an httpx client factory that routes requests into *app* via ASGI.

    This lets the MCP client talk to the in-process server without opening
    a real network socket.
    """
    base_url = INTERNAL_MCP_BASE_URL.rstrip("/")

    def factory(headers: dict[str, str] | None = None, timeout=None, auth=None) -> httpx.AsyncClient:
        # Each client gets its own transport so lifetimes stay independent.
        transport = httpx.ASGITransport(app=app)
        return httpx.AsyncClient(
            transport=transport,
            base_url=base_url,
            headers=headers,
            timeout=timeout,
            auth=auth,
            follow_redirects=True,
        )

    return factory
|
||||
|
||||
|
||||
async def create_loopback_mcp_headers(user: User | None, current_path: str | None = None) -> dict[str, str]:
    """Build auth + context headers for an in-process MCP call.

    Issues a short-lived (5 minute) bearer token for *user* and forwards
    the optional current-path hint in the dedicated header.
    """
    headers: dict[str, str] = {}
    if user is not None:
        claims = {"sub": user.username}
        token = await AuthService.create_access_token(
            claims,
            expires_delta=timedelta(minutes=5),
        )
        headers["Authorization"] = f"Bearer {token}"
    if current_path:
        headers[CURRENT_PATH_HEADER] = current_path
    return headers
|
||||
|
||||
|
||||
@asynccontextmanager
async def mcp_client_session(user: User | None, current_path: str | None = None):
    """Async context manager yielding an initialized in-process MCP session.

    Authenticates as *user* via a short-lived loopback token and routes
    all traffic through the in-process ASGI transport.
    """
    headers = await create_loopback_mcp_headers(user, current_path)
    async with streamablehttp_client(
        INTERNAL_MCP_BASE_URL,
        headers=headers,
        httpx_client_factory=loopback_httpx_client_factory(MCP_HTTP_APP),
    ) as (read_stream, write_stream, _):
        async with ClientSession(read_stream, write_stream) as session:
            # Perform the MCP handshake before handing the session out.
            await session.initialize()
            yield session
|
||||
|
||||
|
||||
def mcp_content_to_text(content: list[Any], structured_content: dict[str, Any] | None = None) -> str:
|
||||
if structured_content is not None:
|
||||
try:
|
||||
return json.dumps(structured_content, ensure_ascii=False)
|
||||
except TypeError:
|
||||
pass
|
||||
|
||||
text_parts: list[str] = []
|
||||
for item in content:
|
||||
item_type = getattr(item, "type", None)
|
||||
if item_type == "text":
|
||||
text = getattr(item, "text", None)
|
||||
if isinstance(text, str) and text:
|
||||
text_parts.append(text)
|
||||
if text_parts:
|
||||
return "\n".join(text_parts)
|
||||
return tool_result_to_content({"error": "empty_mcp_content"})
|
||||
|
||||
|
||||
def encode_resource_path(path: str) -> str:
|
||||
return quote(path.lstrip("/"), safe="")
|
||||
@@ -8,27 +8,27 @@ from fastapi import HTTPException
|
||||
|
||||
from domain.ai import AIProviderService, MissingModelError, chat_completion, chat_completion_stream
|
||||
from domain.auth import User
|
||||
from .tools import get_tool, openai_tools, tool_result_to_content
|
||||
from .types import AgentChatRequest, PendingToolCall
|
||||
|
||||
from .mcp import mcp_client_session, mcp_content_to_text
|
||||
from .tools import tool_result_to_content
|
||||
from .types import AgentChatRequest, PendingMcpCall
|
||||
|
||||
|
||||
def _normalize_path(p: Optional[str]) -> Optional[str]:
|
||||
if not p:
|
||||
def _normalize_path(path: Optional[str]) -> Optional[str]:
|
||||
if not path:
|
||||
return None
|
||||
s = str(p).strip()
|
||||
if not s:
|
||||
value = str(path).strip().replace("\\", "/")
|
||||
if not value:
|
||||
return None
|
||||
s = s.replace("\\", "/")
|
||||
if not s.startswith("/"):
|
||||
s = "/" + s
|
||||
s = s.rstrip("/") or "/"
|
||||
return s
|
||||
if not value.startswith("/"):
|
||||
value = "/" + value
|
||||
return value.rstrip("/") or "/"
|
||||
|
||||
|
||||
def _build_system_prompt(current_path: Optional[str]) -> str:
|
||||
lines = [
|
||||
"你是 Foxel 的 AI 助手。",
|
||||
"你可以通过工具对文件/目录进行查询、读写、移动、复制、删除,以及运行处理器(processor)。",
|
||||
"你可以通过 MCP 工具对文件/目录进行查询、读写、移动、复制、删除,以及运行处理器(processor)。",
|
||||
"",
|
||||
"可用工具:",
|
||||
"- time:获取服务器当前时间(精确到秒,英文星期),支持 year/month/day/hour/minute/second 偏移。",
|
||||
@@ -60,13 +60,13 @@ def _build_system_prompt(current_path: Optional[str]) -> str:
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
def _ensure_tool_call_ids(message: Dict[str, Any]) -> Dict[str, Any]:
|
||||
tool_calls = message.get("tool_calls")
|
||||
if not isinstance(tool_calls, list):
|
||||
def _ensure_mcp_call_ids(message: Dict[str, Any]) -> Dict[str, Any]:
|
||||
mcp_calls = message.get("mcp_calls")
|
||||
if not isinstance(mcp_calls, list):
|
||||
return message
|
||||
|
||||
changed = False
|
||||
for idx, call in enumerate(tool_calls):
|
||||
for idx, call in enumerate(mcp_calls):
|
||||
if not isinstance(call, dict):
|
||||
continue
|
||||
call_id = call.get("id")
|
||||
@@ -76,57 +76,54 @@ def _ensure_tool_call_ids(message: Dict[str, Any]) -> Dict[str, Any]:
|
||||
changed = True
|
||||
|
||||
if changed:
|
||||
message["tool_calls"] = tool_calls
|
||||
message["mcp_calls"] = mcp_calls
|
||||
return message
|
||||
|
||||
|
||||
def _extract_pending(tool_call: Dict[str, Any], requires_confirmation: bool) -> PendingToolCall:
|
||||
call_id = str(tool_call.get("id") or "")
|
||||
fn = tool_call.get("function") or {}
|
||||
name = str((fn.get("name") if isinstance(fn, dict) else None) or "")
|
||||
raw_args = fn.get("arguments") if isinstance(fn, dict) else None
|
||||
arguments: Dict[str, Any] = {}
|
||||
if isinstance(raw_args, str) and raw_args.strip():
|
||||
try:
|
||||
parsed = json.loads(raw_args)
|
||||
if isinstance(parsed, dict):
|
||||
arguments = parsed
|
||||
except json.JSONDecodeError:
|
||||
arguments = {}
|
||||
return PendingToolCall(
|
||||
id=call_id,
|
||||
name=name,
|
||||
def _extract_pending(mcp_call: Dict[str, Any], requires_confirmation: bool) -> PendingMcpCall:
|
||||
arguments = mcp_call.get("arguments") if isinstance(mcp_call.get("arguments"), dict) else {}
|
||||
return PendingMcpCall(
|
||||
id=str(mcp_call.get("id") or ""),
|
||||
name=str(mcp_call.get("name") or ""),
|
||||
arguments=arguments,
|
||||
requires_confirmation=requires_confirmation,
|
||||
)
|
||||
|
||||
|
||||
def _find_last_assistant_tool_calls(messages: List[Dict[str, Any]]) -> Tuple[int, Dict[str, Any]]:
|
||||
def _find_last_assistant_mcp_calls(messages: List[Dict[str, Any]]) -> Tuple[int, Dict[str, Any]]:
|
||||
for idx in range(len(messages) - 1, -1, -1):
|
||||
msg = messages[idx]
|
||||
if not isinstance(msg, dict):
|
||||
continue
|
||||
if msg.get("role") != "assistant":
|
||||
continue
|
||||
tool_calls = msg.get("tool_calls")
|
||||
if isinstance(tool_calls, list) and tool_calls:
|
||||
mcp_calls = msg.get("mcp_calls")
|
||||
if isinstance(mcp_calls, list) and mcp_calls:
|
||||
return idx, msg
|
||||
raise HTTPException(status_code=400, detail="没有可确认的待执行操作")
|
||||
|
||||
|
||||
def _existing_tool_result_ids(messages: List[Dict[str, Any]]) -> set[str]:
|
||||
def _existing_mcp_result_ids(messages: List[Dict[str, Any]]) -> set[str]:
|
||||
ids: set[str] = set()
|
||||
for msg in messages:
|
||||
if not isinstance(msg, dict):
|
||||
continue
|
||||
if msg.get("role") != "tool":
|
||||
continue
|
||||
tool_call_id = msg.get("tool_call_id")
|
||||
if isinstance(tool_call_id, str) and tool_call_id.strip():
|
||||
ids.add(tool_call_id)
|
||||
call_id = msg.get("mcp_call_id")
|
||||
if isinstance(call_id, str) and call_id.strip():
|
||||
ids.add(call_id)
|
||||
return ids
|
||||
|
||||
|
||||
def _tool_requires_confirmation(tool_descriptor: Dict[str, Any]) -> bool:
|
||||
meta = tool_descriptor.get("meta") if isinstance(tool_descriptor.get("meta"), dict) else {}
|
||||
if "requires_confirmation" in meta:
|
||||
return bool(meta.get("requires_confirmation"))
|
||||
annotations = tool_descriptor.get("annotations") if isinstance(tool_descriptor.get("annotations"), dict) else {}
|
||||
return not bool(annotations.get("readOnlyHint"))
|
||||
|
||||
|
||||
async def _choose_chat_ability() -> str:
|
||||
tools_model = await AIProviderService.get_default_model("tools")
|
||||
return "tools" if tools_model else "chat"
|
||||
@@ -142,245 +139,91 @@ def _format_exc(exc: BaseException) -> str:
|
||||
return text if text else exc.__class__.__name__
|
||||
|
||||
|
||||
async def _list_mcp_tools(session) -> List[Dict[str, Any]]:
|
||||
result = await session.list_tools()
|
||||
tools: List[Dict[str, Any]] = []
|
||||
for item in result.tools:
|
||||
annotations = getattr(item, "annotations", None)
|
||||
meta = getattr(item, "meta", None)
|
||||
tools.append(
|
||||
{
|
||||
"name": str(getattr(item, "name", "") or ""),
|
||||
"description": str(getattr(item, "description", "") or ""),
|
||||
"input_schema": getattr(item, "inputSchema", None) or {},
|
||||
"annotations": annotations.model_dump(exclude_none=True) if annotations is not None else {},
|
||||
"meta": meta if isinstance(meta, dict) else {},
|
||||
}
|
||||
)
|
||||
return tools
|
||||
|
||||
|
||||
async def _execute_mcp_call(session, name: str, arguments: Dict[str, Any]) -> str:
|
||||
result = await session.call_tool(name, arguments)
|
||||
return mcp_content_to_text(result.content, result.structuredContent)
|
||||
|
||||
|
||||
class AgentService:
|
||||
@classmethod
|
||||
async def chat(cls, req: AgentChatRequest, user: Optional[User]) -> Dict[str, Any]:
|
||||
history: List[Dict[str, Any]] = list(req.messages or [])
|
||||
current_path = _normalize_path(req.context.current_path if req.context else None)
|
||||
|
||||
system_prompt = _build_system_prompt(current_path)
|
||||
internal_messages: List[Dict[str, Any]] = [{"role": "system", "content": system_prompt}] + history
|
||||
|
||||
new_messages: List[Dict[str, Any]] = []
|
||||
pending: List[PendingToolCall] = []
|
||||
pending: List[PendingMcpCall] = []
|
||||
|
||||
approved_ids = {i for i in (req.approved_tool_call_ids or []) if isinstance(i, str) and i.strip()}
|
||||
rejected_ids = {i for i in (req.rejected_tool_call_ids or []) if isinstance(i, str) and i.strip()}
|
||||
approved_ids = {i for i in (req.approved_mcp_call_ids or []) if isinstance(i, str) and i.strip()}
|
||||
rejected_ids = {i for i in (req.rejected_mcp_call_ids or []) if isinstance(i, str) and i.strip()}
|
||||
|
||||
if approved_ids or rejected_ids:
|
||||
_, last_call_msg = _find_last_assistant_tool_calls(internal_messages)
|
||||
last_call_msg = _ensure_tool_call_ids(last_call_msg)
|
||||
tool_calls = last_call_msg.get("tool_calls") or []
|
||||
call_map: Dict[str, Dict[str, Any]] = {
|
||||
str(c.get("id")): c
|
||||
for c in tool_calls
|
||||
if isinstance(c, dict) and isinstance(c.get("id"), str)
|
||||
}
|
||||
async with mcp_client_session(user, current_path) as mcp_session:
|
||||
tools_schema = await _list_mcp_tools(mcp_session)
|
||||
tool_index = {tool["name"]: tool for tool in tools_schema if tool.get("name")}
|
||||
|
||||
existing_ids = _existing_tool_result_ids(internal_messages)
|
||||
for call_id in approved_ids | rejected_ids:
|
||||
if call_id in existing_ids:
|
||||
continue
|
||||
tool_call = call_map.get(call_id)
|
||||
if not tool_call:
|
||||
continue
|
||||
fn = tool_call.get("function") or {}
|
||||
name = fn.get("name") if isinstance(fn, dict) else None
|
||||
args_raw = fn.get("arguments") if isinstance(fn, dict) else None
|
||||
args: Dict[str, Any] = {}
|
||||
if isinstance(args_raw, str) and args_raw.strip():
|
||||
try:
|
||||
parsed = json.loads(args_raw)
|
||||
if isinstance(parsed, dict):
|
||||
args = parsed
|
||||
except json.JSONDecodeError:
|
||||
args = {}
|
||||
|
||||
spec = get_tool(str(name or ""))
|
||||
if call_id in rejected_ids:
|
||||
content = tool_result_to_content({"canceled": True, "reason": "user_rejected"})
|
||||
tool_msg = {"role": "tool", "tool_call_id": call_id, "content": content}
|
||||
internal_messages.append(tool_msg)
|
||||
new_messages.append(tool_msg)
|
||||
continue
|
||||
|
||||
if not spec:
|
||||
content = tool_result_to_content({"error": f"unknown_tool: {name}"})
|
||||
tool_msg = {"role": "tool", "tool_call_id": call_id, "content": content}
|
||||
internal_messages.append(tool_msg)
|
||||
new_messages.append(tool_msg)
|
||||
continue
|
||||
|
||||
try:
|
||||
result = await spec.handler(args)
|
||||
content = tool_result_to_content(result)
|
||||
except Exception as exc: # noqa: BLE001
|
||||
content = tool_result_to_content({"error": str(exc)})
|
||||
tool_msg = {"role": "tool", "tool_call_id": call_id, "content": content}
|
||||
internal_messages.append(tool_msg)
|
||||
new_messages.append(tool_msg)
|
||||
|
||||
tools_schema = openai_tools()
|
||||
ability = await _choose_chat_ability()
|
||||
max_loops = 4
|
||||
|
||||
for _ in range(max_loops):
|
||||
try:
|
||||
assistant = await chat_completion(
|
||||
internal_messages,
|
||||
ability=ability,
|
||||
tools=tools_schema,
|
||||
tool_choice="auto",
|
||||
timeout=60.0,
|
||||
)
|
||||
except MissingModelError as exc:
|
||||
raise HTTPException(status_code=400, detail=str(exc)) from exc
|
||||
except httpx.HTTPStatusError as exc:
|
||||
raise HTTPException(status_code=502, detail=f"对话请求失败: {exc}") from exc
|
||||
except httpx.RequestError as exc:
|
||||
raise HTTPException(status_code=502, detail=f"对话请求异常: {exc}") from exc
|
||||
|
||||
assistant = _ensure_tool_call_ids(assistant)
|
||||
internal_messages.append(assistant)
|
||||
new_messages.append(assistant)
|
||||
|
||||
tool_calls = assistant.get("tool_calls")
|
||||
if not isinstance(tool_calls, list) or not tool_calls:
|
||||
break
|
||||
|
||||
pending = []
|
||||
for call in tool_calls:
|
||||
if not isinstance(call, dict):
|
||||
continue
|
||||
call_id = str(call.get("id") or "")
|
||||
fn = call.get("function") or {}
|
||||
name = fn.get("name") if isinstance(fn, dict) else None
|
||||
args_raw = fn.get("arguments") if isinstance(fn, dict) else None
|
||||
args: Dict[str, Any] = {}
|
||||
if isinstance(args_raw, str) and args_raw.strip():
|
||||
try:
|
||||
parsed = json.loads(args_raw)
|
||||
if isinstance(parsed, dict):
|
||||
args = parsed
|
||||
except json.JSONDecodeError:
|
||||
args = {}
|
||||
|
||||
spec = get_tool(str(name or ""))
|
||||
if not spec:
|
||||
content = tool_result_to_content({"error": f"unknown_tool: {name}"})
|
||||
tool_msg = {"role": "tool", "tool_call_id": call_id, "content": content}
|
||||
internal_messages.append(tool_msg)
|
||||
new_messages.append(tool_msg)
|
||||
continue
|
||||
|
||||
if spec.requires_confirmation and not req.auto_execute:
|
||||
pending.append(_extract_pending(call, True))
|
||||
continue
|
||||
|
||||
try:
|
||||
result = await spec.handler(args)
|
||||
content = tool_result_to_content(result)
|
||||
except Exception as exc: # noqa: BLE001
|
||||
content = tool_result_to_content({"error": str(exc)})
|
||||
tool_msg = {"role": "tool", "tool_call_id": call_id, "content": content}
|
||||
internal_messages.append(tool_msg)
|
||||
new_messages.append(tool_msg)
|
||||
|
||||
if pending:
|
||||
break
|
||||
|
||||
payload: Dict[str, Any] = {"messages": new_messages}
|
||||
if pending:
|
||||
payload["pending_tool_calls"] = [p.model_dump() for p in pending]
|
||||
return payload
|
||||
|
||||
@classmethod
|
||||
async def chat_stream(cls, req: AgentChatRequest, user: Optional[User]):
|
||||
history: List[Dict[str, Any]] = list(req.messages or [])
|
||||
current_path = _normalize_path(req.context.current_path if req.context else None)
|
||||
|
||||
system_prompt = _build_system_prompt(current_path)
|
||||
internal_messages: List[Dict[str, Any]] = [{"role": "system", "content": system_prompt}] + history
|
||||
|
||||
new_messages: List[Dict[str, Any]] = []
|
||||
pending: List[PendingToolCall] = []
|
||||
|
||||
approved_ids = {i for i in (req.approved_tool_call_ids or []) if isinstance(i, str) and i.strip()}
|
||||
rejected_ids = {i for i in (req.rejected_tool_call_ids or []) if isinstance(i, str) and i.strip()}
|
||||
|
||||
try:
|
||||
if approved_ids or rejected_ids:
|
||||
_, last_call_msg = _find_last_assistant_tool_calls(internal_messages)
|
||||
last_call_msg = _ensure_tool_call_ids(last_call_msg)
|
||||
tool_calls = last_call_msg.get("tool_calls") or []
|
||||
_, last_call_msg = _find_last_assistant_mcp_calls(internal_messages)
|
||||
last_call_msg = _ensure_mcp_call_ids(last_call_msg)
|
||||
mcp_calls = last_call_msg.get("mcp_calls") or []
|
||||
call_map: Dict[str, Dict[str, Any]] = {
|
||||
str(c.get("id")): c
|
||||
for c in tool_calls
|
||||
if isinstance(c, dict) and isinstance(c.get("id"), str)
|
||||
str(call.get("id")): call
|
||||
for call in mcp_calls
|
||||
if isinstance(call, dict) and isinstance(call.get("id"), str)
|
||||
}
|
||||
|
||||
existing_ids = _existing_tool_result_ids(internal_messages)
|
||||
existing_ids = _existing_mcp_result_ids(internal_messages)
|
||||
for call_id in approved_ids | rejected_ids:
|
||||
if call_id in existing_ids:
|
||||
continue
|
||||
tool_call = call_map.get(call_id)
|
||||
if not tool_call:
|
||||
mcp_call = call_map.get(call_id)
|
||||
if not mcp_call:
|
||||
continue
|
||||
fn = tool_call.get("function") or {}
|
||||
name = fn.get("name") if isinstance(fn, dict) else None
|
||||
args_raw = fn.get("arguments") if isinstance(fn, dict) else None
|
||||
args: Dict[str, Any] = {}
|
||||
if isinstance(args_raw, str) and args_raw.strip():
|
||||
try:
|
||||
parsed = json.loads(args_raw)
|
||||
if isinstance(parsed, dict):
|
||||
args = parsed
|
||||
except json.JSONDecodeError:
|
||||
args = {}
|
||||
name = str(mcp_call.get("name") or "")
|
||||
arguments = mcp_call.get("arguments") if isinstance(mcp_call.get("arguments"), dict) else {}
|
||||
tool_desc = tool_index.get(name)
|
||||
|
||||
spec = get_tool(str(name or ""))
|
||||
if call_id in rejected_ids:
|
||||
content = tool_result_to_content({"canceled": True, "reason": "user_rejected"})
|
||||
tool_msg = {"role": "tool", "tool_call_id": call_id, "content": content}
|
||||
internal_messages.append(tool_msg)
|
||||
new_messages.append(tool_msg)
|
||||
yield _sse("tool_end", {"tool_call_id": call_id, "name": str(name or ""), "message": tool_msg})
|
||||
continue
|
||||
|
||||
if not spec:
|
||||
elif not tool_desc:
|
||||
content = tool_result_to_content({"error": f"unknown_tool: {name}"})
|
||||
tool_msg = {"role": "tool", "tool_call_id": call_id, "content": content}
|
||||
internal_messages.append(tool_msg)
|
||||
new_messages.append(tool_msg)
|
||||
yield _sse("tool_end", {"tool_call_id": call_id, "name": str(name or ""), "message": tool_msg})
|
||||
continue
|
||||
|
||||
yield _sse("tool_start", {"tool_call_id": call_id, "name": spec.name})
|
||||
try:
|
||||
result = await spec.handler(args)
|
||||
content = tool_result_to_content(result)
|
||||
except Exception as exc: # noqa: BLE001
|
||||
content = tool_result_to_content({"error": str(exc)})
|
||||
tool_msg = {"role": "tool", "tool_call_id": call_id, "content": content}
|
||||
else:
|
||||
try:
|
||||
content = await _execute_mcp_call(mcp_session, name, arguments)
|
||||
except Exception as exc: # noqa: BLE001
|
||||
content = tool_result_to_content({"error": str(exc)})
|
||||
tool_msg = {"role": "tool", "mcp_call_id": call_id, "content": content}
|
||||
internal_messages.append(tool_msg)
|
||||
new_messages.append(tool_msg)
|
||||
yield _sse("tool_end", {"tool_call_id": call_id, "name": spec.name, "message": tool_msg})
|
||||
|
||||
tools_schema = openai_tools()
|
||||
ability = await _choose_chat_ability()
|
||||
max_loops = 4
|
||||
|
||||
for _ in range(max_loops):
|
||||
assistant_event_id = uuid.uuid4().hex
|
||||
yield _sse("assistant_start", {"id": assistant_event_id})
|
||||
|
||||
assistant_message: Dict[str, Any] | None = None
|
||||
for _ in range(8):
|
||||
try:
|
||||
async for event in chat_completion_stream(
|
||||
assistant = await chat_completion(
|
||||
internal_messages,
|
||||
ability=ability,
|
||||
tools=tools_schema,
|
||||
tool_choice="auto",
|
||||
timeout=60.0,
|
||||
):
|
||||
if event.get("type") == "delta":
|
||||
delta = event.get("delta")
|
||||
if isinstance(delta, str) and delta:
|
||||
yield _sse("assistant_delta", {"id": assistant_event_id, "delta": delta})
|
||||
elif event.get("type") == "message":
|
||||
msg = event.get("message")
|
||||
if isinstance(msg, dict):
|
||||
assistant_message = msg
|
||||
)
|
||||
except MissingModelError as exc:
|
||||
raise HTTPException(status_code=400, detail=_format_exc(exc)) from exc
|
||||
except httpx.HTTPStatusError as exc:
|
||||
@@ -388,66 +231,196 @@ class AgentService:
|
||||
except httpx.RequestError as exc:
|
||||
raise HTTPException(status_code=502, detail=f"对话请求异常: {_format_exc(exc)}") from exc
|
||||
|
||||
if not assistant_message:
|
||||
assistant_message = {"role": "assistant", "content": ""}
|
||||
assistant = _ensure_mcp_call_ids(assistant if isinstance(assistant, dict) else {"role": "assistant", "content": ""})
|
||||
internal_messages.append(assistant)
|
||||
new_messages.append(assistant)
|
||||
|
||||
assistant_message = _ensure_tool_call_ids(assistant_message)
|
||||
internal_messages.append(assistant_message)
|
||||
new_messages.append(assistant_message)
|
||||
yield _sse("assistant_end", {"id": assistant_event_id, "message": assistant_message})
|
||||
|
||||
tool_calls = assistant_message.get("tool_calls")
|
||||
if not isinstance(tool_calls, list) or not tool_calls:
|
||||
mcp_calls = assistant.get("mcp_calls")
|
||||
if not isinstance(mcp_calls, list) or not mcp_calls:
|
||||
break
|
||||
|
||||
pending = []
|
||||
for call in tool_calls:
|
||||
for call in mcp_calls:
|
||||
if not isinstance(call, dict):
|
||||
continue
|
||||
call_id = str(call.get("id") or "")
|
||||
fn = call.get("function") or {}
|
||||
name = fn.get("name") if isinstance(fn, dict) else None
|
||||
args_raw = fn.get("arguments") if isinstance(fn, dict) else None
|
||||
args: Dict[str, Any] = {}
|
||||
if isinstance(args_raw, str) and args_raw.strip():
|
||||
try:
|
||||
parsed = json.loads(args_raw)
|
||||
if isinstance(parsed, dict):
|
||||
args = parsed
|
||||
except json.JSONDecodeError:
|
||||
args = {}
|
||||
name = str(call.get("name") or "")
|
||||
arguments = call.get("arguments") if isinstance(call.get("arguments"), dict) else {}
|
||||
tool_desc = tool_index.get(name)
|
||||
|
||||
spec = get_tool(str(name or ""))
|
||||
if not spec:
|
||||
if not tool_desc:
|
||||
content = tool_result_to_content({"error": f"unknown_tool: {name}"})
|
||||
tool_msg = {"role": "tool", "tool_call_id": call_id, "content": content}
|
||||
tool_msg = {"role": "tool", "mcp_call_id": call_id, "content": content}
|
||||
internal_messages.append(tool_msg)
|
||||
new_messages.append(tool_msg)
|
||||
yield _sse("tool_end", {"tool_call_id": call_id, "name": str(name or ""), "message": tool_msg})
|
||||
continue
|
||||
|
||||
if spec.requires_confirmation and not req.auto_execute:
|
||||
if _tool_requires_confirmation(tool_desc) and not req.auto_execute:
|
||||
pending.append(_extract_pending(call, True))
|
||||
continue
|
||||
|
||||
yield _sse("tool_start", {"tool_call_id": call_id, "name": spec.name})
|
||||
try:
|
||||
result = await spec.handler(args)
|
||||
content = tool_result_to_content(result)
|
||||
content = await _execute_mcp_call(mcp_session, name, arguments)
|
||||
except Exception as exc: # noqa: BLE001
|
||||
content = tool_result_to_content({"error": str(exc)})
|
||||
tool_msg = {"role": "tool", "tool_call_id": call_id, "content": content}
|
||||
tool_msg = {"role": "tool", "mcp_call_id": call_id, "content": content}
|
||||
internal_messages.append(tool_msg)
|
||||
new_messages.append(tool_msg)
|
||||
yield _sse("tool_end", {"tool_call_id": call_id, "name": spec.name, "message": tool_msg})
|
||||
|
||||
if pending:
|
||||
yield _sse("pending", {"pending_tool_calls": [p.model_dump() for p in pending]})
|
||||
break
|
||||
|
||||
payload: Dict[str, Any] = {"messages": new_messages}
|
||||
if pending:
|
||||
payload["pending_mcp_calls"] = [item.model_dump() for item in pending]
|
||||
return payload
|
||||
|
||||
@classmethod
|
||||
async def chat_stream(cls, req: AgentChatRequest, user: Optional[User]):
|
||||
history: List[Dict[str, Any]] = list(req.messages or [])
|
||||
current_path = _normalize_path(req.context.current_path if req.context else None)
|
||||
system_prompt = _build_system_prompt(current_path)
|
||||
internal_messages: List[Dict[str, Any]] = [{"role": "system", "content": system_prompt}] + history
|
||||
new_messages: List[Dict[str, Any]] = []
|
||||
pending: List[PendingMcpCall] = []
|
||||
|
||||
approved_ids = {i for i in (req.approved_mcp_call_ids or []) if isinstance(i, str) and i.strip()}
|
||||
rejected_ids = {i for i in (req.rejected_mcp_call_ids or []) if isinstance(i, str) and i.strip()}
|
||||
|
||||
try:
|
||||
async with mcp_client_session(user, current_path) as mcp_session:
|
||||
tools_schema = await _list_mcp_tools(mcp_session)
|
||||
tool_index = {tool["name"]: tool for tool in tools_schema if tool.get("name")}
|
||||
|
||||
if approved_ids or rejected_ids:
|
||||
_, last_call_msg = _find_last_assistant_mcp_calls(internal_messages)
|
||||
last_call_msg = _ensure_mcp_call_ids(last_call_msg)
|
||||
mcp_calls = last_call_msg.get("mcp_calls") or []
|
||||
call_map: Dict[str, Dict[str, Any]] = {
|
||||
str(call.get("id")): call
|
||||
for call in mcp_calls
|
||||
if isinstance(call, dict) and isinstance(call.get("id"), str)
|
||||
}
|
||||
|
||||
existing_ids = _existing_mcp_result_ids(internal_messages)
|
||||
for call_id in approved_ids | rejected_ids:
|
||||
if call_id in existing_ids:
|
||||
continue
|
||||
mcp_call = call_map.get(call_id)
|
||||
if not mcp_call:
|
||||
continue
|
||||
|
||||
name = str(mcp_call.get("name") or "")
|
||||
arguments = mcp_call.get("arguments") if isinstance(mcp_call.get("arguments"), dict) else {}
|
||||
tool_desc = tool_index.get(name)
|
||||
|
||||
if call_id in rejected_ids:
|
||||
content = tool_result_to_content({"canceled": True, "reason": "user_rejected"})
|
||||
tool_msg = {"role": "tool", "mcp_call_id": call_id, "content": content}
|
||||
internal_messages.append(tool_msg)
|
||||
new_messages.append(tool_msg)
|
||||
yield _sse("mcp_call_end", {"mcp_call_id": call_id, "name": name, "message": tool_msg})
|
||||
continue
|
||||
|
||||
if not tool_desc:
|
||||
content = tool_result_to_content({"error": f"unknown_tool: {name}"})
|
||||
tool_msg = {"role": "tool", "mcp_call_id": call_id, "content": content}
|
||||
internal_messages.append(tool_msg)
|
||||
new_messages.append(tool_msg)
|
||||
yield _sse("mcp_call_end", {"mcp_call_id": call_id, "name": name, "message": tool_msg})
|
||||
continue
|
||||
|
||||
yield _sse("mcp_call_start", {"mcp_call_id": call_id, "name": name})
|
||||
try:
|
||||
content = await _execute_mcp_call(mcp_session, name, arguments)
|
||||
except Exception as exc: # noqa: BLE001
|
||||
content = tool_result_to_content({"error": str(exc)})
|
||||
tool_msg = {"role": "tool", "mcp_call_id": call_id, "content": content}
|
||||
internal_messages.append(tool_msg)
|
||||
new_messages.append(tool_msg)
|
||||
yield _sse("mcp_call_end", {"mcp_call_id": call_id, "name": name, "message": tool_msg})
|
||||
|
||||
ability = await _choose_chat_ability()
|
||||
|
||||
for _ in range(8):
|
||||
assistant_event_id = str(uuid.uuid4())
|
||||
yield _sse("assistant_start", {"id": assistant_event_id})
|
||||
|
||||
assistant_message: Dict[str, Any] | None = None
|
||||
try:
|
||||
async for event in chat_completion_stream(
|
||||
internal_messages,
|
||||
ability=ability,
|
||||
tools=tools_schema,
|
||||
tool_choice="auto",
|
||||
timeout=60.0,
|
||||
):
|
||||
event_type = event.get("type")
|
||||
if event_type == "delta":
|
||||
delta = event.get("delta")
|
||||
if isinstance(delta, str) and delta:
|
||||
yield _sse("assistant_delta", {"id": assistant_event_id, "delta": delta})
|
||||
elif event_type == "message":
|
||||
msg = event.get("message")
|
||||
if isinstance(msg, dict):
|
||||
assistant_message = msg
|
||||
except MissingModelError as exc:
|
||||
raise HTTPException(status_code=400, detail=_format_exc(exc)) from exc
|
||||
except httpx.HTTPStatusError as exc:
|
||||
raise HTTPException(status_code=502, detail=f"对话请求失败: {_format_exc(exc)}") from exc
|
||||
except httpx.RequestError as exc:
|
||||
raise HTTPException(status_code=502, detail=f"对话请求异常: {_format_exc(exc)}") from exc
|
||||
|
||||
if not assistant_message:
|
||||
assistant_message = {"role": "assistant", "content": ""}
|
||||
|
||||
assistant_message = _ensure_mcp_call_ids(assistant_message)
|
||||
internal_messages.append(assistant_message)
|
||||
new_messages.append(assistant_message)
|
||||
yield _sse("assistant_end", {"id": assistant_event_id, "message": assistant_message})
|
||||
|
||||
mcp_calls = assistant_message.get("mcp_calls")
|
||||
if not isinstance(mcp_calls, list) or not mcp_calls:
|
||||
break
|
||||
|
||||
pending = []
|
||||
for call in mcp_calls:
|
||||
if not isinstance(call, dict):
|
||||
continue
|
||||
call_id = str(call.get("id") or "")
|
||||
name = str(call.get("name") or "")
|
||||
arguments = call.get("arguments") if isinstance(call.get("arguments"), dict) else {}
|
||||
tool_desc = tool_index.get(name)
|
||||
|
||||
if not tool_desc:
|
||||
content = tool_result_to_content({"error": f"unknown_tool: {name}"})
|
||||
tool_msg = {"role": "tool", "mcp_call_id": call_id, "content": content}
|
||||
internal_messages.append(tool_msg)
|
||||
new_messages.append(tool_msg)
|
||||
yield _sse("mcp_call_end", {"mcp_call_id": call_id, "name": name, "message": tool_msg})
|
||||
continue
|
||||
|
||||
if _tool_requires_confirmation(tool_desc) and not req.auto_execute:
|
||||
pending.append(_extract_pending(call, True))
|
||||
continue
|
||||
|
||||
yield _sse("mcp_call_start", {"mcp_call_id": call_id, "name": name})
|
||||
try:
|
||||
content = await _execute_mcp_call(mcp_session, name, arguments)
|
||||
except Exception as exc: # noqa: BLE001
|
||||
content = tool_result_to_content({"error": str(exc)})
|
||||
tool_msg = {"role": "tool", "mcp_call_id": call_id, "content": content}
|
||||
internal_messages.append(tool_msg)
|
||||
new_messages.append(tool_msg)
|
||||
yield _sse("mcp_call_end", {"mcp_call_id": call_id, "name": name, "message": tool_msg})
|
||||
|
||||
if pending:
|
||||
yield _sse("pending", {"pending_mcp_calls": [item.model_dump() for item in pending]})
|
||||
break
|
||||
|
||||
payload: Dict[str, Any] = {"messages": new_messages}
|
||||
if pending:
|
||||
payload["pending_tool_calls"] = [p.model_dump() for p in pending]
|
||||
payload["pending_mcp_calls"] = [item.model_dump() for item in pending]
|
||||
yield _sse("done", payload)
|
||||
|
||||
except asyncio.CancelledError:
|
||||
@@ -460,13 +433,11 @@ class AgentService:
|
||||
new_messages.append({"role": "assistant", "content": content})
|
||||
payload: Dict[str, Any] = {"messages": new_messages}
|
||||
if pending:
|
||||
payload["pending_tool_calls"] = [p.model_dump() for p in pending]
|
||||
payload["pending_mcp_calls"] = [item.model_dump() for item in pending]
|
||||
yield _sse("done", payload)
|
||||
return
|
||||
except Exception as exc: # noqa: BLE001
|
||||
new_messages.append({"role": "assistant", "content": f"服务端异常: {_format_exc(exc)}"})
|
||||
payload: Dict[str, Any] = {"messages": new_messages}
|
||||
if pending:
|
||||
payload["pending_tool_calls"] = [p.model_dump() for p in pending]
|
||||
payload["pending_mcp_calls"] = [item.model_dump() for item in pending]
|
||||
yield _sse("done", payload)
|
||||
return
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from .base import ToolSpec, tool_result_to_content
|
||||
from .base import McpToolDescriptor, ToolSpec, tool_result_to_content, tool_spec_to_mcp_descriptor
|
||||
from .processors import TOOLS as PROCESSOR_TOOLS
|
||||
from .time import TOOLS as TIME_TOOLS
|
||||
from .vfs import TOOLS as VFS_TOOLS
|
||||
@@ -15,23 +15,19 @@ def get_tool(name: str) -> Optional[ToolSpec]:
|
||||
return TOOLS.get(name)
|
||||
|
||||
|
||||
def openai_tools() -> List[Dict[str, Any]]:
|
||||
out: List[Dict[str, Any]] = []
|
||||
for spec in TOOLS.values():
|
||||
out.append({
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": spec.name,
|
||||
"description": spec.description,
|
||||
"parameters": spec.parameters,
|
||||
},
|
||||
})
|
||||
return out
|
||||
def list_tool_specs() -> List[ToolSpec]:
|
||||
return list(TOOLS.values())
|
||||
|
||||
|
||||
def mcp_tool_descriptors() -> List[McpToolDescriptor]:
|
||||
return [tool_spec_to_mcp_descriptor(spec) for spec in TOOLS.values()]
|
||||
|
||||
|
||||
__all__ = [
|
||||
"McpToolDescriptor",
|
||||
"ToolSpec",
|
||||
"get_tool",
|
||||
"openai_tools",
|
||||
"list_tool_specs",
|
||||
"mcp_tool_descriptors",
|
||||
"tool_result_to_content",
|
||||
]
|
||||
|
||||
@@ -3,6 +3,16 @@ from dataclasses import dataclass
|
||||
from typing import Any, Awaitable, Callable, Dict, List, Optional
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class McpToolDescriptor:
|
||||
name: str
|
||||
description: str
|
||||
input_schema: Dict[str, Any]
|
||||
annotations: Dict[str, Any]
|
||||
meta: Dict[str, Any]
|
||||
requires_confirmation: bool
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class ToolSpec:
|
||||
name: str
|
||||
@@ -141,9 +151,31 @@ def _normalize_tool_result(result: Any) -> Dict[str, Any]:
|
||||
return {"ok": True, "summary": summary, "view": view, "data": result}
|
||||
|
||||
|
||||
def normalize_tool_result(result: Any) -> Dict[str, Any]:
|
||||
return _normalize_tool_result(result)
|
||||
|
||||
|
||||
def tool_result_to_content(result: Any) -> str:
|
||||
payload = _normalize_tool_result(result)
|
||||
payload = normalize_tool_result(result)
|
||||
try:
|
||||
return json.dumps(payload, ensure_ascii=False, default=str)
|
||||
except TypeError:
|
||||
return json.dumps({"ok": False, "summary": "error", "view": {"type": "error", "message": "error"}}, ensure_ascii=False)
|
||||
|
||||
|
||||
def tool_spec_to_mcp_descriptor(spec: ToolSpec) -> McpToolDescriptor:
|
||||
read_only = not spec.requires_confirmation
|
||||
annotations: Dict[str, Any] = {
|
||||
"readOnlyHint": read_only,
|
||||
"destructiveHint": bool(spec.requires_confirmation),
|
||||
}
|
||||
if spec.name == "web_fetch":
|
||||
annotations["openWorldHint"] = True
|
||||
return McpToolDescriptor(
|
||||
name=spec.name,
|
||||
description=spec.description,
|
||||
input_schema=spec.parameters,
|
||||
annotations=annotations,
|
||||
meta={"requires_confirmation": spec.requires_confirmation},
|
||||
requires_confirmation=spec.requires_confirmation,
|
||||
)
|
||||
|
||||
@@ -10,14 +10,19 @@ class AgentChatContext(BaseModel):
|
||||
class AgentChatRequest(BaseModel):
|
||||
messages: List[Dict[str, Any]] = Field(default_factory=list)
|
||||
auto_execute: bool = False
|
||||
approved_tool_call_ids: List[str] = Field(default_factory=list)
|
||||
rejected_tool_call_ids: List[str] = Field(default_factory=list)
|
||||
approved_mcp_call_ids: List[str] = Field(default_factory=list)
|
||||
rejected_mcp_call_ids: List[str] = Field(default_factory=list)
|
||||
context: Optional[AgentChatContext] = None
|
||||
|
||||
|
||||
class PendingToolCall(BaseModel):
|
||||
class McpCall(BaseModel):
|
||||
id: str
|
||||
name: str
|
||||
arguments: Dict[str, Any] = Field(default_factory=dict)
|
||||
|
||||
|
||||
class PendingMcpCall(BaseModel):
|
||||
id: str
|
||||
name: str
|
||||
arguments: Dict[str, Any] = Field(default_factory=dict)
|
||||
requires_confirmation: bool = True
|
||||
|
||||
|
||||
@@ -267,19 +267,24 @@ async def get_vector_db_config(request: Request, user: User = Depends(get_curren
|
||||
async def update_vector_db_config(
|
||||
request: Request, payload: VectorDBConfigPayload, user: User = Depends(get_current_active_user)
|
||||
):
|
||||
entry = get_provider_entry(payload.type)
|
||||
provider_type = str(payload.type or "").strip()
|
||||
if not provider_type:
|
||||
raise HTTPException(status_code=400, detail="向量数据库类型不能为空")
|
||||
normalized_config = VectorDBConfigManager.normalize_config(payload.config)
|
||||
|
||||
entry = get_provider_entry(provider_type)
|
||||
if not entry:
|
||||
raise HTTPException(
|
||||
status_code=400, detail=f"未知的向量数据库类型: {payload.type}")
|
||||
status_code=400, detail=f"未知的向量数据库类型: {provider_type}")
|
||||
if not entry.get("enabled", True):
|
||||
raise HTTPException(status_code=400, detail="该向量数据库类型暂不可用")
|
||||
|
||||
provider_cls = get_provider_class(payload.type)
|
||||
provider_cls = get_provider_class(provider_type)
|
||||
if not provider_cls:
|
||||
raise HTTPException(
|
||||
status_code=400, detail=f"未找到类型 {payload.type} 对应的实现")
|
||||
status_code=400, detail=f"未找到类型 {provider_type} 对应的实现")
|
||||
|
||||
test_provider = provider_cls(payload.config)
|
||||
test_provider = provider_cls(normalized_config)
|
||||
try:
|
||||
await test_provider.initialize()
|
||||
except Exception as exc:
|
||||
@@ -293,7 +298,7 @@ async def update_vector_db_config(
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
await VectorDBConfigManager.save_config(payload.type, payload.config)
|
||||
await VectorDBConfigManager.save_config(provider_type, normalized_config)
|
||||
service = VectorDBService()
|
||||
await service.reload()
|
||||
config_data = await service.current_provider()
|
||||
|
||||
@@ -15,6 +15,102 @@ class MissingModelError(RuntimeError):
|
||||
pass
|
||||
|
||||
|
||||
def _mcp_tools_to_openai_wire(tools: List[Dict[str, Any]] | None) -> List[Dict[str, Any]] | None:
|
||||
if not tools:
|
||||
return None
|
||||
out: List[Dict[str, Any]] = []
|
||||
for tool in tools:
|
||||
if not isinstance(tool, dict):
|
||||
continue
|
||||
name = tool.get("name")
|
||||
if not isinstance(name, str) or not name.strip():
|
||||
continue
|
||||
out.append(
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": name,
|
||||
"description": str(tool.get("description") or ""),
|
||||
"parameters": tool.get("input_schema") if isinstance(tool.get("input_schema"), dict) else {},
|
||||
},
|
||||
}
|
||||
)
|
||||
return out
|
||||
|
||||
|
||||
def _mcp_messages_to_openai_wire(messages: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
|
||||
out: List[Dict[str, Any]] = []
|
||||
for message in messages:
|
||||
if not isinstance(message, dict):
|
||||
continue
|
||||
item = dict(message)
|
||||
mcp_call_id = item.pop("mcp_call_id", None)
|
||||
if isinstance(mcp_call_id, str) and mcp_call_id.strip():
|
||||
item["tool_call_id"] = mcp_call_id
|
||||
|
||||
mcp_calls = item.pop("mcp_calls", None)
|
||||
if isinstance(mcp_calls, list):
|
||||
tool_calls: List[Dict[str, Any]] = []
|
||||
for idx, call in enumerate(mcp_calls):
|
||||
if not isinstance(call, dict):
|
||||
continue
|
||||
name = call.get("name")
|
||||
if not isinstance(name, str) or not name.strip():
|
||||
continue
|
||||
arguments = call.get("arguments") if isinstance(call.get("arguments"), dict) else {}
|
||||
tool_calls.append(
|
||||
{
|
||||
"id": str(call.get("id") or f"call_{idx}"),
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": name,
|
||||
"arguments": json.dumps(arguments, ensure_ascii=False),
|
||||
},
|
||||
}
|
||||
)
|
||||
if tool_calls:
|
||||
item["tool_calls"] = tool_calls
|
||||
out.append(item)
|
||||
return out
|
||||
|
||||
|
||||
def _openai_wire_message_to_mcp(message: Dict[str, Any]) -> Dict[str, Any]:
|
||||
out = dict(message)
|
||||
tool_call_id = out.pop("tool_call_id", None)
|
||||
if isinstance(tool_call_id, str) and tool_call_id.strip():
|
||||
out["mcp_call_id"] = tool_call_id
|
||||
|
||||
tool_calls = out.pop("tool_calls", None)
|
||||
if isinstance(tool_calls, list):
|
||||
mcp_calls: List[Dict[str, Any]] = []
|
||||
for idx, call in enumerate(tool_calls):
|
||||
if not isinstance(call, dict):
|
||||
continue
|
||||
fn = call.get("function") if isinstance(call.get("function"), dict) else {}
|
||||
name = fn.get("name")
|
||||
if not isinstance(name, str) or not name.strip():
|
||||
continue
|
||||
arguments: Dict[str, Any] = {}
|
||||
raw_args = fn.get("arguments")
|
||||
if isinstance(raw_args, str) and raw_args.strip():
|
||||
try:
|
||||
parsed = json.loads(raw_args)
|
||||
if isinstance(parsed, dict):
|
||||
arguments = parsed
|
||||
except json.JSONDecodeError:
|
||||
arguments = {}
|
||||
mcp_calls.append(
|
||||
{
|
||||
"id": str(call.get("id") or f"call_{idx}"),
|
||||
"name": name,
|
||||
"arguments": arguments,
|
||||
}
|
||||
)
|
||||
if mcp_calls:
|
||||
out["mcp_calls"] = mcp_calls
|
||||
return out
|
||||
|
||||
|
||||
async def describe_image_base64(base64_image: str, detail: str = "high") -> str:
|
||||
"""
|
||||
传入 base64 图片并返回描述文本。缺省时返回错误提示。
|
||||
@@ -939,34 +1035,39 @@ async def chat_completion(
|
||||
) -> Dict[str, Any]:
|
||||
model, provider = await _require_model(ability)
|
||||
fmt = str(provider.api_format or "").lower()
|
||||
wire_messages = _mcp_messages_to_openai_wire(messages)
|
||||
wire_tools = _mcp_tools_to_openai_wire(tools)
|
||||
if fmt == "openai":
|
||||
return await _chat_with_openai(
|
||||
result = await _chat_with_openai(
|
||||
provider,
|
||||
model,
|
||||
messages,
|
||||
tools=tools,
|
||||
wire_messages,
|
||||
tools=wire_tools,
|
||||
tool_choice=tool_choice,
|
||||
temperature=temperature,
|
||||
timeout=timeout,
|
||||
)
|
||||
return _openai_wire_message_to_mcp(result)
|
||||
if fmt == "anthropic":
|
||||
return await _chat_with_anthropic(
|
||||
result = await _chat_with_anthropic(
|
||||
provider,
|
||||
model,
|
||||
messages,
|
||||
tools=tools,
|
||||
wire_messages,
|
||||
tools=wire_tools,
|
||||
temperature=temperature,
|
||||
timeout=timeout,
|
||||
)
|
||||
return _openai_wire_message_to_mcp(result)
|
||||
if fmt == "ollama":
|
||||
return await _chat_with_ollama(
|
||||
result = await _chat_with_ollama(
|
||||
provider,
|
||||
model,
|
||||
messages,
|
||||
tools=tools,
|
||||
wire_messages,
|
||||
tools=wire_tools,
|
||||
temperature=temperature,
|
||||
timeout=timeout,
|
||||
)
|
||||
return _openai_wire_message_to_mcp(result)
|
||||
raise MissingModelError(f"当前不支持该对话模型接口类型: {provider.api_format}")
|
||||
|
||||
|
||||
@@ -1016,38 +1117,49 @@ async def chat_completion_stream(
|
||||
) -> AsyncIterator[Dict[str, Any]]:
|
||||
model, provider = await _require_model(ability)
|
||||
fmt = str(provider.api_format or "").lower()
|
||||
wire_messages = _mcp_messages_to_openai_wire(messages)
|
||||
wire_tools = _mcp_tools_to_openai_wire(tools)
|
||||
if fmt == "openai":
|
||||
async for event in _chat_stream_with_openai(
|
||||
provider,
|
||||
model,
|
||||
messages,
|
||||
tools=tools,
|
||||
wire_messages,
|
||||
tools=wire_tools,
|
||||
tool_choice=tool_choice,
|
||||
temperature=temperature,
|
||||
timeout=timeout,
|
||||
):
|
||||
if event.get("type") == "message" and isinstance(event.get("message"), dict):
|
||||
yield {**event, "message": _openai_wire_message_to_mcp(event["message"])}
|
||||
continue
|
||||
yield event
|
||||
return
|
||||
if fmt == "anthropic":
|
||||
async for event in _chat_stream_with_anthropic(
|
||||
provider,
|
||||
model,
|
||||
messages,
|
||||
tools=tools,
|
||||
wire_messages,
|
||||
tools=wire_tools,
|
||||
temperature=temperature,
|
||||
timeout=timeout,
|
||||
):
|
||||
if event.get("type") == "message" and isinstance(event.get("message"), dict):
|
||||
yield {**event, "message": _openai_wire_message_to_mcp(event["message"])}
|
||||
continue
|
||||
yield event
|
||||
return
|
||||
if fmt == "ollama":
|
||||
async for event in _chat_stream_with_ollama(
|
||||
provider,
|
||||
model,
|
||||
messages,
|
||||
tools=tools,
|
||||
wire_messages,
|
||||
tools=wire_tools,
|
||||
temperature=temperature,
|
||||
timeout=timeout,
|
||||
):
|
||||
if event.get("type") == "message" and isinstance(event.get("message"), dict):
|
||||
yield {**event, "message": _openai_wire_message_to_mcp(event["message"])}
|
||||
continue
|
||||
yield event
|
||||
return
|
||||
raise MissingModelError(f"当前不支持该对话模型接口类型: {provider.api_format}")
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import asyncio
|
||||
import json
|
||||
from collections.abc import Iterable
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
from typing import Any, Dict, List, Optional, Tuple, TypeVar
|
||||
|
||||
import httpx
|
||||
from tortoise.exceptions import DoesNotExist
|
||||
@@ -28,16 +28,37 @@ OPENAI_EMBEDDING_DIMS = {
|
||||
"text-embedding-ada-002": 1536,
|
||||
}
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
|
||||
class VectorDBConfigManager:
|
||||
TYPE_KEY = "VECTOR_DB_TYPE"
|
||||
CONFIG_KEY = "VECTOR_DB_CONFIG"
|
||||
DEFAULT_TYPE = "milvus_lite"
|
||||
|
||||
@classmethod
|
||||
def normalize_type(cls, provider_type: Any) -> str:
|
||||
normalized = str(provider_type or cls.DEFAULT_TYPE).strip()
|
||||
return normalized or cls.DEFAULT_TYPE
|
||||
|
||||
@classmethod
|
||||
def normalize_config(cls, config: Dict[str, Any] | None) -> Dict[str, Any]:
|
||||
normalized: Dict[str, Any] = {}
|
||||
for key, value in (config or {}).items():
|
||||
normalized_key = str(key).strip()
|
||||
if not normalized_key:
|
||||
continue
|
||||
if isinstance(value, str):
|
||||
value = value.strip()
|
||||
if not value:
|
||||
continue
|
||||
normalized[normalized_key] = value
|
||||
return normalized
|
||||
|
||||
@classmethod
|
||||
async def load_config(cls) -> Tuple[str, Dict[str, Any]]:
|
||||
raw_type = await ConfigService.get(cls.TYPE_KEY, cls.DEFAULT_TYPE)
|
||||
provider_type = str(raw_type or cls.DEFAULT_TYPE)
|
||||
provider_type = cls.normalize_type(raw_type)
|
||||
|
||||
raw_config = await ConfigService.get(cls.CONFIG_KEY)
|
||||
config_dict: Dict[str, Any] = {}
|
||||
@@ -48,12 +69,14 @@ class VectorDBConfigManager:
|
||||
config_dict = {}
|
||||
elif isinstance(raw_config, dict):
|
||||
config_dict = raw_config
|
||||
return provider_type, config_dict
|
||||
return provider_type, cls.normalize_config(config_dict)
|
||||
|
||||
@classmethod
|
||||
async def save_config(cls, provider_type: str, config: Dict[str, Any]) -> None:
|
||||
await ConfigService.set(cls.TYPE_KEY, provider_type)
|
||||
await ConfigService.set(cls.CONFIG_KEY, json.dumps(config or {}))
|
||||
normalized_type = cls.normalize_type(provider_type)
|
||||
normalized_config = cls.normalize_config(config)
|
||||
await ConfigService.set(cls.TYPE_KEY, normalized_type)
|
||||
await ConfigService.set(cls.CONFIG_KEY, json.dumps(normalized_config))
|
||||
|
||||
@classmethod
|
||||
async def get_type(cls) -> str:
|
||||
@@ -413,6 +436,7 @@ class VectorDBService:
|
||||
self._provider_type: Optional[str] = None
|
||||
self._provider_config: Dict[str, Any] | None = None
|
||||
self._lock = asyncio.Lock()
|
||||
self._operation_lock = asyncio.Lock()
|
||||
|
||||
async def _ensure_provider(self) -> BaseVectorProvider:
|
||||
if self._provider is None:
|
||||
@@ -449,33 +473,38 @@ class VectorDBService:
|
||||
self._provider_config = normalized_config
|
||||
return provider
|
||||
|
||||
async def _run_provider_call(self, provider: BaseVectorProvider, method_name: str, *args, **kwargs) -> T:
|
||||
method = getattr(provider, method_name)
|
||||
async with self._operation_lock:
|
||||
return await asyncio.to_thread(method, *args, **kwargs)
|
||||
|
||||
async def ensure_collection(self, collection_name: str, vector: bool = True, dim: int = DEFAULT_VECTOR_DIMENSION) -> None:
|
||||
provider = await self._ensure_provider()
|
||||
provider.ensure_collection(collection_name, vector, dim)
|
||||
await self._run_provider_call(provider, "ensure_collection", collection_name, vector, dim)
|
||||
|
||||
async def upsert_vector(self, collection_name: str, data: Dict[str, Any]) -> None:
|
||||
provider = await self._ensure_provider()
|
||||
provider.upsert_vector(collection_name, data)
|
||||
await self._run_provider_call(provider, "upsert_vector", collection_name, data)
|
||||
|
||||
async def delete_vector(self, collection_name: str, path: str) -> None:
|
||||
provider = await self._ensure_provider()
|
||||
provider.delete_vector(collection_name, path)
|
||||
await self._run_provider_call(provider, "delete_vector", collection_name, path)
|
||||
|
||||
async def search_vectors(self, collection_name: str, query_embedding, top_k: int = 5):
|
||||
provider = await self._ensure_provider()
|
||||
return provider.search_vectors(collection_name, query_embedding, top_k)
|
||||
return await self._run_provider_call(provider, "search_vectors", collection_name, query_embedding, top_k)
|
||||
|
||||
async def search_by_path(self, collection_name: str, query_path: str, top_k: int = 20):
|
||||
provider = await self._ensure_provider()
|
||||
return provider.search_by_path(collection_name, query_path, top_k)
|
||||
return await self._run_provider_call(provider, "search_by_path", collection_name, query_path, top_k)
|
||||
|
||||
async def get_all_stats(self) -> Dict[str, Any]:
|
||||
provider = await self._ensure_provider()
|
||||
return provider.get_all_stats()
|
||||
return await self._run_provider_call(provider, "get_all_stats")
|
||||
|
||||
async def clear_all_data(self) -> None:
|
||||
provider = await self._ensure_provider()
|
||||
provider.clear_all_data()
|
||||
await self._run_provider_call(provider, "clear_all_data")
|
||||
|
||||
async def current_provider(self) -> Dict[str, Any]:
|
||||
provider_type, provider_config = await VectorDBConfigManager.load_config()
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import asyncio
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
@@ -23,12 +24,14 @@ class MilvusLiteProvider(BaseVectorProvider):
|
||||
|
||||
def __init__(self, config: Dict[str, Any] | None = None):
|
||||
super().__init__(config)
|
||||
self.db_path = Path(self.config.get("db_path") or "data/db/milvus.db")
|
||||
raw_db_path = self.config.get("db_path")
|
||||
db_path = str(raw_db_path).strip() if raw_db_path is not None else ""
|
||||
self.db_path = Path(db_path or "data/db/milvus.db")
|
||||
self.client: MilvusClient | None = None
|
||||
|
||||
async def initialize(self) -> None:
|
||||
try:
|
||||
self.client = MilvusClient(str(self.db_path))
|
||||
self.client = await asyncio.to_thread(MilvusClient, str(self.db_path))
|
||||
except Exception as exc: # pragma: no cover - depends on local environment
|
||||
raise RuntimeError(f"Failed to open Milvus Lite at {self.db_path}: {exc}") from exc
|
||||
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import asyncio
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from pymilvus import CollectionSchema, DataType, FieldSchema, MilvusClient
|
||||
@@ -32,11 +33,14 @@ class MilvusServerProvider(BaseVectorProvider):
|
||||
self.client: MilvusClient | None = None
|
||||
|
||||
async def initialize(self) -> None:
|
||||
uri = self.config.get("uri")
|
||||
uri = str(self.config.get("uri") or "").strip()
|
||||
if not uri:
|
||||
raise RuntimeError("Milvus Server URI is required")
|
||||
token = self.config.get("token")
|
||||
if isinstance(token, str):
|
||||
token = token.strip() or None
|
||||
try:
|
||||
self.client = MilvusClient(uri=uri, token=self.config.get("token"))
|
||||
self.client = await asyncio.to_thread(MilvusClient, uri=uri, token=token)
|
||||
except Exception as exc: # pragma: no cover - depends on remote availability
|
||||
raise RuntimeError(f"Failed to connect to Milvus Server {uri}: {exc}") from exc
|
||||
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import asyncio
|
||||
from typing import Any, Dict, List, Optional, Sequence
|
||||
from uuid import NAMESPACE_URL, uuid5
|
||||
|
||||
@@ -40,7 +41,7 @@ class QdrantProvider(BaseVectorProvider):
|
||||
api_key = (self.config.get("api_key") or None) or None
|
||||
try:
|
||||
client = QdrantClient(url=url, api_key=api_key)
|
||||
client.get_collections()
|
||||
await asyncio.to_thread(client.get_collections)
|
||||
self.client = client
|
||||
except Exception as exc: # pragma: no cover - 依赖外部服务
|
||||
raise RuntimeError(f"Failed to connect to Qdrant at {url}: {exc}") from exc
|
||||
|
||||
@@ -13,6 +13,7 @@ from .types import ConfigItem
|
||||
router = APIRouter(prefix="/api/config", tags=["config"])
|
||||
|
||||
PUBLIC_CONFIG_KEYS = [
|
||||
"APP_DEFAULT_LANGUAGE",
|
||||
"THEME_MODE",
|
||||
"THEME_PRIMARY_COLOR",
|
||||
"THEME_BORDER_RADIUS",
|
||||
@@ -56,6 +57,7 @@ async def get_all_config(
|
||||
configs = await ConfigService.get_all()
|
||||
return success(configs)
|
||||
|
||||
|
||||
@router.get("/public")
|
||||
@audit(action=AuditAction.READ, description="获取公开配置")
|
||||
async def get_public_config(
|
||||
|
||||
@@ -10,7 +10,7 @@ from models.database import Configuration, UserAccount
|
||||
|
||||
load_dotenv(dotenv_path=".env")
|
||||
|
||||
VERSION = "v2.1.0"
|
||||
VERSION = "v2.2.1"
|
||||
|
||||
|
||||
class ConfigService:
|
||||
@@ -80,6 +80,7 @@ class ConfigService:
|
||||
logo=logo,
|
||||
favicon=favicon,
|
||||
is_initialized=user_count > 0,
|
||||
default_language=await cls.get("APP_DEFAULT_LANGUAGE", "zh"),
|
||||
app_domain=await cls.get("APP_DOMAIN"),
|
||||
file_domain=await cls.get("FILE_DOMAIN"),
|
||||
)
|
||||
|
||||
@@ -14,6 +14,7 @@ class SystemStatus(BaseModel):
|
||||
logo: str
|
||||
favicon: str
|
||||
is_initialized: bool
|
||||
default_language: str = "zh"
|
||||
app_domain: Optional[str] = None
|
||||
file_domain: Optional[str] = None
|
||||
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
from dataclasses import dataclass
|
||||
from typing import List, Optional
|
||||
from fastapi import HTTPException
|
||||
|
||||
@@ -17,74 +18,169 @@ from .types import (
|
||||
PERMISSION_DEFINITIONS,
|
||||
)
|
||||
|
||||
@dataclass(slots=True)
|
||||
class PermissionContext:
|
||||
exists: bool
|
||||
is_admin: bool
|
||||
path_rules: List[PathRule]
|
||||
|
||||
|
||||
class PermissionService:
|
||||
"""权限检查服务"""
|
||||
|
||||
# 权限检查结果缓存(简单的内存缓存)
|
||||
_cache: dict[str, tuple[bool, float]] = {}
|
||||
_context_cache: dict[int, tuple[PermissionContext, float]] = {}
|
||||
_cache_ttl = 300 # 5分钟缓存
|
||||
|
||||
@classmethod
|
||||
def _now(cls) -> float:
|
||||
import time
|
||||
|
||||
return time.time()
|
||||
|
||||
@classmethod
|
||||
def _is_cache_valid(cls, timestamp: float) -> bool:
|
||||
return cls._now() - timestamp < cls._cache_ttl
|
||||
|
||||
@classmethod
|
||||
def _get_cached_result(cls, cache_key: str) -> Optional[bool]:
|
||||
cached = cls._cache.get(cache_key)
|
||||
if not cached:
|
||||
return None
|
||||
result, timestamp = cached
|
||||
if cls._is_cache_valid(timestamp):
|
||||
return result
|
||||
cls._cache.pop(cache_key, None)
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def _sort_path_rules(cls, rules: List[PathRule]) -> List[PathRule]:
|
||||
return sorted(
|
||||
rules,
|
||||
key=lambda r: (
|
||||
r.priority,
|
||||
PathMatcher.get_pattern_specificity(r.path_pattern, r.is_regex),
|
||||
),
|
||||
reverse=True,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def _match_sorted_path_rules(
|
||||
cls, path: str, action: str, sorted_rules: List[PathRule]
|
||||
) -> Optional[bool]:
|
||||
for rule in sorted_rules:
|
||||
if PathMatcher.match_pattern(path, rule.path_pattern, rule.is_regex):
|
||||
if action == PathAction.READ:
|
||||
return rule.can_read
|
||||
if action == PathAction.WRITE:
|
||||
return rule.can_write
|
||||
if action == PathAction.DELETE:
|
||||
return rule.can_delete
|
||||
if action == PathAction.SHARE:
|
||||
return rule.can_share
|
||||
return False
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
async def _get_permission_context(cls, user_id: int) -> PermissionContext:
|
||||
cached = cls._context_cache.get(user_id)
|
||||
if cached:
|
||||
context, timestamp = cached
|
||||
if cls._is_cache_valid(timestamp):
|
||||
return context
|
||||
cls._context_cache.pop(user_id, None)
|
||||
|
||||
user = await UserAccount.get_or_none(id=user_id)
|
||||
if not user:
|
||||
context = PermissionContext(exists=False, is_admin=False, path_rules=[])
|
||||
cls._context_cache[user_id] = (context, cls._now())
|
||||
return context
|
||||
|
||||
if user.is_admin:
|
||||
context = PermissionContext(exists=True, is_admin=True, path_rules=[])
|
||||
cls._context_cache[user_id] = (context, cls._now())
|
||||
return context
|
||||
|
||||
user_roles = await UserRole.filter(user_id=user_id)
|
||||
role_ids = [ur.role_id for ur in user_roles]
|
||||
if not role_ids:
|
||||
context = PermissionContext(exists=True, is_admin=False, path_rules=[])
|
||||
cls._context_cache[user_id] = (context, cls._now())
|
||||
return context
|
||||
|
||||
path_rules = await PathRule.filter(role_id__in=role_ids)
|
||||
context = PermissionContext(
|
||||
exists=True,
|
||||
is_admin=False,
|
||||
path_rules=cls._sort_path_rules(list(path_rules)),
|
||||
)
|
||||
cls._context_cache[user_id] = (context, cls._now())
|
||||
return context
|
||||
|
||||
@classmethod
|
||||
def _check_path_permission_with_context(
|
||||
cls,
|
||||
user_id: int,
|
||||
normalized_path: str,
|
||||
action: str,
|
||||
context: PermissionContext,
|
||||
) -> bool:
|
||||
if not context.exists:
|
||||
return False
|
||||
if context.is_admin:
|
||||
return True
|
||||
|
||||
checked_cache_keys: List[str] = []
|
||||
current_path = normalized_path
|
||||
|
||||
while True:
|
||||
cache_key = f"{user_id}:{current_path}:{action}"
|
||||
cached_result = cls._get_cached_result(cache_key)
|
||||
if cached_result is not None:
|
||||
result = cached_result
|
||||
break
|
||||
|
||||
checked_cache_keys.append(cache_key)
|
||||
result = cls._match_sorted_path_rules(current_path, action, context.path_rules)
|
||||
if result is not None:
|
||||
break
|
||||
|
||||
parent_path = PathMatcher.get_parent_path(current_path)
|
||||
if not parent_path:
|
||||
result = False
|
||||
break
|
||||
current_path = parent_path
|
||||
|
||||
timestamp = cls._now()
|
||||
for cache_key in checked_cache_keys:
|
||||
cls._cache[cache_key] = (result, timestamp)
|
||||
return result
|
||||
|
||||
@classmethod
|
||||
async def check_path_permission(
|
||||
cls, user_id: int, path: str, action: str
|
||||
) -> bool:
|
||||
"""
|
||||
检查用户对路径的操作权限
|
||||
|
||||
|
||||
Args:
|
||||
user_id: 用户ID
|
||||
path: 要检查的路径
|
||||
action: 操作类型 (read/write/delete/share)
|
||||
|
||||
|
||||
Returns:
|
||||
是否有权限
|
||||
"""
|
||||
import time
|
||||
|
||||
# 检查缓存
|
||||
cache_key = f"{user_id}:{path}:{action}"
|
||||
if cache_key in cls._cache:
|
||||
result, timestamp = cls._cache[cache_key]
|
||||
if time.time() - timestamp < cls._cache_ttl:
|
||||
return result
|
||||
|
||||
# 获取用户
|
||||
user = await UserAccount.get_or_none(id=user_id)
|
||||
if not user:
|
||||
return False
|
||||
|
||||
# 超级管理员直接放行
|
||||
if user.is_admin:
|
||||
cls._cache[cache_key] = (True, time.time())
|
||||
return True
|
||||
|
||||
# 获取用户所有角色
|
||||
user_roles = await UserRole.filter(user_id=user_id).prefetch_related("role")
|
||||
role_ids = [ur.role_id for ur in user_roles]
|
||||
|
||||
if not role_ids:
|
||||
cls._cache[cache_key] = (False, time.time())
|
||||
return False
|
||||
|
||||
# 获取所有角色的路径规则
|
||||
path_rules = await PathRule.filter(role_id__in=role_ids).order_by("-priority")
|
||||
|
||||
# 规范化路径
|
||||
normalized_path = PathMatcher.normalize_path(path)
|
||||
cache_key = f"{user_id}:{normalized_path}:{action}"
|
||||
cached_result = cls._get_cached_result(cache_key)
|
||||
if cached_result is not None:
|
||||
return cached_result
|
||||
|
||||
# 按优先级和具体程度匹配
|
||||
result = cls._match_path_rules(normalized_path, action, list(path_rules))
|
||||
|
||||
# 如果没有匹配到规则,检查父目录(继承)
|
||||
if result is None:
|
||||
parent_path = PathMatcher.get_parent_path(normalized_path)
|
||||
if parent_path:
|
||||
result = await cls.check_path_permission(user_id, parent_path, action)
|
||||
else:
|
||||
result = False # 默认拒绝
|
||||
|
||||
cls._cache[cache_key] = (result, time.time())
|
||||
context = await cls._get_permission_context(user_id)
|
||||
result = cls._check_path_permission_with_context(user_id, normalized_path, action, context)
|
||||
cls._cache[cache_key] = (result, cls._now())
|
||||
return result
|
||||
|
||||
@classmethod
|
||||
@@ -97,31 +193,7 @@ class PermissionService:
|
||||
Returns:
|
||||
True/False 表示明确的权限结果,None 表示没有匹配到规则
|
||||
"""
|
||||
# 按优先级和具体程度排序
|
||||
sorted_rules = sorted(
|
||||
rules,
|
||||
key=lambda r: (
|
||||
r.priority,
|
||||
PathMatcher.get_pattern_specificity(r.path_pattern, r.is_regex),
|
||||
),
|
||||
reverse=True,
|
||||
)
|
||||
|
||||
for rule in sorted_rules:
|
||||
if PathMatcher.match_pattern(path, rule.path_pattern, rule.is_regex):
|
||||
# 匹配到规则,检查具体操作权限
|
||||
if action == PathAction.READ:
|
||||
return rule.can_read
|
||||
elif action == PathAction.WRITE:
|
||||
return rule.can_write
|
||||
elif action == PathAction.DELETE:
|
||||
return rule.can_delete
|
||||
elif action == PathAction.SHARE:
|
||||
return rule.can_share
|
||||
else:
|
||||
return False
|
||||
|
||||
return None
|
||||
return cls._match_sorted_path_rules(path, action, cls._sort_path_rules(rules))
|
||||
|
||||
@classmethod
|
||||
async def check_system_permission(cls, user_id: int, permission_code: str) -> bool:
|
||||
@@ -251,35 +323,20 @@ class PermissionService:
|
||||
cls, user_id: int, path: str, action: str
|
||||
) -> PathPermissionResult:
|
||||
"""检查路径权限并返回详细结果"""
|
||||
user = await UserAccount.get_or_none(id=user_id)
|
||||
if not user:
|
||||
context = await cls._get_permission_context(user_id)
|
||||
if not context.exists:
|
||||
return PathPermissionResult(path=path, action=action, allowed=False)
|
||||
|
||||
# 超级管理员
|
||||
if user.is_admin:
|
||||
if context.is_admin:
|
||||
return PathPermissionResult(path=path, action=action, allowed=True)
|
||||
|
||||
# 获取用户角色
|
||||
user_roles = await UserRole.filter(user_id=user_id)
|
||||
role_ids = [ur.role_id for ur in user_roles]
|
||||
|
||||
if not role_ids:
|
||||
if not context.path_rules:
|
||||
return PathPermissionResult(path=path, action=action, allowed=False)
|
||||
|
||||
# 获取路径规则
|
||||
path_rules = await PathRule.filter(role_id__in=role_ids).order_by("-priority")
|
||||
normalized_path = PathMatcher.normalize_path(path)
|
||||
|
||||
# 查找匹配的规则
|
||||
matched_rule = None
|
||||
for rule in sorted(
|
||||
path_rules,
|
||||
key=lambda r: (
|
||||
r.priority,
|
||||
PathMatcher.get_pattern_specificity(r.path_pattern, r.is_regex),
|
||||
),
|
||||
reverse=True,
|
||||
):
|
||||
for rule in context.path_rules:
|
||||
if PathMatcher.match_pattern(
|
||||
normalized_path, rule.path_pattern, rule.is_regex
|
||||
):
|
||||
@@ -322,19 +379,30 @@ class PermissionService:
|
||||
"""清除权限缓存"""
|
||||
if user_id is None:
|
||||
cls._cache.clear()
|
||||
cls._context_cache.clear()
|
||||
else:
|
||||
# 清除特定用户的缓存
|
||||
keys_to_delete = [k for k in cls._cache if k.startswith(f"{user_id}:")]
|
||||
for k in keys_to_delete:
|
||||
del cls._cache[k]
|
||||
cls._context_cache.pop(user_id, None)
|
||||
|
||||
@classmethod
|
||||
async def filter_paths_by_permission(
|
||||
cls, user_id: int, paths: List[str], action: str
|
||||
) -> List[str]:
|
||||
"""过滤出用户有权限的路径列表"""
|
||||
if not paths:
|
||||
return []
|
||||
|
||||
context = await cls._get_permission_context(user_id)
|
||||
if not context.exists:
|
||||
return []
|
||||
if context.is_admin:
|
||||
return list(paths)
|
||||
|
||||
result = []
|
||||
for path in paths:
|
||||
if await cls.check_path_permission(user_id, path, action):
|
||||
normalized_path = PathMatcher.normalize_path(path)
|
||||
if cls._check_path_permission_with_context(user_id, normalized_path, action, context):
|
||||
result.append(path)
|
||||
return result
|
||||
|
||||
@@ -84,8 +84,9 @@ async def get_file_stat(
|
||||
full_path: str,
|
||||
request: Request,
|
||||
current_user: Annotated[User, Depends(get_current_active_user)],
|
||||
verbose: bool = Query(False, description="是否返回扩展元数据"),
|
||||
):
|
||||
stat = await VirtualFSService.stat(full_path)
|
||||
stat = await VirtualFSService.stat(full_path, verbose=verbose)
|
||||
return success(stat)
|
||||
|
||||
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import inspect
|
||||
from typing import Any, Dict, List, Tuple
|
||||
|
||||
from fastapi import HTTPException
|
||||
@@ -14,6 +15,23 @@ from .resolver import VirtualFSResolverMixin
|
||||
|
||||
|
||||
class VirtualFSListingMixin(VirtualFSResolverMixin):
|
||||
@staticmethod
|
||||
async def _call_stat_file(
|
||||
stat_func,
|
||||
root: str,
|
||||
rel: str,
|
||||
*,
|
||||
include_metadata: bool = False,
|
||||
):
|
||||
try:
|
||||
parameters = inspect.signature(stat_func).parameters
|
||||
except (TypeError, ValueError):
|
||||
parameters = {}
|
||||
|
||||
if "include_metadata" in parameters:
|
||||
return await stat_func(root, rel, include_metadata=include_metadata)
|
||||
return await stat_func(root, rel)
|
||||
|
||||
@classmethod
|
||||
async def path_is_directory(cls, path: str) -> bool:
|
||||
adapter_instance, _, root, rel = await cls.resolve_adapter_and_rel(path)
|
||||
@@ -24,7 +42,7 @@ class VirtualFSListingMixin(VirtualFSResolverMixin):
|
||||
if not callable(stat_func):
|
||||
raise HTTPException(501, detail="Adapter does not implement stat_file")
|
||||
try:
|
||||
info = await stat_func(root, rel)
|
||||
info = await cls._call_stat_file(stat_func, root, rel, include_metadata=False)
|
||||
except FileNotFoundError:
|
||||
raise HTTPException(404, detail="Path not found")
|
||||
if isinstance(info, dict):
|
||||
@@ -110,7 +128,12 @@ class VirtualFSListingMixin(VirtualFSResolverMixin):
|
||||
stat_file = getattr(adapter_instance, "stat_file", None)
|
||||
if callable(stat_file):
|
||||
try:
|
||||
parent_info = await stat_file(effective_root, rel)
|
||||
parent_info = await cls._call_stat_file(
|
||||
stat_file,
|
||||
effective_root,
|
||||
rel,
|
||||
include_metadata=False,
|
||||
)
|
||||
if isinstance(parent_info, dict):
|
||||
parent_info.setdefault("name", rel.split("/")[-1])
|
||||
parent_info["is_dir"] = bool(parent_info.get("is_dir", True))
|
||||
@@ -121,7 +144,12 @@ class VirtualFSListingMixin(VirtualFSResolverMixin):
|
||||
stat_file = getattr(adapter_instance, "stat_file", None)
|
||||
if callable(stat_file):
|
||||
try:
|
||||
parent_info = await stat_file(effective_root, parent_rel)
|
||||
parent_info = await cls._call_stat_file(
|
||||
stat_file,
|
||||
effective_root,
|
||||
parent_rel,
|
||||
include_metadata=False,
|
||||
)
|
||||
if isinstance(parent_info, dict):
|
||||
parent_info.setdefault("name", parent_rel.split("/")[-1])
|
||||
parent_info["is_dir"] = bool(parent_info.get("is_dir", True))
|
||||
@@ -222,13 +250,18 @@ class VirtualFSListingMixin(VirtualFSResolverMixin):
|
||||
}
|
||||
|
||||
@classmethod
|
||||
async def stat_file(cls, path: str):
|
||||
async def stat_file(cls, path: str, verbose: bool = False):
|
||||
adapter_instance, _, root, rel = await cls.resolve_adapter_and_rel(path)
|
||||
stat_func = getattr(adapter_instance, "stat_file", None)
|
||||
if not callable(stat_func):
|
||||
raise HTTPException(501, detail="Adapter does not implement stat_file")
|
||||
try:
|
||||
info = await stat_func(root, rel)
|
||||
info = await cls._call_stat_file(
|
||||
stat_func,
|
||||
root,
|
||||
rel,
|
||||
include_metadata=verbose,
|
||||
)
|
||||
except FileNotFoundError as exc:
|
||||
raise HTTPException(404, detail=str(exc))
|
||||
|
||||
@@ -241,7 +274,7 @@ class VirtualFSListingMixin(VirtualFSResolverMixin):
|
||||
rel_name = rel.rstrip("/").split("/")[-1] if rel else path.rstrip("/").split("/")[-1]
|
||||
name_hint = str(info.get("name") or rel_name or "")
|
||||
info["has_thumbnail"] = bool(not is_dir and (is_image_filename(name_hint) or is_video_filename(name_hint)))
|
||||
if not is_dir:
|
||||
if verbose and not is_dir:
|
||||
vector_index = await cls._gather_vector_index(path)
|
||||
if vector_index is not None:
|
||||
info["vector_index"] = vector_index
|
||||
@@ -263,38 +296,26 @@ class VirtualFSListingMixin(VirtualFSResolverMixin):
|
||||
|
||||
过滤掉用户没有读取权限的条目
|
||||
"""
|
||||
# 首先获取完整的目录列表
|
||||
result = await cls.list_virtual_dir(path, page_num, page_size, sort_by, sort_order)
|
||||
|
||||
# 检查用户是否是管理员(管理员可以看到所有内容)
|
||||
from models.database import UserAccount
|
||||
user = await UserAccount.get_or_none(id=user_id)
|
||||
if user and user.is_admin:
|
||||
return result
|
||||
|
||||
# 过滤无权限的条目
|
||||
items = result.get("items", [])
|
||||
if not items:
|
||||
return result
|
||||
|
||||
|
||||
norm = cls._normalize_path(path).rstrip("/") or "/"
|
||||
filtered_items = []
|
||||
|
||||
path_pairs: List[Tuple[str, Dict]] = []
|
||||
for item in items:
|
||||
item_name = item.get("name", "")
|
||||
if norm == "/":
|
||||
item_path = f"/{item_name}"
|
||||
else:
|
||||
item_path = f"{norm}/{item_name}"
|
||||
|
||||
# 检查用户是否有读取权限
|
||||
has_permission = await PermissionService.check_path_permission(
|
||||
user_id, item_path, PathAction.READ
|
||||
)
|
||||
if has_permission:
|
||||
filtered_items.append(item)
|
||||
|
||||
# 更新结果
|
||||
result["items"] = filtered_items
|
||||
|
||||
path_pairs.append((item_path, item))
|
||||
|
||||
allowed_paths = await PermissionService.filter_paths_by_permission(
|
||||
user_id,
|
||||
[item_path for item_path, _ in path_pairs],
|
||||
PathAction.READ,
|
||||
)
|
||||
allowed_set = set(allowed_paths)
|
||||
result["items"] = [item for item_path, item in path_pairs if item_path in allowed_set]
|
||||
return result
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import base64
|
||||
import hashlib
|
||||
import mimetypes
|
||||
import uuid
|
||||
from email.utils import formatdate
|
||||
from urllib.parse import urlparse, unquote
|
||||
from typing import Optional
|
||||
@@ -43,6 +44,8 @@ def _dav_headers(extra: Optional[dict] = None) -> dict:
|
||||
"MKCOL",
|
||||
"MOVE",
|
||||
"COPY",
|
||||
"LOCK",
|
||||
"UNLOCK",
|
||||
]),
|
||||
}
|
||||
if extra:
|
||||
@@ -157,17 +160,19 @@ def _normalize_fs_path(path: str) -> str:
|
||||
return unquote(full)
|
||||
|
||||
|
||||
@router.options("")
|
||||
@router.options("/{path:path}")
|
||||
@audit(action=AuditAction.READ, description="WebDAV: OPTIONS", user_kw="user")
|
||||
async def options_root(_request: Request, path: str = "", _enabled: None = Depends(_ensure_webdav_enabled)):
|
||||
return Response(status_code=200, headers=_dav_headers())
|
||||
|
||||
|
||||
@router.api_route("", methods=["PROPFIND"])
|
||||
@router.api_route("/{path:path}", methods=["PROPFIND"])
|
||||
@audit(action=AuditAction.READ, description="WebDAV: PROPFIND", user_kw="user")
|
||||
async def propfind(
|
||||
request: Request,
|
||||
path: str,
|
||||
path: str = "",
|
||||
_enabled: None = Depends(_ensure_webdav_enabled),
|
||||
user: User = Depends(_get_basic_user),
|
||||
):
|
||||
@@ -280,29 +285,43 @@ async def dav_head(
|
||||
return Response(status_code=200, headers=headers)
|
||||
|
||||
|
||||
@router.api_route("", methods=["PUT"])
|
||||
@router.api_route("/{path:path}", methods=["PUT"])
|
||||
@audit(action=AuditAction.UPLOAD, description="WebDAV: PUT", user_kw="user")
|
||||
async def dav_put(
|
||||
path: str,
|
||||
request: Request,
|
||||
path: str = "",
|
||||
_enabled: None = Depends(_ensure_webdav_enabled),
|
||||
user: User = Depends(_get_basic_user),
|
||||
):
|
||||
full_path = _normalize_fs_path(path)
|
||||
await PermissionService.require_path_permission(user.id, full_path, PathAction.WRITE)
|
||||
existed = True
|
||||
try:
|
||||
await VirtualFSService.stat_file(full_path)
|
||||
except FileNotFoundError:
|
||||
existed = False
|
||||
except HTTPException as exc:
|
||||
if exc.status_code == 404:
|
||||
existed = False
|
||||
else:
|
||||
raise
|
||||
|
||||
async def body_iter():
|
||||
async for chunk in request.stream():
|
||||
if chunk:
|
||||
yield chunk
|
||||
size = await VirtualFSService.write_file_stream(full_path, body_iter(), overwrite=True)
|
||||
return Response(status_code=201, headers=_dav_headers({"Content-Length": "0"}))
|
||||
|
||||
await VirtualFSService.write_file_stream(full_path, body_iter(), overwrite=True)
|
||||
return Response(status_code=204 if existed else 201, headers=_dav_headers({"Content-Length": "0"}))
|
||||
|
||||
|
||||
@router.api_route("", methods=["DELETE"])
|
||||
@router.api_route("/{path:path}", methods=["DELETE"])
|
||||
@audit(action=AuditAction.DELETE, description="WebDAV: DELETE", user_kw="user")
|
||||
async def dav_delete(
|
||||
path: str,
|
||||
_request: Request,
|
||||
path: str = "",
|
||||
_enabled: None = Depends(_ensure_webdav_enabled),
|
||||
user: User = Depends(_get_basic_user),
|
||||
):
|
||||
@@ -312,6 +331,58 @@ async def dav_delete(
|
||||
return Response(status_code=204, headers=_dav_headers())
|
||||
|
||||
|
||||
@router.api_route("", methods=["LOCK"])
|
||||
@router.api_route("/{path:path}", methods=["LOCK"])
|
||||
@audit(action=AuditAction.UPDATE, description="WebDAV: LOCK", user_kw="user")
|
||||
async def dav_lock(
|
||||
path: str = "",
|
||||
_request: Request = None,
|
||||
_enabled: None = Depends(_ensure_webdav_enabled),
|
||||
user: User = Depends(_get_basic_user),
|
||||
):
|
||||
full_path = _normalize_fs_path(path)
|
||||
if full_path != "/":
|
||||
await PermissionService.require_path_permission(user.id, full_path, PathAction.WRITE)
|
||||
|
||||
token = f"opaquelocktoken:{uuid.uuid4()}"
|
||||
ns = "{DAV:}"
|
||||
prop = ET.Element(ns + "prop")
|
||||
lockdiscovery = ET.SubElement(prop, ns + "lockdiscovery")
|
||||
activelock = ET.SubElement(lockdiscovery, ns + "activelock")
|
||||
locktype = ET.SubElement(activelock, ns + "locktype")
|
||||
ET.SubElement(locktype, ns + "write")
|
||||
lockscope = ET.SubElement(activelock, ns + "lockscope")
|
||||
ET.SubElement(lockscope, ns + "exclusive")
|
||||
depth = ET.SubElement(activelock, ns + "depth")
|
||||
depth.text = "Infinity"
|
||||
locktoken = ET.SubElement(activelock, ns + "locktoken")
|
||||
href = ET.SubElement(locktoken, ns + "href")
|
||||
href.text = token
|
||||
|
||||
xml = ET.tostring(prop, encoding="utf-8", xml_declaration=True)
|
||||
return Response(
|
||||
content=xml,
|
||||
status_code=200,
|
||||
media_type='application/xml; charset="utf-8"',
|
||||
headers=_dav_headers({"Lock-Token": f"<{token}>"}),
|
||||
)
|
||||
|
||||
|
||||
@router.api_route("", methods=["UNLOCK"])
|
||||
@router.api_route("/{path:path}", methods=["UNLOCK"])
|
||||
@audit(action=AuditAction.UPDATE, description="WebDAV: UNLOCK", user_kw="user")
|
||||
async def dav_unlock(
|
||||
path: str = "",
|
||||
_request: Request = None,
|
||||
_enabled: None = Depends(_ensure_webdav_enabled),
|
||||
user: User = Depends(_get_basic_user),
|
||||
):
|
||||
full_path = _normalize_fs_path(path)
|
||||
if full_path != "/":
|
||||
await PermissionService.require_path_permission(user.id, full_path, PathAction.WRITE)
|
||||
return Response(status_code=204, headers=_dav_headers())
|
||||
|
||||
|
||||
@router.api_route("/{path:path}", methods=["MKCOL"])
|
||||
@audit(action=AuditAction.CREATE, description="WebDAV: MKCOL", user_kw="user")
|
||||
async def dav_mkcol(
|
||||
|
||||
@@ -144,9 +144,9 @@ class VirtualFSRouteMixin(VirtualFSTempLinkMixin):
|
||||
return response
|
||||
|
||||
@classmethod
|
||||
async def stat(cls, full_path: str):
|
||||
async def stat(cls, full_path: str, verbose: bool = False):
|
||||
full_path = cls._normalize_path(full_path)
|
||||
return await cls.stat_file(full_path)
|
||||
return await cls.stat_file(full_path, verbose=verbose)
|
||||
|
||||
@classmethod
|
||||
async def write_uploaded_file(cls, full_path: str, data: bytes):
|
||||
|
||||
@@ -28,12 +28,12 @@ async def search_files(
|
||||
data = await VirtualFSSearchService.search(q, top_k, mode, page, page_size)
|
||||
items = data.get("items") if isinstance(data, dict) else None
|
||||
if isinstance(items, list) and items:
|
||||
filtered = []
|
||||
for item in items:
|
||||
path = getattr(item, "path", None)
|
||||
if not path:
|
||||
continue
|
||||
if await PermissionService.check_path_permission(user.id, str(path), PathAction.READ):
|
||||
filtered.append(item)
|
||||
data["items"] = filtered
|
||||
path_pairs = [(str(item.path), item) for item in items if getattr(item, "path", None)]
|
||||
allowed_paths = await PermissionService.filter_paths_by_permission(
|
||||
user.id,
|
||||
[path for path, _ in path_pairs],
|
||||
PathAction.READ,
|
||||
)
|
||||
allowed_set = set(allowed_paths)
|
||||
data["items"] = [item for path, item in path_pairs if path in allowed_set]
|
||||
return success(data)
|
||||
|
||||
15
main.py
15
main.py
@@ -3,6 +3,7 @@ from pathlib import Path
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
from domain.adapters import runtime_registry
|
||||
from domain.agent.mcp import MCP_HTTP_APP
|
||||
from domain.config import ConfigService, VERSION
|
||||
from db.session import close_db, init_db
|
||||
from api.routers import include_routers
|
||||
@@ -80,12 +81,13 @@ async def lifespan(app: FastAPI):
|
||||
# 在所有路由加载完成后,挂载静态文件服务(放在最后以避免覆盖 API 路由)
|
||||
app.mount("/", SPAStaticFiles(directory="web/dist", html=True, check_dir=False), name="static")
|
||||
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
await task_scheduler.stop()
|
||||
await task_queue_service.stop_worker()
|
||||
await close_db()
|
||||
async with MCP_HTTP_APP.router.lifespan_context(MCP_HTTP_APP):
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
await task_scheduler.stop()
|
||||
await task_queue_service.stop_worker()
|
||||
await close_db()
|
||||
|
||||
|
||||
def create_app() -> FastAPI:
|
||||
@@ -95,6 +97,7 @@ def create_app() -> FastAPI:
|
||||
lifespan=lifespan,
|
||||
)
|
||||
include_routers(app)
|
||||
app.mount("/api/mcp", MCP_HTTP_APP, name="mcp")
|
||||
app.add_exception_handler(HTTPException, http_exception_handler)
|
||||
app.add_exception_handler(RequestValidationError, validation_exception_handler)
|
||||
app.add_exception_handler(httpx.HTTPStatusError, httpx_exception_handler)
|
||||
|
||||
@@ -9,6 +9,7 @@ dependencies = [
|
||||
"bcrypt>=5.0.0",
|
||||
"croniter>=6.0.0",
|
||||
"fastapi>=0.127.0",
|
||||
"mcp>=1.26.0",
|
||||
"paramiko>=4.0.0",
|
||||
"pillow>=12.0.0",
|
||||
"pydantic[email]>=2.12.5",
|
||||
|
||||
311
uv.lock
generated
311
uv.lock
generated
@@ -63,7 +63,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "aiohttp"
|
||||
version = "3.13.3"
|
||||
version = "3.13.4"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "aiohappyeyeballs" },
|
||||
@@ -74,42 +74,42 @@ dependencies = [
|
||||
{ name = "propcache" },
|
||||
{ name = "yarl" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/50/42/32cf8e7704ceb4481406eb87161349abb46a57fee3f008ba9cb610968646/aiohttp-3.13.3.tar.gz", hash = "sha256:a949eee43d3782f2daae4f4a2819b2cb9b0c5d3b7f7a927067cc84dafdbb9f88", size = 7844556, upload-time = "2026-01-03T17:33:05.204Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/45/4a/064321452809dae953c1ed6e017504e72551a26b6f5708a5a80e4bf556ff/aiohttp-3.13.4.tar.gz", hash = "sha256:d97a6d09c66087890c2ab5d49069e1e570583f7ac0314ecf98294c1b6aaebd38", size = 7859748, upload-time = "2026-03-28T17:19:40.6Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/99/36/5b6514a9f5d66f4e2597e40dea2e3db271e023eb7a5d22defe96ba560996/aiohttp-3.13.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:ea37047c6b367fd4bd632bff8077449b8fa034b69e812a18e0132a00fae6e808", size = 737238, upload-time = "2026-01-03T17:31:17.909Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f7/49/459327f0d5bcd8c6c9ca69e60fdeebc3622861e696490d8674a6d0cb90a6/aiohttp-3.13.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6fc0e2337d1a4c3e6acafda6a78a39d4c14caea625124817420abceed36e2415", size = 492292, upload-time = "2026-01-03T17:31:19.919Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e8/0b/b97660c5fd05d3495b4eb27f2d0ef18dc1dc4eff7511a9bf371397ff0264/aiohttp-3.13.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c685f2d80bb67ca8c3837823ad76196b3694b0159d232206d1e461d3d434666f", size = 493021, upload-time = "2026-01-03T17:31:21.636Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/54/d4/438efabdf74e30aeceb890c3290bbaa449780583b1270b00661126b8aae4/aiohttp-3.13.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48e377758516d262bde50c2584fc6c578af272559c409eecbdd2bae1601184d6", size = 1717263, upload-time = "2026-01-03T17:31:23.296Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/71/f2/7bddc7fd612367d1459c5bcf598a9e8f7092d6580d98de0e057eb42697ad/aiohttp-3.13.3-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:34749271508078b261c4abb1767d42b8d0c0cc9449c73a4df494777dc55f0687", size = 1669107, upload-time = "2026-01-03T17:31:25.334Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/00/5a/1aeaecca40e22560f97610a329e0e5efef5e0b5afdf9f857f0d93839ab2e/aiohttp-3.13.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:82611aeec80eb144416956ec85b6ca45a64d76429c1ed46ae1b5f86c6e0c9a26", size = 1760196, upload-time = "2026-01-03T17:31:27.394Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/f8/0ff6992bea7bd560fc510ea1c815f87eedd745fe035589c71ce05612a19a/aiohttp-3.13.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2fff83cfc93f18f215896e3a190e8e5cb413ce01553901aca925176e7568963a", size = 1843591, upload-time = "2026-01-03T17:31:29.238Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e3/d1/e30e537a15f53485b61f5be525f2157da719819e8377298502aebac45536/aiohttp-3.13.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bbe7d4cecacb439e2e2a8a1a7b935c25b812af7a5fd26503a66dadf428e79ec1", size = 1720277, upload-time = "2026-01-03T17:31:31.053Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/45/23f4c451d8192f553d38d838831ebbc156907ea6e05557f39563101b7717/aiohttp-3.13.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b928f30fe49574253644b1ca44b1b8adbd903aa0da4b9054a6c20fc7f4092a25", size = 1548575, upload-time = "2026-01-03T17:31:32.87Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6a/ed/0a42b127a43712eda7807e7892c083eadfaf8429ca8fb619662a530a3aab/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7b5e8fe4de30df199155baaf64f2fcd604f4c678ed20910db8e2c66dc4b11603", size = 1679455, upload-time = "2026-01-03T17:31:34.76Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2e/b5/c05f0c2b4b4fe2c9d55e73b6d3ed4fd6c9dc2684b1d81cbdf77e7fad9adb/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:8542f41a62bcc58fc7f11cf7c90e0ec324ce44950003feb70640fc2a9092c32a", size = 1687417, upload-time = "2026-01-03T17:31:36.699Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c9/6b/915bc5dad66aef602b9e459b5a973529304d4e89ca86999d9d75d80cbd0b/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:5e1d8c8b8f1d91cd08d8f4a3c2b067bfca6ec043d3ff36de0f3a715feeedf926", size = 1729968, upload-time = "2026-01-03T17:31:38.622Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/11/3b/e84581290a9520024a08640b63d07673057aec5ca548177a82026187ba73/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:90455115e5da1c3c51ab619ac57f877da8fd6d73c05aacd125c5ae9819582aba", size = 1545690, upload-time = "2026-01-03T17:31:40.57Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f5/04/0c3655a566c43fd647c81b895dfe361b9f9ad6d58c19309d45cff52d6c3b/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:042e9e0bcb5fba81886c8b4fbb9a09d6b8a00245fd8d88e4d989c1f96c74164c", size = 1746390, upload-time = "2026-01-03T17:31:42.857Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1f/53/71165b26978f719c3419381514c9690bd5980e764a09440a10bb816ea4ab/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2eb752b102b12a76ca02dff751a801f028b4ffbbc478840b473597fc91a9ed43", size = 1702188, upload-time = "2026-01-03T17:31:44.984Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/29/a7/cbe6c9e8e136314fa1980da388a59d2f35f35395948a08b6747baebb6aa6/aiohttp-3.13.3-cp314-cp314-win32.whl", hash = "sha256:b556c85915d8efaed322bf1bdae9486aa0f3f764195a0fb6ee962e5c71ef5ce1", size = 433126, upload-time = "2026-01-03T17:31:47.463Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/de/56/982704adea7d3b16614fc5936014e9af85c0e34b58f9046655817f04306e/aiohttp-3.13.3-cp314-cp314-win_amd64.whl", hash = "sha256:9bf9f7a65e7aa20dd764151fb3d616c81088f91f8df39c3893a536e279b4b984", size = 459128, upload-time = "2026-01-03T17:31:49.2Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/2a/3c79b638a9c3d4658d345339d22070241ea341ed4e07b5ac60fb0f418003/aiohttp-3.13.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:05861afbbec40650d8a07ea324367cb93e9e8cc7762e04dd4405df99fa65159c", size = 769512, upload-time = "2026-01-03T17:31:51.134Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/29/b9/3e5014d46c0ab0db8707e0ac2711ed28c4da0218c358a4e7c17bae0d8722/aiohttp-3.13.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2fc82186fadc4a8316768d61f3722c230e2c1dcab4200d52d2ebdf2482e47592", size = 506444, upload-time = "2026-01-03T17:31:52.85Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/03/c1d4ef9a054e151cd7839cdc497f2638f00b93cbe8043983986630d7a80c/aiohttp-3.13.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0add0900ff220d1d5c5ebbf99ed88b0c1bbf87aa7e4262300ed1376a6b13414f", size = 510798, upload-time = "2026-01-03T17:31:54.91Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/76/8c1e5abbfe8e127c893fe7ead569148a4d5a799f7cf958d8c09f3eedf097/aiohttp-3.13.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:568f416a4072fbfae453dcf9a99194bbb8bdeab718e08ee13dfa2ba0e4bebf29", size = 1868835, upload-time = "2026-01-03T17:31:56.733Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8e/ac/984c5a6f74c363b01ff97adc96a3976d9c98940b8969a1881575b279ac5d/aiohttp-3.13.3-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:add1da70de90a2569c5e15249ff76a631ccacfe198375eead4aadf3b8dc849dc", size = 1720486, upload-time = "2026-01-03T17:31:58.65Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b2/9a/b7039c5f099c4eb632138728828b33428585031a1e658d693d41d07d89d1/aiohttp-3.13.3-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:10b47b7ba335d2e9b1239fa571131a87e2d8ec96b333e68b2a305e7a98b0bae2", size = 1847951, upload-time = "2026-01-03T17:32:00.989Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/02/3bec2b9a1ba3c19ff89a43a19324202b8eb187ca1e928d8bdac9bbdddebd/aiohttp-3.13.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3dd4dce1c718e38081c8f35f323209d4c1df7d4db4bab1b5c88a6b4d12b74587", size = 1941001, upload-time = "2026-01-03T17:32:03.122Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/df/d879401cedeef27ac4717f6426c8c36c3091c6e9f08a9178cc87549c537f/aiohttp-3.13.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34bac00a67a812570d4a460447e1e9e06fae622946955f939051e7cc895cfab8", size = 1797246, upload-time = "2026-01-03T17:32:05.255Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/15/be122de1f67e6953add23335c8ece6d314ab67c8bebb3f181063010795a7/aiohttp-3.13.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a19884d2ee70b06d9204b2727a7b9f983d0c684c650254679e716b0b77920632", size = 1627131, upload-time = "2026-01-03T17:32:07.607Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/12/12/70eedcac9134cfa3219ab7af31ea56bc877395b1ac30d65b1bc4b27d0438/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5f8ca7f2bb6ba8348a3614c7918cc4bb73268c5ac2a207576b7afea19d3d9f64", size = 1795196, upload-time = "2026-01-03T17:32:09.59Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/32/11/b30e1b1cd1f3054af86ebe60df96989c6a414dd87e27ad16950eee420bea/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:b0d95340658b9d2f11d9697f59b3814a9d3bb4b7a7c20b131df4bcef464037c0", size = 1782841, upload-time = "2026-01-03T17:32:11.445Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/88/0d/d98a9367b38912384a17e287850f5695c528cff0f14f791ce8ee2e4f7796/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:a1e53262fd202e4b40b70c3aff944a8155059beedc8a89bba9dc1f9ef06a1b56", size = 1795193, upload-time = "2026-01-03T17:32:13.705Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/43/a5/a2dfd1f5ff5581632c7f6a30e1744deda03808974f94f6534241ef60c751/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:d60ac9663f44168038586cab2157e122e46bdef09e9368b37f2d82d354c23f72", size = 1621979, upload-time = "2026-01-03T17:32:15.965Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/f0/12973c382ae7c1cccbc4417e129c5bf54c374dfb85af70893646e1f0e749/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:90751b8eed69435bac9ff4e3d2f6b3af1f57e37ecb0fbeee59c0174c9e2d41df", size = 1822193, upload-time = "2026-01-03T17:32:18.219Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/5f/24155e30ba7f8c96918af1350eb0663e2430aad9e001c0489d89cd708ab1/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fc353029f176fd2b3ec6cfc71be166aba1936fe5d73dd1992ce289ca6647a9aa", size = 1769801, upload-time = "2026-01-03T17:32:20.25Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/f8/7314031ff5c10e6ece114da79b338ec17eeff3a079e53151f7e9f43c4723/aiohttp-3.13.3-cp314-cp314t-win32.whl", hash = "sha256:2e41b18a58da1e474a057b3d35248d8320029f61d70a37629535b16a0c8f3767", size = 466523, upload-time = "2026-01-03T17:32:22.215Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b4/63/278a98c715ae467624eafe375542d8ba9b4383a016df8fdefe0ae28382a7/aiohttp-3.13.3-cp314-cp314t-win_amd64.whl", hash = "sha256:44531a36aa2264a1860089ffd4dce7baf875ee5a6079d5fb42e261c704ef7344", size = 499694, upload-time = "2026-01-03T17:32:24.546Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/29/6657cc37ae04cacc2dbf53fb730a06b6091cc4cbe745028e047c53e6d840/aiohttp-3.13.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:e0a2c961fc92abeff61d6444f2ce6ad35bb982db9fc8ff8a47455beacf454a57", size = 749363, upload-time = "2026-03-28T17:17:24.044Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/7f/30ccdf67ca3d24b610067dc63d64dcb91e5d88e27667811640644aa4a85d/aiohttp-3.13.4-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:153274535985a0ff2bff1fb6c104ed547cec898a09213d21b0f791a44b14d933", size = 499317, upload-time = "2026-03-28T17:17:26.199Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/93/13/e372dd4e68ad04ee25dafb050c7f98b0d91ea643f7352757e87231102555/aiohttp-3.13.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:351f3171e2458da3d731ce83f9e6b9619e325c45cbd534c7759750cabf453ad7", size = 500477, upload-time = "2026-03-28T17:17:28.279Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e5/fe/ee6298e8e586096fb6f5eddd31393d8544f33ae0792c71ecbb4c2bef98ac/aiohttp-3.13.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f989ac8bc5595ff761a5ccd32bdb0768a117f36dd1504b1c2c074ed5d3f4df9c", size = 1737227, upload-time = "2026-03-28T17:17:30.587Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b0/b9/a7a0463a09e1a3fe35100f74324f23644bfc3383ac5fd5effe0722a5f0b7/aiohttp-3.13.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d36fc1709110ec1e87a229b201dd3ddc32aa01e98e7868083a794609b081c349", size = 1694036, upload-time = "2026-03-28T17:17:33.29Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/57/7c/8972ae3fb7be00a91aee6b644b2a6a909aedb2c425269a3bfd90115e6f8f/aiohttp-3.13.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:42adaeea83cbdf069ab94f5103ce0787c21fb1a0153270da76b59d5578302329", size = 1786814, upload-time = "2026-03-28T17:17:36.035Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/93/01/c81e97e85c774decbaf0d577de7d848934e8166a3a14ad9f8aa5be329d28/aiohttp-3.13.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:92deb95469928cc41fd4b42a95d8012fa6df93f6b1c0a83af0ffbc4a5e218cde", size = 1866676, upload-time = "2026-03-28T17:17:38.441Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5a/5f/5b46fe8694a639ddea2cd035bf5729e4677ea882cb251396637e2ef1590d/aiohttp-3.13.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0c0c7c07c4257ef3a1df355f840bc62d133bcdef5c1c5ba75add3c08553e2eed", size = 1740842, upload-time = "2026-03-28T17:17:40.783Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/20/a2/0d4b03d011cca6b6b0acba8433193c1e484efa8d705ea58295590fe24203/aiohttp-3.13.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f062c45de8a1098cb137a1898819796a2491aec4e637a06b03f149315dff4d8f", size = 1566508, upload-time = "2026-03-28T17:17:43.235Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/98/17/e689fd500da52488ec5f889effd6404dece6a59de301e380f3c64f167beb/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:76093107c531517001114f0ebdb4f46858ce818590363e3e99a4a2280334454a", size = 1700569, upload-time = "2026-03-28T17:17:46.165Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d8/0d/66402894dbcf470ef7db99449e436105ea862c24f7ea4c95c683e635af35/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:6f6ec32162d293b82f8b63a16edc80769662fbd5ae6fbd4936d3206a2c2cc63b", size = 1707407, upload-time = "2026-03-28T17:17:48.825Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/eb/af0ab1a3650092cbd8e14ef29e4ab0209e1460e1c299996c3f8288b3f1ff/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:5903e2db3d202a00ad9f0ec35a122c005e85d90c9836ab4cda628f01edf425e2", size = 1752214, upload-time = "2026-03-28T17:17:51.206Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5a/bf/72326f8a98e4c666f292f03c385545963cc65e358835d2a7375037a97b57/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2d5bea57be7aca98dbbac8da046d99b5557c5cf4e28538c4c786313078aca09e", size = 1562162, upload-time = "2026-03-28T17:17:53.634Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/67/9f/13b72435f99151dd9a5469c96b3b5f86aa29b7e785ca7f35cf5e538f74c0/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:bcf0c9902085976edc0232b75006ef38f89686901249ce14226b6877f88464fb", size = 1768904, upload-time = "2026-03-28T17:17:55.991Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/18/bc/28d4970e7d5452ac7776cdb5431a1164a0d9cf8bd2fffd67b4fb463aa56d/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c3295f98bfeed2e867cab588f2a146a9db37a85e3ae9062abf46ba062bd29165", size = 1723378, upload-time = "2026-03-28T17:17:58.348Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/53/74/b32458ca1a7f34d65bdee7aef2036adbe0438123d3d53e2b083c453c24dd/aiohttp-3.13.4-cp314-cp314-win32.whl", hash = "sha256:a598a5c5767e1369d8f5b08695cab1d8160040f796c4416af76fd773d229b3c9", size = 438711, upload-time = "2026-03-28T17:18:00.728Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/40/b2/54b487316c2df3e03a8f3435e9636f8a81a42a69d942164830d193beb56a/aiohttp-3.13.4-cp314-cp314-win_amd64.whl", hash = "sha256:c555db4bc7a264bead5a7d63d92d41a1122fcd39cc62a4db815f45ad46f9c2c8", size = 464977, upload-time = "2026-03-28T17:18:03.367Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/fb/e41b63c6ce71b07a59243bb8f3b457ee0c3402a619acb9d2c0d21ef0e647/aiohttp-3.13.4-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:45abbbf09a129825d13c18c7d3182fecd46d9da3cfc383756145394013604ac1", size = 781549, upload-time = "2026-03-28T17:18:05.779Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/97/53/532b8d28df1e17e44c4d9a9368b78dcb6bf0b51037522136eced13afa9e8/aiohttp-3.13.4-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:74c80b2bc2c2adb7b3d1941b2b60701ee2af8296fc8aad8b8bc48bc25767266c", size = 514383, upload-time = "2026-03-28T17:18:08.096Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1b/1f/62e5d400603e8468cd635812d99cb81cfdc08127a3dc474c647615f31339/aiohttp-3.13.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c97989ae40a9746650fa196894f317dafc12227c808c774929dda0ff873a5954", size = 518304, upload-time = "2026-03-28T17:18:10.642Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/57/2326b37b10896447e3c6e0cbef4fe2486d30913639a5cfd1332b5d870f82/aiohttp-3.13.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dae86be9811493f9990ef44fff1685f5c1a3192e9061a71a109d527944eed551", size = 1893433, upload-time = "2026-03-28T17:18:13.121Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d2/b4/a24d82112c304afdb650167ef2fe190957d81cbddac7460bedd245f765aa/aiohttp-3.13.4-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:1db491abe852ca2fa6cc48a3341985b0174b3741838e1341b82ac82c8bd9e871", size = 1755901, upload-time = "2026-03-28T17:18:16.21Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9e/2d/0883ef9d878d7846287f036c162a951968f22aabeef3ac97b0bea6f76d5d/aiohttp-3.13.4-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0e5d701c0aad02a7dce72eef6b93226cf3734330f1a31d69ebbf69f33b86666e", size = 1876093, upload-time = "2026-03-28T17:18:18.703Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ad/52/9204bb59c014869b71971addad6778f005daa72a96eed652c496789d7468/aiohttp-3.13.4-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8ac32a189081ae0a10ba18993f10f338ec94341f0d5df8fff348043962f3c6f8", size = 1970815, upload-time = "2026-03-28T17:18:21.858Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/b5/e4eb20275a866dde0f570f411b36c6b48f7b53edfe4f4071aa1b0728098a/aiohttp-3.13.4-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:98e968cdaba43e45c73c3f306fca418c8009a957733bac85937c9f9cf3f4de27", size = 1816223, upload-time = "2026-03-28T17:18:24.729Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d8/23/e98075c5bb146aa61a1239ee1ac7714c85e814838d6cebbe37d3fe19214a/aiohttp-3.13.4-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca114790c9144c335d538852612d3e43ea0f075288f4849cf4b05d6cd2238ce7", size = 1649145, upload-time = "2026-03-28T17:18:27.269Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/c1/7bad8be33bb06c2bb224b6468874346026092762cbec388c3bdb65a368ee/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ea2e071661ba9cfe11eabbc81ac5376eaeb3061f6e72ec4cc86d7cdd1ffbdbbb", size = 1816562, upload-time = "2026-03-28T17:18:29.847Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/10/c00323348695e9a5e316825969c88463dcc24c7e9d443244b8a2c9cf2eae/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:34e89912b6c20e0fd80e07fa401fd218a410aa1ce9f1c2f1dad6db1bd0ce0927", size = 1800333, upload-time = "2026-03-28T17:18:32.269Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/43/9b2147a1df3559f49bd723e22905b46a46c068a53adb54abdca32c4de180/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:0e217cf9f6a42908c52b46e42c568bd57adc39c9286ced31aaace614b6087965", size = 1820617, upload-time = "2026-03-28T17:18:35.238Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a9/7f/b3481a81e7a586d02e99387b18c6dafff41285f6efd3daa2124c01f87eae/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:0c296f1221e21ba979f5ac1964c3b78cfde15c5c5f855ffd2caab337e9cd9182", size = 1643417, upload-time = "2026-03-28T17:18:37.949Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8f/72/07181226bc99ce1124e0f89280f5221a82d3ae6a6d9d1973ce429d48e52b/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d99a9d168ebaffb74f36d011750e490085ac418f4db926cce3989c8fe6cb6b1b", size = 1849286, upload-time = "2026-03-28T17:18:40.534Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/e6/1b3566e103eca6da5be4ae6713e112a053725c584e96574caf117568ffef/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cb19177205d93b881f3f89e6081593676043a6828f59c78c17a0fd6c1fbed2ba", size = 1782635, upload-time = "2026-03-28T17:18:43.073Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/58/1b11c71904b8d079eb0c39fe664180dd1e14bebe5608e235d8bfbadc8929/aiohttp-3.13.4-cp314-cp314t-win32.whl", hash = "sha256:c606aa5656dab6552e52ca368e43869c916338346bfaf6304e15c58fb113ea30", size = 472537, upload-time = "2026-03-28T17:18:46.286Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/8f/87c56a1a1977d7dddea5b31e12189665a140fdb48a71e9038ff90bb564ec/aiohttp-3.13.4-cp314-cp314t-win_amd64.whl", hash = "sha256:014dcc10ec8ab8db681f0d68e939d1e9286a5aa2b993cbbdb0db130853e02144", size = 506381, upload-time = "2026-03-28T17:18:48.74Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -347,55 +347,55 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "cryptography"
|
||||
version = "46.0.5"
|
||||
version = "46.0.7"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/60/04/ee2a9e8542e4fa2773b81771ff8349ff19cdd56b7258a0cc442639052edb/cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d", size = 750064, upload-time = "2026-02-10T19:18:38.255Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/47/93/ac8f3d5ff04d54bc814e961a43ae5b0b146154c89c61b47bb07557679b18/cryptography-46.0.7.tar.gz", hash = "sha256:e4cfd68c5f3e0bfdad0d38e023239b96a2fe84146481852dffbcca442c245aa5", size = 750652, upload-time = "2026-04-08T01:57:54.692Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/f7/81/b0bb27f2ba931a65409c6b8a8b358a7f03c0e46eceacddff55f7c84b1f3b/cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad", size = 7176289, upload-time = "2026-02-10T19:17:08.274Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/9e/6b4397a3e3d15123de3b1806ef342522393d50736c13b20ec4c9ea6693a6/cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b", size = 4275637, upload-time = "2026-02-10T19:17:10.53Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/63/e7/471ab61099a3920b0c77852ea3f0ea611c9702f651600397ac567848b897/cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b", size = 4424742, upload-time = "2026-02-10T19:17:12.388Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/53/a18500f270342d66bf7e4d9f091114e31e5ee9e7375a5aba2e85a91e0044/cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263", size = 4277528, upload-time = "2026-02-10T19:17:13.853Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/29/c2e812ebc38c57b40e7c583895e73c8c5adb4d1e4a0cc4c5a4fdab2b1acc/cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d", size = 4947993, upload-time = "2026-02-10T19:17:15.618Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6b/e7/237155ae19a9023de7e30ec64e5d99a9431a567407ac21170a046d22a5a3/cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed", size = 4456855, upload-time = "2026-02-10T19:17:17.221Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2d/87/fc628a7ad85b81206738abbd213b07702bcbdada1dd43f72236ef3cffbb5/cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2", size = 3984635, upload-time = "2026-02-10T19:17:18.792Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/29/65b55622bde135aedf4565dc509d99b560ee4095e56989e815f8fd2aa910/cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2", size = 4277038, upload-time = "2026-02-10T19:17:20.256Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/36/45e76c68d7311432741faf1fbf7fac8a196a0a735ca21f504c75d37e2558/cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0", size = 4912181, upload-time = "2026-02-10T19:17:21.825Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/1a/c1ba8fead184d6e3d5afcf03d569acac5ad063f3ac9fb7258af158f7e378/cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731", size = 4456482, upload-time = "2026-02-10T19:17:25.133Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/e5/3fb22e37f66827ced3b902cf895e6a6bc1d095b5b26be26bd13c441fdf19/cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82", size = 4405497, upload-time = "2026-02-10T19:17:26.66Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/df/9d58bb32b1121a8a2f27383fabae4d63080c7ca60b9b5c88be742be04ee7/cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1", size = 4667819, upload-time = "2026-02-10T19:17:28.569Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/ed/325d2a490c5e94038cdb0117da9397ece1f11201f425c4e9c57fe5b9f08b/cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48", size = 3028230, upload-time = "2026-02-10T19:17:30.518Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e9/5a/ac0f49e48063ab4255d9e3b79f5def51697fce1a95ea1370f03dc9db76f6/cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4", size = 3480909, upload-time = "2026-02-10T19:17:32.083Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/00/13/3d278bfa7a15a96b9dc22db5a12ad1e48a9eb3d40e1827ef66a5df75d0d0/cryptography-46.0.5-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:94a76daa32eb78d61339aff7952ea819b1734b46f73646a07decb40e5b3448e2", size = 7119287, upload-time = "2026-02-10T19:17:33.801Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/67/c8/581a6702e14f0898a0848105cbefd20c058099e2c2d22ef4e476dfec75d7/cryptography-46.0.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5be7bf2fb40769e05739dd0046e7b26f9d4670badc7b032d6ce4db64dddc0678", size = 4265728, upload-time = "2026-02-10T19:17:35.569Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dd/4a/ba1a65ce8fc65435e5a849558379896c957870dd64fecea97b1ad5f46a37/cryptography-46.0.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe346b143ff9685e40192a4960938545c699054ba11d4f9029f94751e3f71d87", size = 4408287, upload-time = "2026-02-10T19:17:36.938Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/67/8ffdbf7b65ed1ac224d1c2df3943553766914a8ca718747ee3871da6107e/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c69fd885df7d089548a42d5ec05be26050ebcd2283d89b3d30676eb32ff87dee", size = 4270291, upload-time = "2026-02-10T19:17:38.748Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/e5/f52377ee93bc2f2bba55a41a886fd208c15276ffbd2569f2ddc89d50e2c5/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:8293f3dea7fc929ef7240796ba231413afa7b68ce38fd21da2995549f5961981", size = 4927539, upload-time = "2026-02-10T19:17:40.241Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3b/02/cfe39181b02419bbbbcf3abdd16c1c5c8541f03ca8bda240debc467d5a12/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:1abfdb89b41c3be0365328a410baa9df3ff8a9110fb75e7b52e66803ddabc9a9", size = 4442199, upload-time = "2026-02-10T19:17:41.789Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c0/96/2fcaeb4873e536cf71421a388a6c11b5bc846e986b2b069c79363dc1648e/cryptography-46.0.5-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:d66e421495fdb797610a08f43b05269e0a5ea7f5e652a89bfd5a7d3c1dee3648", size = 3960131, upload-time = "2026-02-10T19:17:43.379Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d8/d2/b27631f401ddd644e94c5cf33c9a4069f72011821cf3dc7309546b0642a0/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:4e817a8920bfbcff8940ecfd60f23d01836408242b30f1a708d93198393a80b4", size = 4270072, upload-time = "2026-02-10T19:17:45.481Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/a7/60d32b0370dae0b4ebe55ffa10e8599a2a59935b5ece1b9f06edb73abdeb/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:68f68d13f2e1cb95163fa3b4db4bf9a159a418f5f6e7242564fc75fcae667fd0", size = 4892170, upload-time = "2026-02-10T19:17:46.997Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d2/b9/cf73ddf8ef1164330eb0b199a589103c363afa0cf794218c24d524a58eab/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a3d1fae9863299076f05cb8a778c467578262fae09f9dc0ee9b12eb4268ce663", size = 4441741, upload-time = "2026-02-10T19:17:48.661Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/eb/eee00b28c84c726fe8fa0158c65afe312d9c3b78d9d01daf700f1f6e37ff/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4143987a42a2397f2fc3b4d7e3a7d313fbe684f67ff443999e803dd75a76826", size = 4396728, upload-time = "2026-02-10T19:17:50.058Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/65/f4/6bc1a9ed5aef7145045114b75b77c2a8261b4d38717bd8dea111a63c3442/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7d731d4b107030987fd61a7f8ab512b25b53cef8f233a97379ede116f30eb67d", size = 4652001, upload-time = "2026-02-10T19:17:51.54Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/86/ef/5d00ef966ddd71ac2e6951d278884a84a40ffbd88948ef0e294b214ae9e4/cryptography-46.0.5-cp314-cp314t-win32.whl", hash = "sha256:c3bcce8521d785d510b2aad26ae2c966092b7daa8f45dd8f44734a104dc0bc1a", size = 3003637, upload-time = "2026-02-10T19:17:52.997Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b7/57/f3f4160123da6d098db78350fdfd9705057aad21de7388eacb2401dceab9/cryptography-46.0.5-cp314-cp314t-win_amd64.whl", hash = "sha256:4d8ae8659ab18c65ced284993c2265910f6c9e650189d4e3f68445ef82a810e4", size = 3469487, upload-time = "2026-02-10T19:17:54.549Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/fa/a66aa722105ad6a458bebd64086ca2b72cdd361fed31763d20390f6f1389/cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31", size = 7170514, upload-time = "2026-02-10T19:17:56.267Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0f/04/c85bdeab78c8bc77b701bf0d9bdcf514c044e18a46dcff330df5448631b0/cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18", size = 4275349, upload-time = "2026-02-10T19:17:58.419Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/32/9b87132a2f91ee7f5223b091dc963055503e9b442c98fc0b8a5ca765fab0/cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235", size = 4420667, upload-time = "2026-02-10T19:18:00.619Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a1/a6/a7cb7010bec4b7c5692ca6f024150371b295ee1c108bdc1c400e4c44562b/cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a", size = 4276980, upload-time = "2026-02-10T19:18:02.379Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8e/7c/c4f45e0eeff9b91e3f12dbd0e165fcf2a38847288fcfd889deea99fb7b6d/cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76", size = 4939143, upload-time = "2026-02-10T19:18:03.964Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/19/e1b8f964a834eddb44fa1b9a9976f4e414cbb7aa62809b6760c8803d22d1/cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614", size = 4453674, upload-time = "2026-02-10T19:18:05.588Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/db/ed/db15d3956f65264ca204625597c410d420e26530c4e2943e05a0d2f24d51/cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229", size = 3978801, upload-time = "2026-02-10T19:18:07.167Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/41/e2/df40a31d82df0a70a0daf69791f91dbb70e47644c58581d654879b382d11/cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1", size = 4276755, upload-time = "2026-02-10T19:18:09.813Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/33/45/726809d1176959f4a896b86907b98ff4391a8aa29c0aaaf9450a8a10630e/cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d", size = 4901539, upload-time = "2026-02-10T19:18:11.263Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/99/0f/a3076874e9c88ecb2ecc31382f6e7c21b428ede6f55aafa1aa272613e3cd/cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c", size = 4452794, upload-time = "2026-02-10T19:18:12.914Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/02/ef/ffeb542d3683d24194a38f66ca17c0a4b8bf10631feef44a7ef64e631b1a/cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4", size = 4404160, upload-time = "2026-02-10T19:18:14.375Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/96/93/682d2b43c1d5f1406ed048f377c0fc9fc8f7b0447a478d5c65ab3d3a66eb/cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9", size = 4667123, upload-time = "2026-02-10T19:18:15.886Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/45/2d/9c5f2926cb5300a8eefc3f4f0b3f3df39db7f7ce40c8365444c49363cbda/cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72", size = 3010220, upload-time = "2026-02-10T19:18:17.361Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/48/ef/0c2f4a8e31018a986949d34a01115dd057bf536905dca38897bacd21fac3/cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595", size = 3467050, upload-time = "2026-02-10T19:18:18.899Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0b/5d/4a8f770695d73be252331e60e526291e3df0c9b27556a90a6b47bccca4c2/cryptography-46.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:ea42cbe97209df307fdc3b155f1b6fa2577c0defa8f1f7d3be7d31d189108ad4", size = 7179869, upload-time = "2026-04-08T01:56:17.157Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/45/6d80dc379b0bbc1f9d1e429f42e4cb9e1d319c7a8201beffd967c516ea01/cryptography-46.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b36a4695e29fe69215d75960b22577197aca3f7a25b9cf9d165dcfe9d80bc325", size = 4275492, upload-time = "2026-04-08T01:56:19.36Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4a/9a/1765afe9f572e239c3469f2cb429f3ba7b31878c893b246b4b2994ffe2fe/cryptography-46.0.7-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5ad9ef796328c5e3c4ceed237a183f5d41d21150f972455a9d926593a1dcb308", size = 4426670, upload-time = "2026-04-08T01:56:21.415Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8f/3e/af9246aaf23cd4ee060699adab1e47ced3f5f7e7a8ffdd339f817b446462/cryptography-46.0.7-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:73510b83623e080a2c35c62c15298096e2a5dc8d51c3b4e1740211839d0dea77", size = 4280275, upload-time = "2026-04-08T01:56:23.539Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0f/54/6bbbfc5efe86f9d71041827b793c24811a017c6ac0fd12883e4caa86b8ed/cryptography-46.0.7-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cbd5fb06b62bd0721e1170273d3f4d5a277044c47ca27ee257025146c34cbdd1", size = 4928402, upload-time = "2026-04-08T01:56:25.624Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2d/cf/054b9d8220f81509939599c8bdbc0c408dbd2bdd41688616a20731371fe0/cryptography-46.0.7-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:420b1e4109cc95f0e5700eed79908cef9268265c773d3a66f7af1eef53d409ef", size = 4459985, upload-time = "2026-04-08T01:56:27.309Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/46/4e4e9c6040fb01c7467d47217d2f882daddeb8828f7df800cb806d8a2288/cryptography-46.0.7-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:24402210aa54baae71d99441d15bb5a1919c195398a87b563df84468160a65de", size = 3990652, upload-time = "2026-04-08T01:56:29.095Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/36/5f/313586c3be5a2fbe87e4c9a254207b860155a8e1f3cca99f9910008e7d08/cryptography-46.0.7-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:8a469028a86f12eb7d2fe97162d0634026d92a21f3ae0ac87ed1c4a447886c83", size = 4279805, upload-time = "2026-04-08T01:56:30.928Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/69/33/60dfc4595f334a2082749673386a4d05e4f0cf4df8248e63b2c3437585f2/cryptography-46.0.7-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:9694078c5d44c157ef3162e3bf3946510b857df5a3955458381d1c7cfc143ddb", size = 4892883, upload-time = "2026-04-08T01:56:32.614Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/0b/333ddab4270c4f5b972f980adef4faa66951a4aaf646ca067af597f15563/cryptography-46.0.7-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:42a1e5f98abb6391717978baf9f90dc28a743b7d9be7f0751a6f56a75d14065b", size = 4459756, upload-time = "2026-04-08T01:56:34.306Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d2/14/633913398b43b75f1234834170947957c6b623d1701ffc7a9600da907e89/cryptography-46.0.7-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91bbcb08347344f810cbe49065914fe048949648f6bd5c2519f34619142bbe85", size = 4410244, upload-time = "2026-04-08T01:56:35.977Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/10/f2/19ceb3b3dc14009373432af0c13f46aa08e3ce334ec6eff13492e1812ccd/cryptography-46.0.7-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5d1c02a14ceb9148cc7816249f64f623fbfee39e8c03b3650d842ad3f34d637e", size = 4674868, upload-time = "2026-04-08T01:56:38.034Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/bb/a5c213c19ee94b15dfccc48f363738633a493812687f5567addbcbba9f6f/cryptography-46.0.7-cp311-abi3-win32.whl", hash = "sha256:d23c8ca48e44ee015cd0a54aeccdf9f09004eba9fc96f38c911011d9ff1bd457", size = 3026504, upload-time = "2026-04-08T01:56:39.666Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/02/7788f9fefa1d060ca68717c3901ae7fffa21ee087a90b7f23c7a603c32ae/cryptography-46.0.7-cp311-abi3-win_amd64.whl", hash = "sha256:397655da831414d165029da9bc483bed2fe0e75dde6a1523ec2fe63f3c46046b", size = 3488363, upload-time = "2026-04-08T01:56:41.893Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7b/56/15619b210e689c5403bb0540e4cb7dbf11a6bf42e483b7644e471a2812b3/cryptography-46.0.7-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:d151173275e1728cf7839aaa80c34fe550c04ddb27b34f48c232193df8db5842", size = 7119671, upload-time = "2026-04-08T01:56:44Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/74/66/e3ce040721b0b5599e175ba91ab08884c75928fbeb74597dd10ef13505d2/cryptography-46.0.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:db0f493b9181c7820c8134437eb8b0b4792085d37dbb24da050476ccb664e59c", size = 4268551, upload-time = "2026-04-08T01:56:46.071Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/03/11/5e395f961d6868269835dee1bafec6a1ac176505a167f68b7d8818431068/cryptography-46.0.7-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ebd6daf519b9f189f85c479427bbd6e9c9037862cf8fe89ee35503bd209ed902", size = 4408887, upload-time = "2026-04-08T01:56:47.718Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/40/53/8ed1cf4c3b9c8e611e7122fb56f1c32d09e1fff0f1d77e78d9ff7c82653e/cryptography-46.0.7-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:b7b412817be92117ec5ed95f880defe9cf18a832e8cafacf0a22337dc1981b4d", size = 4271354, upload-time = "2026-04-08T01:56:49.312Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/50/46/cf71e26025c2e767c5609162c866a78e8a2915bbcfa408b7ca495c6140c4/cryptography-46.0.7-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:fbfd0e5f273877695cb93baf14b185f4878128b250cc9f8e617ea0c025dfb022", size = 4905845, upload-time = "2026-04-08T01:56:50.916Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c0/ea/01276740375bac6249d0a971ebdf6b4dc9ead0ee0a34ef3b5a88c1a9b0d4/cryptography-46.0.7-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:ffca7aa1d00cf7d6469b988c581598f2259e46215e0140af408966a24cf086ce", size = 4444641, upload-time = "2026-04-08T01:56:52.882Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3d/4c/7d258f169ae71230f25d9f3d06caabcff8c3baf0978e2b7d65e0acac3827/cryptography-46.0.7-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:60627cf07e0d9274338521205899337c5d18249db56865f943cbe753aa96f40f", size = 3967749, upload-time = "2026-04-08T01:56:54.597Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b5/2a/2ea0767cad19e71b3530e4cad9605d0b5e338b6a1e72c37c9c1ceb86c333/cryptography-46.0.7-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:80406c3065e2c55d7f49a9550fe0c49b3f12e5bfff5dedb727e319e1afb9bf99", size = 4270942, upload-time = "2026-04-08T01:56:56.416Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/41/3d/fe14df95a83319af25717677e956567a105bb6ab25641acaa093db79975d/cryptography-46.0.7-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:c5b1ccd1239f48b7151a65bc6dd54bcfcc15e028c8ac126d3fada09db0e07ef1", size = 4871079, upload-time = "2026-04-08T01:56:58.31Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9c/59/4a479e0f36f8f378d397f4eab4c850b4ffb79a2f0d58704b8fa0703ddc11/cryptography-46.0.7-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:d5f7520159cd9c2154eb61eb67548ca05c5774d39e9c2c4339fd793fe7d097b2", size = 4443999, upload-time = "2026-04-08T01:57:00.508Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/28/17/b59a741645822ec6d04732b43c5d35e4ef58be7bfa84a81e5ae6f05a1d33/cryptography-46.0.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:fcd8eac50d9138c1d7fc53a653ba60a2bee81a505f9f8850b6b2888555a45d0e", size = 4399191, upload-time = "2026-04-08T01:57:02.654Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/59/6a/bb2e166d6d0e0955f1e9ff70f10ec4b2824c9cfcdb4da772c7dd69cc7d80/cryptography-46.0.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:65814c60f8cc400c63131584e3e1fad01235edba2614b61fbfbfa954082db0ee", size = 4655782, upload-time = "2026-04-08T01:57:04.592Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/95/b6/3da51d48415bcb63b00dc17c2eff3a651b7c4fed484308d0f19b30e8cb2c/cryptography-46.0.7-cp314-cp314t-win32.whl", hash = "sha256:fdd1736fed309b4300346f88f74cd120c27c56852c3838cab416e7a166f67298", size = 3002227, upload-time = "2026-04-08T01:57:06.91Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/32/a8/9f0e4ed57ec9cebe506e58db11ae472972ecb0c659e4d52bbaee80ca340a/cryptography-46.0.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e06acf3c99be55aa3b516397fe42f5855597f430add9c17fa46bf2e0fb34c9bb", size = 3475332, upload-time = "2026-04-08T01:57:08.807Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a7/7f/cd42fc3614386bc0c12f0cb3c4ae1fc2bbca5c9662dfed031514911d513d/cryptography-46.0.7-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:462ad5cb1c148a22b2e3bcc5ad52504dff325d17daf5df8d88c17dda1f75f2a4", size = 7165618, upload-time = "2026-04-08T01:57:10.645Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a5/d0/36a49f0262d2319139d2829f773f1b97ef8aef7f97e6e5bd21455e5a8fb5/cryptography-46.0.7-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:84d4cced91f0f159a7ddacad249cc077e63195c36aac40b4150e7a57e84fffe7", size = 4270628, upload-time = "2026-04-08T01:57:12.885Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8a/6c/1a42450f464dda6ffbe578a911f773e54dd48c10f9895a23a7e88b3e7db5/cryptography-46.0.7-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:128c5edfe5e5938b86b03941e94fac9ee793a94452ad1365c9fc3f4f62216832", size = 4415405, upload-time = "2026-04-08T01:57:14.923Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9a/92/4ed714dbe93a066dc1f4b4581a464d2d7dbec9046f7c8b7016f5286329e2/cryptography-46.0.7-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5e51be372b26ef4ba3de3c167cd3d1022934bc838ae9eaad7e644986d2a3d163", size = 4272715, upload-time = "2026-04-08T01:57:16.638Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b7/e6/a26b84096eddd51494bba19111f8fffe976f6a09f132706f8f1bf03f51f7/cryptography-46.0.7-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cdf1a610ef82abb396451862739e3fc93b071c844399e15b90726ef7470eeaf2", size = 4918400, upload-time = "2026-04-08T01:57:19.021Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/08/ffd537b605568a148543ac3c2b239708ae0bd635064bab41359252ef88ed/cryptography-46.0.7-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1d25aee46d0c6f1a501adcddb2d2fee4b979381346a78558ed13e50aa8a59067", size = 4450634, upload-time = "2026-04-08T01:57:21.185Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/16/01/0cd51dd86ab5b9befe0d031e276510491976c3a80e9f6e31810cce46c4ad/cryptography-46.0.7-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:cdfbe22376065ffcf8be74dc9a909f032df19bc58a699456a21712d6e5eabfd0", size = 3985233, upload-time = "2026-04-08T01:57:22.862Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/92/49/819d6ed3a7d9349c2939f81b500a738cb733ab62fbecdbc1e38e83d45e12/cryptography-46.0.7-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:abad9dac36cbf55de6eb49badd4016806b3165d396f64925bf2999bcb67837ba", size = 4271955, upload-time = "2026-04-08T01:57:24.814Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/80/07/ad9b3c56ebb95ed2473d46df0847357e01583f4c52a85754d1a55e29e4d0/cryptography-46.0.7-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:935ce7e3cfdb53e3536119a542b839bb94ec1ad081013e9ab9b7cfd478b05006", size = 4879888, upload-time = "2026-04-08T01:57:26.88Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b8/c7/201d3d58f30c4c2bdbe9b03844c291feb77c20511cc3586daf7edc12a47b/cryptography-46.0.7-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:35719dc79d4730d30f1c2b6474bd6acda36ae2dfae1e3c16f2051f215df33ce0", size = 4449961, upload-time = "2026-04-08T01:57:29.068Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a5/ef/649750cbf96f3033c3c976e112265c33906f8e462291a33d77f90356548c/cryptography-46.0.7-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:7bbc6ccf49d05ac8f7d7b5e2e2c33830d4fe2061def88210a126d130d7f71a85", size = 4401696, upload-time = "2026-04-08T01:57:31.029Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/41/52/a8908dcb1a389a459a29008c29966c1d552588d4ae6d43f3a1a4512e0ebe/cryptography-46.0.7-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a1529d614f44b863a7b480c6d000fe93b59acee9c82ffa027cfadc77521a9f5e", size = 4664256, upload-time = "2026-04-08T01:57:33.144Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/fa/f0ab06238e899cc3fb332623f337a7364f36f4bb3f2534c2bb95a35b132c/cryptography-46.0.7-cp38-abi3-win32.whl", hash = "sha256:f247c8c1a1fb45e12586afbb436ef21ff1e80670b2861a90353d9b025583d246", size = 3013001, upload-time = "2026-04-08T01:57:34.933Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d2/f1/00ce3bde3ca542d1acd8f8cfa38e446840945aa6363f9b74746394b14127/cryptography-46.0.7-cp38-abi3-win_amd64.whl", hash = "sha256:506c4ff91eff4f82bdac7633318a526b1d1309fc07ca76a3ad182cb5b686d6d3", size = 3472985, upload-time = "2026-04-08T01:57:36.714Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -445,6 +445,7 @@ dependencies = [
|
||||
{ name = "bcrypt" },
|
||||
{ name = "croniter" },
|
||||
{ name = "fastapi" },
|
||||
{ name = "mcp" },
|
||||
{ name = "paramiko" },
|
||||
{ name = "pillow" },
|
||||
{ name = "pydantic", extra = ["email"] },
|
||||
@@ -466,6 +467,7 @@ requires-dist = [
|
||||
{ name = "bcrypt", specifier = ">=5.0.0" },
|
||||
{ name = "croniter", specifier = ">=6.0.0" },
|
||||
{ name = "fastapi", specifier = ">=0.127.0" },
|
||||
{ name = "mcp", specifier = ">=1.26.0" },
|
||||
{ name = "paramiko", specifier = ">=4.0.0" },
|
||||
{ name = "pillow", specifier = ">=12.0.0" },
|
||||
{ name = "pydantic", extras = ["email"], specifier = ">=2.12.5" },
|
||||
@@ -607,6 +609,15 @@ http2 = [
|
||||
{ name = "h2" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "httpx-sse"
|
||||
version = "0.4.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/0f/4c/751061ffa58615a32c31b2d82e8482be8dd4a89154f003147acee90f2be9/httpx_sse-0.4.3.tar.gz", hash = "sha256:9b1ed0127459a66014aec3c56bebd93da3c1bc8bb6618c8082039a44889a755d", size = 15943, upload-time = "2025-10-10T21:48:22.271Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/d2/fd/6668e5aec43ab844de6fc74927e155a3b37bf40d7c3790e49fc0406b6578/httpx_sse-0.4.3-py3-none-any.whl", hash = "sha256:0ac1c9fe3c0afad2e0ebb25a934a59f4c7823b60792691f779fad2c5568830fc", size = 8960, upload-time = "2025-10-10T21:48:21.158Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hyperframe"
|
||||
version = "6.1.0"
|
||||
@@ -652,6 +663,58 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/14/2f/967ba146e6d58cf6a652da73885f52fc68001525b4197effc174321d70b4/jmespath-1.1.0-py3-none-any.whl", hash = "sha256:a5663118de4908c91729bea0acadca56526eb2698e83de10cd116ae0f4e97c64", size = 20419, upload-time = "2026-01-22T16:35:24.919Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "jsonschema"
|
||||
version = "4.26.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "attrs" },
|
||||
{ name = "jsonschema-specifications" },
|
||||
{ name = "referencing" },
|
||||
{ name = "rpds-py" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/b3/fc/e067678238fa451312d4c62bf6e6cf5ec56375422aee02f9cb5f909b3047/jsonschema-4.26.0.tar.gz", hash = "sha256:0c26707e2efad8aa1bfc5b7ce170f3fccc2e4918ff85989ba9ffa9facb2be326", size = 366583, upload-time = "2026-01-07T13:41:07.246Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl", hash = "sha256:d489f15263b8d200f8387e64b4c3a75f06629559fb73deb8fdfb525f2dab50ce", size = 90630, upload-time = "2026-01-07T13:41:05.306Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "jsonschema-specifications"
|
||||
version = "2025.9.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "referencing" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855, upload-time = "2025-09-08T01:34:59.186Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mcp"
|
||||
version = "1.26.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "anyio" },
|
||||
{ name = "httpx" },
|
||||
{ name = "httpx-sse" },
|
||||
{ name = "jsonschema" },
|
||||
{ name = "pydantic" },
|
||||
{ name = "pydantic-settings" },
|
||||
{ name = "pyjwt", extra = ["crypto"] },
|
||||
{ name = "python-multipart" },
|
||||
{ name = "pywin32", marker = "sys_platform == 'win32'" },
|
||||
{ name = "sse-starlette" },
|
||||
{ name = "starlette" },
|
||||
{ name = "typing-extensions" },
|
||||
{ name = "typing-inspection" },
|
||||
{ name = "uvicorn", marker = "sys_platform != 'emscripten'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/fc/6d/62e76bbb8144d6ed86e202b5edd8a4cb631e7c8130f3f4893c3f90262b10/mcp-1.26.0.tar.gz", hash = "sha256:db6e2ef491eecc1a0d93711a76f28dec2e05999f93afd48795da1c1137142c66", size = 608005, upload-time = "2026-01-24T19:40:32.468Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/fd/d9/eaa1f80170d2b7c5ba23f3b59f766f3a0bb41155fbc32a69adfa1adaaef9/mcp-1.26.0-py3-none-any.whl", hash = "sha256:904a21c33c25aa98ddbeb47273033c435e595bbacfdb177f4bd87f6dceebe1ca", size = 233615, upload-time = "2026-01-24T19:40:30.652Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "milvus-lite"
|
||||
version = "2.5.1"
|
||||
@@ -914,11 +977,11 @@ sdist = { url = "https://files.pythonhosted.org/packages/44/66/2c17bae31c9066137
|
||||
|
||||
[[package]]
|
||||
name = "pyasn1"
|
||||
version = "0.6.2"
|
||||
version = "0.6.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/fe/b6/6e630dff89739fcd427e3f72b3d905ce0acb85a45d4ec3e2678718a3487f/pyasn1-0.6.2.tar.gz", hash = "sha256:9b59a2b25ba7e4f8197db7686c09fb33e658b98339fadb826e9512629017833b", size = 146586, upload-time = "2026-01-16T18:04:18.534Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/5c/5f/6583902b6f79b399c9c40674ac384fd9cd77805f9e6205075f828ef11fb2/pyasn1-0.6.3.tar.gz", hash = "sha256:697a8ecd6d98891189184ca1fa05d1bb00e2f84b5977c481452050549c8a72cf", size = 148685, upload-time = "2026-03-17T01:06:53.382Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/44/b5/a96872e5184f354da9c84ae119971a0a4c221fe9b27a4d94bd43f2596727/pyasn1-0.6.2-py3-none-any.whl", hash = "sha256:1eb26d860996a18e9b6ed05e7aae0e9fc21619fcee6af91cca9bad4fbea224bf", size = 83371, upload-time = "2026-01-16T18:04:17.174Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5d/a0/7d793dce3fa811fe047d6ae2431c672364b462850c6235ae306c0efd025f/pyasn1-0.6.3-py3-none-any.whl", hash = "sha256:a80184d120f0864a52a073acc6fc642847d0be408e7c7252f31390c0f4eadcde", size = 83997, upload-time = "2026-03-17T01:06:52.036Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -989,6 +1052,20 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pydantic-settings"
|
||||
version = "2.13.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pydantic" },
|
||||
{ name = "python-dotenv" },
|
||||
{ name = "typing-inspection" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/52/6d/fffca34caecc4a3f97bda81b2098da5e8ab7efc9a66e819074a11955d87e/pydantic_settings-2.13.1.tar.gz", hash = "sha256:b4c11847b15237fb0171e1462bf540e294affb9b86db4d9aa5c01730bdbe4025", size = 223826, upload-time = "2026-02-19T13:45:08.055Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/00/4b/ccc026168948fec4f7555b9164c724cf4125eac006e176541483d2c959be/pydantic_settings-2.13.1-py3-none-any.whl", hash = "sha256:d56fd801823dbeae7f0975e1f8c8e25c258eb75d278ea7abb5d9cebb01b56237", size = 58929, upload-time = "2026-02-19T13:45:06.034Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyjwt"
|
||||
version = "2.11.0"
|
||||
@@ -998,6 +1075,11 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/6f/01/c26ce75ba460d5cd503da9e13b21a33804d38c2165dec7b716d06b13010c/pyjwt-2.11.0-py3-none-any.whl", hash = "sha256:94a6bde30eb5c8e04fee991062b534071fd1439ef58d2adc9ccb823e7bcd0469", size = 28224, upload-time = "2026-01-30T19:59:54.539Z" },
|
||||
]
|
||||
|
||||
[package.optional-dependencies]
|
||||
crypto = [
|
||||
{ name = "cryptography" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pymilvus"
|
||||
version = "2.6.8"
|
||||
@@ -1141,6 +1223,56 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/08/13/8ce16f808297e16968269de44a14f4fef19b64d9766be1d6ba5ba78b579d/qdrant_client-1.16.2-py3-none-any.whl", hash = "sha256:442c7ef32ae0f005e88b5d3c0783c63d4912b97ae756eb5e052523be682f17d3", size = 377186, upload-time = "2025-12-12T10:58:29.282Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "referencing"
|
||||
version = "0.37.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "attrs" },
|
||||
{ name = "rpds-py" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/22/f5/df4e9027acead3ecc63e50fe1e36aca1523e1719559c499951bb4b53188f/referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8", size = 78036, upload-time = "2025-10-13T15:30:48.871Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231", size = 26766, upload-time = "2025-10-13T15:30:47.625Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rpds-py"
|
||||
version = "0.30.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/20/af/3f2f423103f1113b36230496629986e0ef7e199d2aa8392452b484b38ced/rpds_py-0.30.0.tar.gz", hash = "sha256:dd8ff7cf90014af0c0f787eea34794ebf6415242ee1d6fa91eaba725cc441e84", size = 69469, upload-time = "2025-11-30T20:24:38.837Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/86/81/dad16382ebbd3d0e0328776d8fd7ca94220e4fa0798d1dc5e7da48cb3201/rpds_py-0.30.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:68f19c879420aa08f61203801423f6cd5ac5f0ac4ac82a2368a9fcd6a9a075e0", size = 362099, upload-time = "2025-11-30T20:23:27.316Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/60/19f7884db5d5603edf3c6bce35408f45ad3e97e10007df0e17dd57af18f8/rpds_py-0.30.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ec7c4490c672c1a0389d319b3a9cfcd098dcdc4783991553c332a15acf7249be", size = 353192, upload-time = "2025-11-30T20:23:29.151Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bf/c4/76eb0e1e72d1a9c4703c69607cec123c29028bff28ce41588792417098ac/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f251c812357a3fed308d684a5079ddfb9d933860fc6de89f2b7ab00da481e65f", size = 384080, upload-time = "2025-11-30T20:23:30.785Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/72/87/87ea665e92f3298d1b26d78814721dc39ed8d2c74b86e83348d6b48a6f31/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac98b175585ecf4c0348fd7b29c3864bda53b805c773cbf7bfdaffc8070c976f", size = 394841, upload-time = "2025-11-30T20:23:32.209Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/77/ad/7783a89ca0587c15dcbf139b4a8364a872a25f861bdb88ed99f9b0dec985/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3e62880792319dbeb7eb866547f2e35973289e7d5696c6e295476448f5b63c87", size = 516670, upload-time = "2025-11-30T20:23:33.742Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5b/3c/2882bdac942bd2172f3da574eab16f309ae10a3925644e969536553cb4ee/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e7fc54e0900ab35d041b0601431b0a0eb495f0851a0639b6ef90f7741b39a18", size = 408005, upload-time = "2025-11-30T20:23:35.253Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ce/81/9a91c0111ce1758c92516a3e44776920b579d9a7c09b2b06b642d4de3f0f/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47e77dc9822d3ad616c3d5759ea5631a75e5809d5a28707744ef79d7a1bcfcad", size = 382112, upload-time = "2025-11-30T20:23:36.842Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cf/8e/1da49d4a107027e5fbc64daeab96a0706361a2918da10cb41769244b805d/rpds_py-0.30.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:b4dc1a6ff022ff85ecafef7979a2c6eb423430e05f1165d6688234e62ba99a07", size = 399049, upload-time = "2025-11-30T20:23:38.343Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/df/5a/7ee239b1aa48a127570ec03becbb29c9d5a9eb092febbd1699d567cae859/rpds_py-0.30.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4559c972db3a360808309e06a74628b95eaccbf961c335c8fe0d590cf587456f", size = 415661, upload-time = "2025-11-30T20:23:40.263Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/70/ea/caa143cf6b772f823bc7929a45da1fa83569ee49b11d18d0ada7f5ee6fd6/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:0ed177ed9bded28f8deb6ab40c183cd1192aa0de40c12f38be4d59cd33cb5c65", size = 565606, upload-time = "2025-11-30T20:23:42.186Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/64/91/ac20ba2d69303f961ad8cf55bf7dbdb4763f627291ba3d0d7d67333cced9/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ad1fa8db769b76ea911cb4e10f049d80bf518c104f15b3edb2371cc65375c46f", size = 591126, upload-time = "2025-11-30T20:23:44.086Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/21/20/7ff5f3c8b00c8a95f75985128c26ba44503fb35b8e0259d812766ea966c7/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:46e83c697b1f1c72b50e5ee5adb4353eef7406fb3f2043d64c33f20ad1c2fc53", size = 553371, upload-time = "2025-11-30T20:23:46.004Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/72/c7/81dadd7b27c8ee391c132a6b192111ca58d866577ce2d9b0ca157552cce0/rpds_py-0.30.0-cp314-cp314-win32.whl", hash = "sha256:ee454b2a007d57363c2dfd5b6ca4a5d7e2c518938f8ed3b706e37e5d470801ed", size = 215298, upload-time = "2025-11-30T20:23:47.696Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3e/d2/1aaac33287e8cfb07aab2e6b8ac1deca62f6f65411344f1433c55e6f3eb8/rpds_py-0.30.0-cp314-cp314-win_amd64.whl", hash = "sha256:95f0802447ac2d10bcc69f6dc28fe95fdf17940367b21d34e34c737870758950", size = 228604, upload-time = "2025-11-30T20:23:49.501Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e8/95/ab005315818cc519ad074cb7784dae60d939163108bd2b394e60dc7b5461/rpds_py-0.30.0-cp314-cp314-win_arm64.whl", hash = "sha256:613aa4771c99f03346e54c3f038e4cc574ac09a3ddfb0e8878487335e96dead6", size = 222391, upload-time = "2025-11-30T20:23:50.96Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9e/68/154fe0194d83b973cdedcdcc88947a2752411165930182ae41d983dcefa6/rpds_py-0.30.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:7e6ecfcb62edfd632e56983964e6884851786443739dbfe3582947e87274f7cb", size = 364868, upload-time = "2025-11-30T20:23:52.494Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/83/69/8bbc8b07ec854d92a8b75668c24d2abcb1719ebf890f5604c61c9369a16f/rpds_py-0.30.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a1d0bc22a7cdc173fedebb73ef81e07faef93692b8c1ad3733b67e31e1b6e1b8", size = 353747, upload-time = "2025-11-30T20:23:54.036Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ab/00/ba2e50183dbd9abcce9497fa5149c62b4ff3e22d338a30d690f9af970561/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d08f00679177226c4cb8c5265012eea897c8ca3b93f429e546600c971bcbae7", size = 383795, upload-time = "2025-11-30T20:23:55.556Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/05/6f/86f0272b84926bcb0e4c972262f54223e8ecc556b3224d281e6598fc9268/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5965af57d5848192c13534f90f9dd16464f3c37aaf166cc1da1cae1fd5a34898", size = 393330, upload-time = "2025-11-30T20:23:57.033Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cb/e9/0e02bb2e6dc63d212641da45df2b0bf29699d01715913e0d0f017ee29438/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a4e86e34e9ab6b667c27f3211ca48f73dba7cd3d90f8d5b11be56e5dbc3fb4e", size = 518194, upload-time = "2025-11-30T20:23:58.637Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ee/ca/be7bca14cf21513bdf9c0606aba17d1f389ea2b6987035eb4f62bd923f25/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d3e6b26f2c785d65cc25ef1e5267ccbe1b069c5c21b8cc724efee290554419", size = 408340, upload-time = "2025-11-30T20:24:00.2Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/c7/736e00ebf39ed81d75544c0da6ef7b0998f8201b369acf842f9a90dc8fce/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:626a7433c34566535b6e56a1b39a7b17ba961e97ce3b80ec62e6f1312c025551", size = 383765, upload-time = "2025-11-30T20:24:01.759Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4a/3f/da50dfde9956aaf365c4adc9533b100008ed31aea635f2b8d7b627e25b49/rpds_py-0.30.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:acd7eb3f4471577b9b5a41baf02a978e8bdeb08b4b355273994f8b87032000a8", size = 396834, upload-time = "2025-11-30T20:24:03.687Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4e/00/34bcc2565b6020eab2623349efbdec810676ad571995911f1abdae62a3a0/rpds_py-0.30.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fe5fa731a1fa8a0a56b0977413f8cacac1768dad38d16b3a296712709476fbd5", size = 415470, upload-time = "2025-11-30T20:24:05.232Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8c/28/882e72b5b3e6f718d5453bd4d0d9cf8df36fddeb4ddbbab17869d5868616/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:74a3243a411126362712ee1524dfc90c650a503502f135d54d1b352bd01f2404", size = 565630, upload-time = "2025-11-30T20:24:06.878Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3b/97/04a65539c17692de5b85c6e293520fd01317fd878ea1995f0367d4532fb1/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:3e8eeb0544f2eb0d2581774be4c3410356eba189529a6b3e36bbbf9696175856", size = 591148, upload-time = "2025-11-30T20:24:08.445Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/85/70/92482ccffb96f5441aab93e26c4d66489eb599efdcf96fad90c14bbfb976/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:dbd936cde57abfee19ab3213cf9c26be06d60750e60a8e4dd85d1ab12c8b1f40", size = 556030, upload-time = "2025-11-30T20:24:10.956Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/20/53/7c7e784abfa500a2b6b583b147ee4bb5a2b3747a9166bab52fec4b5b5e7d/rpds_py-0.30.0-cp314-cp314t-win32.whl", hash = "sha256:dc824125c72246d924f7f796b4f63c1e9dc810c7d9e2355864b3c3a73d59ade0", size = 211570, upload-time = "2025-11-30T20:24:12.735Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/02/fa464cdfbe6b26e0600b62c528b72d8608f5cc49f96b8d6e38c95d60c676/rpds_py-0.30.0-cp314-cp314t-win_amd64.whl", hash = "sha256:27f4b0e92de5bfbc6f86e43959e6edd1425c33b5e69aab0984a72047f2bcf1e3", size = 226532, upload-time = "2025-11-30T20:24:14.634Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rsa"
|
||||
version = "4.9.1"
|
||||
@@ -1183,6 +1315,19 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sse-starlette"
|
||||
version = "3.3.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "anyio" },
|
||||
{ name = "starlette" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/5a/9f/c3695c2d2d4ef70072c3a06992850498b01c6bc9be531950813716b426fa/sse_starlette-3.3.2.tar.gz", hash = "sha256:678fca55a1945c734d8472a6cad186a55ab02840b4f6786f5ee8770970579dcd", size = 32326, upload-time = "2026-02-28T11:24:34.36Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/61/28/8cb142d3fe80c4a2d8af54ca0b003f47ce0ba920974e7990fa6e016402d1/sse_starlette-3.3.2-py3-none-any.whl", hash = "sha256:5c3ea3dad425c601236726af2f27689b74494643f57017cafcb6f8c9acfbb862", size = 14270, upload-time = "2026-02-28T11:24:32.984Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "starlette"
|
||||
version = "0.52.1"
|
||||
|
||||
@@ -10,38 +10,7 @@ import { Routes, Route, Navigate } from 'react-router';
|
||||
import SetupPage from './pages/SetupPage.tsx';
|
||||
import { I18nProvider } from './i18n';
|
||||
|
||||
function AppInner() {
|
||||
const [status, setStatus] = useState<SystemStatus | null>(null);
|
||||
useEffect(() => {
|
||||
async function checkInitialization() {
|
||||
try {
|
||||
const status = await getStatus();
|
||||
setStatus(status);
|
||||
document.title = status.title;
|
||||
let favicon = document.querySelector("link[rel*='icon']") as HTMLLinkElement | null;
|
||||
if (!favicon) {
|
||||
favicon = document.createElement('link');
|
||||
favicon.rel = 'icon';
|
||||
document.head.appendChild(favicon);
|
||||
}
|
||||
if (favicon) {
|
||||
favicon.href = status.favicon || status.logo;
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Failed to check initialization status:", error);
|
||||
}
|
||||
}
|
||||
checkInitialization();
|
||||
}, []);
|
||||
|
||||
if (status === null) {
|
||||
return (
|
||||
<div style={{ display: 'flex', justifyContent: 'center', alignItems: 'center', height: '100vh' }}>
|
||||
<Spin size="large" />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function AppInner({ status }: { status: SystemStatus }) {
|
||||
return (
|
||||
<SystemContext.Provider value={status}>
|
||||
<AuthProvider>
|
||||
@@ -61,9 +30,41 @@ function AppInner() {
|
||||
}
|
||||
|
||||
export default function App() {
|
||||
const [status, setStatus] = useState<SystemStatus | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
async function checkInitialization() {
|
||||
try {
|
||||
const nextStatus = await getStatus();
|
||||
setStatus(nextStatus);
|
||||
document.title = nextStatus.title;
|
||||
let favicon = document.querySelector("link[rel*='icon']") as HTMLLinkElement | null;
|
||||
if (!favicon) {
|
||||
favicon = document.createElement('link');
|
||||
favicon.rel = 'icon';
|
||||
document.head.appendChild(favicon);
|
||||
}
|
||||
if (favicon) {
|
||||
favicon.href = nextStatus.favicon || nextStatus.logo;
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Failed to check initialization status:", error);
|
||||
}
|
||||
}
|
||||
checkInitialization();
|
||||
}, []);
|
||||
|
||||
if (status === null) {
|
||||
return (
|
||||
<div style={{ display: 'flex', justifyContent: 'center', alignItems: 'center', height: '100vh' }}>
|
||||
<Spin size="large" />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<I18nProvider>
|
||||
<AppInner />
|
||||
<I18nProvider defaultLanguage={status.default_language}>
|
||||
<AppInner status={status} />
|
||||
</I18nProvider>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -10,13 +10,28 @@ export interface AdapterItem {
|
||||
sub_path?: string | null;
|
||||
}
|
||||
|
||||
export interface AdapterUsage {
|
||||
id: number;
|
||||
name: string;
|
||||
type: string;
|
||||
path: string;
|
||||
supported: boolean;
|
||||
used_bytes?: number | null;
|
||||
total_bytes?: number | null;
|
||||
free_bytes?: number | null;
|
||||
source?: string | null;
|
||||
scope?: string | null;
|
||||
reason?: string | null;
|
||||
}
|
||||
|
||||
export interface AdapterTypeField {
|
||||
key: string;
|
||||
label: string;
|
||||
type: 'string' | 'password' | 'number' | 'boolean';
|
||||
type: 'string' | 'password' | 'number' | 'boolean' | 'select';
|
||||
required?: boolean;
|
||||
placeholder?: string;
|
||||
default?: any;
|
||||
options?: string[];
|
||||
}
|
||||
|
||||
export interface AdapterTypeMeta {
|
||||
@@ -30,4 +45,6 @@ export const adaptersApi = {
|
||||
update: (id: number, payload: Omit<AdapterItem, 'id'>) => request<AdapterItem>(`/adapters/${id}`, { method: 'PUT', json: payload }),
|
||||
remove: (id: number) => request<void>(`/adapters/${id}`, { method: 'DELETE' }),
|
||||
available: () => request<AdapterTypeMeta[]>('/adapters/available'),
|
||||
usage: () => request<AdapterUsage[]>('/adapters/usage'),
|
||||
usageById: (id: number) => request<AdapterUsage>(`/adapters/${id}/usage`),
|
||||
};
|
||||
|
||||
@@ -9,12 +9,18 @@ export interface AgentChatContext {
|
||||
export interface AgentChatRequest {
|
||||
messages: AgentChatMessage[];
|
||||
auto_execute?: boolean;
|
||||
approved_tool_call_ids?: string[];
|
||||
rejected_tool_call_ids?: string[];
|
||||
approved_mcp_call_ids?: string[];
|
||||
rejected_mcp_call_ids?: string[];
|
||||
context?: AgentChatContext;
|
||||
}
|
||||
|
||||
export interface PendingToolCall {
|
||||
export interface McpCall {
|
||||
id: string;
|
||||
name: string;
|
||||
arguments: Record<string, any>;
|
||||
}
|
||||
|
||||
export interface PendingMcpCall {
|
||||
id: string;
|
||||
name: string;
|
||||
arguments: Record<string, any>;
|
||||
@@ -23,16 +29,16 @@ export interface PendingToolCall {
|
||||
|
||||
export interface AgentChatResponse {
|
||||
messages: AgentChatMessage[];
|
||||
pending_tool_calls?: PendingToolCall[];
|
||||
pending_mcp_calls?: PendingMcpCall[];
|
||||
}
|
||||
|
||||
export type AgentSseEvent =
|
||||
| { event: 'assistant_start'; data: { id: string } }
|
||||
| { event: 'assistant_delta'; data: { id: string; delta: string } }
|
||||
| { event: 'assistant_end'; data: { id: string; message: AgentChatMessage } }
|
||||
| { event: 'tool_start'; data: { tool_call_id: string; name: string } }
|
||||
| { event: 'tool_end'; data: { tool_call_id: string; name: string; message: AgentChatMessage } }
|
||||
| { event: 'pending'; data: { pending_tool_calls: PendingToolCall[] } }
|
||||
| { event: 'mcp_call_start'; data: { mcp_call_id: string; name: string } }
|
||||
| { event: 'mcp_call_end'; data: { mcp_call_id: string; name: string; message: AgentChatMessage } }
|
||||
| { event: 'pending'; data: { pending_mcp_calls: PendingMcpCall[] } }
|
||||
| { event: 'done'; data: AgentChatResponse };
|
||||
|
||||
export const agentApi = {
|
||||
|
||||
@@ -71,7 +71,7 @@ async function request<T = any>(url: string, options: RequestOptions = {}): Prom
|
||||
}
|
||||
|
||||
export { vfsApi, type VfsEntry, type DirListing } from './vfs';
|
||||
export { adaptersApi, type AdapterItem, type AdapterTypeField, type AdapterTypeMeta } from './adapters';
|
||||
export { adaptersApi, type AdapterItem, type AdapterTypeField, type AdapterTypeMeta, type AdapterUsage } from './adapters';
|
||||
export { shareApi, type ShareInfo, type ShareInfoWithPassword } from './share';
|
||||
export { offlineDownloadsApi, type OfflineDownloadTask, type OfflineDownloadCreate, type TaskProgress } from './offlineDownloads';
|
||||
export default request;
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import request from './client';
|
||||
import type { Lang } from '../i18n/lang';
|
||||
|
||||
export async function getConfig(key: string) {
|
||||
return request<{ key: string; value: string }>('/config/?key=' + encodeURIComponent(key));
|
||||
@@ -25,6 +26,7 @@ export interface SystemStatus {
|
||||
logo: string;
|
||||
favicon: string;
|
||||
is_initialized: boolean;
|
||||
default_language?: Lang;
|
||||
app_domain?: string;
|
||||
file_domain?: string;
|
||||
}
|
||||
|
||||
@@ -86,7 +86,12 @@ export const vfsApi = {
|
||||
thumb: (path: string, w=256, h=256, fit='cover') =>
|
||||
request<ArrayBuffer>(`/fs/thumb/${encodeURI(path.replace(/^\/+/, ''))}?w=${w}&h=${h}&fit=${fit}`),
|
||||
streamUrl: (path: string) => `${API_BASE_URL}/fs/stream/${encodeURI(path.replace(/^\/+/, ''))}`,
|
||||
stat: (path: string) => request(`/fs/stat/${encodeURI(path.replace(/^\/+/, ''))}`),
|
||||
stat: (path: string, options?: { verbose?: boolean }) => {
|
||||
const params = new URLSearchParams();
|
||||
if (options?.verbose) params.set('verbose', 'true');
|
||||
const query = params.toString();
|
||||
return request(`/fs/stat/${encodeURI(path.replace(/^\/+/, ''))}${query ? `?${query}` : ''}`);
|
||||
},
|
||||
getTempLinkToken: (path: string, expiresIn: number = 3600) =>
|
||||
request<{token: string, path: string, url: string}>(`/fs/temp-link/${encodeURI(path.replace(/^\/+/, ''))}?expires_in=${expiresIn}`),
|
||||
getTempPublicUrl: (token: string) => `${API_BASE_URL}/fs/public/${token}`,
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import React, { useEffect, useMemo, useRef } from 'react';
|
||||
import type { AppComponentProps, AppOpenComponentProps } from '../types';
|
||||
import type { PluginItem } from '../../api/plugins';
|
||||
import { useI18n } from '../../i18n';
|
||||
|
||||
export interface PluginAppHostProps extends AppComponentProps {
|
||||
plugin: PluginItem;
|
||||
@@ -34,6 +35,7 @@ export const PluginAppHost: React.FC<PluginAppHostProps> = ({
|
||||
entry,
|
||||
onRequestClose,
|
||||
}) => {
|
||||
const { lang } = useI18n();
|
||||
const iframeRef = useRef<HTMLIFrameElement>(null);
|
||||
const onCloseRef = useRef(onRequestClose);
|
||||
onCloseRef.current = onRequestClose;
|
||||
@@ -45,10 +47,11 @@ export const PluginAppHost: React.FC<PluginAppHostProps> = ({
|
||||
pluginVersion: plugin.version || '',
|
||||
pluginStyles: JSON.stringify(getPluginStylePaths(plugin)),
|
||||
mode: 'file',
|
||||
lang,
|
||||
filePath,
|
||||
entry: JSON.stringify(entry),
|
||||
}),
|
||||
[plugin, filePath, entry]
|
||||
[plugin, filePath, entry, lang]
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
@@ -86,6 +89,7 @@ export interface PluginAppOpenHostProps extends AppOpenComponentProps {
|
||||
* 注意:同源且不加 sandbox 时,不是安全沙箱(插件仍可通过 window.parent 访问宿主)。
|
||||
*/
|
||||
export const PluginAppOpenHost: React.FC<PluginAppOpenHostProps> = ({ plugin, onRequestClose }) => {
|
||||
const { lang } = useI18n();
|
||||
const iframeRef = useRef<HTMLIFrameElement>(null);
|
||||
const onCloseRef = useRef(onRequestClose);
|
||||
onCloseRef.current = onRequestClose;
|
||||
@@ -97,8 +101,9 @@ export const PluginAppOpenHost: React.FC<PluginAppOpenHostProps> = ({ plugin, on
|
||||
pluginVersion: plugin.version || '',
|
||||
pluginStyles: JSON.stringify(getPluginStylePaths(plugin)),
|
||||
mode: 'app',
|
||||
lang,
|
||||
}),
|
||||
[plugin]
|
||||
[plugin, lang]
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
|
||||
@@ -3,7 +3,7 @@ import { Avatar, Button, Divider, Flex, Input, List, Modal, Space, Switch, Tag,
|
||||
import { RobotOutlined, SendOutlined, DeleteOutlined, ToolOutlined, DownOutlined, UpOutlined, CodeOutlined, CopyOutlined, LoadingOutlined } from '@ant-design/icons';
|
||||
import ReactMarkdown from 'react-markdown';
|
||||
import type { TextAreaRef } from 'antd/es/input/TextArea';
|
||||
import { agentApi, type AgentChatMessage, type PendingToolCall } from '../api/agent';
|
||||
import { agentApi, type AgentChatMessage, type PendingMcpCall } from '../api/agent';
|
||||
import { useI18n } from '../i18n';
|
||||
import '../styles/ai-agent.css';
|
||||
|
||||
@@ -108,7 +108,7 @@ const AiAgentWidget = memo(function AiAgentWidget({ currentPath, open, onOpenCha
|
||||
const [input, setInput] = useState('');
|
||||
const [loading, setLoading] = useState(false);
|
||||
const [messages, setMessages] = useState<AgentChatMessage[]>([]);
|
||||
const [pending, setPending] = useState<PendingToolCall[]>([]);
|
||||
const [pending, setPending] = useState<PendingMcpCall[]>([]);
|
||||
const [expandedTools, setExpandedTools] = useState<Record<string, boolean>>({});
|
||||
const [expandedRaw, setExpandedRaw] = useState<Record<string, boolean>>({});
|
||||
const [runningTools, setRunningTools] = useState<Record<string, string>>({});
|
||||
@@ -153,16 +153,14 @@ const AiAgentWidget = memo(function AiAgentWidget({ currentPath, open, onOpenCha
|
||||
for (const msg of messages) {
|
||||
if (!msg || typeof msg !== 'object') continue;
|
||||
if (msg.role !== 'assistant') continue;
|
||||
const toolCalls = (msg as any).tool_calls;
|
||||
const toolCalls = (msg as any).mcp_calls;
|
||||
if (!Array.isArray(toolCalls)) continue;
|
||||
for (const call of toolCalls) {
|
||||
const id = typeof call?.id === 'string' ? call.id : '';
|
||||
const fn = call?.function;
|
||||
const name = typeof fn?.name === 'string' ? fn.name : '';
|
||||
const rawArgs = typeof fn?.arguments === 'string' ? fn.arguments : '';
|
||||
const name = typeof call?.name === 'string' ? call.name : '';
|
||||
const args = isPlainObject(call?.arguments) ? call.arguments : {};
|
||||
if (!id || !name) continue;
|
||||
const parsedArgs = tryParseJson<Record<string, any>>(rawArgs) || {};
|
||||
map.set(id, { name, args: parsedArgs });
|
||||
map.set(id, { name, args });
|
||||
}
|
||||
}
|
||||
return map;
|
||||
@@ -179,7 +177,7 @@ const AiAgentWidget = memo(function AiAgentWidget({ currentPath, open, onOpenCha
|
||||
assistantIndexRef.current = {};
|
||||
|
||||
setLoading(true);
|
||||
const approvedIds = payload.approved_tool_call_ids || [];
|
||||
const approvedIds = payload.approved_mcp_call_ids || [];
|
||||
if (Array.isArray(approvedIds) && approvedIds.length > 0) {
|
||||
const preRunning: Record<string, string> = {};
|
||||
approvedIds.forEach((id) => {
|
||||
@@ -196,8 +194,8 @@ const AiAgentWidget = memo(function AiAgentWidget({ currentPath, open, onOpenCha
|
||||
messages: payload.messages,
|
||||
auto_execute: autoExecute,
|
||||
context: effectivePath ? { current_path: effectivePath } : undefined,
|
||||
approved_tool_call_ids: payload.approved_tool_call_ids,
|
||||
rejected_tool_call_ids: payload.rejected_tool_call_ids,
|
||||
approved_mcp_call_ids: payload.approved_mcp_call_ids,
|
||||
rejected_mcp_call_ids: payload.rejected_mcp_call_ids,
|
||||
},
|
||||
(evt) => {
|
||||
if (seq !== streamSeqRef.current) return;
|
||||
@@ -241,16 +239,16 @@ const AiAgentWidget = memo(function AiAgentWidget({ currentPath, open, onOpenCha
|
||||
delete assistantIndexRef.current[id];
|
||||
return;
|
||||
}
|
||||
case 'tool_start': {
|
||||
const toolCallId = String((evt.data as any)?.tool_call_id || '');
|
||||
case 'mcp_call_start': {
|
||||
const toolCallId = String((evt.data as any)?.mcp_call_id || '');
|
||||
const name = String((evt.data as any)?.name || '');
|
||||
if (!toolCallId) return;
|
||||
if (name) toolNameByIdRef.current[toolCallId] = name;
|
||||
setRunningTools((prev) => ({ ...prev, [toolCallId]: name || prev[toolCallId] || '' }));
|
||||
return;
|
||||
}
|
||||
case 'tool_end': {
|
||||
const toolCallId = String((evt.data as any)?.tool_call_id || '');
|
||||
case 'mcp_call_end': {
|
||||
const toolCallId = String((evt.data as any)?.mcp_call_id || '');
|
||||
const name = String((evt.data as any)?.name || '');
|
||||
const msg = (evt.data as any)?.message;
|
||||
if (toolCallId && name) toolNameByIdRef.current[toolCallId] = name;
|
||||
@@ -267,14 +265,14 @@ const AiAgentWidget = memo(function AiAgentWidget({ currentPath, open, onOpenCha
|
||||
return;
|
||||
}
|
||||
case 'pending': {
|
||||
const items = Array.isArray((evt.data as any)?.pending_tool_calls) ? (evt.data as any).pending_tool_calls : [];
|
||||
const items = Array.isArray((evt.data as any)?.pending_mcp_calls) ? (evt.data as any).pending_mcp_calls : [];
|
||||
setPending(items);
|
||||
return;
|
||||
}
|
||||
case 'done': {
|
||||
const base = baseMessagesRef.current || [];
|
||||
const newMessages = Array.isArray((evt.data as any)?.messages) ? (evt.data as any).messages : [];
|
||||
const nextPending = Array.isArray((evt.data as any)?.pending_tool_calls) ? (evt.data as any).pending_tool_calls : [];
|
||||
const nextPending = Array.isArray((evt.data as any)?.pending_mcp_calls) ? (evt.data as any).pending_mcp_calls : [];
|
||||
setMessages([...base, ...newMessages]);
|
||||
setPending(nextPending);
|
||||
setRunningTools({});
|
||||
@@ -326,23 +324,23 @@ const AiAgentWidget = memo(function AiAgentWidget({ currentPath, open, onOpenCha
|
||||
}, []);
|
||||
|
||||
const approveOne = useCallback(async (id: string) => {
|
||||
await runStream({ messages, approved_tool_call_ids: [id] });
|
||||
await runStream({ messages, approved_mcp_call_ids: [id] });
|
||||
}, [messages, runStream]);
|
||||
|
||||
const rejectOne = useCallback(async (id: string) => {
|
||||
await runStream({ messages, rejected_tool_call_ids: [id] });
|
||||
await runStream({ messages, rejected_mcp_call_ids: [id] });
|
||||
}, [messages, runStream]);
|
||||
|
||||
const approveAll = useCallback(async () => {
|
||||
const ids = pending.map((p) => p.id).filter(Boolean);
|
||||
if (ids.length === 0) return;
|
||||
await runStream({ messages, approved_tool_call_ids: ids });
|
||||
await runStream({ messages, approved_mcp_call_ids: ids });
|
||||
}, [messages, pending, runStream]);
|
||||
|
||||
const rejectAll = useCallback(async () => {
|
||||
const ids = pending.map((p) => p.id).filter(Boolean);
|
||||
if (ids.length === 0) return;
|
||||
await runStream({ messages, rejected_tool_call_ids: ids });
|
||||
await runStream({ messages, rejected_mcp_call_ids: ids });
|
||||
}, [messages, pending, runStream]);
|
||||
|
||||
const messageItems = useMemo(() => {
|
||||
@@ -665,7 +663,7 @@ const AiAgentWidget = memo(function AiAgentWidget({ currentPath, open, onOpenCha
|
||||
const role = String((m as any).role);
|
||||
const isUser = role === 'user';
|
||||
const isTool = role === 'tool';
|
||||
const toolCallId = typeof (m as any).tool_call_id === 'string' ? String((m as any).tool_call_id) : '';
|
||||
const toolCallId = typeof (m as any).mcp_call_id === 'string' ? String((m as any).mcp_call_id) : '';
|
||||
const toolInfo = toolCallId ? toolCallsById.get(toolCallId) : null;
|
||||
const toolName = toolInfo?.name || (toolCallId ? toolNameByIdRef.current[toolCallId] : '') || '';
|
||||
const msgKey = toolCallId ? `tool:${toolCallId}` : `${role}:${idx}`;
|
||||
|
||||
@@ -2,8 +2,8 @@ import { createContext, useCallback, useContext, useMemo, useState, useEffect }
|
||||
import type { PropsWithChildren } from 'react';
|
||||
import en from './locales/en.json';
|
||||
import zhOverrides from './locales/zh.json';
|
||||
import { normalizeLang, persistLang, readStoredLang, type Lang } from './lang';
|
||||
|
||||
type Lang = 'zh' | 'en';
|
||||
type Dict = Record<string, string>;
|
||||
|
||||
const dicts: Record<Lang, Dict> = {
|
||||
@@ -11,9 +11,13 @@ const dicts: Record<Lang, Dict> = {
|
||||
zh: { ...en, ...zhOverrides },
|
||||
};
|
||||
|
||||
interface SetLangOptions {
|
||||
persist?: boolean;
|
||||
}
|
||||
|
||||
export interface I18nContextValue {
|
||||
lang: Lang;
|
||||
setLang: (lang: Lang) => void;
|
||||
setLang: (lang: Lang, options?: SetLangOptions) => void;
|
||||
t: (key: string, params?: Record<string, string | number>) => string;
|
||||
}
|
||||
|
||||
@@ -24,13 +28,26 @@ function interpolate(template: string, params?: Record<string, string | number>)
|
||||
return template.replace(/\{(\w+)\}/g, (_, k) => String(params[k] ?? `{${k}}`));
|
||||
}
|
||||
|
||||
export function I18nProvider({ children }: PropsWithChildren) {
|
||||
const [lang, setLangState] = useState<Lang>(() => (localStorage.getItem('lang') as Lang) || 'zh');
|
||||
interface I18nProviderProps {
|
||||
defaultLanguage?: Lang;
|
||||
}
|
||||
|
||||
const setLang = useCallback((l: Lang) => {
|
||||
setLangState(l);
|
||||
localStorage.setItem('lang', l);
|
||||
}, []);
|
||||
export function I18nProvider({ children, defaultLanguage }: PropsWithChildren<I18nProviderProps>) {
|
||||
const fallbackLang = normalizeLang(defaultLanguage, 'zh');
|
||||
const [lang, setLangState] = useState<Lang>(() => readStoredLang() ?? fallbackLang);
|
||||
|
||||
const setLang = useCallback((nextLang: Lang, options?: SetLangOptions) => {
|
||||
const normalized = normalizeLang(nextLang, fallbackLang);
|
||||
setLangState(normalized);
|
||||
if (options?.persist === false) return;
|
||||
persistLang(normalized);
|
||||
}, [fallbackLang]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!readStoredLang()) {
|
||||
setLangState(fallbackLang);
|
||||
}
|
||||
}, [fallbackLang]);
|
||||
|
||||
useEffect(() => {
|
||||
document.documentElement.lang = lang;
|
||||
|
||||
42
web/src/i18n/lang.ts
Normal file
42
web/src/i18n/lang.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
export type Lang = 'zh' | 'en';
|
||||
|
||||
const LANG_STORAGE_KEY = 'lang';
|
||||
|
||||
export function parseLang(raw: unknown): Lang | null {
|
||||
if (typeof raw !== 'string') return null;
|
||||
const value = raw.trim().toLowerCase();
|
||||
if (!value) return null;
|
||||
if (value === 'en' || value.startsWith('en-')) return 'en';
|
||||
if (value === 'zh' || value.startsWith('zh-')) return 'zh';
|
||||
return null;
|
||||
}
|
||||
|
||||
export function normalizeLang(raw: unknown, fallback: Lang = 'zh'): Lang {
|
||||
return parseLang(raw) ?? fallback;
|
||||
}
|
||||
|
||||
export function readStoredLang(): Lang | null {
|
||||
if (typeof window === 'undefined') return null;
|
||||
try {
|
||||
return parseLang(window.localStorage.getItem(LANG_STORAGE_KEY));
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export function persistLang(lang: Lang): void {
|
||||
if (typeof window === 'undefined') return;
|
||||
try {
|
||||
window.localStorage.setItem(LANG_STORAGE_KEY, lang);
|
||||
} catch {
|
||||
void 0;
|
||||
}
|
||||
}
|
||||
|
||||
export function getActiveLang(fallback: Lang = 'zh'): Lang {
|
||||
if (typeof document !== 'undefined') {
|
||||
const documentLang = parseLang(document.documentElement.lang);
|
||||
if (documentLang) return documentLang;
|
||||
}
|
||||
return readStoredLang() ?? fallback;
|
||||
}
|
||||
@@ -27,10 +27,13 @@
|
||||
"Register failed": "Register failed",
|
||||
"Please input email!": "Please input email!",
|
||||
"Profile": "Profile",
|
||||
"Client Authorization": "Client Authorization",
|
||||
"Account Settings": "Account Settings",
|
||||
"Language": "Language",
|
||||
"Chinese": "中文",
|
||||
"English": "English",
|
||||
"Default Language": "Default Language",
|
||||
"Used when the user has not selected a language": "Used when the user has not selected a language",
|
||||
"Full Name": "Full Name",
|
||||
"Email": "Email",
|
||||
"Change Password": "Change Password",
|
||||
@@ -511,6 +514,8 @@
|
||||
"Unique name": "Unique name",
|
||||
"Select adapter type": "Select adapter type",
|
||||
"/ or /drive": "/ or /drive",
|
||||
"Used Capacity": "Used Capacity",
|
||||
"Capacity Usage": "Capacity Usage",
|
||||
"Adapter Config": "Adapter Config",
|
||||
"adapter.type.local": "Local Filesystem",
|
||||
"adapter.type.foxel": "Foxel Node",
|
||||
|
||||
@@ -50,8 +50,13 @@
|
||||
"Register failed": "注册失败",
|
||||
"Please input email!": "请输入邮箱!",
|
||||
"Profile": "个人资料",
|
||||
"Client Authorization": "客户端授权",
|
||||
"Account Settings": "账户设置",
|
||||
"Language": "语言",
|
||||
"Chinese": "中文",
|
||||
"English": "English",
|
||||
"Default Language": "默认语言",
|
||||
"Used when the user has not selected a language": "用户未手动选择语言时使用",
|
||||
"Full Name": "昵称",
|
||||
"Email": "邮箱",
|
||||
"Change Password": "修改密码",
|
||||
@@ -508,6 +513,8 @@
|
||||
"Unique name": "唯一名称",
|
||||
"Select adapter type": "选择适配器类型",
|
||||
"/ or /drive": "/或/drive",
|
||||
"Used Capacity": "已使用容量",
|
||||
"Capacity Usage": "容量使用",
|
||||
"Adapter Config": "适配器配置",
|
||||
"adapter.type.local": "本地文件系统",
|
||||
"adapter.type.foxel": "Foxel 节点",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { Layout, Button, Dropdown, theme, Flex, Avatar, Typography, Tooltip } from 'antd';
|
||||
import { SearchOutlined, MenuUnfoldOutlined, LogoutOutlined, UserOutlined, RobotOutlined, BellOutlined } from '@ant-design/icons';
|
||||
import { memo, useState } from 'react';
|
||||
import { Layout, Button, Dropdown, theme, Flex, Avatar, Typography, Tooltip, Modal, QRCode } from 'antd';
|
||||
import { SearchOutlined, MenuUnfoldOutlined, LogoutOutlined, UserOutlined, RobotOutlined, BellOutlined, QrcodeOutlined } from '@ant-design/icons';
|
||||
import { memo, useMemo, useState } from 'react';
|
||||
import SearchDialog from './SearchDialog.tsx';
|
||||
import { authApi } from '../api/auth.ts';
|
||||
import { useNavigate } from 'react-router';
|
||||
@@ -26,11 +26,16 @@ const TopHeader = memo(function TopHeader({ collapsed, onToggle, onOpenAiAgent,
|
||||
const [searchOpen, setSearchOpen] = useState(false);
|
||||
const navigate = useNavigate();
|
||||
const { t } = useI18n();
|
||||
const { user } = useAuth();
|
||||
const { user, token: authToken } = useAuth();
|
||||
const [profileOpen, setProfileOpen] = useState(false);
|
||||
const [clientAuthOpen, setClientAuthOpen] = useState(false);
|
||||
const [noticesOpen, setNoticesOpen] = useState(false);
|
||||
const status = useSystemStatus();
|
||||
const { isMobile } = useResponsive();
|
||||
const clientAuthPayload = useMemo(() => JSON.stringify({
|
||||
base_url: window.location.origin,
|
||||
token: authToken || '',
|
||||
}), [authToken]);
|
||||
|
||||
const handleLogout = () => {
|
||||
authApi.logout();
|
||||
@@ -38,6 +43,7 @@ const TopHeader = memo(function TopHeader({ collapsed, onToggle, onOpenAiAgent,
|
||||
};
|
||||
|
||||
const openProfile = () => setProfileOpen(true);
|
||||
const openClientAuth = () => setClientAuthOpen(true);
|
||||
|
||||
return (
|
||||
<Header
|
||||
@@ -96,6 +102,7 @@ const TopHeader = memo(function TopHeader({ collapsed, onToggle, onOpenAiAgent,
|
||||
menu={{
|
||||
items: [
|
||||
{ key: 'profile', label: t('Profile'), icon: <UserOutlined />, onClick: openProfile },
|
||||
{ key: 'client-auth', label: t('Client Authorization'), icon: <QrcodeOutlined />, onClick: openClientAuth },
|
||||
{ key: 'logout', label: t('Log Out'), icon: <LogoutOutlined />, onClick: handleLogout },
|
||||
],
|
||||
}}
|
||||
@@ -114,6 +121,18 @@ const TopHeader = memo(function TopHeader({ collapsed, onToggle, onOpenAiAgent,
|
||||
</Button>
|
||||
</Dropdown>
|
||||
<ProfileModal open={profileOpen} onClose={() => setProfileOpen(false)} />
|
||||
<Modal
|
||||
title={t('Client Authorization')}
|
||||
open={clientAuthOpen}
|
||||
onCancel={() => setClientAuthOpen(false)}
|
||||
footer={null}
|
||||
width={320}
|
||||
centered
|
||||
>
|
||||
<Flex justify="center" style={{ padding: '8px 0' }}>
|
||||
<QRCode value={clientAuthPayload} size={220} />
|
||||
</Flex>
|
||||
</Modal>
|
||||
<NoticesModal open={noticesOpen} onClose={() => setNoticesOpen(false)} version={status?.version || ''} />
|
||||
</Flex>
|
||||
</Header>
|
||||
|
||||
@@ -1,12 +1,29 @@
|
||||
import { memo, useState, useEffect, useCallback } from 'react';
|
||||
import { Table, Button, Space, Drawer, Form, Input, Switch, message, Typography, Popconfirm, Select } from 'antd';
|
||||
import PageCard from '../components/PageCard';
|
||||
import { adaptersApi, type AdapterItem, type AdapterTypeMeta } from '../api/client';
|
||||
import { adaptersApi, type AdapterItem, type AdapterTypeMeta, type AdapterUsage } from '../api/client';
|
||||
import { useI18n } from '../i18n';
|
||||
|
||||
const formatBytes = (bytes?: number | null) => {
|
||||
if (bytes === null || bytes === undefined) return '-';
|
||||
if (bytes === 0) return '0 B';
|
||||
const units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB'];
|
||||
const index = Math.min(Math.floor(Math.log(bytes) / Math.log(1024)), units.length - 1);
|
||||
const value = bytes / (1024 ** index);
|
||||
return `${value.toFixed(value >= 10 || index === 0 ? 0 : 1)} ${units[index]}`;
|
||||
};
|
||||
|
||||
const formatUsage = (usage?: AdapterUsage) => {
|
||||
if (!usage?.supported || usage.used_bytes === null || usage.used_bytes === undefined) return '-';
|
||||
const used = formatBytes(usage.used_bytes);
|
||||
if (usage.total_bytes === null || usage.total_bytes === undefined) return used;
|
||||
return `${used} / ${formatBytes(usage.total_bytes)}`;
|
||||
};
|
||||
|
||||
const AdaptersPage = memo(function AdaptersPage() {
|
||||
const [loading, setLoading] = useState(false);
|
||||
const [data, setData] = useState<AdapterItem[]>([]);
|
||||
const [usageMap, setUsageMap] = useState<Record<number, AdapterUsage>>({});
|
||||
const [open, setOpen] = useState(false);
|
||||
const [editing, setEditing] = useState<AdapterItem | null>(null);
|
||||
const [form] = Form.useForm();
|
||||
@@ -16,12 +33,14 @@ const AdaptersPage = memo(function AdaptersPage() {
|
||||
const fetchList = useCallback(async () => {
|
||||
setLoading(true);
|
||||
try {
|
||||
const [list, types] = await Promise.all([
|
||||
const [list, types, usages] = await Promise.all([
|
||||
adaptersApi.list(),
|
||||
adaptersApi.available()
|
||||
adaptersApi.available(),
|
||||
adaptersApi.usage()
|
||||
]);
|
||||
setData(list);
|
||||
setAvailableTypes(types);
|
||||
setUsageMap(Object.fromEntries(usages.map(item => [item.id, item])));
|
||||
} catch (e: any) {
|
||||
message.error(e.message || t('Load failed'));
|
||||
} finally {
|
||||
@@ -137,11 +156,47 @@ const AdaptersPage = memo(function AdaptersPage() {
|
||||
return label === key ? type : label;
|
||||
}, [t]);
|
||||
|
||||
const usageSummary = Object.values(usageMap).reduce(
|
||||
(acc, usage) => {
|
||||
if (!usage.supported) return acc;
|
||||
if (usage.used_bytes !== null && usage.used_bytes !== undefined) {
|
||||
acc.used += usage.used_bytes;
|
||||
acc.hasUsed = true;
|
||||
}
|
||||
if (usage.total_bytes !== null && usage.total_bytes !== undefined) {
|
||||
acc.total += usage.total_bytes;
|
||||
acc.hasTotal = true;
|
||||
}
|
||||
return acc;
|
||||
},
|
||||
{ used: 0, total: 0, hasUsed: false, hasTotal: false }
|
||||
);
|
||||
|
||||
const pageTitle = (
|
||||
<Space size={12} wrap>
|
||||
<span>{t('Storage Adapters')}</span>
|
||||
{(usageSummary.hasUsed || usageSummary.hasTotal) && (
|
||||
<Typography.Text type="secondary" style={{ fontSize: 13, fontWeight: 400 }}>
|
||||
{usageSummary.hasUsed ? formatBytes(usageSummary.used) : '-'}
|
||||
{' / '}
|
||||
{usageSummary.hasTotal ? formatBytes(usageSummary.total) : '-'}
|
||||
</Typography.Text>
|
||||
)}
|
||||
</Space>
|
||||
);
|
||||
|
||||
const columns = [
|
||||
{ title: t('Name'), dataIndex: 'name' },
|
||||
{ title: t('Type'), dataIndex: 'type', width: 140, render: (value: string) => renderTypeLabel(value) },
|
||||
{ title: t('Mount Path'), dataIndex: 'path', width: 140, render: (v: string) => v || '-' },
|
||||
{ title: t('Sub Path'), dataIndex: 'sub_path', width: 140, render: (v: string) => v || '-' },
|
||||
{
|
||||
title: t('Capacity Usage'),
|
||||
width: 180,
|
||||
render: (_: any, rec: AdapterItem) => {
|
||||
return formatUsage(usageMap[rec.id]);
|
||||
}
|
||||
},
|
||||
{
|
||||
title: t('Enabled'),
|
||||
dataIndex: 'enabled',
|
||||
@@ -180,6 +235,14 @@ const AdaptersPage = memo(function AdaptersPage() {
|
||||
let valuePropName: string | undefined;
|
||||
if (field.type === 'password') inputNode = <Input.Password placeholder={field.placeholder} />;
|
||||
if (field.type === 'number') inputNode = <Input type="number" placeholder={field.placeholder} />;
|
||||
if (field.type === 'select') {
|
||||
inputNode = (
|
||||
<Select
|
||||
placeholder={field.placeholder}
|
||||
options={(field.options || []).map(option => ({ value: option, label: t(option) }))}
|
||||
/>
|
||||
);
|
||||
}
|
||||
if (field.type === 'boolean') {
|
||||
inputNode = <Switch />;
|
||||
valuePropName = 'checked';
|
||||
@@ -200,7 +263,7 @@ const AdaptersPage = memo(function AdaptersPage() {
|
||||
|
||||
return (
|
||||
<PageCard
|
||||
title={t('Storage Adapters')}
|
||||
title={pageTitle}
|
||||
extra={
|
||||
<Space wrap>
|
||||
<Button onClick={fetchList} loading={loading}>{t('Refresh')}</Button>
|
||||
|
||||
@@ -181,7 +181,7 @@ const FileExplorerPage = memo(function FileExplorerPage() {
|
||||
setDetailLoading(true);
|
||||
try {
|
||||
const fullPath = (entryBasePath === '/' ? '' : entryBasePath) + '/' + entry.name;
|
||||
const stat = await vfsApi.stat(fullPath);
|
||||
const stat = await vfsApi.stat(fullPath, { verbose: true });
|
||||
setDetailData(stat as Record<string, unknown>);
|
||||
} catch (error) {
|
||||
const messageText = error instanceof Error ? error.message : String(error);
|
||||
|
||||
@@ -255,16 +255,20 @@ export const ContextMenu: React.FC<ContextMenuProps> = (props) => {
|
||||
height="auto"
|
||||
styles={{ body: { padding: 8 } }}
|
||||
>
|
||||
<Menu
|
||||
items={items}
|
||||
selectable={false}
|
||||
onClick={({ key }) => {
|
||||
const handler = handlerMap.get(String(key));
|
||||
if (handler) handler();
|
||||
onClose();
|
||||
}}
|
||||
style={{ borderRadius: token.borderRadius, background: 'transparent', border: 'none' }}
|
||||
/>
|
||||
<div onClick={(e) => e.stopPropagation()}>
|
||||
<Menu
|
||||
items={items}
|
||||
mode="inline"
|
||||
selectable={false}
|
||||
onClick={({ key }) => {
|
||||
const handler = handlerMap.get(String(key));
|
||||
if (!handler) return;
|
||||
handler();
|
||||
onClose();
|
||||
}}
|
||||
style={{ borderRadius: token.borderRadius, background: 'transparent', border: 'none' }}
|
||||
/>
|
||||
</div>
|
||||
</Drawer>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -182,71 +182,77 @@ export const Header: React.FC<HeaderProps> = ({
|
||||
},
|
||||
};
|
||||
|
||||
if (isMobile) {
|
||||
return (
|
||||
<Flex align="center" gap={6} style={{ padding: '10px 12px', borderBottom: `1px solid ${token.colorBorderSecondary}`, minWidth: 0 }}>
|
||||
<Button size="small" icon={<ArrowUpOutlined />} onClick={onGoUp} disabled={path === '/'} />
|
||||
{renderBreadcrumb()}
|
||||
<Space size={4} style={{ flexShrink: 0 }}>
|
||||
<Button size="small" icon={<ReloadOutlined />} onClick={onRefresh} loading={loading} aria-label={t('Refresh')} />
|
||||
<Button size="small" icon={<PlusOutlined />} onClick={onCreateDir} aria-label={t('New Folder')} />
|
||||
<Button size="small" icon={<UploadOutlined />} onClick={onUploadFile} aria-label={t('Upload Files')} />
|
||||
<Dropdown menu={{ items: mobileMoreItems }}>
|
||||
<Button size="small" icon={<MoreOutlined />} aria-label={t('More')} />
|
||||
</Dropdown>
|
||||
</Space>
|
||||
</Flex>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<Flex vertical={isMobile} gap={isMobile ? 10 : 12} style={{ padding: isMobile ? '10px 12px' : '10px 16px', borderBottom: `1px solid ${token.colorBorderSecondary}` }}>
|
||||
<Flex vertical gap={12} style={{ padding: '10px 16px', borderBottom: `1px solid ${token.colorBorderSecondary}` }}>
|
||||
<Flex align="center" gap={8} style={{ minWidth: 0 }}>
|
||||
<Button size="small" icon={<ArrowUpOutlined />} onClick={onGoUp} disabled={path === '/'} />
|
||||
{!isMobile && <Typography.Text strong>{t('File Manager')}</Typography.Text>}
|
||||
{!isMobile && <Divider type="vertical" />}
|
||||
<Typography.Text strong>{t('File Manager')}</Typography.Text>
|
||||
<Divider type="vertical" />
|
||||
{renderBreadcrumb()}
|
||||
</Flex>
|
||||
|
||||
<Flex align="center" justify="space-between" gap={8} style={{ flexWrap: 'wrap' }}>
|
||||
<Space size={8} wrap>
|
||||
<Button size="small" icon={<ReloadOutlined />} onClick={onRefresh} loading={loading} aria-label={t('Refresh')}>
|
||||
{!isMobile && t('Refresh')}
|
||||
{t('Refresh')}
|
||||
</Button>
|
||||
<Button size="small" icon={<PlusOutlined />} onClick={onCreateDir} aria-label={t('New Folder')}>
|
||||
{!isMobile && t('New Folder')}
|
||||
{t('New Folder')}
|
||||
</Button>
|
||||
{isMobile ? (
|
||||
<Button size="small" icon={<UploadOutlined />} onClick={onUploadFile} aria-label={t('Upload Files')} />
|
||||
) : (
|
||||
<Dropdown.Button
|
||||
size="small"
|
||||
icon={<UploadOutlined />}
|
||||
onClick={onUploadFile}
|
||||
menu={uploadMenu}
|
||||
>
|
||||
{t('Upload')}
|
||||
</Dropdown.Button>
|
||||
)}
|
||||
{isMobile && (
|
||||
<Dropdown menu={{ items: mobileMoreItems }}>
|
||||
<Button size="small" icon={<MoreOutlined />} aria-label={t('More')} />
|
||||
</Dropdown>
|
||||
)}
|
||||
<Dropdown.Button
|
||||
size="small"
|
||||
icon={<UploadOutlined />}
|
||||
onClick={onUploadFile}
|
||||
menu={uploadMenu}
|
||||
>
|
||||
{t('Upload')}
|
||||
</Dropdown.Button>
|
||||
</Space>
|
||||
|
||||
{!isMobile && (
|
||||
<Space size={8} wrap>
|
||||
<Select
|
||||
size="small"
|
||||
value={sortBy}
|
||||
onChange={(val) => onSortChange(val, sortOrder)}
|
||||
style={{ width: 112 }}
|
||||
options={[
|
||||
{ value: 'name', label: t('Name') },
|
||||
{ value: 'size', label: t('Size') },
|
||||
{ value: 'mtime', label: t('Modified Time') },
|
||||
]}
|
||||
/>
|
||||
<Button
|
||||
size="small"
|
||||
icon={sortOrder === 'asc' ? <ArrowUpOutlined /> : <ArrowDownOutlined />}
|
||||
onClick={() => onSortChange(sortBy, sortOrder === 'asc' ? 'desc' : 'asc')}
|
||||
/>
|
||||
<Segmented
|
||||
size="small"
|
||||
value={viewMode}
|
||||
onChange={(value) => onSetViewMode(value as ViewMode)}
|
||||
options={[
|
||||
{ label: <Tooltip title={t('Grid')}><AppstoreOutlined /></Tooltip>, value: 'grid' },
|
||||
{ label: <Tooltip title={t('List')}><UnorderedListOutlined /></Tooltip>, value: 'list' },
|
||||
]}
|
||||
/>
|
||||
</Space>
|
||||
)}
|
||||
<Space size={8} wrap>
|
||||
<Select
|
||||
size="small"
|
||||
value={sortBy}
|
||||
onChange={(val) => onSortChange(val, sortOrder)}
|
||||
style={{ width: 112 }}
|
||||
options={[
|
||||
{ value: 'name', label: t('Name') },
|
||||
{ value: 'size', label: t('Size') },
|
||||
{ value: 'mtime', label: t('Modified Time') },
|
||||
]}
|
||||
/>
|
||||
<Button
|
||||
size="small"
|
||||
icon={sortOrder === 'asc' ? <ArrowUpOutlined /> : <ArrowDownOutlined />}
|
||||
onClick={() => onSortChange(sortBy, sortOrder === 'asc' ? 'desc' : 'asc')}
|
||||
/>
|
||||
<Segmented
|
||||
size="small"
|
||||
value={viewMode}
|
||||
onChange={(value) => onSetViewMode(value as ViewMode)}
|
||||
options={[
|
||||
{ label: <Tooltip title={t('Grid')}><AppstoreOutlined /></Tooltip>, value: 'grid' },
|
||||
{ label: <Tooltip title={t('List')}><UnorderedListOutlined /></Tooltip>, value: 'list' },
|
||||
]}
|
||||
/>
|
||||
</Space>
|
||||
</Flex>
|
||||
</Flex>
|
||||
);
|
||||
|
||||
@@ -168,22 +168,19 @@ export function useFileActions({ path, refresh, clearSelection, onShare, onGetDi
|
||||
refresh();
|
||||
}, [normalizeDestination, normalizeFullPath, t, buildEntryDestination, refresh]);
|
||||
|
||||
const doDownload = useCallback(async (entry: VfsEntry) => {
|
||||
const doDownload = useCallback((entry: VfsEntry) => {
|
||||
if (entry.is_dir) {
|
||||
message.warning(t('Downloading folders is not supported'));
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const buf = await vfsApi.readFile((path === '/' ? '' : path) + '/' + entry.name);
|
||||
const blob = new Blob([buf]);
|
||||
const url = URL.createObjectURL(blob);
|
||||
const url = vfsApi.streamUrl((path === '/' ? '' : path) + '/' + entry.name);
|
||||
const a = document.createElement('a');
|
||||
a.href = url;
|
||||
a.download = entry.name;
|
||||
document.body.appendChild(a);
|
||||
a.click();
|
||||
document.body.removeChild(a);
|
||||
URL.revokeObjectURL(url);
|
||||
} catch (e: any) {
|
||||
message.error(e.message || t('Download failed'));
|
||||
}
|
||||
|
||||
@@ -521,7 +521,7 @@ const PluginsPage = memo(function PluginsPage() {
|
||||
};
|
||||
|
||||
return (
|
||||
<div style={{ height: 'calc(100vh - 88px)', display: 'flex', flexDirection: 'column', minHeight: 0, overflow: 'hidden' }}>
|
||||
<div style={{ flex: 1, display: 'flex', flexDirection: 'column', minHeight: 0, overflow: 'hidden' }}>
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: 8, marginBottom: 12, flexWrap: 'wrap' }}>
|
||||
<Upload
|
||||
accept=".foxpkg"
|
||||
|
||||
@@ -554,9 +554,9 @@ const ProcessorsPage = memo(function ProcessorsPage() {
|
||||
return (
|
||||
<>
|
||||
{contextHolder}
|
||||
<Flex gap={16} style={{ height: 'calc(100vh - 88px)' }}>
|
||||
<Flex gap={16} style={{ flex: 1, minHeight: 0, overflow: 'hidden' }}>
|
||||
<Card
|
||||
style={{ flex: '0 0 320px', minWidth: 280, display: 'flex', flexDirection: 'column' }}
|
||||
style={{ flex: '0 0 320px', minWidth: 280, minHeight: 0, display: 'flex', flexDirection: 'column' }}
|
||||
title={t('Processor List')}
|
||||
extra={
|
||||
<Space size={8}>
|
||||
@@ -564,13 +564,13 @@ const ProcessorsPage = memo(function ProcessorsPage() {
|
||||
<Button size="small" onClick={handleReloadProcessors} loading={reloading}>{t('Reload')}</Button>
|
||||
</Space>
|
||||
}
|
||||
styles={{ body: { padding: 0, flex: 1, display: 'flex' } }}
|
||||
styles={{ body: { padding: 0, flex: 1, minHeight: 0, display: 'flex' } }}
|
||||
>
|
||||
{renderProcessorList()}
|
||||
</Card>
|
||||
|
||||
<Card
|
||||
style={{ flex: 1, minWidth: 0, display: 'flex', flexDirection: 'column' }}
|
||||
style={{ flex: 1, minWidth: 0, minHeight: 0, display: 'flex', flexDirection: 'column' }}
|
||||
title={selectedProcessorMeta ? `${selectedProcessorMeta.name} (${selectedProcessorMeta.type})` : t('Select a processor')}
|
||||
extra={
|
||||
<Space size={8}>
|
||||
@@ -582,7 +582,7 @@ const ProcessorsPage = memo(function ProcessorsPage() {
|
||||
</Button>
|
||||
</Space>
|
||||
}
|
||||
styles={{ body: { padding: 0, flex: 1, display: 'flex', flexDirection: 'column' } }}
|
||||
styles={{ body: { padding: 0, flex: 1, minHeight: 0, display: 'flex', flexDirection: 'column' } }}
|
||||
>
|
||||
<Tabs
|
||||
activeKey={activeTab}
|
||||
|
||||
@@ -66,7 +66,7 @@ export default function SystemSettingsPage({ tabKey, onTabNavigate }: SystemSett
|
||||
});
|
||||
}, [t]);
|
||||
|
||||
const handleSave = async (values: Record<string, unknown>) => {
|
||||
const handleSave = async (values: Record<string, unknown>): Promise<boolean> => {
|
||||
setLoading(true);
|
||||
try {
|
||||
for (const [key, value] of Object.entries(values)) {
|
||||
@@ -81,10 +81,13 @@ export default function SystemSettingsPage({ tabKey, onTabNavigate }: SystemSett
|
||||
if (Object.keys(values).some(k => Object.values(THEME_KEYS).includes(k))) {
|
||||
await refreshTheme();
|
||||
}
|
||||
return true;
|
||||
} catch (e: any) {
|
||||
message.error(e.message || t('Save failed'));
|
||||
return false;
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
setLoading(false);
|
||||
};
|
||||
|
||||
// 离开“外观设置”时,恢复后端持久化配置(取消未保存的预览)
|
||||
|
||||
@@ -2,6 +2,7 @@ import { Alert, Button, Divider, Form, Input, Select, Switch, message } from 'an
|
||||
import { useEffect, useMemo, useState } from 'react';
|
||||
import { rolesApi, type RoleInfo } from '../../../api/roles';
|
||||
import { useI18n } from '../../../i18n';
|
||||
import { normalizeLang, readStoredLang } from '../../../i18n/lang';
|
||||
|
||||
interface AppConfigKey {
|
||||
key: string;
|
||||
@@ -12,7 +13,7 @@ interface AppConfigKey {
|
||||
interface AppSettingsTabProps {
|
||||
config: Record<string, string>;
|
||||
loading: boolean;
|
||||
onSave: (values: Record<string, unknown>) => Promise<void>;
|
||||
onSave: (values: Record<string, unknown>) => Promise<boolean>;
|
||||
configKeys: AppConfigKey[];
|
||||
}
|
||||
|
||||
@@ -22,7 +23,7 @@ export default function AppSettingsTab({
|
||||
onSave,
|
||||
configKeys,
|
||||
}: AppSettingsTabProps) {
|
||||
const { t } = useI18n();
|
||||
const { t, setLang } = useI18n();
|
||||
const [rolesLoading, setRolesLoading] = useState(false);
|
||||
const [roles, setRoles] = useState<RoleInfo[]>([]);
|
||||
|
||||
@@ -52,6 +53,7 @@ export default function AppSettingsTab({
|
||||
const roleId = roleIdRaw ? Number(roleIdRaw) : undefined;
|
||||
return {
|
||||
...Object.fromEntries(configKeys.map(({ key, default: def }) => [key, config[key] ?? def ?? ''])),
|
||||
APP_DEFAULT_LANGUAGE: normalizeLang(config.APP_DEFAULT_LANGUAGE, 'zh'),
|
||||
AUTH_ALLOW_REGISTER: allowRegister,
|
||||
AUTH_DEFAULT_REGISTER_ROLE_ID: Number.isFinite(roleId) ? roleId : undefined,
|
||||
};
|
||||
@@ -66,12 +68,17 @@ export default function AppSettingsTab({
|
||||
for (const { key } of configKeys) {
|
||||
payload[key] = vals[key];
|
||||
}
|
||||
const defaultLanguage = normalizeLang(vals.APP_DEFAULT_LANGUAGE, 'zh');
|
||||
payload.APP_DEFAULT_LANGUAGE = defaultLanguage;
|
||||
const allow = !!vals.AUTH_ALLOW_REGISTER;
|
||||
payload.AUTH_ALLOW_REGISTER = allow ? 'true' : 'false';
|
||||
if (allow) {
|
||||
payload.AUTH_DEFAULT_REGISTER_ROLE_ID = String(vals.AUTH_DEFAULT_REGISTER_ROLE_ID);
|
||||
}
|
||||
await onSave(payload);
|
||||
const saved = await onSave(payload);
|
||||
if (saved && !readStoredLang()) {
|
||||
setLang(defaultLanguage, { persist: false });
|
||||
}
|
||||
}}
|
||||
style={{ marginTop: 24 }}
|
||||
key={JSON.stringify(config)}
|
||||
@@ -82,6 +89,20 @@ export default function AppSettingsTab({
|
||||
</Form.Item>
|
||||
))}
|
||||
|
||||
<Form.Item
|
||||
name="APP_DEFAULT_LANGUAGE"
|
||||
label={t('Default Language')}
|
||||
extra={t('Used when the user has not selected a language')}
|
||||
>
|
||||
<Select
|
||||
size="large"
|
||||
options={[
|
||||
{ value: 'zh', label: t('Chinese') },
|
||||
{ value: 'en', label: t('English') },
|
||||
]}
|
||||
/>
|
||||
</Form.Item>
|
||||
|
||||
<Divider titlePlacement="left">{t('Registration Settings')}</Divider>
|
||||
|
||||
<Alert
|
||||
|
||||
@@ -13,7 +13,7 @@ interface ThemeKeyMap {
|
||||
interface AppearanceSettingsTabProps {
|
||||
config: Record<string, string>;
|
||||
loading: boolean;
|
||||
onSave: (values: Record<string, unknown>) => Promise<void>;
|
||||
onSave: (values: Record<string, unknown>) => Promise<boolean>;
|
||||
themeKeys: ThemeKeyMap;
|
||||
}
|
||||
|
||||
|
||||
@@ -28,7 +28,7 @@ import {
|
||||
interface EmailSettingsTabProps {
|
||||
config: Record<string, string>;
|
||||
loading: boolean;
|
||||
onSave: (values: Record<string, unknown>) => Promise<void>;
|
||||
onSave: (values: Record<string, unknown>) => Promise<boolean>;
|
||||
}
|
||||
|
||||
interface EmailFormValues {
|
||||
|
||||
@@ -5,7 +5,7 @@ import { useI18n } from '../../../i18n';
|
||||
interface ProtocolMappingsTabProps {
|
||||
config: Record<string, string>;
|
||||
loading: boolean;
|
||||
onSave: (values: Record<string, unknown>) => Promise<void>;
|
||||
onSave: (values: Record<string, unknown>) => Promise<boolean>;
|
||||
}
|
||||
|
||||
const WEBDAV_KEY = 'WEBDAV_MAPPING_ENABLED';
|
||||
|
||||
@@ -7,12 +7,14 @@ import type { PluginItem } from './api/plugins';
|
||||
import { pluginsApi } from './api/plugins';
|
||||
import request from './api/client';
|
||||
import { vfsApi, type VfsEntry } from './api/vfs';
|
||||
import { parseLang } from './i18n/lang';
|
||||
|
||||
type FrameMode = 'file' | 'app';
|
||||
|
||||
type FrameQuery = {
|
||||
pluginKey: string;
|
||||
mode: FrameMode;
|
||||
lang: string;
|
||||
filePath: string;
|
||||
pluginVersion: string;
|
||||
pluginStyles: string[] | null;
|
||||
@@ -65,6 +67,7 @@ function getQuery(): FrameQuery {
|
||||
const params = new URLSearchParams(window.location.search);
|
||||
const pluginKey = (params.get('pluginKey') || '').trim();
|
||||
const mode = (params.get('mode') || 'file') as FrameMode;
|
||||
const lang = (params.get('lang') || '').trim();
|
||||
const filePath = (params.get('filePath') || '').trim();
|
||||
const pluginVersion = (params.get('pluginVersion') || '').trim();
|
||||
|
||||
@@ -88,7 +91,7 @@ function getQuery(): FrameQuery {
|
||||
}
|
||||
: null;
|
||||
|
||||
return { pluginKey, mode, filePath, pluginVersion, pluginStyles, entry };
|
||||
return { pluginKey, mode, lang, filePath, pluginVersion, pluginStyles, entry };
|
||||
}
|
||||
|
||||
function postToParent(data: any) {
|
||||
@@ -279,9 +282,14 @@ async function buildFileContext(filePath: string, entryOverride: VfsEntry | null
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const query = getQuery();
|
||||
const frameLang = parseLang(query.lang);
|
||||
if (frameLang) {
|
||||
document.documentElement.lang = frameLang;
|
||||
}
|
||||
initExternals();
|
||||
|
||||
const { pluginKey, mode, filePath, pluginVersion, pluginStyles, entry } = getQuery();
|
||||
const { pluginKey, mode, filePath, pluginVersion, pluginStyles, entry } = query;
|
||||
if (!pluginKey) {
|
||||
renderStatus('Missing pluginKey in query string', true);
|
||||
return;
|
||||
|
||||
@@ -21,8 +21,7 @@ import { pluginsApi } from '../api/plugins';
|
||||
// 类型定义
|
||||
import type { VfsEntry, DirListing } from '../api/client';
|
||||
import type { PluginItem } from '../api/plugins';
|
||||
|
||||
type Lang = 'zh' | 'en';
|
||||
import { getActiveLang, normalizeLang, type Lang } from '../i18n/lang';
|
||||
type Dict = Record<string, string>;
|
||||
type Dicts = Partial<Record<Lang, Dict>>;
|
||||
|
||||
@@ -197,10 +196,8 @@ declare global {
|
||||
* 初始化并暴露外部依赖
|
||||
*/
|
||||
export function initExternals(): void {
|
||||
const normalizeLang = (raw: unknown): Lang => (raw === 'en' ? 'en' : 'zh');
|
||||
|
||||
const i18nApi = {
|
||||
getLang: () => normalizeLang(localStorage.getItem('lang')),
|
||||
getLang: () => getActiveLang(),
|
||||
subscribe: (cb: (lang: Lang) => void) => {
|
||||
const handler = (e: Event) => {
|
||||
const lang = (e as CustomEvent)?.detail?.lang as Lang;
|
||||
|
||||
@@ -51,7 +51,7 @@ const ShellBody = memo(function ShellBody() {
|
||||
};
|
||||
|
||||
return (
|
||||
<Layout style={{ minHeight: '100dvh', background: 'var(--ant-color-bg-layout)' }}>
|
||||
<Layout style={{ height: '100dvh', overflow: 'hidden', background: 'var(--ant-color-bg-layout)' }}>
|
||||
{!isMobile && (
|
||||
<SideNav
|
||||
collapsed={collapsed}
|
||||
@@ -85,7 +85,7 @@ const ShellBody = memo(function ShellBody() {
|
||||
/>
|
||||
)}
|
||||
|
||||
<Layout style={{ background: 'var(--ant-color-bg-layout)', minWidth: 0 }}>
|
||||
<Layout style={{ background: 'var(--ant-color-bg-layout)', minWidth: 0, minHeight: 0, overflow: 'hidden' }}>
|
||||
<TopHeader
|
||||
collapsed={collapsed}
|
||||
onToggle={handleToggleNav}
|
||||
@@ -94,15 +94,17 @@ const ShellBody = memo(function ShellBody() {
|
||||
/>
|
||||
<Layout.Content
|
||||
style={{
|
||||
flex: 1,
|
||||
padding: isMobile ? 12 : 16,
|
||||
background: 'var(--ant-color-bg-layout)',
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
minHeight: 0,
|
||||
overflow: 'hidden',
|
||||
}}
|
||||
>
|
||||
<div style={{ flex: 1, minHeight: 0, background: 'var(--ant-color-bg-layout)' }}>
|
||||
<Flex vertical gap={16} style={{ minHeight: '100%', height: '100%' }}>
|
||||
<div style={{ flex: 1, minHeight: 0, background: 'var(--ant-color-bg-layout)', overflow: 'hidden' }}>
|
||||
<Flex vertical style={{ minHeight: 0, height: '100%' }}>
|
||||
{navKey === 'adapters' && <AdaptersPage />}
|
||||
{navKey === 'files' && <FileExplorerPage />}
|
||||
{navKey === 'share' && <SharePage />}
|
||||
|
||||
Reference in New Issue
Block a user