Compare commits


92 Commits

Author SHA1 Message Date
shiyu f0892ebcd6 chore: update version to v1.2.10 2025-09-23 18:09:57 +08:00
shiyu cf5f19043b feat: Image Viewer enhancements 2025-09-23 14:49:09 +08:00
shiyu 6444ed264c feat(ContextMenu): improve positioning logic to prevent overflow outside viewport 2025-09-23 12:29:55 +08:00
shiyu bed8c8b19c docs(README): add UI screenshot to documentation 2025-09-22 20:53:30 +08:00
shiyu 37e13dabe0 chore: update version to v1.2.9 2025-09-22 19:43:50 +08:00
shiyu 9d6c63aff4 feat(FileExplorer): support moving and copying multiple entries in context menu and modals 2025-09-22 19:32:45 +08:00
shiyu 81095f11df feat(TextEditor): lazy load Monaco and Markdown editors with suspense fallback 2025-09-22 19:11:45 +08:00
shiyu 7d35c10d71 feat(task-queue): implement task queue management with settings and UI integration 2025-09-22 19:08:14 +08:00
shiyu 17ebb8d4f4 feat(FileExplorer): add move and copy functionality with task queuing 2025-09-22 18:15:05 +08:00
shiyu 330e8fd72b feat(offline-downloads): implement offline download 2025-09-22 12:03:39 +08:00
shiyu 11c717e61d chore: update version to v1.2.8 2025-09-20 21:02:06 +08:00
shiyu 45d63febb9 fix(ui): fix bug on processor page 2025-09-20 14:16:06 +08:00
shiyu 5a29c579dc chore: update version to v1.2.7 2025-09-19 20:20:20 +08:00
shiyu b530b16c53 feat(GridView): add RGBA color conversion function and update background style 2025-09-19 20:02:54 +08:00
shiyu 7da49191aa feat(processors): add processor management 2025-09-19 18:58:54 +08:00
shiyu fbeb673126 feat(vector_db): Implement Vector Database Service with multiple providers 2025-09-19 13:45:48 +08:00
shiyu 0a06f4d02c feat: add webdav support to nginx configuration 2025-09-18 11:26:05 +08:00
shiyu f02c29492b chore: update version to v1.2.6 2025-09-17 14:27:15 +08:00
shiyu 1a79e87887 feat: add AI embedding dimension configuration 2025-09-17 14:26:53 +08:00
shiyu 626ff727b3 feat: update contributing guidelines and add Chinese translation 2025-09-16 18:43:08 +08:00
shiyu 117a94d793 feat(docker): create data directories with appropriate permissions 2025-09-16 18:32:38 +08:00
shiyu c39bea67a4 chore: update version to v1.2.5 2025-09-16 11:31:52 +08:00
shiyu 2cbfb29260 feat(i18n): add 'Processor' and 'Share' translations for English and Chinese 2025-09-16 11:31:23 +08:00
shiyu 155f3a144d feat(ui): add path selector modal 2025-09-15 14:14:10 +08:00
shiyu 208a52589f feat: update theme context to support dynamic locale switching 2025-09-14 16:35:12 +08:00
shiyu 0732b611a9 feat: add expired share cleanup functionality 2025-09-14 16:27:46 +08:00
shiyu 7b25e6d3b6 chore: update version to v1.2.4 2025-09-14 13:40:10 +08:00
shiyu 04441d0bc4 feat: add username field to profile modal 2025-09-14 13:20:45 +08:00
shiyu 917b542dab feat: add user profile management 2025-09-14 12:54:49 +08:00
shiyu e43b68beda feat: ensure data/db directory exists during app startup 2025-09-13 16:35:54 +08:00
shiyu 801ff26cc7 chore: update version to v1.2.3 2025-09-12 20:02:21 +08:00
shiyu 284c2d24a2 feat: add basic WebDAV support 2025-09-12 20:00:43 +08:00
shiyu a34be25ec0 feat(window): Add app window management with minimize, restore, and icon support 2025-09-12 19:16:02 +08:00
shiyu db2e02dd32 chore: Reduce gunicorn worker count 2025-09-12 12:06:51 +08:00
shiyu 9bb5310df0 chore: Update version to v1.2.2 2025-09-11 21:15:48 +08:00
shiyu 427a4f023f feat: Add plugin center functionality 2025-09-11 21:11:17 +08:00
shiyu 71a2a88c8e feat: Add PDF viewer 2025-09-10 12:21:13 +08:00
shiyu fb0b7b13d1 feat: Add Monaco editor support 2025-09-10 11:40:24 +08:00
shiyu f484557874 refactor: clean up whitespace and improve readability in logging middleware 2025-09-10 10:58:42 +08:00
shiyu 2b8cfce8f2 chore: update version to v1.2.1 2025-09-09 16:56:26 +08:00
shiyu db453ef09b feat: add i18n with language switcher and English/Chinese translations 2025-09-09 16:50:43 +08:00
shiyu 59c017a05b fix: URL format when generating links 2025-09-09 11:59:01 +08:00
shiyu d42c6b5cee feat: Support more video formats 2025-09-08 19:15:09 +08:00
shiyu 9e69eb3e20 chore: update version to v1.2.0 2025-09-08 16:53:56 +08:00
shiyu 6e7225ac40 feat: implement Quark adapter 2025-09-08 16:51:09 +08:00
shiyu d41b72d0ce feat: Add theme and dark mode 2025-09-08 15:20:49 +08:00
shiyu f40ff4d751 feat: Add App Center plugin functionality 2025-09-08 12:28:37 +08:00
shiyu 280bedcf1a chore: Update version to v1.1.6 2025-09-07 17:00:25 +08:00
shiyu b03f2619ca feat: Add vector database clearing 2025-09-07 16:48:14 +08:00
Kuenpan Foo 72403d5861 feat: Support Docker for ARM architecture (#35) 2025-09-07 16:46:18 +08:00
ShiYu dffcdb7a8b feat: Add video playback and image preview support to share page 2025-09-07 11:05:10 +08:00
shiyu 19c4394f3d feat: Add queue management functionality to TasksPage 2025-09-06 19:44:00 +08:00
时雨 3fd48da2b4 fix: Remove uv sync command from Dockerfile to streamline installation (#33) 2025-09-06 16:55:28 +08:00
shiyu c759b36aba fix: Remove --system flag from uv sync command in Dockerfile 2025-09-06 16:29:27 +08:00
shiyu 99a6acd54a feat: Update Dockerfile to use uv for package management 2025-09-06 16:27:30 +08:00
shiyu 20f6b5c210 chore: Update version to v1.1.5 2025-09-06 16:17:12 +08:00
shiyu 74ffc0bb30 feat: Add sorting functionality to the virtual file system and adapter list methods 2025-09-06 16:15:24 +08:00
shiyu 57919aa7ae feat: Add httpx.AsyncClient timeout settings 2025-09-06 15:27:25 +08:00
shiyu 5126dae411 feat: Migrate to uv for environment management 2025-09-06 14:11:15 +08:00
shiyu 2a78d809af feat: Implement upsert and remove methods in RuntimeRegistry for adapter management 2025-09-05 13:36:12 +08:00
shiyu ce74c2712b chore: Update version to v1.1.4 2025-09-04 11:00:39 +08:00
Copilot 59d6c94a57 feat: Add Markdown direct link feature to DirectLinkModal (#28) 2025-09-04 10:38:37 +08:00
  Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
  Co-authored-by: DrizzleTime <169802108+DrizzleTime@users.noreply.github.com>
  Co-authored-by: 时雨 <im@shiyu.dev>
Zhang Jian fd87dc3ce2 feat: Add auto setup Foxel sh (#26) 2025-09-03 09:56:14 +08:00
shiyu 620ae17732 feat: Update Nginx configuration to include openapi.json in location block 2025-09-02 14:24:47 +08:00
shiyu 9b0dd13816 feat: Add file drag-and-drop functionality #25 2025-09-01 13:38:51 +08:00
shiyu 6a52fa3fd5 chore: Update version to v1.1.3 2025-08-31 19:52:30 +08:00
shiyu 219999914c docs: Update development environment initialization steps 2025-08-31 19:27:39 +08:00
shiyu 1a3d9d41ec feat: Update Telegram adapter to support uploads 2025-08-31 18:22:46 +08:00
shiyu 27ad49d8ed docs: Format badge display in README file 2025-08-31 12:56:15 +08:00
shiyu e230bf6661 docs: Add English README file 2025-08-31 12:53:02 +08:00
shiyu 50fb0b4977 feat: Implement task queue service 2025-08-31 12:48:20 +08:00
shiyu b50f19bcb4 feat: Add application domain and file domain configuration 2025-08-31 12:38:21 +08:00
shiyu 3f3f192d53 feat: Update version to v1.1.2 2025-08-30 15:39:05 +08:00
shiyu 83aaa7a052 feat: Add Artplayer as video player 2025-08-30 11:34:36 +08:00
shiyu a2638f077c feat: Add Telegram storage adapter implementation 2025-08-30 11:16:35 +08:00
shiyu 81eed370a6 feat: Update AI configuration items 2025-08-29 18:41:57 +08:00
shiyu cce39f7b1c feat: Add link button to access documentation page 2025-08-29 15:59:14 +08:00
shiyu 61c2897857 feat: Update requirements.txt 2025-08-29 13:35:44 +08:00
shiyu b15a9b68e1 feat: Update permissions for release drafter workflow 2025-08-29 13:23:10 +08:00
shiyu 1f762a9822 feat: Add release drafter configuration for automated release notes 2025-08-29 13:19:33 +08:00
shiyu 2974425bef feat: Update version to v1.1.1 2025-08-29 13:14:25 +08:00
shiyu 9431d0459f refactor: Remove unused props from GridView component and clean up related code 2025-08-29 13:00:24 +08:00
shiyu 24ce681c28 refactor: Simplify EmptyState component 2025-08-29 12:55:53 +08:00
shiyu 20bc1cfbb7 feat: Implement S3Adapter for S3 compatible object storage with file operations 2025-08-29 12:50:51 +08:00
shiyu 9a7a7a8b81 fix: Improve adapter instance retrieval with refresh logic in resolve_adapter_and_rel and list_virtual_dir 2025-08-29 12:38:11 +08:00
shiyu 2f92fa353c feat: Add OneDrive storage adapter with support for file operations and thumbnail retrieval 2025-08-29 12:08:05 +08:00
shiyu 86e81bf40c refactor: Rename 'mount_path' to 'path' in adapter schemas and related components 2025-08-28 17:00:12 +08:00
shiyu b3b5ae2eac fix: Correct Docker tag assignment for non-tagged pushes 2025-08-28 13:33:07 +08:00
shiyu cfcb28d0ac feat: Update application version to v1.1.0 2025-08-28 13:25:43 +08:00
shiyu 150f6a77fb refactor: Refactor public sharing page 2025-08-28 13:22:46 +08:00
shiyu 62a1c5810d feat: Refactor storage adapter and mount handling; migrate mounts to storage adapters; enhance SideNav; implement database migration scripts 2025-08-28 12:59:24 +08:00
shiyu bfa8898931 docs: Add online experience section 2025-08-27 20:37:10 +08:00
166 changed files with 15205 additions and 2239 deletions

.github/release-drafter.yml (new file, 22 lines)

@@ -0,0 +1,22 @@
name-template: 'v$RESOLVED_VERSION'
tag-template: 'v$RESOLVED_VERSION'
categories:
  - title: '🚀 Features'
    labels:
      - 'feat'
  - title: '🐛 Bug Fixes'
    labels:
      - 'fix'
  - title: '📦 Code Refactoring'
    labels:
      - 'refactor'
  - title: '📄 Documentation'
    labels:
      - 'docs'
  - title: '🧰 Maintenance'
    label: 'chore'
change-template: '- $TITLE @$AUTHOR (#$NUMBER)'
template: |
  ## Changes
  $CHANGES


@@ -32,7 +32,7 @@ jobs:
           VERSION=${GITHUB_REF#refs/tags/}
           echo "DOCKER_TAGS=ghcr.io/${REPO_LC}:${VERSION},ghcr.io/${REPO_LC}:latest" >> $GITHUB_ENV
         else
-          echo "DOCKER_TAGS=ghcr.io/${REPO_LC}:latest" >> $GITHUB_ENV
+          echo "DOCKER_TAGS=ghcr.io/${REPO_LC}:dev" >> $GITHUB_ENV
         fi
       - name: Log in to GitHub Container Registry
@@ -42,10 +42,10 @@ jobs:
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}
-      - name: Build and push Docker image
+      - name: Build and push Docker image (multi arch)
         uses: docker/build-push-action@v5
         with:
           context: .
-          platforms: linux/amd64
+          platforms: linux/amd64,linux/arm64
           push: true
           tags: ${{ env.DOCKER_TAGS }}

.github/workflows/release-drafter.yml (new file, 17 lines)

@@ -0,0 +1,17 @@
name: Release Drafter
on:
  workflow_dispatch:
jobs:
  update_release_draft:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write
    steps:
      - uses: release-drafter/release-drafter@v5
        with:
          config-name: release-drafter.yml
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

.gitignore

@@ -5,5 +5,6 @@ __pycache__/
 .venv/
 .vscode/
 data/
-.env
 migrate/
+.env
+AGENTS.md

.python-version (new file, 1 line)

@@ -0,0 +1 @@
3.13

CONTRIBUTING.md

@@ -1,149 +1,162 @@
+<div align="right">
+  <b>English</b> | <a href="./CONTRIBUTING_zh.md">简体中文</a>
+</div>
 # Contributing to Foxel
-🎉 首先,非常感谢您愿意花时间为 Foxel 做出贡献!
+We appreciate every minute you spend helping Foxel improve. This guide explains the contribution workflow so you can get started quickly.
-我们热烈欢迎各种形式的贡献。无论是报告 Bug、提出新功能建议、完善文档还是直接提交代码,都将对项目产生积极的影响。
+## Table of Contents
-本指南将帮助您顺利地参与到项目中来。
-## 目录
-- [如何贡献](#如何贡献)
-  - [🐛 报告 Bug](#-报告-bug)
-  - [✨ 提交功能建议](#-提交功能建议)
-  - [🛠️ 贡献代码](#-贡献代码)
-- [开发环境搭建](#开发环境搭建)
-  - [依赖准备](#依赖准备)
-  - [后端 (FastAPI)](#后端-fastapi)
-  - [前端 (React + Vite)](#前端-react--vite)
-- [代码贡献指南](#代码贡献指南)
-  - [贡献存储适配器 (Adapter)](#贡献存储适配器-adapter)
-  - [贡献前端应用 (App)](#贡献前端应用-app)
-- [提交规范](#提交规范)
-  - [Git 分支管理](#git-分支管理)
-  - [Commit Message 格式](#commit-message-格式)
-  - [Pull Request 流程](#pull-request-流程)
+- [How to Contribute](#how-to-contribute)
+  - [🐛 Report Bugs](#-report-bugs)
+  - [✨ Suggest Features](#-suggest-features)
+  - [🛠️ Contribute Code](#-contribute-code)
+- [Development Environment](#development-environment)
+  - [Prerequisites](#prerequisites)
+  - [Backend (FastAPI)](#backend-fastapi)
+  - [Frontend (React + Vite)](#frontend-react--vite)
+- [Contribution Guidelines](#contribution-guidelines)
+  - [Storage Adapters](#storage-adapters)
+  - [Frontend Apps](#frontend-apps)
+- [Submission Rules](#submission-rules)
+  - [Git Branching](#git-branching)
+  - [Commit Message Format](#commit-message-format)
+  - [Pull Request Flow](#pull-request-flow)
 ---
-## 如何贡献
+## How to Contribute
-### 🐛 报告 Bug
+### 🐛 Report Bugs
-如果您在使用的过程中发现了 Bug,请通过 [GitHub Issues](https://github.com/DrizzleTime/Foxel/issues) 来报告。请在报告中提供以下信息:
+If you discover a bug, open a ticket via [GitHub Issues](https://github.com/DrizzleTime/Foxel/issues) and include:
-- **清晰的标题**:简明扼要地描述问题。
-- **复现步骤**:详细说明如何一步步重现该 Bug。
-- **期望行为** vs **实际行为**:描述您预期的结果和实际发生的情况。
-- **环境信息**:例如操作系统、浏览器版本、Foxel 版本等。
+- **A clear title** that summarises the problem.
+- **Reproduction steps** with enough detail to trigger the bug.
+- **Expected vs actual behaviour** to highlight the gap.
+- **Environment details** such as operating system, browser version, and the Foxel build you used.
-### ✨ 提交功能建议
+### ✨ Suggest Features
-我们欢迎任何关于新功能或改进的建议。请通过 [GitHub Issues](https://github.com/DrizzleTime/Foxel/issues) 创建一个 "Feature Request",并详细阐述您的想法:
+To propose a new capability or an improvement, create an Issue and choose the "Feature Request" template. Document:
-- **问题描述**:说明该功能要解决什么问题。
-- **方案设想**:描述您希望该功能如何工作。
-- **相关信息**:提供任何有助于理解您想法的截图、链接或参考。
+- **Problem statement**: what pain point will the feature solve?
+- **Proposed solution**: how you expect it to work.
+- **Supporting material**: screenshots, references, or related links if helpful.
-### 🛠️ 贡献代码
+### 🛠️ Contribute Code
-如果您希望直接贡献代码,请参考下面的开发和提交流程。
+Follow the development setup below before opening a pull request. Keep changes focused and small so they are easier to review.
-## 开发环境搭建
+## Development Environment
-### 依赖准备
+### Prerequisites
-- **Git**: 用于版本控制。
-- **Python**: >= 3.13
-- **Bun**: 用于前端包管理和脚本运行。
+Install the following tooling first:
-### 后端 (FastAPI)
+- **Git** for version control
+- **Python** 3.13 or newer
+- **Bun** for frontend package management and scripts
-后端 API 服务基于 Python 和 FastAPI 构建。
+### Backend (FastAPI)
-1. **克隆仓库**
+1. **Clone the repository**
    ```bash
    git clone https://github.com/DrizzleTime/foxel.git
    cd Foxel
    ```
-2. **创建并激活 Python 虚拟环境**
+2. **Create and activate a virtual environment**
+   `uv` is recommended for performance and reproducibility:
    ```bash
-   python3 -m venv .venv
+   uv venv
    source .venv/bin/activate
    # On Windows: .venv\Scripts\activate
    ```
-3. **安装依赖**
+3. **Install dependencies**
    ```bash
-   pip install -r requirements.txt
+   uv sync
    ```
-4. **启动开发服务器**
+4. **Prepare local resources**
+   - Create the data directory:
+     ```bash
+     mkdir -p data/db
+     ```
+     Ensure the application user can read and write to `data/db`.
+   - Create an `.env` file in the project root and provide the required secrets. Replace the sample values with your own random strings:
+     ```dotenv
+     SECRET_KEY=EnsRhL9NFPxgFVc+7t96/y70DIOR+9SpntcIqQa90TU=
+     TEMP_LINK_SECRET_KEY=EnsRhL9NFPxgFVc+7t96/y70DIOR+9SpntcIqQa90TU=
+     ```
+5. **Start the development server**
    ```bash
    uvicorn main:app --reload --host 0.0.0.0 --port 8000
    ```
-API 服务将在 `http://localhost:8000` 上运行,您可以通过 `http://localhost:8000/docs` 访问自动生成的 API 文档。
+The API is available at `http://localhost:8000`, and the interactive docs live at `http://localhost:8000/docs`.
-### 前端 (React + Vite)
+### Frontend (React + Vite)
-前端应用使用 React, Vite, 和 TypeScript 构建。
-1. **进入前端目录**
+1. **Enter the frontend directory**
    ```bash
    cd web
    ```
-2. **安装依赖**
+2. **Install dependencies**
    ```bash
    bun install
    ```
-3. **启动开发服务器**
+3. **Run the dev server**
    ```bash
    bun run dev
    ```
-前端开发服务器将在 `http://localhost:5173` 运行。它已经配置了代理,会自动将 `/api` 请求转发到后端服务。
+The Vite dev server runs at `http://localhost:5173` and proxies `/api` requests to the backend.
-## 代码贡献指南
+## Contribution Guidelines
-### 贡献存储适配器 (Adapter)
+### Storage Adapters
-存储适配器是 Foxel 的核心扩展点,用于接入不同的存储后端 (如 S3, FTP, Alist 等)。
+Storage adapters integrate new storage providers (for example S3, FTP, or Alist).
-1. **创建适配器文件**: 在 [`services/adapters/`](services/adapters/) 目录下,创建一个新文件,例如 `my_new_adapter.py`。
-2. **实现适配器类**:
-   - 创建一个类,继承自 [`services.adapters.base.BaseAdapter`](services/adapters/base.py)。
-   - 实现 `BaseAdapter` 中定义的所有抽象方法,如 `list_dir`, `get_meta`, `upload`, `download` 等。请仔细阅读基类中的文档注释以理解每个方法的作用和参数。
+1. Create a new module under [`services/adapters/`](services/adapters/) (for example `my_new_adapter.py`).
+2. Implement a class that inherits from [`services.adapters.base.BaseAdapter`](services/adapters/base.py) and provide concrete implementations for the abstract methods such as `list_dir`, `get_meta`, `upload`, and `download`.
-### 贡献前端应用 (App)
+### Frontend Apps
-前端应用允许用户在浏览器中直接预览或编辑特定类型的文件。
+Frontend apps enable in-browser previews or editors for specific file types.
-1. **创建应用组件**: 在 [`web/src/apps/`](web/src/apps/) 目录下,为您的应用创建一个新的文件夹,并在其中创建 React 组件。
-2. **定义应用类型**: 您的应用需要实现 [`web/src/apps/types.ts`](web/src/apps/types.ts) 中定义的 `FoxelApp` 接口。
-3. **注册应用**: 在 [`web/src/apps/registry.ts`](web/src/apps/registry.ts) 中,导入您的应用组件,并将其添加到 `APP_REGISTRY`。在注册时,您需要指定该应用可以处理的文件类型(通过 MIME Type 或文件扩展名)。
+1. Add a new folder in [`web/src/apps/`](web/src/apps/) for your app and expose a React component.
+2. Implement the `FoxelApp` interface defined in [`web/src/apps/types.ts`](web/src/apps/types.ts).
+3. Register the app in [`web/src/apps/registry.ts`](web/src/apps/registry.ts) and declare the MIME types or extensions it supports.
-## 提交规范
+## Submission Rules
-### Git 分支管理
+### Git Branching
-- 从最新的 `main` 分支创建您的特性分支。
+Start your work from the latest `main` branch and push feature changes on a dedicated branch.
-### Commit Message 格式
+### Commit Message Format
-我们遵循 [Conventional Commits](https://www.conventionalcommits.org/) 规范。这有助于自动化生成更新日志和版本管理。
-Commit Message 格式如下:
+We follow the [Conventional Commits](https://www.conventionalcommits.org/) specification to drive release tooling.
 ```
 <type>(<scope>): <subject>
@@ -153,27 +166,27 @@ Commit Message 格式如下:
 <footer>
 ```
-- **type**: `feat`, `fix`, `docs`, `style`, `refactor`, `test`, `chore` 等。
-- **scope**: (可选) 本次提交影响的范围,例如 `adapter`, `ui`, `api`。
-- **subject**: 简明扼要的描述。
+- **type**: e.g. `feat`, `fix`, `docs`, `style`, `refactor`, `test`, `chore`.
+- **scope** (optional): the area impacted by the change, such as `adapter`, `ui`, or `api`.
+- **subject**: a concise summary written in the imperative mood.
-**示例:**
+**Examples:**
 ```
-feat(adapter): Add support for Alist storage
+feat(adapter): add support for Alist storage
 ```
 ```
-fix(ui): Correct display issue in file list view
+fix(ui): correct display issue in file list view
 ```
-### Pull Request 流程
+### Pull Request Flow
-1. Fork 仓库并克隆到本地。
-2. 创建并切换到您的特性分支。
-3. 完成代码编写和测试。
-4. 将您的分支推送到您的 Fork 仓库。
-5. 在 Foxel 主仓库创建一个 Pull Request,目标分支为 `main`。
-6. 在 PR 描述中清晰地说明您的更改内容、目的和任何相关的 Issue 编号。
+1. Fork the repository and clone it locally.
+2. Create and switch to your feature branch.
+3. Implement the change and run relevant checks.
+4. Push the branch to your fork.
+5. Open a pull request against `main` in the Foxel repository.
+6. Explain the change set, its motivation, and reference related Issues in the PR description.
-项目维护者会尽快审查您的 PR。感谢您的耐心和贡献!
+Maintainers will review your pull request as soon as possible.
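
As orientation for the adapter guidance in the diff above: a minimal new adapter module might look like the following sketch. The method names come from the guide itself; the exact signatures and return types are assumptions, so check the docstrings in `services/adapters/base.py` for the real contract.

```python
# services/adapters/my_new_adapter.py -- hypothetical sketch, not part of this diff.
# Method names follow the contributing guide; signatures are assumed, not confirmed.
from services.adapters.base import BaseAdapter


class MyNewAdapter(BaseAdapter):
    """Skeleton adapter for a fictional storage backend."""

    async def list_dir(self, path: str):
        # Return the directory entries stored under `path` on the backend.
        raise NotImplementedError

    async def get_meta(self, path: str):
        # Return metadata (size, mtime, is_dir, ...) for a single entry.
        raise NotImplementedError

    async def upload(self, path: str, data: bytes):
        # Persist `data` at `path` on the backend.
        raise NotImplementedError

    async def download(self, path: str):
        # Return the file content stored at `path`.
        raise NotImplementedError
```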

CONTRIBUTING_zh.md (new file, 202 lines)

@@ -0,0 +1,202 @@
<div align="right">
<a href="./CONTRIBUTING.md">English</a> | <b>简体中文</b>
</div>
# Contributing to Foxel
🎉 首先,非常感谢您愿意花时间为 Foxel 做出贡献!
我们热烈欢迎各种形式的贡献。无论是报告 Bug、提出新功能建议、完善文档还是直接提交代码,都将对项目产生积极的影响。
本指南将帮助您顺利地参与到项目中来。
## 目录
- [如何贡献](#如何贡献)
- [🐛 报告 Bug](#-报告-bug)
- [✨ 提交功能建议](#-提交功能建议)
- [🛠️ 贡献代码](#-贡献代码)
- [开发环境搭建](#开发环境搭建)
- [依赖准备](#依赖准备)
- [后端 (FastAPI)](#后端-fastapi)
- [前端 (React + Vite)](#前端-react--vite)
- [代码贡献指南](#代码贡献指南)
- [贡献存储适配器 (Adapter)](#贡献存储适配器-adapter)
- [贡献前端应用 (App)](#贡献前端应用-app)
- [提交规范](#提交规范)
- [Git 分支管理](#git-分支管理)
- [Commit Message 格式](#commit-message-格式)
- [Pull Request 流程](#pull-request-流程)
---
## 如何贡献
### 🐛 报告 Bug
如果您在使用的过程中发现了 Bug,请通过 [GitHub Issues](https://github.com/DrizzleTime/Foxel/issues) 来报告。请在报告中提供以下信息:
- **清晰的标题**:简明扼要地描述问题。
- **复现步骤**:详细说明如何一步步重现该 Bug。
- **期望行为** vs **实际行为**:描述您预期的结果和实际发生的情况。
- **环境信息**:例如操作系统、浏览器版本、Foxel 版本等。
### ✨ 提交功能建议
我们欢迎任何关于新功能或改进的建议。请通过 [GitHub Issues](https://github.com/DrizzleTime/Foxel/issues) 创建一个 "Feature Request",并详细阐述您的想法:
- **问题描述**:说明该功能要解决什么问题。
- **方案设想**:描述您希望该功能如何工作。
- **相关信息**:提供任何有助于理解您想法的截图、链接或参考。
### 🛠️ 贡献代码
如果您希望直接贡献代码,请参考下面的开发和提交流程。
## 开发环境搭建
### 依赖准备
- **Git**: 用于版本控制。
- **Python**: >= 3.13
- **Bun**: 用于前端包管理和脚本运行。
### 后端 (FastAPI)
后端 API 服务基于 Python 和 FastAPI 构建。
1. **克隆仓库**
```bash
git clone https://github.com/DrizzleTime/foxel.git
cd Foxel
```
2. **创建并激活 Python 虚拟环境**
我们推荐使用 `uv` 来管理虚拟环境,以获得最佳性能。
```bash
uv venv
source .venv/bin/activate
# On Windows: .venv\Scripts\activate
```
3. **安装依赖**
```bash
uv sync
```
4. **初始化环境**
在启动服务前,请进行以下准备:
- **创建数据目录**:
在项目根目录执行 `mkdir -p data/db`。这将创建用于存放数据库等文件的目录。
> [!IMPORTANT]
> 请确保应用拥有对 `data/db` 目录的读写权限。
- **创建 `.env` 配置文件**:
在项目根目录创建名为 `.env` 的文件,并填入以下内容。这些密钥用于保障应用安全,您可以按需修改。
```dotenv
SECRET_KEY=EnsRhL9NFPxgFVc+7t96/y70DIOR+9SpntcIqQa90TU=
TEMP_LINK_SECRET_KEY=EnsRhL9NFPxgFVc+7t96/y70DIOR+9SpntcIqQa90TU=
```
5. **启动开发服务器**
```bash
uvicorn main:app --reload --host 0.0.0.0 --port 8000
```
API 服务将在 `http://localhost:8000` 上运行,您可以通过 `http://localhost:8000/docs` 访问自动生成的 API 文档。
### 前端 (React + Vite)
前端应用使用 React, Vite, 和 TypeScript 构建。
1. **进入前端目录**
```bash
cd web
```
2. **安装依赖**
```bash
bun install
```
3. **启动开发服务器**
```bash
bun run dev
```
前端开发服务器将在 `http://localhost:5173` 运行。它已经配置了代理,会自动将 `/api` 请求转发到后端服务。
## 代码贡献指南
### 贡献存储适配器 (Adapter)
存储适配器是 Foxel 的核心扩展点,用于接入不同的存储后端 (如 S3, FTP, Alist 等)。
1. **创建适配器文件**: 在 [`services/adapters/`](services/adapters/) 目录下,创建一个新文件,例如 `my_new_adapter.py`。
2. **实现适配器类**:
- 创建一个类,继承自 [`services.adapters.base.BaseAdapter`](services/adapters/base.py)。
- 实现 `BaseAdapter` 中定义的所有抽象方法,如 `list_dir`, `get_meta`, `upload`, `download` 等。请仔细阅读基类中的文档注释以理解每个方法的作用和参数。
### 贡献前端应用 (App)
前端应用允许用户在浏览器中直接预览或编辑特定类型的文件。
1. **创建应用组件**: 在 [`web/src/apps/`](web/src/apps/) 目录下,为您的应用创建一个新的文件夹,并在其中创建 React 组件。
2. **定义应用类型**: 您的应用需要实现 [`web/src/apps/types.ts`](web/src/apps/types.ts) 中定义的 `FoxelApp` 接口。
3. **注册应用**: 在 [`web/src/apps/registry.ts`](web/src/apps/registry.ts) 中,导入您的应用组件,并将其添加到 `APP_REGISTRY`。在注册时,您需要指定该应用可以处理的文件类型(通过 MIME Type 或文件扩展名)。
## 提交规范
### Git 分支管理
- 从最新的 `main` 分支创建您的特性分支。
### Commit Message 格式
我们遵循 [Conventional Commits](https://www.conventionalcommits.org/) 规范。这有助于自动化生成更新日志和版本管理。
Commit Message 格式如下:
```
<type>(<scope>): <subject>
<BLANK LINE>
<body>
<BLANK LINE>
<footer>
```
- **type**: `feat`, `fix`, `docs`, `style`, `refactor`, `test`, `chore` 等。
- **scope**: (可选) 本次提交影响的范围,例如 `adapter`, `ui`, `api`。
- **subject**: 简明扼要的描述。
**示例:**
```
feat(adapter): Add support for Alist storage
```
```
fix(ui): Correct display issue in file list view
```
### Pull Request 流程
1. Fork 仓库并克隆到本地。
2. 创建并切换到您的特性分支。
3. 完成代码编写和测试。
4. 将您的分支推送到您的 Fork 仓库。
5. 在 Foxel 主仓库创建一个 Pull Request,目标分支为 `main`。
6. 在 PR 描述中清晰地说明您的更改内容、目的和任何相关的 Issue 编号。
项目维护者会尽快审查您的 PR。感谢您的耐心和贡献!

Dockerfile

@@ -13,10 +13,13 @@ FROM python:3.13-slim
 WORKDIR /app
-RUN apt-get update && apt-get install -y nginx && rm -rf /var/lib/apt/lists/*
+RUN apt-get update && apt-get install -y nginx git && rm -rf /var/lib/apt/lists/*
-COPY requirements.txt .
-RUN pip install --no-cache-dir -r requirements.txt && pip install gunicorn
+RUN pip install uv
+COPY pyproject.toml uv.lock ./
+RUN uv pip install --system . gunicorn
+RUN git clone https://github.com/DrizzleTime/FoxelUpgrade /app/migrate
 COPY --from=frontend-builder /app/web/dist /app/web/dist
@@ -24,6 +27,9 @@ COPY . .
 COPY nginx.conf /etc/nginx/nginx.conf
+RUN mkdir -p data/db data/mount && \
+    chmod 777 data/db data/mount
 EXPOSE 80
 COPY entrypoint.sh /entrypoint.sh

README.md

@@ -1,8 +1,12 @@
+<div align="right">
+  <b>English</b> | <a href="./README_zh.md">简体中文</a>
+</div>
 <div align="center">
 # Foxel
-**一个面向个人和团队的、高度可扩展的私有云盘解决方案,支持 AI 语义搜索。**
+**A highly extensible private cloud storage solution for individuals and teams, featuring AI-powered semantic search.**
 ![Python Version](https://img.shields.io/badge/Python-3.13+-blue.svg)
 ![React](https://img.shields.io/badge/React-19.0-blue.svg)
@@ -11,26 +15,32 @@
 ---
 <blockquote>
 <em><strong>数据之洋浩瀚无涯,当以洞察之目引航,然其脉络深隐,非表象所能尽窥。</strong></em><br>
 <em><strong>The ocean of data is boundless, let the eye of insight guide the voyage, yet its intricate connections lie deep, not fully discernible from the surface.</strong></em>
 </blockquote>
+<img src="https://foxel.cc/image/ad-min.png" alt="UI Screenshot">
 </div>
-## ✨ 核心功能
+## 👀 Online Demo
-- **统一文件管理**:集中管理分布于不同存储后端的文件。
-- **插件化存储后端**:采用可扩展的适配器模式,方便集成多种存储类型。
-- **语义搜索**:支持自然语言描述搜索图片、文档等非结构化数据内容。
-- **内置文件预览**:可直接预览图片、视频、PDF、Office 文档及文本、代码文件,无需下载。
-- **权限与分享**:支持公开或私密分享链接,便于文件共享。
-- **任务处理中心**:支持异步任务处理,如文件索引和数据备份,不影响主应用运行。
+> [https://demo.foxel.cc](https://demo.foxel.cc)
+>
+> Account/Password: `admin` / `admin`
-## 🚀 快速开始
+## ✨ Core Features
-使用 Docker Compose 是启动 Foxel 最推荐的方式。
+- **Unified File Management**: Centralize management of files distributed across different storage backends.
+- **Pluggable Storage Backends**: Utilizes an extensible adapter pattern to easily integrate various storage types.
+- **Semantic Search**: Supports natural language search for content within unstructured data like images and documents.
+- **Built-in File Preview**: Preview images, videos, PDFs, Office documents, text, and code files directly without downloading.
+- **Permissions and Sharing**: Supports public or private sharing links for easy file distribution.
+- **Task Processing Center**: Supports asynchronous task processing, such as file indexing and data backups, without impacting the main application.
-1. **创建数据目录**:
-   新建 `data` 文件夹用于持久化数据:
+## 🚀 Quick Start
+Using Docker Compose is the most recommended way to start Foxel.
+1. **Create Data Directories**:
+   Create a `data` folder for persistent data:
 ```bash
 mkdir -p data/db
@@ -38,40 +48,40 @@ mkdir -p data/mount
 chmod 777 data/db data/mount
 ```
-2. **下载 Docker Compose 文件**:
+2. **Download Docker Compose File**:
 ```bash
 curl -L -O https://github.com/DrizzleTime/Foxel/raw/main/compose.yaml
 ```
-下载完成后,**强烈建议**修改 `compose.yaml` 文件中的环境变量以确保安全:
+After downloading, it is **strongly recommended** to modify the environment variables in the `compose.yaml` file to ensure security:
-- 修改 `SECRET_KEY` 和 `TEMP_LINK_SECRET_KEY`:将默认的密钥替换为随机生成的强密钥
+- Modify `SECRET_KEY` and `TEMP_LINK_SECRET_KEY`: Replace the default keys with randomly generated strong keys.
-3. **启动服务**:
+3. **Start the Services**:
 ```bash
 docker-compose up -d
 ```
-4. **访问应用**:
+4. **Access the Application**:
-服务启动后,在浏览器中打开页面。
+Once the services are running, open the page in your browser.
-> 首次启动,请根据引导页面完成管理员账号的初始化设置。
+> On the first launch, please follow the setup guide to initialize the administrator account.
-## 🤝 如何贡献
+## 🤝 How to Contribute
-我们非常欢迎来自社区的贡献!无论是提交 Bug、建议新功能还是直接贡献代码。
+We welcome contributions from the community! Whether it's submitting bugs, suggesting new features, or contributing code directly.
-在开始之前,请先阅读我们的 [`CONTRIBUTING.md`](CONTRIBUTING.md) 文件,它会指导你如何设置开发环境以及提交流程。
+Before you start, please read our [`CONTRIBUTING.md`](CONTRIBUTING.md) file, which explains the development environment and submission process. A Simplified Chinese translation is available in [`CONTRIBUTING_zh.md`](CONTRIBUTING_zh.md).
-## 🌐 社区
+## 🌐 Community
-加入我们的交流社区:[Telegram 群组](https://t.me/+thDsBfyqJxZkNTU1),与开发者和用户一起讨论!
+Join our community on [Telegram](https://t.me/+thDsBfyqJxZkNTU1) to discuss with developers and other users!
-你也可以加入我们的微信群,获取更多实时交流与支持。请扫描下方二维码加入:
+You can also join our WeChat group for more real-time communication and support. Please scan the QR code below to join:
-<img src="https://foxel.cc/image/wechat.png" alt="微信群二维码" width="180">
+<img src="https://foxel.cc/image/wechat.png" alt="WeChat Group QR Code" width="180">
-> 如果二维码失效,请添加微信号 **drizzle2001**,我们会邀请你加入群聊。
+> If the QR code is invalid, please add WeChat ID **drizzle2001**, and we will invite you to the group.

README_zh.md (new file, 88 lines)

@@ -0,0 +1,88 @@
<div align="right">
<a href="./README.md">English</a> | <b>简体中文</b>
</div>
<div align="center">
# Foxel
**一个面向个人和团队的、高度可扩展的私有云盘解决方案,支持 AI 语义搜索。**
![Python Version](https://img.shields.io/badge/Python-3.13+-blue.svg)
![React](https://img.shields.io/badge/React-19.0-blue.svg)
![License](https://img.shields.io/badge/license-MIT-green.svg)
![GitHub stars](https://img.shields.io/github/stars/DrizzleTime/foxel?style=social)
---
<blockquote>
<em><strong>数据之洋浩瀚无涯,当以洞察之目引航,然其脉络深隐,非表象所能尽窥。</strong></em><br>
<em><strong>The ocean of data is boundless, let the eye of insight guide the voyage, yet its intricate connections lie deep, not fully discernible from the surface.</strong></em>
</blockquote>
<img src="https://foxel.cc/image/ad-min.png" alt="UI Screenshot">
</div>
## 👀 在线体验
> [https://demo.foxel.cc](https://demo.foxel.cc)
>
> 账号/密码:`admin` / `admin`
## ✨ 核心功能
- **统一文件管理**:集中管理分布于不同存储后端的文件。
- **插件化存储后端**:采用可扩展的适配器模式,方便集成多种存储类型。
- **语义搜索**:支持自然语言描述搜索图片、文档等非结构化数据内容。
- **内置文件预览**可直接预览图片、视频、PDF、Office 文档及文本、代码文件,无需下载。
- **权限与分享**:支持公开或私密分享链接,便于文件共享。
- **任务处理中心**:支持异步任务处理,如文件索引和数据备份,不影响主应用运行。
## 🚀 快速开始
使用 Docker Compose 是启动 Foxel 最推荐的方式。
1. **创建数据目录**:
新建 `data` 文件夹用于持久化数据:
```bash
mkdir -p data/db
mkdir -p data/mount
chmod 777 data/db data/mount
```
2. **下载 Docker Compose 文件**:
```bash
curl -L -O https://github.com/DrizzleTime/Foxel/raw/main/compose.yaml
```
下载完成后,**强烈建议**修改 `compose.yaml` 文件中的环境变量以确保安全:
- 修改 `SECRET_KEY` 和 `TEMP_LINK_SECRET_KEY`:将默认的密钥替换为随机生成的强密钥
3. **启动服务**:
```bash
docker-compose up -d
```
4. **访问应用**:
服务启动后,在浏览器中打开页面。
> 首次启动,请根据引导页面完成管理员账号的初始化设置。
## 🤝 如何贡献
我们非常欢迎来自社区的贡献!无论是提交 Bug、建议新功能还是直接贡献代码。
在开始之前,请先阅读我们的 [`CONTRIBUTING_zh.md`](CONTRIBUTING_zh.md) 文件,它会指导你如何设置开发环境以及提交流程。
## 🌐 社区
加入我们的交流社区:[Telegram 群组](https://t.me/+thDsBfyqJxZkNTU1),与开发者和用户一起讨论!
你也可以加入我们的微信群,获取更多实时交流与支持。请扫描下方二维码加入:
<img src="https://foxel.cc/image/wechat.png" alt="微信群二维码" width="180">
> 如果二维码失效,请添加微信号 **drizzle2001**,我们会邀请你加入群聊。


@@ -1,13 +1,14 @@
 from fastapi import FastAPI
-from .routes import adapters, virtual_fs, mounts, auth, config, processors, tasks, logs, share, backup, search
+from .routes import adapters, virtual_fs, auth, config, processors, tasks, logs, share, backup, search, vector_db, offline_downloads
+from .routes import webdav
+from .routes import plugins
 def include_routers(app: FastAPI):
     app.include_router(adapters.router)
     app.include_router(virtual_fs.router)
     app.include_router(search.router)
-    app.include_router(mounts.router)
     app.include_router(auth.router)
     app.include_router(config.router)
     app.include_router(processors.router)
@@ -15,4 +16,8 @@ def include_routers(app: FastAPI):
     app.include_router(logs.router)
     app.include_router(share.router)
     app.include_router(share.public_router)
-    app.include_router(backup.router)
+    app.include_router(backup.router)
+    app.include_router(vector_db.router)
+    app.include_router(plugins.router)
+    app.include_router(webdav.router)
+    app.include_router(offline_downloads.router)

api/routes/adapters.py

@@ -2,7 +2,7 @@ from fastapi import APIRouter, HTTPException, Depends
 from tortoise.transactions import in_transaction
 from typing import Annotated
-from models import StorageAdapter, Mount
+from models import StorageAdapter
 from schemas import AdapterCreate, AdapterOut
 from services.auth import get_current_active_user, User
 from services.adapters.registry import runtime_registry, get_config_schemas
@@ -39,27 +39,22 @@ async def create_adapter(
     data: AdapterCreate,
     current_user: Annotated[User, Depends(get_current_active_user)]
 ):
+    norm_path = AdapterCreate.normalize_mount_path(data.path)
+    exists = await StorageAdapter.get_or_none(path=norm_path)
+    if exists:
+        raise HTTPException(400, detail="Mount path already exists")
     adapter_fields = {
         "name": data.name,
         "type": data.type,
         "config": validate_and_normalize_config(data.type, data.config or {}),
         "enabled": data.enabled,
+        "path": norm_path,
+        "sub_path": data.sub_path,
     }
-    norm_path = AdapterCreate.normalize_mount_path(data.mount_path)
-    exists = await Mount.get_or_none(path=norm_path)
-    if exists:
-        raise HTTPException(400, detail="Mount path already exists")
-    async with in_transaction():
-        rec = await StorageAdapter.create(**adapter_fields)
-        await Mount.create(
-            path=norm_path,
-            sub_path=data.sub_path,
-            adapter=rec,
-            enabled=True,
-        )
-    rec.mount_path = norm_path
-    rec.sub_path = data.sub_path
-    await runtime_registry.refresh()
+    rec = await StorageAdapter.create(**adapter_fields)
+    await runtime_registry.upsert(rec)
     await LogService.action(
         "route:adapters",
         f"Created adapter {rec.name}",
@@ -73,20 +68,8 @@ async def create_adapter(
 async def list_adapters(
     current_user: Annotated[User, Depends(get_current_active_user)]
 ):
-    adapters = await StorageAdapter.all().prefetch_related("mounts")
-    out = []
-    for a in adapters:
-        mount = a.mounts[0] if a.mounts else None
-        item = AdapterOut(
-            name=a.name,
-            type=a.type,
-            config=a.config,
-            enabled=a.enabled,
-            id=a.id,
-            mount_path=mount.path if mount else None,
-            sub_path=mount.sub_path if mount else None
-        )
-        out.append(item)
+    adapters = await StorageAdapter.all()
+    out = [AdapterOut.model_validate(a) for a in adapters]
     return success(out)
@@ -109,13 +92,10 @@ async def get_adapter(
     adapter_id: int,
     current_user: Annotated[User, Depends(get_current_active_user)]
 ):
-    rec = await StorageAdapter.get_or_none(id=adapter_id).prefetch_related("mounts")
+    rec = await StorageAdapter.get_or_none(id=adapter_id)
     if not rec:
         raise HTTPException(404, detail="Not found")
-    mount = rec.mounts[0] if rec.mounts else None
-    rec.mount_path = mount.path if mount else None
-    rec.sub_path = mount.sub_path if mount else None
-    return success(rec)
+    return success(AdapterOut.model_validate(rec))
 @router.put("/{adapter_id}")
@@ -124,34 +104,24 @@ async def update_adapter(
     data: AdapterCreate,
     current_user: Annotated[User, Depends(get_current_active_user)]
 ):
-    rec = await StorageAdapter.get_or_none(id=adapter_id).prefetch_related("mounts")
+    rec = await StorageAdapter.get_or_none(id=adapter_id)
     if not rec:
         raise HTTPException(404, detail="Not found")
-    norm_path = AdapterCreate.normalize_mount_path(data.mount_path)
-    existing = await Mount.get_or_none(path=norm_path)
-    mount = rec.mounts[0] if rec.mounts else None
-    if existing and (not mount or existing.id != mount.id):
+    norm_path = AdapterCreate.normalize_mount_path(data.path)
+    existing = await StorageAdapter.get_or_none(path=norm_path)
+    if existing and existing.id != adapter_id:
         raise HTTPException(400, detail="Mount path already exists")
     rec.name = data.name
     rec.type = data.type
     rec.config = validate_and_normalize_config(data.type, data.config or {})
     rec.enabled = data.enabled
+    rec.path = norm_path
+    rec.sub_path = data.sub_path
     await rec.save()
-    if mount:
-        mount.path = norm_path
-        mount.sub_path = data.sub_path
-        await mount.save()
-    else:
-        mount = await Mount.create(
-            path=norm_path,
-            sub_path=data.sub_path,
-            adapter=rec,
-            enabled=True,
-        )
-    rec.mount_path = mount.path
-    rec.sub_path = mount.sub_path
-    await runtime_registry.refresh()
+    await runtime_registry.upsert(rec)
     await LogService.action(
         "route:adapters",
         f"Updated adapter {rec.name}",
@@ -169,7 +139,7 @@ async def delete_adapter(
     deleted = await StorageAdapter.filter(id=adapter_id).delete()
     if not deleted:
         raise HTTPException(404, detail="Not found")
-    await runtime_registry.refresh()
+    runtime_registry.remove(adapter_id)
     await LogService.action(
         "route:adapters",
         f"Deleted adapter {adapter_id}",

api/routes/auth.py

@@ -1,5 +1,6 @@
 from typing import Annotated
 from fastapi import APIRouter, HTTPException, Depends, Form
+import hashlib
 from fastapi.security import OAuth2PasswordRequestForm
 from services.auth import (
     authenticate_user_db,
@@ -7,10 +8,14 @@ from services.auth import (
     ACCESS_TOKEN_EXPIRE_MINUTES,
     register_user,
     Token,
+    get_current_active_user,
+    User,
 )
 from pydantic import BaseModel
 from datetime import timedelta
 from api.response import success
+from models.database import UserAccount
+from services.auth import verify_password, get_password_hash
 router = APIRouter(prefix="/api/auth", tags=["auth"])
@@ -21,6 +26,7 @@ class RegisterRequest(BaseModel):
     email: str | None = None
     full_name: str | None = None
+
 @router.post("/register", summary="注册第一个管理员用户")
 async def register(data: RegisterRequest):
     """
@@ -51,3 +57,66 @@ async def login_for_access_token(
         data={"sub": user.username}, expires_delta=access_token_expires
     )
     return Token(access_token=access_token, token_type="bearer")
+
+@router.get("/me", summary="获取当前登录用户信息")
+async def get_me(current_user: Annotated[User, Depends(get_current_active_user)]):
+    """
+    返回当前登录用户的基本信息,并附带 gravatar 头像链接。
+    """
+    email = (current_user.email or "").strip().lower()
+    md5_hash = hashlib.md5(email.encode("utf-8")).hexdigest()
+    gravatar_url = f"https://www.gravatar.com/avatar/{md5_hash}?s=64&d=identicon"
+    return success({
+        "id": current_user.id,
+        "username": current_user.username,
+        "email": current_user.email,
+        "full_name": current_user.full_name,
+        "gravatar_url": gravatar_url,
+    })
+
+class UpdateMeRequest(BaseModel):
+    email: str | None = None
+    full_name: str | None = None
+    old_password: str | None = None
+    new_password: str | None = None
+
+@router.put("/me", summary="更新当前登录用户信息")
+async def update_me(
+    payload: UpdateMeRequest,
+    current_user: Annotated[User, Depends(get_current_active_user)],
+):
+    db_user = await UserAccount.get_or_none(id=current_user.id)
+    if not db_user:
+        raise HTTPException(status_code=404, detail="用户不存在")
+    if payload.email is not None:
+        exists = await UserAccount.filter(email=payload.email).exclude(id=db_user.id).exists()
+        if exists:
+            raise HTTPException(status_code=400, detail="邮箱已被占用")
+        db_user.email = payload.email
+    if payload.full_name is not None:
+        db_user.full_name = payload.full_name
+    if payload.new_password:
+        if not payload.old_password:
+            raise HTTPException(status_code=400, detail="请提供原密码")
+        if not verify_password(payload.old_password, db_user.hashed_password):
+            raise HTTPException(status_code=400, detail="原密码错误")
+        db_user.hashed_password = get_password_hash(payload.new_password)
+    await db_user.save()
+    email = (db_user.email or "").strip().lower()
+    md5_hash = hashlib.md5(email.encode("utf-8")).hexdigest()
+    gravatar_url = f"https://cn.cravatar.com/avatar/{md5_hash}?s=64&d=identicon"
+    return success({
+        "id": db_user.id,
+        "username": db_user.username,
+        "email": db_user.email,
+        "full_name": db_user.full_name,
+        "gravatar_url": gravatar_url,
+    })

api/routes/config.py

@@ -1,10 +1,11 @@
 import httpx
 import time
-from fastapi import APIRouter, Depends, Form
+from fastapi import APIRouter, Depends, Form, HTTPException
 from typing import Annotated
 from services.config import ConfigCenter, VERSION
 from services.auth import get_current_active_user, User, has_users
 from api.response import success
+from services.vector_db import VectorDBService
 router = APIRouter(prefix="/api/config", tags=["config"])
@@ -23,8 +24,27 @@ async def set_config(
     key: str = Form(...),
     value: str = Form(...)
 ):
-    await ConfigCenter.set(key, value)
-    return success({"key": key, "value": value})
+    original_value = await ConfigCenter.get(key)
+    value_to_save = value
+    if key == "AI_EMBED_DIM":
+        try:
+            parsed_value = int(value)
+        except (TypeError, ValueError):
+            raise HTTPException(status_code=400, detail="AI_EMBED_DIM must be an integer")
+        if parsed_value <= 0:
+            raise HTTPException(status_code=400, detail="AI_EMBED_DIM must be greater than zero")
+        value_to_save = str(parsed_value)
+    await ConfigCenter.set(key, value_to_save)
+    if key == "AI_EMBED_DIM" and str(original_value) != value_to_save:
+        try:
+            service = VectorDBService()
+            await service.clear_all_data()
+        except Exception as exc:
+            raise HTTPException(status_code=500, detail=f"Failed to clear vector database: {exc}")
+    return success({"key": key, "value": value_to_save})
@router.get("/all")
@@ -41,7 +61,9 @@ async def get_system_status():
"version": VERSION,
"title": await ConfigCenter.get("APP_NAME", "Foxel"),
"logo": await ConfigCenter.get("APP_LOGO", "/logo.svg"),
"is_initialized": await has_users()
"is_initialized": await has_users(),
"app_domain": await ConfigCenter.get("APP_DOMAIN"),
"file_domain": await ConfigCenter.get("FILE_DOMAIN"),
}
return success(system_info)

api/routes/mounts.py (deleted file, 84 lines)

@@ -1,84 +0,0 @@
from fastapi import APIRouter, HTTPException, Depends
from typing import Annotated
from models import StorageAdapter, Mount
from schemas import MountCreate, MountOut
from api.response import success
from services.auth import get_current_active_user, User
from services.logging import LogService

router = APIRouter(prefix="/api/mounts", tags=["mounts"])

@router.post("")
async def create_mount(
    data: MountCreate,
    current_user: Annotated[User, Depends(get_current_active_user)],
):
    adapter = await StorageAdapter.get_or_none(id=data.adapter_id)
    if not adapter:
        raise HTTPException(400, detail="Adapter not found")
    rec = await Mount.create(
        path=MountCreate.normalize(data.path),
        adapter=adapter,
        sub_path=data.sub_path,
        enabled=data.enabled,
    )
    await LogService.action(
        "route:mounts",
        f"Created mount {rec.path}",
        details=data.model_dump(),
        user_id=current_user.id if hasattr(current_user, "id") else None,
    )
    return success(rec)

@router.get("")
async def list_mounts(
    current_user: Annotated[User, Depends(get_current_active_user)],
):
    recs = await Mount.all()
    return success(recs)

@router.put("/{mount_id}")
async def update_mount(
    mount_id: int,
    data: MountCreate,
    current_user: Annotated[User, Depends(get_current_active_user)],
):
    rec = await Mount.get_or_none(id=mount_id)
    if not rec:
        raise HTTPException(404, detail="Not found")
    adapter = await StorageAdapter.get_or_none(id=data.adapter_id)
    if not adapter:
        raise HTTPException(400, detail="Adapter not found")
    rec.path = MountCreate.normalize(data.path)
    rec.adapter = adapter
    rec.sub_path = data.sub_path
    rec.enabled = data.enabled
    await rec.save()
    await LogService.action(
        "route:mounts",
        f"Updated mount {rec.path}",
        details=data.model_dump(),
        user_id=current_user.id if hasattr(current_user, "id") else None,
    )
    return success(rec)

@router.delete("/{mount_id}")
async def delete_mount(
    mount_id: int,
    current_user: Annotated[User, Depends(get_current_active_user)],
):
    deleted = await Mount.filter(id=mount_id).delete()
    if not deleted:
        raise HTTPException(404, detail="Not found")
    await LogService.action(
        "route:mounts",
        f"Deleted mount {mount_id}",
        details={"mount_id": mount_id},
        user_id=current_user.id if hasattr(current_user, "id") else None,
    )
    return success({"deleted": True})

api/routes/offline_downloads.py (new file, 79 lines)

@@ -0,0 +1,79 @@
from typing import Annotated
from fastapi import APIRouter, Depends, HTTPException
from api.response import success
from schemas.offline_downloads import OfflineDownloadCreate
from services.auth import User, get_current_active_user
from services.logging import LogService
from services.task_queue import task_queue_service, TaskProgress
from services.virtual_fs import path_is_directory

router = APIRouter(
    prefix="/api/offline-downloads",
    tags=["OfflineDownloads"],
)

@router.post("/")
async def create_offline_download(
    payload: OfflineDownloadCreate,
    current_user: Annotated[User, Depends(get_current_active_user)],
):
    dest_dir = payload.dest_dir
    try:
        is_dir = await path_is_directory(dest_dir)
    except HTTPException:
        is_dir = False
    if not is_dir:
        raise HTTPException(400, detail="Destination directory not found")
    task = await task_queue_service.add_task(
        "offline_http_download",
        {
            "url": str(payload.url),
            "dest_dir": dest_dir,
            "filename": payload.filename,
        },
    )
    await task_queue_service.update_progress(
        task.id,
        TaskProgress(
            stage="queued",
            percent=0.0,
            bytes_total=None,
            bytes_done=0,
            detail="Waiting to start",
        ),
    )
    await LogService.action(
        "route:offline_downloads",
        f"Offline download task created {task.id}",
        details={"url": str(payload.url), "dest_dir": dest_dir, "filename": payload.filename},
        user_id=current_user.id if hasattr(current_user, "id") else None,
    )
    return success({"task_id": task.id})

@router.get("/")
async def list_offline_downloads(
    current_user: Annotated[User, Depends(get_current_active_user)],
):
    tasks = [t for t in task_queue_service.get_all_tasks() if t.name == "offline_http_download"]
    data = [t.dict() for t in tasks]
    return success(data)

@router.get("/{task_id}")
async def get_offline_download(
    task_id: str,
    current_user: Annotated[User, Depends(get_current_active_user)],
):
    task = task_queue_service.get_task(task_id)
    if not task or task.name != "offline_http_download":
        raise HTTPException(status_code=404, detail="Task not found")
    return success(task.dict())
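
A rough client-side sketch of how these new endpoints fit together. The base URL, the bearer token, and the `data` envelope of the `success()` response wrapper are assumptions for illustration, not confirmed by this diff:

```python
# Hypothetical usage of the offline-download API above; adjust names and the
# auth flow to the actual deployment before relying on this.
import asyncio
import httpx

BASE_URL = "http://localhost:8000"
TOKEN = "<bearer token obtained from the auth endpoints>"

async def main() -> None:
    headers = {"Authorization": f"Bearer {TOKEN}"}
    async with httpx.AsyncClient(base_url=BASE_URL, headers=headers) as client:
        # Queue a download; the route first verifies that dest_dir exists.
        created = await client.post("/api/offline-downloads/", json={
            "url": "https://example.com/big-file.iso",
            "dest_dir": "/downloads",
            "filename": "big-file.iso",
        })
        created.raise_for_status()
        task_id = created.json()["data"]["task_id"]  # envelope shape assumed
        # Poll the task until the queue worker reports progress or completion.
        status = await client.get(f"/api/offline-downloads/{task_id}")
        print(status.json())

asyncio.run(main())
```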

api/routes/plugins.py (new file, 73 lines)

@@ -0,0 +1,73 @@
from typing import List, Any, Dict
from fastapi import APIRouter, HTTPException, Body
from models import database
from schemas import PluginCreate, PluginOut

router = APIRouter(prefix="/api/plugins", tags=["plugins"])

@router.post("", response_model=PluginOut)
async def create_plugin(payload: PluginCreate):
    rec = await database.Plugin.create(
        url=payload.url,
        enabled=payload.enabled,
    )
    return PluginOut.model_validate(rec)

@router.get("", response_model=List[PluginOut])
async def list_plugins():
    rows = await database.Plugin.all().order_by("-id")
    return [PluginOut.model_validate(r) for r in rows]

@router.delete("/{plugin_id}")
async def delete_plugin(plugin_id: int):
    rec = await database.Plugin.get_or_none(id=plugin_id)
    if not rec:
        raise HTTPException(status_code=404, detail="Plugin not found")
    await rec.delete()
    return {"code": 0, "msg": "ok"}

@router.put("/{plugin_id}", response_model=PluginOut)
async def update_plugin(plugin_id: int, payload: PluginCreate):
    rec = await database.Plugin.get_or_none(id=plugin_id)
    if not rec:
        raise HTTPException(status_code=404, detail="Plugin not found")
    rec.url = payload.url
    rec.enabled = payload.enabled
    await rec.save()
    return PluginOut.model_validate(rec)

@router.post("/{plugin_id}/metadata", response_model=PluginOut)
async def update_manifest(plugin_id: int, manifest: Dict[str, Any] = Body(...)):
    rec = await database.Plugin.get_or_none(id=plugin_id)
    if not rec:
        raise HTTPException(status_code=404, detail="Plugin not found")
    key_map = {
        'key': 'key',
        'name': 'name',
        'version': 'version',
        'supported_exts': 'supported_exts',
        'supportedExts': 'supported_exts',
        'default_bounds': 'default_bounds',
        'defaultBounds': 'default_bounds',
        'default_maximized': 'default_maximized',
        'defaultMaximized': 'default_maximized',
        'icon': 'icon',
        'description': 'description',
        'author': 'author',
        'website': 'website',
        'github': 'github',
    }
    for k, v in list(manifest.items()):
        if v is None:
            continue
        attr = key_map.get(k)
        if not attr:
            continue
        setattr(rec, attr, v)
    await rec.save()
    return PluginOut.model_validate(rec)
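
The `key_map` above accepts both camelCase and snake_case manifest keys, so JavaScript-style JSON normalizes onto the same model fields. A hypothetical payload for `POST /api/plugins/{plugin_id}/metadata` (all values invented for illustration):

```python
# Hypothetical manifest body; "supportedExts" and "supported_exts" would both
# be written to Plugin.supported_exts by the key_map normalization above.
manifest = {
    "key": "markdown-viewer",
    "name": "Markdown Viewer",
    "version": "0.1.0",
    "supportedExts": ["md", "markdown"],  # camelCase spelling
    "defaultMaximized": False,            # also accepted as default_maximized
    "icon": "https://example.com/icon.svg",
    "description": "Render Markdown files in a window.",
}
```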

api/routes/processors.py

@@ -1,10 +1,17 @@
-from fastapi import APIRouter, Depends, Body
+from pathlib import Path
+from fastapi import APIRouter, Depends, Body, HTTPException
+from fastapi.concurrency import run_in_threadpool
 from typing import Annotated
-from services.processors.registry import get_config_schemas
-from services.virtual_fs import process_file
+from services.processors.registry import (
+    get_config_schemas,
+    get_module_path,
+    reload_processors,
+)
+from services.task_queue import task_queue_service
 from services.auth import get_current_active_user, User
 from api.response import success
 from pydantic import BaseModel
+from services.virtual_fs import path_is_directory
 router = APIRouter(prefix="/api/processors", tags=["processors"])
@@ -21,7 +28,8 @@ async def list_processors(
"name": meta["name"],
"supported_exts": meta.get("supported_exts", []),
"config_schema": meta["config_schema"],
"produces_file": meta.get("produces_file", False),
"produces_file": meta.get("produces_file", False),
"module_path": meta.get("module_path"),
})
return success(out)
@@ -34,11 +42,75 @@ class ProcessRequest(BaseModel):
     overwrite: bool = False
+
+class UpdateSourceRequest(BaseModel):
+    source: str
+
 @router.post("/process")
 async def process_file_with_processor(
     current_user: Annotated[User, Depends(get_current_active_user)],
     req: ProcessRequest = Body(...)
 ):
-    save_to = req.path if req.overwrite else req.save_to
-    result = await process_file(req.path, req.processor_type, req.config, save_to)
-    return success(result)
+    is_dir = await path_is_directory(req.path)
+    if is_dir and not req.overwrite:
+        raise HTTPException(400, detail="Directory processing requires overwrite")
+    save_to = None if is_dir else (req.path if req.overwrite else req.save_to)
+    task = await task_queue_service.add_task(
+        "process_file",
+        {
+            "path": req.path,
+            "processor_type": req.processor_type,
+            "config": req.config,
+            "save_to": save_to,
+            "overwrite": req.overwrite,
+        },
+    )
+    return success({"task_id": task.id})
+
+@router.get("/source/{processor_type}")
+async def get_processor_source(
+    processor_type: str,
+    current_user: Annotated[User, Depends(get_current_active_user)],
+):
+    module_path = get_module_path(processor_type)
+    if not module_path:
+        raise HTTPException(404, detail="Processor not found")
+    path_obj = Path(module_path)
+    if not path_obj.exists():
+        raise HTTPException(404, detail="Processor source not found")
+    try:
+        content = await run_in_threadpool(path_obj.read_text, encoding='utf-8')
+    except Exception as exc:
+        raise HTTPException(500, detail=f"Failed to read source: {exc}")
+    return success({"source": content, "module_path": str(path_obj)})
+
+@router.put("/source/{processor_type}")
+async def update_processor_source(
+    processor_type: str,
+    req: UpdateSourceRequest,
+    current_user: Annotated[User, Depends(get_current_active_user)],
+):
+    module_path = get_module_path(processor_type)
+    if not module_path:
+        raise HTTPException(404, detail="Processor not found")
+    path_obj = Path(module_path)
+    if not path_obj.exists():
+        raise HTTPException(404, detail="Processor source not found")
+    try:
+        await run_in_threadpool(path_obj.write_text, req.source, encoding='utf-8')
+    except Exception as exc:
+        raise HTTPException(500, detail=f"Failed to write source: {exc}")
+    return success(True)
+
+@router.post("/reload")
+async def reload_processor_modules(
+    current_user: Annotated[User, Depends(get_current_active_user)],
+):
+    errors = reload_processors()
+    if errors:
+        raise HTTPException(500, detail="; ".join(errors))
+    return success(True)

api/routes/search.py

@@ -9,7 +9,7 @@ router = APIRouter(prefix="/api/search", tags=["search"])
 async def search_files_by_vector(q: str, top_k: int):
     embedding = await get_text_embedding(q)
     vector_db = VectorDBService()
-    results = vector_db.search_vectors("vector_collection", embedding, top_k)
+    results = await vector_db.search_vectors("vector_collection", embedding, top_k)
     items = [
         SearchResultItem(id=res["id"], path=res["entity"]["path"], score=res["distance"])
         for res in results[0]
@@ -18,7 +18,7 @@ async def search_files_by_vector(q: str, top_k: int):
 async def search_files_by_name(q: str, top_k: int):
     vector_db = VectorDBService()
-    results = vector_db.search_by_path("vector_collection", q, top_k)
+    results = await vector_db.search_by_path("vector_collection", q, top_k)
     items = [
         SearchResultItem(id=idx, path=res["entity"]["path"], score=res["distance"])
         for idx, res in enumerate(results[0])
@@ -38,4 +38,4 @@ async def search_files(
     elif mode == "filename":
         return await search_files_by_name(q, top_k)
     else:
-        return {"items": [], "query": q, "error": "Invalid search mode"}
+        return {"items": [], "query": q, "error": "Invalid search mode"}

api/routes/share.py

@@ -83,6 +83,18 @@ async def get_my_shares(current_user: User = Depends(get_current_active_user)):
     return [ShareInfo.from_orm(s) for s in shares]
+
+@router.delete("/expired")
+async def delete_expired_shares(
+    current_user: User = Depends(get_current_active_user),
+):
+    """
+    删除当前用户的所有已过期分享。
+    """
+    user_account = await UserAccount.get(id=current_user.id)
+    deleted_count = await share_service.delete_expired_shares(user=user_account)
+    return success({"deleted_count": deleted_count})
+
 @router.delete("/{share_id}")
 async def delete_share(
     share_id: int,

api/routes/tasks.py

@@ -2,10 +2,17 @@ from fastapi import APIRouter, Depends, HTTPException
 from typing import Annotated
 from models.database import AutomationTask
-from schemas.tasks import AutomationTaskCreate, AutomationTaskUpdate
+from schemas.tasks import (
+    AutomationTaskCreate,
+    AutomationTaskUpdate,
+    TaskQueueSettings,
+    TaskQueueSettingsResponse,
+)
 from api.response import success
 from services.auth import get_current_active_user, User
 from services.logging import LogService
+from services.task_queue import task_queue_service
+from services.config import ConfigCenter
 router = APIRouter(
     prefix="/api/tasks",
@@ -15,6 +22,56 @@ router = APIRouter(
 )
+
+@router.get("/queue")
+async def get_task_queue_status(
+    current_user: Annotated[User, Depends(get_current_active_user)],
+):
+    tasks = task_queue_service.get_all_tasks()
+    return success([task.dict() for task in tasks])
+
+@router.get("/queue/settings")
+async def get_task_queue_settings(
+    current_user: Annotated[User, Depends(get_current_active_user)],
+):
+    payload = TaskQueueSettingsResponse(
+        concurrency=task_queue_service.get_concurrency(),
+        active_workers=task_queue_service.get_active_worker_count(),
+    )
+    return success(payload.model_dump())
+
+@router.post("/queue/settings")
+async def update_task_queue_settings(
+    settings: TaskQueueSettings,
+    current_user: Annotated[User, Depends(get_current_active_user)],
+):
+    await task_queue_service.set_concurrency(settings.concurrency)
+    await ConfigCenter.set("TASK_QUEUE_CONCURRENCY", str(task_queue_service.get_concurrency()))
+    await LogService.action(
+        "route:tasks",
+        "Updated task queue settings",
+        details={"concurrency": settings.concurrency},
+        user_id=getattr(current_user, "id", None),
+    )
+    payload = TaskQueueSettingsResponse(
+        concurrency=task_queue_service.get_concurrency(),
+        active_workers=task_queue_service.get_active_worker_count(),
+    )
+    return success(payload.model_dump())
+
+@router.get("/queue/{task_id}")
+async def get_task_status(
+    task_id: str,
+    current_user: Annotated[User, Depends(get_current_active_user)],
+):
+    task = task_queue_service.get_task(task_id)
+    if not task:
+        raise HTTPException(status_code=404, detail="Task not found")
+    return success(task.dict())
+
 @router.post("/")
 async def create_task(
     task_in: AutomationTaskCreate,

api/routes/vector_db.py (new file, 100 lines)

@@ -0,0 +1,100 @@
from typing import Any, Dict
from fastapi import APIRouter, Depends, HTTPException
from pydantic import BaseModel, Field
from services.auth import get_current_active_user
from models.database import UserAccount
from services.vector_db import (
    VectorDBService,
    VectorDBConfigManager,
    list_providers,
    get_provider_entry,
)
from services.vector_db.providers import get_provider_class
from api.response import success

router = APIRouter(prefix="/api/vector-db", tags=["vector-db"])

class VectorDBConfigPayload(BaseModel):
    type: str = Field(..., description="向量数据库提供者类型")
    config: Dict[str, Any] = Field(default_factory=dict, description="提供者配置参数")

@router.post("/clear-all", summary="清空向量数据库")
async def clear_vector_db(user: UserAccount = Depends(get_current_active_user)):
    if user.username != 'admin':
        raise HTTPException(status_code=403, detail="仅管理员可操作")
    try:
        service = VectorDBService()
        await service.clear_all_data()
        return success(msg="向量数据库已清空")
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

@router.get("/stats", summary="获取向量数据库统计")
async def get_vector_db_stats(user: UserAccount = Depends(get_current_active_user)):
    if user.username != 'admin':
        raise HTTPException(status_code=403, detail="仅管理员可操作")
    try:
        service = VectorDBService()
        data = await service.get_all_stats()
        return success(data=data)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

@router.get("/providers", summary="列出可用向量数据库提供者")
async def list_vector_providers(user: UserAccount = Depends(get_current_active_user)):
    if user.username != 'admin':
        raise HTTPException(status_code=403, detail="仅管理员可操作")
    return success(list_providers())

@router.get("/config", summary="获取当前向量数据库配置")
async def get_vector_db_config(user: UserAccount = Depends(get_current_active_user)):
    if user.username != 'admin':
        raise HTTPException(status_code=403, detail="仅管理员可操作")
    service = VectorDBService()
    data = await service.current_provider()
    return success(data)

@router.post("/config", summary="更新向量数据库配置")
async def update_vector_db_config(payload: VectorDBConfigPayload, user: UserAccount = Depends(get_current_active_user)):
    if user.username != 'admin':
        raise HTTPException(status_code=403, detail="仅管理员可操作")
    entry = get_provider_entry(payload.type)
    if not entry:
        raise HTTPException(status_code=400, detail=f"未知的向量数据库类型: {payload.type}")
    if not entry.get("enabled", True):
        raise HTTPException(status_code=400, detail="该向量数据库类型暂不可用")
    provider_cls = get_provider_class(payload.type)
    if not provider_cls:
        raise HTTPException(status_code=400, detail=f"未找到类型 {payload.type} 对应的实现")
    # 先尝试建立连接,确保配置有效
    test_provider = provider_cls(payload.config)
    try:
        await test_provider.initialize()
    except Exception as exc:
        raise HTTPException(status_code=400, detail=str(exc))
    finally:
        client = getattr(test_provider, "client", None)
        close_fn = getattr(client, "close", None)
        if callable(close_fn):
            try:
                close_fn()
            except Exception:
                pass
    await VectorDBConfigManager.save_config(payload.type, payload.config)
    service = VectorDBService()
    await service.reload()
    config_data = await service.current_provider()
    stats = await service.get_all_stats()
    return success({"config": config_data, "stats": stats})

api/routes/virtual_fs.py

@@ -19,6 +19,7 @@ from services.virtual_fs import (
 from services.thumbnail import is_image_filename, get_or_create_thumb, is_raw_filename
 from schemas import MkdirRequest, MoveRequest
 from api.response import success
+from services.config import ConfigCenter
 router = APIRouter(prefix='/api/fs', tags=["virtual-fs"])
@@ -116,7 +117,7 @@ async def get_thumb(
if not is_image_filename(rel):
raise HTTPException(404, detail="Not an image")
# type: ignore
data, mime, key = await get_or_create_thumb(adapter, mount.adapter_id, root, rel, w, h, fit)
data, mime, key = await get_or_create_thumb(adapter, mount.id, root, rel, w, h, fit)
headers = {
'Cache-Control': 'public, max-age=3600',
'ETag': key,
@@ -151,7 +152,13 @@ async def get_temp_link(
"""获取文件的临时公开访问令牌"""
full_path = '/' + full_path if not full_path.startswith('/') else full_path
token = await generate_temp_link_token(full_path, expires_in=expires_in)
return success({"token": token, "path": full_path})
file_domain = await ConfigCenter.get("FILE_DOMAIN")
if file_domain:
file_domain = file_domain.rstrip('/')
url = f"{file_domain}/api/fs/public/{token}"
else:
url = f"/api/fs/public/{token}"
return success({"token": token, "path": full_path, "url": url})
@router.get("/public/{token}")
@@ -212,31 +219,41 @@ async def api_mkdir(
@router.post("/move")
async def api_move(
current_user: Annotated[User, Depends(get_current_active_user)],
body: MoveRequest
body: MoveRequest,
overwrite: bool = Query(False, description="Whether to allow overwriting an existing target"),
):
src = body.src if body.src.startswith('/') else '/' + body.src
dst = body.dst if body.dst.startswith('/') else '/' + body.dst
await move_path(src, dst)
return success({"moved": True, "src": src, "dst": dst})
debug_info = await move_path(src, dst, overwrite=overwrite, return_debug=True, allow_cross=True)
queued = bool(debug_info.get("queued"))
response = {
"moved": not queued,
"queued": queued,
"src": src,
"dst": dst,
"overwrite": overwrite,
}
if queued:
response["task_id"] = debug_info.get("task_id")
response["task_name"] = debug_info.get("task_name")
return success(response)
@router.post("/rename")
async def api_rename(
current_user: Annotated[User, Depends(get_current_active_user)],
body: MoveRequest,
overwrite: bool = Query(False, description="Whether to allow overwriting an existing target"),
debug: bool = Query(False, description="Return debug info")
overwrite: bool = Query(False, description="Whether to allow overwriting an existing target")
):
src = body.src if body.src.startswith('/') else '/' + body.src
dst = body.dst if body.dst.startswith('/') else '/' + body.dst
from services.virtual_fs import rename_path
debug_info = await rename_path(src, dst, overwrite=overwrite, return_debug=debug)
await rename_path(src, dst, overwrite=overwrite, return_debug=False)
return success({
"renamed": True,
"src": src,
"dst": dst,
"overwrite": overwrite,
**({"debug": debug_info} if debug else {})
})
@@ -245,19 +262,23 @@ async def api_copy(
current_user: Annotated[User, Depends(get_current_active_user)],
body: MoveRequest,
overwrite: bool = Query(False, description="Whether to overwrite an existing target"),
debug: bool = Query(False, description="Return debug info")
):
from services.virtual_fs import copy_path
src = body.src if body.src.startswith('/') else '/' + body.src
dst = body.dst if body.dst.startswith('/') else '/' + body.dst
debug_info = await copy_path(src, dst, overwrite=overwrite, return_debug=debug)
return success({
"copied": True,
debug_info = await copy_path(src, dst, overwrite=overwrite, return_debug=True, allow_cross=True)
queued = bool(debug_info.get("queued"))
response = {
"copied": not queued,
"queued": queued,
"src": src,
"dst": dst,
"overwrite": overwrite,
**({"debug": debug_info} if debug else {})
})
}
if queued:
response["task_id"] = debug_info.get("task_id")
response["task_name"] = debug_info.get("task_name")
return success(response)
@router.post("/upload/{full_path:path}")
@@ -299,10 +320,12 @@ async def browse_fs(
current_user: Annotated[User, Depends(get_current_active_user)],
full_path: str,
page_num: int = Query(1, alias="page", ge=1, description="Page number"),
page_size: int = Query(50, ge=1, le=500, description="Items per page")
page_size: int = Query(50, ge=1, le=500, description="Items per page"),
sort_by: str = Query("name", description="Sort by field: name, size, mtime"),
sort_order: str = Query("asc", description="Sort order: asc, desc")
):
full_path = '/' + full_path if not full_path.startswith('/') else full_path
result = await list_virtual_dir(full_path, page_num, page_size)
result = await list_virtual_dir(full_path, page_num, page_size, sort_by, sort_order)
return success({
"path": full_path,
"entries": result["items"],
@@ -329,6 +352,18 @@ async def api_delete(
async def root_listing(
current_user: Annotated[User, Depends(get_current_active_user)],
page_num: int = Query(1, alias="page", ge=1, description="Page number"),
page_size: int = Query(50, ge=1, le=500, description="Items per page")
page_size: int = Query(50, ge=1, le=500, description="Items per page"),
sort_by: str = Query("name", description="Sort by field: name, size, mtime"),
sort_order: str = Query("asc", description="Sort order: asc, desc")
):
return await browse_fs("", page_num, page_size)
result = await list_virtual_dir("/", page_num, page_size, sort_by, sort_order)
return success({
"path": "/",
"entries": result["items"],
"pagination": {
"total": result["total"],
"page": result["page"],
"page_size": result["page_size"],
"pages": result["pages"]
}
})
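
The move and copy endpoints now report whether the operation ran inline or was handed to the task queue: cross-adapter transfers come back with queued=True plus a task_id instead of an immediate moved/copied flag. A hedged client sketch (the URL, token, and the exact success() envelope are assumptions):

import httpx

BASE = "http://localhost:8000"  # assumed deployment URL
AUTH = {"Authorization": "Bearer <token>"}  # hypothetical credential

def move(src: str, dst: str, overwrite: bool = False) -> dict:
    resp = httpx.post(
        f"{BASE}/api/fs/move",
        params={"overwrite": overwrite},
        json={"src": src, "dst": dst},
        headers=AUTH,
    )
    resp.raise_for_status()
    data = resp.json()["data"]  # assumes success() nests the payload under "data"
    if data.get("queued"):
        # Cross-adapter move: track it via the returned task_id instead.
        print("queued as task", data.get("task_id"), data.get("task_name"))
    return data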

273
api/routes/webdav.py Normal file
View File

@@ -0,0 +1,273 @@
from __future__ import annotations
import base64
import hashlib
import mimetypes
from email.utils import formatdate
from urllib.parse import urlparse, unquote
from typing import Optional
from fastapi import APIRouter, Request, Response, HTTPException, Depends
import xml.etree.ElementTree as ET
from services.auth import authenticate_user_db, User, UserInDB
from services.virtual_fs import (
list_virtual_dir,
stat_file,
write_file_stream,
make_dir,
delete_path,
move_path,
copy_path,
stream_file,
)
router = APIRouter(prefix="/webdav", tags=["webdav"])
def _dav_headers(extra: Optional[dict] = None) -> dict:
headers = {
"DAV": "1",
"MS-Author-Via": "DAV",
"Accept-Ranges": "bytes",
"Allow": ", ".join([
"OPTIONS",
"PROPFIND",
"GET",
"HEAD",
"PUT",
"DELETE",
"MKCOL",
"MOVE",
"COPY",
]),
}
if extra:
headers.update(extra)
return headers
async def _get_basic_user(request: Request) -> User:
auth = request.headers.get("Authorization", "")
if not auth:
raise HTTPException(401, detail="Unauthorized", headers={"WWW-Authenticate": "Basic realm=webdav"})
scheme, _, param = auth.partition(" ")
scheme_lower = scheme.lower()
if scheme_lower == "basic":
try:
decoded = base64.b64decode(param).decode("utf-8")
username, _, password = decoded.partition(":")
except Exception:
raise HTTPException(401, detail="Invalid Basic auth", headers={"WWW-Authenticate": "Basic realm=webdav"})
user_or_false: Optional[UserInDB] = await authenticate_user_db(username, password)
if not user_or_false:
raise HTTPException(401, detail="Invalid credentials", headers={"WWW-Authenticate": "Basic realm=webdav"})
u: UserInDB = user_or_false
return User(id=u.id, username=u.username, email=u.email, full_name=u.full_name, disabled=u.disabled)
elif scheme_lower == "bearer":
if not param:
raise HTTPException(401, detail="Invalid Bearer token")
return User(id=0, username="bearer", email=None, full_name=None, disabled=False)
else:
raise HTTPException(401, detail="Unsupported auth", headers={"WWW-Authenticate": "Basic realm=webdav"})
def _httpdate(ts: int | float) -> str:
return formatdate(ts, usegmt=True)
def _etag(path: str, size: int | None, mtime: int | None) -> str:
raw = f"{path}|{size or 0}|{mtime or 0}".encode("utf-8")
return '"' + hashlib.md5(raw).hexdigest() + '"'
def _href_for(path: str, is_dir: bool) -> str:
from urllib.parse import quote
p = "/webdav" + (path if path.startswith("/") else "/" + path)
if is_dir and not p.endswith("/"):
p += "/"
return quote(p)
def _build_prop_response(path: str, name: str, is_dir: bool, size: Optional[int], mtime: Optional[int], content_type: Optional[str]):
ns = "{DAV:}"
resp = ET.Element(ns + "response")
href = ET.SubElement(resp, ns + "href")
href.text = _href_for(path, is_dir)
propstat = ET.SubElement(resp, ns + "propstat")
prop = ET.SubElement(propstat, ns + "prop")
displayname = ET.SubElement(prop, ns + "displayname")
displayname.text = name
resourcetype = ET.SubElement(prop, ns + "resourcetype")
if is_dir:
ET.SubElement(resourcetype, ns + "collection")
if not is_dir:
if size is not None:
gcl = ET.SubElement(prop, ns + "getcontentlength")
gcl.text = str(size)
if content_type:
gct = ET.SubElement(prop, ns + "getcontenttype")
gct.text = content_type
if mtime is not None:
glm = ET.SubElement(prop, ns + "getlastmodified")
glm.text = _httpdate(mtime)
etag = ET.SubElement(prop, ns + "getetag")
etag.text = _etag(path, size, mtime)
status = ET.SubElement(propstat, ns + "status")
status.text = "HTTP/1.1 200 OK"
return resp
def _multistatus_xml(responses: list[ET.Element]) -> bytes:
ns = "{DAV:}"
ms = ET.Element(ns + "multistatus")
for r in responses:
ms.append(r)
return ET.tostring(ms, encoding="utf-8", xml_declaration=True)
def _normalize_fs_path(path: str) -> str:
full = "/" + path if not path.startswith("/") else path
return unquote(full)
@router.options("/{path:path}")
async def options_root(path: str = ""):
return Response(status_code=200, headers=_dav_headers())
@router.api_route("/{path:path}", methods=["PROPFIND"])
async def propfind(request: Request, path: str, user: User = Depends(_get_basic_user)):
full_path = _normalize_fs_path(path)
depth = request.headers.get("Depth", "1").lower()
if depth not in ("0", "1", "infinity"):
depth = "1"
responses: list[ET.Element] = []
# Fetch the current path's info first
try:
st = await stat_file(full_path)
is_dir = bool(st.get("is_dir"))
name = st.get("name") or full_path.rsplit("/", 1)[-1] or "/"
size = None if is_dir else int(st.get("size", 0))
mtime = int(st.get("mtime", 0)) if st.get("mtime") is not None else None
ctype = None if is_dir else (mimetypes.guess_type(name)[0] or "application/octet-stream")
responses.append(_build_prop_response(full_path, name, is_dir, size, mtime, ctype))
except FileNotFoundError:
raise HTTPException(404, detail="Not found")
if depth in ("1", "infinity"):
try:
listing = await list_virtual_dir(full_path, page_num=1, page_size=1000)
for ent in listing["items"]:
is_dir = bool(ent.get("is_dir"))
name = ent.get("name")
child_path = full_path.rstrip("/") + "/" + name
size = None if is_dir else int(ent.get("size", 0))
mtime = int(ent.get("mtime", 0)) if ent.get("mtime") is not None else None
ctype = None if is_dir else (mimetypes.guess_type(name)[0] or "application/octet-stream")
responses.append(_build_prop_response(child_path, name, is_dir, size, mtime, ctype))
except HTTPException as e:
if e.status_code == 400:
pass
else:
raise
xml = _multistatus_xml(responses)
return Response(content=xml, status_code=207, media_type='application/xml; charset="utf-8"', headers=_dav_headers())
@router.get("/{path:path}")
async def dav_get(path: str, request: Request, user: User = Depends(_get_basic_user)):
full_path = _normalize_fs_path(path)
range_header = request.headers.get("Range")
return await stream_file(full_path, range_header)
@router.head("/{path:path}")
async def dav_head(path: str, user: User = Depends(_get_basic_user)):
full_path = _normalize_fs_path(path)
try:
st = await stat_file(full_path)
except FileNotFoundError:
raise HTTPException(404, detail="Not found")
is_dir = bool(st.get("is_dir"))
headers = _dav_headers()
if not is_dir:
size = int(st.get("size", 0))
name = st.get("name") or full_path.rsplit("/", 1)[-1]
ctype = mimetypes.guess_type(name)[0] or "application/octet-stream"
mtime = int(st.get("mtime", 0)) if st.get("mtime") is not None else None
headers.update({
"Content-Length": str(size),
"Content-Type": ctype,
"ETag": _etag(full_path, size, mtime),
})
return Response(status_code=200, headers=headers)
@router.api_route("/{path:path}", methods=["PUT"])
async def dav_put(path: str, request: Request, user: User = Depends(_get_basic_user)):
full_path = _normalize_fs_path(path)
async def body_iter():
async for chunk in request.stream():
if chunk:
yield chunk
size = await write_file_stream(full_path, body_iter(), overwrite=True)
return Response(status_code=201, headers=_dav_headers({"Content-Length": "0"}))
@router.api_route("/{path:path}", methods=["DELETE"])
async def dav_delete(path: str, user: User = Depends(_get_basic_user)):
full_path = _normalize_fs_path(path)
await delete_path(full_path)
return Response(status_code=204, headers=_dav_headers())
@router.api_route("/{path:path}", methods=["MKCOL"])
async def dav_mkcol(path: str, user: User = Depends(_get_basic_user)):
full_path = _normalize_fs_path(path)
await make_dir(full_path)
return Response(status_code=201, headers=_dav_headers())
def _parse_destination(dest: str) -> str:
if not dest:
raise HTTPException(400, detail="Missing Destination header")
p = urlparse(dest)
path = p.path if p.scheme else dest
if path.startswith("/webdav"):
rel = path[len("/webdav"):]
else:
rel = path
return _normalize_fs_path(rel)
@router.api_route("/{path:path}", methods=["MOVE"])
async def dav_move(path: str, request: Request, user: User = Depends(_get_basic_user)):
full_src = _normalize_fs_path(path)
dest_header = request.headers.get("Destination")
dst = _parse_destination(dest_header or "")
overwrite = request.headers.get("Overwrite", "T").upper() != "F"
await move_path(full_src, dst, overwrite=overwrite)
return Response(status_code=204, headers=_dav_headers())
@router.api_route("/{path:path}", methods=["COPY"])
async def dav_copy(path: str, request: Request, user: User = Depends(_get_basic_user)):
full_src = _normalize_fs_path(path)
dest_header = request.headers.get("Destination")
dst = _parse_destination(dest_header or "")
overwrite = request.headers.get("Overwrite", "T").upper() != "F"
await copy_path(full_src, dst, overwrite=overwrite)
return Response(status_code=201 if not overwrite else 204, headers=_dav_headers())
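
The router covers the core WebDAV verbs over the virtual filesystem, so standard clients can mount it directly. For a scripted check, PROPFIND with Depth: 1 returns a 207 multistatus listing the resource and its direct children; a minimal sketch using httpx (which accepts arbitrary HTTP methods), with hypothetical URL and credentials:

import httpx
import xml.etree.ElementTree as ET

BASE = "http://localhost:8000"  # assumed deployment URL

def webdav_list(path: str, username: str, password: str) -> list[str]:
    resp = httpx.request(
        "PROPFIND",
        f"{BASE}/webdav{path}",
        headers={"Depth": "1"},     # the resource itself plus direct children
        auth=(username, password),  # Basic auth, as the router expects
    )
    assert resp.status_code == 207  # multistatus
    tree = ET.fromstring(resp.content)
    return [el.text for el in tree.iter("{DAV:}href")]

# e.g. webdav_list("/", "admin", "<password>")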

View File

@@ -1,7 +1,7 @@
services:
foxel:
image: ghcr.io/drizzletime/foxel:latest
#image: ghcr.nju.edu.cn/drizzletime/foxel:latest #Users in mainland China can use this mirror image
#image: ghcr.nju.edu.cn/drizzletime/foxel:latest # Users in mainland China can use this mirror image
container_name: foxel
restart: unless-stopped
ports:

View File

@@ -1,4 +1,5 @@
#!/bin/bash
set -e
python migrate/run.py
nginx -g 'daemon off;' &
exec gunicorn -k uvicorn.workers.UvicornWorker -w 4 -b 0.0.0.0:8000 main:app
exec gunicorn -k uvicorn.workers.UvicornWorker -w 2 -b 0.0.0.0:8000 main:app

26
main.py
View File

@@ -1,31 +1,37 @@
import os
from services.config import VERSION, ConfigCenter
from services.adapters.registry import runtime_registry
from fastapi.middleware.cors import CORSMiddleware
from contextlib import asynccontextmanager
from db.session import close_db, init_db
from api.routers import include_routers
from fastapi import FastAPI
from services.middleware.logging_middleware import LoggingMiddleware
from services.middleware.exception_handler import global_exception_handler
from dotenv import load_dotenv
from services.task_queue import task_queue_service
load_dotenv()
from services.middleware.exception_handler import global_exception_handler
from services.middleware.logging_middleware import LoggingMiddleware
from fastapi import FastAPI, Request
from api.routers import include_routers
from db.session import close_db, init_db
from contextlib import asynccontextmanager
from fastapi.middleware.cors import CORSMiddleware
from services.adapters.registry import runtime_registry
@asynccontextmanager
async def lifespan(app: FastAPI):
os.makedirs("data/db", exist_ok=True)
await init_db()
await runtime_registry.refresh()
await ConfigCenter.set("APP_VERSION", VERSION)
await task_queue_service.start_worker()
try:
yield
finally:
await task_queue_service.stop_worker()
await close_db()
def create_app() -> FastAPI:
app = FastAPI(
title="Foxel",
description="AList-like virtual storage aggregator",
description="A highly extensible private cloud storage solution for individuals and teams",
lifespan=lifespan,
)
include_routers(app)

View File

@@ -1,3 +1,3 @@
from .database import StorageAdapter, Mount
from .database import StorageAdapter
__all__ = ["StorageAdapter", "Mount"]
__all__ = ["StorageAdapter"]

View File

@@ -8,25 +8,13 @@ class StorageAdapter(Model):
type = fields.CharField(max_length=30)
config = fields.JSONField()
enabled = fields.BooleanField(default=True)
mounts: fields.ReverseRelation["Mount"]
path = fields.CharField(max_length=255, unique=True)
sub_path = fields.CharField(max_length=1024, null=True)
class Meta:
table = "storage_adapters"
class Mount(Model):
id = fields.IntField(pk=True)
path = fields.CharField(max_length=255, unique=True)
sub_path = fields.CharField(max_length=1024, null=True)
adapter: fields.ForeignKeyRelation[StorageAdapter] = fields.ForeignKeyField(
"models.StorageAdapter", related_name="mounts", on_delete=fields.CASCADE
)
enabled = fields.BooleanField(default=True)
class Meta:
table = "mounts"
class UserAccount(Model):
id = fields.IntField(pk=True)
username = fields.CharField(max_length=50, unique=True)
@@ -93,3 +81,29 @@ class ShareLink(Model):
class Meta:
table = "share_links"
class Plugin(Model):
id = fields.IntField(pk=True)
url = fields.CharField(max_length=2048)
enabled = fields.BooleanField(default=True)
key = fields.CharField(max_length=100, null=True)
name = fields.CharField(max_length=255, null=True)
version = fields.CharField(max_length=50, null=True)
supported_exts = fields.JSONField(null=True)
default_bounds = fields.JSONField(null=True)
default_maximized = fields.BooleanField(null=True)
icon = fields.CharField(max_length=2048, null=True)
description = fields.TextField(null=True)
author = fields.CharField(max_length=255, null=True)
website = fields.CharField(max_length=2048, null=True)
github = fields.CharField(max_length=2048, null=True)
created_at = fields.DatetimeField(auto_now_add=True)
updated_at = fields.DatetimeField(auto_now=True)
class Meta:
table = "plugins"

View File

@@ -28,7 +28,7 @@ http {
listen 80;
server_name _;
location ~ ^/(api|docs) {
location ~ ^/(api|webdav|docs|openapi\.json$) {
proxy_pass http://127.0.0.1:8000;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
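
With /webdav and /openapi.json added to the location regex, those paths are proxied to the backend on :8000 instead of falling through to the static frontend. A hedged smoke test (assumes nginx is reachable locally on port 80):

import httpx

BASE = "http://localhost"  # assumed: nginx listening on port 80

for path in ("/api/fs", "/webdav/", "/docs", "/openapi.json"):
    r = httpx.get(f"{BASE}{path}")
    # Status codes will vary (401/404/200), but they should come from the
    # FastAPI process, not the static frontend fallback.
    print(path, r.status_code)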

95
pyproject.toml Normal file
View File

@@ -0,0 +1,95 @@
[project]
name = "foxel"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.13"
dependencies = [
"aioboto3==15.1.0",
"aiobotocore==2.24.0",
"aiofiles==24.1.0",
"aiohappyeyeballs==2.6.1",
"aiohttp==3.12.15",
"aioitertools==0.12.0",
"aiosignal==1.4.0",
"aiosqlite==0.21.0",
"annotated-types==0.7.0",
"anyio==4.10.0",
"asyncclick==8.2.2.2",
"attrs==25.3.0",
"bcrypt==4.3.0",
"boto3==1.39.11",
"botocore==1.39.11",
"certifi==2025.8.3",
"click==8.2.1",
"dictdiffer==0.9.0",
"dnspython==2.7.0",
"email-validator==2.2.0",
"fastapi==0.116.1",
"fastapi-cli==0.0.8",
"fastapi-cloud-cli==0.1.5",
"frozenlist==1.7.0",
"grpcio==1.74.0",
"h11==0.16.0",
"httpcore==1.0.9",
"httptools==0.6.4",
"httpx==0.28.1",
"idna==3.10",
"imageio==2.37.0",
"iso8601==2.1.0",
"jinja2==3.1.6",
"jmespath==1.0.1",
"markdown-it-py==4.0.0",
"markupsafe==3.0.2",
"mdurl==0.1.2",
"milvus-lite==2.5.1",
"multidict==6.6.4",
"numpy==2.3.2",
"pandas==2.3.1",
"passlib==1.7.4",
"pillow==11.3.0",
"propcache==0.3.2",
"protobuf==6.32.0",
"pyaes==1.6.1",
"pyasn1==0.6.1",
"pydantic==2.11.7",
"pydantic-core==2.33.2",
"pygments==2.19.2",
"pyjwt==2.10.1",
"pymilvus==2.6.0",
"pypika-tortoise==0.6.1",
"pysocks==1.7.1",
"python-dateutil==2.9.0.post0",
"python-dotenv==1.1.1",
"python-multipart==0.0.20",
"pytz==2025.2",
"pyyaml==6.0.2",
"qdrant-client==1.15.1",
"rawpy==0.25.1",
"rich==14.1.0",
"rich-toolkit==0.15.0",
"rignore==0.6.4",
"rsa==4.9.1",
"s3transfer==0.13.1",
"sentry-sdk==2.35.0",
"setuptools==80.9.0",
"shellingham==1.5.4",
"six==1.17.0",
"sniffio==1.3.1",
"starlette==0.47.2",
"telethon==1.40.0",
"tortoise-orm==0.25.1",
"tqdm==4.67.1",
"typer==0.16.0",
"typing-extensions==4.14.1",
"typing-inspection==0.4.1",
"tzdata==2025.2",
"ujson==5.10.0",
"urllib3==2.5.0",
"uvicorn==0.35.0",
"uvloop==0.21.0",
"watchfiles==1.1.0",
"websockets==15.0.1",
"wrapt==1.17.3",
"yarl==1.20.1",
]

View File

@@ -1,67 +0,0 @@
aiosqlite==0.21.0
annotated-types==0.7.0
anyio==4.10.0
bcrypt==4.3.0
certifi==2025.8.3
click==8.2.1
dnspython==2.7.0
email_validator==2.2.0
fastapi==0.116.1
fastapi-cli==0.0.8
fastapi-cloud-cli==0.1.5
grpcio==1.74.0
h11==0.16.0
httpcore==1.0.9
httptools==0.6.4
httpx==0.28.1
idna==3.10
imageio==2.37.0
iso8601==2.1.0
Jinja2==3.1.6
markdown-it-py==4.0.0
MarkupSafe==3.0.2
mdurl==0.1.2
milvus-lite==2.5.1
numpy==2.3.2
pandas==2.3.1
passlib==1.7.4
pillow==11.3.0
protobuf==6.32.0
pyaes==1.6.1
pyasn1==0.6.1
pydantic==2.11.7
pydantic_core==2.33.2
Pygments==2.19.2
PyJWT==2.10.1
pymilvus==2.6.0
pypika-tortoise==0.6.1
PySocks==1.7.1
python-dateutil==2.9.0.post0
python-dotenv==1.1.1
python-multipart==0.0.20
pytz==2025.2
PyYAML==6.0.2
rawpy==0.25.1
rich==14.1.0
rich-toolkit==0.15.0
rignore==0.6.4
rsa==4.9.1
sentry-sdk==2.35.0
setuptools==80.9.0
shellingham==1.5.4
six==1.17.0
sniffio==1.3.1
starlette==0.47.2
Telethon==1.40.0
tortoise-orm==0.25.1
tqdm==4.67.1
typer==0.16.0
typing-inspection==0.4.1
typing_extensions==4.14.1
tzdata==2025.2
ujson==5.10.0
urllib3==2.5.0
uvicorn==0.35.0
uvloop==0.21.0
watchfiles==1.1.0
websockets==15.0.1

View File

@@ -1,12 +1,12 @@
from schemas.plugins import PluginCreate, PluginOut
from .adapters import AdapterCreate, AdapterOut
from .mounts import MountCreate, MountOut
from .fs import MkdirRequest, MoveRequest
__all__ = [
"PluginOut"
"PluginCreate"
"AdapterCreate",
"AdapterOut",
"MountCreate",
"MountOut",
"MkdirRequest",
"MoveRequest",
]

View File

@@ -1,15 +1,17 @@
from typing import Dict, Optional
from pydantic import BaseModel, Field, validator
from pydantic import BaseModel, Field, field_validator
class AdapterCreate(BaseModel):
class AdapterBase(BaseModel):
name: str
type: str = Field(pattern=r"^[a-zA-Z0-9_]+$")
config: Dict = Field(default_factory=dict)
enabled: bool = True
mount_path: str
sub_path: Optional[str] = None
path: str = None
sub_path: Optional[str] = None
class AdapterCreate(AdapterBase):
@staticmethod
def normalize_mount_path(p: str) -> str:
p = p.strip()
@@ -18,15 +20,17 @@ class AdapterCreate(BaseModel):
p = p.rstrip('/')
return p or '/'
@validator("mount_path")
@field_validator("path")
def _v_mount(cls, v: str):
if not v:
raise ValueError("mount_path required")
return cls.normalize_mount_path(v)
class AdapterOut(AdapterCreate):
class AdapterOut(AdapterBase):
id: int
path: str = None
sub_path: Optional[str] = None
class Config:
from_attributes = True

View File

@@ -1,23 +0,0 @@
from typing import Optional
from pydantic import BaseModel
class MountCreate(BaseModel):
path: str
adapter_id: int
sub_path: Optional[str] = None
enabled: bool = True
@staticmethod
def normalize(path: str) -> str:
return (path if path.startswith('/') else '/' + path).rstrip('/') or '/'
def model_post_init(self, __context):
self.path = self.normalize(self.path)
class MountOut(MountCreate):
id: int
class Config:
from_attributes = True

View File

@@ -0,0 +1,7 @@
from pydantic import BaseModel, HttpUrl, Field
class OfflineDownloadCreate(BaseModel):
url: HttpUrl
dest_dir: str = Field(..., min_length=1)
filename: str = Field(..., min_length=1)
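
The schema front-loads validation: HttpUrl parses and rejects malformed source URLs before a download ever reaches the queue. A self-contained illustration of that behavior under pydantic v2 (the sample URL and paths are made up):

from pydantic import BaseModel, HttpUrl, Field, ValidationError

class OfflineDownloadCreate(BaseModel):
    url: HttpUrl
    dest_dir: str = Field(..., min_length=1)
    filename: str = Field(..., min_length=1)

req = OfflineDownloadCreate(url="https://example.com/a.iso", dest_dir="/downloads", filename="a.iso")
print(req.url.host)  # "example.com" -- the URL is parsed, not just stored

try:
    OfflineDownloadCreate(url="not-a-url", dest_dir="/downloads", filename="a.iso")
except ValidationError as e:
    print(e.error_count(), "validation error")  # rejected before hitting the queue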

27
schemas/plugins.py Normal file
View File

@@ -0,0 +1,27 @@
from typing import List, Optional, Dict, Any
from pydantic import BaseModel, Field
class PluginCreate(BaseModel):
url: str = Field(min_length=1)
enabled: bool = True
class PluginOut(BaseModel):
id: int
url: str
enabled: bool
key: Optional[str]
name: Optional[str]
version: Optional[str]
supported_exts: Optional[List[str]]
default_bounds: Optional[Dict[str, Any]]
default_maximized: Optional[bool]
icon: Optional[str]
description: Optional[str]
author: Optional[str]
website: Optional[str]
github: Optional[str]
class Config:
from_attributes = True

View File

@@ -1,4 +1,4 @@
from pydantic import BaseModel
from pydantic import BaseModel, Field
from typing import Optional, Dict, Any
@@ -29,3 +29,11 @@ class AutomationTaskRead(AutomationTaskBase):
class Config:
from_attributes = True
class TaskQueueSettings(BaseModel):
concurrency: int = Field(..., ge=1, description="Desired number of concurrent task workers")
class TaskQueueSettingsResponse(TaskQueueSettings):
active_workers: int = Field(..., ge=0, description="Currently running worker count")

View File

@@ -10,7 +10,7 @@ from models import StorageAdapter
@runtime_checkable
class BaseAdapter(Protocol):
record: StorageAdapter
async def list_dir(self, root: str, rel: str, page_num: int = 1, page_size: int = 50) -> Tuple[List[Dict], int]: ...
async def list_dir(self, root: str, rel: str, page_num: int = 1, page_size: int = 50, sort_by: str = "name", sort_order: str = "asc") -> Tuple[List[Dict], int]: ...
async def read_file(self, root: str, rel: str) -> bytes: ...
async def write_file(self, root: str, rel: str, data: bytes): ...
async def write_file_stream(self, root: str, rel: str, data_iter: AsyncIterator[bytes]): ...
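
Since BaseAdapter is a runtime_checkable Protocol, adapters register by shape rather than inheritance; isinstance() only verifies that the named methods exist, not their signatures. A self-contained sketch (the InMemoryAdapter class is hypothetical, not part of this diff):

from typing import Dict, List, Protocol, Tuple, runtime_checkable

@runtime_checkable
class _Adapter(Protocol):
    async def list_dir(self, root: str, rel: str, page_num: int = 1, page_size: int = 50,
                       sort_by: str = "name", sort_order: str = "asc") -> Tuple[List[Dict], int]: ...

class InMemoryAdapter:  # no inheritance needed
    async def list_dir(self, root, rel, page_num=1, page_size=50,
                       sort_by="name", sort_order="asc"):
        return [{"name": "demo.txt", "is_dir": False}], 1

assert isinstance(InMemoryAdapter(), _Adapter)  # structural check: method presence only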

View File

@@ -46,25 +46,18 @@ class LocalAdapter:
return str(Path(root) / sub_path)
return root
async def list_dir(self, root: str, rel: str, page_num: int = 1, page_size: int = 50) -> Tuple[List[Dict], int]:
async def list_dir(self, root: str, rel: str, page_num: int = 1, page_size: int = 50, sort_by: str = "name", sort_order: str = "asc") -> Tuple[List[Dict], int]:
rel = rel.strip('/')
base = _safe_join(root, rel) if rel else Path(root)
if not base.exists():
return [], 0
if not base.is_dir():
raise NotADirectoryError(rel)
# Fetch all file names and sort them
all_names = await asyncio.to_thread(lambda: sorted(os.listdir(base), key=str.lower))
total_count = len(all_names)
# Compute the pagination range
start_idx = (page_num - 1) * page_size
end_idx = start_idx + page_size
page_names = all_names[start_idx:end_idx]
all_names = await asyncio.to_thread(os.listdir, base)
entries = []
for name in page_names:
for name in all_names:
fp = base / name
try:
st = await asyncio.to_thread(fp.stat)
@@ -79,10 +72,35 @@ class LocalAdapter:
"mode": stat.S_IMODE(st.st_mode),
"type": "dir" if is_dir else "file",
})
# Sort
reverse = sort_order.lower() == "desc"
# Sort with directories first
entries.sort(key=lambda x: (not x["is_dir"], x["name"].lower()))
return entries, total_count
def get_sort_key(item):
# Base sort key: directories first
key = (not item["is_dir"],)
sort_field = sort_by.lower()
if sort_field == "name":
key += (item["name"].lower(),)
elif sort_field == "size":
key += (item["size"],)
elif sort_field == "mtime":
key += (item["mtime"],)
else:  # default: sort by name
key += (item["name"].lower(),)
return key
entries.sort(key=get_sort_key, reverse=reverse)
total_count = len(entries)
# Pagination
start_idx = (page_num - 1) * page_size
end_idx = start_idx + page_size
page_entries = entries[start_idx:end_idx]
return page_entries, total_count
async def read_file(self, root: str, rel: str) -> bytes:
fp = _safe_join(root, rel)
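
The composite key above starts with not item["is_dir"], so directories (False) sort ahead of files (True) before the chosen field breaks ties; note that reverse=True flips the whole tuple comparison, including that directories-first grouping. A standalone illustration with made-up entries:

entries = [
    {"name": "b.txt", "is_dir": False, "size": 10, "mtime": 2},
    {"name": "A",     "is_dir": True,  "size": 0,  "mtime": 1},
    {"name": "a.txt", "is_dir": False, "size": 5,  "mtime": 3},
]

def get_sort_key(item, sort_by="name"):
    key = (not item["is_dir"],)          # False (dirs) sorts before True (files)
    if sort_by in ("size", "mtime"):
        key += (item[sort_by],)
    else:
        key += (item["name"].lower(),)
    return key

entries.sort(key=lambda x: get_sort_key(x, "name"))
print([e["name"] for e in entries])  # ['A', 'a.txt', 'b.txt']
# reverse=True would also flip the directories-first grouping,
# since sort() reverses the entire key-tuple comparison.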

View File

@@ -0,0 +1,440 @@
from __future__ import annotations
from datetime import datetime, timezone, timedelta
from typing import List, Dict, Tuple, AsyncIterator
import httpx
from fastapi.responses import StreamingResponse
from fastapi import HTTPException
from models import StorageAdapter
MS_GRAPH_URL = "https://graph.microsoft.com/v1.0"
MS_OAUTH_URL = "https://login.microsoftonline.com/common/oauth2/v2.0/token"
class OneDriveAdapter:
"""OneDrive 存储适配器"""
def __init__(self, record: StorageAdapter):
self.record = record
cfg = record.config
self.client_id = cfg.get("client_id")
self.client_secret = cfg.get("client_secret")
self.refresh_token = cfg.get("refresh_token")
self.root = cfg.get("root", "/").strip("/")
if not all([self.client_id, self.client_secret, self.refresh_token]):
raise ValueError(
"OneDrive adapter requires client_id, client_secret, and refresh_token")
self._access_token: str | None = None
self._token_expiry: datetime | None = None
def get_effective_root(self, sub_path: str | None) -> str:
"""
获取有效根路径。
:param sub_path: 子路径。
:return: 完整的有效路径。
"""
if sub_path:
return f"/{self.root.strip('/')}/{sub_path.strip('/')}".strip()
return f"/{self.root.strip('/')}".strip()
def _get_api_path(self, rel_path: str) -> str:
"""
将用户可见的相对路径转换为 Graph API 路径段。
:param rel_path: 相对路径。
:return: Graph API 路径段。
"""
full_path = self.get_effective_root(rel_path).strip('/')
if not full_path:
return ""
return f":/{full_path}"
async def _get_access_token(self) -> str:
"""
获取或刷新 access token。
:return: access token。
"""
if self._access_token and self._token_expiry and datetime.now(timezone.utc) < self._token_expiry:
return self._access_token
data = {
"client_id": self.client_id,
"client_secret": self.client_secret,
"refresh_token": self.refresh_token,
"grant_type": "refresh_token",
}
async with httpx.AsyncClient(timeout=20.0) as client:
resp = await client.post(MS_OAUTH_URL, data=data)
resp.raise_for_status()
token_data = resp.json()
self._access_token = token_data["access_token"]
self._token_expiry = datetime.now(
timezone.utc) + timedelta(seconds=token_data["expires_in"] - 300)
return self._access_token
async def _request(self, method: str, api_path_segment: str | None = None, *, full_url: str | None = None, **kwargs):
"""
向 Microsoft Graph API 发送请求。
:param method: HTTP 方法。
:param api_path_segment: API 路径段 (与 full_url 互斥)。
:param full_url: 完整的请求 URL (与 api_path_segment 互斥)。
:param kwargs: 其他请求参数。
:return: 响应对象。
"""
if not ((api_path_segment is not None) ^ (full_url is not None)):
raise ValueError("必须提供 api_path_segment 或 full_url 中的一个,且仅一个")
token = await self._get_access_token()
headers = {"Authorization": f"Bearer {token}"}
if "headers" in kwargs:
headers.update(kwargs.pop("headers"))
url = full_url if full_url else f"{MS_GRAPH_URL}/me/drive/root{api_path_segment}"
async with httpx.AsyncClient(timeout=60.0) as client:
resp = await client.request(method, url, headers=headers, **kwargs)
if resp.status_code == 401:
self._access_token = None
token = await self._get_access_token()
headers["Authorization"] = f"Bearer {token}"
resp = await client.request(method, url, headers=headers, **kwargs)
return resp
def _format_item(self, item: Dict) -> Dict:
"""
将 Graph API 返回的 item 格式化为统一的格式。
:param item: Graph API 返回的 item 字典。
:return: 格式化后的字典。
"""
is_dir = "folder" in item
return {
"name": item["name"],
"is_dir": is_dir,
"size": 0 if is_dir else item.get("size", 0),
"mtime": int(datetime.fromisoformat(item["lastModifiedDateTime"].replace("Z", "+00:00")).timestamp()),
"type": "dir" if is_dir else "file",
}
async def list_dir(self, root: str, rel: str, page_num: int = 1, page_size: int = 50, sort_by: str = "name", sort_order: str = "asc") -> Tuple[List[Dict], int]:
"""
列出目录内容。
由于 Graph API 不支持基于偏移($skip)的分页,此方法将获取所有项目,
:param root: 根路径 (在此适配器中未使用,通过配置的 root 确定)。
:param rel: 相对路径。
:param page_num: 页码。
:param page_size: 每页大小。
:param sort_by: 排序字段
:param sort_order: 排序顺序
:return: 文件/目录列表和总数。
"""
api_path = self._get_api_path(rel)
children_path = f"{api_path}:/children" if api_path else "/children"
all_items = []
params = {"$top": 999}
resp = await self._request("GET", api_path_segment=children_path, params=params)
while True:
if resp.status_code == 404 and not all_items:
return [], 0
resp.raise_for_status()
try:
data = resp.json()
except Exception as e:
raise IOError(f"解析 Graph API 响应失败: {e}") from e
all_items.extend(data.get("value", []))
next_link = data.get("@odata.nextLink")
if not next_link:
break
resp = await self._request("GET", full_url=next_link)
formatted_items = [self._format_item(item) for item in all_items]
# Sort
reverse = sort_order.lower() == "desc"
def get_sort_key(item):
key = (not item["is_dir"],)
sort_field = sort_by.lower()
if sort_field == "name":
key += (item["name"].lower(),)
elif sort_field == "size":
key += (item["size"],)
elif sort_field == "mtime":
key += (item["mtime"],)
else:
key += (item["name"].lower(),)
return key
formatted_items.sort(key=get_sort_key, reverse=reverse)
total_count = len(formatted_items)
start_idx = (page_num - 1) * page_size
end_idx = start_idx + page_size
return formatted_items[start_idx:end_idx], total_count
async def read_file(self, root: str, rel: str) -> bytes:
"""
读取文件内容。
:param root: 根路径。
:param rel: 相对路径。
:return: 文件内容的字节流。
"""
api_path = self._get_api_path(rel)
if not api_path:
raise IsADirectoryError("不能将根目录作为文件读取")
resp = await self._request("GET", api_path_segment=f"{api_path}:/content")
if resp.status_code == 404:
raise FileNotFoundError(rel)
resp.raise_for_status()
return resp.content
async def write_file(self, root: str, rel: str, data: bytes):
"""
写入文件。
:param root: 根路径。
:param rel: 相对路径。
:param data: 文件内容的字节流。
"""
api_path = self._get_api_path(rel)
if not api_path:
raise ValueError("不能直接写入根路径")
resp = await self._request("PUT", api_path_segment=f"{api_path}:/content", content=data)
resp.raise_for_status()
async def write_file_stream(self, root: str, rel: str, data_iter: AsyncIterator[bytes]):
"""
以流式方式写入文件。
:param root: 根路径。
:param rel: 相对路径。
:param data_iter: 文件内容的异步迭代器。
:return: 文件大小。
"""
api_path = self._get_api_path(rel)
if not api_path:
raise ValueError("不能直接写入根路径")
resp = await self._request("PUT", api_path_segment=f"{api_path}:/content", content=data_iter)
resp.raise_for_status()
return resp.json().get("size", 0)
async def mkdir(self, root: str, rel: str):
"""
创建目录。
:param root: 根路径。
:param rel: 相对路径。
"""
parent_path_str, new_dir_name = rel.rstrip(
'/').rsplit('/', 1) if '/' in rel.rstrip('/') else ('', rel)
parent_api_path = self._get_api_path(parent_path_str)
children_path = f"{parent_api_path}:/children" if parent_api_path else "/children"
payload = {
"name": new_dir_name,
"folder": {},
"@microsoft.graph.conflictBehavior": "fail" # 如果已存在则失败
}
resp = await self._request("POST", api_path_segment=children_path, json=payload)
resp.raise_for_status()
async def delete(self, root: str, rel: str):
"""
删除文件或目录。
:param root: 根路径。
:param rel: 相对路径。
"""
api_path = self._get_api_path(rel)
if not api_path:
raise ValueError("不能删除根目录")
resp = await self._request("DELETE", api_path_segment=api_path)
if resp.status_code not in (204, 404):
resp.raise_for_status()
async def move(self, root: str, src_rel: str, dst_rel: str):
"""
移动或重命名文件/目录。
:param root: 根路径。
:param src_rel: 源相对路径。
:param dst_rel: 目标相对路径。
"""
src_api_path = self._get_api_path(src_rel)
if not src_api_path:
raise ValueError("不能移动根目录")
dst_parent_rel, dst_name = dst_rel.rstrip(
'/').rsplit('/', 1) if '/' in dst_rel.rstrip('/') else ('', dst_rel)
dst_parent_api_path = self._get_api_path(dst_parent_rel)
# Get the ID of the parent item
parent_resp = await self._request("GET", api_path_segment=dst_parent_api_path)
parent_resp.raise_for_status()
parent_id = parent_resp.json()["id"]
payload = {
"parentReference": {"id": parent_id},
"name": dst_name
}
resp = await self._request("PATCH", api_path_segment=src_api_path, json=payload)
resp.raise_for_status()
async def rename(self, root: str, src_rel: str, dst_rel: str):
"""
重命名文件或目录。
在 Graph API 中,移动和重命名是同一个 PATCH 操作。
"""
await self.move(root, src_rel, dst_rel)
async def copy(self, root: str, src_rel: str, dst_rel: str, overwrite: bool = False):
"""
复制文件或目录。
:param root: 根路径。
:param src_rel: 源相对路径。
:param dst_rel: 目标相对路径。
:param overwrite: 是否覆盖 (在此 API 中未直接使用)。
"""
src_api_path = self._get_api_path(src_rel)
if not src_api_path:
raise ValueError("不能复制根目录")
dst_parent_rel, dst_name = dst_rel.rstrip(
'/').rsplit('/', 1) if '/' in dst_rel.rstrip('/') else ('', dst_rel)
dst_parent_api_path = self._get_api_path(dst_parent_rel)
parent_resp = await self._request("GET", api_path_segment=dst_parent_api_path)
parent_resp.raise_for_status()
parent_id = parent_resp.json()["id"]
payload = {"parentReference": {"id": parent_id}, "name": dst_name}
copy_path = f"{src_api_path}:/copy"
resp = await self._request("POST", api_path_segment=copy_path, json=payload)
resp.raise_for_status()
async def stream_file(self, root: str, rel: str, range_header: str | None):
"""
流式传输文件(支持范围请求)。
:param root: 根路径。
:param rel: 相对路径。
:param range_header: HTTP Range 头。
:return: FastAPI StreamingResponse 对象。
"""
api_path = self._get_api_path(rel)
if not api_path:
raise IsADirectoryError("不能对目录进行流式传输")
resp = await self._request("GET", api_path_segment=api_path)
if resp.status_code == 404:
raise FileNotFoundError(rel)
resp.raise_for_status()
item_data = resp.json()
download_url = item_data.get("@microsoft.graph.downloadUrl")
if not download_url:
raise Exception("无法获取下载 URL")
file_size = item_data.get("size", 0)
content_type = item_data.get("file", {}).get(
"mimeType", "application/octet-stream")
start = 0
end = file_size - 1
status = 200
headers = {
"Accept-Ranges": "bytes",
"Content-Type": content_type,
"Content-Disposition": f"inline; filename=\"{item_data.get('name')}\""
}
if range_header and range_header.startswith("bytes="):
try:
part = range_header.removeprefix("bytes=")
s, e = part.split("-", 1)
if s.strip():
start = int(s)
if e.strip():
end = int(e)
if start >= file_size:
raise HTTPException(416, "Requested Range Not Satisfiable")
if end >= file_size:
end = file_size - 1
status = 206
except ValueError:
raise HTTPException(400, "Invalid Range header")
headers["Content-Range"] = f"bytes {start}-{end}/{file_size}"
headers["Content-Length"] = str(end - start + 1)
else:
headers["Content-Length"] = str(file_size)
async def file_iterator():
nonlocal start, end
async with httpx.AsyncClient(timeout=60.0) as client:
req_headers = {'Range': f'bytes={start}-{end}'}
async with client.stream("GET", download_url, headers=req_headers) as stream_resp:
stream_resp.raise_for_status()
async for chunk in stream_resp.aiter_bytes():
yield chunk
return StreamingResponse(file_iterator(), status_code=status, headers=headers, media_type=content_type)
async def get_thumbnail(self, root: str, rel: str, size: str = "medium"):
"""
获取文件的缩略图。
:param root: 根路径。
:param rel: 相对路径。
:param size: 缩略图大小 (large, medium, small)。
:return: 缩略图内容的字节流,或在不支持时返回 None。
"""
api_path = self._get_api_path(rel)
if not api_path:
return None
thumb_path = f"{api_path}:/thumbnails/0/{size}"
try:
resp = await self._request("GET", api_path_segment=thumb_path)
if resp.status_code == 200:
thumb_data = resp.json()
async with httpx.AsyncClient(timeout=30.0) as client:
thumb_resp = await client.get(thumb_data['url'])
thumb_resp.raise_for_status()
return thumb_resp.content
elif resp.status_code == 404:
return None
else:
resp.raise_for_status()
except Exception:
return None
async def stat_file(self, root: str, rel: str):
"""
获取文件或目录的元数据。
:param root: 根路径。
:param rel: 相对路径。
:return: 格式化后的文件/目录信息。
"""
api_path = self._get_api_path(rel)
resp = await self._request("GET", api_path_segment=api_path)
if resp.status_code == 404:
raise FileNotFoundError(rel)
resp.raise_for_status()
return self._format_item(resp.json())
ADAPTER_TYPE = "OneDrive"
CONFIG_SCHEMA = [
{"key": "client_id", "label": "Client ID", "type": "string", "required": True},
{"key": "client_secret", "label": "Client Secret",
"type": "password", "required": True},
{"key": "refresh_token", "label": "Refresh Token", "type": "password",
"required": True, "help_text": "可以通过运行 'python -m services.adapters.onedrive' 获取"},
{"key": "root", "label": "根目录 (Root Path)", "type": "string",
"required": False, "placeholder": "默认为根目录 /"},
]
def ADAPTER_FACTORY(rec): return OneDriveAdapter(rec)
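
The adapter never performs the interactive OAuth step itself; it only refreshes. One hedged way to mint the initial refresh_token is the standard Microsoft identity authorization-code exchange (the redirect URI and scope below are assumptions tied to your own app registration, not part of this diff):

import httpx

TOKEN_URL = "https://login.microsoftonline.com/common/oauth2/v2.0/token"

def exchange_code_for_refresh_token(client_id: str, client_secret: str,
                                    code: str, redirect_uri: str) -> str:
    # Exchange the one-time authorization code for tokens; the response
    # includes refresh_token when "offline_access" was in the requested scope.
    resp = httpx.post(TOKEN_URL, data={
        "client_id": client_id,
        "client_secret": client_secret,
        "grant_type": "authorization_code",
        "code": code,
        "redirect_uri": redirect_uri,  # must match the app registration
        "scope": "offline_access Files.ReadWrite.All",  # assumed scope
    })
    resp.raise_for_status()
    return resp.json()["refresh_token"]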

724
services/adapters/quark.py Normal file
View File

@@ -0,0 +1,724 @@
from __future__ import annotations
import asyncio
import base64
import hashlib
import mimetypes
import os
import time
from typing import Dict, List, Tuple, Optional, AsyncIterator, Any
import httpx
from fastapi import HTTPException
from fastapi.responses import StreamingResponse
from models import StorageAdapter
from .base import BaseAdapter
# Quark standard (UC) endpoints
API_BASE = "https://drive.quark.cn/1/clouddrive"
REFERER = "https://pan.quark.cn"
PR = "ucpro"
class QuarkAdapter:
"""Quark Drive (Cookie mode)
- Authenticates with a cookie exported from the browser
- Implements directory listing, read/write, multipart upload, and basic operations via the Quark/UC clouddrive API
- The root FID is fixed to "0"; paths are resolved by walking names
"""
def __init__(self, record: StorageAdapter):
self.record = record
cfg = record.config or {}
self.cookie: str = cfg.get("cookie") or cfg.get("Cookie")
self.root_fid: str = cfg.get("root_fid", "0")
self.use_transcoding_address: bool = bool(cfg.get("use_transcoding_address", False))
self.only_list_video_file: bool = bool(cfg.get("only_list_video_file", False))
if not self.cookie:
raise ValueError("Quark 适配器需要 cookie 配置")
# 运行期缓存
self._dir_fid_cache: Dict[str, str] = {f"{self.root_fid}:": self.root_fid}
self._children_cache: Dict[str, List[Dict[str, Any]]] = {}
# UA 与超时
self._ua = (
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
"(KHTML, like Gecko) quark-cloud-drive/2.5.20 Chrome/100.0.4896.160 "
"Electron/18.3.5.4-b478491100 Safari/537.36 Channel/pckk_other_ch"
)
self._timeout = 30.0
# -----------------
# Utilities and generic requests
# -----------------
def get_effective_root(self, sub_path: str | None) -> str:
return self.root_fid
async def _request(
self,
method: str,
pathname: str,
*,
json: Any | None = None,
params: Dict[str, str] | None = None,
) -> Any:
headers = {
"Cookie": self._safe_cookie(self.cookie),
"Accept": "application/json, text/plain, */*",
"Referer": REFERER,
"User-Agent": self._ua,
}
query = {"pr": PR, "fr": "pc"}
if params:
query.update(params)
url = f"{API_BASE}{pathname}"
async with httpx.AsyncClient(timeout=self._timeout) as client:
resp = await client.request(method, url, headers=headers, params=query, json=json)
# Update the runtime cookie if __puus/__pus is returned
try:
for key in ("__puus", "__pus"):
v = resp.cookies.get(key)
if v:
# Simple replace/append into self.cookie
self._set_cookie_kv(key, v)
except Exception:
pass
# Parse the business-level status
data = None
try:
data = resp.json()
except Exception:
resp.raise_for_status()
return resp
status = data.get("status")
code = data.get("code")
msg = data.get("message") or ""
if (status is not None and status >= 400) or (code is not None and code != 0):
raise HTTPException(502, detail=f"Quark error status={status} code={code} msg={msg}")
return data
def _set_cookie_kv(self, key: str, value: str):
# Write the given key/value into self.cookie (rough string handling)
parts = [p.strip() for p in (self.cookie or "").replace("\r", "").replace("\n", "").split(";") if p.strip()]
found = False
for i, p in enumerate(parts):
if p.startswith(key + "="):
parts[i] = f"{key}={value}"
found = True
break
if not found:
parts.append(f"{key}={value}")
self.cookie = "; ".join(parts)
def _sanitize_cookie(self, cookie: str) -> str:
if not cookie:
return ""
# Strip newlines and leading/trailing whitespace
cookie = cookie.replace("\r", "").replace("\n", "").strip()
# Normalize semicolon separation and drop extra spaces/empty segments
parts = [p.strip() for p in cookie.split(";") if p.strip()]
return "; ".join(parts)
def _safe_cookie(self, cookie: str) -> str:
s = self._sanitize_cookie(cookie)
# Keep only printable ASCII (0x20-0x7E)
s = "".join(ch for ch in s if 32 <= ord(ch) <= 126)
return s
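# Illustration with hypothetical values: a cookie pasted from browser devtools
# often carries newlines and stray empty segments; _safe_cookie collapses it
# into a single header-safe value:
#   self._safe_cookie("__pus=abc;\n __puus=x ;;")  ->  "__pus=abc; __puus=x"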
# -----------------
# Listing and path resolution
# -----------------
def _map_file_item(self, it: Dict[str, Any]) -> Dict[str, Any]:
# Quark/UC list item: file=true means a file, false means a directory
is_dir = not bool(it.get("file", False))
updated_at_ms = int(it.get("updated_at", 0) or 0)
name = it.get("file_name") or it.get("filename") or it.get("name")
return {
"fid": it.get("fid"),
"name": name,
"is_dir": is_dir,
"size": 0 if is_dir else int(it.get("size", 0) or 0),
"mtime": updated_at_ms // 1000 if updated_at_ms else 0,
"type": "dir" if is_dir else "file",
}
async def _list_children(self, parent_fid: str) -> List[Dict[str, Any]]:
if parent_fid in self._children_cache:
return self._children_cache[parent_fid]
files: List[Dict[str, Any]] = []
page = 1
size = 100
total = None
while True:
qp = {"pdir_fid": parent_fid, "_size": str(size), "_page": str(page), "_fetch_total": "1"}
data = await self._request("GET", "/file/sort", params=qp)
d = (data or {}).get("data", {})
meta = (data or {}).get("metadata", {})
page_files = d.get("list", [])
files.extend(page_files)
if total is None:
total = meta.get("_total") or meta.get("total") or 0
if page * size >= int(total):
break
page += 1
mapped = [self._map_file_item(x) for x in files if (not self.only_list_video_file) or (not x.get("file")) or (x.get("category") == 1)]
self._children_cache[parent_fid] = mapped
return mapped
def _dir_cache_key(self, base_fid: str, rel: str) -> str:
return f"{base_fid}:{rel.strip('/')}"
async def _resolve_dir_fid_from(self, base_fid: str, rel: str) -> str:
key = rel.strip("/")
cache_key = self._dir_cache_key(base_fid, key)
if cache_key in self._dir_fid_cache:
return self._dir_fid_cache[cache_key]
if key == "":
self._dir_fid_cache[cache_key] = base_fid
return base_fid
parent_fid = base_fid
path_so_far = []
for seg in key.split("/"):
if seg == "":
continue
path_so_far.append(seg)
cache_key = self._dir_cache_key(base_fid, "/".join(path_so_far))
cached = self._dir_fid_cache.get(cache_key)
if cached:
parent_fid = cached
continue
children = await self._list_children(parent_fid)
found = next((c for c in children if c["is_dir"] and c["name"] == seg), None)
if not found:
raise FileNotFoundError(f"Directory not found: {seg}")
parent_fid = found["fid"]
self._dir_fid_cache[cache_key] = parent_fid
return parent_fid
async def _find_child(self, parent_fid: str, name: str) -> Optional[Dict[str, Any]]:
children = await self._list_children(parent_fid)
for it in children:
if it["name"] == name:
return it
return None
def _invalidate_children_cache(self, parent_fid: str):
if parent_fid in self._children_cache:
try:
del self._children_cache[parent_fid]
except Exception:
pass
# -----------------
# Directory and file listing
# -----------------
async def list_dir(
self,
root: str,
rel: str,
page_num: int = 1,
page_size: int = 50,
sort_by: str = "name",
sort_order: str = "asc",
) -> Tuple[List[Dict], int]:
base_fid = root or self.root_fid
fid = await self._resolve_dir_fid_from(base_fid, rel)
items = await self._list_children(fid)
# Sort, directories first
reverse = sort_order.lower() == "desc"
def get_sort_key(item):
key = (not item["is_dir"],)
sf = sort_by.lower()
if sf == "name":
key += (item["name"].lower(),)
elif sf == "size":
key += (item["size"],)
elif sf == "mtime":
key += (item["mtime"],)
else:
key += (item["name"].lower(),)
return key
items.sort(key=get_sort_key, reverse=reverse)
total = len(items)
start = (page_num - 1) * page_size
end = start + page_size
return items[start:end], total
# -----------------
# Download and streaming download
# -----------------
async def _get_download_url(self, fid: str) -> str:
data = await self._request("POST", "/file/download", json={"fids": [fid]})
arr = (data or {}).get("data", [])
if not arr:
raise HTTPException(502, detail="No download data returned by Quark")
url = arr[0].get("download_url") or arr[0].get("DownloadUrl")
if not url:
raise HTTPException(502, detail="No download_url returned by Quark")
return url
async def _get_transcoding_url(self, fid: str) -> Optional[str]:
try:
payload = {"fid": fid, "resolutions": "low,normal,high,super,2k,4k", "supports": "fmp4_av,m3u8,dolby_vision"}
data = await self._request("POST", "/file/v2/play/project", json=payload)
lst = (data or {}).get("data", {}).get("video_list", [])
for item in lst:
vi = item.get("video_info") or {}
url = vi.get("url")
if url:
return url
except Exception:
return None
return None
def _is_video_name(self, name: str) -> bool:
mime, _ = mimetypes.guess_type(name)
return bool(mime and mime.startswith("video/"))
def _download_headers(self) -> Dict[str, str]:
return {"Cookie": self._safe_cookie(self.cookie), "User-Agent": self._ua, "Referer": REFERER}
async def read_file(self, root: str, rel: str) -> bytes:
if not rel or rel.endswith("/"):
raise IsADirectoryError("Path is a directory")
parent = rel.rsplit("/", 1)[0] if "/" in rel else ""
name = rel.rsplit("/", 1)[-1]
base_fid = root or self.root_fid
parent_fid = await self._resolve_dir_fid_from(base_fid, parent)
it = await self._find_child(parent_fid, name)
if not it or it["is_dir"]:
raise FileNotFoundError(rel)
url = await self._get_download_url(it["fid"])
headers = self._download_headers()
async with httpx.AsyncClient(timeout=None, follow_redirects=True) as client:
resp = await client.get(url, headers=headers)
if resp.status_code == 404:
raise FileNotFoundError(rel)
resp.raise_for_status()
return resp.content
async def stream_file(self, root: str, rel: str, range_header: str | None):
if not rel or rel.endswith("/"):
raise IsADirectoryError("Path is a directory")
parent = rel.rsplit("/", 1)[0] if "/" in rel else ""
name = rel.rsplit("/", 1)[-1]
base_fid = root or self.root_fid
parent_fid = await self._resolve_dir_fid_from(base_fid, parent)
it = await self._find_child(parent_fid, name)
if not it or it["is_dir"]:
raise FileNotFoundError(rel)
url = await self._get_download_url(it["fid"])
if self.use_transcoding_address and self._is_video_name(name):
tr = await self._get_transcoding_url(it["fid"])
if tr:
url = tr
dl_headers = self._download_headers()
# Pre-fetch the size / whether ranges are supported
total_size: Optional[int] = None
async with httpx.AsyncClient(timeout=self._timeout, follow_redirects=True) as client:
try:
head_resp = await client.head(url, headers=dl_headers)
if head_resp.status_code == 200:
cl = head_resp.headers.get("Content-Length")
if cl and cl.isdigit():
total_size = int(cl)
except Exception:
pass
mime, _ = mimetypes.guess_type(rel)
content_type = mime or "application/octet-stream"
# Parse the Range header
start = 0
end: Optional[int] = None
status_code = 200
if range_header and range_header.startswith("bytes="):
status_code = 206
part = range_header.split("=", 1)[1]
s, e = part.split("-", 1)
if s.strip():
start = int(s)
if e.strip():
end = int(e)
if total_size is not None and end is None and status_code == 206:
end = total_size - 1
if end is not None and total_size is not None and end >= total_size:
end = total_size - 1
if total_size is not None and start >= total_size:
raise HTTPException(416, detail="Requested Range Not Satisfiable")
resp_headers: Dict[str, str] = {"Accept-Ranges": "bytes", "Content-Type": content_type}
if status_code == 206 and total_size is not None and end is not None:
resp_headers["Content-Range"] = f"bytes {start}-{end}/{total_size}"
resp_headers["Content-Length"] = str(end - start + 1)
elif total_size is not None:
resp_headers["Content-Length"] = str(total_size)
async def iterator():
headers = dict(dl_headers)
if status_code == 206 and end is not None:
headers["Range"] = f"bytes={start}-{end}"
async with httpx.AsyncClient(timeout=None, follow_redirects=True) as client:
async with client.stream("GET", url, headers=headers) as resp:
if resp.status_code in (404, 416):
await resp.aclose()
raise HTTPException(resp.status_code, detail="Upstream not available")
async for chunk in resp.aiter_bytes():
if chunk:
yield chunk
return StreamingResponse(iterator(), status_code=status_code, headers=resp_headers, media_type=content_type)
# -----------------
# Upload (multipart for large files)
# -----------------
@staticmethod
def _md5_hex(b: bytes) -> str:
return hashlib.md5(b).hexdigest()
@staticmethod
def _sha1_hex(b: bytes) -> str:
return hashlib.sha1(b).hexdigest()
def _guess_mime(self, name: str) -> str:
mime, _ = mimetypes.guess_type(name)
return mime or "application/octet-stream"
async def _upload_pre(self, filename: str, size: int, parent_fid: str) -> Dict[str, Any]:
now_ms = int(time.time() * 1000)
body = {
"ccp_hash_update": True,
"dir_name": "",
"file_name": filename,
"format_type": self._guess_mime(filename),
"l_created_at": now_ms,
"l_updated_at": now_ms,
"pdir_fid": parent_fid,
"size": size,
}
data = await self._request("POST", "/file/upload/pre", json=body)
return data
async def write_file(self, root: str, rel: str, data: bytes):
async def gen():
yield data
return await self.write_file_stream(root, rel, gen())
async def write_file_stream(self, root: str, rel: str, data_iter: AsyncIterator[bytes]):
if not rel or rel.endswith("/"):
raise HTTPException(400, detail="Invalid file path")
parent = rel.rsplit("/", 1)[0] if "/" in rel else ""
name = rel.rsplit("/", 1)[-1]
base_fid = root or self.root_fid
parent_fid = await self._resolve_dir_fid_from(base_fid, parent)
# Spool the data to a temp file while computing MD5/SHA1
import tempfile
md5 = hashlib.md5()
sha1 = hashlib.sha1()
total = 0
with tempfile.NamedTemporaryFile(delete=False) as tf:
tmp_path = tf.name
try:
async for chunk in data_iter:
if not chunk:
continue
total += len(chunk)
md5.update(chunk)
sha1.update(chunk)
tf.write(chunk)
finally:
tf.flush()
md5_hex = md5.hexdigest()
sha1_hex = sha1.hexdigest()
# Pre-upload to obtain the upload info
pre_resp = await self._upload_pre(name, total, parent_fid)
pre_data = pre_resp.get("data", {})
# Hash-based instant upload (dedupe)
hash_body = {"md5": md5_hex, "sha1": sha1_hex, "task_id": pre_data.get("task_id")}
hash_resp = await self._request("POST", "/file/update/hash", json=hash_body)
if (hash_resp.get("data") or {}).get("finish") is True:
try:
os.unlink(tmp_path)
except Exception:
pass
# Refresh the parent directory cache
self._invalidate_children_cache(parent_fid)
return total
# Multipart upload
part_size = int((pre_resp.get("metadata") or {}).get("part_size") or 0)
if part_size <= 0:
raise HTTPException(502, detail="Invalid part_size from Quark")
bucket = pre_data.get("bucket")
obj_key = pre_data.get("obj_key")
upload_id = pre_data.get("upload_id")
upload_url = pre_data.get("upload_url")
if not (bucket and obj_key and upload_id and upload_url):
raise HTTPException(502, detail="Upload pre missing fields")
# Compute the host and base URL
try:
upload_host = upload_url.split("://", 1)[1]
except Exception:
upload_host = upload_url
base_url = f"https://{bucket}.{upload_host}/{obj_key}"
# Part loop
etags: List[str] = []
oss_ua = "aliyun-sdk-js/6.6.1 Chrome 98.0.4758.80 on Windows 10 64-bit"
async with httpx.AsyncClient(timeout=None, follow_redirects=True) as client:
with open(tmp_path, "rb") as rf:
part_number = 1
left = total
while left > 0:
sz = min(part_size, left)
data_bytes = rf.read(sz)
if len(data_bytes) != sz:
raise IOError("Failed to read part bytes")
now_str = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
# Request a signature
auth_meta = (
"PUT\n\n"
f"{self._guess_mime(name)}\n"
f"{now_str}\n"
f"x-oss-date:{now_str}\n"
f"x-oss-user-agent:{oss_ua}\n"
f"/{bucket}/{obj_key}?partNumber={part_number}&uploadId={upload_id}"
)
auth_req_body = {"auth_info": pre_data.get("auth_info"), "auth_meta": auth_meta, "task_id": pre_data.get("task_id")}
auth_resp = await self._request("POST", "/file/upload/auth", json=auth_req_body)
auth_key = (auth_resp.get("data") or {}).get("auth_key")
if not auth_key:
raise HTTPException(502, detail="upload/auth missing auth_key")
put_headers = {
"Authorization": auth_key,
"Content-Type": self._guess_mime(name),
"Referer": REFERER + "/",
"x-oss-date": now_str,
"x-oss-user-agent": oss_ua,
}
put_url = f"{base_url}?partNumber={part_number}&uploadId={upload_id}"
put_resp = await client.put(put_url, headers=put_headers, content=data_bytes)
if put_resp.status_code != 200:
raise HTTPException(502, detail=f"Upload part failed status={put_resp.status_code} text={put_resp.text}")
etag = put_resp.headers.get("Etag", "")
etags.append(etag)
left -= sz
part_number += 1
# Assemble the commit XML
parts_xml = [f"<Part>\n<PartNumber>{i+1}</PartNumber>\n<ETag>{etags[i]}</ETag>\n</Part>\n" for i in range(len(etags))]
body_xml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<CompleteMultipartUpload>\n" + "".join(parts_xml) + "</CompleteMultipartUpload>"
content_md5 = base64.b64encode(hashlib.md5(body_xml.encode("utf-8")).digest()).decode("ascii")
callback = pre_data.get("callback") or {}
try:
import json as _json
callback_b64 = base64.b64encode(_json.dumps(callback).encode("utf-8")).decode("ascii")
except Exception:
callback_b64 = ""
now_str = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
auth_meta_commit = (
"POST\n"
f"{content_md5}\n"
"application/xml\n"
f"{now_str}\n"
f"x-oss-callback:{callback_b64}\n"
f"x-oss-date:{now_str}\n"
f"x-oss-user-agent:{oss_ua}\n"
f"/{bucket}/{obj_key}?uploadId={upload_id}"
)
auth_commit_resp = await self._request("POST", "/file/upload/auth", json={"auth_info": pre_data.get("auth_info"), "auth_meta": auth_meta_commit, "task_id": pre_data.get("task_id")})
auth_key_commit = (auth_commit_resp.get("data") or {}).get("auth_key")
if not auth_key_commit:
raise HTTPException(502, detail="upload/auth(commit) missing auth_key")
async with httpx.AsyncClient(timeout=None, follow_redirects=True) as client:
commit_headers = {
"Authorization": auth_key_commit,
"Content-MD5": content_md5,
"Content-Type": "application/xml",
"Referer": REFERER + "/",
"x-oss-callback": callback_b64,
"x-oss-date": now_str,
"x-oss-user-agent": oss_ua,
}
commit_url = f"{base_url}?uploadId={upload_id}"
r = await client.post(commit_url, headers=commit_headers, content=body_xml.encode("utf-8"))
if r.status_code != 200:
raise HTTPException(502, detail=f"Upload commit failed status={r.status_code} text={r.text}")
# finish
await self._request("POST", "/file/upload/finish", json={"obj_key": obj_key, "task_id": pre_data.get("task_id")})
# Server-side merge lags slightly; wait before refreshing the cache
try:
await asyncio.sleep(1.0)
except Exception:
pass
try:
os.unlink(tmp_path)
except Exception:
pass
# Invalidate the parent directory cache so later listings see the file
self._invalidate_children_cache(parent_fid)
return total
# -----------------
# Basic file operations
# -----------------
async def mkdir(self, root: str, rel: str):
if not rel or rel == "/":
raise HTTPException(400, detail="Cannot create root")
parent = rel.rstrip("/")
parent_rel, name = (parent.rsplit("/", 1) if "/" in parent else ("", parent))
if not name:
raise HTTPException(400, detail="Invalid directory name")
pdir = await self._resolve_dir_fid_from(root or self.root_fid, parent_rel)
await self._request("POST", "/file", json={"dir_init_lock": False, "dir_path": "", "file_name": name, "pdir_fid": pdir})
self._invalidate_children_cache(pdir)
async def delete(self, root: str, rel: str):
# Resolve the object fid and its parent directory (needed for cache invalidation)
base_fid = root or self.root_fid
if rel == "" or rel.endswith("/"):
parent_rel = rel.rstrip("/")
target_fid = await self._resolve_dir_fid_from(base_fid, parent_rel)
parent_of_target = await self._resolve_dir_fid_from(base_fid, (parent_rel.rsplit("/", 1)[0] if "/" in parent_rel else ""))
else:
parent_rel, name = (rel.rsplit("/", 1) if "/" in rel else ("", rel))
parent_of_target = await self._resolve_dir_fid_from(base_fid, parent_rel)
it = await self._find_child(parent_of_target, name)
if not it:
return
target_fid = it["fid"]
await self._request("POST", "/file/delete", json={"action_type": 1, "exclude_fids": [], "filelist": [target_fid]})
self._invalidate_children_cache(parent_of_target)
async def move(self, root: str, src_rel: str, dst_rel: str):
# Supports cross-directory moves and renames: move into the target parent first, then rename if needed
src_parent_rel, src_name = (src_rel.rsplit("/", 1) if "/" in src_rel else ("", src_rel))
dst_parent_rel, dst_name = (dst_rel.rsplit("/", 1) if "/" in dst_rel else ("", dst_rel))
base_fid = root or self.root_fid
src_parent_fid = await self._resolve_dir_fid_from(base_fid, src_parent_rel)
obj = await self._find_child(src_parent_fid, src_name)
if not obj:
raise FileNotFoundError(src_rel)
dst_parent_fid = await self._resolve_dir_fid_from(base_fid, dst_parent_rel)
if src_parent_fid != dst_parent_fid:
await self._request("POST", "/file/move", json={"action_type": 1, "exclude_fids": [], "filelist": [obj["fid"]], "to_pdir_fid": dst_parent_fid})
self._invalidate_children_cache(src_parent_fid)
self._invalidate_children_cache(dst_parent_fid)
if obj["name"] != dst_name:
await self._request("POST", "/file/rename", json={"fid": obj["fid"], "file_name": dst_name})
self._invalidate_children_cache(dst_parent_fid)
async def rename(self, root: str, src_rel: str, dst_rel: str):
src_parent_rel, src_name = (src_rel.rsplit("/", 1) if "/" in src_rel else ("", src_rel))
base_fid = root or self.root_fid
src_parent_fid = await self._resolve_dir_fid_from(base_fid, src_parent_rel)
obj = await self._find_child(src_parent_fid, src_name)
if not obj:
raise FileNotFoundError(src_rel)
dst_name = dst_rel.rsplit("/", 1)[-1]
await self._request("POST", "/file/rename", json={"fid": obj["fid"], "file_name": dst_name})
self._invalidate_children_cache(src_parent_fid)
async def copy(self, root: str, src_rel: str, dst_rel: str, overwrite: bool = False):
raise NotImplementedError("QuarkOpen does not support copy via open API")
# -----------------
# STAT / EXISTS / helpers
# -----------------
async def stat_file(self, root: str, rel: str):
# Fetch metadata by listing the parent directory
base_fid = root or self.root_fid
if rel == "" or rel.endswith("/"):
# Directory
fid = await self._resolve_dir_fid_from(base_fid, rel.rstrip("/"))
return {"name": rel.rstrip("/").split("/")[-1] if rel else "", "is_dir": True, "size": 0, "mtime": 0, "type": "dir", "fid": fid}
parent_rel, name = (rel.rsplit("/", 1) if "/" in rel else ("", rel))
parent_fid = await self._resolve_dir_fid_from(base_fid, parent_rel)
it = await self._find_child(parent_fid, name)
if not it:
raise FileNotFoundError(rel)
return it
async def exists(self, root: str, rel: str) -> bool:
try:
base_fid = root or self.root_fid
if rel == "" or rel.endswith("/"):
await self._resolve_dir_fid_from(base_fid, rel.rstrip("/"))
return True
parent_rel, name = (rel.rsplit("/", 1) if "/" in rel else ("", rel))
parent_fid = await self._resolve_dir_fid_from(base_fid, parent_rel)
it = await self._find_child(parent_fid, name)
return it is not None
except FileNotFoundError:
return False
async def stat_path(self, root: str, rel: str):
# Pre-check/debug helper used before move/copy
try:
base_fid = root or self.root_fid
if rel == "" or rel.endswith("/"):
fid = await self._resolve_dir_fid_from(base_fid, rel.rstrip("/"))
return {"exists": True, "is_dir": True, "path": rel, "fid": fid}
parent_rel, name = (rel.rsplit("/", 1) if "/" in rel else ("", rel))
parent_fid = await self._resolve_dir_fid_from(base_fid, parent_rel)
it = await self._find_child(parent_fid, name)
if it:
return {"exists": True, "is_dir": it["is_dir"], "path": rel, "fid": it["fid"]}
return {"exists": False, "is_dir": None, "path": rel}
except FileNotFoundError:
return {"exists": False, "is_dir": None, "path": rel}
async def _resolve_target_fid(self, rel: str, *, base_fid: Optional[str] = None) -> str:
base = base_fid or self.root_fid
if rel == "" or rel.endswith("/"):
return await self._resolve_dir_fid_from(base, rel.rstrip("/"))
parent_rel, name = (rel.rsplit("/", 1) if "/" in rel else ("", rel))
parent_fid = await self._resolve_dir_fid_from(base, parent_rel)
it = await self._find_child(parent_fid, name)
if not it:
raise FileNotFoundError(rel)
return it["fid"]
ADAPTER_TYPE = "Quark"
CONFIG_SCHEMA = [
{"key": "cookie", "label": "Cookie", "type": "password", "required": True, "placeholder": "从 pan.quark.cn 复制"},
{"key": "root_fid", "label": "根 FID", "type": "string", "required": False, "default": "0"},
{"key": "use_transcoding_address", "label": "视频转码直链", "type": "checkbox", "required": False, "default": False},
{"key": "only_list_video_file", "label": "仅列出视频文件", "type": "checkbox", "required": False, "default": False},
]
def ADAPTER_FACTORY(rec: StorageAdapter) -> BaseAdapter:
return QuarkAdapter(rec)

View File

@@ -78,6 +78,31 @@ class RuntimeRegistry:
def snapshot(self) -> Dict[int, BaseAdapter]:
return dict(self._instances)
def remove(self, adapter_id: int):
"""从缓存中移除一个适配器实例"""
if adapter_id in self._instances:
del self._instances[adapter_id]
async def upsert(self, rec: StorageAdapter):
"""新增或更新一个适配器实例"""
if not rec.enabled:
self.remove(rec.id)
return
factory = TYPE_MAP.get(rec.type)
if not factory:
discover_adapters()
factory = TYPE_MAP.get(rec.type)
if not factory:
return
try:
instance = factory(rec)
self._instances[rec.id] = instance
except Exception:
self.remove(rec.id)
runtime_registry = RuntimeRegistry()
discover_adapters()

services/adapters/s3.py Normal file
View File

@@ -0,0 +1,380 @@
from __future__ import annotations
import asyncio
import mimetypes
from datetime import datetime
from typing import List, Dict, Tuple, AsyncIterator
from urllib.parse import quote
import aioboto3
from botocore.exceptions import ClientError
from fastapi import HTTPException
from fastapi.responses import StreamingResponse
from models import StorageAdapter
from services.logging import LogService
class S3Adapter:
"""S3 兼容对象存储适配器"""
def __init__(self, record: StorageAdapter):
self.record = record
cfg = record.config
self.bucket_name = cfg.get("bucket_name")
self.aws_access_key_id = cfg.get("access_key_id")
self.aws_secret_access_key = cfg.get("secret_access_key")
self.region_name = cfg.get("region_name")
self.endpoint_url = cfg.get("endpoint_url")
self.root = cfg.get("root", "").strip("/")
if not all([self.bucket_name, self.aws_access_key_id, self.aws_secret_access_key]):
raise ValueError(
"S3 adapter requires bucket_name, access_key_id, and secret_access_key")
self.session = aioboto3.Session(
aws_access_key_id=self.aws_access_key_id,
aws_secret_access_key=self.aws_secret_access_key,
region_name=self.region_name,
)
def get_effective_root(self, sub_path: str | None) -> str:
"""获取 S3 中的有效根路径 (key prefix)"""
if sub_path:
return f"{self.root}/{sub_path.strip('/')}".strip("/")
return self.root
def _get_s3_key(self, rel_path: str) -> str:
"""将相对路径转换为 S3 key"""
rel_path = rel_path.strip("/")
if self.root:
return f"{self.root}/{rel_path}"
return rel_path
def _get_client(self):
return self.session.client("s3", endpoint_url=self.endpoint_url)
async def list_dir(self, root: str, rel: str, page_num: int = 1, page_size: int = 50, sort_by: str = "name", sort_order: str = "asc") -> Tuple[List[Dict], int]:
prefix = self._get_s3_key(rel)
if prefix and not prefix.endswith("/"):
prefix += "/"
all_items = []
async with self._get_client() as s3:
paginator = s3.get_paginator("list_objects_v2")
async for result in paginator.paginate(Bucket=self.bucket_name, Prefix=prefix, Delimiter="/"):
# Add subdirectories
for common_prefix in result.get("CommonPrefixes", []):
dir_name = common_prefix.get(
"Prefix").removeprefix(prefix).strip("/")
if dir_name:
all_items.append({
"name": dir_name,
"is_dir": True,
"size": 0,
"mtime": 0,
"type": "dir",
})
# Add files
for content in result.get("Contents", []):
file_key = content.get("Key")
if file_key == prefix:  # skip the directory placeholder itself
continue
file_name = file_key.removeprefix(prefix)
if file_name:
all_items.append({
"name": file_name,
"is_dir": False,
"size": content.get("Size", 0),
"mtime": int(content.get("LastModified", datetime.now()).timestamp()),
"type": "file",
})
# Sort and paginate in memory
reverse = sort_order.lower() == "desc"
def get_sort_key(item):
key = (not item["is_dir"],)
sort_field = sort_by.lower()
if sort_field == "name":
key += (item["name"].lower(),)
elif sort_field == "size":
key += (item["size"],)
elif sort_field == "mtime":
key += (item["mtime"],)
else:
key += (item["name"].lower(),)
return key
all_items.sort(key=get_sort_key, reverse=reverse)
total_count = len(all_items)
start_idx = (page_num - 1) * page_size
end_idx = start_idx + page_size
return all_items[start_idx:end_idx], total_count
async def read_file(self, root: str, rel: str) -> bytes:
key = self._get_s3_key(rel)
async with self._get_client() as s3:
try:
resp = await s3.get_object(Bucket=self.bucket_name, Key=key)
return await resp["Body"].read()
except ClientError as e:
if e.response["Error"]["Code"] == "NoSuchKey":
raise FileNotFoundError(rel)
raise
async def write_file(self, root: str, rel: str, data: bytes):
key = self._get_s3_key(rel)
async with self._get_client() as s3:
await s3.put_object(Bucket=self.bucket_name, Key=key, Body=data)
await LogService.info(
"adapter:s3", f"Wrote file to {rel}",
details={"adapter_id": self.record.id,
"bucket": self.bucket_name, "key": key, "size": len(data)}
)
async def write_file_stream(self, root: str, rel: str, data_iter: AsyncIterator[bytes]):
key = self._get_s3_key(rel)
MIN_PART_SIZE = 5 * 1024 * 1024
async with self._get_client() as s3:
mpu = await s3.create_multipart_upload(Bucket=self.bucket_name, Key=key)
upload_id = mpu['UploadId']
parts = []
part_number = 1
total_size = 0
buffer = bytearray()
try:
async for chunk in data_iter:
if not chunk:
continue
buffer.extend(chunk)
while len(buffer) >= MIN_PART_SIZE:
part_data = buffer[:MIN_PART_SIZE]
del buffer[:MIN_PART_SIZE]
part = await s3.upload_part(
Bucket=self.bucket_name,
Key=key,
PartNumber=part_number,
UploadId=upload_id,
Body=part_data
)
parts.append({'PartNumber': part_number, 'ETag': part['ETag']})
total_size += len(part_data)
part_number += 1
if buffer:
part = await s3.upload_part(
Bucket=self.bucket_name,
Key=key,
PartNumber=part_number,
UploadId=upload_id,
Body=bytes(buffer)
)
parts.append({'PartNumber': part_number, 'ETag': part['ETag']})
total_size += len(buffer)
await s3.complete_multipart_upload(
Bucket=self.bucket_name,
Key=key,
UploadId=upload_id,
MultipartUpload={'Parts': parts}
)
except Exception as e:
await s3.abort_multipart_upload(
Bucket=self.bucket_name,
Key=key,
UploadId=upload_id
)
raise IOError(f"S3 stream upload failed: {e}") from e
await LogService.info(
"adapter:s3", f"Wrote file stream to {rel}",
details={"adapter_id": self.record.id, "bucket": self.bucket_name, "key": key, "size": total_size}
)
return total_size
async def mkdir(self, root: str, rel: str):
key = self._get_s3_key(rel)
if not key.endswith("/"):
key += "/"
async with self._get_client() as s3:
await s3.put_object(Bucket=self.bucket_name, Key=key, Body=b"")
await LogService.info(
"adapter:s3", f"Created directory {rel}",
details={"adapter_id": self.record.id,
"bucket": self.bucket_name, "key": key}
)
async def delete(self, root: str, rel: str):
key = self._get_s3_key(rel)
async with self._get_client() as s3:
is_dir_like = False
try:
head = await s3.head_object(Bucket=self.bucket_name, Key=key)
if head['ContentLength'] == 0 and key.endswith('/'):
is_dir_like = True
except ClientError as e:
if e.response['Error']['Code'] != '404':
raise
# If the target is a directory, delete every object under its prefix.
# stat_file() never returns a falsy value (it raises on a missing key),
# so branch on its is_dir flag rather than its truthiness.
if is_dir_like or (await self.stat_file(root, rel)).get("is_dir"):
dir_key = key if key.endswith('/') else key + '/'
paginator = s3.get_paginator("list_objects_v2")
objects_to_delete = []
async for result in paginator.paginate(Bucket=self.bucket_name, Prefix=dir_key):
for content in result.get("Contents", []):
objects_to_delete.append({"Key": content["Key"]})
if objects_to_delete:
await s3.delete_objects(Bucket=self.bucket_name, Delete={"Objects": objects_to_delete})
# Otherwise it is a file; delete it directly
else:
await s3.delete_object(Bucket=self.bucket_name, Key=key)
await LogService.info(
"adapter:s3", f"Deleted {rel}",
details={"adapter_id": self.record.id,
"bucket": self.bucket_name, "key": key}
)
async def move(self, root: str, src_rel: str, dst_rel: str):
await self.copy(root, src_rel, dst_rel, overwrite=True)
await self.delete(root, src_rel)
await LogService.info(
"adapter:s3", f"Moved {src_rel} to {dst_rel}",
details={"adapter_id": self.record.id, "bucket": self.bucket_name,
"src_key": self._get_s3_key(src_rel), "dst_key": self._get_s3_key(dst_rel)}
)
async def rename(self, root: str, src_rel: str, dst_rel: str):
await self.move(root, src_rel, dst_rel)
async def copy(self, root: str, src_rel: str, dst_rel: str, overwrite: bool = False):
src_key = self._get_s3_key(src_rel)
dst_key = self._get_s3_key(dst_rel)
async with self._get_client() as s3:
if not overwrite:
try:
await s3.head_object(Bucket=self.bucket_name, Key=dst_key)
raise FileExistsError(dst_rel)
except ClientError as e:
if e.response["Error"]["Code"] != "404":
raise
copy_source = {"Bucket": self.bucket_name, "Key": src_key}
await s3.copy_object(CopySource=copy_source, Bucket=self.bucket_name, Key=dst_key)
await LogService.info(
"adapter:s3", f"Copied {src_rel} to {dst_rel}",
details={"adapter_id": self.record.id, "bucket": self.bucket_name,
"src_key": src_key, "dst_key": dst_key}
)
async def stat_file(self, root: str, rel: str):
key = self._get_s3_key(rel)
async with self._get_client() as s3:
try:
head = await s3.head_object(Bucket=self.bucket_name, Key=key)
return {
"name": rel.split("/")[-1],
"is_dir": False,
"size": head["ContentLength"],
"mtime": int(head["LastModified"].timestamp()),
"type": "file",
}
except ClientError as e:
if e.response["Error"]["Code"] == "404":
# 检查是否为一个 "目录"
dir_key = key if key.endswith('/') else key + '/'
resp = await s3.list_objects_v2(Bucket=self.bucket_name, Prefix=dir_key, MaxKeys=1)
if resp.get('KeyCount', 0) > 0:
return {
"name": rel.split("/")[-1],
"is_dir": True,
"size": 0,
"mtime": 0,
"type": "dir",
}
raise FileNotFoundError(rel)
raise
async def stream_file(self, root: str, rel: str, range_header: str | None):
key = self._get_s3_key(rel)
async with self._get_client() as s3:
try:
head = await s3.head_object(Bucket=self.bucket_name, Key=key)
file_size = head["ContentLength"]
content_type = head.get("ContentType", mimetypes.guess_type(key)[
0] or "application/octet-stream")
except ClientError as e:
if e.response["Error"]["Code"] == "404":
raise HTTPException(
status_code=404, detail="File not found")
raise
start = 0
end = file_size - 1
status = 200
headers = {
"Accept-Ranges": "bytes",
"Content-Type": content_type,
"Content-Length": str(file_size),
"Content-Disposition": f"inline; filename=\"{quote(rel.split('/')[-1])}\""
}
if range_header:
range_val = range_header.strip().partition("=")[2]
s, _, e = range_val.partition("-")
try:
start = int(s) if s else 0
end = int(e) if e else file_size - 1
if start >= file_size or end >= file_size or start > end:
raise HTTPException(
status_code=416, detail="Requested Range Not Satisfiable")
status = 206
headers["Content-Length"] = str(end - start + 1)
headers["Content-Range"] = f"bytes {start}-{end}/{file_size}"
except ValueError:
raise HTTPException(
status_code=400, detail="Invalid Range header")
range_arg = f"bytes={start}-{end}"
async def iterator():
# Open a fresh client here: the outer `async with` has already exited
# by the time StreamingResponse consumes this generator, so reusing
# `s3` would hit a closed client.
try:
async with self._get_client() as s3_stream:
resp = await s3_stream.get_object(Bucket=self.bucket_name, Key=key, Range=range_arg)
body = resp["Body"]
while chunk := await body.read(65536):
yield chunk
except Exception as e:
await LogService.error(
"adapter:s3", f"Error streaming file {key}: {e}")
return StreamingResponse(iterator(), status_code=status, headers=headers, media_type=content_type)
ADAPTER_TYPE = "S3"
CONFIG_SCHEMA = [
{"key": "bucket_name", "label": "Bucket 名称",
"type": "string", "required": True},
{"key": "access_key_id", "label": "Access Key ID",
"type": "string", "required": True},
{"key": "secret_access_key", "label": "Secret Access Key",
"type": "password", "required": True},
{"key": "region_name", "label": "区域 (Region)", "type": "string",
"required": False, "placeholder": "例如 us-east-1"},
{"key": "endpoint_url", "label": "Endpoint URL", "type": "string",
"required": False, "placeholder": "对于 S3 兼容存储, 例如 https://minio.example.com"},
{"key": "root", "label": "根路径 (Root Path)", "type": "string",
"required": False, "placeholder": "在 bucket 内的路径前缀"},
]
def ADAPTER_FACTORY(rec): return S3Adapter(rec)

View File

@@ -0,0 +1,342 @@
from __future__ import annotations
from typing import List, Dict, Tuple, AsyncIterator
import io
import os
from models import StorageAdapter
from telethon import TelegramClient
from telethon.sessions import StringSession
import socks
# Adapter type identifier
ADAPTER_TYPE = "Telegram"
# Adapter configuration schema
CONFIG_SCHEMA = [
{"key": "api_id", "label": "API ID", "type": "string", "required": True, "help_text": "从 my.telegram.org 获取"},
{"key": "api_hash", "label": "API Hash", "type": "password", "required": True, "help_text": "从 my.telegram.org 获取"},
{"key": "session_string", "label": "Session String", "type": "password", "required": True, "help_text": "通过 generate_session.py 生成"},
{"key": "chat_id", "label": "Chat ID", "type": "string", "required": True, "placeholder": "频道/群组的ID或用户名, 例如: -100123456789 或 'channel_username'"},
{"key": "proxy_protocol", "label": "代理协议", "type": "string", "required": False, "placeholder": "例如: socks5, http"},
{"key": "proxy_host", "label": "代理主机", "type": "string", "required": False, "placeholder": "例如: 127.0.0.1"},
{"key": "proxy_port", "label": "代理端口", "type": "number", "required": False, "placeholder": "例如: 1080"},
]
class TelegramAdapter:
"""Telegram 存储适配器 (使用用户 Session)"""
def __init__(self, record: StorageAdapter):
self.record = record
cfg = record.config
self.api_id = int(cfg.get("api_id"))
self.api_hash = cfg.get("api_hash")
self.session_string = cfg.get("session_string")
self.chat_id_str = cfg.get("chat_id")
# Proxy settings
self.proxy_protocol = cfg.get("proxy_protocol")
self.proxy_host = cfg.get("proxy_host")
self.proxy_port = cfg.get("proxy_port")
self.proxy = None
if self.proxy_protocol and self.proxy_host and self.proxy_port:
proto_map = {
"socks5": socks.SOCKS5,
"http": socks.HTTP,
}
proxy_type = proto_map.get(self.proxy_protocol.lower())
if proxy_type:
self.proxy = (proxy_type, self.proxy_host, int(self.proxy_port))
try:
self.chat_id = int(self.chat_id_str)
except (ValueError, TypeError):
self.chat_id = self.chat_id_str
if not all([self.api_id, self.api_hash, self.session_string, self.chat_id]):
raise ValueError("Telegram 适配器需要 api_id, api_hash, session_string 和 chat_id")
def _get_client(self) -> TelegramClient:
"""创建一个新的 TelegramClient 实例"""
return TelegramClient(StringSession(self.session_string), self.api_id, self.api_hash, proxy=self.proxy)
def get_effective_root(self, sub_path: str | None) -> str:
return ""
async def list_dir(self, root: str, rel: str, page_num: int = 1, page_size: int = 50, sort_by: str = "name", sort_order: str = "asc") -> Tuple[List[Dict], int]:
if rel:
return [], 0
client = self._get_client()
entries = []
try:
await client.connect()
messages = await client.get_messages(self.chat_id, limit=200)
for message in messages:
if not message:
continue
media = message.document or message.video or message.photo
if not media:
continue
filename = None
size = 0
if message.photo:
photo_size = message.photo.sizes[-1]
size = photo_size.size if hasattr(photo_size, 'size') else 0
filename = f"photo_{message.id}.jpg"
elif message.document or message.video:
size = media.size
if hasattr(media, 'attributes'):
for attr in media.attributes:
if hasattr(attr, 'file_name') and attr.file_name:
filename = attr.file_name
break
if not filename:
if message.text and '.' in message.text and len(message.text) < 256 and '\n' not in message.text:
filename = message.text
if not filename:
filename = f"unknown_{message.id}"
entries.append({
"name": f"{message.id}_{filename}",
"is_dir": False,
"size": size,
"mtime": int(message.date.timestamp()),
"type": "file",
})
finally:
if client.is_connected():
await client.disconnect()
# Sort
reverse = sort_order.lower() == "desc"
def get_sort_key(item):
key = (not item["is_dir"],)
sort_field = sort_by.lower()
if sort_field == "name":
key += (item["name"].lower(),)
elif sort_field == "size":
key += (item["size"],)
elif sort_field == "mtime":
key += (item["mtime"],)
else:
key += (item["name"].lower(),)
return key
entries.sort(key=get_sort_key, reverse=reverse)
total_count = len(entries)
# Paginate
start_idx = (page_num - 1) * page_size
end_idx = start_idx + page_size
page_entries = entries[start_idx:end_idx]
return page_entries, total_count
async def read_file(self, root: str, rel: str) -> bytes:
try:
message_id_str, _ = rel.split('_', 1)
message_id = int(message_id_str)
except (ValueError, IndexError):
raise FileNotFoundError(f"无效的文件路径格式: {rel}")
client = self._get_client()
try:
await client.connect()
message = await client.get_messages(self.chat_id, ids=message_id)
if not message or not (message.document or message.video or message.photo):
raise FileNotFoundError(f"在频道 {self.chat_id} 中未找到消息ID为 {message_id} 的文件")
file_bytes = await client.download_media(message, file=bytes)
return file_bytes
finally:
if client.is_connected():
await client.disconnect()
async def write_file(self, root: str, rel: str, data: bytes):
"""将字节数据作为文件上传"""
client = self._get_client()
file_like = io.BytesIO(data)
file_like.name = os.path.basename(rel) or "file"
try:
await client.connect()
await client.send_file(self.chat_id, file_like, caption=file_like.name)
finally:
if client.is_connected():
await client.disconnect()
async def write_file_stream(self, root: str, rel: str, data_iter: AsyncIterator[bytes]):
"""以流式方式上传文件"""
client = self._get_client()
filename = os.path.basename(rel) or "file"
import tempfile
temp_dir = tempfile.gettempdir()
temp_path = os.path.join(temp_dir, filename)
total_size = 0
try:
with open(temp_path, "wb") as f:
async for chunk in data_iter:
if chunk:
f.write(chunk)
total_size += len(chunk)
await client.connect()
await client.send_file(self.chat_id, temp_path, caption=filename)
finally:
if os.path.exists(temp_path):
os.remove(temp_path)
if client.is_connected():
await client.disconnect()
return total_size
async def mkdir(self, root: str, rel: str):
raise NotImplementedError("Telegram 适配器不支持创建目录。")
async def delete(self, root: str, rel: str):
"""删除一个文件 (即一条消息)"""
try:
message_id_str, _ = rel.split('_', 1)
message_id = int(message_id_str)
except (ValueError, IndexError):
raise FileNotFoundError(f"无效的文件路径格式无法解析消息ID: {rel}")
client = self._get_client()
try:
await client.connect()
result = await client.delete_messages(self.chat_id, [message_id])
if not result or not result[0].pts:
raise FileNotFoundError(f"{self.chat_id} 中删除消息 {message_id} 失败,可能消息不存在或无权限")
finally:
if client.is_connected():
await client.disconnect()
async def move(self, root: str, src_rel: str, dst_rel: str):
raise NotImplementedError("The Telegram adapter does not support move.")
async def rename(self, root: str, src_rel: str, dst_rel: str):
raise NotImplementedError("The Telegram adapter does not support rename.")
async def copy(self, root: str, src_rel: str, dst_rel: str, overwrite: bool = False):
raise NotImplementedError("The Telegram adapter does not support copy.")
async def stream_file(self, root: str, rel: str, range_header: str | None):
from fastapi.responses import StreamingResponse
from fastapi import HTTPException
try:
message_id_str, _ = rel.split('_', 1)
message_id = int(message_id_str)
except (ValueError, IndexError):
raise HTTPException(status_code=400, detail=f"无效的文件路径格式: {rel}")
client = self._get_client()
try:
await client.connect()
message = await client.get_messages(self.chat_id, ids=message_id)
# Validate the message before touching its media; a missing message
# would otherwise raise AttributeError instead of FileNotFoundError
media = (message.document or message.video or message.photo) if message else None
if not message or not media:
raise FileNotFoundError(f"No file found for message ID {message_id} in chat {self.chat_id}")
if message.photo:
photo_size = media.sizes[-1]
file_size = photo_size.size if hasattr(photo_size, 'size') else 0
mime_type = "image/jpeg"
else:
file_size = media.size
mime_type = media.mime_type or "application/octet-stream"
start = 0
end = file_size - 1
status = 200
headers = {
"Accept-Ranges": "bytes",
"Content-Type": mime_type,
"Content-Length": str(file_size),
}
if range_header:
try:
range_val = range_header.strip().partition("=")[2]
s, _, e = range_val.partition("-")
start = int(s) if s else 0
end = int(e) if e else file_size - 1
if start >= file_size or end >= file_size or start > end:
raise HTTPException(status_code=416, detail="Requested Range Not Satisfiable")
status = 206
headers["Content-Length"] = str(end - start + 1)
headers["Content-Range"] = f"bytes {start}-{end}/{file_size}"
except ValueError:
raise HTTPException(status_code=400, detail="Invalid Range header")
async def iterator():
try:
limit = end - start + 1
downloaded = 0
async for chunk in client.iter_download(media, offset=start):
if downloaded + len(chunk) > limit:
yield chunk[:limit - downloaded]
break
yield chunk
downloaded += len(chunk)
if downloaded >= limit:
break
finally:
if client.is_connected():
await client.disconnect()
return StreamingResponse(iterator(), status_code=status, headers=headers)
except FileNotFoundError as e:
if client.is_connected():
await client.disconnect()
raise HTTPException(status_code=404, detail=str(e))
except Exception as e:
if client.is_connected():
await client.disconnect()
raise HTTPException(status_code=500, detail=f"Streaming failed: {str(e)}")
async def stat_file(self, root: str, rel: str):
try:
message_id_str, filename = rel.split('_', 1)
message_id = int(message_id_str)
except (ValueError, IndexError):
raise FileNotFoundError(f"无效的文件路径格式: {rel}")
client = self._get_client()
try:
await client.connect()
message = await client.get_messages(self.chat_id, ids=message_id)
# Same guard as in stream_file: check the message before its media
media = (message.document or message.video or message.photo) if message else None
if not message or not media:
raise FileNotFoundError(f"No file found for message ID {message_id} in chat {self.chat_id}")
if message.photo:
photo_size = media.sizes[-1]
size = photo_size.size if hasattr(photo_size, 'size') else 0
else:
size = media.size
return {
"name": rel,
"is_dir": False,
"size": size,
"mtime": int(message.date.timestamp()),
"type": "file",
}
finally:
if client.is_connected():
await client.disconnect()
def ADAPTER_FACTORY(rec: StorageAdapter) -> TelegramAdapter:
return TelegramAdapter(rec)

View File

@@ -39,7 +39,7 @@ class WebDAVAdapter:
rel = rel.strip('/')
return self.base_url if not rel else urljoin(self.base_url, quote(rel) + ('/' if rel.endswith('/') else ''))
async def list_dir(self, root: str, rel: str, page_num: int = 1, page_size: int = 50, sort_by: str = "name", sort_order: str = "asc") -> Tuple[List[Dict], int]:
raw_url = self._build_url(rel)
url = raw_url if raw_url.endswith('/') else raw_url + '/'
depth = "1"
@@ -92,16 +92,39 @@ class WebDAVAdapter:
"d:collection", NS) is not None if rt_el is not None else href_path.endswith('/')
size = int(
size_el.text) if size_el is not None and size_el.text and size_el.text.isdigit() else 0
from email.utils import parsedate_to_datetime
mtime = 0
if lm_el is not None and lm_el.text:
try:
mtime = int(parsedate_to_datetime(lm_el.text).timestamp())
except Exception:
mtime = 0
all_entries.append({
"name": name,
"is_dir": is_dir,
"size": 0 if is_dir else size,
"mtime": 0,
"mtime": mtime,
"type": "dir" if is_dir else "file",
})
reverse = sort_order.lower() == "desc"
def get_sort_key(item):
key = (not item["is_dir"],)
sort_field = sort_by.lower()
if sort_field == "name":
key += (item["name"].lower(),)
elif sort_field == "size":
key += (item["size"],)
elif sort_field == "mtime":
key += (item["mtime"],)
else:
key += (item["name"].lower(),)
return key
all_entries.sort(key=get_sort_key, reverse=reverse)
total_count = len(all_entries)
# Apply pagination

View File

@@ -2,13 +2,14 @@ import httpx
from typing import List
from services.config import ConfigCenter
async def describe_image_base64(base64_image: str, detail: str = "high") -> str:
"""
Takes a base64-encoded image and a text prompt; returns a textual description of the image.
"""
OAI_API_URL = await ConfigCenter.get("AI_API_URL", "https://api.siliconflow.cn/v1/chat/completions")
VISION_MODEL = await ConfigCenter.get("AI_VISION_MODEL", "Qwen/Qwen2.5-VL-32B-Instruct")
API_KEY = await ConfigCenter.get("AI_API_KEY", "")
OAI_API_URL = await ConfigCenter.get("AI_VISION_API_URL")
VISION_MODEL = await ConfigCenter.get("AI_VISION_MODEL")
API_KEY = await ConfigCenter.get("AI_VISION_API_KEY")
payload = {
"model": VISION_MODEL,
"messages": [
@@ -42,13 +43,14 @@ async def describe_image_base64(base64_image: str, detail: str = "high") -> str:
except Exception as e:
return f"请求失败: {str(e)}"
async def get_text_embedding(text: str) -> List[float]:
"""
Takes a text string; returns its embedding vector.
"""
OAI_API_URL = await ConfigCenter.get("AI_API_URL", "https://api.siliconflow.cn/v1/chat/completions")
EMBED_MODEL = await ConfigCenter.get("AI_EMBED_MODEL", "Qwen/Qwen3-Embedding-8B")
API_KEY = await ConfigCenter.get("AI_API_KEY", "")
OAI_API_URL = await ConfigCenter.get("AI_EMBED_API_URL")
EMBED_MODEL = await ConfigCenter.get("AI_EMBED_MODEL")
API_KEY = await ConfigCenter.get("AI_EMBED_API_KEY")
payload = {
"model": EMBED_MODEL,
"input": text
@@ -58,7 +60,11 @@ async def get_text_embedding(text: str) -> List[float]:
"Content-Type": "application/json"
}
async with httpx.AsyncClient() as client:
if OAI_API_URL.endswith("chat/completions"):
url = OAI_API_URL.replace("chat/completions", "embeddings")
else:
url = OAI_API_URL
resp = await client.post(url, headers=headers, json=payload)
resp.raise_for_status()
result = resp.json()
return result["data"][0]["embedding"]

View File

@@ -1,7 +1,6 @@
from tortoise.transactions import in_transaction
from models.database import (
StorageAdapter,
UserAccount,
AutomationTask,
ShareLink,
@@ -18,7 +17,6 @@ class BackupService:
"""
async with in_transaction() as conn:
adapters = await StorageAdapter.all().values()
users = await UserAccount.all().values()
tasks = await AutomationTask.all().values()
shares = await ShareLink.all().values()
@@ -33,7 +31,6 @@ class BackupService:
return {
"version": VERSION,
"storage_adapters": list(adapters),
"mounts": list(mounts),
"user_accounts": list(users),
"automation_tasks": list(tasks),
"share_links": list(shares),
@@ -48,7 +45,6 @@ class BackupService:
async with in_transaction() as conn:
await ShareLink.all().using_db(conn).delete()
await AutomationTask.all().using_db(conn).delete()
await StorageAdapter.all().using_db(conn).delete()
await UserAccount.all().using_db(conn).delete()
await Configuration.all().using_db(conn).delete()
@@ -71,12 +67,6 @@ class BackupService:
using_db=conn
)
if data.get("mounts"):
await Mount.bulk_create(
[Mount(**m) for m in data["mounts"]],
using_db=conn
)
if data.get("automation_tasks"):
await AutomationTask.bulk_create(
[AutomationTask(**t) for t in data["automation_tasks"]],

View File

@@ -4,7 +4,7 @@ from typing import Any, Optional, Dict
from dotenv import load_dotenv
from models.database import Configuration
load_dotenv(dotenv_path=".env")
VERSION = "v1.0.0"
VERSION = "v1.2.10"
class ConfigCenter:
_cache: Dict[str, Any] = {}

View File

@@ -5,11 +5,18 @@ from services.logging import LogService
from models.database import UserAccount
import jwt
from jwt.exceptions import InvalidTokenError
from services.auth import ALGORITHM
from services.config import ConfigCenter
class LoggingMiddleware(BaseHTTPMiddleware):
async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response:
path = request.url.path
method = request.method.upper()
if method == "GET":
if path == "/api/logs" or path == "/api/plugins" or path.startswith("/api/config"):
return await call_next(request)
start_time = time.time()
user_id = None
if "authorization" in request.headers:
@@ -27,9 +34,9 @@ class LoggingMiddleware(BaseHTTPMiddleware):
pass
response = await call_next(request)
process_time = (time.time() - start_time) * 1000
details = {
"client_ip": request.client.host,
"method": request.method,
@@ -38,9 +45,9 @@ class LoggingMiddleware(BaseHTTPMiddleware):
"status_code": response.status_code,
"process_time_ms": round(process_time, 2)
}
message = f"{request.method} {request.url.path} - {response.status_code}"
await LogService.api(message, details, user_id)
return response

View File

@@ -0,0 +1,199 @@
import os
import time
from pathlib import Path
from typing import AsyncIterator
import aiofiles
import aiohttp
from fastapi import HTTPException
from services.logging import LogService
from services.task_queue import Task, task_queue_service, TaskProgress
from services.virtual_fs import write_file_stream, stat_file
TEMP_ROOT = Path("data/tmp/offline_downloads")
def _normalize_path(path: str) -> str:
if not path:
return "/"
if not path.startswith("/"):
path = "/" + path
if len(path) > 1 and path.endswith("/"):
path = path.rstrip("/")
return path or "/"
async def _path_exists(full_path: str) -> bool:
try:
await stat_file(full_path)
return True
except FileNotFoundError:
return False
except HTTPException as exc:
if exc.status_code == 404:
return False
raise
def _split_filename(filename: str) -> tuple[str, str]:
if not filename:
return "", ""
if filename.startswith('.') and filename.count('.') == 1:
return filename, ""
if '.' not in filename:
return filename, ""
stem, ext = filename.rsplit('.', 1)
return stem, f".{ext}"
async def _allocate_destination(dest_dir: str, filename: str) -> tuple[str, str]:
dest_dir = _normalize_path(dest_dir)
stem, suffix = _split_filename(filename)
candidate = filename
if dest_dir == "/":
base = ""
else:
base = dest_dir
attempt = 0
while await _path_exists(f"{base}/{candidate}" if base else f"/{candidate}"):
attempt += 1
if stem:
candidate = f"{stem} ({attempt}){suffix}"
else:
candidate = f"file ({attempt}){suffix}" if suffix else f"file ({attempt})"
if base:
full_path = f"{base}/{candidate}"
else:
full_path = f"/{candidate}"
return full_path, candidate
async def _iter_file(path: Path, chunk_size: int, report_cb) -> AsyncIterator[bytes]:
async with aiofiles.open(path, "rb") as f:
while True:
chunk = await f.read(chunk_size)
if not chunk:
break
await report_cb(len(chunk))
yield chunk
async def run_http_download(task: Task):
params = task.task_info
url = params.get("url")
dest_dir = params.get("dest_dir")
filename = params.get("filename")
if not url or not dest_dir or not filename:
raise ValueError("Missing required parameters for offline download")
TEMP_ROOT.mkdir(parents=True, exist_ok=True)
temp_dir = TEMP_ROOT / task.id
temp_dir.mkdir(parents=True, exist_ok=True)
temp_file = temp_dir / "payload"
bytes_total: int | None = None
bytes_done = 0
last_update = time.monotonic()
await task_queue_service.update_progress(
task.id,
TaskProgress(
stage="downloading",
percent=0.0,
bytes_total=None,
bytes_done=0,
detail="HTTP downloading",
),
)
async def report_download(delta: int, total: int | None):
nonlocal bytes_done, bytes_total, last_update
if total is not None:
bytes_total = total
bytes_done += delta
now = time.monotonic()
if delta and now - last_update < 0.5:
return
last_update = now
percent = None
total_for_display = bytes_total
if bytes_total:
percent = min(100.0, round(bytes_done / bytes_total * 100, 2))
await task_queue_service.update_progress(
task.id,
TaskProgress(
stage="downloading",
percent=percent,
bytes_total=total_for_display,
bytes_done=bytes_done,
detail="HTTP downloading",
),
)
timeout = aiohttp.ClientTimeout(total=None, connect=30)
async with aiohttp.ClientSession(timeout=timeout) as session:
async with session.get(url) as resp:
if resp.status != 200:
raise ValueError(f"HTTP {resp.status} for {url}")
content_length = resp.headers.get("Content-Length")
total_size = int(content_length) if content_length else None
bytes_done = 0
async with aiofiles.open(temp_file, "wb") as f:
async for chunk in resp.content.iter_chunked(512 * 1024):
if not chunk:
continue
await f.write(chunk)
await report_download(len(chunk), total_size)
# ensure final update
await report_download(0, total_size)
file_size = os.path.getsize(temp_file)
bytes_done_transfer = 0
async def report_transfer(delta: int):
nonlocal bytes_done_transfer
bytes_done_transfer += delta
percent = min(100.0, round(bytes_done_transfer / file_size * 100, 2)) if file_size else None
await task_queue_service.update_progress(
task.id,
TaskProgress(
stage="transferring",
percent=percent,
bytes_total=file_size or None,
bytes_done=bytes_done_transfer,
detail="Saving to storage",
),
)
async def chunk_iter() -> AsyncIterator[bytes]:
async for chunk in _iter_file(temp_file, 512 * 1024, report_transfer):
yield chunk
final_path, resolved_name = await _allocate_destination(dest_dir, filename)
await task_queue_service.update_progress(
task.id,
TaskProgress(stage="transferring", percent=0.0, bytes_total=file_size or None, bytes_done=0, detail="Saving to storage"),
)
await write_file_stream(final_path, chunk_iter())
await task_queue_service.update_progress(
task.id,
TaskProgress(stage="completed", percent=100.0, bytes_total=file_size or None, bytes_done=file_size, detail="Completed"),
)
await task_queue_service.update_meta(task.id, {"final_path": final_path, "filename": resolved_name})
try:
os.remove(temp_file)
temp_dir.rmdir()
except Exception:
await LogService.info("offline_download", f"Temp cleanup failed for task {task.id}")
return final_path

View File

@@ -1,33 +1,53 @@
import inspect
import pkgutil
from importlib import import_module, reload
from pathlib import Path
from types import ModuleType
from typing import Callable, Dict, Optional
from .base import BaseProcessor
ProcessorFactory = Callable[[], BaseProcessor]
TYPE_MAP: Dict[str, ProcessorFactory] = {}
CONFIG_SCHEMAS: Dict[str, dict] = {}
MODULE_MAP: Dict[str, ModuleType] = {}
LAST_DISCOVERY_ERRORS: list[str] = []
def discover_processors(force_reload: bool = False) -> list[str]:
"""Discover available processor modules and cache their metadata."""
import services.processors  # deferred import to avoid a circular import
processors_pkg = services.processors
TYPE_MAP.clear()
CONFIG_SCHEMAS.clear()
MODULE_MAP.clear()
global LAST_DISCOVERY_ERRORS
LAST_DISCOVERY_ERRORS = []
for modinfo in pkgutil.iter_modules(processors_pkg.__path__):
if modinfo.name.startswith("_"):
continue
full_name = f"{processors_pkg.__name__}.{modinfo.name}"
try:
module = import_module(full_name)
if force_reload:
module = reload(module)
except Exception as exc:
LAST_DISCOVERY_ERRORS.append(f"Failed to import {full_name}: {exc}")
continue
processor_type = getattr(module, "PROCESSOR_TYPE", None)
processor_name = getattr(module, "PROCESSOR_NAME", None)
supported_exts = getattr(module, "SUPPORTED_EXTS", None)
schema = getattr(module, "CONFIG_SCHEMA", None)
factory = getattr(module, "PROCESSOR_FACTORY", None)
if not processor_type:
continue
if factory is None:
for attr in module.__dict__.values():
if inspect.isclass(attr) and attr.__name__.endswith("Processor"):
@@ -35,31 +55,85 @@ def discover_processors():
return lambda: cls()
factory = _mk()
break
if not callable(factory):
LAST_DISCOVERY_ERRORS.append(f"Processor {full_name} missing factory")
continue
try:
sample = factory()
except Exception as exc:
LAST_DISCOVERY_ERRORS.append(f"Failed to instantiate processor {processor_type}: {exc}")
continue
TYPE_MAP[processor_type] = factory
MODULE_MAP[processor_type] = module
produces_file = getattr(module, "produces_file", None)
if produces_file is None and hasattr(factory(), "produces_file"):
produces_file = getattr(factory(), "produces_file")
if produces_file is None and hasattr(sample, "produces_file"):
produces_file = getattr(sample, "produces_file")
module_file = getattr(module, "__file__", None)
module_path: Optional[str] = None
if module_file:
try:
module_path = str(Path(module_file).resolve())
except Exception:
module_path = module_file
if isinstance(supported_exts, list):
normalized_exts = [str(ext) for ext in supported_exts]
elif supported_exts:
normalized_exts = [str(supported_exts)]
else:
normalized_exts = []
if not normalized_exts and hasattr(sample, "supported_exts"):
sample_exts = getattr(sample, "supported_exts") or []
if isinstance(sample_exts, list):
normalized_exts = [str(ext) for ext in sample_exts]
if isinstance(schema, list):
CONFIG_SCHEMAS[processor_type] = {
"type": processor_type,
"name": processor_name or processor_type,
"supported_exts": supported_exts or [],
"supported_exts": normalized_exts,
"config_schema": schema,
"produces_file": produces_file if produces_file is not None else False
"produces_file": produces_file if produces_file is not None else False,
"module_path": module_path,
}
return LAST_DISCOVERY_ERRORS
def get_config_schemas() -> Dict[str, dict]:
return CONFIG_SCHEMAS
def get_config_schema(processor_type: str):
return CONFIG_SCHEMAS.get(processor_type)
def get(processor_type: str) -> BaseProcessor:
factory = TYPE_MAP.get(processor_type)
if factory:
return factory()
return None
def get_module_path(processor_type: str) -> Optional[str]:
meta = CONFIG_SCHEMAS.get(processor_type)
if not meta:
return None
return meta.get("module_path")
def get_last_discovery_errors() -> list[str]:
return LAST_DISCOVERY_ERRORS
def reload_processors() -> list[str]:
return discover_processors(force_reload=True)
discover_processors()

View File

@@ -2,8 +2,9 @@ from typing import Dict, Any
from fastapi.responses import Response
import base64
from services.ai import describe_image_base64, get_text_embedding
from services.vector_db import VectorDBService, DEFAULT_VECTOR_DIMENSION
from services.logging import LogService
from services.config import ConfigCenter
class VectorIndexProcessor:
@@ -33,7 +34,7 @@ class VectorIndexProcessor:
vector_db = VectorDBService()
collection_name = "vector_collection"
if action == "destroy":
await vector_db.delete_vector(collection_name, path)
await LogService.info(
"processor:vector_index",
f"Destroyed {index_type} index for {path}",
@@ -42,8 +43,8 @@ class VectorIndexProcessor:
return Response(content=f"文件 {path}{index_type} 索引已销毁", media_type="text/plain")
if index_type == 'simple':
await vector_db.ensure_collection(collection_name, vector=False)
await vector_db.upsert_vector(collection_name, {'path': path})
await LogService.info(
"processor:vector_index",
f"Created simple index for {path}",
@@ -71,8 +72,16 @@ class VectorIndexProcessor:
if embedding is None:
return Response(content="不支持的文件类型", status_code=400)
raw_dim = await ConfigCenter.get('AI_EMBED_DIM', DEFAULT_VECTOR_DIMENSION)
try:
vector_dim = int(raw_dim)
except (TypeError, ValueError):
vector_dim = DEFAULT_VECTOR_DIMENSION
if vector_dim <= 0:
vector_dim = DEFAULT_VECTOR_DIMENSION
await vector_db.ensure_collection(collection_name, vector=True, dim=vector_dim)
await vector_db.upsert_vector(
collection_name, {'path': path, 'embedding': embedding})
await LogService.info(

View File

@@ -90,6 +90,16 @@ class ShareService:
raise HTTPException(status_code=404, detail="分享链接不存在")
await share.delete()
@staticmethod
async def delete_expired_shares(user: UserAccount) -> int:
"""
Delete all of the current user's expired share links; return the number deleted.
Condition: expires_at is non-null and less than or equal to the current time (UTC).
"""
now = datetime.now(timezone.utc)
deleted_count = await ShareLink.filter(user=user, expires_at__lte=now).delete()
return deleted_count
@staticmethod
async def get_shared_item_details(share: ShareLink, sub_path: str = ""):
"""
@@ -122,4 +132,4 @@ class ShareService:
raise e
share_service = ShareService()

services/task_queue.py Normal file
View File

@@ -0,0 +1,228 @@
import asyncio
from typing import Dict, Any
from pydantic import BaseModel, Field
import uuid
from services.logging import LogService
from enum import Enum
class TaskStatus(str, Enum):
PENDING = "pending"
RUNNING = "running"
SUCCESS = "success"
FAILED = "failed"
class TaskProgress(BaseModel):
stage: str | None = None
percent: float | None = None
bytes_total: int | None = None
bytes_done: int | None = None
detail: str | None = None
class Task(BaseModel):
id: str = Field(default_factory=lambda: uuid.uuid4().hex)
name: str
status: TaskStatus = TaskStatus.PENDING
result: Any = None
error: str | None = None
task_info: Dict[str, Any] = {}
progress: TaskProgress | None = None
meta: Dict[str, Any] | None = None
_SENTINEL = object()
class TaskQueueService:
def __init__(self):
self._queue: asyncio.Queue[Task | object] = asyncio.Queue()
self._tasks: Dict[str, Task] = {}
self._worker_tasks: list[asyncio.Task] = []
self._concurrency: int = 1
self._worker_seq: int = 0
async def add_task(self, name: str, task_info: Dict[str, Any]) -> Task:
task = Task(name=name, task_info=task_info)
self._tasks[task.id] = task
await self._queue.put(task)
await LogService.info("task_queue", f"Task {name} ({task.id}) enqueued", {"task_id": task.id, "name": name})
return task
def get_task(self, task_id: str) -> Task | None:
return self._tasks.get(task_id)
def get_all_tasks(self) -> list[Task]:
return list(self._tasks.values())
async def update_progress(self, task_id: str, progress: TaskProgress | Dict[str, Any]):
task = self._tasks.get(task_id)
if not task:
return
if isinstance(progress, TaskProgress):
task.progress = progress
else:
task.progress = TaskProgress(**progress)
async def update_meta(self, task_id: str, meta: Dict[str, Any]):
task = self._tasks.get(task_id)
if not task:
return
task.meta = (task.meta or {}) | meta
async def _execute_task(self, task: Task):
from services.virtual_fs import process_file
task.status = TaskStatus.RUNNING
await LogService.info("task_queue", f"Task {task.name} ({task.id}) started", {"task_id": task.id, "name": task.name})
try:
if task.name == "process_file":
params = task.task_info
result = await process_file(
path=params["path"],
processor_type=params["processor_type"],
config=params["config"],
save_to=params.get("save_to"),
overwrite=params.get("overwrite", False),
)
task.result = result
elif task.name == "automation_task" or self._is_processor_task(task.name):
from models.database import AutomationTask
from services.processors.registry import get as get_processor
from services.virtual_fs import read_file, write_file
params = task.task_info
auto_task = await AutomationTask.get(id=params["task_id"])
path = params["path"]
processor_type = auto_task.processor_type if task.name == "automation_task" else task.name
processor = get_processor(processor_type)
if not processor:
raise ValueError(f"Processor {processor_type} not found for task {auto_task.id}")
if processor_type != auto_task.processor_type:
await LogService.warning(
"task_queue",
"Processor type mismatch; falling back to stored type",
{"task_id": auto_task.id, "expected": auto_task.processor_type, "got": processor_type},
)
processor_type = auto_task.processor_type
processor = get_processor(processor_type)
if not processor:
raise ValueError(f"Processor {processor_type} not found for task {auto_task.id}")
file_content = await read_file(path)
result = await processor.process(file_content, path, auto_task.processor_config)
save_to = auto_task.processor_config.get("save_to")
if save_to and getattr(processor, "produces_file", False):
await write_file(save_to, result)
task.result = "Automation task completed"
elif task.name == "offline_http_download":
from services.offline_download import run_http_download
result_path = await run_http_download(task)
task.result = {"path": result_path}
elif task.name == "cross_mount_transfer":
from services.virtual_fs import run_cross_mount_transfer_task
result = await run_cross_mount_transfer_task(task)
task.result = result
else:
raise ValueError(f"Unknown task name: {task.name}")
task.status = TaskStatus.SUCCESS
await LogService.info("task_queue", f"Task {task.name} ({task.id}) succeeded", {"task_id": task.id, "name": task.name})
except Exception as e:
task.status = TaskStatus.FAILED
task.error = str(e)
await LogService.error("task_queue", f"Task {task.name} ({task.id}) failed: {e}", {"task_id": task.id, "name": task.name})
def _cleanup_workers(self):
self._worker_tasks = [task for task in self._worker_tasks if not task.done()]
def _is_processor_task(self, task_name: str) -> bool:
try:
from services.processors.registry import get as get_processor
return get_processor(task_name) is not None
except Exception:
return False
async def _ensure_worker_count(self):
self._cleanup_workers()
current = len(self._worker_tasks)
if current < self._concurrency:
for _ in range(self._concurrency - current):
self._worker_seq += 1
worker_id = self._worker_seq
worker_task = asyncio.create_task(self._worker_loop(worker_id))
self._worker_tasks.append(worker_task)
await LogService.info("task_queue", "Task workers adjusted", {"active_workers": len(self._worker_tasks), "target": self._concurrency})
elif current > self._concurrency:
for _ in range(current - self._concurrency):
await self._queue.put(_SENTINEL)
await LogService.info("task_queue", "Task workers scaling down", {"active_workers": len(self._worker_tasks), "target": self._concurrency})
async def _worker_loop(self, worker_id: int):
current_task = asyncio.current_task()
await LogService.info("task_queue", f"Worker {worker_id} started")
try:
while True:
job = await self._queue.get()
if job is _SENTINEL:
self._queue.task_done()
break
try:
await self._execute_task(job)
except Exception as e:
await LogService.error(
"task_queue",
f"Error executing task {job.id}: {e}",
{"task_id": job.id, "name": job.name},
)
finally:
self._queue.task_done()
finally:
if current_task in self._worker_tasks:
self._worker_tasks.remove(current_task) # type: ignore[arg-type]
await LogService.info("task_queue", f"Worker {worker_id} stopped")
async def start_worker(self, concurrency: int | None = None):
if concurrency is None:
from services.config import ConfigCenter
stored_value = await ConfigCenter.get("TASK_QUEUE_CONCURRENCY", self._concurrency)
try:
concurrency = int(stored_value)
except (TypeError, ValueError):
concurrency = self._concurrency
await self.set_concurrency(concurrency)
async def set_concurrency(self, value: int):
value = max(1, int(value))
if value != self._concurrency:
self._concurrency = value
await self._ensure_worker_count()
async def stop_worker(self):
self._cleanup_workers()
for _ in range(len(self._worker_tasks)):
await self._queue.put(_SENTINEL)
if self._worker_tasks:
await asyncio.gather(*self._worker_tasks, return_exceptions=True)
self._worker_tasks.clear()
await LogService.info("task_queue", "Task workers have been stopped.")
def get_concurrency(self) -> int:
return self._concurrency
def get_active_worker_count(self) -> int:
self._cleanup_workers()
return len(self._worker_tasks)
task_queue_service = TaskQueueService()

View File

@@ -4,6 +4,9 @@ from models.database import AutomationTask
from services.processors.registry import get as get_processor
from services.logging import LogService
from services.task_queue import task_queue_service
class TaskService:
async def trigger_tasks(self, event: str, path: str):
tasks = await AutomationTask.filter(event=event, enabled=True)
@@ -21,28 +24,12 @@ class TaskService:
return True
async def execute(self, task: AutomationTask, path: str):
await task_queue_service.add_task(
task.processor_type,
{
"task_id": task.id,
"path": path,
},
)
task_service = TaskService()

View File

@@ -5,7 +5,8 @@ from pathlib import Path
from typing import Tuple
from fastapi import HTTPException
ALLOWED_EXT = {"jpg", "jpeg", "png", "webp", "gif", "bmp", "tiff", "arw", "cr2", "cr3", "nef", "rw2", "orf", "pef", "dng"}
ALLOWED_EXT = {"jpg", "jpeg", "png", "webp", "gif", "bmp",
"tiff", "arw", "cr2", "cr3", "nef", "rw2", "orf", "pef", "dng"}
RAW_EXT = {"arw", "cr2", "cr3", "nef", "rw2", "orf", "pef", "dng"}
MAX_SOURCE_SIZE = 200 * 1024 * 1024
CACHE_ROOT = Path('data/.thumb_cache')
@@ -49,11 +50,12 @@ def generate_thumb(data: bytes, w: int, h: int, fit: str, is_raw: bool = False)
thumb = raw.extract_thumb()
except rawpy.LibRawNoThumbnailError:
thumb = None
if thumb is not None and thumb.format in [rawpy.ThumbFormat.JPEG, rawpy.ThumbFormat.BITMAP]:
im = Image.open(io.BytesIO(thumb.data))
else:
rgb = raw.postprocess(
use_camera_wb=False, use_auto_wb=True, output_bps=8)
im = Image.fromarray(rgb)
except Exception as e:
print(f"rawpy processing failed: {e}")
@@ -87,18 +89,48 @@ def generate_thumb(data: bytes, w: int, h: int, fit: str, is_raw: bool = False)
async def get_or_create_thumb(adapter, adapter_id: int, root: str, rel: str, w: int, h: int, fit: str = 'cover'):
stat = await adapter.stat_file(root, rel)
if stat['size'] > MAX_SOURCE_SIZE:
raise HTTPException(400, detail="Image too large for thumbnail")
key = _cache_key(adapter_id, rel, stat['size'], int(stat['mtime']), w, h, fit)
raise HTTPException(400, detail="Image too large for thumbnail")
key = _cache_key(adapter_id, rel, stat['size'], int(
stat['mtime']), w, h, fit)
path = _cache_path(key)
if path.exists():
return path.read_bytes(), 'image/webp', key
_ensure_cache_dir(path)
thumb_bytes, mime = None, None
get_thumb_impl = getattr(adapter, "get_thumbnail", None)
if callable(get_thumb_impl):
size_str = "large" if w > 400 else "medium" if w > 100 else "small"
native_thumb_bytes = await get_thumb_impl(root, rel, size_str)
if native_thumb_bytes:
try:
from PIL import Image
im = Image.open(io.BytesIO(native_thumb_bytes))
buf = io.BytesIO()
im.save(buf, 'WEBP', quality=85)
thumb_bytes = buf.getvalue()
mime = 'image/webp'
except Exception as e:
print(
f"Failed to convert native thumbnail to WebP: {e}, falling back.")
thumb_bytes, mime = None, None
if not thumb_bytes:
read_data = await adapter.read_file(root, rel)
try:
thumb_bytes, mime = generate_thumb(
read_data, w, h, fit, is_raw=is_raw_filename(rel))
except Exception as e:
print(e)
raise HTTPException(
500, detail=f"Thumbnail generation failed: {e}")
if thumb_bytes:
path.write_bytes(thumb_bytes)
return thumb_bytes, mime, key
raise HTTPException(
500, detail="Failed to generate thumbnail by any means")

View File

@@ -1,77 +0,0 @@
from pymilvus import CollectionSchema, DataType, FieldSchema, MilvusClient
class VectorDBService:
_instance = None
def __new__(cls, *args, **kwargs):
if not cls._instance:
cls._instance = super(VectorDBService, cls).__new__(cls)
return cls._instance
def __init__(self):
if not hasattr(self, 'client'):
self.client = MilvusClient("data/db/milvus.db")
def ensure_collection(self, collection_name, vector: bool = True):
if self.client.has_collection(collection_name):
return
if vector:
fields = [
FieldSchema(name="path", dtype=DataType.VARCHAR,
max_length=512, is_primary=True, auto_id=False),
FieldSchema(name="embedding",
dtype=DataType.FLOAT_VECTOR, dim=4096)
]
schema = CollectionSchema(
fields, description="Image vector collection")
self.client.create_collection(collection_name, schema=schema)
index_params = MilvusClient.prepare_index_params()
index_params.add_index(
field_name="embedding",
index_type="IVF_FLAT",
index_name="vector_index",
metric_type="COSINE",
params={
"nlist": 64,
}
)
self.client.create_index(
collection_name,
index_params=index_params
)
else:
fields = [
FieldSchema(name="path", dtype=DataType.VARCHAR,
max_length=512, is_primary=True, auto_id=False),
]
schema = CollectionSchema(fields, description="Simple file index")
self.client.create_collection(collection_name, schema=schema)
def upsert_vector(self, collection_name, data):
self.client.upsert(collection_name, data)
def delete_vector(self, collection_name, path: str):
self.client.delete(collection_name, ids=[path])
def search_vectors(self, collection_name, query_embedding, top_k=5):
search_params = {"metric_type": "COSINE"}
results = self.client.search(
collection_name,
data=[query_embedding],
anns_field="embedding",
search_params=search_params,
limit=top_k,
output_fields=["path"]
)
print(results)
return results
def search_by_path(self, collection_name, query_path, top_k=20):
results = self.client.query(
collection_name,
filter=f"path like '%{query_path}%'",
limit=top_k,
output_fields=["path"]
)
return [[{'id': r['path'], 'distance': 1.0, 'entity': {'path': r['path']}} for r in results]]

View File

@@ -0,0 +1,11 @@
from .service import VectorDBService, DEFAULT_VECTOR_DIMENSION
from .providers import list_providers, get_provider_entry
from .config_manager import VectorDBConfigManager
__all__ = [
"VectorDBService",
"DEFAULT_VECTOR_DIMENSION",
"list_providers",
"get_provider_entry",
"VectorDBConfigManager",
]

View File

@@ -0,0 +1,43 @@
from __future__ import annotations
import json
from typing import Any, Dict, Tuple
from services.config import ConfigCenter
class VectorDBConfigManager:
TYPE_KEY = "VECTOR_DB_TYPE"
CONFIG_KEY = "VECTOR_DB_CONFIG"
DEFAULT_TYPE = "milvus_lite"
@classmethod
async def load_config(cls) -> Tuple[str, Dict[str, Any]]:
raw_type = await ConfigCenter.get(cls.TYPE_KEY, cls.DEFAULT_TYPE)
provider_type = str(raw_type or cls.DEFAULT_TYPE)
raw_config = await ConfigCenter.get(cls.CONFIG_KEY)
config_dict: Dict[str, Any] = {}
if isinstance(raw_config, str) and raw_config:
try:
config_dict = json.loads(raw_config)
except json.JSONDecodeError:
config_dict = {}
elif isinstance(raw_config, dict):
config_dict = raw_config
return provider_type, config_dict
@classmethod
async def save_config(cls, provider_type: str, config: Dict[str, Any]) -> None:
await ConfigCenter.set(cls.TYPE_KEY, provider_type)
await ConfigCenter.set(cls.CONFIG_KEY, json.dumps(config or {}))
@classmethod
async def get_type(cls) -> str:
provider_type, _ = await cls.load_config()
return provider_type
@classmethod
async def get_config(cls) -> Dict[str, Any]:
_, config = await cls.load_config()
return config
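
Editor's note: load_config tolerates three shapes of stored config: a JSON string, an already-parsed dict, or anything else (treated as empty). A standalone sketch of that parsing rule:

from __future__ import annotations
import json
from typing import Any, Dict

def parse_config(raw: Any) -> Dict[str, Any]:
    # Mirrors load_config: JSON string -> parsed, dict -> as-is, anything else -> {}.
    if isinstance(raw, str) and raw:
        try:
            return json.loads(raw)
        except json.JSONDecodeError:
            return {}
    if isinstance(raw, dict):
        return raw
    return {}

assert parse_config('{"db_path": "data/db/milvus.db"}') == {"db_path": "data/db/milvus.db"}
assert parse_config("not json") == {}
assert parse_config(None) == {}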

View File

@@ -0,0 +1,56 @@
from __future__ import annotations
from typing import Dict, List, Type
from .base import BaseVectorProvider
from .milvus_lite import MilvusLiteProvider
from .milvus_server import MilvusServerProvider
from .qdrant import QdrantProvider
_PROVIDER_REGISTRY: Dict[str, Dict[str, object]] = {
MilvusLiteProvider.type: {
"class": MilvusLiteProvider,
"label": MilvusLiteProvider.label,
"description": MilvusLiteProvider.description,
"enabled": MilvusLiteProvider.enabled,
"config_schema": MilvusLiteProvider.config_schema,
},
MilvusServerProvider.type: {
"class": MilvusServerProvider,
"label": MilvusServerProvider.label,
"description": MilvusServerProvider.description,
"enabled": MilvusServerProvider.enabled,
"config_schema": MilvusServerProvider.config_schema,
},
QdrantProvider.type: {
"class": QdrantProvider,
"label": QdrantProvider.label,
"description": QdrantProvider.description,
"enabled": QdrantProvider.enabled,
"config_schema": QdrantProvider.config_schema,
},
}
def list_providers() -> List[Dict[str, object]]:
return [
{
"type": type_key,
"label": meta["label"],
"description": meta.get("description"),
"enabled": meta.get("enabled", True),
"config_schema": meta.get("config_schema", []),
}
for type_key, meta in _PROVIDER_REGISTRY.items()
]
def get_provider_entry(provider_type: str) -> Dict[str, object] | None:
return _PROVIDER_REGISTRY.get(provider_type)
def get_provider_class(provider_type: str) -> Type[BaseVectorProvider] | None:
entry = get_provider_entry(provider_type)
if not entry:
return None
return entry.get("class") # type: ignore[return-value]
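
Editor's note: the registry maps a provider's type string to its class plus UI metadata, and lookups quietly return None for unknown types. A reduced, runnable sketch with a toy provider standing in for the real classes:

from __future__ import annotations
from typing import Dict, Optional, Type

class ToyProvider:  # stand-in for MilvusLiteProvider and friends
    type = "toy"
    label = "Toy"

_REGISTRY: Dict[str, Dict[str, object]] = {
    ToyProvider.type: {"class": ToyProvider, "label": ToyProvider.label},
}

def get_class(provider_type: str) -> Optional[Type[ToyProvider]]:
    entry = _REGISTRY.get(provider_type)
    return entry.get("class") if entry else None  # type: ignore[return-value]

assert get_class("toy") is ToyProvider
assert get_class("missing") is None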

View File

@@ -0,0 +1,41 @@
from __future__ import annotations
from typing import Any, Dict, List
class BaseVectorProvider:
"""向量数据库提供者基础类,所有实际实现需继承该类"""
type: str = ""
label: str = ""
description: str | None = None
enabled: bool = True
config_schema: List[Dict[str, Any]] = []
def __init__(self, config: Dict[str, Any] | None = None):
self.config = config or {}
async def initialize(self) -> None:
"""执行初始化逻辑,例如建立连接"""
raise NotImplementedError
def ensure_collection(self, collection_name: str, vector: bool, dim: int) -> None:
raise NotImplementedError
def upsert_vector(self, collection_name: str, data: Dict[str, Any]) -> None:
raise NotImplementedError
def delete_vector(self, collection_name: str, path: str) -> None:
raise NotImplementedError
def search_vectors(self, collection_name: str, query_embedding, top_k: int):
raise NotImplementedError
def search_by_path(self, collection_name: str, query_path: str, top_k: int):
raise NotImplementedError
def get_all_stats(self) -> Dict[str, Any]:
raise NotImplementedError
def clear_all_data(self) -> None:
raise NotImplementedError
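
Editor's note: any new backend only has to subclass BaseVectorProvider and fill in these methods. A hedged, in-memory sketch of the minimum surface (hypothetical demo code, a plain dict rather than a real ANN index):

from __future__ import annotations
from typing import Any, Dict

class InMemoryProvider:  # hypothetical demo mirroring BaseVectorProvider's surface
    type = "in_memory"
    label = "In-memory (demo)"

    def __init__(self, config: Dict[str, Any] | None = None):
        self.config = config or {}
        self._store: Dict[str, Dict[str, Any]] = {}

    async def initialize(self) -> None:
        pass  # nothing to connect to

    def ensure_collection(self, name: str, vector: bool, dim: int) -> None:
        self._store.setdefault(name, {})

    def upsert_vector(self, name: str, data: Dict[str, Any]) -> None:
        self._store[name][data["path"]] = data

    def delete_vector(self, name: str, path: str) -> None:
        self._store[name].pop(path, None)

p = InMemoryProvider()
p.ensure_collection("images", vector=True, dim=4)
p.upsert_vector("images", {"path": "/a.jpg", "embedding": [0.1, 0.2, 0.3, 0.4]})
p.delete_vector("images", "/a.jpg")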

View File

@@ -0,0 +1,196 @@
from __future__ import annotations
from pathlib import Path
from typing import Any, Dict, List, Optional
from pymilvus import CollectionSchema, DataType, FieldSchema, MilvusClient
from .base import BaseVectorProvider
class MilvusLiteProvider(BaseVectorProvider):
type = "milvus_lite"
label = "Milvus Lite"
description = "Embedded Milvus Lite (local file storage)."
enabled = True
config_schema: List[Dict[str, Any]] = [
{
"key": "db_path",
"label": "Database file path",
"type": "text",
"default": "data/db/milvus.db",
"required": False,
}
]
def __init__(self, config: Dict[str, Any] | None = None):
super().__init__(config)
self.db_path = Path(self.config.get("db_path") or "data/db/milvus.db")
self.client: MilvusClient | None = None
async def initialize(self) -> None:
try:
self.client = MilvusClient(str(self.db_path))
except Exception as exc: # pragma: no cover - depends on local environment
raise RuntimeError(f"Failed to open Milvus Lite at {self.db_path}: {exc}") from exc
def _get_client(self) -> MilvusClient:
if not self.client:
raise RuntimeError("Milvus Lite client is not initialized")
return self.client
@staticmethod
def _to_int(value: Any) -> int:
try:
return int(value)
except (TypeError, ValueError):
return 0
def ensure_collection(self, collection_name: str, vector: bool, dim: int) -> None:
client = self._get_client()
if client.has_collection(collection_name):
return
if vector:
vector_dim = dim if isinstance(dim, int) and dim > 0 else 0
if vector_dim <= 0:
vector_dim = 4096
fields = [
FieldSchema(name="path", dtype=DataType.VARCHAR, max_length=512, is_primary=True, auto_id=False),
FieldSchema(name="embedding", dtype=DataType.FLOAT_VECTOR, dim=vector_dim),
]
schema = CollectionSchema(fields, description="Image vector collection")
client.create_collection(collection_name, schema=schema)
index_params = MilvusClient.prepare_index_params()
index_params.add_index(
field_name="embedding",
index_type="IVF_FLAT",
index_name="vector_index",
metric_type="COSINE",
params={"nlist": 64},
)
client.create_index(collection_name, index_params=index_params)
else:
fields = [
FieldSchema(name="path", dtype=DataType.VARCHAR, max_length=512, is_primary=True, auto_id=False),
]
schema = CollectionSchema(fields, description="Simple file index")
client.create_collection(collection_name, schema=schema)
def upsert_vector(self, collection_name: str, data: Dict[str, Any]) -> None:
self._get_client().upsert(collection_name, data)
def delete_vector(self, collection_name: str, path: str) -> None:
self._get_client().delete(collection_name, ids=[path])
def search_vectors(self, collection_name: str, query_embedding, top_k: int):
search_params = {"metric_type": "COSINE"}
return self._get_client().search(
collection_name,
data=[query_embedding],
anns_field="embedding",
search_params=search_params,
limit=top_k,
output_fields=["path"],
)
def search_by_path(self, collection_name: str, query_path: str, top_k: int):
filter_expr = f"path like '%{query_path}%'" if query_path else "path like '%%'"
results = self._get_client().query(
collection_name,
filter=filter_expr,
limit=top_k,
output_fields=["path"],
)
return [[{"id": r["path"], "distance": 1.0, "entity": {"path": r["path"]}} for r in results]]
def get_all_stats(self) -> Dict[str, Any]:
client = self._get_client()
try:
collection_names = client.list_collections()
except Exception as exc:
raise RuntimeError(f"Failed to list collections: {exc}") from exc
collections: List[Dict[str, Any]] = []
total_vectors = 0
total_estimated_memory = 0
for name in collection_names:
try:
stats = client.get_collection_stats(name) or {}
except Exception:
stats = {}
row_count = self._to_int(stats.get("row_count"))
total_vectors += row_count
dimension: Optional[int] = None
is_vector_collection = False
try:
description = client.describe_collection(name)
except Exception:
description = None
if description:
for field in description.get("fields", []):
if field.get("type") == DataType.FLOAT_VECTOR:
params = field.get("params") or {}
dimension = self._to_int(params.get("dim")) or 4096
is_vector_collection = True
break
estimated_memory = 0
if is_vector_collection and dimension:
estimated_memory = row_count * dimension * 4
total_estimated_memory += estimated_memory
indexes: List[Dict[str, Any]] = []
try:
index_names = client.list_indexes(name) or []
except Exception:
index_names = []
for index_name in index_names:
try:
detail = client.describe_index(name, index_name) or {}
except Exception:
detail = {}
indexes.append(
{
"index_name": index_name,
"index_type": detail.get("index_type"),
"metric_type": detail.get("metric_type"),
"indexed_rows": self._to_int(detail.get("indexed_rows")),
"pending_index_rows": self._to_int(detail.get("pending_index_rows")),
"state": detail.get("state"),
}
)
collections.append(
{
"name": name,
"row_count": row_count,
"dimension": dimension if is_vector_collection else None,
"estimated_memory_bytes": estimated_memory,
"is_vector_collection": is_vector_collection,
"indexes": indexes,
}
)
db_file_size = None
try:
if self.db_path.exists():
db_file_size = self.db_path.stat().st_size
except OSError:
db_file_size = None
return {
"collections": collections,
"collection_count": len(collections),
"total_vectors": total_vectors,
"estimated_total_memory_bytes": total_estimated_memory,
"db_file_size_bytes": db_file_size,
}
def clear_all_data(self) -> None:
client = self._get_client()
for collection_name in client.list_collections():
client.drop_collection(collection_name)
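
Editor's note: get_all_stats estimates memory as row_count * dimension * 4, i.e. float32 vectors at 4 bytes per component. A quick worked example:

row_count, dimension = 10_000, 4096
estimated_bytes = row_count * dimension * 4   # float32 = 4 bytes per component
print(estimated_bytes)           # 163840000
print(estimated_bytes / 2**20)   # 156.25 MiB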

View File

@@ -0,0 +1,197 @@
from __future__ import annotations
from typing import Any, Dict, List, Optional
from pymilvus import CollectionSchema, DataType, FieldSchema, MilvusClient
from .base import BaseVectorProvider
class MilvusServerProvider(BaseVectorProvider):
type = "milvus_server"
label = "Milvus Server"
description = "Remote Milvus instance accessed via URI."
enabled = True
config_schema: List[Dict[str, Any]] = [
{
"key": "uri",
"label": "Server URI",
"type": "text",
"required": True,
"placeholder": "http://localhost:19530",
},
{
"key": "token",
"label": "Token",
"type": "password",
"required": False,
"placeholder": "user:password",
},
]
def __init__(self, config: Dict[str, Any] | None = None):
super().__init__(config)
self.client: MilvusClient | None = None
async def initialize(self) -> None:
uri = self.config.get("uri")
if not uri:
raise RuntimeError("Milvus Server URI is required")
try:
self.client = MilvusClient(uri=uri, token=self.config.get("token"))
except Exception as exc: # pragma: no cover - depends on remote availability
raise RuntimeError(f"Failed to connect to Milvus Server {uri}: {exc}") from exc
def _get_client(self) -> MilvusClient:
if not self.client:
raise RuntimeError("Milvus Server client is not initialized")
return self.client
@staticmethod
def _to_int(value: Any) -> int:
try:
return int(value)
except (TypeError, ValueError):
return 0
def ensure_collection(self, collection_name: str, vector: bool, dim: int) -> None:
client = self._get_client()
if client.has_collection(collection_name):
return
if vector:
vector_dim = dim if isinstance(dim, int) and dim > 0 else 0
if vector_dim <= 0:
vector_dim = 4096
fields = [
FieldSchema(name="path", dtype=DataType.VARCHAR, max_length=512, is_primary=True, auto_id=False),
FieldSchema(name="embedding", dtype=DataType.FLOAT_VECTOR, dim=vector_dim),
]
schema = CollectionSchema(fields, description="Image vector collection")
client.create_collection(collection_name, schema=schema)
index_params = MilvusClient.prepare_index_params()
index_params.add_index(
field_name="embedding",
index_type="IVF_FLAT",
index_name="vector_index",
metric_type="COSINE",
params={"nlist": 64},
)
client.create_index(collection_name, index_params=index_params)
else:
fields = [
FieldSchema(name="path", dtype=DataType.VARCHAR, max_length=512, is_primary=True, auto_id=False),
]
schema = CollectionSchema(fields, description="Simple file index")
client.create_collection(collection_name, schema=schema)
def upsert_vector(self, collection_name: str, data: Dict[str, Any]) -> None:
self._get_client().upsert(collection_name, data)
def delete_vector(self, collection_name: str, path: str) -> None:
self._get_client().delete(collection_name, ids=[path])
def search_vectors(self, collection_name: str, query_embedding, top_k: int):
search_params = {"metric_type": "COSINE"}
return self._get_client().search(
collection_name,
data=[query_embedding],
anns_field="embedding",
search_params=search_params,
limit=top_k,
output_fields=["path"],
)
def search_by_path(self, collection_name: str, query_path: str, top_k: int):
filter_expr = f"path like '%{query_path}%'" if query_path else "path like '%%'"
results = self._get_client().query(
collection_name,
filter=filter_expr,
limit=top_k,
output_fields=["path"],
)
return [[{"id": r["path"], "distance": 1.0, "entity": {"path": r["path"]}} for r in results]]
def get_all_stats(self) -> Dict[str, Any]:
client = self._get_client()
try:
collection_names = client.list_collections()
except Exception as exc:
raise RuntimeError(f"Failed to list collections: {exc}") from exc
collections: List[Dict[str, Any]] = []
total_vectors = 0
total_estimated_memory = 0
for name in collection_names:
try:
stats = client.get_collection_stats(name) or {}
except Exception:
stats = {}
row_count = self._to_int(stats.get("row_count"))
total_vectors += row_count
dimension: Optional[int] = None
is_vector_collection = False
try:
description = client.describe_collection(name)
except Exception:
description = None
if description:
for field in description.get("fields", []):
if field.get("type") == DataType.FLOAT_VECTOR:
params = field.get("params") or {}
dimension = self._to_int(params.get("dim")) or 4096
is_vector_collection = True
break
estimated_memory = 0
if is_vector_collection and dimension:
estimated_memory = row_count * dimension * 4
total_estimated_memory += estimated_memory
indexes: List[Dict[str, Any]] = []
try:
index_names = client.list_indexes(name) or []
except Exception:
index_names = []
for index_name in index_names:
try:
detail = client.describe_index(name, index_name) or {}
except Exception:
detail = {}
indexes.append(
{
"index_name": index_name,
"index_type": detail.get("index_type"),
"metric_type": detail.get("metric_type"),
"indexed_rows": self._to_int(detail.get("indexed_rows")),
"pending_index_rows": self._to_int(detail.get("pending_index_rows")),
"state": detail.get("state"),
}
)
collections.append(
{
"name": name,
"row_count": row_count,
"dimension": dimension if is_vector_collection else None,
"estimated_memory_bytes": estimated_memory,
"is_vector_collection": is_vector_collection,
"indexes": indexes,
}
)
return {
"collections": collections,
"collection_count": len(collections),
"total_vectors": total_vectors,
"estimated_total_memory_bytes": total_estimated_memory,
"db_file_size_bytes": None,
}
def clear_all_data(self) -> None:
client = self._get_client()
for collection_name in client.list_collections():
client.drop_collection(collection_name)

View File

@@ -0,0 +1,237 @@
from __future__ import annotations
from typing import Any, Dict, List, Optional, Sequence
from uuid import NAMESPACE_URL, uuid5
from qdrant_client import QdrantClient
from qdrant_client.http import models as qmodels
from .base import BaseVectorProvider
class QdrantProvider(BaseVectorProvider):
type = "qdrant"
label = "Qdrant"
description = "Qdrant vector database (HTTP API)."
enabled = True
config_schema: List[Dict[str, Any]] = [
{
"key": "url",
"label": "Server URL",
"type": "text",
"required": True,
"placeholder": "http://localhost:6333",
},
{
"key": "api_key",
"label": "API Key",
"type": "password",
"required": False,
},
]
def __init__(self, config: Dict[str, Any] | None = None):
super().__init__(config)
self.client: Optional[QdrantClient] = None
async def initialize(self) -> None:
url = (self.config.get("url") or "").strip()
if not url:
raise RuntimeError("Qdrant URL is required")
api_key = self.config.get("api_key") or None
try:
client = QdrantClient(url=url, api_key=api_key)
# Simple connectivity check
client.get_collections()
self.client = client
except Exception as exc: # pragma: no cover - depends on the external service
raise RuntimeError(f"Failed to connect to Qdrant at {url}: {exc}") from exc
def _get_client(self) -> QdrantClient:
if not self.client:
raise RuntimeError("Qdrant client is not initialized")
return self.client
@staticmethod
def _vector_params(vector: bool, dim: int) -> qmodels.VectorParams:
size = dim if vector and isinstance(dim, int) and dim > 0 else 1
return qmodels.VectorParams(size=size, distance=qmodels.Distance.COSINE)
def ensure_collection(self, collection_name: str, vector: bool, dim: int) -> None:
client = self._get_client()
try:
if client.collection_exists(collection_name):
return
except Exception as exc: # pragma: no cover - depends on the external service
raise RuntimeError(f"Failed to check Qdrant collection '{collection_name}': {exc}") from exc
vectors_config = self._vector_params(vector, dim)
try:
client.create_collection(collection_name=collection_name, vectors_config=vectors_config)
except Exception as exc: # pragma: no cover
if "already exists" in str(exc).lower():
return
raise RuntimeError(f"Failed to create Qdrant collection '{collection_name}': {exc}") from exc
@staticmethod
def _point_id(path: str) -> str:
return str(uuid5(NAMESPACE_URL, path))
def _prepare_point(self, data: Dict[str, Any]) -> qmodels.PointStruct:
path = data.get("path")
if not path:
raise ValueError("Qdrant upsert requires 'path' in data")
embedding = data.get("embedding")
if embedding is None:
vector = [0.0]
else:
vector = [float(x) for x in embedding]
payload = {"path": path}
return qmodels.PointStruct(id=self._point_id(path), vector=vector, payload=payload)
def upsert_vector(self, collection_name: str, data: Dict[str, Any]) -> None:
client = self._get_client()
point = self._prepare_point(data)
client.upsert(collection_name=collection_name, wait=True, points=[point])
def delete_vector(self, collection_name: str, path: str) -> None:
client = self._get_client()
selector = qmodels.PointIdsList(points=[self._point_id(path)])
client.delete(collection_name=collection_name, points_selector=selector, wait=True)
def _format_search_results(self, points: Sequence[qmodels.ScoredPoint]):
return [
{
"id": point.id,
"distance": point.score,
"entity": {"path": (point.payload or {}).get("path")},
}
for point in points
]
def search_vectors(self, collection_name: str, query_embedding, top_k: int):
client = self._get_client()
vector = [float(x) for x in query_embedding]
points = client.search(
collection_name=collection_name,
query_vector=vector,
limit=top_k,
with_payload=True,
)
return [self._format_search_results(points)]
def search_by_path(self, collection_name: str, query_path: str, top_k: int):
client = self._get_client()
results: List[Dict[str, Any]] = []
offset: Optional[str | int] = None
remaining = max(top_k, 1)
while len(results) < top_k:
batch_size = min(max(remaining * 2, 10), 200)
records, next_offset = client.scroll(
collection_name=collection_name,
limit=batch_size,
offset=offset,
with_payload=True,
)
if not records:
break
for record in records:
path = (record.payload or {}).get("path")
if query_path and path:
if query_path not in path:
continue
results.append({"id": record.id, "distance": 1.0, "entity": {"path": path}})
if len(results) >= top_k:
break
if next_offset is None or len(results) >= top_k:
break
offset = next_offset
remaining = top_k - len(results)
return [results]
def _extract_vector_config(self, vectors) -> Optional[qmodels.VectorParams]:
if isinstance(vectors, qmodels.VectorParams):
return vectors
if isinstance(vectors, dict):
for value in vectors.values():
if isinstance(value, qmodels.VectorParams):
return value
return None
def get_all_stats(self) -> Dict[str, Any]:
client = self._get_client()
try:
response = client.get_collections()
except Exception as exc: # pragma: no cover
raise RuntimeError(f"Failed to list Qdrant collections: {exc}") from exc
collections: List[Dict[str, Any]] = []
total_vectors = 0
total_estimated_memory = 0
for description in response.collections or []:
name = description.name
try:
info = client.get_collection(name)
except Exception:
continue
row_count = int(info.points_count or 0)
total_vectors += row_count
vector_params = self._extract_vector_config(info.config.params.vectors if info.config and info.config.params else None)
dimension = int(vector_params.size) if vector_params and vector_params.size else None
estimated_memory = row_count * dimension * 4 if dimension else 0
total_estimated_memory += estimated_memory
distance = str(vector_params.distance) if vector_params and vector_params.distance else None
indexed_rows = int(info.indexed_vectors_count or 0)
pending_rows = max(row_count - indexed_rows, 0)
collections.append(
{
"name": name,
"row_count": row_count,
"dimension": dimension,
"estimated_memory_bytes": estimated_memory,
"is_vector_collection": dimension is not None and dimension > 1,
"indexes": [
{
"index_name": "hnsw",
"index_type": "HNSW",
"metric_type": distance,
"indexed_rows": indexed_rows,
"pending_index_rows": pending_rows,
"state": info.status,
}
],
}
)
return {
"collections": collections,
"collection_count": len(collections),
"total_vectors": total_vectors,
"estimated_total_memory_bytes": total_estimated_memory,
"db_file_size_bytes": None,
}
def clear_all_data(self) -> None:
client = self._get_client()
try:
response = client.get_collections()
except Exception as exc: # pragma: no cover
raise RuntimeError(f"Failed to list Qdrant collections: {exc}") from exc
for description in response.collections or []:
try:
client.delete_collection(description.name)
except Exception:
continue
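
Editor's note: Qdrant point IDs must be UUIDs or unsigned integers, so the provider derives a deterministic UUID from the file path with uuid5; the same path always yields the same point ID, which is what makes upserts idempotent:

from uuid import NAMESPACE_URL, uuid5

a = uuid5(NAMESPACE_URL, "/photos/cat.jpg")
b = uuid5(NAMESPACE_URL, "/photos/cat.jpg")
assert a == b  # deterministic: re-upserting the same path overwrites one point
print(a)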

View File

@@ -0,0 +1,99 @@
from __future__ import annotations
import asyncio
from typing import Any, Dict, Optional
from .config_manager import VectorDBConfigManager
from .providers import get_provider_class, get_provider_entry
from .providers.base import BaseVectorProvider
DEFAULT_VECTOR_DIMENSION = 4096
class VectorDBService:
_instance: "VectorDBService" | None = None
def __new__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = super().__new__(cls)
return cls._instance
def __init__(self):
if not hasattr(self, "_provider"):
self._provider: Optional[BaseVectorProvider] = None
self._provider_type: Optional[str] = None
self._provider_config: Dict[str, Any] | None = None
self._lock = asyncio.Lock()
async def _ensure_provider(self) -> BaseVectorProvider:
if self._provider is None:
await self.reload()
assert self._provider is not None # for type checker
return self._provider
async def reload(self) -> BaseVectorProvider:
async with self._lock:
provider_type, provider_config = await VectorDBConfigManager.load_config()
normalized_config = dict(provider_config or {})
if (
self._provider
and self._provider_type == provider_type
and self._provider_config == normalized_config
):
return self._provider
entry = get_provider_entry(provider_type)
if not entry:
raise RuntimeError(f"Unknown vector database provider: {provider_type}")
if not entry.get("enabled", True):
raise RuntimeError(f"Vector database provider '{provider_type}' is disabled")
provider_cls = get_provider_class(provider_type)
if not provider_cls:
raise RuntimeError(f"Provider class not found for '{provider_type}'")
provider = provider_cls(provider_config)
await provider.initialize()
self._provider = provider
self._provider_type = provider_type
self._provider_config = normalized_config
return provider
async def ensure_collection(self, collection_name: str, vector: bool = True, dim: int = DEFAULT_VECTOR_DIMENSION) -> None:
provider = await self._ensure_provider()
provider.ensure_collection(collection_name, vector, dim)
async def upsert_vector(self, collection_name: str, data: Dict[str, Any]) -> None:
provider = await self._ensure_provider()
provider.upsert_vector(collection_name, data)
async def delete_vector(self, collection_name: str, path: str) -> None:
provider = await self._ensure_provider()
provider.delete_vector(collection_name, path)
async def search_vectors(self, collection_name: str, query_embedding, top_k: int = 5):
provider = await self._ensure_provider()
return provider.search_vectors(collection_name, query_embedding, top_k)
async def search_by_path(self, collection_name: str, query_path: str, top_k: int = 20):
provider = await self._ensure_provider()
return provider.search_by_path(collection_name, query_path, top_k)
async def get_all_stats(self) -> Dict[str, Any]:
provider = await self._ensure_provider()
return provider.get_all_stats()
async def clear_all_data(self) -> None:
provider = await self._ensure_provider()
provider.clear_all_data()
async def current_provider(self) -> Dict[str, Any]:
provider_type, provider_config = await VectorDBConfigManager.load_config()
entry = get_provider_entry(provider_type) or {}
return {
"type": provider_type,
"config": provider_config,
"label": entry.get("label"),
"enabled": entry.get("enabled", True),
}
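
Editor's note: VectorDBService is a process-wide singleton; __new__ always hands back the same instance and __init__ guards its one-time setup with hasattr. A reduced, runnable sketch of that pattern:

class Singleton:
    _instance = None

    def __new__(cls, *args, **kwargs):
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self):
        if not hasattr(self, "_ready"):  # one-time setup only
            self._ready = True

assert Singleton() is Singleton()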

View File

@@ -1,5 +1,6 @@
from pathlib import Path
from typing import Dict, Tuple, Any, Union, AsyncIterator
from __future__ import annotations
from typing import Dict, Tuple, Any, Union, AsyncIterator, List, TYPE_CHECKING
from fastapi import HTTPException
import mimetypes
from fastapi.responses import Response
@@ -7,8 +8,11 @@ import time
import hmac
import hashlib
import base64
from pathlib import Path
import shutil
import aiofiles
from models import Mount
from models import StorageAdapter
from .adapters.registry import runtime_registry
from api.response import page
from .thumbnail import is_image_filename, is_raw_filename
@@ -18,16 +22,46 @@ from services.logging import LogService
from services.config import ConfigCenter
async def resolve_mount(path: str) -> Tuple[Mount, str]:
CROSS_TRANSFER_TEMP_ROOT = Path("data/tmp/cross_transfer")
if TYPE_CHECKING:
from services.task_queue import Task
def _build_absolute_path(mount_path: str, rel_path: str) -> str:
rel_norm = rel_path.lstrip('/')
mount_norm = mount_path.rstrip('/')
if not mount_norm:
return '/' + rel_norm if rel_norm else '/'
return f"{mount_norm}/{rel_norm}" if rel_norm else mount_norm
def _join_rel(base: str, name: str) -> str:
if not base:
return name.lstrip('/')
if not name:
return base
return f"{base.rstrip('/')}/{name.lstrip('/')}"
def _parent_rel(rel: str) -> str:
if not rel:
return ''
if '/' not in rel:
return ''
return rel.rsplit('/', 1)[0]
async def resolve_adapter_by_path(path: str) -> Tuple[StorageAdapter, str]:
norm = path if path.startswith('/') else '/' + path
mounts = await Mount.filter(enabled=True)
adapters = await StorageAdapter.filter(enabled=True)
best = None
for m in mounts:
if norm == m.path or norm.startswith(m.path.rstrip('/') + '/'):
if (best is None) or len(m.path) > len(best.path):
best = m
for a in adapters:
if norm == a.path or norm.startswith(a.path.rstrip('/') + '/'):
if (best is None) or len(a.path) > len(best.path):
best = a
if not best:
raise HTTPException(404, detail="No mount for path")
raise HTTPException(404, detail="No storage adapter for path")
rel = norm[len(best.path):].lstrip('/')
return best, rel
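
Editor's note: resolve_adapter_by_path does longest-prefix matching, so the most specific adapter wins when mount paths nest. A standalone sketch of the same rule:

from __future__ import annotations

def best_mount(norm: str, mount_paths: list[str]) -> str | None:
    best = None
    for p in mount_paths:
        if norm == p or norm.startswith(p.rstrip('/') + '/'):
            if best is None or len(p) > len(best):
                best = p
    return best

assert best_mount("/data/photos/cat.jpg", ["/data", "/data/photos"]) == "/data/photos"
assert best_mount("/elsewhere", ["/data"]) is None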
@@ -35,16 +69,22 @@ async def resolve_mount(path: str) -> Tuple[Mount, str]:
async def resolve_adapter_and_rel(path: str):
"""返回 (adapter_instance, mount, effective_root, rel_path)."""
"""返回 (adapter_instance, adapter_model, effective_root, rel_path)."""
norm = path if path.startswith('/') else '/' + path
try:
mount, rel = await resolve_mount(norm)
adapter_model, rel = await resolve_adapter_by_path(norm)
except HTTPException as e:
raise e
await mount.fetch_related("adapter")
adapter_instance = runtime_registry.get(mount.adapter_id)
effective_root = adapter_instance.get_effective_root(mount.sub_path)
return adapter_instance, mount, effective_root, rel
adapter_instance = runtime_registry.get(adapter_model.id)
if not adapter_instance:
await runtime_registry.refresh()
adapter_instance = runtime_registry.get(adapter_model.id)
if not adapter_instance:
raise HTTPException(
404, detail=f"Adapter instance for ID {adapter_model.id} not found or failed to load."
)
effective_root = adapter_instance.get_effective_root(adapter_model.sub_path)
return adapter_instance, adapter_model, effective_root, rel
async def _ensure_method(adapter: Any, method: str):
@@ -54,29 +94,55 @@ async def _ensure_method(adapter: Any, method: str):
return func
async def list_virtual_dir(path: str, page_num: int = 1, page_size: int = 50) -> Dict:
async def path_is_directory(path: str) -> bool:
"""判断给定路径是否为目录。"""
adapter_instance, _, root, rel = await resolve_adapter_and_rel(path)
rel = rel.rstrip('/')
if rel == '':
return True
stat_func = getattr(adapter_instance, "stat_file", None)
if not callable(stat_func):
raise HTTPException(501, detail="Adapter does not implement stat_file")
try:
info = await stat_func(root, rel)
except FileNotFoundError:
raise HTTPException(404, detail="Path not found")
if isinstance(info, dict):
return bool(info.get("is_dir"))
return False
async def list_virtual_dir(path: str, page_num: int = 1, page_size: int = 50, sort_by: str = "name", sort_order: str = "asc") -> Dict:
norm = (path if path.startswith('/') else '/' + path).rstrip('/') or '/'
mounts = await Mount.filter(enabled=True).prefetch_related("adapter")
adapters = await StorageAdapter.filter(enabled=True)
child_mount_entries = []
norm_prefix = norm.rstrip('/')
for m in mounts:
if m.path == norm:
for a in adapters:
if a.path == norm:
continue
if m.path.startswith(norm_prefix + '/'):
tail = m.path[len(norm_prefix):].lstrip('/')
if a.path.startswith(norm_prefix + '/'):
tail = a.path[len(norm_prefix):].lstrip('/')
if '/' not in tail:
child_mount_entries.append(tail)
child_mount_entries = sorted(set(child_mount_entries))
try:
mount, rel = await resolve_mount(norm)
await mount.fetch_related("adapter")
adapter = runtime_registry.get(mount.adapter_id)
effective_root = adapter.get_effective_root(mount.sub_path)
adapter_model, rel = await resolve_adapter_by_path(norm)
adapter_instance = runtime_registry.get(adapter_model.id)
if not adapter_instance:
await runtime_registry.refresh()
adapter_instance = runtime_registry.get(adapter_model.id)
if adapter_instance:
effective_root = adapter_instance.get_effective_root(adapter_model.sub_path)
else:
adapter_model = None
effective_root = ""
rel = ""
except HTTPException:
mount = None
adapter = None
adapter_model = None
adapter_instance = None
effective_root = ''
rel = ''
@@ -84,10 +150,10 @@ async def list_virtual_dir(path: str, page_num: int = 1, page_size: int = 50) ->
adapter_total = 0
covered = set()
if mount and adapter:
list_dir = await _ensure_method(adapter, "list_dir")
if adapter_model and adapter_instance:
list_dir = await _ensure_method(adapter_instance, "list_dir")
try:
adapter_entries, adapter_total = await list_dir(effective_root, rel, page_num, page_size)
adapter_entries, adapter_total = await list_dir(effective_root, rel, page_num, page_size, sort_by, sort_order)
except NotADirectoryError:
raise HTTPException(400, detail="Not a directory")
@@ -105,32 +171,47 @@ async def list_virtual_dir(path: str, page_num: int = 1, page_size: int = 50) ->
ent['is_image'] = is_image_filename(ent['name'])
else:
ent['is_image'] = False
all_entries = adapter_entries + mount_entries
all_entries.sort(key=lambda x: (not x.get("is_dir"), x["name"].lower()))
total_entries = adapter_total + len(mount_entries)
if mount_entries:
reverse = sort_order.lower() == "desc"
def get_sort_key(item):
key = (not item.get("is_dir"),)
sort_field = sort_by.lower()
if sort_field == "name":
key += (item["name"].lower(),)
elif sort_field == "size":
key += (item.get("size", 0),)
elif sort_field == "mtime":
key += (item.get("mtime", 0),)
else:
key += (item["name"].lower(),)
return key
all_entries.sort(key=get_sort_key, reverse=reverse)
total_entries = adapter_total + len(mount_entries)
start_idx = (page_num - 1) * page_size
end_idx = start_idx + page_size
page_entries = all_entries[start_idx:end_idx]
return page(page_entries, total_entries, page_num, page_size)
else:
return page(adapter_entries, adapter_total, page_num, page_size)
return page(adapter_entries, adapter_total, page_num, page_size)
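
Editor's note: when virtual mount entries are merged into the adapter listing, the combined result is re-sorted with directories first and the requested field second. A standalone example of the default name key:

entries = [
    {"name": "b.txt", "is_dir": False},
    {"name": "a", "is_dir": True},
    {"name": "A.txt", "is_dir": False},
]
# Directories sort first (False < True), then case-insensitive name.
entries.sort(key=lambda e: (not e.get("is_dir"), e["name"].lower()))
print([e["name"] for e in entries])  # ['a', 'A.txt', 'b.txt']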
async def read_file(path: str) -> Union[bytes, Any]:
adapter, _mount, root, rel = await resolve_adapter_and_rel(path)
adapter_instance, _, root, rel = await resolve_adapter_and_rel(path)
if rel.endswith('/') or rel == '':
raise HTTPException(400, detail="Path is a directory")
read_func = await _ensure_method(adapter, "read_file")
read_func = await _ensure_method(adapter_instance, "read_file")
return await read_func(root, rel)
async def write_file(path: str, data: bytes):
adapter, _mount, root, rel = await resolve_adapter_and_rel(path)
adapter_instance, _, root, rel = await resolve_adapter_and_rel(path)
if rel.endswith('/'):
raise HTTPException(400, detail="Invalid file path")
write_func = await _ensure_method(adapter, "write_file")
write_func = await _ensure_method(adapter_instance, "write_file")
await write_func(root, rel, data)
await task_service.trigger_tasks("file_written", path)
await LogService.action(
@@ -139,10 +220,10 @@ async def write_file(path: str, data: bytes):
async def write_file_stream(path: str, data_iter: AsyncIterator[bytes], overwrite: bool = True):
adapter, _mount, root, rel = await resolve_adapter_and_rel(path)
adapter_instance, _, root, rel = await resolve_adapter_and_rel(path)
if rel.endswith('/'):
raise HTTPException(400, detail="Invalid file path")
exists_func = getattr(adapter, "exists", None)
exists_func = getattr(adapter_instance, "exists", None)
if not overwrite and callable(exists_func):
try:
if await exists_func(root, rel):
@@ -153,7 +234,7 @@ async def write_file_stream(path: str, data_iter: AsyncIterator[bytes], overwrit
pass
size = 0
stream_func = getattr(adapter, "write_file_stream", None)
stream_func = getattr(adapter_instance, "write_file_stream", None)
if callable(stream_func):
size = await stream_func(root, rel, data_iter)
else:
@@ -161,7 +242,7 @@ async def write_file_stream(path: str, data_iter: AsyncIterator[bytes], overwrit
async for chunk in data_iter:
if chunk:
buf.extend(chunk)
write_func = await _ensure_method(adapter, "write_file")
write_func = await _ensure_method(adapter_instance, "write_file")
await write_func(root, rel, bytes(buf))
size = len(buf)
@@ -175,40 +256,58 @@ async def write_file_stream(path: str, data_iter: AsyncIterator[bytes], overwrit
async def make_dir(path: str):
adapter, _mount, root, rel = await resolve_adapter_and_rel(path)
adapter_instance, _, root, rel = await resolve_adapter_and_rel(path)
if not rel:
raise HTTPException(400, detail="Cannot create root")
mkdir_func = await _ensure_method(adapter, "mkdir")
mkdir_func = await _ensure_method(adapter_instance, "mkdir")
await mkdir_func(root, rel)
await LogService.action("virtual_fs", f"Created directory {path}", details={"path": path})
async def delete_path(path: str):
adapter, _mount, root, rel = await resolve_adapter_and_rel(path)
adapter_instance, _, root, rel = await resolve_adapter_and_rel(path)
if not rel:
raise HTTPException(400, detail="Cannot delete root")
delete_func = await _ensure_method(adapter, "delete")
delete_func = await _ensure_method(adapter_instance, "delete")
await delete_func(root, rel)
await task_service.trigger_tasks("file_deleted", path)
await LogService.action("virtual_fs", f"Deleted {path}", details={"path": path})
async def move_path(src: str, dst: str, overwrite: bool = False, return_debug: bool = True):
adapter_s, mount_s, root_s, rel_s = await resolve_adapter_and_rel(src)
adapter_d, mount_d, root_d, rel_d = await resolve_adapter_and_rel(dst)
async def move_path(
src: str,
dst: str,
overwrite: bool = False,
return_debug: bool = True,
allow_cross: bool = False,
):
adapter_s, adapter_model_s, root_s, rel_s = await resolve_adapter_and_rel(src)
adapter_d, adapter_model_d, root_d, rel_d = await resolve_adapter_and_rel(dst)
debug_info = {
"src": src, "dst": dst,
"rel_s": rel_s, "rel_d": rel_d,
"root_s": root_s, "root_d": root_d,
"overwrite": overwrite
"overwrite": overwrite,
"operation": "move",
"queued": False,
}
if mount_s.id != mount_d.id:
raise HTTPException(400, detail="Cross-mount move not supported")
if not rel_s:
raise HTTPException(400, detail="Cannot move or rename mount root")
if not rel_d:
raise HTTPException(400, detail="Invalid destination")
if adapter_model_s.id != adapter_model_d.id:
if not allow_cross:
raise HTTPException(400, detail="Cross-adapter move not supported")
queue_info = await _enqueue_cross_mount_transfer(
operation="move",
src=src,
dst=dst,
overwrite=overwrite,
)
debug_info.update(queue_info)
return debug_info if return_debug else None
exists_func = getattr(adapter_s, "exists", None)
stat_func = getattr(adapter_s, "stat_path", None)
delete_func = await _ensure_method(adapter_s, "delete")
@@ -266,16 +365,16 @@ async def move_path(src: str, dst: str, overwrite: bool = False, return_debug: b
async def rename_path(src: str, dst: str, overwrite: bool = False, return_debug: bool = True):
adapter_s, mount_s, root_s, rel_s = await resolve_adapter_and_rel(src)
adapter_d, mount_d, root_d, rel_d = await resolve_adapter_and_rel(dst)
adapter_s, adapter_model_s, root_s, rel_s = await resolve_adapter_and_rel(src)
adapter_d, adapter_model_d, root_d, rel_d = await resolve_adapter_and_rel(dst)
debug_info = {
"src": src, "dst": dst,
"rel_s": rel_s, "rel_d": rel_d,
"root_s": root_s, "root_d": root_d,
"overwrite": overwrite
}
if mount_s.id != mount_d.id:
raise HTTPException(400, detail="Cross-mount rename not supported")
if adapter_model_s.id != adapter_model_d.id:
raise HTTPException(400, detail="Cross-adapter rename not supported")
if not rel_s:
raise HTTPException(400, detail="Cannot rename mount root")
if not rel_d:
@@ -338,7 +437,7 @@ async def rename_path(src: str, dst: str, overwrite: bool = False, return_debug:
async def stream_file(path: str, range_header: str | None):
adapter, mount, root, rel = await resolve_adapter_and_rel(path)
adapter_instance, _, root, rel = await resolve_adapter_and_rel(path)
if not rel or rel.endswith('/'):
raise HTTPException(400, detail="Path is a directory")
if is_raw_filename(rel):
@@ -371,7 +470,7 @@ async def stream_file(path: str, range_header: str | None):
except Exception as e:
raise HTTPException(500, detail=f"RAW file processing failed: {e}")
stream_impl = getattr(adapter, "stream_file", None)
stream_impl = getattr(adapter_instance, "stream_file", None)
if callable(stream_impl):
return await stream_impl(root, rel, range_header)
data = await read_file(path)
@@ -380,29 +479,47 @@ async def stream_file(path: str, range_header: str | None):
async def stat_file(path: str):
adapter, _mount, root, rel = await resolve_adapter_and_rel(path)
stat_func = getattr(adapter, "stat_file", None)
adapter_instance, _, root, rel = await resolve_adapter_and_rel(path)
stat_func = getattr(adapter_instance, "stat_file", None)
if not callable(stat_func):
raise HTTPException(501, detail="Adapter does not implement stat_file")
return await stat_func(root, rel)
async def copy_path(src: str, dst: str, overwrite: bool = False, return_debug: bool = True):
adapter_s, mount_s, root_s, rel_s = await resolve_adapter_and_rel(src)
adapter_d, mount_d, root_d, rel_d = await resolve_adapter_and_rel(dst)
async def copy_path(
src: str,
dst: str,
overwrite: bool = False,
return_debug: bool = True,
allow_cross: bool = False,
):
adapter_s, adapter_model_s, root_s, rel_s = await resolve_adapter_and_rel(src)
adapter_d, adapter_model_d, root_d, rel_d = await resolve_adapter_and_rel(dst)
debug_info = {
"src": src, "dst": dst,
"rel_s": rel_s, "rel_d": rel_d,
"root_s": root_s, "root_d": root_d,
"overwrite": overwrite
"overwrite": overwrite,
"operation": "copy",
"queued": False,
}
if mount_s.id != mount_d.id:
raise HTTPException(400, detail="Cross-mount copy not supported")
if not rel_s:
raise HTTPException(400, detail="Cannot copy mount root")
if not rel_d:
raise HTTPException(400, detail="Invalid destination")
if adapter_model_s.id != adapter_model_d.id:
if not allow_cross:
raise HTTPException(400, detail="Cross-adapter copy not supported")
queue_info = await _enqueue_cross_mount_transfer(
operation="copy",
src=src,
dst=dst,
overwrite=overwrite,
)
debug_info.update(queue_info)
return debug_info if return_debug else None
exists_func = getattr(adapter_s, "exists", None)
stat_func = getattr(adapter_s, "stat_path", None)
delete_func = getattr(adapter_s, "delete", None)
@@ -448,28 +565,424 @@ async def copy_path(src: str, dst: str, overwrite: bool = False, return_debug: b
return debug_info if return_debug else None
async def process_file(path: str, processor_type: str, config: dict, save_to: str = None):
"""
使用指定处理器处理文件,并可选择保存到新路径
:param path: 源文件路径
:param processor_type: 处理器类型
:param config: 处理器配置
:param save_to: 保存路径(可选),不指定则只返回处理结果
:return: 处理后的文件内容或保存结果
"""
data = await read_file(path)
async def _enqueue_cross_mount_transfer(operation: str, src: str, dst: str, overwrite: bool) -> Dict[str, Any]:
if operation not in {"move", "copy"}:
raise HTTPException(400, detail="Unsupported transfer operation")
adapter_s, adapter_model_s, _, _ = await resolve_adapter_and_rel(src)
adapter_d, adapter_model_d, root_d, rel_d = await resolve_adapter_and_rel(dst)
if adapter_model_s.id == adapter_model_d.id:
raise HTTPException(400, detail="Cross-adapter transfer requested but adapters are identical")
dst_exists = False
exists_func = getattr(adapter_d, "exists", None)
if callable(exists_func):
dst_exists = await exists_func(root_d, rel_d)
else:
try:
await stat_file(dst)
dst_exists = True
except FileNotFoundError:
dst_exists = False
except HTTPException as exc:
if exc.status_code == 404:
dst_exists = False
else:
raise
if dst_exists and not overwrite:
raise HTTPException(409, detail="Destination already exists")
payload = {
"operation": operation,
"src": src,
"dst": dst,
"overwrite": overwrite,
}
from services.task_queue import task_queue_service
task = await task_queue_service.add_task("cross_mount_transfer", payload)
return {
"queued": True,
"task_id": task.id,
"task_name": "cross_mount_transfer",
"dst_exists": dst_exists,
"cross_adapter": True,
}
async def run_cross_mount_transfer_task(task: "Task") -> Dict[str, Any]:
from services.task_queue import task_queue_service
params = task.task_info or {}
operation = params.get("operation")
src = params.get("src")
dst = params.get("dst")
overwrite = bool(params.get("overwrite", False))
if operation not in {"move", "copy"}:
raise ValueError(f"Unsupported cross mount operation: {operation}")
if not src or not dst:
raise ValueError("Missing src or dst for cross mount transfer")
adapter_s, adapter_model_s, root_s, rel_s = await resolve_adapter_and_rel(src)
adapter_d, adapter_model_d, root_d, rel_d = await resolve_adapter_and_rel(dst)
await task_queue_service.update_meta(task.id, {
"operation": operation,
"src": src,
"dst": dst,
})
if adapter_model_s.id == adapter_model_d.id:
if operation == "move":
await move_path(src, dst, overwrite=overwrite, return_debug=False, allow_cross=False)
else:
await copy_path(src, dst, overwrite=overwrite, return_debug=False, allow_cross=False)
return {
"mode": "direct",
"operation": operation,
"src": src,
"dst": dst,
"files": 0,
"bytes": 0,
}
if not rel_s:
raise ValueError("Cannot transfer mount root")
if not rel_d:
raise ValueError("Invalid destination")
dst_exists = False
exists_func = getattr(adapter_d, "exists", None)
if callable(exists_func):
dst_exists = await exists_func(root_d, rel_d)
else:
try:
await stat_file(dst)
dst_exists = True
except FileNotFoundError:
dst_exists = False
except HTTPException as exc:
if exc.status_code != 404:
raise
if dst_exists and not overwrite:
raise ValueError("Destination already exists")
if dst_exists and overwrite:
await delete_path(dst)
try:
src_stat = await stat_file(src)
except HTTPException as exc:
if exc.status_code == 404:
raise FileNotFoundError(src) from exc
raise
src_is_dir = bool(src_stat.get("is_dir"))
files_to_transfer: List[Dict[str, Any]] = []
dirs_to_create: List[str] = []
await task_queue_service.update_progress(task.id, {
"stage": "preparing",
"percent": 0.0,
"detail": "Collecting source entries",
})
if src_is_dir:
if rel_d:
dirs_to_create.append(rel_d)
list_dir = await _ensure_method(adapter_s, "list_dir")
stack: List[Tuple[str, str, str]] = [(rel_s, rel_d, '')]
page_size = 200
while stack:
current_rel, current_dst_rel, current_relative = stack.pop()
page = 1
while True:
entries, total = await list_dir(root_s, current_rel, page, page_size, "name", "asc")
if not entries and (total or 0) == 0:
break
for entry in entries:
name = entry.get("name")
if not name:
continue
child_rel = _join_rel(current_rel, name)
child_dst_rel = _join_rel(current_dst_rel, name)
child_relative = _join_rel(current_relative, name)
if entry.get("is_dir"):
dirs_to_create.append(child_dst_rel)
stack.append((child_rel, child_dst_rel, child_relative))
else:
files_to_transfer.append({
"src_rel": child_rel,
"dst_rel": child_dst_rel,
"relative_rel": child_relative or name,
"size": entry.get("size"),
"name": name,
})
if total is None or page * page_size >= (total or 0):
break
page += 1
else:
relative_rel = rel_s or (src_stat.get("name") or "file")
files_to_transfer.append({
"src_rel": rel_s,
"dst_rel": rel_d,
"relative_rel": relative_rel,
"size": src_stat.get("size"),
"name": src_stat.get("name") or rel_s.split('/')[-1],
})
parent_dir = _parent_rel(rel_d)
if parent_dir:
dirs_to_create.append(parent_dir)
CROSS_TRANSFER_TEMP_ROOT.mkdir(parents=True, exist_ok=True)
temp_dir = CROSS_TRANSFER_TEMP_ROOT / task.id
temp_dir.mkdir(parents=True, exist_ok=True)
bytes_downloaded = 0
total_dynamic_bytes = sum((f["size"] or 0) for f in files_to_transfer)
try:
for job in files_to_transfer:
src_abs = _build_absolute_path(adapter_model_s.path, job["src_rel"])
data = await read_file(src_abs)
temp_path = temp_dir / job["relative_rel"]
temp_path.parent.mkdir(parents=True, exist_ok=True)
async with aiofiles.open(temp_path, "wb") as f:
await f.write(data)
actual_size = len(data)
job["temp_path"] = temp_path
prev_size = job.get("size") or 0
if prev_size <= 0:
total_dynamic_bytes += actual_size
job_size = actual_size
else:
job_size = prev_size
job["size"] = job_size
bytes_downloaded += actual_size
percent = None
total_for_percent = total_dynamic_bytes if total_dynamic_bytes else bytes_downloaded
if total_for_percent:
percent = min(100.0, round(bytes_downloaded / total_for_percent * 100, 2))
await task_queue_service.update_progress(task.id, {
"stage": "downloading",
"percent": percent,
"bytes_done": bytes_downloaded,
"bytes_total": total_dynamic_bytes or None,
"detail": f"Downloaded {job['name']}",
})
mkdir_func = await _ensure_method(adapter_d, "mkdir")
ensured_dirs: set[str] = set()
async def ensure_dir(rel_path: str):
if not rel_path or rel_path in ensured_dirs:
return
parent = _parent_rel(rel_path)
if parent:
await ensure_dir(parent)
try:
await mkdir_func(root_d, rel_path)
except FileExistsError:
pass
except HTTPException as exc:
if exc.status_code not in {409, 400}:
raise
except Exception:
# Assume directory already exists
pass
ensured_dirs.add(rel_path)
for dir_rel in sorted({d for d in dirs_to_create if d}, key=lambda x: x.count('/')):
await ensure_dir(dir_rel)
uploaded_bytes = 0
total_bytes = sum((f["size"] or 0) for f in files_to_transfer)
async def iter_temp_file(path: Path, chunk_size: int = 512 * 1024):
async with aiofiles.open(path, "rb") as f:
while True:
chunk = await f.read(chunk_size)
if not chunk:
break
yield chunk
for job in files_to_transfer:
parent_dir = _parent_rel(job["dst_rel"])
if parent_dir:
await ensure_dir(parent_dir)
dst_abs = _build_absolute_path(adapter_model_d.path, job["dst_rel"])
temp_path: Path = job["temp_path"]
await write_file_stream(dst_abs, iter_temp_file(temp_path), overwrite=overwrite)
uploaded_bytes += job["size"] or 0
percent = None
if total_bytes:
percent = min(100.0, round(uploaded_bytes / total_bytes * 100, 2))
await task_queue_service.update_progress(task.id, {
"stage": "uploading",
"percent": percent,
"bytes_done": uploaded_bytes,
"bytes_total": total_bytes or None,
"detail": f"Uploaded {job['name']}",
})
if operation == "move":
await delete_path(src)
await task_queue_service.update_progress(task.id, {
"stage": "completed",
"percent": 100.0,
"bytes_done": total_bytes,
"bytes_total": total_bytes,
"detail": "Completed",
})
await task_queue_service.update_meta(task.id, {
"files": len(files_to_transfer),
"directories": len({d for d in dirs_to_create if d}),
"bytes": total_bytes,
"operation": operation,
})
await LogService.action(
"virtual_fs",
f"Cross-adapter {operation} from {src} to {dst}",
details={
"src": src,
"dst": dst,
"operation": operation,
"files": len(files_to_transfer),
"bytes": total_bytes,
},
)
return {
"mode": "cross",
"operation": operation,
"src": src,
"dst": dst,
"files": len(files_to_transfer),
"bytes": total_bytes,
}
finally:
try:
if temp_dir.exists():
shutil.rmtree(temp_dir)
except Exception:
await LogService.info(
"virtual_fs",
"Failed to cleanup cross transfer temp dir",
details={"task_id": task.id, "temp_dir": str(temp_dir)},
)
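
Editor's note: the transfer stages files under data/tmp/cross_transfer/<task_id> and re-uploads them in chunks, which is why progress is reported in separate downloading and uploading phases. A self-contained sketch of the chunked async file iterator used for the upload leg (aiofiles is the same dependency imported above; the temp file here is just for the demo):

import asyncio
import os
import tempfile

import aiofiles

async def demo() -> None:
    fd, path = tempfile.mkstemp()
    os.write(fd, b"x" * 1_000_000)
    os.close(fd)

    async def iter_file(p: str, chunk_size: int = 512 * 1024):
        async with aiofiles.open(p, "rb") as f:
            while True:
                chunk = await f.read(chunk_size)
                if not chunk:
                    break
                yield chunk

    total = 0
    async for chunk in iter_file(path):
        total += len(chunk)
    print(total)  # 1000000
    os.remove(path)

asyncio.run(demo())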
async def process_file(
path: str,
processor_type: str,
config: dict,
save_to: str | None = None,
overwrite: bool = False,
) -> Any:
"""处理指定路径(文件或目录)。目录会递归处理其下所有文件。"""
processor = get_processor(processor_type)
if not processor:
raise HTTPException(
400, detail=f"Processor {processor_type} not found")
result = await processor.process(data, path, config)
if save_to and getattr(processor, "produces_file", False):
raise HTTPException(400, detail=f"Processor {processor_type} not found")
actual_is_dir = await path_is_directory(path)
supported_exts = getattr(processor, "supported_exts", None) or []
allowed_exts = {
str(ext).lower().lstrip('.')
for ext in supported_exts
if isinstance(ext, str)
}
def matches_extension(rel_path: str) -> bool:
if not allowed_exts:
return True
if '.' not in rel_path:
return '' in allowed_exts
ext = rel_path.rsplit('.', 1)[-1].lower()
return ext in allowed_exts or f'.{ext}' in allowed_exts
def coerce_result_bytes(result: Any) -> bytes:
if isinstance(result, Response):
result_bytes = result.body
else:
result_bytes = result
await write_file(save_to, result_bytes)
return {"saved_to": save_to}
return result.body
if isinstance(result, (bytes, bytearray)):
return bytes(result)
if isinstance(result, str):
return result.encode('utf-8')
raise HTTPException(500, detail="Processor must return bytes/Response when produces_file=True")
def build_absolute_path(mount_path: str, rel_path: str) -> str:
rel_norm = rel_path.lstrip('/')
mount_norm = mount_path.rstrip('/')
if not mount_norm:
return '/' + rel_norm if rel_norm else '/'
return f"{mount_norm}/{rel_norm}" if rel_norm else mount_norm
if actual_is_dir:
if save_to:
raise HTTPException(400, detail="Directory processing does not support custom save_to path")
if not overwrite:
raise HTTPException(400, detail="Directory processing requires overwrite")
adapter_instance, adapter_model, root, rel = await resolve_adapter_and_rel(path)
rel = rel.rstrip('/')
list_dir = await _ensure_method(adapter_instance, "list_dir")
processed_count = 0
stack: List[str] = [rel]
page_size = 200
while stack:
current = stack.pop()
page = 1
while True:
entries, total = await list_dir(root, current, page, page_size, "name", "asc")
if not entries and (total or 0) == 0:
break
for entry in entries:
name = entry.get("name")
if not name:
continue
child_rel = f"{current}/{name}" if current else name
if entry.get("is_dir"):
stack.append(child_rel)
continue
if not matches_extension(child_rel):
continue
absolute_path = build_absolute_path(adapter_model.path, child_rel)
data = await read_file(absolute_path)
result = await processor.process(data, absolute_path, config)
if getattr(processor, "produces_file", False):
result_bytes = coerce_result_bytes(result)
await write_file(absolute_path, result_bytes)
processed_count += 1
if total is None or page * page_size >= total:
break
page += 1
return {"processed_files": processed_count}
# Single-file processing
data = await read_file(path)
result = await processor.process(data, path, config)
target_path = save_to
if overwrite and not target_path:
target_path = path
if target_path and getattr(processor, "produces_file", False):
result_bytes = coerce_result_bytes(result)
await write_file(target_path, result_bytes)
return {"saved_to": target_path}
return result
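
Editor's note: matches_extension is case-insensitive and accepts a processor's supported_exts with or without the leading dot. A standalone check:

allowed_exts = {"jpg", ".png"}   # mixed styles, as a processor might declare

def matches(rel_path: str) -> bool:
    if '.' not in rel_path:
        return '' in allowed_exts
    ext = rel_path.rsplit('.', 1)[-1].lower()
    return ext in allowed_exts or f'.{ext}' in allowed_exts

assert matches("photo.JPG")
assert matches("img.png")
assert not matches("doc.pdf")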

setup/foxel.sh (new file, 367 lines)
View File

@@ -0,0 +1,367 @@
#!/bin/bash
#================================================================================
# Foxel one-click deployment and update script
#
# Author: maxage
# Version: 1.7 (adds a download mirror to work around network issues)
# Description: automates installing, configuring, and managing the Foxel
#              project via Docker Compose.
# - Detects an existing installation and offers either an install wizard or a
#   management menu.
# - Detects and installs missing dependencies automatically.
# - Offers a mirror-source switch for users in mainland China.
#
# One-liner:
# bash <(curl -sL "https://raw.githubusercontent.com/DrizzleTime/Foxel/main/setup/foxel.sh?_=$(date +%s)")
#================================================================================
# --- Message helpers ---
info() {
echo "[INFO] $1"
}
warn() {
echo "[WARN] $1"
}
error() {
echo "[ERROR] $1"
}
# --- Basic helpers ---
command_exists() {
command -v "$1" &> /dev/null
}
confirm_action() {
local prompt_message="$1"
printf "%s" "${prompt_message} (y/n): "
read confirmation
if [[ "$confirmation" =~ ^[Yy]$ ]]; then
return 0 # Yes
else
return 1 # No
fi
}
# --- IP detection helpers (print only the IP) ---
get_public_ipv4() {
curl -4 -s --max-time 2 https://api.ipify.org || \
curl -4 -s --max-time 2 https://ifconfig.me/ip || \
curl -4 -s --max-time 2 https://icanhazip.com
}
get_public_ipv6() {
curl -6 -s --max-time 2 https://api64.ipify.org || \
curl -6 -s --max-time 2 https://ifconfig.co
}
get_private_ip() {
# Try several methods to find the primary private IPv4 address
ip -4 route get 1.1.1.1 2>/dev/null | awk -F"src " 'NR==1{print $2}' | awk '{print $1}' || \
hostname -I 2>/dev/null | awk '{for(i=1;i<=NF;i++) if($i ~ /^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$/) {print $i; exit}}' || \
ip -4 addr 2>/dev/null | grep -oP '(?<=inet\s)\d+(\.\d+){3}' | grep -v '127.0.0.1' | head -n 1
}
# --- Dependency and environment checks ---
check_and_install_dependencies() {
info "Checking required dependencies..."
declare -A deps=( [curl]="curl" [openssl]="openssl" [ss]="iproute2" )
local missing_deps=()
for cmd in "${!deps[@]}"; do
if ! command_exists "$cmd"; then
missing_deps+=("${deps[$cmd]}")
fi
done
if [ ${#missing_deps[@]} -gt 0 ]; then
warn "检测到以下依赖项缺失: ${missing_deps[*]}"
if confirm_action "是否尝试自动安装它们?"; then
local pm_cmd=""
if command_exists apt-get; then pm_cmd="sudo apt-get update && sudo apt-get install -y";
elif command_exists yum; then pm_cmd="sudo yum install -y";
elif command_exists dnf; then pm_cmd="sudo dnf install -y";
else error "未检测到 apt, yum 或 dnf。请手动安装: ${missing_deps[*]}"; exit 1; fi
info "即将使用命令安装: '$pm_cmd ${missing_deps[*]}'"
$pm_cmd "${missing_deps[@]}"
for cmd in "${!deps[@]}"; do
if ! command_exists "$cmd"; then error "依赖 '${deps[$cmd]}' 自动安装失败。"; exit 1; fi
done
info "依赖已成功安装。"
else
error "用户取消了安装。请先手动安装依赖: ${missing_deps[*]}"; exit 1
fi
else
info "所有基础依赖均已满足。"
fi
}
initialize_environment() {
check_and_install_dependencies
if ! command_exists docker; then
error "Docker not found. Please install it per the official docs: https://docs.docker.com/engine/install/"; exit 1;
fi
if ! docker info &> /dev/null; then error "The Docker daemon is not running. Please start Docker first."; exit 1; fi
info "Docker environment check passed."
if command_exists docker-compose; then COMPOSE_CMD="docker-compose";
elif docker compose version &> /dev/null; then COMPOSE_CMD="docker compose";
else error "Docker Compose not found. Please install Docker Compose v1 or v2."; exit 1; fi
info "Detected Docker Compose command: $COMPOSE_CMD"
}
# --- Fresh installation flow ---
install_new_foxel() {
info "--- Starting a fresh Foxel installation ---"
local install_path
while true; do
read -p "Where should the Foxel data directory be created (e.g. /opt/docker)? " install_path
if [[ -z "$install_path" ]]; then warn "Input cannot be empty; please try again."; continue; fi
if [ ! -d "$install_path" ]; then
if confirm_action "Directory '$install_path' does not exist. Create it now?"; then
mkdir -p "$install_path"
if [ $? -eq 0 ]; then info "Directory '$install_path' created successfully."; break;
else error "Failed to create directory '$install_path'."; fi
else info "Operation cancelled."; fi
else info "Using the existing directory '$install_path'."; break; fi
done
echo
local foxel_dir="$install_path/Foxel"
info "将在 '$foxel_dir' 目录中创建所需文件..."
mkdir -p "$foxel_dir/data/"{db,mount} && chmod 777 "$foxel_dir/data/"{db,mount}
if [ $? -ne 0 ]; then error "创建或设置子目录权限失败。"; exit 1; fi
cd "$foxel_dir" || exit
info "正在下载 'compose.yaml'..."
local COMPOSE_MIRROR_URL="https://ghproxy.com/https://raw.githubusercontent.com/DrizzleTime/Foxel/main/compose.yaml"
local COMPOSE_OFFICIAL_URL="https://raw.githubusercontent.com/DrizzleTime/Foxel/main/compose.yaml"
if ! curl -L -o compose.yaml "$COMPOSE_MIRROR_URL"; then
warn "镜像源下载失败,正在尝试从官方源下载..."
if ! curl -L -o compose.yaml "$COMPOSE_OFFICIAL_URL"; then
error "下载 'compose.yaml' 失败。请检查您的网络连接。"; exit 1;
fi
fi
info "'compose.yaml' 下载成功。"
echo
if confirm_action "您的服务器是否位于中国大陆(以便为您选择更快的镜像源)?"; then
info "正在切换到国内镜像源..."
sed -i 's|^\( *\)image: ghcr.io/drizzletime/foxel:latest|\1#image: ghcr.io/drizzletime/foxel:latest|' compose.yaml
sed -i 's|^\( *\)#image: ghcr.nju.edu.cn/drizzletime/foxel:latest|\1image: ghcr.nju.edu.cn/drizzletime/foxel:latest|' compose.yaml
info "已成功切换到 ghcr.nju.edu.cn 镜像源。"
else
info "将使用默认的 ghcr.io 官方镜像源。"
fi
echo
local new_port
while true; do
read -p "请输入新的对外端口 (或直接按回车使用默认的 8088): " new_port
if [[ -z "$new_port" ]]; then
new_port="8088"
info "将使用默认端口 8088。"
break
fi
if ! [[ "$new_port" =~ ^[0-9]+$ ]] || [ "$new_port" -lt 1 ] || [ "$new_port" -gt 65535 ]; then
warn "输入无效。请输入 1-65535 之间的数字。"
continue
fi
if ss -tuln | grep -q ":${new_port}\b"; then
warn "端口 $new_port 已被占用,请换一个。"
else
sed -i "s/\"8088:80\"/\"$new_port:80\"/" compose.yaml
info "端口已成功修改为 $new_port"
break
fi
done
echo
if ! confirm_action "Generate new random secrets (recommended)? (Choosing 'n' keeps the defaults)"; then
info "Using the default secrets from 'compose.yaml'."
else
info "Generating new random secrets..."
sed -i "s|SECRET_KEY=.*|SECRET_KEY=$(openssl rand -base64 32)|" compose.yaml
sed -i "s|TEMP_LINK_SECRET_KEY=.*|TEMP_LINK_SECRET_KEY=$(openssl rand -base64 32)|" compose.yaml
info "New secrets generated and applied."
fi
echo
if confirm_action "所有配置已准备就绪!您想现在启动 Foxel 项目吗?"; then
info "正在启动 Foxel 服务... 这可能需要一些时间来拉取镜像。"
$COMPOSE_CMD pull && $COMPOSE_CMD up -d
if [ $? -eq 0 ]; then
info "Foxel 部署成功!"
info "-------------------------------------------------"
info "正在检测服务器IP地址请稍候..."
# 先捕获所有IP地址
local public_ipv4=$(get_public_ipv4 2>/dev/null)
local public_ipv6=$(get_public_ipv6 2>/dev/null)
local private_ip=$(get_private_ip 2>/dev/null)
local final_port=$new_port
local ip_found=false
echo
info "部署完成!您可以通过以下地址访问 Foxel:"
if [[ -n "$private_ip" ]]; then
echo " - 局域网地址: http://${private_ip}:${final_port}"
ip_found=true
fi
if [[ -n "$public_ipv4" ]]; then
echo " - 公网地址 (IPv4): http://${public_ipv4}:${final_port}"
ip_found=true
fi
if [[ -n "$public_ipv6" ]]; then
# 正确格式化IPv6地址
echo " - 公网地址 (IPv6): http://[${public_ipv6}]:${final_port}"
ip_found=true
fi
if ! $ip_found; then
warn "未能自动检测到服务器IP地址。"
echo " 请手动使用 http://[您的服务器IP]:${final_port} 访问它。"
fi
echo "-------------------------------------------------"
else
error "启动 Foxel 失败。请运行 'cd $foxel_dir && $COMPOSE_CMD logs' 查看日志。"
fi
else
info "操作已取消。您可以稍后进入 '$foxel_dir' 并手动运行 '$COMPOSE_CMD up -d'。"
fi
}
# --- Managing an existing installation ---
get_foxel_install_dir() {
local data_path
data_path=$(docker inspect foxel --format='{{range .Mounts}}{{if eq .Destination "/app/data"}}{{.Source}}{{end}}{{end}}')
if [[ -n "$data_path" ]]; then
echo "$(dirname "$data_path")"
fi
}
service_menu() {
while true; do
echo
echo "--- 服务管理 ---"
echo "1. 启动 Foxel"
echo "2. 停止 Foxel"
echo "3. 重启 Foxel"
echo "4. 查看日志"
echo "5. 返回上级菜单"
read -p "请选择操作 [1-5]: " service_choice
case $service_choice in
1) info "正在启动..."; $COMPOSE_CMD up -d ;;
2) info "正在停止..."; $COMPOSE_CMD stop ;;
3) info "正在重启..."; $COMPOSE_CMD restart ;;
4) info "正在显示日志 (按 Ctrl+C 退出)..."; $COMPOSE_CMD logs -f ;;
5) break ;;
*) warn "无效输入。" ;;
esac
done
}
manage_existing_installation() {
info "An existing Foxel installation was detected."
local foxel_dir
foxel_dir=$(get_foxel_install_dir)
if [[ -z "$foxel_dir" || ! -f "$foxel_dir/compose.yaml" ]]; then
error "Could not locate Foxel's compose.yaml automatically."
read -p "Please enter the Foxel installation directory (the one containing compose.yaml): " foxel_dir
if [[ ! -f "$foxel_dir/compose.yaml" ]]; then error "compose.yaml was not found in that directory. Exiting."; exit 1; fi
fi
info "Foxel installation directory: $foxel_dir"
cd "$foxel_dir" || exit 1
while true; do
echo
echo "--- Foxel 管理菜单 ---"
echo "1. 更新"
echo "2. 卸载"
echo "3. 重新安装"
echo "4. 服务管理 (启动/停止/重启/日志)"
echo "5. 退出"
read -p "请选择操作 [1-5]: " choice
case $choice in
1) # 更新
warn "更新前,强烈建议您备份 '$foxel_dir/data' 目录!"
if confirm_action "您确定要继续更新吗?"; then
info "正在拉取最新镜像..."
$COMPOSE_CMD pull
info "正在使用新镜像重新部署..."
$COMPOSE_CMD up -d
if [ $? -eq 0 ]; then info "Foxel 更新成功!"; else error "更新失败!"; fi
else info "更新操作已取消。"; fi
;;
2) # 卸载
warn "这将停止并删除 Foxel 容器及相关网络!"
warn "强烈建议您先备份 '$foxel_dir/data' 目录!"
if confirm_action "您确定要继续卸载吗?"; then
info "正在停止并移除容器..."
$COMPOSE_CMD down
if confirm_action "是否要删除所有数据卷(这将删除数据库等所有数据)?"; then
$COMPOSE_CMD down -v
info "数据卷已删除。"
fi
if confirm_action "是否要删除整个 Foxel 安装目录 '$foxel_dir'"; then
rm -rf "$foxel_dir"
info "安装目录已删除。"
fi
info "Foxel 卸载完成。"
exit 0
else info "卸载操作已取消。"; fi
;;
3) # 重新安装
warn "重新安装将完全删除当前的 Foxel 实例(包括数据),然后进入全新安装流程。"
warn "在继续之前,请务必备份好您的重要数据!"
if confirm_action "您确定要重新安装吗?"; then
info "正在执行卸载..."
$COMPOSE_CMD down -v && rm -rf "$foxel_dir"
info "旧实例已彻底移除。"
install_new_foxel
exit 0
else info "重新安装操作已取消。"; fi
;;
4) # 服务管理
service_menu
;;
5) # 退出
break
;;
*)
warn "无效输入。"
;;
esac
done
}
# --- Main function ---
main() {
clear
local SCRIPT_VERSION="1.7"
echo "================================================="
info "Welcome to the Foxel one-click install and management script (version: ${SCRIPT_VERSION})"
echo "================================================="
echo
initialize_environment
echo
if docker ps -a -q -f "name=^/foxel$" | grep -q .; then
manage_existing_installation
else
install_new_foxel
fi
echo
info "脚本执行完毕。"
}
# --- 脚本入口 ---
main

1731
uv.lock generated Normal file

File diff suppressed because it is too large

View File

@@ -6,9 +6,12 @@
"dependencies": {
"@ant-design/icons": "5.x",
"@ant-design/v5-patch-for-react-19": "^1.0.3",
"@monaco-editor/react": "^4.7.0",
"@uiw/react-md-editor": "^4.0.8",
"antd": "^5.27.0",
"artplayer": "^5.2.5",
"date-fns": "^4.1.0",
"monaco-editor": "^0.53.0",
"react": "^19.1.1",
"react-dom": "^19.1.1",
"react-markdown": "^10.1.0",
@@ -178,6 +181,10 @@
"@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.30", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-GQ7Nw5G2lTu/BtHTKfXhKHok2WGetd4XYcVKGx00SjAk8GMwgJM3zr6zORiPGuOE+/vkc90KtTosSSvaCjKb2Q=="],
"@monaco-editor/loader": ["@monaco-editor/loader@1.5.0", "", { "dependencies": { "state-local": "^1.0.6" } }, "sha512-hKoGSM+7aAc7eRTRjpqAZucPmoNOC4UUbknb/VNoTkEIkCPhqV8LfbsgM1webRM7S/z21eHEx9Fkwx8Z/C/+Xw=="],
"@monaco-editor/react": ["@monaco-editor/react@4.7.0", "", { "dependencies": { "@monaco-editor/loader": "^1.5.0" }, "peerDependencies": { "monaco-editor": ">= 0.25.0 < 1", "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, "sha512-cyzXQCtO47ydzxpQtCGSQGOC8Gk3ZUeBXFAxD+CWXYFo5OqZyZUonFl0DwUlTyAfRHntBfw2p3w4s9R6oe1eCA=="],
"@nodelib/fs.scandir": ["@nodelib/fs.scandir@2.1.5", "", { "dependencies": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" } }, "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g=="],
"@nodelib/fs.stat": ["@nodelib/fs.stat@2.0.5", "", {}, "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A=="],
@@ -272,6 +279,8 @@
"@types/react-dom": ["@types/react-dom@19.1.7", "", { "peerDependencies": { "@types/react": "^19.0.0" } }, "sha512-i5ZzwYpqjmrKenzkoLM2Ibzt6mAsM7pxB6BCIouEVVmgiqaMj1TjaK7hnA36hbW5aZv20kx7Lw6hWzPWg0Rurw=="],
"@types/trusted-types": ["@types/trusted-types@1.0.6", "", {}, "sha512-230RC8sFeHoT6sSUlRO6a8cAnclO06eeiq1QDfiv2FGCLWFvvERWgwIQD4FWqD9A69BN7Lzee4OXwoMVnnsWDw=="],
"@types/unist": ["@types/unist@3.0.3", "", {}, "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q=="],
"@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@8.39.1", "", { "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "8.39.1", "@typescript-eslint/type-utils": "8.39.1", "@typescript-eslint/utils": "8.39.1", "@typescript-eslint/visitor-keys": "8.39.1", "graphemer": "^1.4.0", "ignore": "^7.0.0", "natural-compare": "^1.4.0", "ts-api-utils": "^2.1.0" }, "peerDependencies": { "@typescript-eslint/parser": "^8.39.1", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "sha512-yYegZ5n3Yr6eOcqgj2nJH8cH/ZZgF+l0YIdKILSDjYFRjgYQMgv/lRjV5Z7Up04b9VYUondt8EPMqg7kTWgJ2g=="],
@@ -316,6 +325,8 @@
"argparse": ["argparse@2.0.1", "", {}, "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="],
"artplayer": ["artplayer@5.2.5", "", { "dependencies": { "option-validator": "^2.0.6" } }, "sha512-Ogym5rvkAJ4VLncM4Apl3TJ/a/ozM3csvY4IKuuMR++hUmEZgj/HaGsNonwx8r56nsqiZYE7O4vS1HFZl+NBSg=="],
"bail": ["bail@2.0.2", "", {}, "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw=="],
"balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="],
@@ -532,6 +543,8 @@
"keyv": ["keyv@4.5.4", "", { "dependencies": { "json-buffer": "3.0.1" } }, "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw=="],
"kind-of": ["kind-of@6.0.3", "", {}, "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw=="],
"levn": ["levn@0.4.1", "", { "dependencies": { "prelude-ls": "^1.2.1", "type-check": "~0.4.0" } }, "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ=="],
"locate-path": ["locate-path@6.0.0", "", { "dependencies": { "p-locate": "^5.0.0" } }, "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw=="],
@@ -636,6 +649,8 @@
"minimatch": ["minimatch@3.1.2", "", { "dependencies": { "brace-expansion": "^1.1.7" } }, "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw=="],
"monaco-editor": ["monaco-editor@0.53.0", "", { "dependencies": { "@types/trusted-types": "^1.0.6" } }, "sha512-0WNThgC6CMWNXXBxTbaYYcunj08iB5rnx4/G56UOPeL9UVIUGGHA1GR0EWIh9Ebabj7NpCRawQ5b0hfN1jQmYQ=="],
"ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
"nanoid": ["nanoid@3.3.11", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="],
@@ -646,6 +661,8 @@
"nth-check": ["nth-check@2.1.1", "", { "dependencies": { "boolbase": "^1.0.0" } }, "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w=="],
"option-validator": ["option-validator@2.0.6", "", { "dependencies": { "kind-of": "^6.0.3" } }, "sha512-tmZDan2LRIRQyhUGvkff68/O0R8UmF+Btmiiz0SmSw2ng3CfPZB9wJlIjHpe/MKUZqyIZkVIXCrwr1tIN+0Dzg=="],
"optionator": ["optionator@0.9.4", "", { "dependencies": { "deep-is": "^0.1.3", "fast-levenshtein": "^2.0.6", "levn": "^0.4.1", "prelude-ls": "^1.2.1", "type-check": "^0.4.0", "word-wrap": "^1.2.5" } }, "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g=="],
"p-limit": ["p-limit@3.1.0", "", { "dependencies": { "yocto-queue": "^0.1.0" } }, "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ=="],
@@ -816,6 +833,8 @@
"space-separated-tokens": ["space-separated-tokens@2.0.2", "", {}, "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q=="],
"state-local": ["state-local@1.0.7", "", {}, "sha512-HTEHMNieakEnoe33shBYcZ7NX83ACUjCu8c40iOGEZsngj9zRnkqS9j1pqQPXwobB0ZcVTk27REb7COQ0UR59w=="],
"string-convert": ["string-convert@0.2.1", "", {}, "sha512-u/1tdPl4yQnPBjnVrmdLo9gtuLvELKsAoRapekWggdiQNvvvum+jYF329d84NAa660KQw7pB2n36KrIKVoXa3A=="],
"stringify-entities": ["stringify-entities@4.0.4", "", { "dependencies": { "character-entities-html4": "^2.0.0", "character-entities-legacy": "^3.0.0" } }, "sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg=="],

View File

@@ -1,13 +1,20 @@
<!doctype html>
<html lang="en">
<html lang="zh-CN">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Foxel</title>
<link rel='stylesheet'
href='https://chinese-fonts-cdn.deno.dev/packages/maple-mono-cn/dist/MapleMono-CN-Regular/result.css' />
</head>
<body>
<style>
* {
font-family: 'Maple Mono CN';
}
</style>
<div id="root"></div>
<script type="module" src="/src/main.tsx"></script>
</body>

View File

@@ -12,9 +12,12 @@
"dependencies": {
"@ant-design/icons": "5.x",
"@ant-design/v5-patch-for-react-19": "^1.0.3",
"@monaco-editor/react": "^4.7.0",
"@uiw/react-md-editor": "^4.0.8",
"antd": "^5.27.0",
"artplayer": "^5.2.5",
"date-fns": "^4.1.0",
"monaco-editor": "^0.53.0",
"react": "^19.1.1",
"react-dom": "^19.1.1",
"react-markdown": "^10.1.0",

View File

@@ -4,11 +4,13 @@ import { AuthProvider } from './contexts/AuthContext.tsx';
import { status as getStatus } from './api/config.ts';
import type { SystemStatus } from './api/config.ts';
import { SystemContext } from './contexts/SystemContext.tsx';
import { ThemeProvider } from './contexts/ThemeContext.tsx';
import { Spin } from 'antd';
import { Routes, Route, Navigate } from 'react-router';
import SetupPage from './pages/SetupPage.tsx';
import { I18nProvider } from './i18n';
function App() {
function AppInner() {
const [status, setStatus] = useState<SystemStatus | null>(null);
useEffect(() => {
async function checkInitialization() {
@@ -38,17 +40,25 @@ function App() {
return (
<SystemContext.Provider value={status}>
<AuthProvider>
{!status.is_initialized ? (
<Routes>
<Route path="/setup" element={<SetupPage />} />
<Route path="*" element={<Navigate to="/setup" replace />} />
</Routes>
) : (
<AppRouter />
)}
<ThemeProvider>
{!status.is_initialized ? (
<Routes>
<Route path="/setup" element={<SetupPage />} />
<Route path="*" element={<Navigate to="/setup" replace />} />
</Routes>
) : (
<AppRouter />
)}
</ThemeProvider>
</AuthProvider>
</SystemContext.Provider>
);
}
export default App;
export default function App() {
return (
<I18nProvider>
<AppInner />
</I18nProvider>
);
}

View File

@@ -6,7 +6,7 @@ export interface AdapterItem {
type: string;
config: any;
enabled: boolean;
mount_path?: string | null;
path?: string | null;
sub_path?: string | null;
}

View File

@@ -17,6 +17,21 @@ export interface AuthResponse {
token_type: string;
}
export interface MeResponse {
id: number;
username: string;
email?: string | null;
full_name?: string | null;
gravatar_url: string;
}
export interface UpdateMePayload {
email?: string | null;
full_name?: string | null;
old_password?: string;
new_password?: string;
}
export const authApi = {
register: async (username: string, password: string, email?: string, full_name?: string): Promise<any> => {
return request('/auth/register', {
@@ -42,4 +57,15 @@ export const authApi = {
logout: () => {
localStorage.removeItem('token');
},
me: async () => {
return await request<MeResponse>('/auth/me', {
method: 'GET',
});
},
updateMe: async (payload: UpdateMePayload) => {
return await request<MeResponse>('/auth/me', {
method: 'PUT',
json: payload,
});
},
};
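// Usage sketch (illustrative only, not part of the diff): update the profile via
// the new updateMe endpoint, then re-fetch the current user. The field value is
// a placeholder.
async function renameCurrentUser(name: string) {
  await authApi.updateMe({ full_name: name });
  return await authApi.me(); // returns the refreshed MeResponse
}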

View File

@@ -73,4 +73,5 @@ async function request<T = any>(url: string, options: RequestOptions = {}): Prom
export { vfsApi, type VfsEntry, type DirListing } from './vfs';
export { adaptersApi, type AdapterItem, type AdapterTypeField, type AdapterTypeMeta } from './adapters';
export { shareApi, type ShareInfo, type ShareInfoWithPassword } from './share';
export { offlineDownloadsApi, type OfflineDownloadTask, type OfflineDownloadCreate, type TaskProgress } from './offlineDownloads';
export default request;

View File

@@ -20,6 +20,8 @@ export interface SystemStatus {
title: string;
logo: string;
is_initialized: boolean;
app_domain?: string;
file_domain?: string;
}
export async function status() {

View File

@@ -0,0 +1,35 @@
import request from './client';
export interface TaskProgress {
stage?: string | null;
percent?: number | null;
bytes_total?: number | null;
bytes_done?: number | null;
detail?: string | null;
}
export interface OfflineDownloadTask {
id: string;
name: string;
status: 'pending' | 'running' | 'success' | 'failed';
result?: any;
error?: string | null;
task_info: Record<string, any>;
progress?: TaskProgress | null;
meta?: Record<string, any> | null;
}
export interface OfflineDownloadCreate {
url: string;
dest_dir: string;
filename: string;
}
export const offlineDownloadsApi = {
create: (payload: OfflineDownloadCreate) => request<{ task_id: string }>('/offline-downloads/', {
method: 'POST',
json: payload,
}),
list: () => request<OfflineDownloadTask[]>('/offline-downloads/'),
detail: (taskId: string) => request<OfflineDownloadTask>(`/offline-downloads/${taskId}`),
};
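// Usage sketch (illustrative only, not part of the diff): enqueue an offline
// download and poll until the queue worker finishes it. URL and destination
// here are placeholders.
async function downloadAndWait(url: string, destDir: string, filename: string): Promise<OfflineDownloadTask> {
  const { task_id } = await offlineDownloadsApi.create({ url, dest_dir: destDir, filename });
  for (;;) {
    const task = await offlineDownloadsApi.detail(task_id);
    if (task.status === 'success' || task.status === 'failed') return task;
    await new Promise(resolve => setTimeout(resolve, 1000)); // poll once per second
  }
}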

View File

@@ -0,0 +1,52 @@
export interface RepoItem {
key: string;
name: string;
version: string;
author?: string;
description?: string;
website?: string;
github?: string;
icon?: string;
supportedExts?: string[];
createdAt?: number;
downloads?: number;
directUrl: string;
}
export interface RepoListResponse {
items: RepoItem[];
total: number;
page: number;
pageSize: number;
}
export interface RepoQueryParams {
query?: string;
author?: string;
sort?: 'downloads' | 'createdAt';
page?: number;
pageSize?: number;
}
const CENTER_BASE = 'https://center.foxel.cc';
export function buildCenterUrl(path: string) {
return new URL(path, CENTER_BASE).href;
}
export async function fetchRepoList(params: RepoQueryParams = {}): Promise<RepoListResponse> {
const query = new URLSearchParams();
if (params.query) query.set('query', params.query);
if (params.author) query.set('author', params.author);
if (params.sort) query.set('sort', params.sort);
query.set('page', String(params.page ?? 1));
query.set('pageSize', String(params.pageSize ?? 12));
const url = `${CENTER_BASE}/api/repo?${query.toString()}`;
const resp = await fetch(url);
if (!resp.ok) {
throw new Error(`Repo fetch failed: ${resp.status}`);
}
return await resp.json();
}
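// Usage sketch (illustrative only, not part of the diff): list the most-downloaded
// plugins matching a keyword; 'pdf' is a placeholder query.
//   const { items, total } = await fetchRepoList({ query: 'pdf', sort: 'downloads' });
//   items.forEach(item => console.log(item.name, item.version, item.directUrl));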

46
web/src/api/plugins.ts Normal file
View File

@@ -0,0 +1,46 @@
import request from './client';
export interface PluginItem {
id: number;
url: string;
enabled: boolean;
key?: string | null;
name?: string | null;
version?: string | null;
supported_exts?: string[] | null;
default_bounds?: Record<string, any> | null;
default_maximized?: boolean | null;
icon?: string | null;
description?: string | null;
author?: string | null;
website?: string | null;
github?: string | null;
}
export interface PluginCreate {
url: string;
enabled?: boolean;
}
export interface PluginManifestUpdate {
key?: string;
name?: string;
version?: string;
supported_exts?: string[];
default_bounds?: Record<string, any>;
default_maximized?: boolean;
icon?: string;
description?: string;
author?: string;
website?: string;
github?: string;
}
export const pluginsApi = {
list: () => request<PluginItem[]>(`/plugins`),
create: (payload: PluginCreate) => request<PluginItem>(`/plugins`, { method: 'POST', json: payload }),
remove: (id: number) => request(`/plugins/${id}`, { method: 'DELETE' }),
update: (id: number, payload: PluginCreate) => request<PluginItem>(`/plugins/${id}`, { method: 'PUT', json: payload }),
updateManifest: (id: number, payload: PluginManifestUpdate) => request<PluginItem>(`/plugins/${id}/metadata`, { method: 'POST', json: payload }),
};
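// Usage sketch (illustrative only, not part of the diff): install a plugin from a
// center entry's directUrl, then record its manifest metadata. The manifest
// values here are placeholders.
async function installFromCenter(directUrl: string): Promise<PluginItem> {
  const plugin = await pluginsApi.create({ url: directUrl, enabled: true });
  await pluginsApi.updateManifest(plugin.id, { name: 'Example Plugin', version: '1.0.0' });
  return plugin;
}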

View File

@@ -15,7 +15,8 @@ export interface ProcessorTypeMeta {
name: string;
supported_exts: string[];
config_schema: ProcessorTypeField[];
produces_file:boolean;
produces_file: boolean;
module_path?: string | null;
}
export const processorsApi = {
@@ -29,11 +30,21 @@ export const processorsApi = {
save_to?: string;
overwrite?: boolean;
}) =>
request<any>('/processors/process', {
request<{ task_id: string }>('/processors/process', {
method: 'POST',
json: params,
}),
getSource: (type: string) =>
request<{ source: string; module_path: string }>('/processors/source/' + encodeURIComponent(type), {
method: 'GET',
}),
updateSource: (type: string, source: string) =>
request<boolean>('/processors/source/' + encodeURIComponent(type), {
method: 'PUT',
json: { source },
}),
reload: () =>
request<boolean>('/processors/reload', {
method: 'POST',
}),
};
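// Usage sketch (illustrative only, not part of the diff): edit a processor's
// source in place and hot-reload the registry; 'thumbnail' is a placeholder
// processor type.
//   const { source } = await processorsApi.getSource('thumbnail');
//   await processorsApi.updateSource('thumbnail', patchedSource);
//   await processorsApi.reload();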

View File

@@ -23,10 +23,15 @@ export interface ShareCreatePayload {
password?: string;
}
export interface ClearExpiredResult {
deleted_count: number;
}
export const shareApi = {
create: (payload: ShareCreatePayload) => request<ShareInfoWithPassword>('/shares', { method: 'POST', json: payload }),
list: () => request<ShareInfo[]>('/shares'),
remove: (shareId: number) => request<void>(`/shares/${shareId}`, { method: 'DELETE' }),
clearExpired: () => request<ClearExpiredResult>(`/shares/expired`, { method: 'DELETE' }),
get: (token: string) => request<ShareInfo>(`/s/${token}`),
verifyPassword: (token: string, password: string) => request<void>(`/s/${token}/verify`, { method: 'POST', json: { password } }),
listDir: (token: string, path: string = '/', password?: string) => {
@@ -40,4 +45,4 @@ export const shareApi = {
const url = `${API_BASE_URL}/s/${token}/download?path=${encodeURIComponent(path)}`;
return password ? `${url}&password=${encodeURIComponent(password)}` : url;
},
};
};
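// Usage sketch (illustrative only, not part of the diff): purge expired shares
// with the new endpoint and report how many were removed.
//   const { deleted_count } = await shareApi.clearExpired();
//   console.log(`Removed ${deleted_count} expired share(s)`);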

View File

@@ -1,4 +1,5 @@
import request from './client';
import type { TaskProgress } from './offlineDownloads';
export interface AutomationTask {
id: number;
@@ -14,9 +15,32 @@ export interface AutomationTask {
export type AutomationTaskCreate = Omit<AutomationTask, 'id'>;
export type AutomationTaskUpdate = Partial<AutomationTaskCreate>;
export interface QueuedTask {
id: string;
name: string;
status: 'pending' | 'running' | 'success' | 'failed';
result?: any;
error?: string;
task_info: Record<string, any>;
progress?: TaskProgress | null;
meta?: Record<string, any> | null;
}
export interface TaskQueueSettings {
concurrency: number;
active_workers: number;
}
export interface TaskQueueSettingsUpdate {
concurrency: number;
}
export const tasksApi = {
list: () => request<AutomationTask[]>('/tasks/'),
create: (payload: AutomationTaskCreate) => request<AutomationTask>('/tasks/', { method: 'POST', json: payload }),
update: (id: number, payload: AutomationTaskUpdate) => request<AutomationTask>(`/tasks/${id}`, { method: 'PUT', json: payload }),
remove: (id: number) => request<void>(`/tasks/${id}`, { method: 'DELETE' }),
};
getQueue: () => request<QueuedTask[]>('/tasks/queue'),
getQueueSettings: () => request<TaskQueueSettings>('/tasks/queue/settings'),
updateQueueSettings: (payload: TaskQueueSettingsUpdate) => request<TaskQueueSettings>('/tasks/queue/settings', { method: 'POST', json: payload }),
};
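// Usage sketch (illustrative only, not part of the diff): if every queue worker
// is busy, raise the concurrency by one. The policy is only an example.
async function relieveQueuePressure(): Promise<void> {
  const queue = await tasksApi.getQueue();
  const running = queue.filter(task => task.status === 'running').length;
  const settings = await tasksApi.getQueueSettings();
  if (running >= settings.concurrency) {
    await tasksApi.updateQueueSettings({ concurrency: settings.concurrency + 1 });
  }
}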

65
web/src/api/vectorDB.ts Normal file
View File

@@ -0,0 +1,65 @@
import client from './client';
export interface VectorDBIndexInfo {
index_name: string;
index_type?: string;
metric_type?: string;
indexed_rows: number;
pending_index_rows: number;
state?: string;
}
export interface VectorDBCollectionStats {
name: string;
row_count: number;
dimension: number | null;
estimated_memory_bytes: number;
is_vector_collection: boolean;
indexes: VectorDBIndexInfo[];
}
export interface VectorDBStats {
collections: VectorDBCollectionStats[];
collection_count: number;
total_vectors: number;
estimated_total_memory_bytes: number;
db_file_size_bytes: number | null;
}
export interface VectorDBProviderField {
key: string;
label: string;
type: 'text' | 'password';
required?: boolean;
default?: string;
placeholder?: string;
}
export interface VectorDBProviderMeta {
type: string;
label: string;
description?: string;
enabled: boolean;
config_schema: VectorDBProviderField[];
}
export interface VectorDBCurrentConfig {
type: string;
config: Record<string, string>;
label?: string;
enabled?: boolean;
}
export interface UpdateVectorDBConfigResponse {
config: VectorDBCurrentConfig;
stats: VectorDBStats;
}
export const vectorDBApi = {
getProviders: () => client<VectorDBProviderMeta[]>('/vector-db/providers', { method: 'GET' }),
getConfig: () => client<VectorDBCurrentConfig>('/vector-db/config', { method: 'GET' }),
getStats: () => client<VectorDBStats>('/vector-db/stats', { method: 'GET' }),
updateConfig: (payload: { type: string; config: Record<string, string> }) =>
client<UpdateVectorDBConfigResponse>('/vector-db/config', { method: 'POST', json: payload }),
clearAll: () => client('/vector-db/clear-all', { method: 'POST' }),
};
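// Usage sketch (illustrative only, not part of the diff): switch providers and
// log the resulting footprint; the provider type and config keys below are
// placeholders.
//   const { stats } = await vectorDBApi.updateConfig({ type: 'milvus_lite', config: { db_path: '/app/data/vectors.db' } });
//   console.log(`${stats.collection_count} collections, ${stats.total_vectors} vectors`);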

View File

@@ -27,16 +27,22 @@ export interface SearchResultItem {
}
export const vfsApi = {
list: (path: string, page: number = 1, pageSize: number = 50) => {
list: (path: string, page: number = 1, pageSize: number = 50, sortBy: string = 'name', sortOrder: string = 'asc') => {
const cleaned = path.replace(/\\/g, '/');
const trimmed = cleaned === '/' ? '' : cleaned.replace(/^\/+/, '');
const params = new URLSearchParams({
page: page.toString(),
page_size: pageSize.toString()
page_size: pageSize.toString(),
sort_by: sortBy,
sort_order: sortOrder
});
return request<DirListing>(`/fs/${encodeURI(trimmed)}?${params}`);
},
readFile: (path: string) => request<ArrayBuffer>(`/fs/file/${encodeURI(path.replace(/^\/+/, ''))}`),
readFile: async (path: string) => {
const enc = encodeURI(path.replace(/^\/+/, ''));
const resp = await request(`/fs/file/${enc}`, { rawResponse: true });
return await (resp as Response).arrayBuffer();
},
uploadFile: (fullPath: string, file: File | Blob) => {
const fd = new FormData();
fd.append('file', file);
@@ -44,14 +50,25 @@ export const vfsApi = {
},
mkdir: (path: string) => request('/fs/mkdir', { method: 'POST', json: { path } }),
deletePath: (path: string) => request(`/fs/${encodeURI(path.replace(/^\/+/, ''))}`, { method: 'DELETE' }),
move: (src: string, dst: string) => request('/fs/move', { method: 'POST', json: { src, dst } }),
move: (src: string, dst: string, options?: { overwrite?: boolean }) => {
const params = new URLSearchParams();
if (options?.overwrite !== undefined) params.set('overwrite', String(options.overwrite));
const query = params.toString();
return request(`/fs/move${query ? `?${query}` : ''}`, { method: 'POST', json: { src, dst } });
},
copy: (src: string, dst: string, options?: { overwrite?: boolean }) => {
const params = new URLSearchParams();
if (options?.overwrite !== undefined) params.set('overwrite', String(options.overwrite));
const query = params.toString();
return request(`/fs/copy${query ? `?${query}` : ''}`, { method: 'POST', json: { src, dst } });
},
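// Usage sketch (illustrative only, not part of the diff): copy without
// clobbering an existing file, then move with an explicit overwrite.
//   await vfsApi.copy('/a/report.pdf', '/b/report.pdf', { overwrite: false });
//   await vfsApi.move('/b/report.pdf', '/archive/report.pdf', { overwrite: true });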
rename: (src: string, dst: string) => request('/fs/rename', { method: 'POST', json: { src, dst } }),
thumb: (path: string, w=256, h=256, fit='cover') =>
request<ArrayBuffer>(`/fs/thumb/${encodeURI(path.replace(/^\/+/, ''))}?w=${w}&h=${h}&fit=${fit}`),
streamUrl: (path: string) => `${API_BASE_URL}/fs/stream/${encodeURI(path.replace(/^\/+/, ''))}`,
stat: (path: string) => request(`/fs/stat/${encodeURI(path.replace(/^\/+/, ''))}`),
getTempLinkToken: (path: string, expiresIn: number = 3600) =>
request<{token: string}>(`/fs/temp-link/${encodeURI(path.replace(/^\/+/, ''))}?expires_in=${expiresIn}`),
request<{token: string, path: string, url: string}>(`/fs/temp-link/${encodeURI(path.replace(/^\/+/, ''))}?expires_in=${expiresIn}`),
getTempPublicUrl: (token: string) => `${API_BASE_URL}/fs/public/${token}`,
uploadStream: (fullPath: string, file: File, overwrite: boolean = true, onProgress?: (loaded: number, total: number) => void) => {
const enc = encodeURI(fullPath.replace(/^\/+/, ''));

View File

@@ -1,6 +1,6 @@
import React, { useRef, useEffect, useCallback } from 'react';
import { Space, Button } from 'antd';
import { FullscreenExitOutlined, FullscreenOutlined, CloseOutlined } from '@ant-design/icons';
import { FullscreenExitOutlined, FullscreenOutlined, CloseOutlined, MinusOutlined } from '@ant-design/icons';
import type { AppDescriptor, AppComponentProps } from './types';
import type { VfsEntry } from '../api/client';
@@ -10,6 +10,7 @@ export interface AppWindowItem {
entry: VfsEntry;
filePath: string;
maximized: boolean;
minimized: boolean;
x: number;
y: number;
width: number;
@@ -187,9 +188,11 @@ export const AppWindowsLayer: React.FC<AppWindowsLayerProps> = ({ windows, onClo
));
};
const visibleWindows = windows.filter(w => !w.minimized);
return (
<>
{windows.map((w, idx) => {
{visibleWindows.map((w, idx) => {
const AppComp = w.app.component as React.FC<AppComponentProps>;
const useSystemWindow = w.app.useSystemWindow !== false; // defaults to true
if (!useSystemWindow) {
@@ -243,8 +246,8 @@ export const AppWindowsLayer: React.FC<AppWindowsLayerProps> = ({ windows, onClo
left: w.maximized ? 0 : w.x,
width: w.maximized ? '100vw' : w.width,
height: w.maximized ? '100vh' : w.height,
background: 'rgba(240, 242, 245, 0.7)', // Semi-transparent background
border: '1px solid rgba(255, 255, 255, 0.18)',
background: 'var(--ant-color-bg-elevated, var(--ant-color-bg-container))',
border: '1px solid var(--ant-color-border-secondary, rgba(255,255,255,0.18))',
borderRadius: w.maximized ? 0 : 12,
boxShadow: w.maximized
? 'none'
@@ -254,7 +257,7 @@ export const AppWindowsLayer: React.FC<AppWindowsLayerProps> = ({ windows, onClo
display: 'flex',
flexDirection: 'column',
overflow: 'hidden',
backdropFilter: 'blur(20px) saturate(180%)', // Enhanced blur effect
backdropFilter: 'blur(12px) saturate(150%)',
zIndex: 3000 + idx,
willChange: 'left,top,width,height',
transition: interacting ? 'none' : 'top .15s,left .15s,width .15s,height .15s,box-shadow .25s'
@@ -269,9 +272,9 @@ export const AppWindowsLayer: React.FC<AppWindowsLayerProps> = ({ windows, onClo
alignItems: 'center',
justifyContent: 'space-between',
padding: '0 12px',
background: 'rgba(0, 0, 0, 0.25)', // Lighter, transparent title bar
borderBottom: '1px solid rgba(255, 255, 255, 0.1)',
color: '#333', // Darker text for readability
background: 'var(--ant-color-fill-secondary, rgba(0,0,0,0.25))',
borderBottom: '1px solid var(--ant-color-border-secondary, rgba(255,255,255,0.1))',
color: 'var(--ant-color-text, #333)',
fontSize: 13,
fontWeight: 600,
letterSpacing: .2,
@@ -291,6 +294,21 @@ export const AppWindowsLayer: React.FC<AppWindowsLayerProps> = ({ windows, onClo
{w.app.name} - {w.entry.name}
</span>
<Space size={4}>
<Button
type="text"
size="small"
aria-label="最小化"
icon={<MinusOutlined />}
onClick={() => onUpdateWindow(w.id, { minimized: true })}
style={{
color: 'var(--ant-color-text-secondary, #555)',
width: 30,
height: 30,
display: 'flex',
alignItems: 'center',
justifyContent: 'center'
}}
/>
<Button
type="text"
size="small"
@@ -298,7 +316,7 @@ export const AppWindowsLayer: React.FC<AppWindowsLayerProps> = ({ windows, onClo
icon={w.maximized ? <FullscreenExitOutlined /> : <FullscreenOutlined />}
onClick={() => onToggleMax(w.id)}
style={{
color: '#555',
color: 'var(--ant-color-text-secondary, #555)',
width: 30,
height: 30,
display: 'flex',
@@ -314,7 +332,7 @@ export const AppWindowsLayer: React.FC<AppWindowsLayerProps> = ({ windows, onClo
icon={<CloseOutlined />}
onClick={() => onClose(w.id)}
style={{
color: '#ff4d4f',
color: 'var(--ant-color-error, #ff4d4f)',
width: 30,
height: 30,
display: 'flex',

View File

@@ -1,394 +1,654 @@
import React, { useEffect, useRef, useState } from 'react';
import { vfsApi } from '../../api/client';
import React, { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import {
FileOutlined,
DatabaseOutlined,
ExpandOutlined,
BgColorsOutlined,
ClockCircleOutlined,
FolderOutlined,
AimOutlined,
BulbOutlined,
ThunderboltOutlined,
AlertOutlined,
CameraOutlined,
ApiOutlined,
FieldTimeOutlined,
} from '@ant-design/icons';
import { API_BASE_URL, vfsApi, type VfsEntry } from '../../api/client';
import type { AppComponentProps } from '../types';
import { Spin, Typography, Button, Tooltip } from 'antd';
import { ZoomInOutlined, ZoomOutOutlined, ReloadOutlined, CompressOutlined, CloseOutlined, RotateRightOutlined } from '@ant-design/icons';
import { ImageCanvas } from './components/ImageCanvas';
import { ViewerControls } from './components/ViewerControls';
import { Filmstrip } from './components/Filmstrip';
import { InfoPanel } from './components/InfoPanel';
import type { HistogramData, RgbColor, InfoItem } from './components/types';
import { viewerStyles } from './styles';
interface ExplorerSnapshot {
path: string;
entries: VfsEntry[];
pagination?: { page: number; page_size: number; total: number };
sortBy?: string;
sortOrder?: string;
timestamp: number;
}
interface FileStat {
name?: string;
is_dir?: boolean;
size?: number;
mtime?: number;
mode?: number;
path?: string;
type?: string;
exif?: Record<string, unknown>;
}
declare global {
interface WindowEventMap {
'foxel:file-explorer-page': CustomEvent<ExplorerSnapshot>;
}
}
type ExplorerAwareWindow = Window & { __FOXEL_LAST_EXPLORER_PAGE__?: ExplorerSnapshot };
const DEFAULT_TONE: RgbColor = { r: 28, g: 32, b: 46 };
const isImageEntry = (ent: VfsEntry) => {
if (ent.is_dir) return false;
const maybe = ent as VfsEntry & { is_image?: boolean };
if (typeof maybe.is_image === 'boolean' && maybe.is_image) return true;
const ext = ent.name.split('.').pop()?.toLowerCase();
if (!ext) return false;
return ['png', 'jpg', 'jpeg', 'gif', 'webp', 'bmp', 'avif', 'ico', 'tif', 'tiff', 'svg', 'heic', 'heif', 'arw', 'cr2', 'cr3', 'nef', 'rw2', 'orf', 'pef', 'dng'].includes(ext);
};
const buildThumbUrl = (fullPath: string, w = 180, h = 120) => {
const base = API_BASE_URL.replace(/\/+$/, '');
const clean = fullPath.replace(/^\/+/, '');
return `${base}/fs/thumb/${encodeURI(clean)}?w=${w}&h=${h}&fit=cover`;
};
const getDirectory = (fullPath: string) => {
const path = fullPath.startsWith('/') ? fullPath : `/${fullPath}`;
const idx = path.lastIndexOf('/');
if (idx <= 0) return '/';
return path.slice(0, idx) || '/';
};
const joinPath = (dir: string, name: string) => {
if (dir === '/' || dir === '') return `/${name}`;
return `${dir.replace(/\/$/, '')}/${name}`;
};
const clamp = (value: number, min: number, max: number) => Math.max(min, Math.min(max, value));
const parseNumberish = (raw: unknown): number | null => {
if (typeof raw === 'number') return raw;
if (typeof raw !== 'string') return null;
if (raw.includes('/')) {
const [a, b] = raw.split('/').map(v => Number(v));
if (!Number.isNaN(a) && !Number.isNaN(b) && b !== 0) return a / b;
}
const val = Number(raw);
return Number.isNaN(val) ? null : val;
};
const humanFileSize = (size: number | undefined) => {
if (typeof size !== 'number') return '-';
const units = ['B', 'KB', 'MB', 'GB', 'TB'];
let value = size;
let index = 0;
while (value >= 1024 && index < units.length - 1) {
value /= 1024;
index += 1;
}
return `${value.toFixed(index === 0 ? 0 : 1)} ${units[index]}`;
};
const readExplorerSnapshot = (dir: string): ExplorerSnapshot | null => {
if (typeof window === 'undefined') return null;
const snap = (window as ExplorerAwareWindow).__FOXEL_LAST_EXPLORER_PAGE__;
if (!snap) return null;
const snapshotPath = snap.path === '' ? '/' : snap.path;
const normalizedSnap = snapshotPath.endsWith('/') && snapshotPath !== '/' ? snapshotPath.slice(0, -1) : snapshotPath;
const normalizedTarget = dir.endsWith('/') && dir !== '/' ? dir.slice(0, -1) : dir;
if (normalizedSnap !== normalizedTarget) return null;
return snap;
};
const formatDateTime = (ts?: number) => {
if (!ts) return '-';
try {
return new Date(ts * 1000).toLocaleString();
} catch {
return '-';
}
};
const clampChannel = (value: number) => Math.max(0, Math.min(255, value));
const mixColor = (base: RgbColor, target: RgbColor, ratio: number): RgbColor => ({
r: clampChannel(base.r * (1 - ratio) + target.r * ratio),
g: clampChannel(base.g * (1 - ratio) + target.g * ratio),
b: clampChannel(base.b * (1 - ratio) + target.b * ratio),
});
const rgbToRgba = (color: RgbColor, alpha: number) => `rgba(${Math.round(color.r)}, ${Math.round(color.g)}, ${Math.round(color.b)}, ${alpha})`;
const computeImageStats = (img: HTMLImageElement): { histogram: HistogramData | null; dominantColor: RgbColor | null } => {
try {
const maxSide = 720;
const naturalWidth = img.naturalWidth || 1;
const naturalHeight = img.naturalHeight || 1;
const ratio = Math.min(1, maxSide / Math.max(naturalWidth, naturalHeight));
const width = Math.max(1, Math.floor(naturalWidth * ratio));
const height = Math.max(1, Math.floor(naturalHeight * ratio));
const canvas = document.createElement('canvas');
canvas.width = width;
canvas.height = height;
const ctx = canvas.getContext('2d', { willReadFrequently: true });
if (!ctx) return { histogram: null, dominantColor: null };
ctx.drawImage(img, 0, 0, width, height);
const { data } = ctx.getImageData(0, 0, width, height);
const r = new Array(256).fill(0);
const g = new Array(256).fill(0);
const b = new Array(256).fill(0);
let rTotal = 0;
let gTotal = 0;
let bTotal = 0;
let count = 0;
for (let i = 0; i < data.length; i += 4) {
r[data[i]] += 1;
g[data[i + 1]] += 1;
b[data[i + 2]] += 1;
rTotal += data[i];
gTotal += data[i + 1];
bTotal += data[i + 2];
count += 1;
}
const histogram: HistogramData = { r, g, b };
if (count === 0) return { histogram, dominantColor: null };
const dominantColor: RgbColor = {
r: rTotal / count,
g: gTotal / count,
b: bTotal / count,
};
return { histogram, dominantColor };
} catch {
return { histogram: null, dominantColor: null };
}
};
export const ImageViewerApp: React.FC<AppComponentProps> = ({ filePath, entry, onRequestClose }) => {
const [url, setUrl] = useState<string>();
const normalizedInitialPath = filePath.startsWith('/') ? filePath : `/${filePath}`;
const [activeEntry, setActiveEntry] = useState<VfsEntry>(entry);
const [activePath, setActivePath] = useState<string>(normalizedInitialPath);
const [imageUrl, setImageUrl] = useState<string>();
const [loading, setLoading] = useState(true);
const [err, setErr] = useState<string>();
const [error, setError] = useState<string>();
const [stat, setStat] = useState<FileStat | null>(null);
const [histogram, setHistogram] = useState<HistogramData | null>(null);
const [dominantColor, setDominantColor] = useState<RgbColor | null>(null);
const [scale, setScale] = useState(1);
const [offset, setOffset] = useState({ x: 0, y: 0 });
const [isDragging, setIsDragging] = useState(false);
const [rotate, setRotate] = useState(0);
const imgRef = useRef<HTMLImageElement | null>(null);
const [isDragging, setIsDragging] = useState(false);
const [filmstrip, setFilmstrip] = useState<VfsEntry[]>([]);
const [pageInfo, setPageInfo] = useState<{ page: number; total: number; pageSize: number } | null>(null);
const containerRef = useRef<HTMLDivElement | null>(null);
const lastPointer = useRef<{ x: number; y: number } | null>(null);
const lastDistance = useRef<number | null>(null);
const imageRef = useRef<HTMLImageElement | null>(null);
const dragPointRef = useRef<{ x: number; y: number } | null>(null);
const pinchDistanceRef = useRef<number | null>(null);
const transitionRef = useRef(false);
const filmstripRefs = useRef<Record<string, HTMLDivElement | null>>({});
const directory = useMemo(() => getDirectory(activePath), [activePath]);
const baseTone = useMemo<RgbColor>(() => dominantColor ?? DEFAULT_TONE, [dominantColor]);
const containerStyle = useMemo(() => {
const light = mixColor(baseTone, { r: 255, g: 255, b: 255 }, 0.18);
const shadow = mixColor(baseTone, { r: 0, g: 0, b: 0 }, 0.62);
return {
...viewerStyles.container,
background: `linear-gradient(135deg, ${rgbToRgba(light, 0.78)} 0%, ${rgbToRgba(baseTone, 0.86)} 48%, ${rgbToRgba(shadow, 0.96)} 100%)`,
};
}, [baseTone]);
const mainBackdropStyle = useMemo(() => {
const glow = mixColor(baseTone, { r: 255, g: 255, b: 255 }, 0.32);
const shade = mixColor(baseTone, { r: 0, g: 0, b: 0 }, 0.7);
return {
...viewerStyles.mainBackdrop,
background: `radial-gradient(circle at 18% 22%, ${rgbToRgba(glow, 0.38)}, ${rgbToRgba(shade, 0.94)} 68%)`,
};
}, [baseTone]);
const viewerStyle = useMemo(() => {
const surface = mixColor(baseTone, { r: 0, g: 0, b: 0 }, 0.45);
const edge = mixColor(baseTone, { r: 0, g: 0, b: 0 }, 0.65);
return {
...viewerStyles.viewer,
background: `linear-gradient(145deg, ${rgbToRgba(surface, 0.7)} 0%, ${rgbToRgba(edge, 0.92)} 100%)`,
backdropFilter: 'blur(28px)',
};
}, [baseTone]);
const controlsStyle = useMemo(() => {
const tone = mixColor(baseTone, { r: 0, g: 0, b: 0 }, 0.52);
return {
...viewerStyles.controls,
background: rgbToRgba(tone, 0.74),
backdropFilter: 'blur(18px)',
};
}, [baseTone]);
const filmstripShellStyle = useMemo(() => {
const tone = mixColor(baseTone, { r: 0, g: 0, b: 0 }, 0.56);
return {
...viewerStyles.filmstripShell,
background: rgbToRgba(tone, 0.7),
backdropFilter: 'blur(22px)',
};
}, [baseTone]);
const getThumbUrl = useCallback((item: VfsEntry) => {
const full = joinPath(directory, item.name);
return buildThumbUrl(full, 160, 120);
}, [directory]);
const sidePanelStyle = useMemo(() => {
const panel = mixColor(baseTone, { r: 0, g: 0, b: 0 }, 0.6);
const border = rgbToRgba(mixColor(baseTone, { r: 255, g: 255, b: 255 }, 0.1), 0.28);
return {
...viewerStyles.sidePanel,
background: rgbToRgba(panel, 0.8),
backdropFilter: 'blur(28px)',
borderLeft: `1px solid ${border}`,
};
}, [baseTone]);
const histogramCardStyle = useMemo(() => {
const tone = mixColor(baseTone, { r: 0, g: 0, b: 0 }, 0.55);
const stroke = rgbToRgba(mixColor(baseTone, { r: 255, g: 255, b: 255 }, 0.12), 0.2);
return {
...viewerStyles.histogramCard,
background: rgbToRgba(tone, 0.58),
border: `1px solid ${stroke}`,
};
}, [baseTone]);
useEffect(() => {
const normalized = filePath.startsWith('/') ? filePath : `/${filePath}`;
setActiveEntry(entry);
setActivePath(normalized);
}, [entry, filePath]);
useEffect(() => {
let cancelled = false;
setLoading(true); setErr(undefined);
vfsApi.getTempLinkToken(filePath.replace(/^\/+/, ''))
.then(res => {
setLoading(true);
setError(undefined);
setHistogram(null);
setDominantColor(null);
const cleaned = activePath.replace(/^\/+/, '');
Promise.all([
vfsApi.getTempLinkToken(cleaned),
vfsApi.stat(activePath) as Promise<FileStat>,
])
.then(([token, metadata]) => {
if (cancelled) return;
const publicUrl = vfsApi.getTempPublicUrl(res.token);
setUrl(publicUrl);
setImageUrl(vfsApi.getTempPublicUrl(token.token));
setStat(metadata);
setScale(1);
setRotate(0);
setOffset({ x: 0, y: 0 });
})
.catch(e => !cancelled && setErr(e.message || 'Failed to load'))
.finally(() => !cancelled && setLoading(false));
return () => { cancelled = true; };
}, [filePath]);
.catch((err: unknown) => {
if (!cancelled) {
setError(err instanceof Error ? err.message : 'Failed to load');
}
})
.finally(() => {
if (!cancelled) setLoading(false);
});
return () => {
cancelled = true;
};
}, [activePath]);
const refreshFilmstrip = useCallback((dir: string) => {
const snap = readExplorerSnapshot(dir);
if (snap) {
const images = snap.entries.filter(isImageEntry);
const ensured = images.some(item => item.name === activeEntry.name) ? images : [...images, activeEntry];
setFilmstrip(ensured);
if (snap.pagination) {
setPageInfo({
page: snap.pagination.page,
pageSize: snap.pagination.page_size,
total: snap.pagination.total,
});
} else {
setPageInfo(null);
}
return;
}
setFilmstrip([activeEntry]);
setPageInfo(null);
}, [activeEntry]);
useEffect(() => {
refreshFilmstrip(directory);
}, [directory, refreshFilmstrip]);
useEffect(() => {
const handler = () => refreshFilmstrip(directory);
window.addEventListener('foxel:file-explorer-page', handler);
return () => window.removeEventListener('foxel:file-explorer-page', handler);
}, [directory, refreshFilmstrip]);
useEffect(() => {
const el = filmstripRefs.current[activeEntry.name];
if (el) {
el.scrollIntoView({ behavior: 'smooth', inline: 'center', block: 'nearest' });
}
}, [activeEntry, filmstrip]);
useEffect(() => {
const keyHandler = (e: KeyboardEvent) => {
if (e.key === 'ArrowRight') {
e.preventDefault();
switchRelative(1);
} else if (e.key === 'ArrowLeft') {
e.preventDefault();
switchRelative(-1);
} else if ((e.key === '+' || e.key === '=') && (e.ctrlKey || e.metaKey)) {
e.preventDefault();
zoom(1.15);
} else if ((e.key === '-' || e.key === '_') && (e.ctrlKey || e.metaKey)) {
e.preventDefault();
zoom(0.85);
}
};
window.addEventListener('keydown', keyHandler);
return () => window.removeEventListener('keydown', keyHandler);
});
const zoom = useCallback((factor: number) => {
setScale(prev => {
const next = clamp(prev * factor, 0.08, 10);
transitionRef.current = true;
window.setTimeout(() => { transitionRef.current = false; }, 120);
return next;
});
}, []);
const rotateImage = () => {
setRotate(prev => {
transitionRef.current = true;
window.setTimeout(() => { transitionRef.current = false; }, 180);
return (prev + 90) % 360;
});
};
const resetView = () => {
transitionRef.current = true;
window.setTimeout(() => { transitionRef.current = false; }, 160);
setScale(1);
setOffset({ x: 0, y: 0 });
setRotate(0);
}, [url]);
};
const clamp = (v: number, a: number, b: number) => Math.max(a, Math.min(b, v));
const applyOffset = (next: { x: number; y: number }) => {
setOffset(next);
const fitToScreen = () => {
resetView();
};
const onWheel = (e: React.WheelEvent) => {
e.preventDefault();
const container = containerRef.current;
if (!container) return;
const rect = container.getBoundingClientRect();
const cx = e.clientX - rect.left - rect.width / 2;
const cy = e.clientY - rect.top - rect.height / 2;
setScale(prev => {
const factor = e.deltaY < 0 ? 1.12 : 0.88;
const next = clamp(prev * factor, 0.08, 10);
const ratio = next / prev;
setOffset(off => ({ x: off.x - cx * (ratio - 1), y: off.y - cy * (ratio - 1) }));
transitionRef.current = true;
window.setTimeout(() => { transitionRef.current = false; }, 120);
return next;
});
};
const onMouseDown = (e: React.MouseEvent) => {
if (e.button !== 0) return;
e.preventDefault();
setIsDragging(true);
lastPointer.current = { x: e.clientX, y: e.clientY };
transitionRef.current = false;
dragPointRef.current = { x: e.clientX, y: e.clientY };
};
const onMouseMove = (e: React.MouseEvent) => {
if (!isDragging || !lastPointer.current) return;
if (!isDragging || !dragPointRef.current) return;
e.preventDefault();
const dx = e.clientX - lastPointer.current.x;
const dy = e.clientY - lastPointer.current.y;
lastPointer.current = { x: e.clientX, y: e.clientY };
applyOffset({ x: offset.x + dx, y: offset.y + dy });
const dx = e.clientX - dragPointRef.current.x;
const dy = e.clientY - dragPointRef.current.y;
dragPointRef.current = { x: e.clientX, y: e.clientY };
setOffset(off => ({ x: off.x + dx, y: off.y + dy }));
};
const onMouseUp = () => {
const stopDragging = () => {
setIsDragging(false);
lastPointer.current = null;
dragPointRef.current = null;
};
const dist = (t1: React.Touch, t2: React.Touch) => Math.hypot(t1.clientX - t2.clientX, t1.clientY - t2.clientY);
const onTouchStart = (e: React.TouchEvent) => {
if (e.touches.length === 1) {
const t = e.touches[0];
dragPointRef.current = { x: t.clientX, y: t.clientY };
} else if (e.touches.length === 2) {
pinchDistanceRef.current = dist(e.touches[0], e.touches[1]);
}
};
const onTouchMove = (e: React.TouchEvent) => {
if (e.touches.length === 1 && dragPointRef.current) {
const t = e.touches[0];
const dx = t.clientX - dragPointRef.current.x;
const dy = t.clientY - dragPointRef.current.y;
dragPointRef.current = { x: t.clientX, y: t.clientY };
setOffset(off => ({ x: off.x + dx, y: off.y + dy }));
} else if (e.touches.length === 2 && pinchDistanceRef.current) {
const dNow = dist(e.touches[0], e.touches[1]);
const ratio = dNow / pinchDistanceRef.current;
pinchDistanceRef.current = dNow;
setScale(prev => clamp(prev * ratio, 0.08, 10));
}
};
const onTouchEnd = () => {
pinchDistanceRef.current = null;
dragPointRef.current = null;
};
const onDoubleClick = (e: React.MouseEvent) => {
e.preventDefault();
const cont = containerRef.current;
const img = imgRef.current;
if (!cont || !img) return;
const rect = cont.getBoundingClientRect();
const next = scale > 1.4 ? 1 : 2.2;
const container = containerRef.current;
if (!container) {
setScale(next);
return;
}
const rect = container.getBoundingClientRect();
const cx = e.clientX - rect.left - rect.width / 2;
const cy = e.clientY - rect.top - rect.height / 2;
const nextScale = scale > 1.5 ? 1 : 2.5;
const ratio = nextScale / scale;
const nextOffset = { x: offset.x - cx * (ratio - 1), y: offset.y - cy * (ratio - 1) };
setScale(nextScale);
transitionRef.current = true;
setTimeout(() => transitionRef.current = false, 200);
applyOffset(nextOffset);
const ratio = next / scale;
setScale(next);
setOffset(off => ({ x: off.x - cx * (ratio - 1), y: off.y - cy * (ratio - 1) }));
};
const onWheel = (e: React.WheelEvent) => {
e.preventDefault();
const delta = -e.deltaY;
const zoomFactor = delta > 0 ? 1.12 : 0.88;
const cont = containerRef.current;
if (!cont) return;
const rect = cont.getBoundingClientRect();
const cx = e.clientX - rect.left - rect.width / 2;
const cy = e.clientY - rect.top - rect.height / 2;
const nextScale = clamp(scale * zoomFactor, 0.5, 5);
const ratio = nextScale / scale;
const nextOffset = { x: offset.x - cx * (ratio - 1), y: offset.y - cy * (ratio - 1) };
setScale(nextScale);
transitionRef.current = true;
setTimeout(() => transitionRef.current = false, 120);
applyOffset(nextOffset);
const handleImageLoaded = () => {
const img = imageRef.current;
if (!img) return;
const stats = computeImageStats(img);
setHistogram(stats.histogram);
setDominantColor(stats.dominantColor);
};
const getTouchDistance = (t1: { clientX: number; clientY: number }, t2: { clientX: number; clientY: number }) =>
Math.hypot(t1.clientX - t2.clientX, t1.clientY - t2.clientY);
const onTouchStart = (e: React.TouchEvent) => {
if (e.touches.length === 1) {
const t = e.touches[0];
lastPointer.current = { x: t.clientX, y: t.clientY };
} else if (e.touches.length === 2) {
lastDistance.current = getTouchDistance(e.touches[0], e.touches[1]);
}
transitionRef.current = false;
};
const onTouchMove = (e: React.TouchEvent) => {
if (e.touches.length === 1 && lastPointer.current) {
const t = e.touches[0];
const dx = t.clientX - lastPointer.current.x;
const dy = t.clientY - lastPointer.current.y;
lastPointer.current = { x: t.clientX, y: t.clientY };
applyOffset({ x: offset.x + dx, y: offset.y + dy });
} else if (e.touches.length === 2 && lastDistance.current) {
const d = getTouchDistance(e.touches[0], e.touches[1]);
const ratio = d / lastDistance.current;
const nextScale = clamp(scale * ratio, 0.5, 5);
setScale(nextScale);
lastDistance.current = d;
}
};
const onTouchEnd = (e: React.TouchEvent) => {
if (e.touches.length === 0) {
lastPointer.current = null;
lastDistance.current = null;
}
};
const doZoom = (factor: number) => {
const nextScale = clamp(scale * factor, 0.5, 5);
setScale(nextScale);
transitionRef.current = true;
setTimeout(() => transitionRef.current = false, 120);
applyOffset(offset);
};
const resetView = () => {
setScale(1);
setOffset({ x: 0, y: 0 });
setRotate(0);
transitionRef.current = true;
setTimeout(() => transitionRef.current = false, 150);
};
const fitToContainer = () => {
setScale(1);
setOffset({ x: 0, y: 0 });
setRotate(0);
transitionRef.current = true;
setTimeout(() => transitionRef.current = false, 150);
};
const doRotate = () => {
setRotate(r => (r + 90) % 360);
transitionRef.current = true;
setTimeout(() => transitionRef.current = false, 180);
const switchEntry = (target: VfsEntry) => {
const nextPath = joinPath(directory, target.name);
setActiveEntry(target);
setActivePath(nextPath);
};
if (loading) {
return (
<div style={{
width: '100%',
height: '100%',
display: 'flex',
alignItems: 'center',
justifyContent: 'center',
background: 'rgba(20,20,20,0.8)',
backdropFilter: 'blur(24px)'
}}>
<Spin />
</div>
);
}
if (err) {
return (
<div style={{
color: '#f5222d',
padding: 16,
background: 'rgba(20,20,20,0.8)',
backdropFilter: 'blur(24px)'
}}>
Failed to load: {err}
</div>
);
}
if (!url) {
return (
<div style={{
padding: 16,
background: 'rgba(20,20,20,0.8)',
backdropFilter: 'blur(24px)'
}}>
</div>
);
}
const switchRelative = (step: number) => {
if (filmstrip.length <= 1) return;
const currentIndex = filmstrip.findIndex(item => item.name === activeEntry.name);
if (currentIndex === -1) return;
const target = filmstrip[(currentIndex + step + filmstrip.length) % filmstrip.length];
if (target) switchEntry(target);
};
const scaleLabel = `${(scale * 100).toFixed(scale >= 1 ? 0 : 1)}%`;
const imageStyle: React.CSSProperties = {
maxWidth: '100%',
maxHeight: '100%',
transform: `translate(${offset.x}px, ${offset.y}px) scale(${scale}) rotate(${rotate}deg)`,
transition: transitionRef.current ? 'transform 0.18s cubic-bezier(.4,.8,.4,1)' : undefined,
cursor: isDragging ? 'grabbing' : scale > 1 ? 'grab' : 'zoom-in',
willChange: 'transform',
};
const controlsNode = (
<ViewerControls
style={controlsStyle}
onPrev={() => switchRelative(-1)}
onNext={() => switchRelative(1)}
onZoomIn={() => zoom(1.18)}
onZoomOut={() => zoom(0.82)}
onRotate={rotateImage}
onReset={resetView}
onFit={fitToScreen}
disableSwitch={filmstrip.length <= 1}
/>
);
const exif = (stat?.exif ?? {}) as Record<string, unknown>;
const infoIconStyle: React.CSSProperties = { fontSize: 15, color: 'rgba(255,255,255,0.62)' };
const exifValue = (key: string): string | number | null => {
const value = exif[key];
if (typeof value === 'string' || typeof value === 'number') return value;
return null;
};
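// The numeric keys below are standard EXIF tag IDs (decimal): 37386 FocalLength,
// 37377 ShutterSpeedValue, 33437 FNumber, 37378 ApertureValue, 33434 ExposureTime,
// 34855/34864 ISO, 40962/40963 PixelXDimension/PixelYDimension, 40961 ColorSpace,
// 271/272 Make/Model, 42036 LensModel, 36867/36868/306 capture/modify timestamps.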
const focalLength = (() => {
const v = parseNumberish(exifValue('37386') ?? exifValue('37377'));
return v ? `${v.toFixed(1)} mm` : null;
})();
const aperture = (() => {
const v = parseNumberish(exifValue('33437') ?? exifValue('37378'));
return v ? `f/${v.toFixed(1)}` : null;
})();
const exposure = (() => {
const v = parseNumberish(exifValue('33434'));
if (!v) return null;
if (v >= 1) return `${v.toFixed(1)} s`;
const denom = Math.max(1, Math.round(1 / v));
return `1/${denom}`;
})();
const isoValue = exifValue('34855') ?? exifValue('34864');
const width = parseNumberish(exifValue('40962'));
const height = parseNumberish(exifValue('40963'));
const colorSpace = exifValue('40961');
const cameraMake = exifValue('271');
const cameraModel = exifValue('272');
const lensModel = exifValue('42036');
const captureTime = exifValue('36867') ?? exifValue('36868') ?? exifValue('306');
const basicList: InfoItem[] = [
{ label: 'File name', value: activeEntry.name, icon: <FileOutlined style={infoIconStyle} /> },
{ label: 'File size', value: humanFileSize(stat?.size), icon: <DatabaseOutlined style={infoIconStyle} /> },
{ label: 'Resolution', value: width && height ? `${width} × ${height}` : null, icon: <ExpandOutlined style={infoIconStyle} /> },
{ label: 'Color space', value: colorSpace ?? null, icon: <BgColorsOutlined style={infoIconStyle} /> },
{ label: 'Modified', value: stat?.mtime ? formatDateTime(stat.mtime) : null, icon: <ClockCircleOutlined style={infoIconStyle} /> },
{ label: 'Path', value: typeof stat?.path === 'string' ? stat.path : activePath, icon: <FolderOutlined style={infoIconStyle} /> },
];
const shootingList: InfoItem[] = [
{ label: 'Focal length', value: focalLength, icon: <AimOutlined style={infoIconStyle} /> },
{ label: 'Aperture', value: aperture, icon: <BulbOutlined style={infoIconStyle} /> },
{ label: 'Shutter', value: exposure, icon: <ThunderboltOutlined style={infoIconStyle} /> },
{ label: 'ISO', value: isoValue != null ? isoValue.toString() : null, icon: <AlertOutlined style={infoIconStyle} /> },
];
const deviceList: InfoItem[] = [
{
label: 'Camera',
value: cameraModel ? `${cameraMake ? `${cameraMake} ` : ''}${cameraModel}` : (cameraMake ?? null),
icon: <CameraOutlined style={infoIconStyle} />,
},
{ label: 'Lens', value: lensModel ?? null, icon: <ApiOutlined style={infoIconStyle} /> },
];
const miscList: InfoItem[] = [
{ label: 'Captured', value: captureTime, icon: <FieldTimeOutlined style={infoIconStyle} /> },
];
return (
<div style={containerStyle}>
<section style={viewerStyles.main}>
<div style={mainBackdropStyle} />
<div style={viewerStyles.mainContent}>
<ImageCanvas
containerRef={containerRef}
imageRef={imageRef}
viewerStyle={viewerStyle}
controls={controlsNode}
scaleLabel={scaleLabel}
imageStyle={imageStyle}
loading={loading}
error={error}
imageUrl={imageUrl}
activeEntry={activeEntry}
onRequestClose={onRequestClose}
onImageLoad={handleImageLoaded}
onWheel={onWheel}
onMouseDown={onMouseDown}
onMouseMove={onMouseMove}
onMouseLeave={stopDragging}
onMouseUp={stopDragging}
onDoubleClick={onDoubleClick}
onTouchStart={onTouchStart}
onTouchMove={onTouchMove}
onTouchEnd={onTouchEnd}
/>
<Filmstrip
shellStyle={filmstripShellStyle}
listStyle={viewerStyles.filmstrip}
entries={filmstrip}
activeEntry={activeEntry}
onSelect={switchEntry}
filmstripRefs={filmstripRefs}
pageInfo={pageInfo}
getThumbUrl={getThumbUrl}
/>
</div>
</section>
<InfoPanel
style={sidePanelStyle}
histogramCardStyle={histogramCardStyle}
title={activeEntry.name}
captureTime={captureTime ?? null}
basicList={basicList}
shootingList={shootingList}
deviceList={deviceList}
miscList={miscList}
histogram={histogram}
/>
</div>
);
};


@@ -0,0 +1,94 @@
import React from 'react';
import { Typography } from 'antd';
import type { VfsEntry } from '../../../api/client';
interface PageInfo {
page: number;
total: number;
pageSize: number;
}
interface FilmstripProps {
shellStyle: React.CSSProperties;
listStyle: React.CSSProperties;
entries: VfsEntry[];
activeEntry: VfsEntry;
onSelect: (entry: VfsEntry) => void;
filmstripRefs: React.MutableRefObject<Record<string, HTMLDivElement | null>>;
pageInfo: PageInfo | null;
getThumbUrl: (entry: VfsEntry) => string;
}
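// Horizontal thumbnail strip. The active entry gets a highlight border and full
// saturation; refs are collected per file name so the parent can, for example,
// scroll the active thumbnail into view.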
export const Filmstrip: React.FC<FilmstripProps> = ({
shellStyle,
listStyle,
entries,
activeEntry,
onSelect,
filmstripRefs,
pageInfo,
getThumbUrl,
}) => (
<div style={shellStyle}>
<div style={{ display: 'flex', justifyContent: 'space-between', alignItems: 'center', marginBottom: 10 }}>
<Typography.Text style={{ color: 'rgba(255,255,255,0.72)', fontWeight: 500 }}>
图片 · {entries.length}
</Typography.Text>
{pageInfo && (
<Typography.Text style={{ color: 'rgba(255,255,255,0.45)', fontSize: 12 }}>
{pageInfo.page} / {Math.max(1, Math.ceil(pageInfo.total / pageInfo.pageSize))}
</Typography.Text>
)}
</div>
<div style={listStyle}>
{entries.map(item => {
const active = item.name === activeEntry.name;
return (
<div
key={`${item.name}-${item.mtime ?? ''}`}
ref={el => { filmstripRefs.current[item.name] = el; }}
onClick={() => onSelect(item)}
style={{
width: 84,
height: 64,
overflow: 'hidden',
border: active ? '2px solid #4e9bff' : '2px solid transparent',
boxShadow: active ? '0 0 0 4px rgba(78,155,255,0.28)' : '0 10px 28px rgba(0,0,0,0.45)',
cursor: 'pointer',
position: 'relative',
flex: '0 0 auto',
}}
>
<img
src={getThumbUrl(item)}
alt={item.name}
style={{ width: '100%', height: '100%', objectFit: 'cover', filter: active ? 'saturate(1)' : 'saturate(0.65)' }}
/>
{active && (
<div
style={{
position: 'absolute',
bottom: 4,
left: 6,
right: 6,
padding: '2px 4px',
background: 'rgba(0,0,0,0.55)',
color: '#fff',
fontSize: 10,
whiteSpace: 'nowrap',
overflow: 'hidden',
textOverflow: 'ellipsis',
}}
>
{item.name}
</div>
)}
</div>
);
})}
{entries.length === 0 && (
<div style={{ color: 'rgba(255,255,255,0.45)' }}>暂无图片</div>
)}
</div>
</div>
);


@@ -0,0 +1,99 @@
import React from 'react';
import { Spin, Typography, Tooltip, Button } from 'antd';
import { CloseOutlined } from '@ant-design/icons';
import type { VfsEntry } from '../../../api/client';
import { viewerStyles } from '../styles';
interface ImageCanvasProps {
containerRef: React.RefObject<HTMLDivElement | null>;
imageRef: React.RefObject<HTMLImageElement | null>;
viewerStyle: React.CSSProperties;
controls: React.ReactNode;
scaleLabel: string;
imageStyle: React.CSSProperties;
loading: boolean;
error?: string;
imageUrl?: string;
activeEntry: VfsEntry;
onRequestClose: () => void;
onImageLoad: () => void;
onWheel: React.WheelEventHandler<HTMLDivElement>;
onMouseDown: React.MouseEventHandler<HTMLDivElement>;
onMouseMove: React.MouseEventHandler<HTMLDivElement>;
onMouseLeave: React.MouseEventHandler<HTMLDivElement>;
onMouseUp: React.MouseEventHandler<HTMLDivElement>;
onDoubleClick: React.MouseEventHandler<HTMLDivElement>;
onTouchStart: React.TouchEventHandler<HTMLDivElement>;
onTouchMove: React.TouchEventHandler<HTMLDivElement>;
onTouchEnd: React.TouchEventHandler<HTMLDivElement>;
}
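// Presentational canvas surface: it renders the loading / error / image states
// and forwards all mouse, wheel and touch gestures to handlers owned by the
// parent viewer, which keeps zoom and pan state outside this component.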
export const ImageCanvas: React.FC<ImageCanvasProps> = ({
containerRef,
imageRef,
viewerStyle,
controls,
scaleLabel,
imageStyle,
loading,
error,
imageUrl,
activeEntry,
onRequestClose,
onImageLoad,
onWheel,
onMouseDown,
onMouseMove,
onMouseLeave,
onMouseUp,
onDoubleClick,
onTouchStart,
onTouchMove,
onTouchEnd,
}) => (
<div
ref={containerRef}
style={viewerStyle}
onWheel={onWheel}
onMouseDown={onMouseDown}
onMouseMove={onMouseMove}
onMouseLeave={onMouseLeave}
onMouseUp={onMouseUp}
onDoubleClick={onDoubleClick}
onTouchStart={onTouchStart}
onTouchMove={onTouchMove}
onTouchEnd={onTouchEnd}
>
<div style={viewerStyles.viewerCloseWrap}>
<Tooltip title="关闭">
<Button
type="text"
icon={<CloseOutlined />}
onClick={onRequestClose}
style={viewerStyles.viewerClose}
/>
</Tooltip>
</div>
{loading ? (
<Spin tip="加载中" />
) : error ? (
<Typography.Text type="danger">{error}</Typography.Text>
) : imageUrl ? (
<img
ref={imageRef}
src={imageUrl}
alt={activeEntry.name}
onLoad={onImageLoad}
draggable={false}
crossOrigin="anonymous"
style={imageStyle}
/>
) : (
<Typography.Text>暂无图片</Typography.Text>
)}
<div style={viewerStyles.scaleBadge}>{scaleLabel}</div>
{controls}
</div>
);


@@ -0,0 +1,116 @@
import React from 'react';
import { Typography, Empty } from 'antd';
import type { HistogramData, InfoItem } from './types';
interface InfoPanelProps {
style: React.CSSProperties;
histogramCardStyle: React.CSSProperties;
title: string;
captureTime: string | number | null;
basicList: InfoItem[];
shootingList: InfoItem[];
deviceList: InfoItem[];
miscList: InfoItem[];
histogram: HistogramData | null;
}
const SectionTitle: React.FC<{ children: React.ReactNode }> = ({ children }) => (
<Typography.Title level={5} style={{ color: '#fff', fontSize: 15, marginTop: 24, marginBottom: 12 }}>
{children}
</Typography.Title>
);
const HistogramPlot: React.FC<{ data: HistogramData | null }> = ({ data }) => {
if (!data) {
return <Empty description="无法解析直方图" image={Empty.PRESENTED_IMAGE_SIMPLE} />;
}
const width = 260;
const height = 140;
const max = Math.max(...data.r, ...data.g, ...data.b, 1);
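// Convert the 256 histogram bins into an SVG path: x spans the full width and
// y is normalized against the global max, inverted because the SVG origin is
// at the top-left corner.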
const toPath = (arr: number[]) => arr
.map((value, index) => {
const x = (index / 255) * width;
const y = height - (value / max) * height;
return `${index === 0 ? 'M' : 'L'}${x.toFixed(2)},${y.toFixed(2)}`;
})
.join(' ');
return (
<svg width={width} height={height} viewBox={`0 0 ${width} ${height}`} style={{ width: '100%' }}>
<rect x={0} y={0} width={width} height={height} fill="rgba(255,255,255,0.04)" />
<path d={toPath(data.r)} stroke="rgba(255,99,132,0.88)" fill="none" strokeWidth={1.3} />
<path d={toPath(data.g)} stroke="rgba(75,192,192,0.88)" fill="none" strokeWidth={1.3} />
<path d={toPath(data.b)} stroke="rgba(54,162,235,0.88)" fill="none" strokeWidth={1.3} />
</svg>
);
};
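// Two-column label/value grid; rows whose value is empty are filtered out so a
// section only shows fields the image actually provides.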
const InfoRows: React.FC<{ items: InfoItem[] }> = ({ items }) => (
<div style={{ display: 'grid', gridTemplateColumns: '100px 1fr', rowGap: 10, columnGap: 12 }}>
{items
.filter(item => item.value !== null && item.value !== undefined && item.value !== '')
.map(item => (
<React.Fragment key={item.label}>
<span style={{ display: 'inline-flex', alignItems: 'center', gap: 6, color: 'rgba(255,255,255,0.55)' }}>
{item.icon && <span style={{ display: 'inline-flex', alignItems: 'center' }}>{item.icon}</span>}
<span>{item.label}</span>
</span>
<span style={{ color: '#fff', wordBreak: 'break-all' }}>{item.value}</span>
</React.Fragment>
))}
</div>
);
export const InfoPanel: React.FC<InfoPanelProps> = ({
style,
histogramCardStyle,
title,
captureTime,
basicList,
shootingList,
deviceList,
miscList,
histogram,
}) => (
<aside style={style}>
<Typography.Title level={3} style={{ color: '#fff', marginTop: 6, wordBreak: 'break-all' }}>
{title}
</Typography.Title>
{captureTime && (
<Typography.Text style={{ color: 'rgba(255,255,255,0.6)' }}>拍摄于 {captureTime}</Typography.Text>
)}
<SectionTitle>基本信息</SectionTitle>
<InfoRows items={basicList} />
{shootingList.some(i => i.value) && (
<>
<SectionTitle>拍摄参数</SectionTitle>
<InfoRows items={shootingList} />
</>
)}
{deviceList.some(i => i.value) && (
<>
<SectionTitle>设备</SectionTitle>
<InfoRows items={deviceList} />
</>
)}
{miscList.some(i => i.value) && (
<>
<SectionTitle>其他</SectionTitle>
<InfoRows items={miscList} />
</>
)}
<SectionTitle>直方图</SectionTitle>
<div style={histogramCardStyle}>
<HistogramPlot data={histogram} />
<div style={{ marginTop: 12, display: 'flex', gap: 12, fontSize: 12 }}>
<span style={{ color: 'rgba(255,99,132,0.88)' }}>R</span>
<span style={{ color: 'rgba(75,192,192,0.88)' }}>G</span>
<span style={{ color: 'rgba(54,162,235,0.88)' }}>B</span>
</div>
</div>
</aside>
);


@@ -0,0 +1,73 @@
import React from 'react';
import { Button, Tooltip } from 'antd';
import {
LeftOutlined,
RightOutlined,
ZoomInOutlined,
ZoomOutOutlined,
RotateRightOutlined,
ReloadOutlined,
CompressOutlined,
} from '@ant-design/icons';
interface ViewerControlsProps {
style: React.CSSProperties;
onPrev: () => void;
onNext: () => void;
onZoomIn: () => void;
onZoomOut: () => void;
onRotate: () => void;
onReset: () => void;
onFit: () => void;
disableSwitch: boolean;
}
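// Floating control cluster: previous/next switching plus zoom, rotate, reset
// and fit-to-window actions. Switching is disabled when the filmstrip holds a
// single image.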
export const ViewerControls: React.FC<ViewerControlsProps> = ({
style,
onPrev,
onNext,
onZoomIn,
onZoomOut,
onRotate,
onReset,
onFit,
disableSwitch,
}) => (
<div style={style}>
<Tooltip title="上一张">
<Button
shape="circle"
type="text"
icon={<LeftOutlined />}
onClick={onPrev}
disabled={disableSwitch}
style={{ color: '#fff' }}
/>
</Tooltip>
<Tooltip title="缩小">
<Button shape="circle" type="text" icon={<ZoomOutOutlined />} onClick={onZoomOut} style={{ color: '#fff' }} />
</Tooltip>
<Tooltip title="放大">
<Button shape="circle" type="text" icon={<ZoomInOutlined />} onClick={onZoomIn} style={{ color: '#fff' }} />
</Tooltip>
<Tooltip title="旋转 90°">
<Button shape="circle" type="text" icon={<RotateRightOutlined />} onClick={onRotate} style={{ color: '#fff' }} />
</Tooltip>
<Tooltip title="重置">
<Button shape="circle" type="text" icon={<ReloadOutlined />} onClick={onReset} style={{ color: '#fff' }} />
</Tooltip>
<Tooltip title="适应窗口">
<Button shape="circle" type="text" icon={<CompressOutlined />} onClick={onFit} style={{ color: '#fff' }} />
</Tooltip>
<Tooltip title="下一张">
<Button
shape="circle"
type="text"
icon={<RightOutlined />}
onClick={onNext}
disabled={disableSwitch}
style={{ color: '#fff' }}
/>
</Tooltip>
</div>
);


@@ -0,0 +1,19 @@
import type { ReactNode } from 'react';
export interface HistogramData {
r: number[];
g: number[];
b: number[];
}
export interface RgbColor {
r: number;
g: number;
b: number;
}
export interface InfoItem {
label: string;
value: string | number | null;
icon?: ReactNode;
}


@@ -4,6 +4,7 @@ import { ImageViewerApp } from './ImageViewer.tsx';
export const descriptor: AppDescriptor = {
key: 'image-viewer',
name: '图片查看器',
iconUrl: 'https://api.iconify.design/mdi:image.svg',
supported: (entry) => {
if (entry.is_dir) return false;
const ext = entry.name.split('.').pop()?.toLowerCase() || '';
@@ -14,4 +15,4 @@ export const descriptor: AppDescriptor = {
defaultMaximized: true,
useSystemWindow: false,
defaultBounds: { width: 820, height: 620, x: 140, y: 96 }
};


@@ -0,0 +1,106 @@
export const viewerStyles = {
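// Two-column shell for the viewer: a flexible canvas column plus a fixed 320px
// info panel (see gridTemplateColumns below).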
container: {
width: '100%',
height: '100%',
boxSizing: 'border-box' as const,
display: 'grid',
gridTemplateColumns: 'minmax(0, 1fr) 320px',
columnGap: 0,
color: '#fff',
overflow: 'hidden',
},
main: {
position: 'relative' as const,
overflow: 'hidden',
display: 'flex',
flexDirection: 'column' as const,
boxShadow: '0 28px 80px rgba(0,0,0,0.55)',
minHeight: 0,
},
mainBackdrop: {
position: 'absolute' as const,
inset: 0,
},
mainContent: {
position: 'relative' as const,
zIndex: 1,
display: 'flex',
flexDirection: 'column' as const,
flex: 1,
padding: 0,
minHeight: 0,
minWidth: 0,
},
viewer: {
flex: 1,
display: 'flex',
alignItems: 'center',
justifyContent: 'center',
position: 'relative' as const,
overflow: 'hidden',
boxShadow: '0 24px 60px rgba(0,0,0,0.5)',
touchAction: 'none' as const,
minHeight: 0,
},
controls: {
position: 'absolute' as const,
bottom: 16,
left: '50%',
transform: 'translateX(-50%)',
display: 'flex',
gap: 16,
padding: '8px 18px',
borderRadius: 24,
alignItems: 'center',
},
scaleBadge: {
position: 'absolute' as const,
bottom: 64,
left: 16,
color: 'rgba(255,255,255,0.7)',
fontSize: 12,
letterSpacing: 0.2,
},
filmstripShell: {
marginTop: 0,
padding: '3px 12px',
boxShadow: '0 16px 42px rgba(0,0,0,0.52)',
},
filmstrip: {
display: 'flex',
overflowX: 'auto' as const,
gap: 12,
paddingBottom: 4,
},
sidePanel: {
boxShadow: '0 28px 80px rgba(0,0,0,0.55)',
padding: '20px 24px',
display: 'flex',
flexDirection: 'column' as const,
overflowY: 'auto' as const,
minHeight: 0,
},
histogramCard: {
padding: '12px 12px 18px',
background: 'rgba(0,0,0,0.34)',
borderRadius: 0,
},
viewerCloseWrap: {
position: 'absolute' as const,
top: 16,
right: 16,
zIndex: 2,
},
viewerClose: {
color: '#fff',
background: 'rgba(0,0,0,0.4)',
border: '1px solid rgba(255,255,255,0.25)',
boxShadow: '0 8px 18px rgba(0,0,0,0.45)',
borderRadius: '100%',
width: 32,
height: 32,
display: 'flex',
alignItems: 'center',
justifyContent: 'center',
},
};


@@ -2,8 +2,10 @@ import React, { useEffect, useState } from 'react';
import { vfsApi } from '../../api/client';
import type { AppComponentProps } from '../types';
import { Spin, Result, Button } from 'antd';
import { useSystemStatus } from '../../contexts/SystemContext';
export const OfficeViewerApp: React.FC<AppComponentProps> = ({ filePath, onRequestClose }) => {
const systemStatus = useSystemStatus();
const [url, setUrl] = useState<string>();
const [loading, setLoading] = useState(true);
const [err, setErr] = useState<string>();
@@ -17,8 +19,8 @@ export const OfficeViewerApp: React.FC<AppComponentProps> = ({ filePath, onReque
vfsApi.getTempLinkToken(filePath.replace(/^\/+/, ''))
.then(res => {
if (cancelled) return;
const baseUrl = systemStatus?.file_domain || window.location.origin;
const fullUrl = new URL(res.url, baseUrl).href;
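// The Microsoft Office web viewer fetches the document itself, so the URL
// passed via `src` must be absolute and publicly reachable; that is why
// file_domain (when configured) takes precedence over window.location.origin.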
const officeUrl = `https://view.officeapps.live.com/op/embed.aspx?src=${encodeURIComponent(fullUrl)}`;
setUrl(officeUrl);
})
@@ -58,7 +60,7 @@ export const OfficeViewerApp: React.FC<AppComponentProps> = ({ filePath, onReque
}
return (
<div style={{ width: '100%', height: '100%', background: 'var(--ant-color-bg-container, #fff)' }}>
{url ? (
<iframe
src={url}
@@ -77,4 +79,4 @@ export const OfficeViewerApp: React.FC<AppComponentProps> = ({ filePath, onReque
)}
</div>
);
};


@@ -4,6 +4,7 @@ import { OfficeViewerApp } from './OfficeViewer.tsx';
export const descriptor: AppDescriptor = {
key: 'office-viewer',
name: 'Office 文档查看器',
iconUrl: 'https://api.iconify.design/mdi:file-word-box.svg',
supported: (entry) => {
if (entry.is_dir) return false;
const ext = entry.name.split('.').pop()?.toLowerCase() || '';
@@ -12,4 +13,4 @@ export const descriptor: AppDescriptor = {
component: OfficeViewerApp,
default: true,
defaultBounds: { width: 1024, height: 768, x: 150, y: 100 }
};


@@ -0,0 +1,74 @@
import React, { useEffect, useState } from 'react';
import { Spin, Result, Button } from 'antd';
import type { AppComponentProps } from '../types';
import { vfsApi } from '../../api/client';
export const PdfViewerApp: React.FC<AppComponentProps> = ({ filePath, onRequestClose }) => {
const [url, setUrl] = useState<string>();
const [loading, setLoading] = useState(true);
const [err, setErr] = useState<string>();
useEffect(() => {
let cancelled = false;
setLoading(true);
setErr(undefined);
setUrl(undefined);
vfsApi.getTempLinkToken(filePath.replace(/^\/+/, ''))
.then(res => {
if (cancelled) return;
const publicUrl = vfsApi.getTempPublicUrl(res.token);
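// '#toolbar=1&navpanes=1' are Adobe PDF open parameters; built-in browser
// viewers honor some of them and silently ignore the rest.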
setUrl(publicUrl + '#toolbar=1&navpanes=1');
})
.catch(e => {
if (!cancelled) setErr(e.message || '获取临时链接失败');
})
.finally(() => {
if (!cancelled) setLoading(false);
});
return () => { cancelled = true; };
}, [filePath]);
if (loading) {
return (
<div style={{ width: '100%', height: '100%', display: 'flex', alignItems: 'center', justifyContent: 'center' }}>
<Spin tip="正在加载 PDF..." />
</div>
);
}
if (err) {
return (
<Result
status="error"
title="无法加载 PDF"
subTitle={err}
extra={<Button type="primary" onClick={onRequestClose}>关闭</Button>}
/>
);
}
if (!url) {
return (
<Result
status="warning"
title="无可用链接"
subTitle="未能生成 PDF 的临时访问链接"
extra={<Button type="primary" onClick={onRequestClose}>关闭</Button>}
/>
);
}
return (
<div style={{ width: '100%', height: '100%', background: 'var(--ant-color-bg-container, #fff)' }}>
<iframe
src={url}
width="100%"
height="100%"
title="PDF Viewer"
style={{ border: 'none' }}
/>
</div>
);
};


@@ -0,0 +1,16 @@
import type { AppDescriptor } from '../types';
import { PdfViewerApp } from './PdfViewer';
export const descriptor: AppDescriptor = {
key: 'pdf-viewer',
name: 'PDF 查看器',
iconUrl: 'https://api.iconify.design/mdi:file-pdf-box.svg',
supported: (entry) => {
if (entry.is_dir) return false;
const ext = entry.name.split('.').pop()?.toLowerCase() || '';
return ext === 'pdf';
},
component: PdfViewerApp,
default: true,
defaultBounds: { width: 1024, height: 768, x: 160, y: 100 },
};


@@ -0,0 +1,59 @@
import React, { useRef, useState } from 'react';
import type { AppComponentProps } from '../types';
import { vfsApi } from '../../api/vfs';
import { loadPluginFromUrl, ensureManifest, type RegisteredPlugin } from '../../plugins/runtime';
import type { PluginItem } from '../../api/plugins';
import { useAsyncSafeEffect } from '../../hooks/useAsyncSafeEffect';
import { useI18n } from '../../i18n';
export interface PluginAppHostProps extends AppComponentProps {
plugin: PluginItem;
}
export const PluginAppHost: React.FC<PluginAppHostProps> = ({ plugin, filePath, entry, onRequestClose }) => {
const containerRef = useRef<HTMLDivElement>(null);
const [error, setError] = useState<string | null>(null);
const onCloseRef = useRef(onRequestClose);
onCloseRef.current = onRequestClose;
const { t } = useI18n();
const pluginRef = useRef<RegisteredPlugin | null>(null);
useAsyncSafeEffect(
async ({ isDisposed }) => {
try {
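// Load the plugin bundle, make sure its manifest is registered, mint a
// temporary download URL for the target file, then mount the plugin into this
// host's container. isDisposed() guards each await so a window closed
// mid-flight never mounts a stale plugin.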
const p = await loadPluginFromUrl(plugin.url);
if (isDisposed()) return;
pluginRef.current = p;
await ensureManifest(plugin.id, p);
if (isDisposed()) return;
const token = await vfsApi.getTempLinkToken(filePath);
if (isDisposed()) return;
const downloadUrl = vfsApi.getTempPublicUrl(token.token);
if (isDisposed() || !containerRef.current) return;
await p.mount(containerRef.current, {
filePath,
entry,
urls: { downloadUrl },
host: { close: () => onCloseRef.current() },
});
} catch (e: any) {
if (!isDisposed()) setError(e?.message || t('Plugin run failed'));
}
},
[plugin.id, plugin.url, filePath],
() => {
try {
if (pluginRef.current?.unmount && containerRef.current) {
pluginRef.current.unmount(containerRef.current);
}
} catch {}
},
);
if (error) {
return <div style={{ padding: 12, color: 'red' }}>{t('Plugin Error')}: {error}</div>;
}
return <div ref={containerRef} style={{ width: '100%', height: '100%', overflow: 'auto' }} />;
};


@@ -1,8 +1,11 @@
import React, { useState, useEffect, useCallback, useRef, useMemo, Suspense } from 'react';
import { Layout, Spin, Button, Space, message } from 'antd';
import type { AppComponentProps } from '../types';
import { vfsApi } from '../../api/vfs';
import request from '../../api/client';
const MonacoEditor = React.lazy(() => import('@monaco-editor/react'));
const MarkdownEditor = React.lazy(() => import('@uiw/react-md-editor'));
const { Header, Content } = Layout;
@@ -11,20 +14,66 @@ export const TextEditorApp: React.FC<AppComponentProps> = ({ filePath, entry, on
const [saving, setSaving] = useState(false);
const [content, setContent] = useState('');
const [initialContent, setInitialContent] = useState('');
const [truncated, setTruncated] = useState(false);
const MAX_PREVIEW_BYTES = 1024 * 1024; // 1MB
const isDirty = content !== initialContent;
// Hold the latest onRequestClose in a ref so it never becomes an effect dependency
const onRequestCloseRef = useRef(onRequestClose);
onRequestCloseRef.current = onRequestClose;
const ext = useMemo(() => entry.name.split('.').pop()?.toLowerCase() || '', [entry.name]);
const isMarkdown = ext === 'md' || ext === 'markdown';
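// Map common file extensions to Monaco language ids; anything unrecognized
// falls back to plaintext.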
const monacoLanguage = useMemo(() => {
switch (ext) {
case 'json':
return 'json';
case 'js':
return 'javascript';
case 'ts':
return 'typescript';
case 'html':
return 'html';
case 'css':
return 'css';
case 'py':
return 'python';
case 'sh':
return 'shell';
case 'yaml':
case 'yml':
return 'yaml';
case 'xml':
return 'xml';
case 'txt':
case 'log':
default:
return 'plaintext';
}
}, [ext]);
useEffect(() => {
const loadFile = async () => {
try {
setLoading(true);
setTruncated(false);
const shouldTruncate = (entry.size ?? 0) > MAX_PREVIEW_BYTES;
if (shouldTruncate) {
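// Fetch only the first 1 MB via an HTTP Range request so huge files do not
// lock up the editor (this assumes the /fs/file endpoint honors Range).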
const enc = encodeURI(filePath.replace(/^\/+/, ''));
const resp = await request(`/fs/file/${enc}`, {
method: 'GET',
headers: { Range: `bytes=0-${MAX_PREVIEW_BYTES - 1}` },
rawResponse: true,
});
const buf = await (resp as Response).arrayBuffer();
const text = new TextDecoder().decode(buf);
setContent(text);
setInitialContent(text);
setTruncated(true);
} else {
const data = await vfsApi.readFile(filePath);
const text = typeof data === 'string' ? data : new TextDecoder().decode(data);
setContent(text);
setInitialContent(text);
}
} catch (error) {
message.error(`加载文件失败: ${error instanceof Error ? error.message : '未知错误'}`);
onRequestCloseRef.current();
@@ -33,9 +82,12 @@ export const TextEditorApp: React.FC<AppComponentProps> = ({ filePath, entry, on
}
};
loadFile();
}, [filePath, entry.size]);
const handleSave = useCallback(async () => {
if (truncated) {
message.warning('大文件仅预览前 1MB,已禁用保存');
return;
}
if (!isDirty) return;
try {
setSaving(true);
@@ -48,7 +100,7 @@ export const TextEditorApp: React.FC<AppComponentProps> = ({ filePath, entry, on
} finally {
setSaving(false);
}
}, [content, filePath, isDirty, truncated]);
useEffect(() => {
const handleKeyDown = (event: KeyboardEvent) => {
@@ -64,23 +116,23 @@ export const TextEditorApp: React.FC<AppComponentProps> = ({ filePath, entry, on
}, [handleSave]);
return (
<Layout style={{ height: '100%', background: 'var(--ant-color-bg-container, #ffffff)' }}>
<Header
style={{
background: 'var(--ant-color-bg-layout, #f0f2f5)',
padding: '0 16px',
height: 40,
display: 'flex',
alignItems: 'center',
justifyContent: 'space-between',
borderBottom: '1px solid var(--ant-color-border-secondary, #d9d9d9)'
}}
>
<span style={{ color: 'var(--ant-color-text, rgba(0,0,0,0.88))' }}>
{entry.name} {isDirty && '*'} {truncated && '(大文件仅预览前 1MB,编辑与保存已禁用)'}
</span>
<Space>
<Button type="primary" size="small" onClick={handleSave} loading={saving} disabled={!isDirty}>
<Button type="primary" size="small" onClick={handleSave} loading={saving} disabled={!isDirty || truncated}>
</Button>
</Space>
@@ -91,14 +143,34 @@ export const TextEditorApp: React.FC<AppComponentProps> = ({ filePath, entry, on
<Spin />
</div>
) : (
isMarkdown ? (
<Suspense fallback={<Spin style={{ marginTop: 24 }} />}>
<MarkdownEditor
value={content}
onChange={(val) => setContent(val || '')}
height="100%"
preview={truncated ? 'preview' : 'live'}
/>
</Suspense>
) : (
<Suspense fallback={<Spin style={{ marginTop: 24 }} />}>
<MonacoEditor
value={content}
onChange={(val) => setContent(val || '')}
height="100%"
language={monacoLanguage}
options={{
readOnly: truncated,
minimap: { enabled: false },
scrollBeyondLastLine: false,
wordWrap: 'on',
fontSize: 13,
}}
/>
</Suspense>
)
)}
</Content>
</Layout>
);
};

Some files were not shown because too many files have changed in this diff.