commit eadd3f8961356a090325a0bf28454324dd340b92
Author: Awuqing <3184394176@qq.com>
Date: Tue Mar 17 13:29:09 2026 +0800
first commit
diff --git a/.DS_Store b/.DS_Store
new file mode 100644
index 0000000..574bd24
Binary files /dev/null and b/.DS_Store differ
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..735a8b4
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+web/node_modules/
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..90c2ccf
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,191 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to the Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by the Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding any notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ Copyright 2026 BackupX Contributors
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..1a8dd6b
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,30 @@
+# Top-level build/dev/test entry points. All targets are command aliases, so
+# every one of them must be declared .PHONY (otherwise a file named e.g.
+# "test-server" in the repo root would silently shadow the target). The old
+# list also declared a "dev" target that does not exist.
+.PHONY: build build-server build-web dev-server dev-web test test-server test-web clean
+
+# Build backend and frontend in one shot
+build: build-server build-web
+
+build-server:
+	cd server && go build -o bin/backupx ./cmd/backupx
+
+build-web:
+	cd web && npm run build
+
+# Development mode (run each target in its own terminal)
+dev-server:
+	cd server && go run ./cmd/backupx
+
+dev-web:
+	cd web && npm run dev
+
+# Run all tests
+test: test-server test-web
+
+test-server:
+	cd server && go test ./...
+
+test-web:
+	cd web && npm run test
+
+# Remove build artifacts
+clean:
+	rm -rf server/bin web/dist
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..b15bbb5
--- /dev/null
+++ b/README.md
@@ -0,0 +1,436 @@
+
+
+ 🛡️ BackupX
+
+ Self-hosted Server Backup Management Platform with Web UI
+
+
+ Features •
+ Quick Start •
+ Configuration •
+ Architecture •
+ Cluster •
+ Development •
+ API
+
+
+
+
+
+
+
+
+
+
+---
+
+BackupX 是一个面向 **Linux / macOS 服务器**的自托管备份管理平台。通过企业级 Web 控制台,轻松配置目录备份、数据库备份,并将备份文件安全存储到阿里云 OSS、腾讯云 COS、七牛云 Kodo、Google Drive、S3 兼容存储、WebDAV 或本地磁盘。
+
+支持 **多节点集群管理**,可统一管控分布在不同服务器上的备份任务。
+
+> **适用人群**:拥有 Linux 服务器的个人开发者 / 小团队 / 企业运维
+
+## Features
+
+### 📦 多种备份类型
+- **文件/目录** — 支持自定义排除规则(如 `node_modules`、`*.log`)
+- **MySQL** — 通过 `mysqldump` 原生工具
+- **SQLite** — 安全文件拷贝
+- **PostgreSQL** — 通过 `pg_dump` 原生工具
+
+### ☁️ 多云存储后端
+| 厂商 | 类型 | 说明 |
+|------|------|------|
+| 🇨🇳 **阿里云 OSS** | `aliyun_oss` | 自动组装 Endpoint,支持内网传输 |
+| 🇨🇳 **腾讯云 COS** | `tencent_cos` | 自动组装 Endpoint |
+| 🇨🇳 **七牛云 Kodo** | `qiniu_kodo` | 6 大区域精确映射 |
+| 🌍 **S3 Compatible** | `s3` | AWS S3 / MinIO / Cloudflare R2 等 |
+| 🌍 **Google Drive** | `google_drive` | 完整 OAuth 2.0 授权流程 |
+| 🌍 **WebDAV** | `webdav` | 坚果云 / Nextcloud 等 |
+| 💾 **本地磁盘** | `local_disk` | 备份到服务器本地目录 |
+
+> 国内云厂商仅需填写 **Region** 和 **AccessKey**,系统自动完成 Endpoint 组装,底层复用 S3 引擎零额外依赖。
+
+### 🖥️ 集群管理 (Master-Agent)
+- **节点管理** — 注册远程服务器节点,Token 认证
+- **本机节点** — 自动创建,单机用户零感知升级
+- **目录浏览** — 可视化文件树选择备份源路径,告别手动输入
+- **Agent 心跳** — 节点在线状态实时监控
+- **任务标签** — 按标签/节点分类管理备份任务
+
+### ⏰ 自动化与调度
+- Cron 表达式定时调度
+- 可视化 Cron 编辑器
+- 自动保留策略(按天数 / 按份数过期清理)
+- 最大并发备份数限制
+
+### 🔐 安全
+- JWT 认证 + bcrypt 密码存储
+- AES-256-GCM 加密存储敏感配置(数据库密码、OAuth Token)
+- 可选备份文件加密
+- 登录限流防暴力破解
+- 节点 Token 认证(一次性显示,安全传输)
+
+### 📊 监控与通知
+- 仪表盘统计(成功率、存储用量、备份趋势图表)
+- 邮件 / Webhook / Telegram 通知
+- 实时备份执行日志 (SSE)
+
+### 🌐 其他
+- 中英文国际化 (i18n)
+- 零外部依赖(内嵌 SQLite,单二进制部署)
+- systemd 服务支持
+
+## Quick Start
+
+### 从源码构建
+
+```bash
+# 克隆项目
+git clone https://github.com/yourname/backupx.git
+cd backupx
+
+# 一键构建前后端
+make build
+
+# 启动后端服务(默认监听 :8340)
+cd server && ./bin/backupx
+```
+
+### 访问 Web UI
+
+打开浏览器访问 `http://your-server:8340`,首次使用会引导您创建管理员账户。
+
+## Configuration
+
+配置文件路径默认为 `./config.yaml`,也可通过环境变量 `BACKUPX_` 前缀覆盖。
+
+```yaml
+# config.yaml
+server:
+ host: "0.0.0.0"
+ port: 8340
+ mode: "release" # debug | release
+
+database:
+ path: "./data/backupx.db" # SQLite 数据库路径
+
+security:
+ jwt_secret: "" # 留空则自动生成
+ jwt_expire: "24h"
+ encryption_key: "" # AES 加密密钥,留空自动生成
+
+backup:
+ temp_dir: "/tmp/backupx" # 备份临时文件目录
+ max_concurrent: 2 # 最大并发备份数
+
+log:
+ level: "info" # debug | info | warn | error
+ file: "./data/backupx.log"
+ max_size: 100 # 日志文件大小上限 (MB)
+ max_backups: 3 # 保留旧日志文件数
+ max_age: 30 # 日志保留天数
+```
+
+> 💡 `jwt_secret` 和 `encryption_key` 首次启动时自动生成并持久化到数据库,无需手动配置。
+
+## Architecture
+
+```
+ ┌─────────────────────┐
+ │ Nginx (反向代理) │
+ │ / → 前端静态文件 │
+ │ /api → :8340 │
+ └─────────┬───────────┘
+ │
+ ▼
+┌──────────────────────────────────────────────────────┐
+│ BackupX Master (Go API Server) │
+│ :8340 │
+│ │
+│ ┌──────┐ ┌────────────┐ ┌───────────────────────┐│
+│ │ Auth │ │Backup Engine│ │ Storage Registry ││
+│ └──────┘ └──────┬─────┘ │ ┌─────────────────┐ ││
+│ │ │ │ Aliyun OSS │ ││
+│ ┌──────────┐ │ │ │ Tencent COS │ ││
+│ │ Cron │◄───┘ │ │ Qiniu Kodo │ ││
+│ │Scheduler │ │ │ S3 Compatible │ ││
+│ └──────────┘ │ │ Google Drive │ ││
+│ │ │ WebDAV │ ││
+│ ┌──────────┐ │ │ Local Disk │ ││
+│ │ Notify │ │ └─────────────────┘ ││
+│ │ Module │ └───────────────────────┘│
+│ └──────────┘ │
+│ │
+│ ┌──────────────┐ ┌────────────────────┐ │
+│ │ Node Manager │ │ SQLite (backupx.db)│ │
+│ └──────┬───────┘ └────────────────────┘ │
+└─────────┼────────────────────────────────────────────┘
+ │ Heartbeat / Task Dispatch
+ ▼
+┌──────────────────┐ ┌──────────────────┐
+│ Agent Node A │ │ Agent Node B │
+│ (远程服务器) │ │ (远程服务器) │
+└──────────────────┘ └──────────────────┘
+```
+
+### 技术栈
+
+| 组件 | 技术 |
+|------|------|
+| **后端** | Go · Gin · GORM · SQLite · robfig/cron |
+| **前端** | React 18 · TypeScript · ArcoDesign · Vite · Zustand · ECharts |
+| **存储** | AWS SDK v2 (S3/OSS/COS/Kodo) · Google Drive API v3 · gowebdav |
+| **安全** | JWT · bcrypt · AES-256-GCM |
+| **日志** | zap + lumberjack (自动轮转) |
+
+## Cluster Mode
+
+BackupX 支持 **Master-Agent** 模式,可管理多台服务器的备份任务。
+
+### 工作原理
+
+1. **Master** 为运行 BackupX Web 控制台的主控服务器
+2. **Agent** 部署在需要备份的远程服务器上
+3. Agent 启动后通过 Token 向 Master 注册并定期发送心跳
+4. Master 将备份任务下发至对应 Agent 执行
+
+### 添加节点
+
+```bash
+# 在 Web 控制台 → 节点管理 → 添加节点
+# 系统将生成一个唯一的 64 位十六进制 Token
+
+# 在远程服务器上配置 Agent 启动参数
+./backupx-agent --master http://master-server:8340 --token YOUR_NODE_TOKEN
+```
+
+### 目录探针 API
+
+Master 提供 `GET /api/nodes/:id/fs/list?path=/` 接口,可远程浏览节点的文件系统目录。前端在创建备份任务的"源路径"输入时可使用树形选择器直接浏览目标机器的目录结构。
+
+## Project Structure
+
+```
+backupx/
+├── server/ # Go 后端
+│ ├── cmd/backupx/ # 入口
+│ ├── internal/
+│ │ ├── app/ # 应用组装 (DI)
+│ │ ├── backup/ # 备份引擎 (file/mysql/sqlite/pgsql)
+│ │ ├── config/ # 配置加载 (viper)
+│ │ ├── http/ # HTTP 处理器 + 路由
+│ │ ├── model/ # GORM 数据模型
+│ │ ├── notify/ # 通知 (email/webhook/telegram)
+│ │ ├── repository/ # 数据访问层
+│ │ ├── scheduler/ # Cron 调度器
+│ │ ├── security/ # JWT + 限流
+│ │ ├── service/ # 业务逻辑层
+│ │ └── storage/ # 存储后端 (插件化)
+│ │ ├── aliyun/ # 阿里云 OSS
+│ │ ├── tencent/ # 腾讯云 COS
+│ │ ├── qiniu/ # 七牛云 Kodo
+│ │ ├── s3/ # S3 Compatible
+│ │ ├── googledrive/ # Google Drive
+│ │ ├── webdav/ # WebDAV
+│ │ └── localdisk/ # 本地磁盘
+│ └── pkg/ # 工具包 (compress/crypto/response)
+├── web/ # React 前端
+│ └── src/
+│ ├── components/ # 通用组件 (CronEditor/PathSelector/...)
+│ ├── pages/ # 页面 (Dashboard/Tasks/Storage/Nodes/...)
+│ ├── services/ # API 请求封装
+│ ├── stores/ # Zustand 状态管理
+│ ├── locales/ # i18n 语言包 (zh-CN/en-US)
+│ └── router/ # 路由配置
+├── deploy/ # 部署配置
+│ ├── nginx.conf # Nginx 参考配置
+│ ├── backupx.service # systemd 服务单元
+│ └── install.sh # 一键安装脚本
+└── Makefile # 构建命令
+```
+
+## Development
+
+### 前置条件
+
+- **Go** ≥ 1.21
+- **Node.js** ≥ 18
+- **npm**
+
+### 开发模式
+
+```bash
+# 终端 1:启动后端 (热重载需配合 air)
+make dev-server
+
+# 终端 2:启动前端 (Vite HMR)
+make dev-web
+```
+
+### 运行测试
+
+```bash
+# 运行全部测试
+make test
+
+# 仅后端
+make test-server # go test ./...
+
+# 仅前端
+make test-web # npm run test
+```
+
+### 构建
+
+```bash
+# 构建前后端
+make build
+
+# 清理构建产物
+make clean
+```
+
+## Deployment
+
+### 一键安装 (推荐)
+
+```bash
+# 先构建
+make build
+
+# 以 root 执行安装脚本
+sudo ./deploy/install.sh
+```
+
+安装脚本将自动:
+1. 创建 `backupx` 系统用户
+2. 安装二进制到 `/opt/backupx/bin/`
+3. 部署前端到 `/opt/backupx/web/`
+4. 生成配置文件 `/etc/backupx/config.yaml`
+5. 注册并启动 systemd 服务
+6. 配置 Nginx 反向代理(如已安装)
+
+### 手动部署
+
+```bash
+# 1. 构建
+cd server && go build -o backupx ./cmd/backupx
+cd ../web && npm run build
+
+# 2. 部署文件
+scp server/backupx your-server:/opt/backupx/bin/
+scp -r web/dist/ your-server:/opt/backupx/web/
+scp server/config.example.yaml your-server:/etc/backupx/config.yaml
+
+# 3. 启动
+ssh your-server '/opt/backupx/bin/backupx -config /etc/backupx/config.yaml'
+```
+
+### Nginx 配置示例
+
+```nginx
+server {
+ listen 80;
+ server_name backup.example.com;
+
+ # 前端静态文件
+ location / {
+ root /opt/backupx/web;
+ try_files $uri $uri/ /index.html;
+ }
+
+ # API 反向代理
+ location /api/ {
+ proxy_pass http://127.0.0.1:8340;
+ proxy_set_header Host $host;
+ proxy_set_header X-Real-IP $remote_addr;
+ }
+}
+```
+
+## API Reference
+
+所有 API 均以 `/api` 为前缀,使用 JWT Bearer Token 认证(除特殊标注外)。
+
+| 模块 | 端点 | 说明 |
+|------|------|------|
+| **认证** | `POST /api/auth/setup` | 首次初始化管理员 |
+| | `POST /api/auth/login` | 登录获取 Token |
+| | `POST /api/auth/logout` | 登出 |
+| | `GET /api/auth/profile` | 当前用户信息 |
+| | `PUT /api/auth/password` | 修改密码 |
+| **备份任务** | `GET/POST /api/backup/tasks` | 任务列表 / 创建 |
+| | `GET/PUT/DELETE /api/backup/tasks/:id` | 详情 / 更新 / 删除 |
+| | `PUT /api/backup/tasks/:id/toggle` | 启用/禁用 |
+| | `POST /api/backup/tasks/:id/run` | 手动触发执行 |
+| **备份记录** | `GET /api/backup/records` | 记录列表 (支持筛选) |
+| | `GET /api/backup/records/:id` | 记录详情 |
+| | `GET /api/backup/records/:id/logs/stream` | 实时执行日志 (SSE) |
+| | `GET /api/backup/records/:id/download` | 下载备份文件 |
+| | `POST /api/backup/records/:id/restore` | 恢复备份 |
+| **存储目标** | `GET/POST /api/storage-targets` | 存储列表 / 添加 |
+| | `GET/PUT/DELETE /api/storage-targets/:id` | 详情 / 更新 / 删除 |
+| | `POST /api/storage-targets/test` | 测试连接 |
+| | `POST /api/storage-targets/:id/test` | 测试已保存连接 |
+| | `GET /api/storage-targets/:id/usage` | 查询用量 |
+| **节点管理** | `GET/POST /api/nodes` | 节点列表 / 添加 |
+| | `GET/DELETE /api/nodes/:id` | 详情 / 删除 |
+| | `GET /api/nodes/:id/fs/list` | 目录浏览 |
+| | `POST /api/agent/heartbeat` | Agent 心跳 ⚡ |
+| **通知** | `GET/POST /api/notifications` | 通知列表 / 添加 |
+| | `POST /api/notifications/test` | 测试通知 |
+| | `POST /api/notifications/:id/test` | 测试已保存通知 |
+| **仪表盘** | `GET /api/dashboard/stats` | 概览统计 |
+| | `GET /api/dashboard/timeline` | 备份趋势时间线 |
+| **系统** | `GET /api/system/info` | 系统信息 (版本/磁盘) |
+| | `GET/PUT /api/settings` | 系统设置读写 |
+
+> ⚡ `POST /api/agent/heartbeat` 为公开端点,使用 Node Token 认证而非 JWT。
+
+## 云存储配置指南
+
+### 阿里云 OSS
+
+1. 登录[阿里云控制台](https://oss.console.aliyun.com/),创建 Bucket
+2. 前往 RAM 控制台创建 AccessKey
+3. 在 BackupX 添加存储目标时选择"阿里云 OSS"
+4. 填写 Region(如 `cn-hangzhou`)和 AccessKey,系统自动组装 Endpoint
+
+### 腾讯云 COS
+
+1. 登录[腾讯云控制台](https://console.cloud.tencent.com/cos),创建存储桶
+2. 前往 API 密钥管理创建 SecretId/SecretKey
+3. Bucket 名称格式为 `BucketName-APPID`(如 `backup-1250000000`)
+
+### 七牛云 Kodo
+
+1. 登录[七牛云控制台](https://portal.qiniu.com/),创建存储空间
+2. 支持区域:`z0`(华东) / `cn-east-2`(华东-浙江2) / `z1`(华北) / `z2`(华南) / `na0`(北美) / `as0`(东南亚)
+
+### Google Drive
+
+1. 前往 [Google Cloud Console](https://console.cloud.google.com/) 创建项目
+2. 启用 **Google Drive API**
+3. 创建 **OAuth 2.0 客户端 ID**(Web 应用类型)
+4. 添加重定向 URI:`http://your-server/api/storage-targets/google-drive/callback`
+5. 在 BackupX 存储管理页面填入 Client ID / Secret,点击授权
+
+## Contributing
+
+欢迎提交 Issue 和 Pull Request!
+
+1. Fork 本项目
+2. 创建功能分支 (`git checkout -b feature/amazing-feature`)
+3. 提交更改 (`git commit -m 'Add amazing feature'`)
+4. 推送到分支 (`git push origin feature/amazing-feature`)
+5. 创建 Pull Request
+
+## License
+
+本项目采用 [Apache License 2.0](LICENSE) 开源协议。
+
+---
+
+
+ Made with ❤️ for self-hosters
+
diff --git a/deploy/backupx.service b/deploy/backupx.service
new file mode 100644
index 0000000..1d89354
--- /dev/null
+++ b/deploy/backupx.service
@@ -0,0 +1,18 @@
+[Unit]
+Description=BackupX API Service
+After=network-online.target
+Wants=network-online.target
+
+[Service]
+Type=simple
+User=backupx
+Group=backupx
+WorkingDirectory=/opt/backupx
+ExecStart=/opt/backupx/bin/backupx -config /etc/backupx/config.yaml
+Restart=on-failure
+RestartSec=5
+NoNewPrivileges=true
+LimitNOFILE=65535
+
+[Install]
+WantedBy=multi-user.target
diff --git a/deploy/install.sh b/deploy/install.sh
new file mode 100755
index 0000000..6f0a5f5
--- /dev/null
+++ b/deploy/install.sh
@@ -0,0 +1,72 @@
+#!/bin/sh
+# One-shot installer: copies the built backend binary and frontend assets,
+# creates a dedicated system user, writes the default config, and registers
+# the systemd service (plus an nginx vhost when nginx is installed).
+set -eu
+
+# Resolve the repository root relative to this script; every source path can
+# be overridden via environment variables for custom layouts.
+PROJECT_ROOT=$(CDPATH= cd -- "$(dirname -- "$0")/.." && pwd)
+PREFIX="${PREFIX:-/opt/backupx}"
+ETC_DIR="${ETC_DIR:-/etc/backupx}"
+SERVICE_NAME="backupx"
+APP_USER="backupx"
+APP_GROUP="backupx"
+BIN_SOURCE="${BIN_SOURCE:-$PROJECT_ROOT/server/backupx}"
+WEB_SOURCE="${WEB_SOURCE:-$PROJECT_ROOT/web/dist}"
+CONFIG_TEMPLATE="${CONFIG_TEMPLATE:-$PROJECT_ROOT/server/config.example.yaml}"
+SERVICE_SOURCE="${SERVICE_SOURCE:-$PROJECT_ROOT/deploy/backupx.service}"
+NGINX_SOURCE="${NGINX_SOURCE:-$PROJECT_ROOT/deploy/nginx.conf}"
+
+# Installing files, creating users and driving systemd all require root.
+if [ "$(id -u)" -ne 0 ]; then
+	echo "请使用 root 或 sudo 执行安装脚本。" >&2
+	exit 1
+fi
+
+# Fail early with build hints when the artifacts have not been built yet.
+if [ ! -f "$BIN_SOURCE" ]; then
+	echo "未找到后端二进制:$BIN_SOURCE" >&2
+	echo "请先执行:cd \"$PROJECT_ROOT/server\" && go build -o backupx ./cmd/backupx" >&2
+	exit 1
+fi
+
+if [ ! -d "$WEB_SOURCE" ]; then
+	echo "未找到前端构建产物:$WEB_SOURCE" >&2
+	echo "请先执行:cd \"$PROJECT_ROOT/web\" && npm run build" >&2
+	exit 1
+fi
+
+# Create the system group/user idempotently (no login shell, home = PREFIX).
+if ! getent group "$APP_GROUP" >/dev/null 2>&1; then
+	groupadd --system "$APP_GROUP"
+fi
+
+if ! id "$APP_USER" >/dev/null 2>&1; then
+	useradd --system --gid "$APP_GROUP" --home-dir "$PREFIX" --shell /usr/sbin/nologin "$APP_USER"
+fi
+
+# Lay out the install tree and copy binary + web assets, owned by the service user.
+install -d -o "$APP_USER" -g "$APP_GROUP" "$PREFIX" "$PREFIX/bin" "$PREFIX/web" "$PREFIX/data" "$ETC_DIR"
+install -m 0755 "$BIN_SOURCE" "$PREFIX/bin/backupx"
+cp -R "$WEB_SOURCE/." "$PREFIX/web/"
+chown -R "$APP_USER:$APP_GROUP" "$PREFIX"
+
+# Never clobber an existing config; 0640 because it may hold secrets.
+if [ ! -f "$ETC_DIR/config.yaml" ]; then
+	install -m 0640 "$CONFIG_TEMPLATE" "$ETC_DIR/config.yaml"
+fi
+
+# Register and start the systemd unit.
+install -m 0644 "$SERVICE_SOURCE" "/etc/systemd/system/$SERVICE_NAME.service"
+systemctl daemon-reload
+systemctl enable --now "$SERVICE_NAME"
+
+# Best-effort nginx integration: only when conf.d exists; reload failures are
+# tolerated so the installer does not abort a working backend install.
+if [ -d "/etc/nginx/conf.d" ]; then
+	install -m 0644 "$NGINX_SOURCE" "/etc/nginx/conf.d/$SERVICE_NAME.conf"
+	if command -v nginx >/dev/null 2>&1; then
+		nginx -t
+		systemctl reload nginx || true
+	fi
+fi
+
+cat <<EOF
+BackupX 安装完成!请访问 http://your-server:8340 开始使用。
+EOF
[NOTE(review): the patch transcript is garbled/truncated here — HTML-tag stripping consumed the rest of the install.sh heredoc and the diffs of one or more intermediate files. The orphaned Go fragment below is the tail of an earlier source file whose Run method extends the process environment from a CommandOptions value; recover the original commit to restore the lost hunks.]
+	if len(options.Env) > 0 {
+		cmd.Env = append(os.Environ(), options.Env...)
+	}
+	return cmd.Run()
+}
diff --git a/server/internal/backup/command_executor.go b/server/internal/backup/command_executor.go
new file mode 100644
index 0000000..68b5476
--- /dev/null
+++ b/server/internal/backup/command_executor.go
@@ -0,0 +1,37 @@
+//go:build ignore
+
+package backup
+
+import (
+	"context"
+	"io"
+	"os"
+	"os/exec"
+)
+
+// NOTE(review): this file carries a "go:build ignore" tag, so it is excluded
+// from compilation. The package's tests exercise a CommandOptions-based Run
+// signature instead of the explicit stdin/stdout/env parameters used here —
+// confirm whether this legacy variant should be deleted.
+
+// CommandExecutor abstracts executable lookup and process execution so that
+// database backup runners can be tested with a fake instead of spawning
+// real mysqldump/pg_dump processes.
+type CommandExecutor interface {
+	LookPath(file string) (string, error)
+	Run(ctx context.Context, name string, args []string, env map[string]string, stdin io.Reader, stdout io.Writer, stderr io.Writer) error
+}
+
+// OSCommandExecutor is the production CommandExecutor backed by os/exec.
+type OSCommandExecutor struct{}
+
+// NewOSCommandExecutor returns a ready-to-use OSCommandExecutor.
+func NewOSCommandExecutor() *OSCommandExecutor {
+	return &OSCommandExecutor{}
+}
+
+// LookPath resolves an executable name against $PATH via exec.LookPath.
+func (e *OSCommandExecutor) LookPath(file string) (string, error) {
+	return exec.LookPath(file)
+}
+
+// Run executes name with args under ctx, wiring the provided stdio streams
+// and extending the parent process environment with the KEY=VALUE pairs
+// derived from env.
+func (e *OSCommandExecutor) Run(ctx context.Context, name string, args []string, env map[string]string, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
+	command := exec.CommandContext(ctx, name, args...)
+	command.Stdin = stdin
+	command.Stdout = stdout
+	command.Stderr = stderr
+	command.Env = os.Environ()
+	for key, value := range env {
+		command.Env = append(command.Env, key+"="+value)
+	}
+	return command.Run()
+}
diff --git a/server/internal/backup/database_names.go b/server/internal/backup/database_names.go
new file mode 100644
index 0000000..bcc88e3
--- /dev/null
+++ b/server/internal/backup/database_names.go
@@ -0,0 +1,16 @@
+package backup
+
+import "strings"
+
+// normalizeDatabaseNames flattens a list of possibly comma-separated database
+// name entries into a single slice: each item is split on ",", surrounding
+// whitespace is trimmed, and empty results are dropped. Input order is
+// preserved; duplicates are NOT removed.
+func normalizeDatabaseNames(items []string) []string {
+	result := make([]string, 0, len(items))
+	for _, item := range items {
+		for _, part := range strings.Split(item, ",") {
+			trimmed := strings.TrimSpace(part)
+			if trimmed != "" {
+				result = append(result, trimmed)
+			}
+		}
+	}
+	return result
+}
diff --git a/server/internal/backup/database_runners_test.go b/server/internal/backup/database_runners_test.go
new file mode 100644
index 0000000..23b0c2a
--- /dev/null
+++ b/server/internal/backup/database_runners_test.go
@@ -0,0 +1,106 @@
+package backup
+
+import (
+ "bytes"
+ "context"
+ "errors"
+ "io"
+ "os"
+ "testing"
+)
+
+// fakeCommandExecutor is a test double for the executor used by the database
+// runners: it records the last invocation (command name, args, env) and can
+// simulate lookup failures or custom run behavior via runFunc.
+type fakeCommandExecutor struct {
+	lastName  string
+	lastArgs  []string
+	env       []string
+	lookupErr error
+	runFunc   func(name string, args []string, options CommandOptions) error
+}
+
+// LookPath returns lookupErr when set, otherwise a fixed fake path.
+func (f *fakeCommandExecutor) LookPath(string) (string, error) {
+	if f.lookupErr != nil {
+		return "", f.lookupErr
+	}
+	return "/usr/bin/fake", nil
+}
+
+// Run records the invocation (copying args/env so later mutation by the
+// caller cannot corrupt the recording) and delegates to runFunc if present.
+func (f *fakeCommandExecutor) Run(_ context.Context, name string, args []string, options CommandOptions) error {
+	f.lastName = name
+	f.lastArgs = append([]string{}, args...)
+	f.env = append([]string{}, options.Env...)
+	if f.runFunc != nil {
+		return f.runFunc(name, args, options)
+	}
+	return nil
+}
+
+// TestMySQLRunnerUsesExpectedCommands verifies that the MySQL runner invokes
+// mysqldump, splits the comma-separated "app, audit" entry into two trailing
+// database-name arguments, and writes the dump to the artifact file.
+func TestMySQLRunnerUsesExpectedCommands(t *testing.T) {
+	executor := &fakeCommandExecutor{runFunc: func(name string, args []string, options CommandOptions) error {
+		if options.Stdout != nil {
+			_, _ = io.WriteString(options.Stdout, "mysql dump")
+		}
+		return nil
+	}}
+	runner := NewMySQLRunner(executor)
+	result, err := runner.Run(context.Background(), TaskSpec{Name: "mysql", Type: "mysql", Database: DatabaseSpec{Host: "127.0.0.1", Port: 3306, User: "root", Password: "secret", Names: []string{"app, audit"}}}, NopLogWriter{})
+	if err != nil {
+		t.Fatalf("Run returned error: %v", err)
+	}
+	if executor.lastName != "mysqldump" {
+		t.Fatalf("expected mysqldump, got %s", executor.lastName)
+	}
+	// NOTE(review): the len == 0 guard does not protect the len-2 index; if
+	// the runner ever produced exactly one argument this would panic — a
+	// len < 2 guard would be safer. Confirm and tighten.
+	if len(executor.lastArgs) == 0 || executor.lastArgs[len(executor.lastArgs)-2] != "app" || executor.lastArgs[len(executor.lastArgs)-1] != "audit" {
+		t.Fatalf("unexpected mysql args: %#v", executor.lastArgs)
+	}
+	if _, err := os.Stat(result.ArtifactPath); err != nil {
+		t.Fatalf("artifact file missing: %v", err)
+	}
+}
+
+// TestPostgreSQLRunnerRestoreUsesPsql verifies that restoring a PostgreSQL
+// backup shells out to psql (not pg_dump/pg_restore) when given a plain SQL
+// artifact file.
+func TestPostgreSQLRunnerRestoreUsesPsql(t *testing.T) {
+	executor := &fakeCommandExecutor{}
+	runner := NewPostgreSQLRunner(executor)
+	artifact := filepathJoinTempFile(t, "restore.sql", "select 1;")
+	if err := runner.Restore(context.Background(), TaskSpec{Name: "postgres", Type: "postgresql", Database: DatabaseSpec{Host: "127.0.0.1", Port: 5432, User: "postgres", Password: "secret"}}, artifact, NopLogWriter{}); err != nil {
+		t.Fatalf("Restore returned error: %v", err)
+	}
+	if executor.lastName != "psql" {
+		t.Fatalf("expected psql, got %s", executor.lastName)
+	}
+}
+
+// TestMySQLRunnerReturnsLookupError verifies that a missing mysqldump binary
+// (LookPath failure) surfaces as an error from Run instead of being swallowed.
+func TestMySQLRunnerReturnsLookupError(t *testing.T) {
+	runner := NewMySQLRunner(&fakeCommandExecutor{lookupErr: errors.New("missing")})
+	_, err := runner.Run(context.Background(), TaskSpec{Name: "mysql", Type: "mysql", Database: DatabaseSpec{Host: "127.0.0.1", Port: 3306, User: "root", Password: "secret", Names: []string{"app"}}}, NopLogWriter{})
+	if err == nil {
+		t.Fatal("expected error when mysqldump is missing")
+	}
+}
+
+// filepathJoinTempFile writes content to a file named name inside a fresh
+// per-test temp dir (cleaned up automatically by t.TempDir) and returns its
+// full path.
+// NOTE(review): joins with a literal "/" — filepath.Join would be portable to
+// Windows; harmless if CI only runs on Unix, but worth confirming.
+func filepathJoinTempFile(t *testing.T, name string, content string) string {
+	t.Helper()
+	filePath := t.TempDir() + "/" + name
+	if err := os.WriteFile(filePath, []byte(content), 0o644); err != nil {
+		t.Fatalf("WriteFile returned error: %v", err)
+	}
+	return filePath
+}
+
+// TestPostgreSQLRunnerRunAppendsMultipleDatabaseDumps verifies that when a
+// task lists several databases, the runner appends each dump into the same
+// artifact: the fake echoes the last argument (the database name) into
+// stdout, so the artifact must contain both "app" and "audit".
+func TestPostgreSQLRunnerRunAppendsMultipleDatabaseDumps(t *testing.T) {
+	executor := &fakeCommandExecutor{runFunc: func(name string, args []string, options CommandOptions) error {
+		_, _ = io.Copy(options.Stdout, bytes.NewBufferString(args[len(args)-1]))
+		return nil
+	}}
+	runner := NewPostgreSQLRunner(executor)
+	result, err := runner.Run(context.Background(), TaskSpec{Name: "pg", Type: "postgresql", Database: DatabaseSpec{Host: "127.0.0.1", Port: 5432, User: "postgres", Password: "secret", Names: []string{"app", "audit"}}}, NopLogWriter{})
+	if err != nil {
+		t.Fatalf("Run returned error: %v", err)
+	}
+	content, err := os.ReadFile(result.ArtifactPath)
+	if err != nil {
+		t.Fatalf("ReadFile returned error: %v", err)
+	}
+	if !bytes.Contains(content, []byte("app")) || !bytes.Contains(content, []byte("audit")) {
+		t.Fatalf("unexpected pg dump content: %s", string(content))
+	}
+}
diff --git a/server/internal/backup/file_runner.go b/server/internal/backup/file_runner.go
new file mode 100644
index 0000000..e1840cf
--- /dev/null
+++ b/server/internal/backup/file_runner.go
@@ -0,0 +1,191 @@
+package backup
+
+import (
+ "archive/tar"
+ "context"
+ "fmt"
+ "io"
+ "os"
+ "path"
+ "path/filepath"
+ "strings"
+)
+
+// FileRunner backs up and restores plain files/directories as tar archives.
+type FileRunner struct{}
+
+// NewFileRunner returns a ready-to-use FileRunner.
+func NewFileRunner() *FileRunner {
+	return &FileRunner{}
+}
+
+// Type reports the task type handled by this runner.
+func (r *FileRunner) Type() string {
+	return "file"
+}
+
+// Run packages task.SourcePath (a file or directory tree) into a tar archive
+// inside a freshly created temp directory and returns its location. Entries
+// matching task.ExcludePatterns are skipped; progress is streamed to writer.
+//
+// Fixes over the previous version:
+//   - each archived file is closed as soon as it is copied (the old defer
+//     inside the Walk callback held every file open until Run returned,
+//     leaking file descriptors on large trees);
+//   - the tar writer is closed explicitly with its error checked, so a
+//     truncated/unfinalized archive is reported instead of returned silently.
+func (r *FileRunner) Run(_ context.Context, task TaskSpec, writer LogWriter) (*RunResult, error) {
+	sourcePath := filepath.Clean(strings.TrimSpace(task.SourcePath))
+	if sourcePath == "" {
+		return nil, fmt.Errorf("source path is required")
+	}
+	info, err := os.Stat(sourcePath)
+	if err != nil {
+		return nil, fmt.Errorf("stat source path: %w", err)
+	}
+	tempDir, artifactPath, err := createTempArtifact(task.TempDir, task.Name, "tar")
+	if err != nil {
+		return nil, err
+	}
+	artifactFile, err := os.Create(artifactPath)
+	if err != nil {
+		return nil, fmt.Errorf("create tar artifact: %w", err)
+	}
+	defer artifactFile.Close()
+	tw := tar.NewWriter(artifactFile)
+	baseParent := filepath.Dir(sourcePath)
+	excludes := normalizeExcludePatterns(task.ExcludePatterns)
+	writer.WriteLine(fmt.Sprintf("开始打包文件备份:%s", sourcePath))
+	fileCount := 0
+	dirCount := 0
+	walkErr := filepath.Walk(sourcePath, func(currentPath string, currentInfo os.FileInfo, walkErr error) error {
+		if walkErr != nil {
+			// Best-effort: report unreadable entries and keep archiving.
+			writer.WriteLine(fmt.Sprintf("⚠ 无法访问 %s: %v", currentPath, walkErr))
+			return nil
+		}
+		relPath, err := filepath.Rel(baseParent, currentPath)
+		if err != nil {
+			return err
+		}
+		archiveName := filepath.ToSlash(relPath)
+		if shouldExcludeEntry(archiveName, currentInfo.IsDir(), excludes) {
+			if currentInfo.IsDir() {
+				writer.WriteLine(fmt.Sprintf("跳过排除目录 %s", archiveName))
+				return filepath.SkipDir
+			}
+			return nil
+		}
+		// The root directory itself gets no header; children carry its name.
+		if currentPath == sourcePath && currentInfo.IsDir() {
+			return nil
+		}
+		if currentInfo.IsDir() {
+			dirCount++
+			writer.WriteLine(fmt.Sprintf("📁 进入目录 %s", archiveName))
+		}
+		header, err := tar.FileInfoHeader(currentInfo, "")
+		if err != nil {
+			return err
+		}
+		header.Name = archiveName
+		if err := tw.WriteHeader(header); err != nil {
+			return err
+		}
+		if currentInfo.Mode().IsRegular() {
+			file, err := os.Open(currentPath)
+			if err != nil {
+				return err
+			}
+			_, copyErr := io.CopyN(tw, file, currentInfo.Size())
+			// Close immediately instead of defer: a defer here would only
+			// run when Run returns, keeping every archived file open at once.
+			closeErr := file.Close()
+			if copyErr != nil && copyErr != io.EOF {
+				return copyErr
+			}
+			if closeErr != nil {
+				return closeErr
+			}
+			fileCount++
+			if fileCount%100 == 0 {
+				writer.WriteLine(fmt.Sprintf("已打包 %d 个文件...", fileCount))
+			}
+		}
+		return nil
+	})
+	if walkErr != nil {
+		tw.Close()
+		return nil, fmt.Errorf("walk source path: %w", walkErr)
+	}
+	// Finalize the archive; an unchecked Close could hide a truncated tar.
+	if err := tw.Close(); err != nil {
+		return nil, fmt.Errorf("finalize tar artifact: %w", err)
+	}
+	if info.IsDir() {
+		writer.WriteLine(fmt.Sprintf("目录打包完成(%d 个目录,%d 个文件)", dirCount, fileCount))
+	} else {
+		writer.WriteLine("文件打包完成")
+	}
+	return &RunResult{ArtifactPath: artifactPath, FileName: filepath.Base(artifactPath), TempDir: tempDir}, nil
+}
+
+// Restore unpacks the tar artifact at artifactPath back into the parent
+// directory of task.SourcePath, re-creating directories and regular files.
+// Entry names are cleaned and checked so no entry can resolve outside the
+// restore parent (tar path-traversal / "zip slip" protection).
+//
+// NOTE(review): only directories and regular files are restored; symlinks and
+// other tar entry types are silently skipped — confirm this is intended.
+func (r *FileRunner) Restore(_ context.Context, task TaskSpec, artifactPath string, writer LogWriter) error {
+	artifactFile, err := os.Open(artifactPath)
+	if err != nil {
+		return fmt.Errorf("open tar artifact: %w", err)
+	}
+	defer artifactFile.Close()
+	targetParent := filepath.Dir(filepath.Clean(strings.TrimSpace(task.SourcePath)))
+	if err := os.MkdirAll(targetParent, 0o755); err != nil {
+		return fmt.Errorf("create restore parent: %w", err)
+	}
+	tr := tar.NewReader(artifactFile)
+	for {
+		header, err := tr.Next()
+		if err == io.EOF {
+			break
+		}
+		if err != nil {
+			return fmt.Errorf("read tar entry: %w", err)
+		}
+		// Normalize the archive name; skip empty / current-dir entries.
+		cleanName := path.Clean(strings.TrimSpace(header.Name))
+		if cleanName == "." || cleanName == "" {
+			continue
+		}
+		// Reject entries whose cleaned path escapes targetParent.
+		targetPath := filepath.Clean(filepath.Join(targetParent, filepath.FromSlash(cleanName)))
+		parentWithSep := filepath.Clean(targetParent) + string(filepath.Separator)
+		if targetPath != filepath.Clean(targetParent) && !strings.HasPrefix(targetPath, parentWithSep) {
+			return fmt.Errorf("tar entry escapes restore path")
+		}
+		switch header.Typeflag {
+		case tar.TypeDir:
+			if err := os.MkdirAll(targetPath, os.FileMode(header.Mode)); err != nil {
+				return fmt.Errorf("create restore dir: %w", err)
+			}
+		case tar.TypeReg, tar.TypeRegA: // TypeRegA is the legacy regular-file flag
+			if err := os.MkdirAll(filepath.Dir(targetPath), 0o755); err != nil {
+				return fmt.Errorf("create restore parent dir: %w", err)
+			}
+			file, err := os.OpenFile(targetPath, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, os.FileMode(header.Mode))
+			if err != nil {
+				return fmt.Errorf("create restore file: %w", err)
+			}
+			if _, err := io.Copy(file, tr); err != nil {
+				file.Close()
+				return fmt.Errorf("write restore file: %w", err)
+			}
+			if err := file.Close(); err != nil {
+				return fmt.Errorf("close restore file: %w", err)
+			}
+		}
+	}
+	writer.WriteLine("文件恢复完成")
+	return nil
+}
+
+// normalizeExcludePatterns trims whitespace from each pattern, drops empty
+// entries and converts path separators to forward slashes.
+func normalizeExcludePatterns(items []string) []string {
+	normalized := make([]string, 0, len(items))
+	for _, raw := range items {
+		cleaned := strings.TrimSpace(raw)
+		if cleaned == "" {
+			continue
+		}
+		normalized = append(normalized, filepath.ToSlash(cleaned))
+	}
+	return normalized
+}
+
+// shouldExcludeEntry reports whether the archive entry at relPath matches any
+// exclude pattern, tested against both the full slash-separated path and its
+// base name; a directory also matches a pattern equal to its base name with
+// an optional trailing slash.
+func shouldExcludeEntry(relPath string, isDir bool, patterns []string) bool {
+	slashPath := filepath.ToSlash(relPath)
+	baseName := path.Base(slashPath)
+	for _, pattern := range patterns {
+		fullMatch, _ := path.Match(pattern, slashPath)
+		baseMatch, _ := path.Match(pattern, baseName)
+		dirMatch := isDir && strings.TrimSuffix(pattern, "/") == baseName
+		if fullMatch || baseMatch || dirMatch {
+			return true
+		}
+	}
+	return false
+}
diff --git a/server/internal/backup/file_runner_test.go b/server/internal/backup/file_runner_test.go
new file mode 100644
index 0000000..d54c530
--- /dev/null
+++ b/server/internal/backup/file_runner_test.go
@@ -0,0 +1,69 @@
+package backup
+
+import (
+ "archive/tar"
+ "context"
+ "os"
+ "path/filepath"
+ "testing"
+)
+
+type bufferWriter struct{ lines []string }
+
+func (w *bufferWriter) WriteLine(message string) { w.lines = append(w.lines, message) }
+
+func TestFileRunnerRunAndRestore(t *testing.T) {
+ tempDir := t.TempDir()
+ sourceDir := filepath.Join(tempDir, "site")
+ if err := os.MkdirAll(filepath.Join(sourceDir, "node_modules"), 0o755); err != nil {
+ t.Fatalf("MkdirAll returned error: %v", err)
+ }
+ if err := os.WriteFile(filepath.Join(sourceDir, "index.html"), []byte("ok"), 0o644); err != nil {
+ t.Fatalf("WriteFile returned error: %v", err)
+ }
+ if err := os.WriteFile(filepath.Join(sourceDir, "app.log"), []byte("skip"), 0o644); err != nil {
+ t.Fatalf("WriteFile returned error: %v", err)
+ }
+ if err := os.WriteFile(filepath.Join(sourceDir, "node_modules", "pkg.json"), []byte("skip-dir"), 0o644); err != nil {
+ t.Fatalf("WriteFile returned error: %v", err)
+ }
+ runner := NewFileRunner()
+ writer := &bufferWriter{}
+ result, err := runner.Run(context.Background(), TaskSpec{Name: "site files", Type: "file", SourcePath: sourceDir, ExcludePatterns: []string{"*.log", "node_modules"}}, writer)
+ if err != nil {
+ t.Fatalf("Run returned error: %v", err)
+ }
+ archiveFile, err := os.Open(result.ArtifactPath)
+ if err != nil {
+ t.Fatalf("Open returned error: %v", err)
+ }
+ defer archiveFile.Close()
+ reader := tar.NewReader(archiveFile)
+ entries := map[string]bool{}
+ for {
+ header, err := reader.Next()
+ if err != nil {
+ break
+ }
+ entries[header.Name] = true
+ }
+ if !entries["site/index.html"] {
+ t.Fatalf("expected site/index.html in archive, got %#v", entries)
+ }
+ if entries["site/app.log"] || entries["site/node_modules/pkg.json"] {
+ t.Fatalf("unexpected excluded entries: %#v", entries)
+ }
+ if err := os.RemoveAll(sourceDir); err != nil {
+ t.Fatalf("RemoveAll returned error: %v", err)
+ }
+ if err := runner.Restore(context.Background(), TaskSpec{Name: "site files", Type: "file", SourcePath: sourceDir}, result.ArtifactPath, writer); err != nil {
+ t.Fatalf("Restore returned error: %v", err)
+ }
+ content, err := os.ReadFile(filepath.Join(sourceDir, "index.html"))
+ if err != nil {
+ t.Fatalf("ReadFile returned error: %v", err)
+ }
+ if string(content) != "ok" {
+ t.Fatalf("unexpected restored content: %s", string(content))
+ }
+}
diff --git a/server/internal/backup/helpers.go b/server/internal/backup/helpers.go
new file mode 100644
index 0000000..c8d7a93
--- /dev/null
+++ b/server/internal/backup/helpers.go
@@ -0,0 +1,41 @@
+package backup
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+ "time"
+)
+
+// createTempArtifact makes a fresh temp directory under baseDir and returns
+// it together with a timestamped artifact path derived from taskName and the
+// given extension (leading dot optional).
+func createTempArtifact(baseDir, taskName string, extension string) (string, string, error) {
+	tempDir, err := os.MkdirTemp(baseDir, "backupx-run-*")
+	if err != nil {
+		return "", "", fmt.Errorf("create temp dir: %w", err)
+	}
+	name := sanitizeFileName(taskName)
+	if name == "" {
+		name = "backup"
+	}
+	stamp := time.Now().UTC().Format("20060102T150405")
+	fileName := fmt.Sprintf("%s_%s.%s", name, stamp, strings.TrimPrefix(extension, "."))
+	return tempDir, filepath.Join(tempDir, fileName), nil
+}
+
+func sanitizeFileName(value string) string {
+ builder := strings.Builder{}
+ for _, char := range strings.TrimSpace(value) {
+ switch {
+ case char >= 'a' && char <= 'z':
+ builder.WriteRune(char)
+ case char >= 'A' && char <= 'Z':
+ builder.WriteRune(char + ('a' - 'A'))
+ case char >= '0' && char <= '9':
+ builder.WriteRune(char)
+ case char == '-' || char == '_':
+ builder.WriteRune(char)
+ case char == ' ' || char == '.':
+ builder.WriteRune('_')
+ }
+ }
+ return strings.Trim(builder.String(), "_")
+}
diff --git a/server/internal/backup/log_hub.go b/server/internal/backup/log_hub.go
new file mode 100644
index 0000000..59540cb
--- /dev/null
+++ b/server/internal/backup/log_hub.go
@@ -0,0 +1,110 @@
+package backup
+
+import (
+ "sync"
+ "time"
+)
+
+// LogHub fans out per-record log events to live subscribers while retaining
+// the full event history for replay and snapshotting.
+type LogHub struct {
+	mu      sync.RWMutex
+	streams map[uint]*logStreamState
+}
+
+// logStreamState is one record's stream: a monotonically increasing sequence
+// counter, retained events, active subscriber channels keyed by subscription
+// id, and completion status.
+type logStreamState struct {
+	nextSequence int64
+	events       []LogEvent
+	subscribers  map[int]chan LogEvent
+	nextSubID    int
+	completed    bool
+	status       string
+}
+
+// NewLogHub returns an empty hub ready for use.
+func NewLogHub() *LogHub {
+	return &LogHub{streams: make(map[uint]*logStreamState)}
+}
+
+// Append records a new log event for recordID and fans it out to every
+// current subscriber without blocking: a full subscriber channel simply
+// misses the event, but the retained history still contains it.
+func (h *LogHub) Append(recordID uint, level, message string) LogEvent {
+	h.mu.Lock()
+	defer h.mu.Unlock()
+	stream := h.ensureState(recordID)
+	stream.nextSequence++
+	event := LogEvent{
+		RecordID:  recordID,
+		Sequence:  stream.nextSequence,
+		Level:     level,
+		Message:   message,
+		Timestamp: time.Now().UTC(),
+		Status:    stream.status,
+	}
+	stream.events = append(stream.events, event)
+	for _, ch := range stream.subscribers {
+		select {
+		case ch <- event:
+		default: // subscriber too slow; drop rather than block the hub
+		}
+	}
+	return event
+}
+
+// Snapshot returns a copy of every event recorded so far for recordID, or
+// nil when no stream exists for that record.
+func (h *LogHub) Snapshot(recordID uint) []LogEvent {
+	h.mu.RLock()
+	defer h.mu.RUnlock()
+	if state, ok := h.streams[recordID]; ok {
+		return append([]LogEvent(nil), state.events...)
+	}
+	return nil
+}
+
+// Subscribe registers a listener for recordID. The returned channel first
+// replays every event recorded so far and then receives live events; the
+// returned cancel function removes the subscription and closes the channel.
+//
+// The channel capacity covers the full backlog plus `buffer` live events:
+// the backlog is replayed while h.mu is held, so a channel smaller than the
+// backlog would block the replay loop forever and deadlock the whole hub.
+// (The previous version used a fixed capacity of `buffer`, which deadlocked
+// whenever more than `buffer` events had already been recorded.)
+func (h *LogHub) Subscribe(recordID uint, buffer int) (<-chan LogEvent, func()) {
+	if buffer <= 0 {
+		buffer = 32
+	}
+	h.mu.Lock()
+	defer h.mu.Unlock()
+	state := h.ensureState(recordID)
+	state.nextSubID++
+	id := state.nextSubID
+	channel := make(chan LogEvent, len(state.events)+buffer)
+	state.subscribers[id] = channel
+	for _, event := range state.events {
+		channel <- event
+	}
+	cancel := func() {
+		h.mu.Lock()
+		defer h.mu.Unlock()
+		stream, ok := h.streams[recordID]
+		if !ok {
+			return
+		}
+		subscriber, ok := stream.subscribers[id]
+		if !ok {
+			// Already cancelled once; closing twice would panic.
+			return
+		}
+		delete(stream.subscribers, id)
+		close(subscriber)
+	}
+	return channel, cancel
+}
+
+// Complete marks the stream for recordID as finished with the given status
+// and broadcasts a terminal "stream completed" event to all subscribers
+// (non-blocking, like Append).
+func (h *LogHub) Complete(recordID uint, status string) {
+	h.mu.Lock()
+	defer h.mu.Unlock()
+	stream := h.ensureState(recordID)
+	stream.completed = true
+	stream.status = status
+	stream.nextSequence++
+	terminal := LogEvent{
+		RecordID:  recordID,
+		Sequence:  stream.nextSequence,
+		Level:     "info",
+		Message:   "stream completed",
+		Timestamp: time.Now().UTC(),
+		Completed: true,
+		Status:    status,
+	}
+	stream.events = append(stream.events, terminal)
+	for _, ch := range stream.subscribers {
+		select {
+		case ch <- terminal:
+		default:
+		}
+	}
+}
+
+// ensureState returns the stream state for recordID, lazily creating it with
+// status "running". Callers must hold h.mu for writing.
+func (h *LogHub) ensureState(recordID uint) *logStreamState {
+	if existing, ok := h.streams[recordID]; ok {
+		return existing
+	}
+	created := &logStreamState{subscribers: make(map[int]chan LogEvent), status: "running"}
+	h.streams[recordID] = created
+	return created
+}
diff --git a/server/internal/backup/log_hub_test.go b/server/internal/backup/log_hub_test.go
new file mode 100644
index 0000000..ae90aa1
--- /dev/null
+++ b/server/internal/backup/log_hub_test.go
@@ -0,0 +1,26 @@
+package backup
+
+import "testing"
+
+// TestLogHubAppendSubscribeAndComplete walks the happy path: subscribe,
+// append an event (checking sequence numbering), snapshot the history,
+// receive the live event, then complete the stream and receive the terminal
+// completion event with its status.
+func TestLogHubAppendSubscribeAndComplete(t *testing.T) {
+	hub := NewLogHub()
+	channel, cancel := hub.Subscribe(1, 4)
+	defer cancel()
+	first := hub.Append(1, "info", "started")
+	if first.Sequence != 1 || first.Message != "started" {
+		t.Fatalf("unexpected first event: %#v", first)
+	}
+	snapshot := hub.Snapshot(1)
+	if len(snapshot) != 1 {
+		t.Fatalf("expected snapshot size 1, got %d", len(snapshot))
+	}
+	event := <-channel
+	if event.Message != "started" {
+		t.Fatalf("unexpected streamed event: %#v", event)
+	}
+	hub.Complete(1, "success")
+	completeEvent := <-channel
+	if !completeEvent.Completed || completeEvent.Status != "success" {
+		t.Fatalf("unexpected completion event: %#v", completeEvent)
+	}
+}
diff --git a/server/internal/backup/logger.go b/server/internal/backup/logger.go
new file mode 100644
index 0000000..4dc1a03
--- /dev/null
+++ b/server/internal/backup/logger.go
@@ -0,0 +1,56 @@
+package backup
+
+import (
+ "fmt"
+ "strings"
+ "sync"
+)
+
+// ExecutionLogger accumulates a run's log transcript in memory and mirrors
+// each line to a LogHub (when one is attached) for live streaming.
+type ExecutionLogger struct {
+	recordID uint
+	hub      *LogHub
+	mu       sync.Mutex
+	buffer   strings.Builder
+}
+
+// NewExecutionLogger creates a logger for the given record; hub may be nil,
+// in which case lines are only kept in the local transcript.
+func NewExecutionLogger(recordID uint, hub *LogHub) *ExecutionLogger {
+	return &ExecutionLogger{recordID: recordID, hub: hub}
+}
+
+// Write appends a trimmed, non-empty message to the in-memory transcript
+// (newline-separated) and forwards it to the hub at the given level when a
+// hub is attached. Blank messages are ignored.
+func (l *ExecutionLogger) Write(level, message string) {
+	line := strings.TrimSpace(message)
+	if line == "" {
+		return
+	}
+	l.mu.Lock()
+	defer l.mu.Unlock()
+	if l.buffer.Len() > 0 {
+		l.buffer.WriteByte('\n')
+	}
+	l.buffer.WriteString(line)
+	if l.hub == nil {
+		return
+	}
+	l.hub.Append(l.recordID, level, line)
+}
+
+// Infof logs a formatted message at level "info".
+func (l *ExecutionLogger) Infof(format string, args ...any) {
+	l.Write("info", fmt.Sprintf(format, args...))
+}
+
+// Errorf logs a formatted message at level "error".
+func (l *ExecutionLogger) Errorf(format string, args ...any) {
+	l.Write("error", fmt.Sprintf(format, args...))
+}
+
+// Warnf logs a formatted message at level "warn".
+func (l *ExecutionLogger) Warnf(format string, args ...any) {
+	l.Write("warn", fmt.Sprintf(format, args...))
+}
+
+// WriteLine logs a plain message at level "info" (LogWriter-compatible).
+func (l *ExecutionLogger) WriteLine(message string) {
+	l.Infof("%s", message)
+}
+
+// String returns the newline-joined transcript accumulated so far.
+func (l *ExecutionLogger) String() string {
+	l.mu.Lock()
+	defer l.mu.Unlock()
+	return l.buffer.String()
+}
diff --git a/server/internal/backup/mysql_runner.go b/server/internal/backup/mysql_runner.go
new file mode 100644
index 0000000..2497db4
--- /dev/null
+++ b/server/internal/backup/mysql_runner.go
@@ -0,0 +1,163 @@
+package backup
+
+import (
+ "bufio"
+ "bytes"
+ "context"
+ "fmt"
+ "os"
+ "path/filepath"
+ "strconv"
+ "strings"
+ "time"
+)
+
+// MySQLRunner performs MySQL backups and restores by shelling out to
+// mysqldump / mysql through a CommandExecutor.
+type MySQLRunner struct {
+	executor CommandExecutor
+}
+
+// NewMySQLRunner wires the runner to executor; a nil executor falls back to
+// the real OS command executor.
+func NewMySQLRunner(executor CommandExecutor) *MySQLRunner {
+	if executor == nil {
+		executor = NewOSCommandExecutor()
+	}
+	return &MySQLRunner{executor: executor}
+}
+
+// Type reports the task type handled by this runner.
+func (r *MySQLRunner) Type() string {
+	return "mysql"
+}
+
+// Run produces a SQL dump of the configured MySQL databases via mysqldump
+// and returns the artifact metadata (path, name, size, storage key).
+// mysqldump's stderr is streamed line-by-line to writer and any leftover is
+// attached to the error on failure. Fails fast when mysqldump is missing.
+func (r *MySQLRunner) Run(ctx context.Context, task TaskSpec, writer LogWriter) (*RunResult, error) {
+	if _, err := r.executor.LookPath("mysqldump"); err != nil {
+		return nil, fmt.Errorf("未找到 mysqldump 命令 (请确保服务器已安装 mysql-client 或 mariadb-client)")
+	}
+	// Reuse the caller-provided start time when present so artifact names and
+	// storage keys line up with the record that triggered the run.
+	startedAt := task.StartedAt
+	if startedAt.IsZero() {
+		startedAt = time.Now().UTC()
+	}
+	tempDir, err := CreateTaskTempDir(task.Name, startedAt)
+	if err != nil {
+		return nil, err
+	}
+	fileName := BuildArtifactName(task.Name, startedAt, "sql")
+	artifactPath := filepath.Join(tempDir, fileName)
+	file, err := os.Create(artifactPath)
+	if err != nil {
+		return nil, fmt.Errorf("create mysql dump file: %w", err)
+	}
+	defer file.Close()
+	dbNames := normalizeDatabaseNames(task.Database.Names)
+	if len(dbNames) == 0 {
+		return nil, fmt.Errorf("mysql database names are required")
+	}
+	args := []string{
+		"--host", task.Database.Host,
+		"--port", strconv.Itoa(task.Database.Port),
+		"--user", task.Database.User,
+		"--single-transaction",
+		"--quick",
+		"--routines",
+		"--triggers",
+		"--events",
+		"--no-tablespaces",
+		"--net-buffer-length=32768",
+		"--databases",
+	}
+	args = append(args, dbNames...)
+
+	writer.WriteLine(fmt.Sprintf("连接到 MySQL: %s:%d", task.Database.Host, task.Database.Port))
+	writer.WriteLine(fmt.Sprintf("备份数据库: %s", strings.Join(dbNames, ", ")))
+
+	// The password is passed via the MYSQL_PWD environment variable (see
+	// mysqlEnv) rather than on the command line.
+	stderrWriter := newLogLineWriter(writer, "mysqldump")
+	writer.WriteLine("开始执行 mysqldump")
+	if err := r.executor.Run(ctx, "mysqldump", args, CommandOptions{Stdout: file, Stderr: stderrWriter, Env: mysqlEnv(task.Database.Password)}); err != nil {
+		return nil, fmt.Errorf("run mysqldump: %w: %s", err, stderrWriter.collected())
+	}
+	info, err := file.Stat()
+	if err != nil {
+		return nil, fmt.Errorf("stat mysql dump file: %w", err)
+	}
+	writer.WriteLine(fmt.Sprintf("MySQL 导出完成(文件大小: %s)", formatFileSize(info.Size())))
+	return &RunResult{ArtifactPath: artifactPath, FileName: fileName, TempDir: tempDir, Size: info.Size(), StorageKey: BuildStorageKey("mysql", startedAt, fileName)}, nil
+}
+
+// Restore pipes the SQL dump at artifactPath into the mysql client on stdin,
+// targeting the server described by task.Database; captured stderr is
+// attached to any error.
+func (r *MySQLRunner) Restore(ctx context.Context, task TaskSpec, artifactPath string, writer LogWriter) error {
+	if _, err := r.executor.LookPath("mysql"); err != nil {
+		return fmt.Errorf("未找到 mysql 命令 (请确保服务器已安装 mysql-client 或 mariadb-client)")
+	}
+	dump, err := os.Open(filepath.Clean(artifactPath))
+	if err != nil {
+		return fmt.Errorf("open mysql restore file: %w", err)
+	}
+	defer dump.Close()
+	errBuf := &bytes.Buffer{}
+	args := []string{"--host", task.Database.Host, "--port", strconv.Itoa(task.Database.Port), "--user", task.Database.User}
+	writer.WriteLine("开始执行 mysql 恢复")
+	runErr := r.executor.Run(ctx, "mysql", args, CommandOptions{Stdin: dump, Stderr: errBuf, Env: mysqlEnv(task.Database.Password)})
+	if runErr != nil {
+		return fmt.Errorf("run mysql restore: %w: %s", runErr, strings.TrimSpace(errBuf.String()))
+	}
+	writer.WriteLine("MySQL 恢复完成")
+	return nil
+}
+
+// mysqlEnv returns the environment entries needed to hand the password to
+// MySQL client tools via MYSQL_PWD, or nil when no password is configured.
+func mysqlEnv(password string) []string {
+	if strings.TrimSpace(password) != "" {
+		return []string{"MYSQL_PWD=" + password}
+	}
+	return nil
+}
+
+// logLineWriter is an io.Writer adapter that streams each line of a child
+// process's output to a LogWriter in real time, tagged with "[prefix]",
+// while keeping not-yet-emitted bytes available via collected().
+type logLineWriter struct {
+	writer LogWriter
+	prefix string
+	buf    bytes.Buffer
+}
+
+// newLogLineWriter builds a line writer that tags lines with the given prefix.
+func newLogLineWriter(w LogWriter, prefix string) *logLineWriter {
+	return &logLineWriter{writer: w, prefix: prefix}
+}
+
+func (w *logLineWriter) Write(p []byte) (int, error) {
+ n := len(p)
+ w.buf.Write(p)
+ scanner := bufio.NewScanner(strings.NewReader(w.buf.String()))
+ var remaining string
+ for scanner.Scan() {
+ line := strings.TrimSpace(scanner.Text())
+ if line != "" {
+ w.writer.WriteLine(fmt.Sprintf("[%s] %s", w.prefix, line))
+ }
+ }
+ // Keep any partial last line (no newline yet)
+ lastNl := bytes.LastIndexByte(p, '\n')
+ if lastNl >= 0 {
+ remaining = w.buf.String()[w.buf.Len()-(len(p)-lastNl-1):]
+ w.buf.Reset()
+ w.buf.WriteString(remaining)
+ }
+ return n, nil
+}
+
+// collected returns any buffered, not-yet-emitted output (the trailing
+// partial line), trimmed of surrounding whitespace. Used to attach leftover
+// stderr to error messages.
+func (w *logLineWriter) collected() string {
+	return strings.TrimSpace(w.buf.String())
+}
+
+// formatFileSize renders a byte count as a human-readable string using
+// binary units (1 KB = 1024 B): plain bytes below 1 KB, otherwise two
+// decimals with the largest applicable unit up to GB.
+func formatFileSize(size int64) string {
+	const unit = int64(1024)
+	if size < unit {
+		return fmt.Sprintf("%d B", size)
+	}
+	value := float64(size)
+	for _, suffix := range []string{"KB", "MB", "GB"} {
+		value /= 1024
+		if value < 1024 || suffix == "GB" {
+			return fmt.Sprintf("%.2f %s", value, suffix)
+		}
+	}
+	return fmt.Sprintf("%d B", size)
+}
+
diff --git a/server/internal/backup/postgres_runner.go b/server/internal/backup/postgres_runner.go
new file mode 100644
index 0000000..d7fff0d
--- /dev/null
+++ b/server/internal/backup/postgres_runner.go
@@ -0,0 +1,171 @@
+//go:build ignore
+
+package backup
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+)
+
+// PostgreSQLRunner (legacy variant) drives pg_dump/psql via a CommandExecutor.
+// NOTE(review): this file is excluded from builds by the "//go:build ignore"
+// tag at the top; the active implementation lives in postgresql_runner.go.
+type PostgreSQLRunner struct {
+	executor CommandExecutor
+}
+
+// NewPostgreSQLRunner wires the runner to executor, defaulting to the real
+// OS command executor when nil.
+func NewPostgreSQLRunner(executor CommandExecutor) *PostgreSQLRunner {
+	if executor == nil {
+		executor = NewOSCommandExecutor()
+	}
+	return &PostgreSQLRunner{executor: executor}
+}
+
+// Type reports the task type handled by this runner.
+func (r *PostgreSQLRunner) Type() string {
+	return "postgresql"
+}
+
+// Run dumps the configured databases. A single database becomes one .sql
+// artifact; multiple databases are dumped to individual files and then
+// bundled into a single tar.gz artifact.
+func (r *PostgreSQLRunner) Run(ctx context.Context, spec TaskSpec, logger LogSink) (*Result, error) {
+	if _, err := r.executor.LookPath("pg_dump"); err != nil {
+		return nil, fmt.Errorf("pg_dump is required: %w", err)
+	}
+	databases := splitDatabaseNames(spec.DBName)
+	if len(databases) == 0 {
+		return nil, fmt.Errorf("postgresql database name is required")
+	}
+	tempDir, err := CreateTaskTempDir(spec.TaskName, spec.StartedAt)
+	if err != nil {
+		return nil, err
+	}
+	if len(databases) == 1 {
+		return r.dumpSingleDatabase(ctx, spec, databases[0], tempDir, logger)
+	}
+	// Multi-database path: one .sql per database, then bundle the directory.
+	multiDumpDir := filepath.Join(tempDir, "postgres-dumps")
+	if err := os.MkdirAll(multiDumpDir, 0o755); err != nil {
+		return nil, fmt.Errorf("create postgres multi dump directory: %w", err)
+	}
+	for _, databaseName := range databases {
+		if _, err := r.dumpDatabaseToFile(ctx, spec, databaseName, filepath.Join(multiDumpDir, sanitizeDumpName(databaseName)+".sql"), logger); err != nil {
+			return nil, err
+		}
+	}
+	fileName := BuildArtifactName(spec.TaskName, spec.StartedAt, "tar.gz")
+	artifactPath := filepath.Join(tempDir, fileName)
+	size, err := CreateTarGz(ctx, multiDumpDir, nil, artifactPath, logger)
+	if err != nil {
+		return nil, err
+	}
+	return &Result{ArtifactPath: artifactPath, FileName: fileName, Size: size, StorageKey: BuildStorageKey("postgresql", spec.StartedAt, fileName)}, nil
+}
+
+// Restore replays a backup. Plain .sql artifacts are fed straight to psql;
+// .tar.gz artifacts are extracted first and each database's dump file is
+// restored individually.
+func (r *PostgreSQLRunner) Restore(ctx context.Context, spec TaskSpec, artifactPath string, logger LogSink) error {
+	if _, err := r.executor.LookPath("psql"); err != nil {
+		return fmt.Errorf("psql is required: %w", err)
+	}
+	databases := splitDatabaseNames(spec.DBName)
+	if len(databases) == 0 {
+		return fmt.Errorf("postgresql database name is required")
+	}
+	if strings.HasSuffix(strings.ToLower(artifactPath), ".tar.gz") {
+		restoreDir, err := CreateTaskTempDir(spec.TaskName+"-restore", spec.StartedAt)
+		if err != nil {
+			return err
+		}
+		if err := ExtractTarGz(ctx, artifactPath, restoreDir, logger); err != nil {
+			return err
+		}
+		for _, databaseName := range databases {
+			// NOTE(review): the primary lookup joins the restore dir with its
+			// own base name, which looks wrong; the fallback path under
+			// "postgres-dumps" matches the layout produced by Run. The
+			// fallback is also taken unconditionally when the first stat
+			// fails, even if the fallback file does not exist — verify.
+			filePath := filepath.Join(restoreDir, filepath.Base(restoreDir), sanitizeDumpName(databaseName)+".sql")
+			if _, err := os.Stat(filePath); err != nil {
+				fallback := filepath.Join(restoreDir, "postgres-dumps", sanitizeDumpName(databaseName)+".sql")
+				filePath = fallback
+			}
+			if err := r.restoreDatabaseFromFile(ctx, spec, databaseName, filePath, logger); err != nil {
+				return err
+			}
+		}
+		return nil
+	}
+	// Single .sql artifact: restore into the first configured database.
+	return r.restoreDatabaseFromFile(ctx, spec, databases[0], artifactPath, logger)
+}
+
+// dumpSingleDatabase dumps one database straight into a .sql artifact inside
+// tempDir and wraps the result metadata.
+func (r *PostgreSQLRunner) dumpSingleDatabase(ctx context.Context, spec TaskSpec, databaseName string, tempDir string, logger LogSink) (*Result, error) {
+	fileName := BuildArtifactName(spec.TaskName, spec.StartedAt, "sql")
+	artifactPath := filepath.Join(tempDir, fileName)
+	size, err := r.dumpDatabaseToFile(ctx, spec, databaseName, artifactPath, logger)
+	if err != nil {
+		return nil, err
+	}
+	return &Result{ArtifactPath: artifactPath, FileName: fileName, Size: size, StorageKey: BuildStorageKey("postgresql", spec.StartedAt, fileName)}, nil
+}
+
+// dumpDatabaseToFile runs pg_dump for databaseName, writing plain-SQL output
+// to artifactPath, and returns the resulting file size. Ownership/privilege
+// statements are omitted (--no-owner --no-privileges); the password travels
+// via PGPASSWORD (see postgresEnv) and stderr is attached to any error.
+func (r *PostgreSQLRunner) dumpDatabaseToFile(ctx context.Context, spec TaskSpec, databaseName string, artifactPath string, logger LogSink) (int64, error) {
+	output, err := os.Create(filepath.Clean(artifactPath))
+	if err != nil {
+		return 0, fmt.Errorf("create postgres dump file: %w", err)
+	}
+	defer output.Close()
+	stderr := &bytes.Buffer{}
+	args := []string{"-h", spec.DBHost, "-p", fmt.Sprintf("%d", spec.DBPort), "-U", spec.DBUser, "-d", databaseName, "--no-owner", "--no-privileges"}
+	if logger != nil {
+		logger.Infof("开始执行 pg_dump:%s", databaseName)
+	}
+	if err := r.executor.Run(ctx, "pg_dump", args, postgresEnv(spec.DBPassword), nil, output, stderr); err != nil {
+		return 0, fmt.Errorf("run pg_dump: %w: %s", err, strings.TrimSpace(stderr.String()))
+	}
+	info, err := output.Stat()
+	if err != nil {
+		return 0, fmt.Errorf("stat postgres dump file: %w", err)
+	}
+	return info.Size(), nil
+}
+
+// restoreDatabaseFromFile feeds the SQL file at artifactPath to psql on
+// stdin, connected to databaseName; captured stderr is attached to any error.
+func (r *PostgreSQLRunner) restoreDatabaseFromFile(ctx context.Context, spec TaskSpec, databaseName string, artifactPath string, logger LogSink) error {
+	input, err := os.Open(filepath.Clean(artifactPath))
+	if err != nil {
+		return fmt.Errorf("open postgres restore file: %w", err)
+	}
+	defer input.Close()
+	stderr := &bytes.Buffer{}
+	args := []string{"-h", spec.DBHost, "-p", fmt.Sprintf("%d", spec.DBPort), "-U", spec.DBUser, "-d", databaseName}
+	if logger != nil {
+		logger.Infof("开始执行 psql 恢复:%s", databaseName)
+	}
+	if err := r.executor.Run(ctx, "psql", args, postgresEnv(spec.DBPassword), input, nil, stderr); err != nil {
+		return fmt.Errorf("run psql restore: %w: %s", err, strings.TrimSpace(stderr.String()))
+	}
+	return nil
+}
+
+// postgresEnv returns the PGPASSWORD environment mapping for libpq-based
+// tools, or nil when no password is configured.
+func postgresEnv(password string) map[string]string {
+	trimmed := strings.TrimSpace(password)
+	if trimmed == "" {
+		return nil
+	}
+	return map[string]string{"PGPASSWORD": password}
+}
+
+// splitDatabaseNames splits a comma-separated database list, trimming
+// whitespace around each name and dropping empty segments.
+func splitDatabaseNames(value string) []string {
+	names := make([]string, 0)
+	for _, segment := range strings.Split(value, ",") {
+		if name := strings.TrimSpace(segment); name != "" {
+			names = append(names, name)
+		}
+	}
+	return names
+}
+
+// sanitizeDumpName lowercases and trims a database name, replacing spaces
+// and path separators with '-' and stripping leading/trailing "-._"; it
+// falls back to "database" when nothing remains.
+func sanitizeDumpName(value string) string {
+	replacer := strings.NewReplacer(" ", "-", "/", "-", "\\", "-")
+	name := replacer.Replace(strings.ToLower(strings.TrimSpace(value)))
+	name = strings.Trim(name, "-._")
+	if name == "" {
+		return "database"
+	}
+	return name
+}
diff --git a/server/internal/backup/postgresql_runner.go b/server/internal/backup/postgresql_runner.go
new file mode 100644
index 0000000..b262c3d
--- /dev/null
+++ b/server/internal/backup/postgresql_runner.go
@@ -0,0 +1,80 @@
+package backup
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "path/filepath"
+ "strconv"
+ "strings"
+)
+
+// PostgreSQLRunner performs PostgreSQL backups and restores by shelling out
+// to pg_dump / psql through a CommandExecutor.
+type PostgreSQLRunner struct {
+	executor CommandExecutor
+}
+
+// NewPostgreSQLRunner wires the runner to executor; a nil executor falls
+// back to the real OS command executor.
+func NewPostgreSQLRunner(executor CommandExecutor) *PostgreSQLRunner {
+	if executor == nil {
+		executor = NewOSCommandExecutor()
+	}
+	return &PostgreSQLRunner{executor: executor}
+}
+
+// Type reports the task type handled by this runner.
+func (r *PostgreSQLRunner) Type() string {
+	return "postgresql"
+}
+
+// Run dumps every configured database with pg_dump into a single plain-SQL
+// artifact (dumps are concatenated, separated by blank lines) and returns
+// the artifact location. pg_dump's stderr is streamed line-by-line to writer.
+//
+// Fix: on pg_dump failure, the leftover captured stderr is now included in
+// the returned error, matching MySQLRunner.Run's error style (previously the
+// diagnostic output was dropped).
+func (r *PostgreSQLRunner) Run(ctx context.Context, task TaskSpec, writer LogWriter) (*RunResult, error) {
+	if _, err := r.executor.LookPath("pg_dump"); err != nil {
+		return nil, fmt.Errorf("未找到 pg_dump 命令 (请确保服务器已安装 postgresql-client)")
+	}
+	tempDir, artifactPath, err := createTempArtifact(task.TempDir, task.Name, "sql")
+	if err != nil {
+		return nil, err
+	}
+	file, err := os.Create(artifactPath)
+	if err != nil {
+		return nil, fmt.Errorf("create postgresql dump file: %w", err)
+	}
+	defer file.Close()
+	dbNames := normalizeDatabaseNames(task.Database.Names)
+	if len(dbNames) == 0 {
+		return nil, fmt.Errorf("postgresql database names are required")
+	}
+	writer.WriteLine(fmt.Sprintf("连接到 PostgreSQL: %s:%d", task.Database.Host, task.Database.Port))
+	writer.WriteLine(fmt.Sprintf("备份数据库: %s", strings.Join(dbNames, ", ")))
+	stderrWriter := newLogLineWriter(writer, "pg_dump")
+	for index, name := range dbNames {
+		args := []string{"--clean", "--if-exists", "--create", "--format=plain", "-h", task.Database.Host, "-p", strconv.Itoa(task.Database.Port), "-U", task.Database.User, "--dbname", name}
+		writer.WriteLine(fmt.Sprintf("开始导出数据库 [%d/%d]: %s", index+1, len(dbNames), name))
+		if err := r.executor.Run(ctx, "pg_dump", args, CommandOptions{Stdout: file, Stderr: stderrWriter, Env: append(os.Environ(), "PGPASSWORD="+task.Database.Password)}); err != nil {
+			// Attach leftover stderr so failures are diagnosable.
+			return nil, fmt.Errorf("run pg_dump for %s: %w: %s", name, err, stderrWriter.collected())
+		}
+		writer.WriteLine(fmt.Sprintf("数据库 %s 导出完成", name))
+		if index < len(dbNames)-1 {
+			if _, err := file.WriteString("\n\n"); err != nil {
+				return nil, fmt.Errorf("write dump separator: %w", err)
+			}
+		}
+	}
+	info, err := file.Stat()
+	sizeStr := "未知"
+	if err == nil {
+		sizeStr = formatFileSize(info.Size())
+	}
+	writer.WriteLine(fmt.Sprintf("PostgreSQL 导出完成(文件大小: %s)", sizeStr))
+	return &RunResult{ArtifactPath: artifactPath, FileName: filepath.Base(artifactPath), TempDir: tempDir}, nil
+}
+
+// Restore replays the SQL artifact with psql, connecting to the maintenance
+// database "postgres" (the dumps are produced with --create/--clean, so they
+// switch to the target databases themselves).
+func (r *PostgreSQLRunner) Restore(ctx context.Context, task TaskSpec, artifactPath string, writer LogWriter) error {
+	if _, err := r.executor.LookPath("psql"); err != nil {
+		return fmt.Errorf("未找到 psql 命令 (请确保服务器已安装 postgresql-client)")
+	}
+	writer.WriteLine("开始执行 psql 恢复")
+	env := append(os.Environ(), "PGPASSWORD="+task.Database.Password)
+	args := []string{"-h", task.Database.Host, "-p", strconv.Itoa(task.Database.Port), "-U", task.Database.User, "-d", "postgres", "-f", artifactPath}
+	if err := r.executor.Run(ctx, "psql", args, CommandOptions{Env: env}); err != nil {
+		return fmt.Errorf("run psql restore: %w", err)
+	}
+	writer.WriteLine("PostgreSQL 恢复完成")
+	return nil
+}
diff --git a/server/internal/backup/registry.go b/server/internal/backup/registry.go
new file mode 100644
index 0000000..fcaf60f
--- /dev/null
+++ b/server/internal/backup/registry.go
@@ -0,0 +1,62 @@
+package backup
+
+import (
+ "fmt"
+ "sort"
+ "strings"
+ "sync"
+)
+
+// Registry is a concurrency-safe mapping from normalized task type to the
+// BackupRunner that handles it.
+type Registry struct {
+	mu      sync.RWMutex
+	runners map[string]BackupRunner
+}
+
+// NewRegistry builds a registry pre-populated with the given runners, keyed
+// by their normalized Type().
+func NewRegistry(runners ...BackupRunner) *Registry {
+	registry := &Registry{runners: make(map[string]BackupRunner)}
+	for _, runner := range runners {
+		registry.Register(runner)
+	}
+	return registry
+}
+
+// Register adds runner under its normalized type, replacing any previously
+// registered runner for that type. Nil runners are ignored.
+func (r *Registry) Register(runner BackupRunner) {
+	if runner == nil {
+		return
+	}
+	r.mu.Lock()
+	defer r.mu.Unlock()
+	if r.runners == nil {
+		// Support zero-value Registry instances.
+		r.runners = make(map[string]BackupRunner)
+	}
+	key := normalizeTaskType(runner.Type())
+	r.runners[key] = runner
+}
+
+// Runner looks up the runner registered for taskType (after normalization)
+// and returns an error when the type is unsupported.
+func (r *Registry) Runner(taskType string) (BackupRunner, error) {
+	r.mu.RLock()
+	defer r.mu.RUnlock()
+	if runner, ok := r.runners[normalizeTaskType(taskType)]; ok {
+		return runner, nil
+	}
+	return nil, fmt.Errorf("unsupported backup task type: %s", taskType)
+}
+
+// Types returns the sorted list of registered (normalized) task types.
+func (r *Registry) Types() []string {
+	r.mu.RLock()
+	defer r.mu.RUnlock()
+	keys := make([]string, 0, len(r.runners))
+	for taskType := range r.runners {
+		keys = append(keys, taskType)
+	}
+	sort.Strings(keys)
+	return keys
+}
+
+// normalizeTaskType lowercases and trims a task type string, mapping the
+// legacy alias "pgsql" to "postgresql".
+func normalizeTaskType(value string) string {
+	switch normalized := strings.TrimSpace(strings.ToLower(value)); normalized {
+	case "pgsql":
+		return "postgresql"
+	default:
+		return normalized
+	}
+}
diff --git a/server/internal/backup/registry_test.go b/server/internal/backup/registry_test.go
new file mode 100644
index 0000000..0dc903d
--- /dev/null
+++ b/server/internal/backup/registry_test.go
@@ -0,0 +1,23 @@
+package backup
+
+import (
+ "context"
+ "testing"
+)
+
+// stubRunner is a minimal BackupRunner used to exercise registry lookups.
+type stubRunner struct{ taskType string }
+
+func (r stubRunner) Type() string { return r.taskType }
+func (r stubRunner) Run(context.Context, TaskSpec, LogWriter) (*RunResult, error) { return nil, nil }
+func (r stubRunner) Restore(context.Context, TaskSpec, string, LogWriter) error { return nil }
+
+// TestRegistryResolvesNormalizedType checks that the legacy alias "pgsql"
+// resolves to the runner registered under "postgresql".
+func TestRegistryResolvesNormalizedType(t *testing.T) {
+	registry := NewRegistry(stubRunner{taskType: "postgresql"})
+	runner, err := registry.Runner("pgsql")
+	if err != nil {
+		t.Fatalf("Runner returned error: %v", err)
+	}
+	if runner.Type() != "postgresql" {
+		t.Fatalf("unexpected runner type: %s", runner.Type())
+	}
+}
diff --git a/server/internal/backup/retention/service.go b/server/internal/backup/retention/service.go
new file mode 100644
index 0000000..5413c93
--- /dev/null
+++ b/server/internal/backup/retention/service.go
@@ -0,0 +1,82 @@
+package retention
+
+import (
+ "context"
+ "fmt"
+ "strings"
+ "time"
+
+ "backupx/server/internal/model"
+ "backupx/server/internal/repository"
+ "backupx/server/internal/storage"
+)
+
+// CleanupResult summarizes one retention pass: how many database records and
+// storage objects were removed, plus non-fatal warnings collected on the way.
+type CleanupResult struct {
+ DeletedRecords int
+ DeletedObjects int
+ Warnings []string
+}
+
+// Service applies retention policies (age and count limits) to backup records.
+type Service struct {
+ records repository.BackupRecordRepository
+ // now is injectable for tests; defaults to time.Now in UTC.
+ now func() time.Time
+}
+
+// NewService builds a retention Service backed by the given record repository.
+func NewService(records repository.BackupRecordRepository) *Service {
+ return &Service{records: records, now: func() time.Time { return time.Now().UTC() }}
+}
+
+// Cleanup deletes records of task that violate its retention policy. For each
+// candidate the stored object is removed first, then the database record;
+// individual failures become warnings instead of aborting the pass.
+func (s *Service) Cleanup(ctx context.Context, task *model.BackupTask, provider storage.StorageProvider) (*CleanupResult, error) {
+ if task == nil {
+ return nil, fmt.Errorf("backup task is required")
+ }
+ records, err := s.records.ListSuccessfulByTask(ctx, task.ID)
+ if err != nil {
+ return nil, fmt.Errorf("list successful records: %w", err)
+ }
+ candidates := selectRecordsToDelete(records, task.RetentionDays, task.MaxBackups, s.now())
+ result := &CleanupResult{}
+ for _, record := range candidates {
+ if strings.TrimSpace(record.StoragePath) != "" {
+ // A record that references a stored object but has no provider
+ // cannot be cleaned safely; keep it and surface a warning.
+ if provider == nil {
+ result.Warnings = append(result.Warnings, fmt.Sprintf("record %d missing storage provider for cleanup", record.ID))
+ continue
+ }
+ // Object first, record second: a failure here keeps the tracking
+ // row so the object is never orphaned untracked.
+ if err := provider.Delete(ctx, record.StoragePath); err != nil {
+ result.Warnings = append(result.Warnings, fmt.Sprintf("delete storage object %s failed: %v", record.StoragePath, err))
+ continue
+ }
+ result.DeletedObjects++
+ }
+ if err := s.records.Delete(ctx, record.ID); err != nil {
+ result.Warnings = append(result.Warnings, fmt.Sprintf("delete backup record %d failed: %v", record.ID, err))
+ continue
+ }
+ result.DeletedRecords++
+ }
+ return result, nil
+}
+
+// selectRecordsToDelete picks records violating either policy: entries beyond
+// the newest maxBackups, and entries completed before the retentionDays
+// cutoff. The input order is preserved in the returned slice.
+func selectRecordsToDelete(records []model.BackupRecord, retentionDays int, maxBackups int, now time.Time) []model.BackupRecord {
+ doomed := make(map[uint]model.BackupRecord)
+ if maxBackups > 0 && len(records) > maxBackups {
+ for _, overflow := range records[maxBackups:] {
+ doomed[overflow.ID] = overflow
+ }
+ }
+ if retentionDays > 0 {
+ cutoff := now.AddDate(0, 0, -retentionDays)
+ for _, candidate := range records {
+ if candidate.CompletedAt != nil && candidate.CompletedAt.Before(cutoff) {
+ doomed[candidate.ID] = candidate
+ }
+ }
+ }
+ ordered := make([]model.BackupRecord, 0, len(doomed))
+ for _, candidate := range records {
+ if match, ok := doomed[candidate.ID]; ok {
+ ordered = append(ordered, match)
+ }
+ }
+ return ordered
+}
diff --git a/server/internal/backup/retention/service_test.go b/server/internal/backup/retention/service_test.go
new file mode 100644
index 0000000..27c1fc1
--- /dev/null
+++ b/server/internal/backup/retention/service_test.go
@@ -0,0 +1,115 @@
+package retention
+
+import (
+ "context"
+ "fmt"
+ "io"
+ "testing"
+ "time"
+
+ "backupx/server/internal/model"
+ "backupx/server/internal/repository"
+ "backupx/server/internal/storage"
+)
+
+// fakeRecordRepository is an in-memory repository.BackupRecordRepository:
+// ListSuccessfulByTask serves the fixture records, Delete tracks deleted ids
+// (or fails via deleteErrs), and every other method is an inert stub.
+type fakeRecordRepository struct {
+ records []model.BackupRecord
+ deleted []uint
+ deleteErrs map[uint]error
+}
+
+func (r *fakeRecordRepository) List(context.Context, repository.BackupRecordListOptions) ([]model.BackupRecord, error) {
+ return nil, nil
+}
+func (r *fakeRecordRepository) FindByID(context.Context, uint) (*model.BackupRecord, error) {
+ return nil, nil
+}
+func (r *fakeRecordRepository) Create(context.Context, *model.BackupRecord) error { return nil }
+func (r *fakeRecordRepository) Update(context.Context, *model.BackupRecord) error { return nil }
+// Delete honors injected per-id failures, otherwise records the deleted id.
+func (r *fakeRecordRepository) Delete(_ context.Context, id uint) error {
+ if err := r.deleteErrs[id]; err != nil {
+ return err
+ }
+ r.deleted = append(r.deleted, id)
+ return nil
+}
+func (r *fakeRecordRepository) ListRecent(context.Context, int) ([]model.BackupRecord, error) {
+ return nil, nil
+}
+// ListSuccessfulByTask returns the fixture regardless of the task id.
+func (r *fakeRecordRepository) ListSuccessfulByTask(_ context.Context, _ uint) ([]model.BackupRecord, error) {
+ return r.records, nil
+}
+func (r *fakeRecordRepository) Count(context.Context) (int64, error) { return 0, nil }
+func (r *fakeRecordRepository) CountSince(context.Context, time.Time) (int64, error) { return 0, nil }
+func (r *fakeRecordRepository) CountSuccessSince(context.Context, time.Time) (int64, error) {
+ return 0, nil
+}
+func (r *fakeRecordRepository) SumFileSize(context.Context) (int64, error) { return 0, nil }
+func (r *fakeRecordRepository) TimelineSince(context.Context, time.Time) ([]repository.BackupTimelinePoint, error) {
+ return nil, nil
+}
+func (r *fakeRecordRepository) StorageUsage(context.Context) ([]repository.BackupStorageUsageItem, error) {
+ return nil, nil
+}
+
+// fakeProvider is an in-memory storage.StorageProvider: Delete records the
+// keys it removed and fails for the configured failKey; all other methods
+// are inert stubs.
+type fakeProvider struct {
+ deleted []string
+ failKey string
+}
+
+func (p *fakeProvider) Type() string { return storage.ProviderTypeLocalDisk }
+func (p *fakeProvider) TestConnection(context.Context) error { return nil }
+func (p *fakeProvider) Upload(context.Context, string, io.Reader, int64, map[string]string) error {
+ return nil
+}
+func (p *fakeProvider) Download(context.Context, string) (io.ReadCloser, error) { return nil, nil }
+// Delete fails for failKey and otherwise records the deleted object key.
+func (p *fakeProvider) Delete(_ context.Context, objectKey string) error {
+ if objectKey == p.failKey {
+ return fmt.Errorf("delete failed")
+ }
+ p.deleted = append(p.deleted, objectKey)
+ return nil
+}
+func (p *fakeProvider) List(context.Context, string) ([]storage.ObjectInfo, error) { return nil, nil }
+
+// TestSelectRecordsToDelete: with maxBackups=2 and retentionDays=7 only the
+// third, 15-day-old record (ID 1) violates both policies and is selected.
+func TestSelectRecordsToDelete(t *testing.T) {
+ now := time.Date(2026, 3, 7, 16, 0, 0, 0, time.UTC)
+ completedNew := now.Add(-24 * time.Hour)
+ completedOld := now.Add(-15 * 24 * time.Hour)
+ // Records are ordered newest first, matching ListSuccessfulByTask.
+ records := []model.BackupRecord{
+ {ID: 3, CompletedAt: &completedNew},
+ {ID: 2, CompletedAt: &completedNew},
+ {ID: 1, CompletedAt: &completedOld},
+ }
+ selected := selectRecordsToDelete(records, 7, 2, now)
+ if len(selected) != 1 || selected[0].ID != 1 {
+ t.Fatalf("unexpected selected records: %#v", selected)
+ }
+}
+
+// TestCleanupDeletesExpiredRecords verifies Cleanup removes both the storage
+// object and the database record for the single expired entry, leaving the
+// two recent records untouched.
+func TestCleanupDeletesExpiredRecords(t *testing.T) {
+ now := time.Date(2026, 3, 7, 16, 0, 0, 0, time.UTC)
+ completedNew := now.Add(-24 * time.Hour)
+ completedOld := now.Add(-15 * 24 * time.Hour)
+ repo := &fakeRecordRepository{records: []model.BackupRecord{
+ {ID: 3, TaskID: 1, StoragePath: "records/3", CompletedAt: &completedNew},
+ {ID: 2, TaskID: 1, StoragePath: "records/2", CompletedAt: &completedNew},
+ {ID: 1, TaskID: 1, StoragePath: "records/1", CompletedAt: &completedOld},
+ }}
+ provider := &fakeProvider{}
+ service := NewService(repo)
+ // Pin the clock so the retention cutoff is deterministic.
+ service.now = func() time.Time { return now }
+ result, err := service.Cleanup(context.Background(), &model.BackupTask{ID: 1, RetentionDays: 7, MaxBackups: 2}, provider)
+ if err != nil {
+ t.Fatalf("Cleanup returned error: %v", err)
+ }
+ if result.DeletedRecords != 1 || result.DeletedObjects != 1 {
+ t.Fatalf("unexpected cleanup result: %#v", result)
+ }
+ if len(repo.deleted) != 1 || repo.deleted[0] != 1 {
+ t.Fatalf("unexpected deleted records: %#v", repo.deleted)
+ }
+ if len(provider.deleted) != 1 || provider.deleted[0] != "records/1" {
+ t.Fatalf("unexpected deleted objects: %#v", provider.deleted)
+ }
+}
diff --git a/server/internal/backup/sqlite_runner.go b/server/internal/backup/sqlite_runner.go
new file mode 100644
index 0000000..ad66e48
--- /dev/null
+++ b/server/internal/backup/sqlite_runner.go
@@ -0,0 +1,74 @@
+package backup
+
+import (
+ "context"
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
+ "strings"
+)
+
+// SQLiteRunner backs up a SQLite database by copying its file on disk.
+//
+// NOTE(review): a plain file copy of a live SQLite database is only safe when
+// no writer is active; confirm callers quiesce the database first.
+type SQLiteRunner struct{}
+
+// NewSQLiteRunner constructs a stateless SQLiteRunner.
+func NewSQLiteRunner() *SQLiteRunner {
+ return &SQLiteRunner{}
+}
+
+// Type identifies this runner's task type.
+func (r *SQLiteRunner) Type() string {
+ return "sqlite"
+}
+
+// Run copies the configured SQLite database file into a fresh temp artifact
+// and returns its location. The returned TempDir is owned by the caller.
+func (r *SQLiteRunner) Run(_ context.Context, task TaskSpec, writer LogWriter) (*RunResult, error) {
+ // Validate before filepath.Clean: Clean("") yields ".", which would defeat
+ // the empty-path check and silently target the working directory.
+ trimmed := strings.TrimSpace(task.Database.Path)
+ if trimmed == "" {
+ return nil, fmt.Errorf("sqlite database path is required")
+ }
+ dbPath := filepath.Clean(trimmed)
+ if _, err := os.Stat(dbPath); err != nil {
+ return nil, fmt.Errorf("stat sqlite database: %w", err)
+ }
+ tempDir, artifactPath, err := createTempArtifact(task.TempDir, task.Name, strings.TrimPrefix(filepath.Ext(dbPath), "."))
+ if err != nil {
+ return nil, err
+ }
+ // Fall back to a .sqlite suffix when the source file has no extension.
+ if filepath.Ext(artifactPath) == "." || filepath.Ext(artifactPath) == "" {
+ artifactPath += ".sqlite"
+ }
+ if err := copyFile(dbPath, artifactPath); err != nil {
+ // Do not leak the staging directory when the copy fails.
+ _ = os.RemoveAll(tempDir)
+ return nil, err
+ }
+ writer.WriteLine("SQLite 备份文件已复制")
+ return &RunResult{ArtifactPath: artifactPath, FileName: filepath.Base(artifactPath), TempDir: tempDir}, nil
+}
+
+// Restore copies the backup artifact back over the configured database path.
+func (r *SQLiteRunner) Restore(_ context.Context, task TaskSpec, artifactPath string, writer LogWriter) error {
+ // Validate before filepath.Clean: Clean("") yields ".", which made the
+ // original empty-path check unreachable.
+ trimmed := strings.TrimSpace(task.Database.Path)
+ if trimmed == "" {
+ return fmt.Errorf("sqlite database path is required")
+ }
+ dbPath := filepath.Clean(trimmed)
+ if err := copyFile(artifactPath, dbPath); err != nil {
+ return err
+ }
+ writer.WriteLine("SQLite 数据库已恢复")
+ return nil
+}
+
+// copyFile copies sourcePath to targetPath, creating the target directory as
+// needed. The target's Close error is checked: for a backup artifact a failed
+// close can mean buffered data was never flushed and the copy is incomplete.
+func copyFile(sourcePath string, targetPath string) error {
+ source, err := os.Open(sourcePath)
+ if err != nil {
+ return fmt.Errorf("open source file: %w", err)
+ }
+ defer source.Close()
+ if err := os.MkdirAll(filepath.Dir(targetPath), 0o755); err != nil {
+ return fmt.Errorf("create target directory: %w", err)
+ }
+ target, err := os.Create(targetPath)
+ if err != nil {
+ return fmt.Errorf("create target file: %w", err)
+ }
+ if _, err := io.Copy(target, source); err != nil {
+ target.Close()
+ return fmt.Errorf("copy file content: %w", err)
+ }
+ // Surface close errors instead of discarding them in a defer.
+ if err := target.Close(); err != nil {
+ return fmt.Errorf("close target file: %w", err)
+ }
+ return nil
+}
diff --git a/server/internal/backup/sqlite_runner_test.go b/server/internal/backup/sqlite_runner_test.go
new file mode 100644
index 0000000..41a1c0b
--- /dev/null
+++ b/server/internal/backup/sqlite_runner_test.go
@@ -0,0 +1,34 @@
+package backup
+
+import (
+ "context"
+ "os"
+ "path/filepath"
+ "testing"
+)
+
+// TestSQLiteRunnerRunAndRestore round-trips a database file through Run and
+// Restore and verifies the original bytes come back.
+func TestSQLiteRunnerRunAndRestore(t *testing.T) {
+ tempDir := t.TempDir()
+ dbPath := filepath.Join(tempDir, "data.db")
+ if err := os.WriteFile(dbPath, []byte("sqlite-data"), 0o644); err != nil {
+ t.Fatalf("WriteFile returned error: %v", err)
+ }
+ runner := NewSQLiteRunner()
+ result, err := runner.Run(context.Background(), TaskSpec{Name: "sqlite backup", Type: "sqlite", Database: DatabaseSpec{Path: dbPath}}, NopLogWriter{})
+ if err != nil {
+ t.Fatalf("Run returned error: %v", err)
+ }
+ // Run stages its artifact outside t.TempDir (TaskSpec.TempDir is empty
+ // here); remove the staging dir so repeated runs do not accumulate files.
+ t.Cleanup(func() { _ = os.RemoveAll(result.TempDir) })
+ if err := os.WriteFile(dbPath, []byte("mutated"), 0o644); err != nil {
+ t.Fatalf("WriteFile returned error: %v", err)
+ }
+ if err := runner.Restore(context.Background(), TaskSpec{Name: "sqlite backup", Type: "sqlite", Database: DatabaseSpec{Path: dbPath}}, result.ArtifactPath, NopLogWriter{}); err != nil {
+ t.Fatalf("Restore returned error: %v", err)
+ }
+ content, err := os.ReadFile(dbPath)
+ if err != nil {
+ t.Fatalf("ReadFile returned error: %v", err)
+ }
+ if string(content) != "sqlite-data" {
+ t.Fatalf("unexpected restored content: %s", string(content))
+ }
+}
diff --git a/server/internal/backup/temp_files.go b/server/internal/backup/temp_files.go
new file mode 100644
index 0000000..db77fc2
--- /dev/null
+++ b/server/internal/backup/temp_files.go
@@ -0,0 +1,64 @@
+package backup
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "regexp"
+ "strings"
+ "time"
+)
+
+// fileNameCleaner matches any run of characters unsafe in a file name;
+// sanitizeTaskName replaces matches with "-".
+var fileNameCleaner = regexp.MustCompile(`[^a-zA-Z0-9._-]+`)
+
+// EnsureTempRoot creates (if needed) and returns the shared staging root
+// directory used for backup temp files.
+func EnsureTempRoot() (string, error) {
+ root := filepath.Join(os.TempDir(), "backupx")
+ if mkErr := os.MkdirAll(root, 0o755); mkErr != nil {
+ return "", fmt.Errorf("create backup temp root: %w", mkErr)
+ }
+ return root, nil
+}
+
+// CreateTaskTempDir creates a per-run staging directory under the temp root,
+// named from the sanitized task name and the UTC start timestamp.
+func CreateTaskTempDir(taskName string, startedAt time.Time) (string, error) {
+ root, err := EnsureTempRoot()
+ if err != nil {
+ return "", err
+ }
+ base := sanitizeTaskName(taskName)
+ if base == "" {
+ base = "backup"
+ }
+ dir := filepath.Join(root, base+"_"+startedAt.UTC().Format("20060102_150405"))
+ if mkErr := os.MkdirAll(dir, 0o755); mkErr != nil {
+ return "", fmt.Errorf("create task temp dir: %w", mkErr)
+ }
+ return dir, nil
+}
+
+// BuildArtifactName composes "<name>_<UTC timestamp><.ext>", defaulting the
+// name to "backup" and dot-prefixing the extension when needed.
+func BuildArtifactName(taskName string, startedAt time.Time, extension string) string {
+ base := sanitizeTaskName(taskName)
+ if base == "" {
+ base = "backup"
+ }
+ suffix := strings.TrimSpace(extension)
+ if suffix != "" && !strings.HasPrefix(suffix, ".") {
+ suffix = "." + suffix
+ }
+ return base + "_" + startedAt.UTC().Format("20060102_150405") + suffix
+}
+
+// BuildStorageKey builds the slash-separated object key
+// "BackupX/<type>/<yymmdd>/<fileName>", defaulting the type to "file".
+func BuildStorageKey(backupType string, startedAt time.Time, fileName string) string {
+ category := strings.ToLower(strings.TrimSpace(backupType))
+ if category == "" {
+ category = "file"
+ }
+ day := startedAt.UTC().Format("060102")
+ return filepath.ToSlash(filepath.Join("BackupX", category, day, fileName))
+}
+
+// sanitizeTaskName lowercases the name, converts spaces and unsafe character
+// runs to "-", and strips leading/trailing separator characters.
+func sanitizeTaskName(value string) string {
+ cleaned := strings.ToLower(strings.TrimSpace(value))
+ cleaned = strings.ReplaceAll(cleaned, " ", "-")
+ cleaned = fileNameCleaner.ReplaceAllString(cleaned, "-")
+ return strings.Trim(cleaned, "-._")
+}
diff --git a/server/internal/backup/types.go b/server/internal/backup/types.go
new file mode 100644
index 0000000..27bffe0
--- /dev/null
+++ b/server/internal/backup/types.go
@@ -0,0 +1,73 @@
+package backup
+
+import (
+ "context"
+ "time"
+)
+
+// DatabaseSpec carries the database-specific inputs for a task: network
+// credentials for server databases, or Path for file-based engines (the
+// SQLite runner reads only Path).
+type DatabaseSpec struct {
+ Host string
+ Port int
+ User string
+ Password string
+ Names []string // databases to back up — semantics per runner; confirm
+ Path string // on-disk database file, used by SQLiteRunner
+}
+
+// TaskSpec is the runner-facing description of a single backup run,
+// combining the persisted task configuration with per-run fields.
+type TaskSpec struct {
+ ID uint
+ Name string
+ Type string
+ SourcePath string
+ ExcludePatterns []string
+ Database DatabaseSpec
+ StorageTargetID uint
+ StorageTargetType string
+ Compression string
+ Encrypt bool
+ RetentionDays int
+ MaxBackups int
+ StartedAt time.Time // run start, used in artifact/storage-key naming
+ TempDir string // optional staging directory override
+}
+
+// RunResult reports where a runner left the produced artifact.
+type RunResult struct {
+ ArtifactPath string // absolute path of the produced artifact
+ FileName string // base name of the artifact
+ TempDir string // staging directory of this run — cleanup by caller; confirm
+ Size int64
+ StorageKey string
+}
+
+// LogEvent is one log line of a backup run as serialized to clients (SSE).
+type LogEvent struct {
+ RecordID uint `json:"recordId"`
+ Sequence int64 `json:"sequence"`
+ Level string `json:"level"`
+ Message string `json:"message"`
+ Timestamp time.Time `json:"timestamp"`
+ Completed bool `json:"completed"` // true on the final event of a run
+ Status string `json:"status"`
+}
+
+// LogWriter receives human-readable log lines from a running backup.
+type LogWriter interface {
+ WriteLine(message string)
+}
+
+// LogSink is a leveled, printf-style logging interface.
+type LogSink interface {
+ Infof(format string, args ...any)
+ Warnf(format string, args ...any)
+ Errorf(format string, args ...any)
+}
+
+// NopLogWriter satisfies both LogWriter and LogSink while discarding output.
+type NopLogWriter struct{}
+
+func (NopLogWriter) WriteLine(string) {}
+func (NopLogWriter) Infof(string, ...any) {}
+func (NopLogWriter) Warnf(string, ...any) {}
+func (NopLogWriter) Errorf(string, ...any) {}
+
+// BackupRunner implements one backup strategy (e.g. sqlite); Type names the
+// task type it handles, Run produces an artifact, Restore applies one.
+type BackupRunner interface {
+ Type() string
+ Run(ctx context.Context, task TaskSpec, writer LogWriter) (*RunResult, error)
+ Restore(ctx context.Context, task TaskSpec, artifactPath string, writer LogWriter) error
+}
diff --git a/server/internal/config/config.go b/server/internal/config/config.go
new file mode 100644
index 0000000..d6025fb
--- /dev/null
+++ b/server/internal/config/config.go
@@ -0,0 +1,143 @@
+package config
+
+import (
+ "fmt"
+ "strings"
+ "time"
+
+ "github.com/spf13/viper"
+)
+
+// Config is the root configuration tree, decoded by viper from YAML files
+// and BACKUPX_* environment variables.
+type Config struct {
+ Server ServerConfig `mapstructure:"server"`
+ Database DatabaseConfig `mapstructure:"database"`
+ Security SecurityConfig `mapstructure:"security"`
+ Backup BackupConfig `mapstructure:"backup"`
+ Log LogConfig `mapstructure:"log"`
+}
+
+// ServerConfig controls the HTTP listener and gin run mode.
+type ServerConfig struct {
+ Host string `mapstructure:"host"`
+ Port int `mapstructure:"port"`
+ Mode string `mapstructure:"mode"`
+}
+
+// DatabaseConfig locates the application's SQLite database file.
+type DatabaseConfig struct {
+ Path string `mapstructure:"path"`
+}
+
+// SecurityConfig holds JWT signing/expiry and data-encryption settings.
+type SecurityConfig struct {
+ JWTSecret string `mapstructure:"jwt_secret"`
+ JWTExpire string `mapstructure:"jwt_expire"` // Go duration string, e.g. "24h"
+ EncryptionKey string `mapstructure:"encryption_key"`
+}
+
+// BackupConfig controls backup staging location and concurrency.
+type BackupConfig struct {
+ TempDir string `mapstructure:"temp_dir"`
+ MaxConcurrent int `mapstructure:"max_concurrent"`
+}
+
+// LogConfig configures file logging and rotation limits (max_size presumably
+// in MB and max_age in days — confirm against the logger wiring).
+type LogConfig struct {
+ Level string `mapstructure:"level"`
+ File string `mapstructure:"file"`
+ MaxSize int `mapstructure:"max_size"`
+ MaxBackups int `mapstructure:"max_backups"`
+ MaxAge int `mapstructure:"max_age"`
+}
+
+// Load reads configuration from an explicit file path, or, when configPath is
+// empty, searches ., ./server and /etc/backupx for a "config" YAML file.
+// BACKUPX_* environment variables override file values, and unset fields fall
+// back to built-in defaults.
+func Load(configPath string) (Config, error) {
+ v := viper.New()
+ applyDefaults(v)
+ v.SetConfigType("yaml")
+ v.SetEnvPrefix("BACKUPX")
+ v.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
+ v.AutomaticEnv()
+
+ if configPath != "" {
+ // An explicitly requested file must be readable: any error is fatal.
+ v.SetConfigFile(configPath)
+ if err := v.ReadInConfig(); err != nil {
+ return Config{}, fmt.Errorf("read config: %w", err)
+ }
+ } else {
+ v.SetConfigName("config")
+ v.AddConfigPath(".")
+ v.AddConfigPath("./server")
+ v.AddConfigPath("/etc/backupx")
+ // In search mode a missing config file is fine; defaults apply.
+ if err := v.ReadInConfig(); err != nil {
+ if _, ok := err.(viper.ConfigFileNotFoundError); !ok {
+ return Config{}, fmt.Errorf("read config: %w", err)
+ }
+ }
+ }
+
+ var cfg Config
+ if err := v.Unmarshal(&cfg); err != nil {
+ return Config{}, fmt.Errorf("decode config: %w", err)
+ }
+
+ // Re-apply defaults after unmarshalling: this also restores a default when
+ // a config file explicitly sets an empty/zero value.
+ if cfg.Server.Host == "" {
+ cfg.Server.Host = "0.0.0.0"
+ }
+ if cfg.Server.Port == 0 {
+ cfg.Server.Port = 8340
+ }
+ if cfg.Server.Mode == "" {
+ cfg.Server.Mode = "release"
+ }
+ if cfg.Database.Path == "" {
+ cfg.Database.Path = "./data/backupx.db"
+ }
+ if cfg.Security.JWTExpire == "" {
+ cfg.Security.JWTExpire = "24h"
+ }
+ if cfg.Backup.TempDir == "" {
+ cfg.Backup.TempDir = "/tmp/backupx"
+ }
+ if cfg.Backup.MaxConcurrent <= 0 {
+ cfg.Backup.MaxConcurrent = 2
+ }
+ if cfg.Log.Level == "" {
+ cfg.Log.Level = "info"
+ }
+ if cfg.Log.File == "" {
+ cfg.Log.File = "./data/backupx.log"
+ }
+ if cfg.Log.MaxSize <= 0 {
+ cfg.Log.MaxSize = 100
+ }
+ if cfg.Log.MaxBackups <= 0 {
+ cfg.Log.MaxBackups = 3
+ }
+ if cfg.Log.MaxAge <= 0 {
+ cfg.Log.MaxAge = 30
+ }
+
+ return cfg, nil
+}
+
+// MustJWTDuration parses the configured JWT expiry, silently falling back to
+// 24h when the value is not a valid Go duration string.
+func MustJWTDuration(cfg SecurityConfig) time.Duration {
+ if parsed, err := time.ParseDuration(cfg.JWTExpire); err == nil {
+ return parsed
+ }
+ return 24 * time.Hour
+}
+
+// Address renders the configured listen address as "host:port".
+func (c Config) Address() string {
+ server := c.Server
+ return fmt.Sprintf("%s:%d", server.Host, server.Port)
+}
+
+// applyDefaults seeds viper with the built-in defaults; Load re-checks the
+// same values after unmarshalling.
+func applyDefaults(v *viper.Viper) {
+ v.SetDefault("server.host", "0.0.0.0")
+ v.SetDefault("server.port", 8340)
+ v.SetDefault("server.mode", "release")
+ v.SetDefault("database.path", "./data/backupx.db")
+ v.SetDefault("security.jwt_expire", "24h")
+ v.SetDefault("backup.temp_dir", "/tmp/backupx")
+ v.SetDefault("backup.max_concurrent", 2)
+ v.SetDefault("log.level", "info")
+ v.SetDefault("log.file", "./data/backupx.log")
+ v.SetDefault("log.max_size", 100)
+ v.SetDefault("log.max_backups", 3)
+ v.SetDefault("log.max_age", 30)
+}
diff --git a/server/internal/config/config_test.go b/server/internal/config/config_test.go
new file mode 100644
index 0000000..bf40354
--- /dev/null
+++ b/server/internal/config/config_test.go
@@ -0,0 +1,20 @@
+package config
+
+import "testing"
+
+// TestLoadUsesDefaultsWithoutConfigFile checks the built-in defaults.
+//
+// NOTE(review): Load("") still searches ., ./server and /etc/backupx and
+// reads BACKUPX_* env vars, so a stray config file or env override in the
+// test environment can make this test fail.
+func TestLoadUsesDefaultsWithoutConfigFile(t *testing.T) {
+ cfg, err := Load("")
+ if err != nil {
+ t.Fatalf("Load returned error: %v", err)
+ }
+
+ if cfg.Server.Host != "0.0.0.0" {
+ t.Fatalf("expected default host, got %s", cfg.Server.Host)
+ }
+ if cfg.Server.Port != 8340 {
+ t.Fatalf("expected default port, got %d", cfg.Server.Port)
+ }
+ if cfg.Database.Path != "./data/backupx.db" {
+ t.Fatalf("expected default database path, got %s", cfg.Database.Path)
+ }
+}
diff --git a/server/internal/database/database.go b/server/internal/database/database.go
new file mode 100644
index 0000000..dcf1032
--- /dev/null
+++ b/server/internal/database/database.go
@@ -0,0 +1,32 @@
+package database
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+
+ "backupx/server/internal/config"
+ "backupx/server/internal/model"
+ "github.com/glebarez/sqlite"
+ "go.uber.org/zap"
+ "gorm.io/gorm"
+ gormlogger "gorm.io/gorm/logger"
+)
+
+// Open ensures the SQLite file's directory exists, opens the database, and
+// auto-migrates the schema for every application model.
+func Open(cfg config.DatabaseConfig, logger *zap.Logger) (*gorm.DB, error) {
+ if err := os.MkdirAll(filepath.Dir(cfg.Path), 0o755); err != nil {
+ return nil, fmt.Errorf("create database dir: %w", err)
+ }
+
+ // GORM's own SQL logging is silenced; application logging goes through zap.
+ db, err := gorm.Open(sqlite.Open(cfg.Path), &gorm.Config{Logger: gormlogger.Default.LogMode(gormlogger.Silent)})
+ if err != nil {
+ return nil, fmt.Errorf("open sqlite: %w", err)
+ }
+
+ if err := db.AutoMigrate(&model.User{}, &model.SystemConfig{}, &model.StorageTarget{}, &model.OAuthSession{}, &model.BackupTask{}, &model.BackupRecord{}, &model.Notification{}, &model.Node{}); err != nil {
+ return nil, fmt.Errorf("migrate schema: %w", err)
+ }
+
+ logger.Info("database initialized", zap.String("path", cfg.Path))
+ return db, nil
+}
diff --git a/server/internal/http/auth_handler.go b/server/internal/http/auth_handler.go
new file mode 100644
index 0000000..6b25b73
--- /dev/null
+++ b/server/internal/http/auth_handler.go
@@ -0,0 +1,91 @@
+package http
+
+import (
+ "backupx/server/internal/apperror"
+ "backupx/server/internal/service"
+ "backupx/server/pkg/response"
+ "github.com/gin-gonic/gin"
+)
+
+// AuthHandler exposes authentication endpoints (setup, login, profile,
+// password change, logout) backed by AuthService.
+type AuthHandler struct {
+ authService *service.AuthService
+}
+
+// NewAuthHandler wires an AuthHandler to its service.
+func NewAuthHandler(authService *service.AuthService) *AuthHandler {
+ return &AuthHandler{authService: authService}
+}
+
+// SetupStatus reports whether first-run initialization has been completed.
+func (h *AuthHandler) SetupStatus(c *gin.Context) {
+ ctx := c.Request.Context()
+ initialized, err := h.authService.SetupStatus(ctx)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, gin.H{"initialized": initialized})
+}
+
+// Setup performs first-run initialization from the JSON request body.
+func (h *AuthHandler) Setup(c *gin.Context) {
+ var input service.SetupInput
+ if bindErr := c.ShouldBindJSON(&input); bindErr != nil {
+ response.Error(c, apperror.BadRequest("AUTH_SETUP_INVALID", "初始化参数不合法", bindErr))
+ return
+ }
+ payload, err := h.authService.Setup(c.Request.Context(), input)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, payload)
+}
+
+// Login authenticates the JSON credentials and returns a token payload; the
+// per-client key from ClientKey(c) is forwarded to the service.
+func (h *AuthHandler) Login(c *gin.Context) {
+ var input service.LoginInput
+ if bindErr := c.ShouldBindJSON(&input); bindErr != nil {
+ response.Error(c, apperror.BadRequest("AUTH_LOGIN_INVALID", "登录参数不合法", bindErr))
+ return
+ }
+ payload, err := h.authService.Login(c.Request.Context(), input, ClientKey(c))
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, payload)
+}
+
+// Profile returns the authenticated user's profile, resolved from the
+// subject stored in the request context.
+func (h *AuthHandler) Profile(c *gin.Context) {
+ rawSubject, _ := c.Get(contextUserSubjectKey)
+ subject, err := service.SubjectFromContextValue(rawSubject)
+ if err != nil {
+ response.Error(c, apperror.Unauthorized("AUTH_INVALID_SUBJECT", "无效登录态", err))
+ return
+ }
+ user, err := h.authService.GetCurrentUser(c.Request.Context(), subject)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, user)
+}
+
+// ChangePassword updates the authenticated user's password from the JSON body.
+func (h *AuthHandler) ChangePassword(c *gin.Context) {
+ rawSubject, _ := c.Get(contextUserSubjectKey)
+ subject, err := service.SubjectFromContextValue(rawSubject)
+ if err != nil {
+ response.Error(c, apperror.Unauthorized("AUTH_INVALID_SUBJECT", "无效登录态", err))
+ return
+ }
+ var input service.ChangePasswordInput
+ if bindErr := c.ShouldBindJSON(&input); bindErr != nil {
+ response.Error(c, apperror.BadRequest("AUTH_PASSWORD_INVALID", "参数不合法", bindErr))
+ return
+ }
+ if err := h.authService.ChangePassword(c.Request.Context(), subject, input); err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, gin.H{"changed": true})
+}
+
+// Logout acknowledges logout without any server-side work: no token is
+// revoked here, the client simply discards its credential.
+func (h *AuthHandler) Logout(c *gin.Context) {
+ response.Success(c, gin.H{"loggedOut": true})
+}
diff --git a/server/internal/http/backup_record_handler.go b/server/internal/http/backup_record_handler.go
new file mode 100644
index 0000000..354817b
--- /dev/null
+++ b/server/internal/http/backup_record_handler.go
@@ -0,0 +1,189 @@
+package http
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "strconv"
+ "strings"
+ "time"
+
+ "backupx/server/internal/apperror"
+ "backupx/server/internal/backup"
+ "backupx/server/internal/service"
+ "backupx/server/pkg/response"
+ "github.com/gin-gonic/gin"
+)
+
+// BackupRecordHandler exposes backup-record endpoints: listing, detail, SSE
+// log streaming, artifact download, restore and deletion.
+type BackupRecordHandler struct {
+ service *service.BackupRecordService
+}
+
+// NewBackupRecordHandler wires a BackupRecordHandler to its service.
+func NewBackupRecordHandler(recordService *service.BackupRecordService) *BackupRecordHandler {
+ return &BackupRecordHandler{service: recordService}
+}
+
+// List returns backup records matching the query-string filter.
+func (h *BackupRecordHandler) List(c *gin.Context) {
+ filter, filterErr := buildRecordFilter(c)
+ if filterErr != nil {
+ response.Error(c, filterErr)
+ return
+ }
+ records, err := h.service.List(c.Request.Context(), filter)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, records)
+}
+
+// Get returns one backup record identified by the path id.
+func (h *BackupRecordHandler) Get(c *gin.Context) {
+ id, ok := parseUintParam(c, "id")
+ if !ok {
+ return
+ }
+ record, err := h.service.Get(c.Request.Context(), id)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, record)
+}
+
+// StreamLogs replays a record's buffered log events and, while the run is
+// still "running", streams new events over Server-Sent Events until the run
+// completes, the channel closes, or the client disconnects.
+func (h *BackupRecordHandler) StreamLogs(c *gin.Context) {
+ id, ok := parseUintParam(c, "id")
+ if !ok {
+ return
+ }
+ detail, err := h.service.Get(c.Request.Context(), id)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ events := detail.LogEvents
+ completed := detail.Status != "running"
+ // Subscription is opened before the replay; the 64-slot buffer absorbs
+ // events emitted while history is being written out.
+ channel, cancel, err := h.service.SubscribeLogs(c.Request.Context(), id, 64)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ defer cancel()
+ c.Writer.Header().Set("Content-Type", "text/event-stream")
+ c.Writer.Header().Set("Cache-Control", "no-cache")
+ c.Writer.Header().Set("Connection", "keep-alive")
+ // SSE needs a flush after every event; fail if the writer cannot flush.
+ flusher, ok := c.Writer.(interface{ Flush() })
+ if !ok {
+ response.Error(c, apperror.Internal("BACKUP_RECORD_STREAM_UNSUPPORTED", "当前连接不支持日志流", nil))
+ return
+ }
+ // Replay buffered history first so the client sees the full log.
+ for _, event := range events {
+ if err := writeSSEEvent(c.Writer, event); err != nil {
+ return
+ }
+ flusher.Flush()
+ }
+ if completed {
+ return
+ }
+ // Live phase: forward new events until completion or disconnect.
+ for {
+ select {
+ case <-c.Request.Context().Done():
+ return
+ case event, ok := <-channel:
+ if !ok {
+ return
+ }
+ if err := writeSSEEvent(c.Writer, event); err != nil {
+ return
+ }
+ flusher.Flush()
+ if event.Completed {
+ return
+ }
+ }
+ }
+}
+
+// Download streams a stored backup artifact to the client as an attachment.
+func (h *BackupRecordHandler) Download(c *gin.Context) {
+ id, ok := parseUintParam(c, "id")
+ if !ok {
+ return
+ }
+ result, err := h.service.Download(c.Request.Context(), id)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ defer result.Reader.Close()
+ c.Header("Content-Disposition", fmt.Sprintf("attachment; filename=%q", result.FileName))
+ c.Header("Content-Type", "application/octet-stream")
+ // Copy errors are deliberately ignored: headers are already written, so
+ // the only failure mode here is a client disconnect mid-stream.
+ _, _ = io.Copy(c.Writer, result.Reader)
+}
+
+// Restore triggers a restore from the backup record identified by the path id.
+func (h *BackupRecordHandler) Restore(c *gin.Context) {
+ id, ok := parseUintParam(c, "id")
+ if !ok {
+ return
+ }
+ ctx := c.Request.Context()
+ if err := h.service.Restore(ctx, id); err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, gin.H{"restored": true})
+}
+
+// Delete removes the backup record identified by the path id.
+func (h *BackupRecordHandler) Delete(c *gin.Context) {
+ id, ok := parseUintParam(c, "id")
+ if !ok {
+ return
+ }
+ ctx := c.Request.Context()
+ if err := h.service.Delete(ctx, id); err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, gin.H{"deleted": true})
+}
+
+// buildRecordFilter parses the taskId/status/dateFrom/dateTo query parameters
+// into a BackupRecordListInput, validating number and RFC3339 formats.
+func buildRecordFilter(c *gin.Context) (service.BackupRecordListInput, error) {
+ var filter service.BackupRecordListInput
+ if raw := strings.TrimSpace(c.Query("taskId")); raw != "" {
+ taskID, ok := parseUintString(raw)
+ if !ok {
+ return filter, apperror.BadRequest("BACKUP_RECORD_FILTER_INVALID", "taskId 不合法", nil)
+ }
+ filter.TaskID = &taskID
+ }
+ filter.Status = strings.TrimSpace(c.Query("status"))
+ if raw := strings.TrimSpace(c.Query("dateFrom")); raw != "" {
+ from, err := time.Parse(time.RFC3339, raw)
+ if err != nil {
+ return filter, apperror.BadRequest("BACKUP_RECORD_FILTER_INVALID", "dateFrom 必须为 RFC3339 时间格式", err)
+ }
+ filter.DateFrom = &from
+ }
+ if raw := strings.TrimSpace(c.Query("dateTo")); raw != "" {
+ to, err := time.Parse(time.RFC3339, raw)
+ if err != nil {
+ return filter, apperror.BadRequest("BACKUP_RECORD_FILTER_INVALID", "dateTo 必须为 RFC3339 时间格式", err)
+ }
+ filter.DateTo = &to
+ }
+ return filter, nil
+}
+
+// writeSSEEvent serializes a log event as JSON and writes it to the stream
+// as a Server-Sent Events "log" event.
+func writeSSEEvent(writer io.Writer, event backup.LogEvent) error {
+ encoded, err := json.Marshal(event)
+ if err != nil {
+ return err
+ }
+ if _, writeErr := fmt.Fprintf(writer, "event: log\ndata: %s\n\n", encoded); writeErr != nil {
+ return writeErr
+ }
+ return nil
+}
+
+// parseUintString parses a trimmed decimal string into a uint. It parses at
+// the platform's uint width (strconv.IntSize): the original parsed 64 bits
+// and the uint(...) conversion silently truncated on 32-bit builds.
+func parseUintString(value string) (uint, bool) {
+ parsed, err := strconv.ParseUint(strings.TrimSpace(value), 10, strconv.IntSize)
+ if err != nil {
+ return 0, false
+ }
+ return uint(parsed), true
+}
diff --git a/server/internal/http/backup_run_handler.go b/server/internal/http/backup_run_handler.go
new file mode 100644
index 0000000..8fc94dc
--- /dev/null
+++ b/server/internal/http/backup_run_handler.go
@@ -0,0 +1,28 @@
+package http
+
+import (
+ "backupx/server/internal/service"
+ "backupx/server/pkg/response"
+ "github.com/gin-gonic/gin"
+)
+
+// BackupRunHandler exposes the endpoint that executes a backup task on demand.
+type BackupRunHandler struct {
+ service *service.BackupExecutionService
+}
+
+// NewBackupRunHandler wires a BackupRunHandler to its execution service.
+func NewBackupRunHandler(executionService *service.BackupExecutionService) *BackupRunHandler {
+ return &BackupRunHandler{service: executionService}
+}
+
+// Run executes the backup task identified by the path id and returns the
+// created run record.
+func (h *BackupRunHandler) Run(c *gin.Context) {
+ id, ok := parseUintParam(c, "id")
+ if !ok {
+ return
+ }
+ started, err := h.service.RunTaskByID(c.Request.Context(), id)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, started)
+}
diff --git a/server/internal/http/backup_task_handler.go b/server/internal/http/backup_task_handler.go
new file mode 100644
index 0000000..596e5c7
--- /dev/null
+++ b/server/internal/http/backup_task_handler.go
@@ -0,0 +1,109 @@
+package http
+
+import (
+ "backupx/server/internal/apperror"
+ "backupx/server/internal/service"
+ "backupx/server/pkg/response"
+ "github.com/gin-gonic/gin"
+)
+
+// BackupTaskHandler exposes CRUD and enable/disable endpoints for backup tasks.
+type BackupTaskHandler struct {
+ service *service.BackupTaskService
+}
+
+// NewBackupTaskHandler wires a BackupTaskHandler to its service.
+func NewBackupTaskHandler(taskService *service.BackupTaskService) *BackupTaskHandler {
+ return &BackupTaskHandler{service: taskService}
+}
+
+// List returns every configured backup task.
+func (h *BackupTaskHandler) List(c *gin.Context) {
+ tasks, err := h.service.List(c.Request.Context())
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, tasks)
+}
+
+// Get returns one backup task identified by the path id.
+func (h *BackupTaskHandler) Get(c *gin.Context) {
+ id, ok := parseUintParam(c, "id")
+ if !ok {
+ return
+ }
+ task, err := h.service.Get(c.Request.Context(), id)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, task)
+}
+
+// Create validates the JSON body and creates a new backup task.
+func (h *BackupTaskHandler) Create(c *gin.Context) {
+ var input service.BackupTaskUpsertInput
+ if bindErr := c.ShouldBindJSON(&input); bindErr != nil {
+ response.Error(c, apperror.BadRequest("BACKUP_TASK_INVALID", "备份任务参数不合法", bindErr))
+ return
+ }
+ created, err := h.service.Create(c.Request.Context(), input)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, created)
+}
+
+// Update validates the JSON body and updates the task with the path id.
+func (h *BackupTaskHandler) Update(c *gin.Context) {
+ id, ok := parseUintParam(c, "id")
+ if !ok {
+ return
+ }
+ var input service.BackupTaskUpsertInput
+ if bindErr := c.ShouldBindJSON(&input); bindErr != nil {
+ response.Error(c, apperror.BadRequest("BACKUP_TASK_INVALID", "备份任务参数不合法", bindErr))
+ return
+ }
+ updated, err := h.service.Update(c.Request.Context(), id, input)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, updated)
+}
+
+// Delete removes the backup task identified by the path id.
+func (h *BackupTaskHandler) Delete(c *gin.Context) {
+ id, ok := parseUintParam(c, "id")
+ if !ok {
+ return
+ }
+ ctx := c.Request.Context()
+ if err := h.service.Delete(ctx, id); err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, gin.H{"deleted": true})
+}
+
+// Toggle flips or sets a task's enabled flag: an empty request body toggles
+// the current state, a body carrying "enabled" sets it explicitly.
+func (h *BackupTaskHandler) Toggle(c *gin.Context) {
+ id, ok := parseUintParam(c, "id")
+ if !ok {
+ return
+ }
+ var input service.BackupTaskToggleInput
+ // An empty body binds with io.EOF, which is treated as "no explicit value".
+ // NOTE(review): matching the error by string is fragile; prefer
+ // errors.Is(err, io.EOF) in a follow-up.
+ if err := c.ShouldBindJSON(&input); err != nil && err.Error() != "EOF" {
+ response.Error(c, apperror.BadRequest("BACKUP_TASK_TOGGLE_INVALID", "备份任务启停参数不合法", err))
+ return
+ }
+ current, err := h.service.Get(c.Request.Context(), id)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ // Default to inverting the stored state when no explicit value was sent.
+ enabled := !current.Enabled
+ if input.Enabled != nil {
+ enabled = *input.Enabled
+ }
+ // NOTE(review): this read-then-write is not atomic; concurrent toggles
+ // of the same task may race.
+ item, err := h.service.Toggle(c.Request.Context(), id, enabled)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, item)
+}
diff --git a/server/internal/http/context.go b/server/internal/http/context.go
new file mode 100644
index 0000000..4acbd2d
--- /dev/null
+++ b/server/internal/http/context.go
@@ -0,0 +1,3 @@
+package http
+
+// contextUserSubjectKey is the gin context key handlers read the
+// authenticated user's subject from — presumably set by the auth
+// middleware; confirm against the middleware wiring.
+const contextUserSubjectKey = "userSubject"
diff --git a/server/internal/http/dashboard_handler.go b/server/internal/http/dashboard_handler.go
new file mode 100644
index 0000000..d95eed3
--- /dev/null
+++ b/server/internal/http/dashboard_handler.go
@@ -0,0 +1,46 @@
+package http
+
+import (
+ "strconv"
+ "strings"
+
+ "backupx/server/internal/apperror"
+ "backupx/server/internal/service"
+ "backupx/server/pkg/response"
+ "github.com/gin-gonic/gin"
+)
+
// DashboardHandler exposes read-only dashboard endpoints (stats, timeline).
type DashboardHandler struct {
	service *service.DashboardService
}

// NewDashboardHandler wires a DashboardService into a handler instance.
func NewDashboardHandler(dashboardService *service.DashboardService) *DashboardHandler {
	return &DashboardHandler{service: dashboardService}
}
+
+func (h *DashboardHandler) Stats(c *gin.Context) {
+ payload, err := h.service.Stats(c.Request.Context())
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, payload)
+}
+
+func (h *DashboardHandler) Timeline(c *gin.Context) {
+ days := 30
+ if value := strings.TrimSpace(c.Query("days")); value != "" {
+ parsed, err := strconv.Atoi(value)
+ if err != nil {
+ response.Error(c, apperror.BadRequest("DASHBOARD_TIMELINE_INVALID", "days 必须为整数", err))
+ return
+ }
+ days = parsed
+ }
+ payload, err := h.service.Timeline(c.Request.Context(), days)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, payload)
+}
diff --git a/server/internal/http/middleware.go b/server/internal/http/middleware.go
new file mode 100644
index 0000000..9df1d64
--- /dev/null
+++ b/server/internal/http/middleware.go
@@ -0,0 +1,57 @@
+package http
+
+import (
+ stdhttp "net/http"
+ "strings"
+
+ "backupx/server/internal/apperror"
+ "backupx/server/internal/security"
+ "backupx/server/pkg/response"
+ "github.com/gin-gonic/gin"
+)
+
+// CORSMiddleware handles Cross-Origin Resource Sharing for the API.
+func CORSMiddleware() gin.HandlerFunc {
+ return func(c *gin.Context) {
+ c.Header("Access-Control-Allow-Origin", "*")
+ c.Header("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS")
+ c.Header("Access-Control-Allow-Headers", "Origin, Content-Type, Accept, Authorization")
+ c.Header("Access-Control-Max-Age", "86400")
+
+ if c.Request.Method == stdhttp.MethodOptions {
+ c.AbortWithStatus(stdhttp.StatusNoContent)
+ return
+ }
+ c.Next()
+ }
+}
+
+func AuthMiddleware(jwtManager *security.JWTManager) gin.HandlerFunc {
+ return func(c *gin.Context) {
+ header := strings.TrimSpace(c.GetHeader("Authorization"))
+ if !strings.HasPrefix(header, "Bearer ") {
+ response.Error(c, apperror.Unauthorized("AUTH_REQUIRED", "请先登录", nil))
+ c.Abort()
+ return
+ }
+
+ tokenString := strings.TrimSpace(strings.TrimPrefix(header, "Bearer "))
+ claims, err := jwtManager.Parse(tokenString)
+ if err != nil {
+ response.Error(c, apperror.Unauthorized("AUTH_INVALID_TOKEN", "登录状态已失效,请重新登录", err))
+ c.Abort()
+ return
+ }
+
+ c.Set(contextUserSubjectKey, claims.Subject)
+ c.Next()
+ }
+}
+
+func ClientKey(c *gin.Context) string {
+ ip := strings.TrimSpace(c.ClientIP())
+ if ip == "" {
+ return "unknown"
+ }
+ return ip
+}
diff --git a/server/internal/http/node_handler.go b/server/internal/http/node_handler.go
new file mode 100644
index 0000000..4f8c4aa
--- /dev/null
+++ b/server/internal/http/node_handler.go
@@ -0,0 +1,101 @@
+package http
+
import (
	stdhttp "net/http"
	"strconv"

	"backupx/server/internal/apperror"
	"backupx/server/internal/service"
	"backupx/server/pkg/response"
	"github.com/gin-gonic/gin"
)
+
// NodeHandler exposes CRUD, heartbeat and remote filesystem-browsing
// endpoints for agent nodes.
type NodeHandler struct {
	service *service.NodeService
}

// NewNodeHandler wires a NodeService into a handler instance.
func NewNodeHandler(service *service.NodeService) *NodeHandler {
	return &NodeHandler{service: service}
}
+
+func (h *NodeHandler) List(c *gin.Context) {
+ items, err := h.service.List(c.Request.Context())
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, items)
+}
+
+func (h *NodeHandler) Get(c *gin.Context) {
+ id, err := strconv.ParseUint(c.Param("id"), 10, 32)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ item, err := h.service.Get(c.Request.Context(), uint(id))
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, item)
+}
+
+func (h *NodeHandler) Create(c *gin.Context) {
+ var input service.NodeCreateInput
+ if err := c.ShouldBindJSON(&input); err != nil {
+ c.JSON(stdhttp.StatusBadRequest, gin.H{"code": "INVALID_INPUT", "message": err.Error()})
+ return
+ }
+ token, err := h.service.Create(c.Request.Context(), input)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, gin.H{"token": token})
+}
+
+func (h *NodeHandler) Delete(c *gin.Context) {
+ id, err := strconv.ParseUint(c.Param("id"), 10, 32)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ if err := h.service.Delete(c.Request.Context(), uint(id)); err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, nil)
+}
+
+func (h *NodeHandler) ListDirectory(c *gin.Context) {
+ id, err := strconv.ParseUint(c.Param("id"), 10, 32)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ path := c.DefaultQuery("path", "/")
+ entries, err := h.service.ListDirectory(c.Request.Context(), uint(id), path)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, entries)
+}
+
+func (h *NodeHandler) Heartbeat(c *gin.Context) {
+ var input struct {
+ Token string `json:"token" binding:"required"`
+ Hostname string `json:"hostname"`
+ IPAddress string `json:"ipAddress"`
+ AgentVersion string `json:"agentVersion"`
+ }
+ if err := c.ShouldBindJSON(&input); err != nil {
+ c.JSON(stdhttp.StatusBadRequest, gin.H{"code": "INVALID_INPUT", "message": err.Error()})
+ return
+ }
+ if err := h.service.Heartbeat(c.Request.Context(), input.Token, input.Hostname, input.IPAddress, input.AgentVersion); err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, gin.H{"status": "ok"})
+}
diff --git a/server/internal/http/notification_handler.go b/server/internal/http/notification_handler.go
new file mode 100644
index 0000000..b4f005c
--- /dev/null
+++ b/server/internal/http/notification_handler.go
@@ -0,0 +1,107 @@
+package http
+
+import (
+ "backupx/server/internal/apperror"
+ "backupx/server/internal/service"
+ "backupx/server/pkg/response"
+ "github.com/gin-gonic/gin"
+)
+
// NotificationHandler exposes CRUD and test-send endpoints for notification
// channel configurations.
type NotificationHandler struct {
	service *service.NotificationService
}

// NewNotificationHandler wires a NotificationService into a handler instance.
func NewNotificationHandler(notificationService *service.NotificationService) *NotificationHandler {
	return &NotificationHandler{service: notificationService}
}
+
+func (h *NotificationHandler) List(c *gin.Context) {
+ items, err := h.service.List(c.Request.Context())
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, items)
+}
+
+func (h *NotificationHandler) Get(c *gin.Context) {
+ id, ok := parseUintParam(c, "id")
+ if !ok {
+ return
+ }
+ item, err := h.service.Get(c.Request.Context(), id)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, item)
+}
+
+func (h *NotificationHandler) Create(c *gin.Context) {
+ var input service.NotificationUpsertInput
+ if err := c.ShouldBindJSON(&input); err != nil {
+ response.Error(c, apperror.BadRequest("NOTIFICATION_INVALID", "通知配置参数不合法", err))
+ return
+ }
+ item, err := h.service.Create(c.Request.Context(), input)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, item)
+}
+
+func (h *NotificationHandler) Update(c *gin.Context) {
+ id, ok := parseUintParam(c, "id")
+ if !ok {
+ return
+ }
+ var input service.NotificationUpsertInput
+ if err := c.ShouldBindJSON(&input); err != nil {
+ response.Error(c, apperror.BadRequest("NOTIFICATION_INVALID", "通知配置参数不合法", err))
+ return
+ }
+ item, err := h.service.Update(c.Request.Context(), id, input)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, item)
+}
+
+func (h *NotificationHandler) Delete(c *gin.Context) {
+ id, ok := parseUintParam(c, "id")
+ if !ok {
+ return
+ }
+ if err := h.service.Delete(c.Request.Context(), id); err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, gin.H{"deleted": true})
+}
+
+func (h *NotificationHandler) Test(c *gin.Context) {
+ var input service.NotificationUpsertInput
+ if err := c.ShouldBindJSON(&input); err != nil {
+ response.Error(c, apperror.BadRequest("NOTIFICATION_INVALID", "通知配置参数不合法", err))
+ return
+ }
+ if err := h.service.Test(c.Request.Context(), input); err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, gin.H{"success": true})
+}
+
+func (h *NotificationHandler) TestSaved(c *gin.Context) {
+ id, ok := parseUintParam(c, "id")
+ if !ok {
+ return
+ }
+ if err := h.service.TestSaved(c.Request.Context(), id); err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, gin.H{"success": true})
+}
diff --git a/server/internal/http/router.go b/server/internal/http/router.go
new file mode 100644
index 0000000..dd46381
--- /dev/null
+++ b/server/internal/http/router.go
@@ -0,0 +1,152 @@
+package http
+
+import (
+ "errors"
+ stdhttp "net/http"
+
+ "backupx/server/internal/apperror"
+ "backupx/server/internal/config"
+ "backupx/server/internal/repository"
+ "backupx/server/internal/security"
+ "backupx/server/internal/service"
+ "backupx/server/pkg/response"
+ "github.com/gin-gonic/gin"
+ "go.uber.org/zap"
+)
+
// RouterDependencies bundles the services, repositories and helpers the HTTP
// layer needs; main wires it up once at startup.
// NOTE(review): Version, UserRepository and SystemConfigRepo are carried here
// but never referenced inside NewRouter — confirm they are still required.
type RouterDependencies struct {
	Config                 config.Config
	Version                string
	Logger                 *zap.Logger
	AuthService            *service.AuthService
	SystemService          *service.SystemService
	StorageTargetService   *service.StorageTargetService
	BackupTaskService      *service.BackupTaskService
	BackupExecutionService *service.BackupExecutionService
	BackupRecordService    *service.BackupRecordService
	NotificationService    *service.NotificationService
	DashboardService       *service.DashboardService
	SettingsService        *service.SettingsService
	NodeService            *service.NodeService
	JWTManager             *security.JWTManager
	UserRepository         repository.UserRepository
	SystemConfigRepo       repository.SystemConfigRepository
}
+
// NewRouter assembles the gin engine: global middleware, every REST route
// group (all but auth setup/login and the agent heartbeat require a JWT),
// and a JSON 404 fallback for unknown routes.
func NewRouter(deps RouterDependencies) *gin.Engine {
	gin.SetMode(deps.Config.Server.Mode)
	engine := gin.New()
	engine.Use(gin.Recovery())
	engine.Use(CORSMiddleware())
	engine.Use(requestLogger(deps.Logger))

	authHandler := NewAuthHandler(deps.AuthService)
	systemHandler := NewSystemHandler(deps.SystemService)
	storageTargetHandler := NewStorageTargetHandler(deps.StorageTargetService)
	backupTaskHandler := NewBackupTaskHandler(deps.BackupTaskService)
	backupRunHandler := NewBackupRunHandler(deps.BackupExecutionService)
	backupRecordHandler := NewBackupRecordHandler(deps.BackupRecordService)
	notificationHandler := NewNotificationHandler(deps.NotificationService)
	dashboardHandler := NewDashboardHandler(deps.DashboardService)
	settingsHandler := NewSettingsHandler(deps.SettingsService)

	api := engine.Group("/api")
	{
		// Authentication: setup/login are public; the remaining auth
		// endpoints require a valid bearer token.
		auth := api.Group("/auth")
		{
			auth.GET("/setup/status", authHandler.SetupStatus)
			auth.POST("/setup", authHandler.Setup)
			auth.POST("/login", authHandler.Login)
			auth.POST("/logout", AuthMiddleware(deps.JWTManager), authHandler.Logout)
			auth.GET("/profile", AuthMiddleware(deps.JWTManager), authHandler.Profile)
			auth.PUT("/password", AuthMiddleware(deps.JWTManager), authHandler.ChangePassword)
		}

		// System information.
		system := api.Group("/system")
		system.Use(AuthMiddleware(deps.JWTManager))
		system.GET("/info", systemHandler.Info)

		// Storage targets, including the Google Drive OAuth endpoints.
		storageTargets := api.Group("/storage-targets")
		storageTargets.Use(AuthMiddleware(deps.JWTManager))
		storageTargets.GET("", storageTargetHandler.List)
		storageTargets.GET("/:id", storageTargetHandler.Get)
		storageTargets.POST("", storageTargetHandler.Create)
		storageTargets.PUT("/:id", storageTargetHandler.Update)
		storageTargets.DELETE("/:id", storageTargetHandler.Delete)
		storageTargets.POST("/test", storageTargetHandler.TestConnection)
		storageTargets.POST("/:id/test", storageTargetHandler.TestSavedConnection)
		storageTargets.GET("/:id/usage", storageTargetHandler.GetUsage)
		storageTargets.POST("/google-drive/auth-url", storageTargetHandler.StartGoogleDriveOAuth)
		storageTargets.POST("/google-drive/complete", storageTargetHandler.CompleteGoogleDriveOAuth)
		storageTargets.GET("/google-drive/callback", storageTargetHandler.HandleGoogleDriveCallback)
		storageTargets.GET("/:id/google-drive/profile", storageTargetHandler.GoogleDriveProfile)

		// Backup task CRUD, toggle and manual run.
		backupTasks := api.Group("/backup/tasks")
		backupTasks.Use(AuthMiddleware(deps.JWTManager))
		backupTasks.GET("", backupTaskHandler.List)
		backupTasks.GET("/:id", backupTaskHandler.Get)
		backupTasks.POST("", backupTaskHandler.Create)
		backupTasks.PUT("/:id", backupTaskHandler.Update)
		backupTasks.DELETE("/:id", backupTaskHandler.Delete)
		backupTasks.PUT("/:id/toggle", backupTaskHandler.Toggle)
		backupTasks.POST("/:id/run", backupRunHandler.Run)

		// Backup execution records: listing, log streaming, download, restore.
		backupRecords := api.Group("/backup/records")
		backupRecords.Use(AuthMiddleware(deps.JWTManager))
		backupRecords.GET("", backupRecordHandler.List)
		backupRecords.GET("/:id", backupRecordHandler.Get)
		backupRecords.GET("/:id/logs/stream", backupRecordHandler.StreamLogs)
		backupRecords.GET("/:id/download", backupRecordHandler.Download)
		backupRecords.POST("/:id/restore", backupRecordHandler.Restore)
		backupRecords.DELETE("/:id", backupRecordHandler.Delete)
		// Read-only dashboard aggregates.
		dashboard := api.Group("/dashboard")
		dashboard.Use(AuthMiddleware(deps.JWTManager))
		dashboard.GET("/stats", dashboardHandler.Stats)
		dashboard.GET("/timeline", dashboardHandler.Timeline)

		// Notification channel CRUD and test sends.
		notifications := api.Group("/notifications")
		notifications.Use(AuthMiddleware(deps.JWTManager))
		notifications.GET("", notificationHandler.List)
		notifications.GET("/:id", notificationHandler.Get)
		notifications.POST("", notificationHandler.Create)
		notifications.PUT("/:id", notificationHandler.Update)
		notifications.DELETE("/:id", notificationHandler.Delete)
		notifications.POST("/test", notificationHandler.Test)
		notifications.POST("/:id/test", notificationHandler.TestSaved)

		// Key/value application settings.
		settings := api.Group("/settings")
		settings.Use(AuthMiddleware(deps.JWTManager))
		settings.GET("", settingsHandler.Get)
		settings.PUT("", settingsHandler.Update)

		// Agent nodes and remote filesystem browsing.
		nodeHandler := NewNodeHandler(deps.NodeService)
		nodes := api.Group("/nodes")
		nodes.Use(AuthMiddleware(deps.JWTManager))
		nodes.GET("", nodeHandler.List)
		nodes.GET("/:id", nodeHandler.Get)
		nodes.POST("", nodeHandler.Create)
		nodes.DELETE("/:id", nodeHandler.Delete)
		nodes.GET("/:id/fs/list", nodeHandler.ListDirectory)

		// Agent heartbeat (public, token-authenticated)
		api.POST("/agent/heartbeat", nodeHandler.Heartbeat)
	}

	engine.NoRoute(func(c *gin.Context) {
		response.Error(c, apperror.New(stdhttp.StatusNotFound, "NOT_FOUND", "接口不存在", errors.New("route not found")))
	})

	return engine
}
+
+func requestLogger(logger *zap.Logger) gin.HandlerFunc {
+ return func(c *gin.Context) {
+ c.Next()
+ logger.Info("http request",
+ zap.String("method", c.Request.Method),
+ zap.String("path", c.Request.URL.Path),
+ zap.Int("status", c.Writer.Status()),
+ zap.String("client_ip", c.ClientIP()),
+ )
+ }
+}
diff --git a/server/internal/http/router_test.go b/server/internal/http/router_test.go
new file mode 100644
index 0000000..3520817
--- /dev/null
+++ b/server/internal/http/router_test.go
@@ -0,0 +1,94 @@
+package http
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "net/http"
+ "net/http/httptest"
+ "path/filepath"
+ "testing"
+ "time"
+
+ "backupx/server/internal/config"
+ "backupx/server/internal/database"
+ "backupx/server/internal/logger"
+ "backupx/server/internal/repository"
+ "backupx/server/internal/security"
+ "backupx/server/internal/service"
+)
+
// TestSetupLoginAndProfileFlow boots the full router against a temporary
// SQLite database and exercises the happy path end to end: initial admin
// setup via POST /api/auth/setup, then an authenticated GET
// /api/auth/profile using the JWT returned by setup.
func TestSetupLoginAndProfileFlow(t *testing.T) {
	tempDir := t.TempDir()
	cfg := config.Config{
		Server:   config.ServerConfig{Host: "127.0.0.1", Port: 8340, Mode: "test"},
		Database: config.DatabaseConfig{Path: filepath.Join(tempDir, "backupx.db")},
		Security: config.SecurityConfig{JWTExpire: "24h"},
		Log:      config.LogConfig{Level: "error"},
	}

	log, err := logger.New(cfg.Log)
	if err != nil {
		t.Fatalf("logger.New error: %v", err)
	}
	db, err := database.Open(cfg.Database, log)
	if err != nil {
		t.Fatalf("database.Open error: %v", err)
	}

	// Real repositories/services on top of the temp database; the JWT secret
	// is resolved the same way the production bootstrap does.
	userRepo := repository.NewUserRepository(db)
	systemConfigRepo := repository.NewSystemConfigRepository(db)
	resolved, err := service.ResolveSecurity(context.Background(), cfg.Security, systemConfigRepo)
	if err != nil {
		t.Fatalf("ResolveSecurity error: %v", err)
	}
	jwtManager := security.NewJWTManager(resolved.JWTSecret, time.Hour)
	authService := service.NewAuthService(userRepo, systemConfigRepo, jwtManager, security.NewLoginRateLimiter(5, time.Minute))
	systemService := service.NewSystemService(cfg, "test", time.Now().UTC())

	// Only the auth/system dependencies are supplied; the other route groups
	// are registered but not exercised by this test.
	router := NewRouter(RouterDependencies{
		Config:           cfg,
		Version:          "test",
		Logger:           log,
		AuthService:      authService,
		SystemService:    systemService,
		JWTManager:       jwtManager,
		UserRepository:   userRepo,
		SystemConfigRepo: systemConfigRepo,
	})

	setupBody, _ := json.Marshal(map[string]string{
		"username":    "admin",
		"password":    "password-123",
		"displayName": "Admin",
	})
	setupRequest := httptest.NewRequest(http.MethodPost, "/api/auth/setup", bytes.NewBuffer(setupBody))
	setupRequest.Header.Set("Content-Type", "application/json")
	setupRecorder := httptest.NewRecorder()
	router.ServeHTTP(setupRecorder, setupRequest)

	if setupRecorder.Code != http.StatusOK {
		t.Fatalf("expected setup 200, got %d", setupRecorder.Code)
	}

	// Setup must return a usable token inside the standard envelope.
	var setupResponse struct {
		Data struct {
			Token string `json:"token"`
		} `json:"data"`
	}
	if err := json.Unmarshal(setupRecorder.Body.Bytes(), &setupResponse); err != nil {
		t.Fatalf("unmarshal setup response: %v", err)
	}
	if setupResponse.Data.Token == "" {
		t.Fatalf("expected token in setup response")
	}

	// The freshly issued token must authenticate the profile endpoint.
	profileRequest := httptest.NewRequest(http.MethodGet, "/api/auth/profile", nil)
	profileRequest.Header.Set("Authorization", "Bearer "+setupResponse.Data.Token)
	profileRecorder := httptest.NewRecorder()
	router.ServeHTTP(profileRecorder, profileRequest)

	if profileRecorder.Code != http.StatusOK {
		t.Fatalf("expected profile 200, got %d", profileRecorder.Code)
	}
}
diff --git a/server/internal/http/settings_handler.go b/server/internal/http/settings_handler.go
new file mode 100644
index 0000000..42f233b
--- /dev/null
+++ b/server/internal/http/settings_handler.go
@@ -0,0 +1,39 @@
+package http
+
+import (
+ "backupx/server/internal/apperror"
+ "backupx/server/internal/service"
+ "backupx/server/pkg/response"
+ "github.com/gin-gonic/gin"
+)
+
// SettingsHandler exposes read/update endpoints for application settings.
type SettingsHandler struct {
	settingsService *service.SettingsService
}

// NewSettingsHandler wires a SettingsService into a handler instance.
func NewSettingsHandler(settingsService *service.SettingsService) *SettingsHandler {
	return &SettingsHandler{settingsService: settingsService}
}
+
+func (h *SettingsHandler) Get(c *gin.Context) {
+ settings, err := h.settingsService.GetAll(c.Request.Context())
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, settings)
+}
+
+func (h *SettingsHandler) Update(c *gin.Context) {
+ var input map[string]string
+ if err := c.ShouldBindJSON(&input); err != nil {
+ response.Error(c, apperror.BadRequest("SETTINGS_INVALID", "设置参数不合法", err))
+ return
+ }
+ settings, err := h.settingsService.Update(c.Request.Context(), input)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, settings)
+}
diff --git a/server/internal/http/storage_target_handler.go b/server/internal/http/storage_target_handler.go
new file mode 100644
index 0000000..0eb1ccf
--- /dev/null
+++ b/server/internal/http/storage_target_handler.go
@@ -0,0 +1,244 @@
+package http
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+
+ "backupx/server/internal/apperror"
+ "backupx/server/internal/service"
+ "backupx/server/pkg/response"
+ "github.com/gin-gonic/gin"
+)
+
// StorageTargetHandler exposes CRUD, connection-test and Google Drive OAuth
// endpoints for storage targets.
type StorageTargetHandler struct {
	service *service.StorageTargetService
}

// storageTargetGoogleDriveAuthRequest is the request body for starting a
// Google Drive OAuth flow. Credentials may be sent either as flat fields
// (clientId/clientSecret/folderId) or nested inside "config"; the handler
// prefers the nested values when both are present.
// NOTE(review): Type is bound from JSON but never read by the handler —
// confirm it is still needed.
type storageTargetGoogleDriveAuthRequest struct {
	TargetID     *uint          `json:"targetId"`
	Name         string         `json:"name"`
	Type         string         `json:"type"`
	Description  string         `json:"description"`
	Enabled      bool           `json:"enabled"`
	Config       map[string]any `json:"config"`
	ClientID     string         `json:"clientId"`
	ClientSecret string         `json:"clientSecret"`
	FolderID     string         `json:"folderId"`
}

// NewStorageTargetHandler wires a StorageTargetService into a handler.
func NewStorageTargetHandler(service *service.StorageTargetService) *StorageTargetHandler {
	return &StorageTargetHandler{service: service}
}
+
+func (h *StorageTargetHandler) List(c *gin.Context) {
+ items, err := h.service.List(c.Request.Context())
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, items)
+}
+
+func (h *StorageTargetHandler) Get(c *gin.Context) {
+ id, ok := parseUintParam(c, "id")
+ if !ok {
+ return
+ }
+ item, err := h.service.Get(c.Request.Context(), id)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, item)
+}
+
+func (h *StorageTargetHandler) Create(c *gin.Context) {
+ var input service.StorageTargetUpsertInput
+ if err := c.ShouldBindJSON(&input); err != nil {
+ response.Error(c, apperror.BadRequest("STORAGE_TARGET_INVALID", "存储目标参数不合法", err))
+ return
+ }
+ item, err := h.service.Create(c.Request.Context(), input)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, item)
+}
+
+func (h *StorageTargetHandler) Update(c *gin.Context) {
+ id, ok := parseUintParam(c, "id")
+ if !ok {
+ return
+ }
+ var input service.StorageTargetUpsertInput
+ if err := c.ShouldBindJSON(&input); err != nil {
+ response.Error(c, apperror.BadRequest("STORAGE_TARGET_INVALID", "存储目标参数不合法", err))
+ return
+ }
+ item, err := h.service.Update(c.Request.Context(), id, input)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, item)
+}
+
+func (h *StorageTargetHandler) Delete(c *gin.Context) {
+ id, ok := parseUintParam(c, "id")
+ if !ok {
+ return
+ }
+ if err := h.service.Delete(c.Request.Context(), id); err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, gin.H{"deleted": true})
+}
+
+func (h *StorageTargetHandler) TestConnection(c *gin.Context) {
+ var payload service.StorageTargetUpsertInput
+ if err := c.ShouldBindJSON(&payload); err != nil {
+ response.Error(c, apperror.BadRequest("STORAGE_TARGET_TEST_INVALID", "测试连接参数不合法", err))
+ return
+ }
+ if err := h.service.TestConnection(c.Request.Context(), service.StorageTargetTestInput{Payload: payload}); err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, gin.H{"success": true, "message": "连接成功"})
+}
+
+func (h *StorageTargetHandler) TestSavedConnection(c *gin.Context) {
+ id, ok := parseUintParam(c, "id")
+ if !ok {
+ return
+ }
+ if err := h.service.TestConnection(c.Request.Context(), service.StorageTargetTestInput{TargetID: &id}); err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, gin.H{"success": true, "message": "连接成功"})
+}
+
+func (h *StorageTargetHandler) StartGoogleDriveOAuth(c *gin.Context) {
+ var request storageTargetGoogleDriveAuthRequest
+ if err := c.ShouldBindJSON(&request); err != nil {
+ response.Error(c, apperror.BadRequest("STORAGE_GOOGLE_OAUTH_INVALID", "Google Drive 授权参数不合法", err))
+ return
+ }
+ input := service.GoogleDriveAuthStartInput{
+ TargetID: request.TargetID,
+ Name: strings.TrimSpace(request.Name),
+ Description: strings.TrimSpace(request.Description),
+ Enabled: request.Enabled,
+ ClientID: firstNonEmpty(asString(request.Config["clientId"]), request.ClientID),
+ ClientSecret: firstNonEmpty(asString(request.Config["clientSecret"]), request.ClientSecret),
+ FolderID: firstNonEmpty(asString(request.Config["folderId"]), request.FolderID),
+ }
+ result, err := h.service.StartGoogleDriveOAuth(c.Request.Context(), input, requestOrigin(c))
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, gin.H{"authUrl": result.AuthorizationURL})
+}
+
+func (h *StorageTargetHandler) CompleteGoogleDriveOAuth(c *gin.Context) {
+ var input service.GoogleDriveAuthCompleteInput
+ if err := c.ShouldBindJSON(&input); err != nil {
+ response.Error(c, apperror.BadRequest("STORAGE_GOOGLE_OAUTH_INVALID", "Google Drive 回调参数不合法", err))
+ return
+ }
+ item, err := h.service.CompleteGoogleDriveOAuth(c.Request.Context(), input)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, item)
+}
+
// HandleGoogleDriveCallback is the redirect endpoint Google calls after the
// consent screen.
// NOTE(review): user-denied consent ("error" query param) is reported as a
// 200 Success with success=false, while missing state/code is a 400 — confirm
// the frontend relies on this asymmetry before unifying.
// NOTE(review): the response is JSON even though a browser lands here —
// presumably the frontend opens this in a popup it then inspects; verify.
func (h *StorageTargetHandler) HandleGoogleDriveCallback(c *gin.Context) {
	if queryError := strings.TrimSpace(c.Query("error")); queryError != "" {
		response.Success(c, gin.H{"success": false, "message": queryError})
		return
	}
	input := service.GoogleDriveAuthCompleteInput{State: strings.TrimSpace(c.Query("state")), Code: strings.TrimSpace(c.Query("code"))}
	if input.State == "" || input.Code == "" {
		response.Error(c, apperror.BadRequest("STORAGE_GOOGLE_OAUTH_INVALID", "Google Drive 回调参数不合法", nil))
		return
	}
	item, err := h.service.CompleteGoogleDriveOAuth(c.Request.Context(), input)
	if err != nil {
		response.Error(c, err)
		return
	}
	response.Success(c, gin.H{"success": true, "message": "Google Drive 授权成功", "target": item})
}
+
+func (h *StorageTargetHandler) GoogleDriveProfile(c *gin.Context) {
+ id, ok := parseUintParam(c, "id")
+ if !ok {
+ return
+ }
+ profile, err := h.service.GoogleDriveProfile(c.Request.Context(), id)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, profile)
+}
+
+func parseUintParam(c *gin.Context, key string) (uint, bool) {
+ value := strings.TrimSpace(c.Param(key))
+ parsed, err := strconv.ParseUint(value, 10, 64)
+ if err != nil {
+ response.Error(c, apperror.BadRequest("INVALID_ID", fmt.Sprintf("参数 %s 不合法", key), err))
+ return 0, false
+ }
+ return uint(parsed), true
+}
+
+func requestOrigin(c *gin.Context) string {
+ origin := strings.TrimSpace(c.GetHeader("Origin"))
+ if origin != "" {
+ return origin
+ }
+ scheme := strings.TrimSpace(c.GetHeader("X-Forwarded-Proto"))
+ if scheme == "" {
+ if c.Request.TLS != nil {
+ scheme = "https"
+ } else {
+ scheme = "http"
+ }
+ }
+ return fmt.Sprintf("%s://%s", scheme, c.Request.Host)
+}
+
// asString coerces a JSON value to a trimmed string; non-strings yield "".
func asString(value any) string {
	if text, ok := value.(string); ok {
		return strings.TrimSpace(text)
	}
	return ""
}
+
// firstNonEmpty returns the first argument that is non-blank after trimming
// whitespace (trimmed), or "" when none qualifies.
func firstNonEmpty(values ...string) string {
	for _, value := range values {
		if trimmed := strings.TrimSpace(value); trimmed != "" {
			return trimmed
		}
	}
	return ""
}
+
+func (h *StorageTargetHandler) GetUsage(c *gin.Context) {
+ id, ok := parseUintParam(c, "id")
+ if !ok {
+ return
+ }
+ usage, err := h.service.GetUsage(c.Request.Context(), id)
+ if err != nil {
+ response.Error(c, err)
+ return
+ }
+ response.Success(c, usage)
+}
diff --git a/server/internal/http/system_handler.go b/server/internal/http/system_handler.go
new file mode 100644
index 0000000..d3cc78f
--- /dev/null
+++ b/server/internal/http/system_handler.go
@@ -0,0 +1,19 @@
+package http
+
+import (
+ "backupx/server/internal/service"
+ "backupx/server/pkg/response"
+ "github.com/gin-gonic/gin"
+)
+
// SystemHandler serves read-only system information.
type SystemHandler struct {
	systemService *service.SystemService
}

// NewSystemHandler wires a SystemService into a handler instance.
func NewSystemHandler(systemService *service.SystemService) *SystemHandler {
	return &SystemHandler{systemService: systemService}
}

// Info returns the payload produced by SystemService.GetInfo.
func (h *SystemHandler) Info(c *gin.Context) {
	response.Success(c, h.systemService.GetInfo(c.Request.Context()))
}
diff --git a/server/internal/httpapi/auth_handler.go b/server/internal/httpapi/auth_handler.go
new file mode 100644
index 0000000..60b5486
--- /dev/null
+++ b/server/internal/httpapi/auth_handler.go
@@ -0,0 +1,98 @@
+//go:build ignore
+
+package httpapi
+
+import (
+ "net/http"
+
+ "backupx/server/internal/service"
+ "backupx/server/pkg/response"
+ "github.com/gin-gonic/gin"
+ "go.uber.org/zap"
+)
+
// NOTE(review): this file is excluded from builds via the //go:build ignore
// tag above — it appears to be an earlier generation of the handlers now
// living in internal/http; consider deleting it to avoid drift.

// authHandler serves setup/login/profile endpoints (legacy httpapi layer).
type authHandler struct {
	service *service.AuthService
	logger  *zap.Logger
}

// setupRequest is the body for the one-time initial-admin setup.
type setupRequest struct {
	Username    string `json:"username" binding:"required,min=3,max=64"`
	Password    string `json:"password" binding:"required,min=8,max=128"`
	DisplayName string `json:"displayName" binding:"required,min=1,max=128"`
}

// loginRequest is the body for username/password login.
type loginRequest struct {
	Username string `json:"username" binding:"required,min=3,max=64"`
	Password string `json:"password" binding:"required,min=8,max=128"`
}

func newAuthHandler(service *service.AuthService, logger *zap.Logger) *authHandler {
	return &authHandler{service: service, logger: logger}
}

// registerRoutes mounts the public auth routes on router and the
// token-protected profile route on protected.
func (h *authHandler) registerRoutes(router gin.IRouter, protected gin.IRouter) {
	router.GET("/auth/setup/status", h.getSetupStatus)
	router.POST("/auth/setup", h.setup)
	router.POST("/auth/login", h.login)
	protected.GET("/auth/profile", h.profile)
}

// getSetupStatus reports whether the initial admin account exists.
func (h *authHandler) getSetupStatus(c *gin.Context) {
	initialized, err := h.service.GetSetupStatus(c.Request.Context())
	if err != nil {
		writeError(c, h.logger, err)
		return
	}
	response.Success(c, gin.H{"initialized": initialized})
}

// setup creates the initial admin account; responds 201 with the result.
func (h *authHandler) setup(c *gin.Context) {
	payload, err := bindJSON[setupRequest](c, h.logger)
	if err != nil {
		writeError(c, h.logger, err)
		return
	}
	result, err := h.service.Setup(c.Request.Context(), service.SetupInput{
		Username:    payload.Username,
		Password:    payload.Password,
		DisplayName: payload.DisplayName,
	})
	if err != nil {
		writeError(c, h.logger, err)
		return
	}
	c.JSON(http.StatusCreated, response.Envelope{Code: "OK", Message: "success", Data: result})
}

// login authenticates credentials; the client IP is forwarded for the
// login rate limiter.
func (h *authHandler) login(c *gin.Context) {
	payload, err := bindJSON[loginRequest](c, h.logger)
	if err != nil {
		writeError(c, h.logger, err)
		return
	}
	result, err := h.service.Login(c.Request.Context(), service.LoginInput{
		Username:   payload.Username,
		Password:   payload.Password,
		RemoteAddr: c.ClientIP(),
	})
	if err != nil {
		writeError(c, h.logger, err)
		return
	}
	response.Success(c, result)
}

// profile returns the currently authenticated user.
func (h *authHandler) profile(c *gin.Context) {
	userID, err := getUserID(c)
	if err != nil {
		response.Error(c, http.StatusUnauthorized, "AUTH_UNAUTHORIZED", "认证信息无效")
		return
	}
	result, err := h.service.GetCurrentUser(c.Request.Context(), userID)
	if err != nil {
		writeError(c, h.logger, err)
		return
	}
	response.Success(c, result)
}
diff --git a/server/internal/httpapi/context.go b/server/internal/httpapi/context.go
new file mode 100644
index 0000000..2a86fd8
--- /dev/null
+++ b/server/internal/httpapi/context.go
@@ -0,0 +1,23 @@
+//go:build ignore
+
+package httpapi
+
+import (
+ "fmt"
+
+ "github.com/gin-gonic/gin"
+)
+
// claimsContextKey is the gin-context key holding the authenticated claims.
// NOTE(review): this file is excluded from builds via //go:build ignore —
// likely superseded by internal/http/context.go; consider removing it.
const claimsContextKey = "authClaims"

// getUserID extracts the authenticated user's ID from the gin context,
// failing when AuthMiddleware did not run or stored an unexpected type.
func getUserID(c *gin.Context) (uint, error) {
	value, ok := c.Get(claimsContextKey)
	if !ok {
		return 0, fmt.Errorf("missing auth claims")
	}
	claims, ok := value.(AuthClaims)
	if !ok {
		return 0, fmt.Errorf("invalid auth claims")
	}
	return claims.UserID, nil
}
diff --git a/server/internal/httpapi/middleware.go b/server/internal/httpapi/middleware.go
new file mode 100644
index 0000000..494298d
--- /dev/null
+++ b/server/internal/httpapi/middleware.go
@@ -0,0 +1,92 @@
+//go:build ignore
+
+package httpapi
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+ "strings"
+
+ "backupx/server/internal/apperror"
+ "backupx/server/internal/security"
+ "backupx/server/pkg/response"
+ "github.com/gin-gonic/gin"
+ "go.uber.org/zap"
+)
+
// AuthClaims is the authenticated identity stored in the request context.
// NOTE(review): this whole file is excluded from builds via //go:build
// ignore — superseded by internal/http/middleware.go; consider deleting.
type AuthClaims struct {
	UserID   uint
	Username string
	Role     string
}

// Recovery converts panics into a logged 500 JSON response.
func Recovery(logger *zap.Logger) gin.HandlerFunc {
	return func(c *gin.Context) {
		defer func() {
			if recovered := recover(); recovered != nil {
				logger.Error("panic recovered", zap.Any("panic", recovered), zap.String("path", c.Request.URL.Path))
				response.Error(c, http.StatusInternalServerError, "INTERNAL_ERROR", "服务器内部错误")
				c.Abort()
			}
		}()
		c.Next()
	}
}

// RequestLogger emits one structured log line per completed request.
func RequestLogger(logger *zap.Logger) gin.HandlerFunc {
	return func(c *gin.Context) {
		c.Next()
		logger.Info("http request",
			zap.String("method", c.Request.Method),
			zap.String("path", c.Request.URL.Path),
			zap.Int("status", c.Writer.Status()),
			zap.String("client_ip", c.ClientIP()),
		)
	}
}

// AuthMiddleware validates a Bearer token and stores AuthClaims in context.
// NOTE(review): mixed-case schemes such as "BEARER x" pass the lowercase
// prefix check, but neither TrimPrefix("Bearer") nor TrimPrefix("bearer")
// strips them, so the untrimmed header reaches jwtManager.Parse and fails —
// harmless while the file is build-ignored, but fix if ever revived.
func AuthMiddleware(jwtManager *security.JWTManager) gin.HandlerFunc {
	return func(c *gin.Context) {
		authorization := strings.TrimSpace(c.GetHeader("Authorization"))
		if authorization == "" || !strings.HasPrefix(strings.ToLower(authorization), "bearer ") {
			response.Error(c, http.StatusUnauthorized, "AUTH_UNAUTHORIZED", "缺少有效的认证令牌")
			c.Abort()
			return
		}
		tokenValue := strings.TrimSpace(strings.TrimPrefix(authorization, "Bearer"))
		if tokenValue == authorization {
			tokenValue = strings.TrimSpace(strings.TrimPrefix(authorization, "bearer"))
		}
		claims, err := jwtManager.Parse(tokenValue)
		if err != nil {
			response.Error(c, http.StatusUnauthorized, "AUTH_UNAUTHORIZED", "认证令牌无效或已过期")
			c.Abort()
			return
		}
		c.Set(claimsContextKey, AuthClaims{UserID: claims.UserID, Username: claims.Username, Role: claims.Role})
		c.Next()
	}
}

// writeError maps an error onto the HTTP envelope: AppErrors keep their
// status and code; anything else becomes a logged 500.
func writeError(c *gin.Context, logger *zap.Logger, err error) {
	var appErr *apperror.AppError
	if errors.As(err, &appErr) {
		if appErr.Err != nil {
			logger.Warn("request failed", zap.String("code", appErr.Code), zap.Error(appErr.Err))
		}
		response.Error(c, appErr.Status, appErr.Code, appErr.Message)
		return
	}
	logger.Error("unexpected error", zap.Error(err))
	response.Error(c, http.StatusInternalServerError, "INTERNAL_ERROR", "服务器内部错误")
}

// bindJSON decodes the request body into T, wrapping failures as 400 AppErrors.
func bindJSON[T any](c *gin.Context, logger *zap.Logger) (*T, error) {
	var payload T
	if err := c.ShouldBindJSON(&payload); err != nil {
		logger.Warn("bind json failed", zap.Error(err))
		return nil, apperror.Wrap(http.StatusBadRequest, "INVALID_REQUEST", fmt.Sprintf("请求参数错误: %v", err), err)
	}
	return &payload, nil
}
diff --git a/server/internal/httpapi/router.go b/server/internal/httpapi/router.go
new file mode 100644
index 0000000..6f5cb62
--- /dev/null
+++ b/server/internal/httpapi/router.go
@@ -0,0 +1,38 @@
+//go:build ignore
+
+package httpapi
+
+import (
+ "backupx/server/internal/security"
+ "backupx/server/internal/service"
+ "github.com/gin-gonic/gin"
+ "go.uber.org/zap"
+)
+
+type Dependencies struct {
+ Logger *zap.Logger
+ AuthService *service.AuthService
+ SystemService *service.SystemService
+ JWTManager *security.JWTManager
+ Mode string
+}
+
+func NewRouter(deps Dependencies) *gin.Engine {
+ gin.SetMode(deps.Mode)
+ router := gin.New()
+ router.Use(Recovery(deps.Logger), RequestLogger(deps.Logger))
+
+ api := router.Group("/api")
+ authHandler := newAuthHandler(deps.AuthService, deps.Logger)
+ systemHandler := newSystemHandler(deps.SystemService)
+ protected := api.Group("")
+ protected.Use(AuthMiddleware(deps.JWTManager))
+
+ authHandler.registerRoutes(api, protected)
+ systemHandler.registerRoutes(protected)
+ api.GET("/healthz", func(c *gin.Context) {
+ c.JSON(200, gin.H{"status": "ok"})
+ })
+
+ return router
+}
diff --git a/server/internal/httpapi/router_test.go b/server/internal/httpapi/router_test.go
new file mode 100644
index 0000000..947227c
--- /dev/null
+++ b/server/internal/httpapi/router_test.go
@@ -0,0 +1,96 @@
+//go:build ignore
+
+package httpapi
+
+import (
+ "bytes"
+ "encoding/json"
+ "net/http"
+ "net/http/httptest"
+ "path/filepath"
+ "testing"
+ "time"
+
+ "backupx/server/internal/config"
+ "backupx/server/internal/database"
+ "backupx/server/internal/logger"
+ "backupx/server/internal/repository"
+ "backupx/server/internal/security"
+ "backupx/server/internal/service"
+)
+
+func TestSetupLoginProfileAndSystemInfo(t *testing.T) {
+ tmpDir := t.TempDir()
+ cfg := config.Config{
+ Server: config.ServerConfig{Mode: "test"},
+ Database: config.DatabaseConfig{Path: filepath.Join(tmpDir, "backupx.db")},
+ Security: config.SecurityConfig{JWTSecret: "test-jwt-secret", JWTExpire: "1h", EncryptionKey: "test-encryption-key"},
+ Log: config.LogConfig{Level: "error"},
+ }
+ log, err := logger.New(cfg.Log)
+ if err != nil {
+ t.Fatalf("logger.New() error = %v", err)
+ }
+ db, err := database.Open(cfg.Database, log)
+ if err != nil {
+ t.Fatalf("database.Open() error = %v", err)
+ }
+ jwtManager := security.NewJWTManager(cfg.Security.JWTSecret, time.Hour)
+ authService := service.NewAuthService(repository.NewUserRepository(db), jwtManager, security.NewLoginLimiter(5, time.Minute))
+ systemService := service.NewSystemService(cfg, "test", time.Now().Add(-time.Minute))
+ router := NewRouter(Dependencies{Logger: log, AuthService: authService, SystemService: systemService, JWTManager: jwtManager, Mode: "test"})
+
+ setupBody := map[string]string{"username": "admin", "password": "super-secret", "displayName": "管理员"}
+ setupResp := performJSONRequest(t, router, http.MethodPost, "/api/auth/setup", setupBody, "")
+ if setupResp.Code != http.StatusCreated {
+ t.Fatalf("unexpected setup status: %d body=%s", setupResp.Code, setupResp.Body.String())
+ }
+ var setupPayload struct {
+ Code string `json:"code"`
+ Data struct {
+ Token string `json:"token"`
+ } `json:"data"`
+ }
+ if err := json.Unmarshal(setupResp.Body.Bytes(), &setupPayload); err != nil {
+ t.Fatalf("decode setup response: %v", err)
+ }
+ if setupPayload.Data.Token == "" {
+ t.Fatal("expected token in setup response")
+ }
+
+ profileResp := performJSONRequest(t, router, http.MethodGet, "/api/auth/profile", nil, setupPayload.Data.Token)
+ if profileResp.Code != http.StatusOK {
+ t.Fatalf("unexpected profile status: %d body=%s", profileResp.Code, profileResp.Body.String())
+ }
+
+ loginBody := map[string]string{"username": "admin", "password": "super-secret"}
+ loginResp := performJSONRequest(t, router, http.MethodPost, "/api/auth/login", loginBody, "")
+ if loginResp.Code != http.StatusOK {
+ t.Fatalf("unexpected login status: %d body=%s", loginResp.Code, loginResp.Body.String())
+ }
+
+ systemResp := performJSONRequest(t, router, http.MethodGet, "/api/system/info", nil, setupPayload.Data.Token)
+ if systemResp.Code != http.StatusOK {
+ t.Fatalf("unexpected system info status: %d body=%s", systemResp.Code, systemResp.Body.String())
+ }
+}
+
+func performJSONRequest(t *testing.T, handler http.Handler, method string, path string, payload any, token string) *httptest.ResponseRecorder {
+ t.Helper()
+ var body []byte
+ if payload != nil {
+ encoded, err := json.Marshal(payload)
+ if err != nil {
+ t.Fatalf("json.Marshal() error = %v", err)
+ }
+ body = encoded
+ }
+ request := httptest.NewRequest(method, path, bytes.NewReader(body))
+ request.Header.Set("Content-Type", "application/json")
+ if token != "" {
+ request.Header.Set("Authorization", "Bearer "+token)
+ }
+ response := httptest.NewRecorder()
+ handler.ServeHTTP(response, request)
+ return response
+}
diff --git a/server/internal/httpapi/system_handler.go b/server/internal/httpapi/system_handler.go
new file mode 100644
index 0000000..68cc948
--- /dev/null
+++ b/server/internal/httpapi/system_handler.go
@@ -0,0 +1,25 @@
+//go:build ignore
+
+package httpapi
+
+import (
+ "backupx/server/internal/service"
+ "backupx/server/pkg/response"
+ "github.com/gin-gonic/gin"
+)
+
+type systemHandler struct {
+ service *service.SystemService
+}
+
+func newSystemHandler(service *service.SystemService) *systemHandler {
+ return &systemHandler{service: service}
+}
+
+func (h *systemHandler) registerRoutes(protected gin.IRouter) {
+ protected.GET("/system/info", h.info)
+}
+
+func (h *systemHandler) info(c *gin.Context) {
+ response.Success(c, h.service.GetInfo())
+}
diff --git a/server/internal/logger/logger.go b/server/internal/logger/logger.go
new file mode 100644
index 0000000..ea07491
--- /dev/null
+++ b/server/internal/logger/logger.go
@@ -0,0 +1,53 @@
+package logger
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "backupx/server/internal/config"
+ "github.com/natefinch/lumberjack"
+ "go.uber.org/zap"
+ "go.uber.org/zap/zapcore"
+)
+
+func New(cfg config.LogConfig) (*zap.Logger, error) {
+ level := parseLevel(cfg.Level)
+ encoderCfg := zap.NewProductionEncoderConfig()
+ encoderCfg.TimeKey = "time"
+ encoderCfg.EncodeTime = zapcore.ISO8601TimeEncoder
+ encoder := zapcore.NewJSONEncoder(encoderCfg)
+
+ writers := []zapcore.WriteSyncer{zapcore.AddSync(os.Stdout)}
+ if cfg.File != "" {
+ if err := os.MkdirAll(filepath.Dir(cfg.File), 0o755); err != nil {
+ return nil, fmt.Errorf("create log dir: %w", err)
+ }
+ rotator := &lumberjack.Logger{
+ Filename: cfg.File,
+ MaxSize: cfg.MaxSize,
+ MaxBackups: cfg.MaxBackups,
+ MaxAge: cfg.MaxAge,
+ LocalTime: false,
+ Compress: true,
+ }
+ writers = append(writers, zapcore.AddSync(rotator))
+ }
+
+ core := zapcore.NewCore(encoder, zapcore.NewMultiWriteSyncer(writers...), level)
+ return zap.New(core, zap.AddCaller(), zap.AddStacktrace(zapcore.ErrorLevel)), nil
+}
+
+func parseLevel(value string) zapcore.Level {
+ switch strings.ToLower(strings.TrimSpace(value)) {
+ case "debug":
+ return zapcore.DebugLevel
+ case "warn":
+ return zapcore.WarnLevel
+ case "error":
+ return zapcore.ErrorLevel
+ default:
+ return zapcore.InfoLevel
+ }
+}
diff --git a/server/internal/model/backup_record.go b/server/internal/model/backup_record.go
new file mode 100644
index 0000000..d884d65
--- /dev/null
+++ b/server/internal/model/backup_record.go
@@ -0,0 +1,32 @@
+package model
+
+import "time"
+
+const (
+ BackupRecordStatusRunning = "running"
+ BackupRecordStatusSuccess = "success"
+ BackupRecordStatusFailed = "failed"
+)
+
+type BackupRecord struct {
+ ID uint `gorm:"primaryKey" json:"id"`
+ TaskID uint `gorm:"column:task_id;index;not null" json:"taskId"`
+ Task BackupTask `json:"task,omitempty"`
+ StorageTargetID uint `gorm:"column:storage_target_id;index;not null" json:"storageTargetId"`
+ StorageTarget StorageTarget `json:"storageTarget,omitempty"`
+ Status string `gorm:"size:20;index;not null" json:"status"`
+ FileName string `gorm:"column:file_name;size:255" json:"fileName"`
+ FileSize int64 `gorm:"column:file_size;not null;default:0" json:"fileSize"`
+ StoragePath string `gorm:"column:storage_path;size:500" json:"storagePath"`
+ DurationSeconds int `gorm:"column:duration_seconds;not null;default:0" json:"durationSeconds"`
+ ErrorMessage string `gorm:"column:error_message;size:2000" json:"errorMessage"`
+ LogContent string `gorm:"column:log_content;type:text" json:"logContent"`
+ StartedAt time.Time `gorm:"column:started_at;index;not null" json:"startedAt"`
+ CompletedAt *time.Time `gorm:"column:completed_at;index" json:"completedAt,omitempty"`
+ CreatedAt time.Time `json:"createdAt"`
+ UpdatedAt time.Time `json:"updatedAt"`
+}
+
+func (BackupRecord) TableName() string {
+ return "backup_records"
+}
diff --git a/server/internal/model/backup_task.go b/server/internal/model/backup_task.go
new file mode 100644
index 0000000..2b295d4
--- /dev/null
+++ b/server/internal/model/backup_task.go
@@ -0,0 +1,50 @@
+package model
+
+import "time"
+
+const (
+ BackupTaskTypeFile = "file"
+ BackupTaskTypeMySQL = "mysql"
+ BackupTaskTypeSQLite = "sqlite"
+ BackupTaskTypePostgreSQL = "postgresql"
+)
+
+const (
+ BackupTaskStatusIdle = "idle"
+ BackupTaskStatusRunning = "running"
+ BackupTaskStatusSuccess = "success"
+ BackupTaskStatusFailed = "failed"
+)
+
+type BackupTask struct {
+ ID uint `gorm:"primaryKey" json:"id"`
+ Name string `gorm:"size:100;uniqueIndex;not null" json:"name"`
+ Type string `gorm:"size:20;index;not null" json:"type"`
+ Enabled bool `gorm:"not null;default:true" json:"enabled"`
+ CronExpr string `gorm:"column:cron_expr;size:64" json:"cronExpr"`
+ SourcePath string `gorm:"column:source_path;size:500" json:"sourcePath"`
+ ExcludePatterns string `gorm:"column:exclude_patterns;type:text" json:"excludePatterns"`
+ DBHost string `gorm:"column:db_host;size:255" json:"dbHost"`
+ DBPort int `gorm:"column:db_port" json:"dbPort"`
+ DBUser string `gorm:"column:db_user;size:100" json:"dbUser"`
+ DBPasswordCiphertext string `gorm:"column:db_password_ciphertext;type:text" json:"-"`
+ DBName string `gorm:"column:db_name;size:255" json:"dbName"`
+ DBPath string `gorm:"column:db_path;size:500" json:"dbPath"`
+ StorageTargetID uint `gorm:"column:storage_target_id;index;not null" json:"storageTargetId"`
+ StorageTarget StorageTarget `json:"storageTarget,omitempty"`
+ NodeID uint `gorm:"column:node_id;index;default:0" json:"nodeId"`
+ Node Node `json:"node,omitempty"`
+ Tags string `gorm:"column:tags;size:500" json:"tags"`
+ RetentionDays int `gorm:"column:retention_days;not null;default:30" json:"retentionDays"`
+ Compression string `gorm:"size:10;not null;default:'gzip'" json:"compression"`
+ Encrypt bool `gorm:"not null;default:false" json:"encrypt"`
+ MaxBackups int `gorm:"column:max_backups;not null;default:10" json:"maxBackups"`
+ LastRunAt *time.Time `gorm:"column:last_run_at" json:"lastRunAt,omitempty"`
+ LastStatus string `gorm:"column:last_status;size:20;not null;default:'idle'" json:"lastStatus"`
+ CreatedAt time.Time `json:"createdAt"`
+ UpdatedAt time.Time `json:"updatedAt"`
+}
+
+func (BackupTask) TableName() string {
+ return "backup_tasks"
+}
diff --git a/server/internal/model/node.go b/server/internal/model/node.go
new file mode 100644
index 0000000..87aca51
--- /dev/null
+++ b/server/internal/model/node.go
@@ -0,0 +1,30 @@
+package model
+
+import "time"
+
+const (
+ NodeStatusOnline = "online"
+ NodeStatusOffline = "offline"
+)
+
+// Node represents a managed server node in the cluster.
+// The default "local" node is auto-created for single-machine backward compatibility.
+type Node struct {
+ ID uint `gorm:"primaryKey" json:"id"`
+ Name string `gorm:"size:128;uniqueIndex;not null" json:"name"`
+ Hostname string `gorm:"size:255" json:"hostname"`
+ IPAddress string `gorm:"column:ip_address;size:64" json:"ipAddress"`
+ Token string `gorm:"size:128;uniqueIndex;not null" json:"-"`
+ Status string `gorm:"size:20;not null;default:'offline'" json:"status"`
+ IsLocal bool `gorm:"not null;default:false" json:"isLocal"`
+ OS string `gorm:"size:64" json:"os"`
+ Arch string `gorm:"size:32" json:"arch"`
+ AgentVer string `gorm:"column:agent_version;size:32" json:"agentVersion"`
+ LastSeen time.Time `gorm:"column:last_seen" json:"lastSeen"`
+ CreatedAt time.Time `json:"createdAt"`
+ UpdatedAt time.Time `json:"updatedAt"`
+}
+
+func (Node) TableName() string {
+ return "nodes"
+}
diff --git a/server/internal/model/notification.go b/server/internal/model/notification.go
new file mode 100644
index 0000000..da1828f
--- /dev/null
+++ b/server/internal/model/notification.go
@@ -0,0 +1,19 @@
+package model
+
+import "time"
+
+type Notification struct {
+ ID uint `gorm:"primaryKey" json:"id"`
+ Type string `gorm:"size:20;index;not null" json:"type"`
+ Name string `gorm:"size:100;uniqueIndex;not null" json:"name"`
+ ConfigCiphertext string `gorm:"column:config_ciphertext;type:text;not null" json:"-"`
+ Enabled bool `gorm:"not null;default:true" json:"enabled"`
+ OnSuccess bool `gorm:"column:on_success;not null;default:false" json:"onSuccess"`
+ OnFailure bool `gorm:"column:on_failure;not null;default:true" json:"onFailure"`
+ CreatedAt time.Time `json:"createdAt"`
+ UpdatedAt time.Time `json:"updatedAt"`
+}
+
+func (Notification) TableName() string {
+ return "notifications"
+}
diff --git a/server/internal/model/oauth_session.go b/server/internal/model/oauth_session.go
new file mode 100644
index 0000000..b17b9e8
--- /dev/null
+++ b/server/internal/model/oauth_session.go
@@ -0,0 +1,19 @@
+package model
+
+import "time"
+
+type OAuthSession struct {
+ ID uint `gorm:"primaryKey" json:"id"`
+ ProviderType string `gorm:"column:provider_type;size:32;index;not null" json:"providerType"`
+ State string `gorm:"size:255;uniqueIndex;not null" json:"state"`
+ PayloadCiphertext string `gorm:"column:payload_ciphertext;type:text;not null" json:"-"`
+ TargetID *uint `gorm:"column:target_id" json:"targetId,omitempty"`
+ ExpiresAt time.Time `gorm:"column:expires_at;index;not null" json:"expiresAt"`
+ UsedAt *time.Time `gorm:"column:used_at" json:"usedAt,omitempty"`
+ CreatedAt time.Time `json:"createdAt"`
+ UpdatedAt time.Time `json:"updatedAt"`
+}
+
+func (OAuthSession) TableName() string {
+ return "oauth_sessions"
+}
diff --git a/server/internal/model/storage_target.go b/server/internal/model/storage_target.go
new file mode 100644
index 0000000..fd8fb9f
--- /dev/null
+++ b/server/internal/model/storage_target.go
@@ -0,0 +1,22 @@
+package model
+
+import "time"
+
+type StorageTarget struct {
+ ID uint `gorm:"primaryKey" json:"id"`
+ Name string `gorm:"size:128;uniqueIndex;not null" json:"name"`
+ Type string `gorm:"size:32;index;not null" json:"type"`
+ Description string `gorm:"size:255" json:"description"`
+ Enabled bool `gorm:"not null;default:true" json:"enabled"`
+ ConfigCiphertext string `gorm:"column:config_ciphertext;type:text;not null" json:"-"`
+ ConfigVersion int `gorm:"not null;default:1" json:"configVersion"`
+ LastTestedAt *time.Time `gorm:"column:last_tested_at" json:"lastTestedAt,omitempty"`
+ LastTestStatus string `gorm:"column:last_test_status;size:32;not null;default:'unknown'" json:"lastTestStatus"`
+ LastTestMessage string `gorm:"column:last_test_message;size:512" json:"lastTestMessage"`
+ CreatedAt time.Time `json:"createdAt"`
+ UpdatedAt time.Time `json:"updatedAt"`
+}
+
+func (StorageTarget) TableName() string {
+ return "storage_targets"
+}
diff --git a/server/internal/model/system_config.go b/server/internal/model/system_config.go
new file mode 100644
index 0000000..4de18a4
--- /dev/null
+++ b/server/internal/model/system_config.go
@@ -0,0 +1,16 @@
+package model
+
+import "time"
+
+type SystemConfig struct {
+ ID uint `gorm:"primaryKey" json:"id"`
+ Key string `gorm:"size:128;uniqueIndex;not null" json:"key"`
+ Value string `gorm:"type:text;not null" json:"value"`
+ Encrypted bool `gorm:"not null;default:false" json:"encrypted"`
+ CreatedAt time.Time `json:"createdAt"`
+ UpdatedAt time.Time `json:"updatedAt"`
+}
+
+func (SystemConfig) TableName() string {
+ return "system_configs"
+}
diff --git a/server/internal/model/user.go b/server/internal/model/user.go
new file mode 100644
index 0000000..581ed09
--- /dev/null
+++ b/server/internal/model/user.go
@@ -0,0 +1,18 @@
+package model
+
+import "time"
+
+type User struct {
+ ID uint `gorm:"primaryKey" json:"id"`
+ Username string `gorm:"size:64;uniqueIndex;not null" json:"username"`
+ PasswordHash string `gorm:"column:password_hash;not null" json:"-"`
+ DisplayName string `gorm:"size:128;not null" json:"displayName"`
+ Email string `gorm:"size:255" json:"email"`
+ Role string `gorm:"size:32;not null;default:admin" json:"role"`
+ CreatedAt time.Time `json:"createdAt"`
+ UpdatedAt time.Time `json:"updatedAt"`
+}
+
+func (User) TableName() string {
+ return "users"
+}
diff --git a/server/internal/notify/email.go b/server/internal/notify/email.go
new file mode 100644
index 0000000..eb38a4f
--- /dev/null
+++ b/server/internal/notify/email.go
@@ -0,0 +1,88 @@
+package notify
+
+import (
+ "context"
+ "crypto/tls"
+ "fmt"
+ "net/smtp"
+ "strconv"
+ "strings"
+)
+
+type EmailNotifier struct{}
+
+func NewEmailNotifier() *EmailNotifier { return &EmailNotifier{} }
+func (n *EmailNotifier) Type() string { return "email" }
+func (n *EmailNotifier) SensitiveFields() []string { return []string{"password"} }
+
+func (n *EmailNotifier) Validate(config map[string]any) error {
+ host := strings.TrimSpace(asString(config["host"]))
+ port := asInt(config["port"])
+ from := strings.TrimSpace(asString(config["from"]))
+ to := strings.TrimSpace(asString(config["to"]))
+ if host == "" || port <= 0 || from == "" || to == "" {
+ return fmt.Errorf("email host/port/from/to are required")
+ }
+ return nil
+}
+
+func (n *EmailNotifier) Send(_ context.Context, config map[string]any, message Message) error {
+ if err := n.Validate(config); err != nil {
+ return err
+ }
+ host := strings.TrimSpace(asString(config["host"]))
+ port := asInt(config["port"])
+ username := strings.TrimSpace(asString(config["username"]))
+ password := strings.TrimSpace(asString(config["password"]))
+ from := strings.TrimSpace(asString(config["from"]))
+ toList := splitCommaValues(asString(config["to"]))
+ address := host + ":" + strconv.Itoa(port)
+ headers := []string{"From: " + from, "To: " + strings.Join(toList, ", "), "Subject: " + message.Title, "MIME-Version: 1.0", "Content-Type: text/plain; charset=UTF-8", "", message.Body}
+ var auth smtp.Auth
+ if username != "" {
+ auth = smtp.PlainAuth("", username, password, host)
+ }
+
+ rawMessage := []byte(strings.Join(headers, "\r\n"))
+
+ if port == 465 {
+ tlsConfig := &tls.Config{ServerName: host}
+ conn, err := tls.Dial("tcp", address, tlsConfig)
+ if err != nil {
+ return fmt.Errorf("dial tls for smtp port 465 failed: %w", err)
+ }
+ client, err := smtp.NewClient(conn, host)
+ if err != nil {
+ return fmt.Errorf("create smtp client over tls failed: %w", err)
+ }
+ defer client.Close()
+ if auth != nil {
+ if ok, _ := client.Extension("AUTH"); ok {
+ if err = client.Auth(auth); err != nil {
+ return fmt.Errorf("smtp auth failed: %w", err)
+ }
+ }
+ }
+ if err = client.Mail(from); err != nil {
+ return fmt.Errorf("smtp mail from failed: %w", err)
+ }
+ for _, toAddr := range toList {
+ if err = client.Rcpt(toAddr); err != nil {
+ return fmt.Errorf("smtp rcpt failed for %s: %w", toAddr, err)
+ }
+ }
+ writer, err := client.Data()
+ if err != nil {
+ return fmt.Errorf("smtp data failed: %w", err)
+ }
+ if _, err = writer.Write(rawMessage); err != nil {
+ return fmt.Errorf("smtp write message failed: %w", err)
+ }
+ if err = writer.Close(); err != nil {
+ return fmt.Errorf("smtp data close failed: %w", err)
+ }
+ return client.Quit()
+ }
+
+ return smtp.SendMail(address, auth, from, toList, rawMessage)
+}
diff --git a/server/internal/notify/helpers.go b/server/internal/notify/helpers.go
new file mode 100644
index 0000000..5dba164
--- /dev/null
+++ b/server/internal/notify/helpers.go
@@ -0,0 +1,49 @@
+package notify
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+)
+
+func asString(value any) string {
+ text, _ := value.(string)
+ return strings.TrimSpace(text)
+}
+
+func asInt(value any) int {
+ switch actual := value.(type) {
+ case int:
+ return actual
+ case int64:
+ return int(actual)
+ case float64:
+ return int(actual)
+ case string:
+ parsed, _ := strconv.Atoi(strings.TrimSpace(actual))
+ return parsed
+ default:
+ return 0
+ }
+}
+
+func splitCommaValues(value string) []string {
+ items := strings.Split(value, ",")
+ result := make([]string, 0, len(items))
+ for _, item := range items {
+ trimmed := strings.TrimSpace(item)
+ if trimmed != "" {
+ result = append(result, trimmed)
+ }
+ }
+ return result
+}
+
+func validateRequiredConfig(config map[string]any, fields ...string) error {
+ for _, field := range fields {
+ if strings.TrimSpace(asString(config[field])) == "" {
+ return fmt.Errorf("%s is required", field)
+ }
+ }
+ return nil
+}
diff --git a/server/internal/notify/registry.go b/server/internal/notify/registry.go
new file mode 100644
index 0000000..1ed96ef
--- /dev/null
+++ b/server/internal/notify/registry.go
@@ -0,0 +1,75 @@
+package notify
+
+import (
+ "context"
+ "fmt"
+ "sort"
+ "sync"
+)
+
+type Registry struct {
+ mu sync.RWMutex
+ notifiers map[string]Notifier
+}
+
+func NewRegistry(notifiers ...Notifier) *Registry {
+ registry := &Registry{notifiers: make(map[string]Notifier)}
+ for _, notifier := range notifiers {
+ registry.Register(notifier)
+ }
+ return registry
+}
+
+func (r *Registry) Register(notifier Notifier) {
+ if notifier == nil {
+ return
+ }
+ r.mu.Lock()
+ defer r.mu.Unlock()
+ if r.notifiers == nil {
+ r.notifiers = make(map[string]Notifier)
+ }
+ r.notifiers[notifier.Type()] = notifier
+}
+
+func (r *Registry) Types() []string {
+ r.mu.RLock()
+ defer r.mu.RUnlock()
+ items := make([]string, 0, len(r.notifiers))
+ for key := range r.notifiers {
+ items = append(items, key)
+ }
+ sort.Strings(items)
+ return items
+}
+
+func (r *Registry) SensitiveFields(notificationType string) []string {
+ notifier, ok := r.Notifier(notificationType)
+ if !ok {
+ return nil
+ }
+ return notifier.SensitiveFields()
+}
+
+func (r *Registry) Validate(notificationType string, config map[string]any) error {
+ notifier, ok := r.Notifier(notificationType)
+ if !ok {
+ return fmt.Errorf("unsupported notification type: %s", notificationType)
+ }
+ return notifier.Validate(config)
+}
+
+func (r *Registry) Send(ctx context.Context, notificationType string, config map[string]any, message Message) error {
+ notifier, ok := r.Notifier(notificationType)
+ if !ok {
+ return fmt.Errorf("unsupported notification type: %s", notificationType)
+ }
+ return notifier.Send(ctx, config, message)
+}
+
+func (r *Registry) Notifier(notificationType string) (Notifier, bool) {
+ r.mu.RLock()
+ defer r.mu.RUnlock()
+ notifier, ok := r.notifiers[notificationType]
+ return notifier, ok
+}
diff --git a/server/internal/notify/telegram.go b/server/internal/notify/telegram.go
new file mode 100644
index 0000000..5c62a11
--- /dev/null
+++ b/server/internal/notify/telegram.go
@@ -0,0 +1,54 @@
+package notify
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "strings"
+ "time"
+)
+
+type TelegramNotifier struct {
+ client *http.Client
+}
+
+func NewTelegramNotifier() *TelegramNotifier {
+ return &TelegramNotifier{client: &http.Client{Timeout: 10 * time.Second}}
+}
+func (n *TelegramNotifier) Type() string { return "telegram" }
+func (n *TelegramNotifier) SensitiveFields() []string { return []string{"botToken"} }
+
+func (n *TelegramNotifier) Validate(config map[string]any) error {
+ if strings.TrimSpace(asString(config["botToken"])) == "" || strings.TrimSpace(asString(config["chatId"])) == "" {
+ return fmt.Errorf("telegram botToken/chatId are required")
+ }
+ return nil
+}
+
+func (n *TelegramNotifier) Send(ctx context.Context, config map[string]any, message Message) error {
+ if err := n.Validate(config); err != nil {
+ return err
+ }
+ botToken := strings.TrimSpace(asString(config["botToken"]))
+ chatID := strings.TrimSpace(asString(config["chatId"]))
+ payload, err := json.Marshal(map[string]any{"chat_id": chatID, "text": message.Title + "\n\n" + message.Body})
+ if err != nil {
+ return fmt.Errorf("marshal telegram payload: %w", err)
+ }
+ request, err := http.NewRequestWithContext(ctx, http.MethodPost, "https://api.telegram.org/bot"+botToken+"/sendMessage", bytes.NewReader(payload))
+ if err != nil {
+ return fmt.Errorf("create telegram request: %w", err)
+ }
+ request.Header.Set("Content-Type", "application/json")
+ response, err := n.client.Do(request)
+ if err != nil {
+ return fmt.Errorf("send telegram request: %w", err)
+ }
+ defer response.Body.Close()
+ if response.StatusCode >= http.StatusBadRequest {
+ return fmt.Errorf("telegram response status: %s", response.Status)
+ }
+ return nil
+}
diff --git a/server/internal/notify/types.go b/server/internal/notify/types.go
new file mode 100644
index 0000000..7121b87
--- /dev/null
+++ b/server/internal/notify/types.go
@@ -0,0 +1,16 @@
+package notify
+
+import "context"
+
+type Message struct {
+ Title string `json:"title"`
+ Body string `json:"body"`
+ Fields map[string]any `json:"fields,omitempty"`
+}
+
+type Notifier interface {
+ Type() string
+ SensitiveFields() []string
+ Validate(config map[string]any) error
+ Send(ctx context.Context, config map[string]any, message Message) error
+}
diff --git a/server/internal/notify/webhook.go b/server/internal/notify/webhook.go
new file mode 100644
index 0000000..46d8c13
--- /dev/null
+++ b/server/internal/notify/webhook.go
@@ -0,0 +1,55 @@
+package notify
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "strings"
+ "time"
+)
+
+type WebhookNotifier struct {
+ client *http.Client
+}
+
+func NewWebhookNotifier() *WebhookNotifier {
+ return &WebhookNotifier{client: &http.Client{Timeout: 10 * time.Second}}
+}
+func (n *WebhookNotifier) Type() string { return "webhook" }
+func (n *WebhookNotifier) SensitiveFields() []string { return []string{"secret"} }
+
+func (n *WebhookNotifier) Validate(config map[string]any) error {
+ if strings.TrimSpace(asString(config["url"])) == "" {
+ return fmt.Errorf("webhook url is required")
+ }
+ return nil
+}
+
+func (n *WebhookNotifier) Send(ctx context.Context, config map[string]any, message Message) error {
+ if err := n.Validate(config); err != nil {
+ return err
+ }
+ body, err := json.Marshal(map[string]any{"title": message.Title, "body": message.Body, "fields": message.Fields})
+ if err != nil {
+ return fmt.Errorf("marshal webhook payload: %w", err)
+ }
+ request, err := http.NewRequestWithContext(ctx, http.MethodPost, strings.TrimSpace(asString(config["url"])), bytes.NewReader(body))
+ if err != nil {
+ return fmt.Errorf("create webhook request: %w", err)
+ }
+ request.Header.Set("Content-Type", "application/json")
+ if secret := strings.TrimSpace(asString(config["secret"])); secret != "" {
+ request.Header.Set("X-BackupX-Secret", secret)
+ }
+ response, err := n.client.Do(request)
+ if err != nil {
+ return fmt.Errorf("send webhook request: %w", err)
+ }
+ defer response.Body.Close()
+ if response.StatusCode >= http.StatusBadRequest {
+ return fmt.Errorf("webhook response status: %s", response.Status)
+ }
+ return nil
+}
diff --git a/server/internal/repository/backup_record_repository.go b/server/internal/repository/backup_record_repository.go
new file mode 100644
index 0000000..9ff3eb5
--- /dev/null
+++ b/server/internal/repository/backup_record_repository.go
@@ -0,0 +1,183 @@
+package repository
+
+import (
+ "context"
+ "errors"
+ "time"
+
+ "backupx/server/internal/model"
+ "gorm.io/gorm"
+)
+
+type BackupRecordListOptions struct {
+ TaskID *uint
+ Status string
+ DateFrom *time.Time
+ DateTo *time.Time
+ Limit int
+ Offset int
+}
+
+type BackupTimelinePoint struct {
+ Date string `json:"date"`
+ Total int64 `json:"total"`
+ Success int64 `json:"success"`
+ Failed int64 `json:"failed"`
+}
+
+type BackupStorageUsageItem struct {
+ StorageTargetID uint `json:"storageTargetId"`
+ TotalSize int64 `json:"totalSize"`
+}
+
+type BackupRecordRepository interface {
+ List(context.Context, BackupRecordListOptions) ([]model.BackupRecord, error)
+ FindByID(context.Context, uint) (*model.BackupRecord, error)
+ Create(context.Context, *model.BackupRecord) error
+ Update(context.Context, *model.BackupRecord) error
+ Delete(context.Context, uint) error
+ ListRecent(context.Context, int) ([]model.BackupRecord, error)
+ ListSuccessfulByTask(context.Context, uint) ([]model.BackupRecord, error)
+ Count(context.Context) (int64, error)
+ CountSince(context.Context, time.Time) (int64, error)
+ CountSuccessSince(context.Context, time.Time) (int64, error)
+ SumFileSize(context.Context) (int64, error)
+ TimelineSince(context.Context, time.Time) ([]BackupTimelinePoint, error)
+ StorageUsage(context.Context) ([]BackupStorageUsageItem, error)
+}
+
+type GormBackupRecordRepository struct {
+ db *gorm.DB
+}
+
+func NewBackupRecordRepository(db *gorm.DB) *GormBackupRecordRepository {
+ return &GormBackupRecordRepository{db: db}
+}
+
+func (r *GormBackupRecordRepository) List(ctx context.Context, options BackupRecordListOptions) ([]model.BackupRecord, error) {
+ query := r.db.WithContext(ctx).Model(&model.BackupRecord{}).Preload("Task").Preload("Task.StorageTarget").Order("started_at desc")
+ if options.TaskID != nil {
+ query = query.Where("task_id = ?", *options.TaskID)
+ }
+ if options.Status != "" {
+ query = query.Where("status = ?", options.Status)
+ }
+ if options.DateFrom != nil {
+ query = query.Where("started_at >= ?", options.DateFrom.UTC())
+ }
+ if options.DateTo != nil {
+ query = query.Where("started_at <= ?", options.DateTo.UTC())
+ }
+ if options.Limit > 0 {
+ query = query.Limit(options.Limit)
+ }
+ if options.Offset > 0 {
+ query = query.Offset(options.Offset)
+ }
+ var items []model.BackupRecord
+ if err := query.Find(&items).Error; err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
+func (r *GormBackupRecordRepository) FindByID(ctx context.Context, id uint) (*model.BackupRecord, error) {
+ var item model.BackupRecord
+ if err := r.db.WithContext(ctx).Preload("Task").Preload("Task.StorageTarget").First(&item, id).Error; err != nil {
+ if errors.Is(err, gorm.ErrRecordNotFound) {
+ return nil, nil
+ }
+ return nil, err
+ }
+ return &item, nil
+}
+
+func (r *GormBackupRecordRepository) Create(ctx context.Context, item *model.BackupRecord) error {
+ return r.db.WithContext(ctx).Create(item).Error
+}
+
+func (r *GormBackupRecordRepository) Update(ctx context.Context, item *model.BackupRecord) error {
+ return r.db.WithContext(ctx).Save(item).Error
+}
+
+func (r *GormBackupRecordRepository) Delete(ctx context.Context, id uint) error {
+ return r.db.WithContext(ctx).Delete(&model.BackupRecord{}, id).Error
+}
+
+func (r *GormBackupRecordRepository) ListRecent(ctx context.Context, limit int) ([]model.BackupRecord, error) {
+ if limit <= 0 {
+ limit = 10
+ }
+ var items []model.BackupRecord
+ if err := r.db.WithContext(ctx).Preload("Task").Preload("Task.StorageTarget").Order("started_at desc").Limit(limit).Find(&items).Error; err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
+func (r *GormBackupRecordRepository) ListSuccessfulByTask(ctx context.Context, taskID uint) ([]model.BackupRecord, error) {
+ var items []model.BackupRecord
+ if err := r.db.WithContext(ctx).Where("task_id = ? AND status = ?", taskID, "success").Order("completed_at desc, id desc").Find(&items).Error; err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
+func (r *GormBackupRecordRepository) Count(ctx context.Context) (int64, error) {
+ var count int64
+ if err := r.db.WithContext(ctx).Model(&model.BackupRecord{}).Count(&count).Error; err != nil {
+ return 0, err
+ }
+ return count, nil
+}
+
+func (r *GormBackupRecordRepository) CountSince(ctx context.Context, since time.Time) (int64, error) {
+ var count int64
+ if err := r.db.WithContext(ctx).Model(&model.BackupRecord{}).Where("started_at >= ?", since.UTC()).Count(&count).Error; err != nil {
+ return 0, err
+ }
+ return count, nil
+}
+
+func (r *GormBackupRecordRepository) CountSuccessSince(ctx context.Context, since time.Time) (int64, error) {
+ var count int64
+ if err := r.db.WithContext(ctx).Model(&model.BackupRecord{}).Where("started_at >= ? AND status = ?", since.UTC(), "success").Count(&count).Error; err != nil {
+ return 0, err
+ }
+ return count, nil
+}
+
+func (r *GormBackupRecordRepository) SumFileSize(ctx context.Context) (int64, error) {
+ var sum int64
+ if err := r.db.WithContext(ctx).Model(&model.BackupRecord{}).Select("COALESCE(SUM(file_size), 0)").Scan(&sum).Error; err != nil {
+ return 0, err
+ }
+ return sum, nil
+}
+
+func (r *GormBackupRecordRepository) TimelineSince(ctx context.Context, since time.Time) ([]BackupTimelinePoint, error) {
+ var items []BackupTimelinePoint
+ query := `
+ SELECT
+ strftime('%Y-%m-%d', started_at) AS date,
+ COUNT(*) AS total,
+ SUM(CASE WHEN status = 'success' THEN 1 ELSE 0 END) AS success,
+ SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) AS failed
+ FROM backup_records
+ WHERE started_at >= ?
+ GROUP BY strftime('%Y-%m-%d', started_at)
+ ORDER BY date ASC
+ `
+ if err := r.db.WithContext(ctx).Raw(query, since.UTC()).Scan(&items).Error; err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
+func (r *GormBackupRecordRepository) StorageUsage(ctx context.Context) ([]BackupStorageUsageItem, error) {
+ var items []BackupStorageUsageItem
+ if err := r.db.WithContext(ctx).Model(&model.BackupRecord{}).Select("storage_target_id, COALESCE(SUM(file_size), 0) AS total_size").Group("storage_target_id").Order("storage_target_id asc").Scan(&items).Error; err != nil {
+ return nil, err
+ }
+ return items, nil
+}
diff --git a/server/internal/repository/backup_record_repository_test.go b/server/internal/repository/backup_record_repository_test.go
new file mode 100644
index 0000000..c7765e0
--- /dev/null
+++ b/server/internal/repository/backup_record_repository_test.go
@@ -0,0 +1,115 @@
+package repository
+
+import (
+ "context"
+ "path/filepath"
+ "testing"
+ "time"
+
+ "backupx/server/internal/config"
+ "backupx/server/internal/database"
+ "backupx/server/internal/logger"
+ "backupx/server/internal/model"
+)
+
+// newBackupRecordTestRepository opens a fresh SQLite database in a
+// per-test temp dir, seeds one storage target and one backup task
+// (satisfying the record's foreign keys), and returns a repository
+// bound to that database.
+func newBackupRecordTestRepository(t *testing.T) *GormBackupRecordRepository {
+ t.Helper()
+ log, err := logger.New(config.LogConfig{Level: "error"})
+ if err != nil {
+ t.Fatalf("logger.New returned error: %v", err)
+ }
+ db, err := database.Open(config.DatabaseConfig{Path: filepath.Join(t.TempDir(), "backupx.db")}, log)
+ if err != nil {
+ t.Fatalf("database.Open returned error: %v", err)
+ }
+ storageTarget := &model.StorageTarget{Name: "local", Type: "local_disk", Enabled: true, ConfigCiphertext: "{}", ConfigVersion: 1, LastTestStatus: "unknown"}
+ if err := db.Create(storageTarget).Error; err != nil {
+ t.Fatalf("seed storage target error: %v", err)
+ }
+ task := &model.BackupTask{Name: "website", Type: "file", Enabled: true, SourcePath: "/srv/www/site", StorageTargetID: storageTarget.ID, RetentionDays: 30, Compression: "gzip", MaxBackups: 10, LastStatus: "idle"}
+ if err := db.Create(task).Error; err != nil {
+ t.Fatalf("seed backup task error: %v", err)
+ }
+ return NewBackupRecordRepository(db)
+}
+
+// TestBackupRecordRepositoryQueries exercises the whole query surface of
+// the record repository against a single seeded "success" record:
+// Create/FindByID, filtered List, ListRecent, Count, CountSuccessSince,
+// SumFileSize, TimelineSince, StorageUsage, and finally Delete.
+func TestBackupRecordRepositoryQueries(t *testing.T) {
+ ctx := context.Background()
+ repo := newBackupRecordTestRepository(t)
+ now := time.Now().UTC()
+ completedAt := now.Add(2 * time.Minute)
+ // TaskID/StorageTargetID of 1 match the rows seeded by the helper
+ // above (first inserts into an empty database).
+ record := &model.BackupRecord{
+ TaskID: 1,
+ StorageTargetID: 1,
+ Status: "success",
+ FileName: "website.tar.gz",
+ FileSize: 1024,
+ StoragePath: "tasks/1/website.tar.gz",
+ DurationSeconds: 120,
+ LogContent: "done",
+ StartedAt: now,
+ CompletedAt: &completedAt,
+ }
+ if err := repo.Create(ctx, record); err != nil {
+ t.Fatalf("Create returned error: %v", err)
+ }
+ stored, err := repo.FindByID(ctx, record.ID)
+ if err != nil {
+ t.Fatalf("FindByID returned error: %v", err)
+ }
+ if stored == nil || stored.FileName != "website.tar.gz" {
+ t.Fatalf("unexpected stored record: %#v", stored)
+ }
+ listed, err := repo.List(ctx, BackupRecordListOptions{TaskID: &record.TaskID, Status: "success"})
+ if err != nil {
+ t.Fatalf("List returned error: %v", err)
+ }
+ if len(listed) != 1 {
+ t.Fatalf("expected one listed record, got %d", len(listed))
+ }
+ recent, err := repo.ListRecent(ctx, 5)
+ if err != nil {
+ t.Fatalf("ListRecent returned error: %v", err)
+ }
+ if len(recent) != 1 {
+ t.Fatalf("expected one recent record, got %d", len(recent))
+ }
+ total, err := repo.Count(ctx)
+ if err != nil {
+ t.Fatalf("Count returned error: %v", err)
+ }
+ if total != 1 {
+ t.Fatalf("expected total count 1, got %d", total)
+ }
+ successCount, err := repo.CountSuccessSince(ctx, now.Add(-time.Hour))
+ if err != nil {
+ t.Fatalf("CountSuccessSince returned error: %v", err)
+ }
+ if successCount != 1 {
+ t.Fatalf("expected success count 1, got %d", successCount)
+ }
+ sum, err := repo.SumFileSize(ctx)
+ if err != nil {
+ t.Fatalf("SumFileSize returned error: %v", err)
+ }
+ if sum != 1024 {
+ t.Fatalf("expected file size sum 1024, got %d", sum)
+ }
+ timeline, err := repo.TimelineSince(ctx, now.Add(-time.Hour))
+ if err != nil {
+ t.Fatalf("TimelineSince returned error: %v", err)
+ }
+ if len(timeline) != 1 || timeline[0].Success != 1 {
+ t.Fatalf("unexpected timeline: %#v", timeline)
+ }
+ usage, err := repo.StorageUsage(ctx)
+ if err != nil {
+ t.Fatalf("StorageUsage returned error: %v", err)
+ }
+ if len(usage) != 1 || usage[0].TotalSize != 1024 {
+ t.Fatalf("unexpected usage: %#v", usage)
+ }
+ if err := repo.Delete(ctx, record.ID); err != nil {
+ t.Fatalf("Delete returned error: %v", err)
+ }
+}
diff --git a/server/internal/repository/backup_task_repository.go b/server/internal/repository/backup_task_repository.go
new file mode 100644
index 0000000..05d0601
--- /dev/null
+++ b/server/internal/repository/backup_task_repository.go
@@ -0,0 +1,116 @@
+package repository
+
+import (
+ "context"
+ "errors"
+
+ "backupx/server/internal/model"
+ "gorm.io/gorm"
+)
+
+// BackupTaskListOptions narrows List results. Zero values disable a
+// filter: an empty Type matches all types, a nil Enabled matches both
+// enabled and disabled tasks.
+type BackupTaskListOptions struct {
+ Type string
+ Enabled *bool
+}
+
+// BackupTaskRepository is the persistence contract for backup tasks.
+// Find* methods return (nil, nil) when no row matches.
+type BackupTaskRepository interface {
+ List(context.Context, BackupTaskListOptions) ([]model.BackupTask, error)
+ FindByID(context.Context, uint) (*model.BackupTask, error)
+ FindByName(context.Context, string) (*model.BackupTask, error)
+ ListSchedulable(context.Context) ([]model.BackupTask, error)
+ Count(context.Context) (int64, error)
+ CountEnabled(context.Context) (int64, error)
+ CountByStorageTargetID(context.Context, uint) (int64, error)
+ Create(context.Context, *model.BackupTask) error
+ Update(context.Context, *model.BackupTask) error
+ Delete(context.Context, uint) error
+}
+
+// GormBackupTaskRepository implements BackupTaskRepository on top of a
+// GORM database handle.
+type GormBackupTaskRepository struct {
+ db *gorm.DB
+}
+
+// NewBackupTaskRepository wraps db in a GormBackupTaskRepository.
+func NewBackupTaskRepository(db *gorm.DB) *GormBackupTaskRepository {
+ return &GormBackupTaskRepository{db: db}
+}
+
+// List returns tasks matching options, newest-updated first, with each
+// task's StorageTarget association preloaded.
+func (r *GormBackupTaskRepository) List(ctx context.Context, options BackupTaskListOptions) ([]model.BackupTask, error) {
+ query := r.db.WithContext(ctx).Model(&model.BackupTask{}).Preload("StorageTarget").Order("updated_at desc")
+ if options.Type != "" {
+ query = query.Where("type = ?", options.Type)
+ }
+ if options.Enabled != nil {
+ query = query.Where("enabled = ?", *options.Enabled)
+ }
+ var items []model.BackupTask
+ if err := query.Find(&items).Error; err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
+// FindByID loads a task (with its StorageTarget) by primary key.
+// Returns (nil, nil) when the id does not exist.
+func (r *GormBackupTaskRepository) FindByID(ctx context.Context, id uint) (*model.BackupTask, error) {
+ var item model.BackupTask
+ if err := r.db.WithContext(ctx).Preload("StorageTarget").First(&item, id).Error; err != nil {
+ if errors.Is(err, gorm.ErrRecordNotFound) {
+ return nil, nil
+ }
+ return nil, err
+ }
+ return &item, nil
+}
+
+// FindByName loads a task by its unique name, without preloading the
+// storage target. Returns (nil, nil) when no task has that name.
+func (r *GormBackupTaskRepository) FindByName(ctx context.Context, name string) (*model.BackupTask, error) {
+ var item model.BackupTask
+ if err := r.db.WithContext(ctx).Where("name = ?", name).First(&item).Error; err != nil {
+ if errors.Is(err, gorm.ErrRecordNotFound) {
+ return nil, nil
+ }
+ return nil, err
+ }
+ return &item, nil
+}
+
+// ListSchedulable returns enabled tasks that have a non-empty cron
+// expression, ordered by id, with StorageTarget preloaded. These are
+// the tasks the scheduler registers.
+func (r *GormBackupTaskRepository) ListSchedulable(ctx context.Context) ([]model.BackupTask, error) {
+ var items []model.BackupTask
+ if err := r.db.WithContext(ctx).Preload("StorageTarget").Where("enabled = ? AND cron_expr <> ''", true).Order("id asc").Find(&items).Error; err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
+// Count returns the total number of backup tasks.
+func (r *GormBackupTaskRepository) Count(ctx context.Context) (int64, error) {
+ var count int64
+ if err := r.db.WithContext(ctx).Model(&model.BackupTask{}).Count(&count).Error; err != nil {
+ return 0, err
+ }
+ return count, nil
+}
+
+// CountEnabled returns the number of tasks with enabled = true.
+func (r *GormBackupTaskRepository) CountEnabled(ctx context.Context) (int64, error) {
+ var count int64
+ if err := r.db.WithContext(ctx).Model(&model.BackupTask{}).Where("enabled = ?", true).Count(&count).Error; err != nil {
+ return 0, err
+ }
+ return count, nil
+}
+
+// CountByStorageTargetID returns how many tasks reference the given
+// storage target (useful for guarding target deletion).
+func (r *GormBackupTaskRepository) CountByStorageTargetID(ctx context.Context, storageTargetID uint) (int64, error) {
+ var count int64
+ if err := r.db.WithContext(ctx).Model(&model.BackupTask{}).Where("storage_target_id = ?", storageTargetID).Count(&count).Error; err != nil {
+ return 0, err
+ }
+ return count, nil
+}
+
+// Create inserts a new task; GORM fills item.ID on success.
+func (r *GormBackupTaskRepository) Create(ctx context.Context, item *model.BackupTask) error {
+ return r.db.WithContext(ctx).Create(item).Error
+}
+
+// Update persists all fields of item (GORM Save writes every column).
+func (r *GormBackupTaskRepository) Update(ctx context.Context, item *model.BackupTask) error {
+ return r.db.WithContext(ctx).Save(item).Error
+}
+
+// Delete removes the task with the given id; deleting a missing id is
+// not an error.
+func (r *GormBackupTaskRepository) Delete(ctx context.Context, id uint) error {
+ return r.db.WithContext(ctx).Delete(&model.BackupTask{}, id).Error
+}
diff --git a/server/internal/repository/backup_task_repository_test.go b/server/internal/repository/backup_task_repository_test.go
new file mode 100644
index 0000000..e29ea18
--- /dev/null
+++ b/server/internal/repository/backup_task_repository_test.go
@@ -0,0 +1,94 @@
+package repository
+
+import (
+ "context"
+ "path/filepath"
+ "testing"
+
+ "backupx/server/internal/config"
+ "backupx/server/internal/database"
+ "backupx/server/internal/logger"
+ "backupx/server/internal/model"
+)
+
+// newBackupTaskTestRepository opens a fresh SQLite database in a
+// per-test temp dir, seeds the storage target the task under test will
+// reference, and returns a task repository bound to that database.
+func newBackupTaskTestRepository(t *testing.T) *GormBackupTaskRepository {
+ t.Helper()
+ log, err := logger.New(config.LogConfig{Level: "error"})
+ if err != nil {
+ t.Fatalf("logger.New returned error: %v", err)
+ }
+ db, err := database.Open(config.DatabaseConfig{Path: filepath.Join(t.TempDir(), "backupx.db")}, log)
+ if err != nil {
+ t.Fatalf("database.Open returned error: %v", err)
+ }
+ if err := db.Create(&model.StorageTarget{Name: "local", Type: "local_disk", Enabled: true, ConfigCiphertext: "{}", ConfigVersion: 1, LastTestStatus: "unknown"}).Error; err != nil {
+ t.Fatalf("seed storage target error: %v", err)
+ }
+ return NewBackupTaskRepository(db)
+}
+
+// TestBackupTaskRepositoryCRUD walks a task through create, update,
+// schedulability transitions (disabled -> excluded, enabled + cron ->
+// included), storage-target reference counting, and delete.
+func TestBackupTaskRepositoryCRUD(t *testing.T) {
+ ctx := context.Background()
+ repo := newBackupTaskTestRepository(t)
+ task := &model.BackupTask{
+ Name: "website",
+ Type: "file",
+ Enabled: true,
+ SourcePath: "/srv/www/site",
+ StorageTargetID: 1,
+ RetentionDays: 30,
+ Compression: "gzip",
+ MaxBackups: 10,
+ LastStatus: "idle",
+ }
+ if err := repo.Create(ctx, task); err != nil {
+ t.Fatalf("Create returned error: %v", err)
+ }
+ stored, err := repo.FindByID(ctx, task.ID)
+ if err != nil {
+ t.Fatalf("FindByID returned error: %v", err)
+ }
+ if stored == nil || stored.Name != "website" {
+ t.Fatalf("unexpected stored task: %#v", stored)
+ }
+ // Disabled task with a cron expression must NOT be schedulable.
+ stored.Enabled = false
+ stored.CronExpr = "0 3 * * *"
+ if err := repo.Update(ctx, stored); err != nil {
+ t.Fatalf("Update returned error: %v", err)
+ }
+ schedulable, err := repo.ListSchedulable(ctx)
+ if err != nil {
+ t.Fatalf("ListSchedulable returned error: %v", err)
+ }
+ if len(schedulable) != 0 {
+ t.Fatalf("expected disabled task not schedulable, got %d", len(schedulable))
+ }
+ // Re-enabling with the cron expression set makes it schedulable.
+ stored.Enabled = true
+ if err := repo.Update(ctx, stored); err != nil {
+ t.Fatalf("Update returned error: %v", err)
+ }
+ schedulable, err = repo.ListSchedulable(ctx)
+ if err != nil {
+ t.Fatalf("ListSchedulable returned error: %v", err)
+ }
+ if len(schedulable) != 1 {
+ t.Fatalf("expected one schedulable task, got %d", len(schedulable))
+ }
+ count, err := repo.CountByStorageTargetID(ctx, 1)
+ if err != nil {
+ t.Fatalf("CountByStorageTargetID returned error: %v", err)
+ }
+ if count != 1 {
+ t.Fatalf("expected referenced task count 1, got %d", count)
+ }
+ if err := repo.Delete(ctx, task.ID); err != nil {
+ t.Fatalf("Delete returned error: %v", err)
+ }
+ deleted, err := repo.FindByID(ctx, task.ID)
+ if err != nil {
+ t.Fatalf("FindByID after delete returned error: %v", err)
+ }
+ if deleted != nil {
+ t.Fatalf("expected task deleted, got %#v", deleted)
+ }
+}
diff --git a/server/internal/repository/node_repository.go b/server/internal/repository/node_repository.go
new file mode 100644
index 0000000..87ec957
--- /dev/null
+++ b/server/internal/repository/node_repository.go
@@ -0,0 +1,80 @@
+package repository
+
+import (
+ "context"
+ "errors"
+
+ "backupx/server/internal/model"
+ "gorm.io/gorm"
+)
+
+// NodeRepository is the persistence contract for cluster nodes.
+// Find* methods return (nil, nil) when no row matches.
+type NodeRepository interface {
+ List(context.Context) ([]model.Node, error)
+ FindByID(context.Context, uint) (*model.Node, error)
+ FindByToken(context.Context, string) (*model.Node, error)
+ FindLocal(context.Context) (*model.Node, error)
+ Create(context.Context, *model.Node) error
+ Update(context.Context, *model.Node) error
+ Delete(context.Context, uint) error
+}
+
+// GormNodeRepository implements NodeRepository on top of a GORM handle.
+type GormNodeRepository struct {
+ db *gorm.DB
+}
+
+// NewNodeRepository wraps db in a GormNodeRepository.
+func NewNodeRepository(db *gorm.DB) *GormNodeRepository {
+ return &GormNodeRepository{db: db}
+}
+
+// List returns all nodes, with the local node first, then by most
+// recently updated.
+func (r *GormNodeRepository) List(ctx context.Context) ([]model.Node, error) {
+ var items []model.Node
+ if err := r.db.WithContext(ctx).Order("is_local desc, updated_at desc").Find(&items).Error; err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
+// FindByID loads a node by primary key; (nil, nil) when absent.
+func (r *GormNodeRepository) FindByID(ctx context.Context, id uint) (*model.Node, error) {
+ var item model.Node
+ if err := r.db.WithContext(ctx).First(&item, id).Error; err != nil {
+ if errors.Is(err, gorm.ErrRecordNotFound) {
+ return nil, nil
+ }
+ return nil, err
+ }
+ return &item, nil
+}
+
+// FindByToken loads the node registered with the given auth token;
+// (nil, nil) when no node holds that token.
+func (r *GormNodeRepository) FindByToken(ctx context.Context, token string) (*model.Node, error) {
+ var item model.Node
+ if err := r.db.WithContext(ctx).Where("token = ?", token).First(&item).Error; err != nil {
+ if errors.Is(err, gorm.ErrRecordNotFound) {
+ return nil, nil
+ }
+ return nil, err
+ }
+ return &item, nil
+}
+
+// FindLocal returns the node flagged is_local = true (the first match
+// if several exist); (nil, nil) when none is flagged.
+func (r *GormNodeRepository) FindLocal(ctx context.Context) (*model.Node, error) {
+ var item model.Node
+ if err := r.db.WithContext(ctx).Where("is_local = ?", true).First(&item).Error; err != nil {
+ if errors.Is(err, gorm.ErrRecordNotFound) {
+ return nil, nil
+ }
+ return nil, err
+ }
+ return &item, nil
+}
+
+// Create inserts a new node; GORM fills item.ID on success.
+func (r *GormNodeRepository) Create(ctx context.Context, item *model.Node) error {
+ return r.db.WithContext(ctx).Create(item).Error
+}
+
+// Update persists all fields of item (GORM Save writes every column).
+func (r *GormNodeRepository) Update(ctx context.Context, item *model.Node) error {
+ return r.db.WithContext(ctx).Save(item).Error
+}
+
+// Delete removes the node with the given id; missing ids are not an
+// error.
+func (r *GormNodeRepository) Delete(ctx context.Context, id uint) error {
+ return r.db.WithContext(ctx).Delete(&model.Node{}, id).Error
+}
diff --git a/server/internal/repository/notification_repository.go b/server/internal/repository/notification_repository.go
new file mode 100644
index 0000000..ab7999d
--- /dev/null
+++ b/server/internal/repository/notification_repository.go
@@ -0,0 +1,83 @@
+package repository
+
+import (
+ "context"
+ "errors"
+
+ "backupx/server/internal/model"
+ "gorm.io/gorm"
+)
+
+// NotificationRepository is the persistence contract for notification
+// channels. Find* methods return (nil, nil) when no row matches.
+type NotificationRepository interface {
+ List(context.Context) ([]model.Notification, error)
+ ListEnabledForEvent(context.Context, bool) ([]model.Notification, error)
+ FindByID(context.Context, uint) (*model.Notification, error)
+ FindByName(context.Context, string) (*model.Notification, error)
+ Create(context.Context, *model.Notification) error
+ Update(context.Context, *model.Notification) error
+ Delete(context.Context, uint) error
+}
+
+// GormNotificationRepository implements NotificationRepository on top
+// of a GORM handle.
+type GormNotificationRepository struct {
+ db *gorm.DB
+}
+
+// NewNotificationRepository wraps db in a GormNotificationRepository.
+func NewNotificationRepository(db *gorm.DB) *GormNotificationRepository {
+ return &GormNotificationRepository{db: db}
+}
+
+// List returns all notification channels, newest-updated first.
+func (r *GormNotificationRepository) List(ctx context.Context) ([]model.Notification, error) {
+ var items []model.Notification
+ if err := r.db.WithContext(ctx).Order("updated_at desc").Find(&items).Error; err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
+// ListEnabledForEvent returns enabled channels subscribed to the given
+// backup outcome: on_success = true when success, otherwise
+// on_failure = true. Results are newest-updated first.
+func (r *GormNotificationRepository) ListEnabledForEvent(ctx context.Context, success bool) ([]model.Notification, error) {
+ query := r.db.WithContext(ctx).Model(&model.Notification{}).Where("enabled = ?", true)
+ if success {
+ query = query.Where("on_success = ?", true)
+ } else {
+ query = query.Where("on_failure = ?", true)
+ }
+ var items []model.Notification
+ if err := query.Order("updated_at desc").Find(&items).Error; err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
+// FindByID loads a channel by primary key; (nil, nil) when absent.
+func (r *GormNotificationRepository) FindByID(ctx context.Context, id uint) (*model.Notification, error) {
+ var item model.Notification
+ if err := r.db.WithContext(ctx).First(&item, id).Error; err != nil {
+ if errors.Is(err, gorm.ErrRecordNotFound) {
+ return nil, nil
+ }
+ return nil, err
+ }
+ return &item, nil
+}
+
+// FindByName loads a channel by name; (nil, nil) when absent.
+func (r *GormNotificationRepository) FindByName(ctx context.Context, name string) (*model.Notification, error) {
+ var item model.Notification
+ if err := r.db.WithContext(ctx).Where("name = ?", name).First(&item).Error; err != nil {
+ if errors.Is(err, gorm.ErrRecordNotFound) {
+ return nil, nil
+ }
+ return nil, err
+ }
+ return &item, nil
+}
+
+// Create inserts a new channel; GORM fills item.ID on success.
+func (r *GormNotificationRepository) Create(ctx context.Context, item *model.Notification) error {
+ return r.db.WithContext(ctx).Create(item).Error
+}
+
+// Update persists all fields of item (GORM Save writes every column).
+func (r *GormNotificationRepository) Update(ctx context.Context, item *model.Notification) error {
+ return r.db.WithContext(ctx).Save(item).Error
+}
+
+// Delete removes the channel with the given id; missing ids are not an
+// error.
+func (r *GormNotificationRepository) Delete(ctx context.Context, id uint) error {
+ return r.db.WithContext(ctx).Delete(&model.Notification{}, id).Error
+}
diff --git a/server/internal/repository/notification_repository_test.go b/server/internal/repository/notification_repository_test.go
new file mode 100644
index 0000000..053c25d
--- /dev/null
+++ b/server/internal/repository/notification_repository_test.go
@@ -0,0 +1,69 @@
+package repository
+
+import (
+ "context"
+ "path/filepath"
+ "testing"
+
+ "backupx/server/internal/config"
+ "backupx/server/internal/database"
+ "backupx/server/internal/logger"
+ "backupx/server/internal/model"
+)
+
+// newNotificationTestRepository opens a fresh SQLite database in a
+// per-test temp dir and returns a notification repository bound to it.
+// No seed rows are needed: notifications have no foreign keys here.
+func newNotificationTestRepository(t *testing.T) *GormNotificationRepository {
+ t.Helper()
+ log, err := logger.New(config.LogConfig{Level: "error"})
+ if err != nil {
+ t.Fatalf("logger.New returned error: %v", err)
+ }
+ db, err := database.Open(config.DatabaseConfig{Path: filepath.Join(t.TempDir(), "backupx.db")}, log)
+ if err != nil {
+ t.Fatalf("database.Open returned error: %v", err)
+ }
+ return NewNotificationRepository(db)
+}
+
+// TestNotificationRepositoryCRUD verifies create/lookup, the
+// event-subscription filters (failure-only first, then success after an
+// update), and delete for a single webhook channel.
+func TestNotificationRepositoryCRUD(t *testing.T) {
+ ctx := context.Background()
+ repo := newNotificationTestRepository(t)
+ item := &model.Notification{
+ Type: "webhook",
+ Name: "ops-webhook",
+ ConfigCiphertext: "ciphertext",
+ Enabled: true,
+ OnSuccess: false,
+ OnFailure: true,
+ }
+ if err := repo.Create(ctx, item); err != nil {
+ t.Fatalf("Create returned error: %v", err)
+ }
+ stored, err := repo.FindByName(ctx, "ops-webhook")
+ if err != nil {
+ t.Fatalf("FindByName returned error: %v", err)
+ }
+ if stored == nil || stored.Name != "ops-webhook" {
+ t.Fatalf("unexpected notification: %#v", stored)
+ }
+ enabledForFailure, err := repo.ListEnabledForEvent(ctx, false)
+ if err != nil {
+ t.Fatalf("ListEnabledForEvent returned error: %v", err)
+ }
+ if len(enabledForFailure) != 1 {
+ t.Fatalf("expected one failure notification, got %d", len(enabledForFailure))
+ }
+ // Opting the channel into success events makes it visible to the
+ // success-side query as well.
+ stored.OnSuccess = true
+ if err := repo.Update(ctx, stored); err != nil {
+ t.Fatalf("Update returned error: %v", err)
+ }
+ enabledForSuccess, err := repo.ListEnabledForEvent(ctx, true)
+ if err != nil {
+ t.Fatalf("ListEnabledForEvent returned error: %v", err)
+ }
+ if len(enabledForSuccess) != 1 {
+ t.Fatalf("expected one success notification, got %d", len(enabledForSuccess))
+ }
+ if err := repo.Delete(ctx, item.ID); err != nil {
+ t.Fatalf("Delete returned error: %v", err)
+ }
+}
diff --git a/server/internal/repository/oauth_session_repository.go b/server/internal/repository/oauth_session_repository.go
new file mode 100644
index 0000000..f9ae244
--- /dev/null
+++ b/server/internal/repository/oauth_session_repository.go
@@ -0,0 +1,48 @@
+package repository
+
+import (
+ "context"
+ "errors"
+ "time"
+
+ "backupx/server/internal/model"
+ "gorm.io/gorm"
+)
+
+// OAuthSessionRepository persists short-lived OAuth authorization
+// sessions keyed by their state parameter. FindByState returns
+// (nil, nil) when the state is unknown.
+type OAuthSessionRepository interface {
+ Create(context.Context, *model.OAuthSession) error
+ Update(context.Context, *model.OAuthSession) error
+ FindByState(context.Context, string) (*model.OAuthSession, error)
+ DeleteExpired(context.Context, time.Time) error
+}
+
+// GormOAuthSessionRepository implements OAuthSessionRepository on top
+// of a GORM handle.
+type GormOAuthSessionRepository struct {
+ db *gorm.DB
+}
+
+// NewOAuthSessionRepository wraps db in a GormOAuthSessionRepository.
+func NewOAuthSessionRepository(db *gorm.DB) *GormOAuthSessionRepository {
+ return &GormOAuthSessionRepository{db: db}
+}
+
+// Create inserts a new session; GORM fills item.ID on success.
+func (r *GormOAuthSessionRepository) Create(ctx context.Context, item *model.OAuthSession) error {
+ return r.db.WithContext(ctx).Create(item).Error
+}
+
+// Update persists all fields of item (GORM Save writes every column).
+func (r *GormOAuthSessionRepository) Update(ctx context.Context, item *model.OAuthSession) error {
+ return r.db.WithContext(ctx).Save(item).Error
+}
+
+// FindByState looks up a session by its OAuth state value; (nil, nil)
+// when no session carries that state.
+func (r *GormOAuthSessionRepository) FindByState(ctx context.Context, state string) (*model.OAuthSession, error) {
+ var item model.OAuthSession
+ if err := r.db.WithContext(ctx).Where("state = ?", state).First(&item).Error; err != nil {
+ if errors.Is(err, gorm.ErrRecordNotFound) {
+ return nil, nil
+ }
+ return nil, err
+ }
+ return &item, nil
+}
+
+// DeleteExpired removes every session whose expires_at is at or before
+// the given cutoff.
+func (r *GormOAuthSessionRepository) DeleteExpired(ctx context.Context, before time.Time) error {
+ return r.db.WithContext(ctx).Where("expires_at <= ?", before).Delete(&model.OAuthSession{}).Error
+}
diff --git a/server/internal/repository/oauth_session_repository_test.go b/server/internal/repository/oauth_session_repository_test.go
new file mode 100644
index 0000000..6f1185a
--- /dev/null
+++ b/server/internal/repository/oauth_session_repository_test.go
@@ -0,0 +1,73 @@
+package repository
+
+import (
+ "context"
+ "path/filepath"
+ "testing"
+ "time"
+
+ "backupx/server/internal/config"
+ "backupx/server/internal/database"
+ "backupx/server/internal/logger"
+ "backupx/server/internal/model"
+)
+
+// newOAuthSessionTestRepository opens a fresh SQLite database in a
+// per-test temp dir and returns an OAuth session repository bound to it.
+func newOAuthSessionTestRepository(t *testing.T) *GormOAuthSessionRepository {
+ t.Helper()
+ log, err := logger.New(config.LogConfig{Level: "error"})
+ if err != nil {
+ t.Fatalf("logger.New returned error: %v", err)
+ }
+ db, err := database.Open(config.DatabaseConfig{Path: filepath.Join(t.TempDir(), "backupx.db")}, log)
+ if err != nil {
+ t.Fatalf("database.Open returned error: %v", err)
+ }
+ return NewOAuthSessionRepository(db)
+}
+
+// TestOAuthSessionRepositoryCRUDAndDeleteExpired creates a session
+// expiring 5 minutes from now, marks it used, then checks DeleteExpired
+// with a cutoff in the past keeps it and a cutoff past its expiry
+// removes it.
+func TestOAuthSessionRepositoryCRUDAndDeleteExpired(t *testing.T) {
+ ctx := context.Background()
+ repo := newOAuthSessionTestRepository(t)
+ expiresAt := time.Now().UTC().Add(5 * time.Minute)
+ session := &model.OAuthSession{
+ ProviderType: "google_drive",
+ State: "oauth-state",
+ PayloadCiphertext: "ciphertext",
+ ExpiresAt: expiresAt,
+ }
+ if err := repo.Create(ctx, session); err != nil {
+ t.Fatalf("Create returned error: %v", err)
+ }
+ stored, err := repo.FindByState(ctx, "oauth-state")
+ if err != nil {
+ t.Fatalf("FindByState returned error: %v", err)
+ }
+ if stored == nil || stored.State != "oauth-state" {
+ t.Fatalf("unexpected stored session: %#v", stored)
+ }
+ now := time.Now().UTC()
+ stored.UsedAt = &now
+ if err := repo.Update(ctx, stored); err != nil {
+ t.Fatalf("Update returned error: %v", err)
+ }
+ // Cutoff one minute in the past: the session (expiring in the
+ // future) must survive.
+ if err := repo.DeleteExpired(ctx, time.Now().UTC().Add(-time.Minute)); err != nil {
+ t.Fatalf("DeleteExpired returned error: %v", err)
+ }
+ stillThere, err := repo.FindByState(ctx, "oauth-state")
+ if err != nil {
+ t.Fatalf("FindByState after DeleteExpired returned error: %v", err)
+ }
+ if stillThere == nil {
+ t.Fatalf("expected unexpired session to remain")
+ }
+ // Cutoff 10 minutes out (past the 5-minute expiry): now it goes.
+ if err := repo.DeleteExpired(ctx, time.Now().UTC().Add(10*time.Minute)); err != nil {
+ t.Fatalf("DeleteExpired returned error: %v", err)
+ }
+ deleted, err := repo.FindByState(ctx, "oauth-state")
+ if err != nil {
+ t.Fatalf("FindByState after expiration delete returned error: %v", err)
+ }
+ if deleted != nil {
+ t.Fatalf("expected session to be deleted, got %#v", deleted)
+ }
+}
diff --git a/server/internal/repository/storage_target_repository.go b/server/internal/repository/storage_target_repository.go
new file mode 100644
index 0000000..a206176
--- /dev/null
+++ b/server/internal/repository/storage_target_repository.go
@@ -0,0 +1,68 @@
+package repository
+
+import (
+ "context"
+ "errors"
+
+ "backupx/server/internal/model"
+ "gorm.io/gorm"
+)
+
+// StorageTargetRepository is the persistence contract for storage
+// targets. Find* methods return (nil, nil) when no row matches.
+type StorageTargetRepository interface {
+ List(context.Context) ([]model.StorageTarget, error)
+ FindByID(context.Context, uint) (*model.StorageTarget, error)
+ FindByName(context.Context, string) (*model.StorageTarget, error)
+ Create(context.Context, *model.StorageTarget) error
+ Update(context.Context, *model.StorageTarget) error
+ Delete(context.Context, uint) error
+}
+
+// GormStorageTargetRepository implements StorageTargetRepository on top
+// of a GORM handle.
+type GormStorageTargetRepository struct {
+ db *gorm.DB
+}
+
+// NewStorageTargetRepository wraps db in a GormStorageTargetRepository.
+func NewStorageTargetRepository(db *gorm.DB) *GormStorageTargetRepository {
+ return &GormStorageTargetRepository{db: db}
+}
+
+// List returns all storage targets, newest-updated first.
+func (r *GormStorageTargetRepository) List(ctx context.Context) ([]model.StorageTarget, error) {
+ var items []model.StorageTarget
+ if err := r.db.WithContext(ctx).Order("updated_at desc").Find(&items).Error; err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
+// FindByID loads a target by primary key; (nil, nil) when absent.
+func (r *GormStorageTargetRepository) FindByID(ctx context.Context, id uint) (*model.StorageTarget, error) {
+ var item model.StorageTarget
+ if err := r.db.WithContext(ctx).First(&item, id).Error; err != nil {
+ if errors.Is(err, gorm.ErrRecordNotFound) {
+ return nil, nil
+ }
+ return nil, err
+ }
+ return &item, nil
+}
+
+// FindByName loads a target by name; (nil, nil) when absent.
+func (r *GormStorageTargetRepository) FindByName(ctx context.Context, name string) (*model.StorageTarget, error) {
+ var item model.StorageTarget
+ if err := r.db.WithContext(ctx).Where("name = ?", name).First(&item).Error; err != nil {
+ if errors.Is(err, gorm.ErrRecordNotFound) {
+ return nil, nil
+ }
+ return nil, err
+ }
+ return &item, nil
+}
+
+// Create inserts a new target; GORM fills item.ID on success.
+func (r *GormStorageTargetRepository) Create(ctx context.Context, item *model.StorageTarget) error {
+ return r.db.WithContext(ctx).Create(item).Error
+}
+
+// Update persists all fields of item (GORM Save writes every column).
+func (r *GormStorageTargetRepository) Update(ctx context.Context, item *model.StorageTarget) error {
+ return r.db.WithContext(ctx).Save(item).Error
+}
+
+// Delete removes the target with the given id; missing ids are not an
+// error.
+func (r *GormStorageTargetRepository) Delete(ctx context.Context, id uint) error {
+ return r.db.WithContext(ctx).Delete(&model.StorageTarget{}, id).Error
+}
diff --git a/server/internal/repository/storage_target_repository_test.go b/server/internal/repository/storage_target_repository_test.go
new file mode 100644
index 0000000..018dfe6
--- /dev/null
+++ b/server/internal/repository/storage_target_repository_test.go
@@ -0,0 +1,81 @@
+package repository
+
+import (
+ "context"
+ "path/filepath"
+ "testing"
+
+ "backupx/server/internal/config"
+ "backupx/server/internal/database"
+ "backupx/server/internal/logger"
+ "backupx/server/internal/model"
+)
+
+// NOTE(review): despite its name, openTestDB does not open a database;
+// it only returns a background context (database setup lives in
+// newStorageTestRepository below). Consider renaming to something like
+// testContext — kept as-is here because the test below calls it by this
+// name.
+func openTestDB(t *testing.T) context.Context {
+ t.Helper()
+ return context.Background()
+}
+
+// newStorageTestRepository opens a fresh SQLite database in a per-test
+// temp dir and returns a storage target repository bound to it.
+func newStorageTestRepository(t *testing.T) *GormStorageTargetRepository {
+ t.Helper()
+ log, err := logger.New(config.LogConfig{Level: "error"})
+ if err != nil {
+ t.Fatalf("logger.New returned error: %v", err)
+ }
+ db, err := database.Open(config.DatabaseConfig{Path: filepath.Join(t.TempDir(), "backupx.db")}, log)
+ if err != nil {
+ t.Fatalf("database.Open returned error: %v", err)
+ }
+ return NewStorageTargetRepository(db)
+}
+
+// TestStorageTargetRepositoryCRUD verifies create, lookup by id and by
+// name, update visibility through List, and delete for a single target.
+func TestStorageTargetRepositoryCRUD(t *testing.T) {
+ ctx := openTestDB(t)
+ repo := newStorageTestRepository(t)
+ item := &model.StorageTarget{
+ Name: "local",
+ Type: "local_disk",
+ Enabled: true,
+ ConfigCiphertext: "ciphertext",
+ ConfigVersion: 1,
+ LastTestStatus: "unknown",
+ }
+ if err := repo.Create(ctx, item); err != nil {
+ t.Fatalf("Create returned error: %v", err)
+ }
+ stored, err := repo.FindByID(ctx, item.ID)
+ if err != nil {
+ t.Fatalf("FindByID returned error: %v", err)
+ }
+ if stored == nil || stored.Name != "local" {
+ t.Fatalf("unexpected stored target: %#v", stored)
+ }
+ byName, err := repo.FindByName(ctx, "local")
+ if err != nil {
+ t.Fatalf("FindByName returned error: %v", err)
+ }
+ if byName == nil || byName.ID != item.ID {
+ t.Fatalf("expected target lookup by name to match, got %#v", byName)
+ }
+ stored.Description = "updated"
+ if err := repo.Update(ctx, stored); err != nil {
+ t.Fatalf("Update returned error: %v", err)
+ }
+ items, err := repo.List(ctx)
+ if err != nil {
+ t.Fatalf("List returned error: %v", err)
+ }
+ if len(items) != 1 || items[0].Description != "updated" {
+ t.Fatalf("unexpected list result: %#v", items)
+ }
+ if err := repo.Delete(ctx, item.ID); err != nil {
+ t.Fatalf("Delete returned error: %v", err)
+ }
+ deleted, err := repo.FindByID(ctx, item.ID)
+ if err != nil {
+ t.Fatalf("FindByID after delete returned error: %v", err)
+ }
+ if deleted != nil {
+ t.Fatalf("expected target to be deleted, got %#v", deleted)
+ }
+}
diff --git a/server/internal/repository/system_config_repository.go b/server/internal/repository/system_config_repository.go
new file mode 100644
index 0000000..f6ada88
--- /dev/null
+++ b/server/internal/repository/system_config_repository.go
@@ -0,0 +1,50 @@
+package repository
+
+import (
+ "context"
+ "errors"
+
+ "backupx/server/internal/model"
+ "gorm.io/gorm"
+ "gorm.io/gorm/clause"
+)
+
+// SystemConfigRepository persists key/value system configuration.
+// GetByKey returns (nil, nil) when the key is absent.
+type SystemConfigRepository interface {
+ GetByKey(context.Context, string) (*model.SystemConfig, error)
+ List(context.Context) ([]model.SystemConfig, error)
+ Upsert(context.Context, *model.SystemConfig) error
+}
+
+// GormSystemConfigRepository implements SystemConfigRepository on top
+// of a GORM handle.
+type GormSystemConfigRepository struct {
+ db *gorm.DB
+}
+
+// NewSystemConfigRepository wraps db in a GormSystemConfigRepository.
+func NewSystemConfigRepository(db *gorm.DB) *GormSystemConfigRepository {
+ return &GormSystemConfigRepository{db: db}
+}
+
+// GetByKey loads the config entry with the given key; (nil, nil) when
+// the key does not exist.
+func (r *GormSystemConfigRepository) GetByKey(ctx context.Context, key string) (*model.SystemConfig, error) {
+ var item model.SystemConfig
+ if err := r.db.WithContext(ctx).Where("key = ?", key).First(&item).Error; err != nil {
+ if errors.Is(err, gorm.ErrRecordNotFound) {
+ return nil, nil
+ }
+ return nil, err
+ }
+ return &item, nil
+}
+
+// List returns all config entries ordered by key.
+func (r *GormSystemConfigRepository) List(ctx context.Context) ([]model.SystemConfig, error) {
+ var items []model.SystemConfig
+ if err := r.db.WithContext(ctx).Order("key ASC").Find(&items).Error; err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
+// Upsert inserts item, or — when a row with the same key already exists
+// — updates only its value, encrypted, and updated_at columns
+// (ON CONFLICT(key) DO UPDATE).
+func (r *GormSystemConfigRepository) Upsert(ctx context.Context, item *model.SystemConfig) error {
+ return r.db.WithContext(ctx).Clauses(clause.OnConflict{
+ Columns: []clause.Column{{Name: "key"}},
+ DoUpdates: clause.AssignmentColumns([]string{"value", "encrypted", "updated_at"}),
+ }).Create(item).Error
+}
diff --git a/server/internal/repository/user_repository.go b/server/internal/repository/user_repository.go
new file mode 100644
index 0000000..32f858d
--- /dev/null
+++ b/server/internal/repository/user_repository.go
@@ -0,0 +1,63 @@
+package repository
+
+import (
+ "context"
+ "errors"
+
+ "backupx/server/internal/model"
+ "gorm.io/gorm"
+)
+
+// UserRepository is the persistence contract for user accounts.
+// Find* methods return (nil, nil) when no row matches.
+type UserRepository interface {
+ Count(context.Context) (int64, error)
+ Create(context.Context, *model.User) error
+ Update(context.Context, *model.User) error
+ FindByUsername(context.Context, string) (*model.User, error)
+ FindByID(context.Context, uint) (*model.User, error)
+}
+
+// GormUserRepository implements UserRepository on top of a GORM handle.
+type GormUserRepository struct {
+ db *gorm.DB
+}
+
+// NewUserRepository wraps db in a GormUserRepository.
+func NewUserRepository(db *gorm.DB) *GormUserRepository {
+ return &GormUserRepository{db: db}
+}
+
+// Count returns the total number of users (e.g. to detect first-run
+// setup when zero).
+func (r *GormUserRepository) Count(ctx context.Context) (int64, error) {
+ var count int64
+ if err := r.db.WithContext(ctx).Model(&model.User{}).Count(&count).Error; err != nil {
+ return 0, err
+ }
+ return count, nil
+}
+
+// Create inserts a new user; GORM fills user.ID on success.
+func (r *GormUserRepository) Create(ctx context.Context, user *model.User) error {
+ return r.db.WithContext(ctx).Create(user).Error
+}
+
+// Update persists all fields of user (GORM Save writes every column).
+func (r *GormUserRepository) Update(ctx context.Context, user *model.User) error {
+ return r.db.WithContext(ctx).Save(user).Error
+}
+
+// FindByUsername loads a user by username; (nil, nil) when absent.
+func (r *GormUserRepository) FindByUsername(ctx context.Context, username string) (*model.User, error) {
+ var user model.User
+ if err := r.db.WithContext(ctx).Where("username = ?", username).First(&user).Error; err != nil {
+ if errors.Is(err, gorm.ErrRecordNotFound) {
+ return nil, nil
+ }
+ return nil, err
+ }
+ return &user, nil
+}
+
+// FindByID loads a user by primary key; (nil, nil) when absent.
+func (r *GormUserRepository) FindByID(ctx context.Context, id uint) (*model.User, error) {
+ var user model.User
+ if err := r.db.WithContext(ctx).First(&user, id).Error; err != nil {
+ if errors.Is(err, gorm.ErrRecordNotFound) {
+ return nil, nil
+ }
+ return nil, err
+ }
+ return &user, nil
+}
diff --git a/server/internal/scheduler/service.go b/server/internal/scheduler/service.go
new file mode 100644
index 0000000..22b44f1
--- /dev/null
+++ b/server/internal/scheduler/service.go
@@ -0,0 +1,109 @@
+package scheduler
+
+import (
+ "context"
+ "fmt"
+ "sync"
+ "time"
+
+ "backupx/server/internal/model"
+ "backupx/server/internal/repository"
+ servicepkg "backupx/server/internal/service"
+ "github.com/robfig/cron/v3"
+ "go.uber.org/zap"
+)
+
+type TaskRunner interface {
+ RunTaskByID(context.Context, uint) (*servicepkg.BackupRecordDetail, error)
+}
+
+type Service struct {
+ mu sync.Mutex
+ cron *cron.Cron
+ tasks repository.BackupTaskRepository
+ runner TaskRunner
+ logger *zap.Logger
+ entries map[uint]cron.EntryID
+}
+
+func NewService(tasks repository.BackupTaskRepository, runner TaskRunner, logger *zap.Logger) *Service {
+ parser := cron.NewParser(cron.SecondOptional | cron.Minute | cron.Hour | cron.Dom | cron.Month | cron.Dow | cron.Descriptor)
+ return &Service{cron: cron.New(cron.WithParser(parser), cron.WithLocation(time.UTC)), tasks: tasks, runner: runner, logger: logger, entries: make(map[uint]cron.EntryID)}
+}
+
+func (s *Service) Start(ctx context.Context) error {
+ if err := s.Reload(ctx); err != nil {
+ return err
+ }
+ s.cron.Start()
+ return nil
+}
+
+func (s *Service) Stop(ctx context.Context) error {
+ stopCtx := s.cron.Stop()
+ select {
+ case <-stopCtx.Done():
+ return nil
+ case <-ctx.Done():
+ return ctx.Err()
+ }
+}
+
+func (s *Service) Reload(ctx context.Context) error {
+ items, err := s.tasks.ListSchedulable(ctx)
+ if err != nil {
+ return err
+ }
+ s.mu.Lock()
+ defer s.mu.Unlock()
+ for taskID, entryID := range s.entries {
+ s.cron.Remove(entryID)
+ delete(s.entries, taskID)
+ }
+ for _, item := range items {
+ item := item
+ if err := s.syncTaskLocked(&item); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (s *Service) SyncTask(_ context.Context, task *model.BackupTask) error {
+ s.mu.Lock()
+ defer s.mu.Unlock()
+ return s.syncTaskLocked(task)
+}
+
+func (s *Service) RemoveTask(_ context.Context, taskID uint) error {
+ s.mu.Lock()
+ defer s.mu.Unlock()
+ if entryID, ok := s.entries[taskID]; ok {
+ s.cron.Remove(entryID)
+ delete(s.entries, taskID)
+ }
+ return nil
+}
+
+func (s *Service) syncTaskLocked(task *model.BackupTask) error {
+ if task == nil {
+ return fmt.Errorf("task is required")
+ }
+ if entryID, ok := s.entries[task.ID]; ok {
+ s.cron.Remove(entryID)
+ delete(s.entries, task.ID)
+ }
+ if !task.Enabled || task.CronExpr == "" {
+ return nil
+ }
+ entryID, err := s.cron.AddFunc(task.CronExpr, func() {
+ if _, runErr := s.runner.RunTaskByID(context.Background(), task.ID); runErr != nil && s.logger != nil {
+ s.logger.Warn("scheduled backup run failed", zap.Uint("task_id", task.ID), zap.Error(runErr))
+ }
+ })
+ if err != nil {
+ return err
+ }
+ s.entries[task.ID] = entryID
+ return nil
+}
diff --git a/server/internal/scheduler/service_test.go b/server/internal/scheduler/service_test.go
new file mode 100644
index 0000000..074bb97
--- /dev/null
+++ b/server/internal/scheduler/service_test.go
@@ -0,0 +1,58 @@
+package scheduler
+
import (
	"context"
	"sync"
	"testing"
	"time"

	"backupx/server/internal/model"
	"backupx/server/internal/repository"
	servicepkg "backupx/server/internal/service"
)
+
// fakeTaskRepository is a minimal in-memory stand-in for
// repository.BackupTaskRepository; only ListSchedulable returns data,
// every other method is a stub returning zero values.
type fakeTaskRepository struct {
	items []model.BackupTask
}

func (r *fakeTaskRepository) List(context.Context, repository.BackupTaskListOptions) ([]model.BackupTask, error) {
	return nil, nil
}
func (r *fakeTaskRepository) FindByID(context.Context, uint) (*model.BackupTask, error) {
	return nil, nil
}
func (r *fakeTaskRepository) FindByName(context.Context, string) (*model.BackupTask, error) {
	return nil, nil
}
// ListSchedulable is the only method the scheduler exercises; it returns
// the tasks seeded on the fake.
func (r *fakeTaskRepository) ListSchedulable(context.Context) ([]model.BackupTask, error) {
	return r.items, nil
}
func (r *fakeTaskRepository) Count(context.Context) (int64, error)        { return 0, nil }
func (r *fakeTaskRepository) CountEnabled(context.Context) (int64, error) { return 0, nil }
func (r *fakeTaskRepository) CountByStorageTargetID(context.Context, uint) (int64, error) {
	return 0, nil
}
func (r *fakeTaskRepository) Create(context.Context, *model.BackupTask) error { return nil }
func (r *fakeTaskRepository) Update(context.Context, *model.BackupTask) error { return nil }
func (r *fakeTaskRepository) Delete(context.Context, uint) error              { return nil }
+
+type fakeRunner struct{ taskIDs []uint }
+
+func (r *fakeRunner) RunTaskByID(_ context.Context, id uint) (*servicepkg.BackupRecordDetail, error) {
+ r.taskIDs = append(r.taskIDs, id)
+ return nil, nil
+}
+
+func TestServiceSyncTaskAndTrigger(t *testing.T) {
+ repo := &fakeTaskRepository{}
+ runner := &fakeRunner{}
+ service := NewService(repo, runner, nil)
+ if err := service.SyncTask(context.Background(), &model.BackupTask{ID: 1, Enabled: true, CronExpr: "*/1 * * * * *"}); err != nil {
+ t.Fatalf("SyncTask returned error: %v", err)
+ }
+ service.cron.Start()
+ defer service.cron.Stop()
+ time.Sleep(1100 * time.Millisecond)
+ if len(runner.taskIDs) == 0 {
+ t.Fatalf("expected scheduled runner to be triggered")
+ }
+}
diff --git a/server/internal/security/jwt.go b/server/internal/security/jwt.go
new file mode 100644
index 0000000..285c86a
--- /dev/null
+++ b/server/internal/security/jwt.go
@@ -0,0 +1,60 @@
+//go:build ignore
+
+package security
+
+import (
+ "fmt"
+ "time"
+
+ "backupx/server/internal/model"
+ "github.com/golang-jwt/jwt/v5"
+)
+
+type Claims struct {
+ UserID uint `json:"userId"`
+ Username string `json:"username"`
+ Role string `json:"role"`
+ jwt.RegisteredClaims
+}
+
+type JWTManager struct {
+ secret []byte
+ duration time.Duration
+}
+
+func NewJWTManager(secret string, duration time.Duration) *JWTManager {
+ return &JWTManager{secret: []byte(secret), duration: duration}
+}
+
+func (m *JWTManager) IssueToken(user *model.User) (string, error) {
+ now := time.Now().UTC()
+ claims := Claims{
+ UserID: user.ID,
+ Username: user.Username,
+ Role: user.Role,
+ RegisteredClaims: jwt.RegisteredClaims{
+ Subject: fmt.Sprintf("%d", user.ID),
+ IssuedAt: jwt.NewNumericDate(now),
+ ExpiresAt: jwt.NewNumericDate(now.Add(m.duration)),
+ },
+ }
+ token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
+ return token.SignedString(m.secret)
+}
+
+func (m *JWTManager) Parse(tokenValue string) (*Claims, error) {
+ token, err := jwt.ParseWithClaims(tokenValue, &Claims{}, func(token *jwt.Token) (any, error) {
+ if token.Method != jwt.SigningMethodHS256 {
+ return nil, fmt.Errorf("unexpected signing method")
+ }
+ return m.secret, nil
+ })
+ if err != nil {
+ return nil, err
+ }
+ claims, ok := token.Claims.(*Claims)
+ if !ok || !token.Valid {
+ return nil, fmt.Errorf("invalid token")
+ }
+ return claims, nil
+}
diff --git a/server/internal/security/jwt_test.go b/server/internal/security/jwt_test.go
new file mode 100644
index 0000000..9b6e455
--- /dev/null
+++ b/server/internal/security/jwt_test.go
@@ -0,0 +1,25 @@
+//go:build ignore
+
+package security
+
+import (
+ "testing"
+ "time"
+
+ "backupx/server/internal/model"
+)
+
+func TestJWTManagerIssueAndParse(t *testing.T) {
+ manager := NewJWTManager("test-secret", time.Hour)
+ token, err := manager.IssueToken(&model.User{ID: 7, Username: "admin", Role: "admin"})
+ if err != nil {
+ t.Fatalf("IssueToken() error = %v", err)
+ }
+ claims, err := manager.Parse(token)
+ if err != nil {
+ t.Fatalf("Parse() error = %v", err)
+ }
+ if claims.UserID != 7 || claims.Username != "admin" {
+ t.Fatalf("unexpected claims: %+v", claims)
+ }
+}
diff --git a/server/internal/security/limiter.go b/server/internal/security/limiter.go
new file mode 100644
index 0000000..91f5b24
--- /dev/null
+++ b/server/internal/security/limiter.go
@@ -0,0 +1,54 @@
+//go:build ignore
+
+package security
+
+import (
+ "sync"
+ "time"
+)
+
+type limiterEntry struct {
+ Count int
+ ResetAt time.Time
+}
+
+type LoginLimiter struct {
+ mu sync.Mutex
+ window time.Duration
+ max int
+ records map[string]limiterEntry
+}
+
+func NewLoginLimiter(max int, window time.Duration) *LoginLimiter {
+ return &LoginLimiter{window: window, max: max, records: make(map[string]limiterEntry)}
+}
+
+func (l *LoginLimiter) Allow(key string) bool {
+ l.mu.Lock()
+ defer l.mu.Unlock()
+ entry, ok := l.records[key]
+ if !ok || time.Now().After(entry.ResetAt) {
+ delete(l.records, key)
+ return true
+ }
+ return entry.Count < l.max
+}
+
+func (l *LoginLimiter) RegisterFailure(key string) {
+ l.mu.Lock()
+ defer l.mu.Unlock()
+ now := time.Now()
+ entry, ok := l.records[key]
+ if !ok || now.After(entry.ResetAt) {
+ l.records[key] = limiterEntry{Count: 1, ResetAt: now.Add(l.window)}
+ return
+ }
+ entry.Count++
+ l.records[key] = entry
+}
+
+func (l *LoginLimiter) Reset(key string) {
+ l.mu.Lock()
+ defer l.mu.Unlock()
+ delete(l.records, key)
+}
diff --git a/server/internal/security/password.go b/server/internal/security/password.go
new file mode 100644
index 0000000..4e81237
--- /dev/null
+++ b/server/internal/security/password.go
@@ -0,0 +1,17 @@
+package security
+
+import "golang.org/x/crypto/bcrypt"
+
+const PasswordCost = 12
+
+func HashPassword(password string) (string, error) {
+ hashed, err := bcrypt.GenerateFromPassword([]byte(password), PasswordCost)
+ if err != nil {
+ return "", err
+ }
+ return string(hashed), nil
+}
+
+func ComparePassword(hashedPassword, plainPassword string) error {
+ return bcrypt.CompareHashAndPassword([]byte(hashedPassword), []byte(plainPassword))
+}
diff --git a/server/internal/security/password_test.go b/server/internal/security/password_test.go
new file mode 100644
index 0000000..3b79abe
--- /dev/null
+++ b/server/internal/security/password_test.go
@@ -0,0 +1,16 @@
+package security
+
+import "testing"
+
+func TestHashAndComparePassword(t *testing.T) {
+ hash, err := HashPassword("super-secret-password")
+ if err != nil {
+ t.Fatalf("HashPassword returned error: %v", err)
+ }
+ if hash == "super-secret-password" {
+ t.Fatalf("expected hashed password to differ from plain text")
+ }
+ if err := ComparePassword(hash, "super-secret-password"); err != nil {
+ t.Fatalf("ComparePassword returned error: %v", err)
+ }
+}
diff --git a/server/internal/security/rate_limiter.go b/server/internal/security/rate_limiter.go
new file mode 100644
index 0000000..9ea57ad
--- /dev/null
+++ b/server/internal/security/rate_limiter.go
@@ -0,0 +1,50 @@
+package security
+
+import (
+ "sync"
+ "time"
+)
+
+type rateEntry struct {
+ count int
+ windowEnd time.Time
+}
+
+type LoginRateLimiter struct {
+ limit int
+ window time.Duration
+ mu sync.Mutex
+ items map[string]rateEntry
+}
+
+func NewLoginRateLimiter(limit int, window time.Duration) *LoginRateLimiter {
+ return &LoginRateLimiter{
+ limit: limit,
+ window: window,
+ items: make(map[string]rateEntry),
+ }
+}
+
+func (r *LoginRateLimiter) Allow(key string) bool {
+ now := time.Now().UTC()
+ r.mu.Lock()
+ defer r.mu.Unlock()
+
+ entry, ok := r.items[key]
+ if !ok || now.After(entry.windowEnd) {
+ r.items[key] = rateEntry{count: 0, windowEnd: now.Add(r.window)}
+ entry = r.items[key]
+ }
+ if entry.count >= r.limit {
+ return false
+ }
+ entry.count++
+ r.items[key] = entry
+ return true
+}
+
+func (r *LoginRateLimiter) Reset(key string) {
+ r.mu.Lock()
+ defer r.mu.Unlock()
+ delete(r.items, key)
+}
diff --git a/server/internal/security/secret.go b/server/internal/security/secret.go
new file mode 100644
index 0000000..ed4b20c
--- /dev/null
+++ b/server/internal/security/secret.go
@@ -0,0 +1,14 @@
+package security
+
import (
	"crypto/rand"
	"encoding/base64"
	"fmt"
)
+
+func GenerateSecret(bytesLength int) (string, error) {
+ buffer := make([]byte, bytesLength)
+ if _, err := rand.Read(buffer); err != nil {
+ return "", err
+ }
+ return base64.RawURLEncoding.EncodeToString(buffer), nil
+}
diff --git a/server/internal/security/secret_store.go b/server/internal/security/secret_store.go
new file mode 100644
index 0000000..b43a1f2
--- /dev/null
+++ b/server/internal/security/secret_store.go
@@ -0,0 +1,93 @@
+//go:build ignore
+
+package security
+
+import (
+ "crypto/rand"
+ "encoding/hex"
+ "encoding/json"
+ "fmt"
+ "os"
+ "path/filepath"
+
+ "backupx/server/internal/config"
+)
+
+type PersistedSecrets struct {
+ JWTSecret string `json:"jwtSecret"`
+ EncryptionKey string `json:"encryptionKey"`
+}
+
+func EnsureSecrets(cfg *config.Config) error {
+ if cfg.Security.JWTSecret != "" && cfg.Security.EncryptionKey != "" {
+ return nil
+ }
+
+ storePath := filepath.Join(filepath.Dir(cfg.Database.Path), "backupx.secrets.json")
+ current, err := loadSecrets(storePath)
+ if err != nil {
+ return err
+ }
+ if current == nil {
+ current = &PersistedSecrets{}
+ }
+ if current.JWTSecret == "" {
+ current.JWTSecret, err = randomHex(32)
+ if err != nil {
+ return err
+ }
+ }
+ if current.EncryptionKey == "" {
+ current.EncryptionKey, err = randomHex(32)
+ if err != nil {
+ return err
+ }
+ }
+ if err := saveSecrets(storePath, current); err != nil {
+ return err
+ }
+ if cfg.Security.JWTSecret == "" {
+ cfg.Security.JWTSecret = current.JWTSecret
+ }
+ if cfg.Security.EncryptionKey == "" {
+ cfg.Security.EncryptionKey = current.EncryptionKey
+ }
+ return nil
+}
+
+func loadSecrets(path string) (*PersistedSecrets, error) {
+ content, err := os.ReadFile(path)
+ if err != nil {
+ if os.IsNotExist(err) {
+ return nil, nil
+ }
+ return nil, fmt.Errorf("read secrets: %w", err)
+ }
+ var secrets PersistedSecrets
+ if err := json.Unmarshal(content, &secrets); err != nil {
+ return nil, fmt.Errorf("decode secrets: %w", err)
+ }
+ return &secrets, nil
+}
+
+func saveSecrets(path string, secrets *PersistedSecrets) error {
+ if err := os.MkdirAll(filepath.Dir(path), 0o700); err != nil {
+ return fmt.Errorf("create secrets dir: %w", err)
+ }
+ content, err := json.MarshalIndent(secrets, "", " ")
+ if err != nil {
+ return fmt.Errorf("encode secrets: %w", err)
+ }
+ if err := os.WriteFile(path, content, 0o600); err != nil {
+ return fmt.Errorf("write secrets: %w", err)
+ }
+ return nil
+}
+
+func randomHex(size int) (string, error) {
+ bytes := make([]byte, size)
+ if _, err := rand.Read(bytes); err != nil {
+ return "", fmt.Errorf("generate random secret: %w", err)
+ }
+ return hex.EncodeToString(bytes), nil
+}
diff --git a/server/internal/security/token.go b/server/internal/security/token.go
new file mode 100644
index 0000000..f8cd3ce
--- /dev/null
+++ b/server/internal/security/token.go
@@ -0,0 +1,57 @@
+package security
+
+import (
+ "fmt"
+ "strconv"
+ "time"
+
+ "backupx/server/internal/model"
+ "github.com/golang-jwt/jwt/v5"
+)
+
+type Claims struct {
+ Username string `json:"username"`
+ Role string `json:"role"`
+ jwt.RegisteredClaims
+}
+
+type JWTManager struct {
+ secret []byte
+ expiry time.Duration
+}
+
+func NewJWTManager(secret string, expiry time.Duration) *JWTManager {
+ return &JWTManager{secret: []byte(secret), expiry: expiry}
+}
+
+func (m *JWTManager) Generate(user *model.User) (string, error) {
+ now := time.Now().UTC()
+ claims := Claims{
+ Username: user.Username,
+ Role: user.Role,
+ RegisteredClaims: jwt.RegisteredClaims{
+ Subject: strconv.FormatUint(uint64(user.ID), 10),
+ IssuedAt: jwt.NewNumericDate(now),
+ ExpiresAt: jwt.NewNumericDate(now.Add(m.expiry)),
+ },
+ }
+ token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
+ return token.SignedString(m.secret)
+}
+
+func (m *JWTManager) Parse(tokenString string) (*Claims, error) {
+ token, err := jwt.ParseWithClaims(tokenString, &Claims{}, func(token *jwt.Token) (any, error) {
+ if token.Method != jwt.SigningMethodHS256 {
+ return nil, fmt.Errorf("unexpected signing method: %s", token.Method.Alg())
+ }
+ return m.secret, nil
+ })
+ if err != nil {
+ return nil, err
+ }
+ claims, ok := token.Claims.(*Claims)
+ if !ok || !token.Valid {
+ return nil, fmt.Errorf("invalid token claims")
+ }
+ return claims, nil
+}
diff --git a/server/internal/security/token_test.go b/server/internal/security/token_test.go
new file mode 100644
index 0000000..1db02e5
--- /dev/null
+++ b/server/internal/security/token_test.go
@@ -0,0 +1,30 @@
+package security
+
+import (
+ "testing"
+ "time"
+
+ "backupx/server/internal/model"
+)
+
// TestJWTManagerGenerateAndParse round-trips a user through Generate/Parse
// and checks that the subject and custom claims survive.
func TestJWTManagerGenerateAndParse(t *testing.T) {
	manager := NewJWTManager("test-secret", time.Hour)
	user := &model.User{ID: 7, Username: "admin", Role: "admin"}

	token, err := manager.Generate(user)
	if err != nil {
		t.Fatalf("Generate returned error: %v", err)
	}

	claims, err := manager.Parse(token)
	if err != nil {
		t.Fatalf("Parse returned error: %v", err)
	}

	// The user ID travels in the registered Subject claim as a decimal string.
	if claims.Subject != "7" {
		t.Fatalf("expected subject 7, got %s", claims.Subject)
	}
	if claims.Username != "admin" {
		t.Fatalf("expected username admin, got %s", claims.Username)
	}
}
diff --git a/server/internal/service/auth_service.go b/server/internal/service/auth_service.go
new file mode 100644
index 0000000..2e35e4c
--- /dev/null
+++ b/server/internal/service/auth_service.go
@@ -0,0 +1,194 @@
+package service
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "strconv"
+ "strings"
+
+ "backupx/server/internal/apperror"
+ "backupx/server/internal/model"
+ "backupx/server/internal/repository"
+ "backupx/server/internal/security"
+)
+
// SetupInput is the request payload for first-run administrator creation.
type SetupInput struct {
	Username    string `json:"username" binding:"required,min=3,max=64"`
	Password    string `json:"password" binding:"required,min=8,max=128"`
	DisplayName string `json:"displayName" binding:"required,min=1,max=128"`
}

// LoginInput is the credentials payload for login requests.
type LoginInput struct {
	Username string `json:"username" binding:"required,min=3,max=64"`
	Password string `json:"password" binding:"required,min=8,max=128"`
}

// AuthPayload is returned on successful setup/login: a signed JWT plus the
// public view of the authenticated user.
type AuthPayload struct {
	Token string      `json:"token"`
	User  *UserOutput `json:"user"`
}

// UserOutput is the safe, public representation of a user (no password hash).
type UserOutput struct {
	ID          uint   `json:"id"`
	Username    string `json:"username"`
	DisplayName string `json:"displayName"`
	Role        string `json:"role"`
}
+
+type AuthService struct {
+ users repository.UserRepository
+ configs repository.SystemConfigRepository
+ jwtManager *security.JWTManager
+ rateLimiter *security.LoginRateLimiter
+}
+
+func NewAuthService(
+ users repository.UserRepository,
+ configs repository.SystemConfigRepository,
+ jwtManager *security.JWTManager,
+ rateLimiter *security.LoginRateLimiter,
+) *AuthService {
+ return &AuthService{users: users, configs: configs, jwtManager: jwtManager, rateLimiter: rateLimiter}
+}
+
+func (s *AuthService) SetupStatus(ctx context.Context) (bool, error) {
+ count, err := s.users.Count(ctx)
+ if err != nil {
+ return false, apperror.Internal("AUTH_STATUS_FAILED", "无法检查初始化状态", err)
+ }
+ return count > 0, nil
+}
+
+func (s *AuthService) Setup(ctx context.Context, input SetupInput) (*AuthPayload, error) {
+ initialized, err := s.SetupStatus(ctx)
+ if err != nil {
+ return nil, err
+ }
+ if initialized {
+ return nil, apperror.Conflict("AUTH_SETUP_DISABLED", "系统已初始化,请直接登录", nil)
+ }
+
+ existing, err := s.users.FindByUsername(ctx, strings.TrimSpace(input.Username))
+ if err != nil {
+ return nil, apperror.Internal("AUTH_LOOKUP_FAILED", "无法检查账户状态", err)
+ }
+ if existing != nil {
+ return nil, apperror.Conflict("AUTH_USERNAME_EXISTS", "用户名已存在", nil)
+ }
+
+ hash, err := security.HashPassword(input.Password)
+ if err != nil {
+ return nil, apperror.Internal("AUTH_HASH_FAILED", "无法处理密码", err)
+ }
+
+ user := &model.User{
+ Username: strings.TrimSpace(input.Username),
+ PasswordHash: hash,
+ DisplayName: strings.TrimSpace(input.DisplayName),
+ Role: "admin",
+ }
+ if err := s.users.Create(ctx, user); err != nil {
+ return nil, apperror.Internal("AUTH_CREATE_USER_FAILED", "无法创建管理员账户", err)
+ }
+
+ token, err := s.jwtManager.Generate(user)
+ if err != nil {
+ return nil, apperror.Internal("AUTH_TOKEN_FAILED", "无法生成访问令牌", err)
+ }
+
+ return &AuthPayload{Token: token, User: ToUserOutput(user)}, nil
+}
+
+func (s *AuthService) Login(ctx context.Context, input LoginInput, clientKey string) (*AuthPayload, error) {
+ if clientKey == "" {
+ clientKey = "unknown"
+ }
+ if !s.rateLimiter.Allow(clientKey) {
+ return nil, apperror.TooManyRequests("AUTH_RATE_LIMITED", "登录尝试过于频繁,请稍后再试", nil)
+ }
+
+ user, err := s.users.FindByUsername(ctx, strings.TrimSpace(input.Username))
+ if err != nil {
+ return nil, apperror.Internal("AUTH_LOOKUP_FAILED", "无法执行登录校验", err)
+ }
+ if user == nil {
+ return nil, apperror.Unauthorized("AUTH_INVALID_CREDENTIALS", "用户名或密码错误", nil)
+ }
+ if err := security.ComparePassword(user.PasswordHash, input.Password); err != nil {
+ return nil, apperror.Unauthorized("AUTH_INVALID_CREDENTIALS", "用户名或密码错误", err)
+ }
+
+ s.rateLimiter.Reset(clientKey)
+ token, err := s.jwtManager.Generate(user)
+ if err != nil {
+ return nil, apperror.Internal("AUTH_TOKEN_FAILED", "无法生成访问令牌", err)
+ }
+ return &AuthPayload{Token: token, User: ToUserOutput(user)}, nil
+}
+
+func (s *AuthService) GetCurrentUser(ctx context.Context, subject string) (*UserOutput, error) {
+ userID, err := strconv.ParseUint(subject, 10, 64)
+ if err != nil {
+ return nil, apperror.Unauthorized("AUTH_INVALID_SUBJECT", "无效用户身份", err)
+ }
+ user, err := s.users.FindByID(ctx, uint(userID))
+ if err != nil {
+ return nil, apperror.Internal("AUTH_LOOKUP_FAILED", "无法获取当前用户", err)
+ }
+ if user == nil {
+ return nil, apperror.Unauthorized("AUTH_USER_NOT_FOUND", "当前用户不存在", errors.New("user not found"))
+ }
+ return ToUserOutput(user), nil
+}
+
// ChangePasswordInput is the request payload for changing the caller's
// own password; the old password must be supplied for verification.
type ChangePasswordInput struct {
	OldPassword string `json:"oldPassword" binding:"required,min=8,max=128"`
	NewPassword string `json:"newPassword" binding:"required,min=8,max=128"`
}
+
+func (s *AuthService) ChangePassword(ctx context.Context, subject string, input ChangePasswordInput) error {
+ userID, err := strconv.ParseUint(subject, 10, 64)
+ if err != nil {
+ return apperror.Unauthorized("AUTH_INVALID_SUBJECT", "无效用户身份", err)
+ }
+ user, err := s.users.FindByID(ctx, uint(userID))
+ if err != nil {
+ return apperror.Internal("AUTH_LOOKUP_FAILED", "无法获取当前用户", err)
+ }
+ if user == nil {
+ return apperror.Unauthorized("AUTH_USER_NOT_FOUND", "当前用户不存在", errors.New("user not found"))
+ }
+ if err := security.ComparePassword(user.PasswordHash, input.OldPassword); err != nil {
+ return apperror.BadRequest("AUTH_WRONG_PASSWORD", "旧密码不正确", err)
+ }
+ hash, err := security.HashPassword(input.NewPassword)
+ if err != nil {
+ return apperror.Internal("AUTH_HASH_FAILED", "无法处理密码", err)
+ }
+ user.PasswordHash = hash
+ if err := s.users.Update(ctx, user); err != nil {
+ return apperror.Internal("AUTH_UPDATE_FAILED", "密码修改失败", err)
+ }
+ return nil
+}
+
+func ToUserOutput(user *model.User) *UserOutput {
+ if user == nil {
+ return nil
+ }
+ return &UserOutput{
+ ID: user.ID,
+ Username: user.Username,
+ DisplayName: user.DisplayName,
+ Role: user.Role,
+ }
+}
+
// SubjectFromContextValue extracts the JWT subject stored in a request
// context, rejecting missing, non-string, or blank values.
func SubjectFromContextValue(value any) (string, error) {
	subject, ok := value.(string)
	if !ok || strings.TrimSpace(subject) == "" {
		return "", fmt.Errorf("invalid subject context")
	}
	return subject, nil
}
diff --git a/server/internal/service/auth_service_test.go b/server/internal/service/auth_service_test.go
new file mode 100644
index 0000000..03b5c6e
--- /dev/null
+++ b/server/internal/service/auth_service_test.go
@@ -0,0 +1,162 @@
+package service
+
+import (
+ "context"
+ "testing"
+ "time"
+
+ "backupx/server/internal/model"
+ "backupx/server/internal/security"
+)
+
+type fakeUserRepository struct {
+ users []*model.User
+}
+
+func (r *fakeUserRepository) Count(context.Context) (int64, error) {
+ return int64(len(r.users)), nil
+}
+
+func (r *fakeUserRepository) Create(_ context.Context, user *model.User) error {
+ user.ID = uint(len(r.users) + 1)
+ r.users = append(r.users, user)
+ return nil
+}
+
+func (r *fakeUserRepository) FindByUsername(_ context.Context, username string) (*model.User, error) {
+ for _, user := range r.users {
+ if user.Username == username {
+ return user, nil
+ }
+ }
+ return nil, nil
+}
+
+func (r *fakeUserRepository) FindByID(_ context.Context, id uint) (*model.User, error) {
+ for _, user := range r.users {
+ if user.ID == id {
+ return user, nil
+ }
+ }
+ return nil, nil
+}
+
+func (r *fakeUserRepository) Update(_ context.Context, user *model.User) error {
+ for i, u := range r.users {
+ if u.ID == user.ID {
+ r.users[i] = user
+ return nil
+ }
+ }
+ return nil
+}
+
// fakeSystemConfigRepository satisfies repository.SystemConfigRepository
// with no-op stubs; the auth service under test never touches configs.
type fakeSystemConfigRepository struct{}

func (r *fakeSystemConfigRepository) GetByKey(context.Context, string) (*model.SystemConfig, error) {
	return nil, nil
}

func (r *fakeSystemConfigRepository) List(context.Context) ([]model.SystemConfig, error) {
	return nil, nil
}

func (r *fakeSystemConfigRepository) Upsert(context.Context, *model.SystemConfig) error {
	return nil
}
+
+func TestAuthServiceSetupAndLogin(t *testing.T) {
+ users := &fakeUserRepository{}
+ service := NewAuthService(
+ users,
+ &fakeSystemConfigRepository{},
+ security.NewJWTManager("test-secret", time.Hour),
+ security.NewLoginRateLimiter(5, time.Minute),
+ )
+
+ setupResult, err := service.Setup(context.Background(), SetupInput{
+ Username: "admin",
+ Password: "password-123",
+ DisplayName: "Admin",
+ })
+ if err != nil {
+ t.Fatalf("Setup returned error: %v", err)
+ }
+ if setupResult.User.Username != "admin" {
+ t.Fatalf("expected username admin, got %s", setupResult.User.Username)
+ }
+
+ loginResult, err := service.Login(context.Background(), LoginInput{
+ Username: "admin",
+ Password: "password-123",
+ }, "127.0.0.1")
+ if err != nil {
+ t.Fatalf("Login returned error: %v", err)
+ }
+ if loginResult.Token == "" {
+ t.Fatalf("expected non-empty token")
+ }
+}
+
+func newTestAuthService() (*AuthService, *fakeUserRepository) {
+ users := &fakeUserRepository{}
+ svc := NewAuthService(
+ users,
+ &fakeSystemConfigRepository{},
+ security.NewJWTManager("test-secret", time.Hour),
+ security.NewLoginRateLimiter(5, time.Minute),
+ )
+ return svc, users
+}
+
+func TestChangePassword(t *testing.T) {
+ svc, _ := newTestAuthService()
+ _, err := svc.Setup(context.Background(), SetupInput{
+ Username: "admin", Password: "password-123", DisplayName: "Admin",
+ })
+ if err != nil {
+ t.Fatalf("Setup: %v", err)
+ }
+
+ err = svc.ChangePassword(context.Background(), "1", ChangePasswordInput{
+ OldPassword: "password-123",
+ NewPassword: "new-password-456",
+ })
+ if err != nil {
+ t.Fatalf("ChangePassword: %v", err)
+ }
+
+ // Old password should no longer work
+ _, err = svc.Login(context.Background(), LoginInput{
+ Username: "admin", Password: "password-123",
+ }, "127.0.0.1")
+ if err == nil {
+ t.Fatalf("expected login with old password to fail")
+ }
+
+ // New password should work
+ _, err = svc.Login(context.Background(), LoginInput{
+ Username: "admin", Password: "new-password-456",
+ }, "127.0.0.1")
+ if err != nil {
+ t.Fatalf("login with new password: %v", err)
+ }
+}
+
+func TestChangePasswordWrongOld(t *testing.T) {
+ svc, _ := newTestAuthService()
+ _, err := svc.Setup(context.Background(), SetupInput{
+ Username: "admin", Password: "password-123", DisplayName: "Admin",
+ })
+ if err != nil {
+ t.Fatalf("Setup: %v", err)
+ }
+
+ err = svc.ChangePassword(context.Background(), "1", ChangePasswordInput{
+ OldPassword: "wrong-password",
+ NewPassword: "new-password-456",
+ })
+ if err == nil {
+ t.Fatalf("expected ChangePassword with wrong old password to fail")
+ }
+}
diff --git a/server/internal/service/backup_execution_service.go b/server/internal/service/backup_execution_service.go
new file mode 100644
index 0000000..9f01cc9
--- /dev/null
+++ b/server/internal/service/backup_execution_service.go
@@ -0,0 +1,487 @@
+package service
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
+ "strings"
+ "time"
+
+ "backupx/server/internal/apperror"
+ "backupx/server/internal/backup"
+ backupretention "backupx/server/internal/backup/retention"
+ "backupx/server/internal/model"
+ "backupx/server/internal/repository"
+ "backupx/server/internal/storage"
+ "backupx/server/internal/storage/codec"
+ "backupx/server/pkg/compress"
+ backupcrypto "backupx/server/pkg/crypto"
+)
+
+// BackupExecutionNotification describes the outcome of one backup run:
+// the task that ran, the resulting record snapshot, and a non-nil Error
+// when the run failed.
+type BackupExecutionNotification struct {
+	Task *model.BackupTask
+	Record *model.BackupRecord
+	Error error
+}
+
+// BackupResultNotifier receives the outcome of each backup execution
+// (e.g. to push a user-facing notification). Implementations may fail;
+// callers treat notification errors as non-fatal.
+type BackupResultNotifier interface {
+	NotifyBackupResult(context.Context, BackupExecutionNotification) error
+}
+
+// noopBackupNotifier is the default notifier used when none is supplied;
+// it silently discards every notification.
+type noopBackupNotifier struct{}
+
+func (noopBackupNotifier) NotifyBackupResult(context.Context, BackupExecutionNotification) error {
+	return nil
+}
+
+// DownloadedArtifact pairs a suggested file name with a stream of the
+// backup artifact's bytes. The caller owns Reader and must close it.
+type DownloadedArtifact struct {
+	FileName string
+	Reader io.ReadCloser
+}
+
+// BackupExecutionService orchestrates backup runs end to end: it creates
+// a record, executes the task's runner, optionally compresses/encrypts
+// the artifact, uploads it to the configured storage target, applies the
+// retention policy, and writes the final status back.
+type BackupExecutionService struct {
+	tasks repository.BackupTaskRepository
+	records repository.BackupRecordRepository
+	targets repository.StorageTargetRepository
+	storageRegistry *storage.Registry
+	runnerRegistry *backup.Registry
+	logHub *backup.LogHub
+	retention *backupretention.Service
+	cipher *codec.ConfigCipher
+	notifier BackupResultNotifier
+	// async schedules a job; defaults to `go job()`. Injectable for tests.
+	async func(func())
+	// now supplies timestamps; defaults to time.Now().UTC(). Injectable for tests.
+	now func() time.Time
+	tempDir string
+	// semaphore bounds the number of concurrently executing backup runs.
+	semaphore chan struct{}
+}
+
+// NewBackupExecutionService constructs the service with sane fallbacks:
+// a nil notifier becomes a no-op, an empty tempDir defaults to
+// "/tmp/backupx", and a non-positive maxConcurrent defaults to 2.
+func NewBackupExecutionService(
+	tasks repository.BackupTaskRepository,
+	records repository.BackupRecordRepository,
+	targets repository.StorageTargetRepository,
+	storageRegistry *storage.Registry,
+	runnerRegistry *backup.Registry,
+	logHub *backup.LogHub,
+	retention *backupretention.Service,
+	cipher *codec.ConfigCipher,
+	notifier BackupResultNotifier,
+	tempDir string,
+	maxConcurrent int,
+) *BackupExecutionService {
+	if notifier == nil {
+		notifier = noopBackupNotifier{}
+	}
+	if tempDir == "" {
+		tempDir = "/tmp/backupx"
+	}
+	if maxConcurrent <= 0 {
+		maxConcurrent = 2
+	}
+	return &BackupExecutionService{
+		tasks: tasks,
+		records: records,
+		targets: targets,
+		storageRegistry: storageRegistry,
+		runnerRegistry: runnerRegistry,
+		logHub: logHub,
+		retention: retention,
+		cipher: cipher,
+		notifier: notifier,
+		async: func(job func()) {
+			go job()
+		},
+		now: func() time.Time { return time.Now().UTC() },
+		tempDir: tempDir,
+		semaphore: make(chan struct{}, maxConcurrent),
+	}
+}
+
+// RunTaskByID starts the task asynchronously and returns the freshly
+// created (typically still "running") record detail immediately.
+func (s *BackupExecutionService) RunTaskByID(ctx context.Context, id uint) (*BackupRecordDetail, error) {
+	return s.startTask(ctx, id, true)
+}
+
+// RunTaskByIDSync runs the task to completion before returning, so the
+// returned record detail reflects the final status.
+func (s *BackupExecutionService) RunTaskByIDSync(ctx context.Context, id uint) (*BackupRecordDetail, error) {
+	return s.startTask(ctx, id, false)
+}
+
+// DownloadRecord streams the stored artifact for a backup record. The
+// file name falls back to the basename of the storage path when the
+// record has no FileName. The caller must close the returned Reader.
+func (s *BackupExecutionService) DownloadRecord(ctx context.Context, recordID uint) (*DownloadedArtifact, error) {
+	record, provider, err := s.loadRecordProvider(ctx, recordID)
+	if err != nil {
+		return nil, err
+	}
+	reader, err := provider.Download(ctx, record.StoragePath)
+	if err != nil {
+		return nil, apperror.Internal("BACKUP_RECORD_DOWNLOAD_FAILED", "无法下载备份文件", err)
+	}
+	fileName := record.FileName
+	if strings.TrimSpace(fileName) == "" {
+		fileName = filepath.Base(record.StoragePath)
+	}
+	return &DownloadedArtifact{FileName: fileName, Reader: reader}, nil
+}
+
+// RestoreRecord downloads a record's artifact into a temporary directory,
+// reverses any encryption (.enc) and gzip compression (.gz) applied at
+// backup time, and hands the prepared file to the task-type runner's
+// Restore. The temp directory is removed on return.
+func (s *BackupExecutionService) RestoreRecord(ctx context.Context, recordID uint) error {
+	record, provider, err := s.loadRecordProvider(ctx, recordID)
+	if err != nil {
+		return err
+	}
+	// The original task is required to know how (and where) to restore.
+	task, err := s.tasks.FindByID(ctx, record.TaskID)
+	if err != nil {
+		return apperror.Internal("BACKUP_TASK_GET_FAILED", "无法获取关联备份任务", err)
+	}
+	if task == nil {
+		return apperror.New(404, "BACKUP_TASK_NOT_FOUND", "关联的备份任务不存在,无法执行恢复", fmt.Errorf("backup task %d not found", record.TaskID))
+	}
+	tempDir, err := os.MkdirTemp("", "backupx-restore-*")
+	if err != nil {
+		return apperror.Internal("BACKUP_RECORD_RESTORE_FAILED", "无法创建恢复目录", err)
+	}
+	defer os.RemoveAll(tempDir)
+	// Prefer the recorded file name; fall back to the storage-path
+	// basename when the record has none.
+	artifactPath := filepath.Join(tempDir, filepath.Base(record.FileName))
+	if strings.TrimSpace(filepath.Base(record.FileName)) == "" {
+		artifactPath = filepath.Join(tempDir, filepath.Base(record.StoragePath))
+	}
+	reader, err := provider.Download(ctx, record.StoragePath)
+	if err != nil {
+		return apperror.Internal("BACKUP_RECORD_RESTORE_FAILED", "无法下载备份文件", err)
+	}
+	if err := writeReaderToFile(artifactPath, reader); err != nil {
+		return apperror.Internal("BACKUP_RECORD_RESTORE_FAILED", "无法写入恢复文件", err)
+	}
+	// Undo encryption/compression so the runner sees the raw artifact.
+	preparedPath, err := s.prepareArtifactForRestore(artifactPath)
+	if err != nil {
+		return apperror.Internal("BACKUP_RECORD_RESTORE_FAILED", "无法准备恢复文件", err)
+	}
+	// Rebuild the runtime spec from the task's current configuration.
+	// NOTE(review): uses record.StartedAt, so the spec reflects the
+	// current task config, not necessarily the config at backup time.
+	spec, err := s.buildTaskSpec(task, record.StartedAt)
+	if err != nil {
+		return err
+	}
+	runner, err := s.runnerRegistry.Runner(spec.Type)
+	if err != nil {
+		return apperror.BadRequest("BACKUP_TASK_INVALID", "不支持的备份任务类型", err)
+	}
+	if err := runner.Restore(ctx, spec, preparedPath, backup.NopLogWriter{}); err != nil {
+		return apperror.Internal("BACKUP_RECORD_RESTORE_FAILED", "恢复备份失败", err)
+	}
+	return nil
+}
+
+// DeleteRecord removes a record's stored artifact (skipped when the
+// storage path is blank, e.g. a failed run) and then deletes the record
+// row. If the remote delete fails, the record row is kept.
+func (s *BackupExecutionService) DeleteRecord(ctx context.Context, recordID uint) error {
+	record, provider, err := s.loadRecordProvider(ctx, recordID)
+	if err != nil {
+		return err
+	}
+	if strings.TrimSpace(record.StoragePath) != "" {
+		if err := provider.Delete(ctx, record.StoragePath); err != nil {
+			return apperror.Internal("BACKUP_RECORD_DELETE_FAILED", "无法删除备份文件", err)
+		}
+	}
+	if err := s.records.Delete(ctx, recordID); err != nil {
+		return apperror.Internal("BACKUP_RECORD_DELETE_FAILED", "无法删除备份记录", err)
+	}
+	return nil
+}
+
+// startTask creates a "running" record for the task, marks the task as
+// running, then executes it either on a goroutine (async) or inline.
+// It returns the record detail as of return time — for async runs that
+// is usually still "running".
+// NOTE(review): executeTask always gets context.Background(), so even a
+// synchronous run ignores cancellation of the caller's ctx — confirm
+// this is intentional.
+func (s *BackupExecutionService) startTask(ctx context.Context, id uint, async bool) (*BackupRecordDetail, error) {
+	task, err := s.tasks.FindByID(ctx, id)
+	if err != nil {
+		return nil, apperror.Internal("BACKUP_TASK_GET_FAILED", "无法获取备份任务详情", err)
+	}
+	if task == nil {
+		return nil, apperror.New(404, "BACKUP_TASK_NOT_FOUND", "备份任务不存在", fmt.Errorf("backup task %d not found", id))
+	}
+	startedAt := s.now()
+	record := &model.BackupRecord{TaskID: task.ID, StorageTargetID: task.StorageTargetID, Status: "running", StartedAt: startedAt}
+	if err := s.records.Create(ctx, record); err != nil {
+		return nil, apperror.Internal("BACKUP_RECORD_CREATE_FAILED", "无法创建备份记录", err)
+	}
+	task.LastRunAt = &startedAt
+	task.LastStatus = "running"
+	if err := s.tasks.Update(ctx, task); err != nil {
+		return nil, apperror.Internal("BACKUP_TASK_UPDATE_FAILED", "无法更新任务状态", err)
+	}
+	run := func() {
+		s.executeTask(context.Background(), task, record.ID, startedAt)
+	}
+	if async {
+		s.async(run)
+	} else {
+		run()
+	}
+	return s.getRecordDetail(ctx, record.ID)
+}
+
+// executeTask performs one backup run: build spec → resolve storage →
+// run the runner → optional gzip → optional encrypt → upload → apply
+// retention. The deferred completeRecord always finalizes the record,
+// notifies, and closes the log stream, whatever the outcome.
+// NOTE(review): the semaphore is acquired here, after startTask has
+// already marked the record/task "running" — queued runs therefore look
+// like they are running while waiting for a slot; confirm intended.
+func (s *BackupExecutionService) executeTask(ctx context.Context, task *model.BackupTask, recordID uint, startedAt time.Time) {
+	s.semaphore <- struct{}{}
+	defer func() { <-s.semaphore }()
+
+	logger := backup.NewExecutionLogger(recordID, s.logHub)
+	// Pessimistic defaults: any early return leaves status "failed".
+	status := "failed"
+	errMessage := ""
+	var fileName string
+	var fileSize int64
+	var storagePath string
+	completeRecord := func() {
+		if finalizeErr := s.finalizeRecord(ctx, task, recordID, startedAt, status, errMessage, logger.String(), fileName, fileSize, storagePath); finalizeErr != nil {
+			logger.Errorf("写回备份记录失败:%v", finalizeErr)
+		}
+		// Notification failures are logged but never fail the run.
+		if err := s.notifier.NotifyBackupResult(ctx, BackupExecutionNotification{Task: task, Record: &model.BackupRecord{ID: recordID, TaskID: task.ID, Status: status, FileName: fileName, FileSize: fileSize, StoragePath: storagePath, ErrorMessage: errMessage, StartedAt: startedAt}, Error: buildOptionalError(errMessage)}); err != nil {
+			logger.Warnf("发送备份通知失败:%v", err)
+		}
+		s.logHub.Complete(recordID, status)
+	}
+	defer completeRecord()
+
+	spec, err := s.buildTaskSpec(task, startedAt)
+	if err != nil {
+		errMessage = err.Error()
+		logger.Errorf("构建任务运行时配置失败:%v", err)
+		return
+	}
+	provider, err := s.resolveProvider(ctx, task.StorageTargetID)
+	if err != nil {
+		errMessage = err.Error()
+		logger.Errorf("创建存储客户端失败:%v", err)
+		return
+	}
+	runner, err := s.runnerRegistry.Runner(spec.Type)
+	if err != nil {
+		errMessage = err.Error()
+		logger.Errorf("获取备份执行器失败:%v", err)
+		return
+	}
+	result, err := runner.Run(ctx, spec, logger)
+	if err != nil {
+		errMessage = err.Error()
+		logger.Errorf("执行备份失败:%v", err)
+		return
+	}
+	// Runner output lives in a temp dir that must be cleaned up.
+	defer os.RemoveAll(result.TempDir)
+	finalPath := result.ArtifactPath
+	// Gzip only when requested and the runner did not already emit .gz.
+	if strings.EqualFold(task.Compression, "gzip") && !strings.HasSuffix(strings.ToLower(finalPath), ".gz") {
+		logger.Infof("开始压缩备份文件")
+		compressedPath, compressErr := compress.GzipFile(finalPath)
+		if compressErr != nil {
+			errMessage = compressErr.Error()
+			logger.Errorf("压缩备份文件失败:%v", compressErr)
+			return
+		}
+		finalPath = compressedPath
+	}
+	// Encryption runs after compression (compressing ciphertext is futile).
+	if task.Encrypt {
+		logger.Infof("开始加密备份文件")
+		encryptedPath, encryptErr := backupcrypto.EncryptFile(s.cipher.Key(), finalPath)
+		if encryptErr != nil {
+			errMessage = encryptErr.Error()
+			logger.Errorf("加密备份文件失败:%v", encryptErr)
+			return
+		}
+		finalPath = encryptedPath
+	}
+	info, err := os.Stat(finalPath)
+	if err != nil {
+		errMessage = err.Error()
+		logger.Errorf("获取备份文件信息失败:%v", err)
+		return
+	}
+	fileSize = info.Size()
+	fileName = filepath.Base(finalPath)
+	storagePath = backup.BuildStorageKey(task.Type, startedAt, fileName)
+	artifact, err := os.Open(finalPath)
+	if err != nil {
+		errMessage = err.Error()
+		logger.Errorf("打开备份文件失败:%v", err)
+		return
+	}
+	defer artifact.Close()
+	logger.Infof("开始上传备份到存储目标")
+	if err := provider.Upload(ctx, storagePath, artifact, fileSize, map[string]string{"taskId": fmt.Sprintf("%d", task.ID), "recordId": fmt.Sprintf("%d", recordID)}); err != nil {
+		errMessage = err.Error()
+		logger.Errorf("上传备份文件失败:%v", err)
+		return
+	}
+	// Retention cleanup is best-effort: failures/warnings are logged but
+	// do not fail the (already uploaded) backup.
+	if s.retention != nil {
+		cleanupResult, cleanupErr := s.retention.Cleanup(ctx, task, provider)
+		if cleanupErr != nil {
+			logger.Warnf("执行保留策略失败:%v", cleanupErr)
+		} else {
+			for _, warning := range cleanupResult.Warnings {
+				logger.Warnf("保留策略警告:%s", warning)
+			}
+		}
+	}
+	status = "success"
+	logger.Infof("备份执行完成")
+}
+
+// finalizeRecord writes the run outcome (status, artifact metadata,
+// duration, trimmed error/log text, completion time) back onto the
+// record, then mirrors the status onto the task.
+// NOTE(review): the task object passed in is the snapshot loaded by
+// startTask; if the task was edited during a long run, this Update
+// overwrites those edits — confirm acceptable.
+func (s *BackupExecutionService) finalizeRecord(ctx context.Context, task *model.BackupTask, recordID uint, startedAt time.Time, status string, errorMessage string, logContent string, fileName string, fileSize int64, storagePath string) error {
+	record, err := s.records.FindByID(ctx, recordID)
+	if err != nil {
+		return err
+	}
+	if record == nil {
+		return fmt.Errorf("backup record %d not found", recordID)
+	}
+	completedAt := s.now()
+	record.Status = status
+	record.FileName = fileName
+	record.FileSize = fileSize
+	record.StoragePath = storagePath
+	record.DurationSeconds = int(completedAt.Sub(startedAt).Seconds())
+	record.ErrorMessage = strings.TrimSpace(errorMessage)
+	record.LogContent = strings.TrimSpace(logContent)
+	record.CompletedAt = &completedAt
+	if err := s.records.Update(ctx, record); err != nil {
+		return err
+	}
+	task.LastRunAt = &startedAt
+	task.LastStatus = status
+	return s.tasks.Update(ctx, task)
+}
+
+// resolveProvider loads a storage target, decrypts its JSON config, and
+// instantiates a provider from the registry.
+// NOTE(review): this passes target.Type to storageRegistry.Create as-is,
+// while buildStorageProviderFromRepos (below) first wraps it with
+// storage.ParseProviderType — confirm both call paths accept the raw
+// string, or unify on one form.
+func (s *BackupExecutionService) resolveProvider(ctx context.Context, targetID uint) (storage.StorageProvider, error) {
+	target, err := s.targets.FindByID(ctx, targetID)
+	if err != nil {
+		return nil, apperror.Internal("BACKUP_STORAGE_TARGET_GET_FAILED", "无法获取存储目标详情", err)
+	}
+	if target == nil {
+		return nil, apperror.BadRequest("BACKUP_STORAGE_TARGET_INVALID", "关联的存储目标不存在", nil)
+	}
+	configMap := map[string]any{}
+	if err := s.cipher.DecryptJSON(target.ConfigCiphertext, &configMap); err != nil {
+		return nil, apperror.Internal("BACKUP_STORAGE_TARGET_DECRYPT_FAILED", "无法解密存储目标配置", err)
+	}
+	provider, err := s.storageRegistry.Create(ctx, target.Type, configMap)
+	if err != nil {
+		return nil, err
+	}
+	return provider, nil
+}
+
+// buildTaskSpec converts a persisted BackupTask into the runner-facing
+// backup.TaskSpec: it JSON-decodes the exclude-pattern list and decrypts
+// the stored database password.
+// NOTE(review): StorageTargetType is hard-coded to "" here — confirm no
+// runner relies on it, or populate it from the storage target.
+func (s *BackupExecutionService) buildTaskSpec(task *model.BackupTask, startedAt time.Time) (backup.TaskSpec, error) {
+	excludePatterns := []string{}
+	if strings.TrimSpace(task.ExcludePatterns) != "" {
+		if err := json.Unmarshal([]byte(task.ExcludePatterns), &excludePatterns); err != nil {
+			return backup.TaskSpec{}, apperror.Internal("BACKUP_TASK_DECODE_FAILED", "无法解析排除规则", err)
+		}
+	}
+	password := ""
+	if strings.TrimSpace(task.DBPasswordCiphertext) != "" {
+		plain, err := s.cipher.Decrypt(task.DBPasswordCiphertext)
+		if err != nil {
+			return backup.TaskSpec{}, apperror.Internal("BACKUP_TASK_DECRYPT_FAILED", "无法解密数据库密码", err)
+		}
+		password = string(plain)
+	}
+	return backup.TaskSpec{
+		ID: task.ID,
+		Name: task.Name,
+		Type: task.Type,
+		SourcePath: task.SourcePath,
+		ExcludePatterns: excludePatterns,
+		StorageTargetID: task.StorageTargetID,
+		StorageTargetType: "",
+		Compression: task.Compression,
+		Encrypt: task.Encrypt,
+		RetentionDays: task.RetentionDays,
+		MaxBackups: task.MaxBackups,
+		StartedAt: startedAt,
+		TempDir: s.tempDir,
+		Database: backup.DatabaseSpec{
+			Host: task.DBHost,
+			Port: task.DBPort,
+			User: task.DBUser,
+			Password: password,
+			Names: []string{task.DBName},
+			Path: task.DBPath,
+		},
+	}, nil
+}
+
+// loadRecordProvider fetches a backup record by ID and resolves the
+// storage provider for the target recorded on it. Returns a 404 app
+// error when the record does not exist.
+func (s *BackupExecutionService) loadRecordProvider(ctx context.Context, recordID uint) (*model.BackupRecord, storage.StorageProvider, error) {
+	record, err := s.records.FindByID(ctx, recordID)
+	if err != nil {
+		return nil, nil, apperror.Internal("BACKUP_RECORD_GET_FAILED", "无法获取备份记录详情", err)
+	}
+	if record == nil {
+		return nil, nil, apperror.New(404, "BACKUP_RECORD_NOT_FOUND", "备份记录不存在", fmt.Errorf("backup record %d not found", recordID))
+	}
+	provider, err := s.resolveProvider(ctx, record.StorageTargetID)
+	if err != nil {
+		return nil, nil, err
+	}
+	return record, provider, nil
+}
+
+// prepareArtifactForRestore reverses the backup-time transforms in the
+// inverse of the order executeTask applied them: first decrypt ".enc",
+// then gunzip ".gz". Extension matching is case-insensitive. It returns
+// the path of the fully unwrapped artifact (possibly the input path).
+func (s *BackupExecutionService) prepareArtifactForRestore(artifactPath string) (string, error) {
+	currentPath := artifactPath
+	if strings.HasSuffix(strings.ToLower(currentPath), ".enc") {
+		decryptedPath, err := backupcrypto.DecryptFile(s.cipher.Key(), currentPath)
+		if err != nil {
+			return "", err
+		}
+		currentPath = decryptedPath
+	}
+	if strings.HasSuffix(strings.ToLower(currentPath), ".gz") {
+		decompressedPath, err := compress.GunzipFile(currentPath)
+		if err != nil {
+			return "", err
+		}
+		currentPath = decompressedPath
+	}
+	return currentPath, nil
+}
+
+// getRecordDetail loads a record and converts it to the API detail DTO,
+// merging in live log events from the hub when the record is running.
+func (s *BackupExecutionService) getRecordDetail(ctx context.Context, recordID uint) (*BackupRecordDetail, error) {
+	record, err := s.records.FindByID(ctx, recordID)
+	if err != nil {
+		return nil, apperror.Internal("BACKUP_RECORD_GET_FAILED", "无法获取备份记录详情", err)
+	}
+	if record == nil {
+		return nil, apperror.New(404, "BACKUP_RECORD_NOT_FOUND", "备份记录不存在", fmt.Errorf("backup record %d not found", recordID))
+	}
+	return toBackupRecordDetail(record, s.logHub), nil
+}
+
+// writeReaderToFile copies the reader's contents to targetPath, creating
+// parent directories (0755) as needed. It always closes the reader, even
+// on error.
+func writeReaderToFile(targetPath string, reader io.ReadCloser) error {
+	defer reader.Close()
+	if err := os.MkdirAll(filepath.Dir(targetPath), 0o755); err != nil {
+		return err
+	}
+	file, err := os.Create(targetPath)
+	if err != nil {
+		return err
+	}
+	defer file.Close()
+	_, err = io.Copy(file, reader)
+	return err
+}
+
+// buildOptionalError converts a free-form error message into an error
+// value: nil when the message is blank, otherwise a new error wrapping
+// the text verbatim.
+func buildOptionalError(message string) error {
+	if strings.TrimSpace(message) == "" {
+		return nil
+	}
+	return fmt.Errorf("%s", message)
+}
+
+// buildStorageProviderFromRepos is a free-standing variant of
+// (*BackupExecutionService).resolveProvider for callers that hold only
+// the repositories: it loads the target, decrypts its config, and builds
+// a provider. Unlike resolveProvider it normalizes the type via
+// storage.ParseProviderType and also returns the target model.
+func buildStorageProviderFromRepos(ctx context.Context, storageTargetID uint, storageTargets repository.StorageTargetRepository, storageRegistry *storage.Registry, cipher *codec.ConfigCipher) (storage.StorageProvider, *model.StorageTarget, error) {
+	target, err := storageTargets.FindByID(ctx, storageTargetID)
+	if err != nil {
+		return nil, nil, apperror.Internal("BACKUP_STORAGE_TARGET_LOOKUP_FAILED", "无法读取存储目标", err)
+	}
+	if target == nil {
+		return nil, nil, apperror.BadRequest("BACKUP_STORAGE_TARGET_INVALID", "存储目标不存在", nil)
+	}
+	var configMap map[string]any
+	if err := cipher.DecryptJSON(target.ConfigCiphertext, &configMap); err != nil {
+		return nil, nil, apperror.Internal("BACKUP_STORAGE_TARGET_DECRYPT_FAILED", "无法解密存储目标配置", err)
+	}
+	provider, err := storageRegistry.Create(ctx, storage.ParseProviderType(target.Type), configMap)
+	if err != nil {
+		return nil, nil, err
+	}
+	return provider, target, nil
+}
diff --git a/server/internal/service/backup_execution_service_test.go b/server/internal/service/backup_execution_service_test.go
new file mode 100644
index 0000000..db89e15
--- /dev/null
+++ b/server/internal/service/backup_execution_service_test.go
@@ -0,0 +1,103 @@
+package service
+
+import (
+ "context"
+ "os"
+ "path/filepath"
+ "testing"
+
+ "backupx/server/internal/backup"
+ backupretention "backupx/server/internal/backup/retention"
+ "backupx/server/internal/config"
+ "backupx/server/internal/database"
+ "backupx/server/internal/logger"
+ "backupx/server/internal/model"
+ "backupx/server/internal/repository"
+ "backupx/server/internal/storage"
+ "backupx/server/internal/storage/codec"
+ "backupx/server/internal/storage/localdisk"
+)
+
+// newExecutionTestServices builds a full execution/record service stack
+// against a temp SQLite database and a local-disk storage target. It
+// seeds one "file" backup task (ID 1, gzip, no encryption) whose source
+// dir contains index.html with content "hello", and returns the services,
+// repositories, and the source/storage directories for assertions.
+func newExecutionTestServices(t *testing.T) (*BackupExecutionService, *BackupRecordService, repository.BackupTaskRepository, repository.StorageTargetRepository, repository.BackupRecordRepository, string, string) {
+	t.Helper()
+	baseDir := t.TempDir()
+	storageDir := filepath.Join(baseDir, "storage")
+	sourceDir := filepath.Join(baseDir, "source")
+	if err := os.MkdirAll(sourceDir, 0o755); err != nil {
+		t.Fatalf("MkdirAll returned error: %v", err)
+	}
+	if err := os.WriteFile(filepath.Join(sourceDir, "index.html"), []byte("hello"), 0o644); err != nil {
+		t.Fatalf("WriteFile returned error: %v", err)
+	}
+	log, err := logger.New(config.LogConfig{Level: "error"})
+	if err != nil {
+		t.Fatalf("logger.New returned error: %v", err)
+	}
+	db, err := database.Open(config.DatabaseConfig{Path: filepath.Join(baseDir, "backupx.db")}, log)
+	if err != nil {
+		t.Fatalf("database.Open returned error: %v", err)
+	}
+	cipher := codec.NewConfigCipher("execution-secret")
+	tasks := repository.NewBackupTaskRepository(db)
+	targets := repository.NewStorageTargetRepository(db)
+	records := repository.NewBackupRecordRepository(db)
+	// Storage-target configs are stored encrypted; encrypt the local-disk
+	// basePath the same way production does.
+	configCiphertext, err := cipher.EncryptJSON(map[string]any{"basePath": storageDir})
+	if err != nil {
+		t.Fatalf("EncryptJSON returned error: %v", err)
+	}
+	if err := targets.Create(context.Background(), &model.StorageTarget{Name: "local", Type: string(storage.ProviderTypeLocalDisk), Enabled: true, ConfigCiphertext: configCiphertext, ConfigVersion: 1, LastTestStatus: "unknown"}); err != nil {
+		t.Fatalf("Create storage target returned error: %v", err)
+	}
+	if err := tasks.Create(context.Background(), &model.BackupTask{Name: "site-files", Type: "file", Enabled: true, SourcePath: sourceDir, StorageTargetID: 1, RetentionDays: 30, Compression: "gzip", MaxBackups: 10, LastStatus: "idle"}); err != nil {
+		t.Fatalf("Create backup task returned error: %v", err)
+	}
+	logHub := backup.NewLogHub()
+	runnerRegistry := backup.NewRegistry(backup.NewFileRunner(), backup.NewMySQLRunner(nil), backup.NewSQLiteRunner(nil), backup.NewPostgreSQLRunner(nil))
+	storageRegistry := storage.NewRegistry(localdisk.NewFactory())
+	retentionService := backupretention.NewService(records)
+	executionService := NewBackupExecutionService(tasks, records, targets, storageRegistry, runnerRegistry, logHub, retentionService, cipher, nil, "", 2)
+	recordService := NewBackupRecordService(records, executionService, logHub)
+	return executionService, recordService, tasks, targets, records, sourceDir, storageDir
+}
+
+// TestBackupExecutionServiceRunTaskByIDSync runs the seeded file task
+// synchronously and asserts the record succeeds, is persisted, and the
+// artifact actually exists under the local storage directory.
+func TestBackupExecutionServiceRunTaskByIDSync(t *testing.T) {
+	executionService, _, _, _, records, _, storageDir := newExecutionTestServices(t)
+	detail, err := executionService.RunTaskByIDSync(context.Background(), 1)
+	if err != nil {
+		t.Fatalf("RunTaskByIDSync returned error: %v", err)
+	}
+	if detail.Status != "success" || detail.StoragePath == "" {
+		t.Fatalf("unexpected record detail: %#v", detail)
+	}
+	stored, err := records.FindByID(context.Background(), detail.ID)
+	if err != nil {
+		t.Fatalf("FindByID returned error: %v", err)
+	}
+	if stored == nil || stored.Status != "success" {
+		t.Fatalf("unexpected stored record: %#v", stored)
+	}
+	// StoragePath is slash-separated; convert for the host filesystem.
+	if _, err := os.Stat(filepath.Join(storageDir, filepath.FromSlash(detail.StoragePath))); err != nil {
+		t.Fatalf("expected artifact in local storage: %v", err)
+	}
+}
+
+// TestBackupRecordServiceRestore backs up the seeded source dir, deletes
+// it, restores from the record, and asserts index.html comes back with
+// its original "hello" content.
+func TestBackupRecordServiceRestore(t *testing.T) {
+	executionService, recordService, _, _, _, sourceDir, _ := newExecutionTestServices(t)
+	detail, err := executionService.RunTaskByIDSync(context.Background(), 1)
+	if err != nil {
+		t.Fatalf("RunTaskByIDSync returned error: %v", err)
+	}
+	// Simulate data loss before restoring.
+	if err := os.RemoveAll(sourceDir); err != nil {
+		t.Fatalf("RemoveAll returned error: %v", err)
+	}
+	if err := recordService.Restore(context.Background(), detail.ID); err != nil {
+		t.Fatalf("Restore returned error: %v", err)
+	}
+	content, err := os.ReadFile(filepath.Join(sourceDir, "index.html"))
+	if err != nil {
+		t.Fatalf("ReadFile returned error: %v", err)
+	}
+	if string(content) != "hello" {
+		t.Fatalf("unexpected restored content: %s", string(content))
+	}
+}
diff --git a/server/internal/service/backup_record_service.go b/server/internal/service/backup_record_service.go
new file mode 100644
index 0000000..06a44d5
--- /dev/null
+++ b/server/internal/service/backup_record_service.go
@@ -0,0 +1,134 @@
+package service
+
+import (
+ "context"
+ "strings"
+ "time"
+
+ "backupx/server/internal/apperror"
+ "backupx/server/internal/backup"
+ "backupx/server/internal/model"
+ "backupx/server/internal/repository"
+)
+
+// BackupRecordListInput carries optional filters and paging for listing
+// backup records. Nil/empty fields are treated as "no filter".
+type BackupRecordListInput struct {
+	TaskID *uint
+	Status string
+	DateFrom *time.Time
+	DateTo *time.Time
+	Limit int
+	Offset int
+}
+
+// BackupRecordSummary is the API-facing projection of a backup record,
+// flattening the task and storage-target names for list views.
+type BackupRecordSummary struct {
+	ID uint `json:"id"`
+	TaskID uint `json:"taskId"`
+	TaskName string `json:"taskName"`
+	StorageTargetID uint `json:"storageTargetId"`
+	StorageTargetName string `json:"storageTargetName"`
+	Status string `json:"status"`
+	FileName string `json:"fileName"`
+	FileSize int64 `json:"fileSize"`
+	StoragePath string `json:"storagePath"`
+	DurationSeconds int `json:"durationSeconds"`
+	ErrorMessage string `json:"errorMessage"`
+	StartedAt time.Time `json:"startedAt"`
+	CompletedAt *time.Time `json:"completedAt,omitempty"`
+}
+
+// BackupRecordDetail extends the summary with the persisted log content
+// and, for in-flight runs, live log events from the hub.
+type BackupRecordDetail struct {
+	BackupRecordSummary
+	LogContent string `json:"logContent"`
+	LogEvents []backup.LogEvent `json:"logEvents,omitempty"`
+}
+
+// BackupRecordService exposes read/stream operations over backup records
+// and delegates download/restore/delete to the execution service.
+type BackupRecordService struct {
+	records repository.BackupRecordRepository
+	execution *BackupExecutionService
+	logHub *backup.LogHub
+}
+
+// NewBackupRecordService wires a record service from its dependencies.
+func NewBackupRecordService(records repository.BackupRecordRepository, execution *BackupExecutionService, logHub *backup.LogHub) *BackupRecordService {
+	return &BackupRecordService{records: records, execution: execution, logHub: logHub}
+}
+
+// List returns record summaries matching the given filters, trimming
+// whitespace from the status filter before querying.
+func (s *BackupRecordService) List(ctx context.Context, input BackupRecordListInput) ([]BackupRecordSummary, error) {
+	items, err := s.records.List(ctx, repository.BackupRecordListOptions{TaskID: input.TaskID, Status: strings.TrimSpace(input.Status), DateFrom: input.DateFrom, DateTo: input.DateTo, Limit: input.Limit, Offset: input.Offset})
+	if err != nil {
+		return nil, apperror.Internal("BACKUP_RECORD_LIST_FAILED", "无法获取备份记录列表", err)
+	}
+	result := make([]BackupRecordSummary, 0, len(items))
+	for _, item := range items {
+		result = append(result, toBackupRecordSummary(&item))
+	}
+	return result, nil
+}
+
+// Get returns the detail DTO for one record, or a 404 app error when it
+// does not exist.
+// NOTE(review): in the not-found branch err is nil, so apperror.New gets
+// a nil cause — compare getRecordDetail in the execution service, which
+// wraps an explicit fmt.Errorf; consider doing the same here.
+func (s *BackupRecordService) Get(ctx context.Context, id uint) (*BackupRecordDetail, error) {
+	item, err := s.records.FindByID(ctx, id)
+	if err != nil {
+		return nil, apperror.Internal("BACKUP_RECORD_GET_FAILED", "无法获取备份记录详情", err)
+	}
+	if item == nil {
+		return nil, apperror.New(404, "BACKUP_RECORD_NOT_FOUND", "备份记录不存在", err)
+	}
+	return toBackupRecordDetail(item, s.logHub), nil
+}
+
+// SubscribeLogs verifies the record exists, then subscribes to its live
+// log stream. The returned cancel func must be called to unsubscribe.
+// NOTE(review): same nil-cause issue as Get — apperror.New receives a
+// nil err in the not-found branch.
+func (s *BackupRecordService) SubscribeLogs(ctx context.Context, id uint, buffer int) (<-chan backup.LogEvent, func(), error) {
+	item, err := s.records.FindByID(ctx, id)
+	if err != nil {
+		return nil, nil, apperror.Internal("BACKUP_RECORD_GET_FAILED", "无法获取备份记录详情", err)
+	}
+	if item == nil {
+		return nil, nil, apperror.New(404, "BACKUP_RECORD_NOT_FOUND", "备份记录不存在", err)
+	}
+	channel, cancel := s.logHub.Subscribe(id, buffer)
+	return channel, cancel, nil
+}
+
+// Download delegates artifact streaming to the execution service.
+func (s *BackupRecordService) Download(ctx context.Context, id uint) (*DownloadedArtifact, error) {
+	return s.execution.DownloadRecord(ctx, id)
+}
+
+// Restore delegates record restoration to the execution service.
+func (s *BackupRecordService) Restore(ctx context.Context, id uint) error {
+	return s.execution.RestoreRecord(ctx, id)
+}
+
+// Delete delegates record (and artifact) deletion to the execution service.
+func (s *BackupRecordService) Delete(ctx context.Context, id uint) error {
+	return s.execution.DeleteRecord(ctx, id)
+}
+
+// toBackupRecordSummary maps a record model to its summary DTO.
+// NOTE(review): reads item.Task.Name and item.StorageTarget.Name — these
+// are empty unless the repository preloads the associations; confirm the
+// List/FindByID queries do so.
+func toBackupRecordSummary(item *model.BackupRecord) BackupRecordSummary {
+	return BackupRecordSummary{
+		ID: item.ID,
+		TaskID: item.TaskID,
+		TaskName: item.Task.Name,
+		StorageTargetID: item.StorageTargetID,
+		StorageTargetName: item.StorageTarget.Name,
+		Status: item.Status,
+		FileName: item.FileName,
+		FileSize: item.FileSize,
+		StoragePath: item.StoragePath,
+		DurationSeconds: item.DurationSeconds,
+		ErrorMessage: item.ErrorMessage,
+		StartedAt: item.StartedAt,
+		CompletedAt: item.CompletedAt,
+	}
+}
+
+// toBackupRecordDetail builds the detail DTO. For a record still in
+// "running" state it snapshots the live log hub: the events are attached
+// and their messages replace the (not yet persisted) LogContent.
+func toBackupRecordDetail(item *model.BackupRecord, logHub *backup.LogHub) *BackupRecordDetail {
+	detail := &BackupRecordDetail{BackupRecordSummary: toBackupRecordSummary(item), LogContent: item.LogContent}
+	if item.Status == "running" && logHub != nil {
+		events := logHub.Snapshot(item.ID)
+		detail.LogEvents = events
+		if len(events) > 0 {
+			lines := make([]string, 0, len(events))
+			for _, event := range events {
+				lines = append(lines, event.Message)
+			}
+			detail.LogContent = strings.Join(lines, "\n")
+		}
+	}
+	return detail
+}
diff --git a/server/internal/service/backup_task_service.go b/server/internal/service/backup_task_service.go
new file mode 100644
index 0000000..b74679c
--- /dev/null
+++ b/server/internal/service/backup_task_service.go
@@ -0,0 +1,417 @@
+package service
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "strings"
+ "time"
+
+ "backupx/server/internal/apperror"
+ "backupx/server/internal/model"
+ "backupx/server/internal/repository"
+ "backupx/server/internal/storage/codec"
+)
+
+// backupTaskMaskedValue is the placeholder returned to clients instead of a
+// real secret; receiving it back on update means "keep the stored password".
+const backupTaskMaskedValue = "********"
+
+// BackupTaskUpsertInput is the request payload shared by the task create and
+// update endpoints. Which fields are required depends on Type (SourcePath
+// for "file", DB* for database types); DBPassword may be blank or the
+// masked placeholder on update to keep the previously stored ciphertext.
+type BackupTaskUpsertInput struct {
+	Name            string   `json:"name" binding:"required,min=1,max=100"`
+	Type            string   `json:"type" binding:"required,oneof=file mysql sqlite postgresql pgsql"`
+	Enabled         bool     `json:"enabled"`
+	CronExpr        string   `json:"cronExpr" binding:"max=64"`
+	SourcePath      string   `json:"sourcePath" binding:"max=500"`
+	ExcludePatterns []string `json:"excludePatterns"`
+	DBHost          string   `json:"dbHost" binding:"max=255"`
+	DBPort          int      `json:"dbPort"`
+	DBUser          string   `json:"dbUser" binding:"max=100"`
+	DBPassword      string   `json:"dbPassword" binding:"max=255"`
+	DBName          string   `json:"dbName" binding:"max=255"`
+	DBPath          string   `json:"dbPath" binding:"max=500"`
+	StorageTargetID uint     `json:"storageTargetId" binding:"required"`
+	RetentionDays   int      `json:"retentionDays"`
+	Compression     string   `json:"compression" binding:"omitempty,oneof=gzip none"`
+	Encrypt         bool     `json:"encrypt"`
+	MaxBackups      int      `json:"maxBackups"`
+}
+
+// BackupTaskToggleInput carries the desired enabled state. A pointer is used
+// so the handler can distinguish an explicit false from an omitted field.
+type BackupTaskToggleInput struct {
+	Enabled *bool `json:"enabled"`
+}
+
+// BackupTaskSummary is the list/summary representation of a backup task,
+// including the resolved storage target name and last-run status.
+type BackupTaskSummary struct {
+	ID                uint       `json:"id"`
+	Name              string     `json:"name"`
+	Type              string     `json:"type"`
+	Enabled           bool       `json:"enabled"`
+	CronExpr          string     `json:"cronExpr"`
+	StorageTargetID   uint       `json:"storageTargetId"`
+	StorageTargetName string     `json:"storageTargetName"`
+	RetentionDays     int        `json:"retentionDays"`
+	Compression       string     `json:"compression"`
+	Encrypt           bool       `json:"encrypt"`
+	MaxBackups        int        `json:"maxBackups"`
+	LastRunAt         *time.Time `json:"lastRunAt,omitempty"`
+	LastStatus        string     `json:"lastStatus"`
+	UpdatedAt         time.Time  `json:"updatedAt"`
+}
+
+// BackupTaskDetail extends the summary with source/database configuration.
+// Secrets are never included; MaskedFields lists which sensitive fields
+// exist server-side (currently only "dbPassword").
+type BackupTaskDetail struct {
+	BackupTaskSummary
+	SourcePath      string    `json:"sourcePath"`
+	ExcludePatterns []string  `json:"excludePatterns"`
+	DBHost          string    `json:"dbHost"`
+	DBPort          int       `json:"dbPort"`
+	DBUser          string    `json:"dbUser"`
+	DBName          string    `json:"dbName"`
+	DBPath          string    `json:"dbPath"`
+	MaskedFields    []string  `json:"maskedFields,omitempty"`
+	CreatedAt       time.Time `json:"createdAt"`
+}
+
+// BackupTaskScheduler abstracts the cron scheduler so the service can keep
+// schedules in sync with task create/update/toggle/delete operations.
+type BackupTaskScheduler interface {
+	SyncTask(ctx context.Context, task *model.BackupTask) error
+	RemoveTask(ctx context.Context, taskID uint) error
+}
+
+// BackupTaskService implements CRUD and scheduling synchronization for
+// backup tasks. The cipher encrypts database passwords at rest; scheduler
+// is optional and injected later via SetScheduler.
+type BackupTaskService struct {
+	tasks     repository.BackupTaskRepository
+	targets   repository.StorageTargetRepository
+	cipher    *codec.ConfigCipher
+	scheduler BackupTaskScheduler
+}
+
+// NewBackupTaskService constructs the service. The scheduler is deliberately
+// not a constructor argument — presumably to break an initialization cycle —
+// and must be attached afterwards with SetScheduler.
+func NewBackupTaskService(
+	tasks repository.BackupTaskRepository,
+	targets repository.StorageTargetRepository,
+	cipher *codec.ConfigCipher,
+) *BackupTaskService {
+	return &BackupTaskService{tasks: tasks, targets: targets, cipher: cipher}
+}
+
+// SetScheduler attaches the cron scheduler after construction. All mutating
+// operations tolerate a nil scheduler (they skip synchronization).
+func (s *BackupTaskService) SetScheduler(scheduler BackupTaskScheduler) {
+	s.scheduler = scheduler
+}
+
+// List returns summaries for every backup task.
+func (s *BackupTaskService) List(ctx context.Context) ([]BackupTaskSummary, error) {
+	tasks, err := s.tasks.List(ctx, repository.BackupTaskListOptions{})
+	if err != nil {
+		return nil, apperror.Internal("BACKUP_TASK_LIST_FAILED", "无法获取备份任务列表", err)
+	}
+	summaries := make([]BackupTaskSummary, len(tasks))
+	for index := range tasks {
+		summaries[index] = toBackupTaskSummary(&tasks[index])
+	}
+	return summaries, nil
+}
+
+// Get loads one backup task by id and renders its detail view; a missing
+// task yields a 404 application error.
+func (s *BackupTaskService) Get(ctx context.Context, id uint) (*BackupTaskDetail, error) {
+	task, err := s.tasks.FindByID(ctx, id)
+	switch {
+	case err != nil:
+		return nil, apperror.Internal("BACKUP_TASK_GET_FAILED", "无法获取备份任务详情", err)
+	case task == nil:
+		return nil, apperror.New(http.StatusNotFound, "BACKUP_TASK_NOT_FOUND", "备份任务不存在", fmt.Errorf("backup task %d not found", id))
+	}
+	return s.toDetail(task)
+}
+
+// Create validates the payload, enforces name uniqueness, persists the new
+// task, synchronizes the scheduler, and returns the stored detail.
+func (s *BackupTaskService) Create(ctx context.Context, input BackupTaskUpsertInput) (*BackupTaskDetail, error) {
+	input.Type = normalizeBackupTaskType(input.Type)
+	if err := s.validateInput(ctx, nil, input); err != nil {
+		return nil, err
+	}
+	duplicate, err := s.tasks.FindByName(ctx, strings.TrimSpace(input.Name))
+	if err != nil {
+		return nil, apperror.Internal("BACKUP_TASK_LOOKUP_FAILED", "无法检查备份任务名称", err)
+	}
+	if duplicate != nil {
+		return nil, apperror.Conflict("BACKUP_TASK_NAME_EXISTS", "备份任务名称已存在", nil)
+	}
+	task, err := s.buildTask(nil, input)
+	if err != nil {
+		return nil, err
+	}
+	if err := s.tasks.Create(ctx, task); err != nil {
+		return nil, apperror.Internal("BACKUP_TASK_CREATE_FAILED", "无法创建备份任务", err)
+	}
+	if s.scheduler != nil {
+		if err := s.scheduler.SyncTask(ctx, task); err != nil {
+			return nil, apperror.Internal("BACKUP_TASK_SCHEDULE_FAILED", "无法同步备份任务调度", err)
+		}
+	}
+	// Re-read through Get so the response reflects persisted state.
+	return s.Get(ctx, task.ID)
+}
+
+// Update validates and replaces an existing task's configuration, keeping
+// its ID/CreatedAt, then re-syncs the scheduler and returns fresh detail.
+// The order matters: existence check, cross-field validation (which also
+// decides whether a password is required), then name-uniqueness.
+func (s *BackupTaskService) Update(ctx context.Context, id uint, input BackupTaskUpsertInput) (*BackupTaskDetail, error) {
+	input.Type = normalizeBackupTaskType(input.Type)
+	existing, err := s.tasks.FindByID(ctx, id)
+	if err != nil {
+		return nil, apperror.Internal("BACKUP_TASK_GET_FAILED", "无法获取备份任务详情", err)
+	}
+	if existing == nil {
+		return nil, apperror.New(http.StatusNotFound, "BACKUP_TASK_NOT_FOUND", "备份任务不存在", fmt.Errorf("backup task %d not found", id))
+	}
+	if err := s.validateInput(ctx, existing, input); err != nil {
+		return nil, err
+	}
+	// A task with the same name is only a conflict if it is a different row.
+	sameName, err := s.tasks.FindByName(ctx, strings.TrimSpace(input.Name))
+	if err != nil {
+		return nil, apperror.Internal("BACKUP_TASK_LOOKUP_FAILED", "无法检查备份任务名称", err)
+	}
+	if sameName != nil && sameName.ID != existing.ID {
+		return nil, apperror.Conflict("BACKUP_TASK_NAME_EXISTS", "备份任务名称已存在", nil)
+	}
+	// buildTask carries over the stored password ciphertext when the input
+	// password is blank or masked.
+	item, err := s.buildTask(existing, input)
+	if err != nil {
+		return nil, err
+	}
+	item.ID = existing.ID
+	item.CreatedAt = existing.CreatedAt
+	if err := s.tasks.Update(ctx, item); err != nil {
+		return nil, apperror.Internal("BACKUP_TASK_UPDATE_FAILED", "无法更新备份任务", err)
+	}
+	if s.scheduler != nil {
+		if err := s.scheduler.SyncTask(ctx, item); err != nil {
+			return nil, apperror.Internal("BACKUP_TASK_SCHEDULE_FAILED", "无法同步备份任务调度", err)
+		}
+	}
+	// Re-read through Get so the response reflects persisted state.
+	return s.Get(ctx, item.ID)
+}
+
+// Delete unschedules and removes a backup task.
+//
+// The schedule is removed before the database row so a cron tick cannot
+// fire for a task that is about to disappear; a scheduler failure aborts
+// the delete. (A second, redundant RemoveTask call after the delete has
+// been dropped — the schedule is already gone by that point.)
+func (s *BackupTaskService) Delete(ctx context.Context, id uint) error {
+	existing, err := s.tasks.FindByID(ctx, id)
+	if err != nil {
+		return apperror.Internal("BACKUP_TASK_GET_FAILED", "无法获取备份任务详情", err)
+	}
+	if existing == nil {
+		return apperror.New(http.StatusNotFound, "BACKUP_TASK_NOT_FOUND", "备份任务不存在", fmt.Errorf("backup task %d not found", id))
+	}
+	if s.scheduler != nil {
+		if err := s.scheduler.RemoveTask(ctx, id); err != nil {
+			return apperror.Internal("BACKUP_TASK_SCHEDULE_FAILED", "无法移除备份任务调度", err)
+		}
+	}
+	if err := s.tasks.Delete(ctx, id); err != nil {
+		return apperror.Internal("BACKUP_TASK_DELETE_FAILED", "无法删除备份任务", err)
+	}
+	return nil
+}
+
+// Toggle flips a task's enabled flag, persists it, re-syncs the scheduler,
+// and returns a freshly loaded summary.
+func (s *BackupTaskService) Toggle(ctx context.Context, id uint, enabled bool) (*BackupTaskSummary, error) {
+	item, err := s.tasks.FindByID(ctx, id)
+	if err != nil {
+		return nil, apperror.Internal("BACKUP_TASK_GET_FAILED", "无法获取备份任务详情", err)
+	}
+	if item == nil {
+		return nil, apperror.New(http.StatusNotFound, "BACKUP_TASK_NOT_FOUND", "备份任务不存在", fmt.Errorf("backup task %d not found", id))
+	}
+	item.Enabled = enabled
+	if err := s.tasks.Update(ctx, item); err != nil {
+		return nil, apperror.Internal("BACKUP_TASK_UPDATE_FAILED", "无法更新备份任务状态", err)
+	}
+	if s.scheduler != nil {
+		if err := s.scheduler.SyncTask(ctx, item); err != nil {
+			return nil, apperror.Internal("BACKUP_TASK_SCHEDULE_FAILED", "无法同步备份任务调度", err)
+		}
+	}
+	// Reload so the summary reflects persisted state. Guard against a
+	// concurrent delete: the original dereferenced this pointer without a
+	// nil check and would panic if the task vanished between calls.
+	reloaded, err := s.tasks.FindByID(ctx, id)
+	if err != nil {
+		return nil, apperror.Internal("BACKUP_TASK_GET_FAILED", "无法获取备份任务详情", err)
+	}
+	if reloaded == nil {
+		return nil, apperror.New(http.StatusNotFound, "BACKUP_TASK_NOT_FOUND", "备份任务不存在", fmt.Errorf("backup task %d not found", id))
+	}
+	summary := toBackupTaskSummary(reloaded)
+	return &summary, nil
+}
+
+// validateInput performs cross-field validation shared by Create and Update.
+// existing is nil on create; on update it lets a blank password mean "keep
+// the stored ciphertext".
+func (s *BackupTaskService) validateInput(ctx context.Context, existing *model.BackupTask, input BackupTaskUpsertInput) error {
+	if strings.TrimSpace(input.Name) == "" {
+		return apperror.BadRequest("BACKUP_TASK_INVALID", "任务名称不能为空", nil)
+	}
+	if input.StorageTargetID == 0 {
+		return apperror.BadRequest("BACKUP_TASK_INVALID", "请选择存储目标", nil)
+	}
+	target, err := s.targets.FindByID(ctx, input.StorageTargetID)
+	if err != nil {
+		return apperror.Internal("BACKUP_TASK_STORAGE_LOOKUP_FAILED", "无法检查存储目标", err)
+	}
+	if target == nil {
+		return apperror.BadRequest("BACKUP_STORAGE_TARGET_INVALID", "关联的存储目标不存在", nil)
+	}
+	if input.RetentionDays < 0 {
+		return apperror.BadRequest("BACKUP_TASK_INVALID", "保留天数不能小于 0", nil)
+	}
+	if input.MaxBackups < 0 {
+		return apperror.BadRequest("BACKUP_TASK_INVALID", "最大保留份数不能小于 0", nil)
+	}
+	// NOTE: the compression default ("gzip") is applied in buildTask. The
+	// previous in-place `input.Compression = "gzip"` here mutated only the
+	// local by-value copy and was never read again — dead code, removed.
+	// Shallow cron sanity check only (at least five whitespace-separated
+	// fields); full parsing is left to the scheduler.
+	if expr := strings.TrimSpace(input.CronExpr); expr != "" && len(strings.Fields(expr)) < 5 {
+		return apperror.BadRequest("BACKUP_TASK_INVALID", "Cron 表达式格式不正确", nil)
+	}
+	// A password is mandatory only when no ciphertext is stored yet.
+	passwordRequired := existing == nil || existing.DBPasswordCiphertext == ""
+	return validateTaskTypeSpecificFields(input, passwordRequired)
+}
+
+// validateTaskTypeSpecificFields checks the fields required by each task
+// type: source path for file backups, connection details for mysql and
+// postgresql, and the database file path for sqlite. passwordRequired is
+// false when an encrypted password is already stored for the task.
+func validateTaskTypeSpecificFields(input BackupTaskUpsertInput, passwordRequired bool) error {
+	blank := func(value string) bool { return strings.TrimSpace(value) == "" }
+	invalid := func(message string) error {
+		return apperror.BadRequest("BACKUP_TASK_INVALID", message, nil)
+	}
+	switch normalizeBackupTaskType(input.Type) {
+	case "file":
+		if blank(input.SourcePath) {
+			return invalid("文件备份必须填写源路径")
+		}
+		return nil
+	case "mysql", "postgresql":
+		switch {
+		case blank(input.DBHost):
+			return invalid("数据库主机不能为空")
+		case input.DBPort <= 0:
+			return invalid("数据库端口必须大于 0")
+		case blank(input.DBUser):
+			return invalid("数据库用户名不能为空")
+		case passwordRequired && blank(input.DBPassword):
+			return invalid("数据库密码不能为空")
+		case blank(input.DBName):
+			return invalid("数据库名称不能为空")
+		}
+		return nil
+	case "sqlite":
+		if blank(input.DBPath) {
+			return invalid("SQLite 备份必须填写数据库文件路径")
+		}
+		return nil
+	default:
+		return invalid("不支持的备份任务类型")
+	}
+}
+
+// buildTask converts validated input into a model row. When existing is
+// non-nil (update path) it preserves the stored password ciphertext unless
+// a new, non-masked password is supplied, and carries over run history
+// (LastRunAt/LastStatus) and CreatedAt.
+func (s *BackupTaskService) buildTask(existing *model.BackupTask, input BackupTaskUpsertInput) (*model.BackupTask, error) {
+	excludePatterns, err := encodeExcludePatterns(input.ExcludePatterns)
+	if err != nil {
+		return nil, apperror.BadRequest("BACKUP_TASK_INVALID", "排除规则格式不合法", err)
+	}
+	// Start from the stored ciphertext; only re-encrypt when the client
+	// sent a real password (not blank, not the "********" mask).
+	passwordCiphertext := ""
+	if existing != nil {
+		passwordCiphertext = existing.DBPasswordCiphertext
+	}
+	if text := strings.TrimSpace(input.DBPassword); text != "" && text != backupTaskMaskedValue {
+		ciphertext, encryptErr := s.cipher.Encrypt([]byte(text))
+		if encryptErr != nil {
+			return nil, apperror.Internal("BACKUP_TASK_ENCRYPT_FAILED", "无法保存数据库密码", encryptErr)
+		}
+		passwordCiphertext = ciphertext
+	}
+	// Defaults: gzip compression, keep 10 backups when unspecified.
+	compression := strings.TrimSpace(input.Compression)
+	if compression == "" {
+		compression = "gzip"
+	}
+	maxBackups := input.MaxBackups
+	if maxBackups == 0 {
+		maxBackups = 10
+	}
+	item := &model.BackupTask{
+		Name:                 strings.TrimSpace(input.Name),
+		Type:                 normalizeBackupTaskType(input.Type),
+		Enabled:              input.Enabled,
+		CronExpr:             strings.TrimSpace(input.CronExpr),
+		SourcePath:           strings.TrimSpace(input.SourcePath),
+		ExcludePatterns:      excludePatterns,
+		DBHost:               strings.TrimSpace(input.DBHost),
+		DBPort:               input.DBPort,
+		DBUser:               strings.TrimSpace(input.DBUser),
+		DBPasswordCiphertext: passwordCiphertext,
+		DBName:               strings.TrimSpace(input.DBName),
+		DBPath:               strings.TrimSpace(input.DBPath),
+		StorageTargetID:      input.StorageTargetID,
+		RetentionDays:        input.RetentionDays,
+		Compression:          compression,
+		Encrypt:              input.Encrypt,
+		MaxBackups:           maxBackups,
+		LastStatus:           "idle",
+	}
+	// Updates keep the task's run history and creation time.
+	if existing != nil {
+		item.LastRunAt = existing.LastRunAt
+		item.LastStatus = existing.LastStatus
+		item.CreatedAt = existing.CreatedAt
+	}
+	return item, nil
+}
+
+func (s *BackupTaskService) toDetail(item *model.BackupTask) (*BackupTaskDetail, error) {
+ excludePatterns, err := decodeExcludePatterns(item.ExcludePatterns)
+ if err != nil {
+ return nil, apperror.Internal("BACKUP_TASK_DECODE_FAILED", "无法解析备份任务配置", err)
+ }
+ detail := &BackupTaskDetail{
+ BackupTaskSummary: toBackupTaskSummary(item),
+ SourcePath: item.SourcePath,
+ ExcludePatterns: excludePatterns,
+ DBHost: item.DBHost,
+ DBPort: item.DBPort,
+ DBUser: item.DBUser,
+ DBName: item.DBName,
+ DBPath: item.DBPath,
+ CreatedAt: item.CreatedAt,
+ }
+ if item.DBPasswordCiphertext != "" {
+ detail.MaskedFields = []string{"dbPassword"}
+ }
+ return detail, nil
+}
+
+// toBackupTaskSummary flattens a task row (with its preloaded StorageTarget
+// association) into the API summary shape. A nil item yields a zero-value
+// summary: the original guarded only the association lookup with a nil
+// check and then dereferenced item unconditionally, which would panic.
+func toBackupTaskSummary(item *model.BackupTask) BackupTaskSummary {
+	if item == nil {
+		return BackupTaskSummary{}
+	}
+	return BackupTaskSummary{
+		ID:                item.ID,
+		Name:              item.Name,
+		Type:              normalizeBackupTaskType(item.Type),
+		Enabled:           item.Enabled,
+		CronExpr:          item.CronExpr,
+		StorageTargetID:   item.StorageTargetID,
+		StorageTargetName: item.StorageTarget.Name,
+		RetentionDays:     item.RetentionDays,
+		Compression:       item.Compression,
+		Encrypt:           item.Encrypt,
+		MaxBackups:        item.MaxBackups,
+		LastRunAt:         item.LastRunAt,
+		LastStatus:        item.LastStatus,
+		UpdatedAt:         item.UpdatedAt,
+	}
+}
+
+// encodeExcludePatterns serializes the exclude patterns to a JSON array
+// string for storage; nil/empty input encodes to "[]".
+func encodeExcludePatterns(patterns []string) (string, error) {
+	if len(patterns) == 0 {
+		return "[]", nil
+	}
+	raw, err := json.Marshal(patterns)
+	if err != nil {
+		return "", err
+	}
+	return string(raw), nil
+}
+
+// decodeExcludePatterns parses a stored JSON array of exclude patterns;
+// a blank string decodes to an empty (non-nil) slice.
+func decodeExcludePatterns(raw string) ([]string, error) {
+	if strings.TrimSpace(raw) == "" {
+		return []string{}, nil
+	}
+	var patterns []string
+	if err := json.Unmarshal([]byte(raw), &patterns); err != nil {
+		return nil, err
+	}
+	return patterns, nil
+}
+
+// normalizeBackupTaskType lower-cases and trims the task type, mapping the
+// "pgsql" alias to the canonical "postgresql".
+func normalizeBackupTaskType(value string) string {
+	switch normalized := strings.ToLower(strings.TrimSpace(value)); normalized {
+	case "pgsql":
+		return "postgresql"
+	default:
+		return normalized
+	}
+}
diff --git a/server/internal/service/backup_task_service_test.go b/server/internal/service/backup_task_service_test.go
new file mode 100644
index 0000000..5bdbdd8
--- /dev/null
+++ b/server/internal/service/backup_task_service_test.go
@@ -0,0 +1,119 @@
+package service
+
+import (
+ "context"
+ "path/filepath"
+ "testing"
+
+ "backupx/server/internal/config"
+ "backupx/server/internal/database"
+ "backupx/server/internal/logger"
+ "backupx/server/internal/model"
+ "backupx/server/internal/repository"
+ "backupx/server/internal/storage/codec"
+)
+
+// newBackupTaskServiceForTest wires a BackupTaskService against a fresh
+// temp-dir SQLite database (no scheduler attached) and returns the service
+// plus the underlying repositories for seeding and inspection.
+func newBackupTaskServiceForTest(t *testing.T) (*BackupTaskService, repository.StorageTargetRepository, repository.BackupTaskRepository) {
+	t.Helper()
+	log, err := logger.New(config.LogConfig{Level: "error"})
+	if err != nil {
+		t.Fatalf("logger.New returned error: %v", err)
+	}
+	db, err := database.Open(config.DatabaseConfig{Path: filepath.Join(t.TempDir(), "backupx.db")}, log)
+	if err != nil {
+		t.Fatalf("database.Open returned error: %v", err)
+	}
+	targets := repository.NewStorageTargetRepository(db)
+	tasks := repository.NewBackupTaskRepository(db)
+	service := NewBackupTaskService(tasks, targets, codec.NewConfigCipher("task-service-secret"))
+	return service, targets, tasks
+}
+
+// TestBackupTaskServiceCreateAndGet verifies that a file-backup task can be
+// created and re-read, with exclude patterns round-tripped and the storage
+// target name resolved.
+func TestBackupTaskServiceCreateAndGet(t *testing.T) {
+	ctx := context.Background()
+	service, targets, _ := newBackupTaskServiceForTest(t)
+	// Seed storage target; it receives ID 1 in the fresh database.
+	if err := targets.Create(ctx, &model.StorageTarget{Name: "local", Type: "local_disk", Enabled: true, ConfigCiphertext: "ciphertext", ConfigVersion: 1, LastTestStatus: "unknown"}); err != nil {
+		t.Fatalf("seed storage target error: %v", err)
+	}
+	created, err := service.Create(ctx, BackupTaskUpsertInput{
+		Name:            "site-files",
+		Type:            "file",
+		Enabled:         true,
+		SourcePath:      "/srv/site",
+		ExcludePatterns: []string{"*.log", "node_modules"},
+		StorageTargetID: 1,
+		RetentionDays:   30,
+		Compression:     "gzip",
+		MaxBackups:      10,
+	})
+	if err != nil {
+		t.Fatalf("Create returned error: %v", err)
+	}
+	if created.Name != "site-files" || len(created.ExcludePatterns) != 2 {
+		t.Fatalf("unexpected created task: %#v", created)
+	}
+	loaded, err := service.Get(ctx, created.ID)
+	if err != nil {
+		t.Fatalf("Get returned error: %v", err)
+	}
+	if loaded.StorageTargetName != "local" {
+		t.Fatalf("expected storage target name local, got %s", loaded.StorageTargetName)
+	}
+}
+
+// TestBackupTaskServiceKeepsMaskedPasswordOnUpdate verifies that updating a
+// mysql task with a blank DBPassword leaves the stored password ciphertext
+// untouched and keeps the dbPassword field reported as masked.
+func TestBackupTaskServiceKeepsMaskedPasswordOnUpdate(t *testing.T) {
+	ctx := context.Background()
+	service, targets, tasks := newBackupTaskServiceForTest(t)
+	if err := targets.Create(ctx, &model.StorageTarget{Name: "local", Type: "local_disk", Enabled: true, ConfigCiphertext: "ciphertext", ConfigVersion: 1, LastTestStatus: "unknown"}); err != nil {
+		t.Fatalf("seed storage target error: %v", err)
+	}
+	created, err := service.Create(ctx, BackupTaskUpsertInput{
+		Name:            "mysql-prod",
+		Type:            "mysql",
+		Enabled:         true,
+		DBHost:          "127.0.0.1",
+		DBPort:          3306,
+		DBUser:          "root",
+		DBPassword:      "secret",
+		DBName:          "app",
+		StorageTargetID: 1,
+		RetentionDays:   7,
+		Compression:     "gzip",
+		MaxBackups:      5,
+	})
+	if err != nil {
+		t.Fatalf("Create returned error: %v", err)
+	}
+	stored, err := tasks.FindByID(ctx, created.ID)
+	if err != nil {
+		t.Fatalf("FindByID returned error: %v", err)
+	}
+	originalCiphertext := stored.DBPasswordCiphertext
+	// Blank password on update means "keep the stored ciphertext".
+	updated, err := service.Update(ctx, created.ID, BackupTaskUpsertInput{
+		Name:            created.Name,
+		Type:            created.Type,
+		Enabled:         true,
+		DBHost:          "127.0.0.1",
+		DBPort:          3306,
+		DBUser:          "root",
+		DBPassword:      "",
+		DBName:          "app_updated",
+		StorageTargetID: 1,
+		RetentionDays:   7,
+		Compression:     "gzip",
+		MaxBackups:      5,
+	})
+	if err != nil {
+		t.Fatalf("Update returned error: %v", err)
+	}
+	if len(updated.MaskedFields) != 1 || updated.MaskedFields[0] != "dbPassword" {
+		t.Fatalf("expected masked dbPassword field, got %#v", updated.MaskedFields)
+	}
+	reloaded, err := tasks.FindByID(ctx, created.ID)
+	if err != nil {
+		t.Fatalf("FindByID returned error: %v", err)
+	}
+	if reloaded.DBPasswordCiphertext != originalCiphertext {
+		t.Fatalf("expected ciphertext unchanged")
+	}
+}
diff --git a/server/internal/service/dashboard_notification_service_test.go b/server/internal/service/dashboard_notification_service_test.go
new file mode 100644
index 0000000..8629206
--- /dev/null
+++ b/server/internal/service/dashboard_notification_service_test.go
@@ -0,0 +1,109 @@
+package service
+
+import (
+ "context"
+ "path/filepath"
+ "testing"
+ "time"
+
+ "backupx/server/internal/config"
+ "backupx/server/internal/database"
+ "backupx/server/internal/logger"
+ "backupx/server/internal/model"
+ "backupx/server/internal/notify"
+ "backupx/server/internal/repository"
+ "backupx/server/internal/storage/codec"
+)
+
+// fakeNotifier is an in-memory notify.Notifier test double that records the
+// messages and the last config it was asked to send with.
+type fakeNotifier struct {
+	typeName   string
+	messages   []notify.Message
+	lastConfig map[string]any
+}
+
+// Type returns the configured notifier type; SensitiveFields marks "secret"
+// so the service masks it in API responses.
+func (n *fakeNotifier) Type() string              { return n.typeName }
+func (n *fakeNotifier) SensitiveFields() []string { return []string{"secret"} }
+// Validate accepts any configuration; the fake performs no real validation.
+// (The original branched on config["url"] but returned nil on both paths —
+// dead code, simplified.)
+func (n *fakeNotifier) Validate(config map[string]any) error {
+	return nil
+}
+// Send records the config and message for later assertions; it never fails.
+func (n *fakeNotifier) Send(_ context.Context, config map[string]any, message notify.Message) error {
+	n.lastConfig = config
+	n.messages = append(n.messages, message)
+	return nil
+}
+
+// newDashboardNotificationTestDeps builds dashboard and notification
+// services against a fresh temp-dir SQLite database, seeds one storage
+// target (ID 1), and registers a fakeNotifier for the "webhook" type.
+func newDashboardNotificationTestDeps(t *testing.T) (*DashboardService, *NotificationService, *fakeNotifier, repository.BackupTaskRepository, repository.BackupRecordRepository, repository.NotificationRepository) {
+	t.Helper()
+	log, err := logger.New(config.LogConfig{Level: "error"})
+	if err != nil {
+		t.Fatalf("logger.New returned error: %v", err)
+	}
+	db, err := database.Open(config.DatabaseConfig{Path: filepath.Join(t.TempDir(), "backupx.db")}, log)
+	if err != nil {
+		t.Fatalf("database.Open returned error: %v", err)
+	}
+	tasks := repository.NewBackupTaskRepository(db)
+	records := repository.NewBackupRecordRepository(db)
+	targets := repository.NewStorageTargetRepository(db)
+	notifications := repository.NewNotificationRepository(db)
+	if err := targets.Create(context.Background(), &model.StorageTarget{Name: "local", Type: "local_disk", Enabled: true, ConfigCiphertext: "ciphertext", ConfigVersion: 1, LastTestStatus: "unknown"}); err != nil {
+		t.Fatalf("Create storage target returned error: %v", err)
+	}
+	fake := &fakeNotifier{typeName: "webhook"}
+	registry := notify.NewRegistry(fake)
+	cipher := codec.NewConfigCipher("notify-secret")
+	dashboardService := NewDashboardService(tasks, records, targets)
+	notificationService := NewNotificationService(notifications, registry, cipher)
+	return dashboardService, notificationService, fake, tasks, records, notifications
+}
+
+// TestDashboardServiceStats seeds one task and one successful record and
+// checks the aggregated counters, recent records, and storage usage.
+func TestDashboardServiceStats(t *testing.T) {
+	dashboardService, _, _, tasks, records, _ := newDashboardNotificationTestDeps(t)
+	ctx := context.Background()
+	if err := tasks.Create(ctx, &model.BackupTask{Name: "site", Type: "file", Enabled: true, SourcePath: "/srv/site", StorageTargetID: 1, RetentionDays: 30, Compression: "gzip", MaxBackups: 10, LastStatus: "success"}); err != nil {
+		t.Fatalf("Create task returned error: %v", err)
+	}
+	startedAt := time.Now().UTC().Add(-time.Hour)
+	completedAt := time.Now().UTC()
+	if err := records.Create(ctx, &model.BackupRecord{TaskID: 1, StorageTargetID: 1, Status: "success", FileName: "site.tar.gz", FileSize: 2048, StoragePath: "site/2026/03/07/site.tar.gz", DurationSeconds: 30, StartedAt: startedAt, CompletedAt: &completedAt}); err != nil {
+		t.Fatalf("Create record returned error: %v", err)
+	}
+	stats, err := dashboardService.Stats(ctx)
+	if err != nil {
+		t.Fatalf("Stats returned error: %v", err)
+	}
+	if stats.TotalTasks != 1 || stats.TotalRecords != 1 || stats.TotalBackupBytes != 2048 {
+		t.Fatalf("unexpected stats: %#v", stats)
+	}
+	if len(stats.RecentRecords) != 1 || len(stats.StorageUsage) != 1 {
+		t.Fatalf("expected recent records and storage usage, got %#v", stats)
+	}
+}
+
+// TestNotificationServiceCreateAndDispatch checks that creating a webhook
+// notification masks its secret, stores an encrypted config, and that a
+// backup-result dispatch reaches the registered notifier exactly once.
+func TestNotificationServiceCreateAndDispatch(t *testing.T) {
+	_, notificationService, fake, _, _, notifications := newDashboardNotificationTestDeps(t)
+	ctx := context.Background()
+	created, err := notificationService.Create(ctx, NotificationUpsertInput{Name: "ops", Type: "webhook", Enabled: true, OnSuccess: true, OnFailure: true, Config: map[string]any{"url": "https://example.invalid", "secret": "top-secret"}})
+	if err != nil {
+		t.Fatalf("Create returned error: %v", err)
+	}
+	if len(created.MaskedFields) != 1 || created.MaskedFields[0] != "secret" {
+		t.Fatalf("unexpected masked fields: %#v", created.MaskedFields)
+	}
+	item, err := notifications.FindByID(ctx, created.ID)
+	if err != nil {
+		t.Fatalf("FindByID returned error: %v", err)
+	}
+	if item == nil || item.ConfigCiphertext == "" {
+		t.Fatalf("expected encrypted notification config")
+	}
+	if err := notificationService.NotifyBackupResult(ctx, BackupExecutionNotification{Task: &model.BackupTask{Name: "site"}, Record: &model.BackupRecord{ID: 1, Status: "success", StartedAt: time.Now().UTC()}, Error: nil}); err != nil {
+		t.Fatalf("NotifyBackupResult returned error: %v", err)
+	}
+	if len(fake.messages) != 1 {
+		t.Fatalf("expected one notification message, got %d", len(fake.messages))
+	}
+}
diff --git a/server/internal/service/dashboard_service.go b/server/internal/service/dashboard_service.go
new file mode 100644
index 0000000..72ce5a5
--- /dev/null
+++ b/server/internal/service/dashboard_service.go
@@ -0,0 +1,109 @@
+package service
+
+import (
+ "context"
+ "time"
+
+ "backupx/server/internal/apperror"
+ "backupx/server/internal/repository"
+)
+
+// DashboardStorageUsageItem is the per-storage-target total of backed-up
+// bytes shown on the dashboard.
+type DashboardStorageUsageItem struct {
+	StorageTargetID uint   `json:"storageTargetId"`
+	TargetName      string `json:"targetName"`
+	TotalSize       int64  `json:"totalSize"`
+}
+
+// DashboardStats aggregates the headline dashboard numbers. SuccessRate is
+// a 0..1 ratio computed over records from the last 30 days.
+type DashboardStats struct {
+	TotalTasks       int64                       `json:"totalTasks"`
+	EnabledTasks     int64                       `json:"enabledTasks"`
+	TotalRecords     int64                       `json:"totalRecords"`
+	SuccessRate      float64                     `json:"successRate"`
+	TotalBackupBytes int64                       `json:"totalBackupBytes"`
+	LastBackupAt     *time.Time                  `json:"lastBackupAt,omitempty"`
+	RecentRecords    []BackupRecordSummary       `json:"recentRecords"`
+	StorageUsage     []DashboardStorageUsageItem `json:"storageUsage"`
+}
+
+// DashboardService aggregates read-only statistics across tasks, records,
+// and storage targets.
+type DashboardService struct {
+	tasks   repository.BackupTaskRepository
+	records repository.BackupRecordRepository
+	targets repository.StorageTargetRepository
+}
+
+// NewDashboardService constructs the dashboard service from its repositories.
+func NewDashboardService(tasks repository.BackupTaskRepository, records repository.BackupRecordRepository, targets repository.StorageTargetRepository) *DashboardService {
+	return &DashboardService{tasks: tasks, records: records, targets: targets}
+}
+
+// Stats gathers all dashboard counters in one pass: task/record counts, a
+// 30-day success rate, total backed-up bytes, the 10 most recent records,
+// and per-target storage usage with names resolved from the target list.
+func (s *DashboardService) Stats(ctx context.Context) (*DashboardStats, error) {
+	totalTasks, err := s.tasks.Count(ctx)
+	if err != nil {
+		return nil, apperror.Internal("DASHBOARD_STATS_FAILED", "无法统计备份任务数量", err)
+	}
+	enabledTasks, err := s.tasks.CountEnabled(ctx)
+	if err != nil {
+		return nil, apperror.Internal("DASHBOARD_STATS_FAILED", "无法统计启用任务数量", err)
+	}
+	totalRecords, err := s.records.Count(ctx)
+	if err != nil {
+		return nil, apperror.Internal("DASHBOARD_STATS_FAILED", "无法统计备份记录数量", err)
+	}
+	// Success rate is computed over the trailing 30-day window only.
+	since := time.Now().UTC().AddDate(0, 0, -30)
+	recentRecordsCount, err := s.records.CountSince(ctx, since)
+	if err != nil {
+		return nil, apperror.Internal("DASHBOARD_STATS_FAILED", "无法统计最近记录数量", err)
+	}
+	successRecordsCount, err := s.records.CountSuccessSince(ctx, since)
+	if err != nil {
+		return nil, apperror.Internal("DASHBOARD_STATS_FAILED", "无法统计最近成功记录数量", err)
+	}
+	totalBackupBytes, err := s.records.SumFileSize(ctx)
+	if err != nil {
+		return nil, apperror.Internal("DASHBOARD_STATS_FAILED", "无法统计备份总量", err)
+	}
+	recentRecords, err := s.records.ListRecent(ctx, 10)
+	if err != nil {
+		return nil, apperror.Internal("DASHBOARD_STATS_FAILED", "无法获取最近备份记录", err)
+	}
+	// Resolve target names so usage rows can be labeled without extra joins.
+	targetList, err := s.targets.List(ctx)
+	if err != nil {
+		return nil, apperror.Internal("DASHBOARD_STATS_FAILED", "无法获取存储目标信息", err)
+	}
+	targetNames := make(map[uint]string, len(targetList))
+	for _, item := range targetList {
+		targetNames[item.ID] = item.Name
+	}
+	usageItems, err := s.records.StorageUsage(ctx)
+	if err != nil {
+		return nil, apperror.Internal("DASHBOARD_STATS_FAILED", "无法统计存储使用量", err)
+	}
+	storageUsage := make([]DashboardStorageUsageItem, 0, len(usageItems))
+	for _, item := range usageItems {
+		storageUsage = append(storageUsage, DashboardStorageUsageItem{StorageTargetID: item.StorageTargetID, TargetName: targetNames[item.StorageTargetID], TotalSize: item.TotalSize})
+	}
+	result := &DashboardStats{TotalTasks: totalTasks, EnabledTasks: enabledTasks, TotalRecords: totalRecords, TotalBackupBytes: totalBackupBytes, RecentRecords: make([]BackupRecordSummary, 0, len(recentRecords)), StorageUsage: storageUsage}
+	if recentRecordsCount > 0 {
+		result.SuccessRate = float64(successRecordsCount) / float64(recentRecordsCount)
+	}
+	// NOTE(review): assumes ListRecent returns newest-first, so index 0 is
+	// the latest backup — confirm against the repository implementation.
+	if len(recentRecords) > 0 {
+		result.LastBackupAt = &recentRecords[0].StartedAt
+	}
+	for _, item := range recentRecords {
+		result.RecentRecords = append(result.RecentRecords, toBackupRecordSummary(&item))
+	}
+	return result, nil
+}
+
+// Timeline returns per-day backup points for the trailing window; days
+// defaults to 30 when non-positive. A non-nil (possibly empty) slice is
+// always returned on success so JSON encodes as [] rather than null.
+func (s *DashboardService) Timeline(ctx context.Context, days int) ([]repository.BackupTimelinePoint, error) {
+	window := days
+	if window <= 0 {
+		window = 30
+	}
+	since := time.Now().UTC().AddDate(0, 0, -window)
+	points, err := s.records.TimelineSince(ctx, since)
+	if err != nil {
+		return nil, apperror.Internal("DASHBOARD_TIMELINE_FAILED", "无法获取备份时间线", err)
+	}
+	if points == nil {
+		return []repository.BackupTimelinePoint{}, nil
+	}
+	return points, nil
+}
diff --git a/server/internal/service/google_drive_oauth_service.go b/server/internal/service/google_drive_oauth_service.go
new file mode 100644
index 0000000..0b43f31
--- /dev/null
+++ b/server/internal/service/google_drive_oauth_service.go
@@ -0,0 +1,123 @@
+package service
+
+import (
+ "context"
+ "fmt"
+ "strings"
+ "time"
+
+ "golang.org/x/oauth2"
+ "golang.org/x/oauth2/google"
+ "google.golang.org/api/drive/v3"
+
+ "backupx/server/internal/model"
+ "backupx/server/internal/repository"
+ "backupx/server/internal/security"
+ "backupx/server/internal/storage"
+ "backupx/server/internal/storage/codec"
+)
+
+// GoogleDriveOAuthResult is the outcome of a completed OAuth flow: the
+// (optional) existing target being re-authorized, the drive config now
+// holding a refresh token, and the consumed state value.
+type GoogleDriveOAuthResult struct {
+	TargetID *uint
+	Config   storage.GoogleDriveConfig
+	State    string
+}
+
+// GoogleDriveOAuthService runs the Google Drive OAuth authorization-code
+// flow, persisting encrypted session payloads keyed by a random state.
+// now, generateState, and exchangeCode are injectable for tests.
+type GoogleDriveOAuthService struct {
+	sessions      repository.OAuthSessionRepository
+	cipher        *codec.Cipher
+	now           func() time.Time
+	generateState func() (string, error)
+	exchangeCode  func(context.Context, *oauth2.Config, string) (*oauth2.Token, error)
+	ttl           time.Duration
+}
+
+// googleDriveOAuthPayload is the encrypted per-session payload carrying the
+// client credentials (and target id, when re-authorizing) between Start and
+// Complete.
+type googleDriveOAuthPayload struct {
+	TargetID *uint                     `json:"targetId,omitempty"`
+	Config   storage.GoogleDriveConfig `json:"config"`
+}
+
+// NewGoogleDriveOAuthService constructs the service with production
+// defaults: UTC clock, random state generation, real token exchange, and a
+// 10-minute session TTL.
+func NewGoogleDriveOAuthService(sessions repository.OAuthSessionRepository, cipher *codec.Cipher) *GoogleDriveOAuthService {
+	return &GoogleDriveOAuthService{
+		sessions: sessions,
+		cipher:   cipher,
+		now:      func() time.Time { return time.Now().UTC() },
+		generateState: func() (string, error) {
+			return security.GenerateSecret(24)
+		},
+		exchangeCode: func(ctx context.Context, config *oauth2.Config, code string) (*oauth2.Token, error) {
+			return config.Exchange(ctx, code)
+		},
+		ttl: 10 * time.Minute,
+	}
+}
+
+// Start begins the OAuth flow: it validates the client credentials, mints a
+// random state, stores the encrypted config in a TTL-bound session, and
+// returns the Google consent URL plus the state. AccessTypeOffline and
+// ApprovalForce are requested so Google issues a refresh token.
+func (s *GoogleDriveOAuthService) Start(ctx context.Context, targetID *uint, cfg storage.GoogleDriveConfig) (string, string, error) {
+	if strings.TrimSpace(cfg.ClientID) == "" || strings.TrimSpace(cfg.ClientSecret) == "" {
+		return "", "", fmt.Errorf("google drive client credentials are required")
+	}
+	if strings.TrimSpace(cfg.RedirectURL) == "" {
+		return "", "", fmt.Errorf("google drive redirect url is required")
+	}
+	state, err := s.generateState()
+	if err != nil {
+		return "", "", fmt.Errorf("generate oauth state: %w", err)
+	}
+	// The config (including the client secret) is encrypted at rest.
+	payload := googleDriveOAuthPayload{TargetID: targetID, Config: cfg}
+	ciphertext, err := s.cipher.EncryptValue(payload)
+	if err != nil {
+		return "", "", fmt.Errorf("encrypt oauth payload: %w", err)
+	}
+	now := s.now()
+	session := &model.OAuthSession{ProviderType: string(storage.ProviderTypeGoogleDrive), State: state, PayloadCiphertext: ciphertext, TargetID: targetID, ExpiresAt: now.Add(s.ttl)}
+	if err := s.sessions.Create(ctx, session); err != nil {
+		return "", "", fmt.Errorf("create oauth session: %w", err)
+	}
+	oauthConfig := s.oauthConfig(cfg)
+	url := oauthConfig.AuthCodeURL(state, oauth2.AccessTypeOffline, oauth2.ApprovalForce)
+	return url, state, nil
+}
+
+// Complete finishes the flow for a callback: it looks up the session by
+// state, rejects reused or expired states, exchanges the authorization code
+// for tokens, requires a refresh token, marks the session consumed, and
+// returns the config with the refresh token filled in.
+func (s *GoogleDriveOAuthService) Complete(ctx context.Context, state string, code string) (*GoogleDriveOAuthResult, error) {
+	session, err := s.sessions.FindByState(ctx, state)
+	if err != nil {
+		return nil, fmt.Errorf("find oauth session: %w", err)
+	}
+	if session == nil {
+		return nil, fmt.Errorf("google drive oauth state not found")
+	}
+	now := s.now()
+	// Single-use state: a non-nil UsedAt means a replayed callback.
+	if session.UsedAt != nil {
+		return nil, fmt.Errorf("google drive oauth state already used")
+	}
+	if now.After(session.ExpiresAt) {
+		return nil, fmt.Errorf("google drive oauth state expired")
+	}
+	var payload googleDriveOAuthPayload
+	if err := s.cipher.DecryptValue(session.PayloadCiphertext, &payload); err != nil {
+		return nil, fmt.Errorf("decrypt oauth session payload: %w", err)
+	}
+	token, err := s.exchangeCode(ctx, s.oauthConfig(payload.Config), code)
+	if err != nil {
+		return nil, fmt.Errorf("exchange google drive oauth code: %w", err)
+	}
+	// Without a refresh token the stored credentials would expire shortly.
+	if strings.TrimSpace(token.RefreshToken) == "" {
+		return nil, fmt.Errorf("google drive oauth response missing refresh token")
+	}
+	payload.Config.RefreshToken = token.RefreshToken
+	session.UsedAt = &now
+	if err := s.sessions.Update(ctx, session); err != nil {
+		return nil, fmt.Errorf("mark oauth session used: %w", err)
+	}
+	return &GoogleDriveOAuthResult{TargetID: payload.TargetID, Config: payload.Config, State: state}, nil
+}
+
+// oauthConfig builds the oauth2 client configuration for Google Drive,
+// requesting the full Drive scope against Google's standard endpoint.
+func (s *GoogleDriveOAuthService) oauthConfig(cfg storage.GoogleDriveConfig) *oauth2.Config {
+	return &oauth2.Config{
+		ClientID:     cfg.ClientID,
+		ClientSecret: cfg.ClientSecret,
+		RedirectURL:  cfg.RedirectURL,
+		Endpoint:     google.Endpoint,
+		Scopes:       []string{drive.DriveScope},
+	}
+}
diff --git a/server/internal/service/google_drive_oauth_service_test.go b/server/internal/service/google_drive_oauth_service_test.go
new file mode 100644
index 0000000..7c314f5
--- /dev/null
+++ b/server/internal/service/google_drive_oauth_service_test.go
@@ -0,0 +1,61 @@
+package service
+
+import (
+ "context"
+ "path/filepath"
+ "strings"
+ "testing"
+ "time"
+
+ "golang.org/x/oauth2"
+
+ "backupx/server/internal/config"
+ "backupx/server/internal/database"
+ "backupx/server/internal/logger"
+ "backupx/server/internal/repository"
+ "backupx/server/internal/storage"
+ "backupx/server/internal/storage/codec"
+)
+
+// TestGoogleDriveOAuthServiceStartAndComplete exercises the full OAuth
+// round trip (Start then Complete) against a temporary SQLite database.
+// The clock, state generator and code exchange are replaced with
+// deterministic stubs so the test needs no network access.
+func TestGoogleDriveOAuthServiceStartAndComplete(t *testing.T) {
+ tempDir := t.TempDir()
+ log, err := logger.New(config.LogConfig{Level: "error"})
+ if err != nil {
+ t.Fatalf("logger.New returned error: %v", err)
+ }
+ db, err := database.Open(config.DatabaseConfig{Path: filepath.Join(tempDir, "backupx.db")}, log)
+ if err != nil {
+ t.Fatalf("database.Open returned error: %v", err)
+ }
+ sessions := repository.NewOAuthSessionRepository(db)
+ service := NewGoogleDriveOAuthService(sessions, codec.New("encryption-secret"))
+ // Deterministic hooks: fixed clock, fixed state, fake token exchange.
+ service.now = func() time.Time { return time.Date(2026, 3, 7, 0, 0, 0, 0, time.UTC) }
+ service.generateState = func() (string, error) { return "oauth-state", nil }
+ service.exchangeCode = func(context.Context, *oauth2.Config, string) (*oauth2.Token, error) {
+ return &oauth2.Token{RefreshToken: "refresh-token"}, nil
+ }
+
+ url, state, err := service.Start(context.Background(), nil, storage.GoogleDriveConfig{
+ ClientID: "client-id",
+ ClientSecret: "client-secret",
+ RedirectURL: "http://localhost:8340/api/storage-targets/google-drive/callback",
+ FolderID: "folder-id",
+ })
+ if err != nil {
+ t.Fatalf("Start returned error: %v", err)
+ }
+ if state != "oauth-state" {
+ t.Fatalf("expected deterministic state, got %s", state)
+ }
+ if !strings.Contains(url, "oauth-state") {
+ t.Fatalf("expected auth url to contain state, got %s", url)
+ }
+
+ // Completing with the issued state must succeed and carry the stubbed
+ // refresh token into the returned config.
+ result, err := service.Complete(context.Background(), state, "auth-code")
+ if err != nil {
+ t.Fatalf("Complete returned error: %v", err)
+ }
+ if result.Config.RefreshToken != "refresh-token" {
+ t.Fatalf("expected refresh token to be persisted")
+ }
+}
diff --git a/server/internal/service/node_service.go b/server/internal/service/node_service.go
new file mode 100644
index 0000000..ef1782a
--- /dev/null
+++ b/server/internal/service/node_service.go
@@ -0,0 +1,234 @@
+package service
+
+import (
+ "context"
+ "crypto/rand"
+ "encoding/hex"
+ "fmt"
+ "net/http"
+ "os"
+ "path/filepath"
+ "runtime"
+ "sort"
+ "time"
+
+ "backupx/server/internal/apperror"
+ "backupx/server/internal/model"
+ "backupx/server/internal/repository"
+)
+
+// NodeSummary is the API response for node listings.
+type NodeSummary struct {
+ ID uint `json:"id"`
+ Name string `json:"name"`
+ Hostname string `json:"hostname"`
+ IPAddress string `json:"ipAddress"`
+ Status string `json:"status"`
+ IsLocal bool `json:"isLocal"`
+ OS string `json:"os"`
+ Arch string `json:"arch"`
+ AgentVersion string `json:"agentVersion"`
+ LastSeen time.Time `json:"lastSeen"`
+ CreatedAt time.Time `json:"createdAt"`
+}
+
+// NodeCreateInput is the input for creating a new remote node.
+type NodeCreateInput struct {
+ Name string `json:"name" binding:"required"`
+}
+
+// NodeService manages the cluster nodes.
+type NodeService struct {
+ repo repository.NodeRepository
+}
+
+// NewNodeService wires a NodeService to its node repository.
+func NewNodeService(repo repository.NodeRepository) *NodeService {
+ return &NodeService{repo: repo}
+}
+
+// EnsureLocalNode creates the default "local" node on first start, or
+// refreshes its status and host metadata when it already exists.
+func (s *NodeService) EnsureLocalNode(ctx context.Context) error {
+ existing, err := s.repo.FindLocal(ctx)
+ if err != nil {
+ return err
+ }
+ hostname, _ := os.Hostname() // best-effort; an empty hostname is tolerable
+ if existing != nil {
+ existing.Status = model.NodeStatusOnline
+ existing.LastSeen = time.Now().UTC()
+ existing.Hostname = hostname
+ existing.OS = runtime.GOOS
+ existing.Arch = runtime.GOARCH
+ return s.repo.Update(ctx, existing)
+ }
+ // A node must never be persisted with an empty auth token (an agent
+ // presenting an empty token could otherwise match it), so a token
+ // generation failure is fatal rather than silently ignored.
+ token, err := generateToken()
+ if err != nil {
+ return fmt.Errorf("generate local node token: %w", err)
+ }
+ node := &model.Node{
+ Name: "本机 (Local)",
+ Hostname: hostname,
+ Token: token,
+ Status: model.NodeStatusOnline,
+ IsLocal: true,
+ OS: runtime.GOOS,
+ Arch: runtime.GOARCH,
+ LastSeen: time.Now().UTC(),
+ }
+ return s.repo.Create(ctx, node)
+}
+
+// List returns a summary view of every registered node.
+func (s *NodeService) List(ctx context.Context) ([]NodeSummary, error) {
+ nodes, err := s.repo.List(ctx)
+ if err != nil {
+ return nil, err
+ }
+ summaries := make([]NodeSummary, len(nodes))
+ for i := range nodes {
+ node := nodes[i]
+ summaries[i] = NodeSummary{
+ ID: node.ID,
+ Name: node.Name,
+ Hostname: node.Hostname,
+ IPAddress: node.IPAddress,
+ Status: node.Status,
+ IsLocal: node.IsLocal,
+ OS: node.OS,
+ Arch: node.Arch,
+ AgentVersion: node.AgentVer,
+ LastSeen: node.LastSeen,
+ CreatedAt: node.CreatedAt,
+ }
+ }
+ return summaries, nil
+}
+
+// Get returns the summary for a single node, or a 404 apperror when the
+// id is unknown.
+func (s *NodeService) Get(ctx context.Context, id uint) (*NodeSummary, error) {
+ node, err := s.repo.FindByID(ctx, id)
+ if err != nil {
+ return nil, err
+ }
+ if node == nil {
+ return nil, apperror.New(http.StatusNotFound, "NODE_NOT_FOUND", "节点不存在", nil)
+ }
+ summary := NodeSummary{
+ ID: node.ID,
+ Name: node.Name,
+ Hostname: node.Hostname,
+ IPAddress: node.IPAddress,
+ Status: node.Status,
+ IsLocal: node.IsLocal,
+ OS: node.OS,
+ Arch: node.Arch,
+ AgentVersion: node.AgentVer,
+ LastSeen: node.LastSeen,
+ CreatedAt: node.CreatedAt,
+ }
+ return &summary, nil
+}
+
+// Create registers a new remote node and returns its authentication token.
+// The node starts offline until its agent first heartbeats.
+func (s *NodeService) Create(ctx context.Context, input NodeCreateInput) (string, error) {
+ token, err := generateToken()
+ if err != nil {
+ return "", fmt.Errorf("generate token: %w", err)
+ }
+ newNode := model.Node{
+ Name: input.Name,
+ Token: token,
+ Status: model.NodeStatusOffline,
+ IsLocal: false,
+ LastSeen: time.Now().UTC(),
+ }
+ if createErr := s.repo.Create(ctx, &newNode); createErr != nil {
+ return "", createErr
+ }
+ return token, nil
+}
+
+// Delete removes a remote node; the local node is protected from deletion.
+func (s *NodeService) Delete(ctx context.Context, id uint) error {
+ node, err := s.repo.FindByID(ctx, id)
+ switch {
+ case err != nil:
+ return err
+ case node == nil:
+ return apperror.New(http.StatusNotFound, "NODE_NOT_FOUND", "节点不存在", nil)
+ case node.IsLocal:
+ return apperror.BadRequest("NODE_DELETE_LOCAL", "无法删除本机节点", nil)
+ }
+ return s.repo.Delete(ctx, id)
+}
+
+// ListDirectory lists the contents of a directory on the given node.
+// Remote nodes are not yet supported. Directories sort before files,
+// then entries sort alphabetically by name.
+//
+// NOTE(review): path is only filepath.Clean-ed, not restricted — an empty
+// path becomes "." (the server's working directory) and any server path is
+// readable; confirm all callers are admin-authenticated.
+func (s *NodeService) ListDirectory(ctx context.Context, nodeID uint, path string) ([]DirEntry, error) {
+ node, err := s.repo.FindByID(ctx, nodeID)
+ if err != nil {
+ return nil, err
+ }
+ if node == nil {
+ return nil, apperror.New(http.StatusNotFound, "NODE_NOT_FOUND", "节点不存在", nil)
+ }
+ if !node.IsLocal {
+ return nil, apperror.BadRequest("NODE_REMOTE_FS_NOT_SUPPORTED", "远程节点的目录浏览需要 Agent 在线连接(即将支持)", nil)
+ }
+
+ cleanPath := filepath.Clean(path)
+ entries, err := os.ReadDir(cleanPath)
+ if err != nil {
+ return nil, apperror.BadRequest("NODE_FS_READ_ERROR", fmt.Sprintf("无法读取目录: %s", err.Error()), err)
+ }
+
+ result := make([]DirEntry, 0, len(entries))
+ for _, entry := range entries {
+ info, _ := entry.Info() // best-effort: entries whose stat fails report size 0
+ size := int64(0)
+ if info != nil {
+ size = info.Size()
+ }
+ result = append(result, DirEntry{
+ Name: entry.Name(),
+ Path: filepath.Join(cleanPath, entry.Name()),
+ IsDir: entry.IsDir(),
+ Size: size,
+ })
+ }
+ // Directories first, then case-sensitive name order.
+ sort.Slice(result, func(i, j int) bool {
+ if result[i].IsDir != result[j].IsDir {
+ return result[i].IsDir
+ }
+ return result[i].Name < result[j].Name
+ })
+ return result, nil
+}
+
+// Heartbeat updates the node status when an agent reports in, identified
+// by its authentication token.
+//
+// FIXME(review): runtime.GOOS/GOARCH describe the *server* process, not
+// the remote agent, so every heartbeating node gets recorded with the
+// server's platform. The agent's platform should come from the heartbeat
+// payload instead.
+func (s *NodeService) Heartbeat(ctx context.Context, token string, hostname string, ip string, agentVer string) error {
+ node, err := s.repo.FindByToken(ctx, token)
+ if err != nil {
+ return err
+ }
+ if node == nil {
+ return apperror.Unauthorized("NODE_INVALID_TOKEN", "无效的节点认证令牌", nil)
+ }
+ node.Status = model.NodeStatusOnline
+ node.Hostname = hostname
+ node.IPAddress = ip
+ node.AgentVer = agentVer
+ node.OS = runtime.GOOS
+ node.Arch = runtime.GOARCH
+ node.LastSeen = time.Now().UTC()
+ return s.repo.Update(ctx, node)
+}
+
+// DirEntry represents a file or directory in a node's file system, as
+// returned by ListDirectory.
+type DirEntry struct {
+ Name string `json:"name"`
+ Path string `json:"path"`
+ IsDir bool `json:"isDir"`
+ Size int64 `json:"size"`
+}
+
+// generateToken returns 32 cryptographically secure random bytes encoded
+// as a 64-character hex string, used as a node authentication token.
+func generateToken() (string, error) {
+ buf := make([]byte, 32)
+ if _, err := rand.Read(buf); err != nil {
+ return "", err
+ }
+ return hex.EncodeToString(buf), nil
+}
diff --git a/server/internal/service/notification_service.go b/server/internal/service/notification_service.go
new file mode 100644
index 0000000..91ab31c
--- /dev/null
+++ b/server/internal/service/notification_service.go
@@ -0,0 +1,251 @@
+package service
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "net/http"
+ "strings"
+ "time"
+
+ "backupx/server/internal/apperror"
+ "backupx/server/internal/model"
+ "backupx/server/internal/notify"
+ "backupx/server/internal/repository"
+ "backupx/server/internal/storage/codec"
+)
+
+// NotificationUpsertInput is the request body for creating or updating a
+// notification channel.
+type NotificationUpsertInput struct {
+ Name string `json:"name" binding:"required,min=1,max=100"`
+ Type string `json:"type" binding:"required,oneof=email webhook telegram"`
+ Enabled bool `json:"enabled"`
+ OnSuccess bool `json:"onSuccess"`
+ OnFailure bool `json:"onFailure"`
+ Config map[string]any `json:"config" binding:"required"`
+}
+
+// NotificationSummary is the list view of a channel (no config exposed).
+type NotificationSummary struct {
+ ID uint `json:"id"`
+ Name string `json:"name"`
+ Type string `json:"type"`
+ Enabled bool `json:"enabled"`
+ OnSuccess bool `json:"onSuccess"`
+ OnFailure bool `json:"onFailure"`
+ UpdatedAt time.Time `json:"updatedAt"`
+}
+
+// NotificationDetail adds the sensitive-field-masked config to a summary.
+type NotificationDetail struct {
+ NotificationSummary
+ Config map[string]any `json:"config"`
+ MaskedFields []string `json:"maskedFields,omitempty"`
+}
+
+// NotificationService manages notification channel CRUD, test sends and
+// backup-result dispatch. Channel configs are stored encrypted via cipher
+// and validated/sent through the notify registry.
+type NotificationService struct {
+ notifications repository.NotificationRepository
+ registry *notify.Registry
+ cipher *codec.ConfigCipher
+}
+
+// NewNotificationService wires the service to its dependencies.
+func NewNotificationService(notifications repository.NotificationRepository, registry *notify.Registry, cipher *codec.ConfigCipher) *NotificationService {
+ return &NotificationService{notifications: notifications, registry: registry, cipher: cipher}
+}
+
+// List returns summaries for every configured notification channel.
+func (s *NotificationService) List(ctx context.Context) ([]NotificationSummary, error) {
+ items, err := s.notifications.List(ctx)
+ if err != nil {
+ return nil, apperror.Internal("NOTIFICATION_LIST_FAILED", "无法获取通知配置列表", err)
+ }
+ summaries := make([]NotificationSummary, len(items))
+ for i := range items {
+ summaries[i] = toNotificationSummary(&items[i])
+ }
+ return summaries, nil
+}
+
+// Get loads one channel and returns it with sensitive fields masked.
+func (s *NotificationService) Get(ctx context.Context, id uint) (*NotificationDetail, error) {
+ item, err := s.notifications.FindByID(ctx, id)
+ if err != nil {
+ return nil, apperror.Internal("NOTIFICATION_GET_FAILED", "无法获取通知配置详情", err)
+ }
+ if item == nil {
+ return nil, apperror.New(http.StatusNotFound, "NOTIFICATION_NOT_FOUND", "通知配置不存在", fmt.Errorf("notification %d not found", id))
+ }
+ return s.toDetail(item)
+}
+
+// Create validates the input, encrypts the config and persists a new
+// channel, returning the masked detail view of what was stored.
+func (s *NotificationService) Create(ctx context.Context, input NotificationUpsertInput) (*NotificationDetail, error) {
+ if err := s.validateInput(ctx, 0, input); err != nil {
+ return nil, err
+ }
+ item, err := s.buildNotification(nil, input)
+ if err != nil {
+ return nil, err
+ }
+ if err := s.notifications.Create(ctx, item); err != nil {
+ return nil, apperror.Internal("NOTIFICATION_CREATE_FAILED", "无法创建通知配置", err)
+ }
+ // Re-read through Get so the response is masked consistently.
+ return s.Get(ctx, item.ID)
+}
+
+// Update replaces an existing channel's settings. Masked secret values in
+// the input are merged with the stored ones inside buildNotification; the
+// record's ID and CreatedAt are preserved.
+func (s *NotificationService) Update(ctx context.Context, id uint, input NotificationUpsertInput) (*NotificationDetail, error) {
+ existing, err := s.notifications.FindByID(ctx, id)
+ if err != nil {
+ return nil, apperror.Internal("NOTIFICATION_GET_FAILED", "无法获取通知配置详情", err)
+ }
+ if existing == nil {
+ return nil, apperror.New(http.StatusNotFound, "NOTIFICATION_NOT_FOUND", "通知配置不存在", fmt.Errorf("notification %d not found", id))
+ }
+ if err := s.validateInput(ctx, existing.ID, input); err != nil {
+ return nil, err
+ }
+ item, err := s.buildNotification(existing, input)
+ if err != nil {
+ return nil, err
+ }
+ item.ID = existing.ID
+ item.CreatedAt = existing.CreatedAt
+ if err := s.notifications.Update(ctx, item); err != nil {
+ return nil, apperror.Internal("NOTIFICATION_UPDATE_FAILED", "无法更新通知配置", err)
+ }
+ return s.Get(ctx, id)
+}
+
+// Delete removes a channel after confirming it exists (so callers get a
+// 404 rather than a silent no-op).
+func (s *NotificationService) Delete(ctx context.Context, id uint) error {
+ item, err := s.notifications.FindByID(ctx, id)
+ if err != nil {
+ return apperror.Internal("NOTIFICATION_GET_FAILED", "无法获取通知配置详情", err)
+ }
+ if item == nil {
+ return apperror.New(http.StatusNotFound, "NOTIFICATION_NOT_FOUND", "通知配置不存在", fmt.Errorf("notification %d not found", id))
+ }
+ if err := s.notifications.Delete(ctx, id); err != nil {
+ return apperror.Internal("NOTIFICATION_DELETE_FAILED", "无法删除通知配置", err)
+ }
+ return nil
+}
+
+// Test validates an unsaved channel config and sends a test message
+// through it, without persisting anything.
+func (s *NotificationService) Test(ctx context.Context, input NotificationUpsertInput) error {
+ if err := s.registry.Validate(strings.TrimSpace(input.Type), input.Config); err != nil {
+ return apperror.BadRequest("NOTIFICATION_INVALID", "通知配置不合法", err)
+ }
+ message := notify.Message{Title: "BackupX 通知测试", Body: "这是一条来自 BackupX 的测试通知。", Fields: map[string]any{"type": input.Type, "timestamp": time.Now().UTC().Format(time.RFC3339)}}
+ if err := s.registry.Send(ctx, input.Type, input.Config, message); err != nil {
+ return apperror.BadRequest("NOTIFICATION_TEST_FAILED", "发送测试通知失败", err)
+ }
+ return nil
+}
+
+// TestSaved decrypts a stored channel's config and sends a test message
+// through it.
+func (s *NotificationService) TestSaved(ctx context.Context, id uint) error {
+ item, err := s.notifications.FindByID(ctx, id)
+ if err != nil {
+ return apperror.Internal("NOTIFICATION_GET_FAILED", "无法获取通知配置", err)
+ }
+ if item == nil {
+ return apperror.New(http.StatusNotFound, "NOTIFICATION_NOT_FOUND", "通知配置不存在", fmt.Errorf("notification %d not found", id))
+ }
+ configMap := map[string]any{}
+ if err := s.cipher.DecryptJSON(item.ConfigCiphertext, &configMap); err != nil {
+ return apperror.Internal("NOTIFICATION_DECRYPT_FAILED", "无法读取通知配置", err)
+ }
+ message := notify.Message{Title: "BackupX 通知测试", Body: "这是一条来自 BackupX 的测试通知。", Fields: map[string]any{"type": item.Type, "timestamp": time.Now().UTC().Format(time.RFC3339)}}
+ if err := s.registry.Send(ctx, item.Type, configMap, message); err != nil {
+ return apperror.BadRequest("NOTIFICATION_TEST_FAILED", "发送测试通知失败", err)
+ }
+ return nil
+}
+
+// NotifyBackupResult fans a backup result out to every channel enabled
+// for that outcome (success vs. failure). Per-channel errors are
+// accumulated with errors.Join so one failing channel does not prevent
+// delivery to the others; the joined error (or nil) is returned.
+func (s *NotificationService) NotifyBackupResult(ctx context.Context, event BackupExecutionNotification) error {
+ success := event.Error == nil && event.Record != nil && event.Record.Status == "success"
+ items, err := s.notifications.ListEnabledForEvent(ctx, success)
+ if err != nil {
+ return err
+ }
+ message := buildNotificationMessage(event)
+ var joined error
+ for _, item := range items {
+ configMap := map[string]any{}
+ if err := s.cipher.DecryptJSON(item.ConfigCiphertext, &configMap); err != nil {
+ joined = errors.Join(joined, fmt.Errorf("decrypt notification %d config: %w", item.ID, err))
+ continue
+ }
+ if err := s.registry.Send(ctx, item.Type, configMap, message); err != nil {
+ joined = errors.Join(joined, fmt.Errorf("send notification %s failed: %w", item.Name, err))
+ }
+ }
+ return joined
+}
+
+// validateInput enforces name uniqueness (excluding the record being
+// updated, identified by currentID) and delegates config validation to
+// the notifier registry.
+func (s *NotificationService) validateInput(ctx context.Context, currentID uint, input NotificationUpsertInput) error {
+ name := strings.TrimSpace(input.Name)
+ duplicate, err := s.notifications.FindByName(ctx, name)
+ if err != nil {
+ return apperror.Internal("NOTIFICATION_LOOKUP_FAILED", "无法检查通知配置名称", err)
+ }
+ if duplicate != nil && duplicate.ID != currentID {
+ return apperror.Conflict("NOTIFICATION_NAME_EXISTS", "通知配置名称已存在", nil)
+ }
+ if validateErr := s.registry.Validate(strings.TrimSpace(input.Type), input.Config); validateErr != nil {
+ return apperror.BadRequest("NOTIFICATION_INVALID", "通知配置不合法", validateErr)
+ }
+ return nil
+}
+
+func (s *NotificationService) buildNotification(existing *model.Notification, input NotificationUpsertInput) (*model.Notification, error) {
+ configMap := input.Config
+ if existing != nil {
+ currentConfig := map[string]any{}
+ if err := s.cipher.DecryptJSON(existing.ConfigCiphertext, ¤tConfig); err != nil {
+ return nil, apperror.Internal("NOTIFICATION_DECRYPT_FAILED", "无法读取现有通知配置", err)
+ }
+ configMap = codec.MergeMaskedConfig(input.Config, currentConfig, s.registry.SensitiveFields(input.Type))
+ }
+ ciphertext, err := s.cipher.EncryptJSON(configMap)
+ if err != nil {
+ return nil, apperror.Internal("NOTIFICATION_ENCRYPT_FAILED", "无法保存通知配置", err)
+ }
+ item := &model.Notification{Name: strings.TrimSpace(input.Name), Type: strings.TrimSpace(input.Type), ConfigCiphertext: ciphertext, Enabled: input.Enabled, OnSuccess: input.OnSuccess, OnFailure: input.OnFailure}
+ return item, nil
+}
+
+// toDetail decrypts a channel's config and returns it with sensitive
+// fields masked, alongside the list of masked field names so the UI can
+// tell placeholders apart from real values.
+func (s *NotificationService) toDetail(item *model.Notification) (*NotificationDetail, error) {
+ configMap := map[string]any{}
+ if err := s.cipher.DecryptJSON(item.ConfigCiphertext, &configMap); err != nil {
+ return nil, apperror.Internal("NOTIFICATION_DECRYPT_FAILED", "无法读取通知配置", err)
+ }
+ sensitiveFields := s.registry.SensitiveFields(item.Type)
+ return &NotificationDetail{NotificationSummary: toNotificationSummary(item), Config: codec.MaskConfig(configMap, sensitiveFields), MaskedFields: sensitiveFields}, nil
+}
+
+// toNotificationSummary projects a stored notification onto its API
+// summary representation.
+func toNotificationSummary(item *model.Notification) NotificationSummary {
+ return NotificationSummary{
+ ID: item.ID,
+ Name: item.Name,
+ Type: item.Type,
+ Enabled: item.Enabled,
+ OnSuccess: item.OnSuccess,
+ OnFailure: item.OnFailure,
+ UpdatedAt: item.UpdatedAt,
+ }
+}
+
+// buildNotificationMessage renders a human-readable (Chinese) message
+// body plus structured fields from a backup execution event. Optional
+// record details (file name/size, error) are appended only when present.
+func buildNotificationMessage(event BackupExecutionNotification) notify.Message {
+ statusText := "失败"
+ if event.Error == nil && event.Record != nil && event.Record.Status == "success" {
+ statusText = "成功"
+ }
+ taskName := "未知任务"
+ if event.Task != nil {
+ taskName = event.Task.Name
+ }
+ body := fmt.Sprintf("任务:%s\n状态:%s", taskName, statusText)
+ fields := map[string]any{"taskName": taskName, "status": statusText}
+ if event.Record != nil {
+ body += fmt.Sprintf("\n开始时间:%s\n耗时:%d 秒", event.Record.StartedAt.Format(time.RFC3339), event.Record.DurationSeconds)
+ fields["recordId"] = event.Record.ID
+ fields["durationSeconds"] = event.Record.DurationSeconds
+ if event.Record.FileName != "" {
+ body += fmt.Sprintf("\n文件:%s", event.Record.FileName)
+ fields["fileName"] = event.Record.FileName
+ }
+ if event.Record.FileSize > 0 {
+ body += fmt.Sprintf("\n大小:%d", event.Record.FileSize)
+ fields["fileSize"] = event.Record.FileSize
+ }
+ if event.Record.ErrorMessage != "" {
+ body += fmt.Sprintf("\n错误:%s", event.Record.ErrorMessage)
+ fields["error"] = event.Record.ErrorMessage
+ }
+ }
+ return notify.Message{Title: "BackupX 备份" + statusText + "通知", Body: body, Fields: fields}
+}
diff --git a/server/internal/service/retention_service.go b/server/internal/service/retention_service.go
new file mode 100644
index 0000000..68184f0
--- /dev/null
+++ b/server/internal/service/retention_service.go
@@ -0,0 +1,71 @@
+package service
+
+import (
+ "context"
+ "time"
+
+ "backupx/server/internal/model"
+ "backupx/server/internal/repository"
+ "backupx/server/internal/storage"
+ "backupx/server/internal/storage/codec"
+)
+
+// RetentionService prunes old successful backup records according to a
+// task's retention policy (max age in days and/or max number of backups).
+type RetentionService struct {
+ records repository.BackupRecordRepository
+ storageTargets repository.StorageTargetRepository
+ storageRegistry *storage.Registry
+ cipher *codec.ConfigCipher
+}
+
+// NewRetentionService wires the service to its dependencies.
+func NewRetentionService(records repository.BackupRecordRepository, storageTargets repository.StorageTargetRepository, storageRegistry *storage.Registry, cipher *codec.ConfigCipher) *RetentionService {
+ return &RetentionService{records: records, storageTargets: storageTargets, storageRegistry: storageRegistry, cipher: cipher}
+}
+
+// Apply enforces the task's retention rules: records older than
+// RetentionDays, and records beyond MaxBackups, are deleted both from
+// the storage backend and from the database. A nil task or a task with
+// no limits set is a no-op.
+//
+// NOTE(review): the MaxBackups pass keeps the first MaxBackups unmarked
+// records in list order — this only keeps the *newest* backups if
+// ListSuccessfulByTask returns newest-first; confirm the repository's
+// ordering.
+func (s *RetentionService) Apply(ctx context.Context, task *model.BackupTask) error {
+ if task == nil || (task.RetentionDays <= 0 && task.MaxBackups <= 0) {
+ return nil
+ }
+ items, err := s.records.ListSuccessfulByTask(ctx, task.ID)
+ if err != nil {
+ return err
+ }
+ removeSet := make(map[uint]model.BackupRecord)
+ // Pass 1: age-based expiry.
+ if task.RetentionDays > 0 {
+ cutoff := time.Now().UTC().AddDate(0, 0, -task.RetentionDays)
+ for _, item := range items {
+ if item.CompletedAt != nil && item.CompletedAt.Before(cutoff) {
+ removeSet[item.ID] = item
+ }
+ }
+ }
+ // Pass 2: count-based cap over the records not already marked.
+ if task.MaxBackups > 0 {
+ kept := 0
+ for _, item := range items {
+ if _, marked := removeSet[item.ID]; marked {
+ continue
+ }
+ kept++
+ if kept > task.MaxBackups {
+ removeSet[item.ID] = item
+ }
+ }
+ }
+ if len(removeSet) == 0 {
+ return nil
+ }
+ provider, _, err := buildStorageProviderFromRepos(ctx, task.StorageTargetID, s.storageTargets, s.storageRegistry, s.cipher)
+ if err != nil {
+ return err
+ }
+ // Delete the remote object first, then the DB row; stopping at the
+ // first error leaves remaining records for the next retention run.
+ for _, item := range removeSet {
+ if item.StoragePath != "" {
+ if err := provider.Delete(ctx, item.StoragePath); err != nil {
+ return err
+ }
+ }
+ if err := s.records.Delete(ctx, item.ID); err != nil {
+ return err
+ }
+ }
+ return nil
+}
diff --git a/server/internal/service/security_config_service.go b/server/internal/service/security_config_service.go
new file mode 100644
index 0000000..c62e2a1
--- /dev/null
+++ b/server/internal/service/security_config_service.go
@@ -0,0 +1,60 @@
+package service
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ "backupx/server/internal/config"
+ "backupx/server/internal/model"
+ "backupx/server/internal/repository"
+ "backupx/server/internal/security"
+)
+
+const (
+ // System-config keys under which resolved security material is stored.
+ jwtSecretKey = "security.jwt_secret"
+ encryptionKeyKey = "security.encryption_key"
+)
+
+// ResolvedSecurity holds the effective JWT signing secret and config
+// encryption key after resolution against config and the database.
+type ResolvedSecurity struct {
+ JWTSecret string
+ EncryptionKey string
+}
+
+func ResolveSecurity(ctx context.Context, cfg config.SecurityConfig, repo repository.SystemConfigRepository) (ResolvedSecurity, error) {
+ jwtSecret, err := ensureSecurityValue(ctx, repo, jwtSecretKey, cfg.JWTSecret, 48)
+ if err != nil {
+ return ResolvedSecurity{}, fmt.Errorf("resolve jwt secret: %w", err)
+ }
+ encryptionKey, err := ensureSecurityValue(ctx, repo, encryptionKeyKey, cfg.EncryptionKey, 48)
+ if err != nil {
+ return ResolvedSecurity{}, fmt.Errorf("resolve encryption key: %w", err)
+ }
+ return ResolvedSecurity{JWTSecret: jwtSecret, EncryptionKey: encryptionKey}, nil
+}
+
+// ensureSecurityValue resolves a secret with the precedence:
+// explicitly configured value (persisted so it is visible in the DB) >
+// previously stored value > freshly generated random secret of the given
+// size (persisted so restarts reuse it).
+func ensureSecurityValue(ctx context.Context, repo repository.SystemConfigRepository, key, configuredValue string, size int) (string, error) {
+ if strings.TrimSpace(configuredValue) != "" {
+ if err := repo.Upsert(ctx, &model.SystemConfig{Key: key, Value: configuredValue, Encrypted: false}); err != nil {
+ return "", err
+ }
+ return configuredValue, nil
+ }
+
+ stored, err := repo.GetByKey(ctx, key)
+ if err != nil {
+ return "", err
+ }
+ if stored != nil && strings.TrimSpace(stored.Value) != "" {
+ return stored.Value, nil
+ }
+
+ // First run with no configured value: generate and persist one.
+ generated, err := security.GenerateSecret(size)
+ if err != nil {
+ return "", err
+ }
+ if err := repo.Upsert(ctx, &model.SystemConfig{Key: key, Value: generated, Encrypted: false}); err != nil {
+ return "", err
+ }
+ return generated, nil
+}
diff --git a/server/internal/service/settings_service.go b/server/internal/service/settings_service.go
new file mode 100644
index 0000000..64a82e3
--- /dev/null
+++ b/server/internal/service/settings_service.go
@@ -0,0 +1,54 @@
+package service
+
+import (
+ "context"
+
+ "backupx/server/internal/apperror"
+ "backupx/server/internal/model"
+ "backupx/server/internal/repository"
+)
+
+// SettingsService exposes user-editable system settings backed by the
+// system-config repository.
+type SettingsService struct {
+ configs repository.SystemConfigRepository
+}
+
+// NewSettingsService wires a SettingsService to its repository.
+func NewSettingsService(configs repository.SystemConfigRepository) *SettingsService {
+ return &SettingsService{configs: configs}
+}
+
+// settingsKeys lists all user-editable setting keys; Update silently
+// ignores any key not in this whitelist.
+var settingsKeys = []string{
+ "site_name",
+ "language",
+ "timezone",
+ "backup_notification_enabled",
+}
+
+// GetAll returns every stored system config entry as a key→value map.
+func (s *SettingsService) GetAll(ctx context.Context) (map[string]string, error) {
+ items, err := s.configs.List(ctx)
+ if err != nil {
+ return nil, apperror.Internal("SETTINGS_LIST_FAILED", "无法获取系统设置", err)
+ }
+ values := make(map[string]string, len(items))
+ for _, entry := range items {
+ values[entry.Key] = entry.Value
+ }
+ return values, nil
+}
+
+// Update persists the whitelisted settings from the request and returns
+// the full, refreshed settings map. Keys outside settingsKeys are
+// silently dropped.
+func (s *SettingsService) Update(ctx context.Context, settings map[string]string) (map[string]string, error) {
+ allowed := make(map[string]struct{}, len(settingsKeys))
+ for _, key := range settingsKeys {
+ allowed[key] = struct{}{}
+ }
+ for key, value := range settings {
+ if _, ok := allowed[key]; !ok {
+ continue // not user-editable
+ }
+ if err := s.configs.Upsert(ctx, &model.SystemConfig{Key: key, Value: value}); err != nil {
+ return nil, apperror.Internal("SETTINGS_UPDATE_FAILED", "无法更新系统设置", err)
+ }
+ }
+ return s.GetAll(ctx)
+}
diff --git a/server/internal/service/storage_target_service.go b/server/internal/service/storage_target_service.go
new file mode 100644
index 0000000..0ac209e
--- /dev/null
+++ b/server/internal/service/storage_target_service.go
@@ -0,0 +1,556 @@
+package service
+
+import (
+ "context"
+ "fmt"
+ "net/http"
+ "strings"
+ "time"
+
+ "backupx/server/internal/apperror"
+ "backupx/server/internal/model"
+ "backupx/server/internal/repository"
+ "backupx/server/internal/security"
+ "backupx/server/internal/storage"
+ "backupx/server/internal/storage/codec"
+ "golang.org/x/oauth2"
+ googleoauth "golang.org/x/oauth2/google"
+ goauth2api "google.golang.org/api/oauth2/v2"
+ "google.golang.org/api/option"
+)
+
+// StorageTargetUpsertInput is the request body for creating or updating
+// a storage target.
+type StorageTargetUpsertInput struct {
+ Name string `json:"name" binding:"required,min=1,max=128"`
+ Type string `json:"type" binding:"required,oneof=local_disk google_drive s3 webdav"`
+ Description string `json:"description" binding:"max=255"`
+ Enabled bool `json:"enabled"`
+ Config map[string]any `json:"config" binding:"required"`
+}
+
+// StorageTargetTestInput tests either a saved target (TargetID set) or an
+// unsaved payload.
+type StorageTargetTestInput struct {
+ TargetID *uint `json:"targetId"`
+ Payload StorageTargetUpsertInput `json:"payload"`
+}
+
+// GoogleDriveAuthStartInput starts the Google Drive OAuth flow for a new
+// or existing (TargetID set) target.
+type GoogleDriveAuthStartInput struct {
+ TargetID *uint `json:"targetId"`
+ Name string `json:"name" binding:"required,min=1,max=128"`
+ Description string `json:"description" binding:"max=255"`
+ Enabled bool `json:"enabled"`
+ ClientID string `json:"clientId" binding:"required"`
+ ClientSecret string `json:"clientSecret" binding:"required"`
+ FolderID string `json:"folderId"`
+}
+
+// GoogleDriveAuthCompleteInput carries the OAuth callback parameters.
+type GoogleDriveAuthCompleteInput struct {
+ State string `json:"state" binding:"required"`
+ Code string `json:"code" binding:"required"`
+}
+
+// StorageTargetSummary is the list view of a storage target, including
+// the result of the most recent connection test.
+type StorageTargetSummary struct {
+ ID uint `json:"id"`
+ Name string `json:"name"`
+ Type string `json:"type"`
+ Description string `json:"description"`
+ Enabled bool `json:"enabled"`
+ ConfigVersion int `json:"configVersion"`
+ LastTestedAt *time.Time `json:"lastTestedAt"`
+ LastTestStatus string `json:"lastTestStatus"`
+ LastTestMessage string `json:"lastTestMessage"`
+ UpdatedAt time.Time `json:"updatedAt"`
+}
+
+// StorageTargetDetail adds the sensitive-field-masked config to a summary.
+type StorageTargetDetail struct {
+ StorageTargetSummary
+ Config map[string]any `json:"config"`
+ MaskedFields []string `json:"maskedFields,omitempty"`
+}
+
+// GoogleDriveAuthStartResult is returned to the UI to redirect the user
+// to Google's consent screen.
+type GoogleDriveAuthStartResult struct {
+ AuthorizationURL string `json:"authorizationUrl"`
+ State string `json:"state"`
+ ExpiresAt time.Time `json:"expiresAt"`
+}
+
+// googleDriveOAuthDraft is the encrypted payload stored in an OAuth
+// session between Start and Complete.
+type googleDriveOAuthDraft struct {
+ TargetID *uint `json:"targetId"`
+ Name string `json:"name"`
+ Description string `json:"description"`
+ Enabled bool `json:"enabled"`
+ ClientID string `json:"clientId"`
+ ClientSecret string `json:"clientSecret"`
+ FolderID string `json:"folderId"`
+ RedirectURI string `json:"redirectUri"`
+}
+
+// StorageTargetService manages storage target CRUD, connection testing
+// and the Google Drive OAuth flow. Target configs are stored encrypted.
+type StorageTargetService struct {
+ targets repository.StorageTargetRepository
+ oauthSessions repository.OAuthSessionRepository
+ backupTasks repository.BackupTaskRepository
+ records repository.BackupRecordRepository
+ registry *storage.Registry
+ cipher *codec.ConfigCipher
+}
+
+// NewStorageTargetService wires the service to its core dependencies;
+// the task/record repositories are injected later via the setters below.
+func NewStorageTargetService(
+ targets repository.StorageTargetRepository,
+ oauthSessions repository.OAuthSessionRepository,
+ registry *storage.Registry,
+ cipher *codec.ConfigCipher,
+) *StorageTargetService {
+ return &StorageTargetService{targets: targets, oauthSessions: oauthSessions, registry: registry, cipher: cipher}
+}
+
+// SetBackupTaskRepository injects the task repository (used for
+// delete-time reference checks); optional to avoid a dependency cycle.
+func (s *StorageTargetService) SetBackupTaskRepository(tasks repository.BackupTaskRepository) {
+ s.backupTasks = tasks
+}
+
+// SetBackupRecordRepository injects the backup record repository.
+func (s *StorageTargetService) SetBackupRecordRepository(records repository.BackupRecordRepository) {
+ s.records = records
+}
+
+// List returns summaries for every configured storage target.
+func (s *StorageTargetService) List(ctx context.Context) ([]StorageTargetSummary, error) {
+ items, err := s.targets.List(ctx)
+ if err != nil {
+ return nil, apperror.Internal("STORAGE_TARGET_LIST_FAILED", "无法获取存储目标列表", err)
+ }
+ summaries := make([]StorageTargetSummary, len(items))
+ for i := range items {
+ summaries[i] = toStorageTargetSummary(&items[i])
+ }
+ return summaries, nil
+}
+
+// Get loads one storage target and returns it with sensitive config
+// fields masked.
+func (s *StorageTargetService) Get(ctx context.Context, id uint) (*StorageTargetDetail, error) {
+ item, err := s.targets.FindByID(ctx, id)
+ if err != nil {
+ return nil, apperror.Internal("STORAGE_TARGET_GET_FAILED", "无法获取存储目标详情", err)
+ }
+ if item == nil {
+ return nil, apperror.New(http.StatusNotFound, "STORAGE_TARGET_NOT_FOUND", "存储目标不存在", fmt.Errorf("storage target %d not found", id))
+ }
+ configMap, err := s.decryptTargetConfig(item)
+ if err != nil {
+ return nil, apperror.Internal("STORAGE_TARGET_DECRYPT_FAILED", "无法解密存储目标配置", err)
+ }
+ sensitiveFields := s.registry.SensitiveFields(storage.ParseProviderType(item.Type))
+ return &StorageTargetDetail{StorageTargetSummary: toStorageTargetSummary(item), Config: codec.MaskConfig(configMap, sensitiveFields), MaskedFields: sensitiveFields}, nil
+}
+
+// Create validates the type and name, encrypts the config and persists a
+// new target, returning the masked detail view.
+func (s *StorageTargetService) Create(ctx context.Context, input StorageTargetUpsertInput) (*StorageTargetDetail, error) {
+ if err := s.validateType(input.Type); err != nil {
+ return nil, err
+ }
+ existing, err := s.targets.FindByName(ctx, strings.TrimSpace(input.Name))
+ if err != nil {
+ return nil, apperror.Internal("STORAGE_TARGET_LOOKUP_FAILED", "无法检查存储目标名称", err)
+ }
+ if existing != nil {
+ return nil, apperror.Conflict("STORAGE_TARGET_NAME_EXISTS", "存储目标名称已存在", nil)
+ }
+ item, err := s.buildStorageTarget(ctx, nil, input)
+ if err != nil {
+ return nil, err
+ }
+ if err := s.targets.Create(ctx, item); err != nil {
+ return nil, apperror.Internal("STORAGE_TARGET_CREATE_FAILED", "无法创建存储目标", err)
+ }
+ // Re-read through Get so the response is masked consistently.
+ return s.Get(ctx, item.ID)
+}
+
+// Update replaces an existing target's settings while preserving its ID
+// and CreatedAt; the new name must not collide with another target.
+func (s *StorageTargetService) Update(ctx context.Context, id uint, input StorageTargetUpsertInput) (*StorageTargetDetail, error) {
+ if err := s.validateType(input.Type); err != nil {
+ return nil, err
+ }
+ existing, err := s.targets.FindByID(ctx, id)
+ if err != nil {
+ return nil, apperror.Internal("STORAGE_TARGET_GET_FAILED", "无法获取存储目标详情", err)
+ }
+ if existing == nil {
+ return nil, apperror.New(http.StatusNotFound, "STORAGE_TARGET_NOT_FOUND", "存储目标不存在", fmt.Errorf("storage target %d not found", id))
+ }
+ if sameName, err := s.targets.FindByName(ctx, strings.TrimSpace(input.Name)); err != nil {
+ return nil, apperror.Internal("STORAGE_TARGET_LOOKUP_FAILED", "无法检查存储目标名称", err)
+ } else if sameName != nil && sameName.ID != existing.ID {
+ return nil, apperror.Conflict("STORAGE_TARGET_NAME_EXISTS", "存储目标名称已存在", nil)
+ }
+ item, err := s.buildStorageTarget(ctx, existing, input)
+ if err != nil {
+ return nil, err
+ }
+ item.ID = existing.ID
+ item.CreatedAt = existing.CreatedAt
+ if err := s.targets.Update(ctx, item); err != nil {
+ return nil, apperror.Internal("STORAGE_TARGET_UPDATE_FAILED", "无法更新存储目标", err)
+ }
+ return s.Get(ctx, item.ID)
+}
+
+// Delete removes a target unless a backup task still references it (the
+// reference check is skipped when no task repository was injected).
+func (s *StorageTargetService) Delete(ctx context.Context, id uint) error {
+ existing, err := s.targets.FindByID(ctx, id)
+ if err != nil {
+ return apperror.Internal("STORAGE_TARGET_GET_FAILED", "无法获取存储目标详情", err)
+ }
+ if existing == nil {
+ return apperror.New(http.StatusNotFound, "STORAGE_TARGET_NOT_FOUND", "存储目标不存在", fmt.Errorf("storage target %d not found", id))
+ }
+ if s.backupTasks != nil {
+ count, countErr := s.backupTasks.CountByStorageTargetID(ctx, id)
+ if countErr != nil {
+ return apperror.Internal("STORAGE_TARGET_REF_CHECK_FAILED", "无法检查存储目标引用关系", countErr)
+ }
+ if count > 0 {
+ return apperror.Conflict("STORAGE_TARGET_IN_USE", "当前存储目标已被备份任务引用,无法删除", nil)
+ }
+ }
+ if err := s.targets.Delete(ctx, id); err != nil {
+ return apperror.Internal("STORAGE_TARGET_DELETE_FAILED", "无法删除存储目标", err)
+ }
+ return nil
+}
+
+// TestConnection builds a provider for a saved or unsaved target and
+// runs its connectivity check. For saved targets (ID != 0) the test
+// outcome is persisted on the record; a BadRequest apperror is returned
+// when the config is invalid or the test fails.
+func (s *StorageTargetService) TestConnection(ctx context.Context, input StorageTargetTestInput) error {
+ item, err := s.buildStorageTargetForTest(ctx, input)
+ if err != nil {
+ return err
+ }
+ configMap, err := s.decryptTargetConfig(item)
+ if err != nil {
+ return apperror.Internal("STORAGE_TARGET_DECRYPT_FAILED", "无法解密存储目标配置", err)
+ }
+ provider, err := s.registry.Create(ctx, storage.ParseProviderType(item.Type), configMap)
+ if err != nil {
+ return apperror.BadRequest("STORAGE_TARGET_INVALID_CONFIG", sanitizeMessage(err.Error()), err)
+ }
+ testErr := provider.TestConnection(ctx)
+ now := time.Now().UTC()
+ item.LastTestedAt = &now
+ if testErr != nil {
+ item.LastTestStatus = "failed"
+ item.LastTestMessage = sanitizeMessage(testErr.Error())
+ } else {
+ item.LastTestStatus = "success"
+ item.LastTestMessage = "连接成功"
+ }
+ if item.ID != 0 {
+ // Best-effort persistence of the test result; a failure to save the
+ // status is deliberately not surfaced over the test outcome itself.
+ _ = s.targets.Update(ctx, item)
+ }
+ if testErr != nil {
+ return apperror.BadRequest("STORAGE_TARGET_TEST_FAILED", sanitizeMessage(testErr.Error()), testErr)
+ }
+ return nil
+}
+
+// StartGoogleDriveOAuth prepares a Google Drive authorization flow: it
+// validates the draft parameters, encrypts them into a short-lived (10 min)
+// OAuth session keyed by a random state token, and returns the Google
+// consent URL. "prompt=consent" plus offline access is required so Google
+// issues a refresh token.
+func (s *StorageTargetService) StartGoogleDriveOAuth(ctx context.Context, input GoogleDriveAuthStartInput, origin string) (*GoogleDriveAuthStartResult, error) {
+	origin = normalizeOrigin(origin)
+	if origin == "" {
+		return nil, apperror.BadRequest("STORAGE_GOOGLE_OAUTH_ORIGIN_REQUIRED", "无法确定 Google Drive 回调地址", nil)
+	}
+	draft, err := s.buildGoogleDriveDraft(ctx, input, origin)
+	if err != nil {
+		return nil, err
+	}
+	// The draft contains client credentials, so it is stored encrypted.
+	payload, err := s.cipher.EncryptJSON(draft)
+	if err != nil {
+		return nil, apperror.Internal("STORAGE_GOOGLE_OAUTH_ENCRYPT_FAILED", "无法创建授权会话", err)
+	}
+	state, err := security.GenerateSecret(24)
+	if err != nil {
+		return nil, apperror.Internal("STORAGE_GOOGLE_OAUTH_STATE_FAILED", "无法生成授权状态", err)
+	}
+	expiresAt := time.Now().UTC().Add(10 * time.Minute)
+	session := &model.OAuthSession{ProviderType: storage.TypeGoogleDrive, State: state, PayloadCiphertext: payload, TargetID: input.TargetID, ExpiresAt: expiresAt}
+	if err := s.oauthSessions.Create(ctx, session); err != nil {
+		return nil, apperror.Internal("STORAGE_GOOGLE_OAUTH_SESSION_FAILED", "无法创建授权会话", err)
+	}
+	oauthCfg := &oauth2.Config{ClientID: draft.ClientID, ClientSecret: draft.ClientSecret, RedirectURL: draft.RedirectURI, Endpoint: googleoauth.Endpoint, Scopes: []string{"https://www.googleapis.com/auth/drive"}}
+	url := oauthCfg.AuthCodeURL(state, oauth2.AccessTypeOffline, oauth2.SetAuthURLParam("prompt", "consent"))
+	return &GoogleDriveAuthStartResult{AuthorizationURL: url, State: state, ExpiresAt: expiresAt}, nil
+}
+
+// CompleteGoogleDriveOAuth finishes the authorization flow: it looks up the
+// session by state, marks it consumed, exchanges the authorization code for
+// tokens, and creates or updates the storage target with the resulting
+// refresh token. Sessions are single-use and expire after 10 minutes.
+func (s *StorageTargetService) CompleteGoogleDriveOAuth(ctx context.Context, input GoogleDriveAuthCompleteInput) (*StorageTargetDetail, error) {
+	session, err := s.oauthSessions.FindByState(ctx, strings.TrimSpace(input.State))
+	if err != nil {
+		return nil, apperror.Internal("STORAGE_GOOGLE_OAUTH_SESSION_FAILED", "无法读取授权会话", err)
+	}
+	if session == nil || session.UsedAt != nil || time.Now().UTC().After(session.ExpiresAt) {
+		return nil, apperror.BadRequest("STORAGE_GOOGLE_OAUTH_STATE_INVALID", "Google Drive 授权状态无效或已过期", nil)
+	}
+	// Mark used immediately to prevent duplicate requests (e.g. React StrictMode double invocation)
+	now := time.Now().UTC()
+	session.UsedAt = &now
+	_ = s.oauthSessions.Update(ctx, session)
+
+	var draft googleDriveOAuthDraft
+	if err := s.cipher.DecryptJSON(session.PayloadCiphertext, &draft); err != nil {
+		return nil, apperror.Internal("STORAGE_GOOGLE_OAUTH_DECRYPT_FAILED", "无法读取授权会话内容", err)
+	}
+	oauthCfg := &oauth2.Config{ClientID: draft.ClientID, ClientSecret: draft.ClientSecret, RedirectURL: draft.RedirectURI, Endpoint: googleoauth.Endpoint, Scopes: []string{"https://www.googleapis.com/auth/drive"}}
+	token, err := oauthCfg.Exchange(ctx, strings.TrimSpace(input.Code))
+	if err != nil {
+		return nil, apperror.BadRequest("STORAGE_GOOGLE_OAUTH_EXCHANGE_FAILED", "Google Drive 授权码换取失败", err)
+	}
+	// Without a refresh token the stored target could never re-authenticate,
+	// so require one up front (Google only issues it with prompt=consent).
+	if strings.TrimSpace(token.RefreshToken) == "" {
+		return nil, apperror.BadRequest("STORAGE_GOOGLE_OAUTH_REFRESH_TOKEN_MISSING", "未获取到 Google Drive refresh token,请重新授权", nil)
+	}
+	configMap := map[string]any{
+		"clientId":     draft.ClientID,
+		"clientSecret": draft.ClientSecret,
+		"refreshToken": token.RefreshToken,
+		"folderId":     draft.FolderID,
+		"redirectUri":  draft.RedirectURI,
+	}
+	payload := StorageTargetUpsertInput{Name: draft.Name, Type: storage.TypeGoogleDrive, Description: draft.Description, Enabled: draft.Enabled, Config: configMap}
+	var detail *StorageTargetDetail
+	// A session bound to an existing target updates it; otherwise create new.
+	if session.TargetID != nil {
+		detail, err = s.Update(ctx, *session.TargetID, payload)
+	} else {
+		detail, err = s.Create(ctx, payload)
+	}
+	if err != nil {
+		return nil, err
+	}
+	return detail, nil
+}
+
+// GoogleDriveProfile returns the Google account (email/name/picture) behind
+// a stored Google Drive target by decrypting its config and calling the
+// OAuth2 userinfo endpoint with a token refreshed from the stored
+// refresh token.
+func (s *StorageTargetService) GoogleDriveProfile(ctx context.Context, id uint) (map[string]any, error) {
+	detail, err := s.Get(ctx, id)
+	if err != nil {
+		return nil, err
+	}
+	if detail.Type != storage.TypeGoogleDrive {
+		return nil, apperror.BadRequest("STORAGE_GOOGLE_DRIVE_TYPE_MISMATCH", "目标不是 Google Drive 存储类型", nil)
+	}
+	stored, err := s.targets.FindByID(ctx, id)
+	if err != nil || stored == nil {
+		return nil, apperror.New(http.StatusNotFound, "STORAGE_TARGET_NOT_FOUND", "存储目标不存在", err)
+	}
+	var cfg storage.GoogleDriveConfig
+	if err := s.cipher.DecryptJSON(stored.ConfigCiphertext, &cfg); err != nil {
+		return nil, apperror.Internal("STORAGE_TARGET_DECRYPT_FAILED", "无法解密存储目标配置", err)
+	}
+	cfg = cfg.Normalize()
+	oauthCfg := &oauth2.Config{ClientID: cfg.ClientID, ClientSecret: cfg.ClientSecret, Endpoint: googleoauth.Endpoint, RedirectURL: cfg.RedirectURL, Scopes: []string{"https://www.googleapis.com/auth/drive"}}
+	// The expired token forces the token source to refresh immediately.
+	tokenSource := oauthCfg.TokenSource(ctx, &oauth2.Token{RefreshToken: cfg.RefreshToken, Expiry: time.Now().Add(-time.Hour)})
+	client, err := goauth2api.NewService(ctx, option.WithTokenSource(tokenSource))
+	if err != nil {
+		return nil, apperror.BadRequest("STORAGE_GOOGLE_PROFILE_FAILED", "无法获取 Google Drive 用户信息", err)
+	}
+	userInfo, err := client.Userinfo.Get().Do()
+	if err != nil {
+		return nil, apperror.BadRequest("STORAGE_GOOGLE_PROFILE_FAILED", "无法获取 Google Drive 用户信息", err)
+	}
+	return map[string]any{"email": userInfo.Email, "name": userInfo.Name, "picture": userInfo.Picture}, nil
+}
+
+// buildStorageTargetForTest resolves the target used for a connection test.
+// Without a TargetID the payload alone describes an unsaved target; with a
+// TargetID and an empty payload the stored target is tested as-is; with
+// both, the payload is layered over the stored target (so masked secrets
+// are merged back in) while the stored test history is carried over.
+func (s *StorageTargetService) buildStorageTargetForTest(ctx context.Context, input StorageTargetTestInput) (*model.StorageTarget, error) {
+	if input.TargetID == nil {
+		return s.buildStorageTarget(ctx, nil, input.Payload)
+	}
+	existing, err := s.targets.FindByID(ctx, *input.TargetID)
+	if err != nil {
+		return nil, apperror.Internal("STORAGE_TARGET_GET_FAILED", "无法获取存储目标详情", err)
+	}
+	if existing == nil {
+		return nil, apperror.New(http.StatusNotFound, "STORAGE_TARGET_NOT_FOUND", "存储目标不存在", fmt.Errorf("storage target %d not found", *input.TargetID))
+	}
+	// An entirely empty payload means "test the stored config unchanged".
+	if strings.TrimSpace(input.Payload.Type) == "" && strings.TrimSpace(input.Payload.Name) == "" && len(input.Payload.Config) == 0 {
+		return existing, nil
+	}
+	item, err := s.buildStorageTarget(ctx, existing, input.Payload)
+	if err != nil {
+		return nil, err
+	}
+	item.ID = existing.ID
+	item.LastTestedAt = existing.LastTestedAt
+	item.LastTestStatus = existing.LastTestStatus
+	item.LastTestMessage = existing.LastTestMessage
+	return item, nil
+}
+
+// buildStorageTarget assembles a model.StorageTarget from input, validating
+// and merging the config (via prepareConfig) and encrypting it. When an
+// existing target is supplied, its test history is preserved and its config
+// version is kept as long as the provider type is unchanged.
+func (s *StorageTargetService) buildStorageTarget(ctx context.Context, existing *model.StorageTarget, input StorageTargetUpsertInput) (*model.StorageTarget, error) {
+	configMap, err := s.prepareConfig(ctx, existing, input)
+	if err != nil {
+		return nil, err
+	}
+	ciphertext, err := s.cipher.EncryptJSON(configMap)
+	if err != nil {
+		return nil, apperror.Internal("STORAGE_TARGET_ENCRYPT_FAILED", "无法保存存储目标配置", err)
+	}
+	item := &model.StorageTarget{
+		Name:             strings.TrimSpace(input.Name),
+		Type:             input.Type,
+		Description:      strings.TrimSpace(input.Description),
+		Enabled:          input.Enabled,
+		ConfigCiphertext: ciphertext,
+		ConfigVersion:    1,
+		LastTestStatus:   "unknown",
+	}
+	if existing != nil {
+		// Carry over test history; a fresh build must not erase it.
+		item.LastTestedAt = existing.LastTestedAt
+		item.LastTestStatus = existing.LastTestStatus
+		item.LastTestMessage = existing.LastTestMessage
+		if existing.Type == input.Type {
+			item.ConfigVersion = existing.ConfigVersion
+		}
+	}
+	return item, nil
+}
+
+// prepareConfig validates the provider type, merges masked sensitive fields
+// back from the existing stored config (so clients can echo "********"
+// placeholders), and verifies the final config by instantiating a provider.
+// Changing a target's provider type is explicitly rejected.
+func (s *StorageTargetService) prepareConfig(ctx context.Context, existing *model.StorageTarget, input StorageTargetUpsertInput) (map[string]any, error) {
+	if err := s.validateType(input.Type); err != nil {
+		return nil, err
+	}
+	configMap := cloneMap(input.Config)
+	if existing != nil {
+		if existing.Type != input.Type {
+			return nil, apperror.BadRequest("STORAGE_TARGET_TYPE_IMMUTABLE", "不支持直接修改存储目标类型", nil)
+		}
+		existingMap, err := s.decryptTargetConfig(existing)
+		if err != nil {
+			return nil, apperror.Internal("STORAGE_TARGET_DECRYPT_FAILED", "无法读取现有存储目标配置", err)
+		}
+		configMap = codec.MergeMaskedConfig(configMap, existingMap, s.registry.SensitiveFields(storage.ParseProviderType(input.Type)))
+	}
+	// Creating a provider doubles as config validation before persisting.
+	if _, err := s.registry.Create(ctx, storage.ParseProviderType(input.Type), configMap); err != nil {
+		return nil, apperror.BadRequest("STORAGE_TARGET_INVALID_CONFIG", sanitizeMessage(err.Error()), err)
+	}
+	return configMap, nil
+}
+
+// decryptTargetConfig decodes a target's encrypted JSON configuration into
+// a generic key/value map.
+func (s *StorageTargetService) decryptTargetConfig(item *model.StorageTarget) (map[string]any, error) {
+	var decoded map[string]any
+	err := s.cipher.DecryptJSON(item.ConfigCiphertext, &decoded)
+	if err != nil {
+		return nil, err
+	}
+	return decoded, nil
+}
+
+// buildGoogleDriveDraft normalizes the OAuth start parameters into a draft.
+// For a new target (TargetID == nil) name and client credentials are
+// mandatory. For an existing target, blank or masked fields fall back to
+// the stored (decrypted) values so the user can re-authorize without
+// re-entering secrets.
+func (s *StorageTargetService) buildGoogleDriveDraft(ctx context.Context, input GoogleDriveAuthStartInput, origin string) (*googleDriveOAuthDraft, error) {
+	draft := &googleDriveOAuthDraft{
+		TargetID:     input.TargetID,
+		Name:         strings.TrimSpace(input.Name),
+		Description:  strings.TrimSpace(input.Description),
+		Enabled:      input.Enabled,
+		ClientID:     strings.TrimSpace(input.ClientID),
+		ClientSecret: strings.TrimSpace(input.ClientSecret),
+		FolderID:     strings.TrimSpace(input.FolderID),
+		RedirectURI:  strings.TrimRight(origin, "/") + "/storage-targets/google-drive/callback",
+	}
+	if input.TargetID == nil {
+		if draft.Name == "" || draft.ClientID == "" || draft.ClientSecret == "" {
+			return nil, apperror.BadRequest("STORAGE_GOOGLE_OAUTH_INVALID", "Google Drive 授权参数不完整", nil)
+		}
+		return draft, nil
+	}
+	existing, err := s.targets.FindByID(ctx, *input.TargetID)
+	if err != nil {
+		return nil, apperror.Internal("STORAGE_TARGET_GET_FAILED", "无法获取存储目标详情", err)
+	}
+	if existing == nil {
+		return nil, apperror.New(http.StatusNotFound, "STORAGE_TARGET_NOT_FOUND", "存储目标不存在", fmt.Errorf("storage target %d not found", *input.TargetID))
+	}
+	if existing.Type != storage.TypeGoogleDrive {
+		return nil, apperror.BadRequest("STORAGE_GOOGLE_DRIVE_TYPE_MISMATCH", "目标不是 Google Drive 存储类型", nil)
+	}
+	var cfg storage.GoogleDriveConfig
+	if err := s.cipher.DecryptJSON(existing.ConfigCiphertext, &cfg); err != nil {
+		return nil, apperror.Internal("STORAGE_TARGET_DECRYPT_FAILED", "无法解密存储目标配置", err)
+	}
+	cfg = cfg.Normalize()
+	// Fall back to stored values for anything left blank (or masked for
+	// the credential fields, which clients receive as "********").
+	if draft.Name == "" {
+		draft.Name = existing.Name
+	}
+	if draft.Description == "" {
+		draft.Description = existing.Description
+	}
+	if draft.ClientID == "" || codec.IsMaskedString(draft.ClientID) {
+		draft.ClientID = cfg.ClientID
+	}
+	if draft.ClientSecret == "" || codec.IsMaskedString(draft.ClientSecret) {
+		draft.ClientSecret = cfg.ClientSecret
+	}
+	if draft.FolderID == "" {
+		draft.FolderID = cfg.FolderID
+	}
+	if draft.Name == "" || draft.ClientID == "" || draft.ClientSecret == "" {
+		return nil, apperror.BadRequest("STORAGE_GOOGLE_OAUTH_INVALID", "Google Drive 授权参数不完整", nil)
+	}
+	return draft, nil
+}
+
+// validateType rejects provider types that have no registered factory.
+func (s *StorageTargetService) validateType(providerType string) error {
+	_, supported := s.registry.Factory(storage.ParseProviderType(providerType))
+	if supported {
+		return nil
+	}
+	return apperror.BadRequest("STORAGE_PROVIDER_UNSUPPORTED", "不支持的存储类型", fmt.Errorf("provider %s not found", providerType))
+}
+
+func toStorageTargetSummary(item *model.StorageTarget) StorageTargetSummary {
+ return StorageTargetSummary{
+ ID: item.ID,
+ Name: item.Name,
+ Type: item.Type,
+ Description: item.Description,
+ Enabled: item.Enabled,
+ ConfigVersion: item.ConfigVersion,
+ LastTestedAt: item.LastTestedAt,
+ LastTestStatus: item.LastTestStatus,
+ LastTestMessage: item.LastTestMessage,
+ UpdatedAt: item.UpdatedAt,
+ }
+}
+
+// sanitizeMessage normalizes an error/status message for storage and API
+// responses: it trims whitespace, substitutes a generic fallback for empty
+// input, and caps the length at 255 bytes without splitting a UTF-8 rune.
+func sanitizeMessage(message string) string {
+	message = strings.TrimSpace(message)
+	if message == "" {
+		return "操作失败"
+	}
+	if len(message) > 255 {
+		// Back up to a rune boundary (UTF-8 continuation bytes look like
+		// 0b10xxxxxx) so multi-byte text such as Chinese is never cut
+		// mid-character, which would produce invalid UTF-8.
+		cut := 255
+		for cut > 0 && message[cut]&0xC0 == 0x80 {
+			cut--
+		}
+		return message[:cut]
+	}
+	return message
+}
+
+// normalizeOrigin trims surrounding whitespace and any trailing slashes
+// from an origin (e.g. "  https://host/  " -> "https://host").
+func normalizeOrigin(origin string) string {
+	return strings.TrimRight(strings.TrimSpace(origin), "/")
+}
+
+// cloneMap returns a shallow copy of source; a nil source yields an empty
+// (non-nil) map, so callers can mutate the result safely.
+func cloneMap(source map[string]any) map[string]any {
+	cloned := make(map[string]any, len(source))
+	for k, v := range source {
+		cloned[k] = v
+	}
+	return cloned
+}
+
+// StorageTargetUsage summarizes how much data is stored on one target.
+type StorageTargetUsage struct {
+	TargetID    uint   `json:"targetId"`
+	TargetName  string `json:"targetName"`
+	RecordCount int64  `json:"recordCount"` // number of backup records; NOTE(review): not populated by GetUsage — confirm
+	TotalSize   int64  `json:"totalSize"`   // total bytes across records
+}
+
+// GetUsage reports usage statistics for a storage target. Usage collection
+// is best-effort: if the records repository is missing or errors, a zeroed
+// result is returned rather than failing.
+// NOTE(review): RecordCount is declared on StorageTargetUsage but never set
+// here — confirm whether the StorageUsage items expose a record count.
+func (s *StorageTargetService) GetUsage(ctx context.Context, id uint) (*StorageTargetUsage, error) {
+	target, err := s.targets.FindByID(ctx, id)
+	if err != nil {
+		return nil, apperror.Internal("STORAGE_TARGET_GET_FAILED", "无法获取存储目标详情", err)
+	}
+	if target == nil {
+		return nil, apperror.New(http.StatusNotFound, "STORAGE_TARGET_NOT_FOUND", "存储目标不存在", fmt.Errorf("storage target %d not found", id))
+	}
+	result := &StorageTargetUsage{TargetID: id, TargetName: target.Name}
+	if s.records != nil {
+		usageItems, usageErr := s.records.StorageUsage(ctx)
+		// Usage errors are deliberately swallowed (best-effort statistics).
+		if usageErr == nil {
+			for _, item := range usageItems {
+				if item.StorageTargetID == id {
+					result.TotalSize = item.TotalSize
+					break
+				}
+			}
+		}
+	}
+	return result, nil
+}
diff --git a/server/internal/service/system_service.go b/server/internal/service/system_service.go
new file mode 100644
index 0000000..7bd5949
--- /dev/null
+++ b/server/internal/service/system_service.go
@@ -0,0 +1,53 @@
+package service
+
+import (
+ "context"
+ "path/filepath"
+ "syscall"
+ "time"
+
+ "backupx/server/internal/config"
+)
+
+// SystemInfo is the JSON payload describing the running server: version,
+// run mode, uptime, database location and disk usage of its volume.
+type SystemInfo struct {
+	Version       string `json:"version"`
+	Mode          string `json:"mode"`
+	StartedAt     string `json:"startedAt"`
+	UptimeSeconds int64  `json:"uptimeSeconds"`
+	DatabasePath  string `json:"databasePath"`
+	DiskTotal     int64  `json:"diskTotal"`
+	DiskFree      int64  `json:"diskFree"`
+	DiskUsed      int64  `json:"diskUsed"`
+}
+
+// SystemService exposes read-only runtime/system information.
+type SystemService struct {
+	cfg       config.Config
+	version   string
+	startedAt time.Time
+}
+
+// NewSystemService wires configuration, the build version string, and the
+// process start time into a SystemService.
+func NewSystemService(cfg config.Config, version string, startedAt time.Time) *SystemService {
+	return &SystemService{cfg: cfg, version: version, startedAt: startedAt}
+}
+
+// GetInfo returns a snapshot of server metadata plus disk statistics for
+// the filesystem holding the database. Disk stats are best-effort: a
+// Statfs failure leaves the Disk* fields at zero.
+// NOTE(review): syscall.Statfs is Unix-only, so this file will not build on
+// Windows — confirm the supported target platforms.
+func (s *SystemService) GetInfo(_ context.Context) *SystemInfo {
+	now := time.Now().UTC()
+	info := &SystemInfo{
+		Version:       s.version,
+		Mode:          s.cfg.Server.Mode,
+		StartedAt:     s.startedAt.Format(time.RFC3339),
+		UptimeSeconds: int64(now.Sub(s.startedAt).Seconds()),
+		DatabasePath:  s.cfg.Database.Path,
+	}
+	dir := filepath.Dir(s.cfg.Database.Path)
+	if dir == "" {
+		dir = "."
+	}
+	var stat syscall.Statfs_t
+	if err := syscall.Statfs(dir, &stat); err == nil {
+		// Bavail (blocks available to unprivileged users) is reported as
+		// "free" so the numbers match what an ordinary process can use.
+		info.DiskTotal = int64(stat.Blocks) * int64(stat.Bsize)
+		info.DiskFree = int64(stat.Bavail) * int64(stat.Bsize)
+		info.DiskUsed = info.DiskTotal - info.DiskFree
+	}
+	return info
+}
diff --git a/server/internal/storage/aliyun/factory.go b/server/internal/storage/aliyun/factory.go
new file mode 100644
index 0000000..c222b8f
--- /dev/null
+++ b/server/internal/storage/aliyun/factory.go
@@ -0,0 +1,66 @@
+// Package aliyun provides an Aliyun OSS storage factory that delegates to the S3-compatible engine.
+// Aliyun OSS is fully S3-compatible; we auto-assemble the endpoint from the user-provided region.
+package aliyun
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ "backupx/server/internal/storage"
+ "backupx/server/internal/storage/s3"
+)
+
+// Config is the user-facing configuration for Aliyun OSS.
+// When Endpoint is empty it is derived from Region (and InternalNetwork);
+// an explicit Endpoint always takes precedence.
+type Config struct {
+	Region          string `json:"region"`
+	Bucket          string `json:"bucket"`
+	AccessKeyID     string `json:"accessKeyId"`
+	SecretAccessKey string `json:"secretAccessKey"`
+	Endpoint        string `json:"endpoint"`        // optional override
+	InternalNetwork bool   `json:"internalNetwork"` // use -internal endpoint
+}
+
+// Factory creates Aliyun OSS providers by composing the S3 engine.
+type Factory struct {
+	s3Factory s3.Factory
+}
+
+// NewFactory returns an Aliyun OSS factory backed by a fresh S3 factory.
+func NewFactory() Factory {
+	return Factory{s3Factory: s3.NewFactory()}
+}
+
+// Type identifies this factory; SensitiveFields lists config keys that are
+// masked in API responses and merged back from storage when masked.
+func (Factory) Type() storage.ProviderType { return storage.ProviderTypeAliyunOSS }
+func (Factory) SensitiveFields() []string  { return []string{"accessKeyId", "secretAccessKey"} }
+
+// New decodes the Aliyun OSS config, derives the endpoint from the region
+// when no explicit endpoint is given (optionally the "-internal" variant),
+// and delegates provider construction to the S3-compatible engine.
+func (f Factory) New(ctx context.Context, rawConfig map[string]any) (storage.StorageProvider, error) {
+	cfg, err := storage.DecodeConfig[Config](rawConfig)
+	if err != nil {
+		return nil, err
+	}
+
+	endpoint := strings.TrimSpace(cfg.Endpoint)
+	if endpoint == "" {
+		region := strings.TrimSpace(cfg.Region)
+		if region == "" {
+			return nil, fmt.Errorf("aliyun oss region is required")
+		}
+		networkSegment := ""
+		if cfg.InternalNetwork {
+			networkSegment = "-internal"
+		}
+		endpoint = fmt.Sprintf("https://oss-%s%s.aliyuncs.com", region, networkSegment)
+	}
+
+	// Delegate to S3 engine with assembled endpoint.
+	s3Config := map[string]any{
+		"endpoint":        endpoint,
+		"region":          cfg.Region,
+		"bucket":          cfg.Bucket,
+		"accessKeyId":     cfg.AccessKeyID,
+		"secretAccessKey": cfg.SecretAccessKey,
+		"forcePathStyle":  false, // Aliyun OSS uses virtual-hosted style
+	}
+	return f.s3Factory.New(ctx, s3Config)
+}
diff --git a/server/internal/storage/codec/cipher.go b/server/internal/storage/codec/cipher.go
new file mode 100644
index 0000000..f47d614
--- /dev/null
+++ b/server/internal/storage/codec/cipher.go
@@ -0,0 +1,155 @@
+package codec
+
+import (
+ "bytes"
+ "crypto/aes"
+ "crypto/cipher"
+ "crypto/rand"
+ "crypto/sha256"
+ "encoding/base64"
+ "encoding/json"
+ "fmt"
+ "io"
+)
+
+const maskedValue = "********"
+
+// ConfigCipher encrypts/decrypts storage-target configuration blobs with
+// AES-256-GCM using a key derived from an application secret.
+type ConfigCipher struct {
+	key []byte
+}
+
+// Cipher is a backwards-compatible alias for ConfigCipher.
+type Cipher = ConfigCipher
+
+// New derives a 32-byte AES-256 key from secret via SHA-256 and returns a
+// ready-to-use ConfigCipher.
+func New(secret string) *ConfigCipher {
+	digest := sha256.Sum256([]byte(secret))
+	return &ConfigCipher{key: digest[:]}
+}
+
+// NewConfigCipher is a backwards-compatible alias for New.
+func NewConfigCipher(secret string) *ConfigCipher {
+	return New(secret)
+}
+
+// Key returns a defensive copy of the derived key so callers cannot mutate
+// the cipher's internal state.
+func (c *ConfigCipher) Key() []byte {
+	return append([]byte(nil), c.key...)
+}
+
+// Encrypt seals raw with AES-256-GCM under a fresh random nonce and returns
+// base64 (raw URL alphabet) of nonce||ciphertext. Decrypt reverses this.
+func (c *ConfigCipher) Encrypt(raw []byte) (string, error) {
+	block, err := aes.NewCipher(c.key)
+	if err != nil {
+		return "", fmt.Errorf("create cipher: %w", err)
+	}
+	gcm, err := cipher.NewGCM(block)
+	if err != nil {
+		return "", fmt.Errorf("create gcm: %w", err)
+	}
+	nonce := make([]byte, gcm.NonceSize())
+	if _, err := io.ReadFull(rand.Reader, nonce); err != nil {
+		return "", fmt.Errorf("generate nonce: %w", err)
+	}
+	// Seal appends ciphertext to nonce, yielding nonce||ciphertext.
+	sealed := gcm.Seal(nonce, nonce, raw, nil)
+	return base64.RawURLEncoding.EncodeToString(sealed), nil
+}
+
+// Decrypt reverses Encrypt: it base64-decodes the payload, splits off the
+// GCM nonce prefix, and authenticates/decrypts the remainder.
+func (c *ConfigCipher) Decrypt(encoded string) ([]byte, error) {
+	payload, err := base64.RawURLEncoding.DecodeString(encoded)
+	if err != nil {
+		return nil, fmt.Errorf("decode ciphertext: %w", err)
+	}
+	block, err := aes.NewCipher(c.key)
+	if err != nil {
+		return nil, fmt.Errorf("create cipher: %w", err)
+	}
+	gcm, err := cipher.NewGCM(block)
+	if err != nil {
+		return nil, fmt.Errorf("create gcm: %w", err)
+	}
+	if len(payload) < gcm.NonceSize() {
+		return nil, fmt.Errorf("ciphertext too short")
+	}
+	nonce, ciphertext := payload[:gcm.NonceSize()], payload[gcm.NonceSize():]
+	plain, err := gcm.Open(nil, nonce, ciphertext, nil)
+	if err != nil {
+		return nil, fmt.Errorf("decrypt ciphertext: %w", err)
+	}
+	return plain, nil
+}
+
+// EncryptJSON marshals value to JSON and encrypts the resulting bytes.
+func (c *ConfigCipher) EncryptJSON(value any) (string, error) {
+	encoded, err := json.Marshal(value)
+	if err != nil {
+		return "", fmt.Errorf("marshal plaintext: %w", err)
+	}
+	return c.Encrypt(encoded)
+}
+
+// DecryptJSON decrypts encoded and unmarshals the plaintext JSON into out.
+func (c *ConfigCipher) DecryptJSON(encoded string, out any) error {
+	raw, err := c.Decrypt(encoded)
+	if err != nil {
+		return err
+	}
+	if unmarshalErr := json.Unmarshal(raw, out); unmarshalErr != nil {
+		return fmt.Errorf("unmarshal plaintext: %w", unmarshalErr)
+	}
+	return nil
+}
+
+// EncryptValue and DecryptValue are backwards-compatible aliases for the
+// JSON round-trip methods.
+func (c *ConfigCipher) EncryptValue(value any) (string, error) {
+	return c.EncryptJSON(value)
+}
+
+func (c *ConfigCipher) DecryptValue(encoded string, out any) error {
+	return c.DecryptJSON(encoded, out)
+}
+
+func MaskConfig(raw map[string]any, sensitiveFields []string) map[string]any {
+ masked := cloneMap(raw)
+ for _, field := range sensitiveFields {
+ value, ok := masked[field]
+ if !ok {
+ continue
+ }
+ switch actual := value.(type) {
+ case string:
+ if actual != "" {
+ masked[field] = maskedValue
+ }
+ default:
+ masked[field] = maskedValue
+ }
+ }
+ return masked
+}
+
+func MergeMaskedConfig(next map[string]any, existing map[string]any, sensitiveFields []string) map[string]any {
+ merged := cloneMap(existing)
+ for key, value := range next {
+ merged[key] = value
+ }
+ for _, field := range sensitiveFields {
+ value, ok := merged[field]
+ if !ok {
+ continue
+ }
+ switch actual := value.(type) {
+ case string:
+ if actual == "" || actual == maskedValue {
+ merged[field] = existing[field]
+ }
+ }
+ }
+ return merged
+}
+
+// IsMaskedString reports whether value is exactly the "********" mask
+// placeholder used by MaskConfig.
+func IsMaskedString(value string) bool {
+	return bytes.Equal([]byte(value), []byte(maskedValue))
+}
+
+// cloneMap returns a shallow copy of source; a nil source yields an empty
+// (non-nil) map so callers can mutate the result safely.
+func cloneMap(source map[string]any) map[string]any {
+	cloned := make(map[string]any, len(source))
+	for k, v := range source {
+		cloned[k] = v
+	}
+	return cloned
+}
diff --git a/server/internal/storage/codec/cipher_test.go b/server/internal/storage/codec/cipher_test.go
new file mode 100644
index 0000000..b141718
--- /dev/null
+++ b/server/internal/storage/codec/cipher_test.go
@@ -0,0 +1,64 @@
+package codec
+
+import (
+ "bytes"
+ "testing"
+)
+
+// TestCipherEncryptAndDecrypt verifies the JSON round-trip through the
+// EncryptValue/DecryptValue alias methods preserves values.
+func TestCipherEncryptAndDecrypt(t *testing.T) {
+	cipher := New("encryption-secret")
+	input := map[string]any{
+		"endpoint": "https://example.com",
+		"secret":   "top-secret",
+	}
+	encoded, err := cipher.EncryptValue(input)
+	if err != nil {
+		t.Fatalf("EncryptValue returned error: %v", err)
+	}
+	var output map[string]any
+	if err := cipher.DecryptValue(encoded, &output); err != nil {
+		t.Fatalf("DecryptValue returned error: %v", err)
+	}
+	if output["secret"] != "top-secret" {
+		t.Fatalf("expected decrypted secret, got %#v", output["secret"])
+	}
+}
+
+// TestConfigCipherEncryptAndDecryptBytes verifies the raw-bytes round-trip
+// through Encrypt/Decrypt.
+func TestConfigCipherEncryptAndDecryptBytes(t *testing.T) {
+	cipher := NewConfigCipher("encryption-secret")
+	encoded, err := cipher.Encrypt([]byte(`{"bucket":"demo"}`))
+	if err != nil {
+		t.Fatalf("Encrypt returned error: %v", err)
+	}
+	decoded, err := cipher.Decrypt(encoded)
+	if err != nil {
+		t.Fatalf("Decrypt returned error: %v", err)
+	}
+	if !bytes.Equal(decoded, []byte(`{"bucket":"demo"}`)) {
+		t.Fatalf("expected decrypted payload to match, got %s", string(decoded))
+	}
+}
+
+// TestMaskConfig verifies that only listed sensitive fields are masked.
+func TestMaskConfig(t *testing.T) {
+	masked := MaskConfig(map[string]any{"secret": "abc", "bucket": "demo"}, []string{"secret"})
+	if masked["secret"] != "********" {
+		t.Fatalf("expected masked secret, got %#v", masked["secret"])
+	}
+	if masked["bucket"] != "demo" {
+		t.Fatalf("expected bucket to remain unchanged")
+	}
+}
+
+// TestMergeMaskedConfig verifies that non-sensitive fields take the new
+// value while masked sensitive fields fall back to the stored value.
+func TestMergeMaskedConfig(t *testing.T) {
+	merged := MergeMaskedConfig(
+		map[string]any{"bucket": "changed", "secret": "********"},
+		map[string]any{"bucket": "demo", "secret": "top-secret"},
+		[]string{"secret"},
+	)
+	if merged["bucket"] != "changed" {
+		t.Fatalf("expected bucket to use new value, got %#v", merged["bucket"])
+	}
+	if merged["secret"] != "top-secret" {
+		t.Fatalf("expected masked secret to reuse stored value, got %#v", merged["secret"])
+	}
+}
diff --git a/server/internal/storage/googledrive/provider.go b/server/internal/storage/googledrive/provider.go
new file mode 100644
index 0000000..a68b9f1
--- /dev/null
+++ b/server/internal/storage/googledrive/provider.go
@@ -0,0 +1,299 @@
+package googledrive
+
+import (
+ "context"
+ "fmt"
+ "io"
+ "path"
+ "strings"
+ "time"
+
+ "backupx/server/internal/storage"
+ "golang.org/x/oauth2"
+ googleoauth "golang.org/x/oauth2/google"
+ "google.golang.org/api/drive/v3"
+ "google.golang.org/api/option"
+)
+
+
+// fileInfo is a minimal view of a Drive file used internally by the client.
+type fileInfo struct {
+	ID           string
+	Name         string
+	Size         int64
+	ModifiedTime time.Time
+}
+
+// client abstracts the Google Drive API surface the Provider needs, so
+// tests can substitute a fake implementation.
+type client interface {
+	TestConnection(context.Context, string) error
+	Upload(context.Context, string, string, io.Reader) error
+	Download(context.Context, string, string) (io.ReadCloser, error)
+	Delete(context.Context, string, string) error
+	List(context.Context, string, string) ([]storage.ObjectInfo, error)
+	EnsureFolder(ctx context.Context, parentID, name string) (string, error)
+}
+
+// Provider implements storage.StorageProvider on top of Google Drive,
+// mapping slash-separated object keys onto nested Drive folders.
+type Provider struct {
+	client     client
+	rootFolder string            // user-configured folderId, empty means Drive root
+	folderCache map[string]string // cache: path -> folderID
+}
+
+// Factory builds Google Drive providers; newClient is injectable for tests.
+type Factory struct {
+	newClient func(context.Context, storage.GoogleDriveConfig) (client, error)
+}
+
+// NewFactory returns a Factory wired to the real Drive API client.
+func NewFactory() Factory {
+	return Factory{newClient: newDriveClient}
+}
+
+// Type identifies this factory; SensitiveFields lists config keys masked in
+// API responses and merged back from storage when masked.
+func (Factory) Type() storage.ProviderType { return storage.ProviderTypeGoogleDrive }
+func (Factory) SensitiveFields() []string {
+	return []string{"clientId", "clientSecret", "refreshToken"}
+}
+
+// New decodes and validates the Google Drive config (client credentials and
+// refresh token are mandatory), constructs the API client, and returns a
+// Provider rooted at the configured folder with an empty folder-ID cache.
+func (f Factory) New(ctx context.Context, rawConfig map[string]any) (storage.StorageProvider, error) {
+	cfg, err := storage.DecodeConfig[storage.GoogleDriveConfig](rawConfig)
+	if err != nil {
+		return nil, err
+	}
+	cfg = cfg.Normalize()
+	if strings.TrimSpace(cfg.ClientID) == "" || strings.TrimSpace(cfg.ClientSecret) == "" {
+		return nil, fmt.Errorf("google drive client credentials are required")
+	}
+	if strings.TrimSpace(cfg.RefreshToken) == "" {
+		return nil, fmt.Errorf("google drive refresh token is required")
+	}
+	// Zero-value Factory (constructed without NewFactory) falls back to the
+	// real Drive client.
+	newClient := f.newClient
+	if newClient == nil {
+		newClient = NewFactory().newClient
+	}
+	client, err := newClient(ctx, cfg)
+	if err != nil {
+		return nil, err
+	}
+	return &Provider{
+		client:      client,
+		rootFolder:  strings.TrimSpace(cfg.FolderID),
+		folderCache: make(map[string]string),
+	}, nil
+}
+
+func (p *Provider) Type() storage.ProviderType { return storage.ProviderTypeGoogleDrive }
+
+// ensureFolderPath creates nested folders for a path like "BackupX/file/260308"
+// and returns the deepest folder's ID. Resolved IDs are memoized per path
+// prefix in folderCache so repeated operations on the same directory avoid
+// redundant Drive API round-trips. An empty or "." path maps to rootFolder.
+func (p *Provider) ensureFolderPath(ctx context.Context, folderPath string) (string, error) {
+	if folderPath == "" || folderPath == "." {
+		return p.rootFolder, nil
+	}
+	if cached, ok := p.folderCache[folderPath]; ok {
+		return cached, nil
+	}
+	parts := strings.Split(path.Clean(folderPath), "/")
+	parentID := p.rootFolder
+	builtPath := ""
+	// Walk the path component by component, reusing cached prefixes and
+	// creating any missing folder under the current parent.
+	for _, part := range parts {
+		if part == "" || part == "." {
+			continue
+		}
+		if builtPath == "" {
+			builtPath = part
+		} else {
+			builtPath = builtPath + "/" + part
+		}
+		if cached, ok := p.folderCache[builtPath]; ok {
+			parentID = cached
+			continue
+		}
+		folderID, err := p.client.EnsureFolder(ctx, parentID, part)
+		if err != nil {
+			return "", fmt.Errorf("ensure folder %s: %w", builtPath, err)
+		}
+		p.folderCache[builtPath] = folderID
+		parentID = folderID
+	}
+	return parentID, nil
+}
+
+// TestConnection verifies API access and, if configured, the root folder.
+func (p *Provider) TestConnection(ctx context.Context) error {
+	return p.client.TestConnection(ctx, p.rootFolder)
+}
+
+// Upload stores the stream under objectKey, creating the key's directory
+// chain as nested Drive folders first. Size and metadata are ignored by
+// this backend.
+func (p *Provider) Upload(ctx context.Context, objectKey string, reader io.Reader, _ int64, _ map[string]string) error {
+	dir := path.Dir(objectKey)
+	folderID, err := p.ensureFolderPath(ctx, dir)
+	if err != nil {
+		return err
+	}
+	return p.client.Upload(ctx, folderID, objectKey, reader)
+}
+
+// Download resolves objectKey's folder and streams the file's contents.
+// The caller must close the returned reader.
+func (p *Provider) Download(ctx context.Context, objectKey string) (io.ReadCloser, error) {
+	dir := path.Dir(objectKey)
+	folderID, err := p.ensureFolderPath(ctx, dir)
+	if err != nil {
+		return nil, err
+	}
+	return p.client.Download(ctx, folderID, objectKey)
+}
+
+// Delete removes the file stored under objectKey from its Drive folder.
+func (p *Provider) Delete(ctx context.Context, objectKey string) error {
+	dir := path.Dir(objectKey)
+	folderID, err := p.ensureFolderPath(ctx, dir)
+	if err != nil {
+		return err
+	}
+	return p.client.Delete(ctx, folderID, objectKey)
+}
+
+// List enumerates objects whose names match prefix inside prefix's folder.
+// NOTE(review): path.Dir("a/b/") yields "a/b" while path.Dir("a/b") yields
+// "a" — confirm that callers pass prefixes in the form this expects.
+func (p *Provider) List(ctx context.Context, prefix string) ([]storage.ObjectInfo, error) {
+	dir := path.Dir(prefix)
+	folderID, err := p.ensureFolderPath(ctx, dir)
+	if err != nil {
+		return nil, err
+	}
+	return p.client.List(ctx, folderID, prefix)
+}
+
+// driveClient is the real Drive API implementation of the client interface.
+type driveClient struct {
+	service *drive.Service
+}
+
+// newDriveClient builds a Drive API service whose HTTP client refreshes
+// access tokens automatically from the stored refresh token.
+func newDriveClient(ctx context.Context, cfg storage.GoogleDriveConfig) (client, error) {
+	cfg = cfg.Normalize()
+	oauthCfg := &oauth2.Config{
+		ClientID:     cfg.ClientID,
+		ClientSecret: cfg.ClientSecret,
+		RedirectURL:  cfg.RedirectURL,
+		Endpoint:     googleoauth.Endpoint,
+		Scopes:       []string{drive.DriveScope},
+	}
+	httpClient := oauthCfg.Client(ctx, &oauth2.Token{RefreshToken: cfg.RefreshToken})
+	service, err := drive.NewService(ctx, option.WithHTTPClient(httpClient))
+	if err != nil {
+		return nil, fmt.Errorf("create google drive service: %w", err)
+	}
+	return &driveClient{service: service}, nil
+}
+
+// TestConnection checks API reachability. With no folder configured it
+// fetches account info; otherwise it verifies the folder ID resolves.
+func (c *driveClient) TestConnection(ctx context.Context, folderID string) error {
+	if strings.TrimSpace(folderID) == "" {
+		_, err := c.service.About.Get().Fields("user").Context(ctx).Do()
+		if err != nil {
+			return fmt.Errorf("test google drive connection: %w", err)
+		}
+		return nil
+	}
+	_, err := c.service.Files.Get(folderID).Fields("id").Context(ctx).Do()
+	if err != nil {
+		return fmt.Errorf("test google drive folder: %w", err)
+	}
+	return nil
+}
+
+// EnsureFolder returns the ID of a folder named name under parentID (or the
+// Drive root when parentID is empty), creating it if it does not exist.
+// When multiple same-named folders exist, the first match is used.
+func (c *driveClient) EnsureFolder(ctx context.Context, parentID, name string) (string, error) {
+	// Search for existing folder
+	query := fmt.Sprintf("name = '%s' and mimeType = 'application/vnd.google-apps.folder' and trashed = false", escapeQuery(name))
+	if strings.TrimSpace(parentID) != "" {
+		query += fmt.Sprintf(" and '%s' in parents", escapeQuery(parentID))
+	} else {
+		query += " and 'root' in parents"
+	}
+	result, err := c.service.Files.List().Q(query).PageSize(1).Fields("files(id)").Context(ctx).Do()
+	if err != nil {
+		return "", fmt.Errorf("search for folder %s: %w", name, err)
+	}
+	if len(result.Files) > 0 {
+		return result.Files[0].Id, nil
+	}
+	// Create the folder
+	folder := &drive.File{
+		Name:     name,
+		MimeType: "application/vnd.google-apps.folder",
+	}
+	if strings.TrimSpace(parentID) != "" {
+		folder.Parents = []string{parentID}
+	}
+	created, err := c.service.Files.Create(folder).Fields("id").Context(ctx).Do()
+	if err != nil {
+		return "", fmt.Errorf("create folder %s: %w", name, err)
+	}
+	return created.Id, nil
+}
+
+// Upload creates a new file named after objectKey's basename inside
+// folderID, streaming reader as its content.
+// NOTE(review): Files.Create always makes a new file; Drive permits
+// duplicate names, so re-uploading the same key likely produces multiple
+// files — confirm whether overwrite semantics are intended.
+func (c *driveClient) Upload(ctx context.Context, folderID, objectKey string, reader io.Reader) error {
+	file := &drive.File{Name: path.Base(objectKey)}
+	if strings.TrimSpace(folderID) != "" {
+		file.Parents = []string{folderID}
+	}
+	_, err := c.service.Files.Create(file).Media(reader).Context(ctx).Do()
+	if err != nil {
+		return fmt.Errorf("upload google drive object: %w", err)
+	}
+	return nil
+}
+
+// Download locates objectKey inside folderID and streams its content.
+// The caller must close the returned body.
+func (c *driveClient) Download(ctx context.Context, folderID, objectKey string) (io.ReadCloser, error) {
+	file, err := c.findFile(ctx, folderID, objectKey)
+	if err != nil {
+		return nil, err
+	}
+	response, err := c.service.Files.Get(file.ID).Context(ctx).Download()
+	if err != nil {
+		return nil, fmt.Errorf("download google drive object: %w", err)
+	}
+	return response.Body, nil
+}
+
+// Delete locates objectKey inside folderID and permanently deletes it.
+func (c *driveClient) Delete(ctx context.Context, folderID, objectKey string) error {
+	file, err := c.findFile(ctx, folderID, objectKey)
+	if err != nil {
+		return err
+	}
+	if err := c.service.Files.Delete(file.ID).Context(ctx).Do(); err != nil {
+		return fmt.Errorf("delete google drive object: %w", err)
+	}
+	return nil
+}
+
+// List returns non-trashed files in folderID whose names contain prefix
+// (Drive has no true prefix operator, so "name contains" is used). Results
+// are paginated: every page is fetched by following nextPageToken, so
+// listings are no longer truncated at the API's default page size.
+func (c *driveClient) List(ctx context.Context, folderID, prefix string) ([]storage.ObjectInfo, error) {
+	query := "trashed = false"
+	if strings.TrimSpace(folderID) != "" {
+		query += fmt.Sprintf(" and '%s' in parents", escapeQuery(folderID))
+	}
+	if strings.TrimSpace(prefix) != "" {
+		query += fmt.Sprintf(" and name contains '%s'", escapeQuery(prefix))
+	}
+	items := make([]storage.ObjectInfo, 0)
+	pageToken := ""
+	for {
+		call := c.service.Files.List().Q(query).Fields("files(id,name,size,modifiedTime),nextPageToken").Context(ctx)
+		if pageToken != "" {
+			call = call.PageToken(pageToken)
+		}
+		result, err := call.Do()
+		if err != nil {
+			return nil, fmt.Errorf("list google drive objects: %w", err)
+		}
+		for _, file := range result.Files {
+			// ModifiedTime is RFC3339; a parse failure yields the zero time.
+			modifiedAt, _ := time.Parse(time.RFC3339, file.ModifiedTime)
+			items = append(items, storage.ObjectInfo{Key: file.Name, Size: file.Size, UpdatedAt: modifiedAt.UTC()})
+		}
+		if result.NextPageToken == "" {
+			return items, nil
+		}
+		pageToken = result.NextPageToken
+	}
+}
+
+// findFile looks up a file by exact basename inside folderID, returning a
+// not-found error when no match exists. With PageSize(1), an arbitrary
+// match is returned if several files share the name.
+func (c *driveClient) findFile(ctx context.Context, folderID, objectKey string) (*fileInfo, error) {
+	query := fmt.Sprintf("name = '%s' and trashed = false", escapeQuery(path.Base(objectKey)))
+	if strings.TrimSpace(folderID) != "" {
+		query += fmt.Sprintf(" and '%s' in parents", escapeQuery(folderID))
+	}
+	result, err := c.service.Files.List().Q(query).PageSize(1).Fields("files(id,name,size,modifiedTime)").Context(ctx).Do()
+	if err != nil {
+		return nil, fmt.Errorf("query google drive object: %w", err)
+	}
+	if len(result.Files) == 0 {
+		return nil, fmt.Errorf("google drive object not found: %s", objectKey)
+	}
+	file := result.Files[0]
+	modifiedAt, _ := time.Parse(time.RFC3339, file.ModifiedTime)
+	return &fileInfo{ID: file.Id, Name: file.Name, Size: file.Size, ModifiedTime: modifiedAt.UTC()}, nil
+}
+
+// escapeQuery escapes a value for interpolation into a single-quoted Drive
+// search query literal. Backslashes must be escaped before quotes so an
+// input backslash cannot neutralize the quote escaping (e.g. a trailing \
+// would otherwise turn \' back into a literal quote and break the query).
+func escapeQuery(value string) string {
+	escaped := strings.ReplaceAll(value, `\`, `\\`)
+	return strings.ReplaceAll(escaped, "'", `\'`)
+}
+
diff --git a/server/internal/storage/googledrive/provider_test.go b/server/internal/storage/googledrive/provider_test.go
new file mode 100644
index 0000000..0320788
--- /dev/null
+++ b/server/internal/storage/googledrive/provider_test.go
@@ -0,0 +1,75 @@
+package googledrive
+
+import (
+ "context"
+ "io"
+ "strings"
+ "testing"
+ "time"
+
+ "backupx/server/internal/storage"
+)
+
+// fakeClient is an in-memory stand-in for the Drive client interface,
+// storing object bodies in a map keyed by object key.
+type fakeClient struct{ data map[string]string }
+
+// TestConnection always succeeds.
+func (c *fakeClient) TestConnection(context.Context, string) error { return nil }
+
+// Upload records the full reader contents under objectKey.
+func (c *fakeClient) Upload(_ context.Context, _ string, objectKey string, reader io.Reader) error {
+	content, _ := io.ReadAll(reader)
+	c.data[objectKey] = string(content)
+	return nil
+}
+
+// Download returns the stored body (empty when the key is absent).
+func (c *fakeClient) Download(_ context.Context, _ string, objectKey string) (io.ReadCloser, error) {
+	return io.NopCloser(strings.NewReader(c.data[objectKey])), nil
+}
+
+// Delete removes the key; deleting a missing key is a no-op.
+func (c *fakeClient) Delete(_ context.Context, _ string, objectKey string) error {
+	delete(c.data, objectKey)
+	return nil
+}
+
+// List returns every stored key matching prefix with the current time as UpdatedAt.
+func (c *fakeClient) List(_ context.Context, _ string, prefix string) ([]storage.ObjectInfo, error) {
+	items := make([]storage.ObjectInfo, 0)
+	for key, value := range c.data {
+		if prefix == "" || strings.HasPrefix(key, prefix) {
+			items = append(items, storage.ObjectInfo{Key: key, Size: int64(len(value)), UpdatedAt: time.Now().UTC()})
+		}
+	}
+	return items, nil
+}
+
+// EnsureFolder fabricates a deterministic folder ID from the name.
+func (c *fakeClient) EnsureFolder(_ context.Context, _, name string) (string, error) {
+	return "fake-folder-" + name, nil
+}
+
+// TestGoogleDriveProviderCRUD exercises the full upload/download/list/delete
+// cycle of the Google Drive provider against the in-memory fake client.
+func TestGoogleDriveProviderCRUD(t *testing.T) {
+	// Inject the fake so no real Drive credentials or network are needed.
+	factory := Factory{newClient: func(context.Context, storage.GoogleDriveConfig) (client, error) {
+		return &fakeClient{data: make(map[string]string)}, nil
+	}}
+	providerAny, err := factory.New(context.Background(), map[string]any{"clientId": "id", "clientSecret": "secret", "refreshToken": "refresh"})
+	if err != nil {
+		t.Fatalf("Factory.New returned error: %v", err)
+	}
+	provider := providerAny.(*Provider)
+	if err := provider.TestConnection(context.Background()); err != nil {
+		t.Fatalf("TestConnection returned error: %v", err)
+	}
+	if err := provider.Upload(context.Background(), "backup.tar.gz", strings.NewReader("payload"), 7, nil); err != nil {
+		t.Fatalf("Upload returned error: %v", err)
+	}
+	reader, err := provider.Download(context.Background(), "backup.tar.gz")
+	if err != nil {
+		t.Fatalf("Download returned error: %v", err)
+	}
+	defer reader.Close()
+	content, _ := io.ReadAll(reader)
+	if string(content) != "payload" {
+		t.Fatalf("unexpected content: %s", string(content))
+	}
+	items, err := provider.List(context.Background(), "backup")
+	if err != nil {
+		t.Fatalf("List returned error: %v", err)
+	}
+	if len(items) != 1 || items[0].Key != "backup.tar.gz" {
+		t.Fatalf("unexpected list result: %#v", items)
+	}
+	if err := provider.Delete(context.Background(), "backup.tar.gz"); err != nil {
+		t.Fatalf("Delete returned error: %v", err)
+	}
+}
diff --git a/server/internal/storage/localdisk/provider.go b/server/internal/storage/localdisk/provider.go
new file mode 100644
index 0000000..6a0bd28
--- /dev/null
+++ b/server/internal/storage/localdisk/provider.go
@@ -0,0 +1,137 @@
+package localdisk
+
+import (
+ "context"
+ "fmt"
+ "io"
+ "io/fs"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "backupx/server/internal/storage"
+)
+
+// Provider stores backup objects on the local filesystem under basePath.
+type Provider struct {
+	basePath string
+}
+
+// Factory builds local-disk providers; it carries no configuration.
+type Factory struct{}
+
+// NewFactory returns a zero-value local-disk factory.
+func NewFactory() Factory { return Factory{} }
+
+func (Factory) Type() storage.ProviderType { return storage.ProviderTypeLocalDisk }
+// SensitiveFields is nil: local-disk config contains no secrets to mask.
+func (Factory) SensitiveFields() []string { return nil }
+
+// New decodes the raw config into LocalDiskConfig and returns a provider
+// rooted at the cleaned basePath. basePath is required.
+func (Factory) New(_ context.Context, rawConfig map[string]any) (storage.StorageProvider, error) {
+	cfg, err := storage.DecodeConfig[storage.LocalDiskConfig](rawConfig)
+	if err != nil {
+		return nil, err
+	}
+	if strings.TrimSpace(cfg.BasePath) == "" {
+		return nil, fmt.Errorf("local disk basePath is required")
+	}
+	// Clean once here so resolvePath works against a canonical base.
+	return &Provider{basePath: filepath.Clean(cfg.BasePath)}, nil
+}
+
+func (p *Provider) Type() storage.ProviderType { return storage.ProviderTypeLocalDisk }
+
+// TestConnection verifies the base path exists (creating it if needed) and
+// is writable by creating and removing a throwaway temp file.
+func (p *Provider) TestConnection(_ context.Context) error {
+	if err := os.MkdirAll(p.basePath, 0o755); err != nil {
+		return fmt.Errorf("ensure local disk base path: %w", err)
+	}
+	tempFile, err := os.CreateTemp(p.basePath, ".backupx-connection-test-*")
+	if err != nil {
+		return fmt.Errorf("write access check failed: %w", err)
+	}
+	// Best-effort cleanup; close/remove failures do not fail the check.
+	name := tempFile.Name()
+	_ = tempFile.Close()
+	_ = os.Remove(name)
+	return nil
+}
+
+// Upload streams reader into basePath/objectKey, creating parent
+// directories as needed. Close is checked explicitly (not deferred and
+// discarded) so that buffered data that fails to flush to disk surfaces as
+// an error instead of a silently truncated backup.
+func (p *Provider) Upload(_ context.Context, objectKey string, reader io.Reader, _ int64, _ map[string]string) error {
+	targetPath, err := p.resolvePath(objectKey)
+	if err != nil {
+		return err
+	}
+	if err := os.MkdirAll(filepath.Dir(targetPath), 0o755); err != nil {
+		return fmt.Errorf("create local disk directories: %w", err)
+	}
+	file, err := os.Create(targetPath)
+	if err != nil {
+		return fmt.Errorf("create local disk object: %w", err)
+	}
+	if _, err := io.Copy(file, reader); err != nil {
+		// Copy already failed; the Close error would only mask it.
+		_ = file.Close()
+		return fmt.Errorf("write local disk object: %w", err)
+	}
+	if err := file.Close(); err != nil {
+		return fmt.Errorf("close local disk object: %w", err)
+	}
+	return nil
+}
+
+// Download opens the object stored at basePath/objectKey for reading.
+// The caller owns the returned handle and must close it.
+func (p *Provider) Download(_ context.Context, objectKey string) (io.ReadCloser, error) {
+	location, resolveErr := p.resolvePath(objectKey)
+	if resolveErr != nil {
+		return nil, resolveErr
+	}
+	handle, openErr := os.Open(location)
+	if openErr != nil {
+		return nil, fmt.Errorf("open local disk object: %w", openErr)
+	}
+	return handle, nil
+}
+
+// Delete removes the object file. A missing file counts as success, making
+// the operation idempotent.
+func (p *Provider) Delete(_ context.Context, objectKey string) error {
+	location, resolveErr := p.resolvePath(objectKey)
+	if resolveErr != nil {
+		return resolveErr
+	}
+	removeErr := os.Remove(location)
+	if removeErr == nil || os.IsNotExist(removeErr) {
+		return nil
+	}
+	return fmt.Errorf("delete local disk object: %w", removeErr)
+}
+
+// List walks the base path and returns every regular file whose
+// slash-separated relative key starts with prefix. A missing base path
+// yields an empty listing rather than an error.
+func (p *Provider) List(_ context.Context, prefix string) ([]storage.ObjectInfo, error) {
+	items := make([]storage.ObjectInfo, 0)
+	err := filepath.WalkDir(p.basePath, func(path string, entry fs.DirEntry, walkErr error) error {
+		if walkErr != nil {
+			return walkErr
+		}
+		if entry.IsDir() {
+			return nil
+		}
+		rel, err := filepath.Rel(p.basePath, path)
+		if err != nil {
+			return err
+		}
+		// Keys are always slash-separated, regardless of OS separator.
+		key := filepath.ToSlash(rel)
+		if prefix != "" && !strings.HasPrefix(key, prefix) {
+			return nil
+		}
+		info, err := entry.Info()
+		if err != nil {
+			return err
+		}
+		items = append(items, storage.ObjectInfo{Key: key, Size: info.Size(), UpdatedAt: info.ModTime().UTC()})
+		return nil
+	})
+	// Tolerate a base path that has not been created yet.
+	if err != nil && !os.IsNotExist(err) {
+		return nil, fmt.Errorf("list local disk objects: %w", err)
+	}
+	return items, nil
+}
+
+// resolvePath joins objectKey onto the base path and rejects keys that are
+// empty or would escape the base directory via "..", guarding against path
+// traversal.
+func (p *Provider) resolvePath(objectKey string) (string, error) {
+	base := filepath.Clean(p.basePath)
+	key := filepath.Clean(filepath.FromSlash(strings.TrimSpace(objectKey)))
+	switch key {
+	case "", ".", string(filepath.Separator):
+		return "", fmt.Errorf("object key is required")
+	}
+	resolved := filepath.Clean(filepath.Join(base, key))
+	if resolved == base {
+		return resolved, nil
+	}
+	if !strings.HasPrefix(resolved, base+string(filepath.Separator)) {
+		return "", fmt.Errorf("object key escapes base path")
+	}
+	return resolved, nil
+}
diff --git a/server/internal/storage/localdisk/provider_test.go b/server/internal/storage/localdisk/provider_test.go
new file mode 100644
index 0000000..c6e8d49
--- /dev/null
+++ b/server/internal/storage/localdisk/provider_test.go
@@ -0,0 +1,52 @@
+package localdisk
+
+import (
+ "context"
+ "io"
+ "strings"
+ "testing"
+)
+
+// TestLocalDiskProviderCRUD exercises the full upload/download/list/delete
+// cycle of the local-disk provider against a temporary directory.
+func TestLocalDiskProviderCRUD(t *testing.T) {
+	providerAny, err := (Factory{}).New(context.Background(), map[string]any{"basePath": t.TempDir()})
+	if err != nil {
+		t.Fatalf("Factory.New returned error: %v", err)
+	}
+	provider := providerAny.(*Provider)
+	if err := provider.TestConnection(context.Background()); err != nil {
+		t.Fatalf("TestConnection returned error: %v", err)
+	}
+	if err := provider.Upload(context.Background(), "daily/backup.txt", strings.NewReader("hello"), 5, nil); err != nil {
+		t.Fatalf("Upload returned error: %v", err)
+	}
+	reader, err := provider.Download(context.Background(), "daily/backup.txt")
+	if err != nil {
+		t.Fatalf("Download returned error: %v", err)
+	}
+	defer reader.Close()
+	content, _ := io.ReadAll(reader)
+	if string(content) != "hello" {
+		t.Fatalf("expected downloaded content to match, got %s", string(content))
+	}
+	items, err := provider.List(context.Background(), "daily")
+	if err != nil {
+		t.Fatalf("List returned error: %v", err)
+	}
+	if len(items) != 1 || items[0].Key != "daily/backup.txt" {
+		t.Fatalf("unexpected list result: %#v", items)
+	}
+	if err := provider.Delete(context.Background(), "daily/backup.txt"); err != nil {
+		t.Fatalf("Delete returned error: %v", err)
+	}
+}
+
+// TestLocalDiskProviderRejectsTraversal verifies that resolvePath refuses a
+// key containing ".." that would escape the base directory.
+func TestLocalDiskProviderRejectsTraversal(t *testing.T) {
+	providerAny, err := (Factory{}).New(context.Background(), map[string]any{"basePath": t.TempDir()})
+	if err != nil {
+		t.Fatalf("Factory.New returned error: %v", err)
+	}
+	provider := providerAny.(*Provider)
+	if _, err := provider.resolvePath("../escape.txt"); err == nil {
+		t.Fatalf("expected traversal to be rejected")
+	}
+}
diff --git a/server/internal/storage/qiniu/factory.go b/server/internal/storage/qiniu/factory.go
new file mode 100644
index 0000000..6872a05
--- /dev/null
+++ b/server/internal/storage/qiniu/factory.go
@@ -0,0 +1,73 @@
+// Package qiniu provides a Qiniu Cloud Kodo storage factory that delegates to the S3-compatible engine.
+// Qiniu Kodo is S3-compatible; we auto-assemble the endpoint from the user-provided region.
+package qiniu
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ "backupx/server/internal/storage"
+ "backupx/server/internal/storage/s3"
+)
+
+// Config is the user-facing configuration for Qiniu Kodo.
+type Config struct {
+	Region string `json:"region"` // e.g. z0, z1, z2, na0, as0
+	Bucket string `json:"bucket"`
+	AccessKey string `json:"accessKeyId"`
+	SecretKey string `json:"secretAccessKey"`
+	Endpoint string `json:"endpoint"` // optional override
+}
+
+// regionEndpoints maps Qiniu storage region codes to their S3-compatible
+// endpoints. NOTE(review): values look consistent with Qiniu's published
+// S3-compatible endpoints — confirm against current Qiniu docs before
+// adding regions.
+var regionEndpoints = map[string]string{
+	"z0": "https://s3-cn-east-1.qiniucs.com",
+	"cn-east-2": "https://s3-cn-east-2.qiniucs.com",
+	"z1": "https://s3-cn-north-1.qiniucs.com",
+	"z2": "https://s3-cn-south-1.qiniucs.com",
+	"na0": "https://s3-us-north-1.qiniucs.com",
+	"as0": "https://s3-ap-southeast-1.qiniucs.com",
+}
+
+// Factory creates Qiniu Kodo providers by composing the S3 engine.
+type Factory struct {
+	s3Factory s3.Factory
+}
+
+// NewFactory returns a factory backed by a fresh S3 factory.
+func NewFactory() Factory {
+	return Factory{s3Factory: s3.NewFactory()}
+}
+
+func (Factory) Type() storage.ProviderType { return storage.ProviderTypeQiniuKodo }
+// SensitiveFields lists the credential keys that must be masked in API output.
+func (Factory) SensitiveFields() []string { return []string{"accessKeyId", "secretAccessKey"} }
+
+// New resolves the S3-compatible endpoint (explicit override wins,
+// otherwise mapped from the region code) and delegates provider
+// construction to the shared S3 factory.
+func (f Factory) New(ctx context.Context, rawConfig map[string]any) (storage.StorageProvider, error) {
+	cfg, err := storage.DecodeConfig[Config](rawConfig)
+	if err != nil {
+		return nil, err
+	}
+
+	endpoint := strings.TrimSpace(cfg.Endpoint)
+	if endpoint == "" {
+		region := strings.TrimSpace(cfg.Region)
+		if region == "" {
+			return nil, fmt.Errorf("qiniu kodo region is required")
+		}
+		var ok bool
+		endpoint, ok = regionEndpoints[region]
+		if !ok {
+			return nil, fmt.Errorf("unsupported qiniu region: %s (supported: z0, cn-east-2, z1, z2, na0, as0)", region)
+		}
+	}
+
+	// Re-encode as the S3 engine's config shape.
+	s3Config := map[string]any{
+		"endpoint": endpoint,
+		"region": cfg.Region,
+		"bucket": cfg.Bucket,
+		"accessKeyId": cfg.AccessKey,
+		"secretAccessKey": cfg.SecretKey,
+		"forcePathStyle": true, // Qiniu S3-compatible uses path-style
+	}
+	return f.s3Factory.New(ctx, s3Config)
+}
diff --git a/server/internal/storage/registry.go b/server/internal/storage/registry.go
new file mode 100644
index 0000000..be2d4b1
--- /dev/null
+++ b/server/internal/storage/registry.go
@@ -0,0 +1,192 @@
+package storage
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "sort"
+ "sync"
+
+ "backupx/server/internal/apperror"
+)
+
+// Optional capability interfaces a ProviderFactory may implement; the
+// Registry type-asserts against these to support both factory generations.
+type providerFactoryWithNew interface {
+	New(context.Context, map[string]any) (StorageProvider, error)
+}
+
+type providerFactoryWithCreate interface {
+	Create(context.Context, json.RawMessage) (StorageProvider, error)
+}
+
+type providerFactoryWithSensitiveFields interface {
+	SensitiveFields() []string
+}
+
+type providerFactoryWithSensitiveKeys interface {
+	SensitiveKeys() []string
+}
+
+type providerFactoryWithValidate interface {
+	Validate(json.RawMessage) error
+}
+
+// Registry maps provider types to their factories; safe for concurrent use.
+type Registry struct {
+	mu sync.RWMutex
+	factories map[ProviderType]ProviderFactory
+}
+
+// NewRegistry builds a Registry pre-populated with the given factories.
+func NewRegistry(factories ...ProviderFactory) *Registry {
+	r := &Registry{factories: make(map[ProviderType]ProviderFactory)}
+	for _, f := range factories {
+		r.Register(f)
+	}
+	return r
+}
+
+// Register adds (or replaces) a factory keyed by its Type. Nil factories
+// are ignored; the map is lazily initialised for zero-value registries.
+func (r *Registry) Register(factory ProviderFactory) {
+	if factory == nil {
+		return
+	}
+	r.mu.Lock()
+	defer r.mu.Unlock()
+	if r.factories == nil {
+		r.factories = make(map[ProviderType]ProviderFactory)
+	}
+	r.factories[factory.Type()] = factory
+}
+
+// Factory returns the registered factory for providerType, reporting
+// whether one exists.
+func (r *Registry) Factory(providerType string) (ProviderFactory, bool) {
+	r.mu.RLock()
+	defer r.mu.RUnlock()
+	f, found := r.factories[providerType]
+	return f, found
+}
+
+// Types returns every registered provider type in ascending lexical order.
+func (r *Registry) Types() []ProviderType {
+	r.mu.RLock()
+	defer r.mu.RUnlock()
+	types := make([]ProviderType, 0, len(r.factories))
+	for providerType := range r.factories {
+		types = append(types, providerType)
+	}
+	// ProviderType is a string alias, so sort.Strings applies directly.
+	sort.Strings(types)
+	return types
+}
+
+// SensitiveFields reports which config keys of providerType should be
+// masked, supporting both factory naming conventions. Unknown types and
+// factories without either interface yield nil.
+func (r *Registry) SensitiveFields(providerType string) []string {
+	factory, ok := r.Factory(providerType)
+	if !ok {
+		return nil
+	}
+	if typed, ok := factory.(providerFactoryWithSensitiveFields); ok {
+		return typed.SensitiveFields()
+	}
+	if typed, ok := factory.(providerFactoryWithSensitiveKeys); ok {
+		return typed.SensitiveKeys()
+	}
+	return nil
+}
+
+// SensitiveKeys is a legacy alias for SensitiveFields.
+func (r *Registry) SensitiveKeys(providerType string) []string {
+	return r.SensitiveFields(providerType)
+}
+
+// Validate checks raw config for providerType: a factory-provided Validate
+// is preferred; otherwise the config is decoded and a throwaway provider is
+// constructed via New as the validation. Errors are returned as apperror
+// bad-request values.
+func (r *Registry) Validate(providerType string, raw json.RawMessage) error {
+	factory, ok := r.Factory(providerType)
+	if !ok {
+		return apperror.BadRequest("STORAGE_PROVIDER_UNSUPPORTED", "不支持的存储类型", fmt.Errorf("unsupported storage provider type: %s", providerType))
+	}
+	if typed, ok := factory.(providerFactoryWithValidate); ok {
+		if err := typed.Validate(raw); err != nil {
+			return apperror.BadRequest("STORAGE_TARGET_INVALID_CONFIG", "存储目标配置不合法", err)
+		}
+		return nil
+	}
+	configMap, err := decodeConfigMap(raw)
+	if err != nil {
+		return apperror.BadRequest("STORAGE_TARGET_INVALID_CONFIG", "存储目标配置不合法", err)
+	}
+	// Fallback: constructing the provider doubles as config validation.
+	if typed, ok := factory.(providerFactoryWithNew); ok {
+		if _, err := typed.New(context.Background(), configMap); err != nil {
+			return apperror.BadRequest("STORAGE_TARGET_INVALID_CONFIG", "存储目标配置不合法", err)
+		}
+		return nil
+	}
+	return apperror.BadRequest("STORAGE_TARGET_INVALID_CONFIG", "存储目标配置不合法", fmt.Errorf("provider %s has no validator", providerType))
+}
+
+// Create instantiates a provider of providerType from rawConfig (any of
+// nil, map, json.RawMessage, []byte, or a marshalable value), trying the
+// map-based New constructor first and falling back to the raw-JSON Create.
+func (r *Registry) Create(ctx context.Context, providerType string, rawConfig any) (StorageProvider, error) {
+	factory, ok := r.Factory(providerType)
+	if !ok {
+		return nil, apperror.BadRequest("STORAGE_PROVIDER_UNSUPPORTED", "不支持的存储类型", fmt.Errorf("unsupported storage provider type: %s", providerType))
+	}
+	raw, configMap, err := normalizeConfig(rawConfig)
+	if err != nil {
+		return nil, apperror.BadRequest("STORAGE_TARGET_INVALID_CONFIG", "存储目标配置不合法", err)
+	}
+	if typed, ok := factory.(providerFactoryWithNew); ok {
+		provider, err := typed.New(ctx, configMap)
+		if err != nil {
+			return nil, apperror.BadRequest("STORAGE_TARGET_INVALID_CONFIG", "无法创建存储客户端", err)
+		}
+		return provider, nil
+	}
+	if typed, ok := factory.(providerFactoryWithCreate); ok {
+		provider, err := typed.Create(ctx, raw)
+		if err != nil {
+			return nil, apperror.BadRequest("STORAGE_TARGET_INVALID_CONFIG", "无法创建存储客户端", err)
+		}
+		return provider, nil
+	}
+	return nil, apperror.BadRequest("STORAGE_TARGET_INVALID_CONFIG", "无法创建存储客户端", fmt.Errorf("provider %s has no constructor", providerType))
+}
+
+// normalizeConfig converts any supported config representation into both a
+// raw JSON form and a decoded map, so either factory constructor style can
+// be fed. nil becomes an empty object.
+func normalizeConfig(rawConfig any) (json.RawMessage, map[string]any, error) {
+	switch value := rawConfig.(type) {
+	case nil:
+		return json.RawMessage("{}"), map[string]any{}, nil
+	case map[string]any:
+		raw, err := json.Marshal(value)
+		if err != nil {
+			return nil, nil, fmt.Errorf("marshal config: %w", err)
+		}
+		return raw, value, nil
+	case json.RawMessage:
+		configMap, err := decodeConfigMap(value)
+		if err != nil {
+			return nil, nil, err
+		}
+		return value, configMap, nil
+	case []byte:
+		raw := json.RawMessage(value)
+		configMap, err := decodeConfigMap(raw)
+		if err != nil {
+			return nil, nil, err
+		}
+		return raw, configMap, nil
+	default:
+		// Arbitrary structs round-trip through JSON to produce both forms.
+		raw, err := json.Marshal(value)
+		if err != nil {
+			return nil, nil, fmt.Errorf("marshal config: %w", err)
+		}
+		configMap, err := decodeConfigMap(raw)
+		if err != nil {
+			return nil, nil, err
+		}
+		return raw, configMap, nil
+	}
+}
+
+// decodeConfigMap decodes raw JSON into a generic map. Empty input and a
+// JSON null both yield an empty, non-nil map.
+func decodeConfigMap(raw json.RawMessage) (map[string]any, error) {
+	if len(raw) == 0 {
+		return map[string]any{}, nil
+	}
+	var decoded map[string]any
+	if unmarshalErr := json.Unmarshal(raw, &decoded); unmarshalErr != nil {
+		return nil, fmt.Errorf("decode config: %w", unmarshalErr)
+	}
+	if decoded != nil {
+		return decoded, nil
+	}
+	return map[string]any{}, nil
+}
diff --git a/server/internal/storage/registry_test.go b/server/internal/storage/registry_test.go
new file mode 100644
index 0000000..3aed2c8
--- /dev/null
+++ b/server/internal/storage/registry_test.go
@@ -0,0 +1,58 @@
+package storage
+
+import (
+ "context"
+ "io"
+ "strings"
+ "testing"
+)
+
+// fakeProvider is a no-op StorageProvider used to exercise the registry.
+type fakeProvider struct{}
+
+func (fakeProvider) Type() ProviderType { return ProviderTypeLocalDisk }
+func (fakeProvider) TestConnection(context.Context) error { return nil }
+func (fakeProvider) Upload(context.Context, string, io.Reader, int64, map[string]string) error {
+	return nil
+}
+func (fakeProvider) Download(context.Context, string) (io.ReadCloser, error) {
+	return io.NopCloser(strings.NewReader("ok")), nil
+}
+func (fakeProvider) Delete(context.Context, string) error { return nil }
+func (fakeProvider) List(context.Context, string) ([]ObjectInfo, error) { return nil, nil }
+
+// fakeFactory registers as the local-disk type and always hands back a
+// fakeProvider.
+type fakeFactory struct{}
+
+func (fakeFactory) Type() ProviderType { return ProviderTypeLocalDisk }
+func (fakeFactory) SensitiveFields() []string { return []string{"secret"} }
+func (fakeFactory) New(context.Context, map[string]any) (StorageProvider, error) {
+	return fakeProvider{}, nil
+}
+
+// TestRegistryCreate checks that a registered factory is found by type and
+// produces a provider of the expected type.
+func TestRegistryCreate(t *testing.T) {
+	registry := NewRegistry(fakeFactory{})
+	provider, err := registry.Create(context.Background(), ProviderTypeLocalDisk, map[string]any{"basePath": "/tmp"})
+	if err != nil {
+		t.Fatalf("Create returned error: %v", err)
+	}
+	if provider.Type() != ProviderTypeLocalDisk {
+		t.Fatalf("expected local disk provider, got %s", provider.Type())
+	}
+}
+
+// TestRegistryCreateReturnsErrorForUnknownType checks the unsupported-type
+// error path of an empty registry.
+func TestRegistryCreateReturnsErrorForUnknownType(t *testing.T) {
+	registry := NewRegistry()
+	_, err := registry.Create(context.Background(), ProviderTypeS3, nil)
+	if err == nil || !strings.Contains(err.Error(), "unsupported") {
+		t.Fatalf("expected unsupported type error, got %v", err)
+	}
+}
+
+// TestDecodeConfig checks that a generic map decodes into a typed config.
+func TestDecodeConfig(t *testing.T) {
+	cfg, err := DecodeConfig[LocalDiskConfig](map[string]any{"basePath": "/tmp/storage"})
+	if err != nil {
+		t.Fatalf("DecodeConfig returned error: %v", err)
+	}
+	if cfg.BasePath != "/tmp/storage" {
+		t.Fatalf("expected base path to decode")
+	}
+}
diff --git a/server/internal/storage/s3/provider.go b/server/internal/storage/s3/provider.go
new file mode 100644
index 0000000..1ce053a
--- /dev/null
+++ b/server/internal/storage/s3/provider.go
@@ -0,0 +1,126 @@
+package s3
+
+import (
+ "context"
+ "fmt"
+ "io"
+ "strings"
+ "time"
+
+ "backupx/server/internal/storage"
+ awscore "github.com/aws/aws-sdk-go-v2/aws"
+ "github.com/aws/aws-sdk-go-v2/credentials"
+ awss3 "github.com/aws/aws-sdk-go-v2/service/s3"
+)
+
+// client is the subset of the AWS S3 API the provider uses; tests swap in
+// an in-memory fake.
+type client interface {
+	HeadBucket(context.Context, *awss3.HeadBucketInput, ...func(*awss3.Options)) (*awss3.HeadBucketOutput, error)
+	PutObject(context.Context, *awss3.PutObjectInput, ...func(*awss3.Options)) (*awss3.PutObjectOutput, error)
+	GetObject(context.Context, *awss3.GetObjectInput, ...func(*awss3.Options)) (*awss3.GetObjectOutput, error)
+	DeleteObject(context.Context, *awss3.DeleteObjectInput, ...func(*awss3.Options)) (*awss3.DeleteObjectOutput, error)
+	ListObjectsV2(context.Context, *awss3.ListObjectsV2Input, ...func(*awss3.Options)) (*awss3.ListObjectsV2Output, error)
+}
+
+// Provider stores objects in a single S3 bucket.
+type Provider struct {
+	client client
+	bucket string
+}
+
+// Factory builds S3 providers; newClient is injectable for tests.
+type Factory struct {
+	newClient func(cfg storage.S3Config) client
+}
+
+// NewFactory returns a factory whose client builder creates a real AWS SDK
+// client with static credentials, optional custom endpoint, and optional
+// path-style addressing (needed by most S3-compatible vendors).
+func NewFactory() Factory {
+	return Factory{newClient: func(cfg storage.S3Config) client {
+		region := strings.TrimSpace(cfg.Region)
+		if region == "" {
+			// The SDK requires some region; us-east-1 is the conventional default.
+			region = "us-east-1"
+		}
+		awsConfig := awscore.Config{
+			Region: region,
+			Credentials: credentials.NewStaticCredentialsProvider(cfg.AccessKeyID, cfg.SecretAccessKey, ""),
+		}
+		return awss3.NewFromConfig(awsConfig, func(options *awss3.Options) {
+			options.UsePathStyle = cfg.ForcePathStyle
+			if strings.TrimSpace(cfg.Endpoint) != "" {
+				options.BaseEndpoint = awscore.String(strings.TrimRight(cfg.Endpoint, "/"))
+			}
+		})
+	}}
+}
+
+func (Factory) Type() storage.ProviderType { return storage.ProviderTypeS3 }
+// SensitiveFields lists the credential keys that must be masked in API output.
+func (Factory) SensitiveFields() []string { return []string{"accessKeyId", "secretAccessKey"} }
+
+// New decodes the raw config, validates the required bucket and
+// credentials, and builds a provider. A zero-value Factory (no injected
+// newClient) falls back to the real AWS client builder.
+func (f Factory) New(_ context.Context, rawConfig map[string]any) (storage.StorageProvider, error) {
+	cfg, err := storage.DecodeConfig[storage.S3Config](rawConfig)
+	if err != nil {
+		return nil, err
+	}
+	if strings.TrimSpace(cfg.Bucket) == "" {
+		return nil, fmt.Errorf("s3 bucket is required")
+	}
+	if strings.TrimSpace(cfg.AccessKeyID) == "" || strings.TrimSpace(cfg.SecretAccessKey) == "" {
+		return nil, fmt.Errorf("s3 credentials are required")
+	}
+	newClient := f.newClient
+	if newClient == nil {
+		factory := NewFactory()
+		newClient = factory.newClient
+	}
+	return &Provider{client: newClient(cfg), bucket: cfg.Bucket}, nil
+}
+
+func (p *Provider) Type() storage.ProviderType { return storage.ProviderTypeS3 }
+
+// TestConnection issues a HeadBucket call to verify the bucket is reachable
+// with the configured credentials.
+func (p *Provider) TestConnection(ctx context.Context) error {
+	_, err := p.client.HeadBucket(ctx, &awss3.HeadBucketInput{Bucket: awscore.String(p.bucket)})
+	if err != nil {
+		return fmt.Errorf("test s3 connection: %w", err)
+	}
+	return nil
+}
+
+// Upload puts the reader's contents at objectKey with optional metadata.
+// The size hint is unused; PutObject streams the body.
+func (p *Provider) Upload(ctx context.Context, objectKey string, reader io.Reader, _ int64, metadata map[string]string) error {
+	_, err := p.client.PutObject(ctx, &awss3.PutObjectInput{Bucket: awscore.String(p.bucket), Key: awscore.String(objectKey), Body: reader, Metadata: metadata})
+	if err != nil {
+		return fmt.Errorf("upload s3 object: %w", err)
+	}
+	return nil
+}
+
+// Download fetches the object body; the caller must close the reader.
+func (p *Provider) Download(ctx context.Context, objectKey string) (io.ReadCloser, error) {
+	output, getErr := p.client.GetObject(ctx, &awss3.GetObjectInput{
+		Bucket: awscore.String(p.bucket),
+		Key:    awscore.String(objectKey),
+	})
+	if getErr != nil {
+		return nil, fmt.Errorf("download s3 object: %w", getErr)
+	}
+	return output.Body, nil
+}
+
+// Delete removes the object at objectKey from the bucket.
+func (p *Provider) Delete(ctx context.Context, objectKey string) error {
+	input := &awss3.DeleteObjectInput{
+		Bucket: awscore.String(p.bucket),
+		Key:    awscore.String(objectKey),
+	}
+	if _, deleteErr := p.client.DeleteObject(ctx, input); deleteErr != nil {
+		return fmt.Errorf("delete s3 object: %w", deleteErr)
+	}
+	return nil
+}
+
+// List returns every object whose key starts with prefix. ListObjectsV2
+// caps a single response at 1000 keys, so the request is repeated with the
+// continuation token until the listing is no longer truncated — otherwise
+// large buckets would be silently cut off.
+func (p *Provider) List(ctx context.Context, prefix string) ([]storage.ObjectInfo, error) {
+	items := make([]storage.ObjectInfo, 0)
+	input := &awss3.ListObjectsV2Input{Bucket: awscore.String(p.bucket), Prefix: awscore.String(prefix)}
+	for {
+		result, err := p.client.ListObjectsV2(ctx, input)
+		if err != nil {
+			return nil, fmt.Errorf("list s3 objects: %w", err)
+		}
+		for _, object := range result.Contents {
+			// LastModified and Size are pointers in the SDK; guard nils.
+			updatedAt := time.Time{}
+			if object.LastModified != nil {
+				updatedAt = object.LastModified.UTC()
+			}
+			size := int64(0)
+			if object.Size != nil {
+				size = *object.Size
+			}
+			items = append(items, storage.ObjectInfo{Key: awscore.ToString(object.Key), Size: size, UpdatedAt: updatedAt})
+		}
+		if result.IsTruncated == nil || !*result.IsTruncated {
+			return items, nil
+		}
+		input.ContinuationToken = result.NextContinuationToken
+	}
+}
diff --git a/server/internal/storage/s3/provider_test.go b/server/internal/storage/s3/provider_test.go
new file mode 100644
index 0000000..205f513
--- /dev/null
+++ b/server/internal/storage/s3/provider_test.go
@@ -0,0 +1,78 @@
+package s3
+
+import (
+ "bytes"
+ "context"
+ "io"
+ "strings"
+ "testing"
+ "time"
+
+ "backupx/server/internal/storage"
+
+ awscore "github.com/aws/aws-sdk-go-v2/aws"
+ awss3 "github.com/aws/aws-sdk-go-v2/service/s3"
+ awss3types "github.com/aws/aws-sdk-go-v2/service/s3/types"
+)
+
+// fakeClient is an in-memory implementation of the provider's client
+// interface, storing object bodies in a map.
+type fakeClient struct{ data map[string]string }
+
+// HeadBucket always succeeds.
+func (c *fakeClient) HeadBucket(context.Context, *awss3.HeadBucketInput, ...func(*awss3.Options)) (*awss3.HeadBucketOutput, error) {
+	return &awss3.HeadBucketOutput{}, nil
+}
+
+// PutObject records the full body under the request key.
+func (c *fakeClient) PutObject(_ context.Context, input *awss3.PutObjectInput, _ ...func(*awss3.Options)) (*awss3.PutObjectOutput, error) {
+	body, _ := io.ReadAll(input.Body)
+	c.data[awscore.ToString(input.Key)] = string(body)
+	return &awss3.PutObjectOutput{}, nil
+}
+
+// GetObject returns the stored body (empty when the key is absent).
+func (c *fakeClient) GetObject(_ context.Context, input *awss3.GetObjectInput, _ ...func(*awss3.Options)) (*awss3.GetObjectOutput, error) {
+	return &awss3.GetObjectOutput{Body: io.NopCloser(strings.NewReader(c.data[awscore.ToString(input.Key)]))}, nil
+}
+
+// DeleteObject removes the key; missing keys are a no-op.
+func (c *fakeClient) DeleteObject(_ context.Context, input *awss3.DeleteObjectInput, _ ...func(*awss3.Options)) (*awss3.DeleteObjectOutput, error) {
+	delete(c.data, awscore.ToString(input.Key))
+	return &awss3.DeleteObjectOutput{}, nil
+}
+
+// ListObjectsV2 returns a single fixed object regardless of input.
+func (c *fakeClient) ListObjectsV2(_ context.Context, _ *awss3.ListObjectsV2Input, _ ...func(*awss3.Options)) (*awss3.ListObjectsV2Output, error) {
+	now := time.Now().UTC()
+	return &awss3.ListObjectsV2Output{Contents: []awss3types.Object{{Key: awscore.String("backup.tar.gz"), Size: awscore.Int64(10), LastModified: &now}}}, nil
+}
+
+// TestS3ProviderCRUD exercises the full upload/download/list/delete cycle
+// of the S3 provider against the in-memory fake client.
+func TestS3ProviderCRUD(t *testing.T) {
+	// Inject the fake so no AWS credentials or network are needed.
+	factory := Factory{newClient: func(cfg storage.S3Config) client {
+		return &fakeClient{data: make(map[string]string)}
+	}}
+	providerAny, err := factory.New(context.Background(), map[string]any{"bucket": "demo", "accessKeyId": "a", "secretAccessKey": "b"})
+	if err != nil {
+		t.Fatalf("Factory.New returned error: %v", err)
+	}
+	provider := providerAny.(*Provider)
+	if err := provider.TestConnection(context.Background()); err != nil {
+		t.Fatalf("TestConnection returned error: %v", err)
+	}
+	if err := provider.Upload(context.Background(), "backup.tar.gz", bytes.NewBufferString("payload"), 7, nil); err != nil {
+		t.Fatalf("Upload returned error: %v", err)
+	}
+	reader, err := provider.Download(context.Background(), "backup.tar.gz")
+	if err != nil {
+		t.Fatalf("Download returned error: %v", err)
+	}
+	defer reader.Close()
+	content, _ := io.ReadAll(reader)
+	if string(content) != "payload" {
+		t.Fatalf("unexpected content: %s", string(content))
+	}
+	items, err := provider.List(context.Background(), "backup")
+	if err != nil {
+		t.Fatalf("List returned error: %v", err)
+	}
+	if len(items) != 1 || items[0].Key != "backup.tar.gz" {
+		t.Fatalf("unexpected list result: %#v", items)
+	}
+	if err := provider.Delete(context.Background(), "backup.tar.gz"); err != nil {
+		t.Fatalf("Delete returned error: %v", err)
+	}
+}
diff --git a/server/internal/storage/s3provider/provider.go b/server/internal/storage/s3provider/provider.go
new file mode 100644
index 0000000..eb961e2
--- /dev/null
+++ b/server/internal/storage/s3provider/provider.go
@@ -0,0 +1,9 @@
+// Package s3provider is a thin compatibility alias for the s3 package,
+// preserving the older import path.
+package s3provider
+
+import "backupx/server/internal/storage/s3"
+
+// Factory re-exports the S3 factory type.
+type Factory = s3.Factory
+
+// NewFactory returns a standard S3 factory.
+func NewFactory() Factory {
+	return s3.NewFactory()
+}
diff --git a/server/internal/storage/tencent/factory.go b/server/internal/storage/tencent/factory.go
new file mode 100644
index 0000000..7b4e6e2
--- /dev/null
+++ b/server/internal/storage/tencent/factory.go
@@ -0,0 +1,60 @@
+// Package tencent provides a Tencent Cloud COS storage factory that delegates to the S3-compatible engine.
+// Tencent COS is fully S3-compatible; we auto-assemble the endpoint from region and appId.
+package tencent
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ "backupx/server/internal/storage"
+ "backupx/server/internal/storage/s3"
+)
+
+// Config is the user-facing configuration for Tencent COS.
+type Config struct {
+	Region string `json:"region"`
+	Bucket string `json:"bucket"` // format: bucketname-appid
+	SecretID string `json:"accessKeyId"`
+	SecretKey string `json:"secretAccessKey"`
+	Endpoint string `json:"endpoint"` // optional override
+}
+
+// Factory creates Tencent COS providers by composing the S3 engine.
+type Factory struct {
+	s3Factory s3.Factory
+}
+
+// NewFactory returns a factory backed by a fresh S3 factory.
+func NewFactory() Factory {
+	return Factory{s3Factory: s3.NewFactory()}
+}
+
+func (Factory) Type() storage.ProviderType { return storage.ProviderTypeTencentCOS }
+// SensitiveFields lists the credential keys that must be masked in API output.
+func (Factory) SensitiveFields() []string { return []string{"accessKeyId", "secretAccessKey"} }
+
+// New resolves the COS endpoint (explicit override wins, otherwise derived
+// from the region) and delegates provider construction to the shared S3
+// factory.
+func (f Factory) New(ctx context.Context, rawConfig map[string]any) (storage.StorageProvider, error) {
+	cfg, err := storage.DecodeConfig[Config](rawConfig)
+	if err != nil {
+		return nil, err
+	}
+
+	endpoint := strings.TrimSpace(cfg.Endpoint)
+	if endpoint == "" {
+		region := strings.TrimSpace(cfg.Region)
+		if region == "" {
+			return nil, fmt.Errorf("tencent cos region is required")
+		}
+		// Tencent COS S3-compatible endpoint format
+		endpoint = fmt.Sprintf("https://cos.%s.myqcloud.com", region)
+	}
+
+	// Re-encode as the S3 engine's config shape.
+	s3Config := map[string]any{
+		"endpoint": endpoint,
+		"region": cfg.Region,
+		"bucket": cfg.Bucket,
+		"accessKeyId": cfg.SecretID,
+		"secretAccessKey": cfg.SecretKey,
+		"forcePathStyle": false, // COS uses virtual-hosted style
+	}
+	return f.s3Factory.New(ctx, s3Config)
+}
diff --git a/server/internal/storage/types.go b/server/internal/storage/types.go
new file mode 100644
index 0000000..79532f2
--- /dev/null
+++ b/server/internal/storage/types.go
@@ -0,0 +1,120 @@
+package storage
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "io"
+ "strings"
+ "time"
+)
+
+// ProviderType identifies a storage backend; it is a string alias so it
+// interoperates directly with persisted/serialized type values.
+type ProviderType = string
+
+const (
+	ProviderTypeLocalDisk ProviderType = "local_disk"
+	ProviderTypeGoogleDrive ProviderType = "google_drive"
+	ProviderTypeS3 ProviderType = "s3"
+	ProviderTypeWebDAV ProviderType = "webdav"
+	ProviderTypeAliyunOSS ProviderType = "aliyun_oss"
+	ProviderTypeTencentCOS ProviderType = "tencent_cos"
+	ProviderTypeQiniuKodo ProviderType = "qiniu_kodo"
+)
+
+// Legacy string aliases for the provider type constants.
+const (
+	TypeLocalDisk = string(ProviderTypeLocalDisk)
+	TypeGoogleDrive = string(ProviderTypeGoogleDrive)
+	TypeS3 = string(ProviderTypeS3)
+	TypeWebDAV = string(ProviderTypeWebDAV)
+	TypeAliyunOSS = string(ProviderTypeAliyunOSS)
+	TypeTencentCOS = string(ProviderTypeTencentCOS)
+	TypeQiniuKodo = string(ProviderTypeQiniuKodo)
+)
+
+// ObjectInfo describes one stored object in a listing.
+type ObjectInfo struct {
+	Key string `json:"key"`
+	Size int64 `json:"size"`
+	UpdatedAt time.Time `json:"updatedAt"`
+}
+
+// StorageProvider is the common contract every backend implements.
+type StorageProvider interface {
+	Type() ProviderType
+	TestConnection(context.Context) error
+	Upload(ctx context.Context, objectKey string, reader io.Reader, size int64, metadata map[string]string) error
+	Download(ctx context.Context, objectKey string) (io.ReadCloser, error)
+	Delete(ctx context.Context, objectKey string) error
+	List(ctx context.Context, prefix string) ([]ObjectInfo, error)
+}
+
+// ProviderFactory is the minimal factory contract; optional capabilities
+// (New/Create/Validate/SensitiveFields) are discovered via type assertions
+// in the registry.
+type ProviderFactory interface {
+	Type() ProviderType
+}
+
+// DecodeConfig converts a generic config map into the typed config T by
+// round-tripping through JSON.
+func DecodeConfig[T any](raw map[string]any) (T, error) {
+	var cfg T
+	blob, marshalErr := json.Marshal(raw)
+	if marshalErr != nil {
+		return cfg, fmt.Errorf("marshal config: %w", marshalErr)
+	}
+	if unmarshalErr := json.Unmarshal(blob, &cfg); unmarshalErr != nil {
+		return cfg, fmt.Errorf("decode config: %w", unmarshalErr)
+	}
+	return cfg, nil
+}
+
+// DecodeRawConfig unmarshals raw JSON into the typed config T. Nil or
+// empty input is treated as an absent document and yields the zero value,
+// mirroring decodeConfigMap's tolerance instead of failing with
+// "unexpected end of JSON input".
+func DecodeRawConfig[T any](raw json.RawMessage) (T, error) {
+	var cfg T
+	if len(raw) == 0 {
+		return cfg, nil
+	}
+	if err := json.Unmarshal(raw, &cfg); err != nil {
+		return cfg, fmt.Errorf("decode config: %w", err)
+	}
+	return cfg, nil
+}
+
+// ParseProviderType normalizes a user-supplied provider type string by
+// trimming surrounding whitespace.
+func ParseProviderType(value string) ProviderType {
+	trimmed := strings.TrimSpace(value)
+	return trimmed
+}
+
+// LocalDiskConfig configures the local filesystem backend.
+type LocalDiskConfig struct {
+	BasePath string `json:"basePath"`
+}
+
+// S3Config configures the S3 (and S3-compatible) backend.
+type S3Config struct {
+	Endpoint string `json:"endpoint"`
+	Region string `json:"region"`
+	Bucket string `json:"bucket"`
+	AccessKeyID string `json:"accessKeyId"`
+	SecretAccessKey string `json:"secretAccessKey"`
+	ForcePathStyle bool `json:"forcePathStyle"`
+}
+
+// WebDAVConfig configures the WebDAV backend.
+type WebDAVConfig struct {
+	Endpoint string `json:"endpoint"`
+	Username string `json:"username"`
+	Password string `json:"password"`
+	BasePath string `json:"basePath"`
+}
+
+// GoogleDriveConfig configures the Google Drive backend. RedirectURI and
+// RedirectURL are accepted as alternate JSON spellings; Normalize mirrors
+// one into the other.
+type GoogleDriveConfig struct {
+	ClientID string `json:"clientId"`
+	ClientSecret string `json:"clientSecret"`
+	RedirectURI string `json:"redirectUri"`
+	RedirectURL string `json:"redirectUrl"`
+	RefreshToken string `json:"refreshToken"`
+	FolderID string `json:"folderId"`
+}
+
+// Normalize trims whitespace from every field and mirrors RedirectURI and
+// RedirectURL into each other when only one is set, so callers may use
+// either JSON spelling. It returns the normalized copy by value.
+func (cfg GoogleDriveConfig) Normalize() GoogleDriveConfig {
+	trim := strings.TrimSpace
+	cfg.ClientID = trim(cfg.ClientID)
+	cfg.ClientSecret = trim(cfg.ClientSecret)
+	cfg.RedirectURI = trim(cfg.RedirectURI)
+	cfg.RedirectURL = trim(cfg.RedirectURL)
+	cfg.RefreshToken = trim(cfg.RefreshToken)
+	cfg.FolderID = trim(cfg.FolderID)
+	if cfg.RedirectURI == "" {
+		cfg.RedirectURI = cfg.RedirectURL
+	}
+	if cfg.RedirectURL == "" {
+		cfg.RedirectURL = cfg.RedirectURI
+	}
+	return cfg
+}
diff --git a/server/internal/storage/webdav/provider.go b/server/internal/storage/webdav/provider.go
new file mode 100644
index 0000000..2e1aafc
--- /dev/null
+++ b/server/internal/storage/webdav/provider.go
@@ -0,0 +1,126 @@
+package webdav
+
+import (
+ "context"
+ "fmt"
+ "io"
+ "os"
+ "path"
+ "strings"
+
+ "backupx/server/internal/storage"
+ gowebdav "github.com/studio-b12/gowebdav"
+)
+
+// client is the minimal WebDAV surface the Provider needs. It is satisfied
+// by *gowebdav.Client (see NewFactory) and by the fake client in tests.
+type client interface {
+ ReadDir(path string) ([]os.FileInfo, error)
+ WriteStream(path string, stream io.Reader, perm os.FileMode) error
+ ReadStream(path string) (io.ReadCloser, error)
+ Remove(path string) error
+ MkdirAll(path string, perm os.FileMode) error
+ Stat(path string) (os.FileInfo, error)
+}
+
+// Provider implements a WebDAV-backed storage provider.
+type Provider struct {
+ client client // WebDAV transport
+ basePath string // normalized absolute directory under which objects live
+}
+
+// Factory builds WebDAV Providers. newClient is injectable so tests can
+// substitute a fake client; when nil, New falls back to the gowebdav
+// constructor from NewFactory.
+type Factory struct {
+ newClient func(cfg storage.WebDAVConfig) client
+}
+
+// NewFactory builds a Factory whose default constructor wires up a gowebdav
+// client against the configured endpoint (trailing slashes stripped) using
+// basic-auth credentials.
+func NewFactory() Factory {
+	construct := func(cfg storage.WebDAVConfig) client {
+		endpoint := strings.TrimRight(cfg.Endpoint, "/")
+		return gowebdav.NewClient(endpoint, cfg.Username, cfg.Password)
+	}
+	return Factory{newClient: construct}
+}
+
+// Type reports the provider type constant for WebDAV.
+func (Factory) Type() storage.ProviderType { return storage.ProviderTypeWebDAV }
+
+// SensitiveFields lists config keys whose values should be masked.
+// NOTE(review): "username" is treated as sensitive here alongside
+// "password" — confirm that is intentional.
+func (Factory) SensitiveFields() []string { return []string{"username", "password"} }
+
+// New decodes the raw provider configuration, validates that an endpoint is
+// present, and returns a Provider backed by the factory's client
+// constructor (falling back to the default gowebdav constructor when none
+// was injected). The context is accepted for interface compatibility only.
+func (f Factory) New(_ context.Context, rawConfig map[string]any) (storage.StorageProvider, error) {
+	cfg, err := storage.DecodeConfig[storage.WebDAVConfig](rawConfig)
+	if err != nil {
+		return nil, err
+	}
+	if strings.TrimSpace(cfg.Endpoint) == "" {
+		return nil, fmt.Errorf("webdav endpoint is required")
+	}
+	construct := f.newClient
+	if construct == nil {
+		construct = NewFactory().newClient
+	}
+	provider := &Provider{
+		client:   construct(cfg),
+		basePath: normalizeBasePath(cfg.BasePath),
+	}
+	return provider, nil
+}
+
+func (p *Provider) Type() storage.ProviderType { return storage.ProviderTypeWebDAV }
+
+// TestConnection verifies the server is reachable and writable by ensuring
+// the base path directory exists and then stat-ing it.
+func (p *Provider) TestConnection(_ context.Context) error {
+	if mkErr := p.client.MkdirAll(p.basePath, 0o755); mkErr != nil {
+		return fmt.Errorf("ensure webdav base path: %w", mkErr)
+	}
+	_, statErr := p.client.Stat(p.basePath)
+	if statErr != nil {
+		return fmt.Errorf("stat webdav base path: %w", statErr)
+	}
+	return nil
+}
+
+// Upload stores the object at objectKey beneath the base path, creating any
+// intermediate directories first. The size and metadata arguments are
+// accepted for interface compatibility but ignored here.
+func (p *Provider) Upload(_ context.Context, objectKey string, reader io.Reader, _ int64, _ map[string]string) error {
+	objectPath := p.resolvePath(objectKey)
+	parentDir := path.Dir(objectPath)
+	if err := p.client.MkdirAll(parentDir, 0o755); err != nil {
+		return fmt.Errorf("create webdav directories: %w", err)
+	}
+	if err := p.client.WriteStream(objectPath, reader, 0o644); err != nil {
+		return fmt.Errorf("write webdav object: %w", err)
+	}
+	return nil
+}
+
+// Download opens a streaming reader for the object at objectKey. The caller
+// owns the returned ReadCloser and must close it.
+func (p *Provider) Download(_ context.Context, objectKey string) (io.ReadCloser, error) {
+	objectPath := p.resolvePath(objectKey)
+	stream, err := p.client.ReadStream(objectPath)
+	if err != nil {
+		return nil, fmt.Errorf("read webdav object: %w", err)
+	}
+	return stream, nil
+}
+
+// Delete removes the object at objectKey from the WebDAV server.
+func (p *Provider) Delete(_ context.Context, objectKey string) error {
+	objectPath := p.resolvePath(objectKey)
+	if removeErr := p.client.Remove(objectPath); removeErr != nil {
+		return fmt.Errorf("delete webdav object: %w", removeErr)
+	}
+	return nil
+}
+
+// List enumerates the non-directory entries directly under the base path and
+// returns those whose object key starts with prefix (an empty prefix matches
+// everything). Keys are the base path joined with the entry name, without a
+// leading slash. NOTE: the listing only covers the base directory itself —
+// it does not recurse into subdirectories.
+func (p *Provider) List(_ context.Context, prefix string) ([]storage.ObjectInfo, error) {
+	entries, err := p.client.ReadDir(p.basePath)
+	if err != nil {
+		return nil, fmt.Errorf("list webdav directory: %w", err)
+	}
+	base := strings.TrimPrefix(p.basePath, "/")
+	items := make([]storage.ObjectInfo, 0, len(entries))
+	for _, entry := range entries {
+		if entry.IsDir() {
+			continue
+		}
+		key := strings.TrimPrefix(path.Join(base, entry.Name()), "/")
+		if prefix == "" || strings.HasPrefix(key, prefix) {
+			items = append(items, storage.ObjectInfo{
+				Key:       key,
+				Size:      entry.Size(),
+				UpdatedAt: entry.ModTime().UTC(),
+			})
+		}
+	}
+	return items, nil
+}
+
+// normalizeBasePath collapses the configured base path to a clean, absolute
+// form; blank input becomes "/".
+func normalizeBasePath(value string) string {
+	cleaned := path.Clean("/" + strings.TrimSpace(value))
+	if cleaned != "." {
+		return cleaned
+	}
+	return "/"
+}
+
+// resolvePath maps an object key onto an absolute path under the base path.
+// The key is trimmed and rooted at "/" before joining, so "../" segments in
+// the key cannot escape the base path.
+func (p *Provider) resolvePath(objectKey string) string {
+	trimmed := strings.TrimSpace(objectKey)
+	rootedKey := path.Clean("/" + trimmed)
+	return path.Clean(path.Join(p.basePath, rootedKey))
+}
diff --git a/server/internal/storage/webdav/provider_test.go b/server/internal/storage/webdav/provider_test.go
new file mode 100644
index 0000000..7a73f68
--- /dev/null
+++ b/server/internal/storage/webdav/provider_test.go
@@ -0,0 +1,79 @@
+package webdav
+
+import (
+ "context"
+ "io"
+ "os"
+ "strings"
+ "testing"
+ "time"
+
+ "backupx/server/internal/storage"
+)
+
+// fakeFileInfo is a minimal os.FileInfo stub used by fakeClient in tests.
+type fakeFileInfo struct {
+ name string
+ size int64
+ mod time.Time
+ dir bool
+}
+
+func (f fakeFileInfo) Name() string { return f.name }
+func (f fakeFileInfo) Size() int64 { return f.size }
+func (f fakeFileInfo) Mode() os.FileMode { return 0 } // mode is irrelevant to the provider under test
+func (f fakeFileInfo) ModTime() time.Time { return f.mod }
+func (f fakeFileInfo) IsDir() bool { return f.dir }
+func (f fakeFileInfo) Sys() any { return nil }
+
+// fakeClient is an in-memory client implementation for tests; data maps
+// absolute object paths to file contents.
+type fakeClient struct{ data map[string]string }
+
+// ReadDir always reports a single flat file named backup.tar.gz, sized from
+// the /storage/backup.tar.gz entry (the path the CRUD test uploads to).
+func (c *fakeClient) ReadDir(_ string) ([]os.FileInfo, error) {
+ return []os.FileInfo{fakeFileInfo{name: "backup.tar.gz", size: int64(len(c.data["/storage/backup.tar.gz"])), mod: time.Now().UTC()}}, nil
+}
+func (c *fakeClient) WriteStream(path string, stream io.Reader, _ os.FileMode) error {
+ content, _ := io.ReadAll(stream)
+ c.data[path] = string(content)
+ return nil
+}
+func (c *fakeClient) ReadStream(path string) (io.ReadCloser, error) {
+ return io.NopCloser(strings.NewReader(c.data[path])), nil
+}
+func (c *fakeClient) Remove(path string) error { delete(c.data, path); return nil }
+func (c *fakeClient) MkdirAll(_ string, _ os.FileMode) error { return nil }
+// Stat reports every path as an existing directory so TestConnection passes.
+func (c *fakeClient) Stat(path string) (os.FileInfo, error) {
+ return fakeFileInfo{name: path, dir: true}, nil
+}
+
+// TestWebDAVProviderCRUD drives the full connect/upload/download/list/delete
+// cycle against the in-memory fake client and checks that object keys are
+// resolved under the configured base path /storage.
+func TestWebDAVProviderCRUD(t *testing.T) {
+ factory := Factory{newClient: func(storage.WebDAVConfig) client { return &fakeClient{data: make(map[string]string)} }}
+ providerAny, err := factory.New(context.Background(), map[string]any{"endpoint": "http://dav.example.com", "basePath": "/storage"})
+ if err != nil {
+ t.Fatalf("Factory.New returned error: %v", err)
+ }
+ provider := providerAny.(*Provider)
+ if err := provider.TestConnection(context.Background()); err != nil {
+ t.Fatalf("TestConnection returned error: %v", err)
+ }
+ if err := provider.Upload(context.Background(), "backup.tar.gz", strings.NewReader("payload"), 7, nil); err != nil {
+ t.Fatalf("Upload returned error: %v", err)
+ }
+ reader, err := provider.Download(context.Background(), "backup.tar.gz")
+ if err != nil {
+ t.Fatalf("Download returned error: %v", err)
+ }
+ defer reader.Close()
+ content, _ := io.ReadAll(reader)
+ if string(content) != "payload" {
+ t.Fatalf("unexpected content: %s", string(content))
+ }
+ // Listing with the "storage" prefix must surface the uploaded object with
+ // its base-path-relative key.
+ items, err := provider.List(context.Background(), "storage")
+ if err != nil {
+ t.Fatalf("List returned error: %v", err)
+ }
+ if len(items) != 1 || items[0].Key != "storage/backup.tar.gz" {
+ t.Fatalf("unexpected list result: %#v", items)
+ }
+ if err := provider.Delete(context.Background(), "backup.tar.gz"); err != nil {
+ t.Fatalf("Delete returned error: %v", err)
+ }
+}
diff --git a/server/internal/storage/webdavprovider/provider.go b/server/internal/storage/webdavprovider/provider.go
new file mode 100644
index 0000000..3eda6e7
--- /dev/null
+++ b/server/internal/storage/webdavprovider/provider.go
@@ -0,0 +1,9 @@
+package webdavprovider
+
+import "backupx/server/internal/storage/webdav"
+
+// Factory is an alias of the webdav package's Factory, re-exported under
+// this package name — presumably to preserve an older import path; TODO
+// confirm why both packages exist.
+type Factory = webdav.Factory
+
+// NewFactory forwards to webdav.NewFactory.
+func NewFactory() Factory {
+ return webdav.NewFactory()
+}
diff --git a/server/pkg/compress/gzip.go b/server/pkg/compress/gzip.go
new file mode 100644
index 0000000..66a3b01
--- /dev/null
+++ b/server/pkg/compress/gzip.go
@@ -0,0 +1,60 @@
+package compress
+
+import (
+ "compress/gzip"
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
+ "strings"
+)
+
+// GzipFile compresses sourcePath into a sibling file named
+// "<sourcePath>.gz" (with the original base name recorded in the gzip
+// header) and returns the new file's path. On any failure after the output
+// file was created, the partial output is removed so no truncated archive is
+// left behind, and the close error of the output file is surfaced so
+// buffered-write failures are not silently dropped.
+func GzipFile(sourcePath string) (string, error) {
+	source, err := os.Open(sourcePath)
+	if err != nil {
+		return "", fmt.Errorf("open source file: %w", err)
+	}
+	defer source.Close()
+	targetPath := sourcePath + ".gz"
+	target, err := os.Create(targetPath)
+	if err != nil {
+		return "", fmt.Errorf("create gzip file: %w", err)
+	}
+	writer := gzip.NewWriter(target)
+	writer.Name = filepath.Base(sourcePath)
+	if _, err := io.Copy(writer, source); err != nil {
+		writer.Close()
+		target.Close()
+		os.Remove(targetPath)
+		return "", fmt.Errorf("gzip source file: %w", err)
+	}
+	if err := writer.Close(); err != nil {
+		target.Close()
+		os.Remove(targetPath)
+		return "", fmt.Errorf("close gzip writer: %w", err)
+	}
+	if err := target.Close(); err != nil {
+		os.Remove(targetPath)
+		return "", fmt.Errorf("close gzip file: %w", err)
+	}
+	return targetPath, nil
+}
+
+// GunzipFile decompresses sourcePath and returns the output path: the source
+// path with its ".gz" suffix stripped, or "<sourcePath>.out" when the source
+// does not end in ".gz". On a copy or close failure the partially written
+// output file is removed so no truncated result is left behind.
+//
+// NOTE(review): output size is unbounded relative to input size
+// (decompression bomb) — acceptable only if sourcePath is trusted.
+func GunzipFile(sourcePath string) (string, error) {
+	source, err := os.Open(sourcePath)
+	if err != nil {
+		return "", fmt.Errorf("open gzip file: %w", err)
+	}
+	defer source.Close()
+	reader, err := gzip.NewReader(source)
+	if err != nil {
+		return "", fmt.Errorf("create gzip reader: %w", err)
+	}
+	defer reader.Close()
+	targetPath := strings.TrimSuffix(sourcePath, ".gz")
+	if targetPath == sourcePath {
+		// No ".gz" suffix to strip; avoid overwriting the source itself.
+		targetPath += ".out"
+	}
+	target, err := os.Create(targetPath)
+	if err != nil {
+		return "", fmt.Errorf("create target file: %w", err)
+	}
+	if _, err := io.Copy(target, reader); err != nil {
+		target.Close()
+		os.Remove(targetPath)
+		return "", fmt.Errorf("gunzip file: %w", err)
+	}
+	if err := target.Close(); err != nil {
+		os.Remove(targetPath)
+		return "", fmt.Errorf("close target file: %w", err)
+	}
+	return targetPath, nil
+}
diff --git a/server/pkg/compress/gzip_test.go b/server/pkg/compress/gzip_test.go
new file mode 100644
index 0000000..4e5b5c4
--- /dev/null
+++ b/server/pkg/compress/gzip_test.go
@@ -0,0 +1,29 @@
+package compress
+
+import (
+ "os"
+ "path/filepath"
+ "testing"
+)
+
+// TestGzipAndGunzipFile round-trips a small payload through GzipFile and
+// GunzipFile and verifies the decompressed bytes match the original.
+func TestGzipAndGunzipFile(t *testing.T) {
+ sourcePath := filepath.Join(t.TempDir(), "payload.txt")
+ if err := os.WriteFile(sourcePath, []byte("payload"), 0o644); err != nil {
+ t.Fatalf("WriteFile returned error: %v", err)
+ }
+ compressedPath, err := GzipFile(sourcePath)
+ if err != nil {
+ t.Fatalf("GzipFile returned error: %v", err)
+ }
+ // GunzipFile strips ".gz", so this overwrites the original source file
+ // with identical content.
+ decompressedPath, err := GunzipFile(compressedPath)
+ if err != nil {
+ t.Fatalf("GunzipFile returned error: %v", err)
+ }
+ content, err := os.ReadFile(decompressedPath)
+ if err != nil {
+ t.Fatalf("ReadFile returned error: %v", err)
+ }
+ if string(content) != "payload" {
+ t.Fatalf("unexpected decompressed content: %s", string(content))
+ }
+}
diff --git a/server/pkg/crypto/file_cipher.go b/server/pkg/crypto/file_cipher.go
new file mode 100644
index 0000000..8b86bef
--- /dev/null
+++ b/server/pkg/crypto/file_cipher.go
@@ -0,0 +1,128 @@
+package backupcrypto
+
+import (
+ "crypto/aes"
+ "crypto/cipher"
+ "crypto/rand"
+ "encoding/binary"
+ "fmt"
+ "io"
+ "os"
+ "strings"
+)
+
+const (
+ chunkSize = 1 << 20
+ fileMagic = "BXENC1"
+ nonceSizeBytes = 12
+)
+
+// EncryptFile encrypts sourcePath with AES-GCM under key (16/24/32 bytes as
+// accepted by aes.NewCipher), writing "<sourcePath>.enc" and returning its
+// path. The output begins with the fileMagic header, followed by one record
+// per plaintext chunk (up to chunkSize bytes): a fresh random nonce, a
+// big-endian uint32 ciphertext length, and the sealed ciphertext. On any
+// failure the partially written output file is removed, and the output
+// file's Close error is checked so buffered-write failures are reported.
+func EncryptFile(key []byte, sourcePath string) (string, error) {
+	block, err := aes.NewCipher(key)
+	if err != nil {
+		return "", fmt.Errorf("create cipher: %w", err)
+	}
+	gcm, err := cipher.NewGCM(block)
+	if err != nil {
+		return "", fmt.Errorf("create gcm: %w", err)
+	}
+	source, err := os.Open(sourcePath)
+	if err != nil {
+		return "", fmt.Errorf("open source file: %w", err)
+	}
+	defer source.Close()
+	targetPath := sourcePath + ".enc"
+	target, err := os.Create(targetPath)
+	if err != nil {
+		return "", fmt.Errorf("create encrypted file: %w", err)
+	}
+	if err := encryptStream(gcm, source, target); err != nil {
+		target.Close()
+		os.Remove(targetPath)
+		return "", err
+	}
+	if err := target.Close(); err != nil {
+		os.Remove(targetPath)
+		return "", fmt.Errorf("close encrypted file: %w", err)
+	}
+	return targetPath, nil
+}
+
+// encryptStream writes the magic header followed by sealed chunk records
+// (nonce, length, ciphertext) for all of source into target.
+func encryptStream(gcm cipher.AEAD, source io.Reader, target io.Writer) error {
+	if _, err := io.WriteString(target, fileMagic); err != nil {
+		return fmt.Errorf("write encryption header: %w", err)
+	}
+	buffer := make([]byte, chunkSize)
+	for {
+		readCount, readErr := source.Read(buffer)
+		if readCount > 0 {
+			nonce := make([]byte, gcm.NonceSize())
+			if _, err := io.ReadFull(rand.Reader, nonce); err != nil {
+				return fmt.Errorf("generate nonce: %w", err)
+			}
+			sealed := gcm.Seal(nil, nonce, buffer[:readCount], nil)
+			if _, err := target.Write(nonce); err != nil {
+				return fmt.Errorf("write nonce: %w", err)
+			}
+			if err := binary.Write(target, binary.BigEndian, uint32(len(sealed))); err != nil {
+				return fmt.Errorf("write ciphertext length: %w", err)
+			}
+			if _, err := target.Write(sealed); err != nil {
+				return fmt.Errorf("write ciphertext: %w", err)
+			}
+		}
+		if readErr == io.EOF {
+			return nil
+		}
+		if readErr != nil {
+			return fmt.Errorf("read source file: %w", readErr)
+		}
+	}
+}
+
+// DecryptFile reverses EncryptFile: it validates the fileMagic header, then
+// decrypts each (nonce, length, ciphertext) record of sourcePath into the
+// source path with its ".enc" suffix stripped (or "<sourcePath>.plain" when
+// there is no such suffix), returning the output path. A declared chunk
+// length larger than chunkSize plus the GCM overhead is rejected before
+// allocation, so a corrupted or hostile header cannot trigger a multi-GiB
+// buffer. On any failure the partially written output is removed and the
+// output file's Close error is checked.
+func DecryptFile(key []byte, sourcePath string) (string, error) {
+	block, err := aes.NewCipher(key)
+	if err != nil {
+		return "", fmt.Errorf("create cipher: %w", err)
+	}
+	gcm, err := cipher.NewGCM(block)
+	if err != nil {
+		return "", fmt.Errorf("create gcm: %w", err)
+	}
+	source, err := os.Open(sourcePath)
+	if err != nil {
+		return "", fmt.Errorf("open encrypted file: %w", err)
+	}
+	defer source.Close()
+	header := make([]byte, len(fileMagic))
+	if _, err := io.ReadFull(source, header); err != nil {
+		return "", fmt.Errorf("read encryption header: %w", err)
+	}
+	if string(header) != fileMagic {
+		return "", fmt.Errorf("invalid encrypted file header")
+	}
+	targetPath := strings.TrimSuffix(sourcePath, ".enc")
+	if targetPath == sourcePath {
+		// No ".enc" suffix to strip; avoid overwriting the source itself.
+		targetPath += ".plain"
+	}
+	target, err := os.Create(targetPath)
+	if err != nil {
+		return "", fmt.Errorf("create decrypted file: %w", err)
+	}
+	if err := decryptStream(gcm, source, target); err != nil {
+		target.Close()
+		os.Remove(targetPath)
+		return "", err
+	}
+	if err := target.Close(); err != nil {
+		os.Remove(targetPath)
+		return "", fmt.Errorf("close decrypted file: %w", err)
+	}
+	return targetPath, nil
+}
+
+// decryptStream reads chunk records from source until EOF, authenticates and
+// decrypts each one, and writes the plaintext to target.
+func decryptStream(gcm cipher.AEAD, source io.Reader, target io.Writer) error {
+	maxChunk := uint32(chunkSize + gcm.Overhead())
+	for {
+		nonce := make([]byte, nonceSizeBytes)
+		_, err := io.ReadFull(source, nonce)
+		if err == io.EOF {
+			// Clean end of the record stream.
+			return nil
+		}
+		if err != nil {
+			return fmt.Errorf("read nonce: %w", err)
+		}
+		var cipherLength uint32
+		if err := binary.Read(source, binary.BigEndian, &cipherLength); err != nil {
+			return fmt.Errorf("read ciphertext length: %w", err)
+		}
+		if cipherLength > maxChunk {
+			return fmt.Errorf("ciphertext chunk length %d exceeds maximum %d", cipherLength, maxChunk)
+		}
+		ciphertext := make([]byte, cipherLength)
+		if _, err := io.ReadFull(source, ciphertext); err != nil {
+			return fmt.Errorf("read ciphertext payload: %w", err)
+		}
+		plain, err := gcm.Open(nil, nonce, ciphertext, nil)
+		if err != nil {
+			return fmt.Errorf("decrypt chunk: %w", err)
+		}
+		if _, err := target.Write(plain); err != nil {
+			return fmt.Errorf("write decrypted payload: %w", err)
+		}
+	}
+}
diff --git a/server/pkg/crypto/file_cipher_test.go b/server/pkg/crypto/file_cipher_test.go
new file mode 100644
index 0000000..708b23c
--- /dev/null
+++ b/server/pkg/crypto/file_cipher_test.go
@@ -0,0 +1,31 @@
+package backupcrypto
+
+import (
+ "crypto/sha256"
+ "os"
+ "path/filepath"
+ "testing"
+)
+
+// TestEncryptAndDecryptFile round-trips a small payload through EncryptFile
+// and DecryptFile with a SHA-256-derived 32-byte key (AES-256) and verifies
+// the decrypted bytes match the original.
+func TestEncryptAndDecryptFile(t *testing.T) {
+ hash := sha256.Sum256([]byte("backup-secret"))
+ sourcePath := filepath.Join(t.TempDir(), "payload.bin")
+ if err := os.WriteFile(sourcePath, []byte("backup-payload"), 0o644); err != nil {
+ t.Fatalf("WriteFile returned error: %v", err)
+ }
+ encryptedPath, err := EncryptFile(hash[:], sourcePath)
+ if err != nil {
+ t.Fatalf("EncryptFile returned error: %v", err)
+ }
+ // DecryptFile strips ".enc", so this overwrites the original source file
+ // with identical content.
+ decryptedPath, err := DecryptFile(hash[:], encryptedPath)
+ if err != nil {
+ t.Fatalf("DecryptFile returned error: %v", err)
+ }
+ content, err := os.ReadFile(decryptedPath)
+ if err != nil {
+ t.Fatalf("ReadFile returned error: %v", err)
+ }
+ if string(content) != "backup-payload" {
+ t.Fatalf("unexpected decrypted content: %s", string(content))
+ }
+}
diff --git a/server/pkg/response/response.go b/server/pkg/response/response.go
new file mode 100644
index 0000000..54ca6b5
--- /dev/null
+++ b/server/pkg/response/response.go
@@ -0,0 +1,30 @@
+package response
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+ "os"
+
+ "backupx/server/internal/apperror"
+ "github.com/gin-gonic/gin"
+)
+
+// Envelope is the uniform JSON response body written by Success and Error.
+type Envelope struct {
+ Code string `json:"code"` // machine-readable status, e.g. "OK" or an error code
+ Message string `json:"message"` // human-readable message
+ Data any `json:"data,omitempty"` // payload; omitted when empty
+}
+
+// Success writes a 200 response wrapping data in the standard envelope with
+// code "OK".
+func Success(c *gin.Context, data any) {
+ c.JSON(http.StatusOK, Envelope{Code: "OK", Message: "success", Data: data})
+}
+
+// Error writes an error response. Known *apperror.AppError values map to
+// their own HTTP status, code, and message; anything else becomes a generic
+// 500 with a fixed message. The error is logged to stderr (rather than
+// stdout, as before) so diagnostics do not mix with normal program output.
+func Error(c *gin.Context, err error) {
+	fmt.Fprintf(os.Stderr, "HTTP Error: %v\n", err)
+	var appErr *apperror.AppError
+	if errors.As(err, &appErr) {
+		c.JSON(appErr.Status, Envelope{Code: appErr.Code, Message: appErr.Message})
+		return
+	}
+	c.JSON(http.StatusInternalServerError, Envelope{Code: "INTERNAL_ERROR", Message: "服务器内部错误"})
+}
diff --git a/web/index.html b/web/index.html
new file mode 100644
index 0000000..c8f6824
--- /dev/null
+++ b/web/index.html
@@ -0,0 +1,12 @@
+
+
+
+
+
+ BackupX
+
+
+
+
+
+
diff --git a/web/package-lock.json b/web/package-lock.json
new file mode 100644
index 0000000..7b4dcea
--- /dev/null
+++ b/web/package-lock.json
@@ -0,0 +1,3886 @@
+{
+ "name": "backupx-web",
+ "version": "0.1.0",
+ "lockfileVersion": 3,
+ "requires": true,
+ "packages": {
+ "": {
+ "name": "backupx-web",
+ "version": "0.1.0",
+ "dependencies": {
+ "@arco-design/web-react": "^2.66.0",
+ "axios": "^1.8.4",
+ "echarts": "^6.0.0",
+ "echarts-for-react": "^3.0.6",
+ "i18next": "^25.8.14",
+ "react": "^18.3.1",
+ "react-dom": "^18.3.1",
+ "react-i18next": "^16.5.6",
+ "react-router-dom": "^6.30.0",
+ "zustand": "^5.0.3"
+ },
+ "devDependencies": {
+ "@testing-library/jest-dom": "^6.6.3",
+ "@testing-library/react": "^16.2.0",
+ "@testing-library/user-event": "^14.6.1",
+ "@types/node": "^22.13.10",
+ "@types/react": "^18.3.20",
+ "@types/react-dom": "^18.3.6",
+ "@vitejs/plugin-react": "^4.3.4",
+ "jsdom": "^26.0.0",
+ "typescript": "^5.7.3",
+ "vite": "^6.2.1",
+ "vitest": "^3.0.8"
+ }
+ },
+ "node_modules/@adobe/css-tools": {
+ "version": "4.4.4",
+ "resolved": "https://registry.npmmirror.com/@adobe/css-tools/-/css-tools-4.4.4.tgz",
+ "integrity": "sha512-Elp+iwUx5rN5+Y8xLt5/GRoG20WGoDCQ/1Fb+1LiGtvwbDavuSk0jhD/eZdckHAuzcDzccnkv+rEjyWfRx18gg==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/@arco-design/color": {
+ "version": "0.4.0",
+ "resolved": "https://registry.npmmirror.com/@arco-design/color/-/color-0.4.0.tgz",
+ "integrity": "sha512-s7p9MSwJgHeL8DwcATaXvWT3m2SigKpxx4JA1BGPHL4gfvaQsmQfrLBDpjOJFJuJ2jG2dMt3R3P8Pm9E65q18g==",
+ "license": "MIT",
+ "dependencies": {
+ "color": "^3.1.3"
+ }
+ },
+ "node_modules/@arco-design/web-react": {
+ "version": "2.66.11",
+ "resolved": "https://registry.npmmirror.com/@arco-design/web-react/-/web-react-2.66.11.tgz",
+ "integrity": "sha512-PFMQ/OiK5Lb2ZP1VwKeA4lsLh5+0hCkSRgPWyE4LlZQI+Wqy0wXdl3BB4HODEcUzGC7i4ybMg8PUP1J5d1SUtg==",
+ "license": "MIT",
+ "dependencies": {
+ "@arco-design/color": "^0.4.0",
+ "@babel/runtime": "^7.5.5",
+ "b-tween": "^0.3.3",
+ "b-validate": "^1.4.2",
+ "compute-scroll-into-view": "^1.0.17",
+ "dayjs": "^1.10.5",
+ "lodash": "^4.17.21",
+ "number-precision": "^1.3.1",
+ "react-focus-lock": "^2.13.2",
+ "react-is": "^18.2.0",
+ "react-transition-group": "^4.3.0",
+ "resize-observer-polyfill": "^1.5.1",
+ "scroll-into-view-if-needed": "^2.2.20",
+ "shallowequal": "^1.1.0"
+ },
+ "peerDependencies": {
+ "react": ">=16",
+ "react-dom": ">=16"
+ }
+ },
+ "node_modules/@asamuzakjp/css-color": {
+ "version": "3.2.0",
+ "resolved": "https://registry.npmmirror.com/@asamuzakjp/css-color/-/css-color-3.2.0.tgz",
+ "integrity": "sha512-K1A6z8tS3XsmCMM86xoWdn7Fkdn9m6RSVtocUrJYIwZnFVkng/PvkEoWtOWmP+Scc6saYWHWZYbndEEXxl24jw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@csstools/css-calc": "^2.1.3",
+ "@csstools/css-color-parser": "^3.0.9",
+ "@csstools/css-parser-algorithms": "^3.0.4",
+ "@csstools/css-tokenizer": "^3.0.3",
+ "lru-cache": "^10.4.3"
+ }
+ },
+ "node_modules/@asamuzakjp/css-color/node_modules/lru-cache": {
+ "version": "10.4.3",
+ "resolved": "https://registry.npmmirror.com/lru-cache/-/lru-cache-10.4.3.tgz",
+ "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==",
+ "dev": true,
+ "license": "ISC"
+ },
+ "node_modules/@babel/code-frame": {
+ "version": "7.29.0",
+ "resolved": "https://registry.npmmirror.com/@babel/code-frame/-/code-frame-7.29.0.tgz",
+ "integrity": "sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-validator-identifier": "^7.28.5",
+ "js-tokens": "^4.0.0",
+ "picocolors": "^1.1.1"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/compat-data": {
+ "version": "7.29.0",
+ "resolved": "https://registry.npmmirror.com/@babel/compat-data/-/compat-data-7.29.0.tgz",
+ "integrity": "sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/core": {
+ "version": "7.29.0",
+ "resolved": "https://registry.npmmirror.com/@babel/core/-/core-7.29.0.tgz",
+ "integrity": "sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==",
+ "dev": true,
+ "license": "MIT",
+ "peer": true,
+ "dependencies": {
+ "@babel/code-frame": "^7.29.0",
+ "@babel/generator": "^7.29.0",
+ "@babel/helper-compilation-targets": "^7.28.6",
+ "@babel/helper-module-transforms": "^7.28.6",
+ "@babel/helpers": "^7.28.6",
+ "@babel/parser": "^7.29.0",
+ "@babel/template": "^7.28.6",
+ "@babel/traverse": "^7.29.0",
+ "@babel/types": "^7.29.0",
+ "@jridgewell/remapping": "^2.3.5",
+ "convert-source-map": "^2.0.0",
+ "debug": "^4.1.0",
+ "gensync": "^1.0.0-beta.2",
+ "json5": "^2.2.3",
+ "semver": "^6.3.1"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/babel"
+ }
+ },
+ "node_modules/@babel/generator": {
+ "version": "7.29.1",
+ "resolved": "https://registry.npmmirror.com/@babel/generator/-/generator-7.29.1.tgz",
+ "integrity": "sha512-qsaF+9Qcm2Qv8SRIMMscAvG4O3lJ0F1GuMo5HR/Bp02LopNgnZBC/EkbevHFeGs4ls/oPz9v+Bsmzbkbe+0dUw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/parser": "^7.29.0",
+ "@babel/types": "^7.29.0",
+ "@jridgewell/gen-mapping": "^0.3.12",
+ "@jridgewell/trace-mapping": "^0.3.28",
+ "jsesc": "^3.0.2"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/helper-compilation-targets": {
+ "version": "7.28.6",
+ "resolved": "https://registry.npmmirror.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.28.6.tgz",
+ "integrity": "sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/compat-data": "^7.28.6",
+ "@babel/helper-validator-option": "^7.27.1",
+ "browserslist": "^4.24.0",
+ "lru-cache": "^5.1.1",
+ "semver": "^6.3.1"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/helper-globals": {
+ "version": "7.28.0",
+ "resolved": "https://registry.npmmirror.com/@babel/helper-globals/-/helper-globals-7.28.0.tgz",
+ "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/helper-module-imports": {
+ "version": "7.28.6",
+ "resolved": "https://registry.npmmirror.com/@babel/helper-module-imports/-/helper-module-imports-7.28.6.tgz",
+ "integrity": "sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/traverse": "^7.28.6",
+ "@babel/types": "^7.28.6"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/helper-module-transforms": {
+ "version": "7.28.6",
+ "resolved": "https://registry.npmmirror.com/@babel/helper-module-transforms/-/helper-module-transforms-7.28.6.tgz",
+ "integrity": "sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-module-imports": "^7.28.6",
+ "@babel/helper-validator-identifier": "^7.28.5",
+ "@babel/traverse": "^7.28.6"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.0.0"
+ }
+ },
+ "node_modules/@babel/helper-plugin-utils": {
+ "version": "7.28.6",
+ "resolved": "https://registry.npmmirror.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.28.6.tgz",
+ "integrity": "sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/helper-string-parser": {
+ "version": "7.27.1",
+ "resolved": "https://registry.npmmirror.com/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz",
+ "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/helper-validator-identifier": {
+ "version": "7.28.5",
+ "resolved": "https://registry.npmmirror.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz",
+ "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/helper-validator-option": {
+ "version": "7.27.1",
+ "resolved": "https://registry.npmmirror.com/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz",
+ "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/helpers": {
+ "version": "7.28.6",
+ "resolved": "https://registry.npmmirror.com/@babel/helpers/-/helpers-7.28.6.tgz",
+ "integrity": "sha512-xOBvwq86HHdB7WUDTfKfT/Vuxh7gElQ+Sfti2Cy6yIWNW05P8iUslOVcZ4/sKbE+/jQaukQAdz/gf3724kYdqw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/template": "^7.28.6",
+ "@babel/types": "^7.28.6"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/parser": {
+ "version": "7.29.0",
+ "resolved": "https://registry.npmmirror.com/@babel/parser/-/parser-7.29.0.tgz",
+ "integrity": "sha512-IyDgFV5GeDUVX4YdF/3CPULtVGSXXMLh1xVIgdCgxApktqnQV0r7/8Nqthg+8YLGaAtdyIlo2qIdZrbCv4+7ww==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/types": "^7.29.0"
+ },
+ "bin": {
+ "parser": "bin/babel-parser.js"
+ },
+ "engines": {
+ "node": ">=6.0.0"
+ }
+ },
+ "node_modules/@babel/plugin-transform-react-jsx-self": {
+ "version": "7.27.1",
+ "resolved": "https://registry.npmmirror.com/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.27.1.tgz",
+ "integrity": "sha512-6UzkCs+ejGdZ5mFFC/OCUrv028ab2fp1znZmCZjAOBKiBK2jXD1O+BPSfX8X2qjJ75fZBMSnQn3Rq2mrBJK2mw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-plugin-utils": "^7.27.1"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.0.0-0"
+ }
+ },
+ "node_modules/@babel/plugin-transform-react-jsx-source": {
+ "version": "7.27.1",
+ "resolved": "https://registry.npmmirror.com/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.27.1.tgz",
+ "integrity": "sha512-zbwoTsBruTeKB9hSq73ha66iFeJHuaFkUbwvqElnygoNbj/jHRsSeokowZFN3CZ64IvEqcmmkVe89OPXc7ldAw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-plugin-utils": "^7.27.1"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.0.0-0"
+ }
+ },
+ "node_modules/@babel/runtime": {
+ "version": "7.28.6",
+ "resolved": "https://registry.npmmirror.com/@babel/runtime/-/runtime-7.28.6.tgz",
+ "integrity": "sha512-05WQkdpL9COIMz4LjTxGpPNCdlpyimKppYNoJ5Di5EUObifl8t4tuLuUBBZEpoLYOmfvIWrsp9fCl0HoPRVTdA==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/template": {
+ "version": "7.28.6",
+ "resolved": "https://registry.npmmirror.com/@babel/template/-/template-7.28.6.tgz",
+ "integrity": "sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/code-frame": "^7.28.6",
+ "@babel/parser": "^7.28.6",
+ "@babel/types": "^7.28.6"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/traverse": {
+ "version": "7.29.0",
+ "resolved": "https://registry.npmmirror.com/@babel/traverse/-/traverse-7.29.0.tgz",
+ "integrity": "sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/code-frame": "^7.29.0",
+ "@babel/generator": "^7.29.0",
+ "@babel/helper-globals": "^7.28.0",
+ "@babel/parser": "^7.29.0",
+ "@babel/template": "^7.28.6",
+ "@babel/types": "^7.29.0",
+ "debug": "^4.3.1"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/types": {
+ "version": "7.29.0",
+ "resolved": "https://registry.npmmirror.com/@babel/types/-/types-7.29.0.tgz",
+ "integrity": "sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-string-parser": "^7.27.1",
+ "@babel/helper-validator-identifier": "^7.28.5"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@csstools/color-helpers": {
+ "version": "5.1.0",
+ "resolved": "https://registry.npmmirror.com/@csstools/color-helpers/-/color-helpers-5.1.0.tgz",
+ "integrity": "sha512-S11EXWJyy0Mz5SYvRmY8nJYTFFd1LCNV+7cXyAgQtOOuzb4EsgfqDufL+9esx72/eLhsRdGZwaldu/h+E4t4BA==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/csstools"
+ },
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/csstools"
+ }
+ ],
+ "license": "MIT-0",
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@csstools/css-calc": {
+ "version": "2.1.4",
+ "resolved": "https://registry.npmmirror.com/@csstools/css-calc/-/css-calc-2.1.4.tgz",
+ "integrity": "sha512-3N8oaj+0juUw/1H3YwmDDJXCgTB1gKU6Hc/bB502u9zR0q2vd786XJH9QfrKIEgFlZmhZiq6epXl4rHqhzsIgQ==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/csstools"
+ },
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/csstools"
+ }
+ ],
+ "license": "MIT",
+ "engines": {
+ "node": ">=18"
+ },
+ "peerDependencies": {
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4"
+ }
+ },
+ "node_modules/@csstools/css-color-parser": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmmirror.com/@csstools/css-color-parser/-/css-color-parser-3.1.0.tgz",
+ "integrity": "sha512-nbtKwh3a6xNVIp/VRuXV64yTKnb1IjTAEEh3irzS+HkKjAOYLTGNb9pmVNntZ8iVBHcWDA2Dof0QtPgFI1BaTA==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/csstools"
+ },
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/csstools"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "@csstools/color-helpers": "^5.1.0",
+ "@csstools/css-calc": "^2.1.4"
+ },
+ "engines": {
+ "node": ">=18"
+ },
+ "peerDependencies": {
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4"
+ }
+ },
+ "node_modules/@csstools/css-parser-algorithms": {
+ "version": "3.0.5",
+ "resolved": "https://registry.npmmirror.com/@csstools/css-parser-algorithms/-/css-parser-algorithms-3.0.5.tgz",
+ "integrity": "sha512-DaDeUkXZKjdGhgYaHNJTV9pV7Y9B3b644jCLs9Upc3VeNGg6LWARAT6O+Q+/COo+2gg/bM5rhpMAtf70WqfBdQ==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/csstools"
+ },
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/csstools"
+ }
+ ],
+ "license": "MIT",
+ "peer": true,
+ "engines": {
+ "node": ">=18"
+ },
+ "peerDependencies": {
+ "@csstools/css-tokenizer": "^3.0.4"
+ }
+ },
+ "node_modules/@csstools/css-tokenizer": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmmirror.com/@csstools/css-tokenizer/-/css-tokenizer-3.0.4.tgz",
+ "integrity": "sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/csstools"
+ },
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/csstools"
+ }
+ ],
+ "license": "MIT",
+ "peer": true,
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/aix-ppc64": {
+ "version": "0.25.12",
+ "resolved": "https://registry.npmmirror.com/@esbuild/aix-ppc64/-/aix-ppc64-0.25.12.tgz",
+ "integrity": "sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA==",
+ "cpu": [
+ "ppc64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "aix"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/android-arm": {
+ "version": "0.25.12",
+ "resolved": "https://registry.npmmirror.com/@esbuild/android-arm/-/android-arm-0.25.12.tgz",
+ "integrity": "sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/android-arm64": {
+ "version": "0.25.12",
+ "resolved": "https://registry.npmmirror.com/@esbuild/android-arm64/-/android-arm64-0.25.12.tgz",
+ "integrity": "sha512-6AAmLG7zwD1Z159jCKPvAxZd4y/VTO0VkprYy+3N2FtJ8+BQWFXU+OxARIwA46c5tdD9SsKGZ/1ocqBS/gAKHg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/android-x64": {
+ "version": "0.25.12",
+ "resolved": "https://registry.npmmirror.com/@esbuild/android-x64/-/android-x64-0.25.12.tgz",
+ "integrity": "sha512-5jbb+2hhDHx5phYR2By8GTWEzn6I9UqR11Kwf22iKbNpYrsmRB18aX/9ivc5cabcUiAT/wM+YIZ6SG9QO6a8kg==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/darwin-arm64": {
+ "version": "0.25.12",
+ "resolved": "https://registry.npmmirror.com/@esbuild/darwin-arm64/-/darwin-arm64-0.25.12.tgz",
+ "integrity": "sha512-N3zl+lxHCifgIlcMUP5016ESkeQjLj/959RxxNYIthIg+CQHInujFuXeWbWMgnTo4cp5XVHqFPmpyu9J65C1Yg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/darwin-x64": {
+ "version": "0.25.12",
+ "resolved": "https://registry.npmmirror.com/@esbuild/darwin-x64/-/darwin-x64-0.25.12.tgz",
+ "integrity": "sha512-HQ9ka4Kx21qHXwtlTUVbKJOAnmG1ipXhdWTmNXiPzPfWKpXqASVcWdnf2bnL73wgjNrFXAa3yYvBSd9pzfEIpA==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/freebsd-arm64": {
+ "version": "0.25.12",
+ "resolved": "https://registry.npmmirror.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.12.tgz",
+ "integrity": "sha512-gA0Bx759+7Jve03K1S0vkOu5Lg/85dou3EseOGUes8flVOGxbhDDh/iZaoek11Y8mtyKPGF3vP8XhnkDEAmzeg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/freebsd-x64": {
+ "version": "0.25.12",
+ "resolved": "https://registry.npmmirror.com/@esbuild/freebsd-x64/-/freebsd-x64-0.25.12.tgz",
+ "integrity": "sha512-TGbO26Yw2xsHzxtbVFGEXBFH0FRAP7gtcPE7P5yP7wGy7cXK2oO7RyOhL5NLiqTlBh47XhmIUXuGciXEqYFfBQ==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-arm": {
+ "version": "0.25.12",
+ "resolved": "https://registry.npmmirror.com/@esbuild/linux-arm/-/linux-arm-0.25.12.tgz",
+ "integrity": "sha512-lPDGyC1JPDou8kGcywY0YILzWlhhnRjdof3UlcoqYmS9El818LLfJJc3PXXgZHrHCAKs/Z2SeZtDJr5MrkxtOw==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-arm64": {
+ "version": "0.25.12",
+ "resolved": "https://registry.npmmirror.com/@esbuild/linux-arm64/-/linux-arm64-0.25.12.tgz",
+ "integrity": "sha512-8bwX7a8FghIgrupcxb4aUmYDLp8pX06rGh5HqDT7bB+8Rdells6mHvrFHHW2JAOPZUbnjUpKTLg6ECyzvas2AQ==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-ia32": {
+ "version": "0.25.12",
+ "resolved": "https://registry.npmmirror.com/@esbuild/linux-ia32/-/linux-ia32-0.25.12.tgz",
+ "integrity": "sha512-0y9KrdVnbMM2/vG8KfU0byhUN+EFCny9+8g202gYqSSVMonbsCfLjUO+rCci7pM0WBEtz+oK/PIwHkzxkyharA==",
+ "cpu": [
+ "ia32"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-loong64": {
+ "version": "0.25.12",
+ "resolved": "https://registry.npmmirror.com/@esbuild/linux-loong64/-/linux-loong64-0.25.12.tgz",
+ "integrity": "sha512-h///Lr5a9rib/v1GGqXVGzjL4TMvVTv+s1DPoxQdz7l/AYv6LDSxdIwzxkrPW438oUXiDtwM10o9PmwS/6Z0Ng==",
+ "cpu": [
+ "loong64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-mips64el": {
+ "version": "0.25.12",
+ "resolved": "https://registry.npmmirror.com/@esbuild/linux-mips64el/-/linux-mips64el-0.25.12.tgz",
+ "integrity": "sha512-iyRrM1Pzy9GFMDLsXn1iHUm18nhKnNMWscjmp4+hpafcZjrr2WbT//d20xaGljXDBYHqRcl8HnxbX6uaA/eGVw==",
+ "cpu": [
+ "mips64el"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-ppc64": {
+ "version": "0.25.12",
+ "resolved": "https://registry.npmmirror.com/@esbuild/linux-ppc64/-/linux-ppc64-0.25.12.tgz",
+ "integrity": "sha512-9meM/lRXxMi5PSUqEXRCtVjEZBGwB7P/D4yT8UG/mwIdze2aV4Vo6U5gD3+RsoHXKkHCfSxZKzmDssVlRj1QQA==",
+ "cpu": [
+ "ppc64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-riscv64": {
+ "version": "0.25.12",
+ "resolved": "https://registry.npmmirror.com/@esbuild/linux-riscv64/-/linux-riscv64-0.25.12.tgz",
+ "integrity": "sha512-Zr7KR4hgKUpWAwb1f3o5ygT04MzqVrGEGXGLnj15YQDJErYu/BGg+wmFlIDOdJp0PmB0lLvxFIOXZgFRrdjR0w==",
+ "cpu": [
+ "riscv64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-s390x": {
+ "version": "0.25.12",
+ "resolved": "https://registry.npmmirror.com/@esbuild/linux-s390x/-/linux-s390x-0.25.12.tgz",
+ "integrity": "sha512-MsKncOcgTNvdtiISc/jZs/Zf8d0cl/t3gYWX8J9ubBnVOwlk65UIEEvgBORTiljloIWnBzLs4qhzPkJcitIzIg==",
+ "cpu": [
+ "s390x"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-x64": {
+ "version": "0.25.12",
+ "resolved": "https://registry.npmmirror.com/@esbuild/linux-x64/-/linux-x64-0.25.12.tgz",
+ "integrity": "sha512-uqZMTLr/zR/ed4jIGnwSLkaHmPjOjJvnm6TVVitAa08SLS9Z0VM8wIRx7gWbJB5/J54YuIMInDquWyYvQLZkgw==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/netbsd-arm64": {
+ "version": "0.25.12",
+ "resolved": "https://registry.npmmirror.com/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.12.tgz",
+ "integrity": "sha512-xXwcTq4GhRM7J9A8Gv5boanHhRa/Q9KLVmcyXHCTaM4wKfIpWkdXiMog/KsnxzJ0A1+nD+zoecuzqPmCRyBGjg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "netbsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/netbsd-x64": {
+ "version": "0.25.12",
+ "resolved": "https://registry.npmmirror.com/@esbuild/netbsd-x64/-/netbsd-x64-0.25.12.tgz",
+ "integrity": "sha512-Ld5pTlzPy3YwGec4OuHh1aCVCRvOXdH8DgRjfDy/oumVovmuSzWfnSJg+VtakB9Cm0gxNO9BzWkj6mtO1FMXkQ==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "netbsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/openbsd-arm64": {
+ "version": "0.25.12",
+ "resolved": "https://registry.npmmirror.com/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.12.tgz",
+ "integrity": "sha512-fF96T6KsBo/pkQI950FARU9apGNTSlZGsv1jZBAlcLL1MLjLNIWPBkj5NlSz8aAzYKg+eNqknrUJ24QBybeR5A==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "openbsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/openbsd-x64": {
+ "version": "0.25.12",
+ "resolved": "https://registry.npmmirror.com/@esbuild/openbsd-x64/-/openbsd-x64-0.25.12.tgz",
+ "integrity": "sha512-MZyXUkZHjQxUvzK7rN8DJ3SRmrVrke8ZyRusHlP+kuwqTcfWLyqMOE3sScPPyeIXN/mDJIfGXvcMqCgYKekoQw==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "openbsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/openharmony-arm64": {
+ "version": "0.25.12",
+ "resolved": "https://registry.npmmirror.com/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.12.tgz",
+ "integrity": "sha512-rm0YWsqUSRrjncSXGA7Zv78Nbnw4XL6/dzr20cyrQf7ZmRcsovpcRBdhD43Nuk3y7XIoW2OxMVvwuRvk9XdASg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "openharmony"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/sunos-x64": {
+ "version": "0.25.12",
+ "resolved": "https://registry.npmmirror.com/@esbuild/sunos-x64/-/sunos-x64-0.25.12.tgz",
+ "integrity": "sha512-3wGSCDyuTHQUzt0nV7bocDy72r2lI33QL3gkDNGkod22EsYl04sMf0qLb8luNKTOmgF/eDEDP5BFNwoBKH441w==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "sunos"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/win32-arm64": {
+ "version": "0.25.12",
+ "resolved": "https://registry.npmmirror.com/@esbuild/win32-arm64/-/win32-arm64-0.25.12.tgz",
+ "integrity": "sha512-rMmLrur64A7+DKlnSuwqUdRKyd3UE7oPJZmnljqEptesKM8wx9J8gx5u0+9Pq0fQQW8vqeKebwNXdfOyP+8Bsg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/win32-ia32": {
+ "version": "0.25.12",
+ "resolved": "https://registry.npmmirror.com/@esbuild/win32-ia32/-/win32-ia32-0.25.12.tgz",
+ "integrity": "sha512-HkqnmmBoCbCwxUKKNPBixiWDGCpQGVsrQfJoVGYLPT41XWF8lHuE5N6WhVia2n4o5QK5M4tYr21827fNhi4byQ==",
+ "cpu": [
+ "ia32"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/win32-x64": {
+ "version": "0.25.12",
+ "resolved": "https://registry.npmmirror.com/@esbuild/win32-x64/-/win32-x64-0.25.12.tgz",
+ "integrity": "sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@jridgewell/gen-mapping": {
+ "version": "0.3.13",
+ "resolved": "https://registry.npmmirror.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz",
+ "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jridgewell/sourcemap-codec": "^1.5.0",
+ "@jridgewell/trace-mapping": "^0.3.24"
+ }
+ },
+ "node_modules/@jridgewell/remapping": {
+ "version": "2.3.5",
+ "resolved": "https://registry.npmmirror.com/@jridgewell/remapping/-/remapping-2.3.5.tgz",
+ "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jridgewell/gen-mapping": "^0.3.5",
+ "@jridgewell/trace-mapping": "^0.3.24"
+ }
+ },
+ "node_modules/@jridgewell/resolve-uri": {
+ "version": "3.1.2",
+ "resolved": "https://registry.npmmirror.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz",
+ "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6.0.0"
+ }
+ },
+ "node_modules/@jridgewell/sourcemap-codec": {
+ "version": "1.5.5",
+ "resolved": "https://registry.npmmirror.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz",
+ "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/@jridgewell/trace-mapping": {
+ "version": "0.3.31",
+ "resolved": "https://registry.npmmirror.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz",
+ "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jridgewell/resolve-uri": "^3.1.0",
+ "@jridgewell/sourcemap-codec": "^1.4.14"
+ }
+ },
+ "node_modules/@remix-run/router": {
+ "version": "1.23.2",
+ "resolved": "https://registry.npmmirror.com/@remix-run/router/-/router-1.23.2.tgz",
+ "integrity": "sha512-Ic6m2U/rMjTkhERIa/0ZtXJP17QUi2CbWE7cqx4J58M8aA3QTfW+2UlQ4psvTX9IO1RfNVhK3pcpdjej7L+t2w==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=14.0.0"
+ }
+ },
+ "node_modules/@rolldown/pluginutils": {
+ "version": "1.0.0-beta.27",
+ "resolved": "https://registry.npmmirror.com/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.27.tgz",
+ "integrity": "sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/@rollup/rollup-android-arm-eabi": {
+ "version": "4.59.0",
+ "resolved": "https://registry.npmmirror.com/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.59.0.tgz",
+ "integrity": "sha512-upnNBkA6ZH2VKGcBj9Fyl9IGNPULcjXRlg0LLeaioQWueH30p6IXtJEbKAgvyv+mJaMxSm1l6xwDXYjpEMiLMg==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ]
+ },
+ "node_modules/@rollup/rollup-android-arm64": {
+ "version": "4.59.0",
+ "resolved": "https://registry.npmmirror.com/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.59.0.tgz",
+ "integrity": "sha512-hZ+Zxj3SySm4A/DylsDKZAeVg0mvi++0PYVceVyX7hemkw7OreKdCvW2oQ3T1FMZvCaQXqOTHb8qmBShoqk69Q==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ]
+ },
+ "node_modules/@rollup/rollup-darwin-arm64": {
+ "version": "4.59.0",
+ "resolved": "https://registry.npmmirror.com/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.59.0.tgz",
+ "integrity": "sha512-W2Psnbh1J8ZJw0xKAd8zdNgF9HRLkdWwwdWqubSVk0pUuQkoHnv7rx4GiF9rT4t5DIZGAsConRE3AxCdJ4m8rg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ]
+ },
+ "node_modules/@rollup/rollup-darwin-x64": {
+ "version": "4.59.0",
+ "resolved": "https://registry.npmmirror.com/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.59.0.tgz",
+ "integrity": "sha512-ZW2KkwlS4lwTv7ZVsYDiARfFCnSGhzYPdiOU4IM2fDbL+QGlyAbjgSFuqNRbSthybLbIJ915UtZBtmuLrQAT/w==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ]
+ },
+ "node_modules/@rollup/rollup-freebsd-arm64": {
+ "version": "4.59.0",
+ "resolved": "https://registry.npmmirror.com/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.59.0.tgz",
+ "integrity": "sha512-EsKaJ5ytAu9jI3lonzn3BgG8iRBjV4LxZexygcQbpiU0wU0ATxhNVEpXKfUa0pS05gTcSDMKpn3Sx+QB9RlTTA==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ]
+ },
+ "node_modules/@rollup/rollup-freebsd-x64": {
+ "version": "4.59.0",
+ "resolved": "https://registry.npmmirror.com/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.59.0.tgz",
+ "integrity": "sha512-d3DuZi2KzTMjImrxoHIAODUZYoUUMsuUiY4SRRcJy6NJoZ6iIqWnJu9IScV9jXysyGMVuW+KNzZvBLOcpdl3Vg==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-arm-gnueabihf": {
+ "version": "4.59.0",
+ "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.59.0.tgz",
+ "integrity": "sha512-t4ONHboXi/3E0rT6OZl1pKbl2Vgxf9vJfWgmUoCEVQVxhW6Cw/c8I6hbbu7DAvgp82RKiH7TpLwxnJeKv2pbsw==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-arm-musleabihf": {
+ "version": "4.59.0",
+ "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.59.0.tgz",
+ "integrity": "sha512-CikFT7aYPA2ufMD086cVORBYGHffBo4K8MQ4uPS/ZnY54GKj36i196u8U+aDVT2LX4eSMbyHtyOh7D7Zvk2VvA==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-arm64-gnu": {
+ "version": "4.59.0",
+ "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.59.0.tgz",
+ "integrity": "sha512-jYgUGk5aLd1nUb1CtQ8E+t5JhLc9x5WdBKew9ZgAXg7DBk0ZHErLHdXM24rfX+bKrFe+Xp5YuJo54I5HFjGDAA==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-arm64-musl": {
+ "version": "4.59.0",
+ "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.59.0.tgz",
+ "integrity": "sha512-peZRVEdnFWZ5Bh2KeumKG9ty7aCXzzEsHShOZEFiCQlDEepP1dpUl/SrUNXNg13UmZl+gzVDPsiCwnV1uI0RUA==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-loong64-gnu": {
+ "version": "4.59.0",
+ "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.59.0.tgz",
+ "integrity": "sha512-gbUSW/97f7+r4gHy3Jlup8zDG190AuodsWnNiXErp9mT90iCy9NKKU0Xwx5k8VlRAIV2uU9CsMnEFg/xXaOfXg==",
+ "cpu": [
+ "loong64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-loong64-musl": {
+ "version": "4.59.0",
+ "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.59.0.tgz",
+ "integrity": "sha512-yTRONe79E+o0FWFijasoTjtzG9EBedFXJMl888NBEDCDV9I2wGbFFfJQQe63OijbFCUZqxpHz1GzpbtSFikJ4Q==",
+ "cpu": [
+ "loong64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-ppc64-gnu": {
+ "version": "4.59.0",
+ "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.59.0.tgz",
+ "integrity": "sha512-sw1o3tfyk12k3OEpRddF68a1unZ5VCN7zoTNtSn2KndUE+ea3m3ROOKRCZxEpmT9nsGnogpFP9x6mnLTCaoLkA==",
+ "cpu": [
+ "ppc64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-ppc64-musl": {
+ "version": "4.59.0",
+ "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.59.0.tgz",
+ "integrity": "sha512-+2kLtQ4xT3AiIxkzFVFXfsmlZiG5FXYW7ZyIIvGA7Bdeuh9Z0aN4hVyXS/G1E9bTP/vqszNIN/pUKCk/BTHsKA==",
+ "cpu": [
+ "ppc64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-riscv64-gnu": {
+ "version": "4.59.0",
+ "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.59.0.tgz",
+ "integrity": "sha512-NDYMpsXYJJaj+I7UdwIuHHNxXZ/b/N2hR15NyH3m2qAtb/hHPA4g4SuuvrdxetTdndfj9b1WOmy73kcPRoERUg==",
+ "cpu": [
+ "riscv64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-riscv64-musl": {
+ "version": "4.59.0",
+ "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.59.0.tgz",
+ "integrity": "sha512-nLckB8WOqHIf1bhymk+oHxvM9D3tyPndZH8i8+35p/1YiVoVswPid2yLzgX7ZJP0KQvnkhM4H6QZ5m0LzbyIAg==",
+ "cpu": [
+ "riscv64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-s390x-gnu": {
+ "version": "4.59.0",
+ "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.59.0.tgz",
+ "integrity": "sha512-oF87Ie3uAIvORFBpwnCvUzdeYUqi2wY6jRFWJAy1qus/udHFYIkplYRW+wo+GRUP4sKzYdmE1Y3+rY5Gc4ZO+w==",
+ "cpu": [
+ "s390x"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-x64-gnu": {
+ "version": "4.59.0",
+ "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.59.0.tgz",
+ "integrity": "sha512-3AHmtQq/ppNuUspKAlvA8HtLybkDflkMuLK4DPo77DfthRb71V84/c4MlWJXixZz4uruIH4uaa07IqoAkG64fg==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-x64-musl": {
+ "version": "4.59.0",
+ "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.59.0.tgz",
+ "integrity": "sha512-2UdiwS/9cTAx7qIUZB/fWtToJwvt0Vbo0zmnYt7ED35KPg13Q0ym1g442THLC7VyI6JfYTP4PiSOWyoMdV2/xg==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-openbsd-x64": {
+ "version": "4.59.0",
+ "resolved": "https://registry.npmmirror.com/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.59.0.tgz",
+ "integrity": "sha512-M3bLRAVk6GOwFlPTIxVBSYKUaqfLrn8l0psKinkCFxl4lQvOSz8ZrKDz2gxcBwHFpci0B6rttydI4IpS4IS/jQ==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "openbsd"
+ ]
+ },
+ "node_modules/@rollup/rollup-openharmony-arm64": {
+ "version": "4.59.0",
+ "resolved": "https://registry.npmmirror.com/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.59.0.tgz",
+ "integrity": "sha512-tt9KBJqaqp5i5HUZzoafHZX8b5Q2Fe7UjYERADll83O4fGqJ49O1FsL6LpdzVFQcpwvnyd0i+K/VSwu/o/nWlA==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "openharmony"
+ ]
+ },
+ "node_modules/@rollup/rollup-win32-arm64-msvc": {
+ "version": "4.59.0",
+ "resolved": "https://registry.npmmirror.com/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.59.0.tgz",
+ "integrity": "sha512-V5B6mG7OrGTwnxaNUzZTDTjDS7F75PO1ae6MJYdiMu60sq0CqN5CVeVsbhPxalupvTX8gXVSU9gq+Rx1/hvu6A==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ]
+ },
+ "node_modules/@rollup/rollup-win32-ia32-msvc": {
+ "version": "4.59.0",
+ "resolved": "https://registry.npmmirror.com/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.59.0.tgz",
+ "integrity": "sha512-UKFMHPuM9R0iBegwzKF4y0C4J9u8C6MEJgFuXTBerMk7EJ92GFVFYBfOZaSGLu6COf7FxpQNqhNS4c4icUPqxA==",
+ "cpu": [
+ "ia32"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ]
+ },
+ "node_modules/@rollup/rollup-win32-x64-gnu": {
+ "version": "4.59.0",
+ "resolved": "https://registry.npmmirror.com/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.59.0.tgz",
+ "integrity": "sha512-laBkYlSS1n2L8fSo1thDNGrCTQMmxjYY5G0WFWjFFYZkKPjsMBsgJfGf4TLxXrF6RyhI60L8TMOjBMvXiTcxeA==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ]
+ },
+ "node_modules/@rollup/rollup-win32-x64-msvc": {
+ "version": "4.59.0",
+ "resolved": "https://registry.npmmirror.com/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.59.0.tgz",
+ "integrity": "sha512-2HRCml6OztYXyJXAvdDXPKcawukWY2GpR5/nxKp4iBgiO3wcoEGkAaqctIbZcNB6KlUQBIqt8VYkNSj2397EfA==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ]
+ },
+ "node_modules/@testing-library/dom": {
+ "version": "10.4.1",
+ "resolved": "https://registry.npmmirror.com/@testing-library/dom/-/dom-10.4.1.tgz",
+ "integrity": "sha512-o4PXJQidqJl82ckFaXUeoAW+XysPLauYI43Abki5hABd853iMhitooc6znOnczgbTYmEP6U6/y1ZyKAIsvMKGg==",
+ "dev": true,
+ "license": "MIT",
+ "peer": true,
+ "dependencies": {
+ "@babel/code-frame": "^7.10.4",
+ "@babel/runtime": "^7.12.5",
+ "@types/aria-query": "^5.0.1",
+ "aria-query": "5.3.0",
+ "dom-accessibility-api": "^0.5.9",
+ "lz-string": "^1.5.0",
+ "picocolors": "1.1.1",
+ "pretty-format": "^27.0.2"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@testing-library/jest-dom": {
+ "version": "6.9.1",
+ "resolved": "https://registry.npmmirror.com/@testing-library/jest-dom/-/jest-dom-6.9.1.tgz",
+ "integrity": "sha512-zIcONa+hVtVSSep9UT3jZ5rizo2BsxgyDYU7WFD5eICBE7no3881HGeb/QkGfsJs6JTkY1aQhT7rIPC7e+0nnA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@adobe/css-tools": "^4.4.0",
+ "aria-query": "^5.0.0",
+ "css.escape": "^1.5.1",
+ "dom-accessibility-api": "^0.6.3",
+ "picocolors": "^1.1.1",
+ "redent": "^3.0.0"
+ },
+ "engines": {
+ "node": ">=14",
+ "npm": ">=6",
+ "yarn": ">=1"
+ }
+ },
+ "node_modules/@testing-library/jest-dom/node_modules/dom-accessibility-api": {
+ "version": "0.6.3",
+ "resolved": "https://registry.npmmirror.com/dom-accessibility-api/-/dom-accessibility-api-0.6.3.tgz",
+ "integrity": "sha512-7ZgogeTnjuHbo+ct10G9Ffp0mif17idi0IyWNVA/wcwcm7NPOD/WEHVP3n7n3MhXqxoIYm8d6MuZohYWIZ4T3w==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/@testing-library/react": {
+ "version": "16.3.2",
+ "resolved": "https://registry.npmmirror.com/@testing-library/react/-/react-16.3.2.tgz",
+ "integrity": "sha512-XU5/SytQM+ykqMnAnvB2umaJNIOsLF3PVv//1Ew4CTcpz0/BRyy/af40qqrt7SjKpDdT1saBMc42CUok5gaw+g==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/runtime": "^7.12.5"
+ },
+ "engines": {
+ "node": ">=18"
+ },
+ "peerDependencies": {
+ "@testing-library/dom": "^10.0.0",
+ "@types/react": "^18.0.0 || ^19.0.0",
+ "@types/react-dom": "^18.0.0 || ^19.0.0",
+ "react": "^18.0.0 || ^19.0.0",
+ "react-dom": "^18.0.0 || ^19.0.0"
+ },
+ "peerDependenciesMeta": {
+ "@types/react": {
+ "optional": true
+ },
+ "@types/react-dom": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/@testing-library/user-event": {
+ "version": "14.6.1",
+ "resolved": "https://registry.npmmirror.com/@testing-library/user-event/-/user-event-14.6.1.tgz",
+ "integrity": "sha512-vq7fv0rnt+QTXgPxr5Hjc210p6YKq2kmdziLgnsZGgLJ9e6VAShx1pACLuRjd/AS/sr7phAR58OIIpf0LlmQNw==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=12",
+ "npm": ">=6"
+ },
+ "peerDependencies": {
+ "@testing-library/dom": ">=7.21.4"
+ }
+ },
+ "node_modules/@types/aria-query": {
+ "version": "5.0.4",
+ "resolved": "https://registry.npmmirror.com/@types/aria-query/-/aria-query-5.0.4.tgz",
+ "integrity": "sha512-rfT93uj5s0PRL7EzccGMs3brplhcrghnDoV26NqKhCAS1hVo+WdNsPvE/yb6ilfr5hi2MEk6d5EWJTKdxg8jVw==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/@types/babel__core": {
+ "version": "7.20.5",
+ "resolved": "https://registry.npmmirror.com/@types/babel__core/-/babel__core-7.20.5.tgz",
+ "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/parser": "^7.20.7",
+ "@babel/types": "^7.20.7",
+ "@types/babel__generator": "*",
+ "@types/babel__template": "*",
+ "@types/babel__traverse": "*"
+ }
+ },
+ "node_modules/@types/babel__generator": {
+ "version": "7.27.0",
+ "resolved": "https://registry.npmmirror.com/@types/babel__generator/-/babel__generator-7.27.0.tgz",
+ "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/types": "^7.0.0"
+ }
+ },
+ "node_modules/@types/babel__template": {
+ "version": "7.4.4",
+ "resolved": "https://registry.npmmirror.com/@types/babel__template/-/babel__template-7.4.4.tgz",
+ "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/parser": "^7.1.0",
+ "@babel/types": "^7.0.0"
+ }
+ },
+ "node_modules/@types/babel__traverse": {
+ "version": "7.28.0",
+ "resolved": "https://registry.npmmirror.com/@types/babel__traverse/-/babel__traverse-7.28.0.tgz",
+ "integrity": "sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/types": "^7.28.2"
+ }
+ },
+ "node_modules/@types/chai": {
+ "version": "5.2.3",
+ "resolved": "https://registry.npmmirror.com/@types/chai/-/chai-5.2.3.tgz",
+ "integrity": "sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/deep-eql": "*",
+ "assertion-error": "^2.0.1"
+ }
+ },
+ "node_modules/@types/deep-eql": {
+ "version": "4.0.2",
+ "resolved": "https://registry.npmmirror.com/@types/deep-eql/-/deep-eql-4.0.2.tgz",
+ "integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/@types/estree": {
+ "version": "1.0.8",
+ "resolved": "https://registry.npmmirror.com/@types/estree/-/estree-1.0.8.tgz",
+ "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/@types/node": {
+ "version": "22.19.15",
+ "resolved": "https://registry.npmmirror.com/@types/node/-/node-22.19.15.tgz",
+ "integrity": "sha512-F0R/h2+dsy5wJAUe3tAU6oqa2qbWY5TpNfL/RGmo1y38hiyO1w3x2jPtt76wmuaJI4DQnOBu21cNXQ2STIUUWg==",
+ "dev": true,
+ "license": "MIT",
+ "peer": true,
+ "dependencies": {
+ "undici-types": "~6.21.0"
+ }
+ },
+ "node_modules/@types/prop-types": {
+ "version": "15.7.15",
+ "resolved": "https://registry.npmmirror.com/@types/prop-types/-/prop-types-15.7.15.tgz",
+ "integrity": "sha512-F6bEyamV9jKGAFBEmlQnesRPGOQqS2+Uwi0Em15xenOxHaf2hv6L8YCVn3rPdPJOiJfPiCnLIRyvwVaqMY3MIw==",
+ "devOptional": true,
+ "license": "MIT"
+ },
+ "node_modules/@types/react": {
+ "version": "18.3.28",
+ "resolved": "https://registry.npmmirror.com/@types/react/-/react-18.3.28.tgz",
+ "integrity": "sha512-z9VXpC7MWrhfWipitjNdgCauoMLRdIILQsAEV+ZesIzBq/oUlxk0m3ApZuMFCXdnS4U7KrI+l3WRUEGQ8K1QKw==",
+ "devOptional": true,
+ "license": "MIT",
+ "peer": true,
+ "dependencies": {
+ "@types/prop-types": "*",
+ "csstype": "^3.2.2"
+ }
+ },
+ "node_modules/@types/react-dom": {
+ "version": "18.3.7",
+ "resolved": "https://registry.npmmirror.com/@types/react-dom/-/react-dom-18.3.7.tgz",
+ "integrity": "sha512-MEe3UeoENYVFXzoXEWsvcpg6ZvlrFNlOQ7EOsvhI3CfAXwzPfO8Qwuxd40nepsYKqyyVQnTdEfv68q91yLcKrQ==",
+ "dev": true,
+ "license": "MIT",
+ "peer": true,
+ "peerDependencies": {
+ "@types/react": "^18.0.0"
+ }
+ },
+ "node_modules/@vitejs/plugin-react": {
+ "version": "4.7.0",
+ "resolved": "https://registry.npmmirror.com/@vitejs/plugin-react/-/plugin-react-4.7.0.tgz",
+ "integrity": "sha512-gUu9hwfWvvEDBBmgtAowQCojwZmJ5mcLn3aufeCsitijs3+f2NsrPtlAWIR6OPiqljl96GVCUbLe0HyqIpVaoA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/core": "^7.28.0",
+ "@babel/plugin-transform-react-jsx-self": "^7.27.1",
+ "@babel/plugin-transform-react-jsx-source": "^7.27.1",
+ "@rolldown/pluginutils": "1.0.0-beta.27",
+ "@types/babel__core": "^7.20.5",
+ "react-refresh": "^0.17.0"
+ },
+ "engines": {
+ "node": "^14.18.0 || >=16.0.0"
+ },
+ "peerDependencies": {
+ "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0"
+ }
+ },
+ "node_modules/@vitest/expect": {
+ "version": "3.2.4",
+ "resolved": "https://registry.npmmirror.com/@vitest/expect/-/expect-3.2.4.tgz",
+ "integrity": "sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/chai": "^5.2.2",
+ "@vitest/spy": "3.2.4",
+ "@vitest/utils": "3.2.4",
+ "chai": "^5.2.0",
+ "tinyrainbow": "^2.0.0"
+ },
+ "funding": {
+ "url": "https://opencollective.com/vitest"
+ }
+ },
+ "node_modules/@vitest/mocker": {
+ "version": "3.2.4",
+ "resolved": "https://registry.npmmirror.com/@vitest/mocker/-/mocker-3.2.4.tgz",
+ "integrity": "sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@vitest/spy": "3.2.4",
+ "estree-walker": "^3.0.3",
+ "magic-string": "^0.30.17"
+ },
+ "funding": {
+ "url": "https://opencollective.com/vitest"
+ },
+ "peerDependencies": {
+ "msw": "^2.4.9",
+ "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0"
+ },
+ "peerDependenciesMeta": {
+ "msw": {
+ "optional": true
+ },
+ "vite": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/@vitest/pretty-format": {
+ "version": "3.2.4",
+ "resolved": "https://registry.npmmirror.com/@vitest/pretty-format/-/pretty-format-3.2.4.tgz",
+ "integrity": "sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "tinyrainbow": "^2.0.0"
+ },
+ "funding": {
+ "url": "https://opencollective.com/vitest"
+ }
+ },
+ "node_modules/@vitest/runner": {
+ "version": "3.2.4",
+ "resolved": "https://registry.npmmirror.com/@vitest/runner/-/runner-3.2.4.tgz",
+ "integrity": "sha512-oukfKT9Mk41LreEW09vt45f8wx7DordoWUZMYdY/cyAk7w5TWkTRCNZYF7sX7n2wB7jyGAl74OxgwhPgKaqDMQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@vitest/utils": "3.2.4",
+ "pathe": "^2.0.3",
+ "strip-literal": "^3.0.0"
+ },
+ "funding": {
+ "url": "https://opencollective.com/vitest"
+ }
+ },
+ "node_modules/@vitest/snapshot": {
+ "version": "3.2.4",
+ "resolved": "https://registry.npmmirror.com/@vitest/snapshot/-/snapshot-3.2.4.tgz",
+ "integrity": "sha512-dEYtS7qQP2CjU27QBC5oUOxLE/v5eLkGqPE0ZKEIDGMs4vKWe7IjgLOeauHsR0D5YuuycGRO5oSRXnwnmA78fQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@vitest/pretty-format": "3.2.4",
+ "magic-string": "^0.30.17",
+ "pathe": "^2.0.3"
+ },
+ "funding": {
+ "url": "https://opencollective.com/vitest"
+ }
+ },
+ "node_modules/@vitest/spy": {
+ "version": "3.2.4",
+ "resolved": "https://registry.npmmirror.com/@vitest/spy/-/spy-3.2.4.tgz",
+ "integrity": "sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "tinyspy": "^4.0.3"
+ },
+ "funding": {
+ "url": "https://opencollective.com/vitest"
+ }
+ },
+ "node_modules/@vitest/utils": {
+ "version": "3.2.4",
+ "resolved": "https://registry.npmmirror.com/@vitest/utils/-/utils-3.2.4.tgz",
+ "integrity": "sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@vitest/pretty-format": "3.2.4",
+ "loupe": "^3.1.4",
+ "tinyrainbow": "^2.0.0"
+ },
+ "funding": {
+ "url": "https://opencollective.com/vitest"
+ }
+ },
+ "node_modules/agent-base": {
+ "version": "7.1.4",
+ "resolved": "https://registry.npmmirror.com/agent-base/-/agent-base-7.1.4.tgz",
+ "integrity": "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">= 14"
+ }
+ },
+ "node_modules/ansi-regex": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmmirror.com/ansi-regex/-/ansi-regex-5.0.1.tgz",
+ "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/ansi-styles": {
+ "version": "5.2.0",
+ "resolved": "https://registry.npmmirror.com/ansi-styles/-/ansi-styles-5.2.0.tgz",
+ "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/ansi-styles?sponsor=1"
+ }
+ },
+ "node_modules/aria-query": {
+ "version": "5.3.0",
+ "resolved": "https://registry.npmmirror.com/aria-query/-/aria-query-5.3.0.tgz",
+ "integrity": "sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "dependencies": {
+ "dequal": "^2.0.3"
+ }
+ },
+ "node_modules/assertion-error": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmmirror.com/assertion-error/-/assertion-error-2.0.1.tgz",
+ "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/asynckit": {
+ "version": "0.4.0",
+ "resolved": "https://registry.npmmirror.com/asynckit/-/asynckit-0.4.0.tgz",
+ "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==",
+ "license": "MIT"
+ },
+ "node_modules/axios": {
+ "version": "1.13.6",
+ "resolved": "https://registry.npmmirror.com/axios/-/axios-1.13.6.tgz",
+ "integrity": "sha512-ChTCHMouEe2kn713WHbQGcuYrr6fXTBiu460OTwWrWob16g1bXn4vtz07Ope7ewMozJAnEquLk5lWQWtBig9DQ==",
+ "license": "MIT",
+ "dependencies": {
+ "follow-redirects": "^1.15.11",
+ "form-data": "^4.0.5",
+ "proxy-from-env": "^1.1.0"
+ }
+ },
+ "node_modules/b-tween": {
+ "version": "0.3.3",
+ "resolved": "https://registry.npmmirror.com/b-tween/-/b-tween-0.3.3.tgz",
+ "integrity": "sha512-oEHegcRpA7fAuc9KC4nktucuZn2aS8htymCPcP3qkEGPqiBH+GfqtqoG2l7LxHngg6O0HFM7hOeOYExl1Oz4ZA==",
+ "license": "MIT"
+ },
+ "node_modules/b-validate": {
+ "version": "1.5.3",
+ "resolved": "https://registry.npmmirror.com/b-validate/-/b-validate-1.5.3.tgz",
+ "integrity": "sha512-iCvCkGFskbaYtfQ0a3GmcQCHl/Sv1GufXFGuUQ+FE+WJa7A/espLOuFIn09B944V8/ImPj71T4+rTASxO2PAuA==",
+ "license": "MIT"
+ },
+ "node_modules/baseline-browser-mapping": {
+ "version": "2.10.0",
+ "resolved": "https://registry.npmmirror.com/baseline-browser-mapping/-/baseline-browser-mapping-2.10.0.tgz",
+ "integrity": "sha512-lIyg0szRfYbiy67j9KN8IyeD7q7hcmqnJ1ddWmNt19ItGpNN64mnllmxUNFIOdOm6by97jlL6wfpTTJrmnjWAA==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "bin": {
+ "baseline-browser-mapping": "dist/cli.cjs"
+ },
+ "engines": {
+ "node": ">=6.0.0"
+ }
+ },
+ "node_modules/browserslist": {
+ "version": "4.28.1",
+ "resolved": "https://registry.npmmirror.com/browserslist/-/browserslist-4.28.1.tgz",
+ "integrity": "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/browserslist"
+ },
+ {
+ "type": "tidelift",
+ "url": "https://tidelift.com/funding/github/npm/browserslist"
+ },
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/ai"
+ }
+ ],
+ "license": "MIT",
+ "peer": true,
+ "dependencies": {
+ "baseline-browser-mapping": "^2.9.0",
+ "caniuse-lite": "^1.0.30001759",
+ "electron-to-chromium": "^1.5.263",
+ "node-releases": "^2.0.27",
+ "update-browserslist-db": "^1.2.0"
+ },
+ "bin": {
+ "browserslist": "cli.js"
+ },
+ "engines": {
+ "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7"
+ }
+ },
+ "node_modules/cac": {
+ "version": "6.7.14",
+ "resolved": "https://registry.npmmirror.com/cac/-/cac-6.7.14.tgz",
+ "integrity": "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/call-bind-apply-helpers": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmmirror.com/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz",
+ "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==",
+ "license": "MIT",
+ "dependencies": {
+ "es-errors": "^1.3.0",
+ "function-bind": "^1.1.2"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/caniuse-lite": {
+ "version": "1.0.30001777",
+ "resolved": "https://registry.npmmirror.com/caniuse-lite/-/caniuse-lite-1.0.30001777.tgz",
+ "integrity": "sha512-tmN+fJxroPndC74efCdp12j+0rk0RHwV5Jwa1zWaFVyw2ZxAuPeG8ZgWC3Wz7uSjT3qMRQ5XHZ4COgQmsCMJAQ==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/browserslist"
+ },
+ {
+ "type": "tidelift",
+ "url": "https://tidelift.com/funding/github/npm/caniuse-lite"
+ },
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/ai"
+ }
+ ],
+ "license": "CC-BY-4.0"
+ },
+ "node_modules/chai": {
+ "version": "5.3.3",
+ "resolved": "https://registry.npmmirror.com/chai/-/chai-5.3.3.tgz",
+ "integrity": "sha512-4zNhdJD/iOjSH0A05ea+Ke6MU5mmpQcbQsSOkgdaUMJ9zTlDTD/GYlwohmIE2u0gaxHYiVHEn1Fw9mZ/ktJWgw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "assertion-error": "^2.0.1",
+ "check-error": "^2.1.1",
+ "deep-eql": "^5.0.1",
+ "loupe": "^3.1.0",
+ "pathval": "^2.0.0"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/check-error": {
+ "version": "2.1.3",
+ "resolved": "https://registry.npmmirror.com/check-error/-/check-error-2.1.3.tgz",
+ "integrity": "sha512-PAJdDJusoxnwm1VwW07VWwUN1sl7smmC3OKggvndJFadxxDRyFJBX/ggnu/KE4kQAB7a3Dp8f/YXC1FlUprWmA==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">= 16"
+ }
+ },
+ "node_modules/color": {
+ "version": "3.2.1",
+ "resolved": "https://registry.npmmirror.com/color/-/color-3.2.1.tgz",
+ "integrity": "sha512-aBl7dZI9ENN6fUGC7mWpMTPNHmWUSNan9tuWN6ahh5ZLNk9baLJOnSMlrQkHcrfFgz2/RigjUVAjdx36VcemKA==",
+ "license": "MIT",
+ "dependencies": {
+ "color-convert": "^1.9.3",
+ "color-string": "^1.6.0"
+ }
+ },
+ "node_modules/color-convert": {
+ "version": "1.9.3",
+ "resolved": "https://registry.npmmirror.com/color-convert/-/color-convert-1.9.3.tgz",
+ "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
+ "license": "MIT",
+ "dependencies": {
+ "color-name": "1.1.3"
+ }
+ },
+ "node_modules/color-name": {
+ "version": "1.1.3",
+ "resolved": "https://registry.npmmirror.com/color-name/-/color-name-1.1.3.tgz",
+ "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==",
+ "license": "MIT"
+ },
+ "node_modules/color-string": {
+ "version": "1.9.1",
+ "resolved": "https://registry.npmmirror.com/color-string/-/color-string-1.9.1.tgz",
+ "integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==",
+ "license": "MIT",
+ "dependencies": {
+ "color-name": "^1.0.0",
+ "simple-swizzle": "^0.2.2"
+ }
+ },
+ "node_modules/combined-stream": {
+ "version": "1.0.8",
+ "resolved": "https://registry.npmmirror.com/combined-stream/-/combined-stream-1.0.8.tgz",
+ "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
+ "license": "MIT",
+ "dependencies": {
+ "delayed-stream": "~1.0.0"
+ },
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
+ "node_modules/compute-scroll-into-view": {
+ "version": "1.0.20",
+ "resolved": "https://registry.npmmirror.com/compute-scroll-into-view/-/compute-scroll-into-view-1.0.20.tgz",
+ "integrity": "sha512-UCB0ioiyj8CRjtrvaceBLqqhZCVP+1B8+NWQhmdsm0VXOJtobBCf1dBQmebCCo34qZmUwZfIH2MZLqNHazrfjg==",
+ "license": "MIT"
+ },
+ "node_modules/convert-source-map": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmmirror.com/convert-source-map/-/convert-source-map-2.0.0.tgz",
+ "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/css.escape": {
+ "version": "1.5.1",
+ "resolved": "https://registry.npmmirror.com/css.escape/-/css.escape-1.5.1.tgz",
+ "integrity": "sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/cssstyle": {
+ "version": "4.6.0",
+ "resolved": "https://registry.npmmirror.com/cssstyle/-/cssstyle-4.6.0.tgz",
+ "integrity": "sha512-2z+rWdzbbSZv6/rhtvzvqeZQHrBaqgogqt85sqFNbabZOuFbCVFb8kPeEtZjiKkbrm395irpNKiYeFeLiQnFPg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@asamuzakjp/css-color": "^3.2.0",
+ "rrweb-cssom": "^0.8.0"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/csstype": {
+ "version": "3.2.3",
+ "resolved": "https://registry.npmmirror.com/csstype/-/csstype-3.2.3.tgz",
+ "integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==",
+ "license": "MIT"
+ },
+ "node_modules/data-urls": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmmirror.com/data-urls/-/data-urls-5.0.0.tgz",
+ "integrity": "sha512-ZYP5VBHshaDAiVZxjbRVcFJpc+4xGgT0bK3vzy1HLN8jTO975HEbuYzZJcHoQEY5K1a0z8YayJkyVETa08eNTg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "whatwg-mimetype": "^4.0.0",
+ "whatwg-url": "^14.0.0"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/dayjs": {
+ "version": "1.11.19",
+ "resolved": "https://registry.npmmirror.com/dayjs/-/dayjs-1.11.19.tgz",
+ "integrity": "sha512-t5EcLVS6QPBNqM2z8fakk/NKel+Xzshgt8FFKAn+qwlD1pzZWxh0nVCrvFK7ZDb6XucZeF9z8C7CBWTRIVApAw==",
+ "license": "MIT"
+ },
+ "node_modules/debug": {
+ "version": "4.4.3",
+ "resolved": "https://registry.npmmirror.com/debug/-/debug-4.4.3.tgz",
+ "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "ms": "^2.1.3"
+ },
+ "engines": {
+ "node": ">=6.0"
+ },
+ "peerDependenciesMeta": {
+ "supports-color": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/decimal.js": {
+ "version": "10.6.0",
+ "resolved": "https://registry.npmmirror.com/decimal.js/-/decimal.js-10.6.0.tgz",
+ "integrity": "sha512-YpgQiITW3JXGntzdUmyUR1V812Hn8T1YVXhCu+wO3OpS4eU9l4YdD3qjyiKdV6mvV29zapkMeD390UVEf2lkUg==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/deep-eql": {
+ "version": "5.0.2",
+ "resolved": "https://registry.npmmirror.com/deep-eql/-/deep-eql-5.0.2.tgz",
+ "integrity": "sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/delayed-stream": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmmirror.com/delayed-stream/-/delayed-stream-1.0.0.tgz",
+ "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.4.0"
+ }
+ },
+ "node_modules/dequal": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmmirror.com/dequal/-/dequal-2.0.3.tgz",
+ "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/detect-node-es": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmmirror.com/detect-node-es/-/detect-node-es-1.1.0.tgz",
+ "integrity": "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==",
+ "license": "MIT"
+ },
+ "node_modules/dom-accessibility-api": {
+ "version": "0.5.16",
+ "resolved": "https://registry.npmmirror.com/dom-accessibility-api/-/dom-accessibility-api-0.5.16.tgz",
+ "integrity": "sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/dom-helpers": {
+ "version": "5.2.1",
+ "resolved": "https://registry.npmmirror.com/dom-helpers/-/dom-helpers-5.2.1.tgz",
+ "integrity": "sha512-nRCa7CK3VTrM2NmGkIy4cbK7IZlgBE/PYMn55rrXefr5xXDP0LdtfPnblFDoVdcAfslJ7or6iqAUnx0CCGIWQA==",
+ "license": "MIT",
+ "dependencies": {
+ "@babel/runtime": "^7.8.7",
+ "csstype": "^3.0.2"
+ }
+ },
+ "node_modules/dunder-proto": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmmirror.com/dunder-proto/-/dunder-proto-1.0.1.tgz",
+ "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==",
+ "license": "MIT",
+ "dependencies": {
+ "call-bind-apply-helpers": "^1.0.1",
+ "es-errors": "^1.3.0",
+ "gopd": "^1.2.0"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/echarts": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmmirror.com/echarts/-/echarts-6.0.0.tgz",
+ "integrity": "sha512-Tte/grDQRiETQP4xz3iZWSvoHrkCQtwqd6hs+mifXcjrCuo2iKWbajFObuLJVBlDIJlOzgQPd1hsaKt/3+OMkQ==",
+ "license": "Apache-2.0",
+ "dependencies": {
+ "tslib": "2.3.0",
+ "zrender": "6.0.0"
+ }
+ },
+ "node_modules/echarts-for-react": {
+ "version": "3.0.6",
+ "resolved": "https://registry.npmmirror.com/echarts-for-react/-/echarts-for-react-3.0.6.tgz",
+ "integrity": "sha512-4zqLgTGWS3JvkQDXjzkR1k1CHRdpd6by0988TWMJgnvDytegWLbeP/VNZmMa+0VJx2eD7Y632bi2JquXDgiGJg==",
+ "license": "MIT",
+ "dependencies": {
+ "fast-deep-equal": "^3.1.3",
+ "size-sensor": "^1.0.1"
+ },
+ "peerDependencies": {
+ "echarts": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0",
+ "react": "^15.0.0 || >=16.0.0"
+ }
+ },
+ "node_modules/echarts/node_modules/tslib": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmmirror.com/tslib/-/tslib-2.3.0.tgz",
+ "integrity": "sha512-N82ooyxVNm6h1riLCoyS9e3fuJ3AMG2zIZs2Gd1ATcSFjSA23Q0fzjjZeh0jbJvWVDZ0cJT8yaNNaaXHzueNjg==",
+ "license": "0BSD"
+ },
+ "node_modules/electron-to-chromium": {
+ "version": "1.5.307",
+ "resolved": "https://registry.npmmirror.com/electron-to-chromium/-/electron-to-chromium-1.5.307.tgz",
+ "integrity": "sha512-5z3uFKBWjiNR44nFcYdkcXjKMbg5KXNdciu7mhTPo9tB7NbqSNP2sSnGR+fqknZSCwKkBN+oxiiajWs4dT6ORg==",
+ "dev": true,
+ "license": "ISC"
+ },
+ "node_modules/entities": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmmirror.com/entities/-/entities-6.0.1.tgz",
+ "integrity": "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==",
+ "dev": true,
+ "license": "BSD-2-Clause",
+ "engines": {
+ "node": ">=0.12"
+ },
+ "funding": {
+ "url": "https://github.com/fb55/entities?sponsor=1"
+ }
+ },
+ "node_modules/es-define-property": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmmirror.com/es-define-property/-/es-define-property-1.0.1.tgz",
+ "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/es-errors": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmmirror.com/es-errors/-/es-errors-1.3.0.tgz",
+ "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/es-module-lexer": {
+ "version": "1.7.0",
+ "resolved": "https://registry.npmmirror.com/es-module-lexer/-/es-module-lexer-1.7.0.tgz",
+ "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/es-object-atoms": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmmirror.com/es-object-atoms/-/es-object-atoms-1.1.1.tgz",
+ "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==",
+ "license": "MIT",
+ "dependencies": {
+ "es-errors": "^1.3.0"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/es-set-tostringtag": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmmirror.com/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz",
+ "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==",
+ "license": "MIT",
+ "dependencies": {
+ "es-errors": "^1.3.0",
+ "get-intrinsic": "^1.2.6",
+ "has-tostringtag": "^1.0.2",
+ "hasown": "^2.0.2"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/esbuild": {
+ "version": "0.25.12",
+ "resolved": "https://registry.npmmirror.com/esbuild/-/esbuild-0.25.12.tgz",
+ "integrity": "sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg==",
+ "dev": true,
+ "hasInstallScript": true,
+ "license": "MIT",
+ "bin": {
+ "esbuild": "bin/esbuild"
+ },
+ "engines": {
+ "node": ">=18"
+ },
+ "optionalDependencies": {
+ "@esbuild/aix-ppc64": "0.25.12",
+ "@esbuild/android-arm": "0.25.12",
+ "@esbuild/android-arm64": "0.25.12",
+ "@esbuild/android-x64": "0.25.12",
+ "@esbuild/darwin-arm64": "0.25.12",
+ "@esbuild/darwin-x64": "0.25.12",
+ "@esbuild/freebsd-arm64": "0.25.12",
+ "@esbuild/freebsd-x64": "0.25.12",
+ "@esbuild/linux-arm": "0.25.12",
+ "@esbuild/linux-arm64": "0.25.12",
+ "@esbuild/linux-ia32": "0.25.12",
+ "@esbuild/linux-loong64": "0.25.12",
+ "@esbuild/linux-mips64el": "0.25.12",
+ "@esbuild/linux-ppc64": "0.25.12",
+ "@esbuild/linux-riscv64": "0.25.12",
+ "@esbuild/linux-s390x": "0.25.12",
+ "@esbuild/linux-x64": "0.25.12",
+ "@esbuild/netbsd-arm64": "0.25.12",
+ "@esbuild/netbsd-x64": "0.25.12",
+ "@esbuild/openbsd-arm64": "0.25.12",
+ "@esbuild/openbsd-x64": "0.25.12",
+ "@esbuild/openharmony-arm64": "0.25.12",
+ "@esbuild/sunos-x64": "0.25.12",
+ "@esbuild/win32-arm64": "0.25.12",
+ "@esbuild/win32-ia32": "0.25.12",
+ "@esbuild/win32-x64": "0.25.12"
+ }
+ },
+ "node_modules/escalade": {
+ "version": "3.2.0",
+ "resolved": "https://registry.npmmirror.com/escalade/-/escalade-3.2.0.tgz",
+ "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/estree-walker": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmmirror.com/estree-walker/-/estree-walker-3.0.3.tgz",
+ "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/estree": "^1.0.0"
+ }
+ },
+ "node_modules/expect-type": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmmirror.com/expect-type/-/expect-type-1.3.0.tgz",
+ "integrity": "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "engines": {
+ "node": ">=12.0.0"
+ }
+ },
+ "node_modules/fast-deep-equal": {
+ "version": "3.1.3",
+ "resolved": "https://registry.npmmirror.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
+ "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
+ "license": "MIT"
+ },
+ "node_modules/fdir": {
+ "version": "6.5.0",
+ "resolved": "https://registry.npmmirror.com/fdir/-/fdir-6.5.0.tgz",
+ "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=12.0.0"
+ },
+ "peerDependencies": {
+ "picomatch": "^3 || ^4"
+ },
+ "peerDependenciesMeta": {
+ "picomatch": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/focus-lock": {
+ "version": "1.3.6",
+ "resolved": "https://registry.npmmirror.com/focus-lock/-/focus-lock-1.3.6.tgz",
+ "integrity": "sha512-Ik/6OCk9RQQ0T5Xw+hKNLWrjSMtv51dD4GRmJjbD5a58TIEpI5a5iXagKVl3Z5UuyslMCA8Xwnu76jQob62Yhg==",
+ "license": "MIT",
+ "dependencies": {
+ "tslib": "^2.0.3"
+ },
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "node_modules/follow-redirects": {
+ "version": "1.15.11",
+ "resolved": "https://registry.npmmirror.com/follow-redirects/-/follow-redirects-1.15.11.tgz",
+ "integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==",
+ "funding": [
+ {
+ "type": "individual",
+ "url": "https://github.com/sponsors/RubenVerborgh"
+ }
+ ],
+ "license": "MIT",
+ "engines": {
+ "node": ">=4.0"
+ },
+ "peerDependenciesMeta": {
+ "debug": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/form-data": {
+ "version": "4.0.5",
+ "resolved": "https://registry.npmmirror.com/form-data/-/form-data-4.0.5.tgz",
+ "integrity": "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==",
+ "license": "MIT",
+ "dependencies": {
+ "asynckit": "^0.4.0",
+ "combined-stream": "^1.0.8",
+ "es-set-tostringtag": "^2.1.0",
+ "hasown": "^2.0.2",
+ "mime-types": "^2.1.12"
+ },
+ "engines": {
+ "node": ">= 6"
+ }
+ },
+ "node_modules/fsevents": {
+ "version": "2.3.3",
+ "resolved": "https://registry.npmmirror.com/fsevents/-/fsevents-2.3.3.tgz",
+ "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
+ "dev": true,
+ "hasInstallScript": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": "^8.16.0 || ^10.6.0 || >=11.0.0"
+ }
+ },
+ "node_modules/function-bind": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmmirror.com/function-bind/-/function-bind-1.1.2.tgz",
+ "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
+ "license": "MIT",
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/gensync": {
+ "version": "1.0.0-beta.2",
+ "resolved": "https://registry.npmmirror.com/gensync/-/gensync-1.0.0-beta.2.tgz",
+ "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/get-intrinsic": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmmirror.com/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
+ "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==",
+ "license": "MIT",
+ "dependencies": {
+ "call-bind-apply-helpers": "^1.0.2",
+ "es-define-property": "^1.0.1",
+ "es-errors": "^1.3.0",
+ "es-object-atoms": "^1.1.1",
+ "function-bind": "^1.1.2",
+ "get-proto": "^1.0.1",
+ "gopd": "^1.2.0",
+ "has-symbols": "^1.1.0",
+ "hasown": "^2.0.2",
+ "math-intrinsics": "^1.1.0"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/get-proto": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmmirror.com/get-proto/-/get-proto-1.0.1.tgz",
+ "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==",
+ "license": "MIT",
+ "dependencies": {
+ "dunder-proto": "^1.0.1",
+ "es-object-atoms": "^1.0.0"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/gopd": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmmirror.com/gopd/-/gopd-1.2.0.tgz",
+ "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/has-symbols": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmmirror.com/has-symbols/-/has-symbols-1.1.0.tgz",
+ "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/has-tostringtag": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmmirror.com/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
+ "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
+ "license": "MIT",
+ "dependencies": {
+ "has-symbols": "^1.0.3"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/hasown": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmmirror.com/hasown/-/hasown-2.0.2.tgz",
+ "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
+ "license": "MIT",
+ "dependencies": {
+ "function-bind": "^1.1.2"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/html-encoding-sniffer": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmmirror.com/html-encoding-sniffer/-/html-encoding-sniffer-4.0.0.tgz",
+ "integrity": "sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "whatwg-encoding": "^3.1.1"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/html-parse-stringify": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmmirror.com/html-parse-stringify/-/html-parse-stringify-3.0.1.tgz",
+ "integrity": "sha512-KknJ50kTInJ7qIScF3jeaFRpMpE8/lfiTdzf/twXyPBLAGrLRTmkz3AdTnKeh40X8k9L2fdYwEp/42WGXIRGcg==",
+ "license": "MIT",
+ "dependencies": {
+ "void-elements": "3.1.0"
+ }
+ },
+ "node_modules/http-proxy-agent": {
+ "version": "7.0.2",
+ "resolved": "https://registry.npmmirror.com/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz",
+ "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "agent-base": "^7.1.0",
+ "debug": "^4.3.4"
+ },
+ "engines": {
+ "node": ">= 14"
+ }
+ },
+ "node_modules/https-proxy-agent": {
+ "version": "7.0.6",
+ "resolved": "https://registry.npmmirror.com/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz",
+ "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "agent-base": "^7.1.2",
+ "debug": "4"
+ },
+ "engines": {
+ "node": ">= 14"
+ }
+ },
+ "node_modules/i18next": {
+ "version": "25.8.14",
+ "resolved": "https://registry.npmmirror.com/i18next/-/i18next-25.8.14.tgz",
+ "integrity": "sha512-paMUYkfWJMsWPeE/Hejcw+XLhHrQPehem+4wMo+uELnvIwvCG019L9sAIljwjCmEMtFQQO3YeitJY8Kctei3iA==",
+ "funding": [
+ {
+ "type": "individual",
+ "url": "https://locize.com"
+ },
+ {
+ "type": "individual",
+ "url": "https://locize.com/i18next.html"
+ },
+ {
+ "type": "individual",
+ "url": "https://www.i18next.com/how-to/faq#i18next-is-awesome.-how-can-i-support-the-project"
+ }
+ ],
+ "license": "MIT",
+ "peer": true,
+ "dependencies": {
+ "@babel/runtime": "^7.28.4"
+ },
+ "peerDependencies": {
+ "typescript": "^5"
+ },
+ "peerDependenciesMeta": {
+ "typescript": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/iconv-lite": {
+ "version": "0.6.3",
+ "resolved": "https://registry.npmmirror.com/iconv-lite/-/iconv-lite-0.6.3.tgz",
+ "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "safer-buffer": ">= 2.1.2 < 3.0.0"
+ },
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/indent-string": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmmirror.com/indent-string/-/indent-string-4.0.0.tgz",
+ "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/is-arrayish": {
+ "version": "0.3.4",
+ "resolved": "https://registry.npmmirror.com/is-arrayish/-/is-arrayish-0.3.4.tgz",
+ "integrity": "sha512-m6UrgzFVUYawGBh1dUsWR5M2Clqic9RVXC/9f8ceNlv2IcO9j9J/z8UoCLPqtsPBFNzEpfR3xftohbfqDx8EQA==",
+ "license": "MIT"
+ },
+ "node_modules/is-potential-custom-element-name": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmmirror.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz",
+ "integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/js-tokens": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmmirror.com/js-tokens/-/js-tokens-4.0.0.tgz",
+ "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==",
+ "license": "MIT"
+ },
+ "node_modules/jsdom": {
+ "version": "26.1.0",
+ "resolved": "https://registry.npmmirror.com/jsdom/-/jsdom-26.1.0.tgz",
+ "integrity": "sha512-Cvc9WUhxSMEo4McES3P7oK3QaXldCfNWp7pl2NNeiIFlCoLr3kfq9kb1fxftiwk1FLV7CvpvDfonxtzUDeSOPg==",
+ "dev": true,
+ "license": "MIT",
+ "peer": true,
+ "dependencies": {
+ "cssstyle": "^4.2.1",
+ "data-urls": "^5.0.0",
+ "decimal.js": "^10.5.0",
+ "html-encoding-sniffer": "^4.0.0",
+ "http-proxy-agent": "^7.0.2",
+ "https-proxy-agent": "^7.0.6",
+ "is-potential-custom-element-name": "^1.0.1",
+ "nwsapi": "^2.2.16",
+ "parse5": "^7.2.1",
+ "rrweb-cssom": "^0.8.0",
+ "saxes": "^6.0.0",
+ "symbol-tree": "^3.2.4",
+ "tough-cookie": "^5.1.1",
+ "w3c-xmlserializer": "^5.0.0",
+ "webidl-conversions": "^7.0.0",
+ "whatwg-encoding": "^3.1.1",
+ "whatwg-mimetype": "^4.0.0",
+ "whatwg-url": "^14.1.1",
+ "ws": "^8.18.0",
+ "xml-name-validator": "^5.0.0"
+ },
+ "engines": {
+ "node": ">=18"
+ },
+ "peerDependencies": {
+ "canvas": "^3.0.0"
+ },
+ "peerDependenciesMeta": {
+ "canvas": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/jsesc": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmmirror.com/jsesc/-/jsesc-3.1.0.tgz",
+ "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==",
+ "dev": true,
+ "license": "MIT",
+ "bin": {
+ "jsesc": "bin/jsesc"
+ },
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/json5": {
+ "version": "2.2.3",
+ "resolved": "https://registry.npmmirror.com/json5/-/json5-2.2.3.tgz",
+ "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==",
+ "dev": true,
+ "license": "MIT",
+ "bin": {
+ "json5": "lib/cli.js"
+ },
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/lodash": {
+ "version": "4.17.23",
+ "resolved": "https://registry.npmmirror.com/lodash/-/lodash-4.17.23.tgz",
+ "integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==",
+ "license": "MIT"
+ },
+ "node_modules/loose-envify": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmmirror.com/loose-envify/-/loose-envify-1.4.0.tgz",
+ "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==",
+ "license": "MIT",
+ "dependencies": {
+ "js-tokens": "^3.0.0 || ^4.0.0"
+ },
+ "bin": {
+ "loose-envify": "cli.js"
+ }
+ },
+ "node_modules/loupe": {
+ "version": "3.2.1",
+ "resolved": "https://registry.npmmirror.com/loupe/-/loupe-3.2.1.tgz",
+ "integrity": "sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/lru-cache": {
+ "version": "5.1.1",
+ "resolved": "https://registry.npmmirror.com/lru-cache/-/lru-cache-5.1.1.tgz",
+ "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "yallist": "^3.0.2"
+ }
+ },
+ "node_modules/lz-string": {
+ "version": "1.5.0",
+ "resolved": "https://registry.npmmirror.com/lz-string/-/lz-string-1.5.0.tgz",
+ "integrity": "sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==",
+ "dev": true,
+ "license": "MIT",
+ "bin": {
+ "lz-string": "bin/bin.js"
+ }
+ },
+ "node_modules/magic-string": {
+ "version": "0.30.21",
+ "resolved": "https://registry.npmmirror.com/magic-string/-/magic-string-0.30.21.tgz",
+ "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jridgewell/sourcemap-codec": "^1.5.5"
+ }
+ },
+ "node_modules/math-intrinsics": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmmirror.com/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
+ "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/mime-db": {
+ "version": "1.52.0",
+ "resolved": "https://registry.npmmirror.com/mime-db/-/mime-db-1.52.0.tgz",
+ "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/mime-types": {
+ "version": "2.1.35",
+ "resolved": "https://registry.npmmirror.com/mime-types/-/mime-types-2.1.35.tgz",
+ "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
+ "license": "MIT",
+ "dependencies": {
+ "mime-db": "1.52.0"
+ },
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/min-indent": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmmirror.com/min-indent/-/min-indent-1.0.1.tgz",
+ "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=4"
+ }
+ },
+ "node_modules/ms": {
+ "version": "2.1.3",
+ "resolved": "https://registry.npmmirror.com/ms/-/ms-2.1.3.tgz",
+ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/nanoid": {
+ "version": "3.3.11",
+ "resolved": "https://registry.npmmirror.com/nanoid/-/nanoid-3.3.11.tgz",
+ "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/ai"
+ }
+ ],
+ "license": "MIT",
+ "bin": {
+ "nanoid": "bin/nanoid.cjs"
+ },
+ "engines": {
+ "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
+ }
+ },
+ "node_modules/node-releases": {
+ "version": "2.0.36",
+ "resolved": "https://registry.npmmirror.com/node-releases/-/node-releases-2.0.36.tgz",
+ "integrity": "sha512-TdC8FSgHz8Mwtw9g5L4gR/Sh9XhSP/0DEkQxfEFXOpiul5IiHgHan2VhYYb6agDSfp4KuvltmGApc8HMgUrIkA==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/number-precision": {
+ "version": "1.6.0",
+ "resolved": "https://registry.npmmirror.com/number-precision/-/number-precision-1.6.0.tgz",
+ "integrity": "sha512-05OLPgbgmnixJw+VvEh18yNPUo3iyp4BEWJcrLu4X9W05KmMifN7Mu5exYvQXqxxeNWhvIF+j3Rij+HmddM/hQ==",
+ "license": "MIT"
+ },
+ "node_modules/nwsapi": {
+ "version": "2.2.23",
+ "resolved": "https://registry.npmmirror.com/nwsapi/-/nwsapi-2.2.23.tgz",
+ "integrity": "sha512-7wfH4sLbt4M0gCDzGE6vzQBo0bfTKjU7Sfpqy/7gs1qBfYz2vEJH6vXcBKpO3+6Yu1telwd0t9HpyOoLEQQbIQ==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/object-assign": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmmirror.com/object-assign/-/object-assign-4.1.1.tgz",
+ "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/parse5": {
+ "version": "7.3.0",
+ "resolved": "https://registry.npmmirror.com/parse5/-/parse5-7.3.0.tgz",
+ "integrity": "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "entities": "^6.0.0"
+ },
+ "funding": {
+ "url": "https://github.com/inikulin/parse5?sponsor=1"
+ }
+ },
+ "node_modules/pathe": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmmirror.com/pathe/-/pathe-2.0.3.tgz",
+ "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/pathval": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmmirror.com/pathval/-/pathval-2.0.1.tgz",
+ "integrity": "sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">= 14.16"
+ }
+ },
+ "node_modules/picocolors": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmmirror.com/picocolors/-/picocolors-1.1.1.tgz",
+ "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==",
+ "dev": true,
+ "license": "ISC"
+ },
+ "node_modules/picomatch": {
+ "version": "4.0.3",
+ "resolved": "https://registry.npmmirror.com/picomatch/-/picomatch-4.0.3.tgz",
+ "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
+ "dev": true,
+ "license": "MIT",
+ "peer": true,
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/jonschlinkert"
+ }
+ },
+ "node_modules/postcss": {
+ "version": "8.5.8",
+ "resolved": "https://registry.npmmirror.com/postcss/-/postcss-8.5.8.tgz",
+ "integrity": "sha512-OW/rX8O/jXnm82Ey1k44pObPtdblfiuWnrd8X7GJ7emImCOstunGbXUpp7HdBrFQX6rJzn3sPT397Wp5aCwCHg==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/postcss/"
+ },
+ {
+ "type": "tidelift",
+ "url": "https://tidelift.com/funding/github/npm/postcss"
+ },
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/ai"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "nanoid": "^3.3.11",
+ "picocolors": "^1.1.1",
+ "source-map-js": "^1.2.1"
+ },
+ "engines": {
+ "node": "^10 || ^12 || >=14"
+ }
+ },
+ "node_modules/pretty-format": {
+ "version": "27.5.1",
+ "resolved": "https://registry.npmmirror.com/pretty-format/-/pretty-format-27.5.1.tgz",
+ "integrity": "sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "ansi-regex": "^5.0.1",
+ "ansi-styles": "^5.0.0",
+ "react-is": "^17.0.1"
+ },
+ "engines": {
+ "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0"
+ }
+ },
+ "node_modules/pretty-format/node_modules/react-is": {
+ "version": "17.0.2",
+ "resolved": "https://registry.npmmirror.com/react-is/-/react-is-17.0.2.tgz",
+ "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/prop-types": {
+ "version": "15.8.1",
+ "resolved": "https://registry.npmmirror.com/prop-types/-/prop-types-15.8.1.tgz",
+ "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==",
+ "license": "MIT",
+ "dependencies": {
+ "loose-envify": "^1.4.0",
+ "object-assign": "^4.1.1",
+ "react-is": "^16.13.1"
+ }
+ },
+ "node_modules/prop-types/node_modules/react-is": {
+ "version": "16.13.1",
+ "resolved": "https://registry.npmmirror.com/react-is/-/react-is-16.13.1.tgz",
+ "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==",
+ "license": "MIT"
+ },
+ "node_modules/proxy-from-env": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmmirror.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
+ "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==",
+ "license": "MIT"
+ },
+ "node_modules/punycode": {
+ "version": "2.3.1",
+ "resolved": "https://registry.npmmirror.com/punycode/-/punycode-2.3.1.tgz",
+ "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/react": {
+ "version": "18.3.1",
+ "resolved": "https://registry.npmmirror.com/react/-/react-18.3.1.tgz",
+ "integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==",
+ "license": "MIT",
+ "peer": true,
+ "dependencies": {
+ "loose-envify": "^1.1.0"
+ },
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/react-clientside-effect": {
+ "version": "1.2.8",
+ "resolved": "https://registry.npmmirror.com/react-clientside-effect/-/react-clientside-effect-1.2.8.tgz",
+ "integrity": "sha512-ma2FePH0z3px2+WOu6h+YycZcEvFmmxIlAb62cF52bG86eMySciO/EQZeQMXd07kPCYB0a1dWDT5J+KE9mCDUw==",
+ "license": "MIT",
+ "dependencies": {
+ "@babel/runtime": "^7.12.13"
+ },
+ "peerDependencies": {
+ "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc"
+ }
+ },
+ "node_modules/react-dom": {
+ "version": "18.3.1",
+ "resolved": "https://registry.npmmirror.com/react-dom/-/react-dom-18.3.1.tgz",
+ "integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==",
+ "license": "MIT",
+ "peer": true,
+ "dependencies": {
+ "loose-envify": "^1.1.0",
+ "scheduler": "^0.23.2"
+ },
+ "peerDependencies": {
+ "react": "^18.3.1"
+ }
+ },
+ "node_modules/react-focus-lock": {
+ "version": "2.13.7",
+ "resolved": "https://registry.npmmirror.com/react-focus-lock/-/react-focus-lock-2.13.7.tgz",
+ "integrity": "sha512-20lpZHEQrXPb+pp1tzd4ULL6DyO5D2KnR0G69tTDdydrmNhU7pdFmbQUYVyHUgp+xN29IuFR0PVuhOmvaZL9Og==",
+ "license": "MIT",
+ "dependencies": {
+ "@babel/runtime": "^7.0.0",
+ "focus-lock": "^1.3.6",
+ "prop-types": "^15.6.2",
+ "react-clientside-effect": "^1.2.7",
+ "use-callback-ref": "^1.3.3",
+ "use-sidecar": "^1.1.3"
+ },
+ "peerDependencies": {
+ "@types/react": "*",
+ "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc"
+ },
+ "peerDependenciesMeta": {
+ "@types/react": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/react-i18next": {
+ "version": "16.5.6",
+ "resolved": "https://registry.npmmirror.com/react-i18next/-/react-i18next-16.5.6.tgz",
+ "integrity": "sha512-Ua7V2/efA88ido7KyK51fb8Ki8M/sRfW8LR/rZ/9ZKr2luhuTI7kwYZN5agT1rWG7aYm5G0RYE/6JR8KJoCMDw==",
+ "license": "MIT",
+ "dependencies": {
+ "@babel/runtime": "^7.28.4",
+ "html-parse-stringify": "^3.0.1",
+ "use-sync-external-store": "^1.6.0"
+ },
+ "peerDependencies": {
+ "i18next": ">= 25.6.2",
+ "react": ">= 16.8.0",
+ "typescript": "^5"
+ },
+ "peerDependenciesMeta": {
+ "react-dom": {
+ "optional": true
+ },
+ "react-native": {
+ "optional": true
+ },
+ "typescript": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/react-is": {
+ "version": "18.3.1",
+ "resolved": "https://registry.npmmirror.com/react-is/-/react-is-18.3.1.tgz",
+ "integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==",
+ "license": "MIT"
+ },
+ "node_modules/react-refresh": {
+ "version": "0.17.0",
+ "resolved": "https://registry.npmmirror.com/react-refresh/-/react-refresh-0.17.0.tgz",
+ "integrity": "sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/react-router": {
+ "version": "6.30.3",
+ "resolved": "https://registry.npmmirror.com/react-router/-/react-router-6.30.3.tgz",
+ "integrity": "sha512-XRnlbKMTmktBkjCLE8/XcZFlnHvr2Ltdr1eJX4idL55/9BbORzyZEaIkBFDhFGCEWBBItsVrDxwx3gnisMitdw==",
+ "license": "MIT",
+ "dependencies": {
+ "@remix-run/router": "1.23.2"
+ },
+ "engines": {
+ "node": ">=14.0.0"
+ },
+ "peerDependencies": {
+ "react": ">=16.8"
+ }
+ },
+ "node_modules/react-router-dom": {
+ "version": "6.30.3",
+ "resolved": "https://registry.npmmirror.com/react-router-dom/-/react-router-dom-6.30.3.tgz",
+ "integrity": "sha512-pxPcv1AczD4vso7G4Z3TKcvlxK7g7TNt3/FNGMhfqyntocvYKj+GCatfigGDjbLozC4baguJ0ReCigoDJXb0ag==",
+ "license": "MIT",
+ "dependencies": {
+ "@remix-run/router": "1.23.2",
+ "react-router": "6.30.3"
+ },
+ "engines": {
+ "node": ">=14.0.0"
+ },
+ "peerDependencies": {
+ "react": ">=16.8",
+ "react-dom": ">=16.8"
+ }
+ },
+ "node_modules/react-transition-group": {
+ "version": "4.4.5",
+ "resolved": "https://registry.npmmirror.com/react-transition-group/-/react-transition-group-4.4.5.tgz",
+ "integrity": "sha512-pZcd1MCJoiKiBR2NRxeCRg13uCXbydPnmB4EOeRrY7480qNWO8IIgQG6zlDkm6uRMsURXPuKq0GWtiM59a5Q6g==",
+ "license": "BSD-3-Clause",
+ "dependencies": {
+ "@babel/runtime": "^7.5.5",
+ "dom-helpers": "^5.0.1",
+ "loose-envify": "^1.4.0",
+ "prop-types": "^15.6.2"
+ },
+ "peerDependencies": {
+ "react": ">=16.6.0",
+ "react-dom": ">=16.6.0"
+ }
+ },
+ "node_modules/redent": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmmirror.com/redent/-/redent-3.0.0.tgz",
+ "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "indent-string": "^4.0.0",
+ "strip-indent": "^3.0.0"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/resize-observer-polyfill": {
+ "version": "1.5.1",
+ "resolved": "https://registry.npmmirror.com/resize-observer-polyfill/-/resize-observer-polyfill-1.5.1.tgz",
+ "integrity": "sha512-LwZrotdHOo12nQuZlHEmtuXdqGoOD0OhaxopaNFxWzInpEgaLWoVuAMbTzixuosCx2nEG58ngzW3vxdWoxIgdg==",
+ "license": "MIT"
+ },
+ "node_modules/rollup": {
+ "version": "4.59.0",
+ "resolved": "https://registry.npmmirror.com/rollup/-/rollup-4.59.0.tgz",
+ "integrity": "sha512-2oMpl67a3zCH9H79LeMcbDhXW/UmWG/y2zuqnF2jQq5uq9TbM9TVyXvA4+t+ne2IIkBdrLpAaRQAvo7YI/Yyeg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/estree": "1.0.8"
+ },
+ "bin": {
+ "rollup": "dist/bin/rollup"
+ },
+ "engines": {
+ "node": ">=18.0.0",
+ "npm": ">=8.0.0"
+ },
+ "optionalDependencies": {
+ "@rollup/rollup-android-arm-eabi": "4.59.0",
+ "@rollup/rollup-android-arm64": "4.59.0",
+ "@rollup/rollup-darwin-arm64": "4.59.0",
+ "@rollup/rollup-darwin-x64": "4.59.0",
+ "@rollup/rollup-freebsd-arm64": "4.59.0",
+ "@rollup/rollup-freebsd-x64": "4.59.0",
+ "@rollup/rollup-linux-arm-gnueabihf": "4.59.0",
+ "@rollup/rollup-linux-arm-musleabihf": "4.59.0",
+ "@rollup/rollup-linux-arm64-gnu": "4.59.0",
+ "@rollup/rollup-linux-arm64-musl": "4.59.0",
+ "@rollup/rollup-linux-loong64-gnu": "4.59.0",
+ "@rollup/rollup-linux-loong64-musl": "4.59.0",
+ "@rollup/rollup-linux-ppc64-gnu": "4.59.0",
+ "@rollup/rollup-linux-ppc64-musl": "4.59.0",
+ "@rollup/rollup-linux-riscv64-gnu": "4.59.0",
+ "@rollup/rollup-linux-riscv64-musl": "4.59.0",
+ "@rollup/rollup-linux-s390x-gnu": "4.59.0",
+ "@rollup/rollup-linux-x64-gnu": "4.59.0",
+ "@rollup/rollup-linux-x64-musl": "4.59.0",
+ "@rollup/rollup-openbsd-x64": "4.59.0",
+ "@rollup/rollup-openharmony-arm64": "4.59.0",
+ "@rollup/rollup-win32-arm64-msvc": "4.59.0",
+ "@rollup/rollup-win32-ia32-msvc": "4.59.0",
+ "@rollup/rollup-win32-x64-gnu": "4.59.0",
+ "@rollup/rollup-win32-x64-msvc": "4.59.0",
+ "fsevents": "~2.3.2"
+ }
+ },
+ "node_modules/rrweb-cssom": {
+ "version": "0.8.0",
+ "resolved": "https://registry.npmmirror.com/rrweb-cssom/-/rrweb-cssom-0.8.0.tgz",
+ "integrity": "sha512-guoltQEx+9aMf2gDZ0s62EcV8lsXR+0w8915TC3ITdn2YueuNjdAYh/levpU9nFaoChh9RUS5ZdQMrKfVEN9tw==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/safer-buffer": {
+ "version": "2.1.2",
+ "resolved": "https://registry.npmmirror.com/safer-buffer/-/safer-buffer-2.1.2.tgz",
+ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/saxes": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmmirror.com/saxes/-/saxes-6.0.0.tgz",
+ "integrity": "sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA==",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "xmlchars": "^2.2.0"
+ },
+ "engines": {
+ "node": ">=v12.22.7"
+ }
+ },
+ "node_modules/scheduler": {
+ "version": "0.23.2",
+ "resolved": "https://registry.npmmirror.com/scheduler/-/scheduler-0.23.2.tgz",
+ "integrity": "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==",
+ "license": "MIT",
+ "dependencies": {
+ "loose-envify": "^1.1.0"
+ }
+ },
+ "node_modules/scroll-into-view-if-needed": {
+ "version": "2.2.31",
+ "resolved": "https://registry.npmmirror.com/scroll-into-view-if-needed/-/scroll-into-view-if-needed-2.2.31.tgz",
+ "integrity": "sha512-dGCXy99wZQivjmjIqihaBQNjryrz5rueJY7eHfTdyWEiR4ttYpsajb14rn9s5d4DY4EcY6+4+U/maARBXJedkA==",
+ "license": "MIT",
+ "dependencies": {
+ "compute-scroll-into-view": "^1.0.20"
+ }
+ },
+ "node_modules/semver": {
+ "version": "6.3.1",
+ "resolved": "https://registry.npmmirror.com/semver/-/semver-6.3.1.tgz",
+ "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
+ "dev": true,
+ "license": "ISC",
+ "bin": {
+ "semver": "bin/semver.js"
+ }
+ },
+ "node_modules/shallowequal": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmmirror.com/shallowequal/-/shallowequal-1.1.0.tgz",
+ "integrity": "sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ==",
+ "license": "MIT"
+ },
+ "node_modules/siginfo": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmmirror.com/siginfo/-/siginfo-2.0.0.tgz",
+ "integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==",
+ "dev": true,
+ "license": "ISC"
+ },
+ "node_modules/simple-swizzle": {
+ "version": "0.2.4",
+ "resolved": "https://registry.npmmirror.com/simple-swizzle/-/simple-swizzle-0.2.4.tgz",
+ "integrity": "sha512-nAu1WFPQSMNr2Zn9PGSZK9AGn4t/y97lEm+MXTtUDwfP0ksAIX4nO+6ruD9Jwut4C49SB1Ws+fbXsm/yScWOHw==",
+ "license": "MIT",
+ "dependencies": {
+ "is-arrayish": "^0.3.1"
+ }
+ },
+ "node_modules/size-sensor": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmmirror.com/size-sensor/-/size-sensor-1.0.3.tgz",
+ "integrity": "sha512-+k9mJ2/rQMiRmQUcjn+qznch260leIXY8r4FyYKKyRBO/s5UoeMAHGkCJyE1R/4wrIhTJONfyloY55SkE7ve3A==",
+ "license": "ISC"
+ },
+ "node_modules/source-map-js": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmmirror.com/source-map-js/-/source-map-js-1.2.1.tgz",
+ "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==",
+ "dev": true,
+ "license": "BSD-3-Clause",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/stackback": {
+ "version": "0.0.2",
+ "resolved": "https://registry.npmmirror.com/stackback/-/stackback-0.0.2.tgz",
+ "integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/std-env": {
+ "version": "3.10.0",
+ "resolved": "https://registry.npmmirror.com/std-env/-/std-env-3.10.0.tgz",
+ "integrity": "sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/strip-indent": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmmirror.com/strip-indent/-/strip-indent-3.0.0.tgz",
+ "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "min-indent": "^1.0.0"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/strip-literal": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmmirror.com/strip-literal/-/strip-literal-3.1.0.tgz",
+ "integrity": "sha512-8r3mkIM/2+PpjHoOtiAW8Rg3jJLHaV7xPwG+YRGrv6FP0wwk/toTpATxWYOW0BKdWwl82VT2tFYi5DlROa0Mxg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "js-tokens": "^9.0.1"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/antfu"
+ }
+ },
+ "node_modules/strip-literal/node_modules/js-tokens": {
+ "version": "9.0.1",
+ "resolved": "https://registry.npmmirror.com/js-tokens/-/js-tokens-9.0.1.tgz",
+ "integrity": "sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/symbol-tree": {
+ "version": "3.2.4",
+ "resolved": "https://registry.npmmirror.com/symbol-tree/-/symbol-tree-3.2.4.tgz",
+ "integrity": "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/tinybench": {
+ "version": "2.9.0",
+ "resolved": "https://registry.npmmirror.com/tinybench/-/tinybench-2.9.0.tgz",
+ "integrity": "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/tinyexec": {
+ "version": "0.3.2",
+ "resolved": "https://registry.npmmirror.com/tinyexec/-/tinyexec-0.3.2.tgz",
+ "integrity": "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/tinyglobby": {
+ "version": "0.2.15",
+ "resolved": "https://registry.npmmirror.com/tinyglobby/-/tinyglobby-0.2.15.tgz",
+ "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "fdir": "^6.5.0",
+ "picomatch": "^4.0.3"
+ },
+ "engines": {
+ "node": ">=12.0.0"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/SuperchupuDev"
+ }
+ },
+ "node_modules/tinypool": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmmirror.com/tinypool/-/tinypool-1.1.1.tgz",
+ "integrity": "sha512-Zba82s87IFq9A9XmjiX5uZA/ARWDrB03OHlq+Vw1fSdt0I+4/Kutwy8BP4Y/y/aORMo61FQ0vIb5j44vSo5Pkg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": "^18.0.0 || >=20.0.0"
+ }
+ },
+ "node_modules/tinyrainbow": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmmirror.com/tinyrainbow/-/tinyrainbow-2.0.0.tgz",
+ "integrity": "sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=14.0.0"
+ }
+ },
+ "node_modules/tinyspy": {
+ "version": "4.0.4",
+ "resolved": "https://registry.npmmirror.com/tinyspy/-/tinyspy-4.0.4.tgz",
+ "integrity": "sha512-azl+t0z7pw/z958Gy9svOTuzqIk6xq+NSheJzn5MMWtWTFywIacg2wUlzKFGtt3cthx0r2SxMK0yzJOR0IES7Q==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=14.0.0"
+ }
+ },
+ "node_modules/tldts": {
+ "version": "6.1.86",
+ "resolved": "https://registry.npmmirror.com/tldts/-/tldts-6.1.86.tgz",
+ "integrity": "sha512-WMi/OQ2axVTf/ykqCQgXiIct+mSQDFdH2fkwhPwgEwvJ1kSzZRiinb0zF2Xb8u4+OqPChmyI6MEu4EezNJz+FQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "tldts-core": "^6.1.86"
+ },
+ "bin": {
+ "tldts": "bin/cli.js"
+ }
+ },
+ "node_modules/tldts-core": {
+ "version": "6.1.86",
+ "resolved": "https://registry.npmmirror.com/tldts-core/-/tldts-core-6.1.86.tgz",
+ "integrity": "sha512-Je6p7pkk+KMzMv2XXKmAE3McmolOQFdxkKw0R8EYNr7sELW46JqnNeTX8ybPiQgvg1ymCoF8LXs5fzFaZvJPTA==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/tough-cookie": {
+ "version": "5.1.2",
+ "resolved": "https://registry.npmmirror.com/tough-cookie/-/tough-cookie-5.1.2.tgz",
+ "integrity": "sha512-FVDYdxtnj0G6Qm/DhNPSb8Ju59ULcup3tuJxkFb5K8Bv2pUXILbf0xZWU8PX8Ov19OXljbUyveOFwRMwkXzO+A==",
+ "dev": true,
+ "license": "BSD-3-Clause",
+ "dependencies": {
+ "tldts": "^6.1.32"
+ },
+ "engines": {
+ "node": ">=16"
+ }
+ },
+ "node_modules/tr46": {
+ "version": "5.1.1",
+ "resolved": "https://registry.npmmirror.com/tr46/-/tr46-5.1.1.tgz",
+ "integrity": "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "punycode": "^2.3.1"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/tslib": {
+ "version": "2.8.1",
+ "resolved": "https://registry.npmmirror.com/tslib/-/tslib-2.8.1.tgz",
+ "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==",
+ "license": "0BSD"
+ },
+ "node_modules/typescript": {
+ "version": "5.9.3",
+ "resolved": "https://registry.npmmirror.com/typescript/-/typescript-5.9.3.tgz",
+ "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==",
+ "devOptional": true,
+ "license": "Apache-2.0",
+ "peer": true,
+ "bin": {
+ "tsc": "bin/tsc",
+ "tsserver": "bin/tsserver"
+ },
+ "engines": {
+ "node": ">=14.17"
+ }
+ },
+ "node_modules/undici-types": {
+ "version": "6.21.0",
+ "resolved": "https://registry.npmmirror.com/undici-types/-/undici-types-6.21.0.tgz",
+ "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/update-browserslist-db": {
+ "version": "1.2.3",
+ "resolved": "https://registry.npmmirror.com/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz",
+ "integrity": "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/browserslist"
+ },
+ {
+ "type": "tidelift",
+ "url": "https://tidelift.com/funding/github/npm/browserslist"
+ },
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/ai"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "escalade": "^3.2.0",
+ "picocolors": "^1.1.1"
+ },
+ "bin": {
+ "update-browserslist-db": "cli.js"
+ },
+ "peerDependencies": {
+ "browserslist": ">= 4.21.0"
+ }
+ },
+ "node_modules/use-callback-ref": {
+ "version": "1.3.3",
+ "resolved": "https://registry.npmmirror.com/use-callback-ref/-/use-callback-ref-1.3.3.tgz",
+ "integrity": "sha512-jQL3lRnocaFtu3V00JToYz/4QkNWswxijDaCVNZRiRTO3HQDLsdu1ZtmIUvV4yPp+rvWm5j0y0TG/S61cuijTg==",
+ "license": "MIT",
+ "dependencies": {
+ "tslib": "^2.0.0"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "peerDependencies": {
+ "@types/react": "*",
+ "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc"
+ },
+ "peerDependenciesMeta": {
+ "@types/react": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/use-sidecar": {
+ "version": "1.1.3",
+ "resolved": "https://registry.npmmirror.com/use-sidecar/-/use-sidecar-1.1.3.tgz",
+ "integrity": "sha512-Fedw0aZvkhynoPYlA5WXrMCAMm+nSWdZt6lzJQ7Ok8S6Q+VsHmHpRWndVRJ8Be0ZbkfPc5LRYH+5XrzXcEeLRQ==",
+ "license": "MIT",
+ "dependencies": {
+ "detect-node-es": "^1.1.0",
+ "tslib": "^2.0.0"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "peerDependencies": {
+ "@types/react": "*",
+ "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc"
+ },
+ "peerDependenciesMeta": {
+ "@types/react": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/use-sync-external-store": {
+ "version": "1.6.0",
+ "resolved": "https://registry.npmmirror.com/use-sync-external-store/-/use-sync-external-store-1.6.0.tgz",
+ "integrity": "sha512-Pp6GSwGP/NrPIrxVFAIkOQeyw8lFenOHijQWkUTrDvrF4ALqylP2C/KCkeS9dpUM3KvYRQhna5vt7IL95+ZQ9w==",
+ "license": "MIT",
+ "peer": true,
+ "peerDependencies": {
+ "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
+ }
+ },
+ "node_modules/vite": {
+ "version": "6.4.1",
+ "resolved": "https://registry.npmmirror.com/vite/-/vite-6.4.1.tgz",
+ "integrity": "sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g==",
+ "dev": true,
+ "license": "MIT",
+ "peer": true,
+ "dependencies": {
+ "esbuild": "^0.25.0",
+ "fdir": "^6.4.4",
+ "picomatch": "^4.0.2",
+ "postcss": "^8.5.3",
+ "rollup": "^4.34.9",
+ "tinyglobby": "^0.2.13"
+ },
+ "bin": {
+ "vite": "bin/vite.js"
+ },
+ "engines": {
+ "node": "^18.0.0 || ^20.0.0 || >=22.0.0"
+ },
+ "funding": {
+ "url": "https://github.com/vitejs/vite?sponsor=1"
+ },
+ "optionalDependencies": {
+ "fsevents": "~2.3.3"
+ },
+ "peerDependencies": {
+ "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0",
+ "jiti": ">=1.21.0",
+ "less": "*",
+ "lightningcss": "^1.21.0",
+ "sass": "*",
+ "sass-embedded": "*",
+ "stylus": "*",
+ "sugarss": "*",
+ "terser": "^5.16.0",
+ "tsx": "^4.8.1",
+ "yaml": "^2.4.2"
+ },
+ "peerDependenciesMeta": {
+ "@types/node": {
+ "optional": true
+ },
+ "jiti": {
+ "optional": true
+ },
+ "less": {
+ "optional": true
+ },
+ "lightningcss": {
+ "optional": true
+ },
+ "sass": {
+ "optional": true
+ },
+ "sass-embedded": {
+ "optional": true
+ },
+ "stylus": {
+ "optional": true
+ },
+ "sugarss": {
+ "optional": true
+ },
+ "terser": {
+ "optional": true
+ },
+ "tsx": {
+ "optional": true
+ },
+ "yaml": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/vite-node": {
+ "version": "3.2.4",
+ "resolved": "https://registry.npmmirror.com/vite-node/-/vite-node-3.2.4.tgz",
+ "integrity": "sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "cac": "^6.7.14",
+ "debug": "^4.4.1",
+ "es-module-lexer": "^1.7.0",
+ "pathe": "^2.0.3",
+ "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0"
+ },
+ "bin": {
+ "vite-node": "vite-node.mjs"
+ },
+ "engines": {
+ "node": "^18.0.0 || ^20.0.0 || >=22.0.0"
+ },
+ "funding": {
+ "url": "https://opencollective.com/vitest"
+ }
+ },
+ "node_modules/vitest": {
+ "version": "3.2.4",
+ "resolved": "https://registry.npmmirror.com/vitest/-/vitest-3.2.4.tgz",
+ "integrity": "sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/chai": "^5.2.2",
+ "@vitest/expect": "3.2.4",
+ "@vitest/mocker": "3.2.4",
+ "@vitest/pretty-format": "^3.2.4",
+ "@vitest/runner": "3.2.4",
+ "@vitest/snapshot": "3.2.4",
+ "@vitest/spy": "3.2.4",
+ "@vitest/utils": "3.2.4",
+ "chai": "^5.2.0",
+ "debug": "^4.4.1",
+ "expect-type": "^1.2.1",
+ "magic-string": "^0.30.17",
+ "pathe": "^2.0.3",
+ "picomatch": "^4.0.2",
+ "std-env": "^3.9.0",
+ "tinybench": "^2.9.0",
+ "tinyexec": "^0.3.2",
+ "tinyglobby": "^0.2.14",
+ "tinypool": "^1.1.1",
+ "tinyrainbow": "^2.0.0",
+ "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0",
+ "vite-node": "3.2.4",
+ "why-is-node-running": "^2.3.0"
+ },
+ "bin": {
+ "vitest": "vitest.mjs"
+ },
+ "engines": {
+ "node": "^18.0.0 || ^20.0.0 || >=22.0.0"
+ },
+ "funding": {
+ "url": "https://opencollective.com/vitest"
+ },
+ "peerDependencies": {
+ "@edge-runtime/vm": "*",
+ "@types/debug": "^4.1.12",
+ "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0",
+ "@vitest/browser": "3.2.4",
+ "@vitest/ui": "3.2.4",
+ "happy-dom": "*",
+ "jsdom": "*"
+ },
+ "peerDependenciesMeta": {
+ "@edge-runtime/vm": {
+ "optional": true
+ },
+ "@types/debug": {
+ "optional": true
+ },
+ "@types/node": {
+ "optional": true
+ },
+ "@vitest/browser": {
+ "optional": true
+ },
+ "@vitest/ui": {
+ "optional": true
+ },
+ "happy-dom": {
+ "optional": true
+ },
+ "jsdom": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/void-elements": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmmirror.com/void-elements/-/void-elements-3.1.0.tgz",
+ "integrity": "sha512-Dhxzh5HZuiHQhbvTW9AMetFfBHDMYpo23Uo9btPXgdYP+3T5S+p+jgNy7spra+veYhBP2dCSgxR/i2Y02h5/6w==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/w3c-xmlserializer": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmmirror.com/w3c-xmlserializer/-/w3c-xmlserializer-5.0.0.tgz",
+ "integrity": "sha512-o8qghlI8NZHU1lLPrpi2+Uq7abh4GGPpYANlalzWxyWteJOCsr/P+oPBA49TOLu5FTZO4d3F9MnWJfiMo4BkmA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "xml-name-validator": "^5.0.0"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/webidl-conversions": {
+ "version": "7.0.0",
+ "resolved": "https://registry.npmmirror.com/webidl-conversions/-/webidl-conversions-7.0.0.tgz",
+ "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==",
+ "dev": true,
+ "license": "BSD-2-Clause",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/whatwg-encoding": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmmirror.com/whatwg-encoding/-/whatwg-encoding-3.1.1.tgz",
+ "integrity": "sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "iconv-lite": "0.6.3"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/whatwg-mimetype": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmmirror.com/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz",
+ "integrity": "sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/whatwg-url": {
+ "version": "14.2.0",
+ "resolved": "https://registry.npmmirror.com/whatwg-url/-/whatwg-url-14.2.0.tgz",
+ "integrity": "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "tr46": "^5.1.0",
+ "webidl-conversions": "^7.0.0"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/why-is-node-running": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmmirror.com/why-is-node-running/-/why-is-node-running-2.3.0.tgz",
+ "integrity": "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "siginfo": "^2.0.0",
+ "stackback": "0.0.2"
+ },
+ "bin": {
+ "why-is-node-running": "cli.js"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/ws": {
+ "version": "8.19.0",
+ "resolved": "https://registry.npmmirror.com/ws/-/ws-8.19.0.tgz",
+ "integrity": "sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=10.0.0"
+ },
+ "peerDependencies": {
+ "bufferutil": "^4.0.1",
+ "utf-8-validate": ">=5.0.2"
+ },
+ "peerDependenciesMeta": {
+ "bufferutil": {
+ "optional": true
+ },
+ "utf-8-validate": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/xml-name-validator": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmmirror.com/xml-name-validator/-/xml-name-validator-5.0.0.tgz",
+ "integrity": "sha512-EvGK8EJ3DhaHfbRlETOWAS5pO9MZITeauHKJyb8wyajUfQUenkIg2MvLDTZ4T/TgIcm3HU0TFBgWWboAZ30UHg==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/xmlchars": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmmirror.com/xmlchars/-/xmlchars-2.2.0.tgz",
+ "integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/yallist": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmmirror.com/yallist/-/yallist-3.1.1.tgz",
+ "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==",
+ "dev": true,
+ "license": "ISC"
+ },
+ "node_modules/zrender": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmmirror.com/zrender/-/zrender-6.0.0.tgz",
+ "integrity": "sha512-41dFXEEXuJpNecuUQq6JlbybmnHaqqpGlbH1yxnA5V9MMP4SbohSVZsJIwz+zdjQXSSlR1Vc34EgH1zxyTDvhg==",
+ "license": "BSD-3-Clause",
+ "dependencies": {
+ "tslib": "2.3.0"
+ }
+ },
+ "node_modules/zrender/node_modules/tslib": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmmirror.com/tslib/-/tslib-2.3.0.tgz",
+ "integrity": "sha512-N82ooyxVNm6h1riLCoyS9e3fuJ3AMG2zIZs2Gd1ATcSFjSA23Q0fzjjZeh0jbJvWVDZ0cJT8yaNNaaXHzueNjg==",
+ "license": "0BSD"
+ },
+ "node_modules/zustand": {
+ "version": "5.0.11",
+ "resolved": "https://registry.npmmirror.com/zustand/-/zustand-5.0.11.tgz",
+ "integrity": "sha512-fdZY+dk7zn/vbWNCYmzZULHRrss0jx5pPFiOuMZ/5HJN6Yv3u+1Wswy/4MpZEkEGhtNH+pwxZB8OKgUBPzYAGg==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=12.20.0"
+ },
+ "peerDependencies": {
+ "@types/react": ">=18.0.0",
+ "immer": ">=9.0.6",
+ "react": ">=18.0.0",
+ "use-sync-external-store": ">=1.2.0"
+ },
+ "peerDependenciesMeta": {
+ "@types/react": {
+ "optional": true
+ },
+ "immer": {
+ "optional": true
+ },
+ "react": {
+ "optional": true
+ },
+ "use-sync-external-store": {
+ "optional": true
+ }
+ }
+ }
+ }
+}
diff --git a/web/package.json b/web/package.json
new file mode 100644
index 0000000..4f3a349
--- /dev/null
+++ b/web/package.json
@@ -0,0 +1,37 @@
+{
+ "name": "backupx-web",
+ "private": true,
+ "version": "0.1.0",
+ "type": "module",
+ "scripts": {
+ "dev": "vite",
+ "build": "tsc --noEmit -p tsconfig.json && vite build",
+ "preview": "vite preview",
+ "test": "vitest run"
+ },
+ "dependencies": {
+ "@arco-design/web-react": "^2.66.0",
+ "axios": "^1.8.4",
+ "echarts": "^6.0.0",
+ "echarts-for-react": "^3.0.6",
+ "i18next": "^25.8.14",
+ "react": "^18.3.1",
+ "react-dom": "^18.3.1",
+ "react-i18next": "^16.5.6",
+ "react-router-dom": "^6.30.0",
+ "zustand": "^5.0.3"
+ },
+ "devDependencies": {
+ "@testing-library/jest-dom": "^6.6.3",
+ "@testing-library/react": "^16.2.0",
+ "@testing-library/user-event": "^14.6.1",
+ "@types/node": "^22.13.10",
+ "@types/react": "^18.3.20",
+ "@types/react-dom": "^18.3.6",
+ "@vitejs/plugin-react": "^4.3.4",
+ "jsdom": "^26.0.0",
+ "typescript": "^5.7.3",
+ "vite": "^6.2.1",
+ "vitest": "^3.0.8"
+ }
+}
diff --git a/web/src/RootApp.tsx b/web/src/RootApp.tsx
new file mode 100644
index 0000000..5d35a51
--- /dev/null
+++ b/web/src/RootApp.tsx
@@ -0,0 +1,13 @@
+import { BrowserRouter } from 'react-router-dom'
+import { RouterView } from './router'
+import { AuthBootstrap } from './components/AuthBootstrap'
+
+export function RootApp() {
+ return (
+
+
+
+
+
+ )
+}
diff --git a/web/src/app.tsx b/web/src/app.tsx
new file mode 100644
index 0000000..9351236
--- /dev/null
+++ b/web/src/app.tsx
@@ -0,0 +1,13 @@
+import { BrowserRouter } from 'react-router-dom'
+import { RouterView } from './router'
+import { AuthBootstrap } from './components/AuthBootstrap'
+
+export function App() {
+ return (
+
+
+
+
+
+ )
+}
diff --git a/web/src/components/AuthBootstrap.tsx b/web/src/components/AuthBootstrap.tsx
new file mode 100644
index 0000000..8b4d36b
--- /dev/null
+++ b/web/src/components/AuthBootstrap.tsx
@@ -0,0 +1,27 @@
+import { ReactNode, useEffect, useRef } from 'react'
+import { FullPageLoading } from './FullPageLoading'
+import { useAuthStore } from '../stores/auth'
+
+interface AuthBootstrapProps {
+ children: ReactNode
+}
+
+export function AuthBootstrap({ children }: AuthBootstrapProps) {
+ const bootstrap = useAuthStore((state) => state.bootstrap)
+ const bootstrapped = useAuthStore((state) => state.bootstrapped)
+ const startedRef = useRef(false)
+
+ useEffect(() => {
+ if (startedRef.current) {
+ return
+ }
+ startedRef.current = true
+ void bootstrap()
+ }, [bootstrap])
+
+ if (!bootstrapped) {
+ return
+ }
+
+ return <>{children}>
+}
diff --git a/web/src/components/CronInput/CronInput.tsx b/web/src/components/CronInput/CronInput.tsx
new file mode 100644
index 0000000..fc32fe8
--- /dev/null
+++ b/web/src/components/CronInput/CronInput.tsx
@@ -0,0 +1,197 @@
+import { Input, Space, Switch, Tabs, Typography, Radio, Checkbox, Select } from '@arco-design/web-react'
+import { useEffect, useState } from 'react'
+
+export interface CronInputProps {
+ value?: string
+ onChange?: (value: string) => void
+}
+
+const DEFAULT_CRON = '* * * * *'
+
+type CronPart = 'minute' | 'hour' | 'day' | 'month' | 'week'
+
+interface CronState {
+ minute: string
+ hour: string
+ day: string
+ month: string
+ week: string
+}
+
+function parseCron(expr: string): CronState {
+ const parts = (expr || DEFAULT_CRON).trim().split(/\s+/)
+ return {
+ minute: parts[0] || '*',
+ hour: parts[1] || '*',
+ day: parts[2] || '*',
+ month: parts[3] || '*',
+ week: parts[4] || '*',
+ }
+}
+
+function stringifyCron(state: CronState): string {
+ return `${state.minute} ${state.hour} ${state.day} ${state.month} ${state.week}`
+}
+
+function generateOptions(min: number, max: number) {
+ return Array.from({ length: max - min + 1 }, (_, i) => ({
+ label: String(i + min),
+ value: String(i + min),
+ }))
+}
+
+const MINUTES_OPTIONS = generateOptions(0, 59)
+const HOURS_OPTIONS = generateOptions(0, 23)
+const DAYS_OPTIONS = generateOptions(1, 31)
+const MONTHS_OPTIONS = generateOptions(1, 12)
+const WEEKS_OPTIONS = [
+ { label: '星期日', value: '0' },
+ { label: '星期一', value: '1' },
+ { label: '星期二', value: '2' },
+ { label: '星期三', value: '3' },
+ { label: '星期四', value: '4' },
+ { label: '星期五', value: '5' },
+ { label: '星期六', value: '6' },
+]
+
+export function CronInput({ value, onChange }: CronInputProps) {
+ const [internalValue, setInternalValue] = useState(value || DEFAULT_CRON)
+ const [isAdvanced, setIsAdvanced] = useState(false)
+ const [state, setState] = useState(parseCron(internalValue))
+
+ // Sync prop to internal state
+ useEffect(() => {
+ if (value !== undefined && value !== internalValue) {
+ setInternalValue(value || DEFAULT_CRON)
+ if (!isAdvanced) {
+ setState(parseCron(value || DEFAULT_CRON))
+ }
+ }
+ }, [value, isAdvanced, internalValue])
+
+ const notifyChange = (nextValue: string) => {
+ setInternalValue(nextValue)
+ if (onChange) {
+ onChange(nextValue)
+ }
+ }
+
+ const handleStateChange = (part: CronPart, val: string) => {
+ const nextState = { ...state, [part]: val }
+ setState(nextState)
+ notifyChange(stringifyCron(nextState))
+ }
+
+ const renderPartTab = (
+ part: CronPart,
+ title: string,
+ options: { label: string; value: string }[],
+ allowAnyVal = '*',
+ ) => {
+ const currentVal = state[part]
+ const isAny = currentVal === allowAnyVal || currentVal === '*' || currentVal === '?'
+ const isSpecific = !isAny && !currentVal.includes('/') && !currentVal.includes('-')
+
+ // For simplicity in this visual editor, we only support "every" (*) and "specific values" (1,2,3).
+ const type = isAny ? 'any' : 'specific'
+ const specificValues = isSpecific ? currentVal.split(',') : []
+
+ return (
+
+
{
+ if (val === 'any') {
+ handleStateChange(part, allowAnyVal)
+ } else {
+ handleStateChange(part, options[0].value) // Default to first valid item
+ }
+ }}
+ >
+
+ 通配 ({allowAnyVal}) - 任意{title}
+
+
+ 指定{title}
+
+
+
+ {type === 'specific' && (
+
+ {
+ if (vals.length === 0) {
+ handleStateChange(part, allowAnyVal)
+ } else {
+ // Sort numerically to keep things neat
+ const sorted = [...vals].sort((a, b) => Number(a) - Number(b))
+ handleStateChange(part, sorted.join(','))
+ }
+ }}
+ style={{ width: '100%', maxWidth: 400 }}
+ allowClear
+ />
+
+ )}
+
+ )
+ }
+
+ return (
+
+
+ {
+ setInternalValue(val)
+ if (isAdvanced && onChange) {
+ onChange(val)
+ }
+ }}
+ readOnly={!isAdvanced}
+ style={{ width: 240, fontFamily: 'monospace' }}
+ placeholder="* * * * *"
+ />
+
+ 高级模式 (手动输入)
+ {
+ setIsAdvanced(checked)
+ if (!checked) {
+ // When switching back to visual, parse the current raw value
+ setState(parseCron(internalValue))
+ notifyChange(stringifyCron(parseCron(internalValue)))
+ }
+ }}
+ />
+
+
+
+ {!isAdvanced && (
+
+
+ {renderPartTab('minute', '分钟', MINUTES_OPTIONS, '*')}
+
+
+ {renderPartTab('hour', '小时', HOURS_OPTIONS, '*')}
+
+
+ {renderPartTab('day', '日', DAYS_OPTIONS, '*')}
+
+
+ {renderPartTab('month', '月', MONTHS_OPTIONS, '*')}
+
+
+ {renderPartTab('week', '周', WEEKS_OPTIONS, '*')}
+
+
+ )}
+
+ )
+}
diff --git a/web/src/components/CronInput/index.ts b/web/src/components/CronInput/index.ts
new file mode 100644
index 0000000..ec1412a
--- /dev/null
+++ b/web/src/components/CronInput/index.ts
@@ -0,0 +1,2 @@
+export { CronInput } from './CronInput'
+export type { CronInputProps } from './CronInput'
diff --git a/web/src/components/FullPageLoading.tsx b/web/src/components/FullPageLoading.tsx
new file mode 100644
index 0000000..47beacf
--- /dev/null
+++ b/web/src/components/FullPageLoading.tsx
@@ -0,0 +1,16 @@
+import { Space, Spin, Typography } from '@arco-design/web-react'
+
+interface FullPageLoadingProps {
+ tip: string
+}
+
+export function FullPageLoading({ tip }: FullPageLoadingProps) {
+ return (
+
+
+
+ {tip}
+
+
+ )
+}
diff --git a/web/src/components/auth-guard.test.tsx b/web/src/components/auth-guard.test.tsx
new file mode 100644
index 0000000..cb5724d
--- /dev/null
+++ b/web/src/components/auth-guard.test.tsx
@@ -0,0 +1,58 @@
+import { render, screen } from '@testing-library/react';
+import { MemoryRouter, Route, Routes } from 'react-router-dom';
+
+import { AuthGuard } from './auth-guard';
+import { useAuthStore } from '../stores/auth';
+
+function renderWithRoutes(initialEntry: string) {
+ return render(
+
+
+ login-page} />
+
+ protected-page
+
+ }
+ />
+
+ ,
+ );
+}
+
+describe('AuthGuard', () => {
+ beforeEach(() => {
+ useAuthStore.setState({
+ token: null,
+ user: null,
+ hydrated: true,
+ status: 'anonymous',
+ });
+ });
+
+ it('redirects anonymous users to login page', async () => {
+ renderWithRoutes('/');
+
+ expect(await screen.findByText('login-page')).toBeInTheDocument();
+ });
+
+ it('renders children for authenticated users', async () => {
+ useAuthStore.setState({
+ token: 'token',
+ user: {
+ id: 1,
+ username: 'admin',
+ displayName: '管理员',
+ role: 'admin',
+ },
+ hydrated: true,
+ status: 'authenticated',
+ });
+
+ renderWithRoutes('/');
+
+ expect(await screen.findByText('protected-page')).toBeInTheDocument();
+ });
+});
diff --git a/web/src/components/auth-guard.tsx b/web/src/components/auth-guard.tsx
new file mode 100644
index 0000000..3421405
--- /dev/null
+++ b/web/src/components/auth-guard.tsx
@@ -0,0 +1,25 @@
+import { Spin } from '@arco-design/web-react';
+import type { PropsWithChildren } from 'react';
+import { Navigate, useLocation } from 'react-router-dom';
+
+import { useAuthStore } from '../stores/auth';
+
+export function AuthGuard({ children }: PropsWithChildren) {
+ const hydrated = useAuthStore((state) => state.hydrated);
+ const status = useAuthStore((state) => state.status);
+ const location = useLocation();
+
+ if (!hydrated || status === 'bootstrapping' || status === 'idle') {
+ return (
+
+
+
+ );
+ }
+
+ if (status !== 'authenticated') {
+ return ;
+ }
+
+ return <>{children}>;
+}
diff --git a/web/src/components/backup-records/BackupRecordLogDrawer.tsx b/web/src/components/backup-records/BackupRecordLogDrawer.tsx
new file mode 100644
index 0000000..2f7612a
--- /dev/null
+++ b/web/src/components/backup-records/BackupRecordLogDrawer.tsx
@@ -0,0 +1,232 @@
+import { Alert, Button, Descriptions, Drawer, Space, Spin, Tag, Typography } from '@arco-design/web-react'
+import { useEffect, useMemo, useState } from 'react'
+import { deleteBackupRecord, downloadBackupRecord, getBackupRecord, restoreBackupRecord, streamBackupRecordLogs } from '../../services/backup-records'
+import type { BackupLogEvent, BackupRecordDetail, BackupRecordStatus } from '../../types/backup-records'
+import { resolveErrorMessage } from '../../utils/error'
+import { formatBytes, formatDateTime, formatDuration } from '../../utils/format'
+
+interface BackupRecordLogDrawerProps {
+ visible: boolean
+ recordId?: number
+ onCancel: () => void
+ onChanged?: () => Promise | void
+}
+
+function getStatusColor(status: BackupRecordStatus) {
+ switch (status) {
+ case 'success':
+ return 'green'
+ case 'failed':
+ return 'red'
+ default:
+ return 'arcoblue'
+ }
+}
+
+function buildLogText(record: BackupRecordDetail | null, events: BackupLogEvent[]) {
+ if (events.length > 0) {
+ return events.map((item) => `[${formatDateTime(item.timestamp)}] ${item.message}`).join('\n')
+ }
+ return record?.logContent ?? ''
+}
+
+export function BackupRecordLogDrawer({ visible, recordId, onCancel, onChanged }: BackupRecordLogDrawerProps) {
+ const [record, setRecord] = useState(null)
+ const [events, setEvents] = useState([])
+ const [loading, setLoading] = useState(false)
+ const [acting, setActing] = useState(false)
+ const [error, setError] = useState('')
+ const [streamError, setStreamError] = useState('')
+
+ useEffect(() => {
+ if (!visible || !recordId) {
+ return
+ }
+
+ const currentRecordId = recordId
+ let active = true
+ let unsubscribe: (() => void) | null = null
+
+ async function loadRecordDetail() {
+ setLoading(true)
+ try {
+ const detail = await getBackupRecord(currentRecordId)
+ if (!active) {
+ return
+ }
+ setRecord(detail)
+ setEvents(detail.logEvents ?? [])
+ setError('')
+ setStreamError('')
+
+ if (detail.status === 'running') {
+ unsubscribe = streamBackupRecordLogs(currentRecordId, {
+ onEvent: (event) => {
+ if (!active) {
+ return
+ }
+ setEvents((current) => {
+ if (current.some((item) => item.sequence === event.sequence)) {
+ return current
+ }
+ return [...current, event]
+ })
+ if (event.completed) {
+ setRecord((current) => (current ? { ...current, status: event.status as BackupRecordStatus } : current))
+ }
+ },
+ onDone: () => {
+ if (!active) {
+ return
+ }
+ void (async () => {
+ try {
+ const latest = await getBackupRecord(currentRecordId)
+ if (active) {
+ setRecord(latest)
+ setEvents(latest.logEvents ?? [])
+ }
+ } catch (streamLoadError) {
+ if (active) {
+ setStreamError(resolveErrorMessage(streamLoadError, '刷新日志详情失败'))
+ }
+ }
+ })()
+ },
+ onError: (message) => {
+ if (active) {
+ setStreamError(message)
+ }
+ },
+ })
+ }
+ } catch (loadError) {
+ if (active) {
+ setError(resolveErrorMessage(loadError, '加载备份记录失败'))
+ }
+ } finally {
+ if (active) {
+ setLoading(false)
+ }
+ }
+ }
+
+ void loadRecordDetail()
+
+ return () => {
+ active = false
+ unsubscribe?.()
+ }
+ }, [recordId, visible])
+
+ const logText = useMemo(() => buildLogText(record, events), [events, record])
+
+ async function handleDownload() {
+ if (!recordId) {
+ return
+ }
+ setActing(true)
+ try {
+ const result = await downloadBackupRecord(recordId)
+ const url = window.URL.createObjectURL(result.blob)
+ const link = document.createElement('a')
+ link.href = url
+ link.download = result.fileName
+ link.click()
+ window.URL.revokeObjectURL(url)
+ } catch (downloadError) {
+ setStreamError(resolveErrorMessage(downloadError, '下载备份文件失败'))
+ } finally {
+ setActing(false)
+ }
+ }
+
+ async function handleRestore() {
+ if (!recordId) {
+ return
+ }
+ setActing(true)
+ try {
+ await restoreBackupRecord(recordId)
+ setStreamError('恢复命令已提交')
+ await onChanged?.()
+ } catch (restoreError) {
+ setStreamError(resolveErrorMessage(restoreError, '恢复备份失败'))
+ } finally {
+ setActing(false)
+ }
+ }
+
+ async function handleDelete() {
+ if (!recordId) {
+ return
+ }
+ if (!window.confirm('确定删除该备份记录及远端对象吗?')) {
+ return
+ }
+ setActing(true)
+ try {
+ await deleteBackupRecord(recordId)
+ await onChanged?.()
+ onCancel()
+ } catch (deleteError) {
+ setStreamError(resolveErrorMessage(deleteError, '删除备份记录失败'))
+ } finally {
+ setActing(false)
+ }
+ }
+
+ return (
+
+ {loading ? (
+
+ ) : error ? (
+
+ ) : record ? (
+
+ {streamError ? : null}
+
+
+ {record.taskName}
+
+
+ {record.status && (
+
+ {record.status === 'success' ? '成功' : record.status === 'failed' ? '失败' : record.status === 'running' ? '执行中' : record.status}
+
+ )}
+ {record.storageTargetName && {record.storageTargetName} }
+
+
+
+
+
+ 下载
+
+
+ 恢复
+
+
+ 删除
+
+
+
+
执行日志
+
{logText || '暂无日志输出'}
+
+
+ ) : null}
+
+ )
+}
diff --git a/web/src/components/backup-tasks/BackupTaskDetailDrawer.tsx b/web/src/components/backup-tasks/BackupTaskDetailDrawer.tsx
new file mode 100644
index 0000000..8a4fd32
--- /dev/null
+++ b/web/src/components/backup-tasks/BackupTaskDetailDrawer.tsx
@@ -0,0 +1,63 @@
+import { Descriptions, Drawer, Space, Tag, Typography } from '@arco-design/web-react'
+import type { BackupTaskDetail } from '../../types/backup-tasks'
+import { formatDateTime } from '../../utils/format'
+import { getBackupTaskStatusColor, getBackupTaskStatusLabel, getBackupTaskTypeLabel } from './field-config'
+
+interface BackupTaskDetailDrawerProps {
+ visible: boolean
+ task: BackupTaskDetail | null
+ onCancel: () => void
+}
+
+export function BackupTaskDetailDrawer({ visible, task, onCancel }: BackupTaskDetailDrawerProps) {
+ return (
+
+ {task ? (
+
+
+
+ {task.name}
+
+
+ {getBackupTaskTypeLabel(task.type)}
+ {task.enabled ? '已启用' : '已停用'}
+ {getBackupTaskStatusLabel(task.lastStatus)}
+
+
+
+ {task.type === 'file' ? (
+
+ ) : null}
+ {task.type === 'sqlite' ? : null}
+ {task.type === 'mysql' || task.type === 'postgresql' ? (
+
+ ) : null}
+
+ ) : null}
+
+ )
+}
diff --git a/web/src/components/backup-tasks/BackupTaskFormDrawer.tsx b/web/src/components/backup-tasks/BackupTaskFormDrawer.tsx
new file mode 100644
index 0000000..dbed692
--- /dev/null
+++ b/web/src/components/backup-tasks/BackupTaskFormDrawer.tsx
@@ -0,0 +1,325 @@
+import { Alert, Button, Divider, Drawer, Input, InputNumber, Select, Space, Steps, Switch, Typography } from '@arco-design/web-react'
+import { useEffect, useMemo, useState } from 'react'
+import { CronInput } from '../CronInput'
+import type { StorageTargetSummary } from '../../types/storage-targets'
+import type { BackupTaskDetail, BackupTaskPayload, BackupTaskType } from '../../types/backup-tasks'
+import {
+ backupCompressionOptions,
+ backupTaskTypeOptions,
+ getDefaultPort,
+ isDatabaseBackupTask,
+ isFileBackupTask,
+ isSQLiteBackupTask,
+} from './field-config'
+
+interface BackupTaskFormDrawerProps {
+ visible: boolean
+ loading: boolean
+ initialValue: BackupTaskDetail | null
+ storageTargets: StorageTargetSummary[]
+ onCancel: () => void
+ onSubmit: (value: BackupTaskPayload, taskId?: number) => Promise
+}
+
+function createEmptyDraft(storageTargetId?: number): BackupTaskPayload {
+ return {
+ name: '',
+ type: 'file',
+ enabled: true,
+ cronExpr: '',
+ sourcePath: '',
+ excludePatterns: [],
+ dbHost: '',
+ dbPort: 0,
+ dbUser: '',
+ dbPassword: '',
+ dbName: '',
+ dbPath: '',
+ storageTargetId: storageTargetId ?? 0,
+ nodeId: 0,
+ tags: '',
+ retentionDays: 30,
+ compression: 'gzip',
+ encrypt: false,
+ maxBackups: 10,
+ }
+}
+
+export function BackupTaskFormDrawer({ visible, loading, initialValue, storageTargets, onCancel, onSubmit }: BackupTaskFormDrawerProps) {
+ const [draft, setDraft] = useState(createEmptyDraft())
+ const [excludePatternsText, setExcludePatternsText] = useState('')
+ const [currentStep, setCurrentStep] = useState(0)
+ const [error, setError] = useState('')
+
+ useEffect(() => {
+ if (!visible) {
+ return
+ }
+
+ if (!initialValue) {
+ const nextDraft = createEmptyDraft(storageTargets[0]?.id)
+ setDraft(nextDraft)
+ setExcludePatternsText('')
+ setCurrentStep(0)
+ setError('')
+ return
+ }
+
+ setDraft({
+ name: initialValue.name,
+ type: initialValue.type,
+ enabled: initialValue.enabled,
+ cronExpr: initialValue.cronExpr,
+ sourcePath: initialValue.sourcePath,
+ excludePatterns: initialValue.excludePatterns,
+ dbHost: initialValue.dbHost,
+ dbPort: initialValue.dbPort,
+ dbUser: initialValue.dbUser,
+ dbPassword: '',
+ dbName: initialValue.dbName,
+ dbPath: initialValue.dbPath,
+ storageTargetId: initialValue.storageTargetId,
+ nodeId: (initialValue as any).nodeId ?? 0,
+ tags: (initialValue as any).tags ?? '',
+ retentionDays: initialValue.retentionDays,
+ compression: initialValue.compression,
+ encrypt: initialValue.encrypt,
+ maxBackups: initialValue.maxBackups,
+ })
+ setExcludePatternsText(initialValue.excludePatterns.join('\n'))
+ setCurrentStep(0)
+ setError('')
+ }, [initialValue, storageTargets, visible])
+
+ const storageTargetOptions = useMemo(
+ () => storageTargets.map((item) => ({ label: item.name, value: item.id, disabled: !item.enabled })),
+ [storageTargets],
+ )
+
+ function updateDraft(patch: Partial) {
+ setDraft((current) => ({ ...current, ...patch }))
+ }
+
+ function updateTaskType(value: BackupTaskType) {
+ setDraft((current) => ({
+ ...current,
+ type: value,
+ sourcePath: value === 'file' ? current.sourcePath : '',
+ excludePatterns: value === 'file' ? current.excludePatterns : [],
+ dbHost: value === 'mysql' || value === 'postgresql' ? current.dbHost : '',
+ dbPort: value === 'mysql' || value === 'postgresql' ? current.dbPort || getDefaultPort(value) : 0,
+ dbUser: value === 'mysql' || value === 'postgresql' ? current.dbUser : '',
+ dbPassword: value === 'mysql' || value === 'postgresql' ? current.dbPassword : '',
+ dbName: value === 'mysql' || value === 'postgresql' ? current.dbName : '',
+ dbPath: value === 'sqlite' ? current.dbPath : '',
+ }))
+ if (value !== 'file') {
+ setExcludePatternsText('')
+ }
+ }
+
+ function validate(value: BackupTaskPayload) {
+ if (!value.name.trim()) {
+ return '请输入任务名称'
+ }
+ if (!value.storageTargetId) {
+ return '请选择存储目标'
+ }
+ if (value.cronExpr.trim() && value.cronExpr.trim().split(/\s+/).length < 5) {
+ return 'Cron 表达式至少需要 5 段'
+ }
+ if (value.retentionDays < 0) {
+ return '保留天数不能小于 0'
+ }
+ if (value.maxBackups < 0) {
+ return '最大保留份数不能小于 0'
+ }
+ if (isFileBackupTask(value.type) && !value.sourcePath.trim()) {
+ return '请输入源路径'
+ }
+ if (isSQLiteBackupTask(value.type) && !value.dbPath.trim()) {
+ return '请输入 SQLite 数据库路径'
+ }
+ if (isDatabaseBackupTask(value.type)) {
+ if (!value.dbHost.trim()) {
+ return '请输入数据库主机'
+ }
+ if (!value.dbPort || value.dbPort <= 0) {
+ return '请输入正确的数据库端口'
+ }
+ if (!value.dbUser.trim()) {
+ return '请输入数据库用户名'
+ }
+ if (!initialValue?.maskedFields?.includes('dbPassword') && !value.dbPassword.trim()) {
+ return '请输入数据库密码'
+ }
+ if (!value.dbName.trim()) {
+ return '请输入数据库名称'
+ }
+ }
+ return ''
+ }
+
+ async function handleSubmit() {
+ const nextValue: BackupTaskPayload = {
+ ...draft,
+ excludePatterns: excludePatternsText
+ .split('\n')
+ .map((item) => item.trim())
+ .filter(Boolean),
+ }
+ const validationError = validate(nextValue)
+ if (validationError) {
+ setError(validationError)
+ return
+ }
+ setError('')
+ await onSubmit(nextValue, initialValue?.id)
+ }
+
+ function renderBasicStep() {
+ return (
+
+
+ 任务名称
+ updateDraft({ name: value })} />
+
+
+ 备份类型
+ updateTaskType(value as BackupTaskType)} />
+
+
+ Cron 表达式
+ updateDraft({ cronExpr: value })} />
+
+ 留空表示仅手动执行;已填写时由服务端调度器自动触发。
+
+
+
+ 启用任务
+ updateDraft({ enabled: checked })} />
+
+
+ )
+ }
+
+ function renderSourceStep() {
+ return (
+
+ {isFileBackupTask(draft.type) ? (
+ <>
+
+ 源路径
+ updateDraft({ sourcePath: value })} />
+
+
+ 排除规则
+ setExcludePatternsText(value)}
+ />
+
+ >
+ ) : null}
+
+ {isSQLiteBackupTask(draft.type) ? (
+
+ SQLite 数据库文件
+ updateDraft({ dbPath: value })} />
+
+ ) : null}
+
+ {isDatabaseBackupTask(draft.type) ? (
+ <>
+
+ 数据库主机
+ updateDraft({ dbHost: value })} />
+
+
+ 数据库端口
+ updateDraft({ dbPort: Number(value ?? 0) })} />
+
+
+ 数据库用户名
+ updateDraft({ dbUser: value })} />
+
+
+ 数据库密码
+ updateDraft({ dbPassword: value })} />
+
+
+ 数据库名称
+ updateDraft({ dbName: value })} />
+
+ >
+ ) : null}
+
+ )
+ }
+
+ function renderPolicyStep() {
+ return (
+
+
+ 存储目标
+ updateDraft({ storageTargetId: Number(value) })} />
+
+
+ 压缩策略
+ updateDraft({ compression: value as BackupTaskPayload['compression'] })} />
+
+
+ 保留天数
+ updateDraft({ retentionDays: Number(value ?? 0) })} />
+
+
+ 最大保留份数
+ updateDraft({ maxBackups: Number(value ?? 0) })} />
+
+
+ 备份后加密
+ updateDraft({ encrypt: checked })} />
+
+
+ )
+ }
+
+ return (
+
+
+ {error ? : }
+
+
+
+
+
+
+ {currentStep === 0 ? renderBasicStep() : null}
+ {currentStep === 1 ? renderSourceStep() : null}
+ {currentStep === 2 ? renderPolicyStep() : null}
+
+ setCurrentStep((value) => Math.max(0, value - 1))}>
+ 上一步
+
+ {currentStep < 2 ? (
+ setCurrentStep((value) => Math.min(2, value + 1))}>
+ 下一步
+
+ ) : (
+
+ 保存任务
+
+ )}
+
+
+
+ )
+}
diff --git a/web/src/components/backup-tasks/field-config.test.ts b/web/src/components/backup-tasks/field-config.test.ts
new file mode 100644
index 0000000..8f1799d
--- /dev/null
+++ b/web/src/components/backup-tasks/field-config.test.ts
@@ -0,0 +1,32 @@
+import { describe, expect, it } from 'vitest'
+import {
+ getBackupTaskStatusColor,
+ getBackupTaskStatusLabel,
+ getBackupTaskTypeLabel,
+ getDefaultPort,
+ isDatabaseBackupTask,
+ isFileBackupTask,
+ isSQLiteBackupTask,
+} from './field-config'
+
+describe('backup task field config', () => {
+ it('returns readable task labels', () => {
+ expect(getBackupTaskTypeLabel('file')).toBe('文件目录')
+ expect(getBackupTaskTypeLabel('postgresql')).toBe('PostgreSQL')
+ })
+
+ it('classifies task types correctly', () => {
+ expect(isFileBackupTask('file')).toBe(true)
+ expect(isSQLiteBackupTask('sqlite')).toBe(true)
+ expect(isDatabaseBackupTask('mysql')).toBe(true)
+ expect(isDatabaseBackupTask('postgresql')).toBe(true)
+ expect(isDatabaseBackupTask('file')).toBe(false)
+ })
+
+ it('returns expected status meta and default ports', () => {
+ expect(getBackupTaskStatusLabel('success')).toBe('成功')
+ expect(getBackupTaskStatusColor('failed')).toBe('red')
+ expect(getDefaultPort('mysql')).toBe(3306)
+ expect(getDefaultPort('postgresql')).toBe(5432)
+ })
+})
diff --git a/web/src/components/backup-tasks/field-config.ts b/web/src/components/backup-tasks/field-config.ts
new file mode 100644
index 0000000..a764c13
--- /dev/null
+++ b/web/src/components/backup-tasks/field-config.ts
@@ -0,0 +1,83 @@
+import type { BackupCompression, BackupTaskStatus, BackupTaskType } from '../../types/backup-tasks'
+
// <Select> options for the backup task type picker; labels mirror
// getBackupTaskTypeLabel below.
export const backupTaskTypeOptions = [
  { label: '文件目录', value: 'file' },
  { label: 'MySQL', value: 'mysql' },
  { label: 'SQLite', value: 'sqlite' },
  { label: 'PostgreSQL', value: 'postgresql' },
] as const

// <Select> options for the backup compression mode picker.
export const backupCompressionOptions = [
  { label: 'Gzip 压缩', value: 'gzip' },
  { label: '不压缩', value: 'none' },
] as const
+
+export function getBackupTaskTypeLabel(type: BackupTaskType) {
+ switch (type) {
+ case 'file':
+ return '文件目录'
+ case 'mysql':
+ return 'MySQL'
+ case 'sqlite':
+ return 'SQLite'
+ case 'postgresql':
+ return 'PostgreSQL'
+ default:
+ return type
+ }
+}
+
+export function getBackupTaskStatusLabel(status: BackupTaskStatus) {
+ switch (status) {
+ case 'idle':
+ return '空闲'
+ case 'running':
+ return '执行中'
+ case 'success':
+ return '成功'
+ case 'failed':
+ return '失败'
+ default:
+ return status
+ }
+}
+
+export function getBackupTaskStatusColor(status: BackupTaskStatus) {
+ switch (status) {
+ case 'success':
+ return 'green'
+ case 'failed':
+ return 'red'
+ case 'running':
+ return 'arcoblue'
+ default:
+ return 'gray'
+ }
+}
+
// True when the task backs up a plain file/directory tree (no database dump).
export function isFileBackupTask(type: BackupTaskType) {
  return type === 'file'
}
+
// True for SQLite tasks, which read a database file directly (no host/port).
export function isSQLiteBackupTask(type: BackupTaskType) {
  return type === 'sqlite'
}
+
+export function isDatabaseBackupTask(type: BackupTaskType) {
+ return type === 'mysql' || type === 'postgresql'
+}
+
+export function getDefaultPort(type: BackupTaskType) {
+ switch (type) {
+ case 'mysql':
+ return 3306
+ case 'postgresql':
+ return 5432
+ default:
+ return 0
+ }
+}
+
+export function getCompressionLabel(compression: BackupCompression) {
+ return compression === 'gzip' ? 'Gzip' : '无'
+}
diff --git a/web/src/components/notifications/NotificationFormDrawer.tsx b/web/src/components/notifications/NotificationFormDrawer.tsx
new file mode 100644
index 0000000..9b285b0
--- /dev/null
+++ b/web/src/components/notifications/NotificationFormDrawer.tsx
@@ -0,0 +1,184 @@
+import { Alert, Button, Drawer, Input, InputNumber, Select, Space, Switch, Typography } from '@arco-design/web-react'
+import { useEffect, useMemo, useState } from 'react'
+import type { NotificationDetail, NotificationPayload, NotificationType } from '../../types/notifications'
+import { getNotificationFieldConfigs, getNotificationTypeLabel, notificationTypeOptions } from './field-config'
+
+interface NotificationFormDrawerProps {
+ visible: boolean
+ loading: boolean
+ testing: boolean
+ initialValue: NotificationDetail | null
+ onCancel: () => void
+ onSubmit: (value: NotificationPayload, notificationId?: number) => Promise
+ onTest: (value: NotificationPayload, notificationId?: number) => Promise
+}
+
+function createEmptyDraft(): NotificationPayload {
+ return {
+ name: '',
+ type: 'webhook',
+ enabled: true,
+ onSuccess: false,
+ onFailure: true,
+ config: {},
+ }
+}
+
+export function NotificationFormDrawer({ visible, loading, testing, initialValue, onCancel, onSubmit, onTest }: NotificationFormDrawerProps) {
+ const [draft, setDraft] = useState(createEmptyDraft())
+ const [error, setError] = useState('')
+
+ useEffect(() => {
+ if (!visible) {
+ return
+ }
+ if (!initialValue) {
+ setDraft(createEmptyDraft())
+ setError('')
+ return
+ }
+ setDraft({
+ name: initialValue.name,
+ type: initialValue.type,
+ enabled: initialValue.enabled,
+ onSuccess: initialValue.onSuccess,
+ onFailure: initialValue.onFailure,
+ config: { ...initialValue.config },
+ })
+ setError('')
+ }, [initialValue, visible])
+
+ const fieldConfigs = useMemo(() => getNotificationFieldConfigs(draft.type), [draft.type])
+
+ function updateDraft(patch: Partial) {
+ setDraft((current) => ({ ...current, ...patch }))
+ }
+
+ function updateConfig(key: string, value: string | number) {
+ setDraft((current) => ({
+ ...current,
+ config: {
+ ...current.config,
+ [key]: value,
+ },
+ }))
+ }
+
+ function validate(value: NotificationPayload) {
+ if (!value.name.trim()) {
+ return '请输入通知名称'
+ }
+ for (const field of fieldConfigs) {
+ if (!field.required) {
+ continue
+ }
+ const currentValue = value.config[field.key]
+ if (typeof currentValue === 'number' && currentValue > 0) {
+ continue
+ }
+ if (typeof currentValue === 'string' && currentValue.trim()) {
+ continue
+ }
+ if (initialValue?.maskedFields?.includes(field.key) && (currentValue === '' || currentValue === undefined)) {
+ continue
+ }
+ return `请填写${field.label}`
+ }
+ return ''
+ }
+
+ async function handleSubmit() {
+ const validationError = validate(draft)
+ if (validationError) {
+ setError(validationError)
+ return
+ }
+ setError('')
+ await onSubmit(draft, initialValue?.id)
+ }
+
+ async function handleTest() {
+ const validationError = validate(draft)
+ if (validationError) {
+ setError(validationError)
+ return
+ }
+ setError('')
+ await onTest(draft, initialValue?.id)
+ }
+
+ return (
+
+
+ {error ? : null}
+
+ 名称
+ updateDraft({ name: value })} />
+
+
+ 类型
+ updateDraft({ type: value as NotificationType, config: {} })} />
+
+
+ 启用
+ updateDraft({ enabled: checked })} />
+
+
+ 成功时通知
+ updateDraft({ onSuccess: checked })} />
+
+
+ 失败时通知
+ updateDraft({ onFailure: checked })} />
+
+
+
+ {getNotificationTypeLabel(draft.type)} 配置
+
+
+ {fieldConfigs.map((field) => {
+ const currentValue = draft.config[field.key]
+ const normalizedValue = typeof currentValue === 'number' || typeof currentValue === 'string' ? currentValue : field.type === 'number' ? 0 : ''
+
+ return (
+
+
+ {field.label}
+ {field.required ? ' *' : ''}
+
+ {field.type === 'password' ? (
+ updateConfig(field.key, value)} />
+ ) : field.type === 'number' ? (
+ updateConfig(field.key, Number(value ?? 0))} />
+ ) : field.type === 'textarea' ? (
+ updateConfig(field.key, value)} />
+ ) : (
+ updateConfig(field.key, value)} />
+ )}
+ {field.description ? (
+
+ {field.description}
+
+ ) : null}
+ {initialValue?.maskedFields?.includes(field.key) && !draft.config[field.key] ? (
+
+ 已存在敏感配置,留空则保持不变。
+
+ ) : null}
+
+ )
+ })}
+
+
+
+
+ 发送测试通知
+
+
+ 保存配置
+
+
+
+
+ )
+}
diff --git a/web/src/components/notifications/field-config.test.ts b/web/src/components/notifications/field-config.test.ts
new file mode 100644
index 0000000..78782de
--- /dev/null
+++ b/web/src/components/notifications/field-config.test.ts
@@ -0,0 +1,19 @@
+import { describe, expect, it } from 'vitest'
+import { getNotificationFieldConfigs, getNotificationTypeLabel } from './field-config'
+
// Unit tests for the notification field-config helpers: channel labels and
// per-channel required form fields.
describe('notification field config', () => {
  it('returns readable type labels', () => {
    expect(getNotificationTypeLabel('email')).toBe('Email')
    expect(getNotificationTypeLabel('telegram')).toBe('Telegram')
  })

  it('returns required fields for each notification type', () => {
    const emailFields = getNotificationFieldConfigs('email')
    const webhookFields = getNotificationFieldConfigs('webhook')
    const telegramFields = getNotificationFieldConfigs('telegram')

    expect(emailFields.some((field) => field.key === 'host' && field.required)).toBe(true)
    expect(webhookFields.some((field) => field.key === 'url' && field.required)).toBe(true)
    expect(telegramFields.some((field) => field.key === 'botToken' && field.required)).toBe(true)
  })
})
diff --git a/web/src/components/notifications/field-config.ts b/web/src/components/notifications/field-config.ts
new file mode 100644
index 0000000..0b59f12
--- /dev/null
+++ b/web/src/components/notifications/field-config.ts
@@ -0,0 +1,43 @@
+import type { NotificationFieldConfig, NotificationType } from '../../types/notifications'
+
+const FIELD_CONFIG_MAP: Record = {
+ email: [
+ { key: 'host', label: 'SMTP Host', type: 'input', required: true, placeholder: 'smtp.example.com' },
+ { key: 'port', label: 'SMTP Port', type: 'number', required: true, placeholder: '587' },
+ { key: 'username', label: '用户名', type: 'input', placeholder: '可选' },
+ { key: 'password', label: '密码', type: 'password', placeholder: '留空表示保持原密码', sensitive: true },
+ { key: 'from', label: '发件人', type: 'input', required: true, placeholder: 'backupx@example.com' },
+ { key: 'to', label: '收件人', type: 'input', required: true, placeholder: 'ops@example.com,dev@example.com' },
+ ],
+ webhook: [
+ { key: 'url', label: 'Webhook URL', type: 'input', required: true, placeholder: 'https://hooks.example.com/backupx' },
+ { key: 'secret', label: '共享密钥', type: 'password', placeholder: '可选', sensitive: true },
+ ],
+ telegram: [
+ { key: 'botToken', label: 'Bot Token', type: 'password', required: true, placeholder: '123456:ABC', sensitive: true },
+ { key: 'chatId', label: 'Chat ID', type: 'input', required: true, placeholder: '-100xxxxxxxxxx' },
+ ],
+}
+
// <Select> options for the notification channel picker.
export const notificationTypeOptions = [
  { label: 'Email', value: 'email' },
  { label: 'Webhook', value: 'webhook' },
  { label: 'Telegram', value: 'telegram' },
] as const
+
+export function getNotificationTypeLabel(type: NotificationType) {
+ switch (type) {
+ case 'email':
+ return 'Email'
+ case 'webhook':
+ return 'Webhook'
+ case 'telegram':
+ return 'Telegram'
+ default:
+ return type
+ }
+}
+
// Returns the form field definitions for the given notification channel.
export function getNotificationFieldConfigs(type: NotificationType) {
  return FIELD_CONFIG_MAP[type]
}
diff --git a/web/src/components/page-card.tsx b/web/src/components/page-card.tsx
new file mode 100644
index 0000000..6ba20ff
--- /dev/null
+++ b/web/src/components/page-card.tsx
@@ -0,0 +1,14 @@
+import { Card } from '@arco-design/web-react';
+import type { PropsWithChildren, ReactNode } from 'react';
+
+interface PageCardProps extends PropsWithChildren {
+ title: ReactNode;
+}
+
+export function PageCard({ title, children }: PageCardProps) {
+ return (
+
+ {children}
+
+ );
+}
diff --git a/web/src/components/storage-targets/StorageTargetFormDrawer.tsx b/web/src/components/storage-targets/StorageTargetFormDrawer.tsx
new file mode 100644
index 0000000..7d8a4df
--- /dev/null
+++ b/web/src/components/storage-targets/StorageTargetFormDrawer.tsx
@@ -0,0 +1,235 @@
+import { Alert, Button, Divider, Drawer, Input, Select, Space, Switch, Typography } from '@arco-design/web-react'
+import { useEffect, useMemo, useState } from 'react'
+import { getStorageTargetFieldConfigs, getStorageTargetTypeLabel, storageTargetTypeOptions } from './field-config'
+import type { StorageConnectionTestResult, StorageTargetDetail, StorageTargetPayload, StorageTargetType } from '../../types/storage-targets'
+
+interface StorageTargetFormDrawerProps {
+ visible: boolean
+ loading: boolean
+ testing: boolean
+ initialValue: StorageTargetDetail | null
+ onCancel: () => void
+ onSubmit: (value: StorageTargetPayload, targetId?: number) => Promise
+ onTest: (value: StorageTargetPayload, targetId?: number) => Promise
+ onGoogleDriveAuth: (value: StorageTargetPayload, targetId?: number) => Promise
+}
+
+function createEmptyDraft(type: StorageTargetType = 'local_disk'): StorageTargetPayload {
+ return {
+ name: '',
+ type,
+ description: '',
+ enabled: true,
+ config: {},
+ }
+}
+
+export function StorageTargetFormDrawer({
+ visible,
+ loading,
+ testing,
+ initialValue,
+ onCancel,
+ onSubmit,
+ onTest,
+ onGoogleDriveAuth,
+}: StorageTargetFormDrawerProps) {
+ const [draft, setDraft] = useState(createEmptyDraft())
+ const [error, setError] = useState('')
+ const [testResult, setTestResult] = useState(null)
+
+ useEffect(() => {
+ if (!visible) {
+ return
+ }
+ if (!initialValue) {
+ setDraft(createEmptyDraft())
+ setError('')
+ setTestResult(null)
+ return
+ }
+ setDraft({
+ name: initialValue.name,
+ type: initialValue.type,
+ description: initialValue.description,
+ enabled: initialValue.enabled,
+ config: { ...initialValue.config },
+ })
+ setError('')
+ setTestResult(null)
+ }, [initialValue, visible])
+
+ const fieldConfigs = useMemo(() => getStorageTargetFieldConfigs(draft.type), [draft.type])
+
+ function updateConfig(key: string, value: string | boolean) {
+ setDraft((current) => ({
+ ...current,
+ config: {
+ ...current.config,
+ [key]: value,
+ },
+ }))
+ }
+
+ function validate(value: StorageTargetPayload) {
+ if (!value.name.trim()) {
+ return '请输入存储目标名称'
+ }
+ for (const field of fieldConfigs) {
+ if (!field.required) {
+ continue
+ }
+ const currentValue = value.config[field.key]
+ if (field.type === 'switch') {
+ continue
+ }
+ if (typeof currentValue !== 'string' || !currentValue.trim()) {
+ return `请填写${field.label}`
+ }
+ }
+ return ''
+ }
+
+ async function handleSubmit() {
+ const validationError = validate(draft)
+ if (validationError) {
+ setError(validationError)
+ return
+ }
+ setError('')
+ await onSubmit(draft, initialValue?.id)
+ }
+
+ async function handleTest() {
+ const validationError = validate(draft)
+ if (validationError) {
+ setError(validationError)
+ return
+ }
+ setError('')
+ const result = await onTest(draft, initialValue?.id)
+ setTestResult(result)
+ }
+
+ async function handleGoogleDriveAuth() {
+ const validationError = validate(draft)
+ if (validationError) {
+ setError(validationError)
+ return
+ }
+ setError('')
+ await onGoogleDriveAuth(draft, initialValue?.id)
+ }
+
+ return (
+
+
+ {error ? : }
+ {testResult ? : null}
+
+
+ 名称
+ setDraft((current) => ({ ...current, name: value }))} />
+
+
+
+ 类型
+ {
+ const nextType = value as StorageTargetType
+ setDraft((current) => ({
+ ...current,
+ type: nextType,
+ config: {},
+ }))
+ setTestResult(null)
+ }}
+ />
+
+
+
+ 描述
+ setDraft((current) => ({ ...current, description: value }))}
+ />
+
+
+
+ 启用
+ setDraft((current) => ({ ...current, enabled: checked }))} />
+
+
+ 环境配置
+
+
+
+ {getStorageTargetTypeLabel(draft.type)}
+
+
+ {fieldConfigs.map((field) => {
+ const value = draft.config[field.key]
+ const normalizedValue = typeof value === 'boolean' ? value : typeof value === 'string' ? value : field.type === 'switch' ? false : ''
+
+ return (
+
+
+ {field.label}
+ {field.required ? ' *' : ''}
+
+ {field.type === 'switch' ? (
+
+ updateConfig(field.key, checked)} />
+ {field.description ? {field.description} : null}
+
+ ) : field.type === 'password' ? (
+ updateConfig(field.key, nextValue)}
+ />
+ ) : (
+ updateConfig(field.key, nextValue)} />
+ )}
+ {field.description && field.type !== 'switch' ? (
+
+ {field.description}
+
+ ) : null}
+ {initialValue?.maskedFields?.includes(field.key) && !draft.config[field.key] ? (
+
+ 已存在敏感配置,留空则保持不变。
+
+ ) : null}
+
+ )
+ })}
+
+
+
+
+
+ 测试连接
+
+ {draft.type === 'google_drive' ? (
+
+ {initialValue ? '重新授权 Google Drive' : '发起 Google Drive 授权'}
+
+ ) : null}
+
+ 保存
+
+
+
+
+ )
+}
diff --git a/web/src/components/storage-targets/field-config.test.ts b/web/src/components/storage-targets/field-config.test.ts
new file mode 100644
index 0000000..515c90c
--- /dev/null
+++ b/web/src/components/storage-targets/field-config.test.ts
@@ -0,0 +1,15 @@
+import { describe, expect, it } from 'vitest'
+import { getStorageTargetFieldConfigs, getStorageTargetTypeLabel } from './field-config'
+
// Unit tests for the storage-target field-config helpers: per-type form
// fields and display labels.
describe('storage target field config', () => {
  it('returns local disk field config', () => {
    const fields = getStorageTargetFieldConfigs('local_disk')
    expect(fields).toHaveLength(1)
    expect(fields[0]?.key).toBe('basePath')
  })

  it('returns readable type labels', () => {
    expect(getStorageTargetTypeLabel('google_drive')).toBe('Google Drive')
    expect(getStorageTargetTypeLabel('webdav')).toBe('WebDAV')
  })
})
diff --git a/web/src/components/storage-targets/field-config.ts b/web/src/components/storage-targets/field-config.ts
new file mode 100644
index 0000000..5789d68
--- /dev/null
+++ b/web/src/components/storage-targets/field-config.ts
@@ -0,0 +1,254 @@
+import type { StorageTargetFieldConfig, StorageTargetType } from '../../types/storage-targets'
+
+const FIELD_CONFIG_MAP: Record = {
+ local_disk: [
+ {
+ key: 'basePath',
+ label: '基础目录',
+ type: 'input',
+ required: true,
+ placeholder: '/data/backups',
+ description: 'BackupX 将在该目录下创建和管理备份文件。',
+ },
+ ],
+ s3: [
+ {
+ key: 'endpoint',
+ label: 'Endpoint',
+ type: 'input',
+ required: true,
+ placeholder: 'https://s3.amazonaws.com',
+ },
+ {
+ key: 'region',
+ label: '区域',
+ type: 'input',
+ required: true,
+ placeholder: 'ap-east-1',
+ },
+ {
+ key: 'bucket',
+ label: 'Bucket',
+ type: 'input',
+ required: true,
+ placeholder: 'backupx-prod',
+ },
+ {
+ key: 'accessKeyId',
+ label: 'Access Key ID',
+ type: 'input',
+ required: true,
+ sensitive: true,
+ placeholder: 'AKIA...',
+ },
+ {
+ key: 'secretAccessKey',
+ label: 'Secret Access Key',
+ type: 'password',
+ required: true,
+ sensitive: true,
+ placeholder: '输入新的 Secret Access Key',
+ },
+ {
+ key: 'forcePathStyle',
+ label: '强制 Path Style',
+ type: 'switch',
+ description: 'MinIO 或部分兼容对象存储通常需要开启。',
+ },
+ ],
+ webdav: [
+ {
+ key: 'endpoint',
+ label: 'WebDAV 地址',
+ type: 'input',
+ required: true,
+ placeholder: 'https://dav.example.com/remote.php/dav/files/admin',
+ },
+ {
+ key: 'username',
+ label: '用户名',
+ type: 'input',
+ required: true,
+ placeholder: 'admin',
+ },
+ {
+ key: 'password',
+ label: '密码',
+ type: 'password',
+ required: true,
+ sensitive: true,
+ placeholder: '输入新的 WebDAV 密码',
+ },
+ {
+ key: 'basePath',
+ label: '基础目录',
+ type: 'input',
+ placeholder: '/backupx',
+ },
+ ],
+ google_drive: [
+ {
+ key: 'clientId',
+ label: 'Client ID',
+ type: 'input',
+ required: true,
+ sensitive: true,
+ placeholder: 'Google OAuth Client ID',
+ },
+ {
+ key: 'clientSecret',
+ label: 'Client Secret',
+ type: 'password',
+ required: true,
+ sensitive: true,
+ placeholder: '输入新的 Google Client Secret',
+ },
+ {
+ key: 'folderId',
+ label: '目标文件夹 ID',
+ type: 'input',
+ placeholder: '留空则使用根目录',
+ },
+ ],
+ aliyun_oss: [
+ {
+ key: 'region',
+ label: '区域 (Region)',
+ type: 'input',
+ required: true,
+ placeholder: 'cn-hangzhou',
+ description: '如 cn-hangzhou, cn-shanghai, cn-beijing, cn-shenzhen 等。系统会自动组装 Endpoint。',
+ },
+ {
+ key: 'bucket',
+ label: 'Bucket',
+ type: 'input',
+ required: true,
+ placeholder: 'my-backup-bucket',
+ },
+ {
+ key: 'accessKeyId',
+ label: 'AccessKey ID',
+ type: 'input',
+ required: true,
+ sensitive: true,
+ placeholder: 'LTAI...',
+ },
+ {
+ key: 'secretAccessKey',
+ label: 'AccessKey Secret',
+ type: 'password',
+ required: true,
+ sensitive: true,
+ placeholder: '输入新的 AccessKey Secret',
+ },
+ {
+ key: 'internalNetwork',
+ label: '使用内网 Endpoint',
+ type: 'switch',
+ description: '同一区域的 ECS 实例可启用内网传输,节省流量费用。',
+ },
+ ],
+ tencent_cos: [
+ {
+ key: 'region',
+ label: '区域 (Region)',
+ type: 'input',
+ required: true,
+ placeholder: 'ap-guangzhou',
+ description: '如 ap-guangzhou, ap-shanghai, ap-beijing, ap-chengdu 等。系统会自动组装 Endpoint。',
+ },
+ {
+ key: 'bucket',
+ label: 'Bucket',
+ type: 'input',
+ required: true,
+ placeholder: 'backup-1250000000',
+ description: '格式为 BucketName-APPID,如 backup-1250000000。',
+ },
+ {
+ key: 'accessKeyId',
+ label: 'SecretId',
+ type: 'input',
+ required: true,
+ sensitive: true,
+ placeholder: 'AKIDxxxxxxxx',
+ },
+ {
+ key: 'secretAccessKey',
+ label: 'SecretKey',
+ type: 'password',
+ required: true,
+ sensitive: true,
+ placeholder: '输入新的 SecretKey',
+ },
+ ],
+ qiniu_kodo: [
+ {
+ key: 'region',
+ label: '区域 (Region)',
+ type: 'input',
+ required: true,
+ placeholder: 'z0',
+ description: '支持 z0(华东), cn-east-2(华东-浙江2), z1(华北), z2(华南), na0(北美), as0(东南亚)。',
+ },
+ {
+ key: 'bucket',
+ label: 'Bucket',
+ type: 'input',
+ required: true,
+ placeholder: 'my-backup',
+ },
+ {
+ key: 'accessKeyId',
+ label: 'AccessKey',
+ type: 'input',
+ required: true,
+ sensitive: true,
+ placeholder: '七牛云 AccessKey',
+ },
+ {
+ key: 'secretAccessKey',
+ label: 'SecretKey',
+ type: 'password',
+ required: true,
+ sensitive: true,
+ placeholder: '输入新的 SecretKey',
+ },
+ ],
+}
+
// Returns the form field definitions for the given storage provider type.
export function getStorageTargetFieldConfigs(type: StorageTargetType) {
  return FIELD_CONFIG_MAP[type]
}
+
+export function getStorageTargetTypeLabel(type: StorageTargetType) {
+ switch (type) {
+ case 'local_disk':
+ return '本地磁盘'
+ case 'google_drive':
+ return 'Google Drive'
+ case 's3':
+ return 'S3 Compatible'
+ case 'webdav':
+ return 'WebDAV'
+ case 'aliyun_oss':
+ return '阿里云 OSS'
+ case 'tencent_cos':
+ return '腾讯云 COS'
+ case 'qiniu_kodo':
+ return '七牛云 Kodo'
+ default:
+ return type
+ }
+}
+
// <Select> options for the storage provider picker; labels mirror
// getStorageTargetTypeLabel.
export const storageTargetTypeOptions = [
  { label: '本地磁盘', value: 'local_disk' },
  { label: '阿里云 OSS', value: 'aliyun_oss' },
  { label: '腾讯云 COS', value: 'tencent_cos' },
  { label: '七牛云 Kodo', value: 'qiniu_kodo' },
  { label: 'S3 Compatible', value: 's3' },
  { label: 'Google Drive', value: 'google_drive' },
  { label: 'WebDAV', value: 'webdav' },
] as const
diff --git a/web/src/i18n.ts b/web/src/i18n.ts
new file mode 100644
index 0000000..56fbd50
--- /dev/null
+++ b/web/src/i18n.ts
@@ -0,0 +1,20 @@
+import i18n from 'i18next'
+import { initReactI18next } from 'react-i18next'
+import zhCN from './locales/zh-CN.json'
+import enUS from './locales/en-US.json'
+
+const savedLanguage = localStorage.getItem('backupx-language') || 'zh-CN'
+
+i18n.use(initReactI18next).init({
+ resources: {
+ 'zh-CN': { translation: zhCN },
+ 'en-US': { translation: enUS },
+ },
+ lng: savedLanguage,
+ fallbackLng: 'zh-CN',
+ interpolation: {
+ escapeValue: false,
+ },
+})
+
+export default i18n
diff --git a/web/src/layouts/AppLayout.tsx b/web/src/layouts/AppLayout.tsx
new file mode 100644
index 0000000..e4577f4
--- /dev/null
+++ b/web/src/layouts/AppLayout.tsx
@@ -0,0 +1,170 @@
+import { Avatar, Button, Dropdown, Layout, Menu, Message, Modal, Form, Input, Space, Typography } from '@arco-design/web-react'
+import {
+ IconDashboard,
+ IconStorage,
+ IconFile,
+ IconHistory,
+ IconNotification,
+ IconSettings,
+ IconMenuFold,
+ IconMenuUnfold,
+ IconLock,
+ IconPoweroff,
+ IconDown,
+ IconCloud,
+ IconDesktop,
+} from '@arco-design/web-react/icon'
+import { useState } from 'react'
+import { Outlet, useLocation, useNavigate } from 'react-router-dom'
+import { changePassword, type ChangePasswordPayload } from '../services/auth'
+import { useAuthStore } from '../stores/auth'
+import { resolveErrorMessage } from '../utils/error'
+
+const Header = Layout.Header
+const Sider = Layout.Sider
+const Content = Layout.Content
+
+function resolveSelectedKey(pathname: string) {
+ if (pathname.startsWith('/backup/tasks')) {
+ return '/backup/tasks'
+ }
+ if (pathname.startsWith('/backup/records')) {
+ return '/backup/records'
+ }
+ if (pathname.startsWith('/storage-targets')) {
+ return '/storage-targets'
+ }
+ if (pathname.startsWith('/settings/notifications')) {
+ return '/settings/notifications'
+ }
+ if (pathname.startsWith('/nodes')) {
+ return '/nodes'
+ }
+ if (pathname.startsWith('/settings') || pathname.startsWith('/system-info')) {
+ return '/settings'
+ }
+ return pathname
+}
+
+const menuItems = [
+ { key: '/dashboard', label: '仪表盘', icon: },
+ { key: '/backup/tasks', label: '备份任务', icon: },
+ { key: '/backup/records', label: '备份记录', icon: },
+ { key: '/storage-targets', label: '存储目标', icon: },
+ { key: '/nodes', label: '节点管理', icon: },
+ { key: '/settings/notifications', label: '通知配置', icon: },
+ { key: '/settings', label: '系统设置', icon: },
+]
+
+export function AppLayout() {
+ const [collapsed, setCollapsed] = useState(false)
+ const [pwdVisible, setPwdVisible] = useState(false)
+ const [pwdLoading, setPwdLoading] = useState(false)
+ const [pwdForm] = Form.useForm()
+ const location = useLocation()
+ const navigate = useNavigate()
+ const user = useAuthStore((state) => state.user)
+ const logout = useAuthStore((state) => state.logout)
+
+ async function handleChangePassword() {
+ try {
+ const values = await pwdForm.validate()
+ if (values.newPassword !== values.confirmPassword) {
+ Message.error('两次输入的新密码不一致')
+ return
+ }
+ setPwdLoading(true)
+ await changePassword({ oldPassword: values.oldPassword, newPassword: values.newPassword })
+ Message.success('密码修改成功')
+ setPwdVisible(false)
+ pwdForm.resetFields()
+ } catch (err) {
+ if (err) {
+ Message.error(resolveErrorMessage(err, '密码修改失败'))
+ }
+ } finally {
+ setPwdLoading(false)
+ }
+ }
+
+ const userDroplist = (
+ {
+ if (key === 'password') {
+ setPwdVisible(true)
+ } else if (key === 'logout') {
+ logout()
+ }
+ }}>
+ 修改密码
+ 退出登录
+
+ )
+
+ return (
+
+
+
+
+ {!collapsed && BackupX }
+
+ navigate(key)}>
+ {menuItems.map((item) => (
+
+ {item.icon}
+ {item.label}
+
+ ))}
+
+ {!collapsed && (
+
+ v1.0.0
+
+ )}
+
+
+
+ : }
+ onClick={() => setCollapsed((value) => !value)}
+ />
+
+
+
+
+ {(user?.displayName ?? user?.username ?? '管')[0]}
+
+ {user?.displayName ?? user?.username ?? '管理员'}
+
+
+
+
+
+
+
+
+
+
+ { setPwdVisible(false); pwdForm.resetFields() }}
+ onOk={handleChangePassword}
+ confirmLoading={pwdLoading}
+ unmountOnExit
+ >
+
+
+
+
+
+
+
+
+
+
+
+
+ )
+}
diff --git a/web/src/layouts/protected-layout.tsx b/web/src/layouts/protected-layout.tsx
new file mode 100644
index 0000000..abc81e7
--- /dev/null
+++ b/web/src/layouts/protected-layout.tsx
@@ -0,0 +1,84 @@
+import {
+ Button,
+ Layout,
+ Menu,
+ Space,
+ Typography,
+} from '@arco-design/web-react';
+import {
+ IconDashboard,
+ IconInfoCircle,
+ IconPoweroff,
+} from '@arco-design/web-react/icon';
+import { Outlet, useLocation, useNavigate } from 'react-router-dom';
+
+import { useAuthStore } from '../stores/auth';
+
+const { Sider, Header, Content } = Layout;
+
+const menuItems = [
+ {
+ key: '/',
+ label: '仪表盘',
+ icon: ,
+ },
+ {
+ key: '/system-info',
+ label: '系统信息',
+ icon: ,
+ },
+];
+
+export function ProtectedLayout() {
+ const navigate = useNavigate();
+ const location = useLocation();
+ const user = useAuthStore((state) => state.user);
+ const logout = useAuthStore((state) => state.logout);
+
+ return (
+
+
+ BackupX
+ {
+ navigate(key);
+ }}
+ style={{ width: '100%' }}
+ >
+ {menuItems.map((item) => (
+
+ {item.icon}
+ {item.label}
+
+ ))}
+
+
+
+
+
+
+ BackupX 管理台
+
+
+ {user?.displayName ?? user?.username ?? '未登录'}
+
+ }
+ type="outline"
+ onClick={() => {
+ logout();
+ navigate('/login', { replace: true });
+ }}
+ >
+ 退出登录
+
+
+
+
+
+
+
+
+ );
+}
diff --git a/web/src/locales/en-US.json b/web/src/locales/en-US.json
new file mode 100644
index 0000000..cca9df2
--- /dev/null
+++ b/web/src/locales/en-US.json
@@ -0,0 +1,130 @@
+{
+ "common": {
+ "confirm": "Confirm",
+ "cancel": "Cancel",
+ "save": "Save",
+ "delete": "Delete",
+ "edit": "Edit",
+ "create": "Create",
+ "search": "Search",
+ "loading": "Loading...",
+ "success": "Success",
+ "failed": "Failed",
+ "enabled": "Enabled",
+ "disabled": "Disabled",
+ "unknown": "Unknown",
+ "noData": "No data",
+ "actions": "Actions",
+ "status": "Status",
+ "name": "Name",
+ "type": "Type",
+ "description": "Description",
+ "createdAt": "Created At",
+ "updatedAt": "Updated At"
+ },
+ "menu": {
+ "dashboard": "Dashboard",
+ "backupTasks": "Backup Tasks",
+ "backupRecords": "Backup Records",
+ "storageTargets": "Storage",
+ "notifications": "Notifications",
+ "settings": "Settings",
+ "docs": "Docs"
+ },
+ "auth": {
+ "login": "Login",
+ "logout": "Logout",
+ "username": "Username",
+ "password": "Password",
+ "changePassword": "Change Password",
+ "oldPassword": "Old Password",
+ "newPassword": "New Password",
+ "loginTitle": "Sign in to BackupX",
+ "loginSubtitle": "Linux Server Backup Manager"
+ },
+ "dashboard": {
+ "title": "Dashboard",
+ "totalTasks": "Backup Tasks",
+ "successRate": "Success Rate",
+ "totalBackupSize": "Total Backup Size",
+ "lastBackup": "Last Backup",
+ "backupTimeline": "Backup Trend",
+ "storageUsage": "Storage Usage",
+ "recentRecords": "Recent Backups",
+ "minutesAgo": "{{count}} min ago",
+ "hoursAgo": "{{count}} hr ago",
+ "daysAgo": "{{count}} days ago"
+ },
+ "backup": {
+ "tasks": "Backup Tasks",
+ "createTask": "Create Task",
+ "editTask": "Edit Task",
+ "taskName": "Task Name",
+ "taskType": "Backup Type",
+ "cronExpr": "Schedule",
+ "sourcePath": "Source Path",
+ "excludePatterns": "Exclude Patterns",
+ "storageTarget": "Storage Target",
+ "retentionDays": "Retention Days",
+ "maxBackups": "Max Backups",
+ "compression": "Compression",
+ "encrypt": "Encryption",
+ "runNow": "Run Now",
+ "enable": "Enable",
+ "disable": "Disable",
+ "typeFile": "File/Directory",
+ "typeMySQL": "MySQL",
+ "typeSQLite": "SQLite",
+ "typePostgreSQL": "PostgreSQL"
+ },
+ "records": {
+ "title": "Backup Records",
+ "taskName": "Task Name",
+ "status": "Status",
+ "fileName": "File Name",
+ "fileSize": "File Size",
+ "duration": "Duration",
+ "startedAt": "Started At",
+ "viewLog": "View Log",
+ "download": "Download",
+ "restore": "Restore",
+ "delete": "Delete",
+ "statusRunning": "Running",
+ "statusSuccess": "Success",
+ "statusFailed": "Failed"
+ },
+ "storage": {
+ "title": "Storage",
+ "addTarget": "Add Storage",
+ "editTarget": "Edit Storage",
+ "testConnection": "Test Connection",
+ "connectionSuccess": "Connection Successful",
+ "connectionFailed": "Connection Failed",
+ "typeLocalDisk": "Local Disk",
+ "typeGoogleDrive": "Google Drive",
+ "typeS3": "S3 Compatible",
+ "typeWebDAV": "WebDAV",
+ "authorize": "Authorize",
+ "authorized": "Authorized"
+ },
+ "notification": {
+ "title": "Notifications",
+ "addNotification": "Add Notification",
+ "editNotification": "Edit Notification",
+ "testNotification": "Send Test",
+ "typeEmail": "Email",
+ "typeWebhook": "Webhook",
+ "typeTelegram": "Telegram",
+ "onSuccess": "Notify on Success",
+ "onFailure": "Notify on Failure"
+ },
+ "settings": {
+ "title": "Settings",
+ "general": "General",
+ "language": "Language",
+ "systemInfo": "System Info",
+ "version": "Version",
+ "uptime": "Uptime",
+ "diskUsage": "Disk Usage"
+ }
+}
diff --git a/web/src/locales/zh-CN.json b/web/src/locales/zh-CN.json
new file mode 100644
index 0000000..4625ef0
--- /dev/null
+++ b/web/src/locales/zh-CN.json
@@ -0,0 +1,130 @@
+{
+ "common": {
+ "confirm": "确认",
+ "cancel": "取消",
+ "save": "保存",
+ "delete": "删除",
+ "edit": "编辑",
+ "create": "创建",
+ "search": "搜索",
+ "loading": "加载中...",
+ "success": "操作成功",
+ "failed": "操作失败",
+ "enabled": "已启用",
+ "disabled": "已禁用",
+ "unknown": "未知",
+ "noData": "暂无数据",
+ "actions": "操作",
+ "status": "状态",
+ "name": "名称",
+ "type": "类型",
+ "description": "描述",
+ "createdAt": "创建时间",
+ "updatedAt": "更新时间"
+ },
+ "menu": {
+ "dashboard": "仪表盘",
+ "backupTasks": "备份任务",
+ "backupRecords": "备份记录",
+ "storageTargets": "存储管理",
+ "notifications": "通知配置",
+ "settings": "系统设置",
+ "docs": "文档"
+ },
+ "auth": {
+ "login": "登录",
+ "logout": "退出登录",
+ "username": "用户名",
+ "password": "密码",
+ "changePassword": "修改密码",
+ "oldPassword": "旧密码",
+ "newPassword": "新密码",
+ "loginTitle": "登录 BackupX",
+ "loginSubtitle": "Linux 服务器备份管理系统"
+ },
+ "dashboard": {
+ "title": "仪表盘",
+ "totalTasks": "备份任务",
+ "successRate": "成功率",
+ "totalBackupSize": "总备份量",
+ "lastBackup": "最近备份",
+ "backupTimeline": "备份趋势",
+ "storageUsage": "存储用量",
+ "recentRecords": "最近备份记录",
+ "minutesAgo": "{{count}} 分钟前",
+ "hoursAgo": "{{count}} 小时前",
+ "daysAgo": "{{count}} 天前"
+ },
+ "backup": {
+ "tasks": "备份任务",
+ "createTask": "创建任务",
+ "editTask": "编辑任务",
+ "taskName": "任务名称",
+ "taskType": "备份类型",
+ "cronExpr": "定时策略",
+ "sourcePath": "源路径",
+ "excludePatterns": "排除规则",
+ "storageTarget": "存储目标",
+ "retentionDays": "保留天数",
+ "maxBackups": "最大份数",
+ "compression": "压缩",
+ "encrypt": "加密",
+ "runNow": "立即执行",
+ "enable": "启用",
+ "disable": "禁用",
+ "typeFile": "文件/目录",
+ "typeMySQL": "MySQL",
+ "typeSQLite": "SQLite",
+ "typePostgreSQL": "PostgreSQL"
+ },
+ "records": {
+ "title": "备份记录",
+ "taskName": "任务名称",
+ "status": "状态",
+ "fileName": "文件名",
+ "fileSize": "文件大小",
+ "duration": "耗时",
+ "startedAt": "开始时间",
+ "viewLog": "查看日志",
+ "download": "下载",
+ "restore": "恢复",
+ "delete": "删除",
+ "statusRunning": "执行中",
+ "statusSuccess": "成功",
+ "statusFailed": "失败"
+ },
+ "storage": {
+ "title": "存储管理",
+ "addTarget": "添加存储",
+ "editTarget": "编辑存储",
+ "testConnection": "测试连接",
+ "connectionSuccess": "连接成功",
+ "connectionFailed": "连接失败",
+ "typeLocalDisk": "本地磁盘",
+ "typeGoogleDrive": "Google Drive",
+ "typeS3": "S3 兼容",
+ "typeWebDAV": "WebDAV",
+ "authorize": "授权",
+ "authorized": "已授权"
+ },
+ "notification": {
+ "title": "通知配置",
+ "addNotification": "添加通知",
+ "editNotification": "编辑通知",
+ "testNotification": "发送测试",
+ "typeEmail": "邮件",
+ "typeWebhook": "Webhook",
+ "typeTelegram": "Telegram",
+ "onSuccess": "成功时通知",
+ "onFailure": "失败时通知"
+ },
+ "settings": {
+ "title": "系统设置",
+ "general": "通用设置",
+ "language": "语言",
+ "systemInfo": "系统信息",
+ "version": "版本",
+ "uptime": "运行时间",
+ "diskUsage": "磁盘使用"
+ }
+}
diff --git a/web/src/main.tsx b/web/src/main.tsx
new file mode 100644
index 0000000..d647000
--- /dev/null
+++ b/web/src/main.tsx
@@ -0,0 +1,12 @@
+import React from 'react'
+import ReactDOM from 'react-dom/client'
+import '@arco-design/web-react/dist/css/arco.css'
+import './styles/global.css'
+import './i18n'
+import { RootApp } from './RootApp'
+
+ReactDOM.createRoot(document.getElementById('root')!).render(
+
+
+ ,
+)
diff --git a/web/src/pages/backup-records/BackupRecordsPage.tsx b/web/src/pages/backup-records/BackupRecordsPage.tsx
new file mode 100644
index 0000000..6e69ddb
--- /dev/null
+++ b/web/src/pages/backup-records/BackupRecordsPage.tsx
@@ -0,0 +1,189 @@
+import { Button, Card, Empty, Message, Select, Space, Table, Tag, Typography } from '@arco-design/web-react'
+import { useCallback, useEffect, useMemo, useState } from 'react'
+import { useSearchParams } from 'react-router-dom'
+import { BackupRecordLogDrawer } from '../../components/backup-records/BackupRecordLogDrawer'
+import { listBackupRecords } from '../../services/backup-records'
+import { listBackupTasks } from '../../services/backup-tasks'
+import type { BackupRecordStatus, BackupRecordSummary } from '../../types/backup-records'
+import type { BackupTaskSummary } from '../../types/backup-tasks'
+import { resolveErrorMessage } from '../../utils/error'
+import { formatBytes, formatDateTime, formatDuration } from '../../utils/format'
+
+const statusOptions = [
+ { label: '全部状态', value: '' },
+ { label: '执行中', value: 'running' },
+ { label: '成功', value: 'success' },
+ { label: '失败', value: 'failed' },
+]
+
+function getRecordStatusColor(status: BackupRecordStatus) {
+ switch (status) {
+ case 'success':
+ return 'green'
+ case 'failed':
+ return 'red'
+ default:
+ return 'arcoblue'
+ }
+}
+
+export function BackupRecordsPage() {
+ const [searchParams, setSearchParams] = useSearchParams()
+ const [records, setRecords] = useState([])
+ const [tasks, setTasks] = useState([])
+ const [loading, setLoading] = useState(true)
+ const [error, setError] = useState('')
+
+ const selectedTaskId = Number(searchParams.get('taskId') ?? 0) || undefined
+ const selectedRecordId = Number(searchParams.get('recordId') ?? 0) || undefined
+ const selectedStatus = (searchParams.get('status') ?? '') as BackupRecordStatus | ''
+
+ const taskOptions = useMemo(
+ () => [{ label: '全部任务', value: 0 }, ...tasks.map((item) => ({ label: item.name, value: item.id }))],
+ [tasks],
+ )
+
+ const loadData = useCallback(async () => {
+ setLoading(true)
+ try {
+ const [recordList, taskList] = await Promise.all([
+ listBackupRecords({ taskId: selectedTaskId, status: selectedStatus }),
+ listBackupTasks(),
+ ])
+ setRecords(recordList)
+ setTasks(taskList)
+ setError('')
+ } catch (loadError) {
+ setError(resolveErrorMessage(loadError, '加载备份记录失败'))
+ } finally {
+ setLoading(false)
+ }
+ }, [selectedStatus, selectedTaskId])
+
+ useEffect(() => {
+ void loadData()
+ }, [loadData])
+
+ function updateSearchParam(key: 'taskId' | 'status' | 'recordId', value?: string) {
+ const nextParams = new URLSearchParams(searchParams)
+ if (!value || value === '0') {
+ nextParams.delete(key)
+ } else {
+ nextParams.set(key, value)
+ }
+ setSearchParams(nextParams, { replace: true })
+ }
+
+ const columns = [
+ {
+ title: '任务 / 状态',
+ dataIndex: 'taskName',
+ render: (_: unknown, record: BackupRecordSummary) => {
+ const statusLabel = record.status === 'success' ? '成功' : record.status === 'failed' ? '失败' : record.status === 'running' ? '执行中' : record.status
+ return (
+
+ {record.taskName}
+
+ {statusLabel ? {statusLabel} : - }
+ {record.storageTargetName ? {record.storageTargetName} : - }
+
+
+ )
+ },
+ },
+ {
+ title: '文件',
+ dataIndex: 'fileName',
+ render: (_: unknown, record: BackupRecordSummary) => (
+
+ {record.fileName || '-'}
+ {formatBytes(record.fileSize)}
+
+ ),
+ },
+ {
+ title: '开始 / 完成',
+ dataIndex: 'startedAt',
+ render: (_: unknown, record: BackupRecordSummary) => (
+
+ {formatDateTime(record.startedAt)}
+ {formatDateTime(record.completedAt)}
+
+ ),
+ },
+ {
+ title: '耗时',
+ dataIndex: 'durationSeconds',
+ render: (value: number) => formatDuration(value),
+ },
+ {
+ title: '错误信息',
+ dataIndex: 'errorMessage',
+ render: (value: string) => value || '-',
+ },
+ {
+ title: '操作',
+ dataIndex: 'actions',
+ width: 120,
+ render: (_: unknown, record: BackupRecordSummary) => (
+ updateSearchParam('recordId', String(record.id))}>
+ 查看日志
+
+ ),
+ },
+ ]
+
+ return (
+
+
+ 备份记录
+
+ 查看任务执行结果、筛选历史记录,并在详情中跟踪实时日志、下载或恢复产物。
+
+
+
+
+
+
+ 任务筛选
+ updateSearchParam('taskId', Number(value) > 0 ? String(value) : undefined)} />
+
+
+ 状态筛选
+ updateSearchParam('status', value ? String(value) : undefined)} />
+
+ {
+ const next = new URLSearchParams(searchParams)
+ next.delete('taskId')
+ next.delete('status')
+ setSearchParams(next, { replace: true })
+ }}>
+ 重置筛选
+
+
+
+
+ {error ? {error} : null}
+
+
+ {records.length === 0 && !loading ? (
+
+ ) : (
+ } />
+ )}
+
+
+ updateSearchParam('recordId', undefined)}
+ onChanged={async () => {
+ await loadData()
+ if (selectedRecordId) {
+ Message.success('备份记录已更新')
+ }
+ }}
+ />
+
+ )
+}
diff --git a/web/src/pages/backup-tasks/BackupTasksPage.tsx b/web/src/pages/backup-tasks/BackupTasksPage.tsx
new file mode 100644
index 0000000..9a00dcf
--- /dev/null
+++ b/web/src/pages/backup-tasks/BackupTasksPage.tsx
@@ -0,0 +1,248 @@
+import { Button, Card, Empty, Message, PageHeader, Space, Table, Tag, Typography } from '@arco-design/web-react'
+import { useCallback, useEffect, useMemo, useState } from 'react'
+import { useNavigate } from 'react-router-dom'
+import { BackupTaskDetailDrawer } from '../../components/backup-tasks/BackupTaskDetailDrawer'
+import { BackupTaskFormDrawer } from '../../components/backup-tasks/BackupTaskFormDrawer'
+import { getBackupTaskStatusColor, getBackupTaskStatusLabel, getBackupTaskTypeLabel } from '../../components/backup-tasks/field-config'
+import { createBackupTask, deleteBackupTask, getBackupTask, listBackupTasks, runBackupTask, toggleBackupTask, updateBackupTask } from '../../services/backup-tasks'
+import { listStorageTargets } from '../../services/storage-targets'
+import type { BackupTaskDetail, BackupTaskPayload, BackupTaskSummary } from '../../types/backup-tasks'
+import type { StorageTargetSummary } from '../../types/storage-targets'
+import { resolveErrorMessage } from '../../utils/error'
+import { formatDateTime } from '../../utils/format'
+
+export function BackupTasksPage() {
+ const navigate = useNavigate()
+ const [tasks, setTasks] = useState([])
+ const [storageTargets, setStorageTargets] = useState([])
+ const [loading, setLoading] = useState(true)
+ const [submitting, setSubmitting] = useState(false)
+ const [drawerVisible, setDrawerVisible] = useState(false)
+ const [detailVisible, setDetailVisible] = useState(false)
+ const [editingTask, setEditingTask] = useState(null)
+ const [detailTask, setDetailTask] = useState(null)
+ const [error, setError] = useState('')
+
+ const enabledStorageTargets = useMemo(() => storageTargets.filter((item) => item.enabled), [storageTargets])
+
+ const loadData = useCallback(async () => {
+ setLoading(true)
+ try {
+ const [taskList, targetList] = await Promise.all([listBackupTasks(), listStorageTargets()])
+ setTasks(taskList)
+ setStorageTargets(targetList)
+ setError('')
+ } catch (loadError) {
+ setError(resolveErrorMessage(loadError, '加载备份任务失败'))
+ } finally {
+ setLoading(false)
+ }
+ }, [])
+
+ useEffect(() => {
+ void loadData()
+ }, [loadData])
+
+ async function openEdit(id: number) {
+ setSubmitting(true)
+ try {
+ const detail = await getBackupTask(id)
+ setEditingTask(detail)
+ setDrawerVisible(true)
+ } catch (loadError) {
+ Message.error(resolveErrorMessage(loadError, '加载任务详情失败'))
+ } finally {
+ setSubmitting(false)
+ }
+ }
+
+ async function openDetail(id: number) {
+ setSubmitting(true)
+ try {
+ const detail = await getBackupTask(id)
+ setDetailTask(detail)
+ setDetailVisible(true)
+ } catch (loadError) {
+ Message.error(resolveErrorMessage(loadError, '加载任务详情失败'))
+ } finally {
+ setSubmitting(false)
+ }
+ }
+
+ async function handleSubmit(value: BackupTaskPayload, taskId?: number) {
+ setSubmitting(true)
+ try {
+ if (taskId) {
+ await updateBackupTask(taskId, value)
+ Message.success('备份任务已更新')
+ } else {
+ await createBackupTask(value)
+ Message.success('备份任务已创建')
+ }
+ setDrawerVisible(false)
+ setEditingTask(null)
+ await loadData()
+ } catch (submitError) {
+ Message.error(resolveErrorMessage(submitError, '保存备份任务失败'))
+ throw submitError
+ } finally {
+ setSubmitting(false)
+ }
+ }
+
+ async function handleToggle(task: BackupTaskSummary) {
+ try {
+ await toggleBackupTask(task.id, { enabled: !task.enabled })
+ Message.success(task.enabled ? '任务已停用' : '任务已启用')
+ await loadData()
+ } catch (toggleError) {
+ Message.error(resolveErrorMessage(toggleError, '切换任务状态失败'))
+ }
+ }
+
+ async function handleRun(task: BackupTaskSummary) {
+ try {
+ const record = await runBackupTask(task.id)
+ Message.success('已触发备份任务,正在打开执行日志')
+ navigate(`/backup/records?taskId=${task.id}&recordId=${record.id}`)
+ } catch (runError) {
+ Message.error(resolveErrorMessage(runError, '触发备份任务失败'))
+ }
+ }
+
+ async function handleDelete(task: BackupTaskSummary) {
+ if (!window.confirm(`确定删除任务“${task.name}”吗?`)) {
+ return
+ }
+ try {
+ await deleteBackupTask(task.id)
+ Message.success('备份任务已删除')
+ await loadData()
+ } catch (deleteError) {
+ Message.error(resolveErrorMessage(deleteError, '删除备份任务失败'))
+ }
+ }
+
+ const columns = [
+ {
+ title: '任务名称',
+ dataIndex: 'name',
+ render: (_: unknown, record: BackupTaskSummary) => (
+
+ {record.name}
+
+ {getBackupTaskTypeLabel(record.type) && {getBackupTaskTypeLabel(record.type)} }
+ {record.enabled !== undefined && (
+ {record.enabled ? '已启用' : '已停用'}
+ )}
+
+
+ ),
+ },
+ {
+ title: '调度',
+ dataIndex: 'cronExpr',
+ render: (value: string) => value || '仅手动执行',
+ },
+ {
+ title: '存储目标',
+ dataIndex: 'storageTargetName',
+ render: (value: string) => value || '-',
+ },
+ {
+ title: '策略',
+ dataIndex: 'retentionDays',
+ render: (_: unknown, record: BackupTaskSummary) => `${record.retentionDays} 天 / ${record.maxBackups} 份`,
+ },
+ {
+ title: '最近状态',
+ render: (value: BackupTaskSummary['lastStatus']) => {
+ const label = getBackupTaskStatusLabel(value)
+ return label ? {label} : -
+ },
+ },
+ {
+ title: '最近执行',
+ dataIndex: 'lastRunAt',
+ render: (value?: string) => formatDateTime(value),
+ },
+ {
+ title: '操作',
+ dataIndex: 'actions',
+ width: 280,
+ render: (_: unknown, record: BackupTaskSummary) => (
+
+ void openDetail(record.id)}>
+ 详情
+
+ void openEdit(record.id)} loading={submitting && editingTask?.id === record.id}>
+ 编辑
+
+ void handleRun(record)}>
+ 立即执行
+
+ void handleToggle(record)}>
+ {record.enabled ? '停用' : '启用'}
+
+ void handleDelete(record)}>
+ 删除
+
+
+ ),
+ },
+ ]
+
+ return (
+
+ {
+ setEditingTask(null)
+ setDrawerVisible(true)
+ }}
+ >
+ 新建任务
+
+ }
+ />
+
+ {error ? {error} : null}
+ {enabledStorageTargets.length === 0 ? (
+
+
+
+ ) : null}
+
+
+ } />
+
+
+ {
+ setDrawerVisible(false)
+ setEditingTask(null)
+ }}
+ onSubmit={handleSubmit}
+ />
+
+ {
+ setDetailVisible(false)
+ setDetailTask(null)
+ }}
+ />
+
+ )
+}
diff --git a/web/src/pages/dashboard/DashboardPage.tsx b/web/src/pages/dashboard/DashboardPage.tsx
new file mode 100644
index 0000000..37770b7
--- /dev/null
+++ b/web/src/pages/dashboard/DashboardPage.tsx
@@ -0,0 +1,219 @@
+import { Avatar, Card, Empty, Grid, PageHeader, Space, Table, Tag, Typography } from '@arco-design/web-react'
+import { IconCheckCircle, IconHistory, IconSave, IconStorage } from '@arco-design/web-react/icon'
+import ReactEChartsCore from 'echarts-for-react/lib/core'
+import * as echarts from 'echarts/core'
+import { LineChart, PieChart } from 'echarts/charts'
+import { GridComponent, TooltipComponent, LegendComponent } from 'echarts/components'
+import { CanvasRenderer } from 'echarts/renderers'
+import { useEffect, useMemo, useState } from 'react'
+import { fetchDashboardStats, fetchDashboardTimeline } from '../../services/dashboard'
+import { useAuthStore } from '../../stores/auth'
+import type { BackupTimelinePoint, DashboardStats } from '../../types/dashboard'
+import { resolveErrorMessage } from '../../utils/error'
+import { formatBytes, formatDateTime, formatPercent } from '../../utils/format'
+
+echarts.use([LineChart, PieChart, GridComponent, TooltipComponent, LegendComponent, CanvasRenderer])
+
+const { Row, Col } = Grid
+
+export function DashboardPage() {
+ const user = useAuthStore((state) => state.user)
+ const [stats, setStats] = useState(null)
+ const [timeline, setTimeline] = useState([])
+ const [loading, setLoading] = useState(true)
+ const [error, setError] = useState('')
+
+ useEffect(() => {
+ let active = true
+ void (async () => {
+ setLoading(true)
+ try {
+ const [statsResult, timelineResult] = await Promise.all([fetchDashboardStats(), fetchDashboardTimeline(30)])
+ if (!active) {
+ return
+ }
+ setStats(statsResult)
+ setTimeline(timelineResult || [])
+ setError('')
+ } catch (loadError) {
+ if (active) {
+ setError(resolveErrorMessage(loadError, '加载仪表盘失败'))
+ }
+ } finally {
+ if (active) {
+ setLoading(false)
+ }
+ }
+ })()
+ return () => {
+ active = false
+ }
+ }, [])
+
+ const cards = useMemo(
+ () => [
+ { label: '备份任务', value: stats?.totalTasks ?? 0, helper: `${stats?.enabledTasks ?? 0} 个已启用`, icon: , color: 'var(--color-primary-6)', bg: 'var(--color-primary-1)' },
+ { label: '成功率', value: formatPercent(stats?.successRate), helper: '最近 30 天', icon: , color: 'var(--color-success-6)', bg: 'var(--color-success-1)' },
+ { label: '总备份量', value: formatBytes(stats?.totalBackupBytes), helper: '历史累计', icon: , color: 'var(--color-purple-6)', bg: 'var(--color-purple-1)' },
+ { label: '最近备份', value: stats?.totalRecords ?? 0, helper: formatDateTime(stats?.lastBackupAt), icon: , color: 'var(--color-warning-6)', bg: 'var(--color-warning-1)' },
+ ],
+ [stats],
+ )
+
+ const timelineChartOption = useMemo(() => ({
+ tooltip: { trigger: 'axis' as const },
+ legend: { data: ['成功', '失败'], bottom: 0 },
+ grid: { left: 40, right: 20, top: 40, bottom: 40 },
+ xAxis: {
+ type: 'category' as const,
+ data: timeline.map((p) => p.date),
+ axisLabel: { rotate: 45, fontSize: 11, color: 'var(--color-text-3)' },
+ axisLine: { lineStyle: { color: 'var(--color-border-2)' } },
+ axisTick: { show: false },
+ },
+ yAxis: {
+ type: 'value' as const,
+ minInterval: 1,
+ axisLabel: { color: 'var(--color-text-3)' },
+ splitLine: { lineStyle: { type: 'dashed', color: 'var(--color-border-2)' } },
+ },
+ series: [
+ {
+ name: '成功',
+ type: 'line' as const,
+ smooth: true,
+ data: timeline.map((p) => p.success),
+ itemStyle: { color: 'var(--color-primary-6)' },
+ areaStyle: { color: new echarts.graphic.LinearGradient(0, 0, 0, 1, [
+ { offset: 0, color: 'rgba(52,145,250,0.25)' },
+ { offset: 1, color: 'rgba(52,145,250,0.02)' },
+ ]) },
+ symbolSize: 6,
+ },
+ {
+ name: '失败',
+ type: 'line' as const,
+ smooth: true,
+ data: timeline.map((p) => p.failed),
+ itemStyle: { color: 'var(--color-danger-light-4)' },
+ areaStyle: { color: new echarts.graphic.LinearGradient(0, 0, 0, 1, [
+ { offset: 0, color: 'rgba(245,63,63,0.15)' },
+ { offset: 1, color: 'rgba(245,63,63,0.01)' },
+ ]) },
+ symbolSize: 6,
+ },
+ ],
+ }), [timeline])
+
+ const storageChartOption = useMemo(() => {
+ const data = (stats?.storageUsage ?? []).map((s) => ({
+ name: s.targetName || '未命名',
+ value: s.totalSize,
+ }))
+ return {
+ tooltip: {
+ trigger: 'item' as const,
+ formatter: (params: { name: string; value: number; percent: number }) =>
+ `${params.name}: ${formatBytes(params.value)} (${params.percent}%)`,
+ },
+ legend: { bottom: 0, type: 'scroll' as const },
+ series: [
+ {
+ type: 'pie' as const,
+ radius: ['50%', '70%'],
+ avoidLabelOverlap: false,
+ itemStyle: { borderRadius: 6, borderColor: 'var(--color-bg-2)', borderWidth: 2 },
+ label: { show: false },
+ emphasis: { label: { show: true, fontSize: 13, fontWeight: 'bold' } },
+ data,
+ color: ['#165DFF', '#14C9C9', '#FADC19', '#FF7D00', '#F53F3F', '#722ED1'],
+ },
+ ],
+ }
+ }, [stats])
+
+ return (
+
+
+ {error ? {error} : null}
+
+
+
+ {cards.map((card) => (
+
+
+
+
+ {card.icon}
+
+
+ {card.label}
+
+ {card.value}
+
+ {card.helper}
+
+
+
+
+ ))}
+
+
+
+
+
+ {timeline.length > 0 ? (
+
+ ) : (
+
+ 暂无数据
+
+ )}
+
+
+
+
+ {(stats?.storageUsage ?? []).length > 0 ? (
+
+ ) : (
+
+ 暂无存储数据
+
+ )}
+
+
+
+
+
+ }
+ rowKey="id"
+ columns={[
+ { title: '任务', dataIndex: 'taskName' },
+ {
+ title: '状态',
+ dataIndex: 'status',
+ render: (value: string) => {
+ const label = value === 'success' ? '成功' : value === 'failed' ? '失败' : value === 'running' ? '执行中' : value
+ return label ? (
+
+ {label}
+
+ ) : -
+ },
+ },
+ { title: '文件大小', dataIndex: 'fileSize', render: (value: number) => formatBytes(value) },
+ { title: '开始时间', dataIndex: 'startedAt', render: (value: string) => formatDateTime(value) },
+ ]}
+ data={stats?.recentRecords ?? []}
+ pagination={false}
+ stripe
+ />
+
+
+ )
+}
diff --git a/web/src/pages/dashboard/page.tsx b/web/src/pages/dashboard/page.tsx
new file mode 100644
index 0000000..c6ea738
--- /dev/null
+++ b/web/src/pages/dashboard/page.tsx
@@ -0,0 +1,30 @@
+import { Grid, Statistic, Typography } from '@arco-design/web-react';
+
+import { PageCard } from '../../components/page-card';
+
+const cards = [
+ { label: '存储目标', value: 0 },
+ { label: '备份任务', value: 0 },
+ { label: '最近执行', value: 0 },
+];
+
+export function DashboardPage() {
+ return (
+
+
+
+ `platform-foundation` 阶段提供基础登录、导航与系统状态展示,后续模块将在此页面扩展统计与运行数据。
+
+
+
+ {cards.map((card) => (
+
+
+
+
+
+ ))}
+
+
+ );
+}
diff --git a/web/src/pages/login/LoginPage.tsx b/web/src/pages/login/LoginPage.tsx
new file mode 100644
index 0000000..22ad35b
--- /dev/null
+++ b/web/src/pages/login/LoginPage.tsx
@@ -0,0 +1,201 @@
+import { Alert, Button, Card, Form, Input, Space, Typography, Message } from '@arco-design/web-react'
+import { IconCloud, IconLock, IconUser } from '@arco-design/web-react/icon'
+import { useEffect, useState } from 'react'
+import { useNavigate } from 'react-router-dom'
+import axios from 'axios'
+import { fetchSetupStatus } from '../../services/auth'
+import { useAuthStore } from '../../stores/auth'
+
+interface SetupFormValues {
+ username: string
+ password: string
+ displayName: string
+}
+
+interface LoginFormValues {
+ username: string
+ password: string
+}
+
+function resolveErrorMessage(error: unknown) {
+ if (axios.isAxiosError(error)) {
+ return error.response?.data?.message ?? '请求失败,请稍后重试'
+ }
+ return '请求失败,请稍后重试'
+}
+
+export function LoginPage() {
+ const navigate = useNavigate()
+ const authStatus = useAuthStore((state) => state.status)
+ const doLogin = useAuthStore((state) => state.login)
+ const doSetup = useAuthStore((state) => state.setup)
+ const [initialized, setInitialized] = useState(null)
+ const [loading, setLoading] = useState(false)
+
+ useEffect(() => {
+ if (authStatus === 'authenticated') {
+ navigate('/dashboard', { replace: true })
+ }
+ }, [authStatus, navigate])
+
+ useEffect(() => {
+ let mounted = true
+ void (async () => {
+ try {
+ const result = await fetchSetupStatus()
+ if (mounted) {
+ setInitialized(result.initialized)
+ }
+ } catch {
+ if (mounted) {
+ setInitialized(true)
+ }
+ }
+ })()
+ return () => {
+ mounted = false
+ }
+ }, [])
+
+ const handleSetup = async (values: SetupFormValues) => {
+ setLoading(true)
+ try {
+ await doSetup(values)
+ Message.success('初始化完成,正在进入控制台')
+ navigate('/dashboard', { replace: true })
+ } catch (error) {
+ Message.error(resolveErrorMessage(error))
+ } finally {
+ setLoading(false)
+ }
+ }
+
+ const handleLogin = async (values: LoginFormValues) => {
+ setLoading(true)
+ try {
+ await doLogin(values)
+ Message.success('登录成功')
+ navigate('/dashboard', { replace: true })
+ } catch (error) {
+ Message.error(resolveErrorMessage(error))
+ } finally {
+ setLoading(false)
+ }
+ }
+
+ return (
+
+
+
+
+ {/* Background decorative circles for the banner */}
+
+
+
+
+
+ {/* Outer pulsing rings */}
+
+
+
+
+
+
+
+
+
+
+ {/* Layer 1 (Top) */}
+
+
+
+ {/* Layer 2 (Middle) */}
+
+
+
+ {/* Layer 3 (Bottom) */}
+
+
+
+ {/* Glowing Dots Output - Animated */}
+
+
+
+
+
+
+
+
+
+
+
+
+ {/* Connecting Data Line */}
+
+
+
+
+
+
+
+ 守护您的数据资产
+
+
+ 安全、可靠、高效的企业级服务器备份管理平台
+
+
+
+
+
+
+
+ )
+}
diff --git a/web/src/pages/login/page.tsx b/web/src/pages/login/page.tsx
new file mode 100644
index 0000000..1ded942
--- /dev/null
+++ b/web/src/pages/login/page.tsx
@@ -0,0 +1,79 @@
+import {
+ Alert,
+ Button,
+ Card,
+ Form,
+ Grid,
+ Input,
+ Space,
+ Typography,
+} from '@arco-design/web-react';
+import { useMemo, useState } from 'react';
+import { useLocation, useNavigate } from 'react-router-dom';
+
+import { useAuthStore } from '../../stores/auth';
+
+interface LoginFormValue {
+ username: string;
+ password: string;
+}
+
+export function LoginPage() {
+ const navigate = useNavigate();
+ const location = useLocation();
+ const login = useAuthStore((state) => state.login);
+ const status = useAuthStore((state) => state.status);
+ const [errorMessage, setErrorMessage] = useState(null);
+
+ const redirectPath = useMemo(() => {
+ const from = location.state as { from?: { pathname?: string } } | null;
+ return from?.from?.pathname ?? '/';
+ }, [location.state]);
+
+ async function handleSubmit(values: LoginFormValue) {
+ setErrorMessage(null);
+
+ try {
+ await login(values);
+ navigate(redirectPath, { replace: true });
+ } catch (error) {
+ setErrorMessage(error instanceof Error ? error.message : '登录失败');
+ }
+ }
+
+ return (
+
+
+
+
+
+
+ 欢迎使用 BackupX
+
+ 登录后可管理备份任务、存储目标与系统状态。
+
+
+ {errorMessage ? : null}
+
+
+
+
+
+
+
+ 登录
+
+
+
+
+
+
+
+ );
+}
diff --git a/web/src/pages/nodes/NodesPage.tsx b/web/src/pages/nodes/NodesPage.tsx
new file mode 100644
index 0000000..75fe2d5
--- /dev/null
+++ b/web/src/pages/nodes/NodesPage.tsx
@@ -0,0 +1,180 @@
+import React, { useEffect, useState, useCallback } from 'react'
+import {
+ Table, Button, Space, Tag, Typography, PageHeader, Modal, Input, Message, Badge, Popconfirm, Card, Descriptions, Empty
+} from '@arco-design/web-react'
+import {
+ IconPlus, IconDelete, IconDesktop, IconCloudDownload
+} from '@arco-design/web-react/icon'
+import type { NodeSummary } from '../../types/nodes'
+import { listNodes, createNode, deleteNode } from '../../services/nodes'
+
+const { Title, Text } = Typography
+
+export default function NodesPage() {
+ const [nodes, setNodes] = useState([])
+ const [loading, setLoading] = useState(false)
+ const [createVisible, setCreateVisible] = useState(false)
+ const [newNodeName, setNewNodeName] = useState('')
+ const [newToken, setNewToken] = useState('')
+
+ const fetchNodes = useCallback(async () => {
+ setLoading(true)
+ try {
+ const data = await listNodes()
+ setNodes(data)
+ } catch {
+ Message.error('获取节点列表失败')
+ } finally {
+ setLoading(false)
+ }
+ }, [])
+
+ useEffect(() => { fetchNodes() }, [fetchNodes])
+
+ const handleCreate = async () => {
+ if (!newNodeName.trim()) {
+ Message.warning('请输入节点名称')
+ return
+ }
+ try {
+ const result = await createNode(newNodeName.trim())
+ setNewToken(result.token)
+ Message.success('节点创建成功')
+ fetchNodes()
+ } catch {
+ Message.error('创建节点失败')
+ }
+ }
+
+ const handleDelete = async (id: number) => {
+ try {
+ await deleteNode(id)
+ Message.success('节点已删除')
+ fetchNodes()
+ } catch {
+ Message.error('删除节点失败')
+ }
+ }
+
+ const columns = [
+ {
+ title: '节点名称',
+ dataIndex: 'name',
+ render: (name: string, record: NodeSummary) => (
+
+ {record.isLocal ? : }
+ {name}
+ {record.isLocal && 本机 }
+
+ ),
+ },
+ {
+ title: '状态',
+ dataIndex: 'status',
+ width: 100,
+ render: (status: string) => {
+ if (status === 'online') return
+ return
+ },
+ },
+ {
+ title: '主机名',
+ dataIndex: 'hostname',
+ render: (v: string) => v || '-',
+ },
+ {
+ title: 'IP 地址',
+ dataIndex: 'ipAddress',
+ render: (v: string) => v || '-',
+ },
+ {
+ title: '系统',
+ dataIndex: 'os',
+ width: 120,
+ render: (_: string, record: NodeSummary) => {
+ if (!record.os) return '-'
+ return {record.os}/{record.arch}
+ },
+ },
+ {
+ title: 'Agent 版本',
+ dataIndex: 'agentVersion',
+ width: 100,
+ render: (v: string) => v || '-',
+ },
+ {
+ title: '最后活跃',
+ dataIndex: 'lastSeen',
+ width: 170,
+ render: (v: string) => v ? new Date(v).toLocaleString('zh-CN') : '-',
+ },
+ {
+ title: '操作',
+ width: 80,
+ render: (_: unknown, record: NodeSummary) => {
+ if (record.isLocal) return -
+ return (
+ handleDelete(record.id)}>
+ } size="small" />
+
+ )
+ },
+ },
+ ]
+
+ return (
+
+
} onClick={() => { setCreateVisible(true); setNewToken(''); setNewNodeName('') }}>
+ 添加节点
+
+ }
+ />
+
+
+ }
+ />
+
+
+
setCreateVisible(false)}
+ footer={newToken ? (
+ setCreateVisible(false)}>完成
+ ) : undefined}
+ onOk={handleCreate}
+ okText="创建"
+ >
+ {!newToken ? (
+
+ ) : (
+
+
{newToken} },
+ ]} />
+
+
+ 请将此令牌配置到远程服务器的 Agent 启动参数中。令牌仅显示一次,请妥善保存。
+
+
+
+ )}
+
+
+ )
+}
diff --git a/web/src/pages/notifications/NotificationsPage.tsx b/web/src/pages/notifications/NotificationsPage.tsx
new file mode 100644
index 0000000..d89bc2d
--- /dev/null
+++ b/web/src/pages/notifications/NotificationsPage.tsx
@@ -0,0 +1,186 @@
+import { Button, Card, Empty, Message, PageHeader, Space, Table, Tag, Typography } from '@arco-design/web-react'
+import { useCallback, useEffect, useState } from 'react'
+import { NotificationFormDrawer } from '../../components/notifications/NotificationFormDrawer'
+import { getNotificationTypeLabel } from '../../components/notifications/field-config'
+import { createNotification, deleteNotification, getNotification, listNotifications, testNotification, testSavedNotification, updateNotification } from '../../services/notifications'
+import type { NotificationDetail, NotificationPayload, NotificationSummary } from '../../types/notifications'
+import { resolveErrorMessage } from '../../utils/error'
+import { formatDateTime } from '../../utils/format'
+
+export function NotificationsPage() {
+ const [items, setItems] = useState([])
+ const [loading, setLoading] = useState(true)
+ const [submitting, setSubmitting] = useState(false)
+ const [testing, setTesting] = useState(false)
+ const [drawerVisible, setDrawerVisible] = useState(false)
+ const [editingItem, setEditingItem] = useState(null)
+ const [error, setError] = useState('')
+
+ const loadData = useCallback(async () => {
+ setLoading(true)
+ try {
+ const result = await listNotifications()
+ setItems(result)
+ setError('')
+ } catch (loadError) {
+ setError(resolveErrorMessage(loadError, '加载通知配置失败'))
+ } finally {
+ setLoading(false)
+ }
+ }, [])
+
+ useEffect(() => {
+ void loadData()
+ }, [loadData])
+
+ async function openEdit(id: number) {
+ setSubmitting(true)
+ try {
+ const detail = await getNotification(id)
+ setEditingItem(detail)
+ setDrawerVisible(true)
+ } catch (loadError) {
+ Message.error(resolveErrorMessage(loadError, '加载通知详情失败'))
+ } finally {
+ setSubmitting(false)
+ }
+ }
+
+ async function handleSubmit(value: NotificationPayload, notificationId?: number) {
+ setSubmitting(true)
+ try {
+ if (notificationId) {
+ await updateNotification(notificationId, value)
+ Message.success('通知配置已更新')
+ } else {
+ await createNotification(value)
+ Message.success('通知配置已创建')
+ }
+ setDrawerVisible(false)
+ setEditingItem(null)
+ await loadData()
+ } catch (submitError) {
+ Message.error(resolveErrorMessage(submitError, '保存通知配置失败'))
+ throw submitError
+ } finally {
+ setSubmitting(false)
+ }
+ }
+
+ async function handleTest(value: NotificationPayload, notificationId?: number) {
+ setTesting(true)
+ try {
+ if (notificationId) {
+ await testSavedNotification(notificationId)
+ } else {
+ await testNotification(value)
+ }
+ Message.success('测试通知已发出,请查收')
+ } catch (testError) {
+ Message.error(resolveErrorMessage(testError, '发送测试通知失败'))
+ throw testError
+ } finally {
+ setTesting(false)
+ }
+ }
+
+ async function handleDelete(item: NotificationSummary) {
+ if (!window.confirm(`确定删除通知配置“${item.name}”吗?`)) {
+ return
+ }
+ try {
+ await deleteNotification(item.id)
+ Message.success('通知配置已删除')
+ await loadData()
+ } catch (deleteError) {
+ Message.error(resolveErrorMessage(deleteError, '删除通知配置失败'))
+ }
+ }
+
+ const columns = [
+ {
+ title: '名称',
+ dataIndex: 'name',
+ render: (_: unknown, record: NotificationSummary) => (
+
+ {record.name}
+
+ {getNotificationTypeLabel(record.type) && {getNotificationTypeLabel(record.type)} }
+ {record.enabled !== undefined && {record.enabled ? '已启用' : '已停用'} }
+
+
+ ),
+ },
+ {
+ title: '触发条件',
+ dataIndex: 'events',
+ render: (_: unknown, record: NotificationSummary) => (
+
+ {record.onSuccess ? 成功 : null}
+ {record.onFailure ? 失败 : null}
+ {!record.onSuccess && !record.onFailure ? 未配置 : null}
+
+ ),
+ },
+ {
+ title: '更新时间',
+ dataIndex: 'updatedAt',
+ render: (value: string) => formatDateTime(value),
+ },
+ {
+ title: '操作',
+ dataIndex: 'actions',
+ width: 180,
+ render: (_: unknown, record: NotificationSummary) => (
+
+ void openEdit(record.id)}>
+ 编辑
+
+ void handleDelete(record)}>
+ 删除
+
+
+ ),
+ },
+ ]
+
+ return (
+
+ {
+ setEditingItem(null)
+ setDrawerVisible(true)
+ }}
+ >
+ 新建通知
+
+ }
+ />
+
+ {error ? {error} : null}
+
+
+ } />
+
+
+ {
+ setDrawerVisible(false)
+ setEditingItem(null)
+ }}
+ onSubmit={handleSubmit}
+ onTest={handleTest}
+ />
+
+ )
+}
diff --git a/web/src/pages/settings/SettingsPage.tsx b/web/src/pages/settings/SettingsPage.tsx
new file mode 100644
index 0000000..f119b19
--- /dev/null
+++ b/web/src/pages/settings/SettingsPage.tsx
@@ -0,0 +1,88 @@
+import { Card, Descriptions, Grid, PageHeader, Space, Typography } from '@arco-design/web-react'
+import { useEffect, useState } from 'react'
+import { fetchSystemInfo, type SystemInfo } from '../../services/system'
+import { resolveErrorMessage } from '../../utils/error'
+import { formatDuration } from '../../utils/format'
+
+const { Row, Col } = Grid
+
+const deploySteps = [
+ '1. 构建前端:cd web && npm run build',
+ '2. 编译后端:cd server && go build -o backupx ./cmd/backupx',
+ '3. 部署静态资源与二进制,并按 deploy/ 目录提供的配置接入 Nginx 与 systemd',
+ '4. 首次启动后访问 Web 控制台,完成管理员初始化与存储目标配置',
+]
+
+export function SettingsPage() {
+ const [info, setInfo] = useState(null)
+ const [loading, setLoading] = useState(true)
+ const [error, setError] = useState('')
+
+ useEffect(() => {
+ let active = true
+ void (async () => {
+ try {
+ const result = await fetchSystemInfo()
+ if (active) {
+ setInfo(result)
+ setError('')
+ }
+ } catch (loadError) {
+ if (active) {
+ setError(resolveErrorMessage(loadError, '加载系统设置失败'))
+ }
+ } finally {
+ if (active) {
+ setLoading(false)
+ }
+ }
+ })()
+ return () => {
+ active = false
+ }
+ }, [])
+
+ return (
+
+
+ {error ? {error} : null}
+
+
+
+
+
+
+
+
+
+
+
+ `deploy/nginx.conf`:静态资源托管与 `/api` 反向代理示例。
+ `deploy/backupx.service`:systemd 服务单元,负责守护 API 进程。
+ `deploy/install.sh`:一键安装示例脚本,用于创建目录、复制文件并启动服务。
+ `README.md`:包含完整部署与使用文档。
+
+
+
+
+
+
+ {deploySteps.join('\n')}
+
+
+ )
+}
diff --git a/web/src/pages/storage-targets/GoogleDriveCallbackPage.tsx b/web/src/pages/storage-targets/GoogleDriveCallbackPage.tsx
new file mode 100644
index 0000000..a714603
--- /dev/null
+++ b/web/src/pages/storage-targets/GoogleDriveCallbackPage.tsx
@@ -0,0 +1,106 @@
+import { Alert, Button, Card, Space, Spin, Typography } from '@arco-design/web-react'
+import axios from 'axios'
+import { useEffect, useRef, useState } from 'react'
+import { useSearchParams } from 'react-router-dom'
+import { completeGoogleDriveAuth } from '../../services/storage-targets'
+import type { GoogleDriveCallbackResult } from '../../types/storage-targets'
+
+function resolveErrorMessage(error: unknown) {
+ if (axios.isAxiosError(error)) {
+ return error.response?.data?.message ?? 'Google Drive 授权回调失败'
+ }
+ return 'Google Drive 授权回调失败'
+}
+
+// Define outside the component to survive React StrictMode unmount/remount
+let globalAuthPromise: Promise | null = null
+
+export function GoogleDriveCallbackPage() {
+ const [searchParams] = useSearchParams()
+ const [loading, setLoading] = useState(true)
+ const [result, setResult] = useState(null)
+ const [error, setError] = useState('')
+ const [countdown, setCountdown] = useState(3)
+
+ useEffect(() => {
+ let active = true
+
+ if (!globalAuthPromise) {
+ globalAuthPromise = completeGoogleDriveAuth(searchParams.toString())
+ }
+
+ globalAuthPromise
+ .then((response) => {
+ if (active) setResult(response)
+ })
+ .catch((callbackError) => {
+ if (active) setError(resolveErrorMessage(callbackError))
+ })
+ .finally(() => {
+ if (active) setLoading(false)
+ })
+
+ return () => {
+ active = false
+ }
+ }, [searchParams])
+
+ // Auto-close countdown on success
+ useEffect(() => {
+ if (!result?.success) return
+ if (countdown <= 0) {
+ window.close()
+ return
+ }
+ const timer = setTimeout(() => setCountdown((c) => c - 1), 1000)
+ return () => clearTimeout(timer)
+ }, [result, countdown])
+
+ function handleClose() {
+ window.close()
+ }
+
+ return (
+
+
+
+
+ Google Drive 授权结果
+
+ BackupX 正在处理 Google Drive OAuth 回调结果。
+
+
+
+ {loading ? : null}
+
+ {!loading && error ? : null}
+
+ {!loading && !error && result ? (
+
+ ) : null}
+
+
+ {!loading && result?.success ? (
+
+ 立即关闭此页面
+
+ ) : null}
+ {!loading && (error || !result?.success) ? (
+
+ 关闭页面
+
+ ) : null}
+
+
+
+
+ )
+}
+
diff --git a/web/src/pages/storage-targets/StorageTargetsPage.tsx b/web/src/pages/storage-targets/StorageTargetsPage.tsx
new file mode 100644
index 0000000..3aecee3
--- /dev/null
+++ b/web/src/pages/storage-targets/StorageTargetsPage.tsx
@@ -0,0 +1,246 @@
+import { Alert, Button, Card, Empty, Grid, Message, PageHeader, Space, Spin, Tag, Typography } from '@arco-design/web-react'
+import axios from 'axios'
+import { useCallback, useEffect, useState } from 'react'
+import {
+ createStorageTarget,
+ deleteStorageTarget,
+ getStorageTarget,
+ listStorageTargets,
+ startGoogleDriveAuth,
+ testSavedStorageTarget,
+ testStorageTarget,
+ updateStorageTarget,
+} from '../../services/storage-targets'
+import type { StorageConnectionTestResult, StorageTargetDetail, StorageTargetPayload, StorageTargetSummary } from '../../types/storage-targets'
+import { getStorageTargetTypeLabel } from '../../components/storage-targets/field-config'
+import { StorageTargetFormDrawer } from '../../components/storage-targets/StorageTargetFormDrawer'
+
+function resolveErrorMessage(error: unknown) {
+ if (axios.isAxiosError(error)) {
+ return error.response?.data?.message ?? '请求失败,请稍后重试'
+ }
+ return '请求失败,请稍后重试'
+}
+
// Render a label describing the target's most recent connection test.
// NOTE(review): the JSX wrapping each label (presumably <Tag> elements with
// status colors) appears to have been stripped by the diff extraction —
// confirm against the original commit before relying on this body.
function renderTestStatus(target: StorageTargetSummary) {
  switch (target.lastTestStatus) {
    case 'success':
      return 连接正常
    case 'failed':
      return 最近测试失败
    default:
      // Covers targets that have never been tested (no recorded status).
      return 未测试
  }
}
+
+export function StorageTargetsPage() {
+ const [targets, setTargets] = useState([])
+ const [loading, setLoading] = useState(true)
+ const [submitting, setSubmitting] = useState(false)
+ const [testing, setTesting] = useState(false)
+ const [drawerVisible, setDrawerVisible] = useState(false)
+ const [editingTarget, setEditingTarget] = useState(null)
+ const [error, setError] = useState('')
+
+ const loadTargets = useCallback(async () => {
+ setLoading(true)
+ try {
+ const result = await listStorageTargets()
+ setTargets(result)
+ setError('')
+ } catch (loadError) {
+ setError(resolveErrorMessage(loadError))
+ } finally {
+ setLoading(false)
+ }
+ }, [])
+
+ useEffect(() => {
+ void loadTargets()
+ }, [loadTargets])
+
+ // Auto-refresh when user comes back from Google Drive OAuth tab
+ useEffect(() => {
+ function handleVisibilityChange() {
+ if (document.visibilityState === 'visible') {
+ void loadTargets()
+ }
+ }
+ document.addEventListener('visibilitychange', handleVisibilityChange)
+ return () => document.removeEventListener('visibilitychange', handleVisibilityChange)
+ }, [loadTargets])
+
+ async function openEdit(id: number) {
+ setSubmitting(true)
+ try {
+ const detail = await getStorageTarget(id)
+ setEditingTarget(detail)
+ setDrawerVisible(true)
+ } catch (loadError) {
+ Message.error(resolveErrorMessage(loadError))
+ } finally {
+ setSubmitting(false)
+ }
+ }
+
+ async function handleSubmit(value: StorageTargetPayload, targetId?: number) {
+ setSubmitting(true)
+ try {
+ if (targetId) {
+ await updateStorageTarget(targetId, value)
+ Message.success('存储目标已更新')
+ } else {
+ await createStorageTarget(value)
+ Message.success('存储目标已创建')
+ }
+ setDrawerVisible(false)
+ setEditingTarget(null)
+ await loadTargets()
+ } catch (submitError) {
+ Message.error(resolveErrorMessage(submitError))
+ throw submitError
+ } finally {
+ setSubmitting(false)
+ }
+ }
+
+ async function handleDelete(id: number) {
+ if (!window.confirm('确定删除该存储目标吗?')) {
+ return
+ }
+ try {
+ await deleteStorageTarget(id)
+ Message.success('存储目标已删除')
+ await loadTargets()
+ } catch (deleteError) {
+ Message.error(resolveErrorMessage(deleteError))
+ }
+ }
+
+ async function handleDraftTest(value: StorageTargetPayload, targetId?: number): Promise {
+ setTesting(true)
+ try {
+ // When editing an existing target, use saved config test to avoid sending masked values
+ const result = targetId
+ ? await testSavedStorageTarget(targetId)
+ : await testStorageTarget(value)
+ Message.success(result.message)
+ if (targetId) {
+ await loadTargets()
+ }
+ return result
+ } catch (testError) {
+ const message = resolveErrorMessage(testError)
+ Message.error(message)
+ return { success: false, message }
+ } finally {
+ setTesting(false)
+ }
+ }
+
+ async function handleSavedTest(id: number) {
+ try {
+ const result = await testSavedStorageTarget(id)
+ Message.success(result.message)
+ await loadTargets()
+ } catch (testError) {
+ Message.error(resolveErrorMessage(testError))
+ }
+ }
+
+ async function handleGoogleDriveAuth(value: StorageTargetPayload, targetId?: number) {
+ try {
+ const result = await startGoogleDriveAuth(value, targetId)
+ window.open(result.authUrl, '_blank')
+ } catch (authError) {
+ Message.error(resolveErrorMessage(authError))
+ throw authError
+ }
+ }
+
+ return (
+
+ {
+ setEditingTarget(null)
+ setDrawerVisible(true)
+ }}
+ >
+ 新建存储目标
+
+ }
+ />
+
+ {error ? : null}
+
+ {loading ? (
+
+ ) : targets.length === 0 ? (
+
+
+
+ ) : (
+
+ {targets.map((target) => (
+
+
+
+
+
+
+ {target.name}
+
+
+ {getStorageTargetTypeLabel(target.type) && {getStorageTargetTypeLabel(target.type)} }
+ {target.enabled ? 已启用 : 已停用 }
+ {renderTestStatus(target)}
+
+
+
+
+ {target.description ? {target.description} : null}
+ {target.lastTestMessage ? (
+ 最近测试:{target.lastTestMessage}
+ ) : null}
+ 更新时间:{target.updatedAt}
+
+
+ void openEdit(target.id)} loading={submitting && editingTarget?.id === target.id}>
+ 编辑
+
+ void handleSavedTest(target.id)}>
+ 测试连接
+
+ void handleDelete(target.id)}>
+ 删除
+
+
+
+
+
+ ))}
+
+ )}
+
+ {
+ setDrawerVisible(false)
+ setEditingTarget(null)
+ }}
+ onSubmit={handleSubmit}
+ onTest={handleDraftTest}
+ onGoogleDriveAuth={handleGoogleDriveAuth}
+ />
+
+ )
+}
diff --git a/web/src/pages/system-info/SystemInfoPage.tsx b/web/src/pages/system-info/SystemInfoPage.tsx
new file mode 100644
index 0000000..9d7a8e8
--- /dev/null
+++ b/web/src/pages/system-info/SystemInfoPage.tsx
@@ -0,0 +1,67 @@
+import { Alert, Card, Descriptions, Space, Spin, Typography } from '@arco-design/web-react'
+import { useEffect, useState } from 'react'
+import axios from 'axios'
+import { fetchSystemInfo, type SystemInfo } from '../../services/system'
+
+function resolveErrorMessage(error: unknown) {
+ if (axios.isAxiosError(error)) {
+ return error.response?.data?.message ?? '加载系统信息失败'
+ }
+ return '加载系统信息失败'
+}
+
+export function SystemInfoPage() {
+ const [data, setData] = useState(null)
+ const [loading, setLoading] = useState(true)
+ const [error, setError] = useState('')
+
+ useEffect(() => {
+ let mounted = true
+ void (async () => {
+ try {
+ const result = await fetchSystemInfo()
+ if (mounted) {
+ setData(result)
+ }
+ } catch (err) {
+ if (mounted) {
+ setError(resolveErrorMessage(err))
+ }
+ } finally {
+ if (mounted) {
+ setLoading(false)
+ }
+ }
+ })()
+ return () => {
+ mounted = false
+ }
+ }, [])
+
+ return (
+
+
+ 系统信息
+
+ 用于确认服务版本、运行模式、数据库位置与运行时长。
+
+
+
+
+ {loading ? (
+
+ ) : error ? (
+
+ ) : (
+
+ )}
+
+
+ )
+}
diff --git a/web/src/pages/system-info/page.tsx b/web/src/pages/system-info/page.tsx
new file mode 100644
index 0000000..54fd325
--- /dev/null
+++ b/web/src/pages/system-info/page.tsx
@@ -0,0 +1,97 @@
+import {
+ Alert,
+ Descriptions,
+ Spin,
+ Tag,
+ Typography,
+} from '@arco-design/web-react';
+import { useEffect, useState } from 'react';
+
+import { PageCard } from '../../components/page-card';
+import { systemApi } from '../../services/system';
+import type { SystemInfo } from '../../types/system';
+
+function formatUptime(seconds: number) {
+ if (seconds < 60) {
+ return `${seconds} 秒`;
+ }
+
+ const hours = Math.floor(seconds / 3600);
+ const minutes = Math.floor((seconds % 3600) / 60);
+
+ return `${hours} 小时 ${minutes} 分钟`;
+}
+
+export function SystemInfoPage() {
+ const [data, setData] = useState(null);
+ const [loading, setLoading] = useState(true);
+ const [error, setError] = useState(null);
+
+ useEffect(() => {
+ let active = true;
+
+ async function loadSystemInfo() {
+ try {
+ const result = await systemApi.fetchInfo();
+
+ if (active) {
+ setData(result);
+ setError(null);
+ }
+ } catch (loadError) {
+ if (active) {
+ setError(loadError instanceof Error ? loadError.message : '系统信息加载失败');
+ }
+ } finally {
+ if (active) {
+ setLoading(false);
+ }
+ }
+ }
+
+ void loadSystemInfo();
+
+ return () => {
+ active = false;
+ };
+ }, []);
+
+ if (loading) {
+ return (
+
+
+
+ );
+ }
+
+ if (error) {
+ return ;
+ }
+
+ if (!data) {
+ return ;
+ }
+
+ return (
+
+
+
+ 用于确认 API 服务已正常启动,并展示平台基础运行状态。
+
+ {data.mode},
+ },
+ { label: '启动时间', value: data.startedAt },
+ { label: '运行时长', value: formatUptime(data.uptimeSeconds) },
+ { label: '数据库路径', value: data.databasePath },
+ ]}
+ />
+
+
+ );
+}
diff --git a/web/src/router/ProtectedRoute.test.tsx b/web/src/router/ProtectedRoute.test.tsx
new file mode 100644
index 0000000..85754dd
--- /dev/null
+++ b/web/src/router/ProtectedRoute.test.tsx
@@ -0,0 +1,61 @@
+import { render, screen } from '@testing-library/react'
+import { MemoryRouter, Routes, Route } from 'react-router-dom'
+import { ProtectedRoute } from './ProtectedRoute'
+import { useAuthStore } from '../stores/auth'
+
+describe('ProtectedRoute', () => {
+ beforeEach(() => {
+ useAuthStore.setState({
+ token: '',
+ user: null,
+ status: 'anonymous',
+ bootstrapped: true,
+ })
+ })
+
+ it('redirects anonymous users to login', () => {
+ render(
+
+
+ login page} />
+
+ dashboard page
+
+ }
+ />
+
+ ,
+ )
+
+ expect(screen.getByText('login page')).toBeInTheDocument()
+ })
+
+ it('renders protected content for authenticated users', () => {
+ useAuthStore.setState({
+ token: 'token',
+ user: { id: 1, username: 'admin', displayName: 'Admin', role: 'admin' },
+ status: 'authenticated',
+ bootstrapped: true,
+ })
+
+ render(
+
+
+
+ dashboard page
+
+ }
+ />
+
+ ,
+ )
+
+ expect(screen.getByText('dashboard page')).toBeInTheDocument()
+ })
+})
diff --git a/web/src/router/ProtectedRoute.tsx b/web/src/router/ProtectedRoute.tsx
new file mode 100644
index 0000000..0764ab7
--- /dev/null
+++ b/web/src/router/ProtectedRoute.tsx
@@ -0,0 +1,24 @@
+import { ReactNode } from 'react'
+import { Navigate, useLocation } from 'react-router-dom'
+import { useAuthStore } from '../stores/auth'
+import { FullPageLoading } from '../components/FullPageLoading'
+
+interface ProtectedRouteProps {
+ children: ReactNode
+}
+
// Route guard: renders its children only for an authenticated session.
// While the auth store is still bootstrapping (or actively loading the
// profile) it shows a full-page loading state; unauthenticated users are
// redirected to /login, preserving the attempted location for post-login
// redirect.
// NOTE(review): the JSX of both early returns (presumably <FullPageLoading />
// and <Navigate to="/login" state={{ from: location }} />) and the closing
// fragment tag were stripped by the diff extraction — restore from the
// original commit.
export function ProtectedRoute({ children }: ProtectedRouteProps) {
  const status = useAuthStore((state) => state.status)
  const bootstrapped = useAuthStore((state) => state.bootstrapped)
  const location = useLocation()

  if (!bootstrapped || status === 'loading') {
    return
  }

  if (status !== 'authenticated') {
    return
  }

  return <>{children}>
}
diff --git a/web/src/router/index.tsx b/web/src/router/index.tsx
new file mode 100644
index 0000000..edc3ebd
--- /dev/null
+++ b/web/src/router/index.tsx
@@ -0,0 +1,40 @@
+import { Navigate, Route, Routes } from 'react-router-dom'
+import { AppLayout } from '../layouts/AppLayout'
+import { DashboardPage } from '../pages/dashboard/DashboardPage'
+import { LoginPage } from '../pages/login/LoginPage'
+import { NotificationsPage } from '../pages/notifications/NotificationsPage'
+import { BackupRecordsPage } from '../pages/backup-records/BackupRecordsPage'
+import { BackupTasksPage } from '../pages/backup-tasks/BackupTasksPage'
+import { GoogleDriveCallbackPage } from '../pages/storage-targets/GoogleDriveCallbackPage'
+import { StorageTargetsPage } from '../pages/storage-targets/StorageTargetsPage'
+import { SettingsPage } from '../pages/settings/SettingsPage'
+import NodesPage from '../pages/nodes/NodesPage'
+import { ProtectedRoute } from './ProtectedRoute'
+
+export function RouterView() {
+ return (
+
+ } />
+
+
+
+ }
+ >
+ } />
+ } />
+ } />
+ } />
+ } />
+ } />
+ } />
+ } />
+ } />
+ } />
+
+ } />
+
+ )
+}
diff --git a/web/src/services/auth.ts b/web/src/services/auth.ts
new file mode 100644
index 0000000..dcfbd61
--- /dev/null
+++ b/web/src/services/auth.ts
@@ -0,0 +1,59 @@
+import { http } from './http'
+
/** Payload for first-run administrator initialization. */
export interface SetupPayload {
  username: string
  password: string
  displayName: string
}

/** Credentials submitted on login. */
export interface LoginPayload {
  username: string
  password: string
}

/** Authenticated user profile as returned by the API. */
export interface UserInfo {
  id: number
  username: string
  displayName: string
  role: string
}

/** Result of a successful login/setup: JWT plus the user profile. */
export interface AuthResult {
  token: string
  user: UserInfo
}

/**
 * Check whether the instance has been initialized with an admin account.
 * All auth endpoints wrap their payload in a { code, message, data } envelope;
 * these helpers unwrap it and return `data` directly.
 */
export async function fetchSetupStatus() {
  const response = await http.get<{ code: string; message: string; data: { initialized: boolean } }>('/auth/setup/status')
  return response.data.data
}

/** Create the initial admin account and receive an authenticated session. */
export async function setup(payload: SetupPayload) {
  const response = await http.post<{ code: string; message: string; data: AuthResult }>('/auth/setup', payload)
  return response.data.data
}

/** Exchange credentials for a token + user profile. */
export async function login(payload: LoginPayload) {
  const response = await http.post<{ code: string; message: string; data: AuthResult }>('/auth/login', payload)
  return response.data.data
}

/** Fetch the profile of the currently authenticated user. */
export async function fetchProfile() {
  const response = await http.get<{ code: string; message: string; data: UserInfo }>('/auth/profile')
  return response.data.data
}

/** Payload for changing the current user's password. */
export interface ChangePasswordPayload {
  oldPassword: string
  newPassword: string
}

/** Change the current user's password; returns whether it was changed. */
export async function changePassword(payload: ChangePasswordPayload) {
  const response = await http.put<{ code: string; message: string; data: { changed: boolean } }>('/auth/password', payload)
  return response.data.data
}

/** Invalidate the current session server-side. */
export async function logout() {
  const response = await http.post<{ code: string; message: string; data: { loggedOut: boolean } }>('/auth/logout')
  return response.data.data
}
diff --git a/web/src/services/backup-records.ts b/web/src/services/backup-records.ts
new file mode 100644
index 0000000..31a9bd0
--- /dev/null
+++ b/web/src/services/backup-records.ts
@@ -0,0 +1,155 @@
+import { http, getAccessToken, type ApiEnvelope, unwrapApiEnvelope } from './http'
+import type { BackupLogEvent, BackupRecordDetail, BackupRecordListFilter, BackupRecordSummary } from '../types/backup-records'
+import { resolveErrorMessage } from '../utils/error'
+
+interface RecordLogStreamHandlers {
+ onEvent: (event: BackupLogEvent) => void
+ onDone?: () => void
+ onError?: (message: string) => void
+}
+
// Translate the record-list filter into query-string params, omitting unset
// fields so the API only receives explicit constraints.
// NOTE(review): the type argument of `Record` was lost in extraction (likely
// Record<string, string | number>) — confirm against the original commit.
// NOTE(review): truthiness checks drop a taskId of 0; assumes ids start at 1
// — confirm with the backend schema.
function buildRecordQuery(filter: BackupRecordListFilter) {
  const query: Record = {}
  if (filter.taskId) {
    query.taskId = filter.taskId
  }
  if (filter.status) {
    query.status = filter.status
  }
  if (filter.dateFrom) {
    query.dateFrom = filter.dateFrom
  }
  if (filter.dateTo) {
    query.dateTo = filter.dateTo
  }
  return query
}
+
+function parseContentDisposition(value?: string) {
+ if (!value) {
+ return 'backup-artifact.bin'
+ }
+ const match = value.match(/filename="?([^";]+)"?/i)
+ return match?.[1] ?? 'backup-artifact.bin'
+}
+
+function parseLogEvent(chunk: string) {
+ const payloadLine = chunk
+ .split('\n')
+ .find((line) => line.startsWith('data:'))
+
+ if (!payloadLine) {
+ return null
+ }
+
+ const payload = payloadLine.slice(5).trim()
+ if (!payload) {
+ return null
+ }
+
+ return JSON.parse(payload) as BackupLogEvent
+}
+
+async function resolveStreamError(response: Response) {
+ try {
+ const payload = (await response.json()) as { message?: string }
+ return payload.message ?? '连接日志流失败'
+ } catch {
+ return `连接日志流失败(HTTP ${response.status})`
+ }
+}
+
+export async function listBackupRecords(filter: BackupRecordListFilter = {}) {
+ const response = await http.get>('/backup/records', { params: buildRecordQuery(filter) })
+ return unwrapApiEnvelope(response.data)
+}
+
+export async function getBackupRecord(id: number) {
+ const response = await http.get>(`/backup/records/${id}`)
+ return unwrapApiEnvelope(response.data)
+}
+
+export async function downloadBackupRecord(id: number) {
+ const response = await http.get(`/backup/records/${id}/download`, { responseType: 'blob' })
+ return {
+ blob: response.data,
+ fileName: parseContentDisposition(response.headers['content-disposition']),
+ }
+}
+
+export async function restoreBackupRecord(id: number) {
+ const response = await http.post>(`/backup/records/${id}/restore`)
+ return unwrapApiEnvelope(response.data)
+}
+
+export async function deleteBackupRecord(id: number) {
+ const response = await http.delete>(`/backup/records/${id}`)
+ return unwrapApiEnvelope(response.data)
+}
+
/**
 * Stream live backup logs for a record over an SSE-style endpoint.
 *
 * Uses `fetch` with a ReadableStream reader (rather than the shared axios
 * instance) so the body can be consumed incrementally; the bearer token is
 * attached manually via getAccessToken().
 *
 * Frames are separated by a blank line ("\n\n"). Each complete frame is
 * parsed via parseLogEvent and forwarded to handlers.onEvent; a frame with
 * `completed: true` ends the stream early. onDone fires exactly once on
 * normal completion; onError fires on network/parse failures (but not on
 * deliberate aborts).
 *
 * @returns a cancel function that aborts the underlying request.
 */
export function streamBackupRecordLogs(recordId: number, handlers: RecordLogStreamHandlers) {
  const controller = new AbortController()

  // Fire-and-forget reader loop; lifecycle is reported via the handlers.
  void (async () => {
    try {
      const token = getAccessToken()
      const response = await fetch(`/api/backup/records/${recordId}/logs/stream`, {
        method: 'GET',
        headers: token ? { Authorization: `Bearer ${token}` } : undefined,
        signal: controller.signal,
      })

      if (!response.ok) {
        throw new Error(await resolveStreamError(response))
      }
      if (!response.body) {
        throw new Error('日志流不可用')
      }

      const reader = response.body.getReader()
      const decoder = new TextDecoder()
      // Accumulates partial frames across network chunks.
      let buffer = ''

      while (true) {
        const { value, done } = await reader.read()
        if (done) {
          break
        }

        // stream:true keeps multi-byte characters split across chunks intact.
        buffer += decoder.decode(value, { stream: true })

        // Drain every complete frame currently in the buffer.
        while (buffer.includes('\n\n')) {
          const boundary = buffer.indexOf('\n\n')
          const chunk = buffer.slice(0, boundary)
          buffer = buffer.slice(boundary + 2)

          const event = parseLogEvent(chunk)
          if (!event) {
            continue
          }
          handlers.onEvent(event)
          if (event.completed) {
            // Terminal event: signal completion, then abort the connection.
            handlers.onDone?.()
            controller.abort()
            return
          }
        }
      }

      // Flush a trailing frame that arrived without a final "\n\n".
      if (buffer.trim()) {
        const event = parseLogEvent(buffer)
        if (event) {
          handlers.onEvent(event)
        }
      }
      handlers.onDone?.()
    } catch (error) {
      // A caller-initiated cancel surfaces as AbortError — not an error state.
      if (error instanceof DOMException && error.name === 'AbortError') {
        return
      }
      handlers.onError?.(resolveErrorMessage(error, '日志流连接失败'))
    }
  })()

  return () => controller.abort()
}
diff --git a/web/src/services/backup-tasks.ts b/web/src/services/backup-tasks.ts
new file mode 100644
index 0000000..59aa0f6
--- /dev/null
+++ b/web/src/services/backup-tasks.ts
@@ -0,0 +1,38 @@
+import { http, type ApiEnvelope, unwrapApiEnvelope } from './http'
+import type { BackupTaskDetail, BackupTaskPayload, BackupTaskSummary, BackupTaskTogglePayload } from '../types/backup-tasks'
+import type { BackupRecordDetail } from '../types/backup-records'
+
+export async function listBackupTasks() {
+ const response = await http.get>('/backup/tasks')
+ return unwrapApiEnvelope(response.data)
+}
+
+export async function getBackupTask(id: number) {
+ const response = await http.get>(`/backup/tasks/${id}`)
+ return unwrapApiEnvelope(response.data)
+}
+
+export async function createBackupTask(payload: BackupTaskPayload) {
+ const response = await http.post>('/backup/tasks', payload)
+ return unwrapApiEnvelope(response.data)
+}
+
+export async function updateBackupTask(id: number, payload: BackupTaskPayload) {
+ const response = await http.put>(`/backup/tasks/${id}`, payload)
+ return unwrapApiEnvelope(response.data)
+}
+
+export async function deleteBackupTask(id: number) {
+ const response = await http.delete>(`/backup/tasks/${id}`)
+ return unwrapApiEnvelope(response.data)
+}
+
+export async function toggleBackupTask(id: number, payload: BackupTaskTogglePayload) {
+ const response = await http.put>(`/backup/tasks/${id}/toggle`, payload)
+ return unwrapApiEnvelope(response.data)
+}
+
+export async function runBackupTask(id: number) {
+ const response = await http.post>(`/backup/tasks/${id}/run`)
+ return unwrapApiEnvelope(response.data)
+}
diff --git a/web/src/services/dashboard.ts b/web/src/services/dashboard.ts
new file mode 100644
index 0000000..9e5bcc5
--- /dev/null
+++ b/web/src/services/dashboard.ts
@@ -0,0 +1,12 @@
+import { http, type ApiEnvelope, unwrapApiEnvelope } from './http'
+import type { BackupTimelinePoint, DashboardStats } from '../types/dashboard'
+
+export async function fetchDashboardStats() {
+ const response = await http.get>('/dashboard/stats')
+ return unwrapApiEnvelope(response.data)
+}
+
+export async function fetchDashboardTimeline(days = 30) {
+ const response = await http.get>('/dashboard/timeline', { params: { days } })
+ return unwrapApiEnvelope(response.data)
+}
diff --git a/web/src/services/http.ts b/web/src/services/http.ts
new file mode 100644
index 0000000..5bac3d0
--- /dev/null
+++ b/web/src/services/http.ts
@@ -0,0 +1,48 @@
+import axios from 'axios'
+
+export interface ApiEnvelope {
+ code: string | number
+ message: string
+ data: T
+}
+
// Module-level auth state shared by the interceptors below. Kept in memory
// only; persistence is handled by the auth store, which re-applies the token
// via setAccessToken() on bootstrap.
let accessToken = ''
let unauthorizedHandler: (() => void) | null = null

// Shared axios instance for all API calls. Paths are relative to /api and
// requests time out after 10 s (long-running operations override per call).
export const http = axios.create({
  baseURL: '/api',
  timeout: 10000,
})

/** Set the bearer token attached to subsequent requests ('' clears it). */
export function setAccessToken(token: string) {
  accessToken = token
}

/** Current bearer token; used by non-axios callers (e.g. fetch-based SSE). */
export function getAccessToken() {
  return accessToken
}

/** Register (or clear, with null) the global 401 handler. */
export function setUnauthorizedHandler(handler: (() => void) | null) {
  unauthorizedHandler = handler
}

// Unwrap the standard { code, message, data } envelope and return `data`.
// NOTE(review): the generic parameter (likely unwrapApiEnvelope<T>(response:
// ApiEnvelope<T>): T) was lost in extraction — confirm against the commit.
export function unwrapApiEnvelope(response: ApiEnvelope) {
  return response.data
}

// Attach the bearer token to every outgoing request when one is set.
http.interceptors.request.use((config) => {
  if (accessToken) {
    config.headers.Authorization = `Bearer ${accessToken}`
  }
  return config
})

// Invoke the registered handler on 401 responses (session expiry), then
// re-reject so call sites still see the error.
http.interceptors.response.use(
  (response) => response,
  (error) => {
    if (error.response?.status === 401 && unauthorizedHandler) {
      unauthorizedHandler()
    }
    return Promise.reject(error)
  },
)
diff --git a/web/src/services/nodes.ts b/web/src/services/nodes.ts
new file mode 100644
index 0000000..3e1df0b
--- /dev/null
+++ b/web/src/services/nodes.ts
@@ -0,0 +1,27 @@
+import { http, type ApiEnvelope, unwrapApiEnvelope } from './http'
+import type { NodeSummary, DirEntry } from '../types/nodes'
+
+export async function listNodes() {
+ const response = await http.get>('/nodes')
+ return unwrapApiEnvelope(response.data)
+}
+
+export async function getNode(id: number) {
+ const response = await http.get>(`/nodes/${id}`)
+ return unwrapApiEnvelope(response.data)
+}
+
+export async function createNode(name: string) {
+ const response = await http.post>('/nodes', { name })
+ return unwrapApiEnvelope(response.data)
+}
+
+export async function deleteNode(id: number) {
+ const response = await http.delete>(`/nodes/${id}`)
+ return unwrapApiEnvelope(response.data)
+}
+
+export async function listNodeDirectory(nodeId: number, path: string) {
+ const response = await http.get>(`/nodes/${nodeId}/fs/list`, { params: { path } })
+ return unwrapApiEnvelope(response.data)
+}
diff --git a/web/src/services/notifications.ts b/web/src/services/notifications.ts
new file mode 100644
index 0000000..3a5da67
--- /dev/null
+++ b/web/src/services/notifications.ts
@@ -0,0 +1,37 @@
+import { http, type ApiEnvelope, unwrapApiEnvelope } from './http'
+import type { NotificationDetail, NotificationPayload, NotificationSummary } from '../types/notifications'
+
+export async function listNotifications() {
+ const response = await http.get>('/notifications')
+ return unwrapApiEnvelope(response.data)
+}
+
+export async function getNotification(id: number) {
+ const response = await http.get>(`/notifications/${id}`)
+ return unwrapApiEnvelope(response.data)
+}
+
+export async function createNotification(payload: NotificationPayload) {
+ const response = await http.post>('/notifications', payload)
+ return unwrapApiEnvelope(response.data)
+}
+
+export async function updateNotification(id: number, payload: NotificationPayload) {
+ const response = await http.put>(`/notifications/${id}`, payload)
+ return unwrapApiEnvelope(response.data)
+}
+
+export async function deleteNotification(id: number) {
+ const response = await http.delete>(`/notifications/${id}`)
+ return unwrapApiEnvelope(response.data)
+}
+
+export async function testNotification(payload: NotificationPayload) {
+ const response = await http.post>('/notifications/test', payload, { timeout: 30000 })
+ return unwrapApiEnvelope(response.data)
+}
+
+export async function testSavedNotification(id: number) {
+ const response = await http.post>(`/notifications/${id}/test`, undefined, { timeout: 30000 })
+ return unwrapApiEnvelope(response.data)
+}
diff --git a/web/src/services/storage-targets.ts b/web/src/services/storage-targets.ts
new file mode 100644
index 0000000..168d24d
--- /dev/null
+++ b/web/src/services/storage-targets.ts
@@ -0,0 +1,80 @@
+import { http } from './http'
+import type {
+ GoogleDriveAuthStartResult,
+ GoogleDriveCallbackResult,
+ StorageConnectionTestResult,
+ StorageTargetDetail,
+ StorageTargetPayload,
+ StorageTargetSummary,
+} from '../types/storage-targets'
+
+interface ApiEnvelope {
+ code: string | number
+ message: string
+ data: T
+}
+
+function unwrap(response: ApiEnvelope) {
+ return response.data
+}
+
+export async function listStorageTargets() {
+ const response = await http.get>('/storage-targets')
+ return unwrap(response.data)
+}
+
+export async function getStorageTarget(id: number) {
+ const response = await http.get>(`/storage-targets/${id}`)
+ return unwrap(response.data)
+}
+
+export async function createStorageTarget(payload: StorageTargetPayload) {
+ const response = await http.post>('/storage-targets', payload)
+ return unwrap(response.data)
+}
+
+export async function updateStorageTarget(id: number, payload: StorageTargetPayload) {
+ const response = await http.put>(`/storage-targets/${id}`, payload)
+ return unwrap(response.data)
+}
+
+export async function deleteStorageTarget(id: number) {
+ const response = await http.delete>(`/storage-targets/${id}`)
+ return unwrap(response.data)
+}
+
+export async function testStorageTarget(payload: StorageTargetPayload) {
+ const response = await http.post>('/storage-targets/test', payload, { timeout: 30000 })
+ return unwrap(response.data)
+}
+
+export async function testSavedStorageTarget(id: number) {
+ const response = await http.post>(`/storage-targets/${id}/test`, undefined, { timeout: 30000 })
+ return unwrap(response.data)
+}
+
+export async function startGoogleDriveAuth(payload: StorageTargetPayload, targetId?: number) {
+ const response = await http.post>('/storage-targets/google-drive/auth-url', {
+ ...payload,
+ targetId,
+ })
+ return unwrap(response.data)
+}
+
+export async function completeGoogleDriveAuth(queryString: string) {
+ const suffix = queryString.startsWith('?') ? queryString : `?${queryString}`
+ const response = await http.get>(`/storage-targets/google-drive/callback${suffix}`)
+ return unwrap(response.data)
+}
+
+export interface StorageTargetUsage {
+ targetId: number
+ targetName: string
+ recordCount: number
+ totalSize: number
+}
+
+export async function getStorageTargetUsage(id: number) {
+ const response = await http.get>(`/storage-targets/${id}/usage`)
+ return unwrap(response.data)
+}
diff --git a/web/src/services/system.ts b/web/src/services/system.ts
new file mode 100644
index 0000000..6456321
--- /dev/null
+++ b/web/src/services/system.ts
@@ -0,0 +1,27 @@
+import { http } from './http'
+
+export interface SystemInfo {
+ version: string
+ mode: string
+ startedAt: string
+ uptimeSeconds: number
+ databasePath: string
+ diskTotal: number
+ diskFree: number
+ diskUsed: number
+}
+
+export async function fetchSystemInfo() {
+ const response = await http.get<{ code: string; message: string; data: SystemInfo }>('/system/info')
+ return response.data.data
+}
+
+export async function fetchSettings() {
+ const response = await http.get<{ code: string; message: string; data: Record }>('/settings')
+ return response.data.data
+}
+
+export async function updateSettings(settings: Record) {
+ const response = await http.put<{ code: string; message: string; data: Record }>('/settings', settings)
+ return response.data.data
+}
diff --git a/web/src/stores/auth.test.ts b/web/src/stores/auth.test.ts
new file mode 100644
index 0000000..c662428
--- /dev/null
+++ b/web/src/stores/auth.test.ts
@@ -0,0 +1,52 @@
+import { beforeEach, describe, expect, it, vi } from 'vitest';
+
+import { authApi } from '../services/auth';
+import { useAuthStore } from './auth';
+
+vi.mock('../services/auth', () => ({
+ authApi: {
+ login: vi.fn(),
+ fetchProfile: vi.fn(),
+ },
+}));
+
+describe('useAuthStore', () => {
+ beforeEach(() => {
+ window.localStorage.clear();
+ useAuthStore.setState({
+ token: null,
+ user: null,
+ hydrated: true,
+ status: 'idle',
+ });
+ vi.clearAllMocks();
+ });
+
+ it('stores token and user after login', async () => {
+ vi.mocked(authApi.login).mockResolvedValue({
+ token: 'jwt-token',
+ user: {
+ id: 1,
+ username: 'admin',
+ displayName: '管理员',
+ role: 'admin',
+ },
+ });
+
+ await useAuthStore.getState().login({ username: 'admin', password: 'secret' });
+
+ expect(useAuthStore.getState().token).toBe('jwt-token');
+ expect(useAuthStore.getState().status).toBe('authenticated');
+ expect(window.localStorage.getItem('backupx-auth-token')).toBe('jwt-token');
+ });
+
+ it('clears state when bootstrap profile request fails', async () => {
+ useAuthStore.setState({ token: 'expired-token', status: 'idle' });
+ vi.mocked(authApi.fetchProfile).mockRejectedValue(new Error('unauthorized'));
+
+ await useAuthStore.getState().bootstrap();
+
+ expect(useAuthStore.getState().token).toBeNull();
+ expect(useAuthStore.getState().status).toBe('anonymous');
+ });
+});
diff --git a/web/src/stores/auth.ts b/web/src/stores/auth.ts
new file mode 100644
index 0000000..8290eab
--- /dev/null
+++ b/web/src/stores/auth.ts
@@ -0,0 +1,74 @@
+import { create } from 'zustand'
+import { persist } from 'zustand/middleware'
+import { fetchProfile, login, setup, type LoginPayload, type SetupPayload, type UserInfo } from '../services/auth'
+import { setAccessToken, setUnauthorizedHandler } from '../services/http'
+
+type AuthStatus = 'unknown' | 'loading' | 'anonymous' | 'authenticated'
+
+interface AuthState {
+ token: string
+ user: UserInfo | null
+ status: AuthStatus
+ bootstrapped: boolean
+ bootstrap: () => Promise
+ login: (payload: LoginPayload) => Promise
+ setup: (payload: SetupPayload) => Promise
+ logout: () => void
+ applyAuth: (token: string, user: UserInfo) => void
+}
+
+function clearAuthState(set: (partial: Partial) => void) {
+ setAccessToken('')
+ set({ token: '', user: null, status: 'anonymous', bootstrapped: true })
+}
+
+export const useAuthStore = create()(
+ persist(
+ (set, get) => ({
+ token: '',
+ user: null,
+ status: 'unknown',
+ bootstrapped: false,
+ bootstrap: async () => {
+ const token = get().token
+ setUnauthorizedHandler(() => {
+ clearAuthState(set)
+ })
+
+ if (!token) {
+ setAccessToken('')
+ set({ status: 'anonymous', bootstrapped: true })
+ return
+ }
+
+ setAccessToken(token)
+ set({ status: 'loading' })
+ try {
+ const user = await fetchProfile()
+ set({ user, status: 'authenticated', bootstrapped: true })
+ } catch {
+ clearAuthState(set)
+ }
+ },
+ login: async (payload) => {
+ const result = await login(payload)
+ get().applyAuth(result.token, result.user)
+ },
+ setup: async (payload) => {
+ const result = await setup(payload)
+ get().applyAuth(result.token, result.user)
+ },
+ logout: () => {
+ clearAuthState(set)
+ },
+ applyAuth: (token, user) => {
+ setAccessToken(token)
+ set({ token, user, status: 'authenticated', bootstrapped: true })
+ },
+ }),
+ {
+ name: 'backupx-auth',
+ partialize: (state) => ({ token: state.token }),
+ },
+ ),
+)
diff --git a/web/src/styles/global.css b/web/src/styles/global.css
new file mode 100644
index 0000000..3b58508
--- /dev/null
+++ b/web/src/styles/global.css
@@ -0,0 +1,152 @@
@import url('https://fonts.googleapis.com/css2?family=Inter:wght@400;500;600;700&display=swap');

:root {
  color: #1d2129;
  background: #f2f3f5;
  font-family: 'Inter', -apple-system, BlinkMacSystemFont, 'Segoe UI', sans-serif;
}

* {
  box-sizing: border-box;
}

html,
body,
#root {
  margin: 0;
  min-height: 100%;
  height: 100%;
}

body {
  /* Desktop-only layout — presumably no mobile support is intended; confirm. */
  min-width: 1280px;
}

/* ---- Card Overrides ---- */
/* Flatten Arco cards: drop the border, use a soft shadow instead. */
.arco-card {
  border: none !important;
  box-shadow: 0 4px 10px rgba(0,0,0,0.03) !important;
}

/* ---- Login ---- */
.login-shell {
  min-height: 100vh;
  display: flex;
  align-items: center;
  justify-content: center;
  padding: 24px;
  position: relative;
  overflow: hidden;
}

/* Fixed dark gradient backdrop behind the login card. */
.login-bg {
  position: fixed;
  inset: 0;
  background: linear-gradient(135deg, #111a2c 0%, #1f2d47 100%);
  z-index: 0;
}

/* Decorative glow circles (top-right blue, bottom-left purple). */
.login-bg::before {
  content: '';
  position: absolute;
  width: 800px;
  height: 800px;
  border-radius: 50%;
  background: radial-gradient(circle, rgba(52,145,250,0.08) 0%, transparent 70%);
  top: -300px;
  right: -200px;
}

.login-bg::after {
  content: '';
  position: absolute;
  width: 600px;
  height: 600px;
  border-radius: 50%;
  background: radial-gradient(circle, rgba(114,46,209,0.06) 0%, transparent 70%);
  bottom: -200px;
  left: -100px;
}

/* Two-pane login card: banner on the left, form on the right. */
.login-container {
  display: flex;
  width: 1000px;
  max-width: 90vw;
  min-height: 560px;
  background: var(--color-bg-2);
  border-radius: 20px;
  overflow: hidden;
  box-shadow: 0 20px 60px rgba(0,0,0,0.4);
  z-index: 1;
  animation: slideUp 0.6s cubic-bezier(0.34, 1.56, 0.64, 1);
}

.login-banner {
  flex: 1;
  background: linear-gradient(135deg, var(--color-primary-6, #165dff) 0%, var(--color-primary-8, #0e42d2) 100%);
  position: relative;
  display: flex;
  align-items: center;
  justify-content: center;
  overflow: hidden;
}

.login-banner-inner {
  position: relative;
  z-index: 2;
  text-align: center;
  padding: 40px;
}

.login-form-wrapper {
  width: 440px;
  padding: 64px 48px;
  display: flex;
  flex-direction: column;
  justify-content: center;
  background: var(--color-bg-2);
}

/* Entry animation for the login card (slight overshoot via the bezier). */
@keyframes slideUp {
  from {
    opacity: 0;
    transform: translateY(30px);
  }
  to {
    opacity: 1;
    transform: translateY(0);
  }
}

/* ---- Full Page Shell ---- */
.full-page-shell {
  min-height: 100vh;
  display: flex;
  align-items: center;
  justify-content: center;
  padding: 24px;
}

/* ---- Log Viewer ---- */
/* Scrollable monospace pane; colors come from Arco theme variables. */
.log-viewer {
  max-height: 420px;
  overflow: auto;
  padding: 12px;
  border-radius: 8px;
  border: 1px solid var(--color-border-2);
  background: var(--color-bg-2); /* adapted for theme */
  color: var(--color-text-1);
  white-space: pre-wrap;
  font-family: 'SFMono-Regular', Consolas, 'Liberation Mono', Menlo, monospace;
  line-height: 1.6;
}

.code-block {
  padding: 16px;
  border-radius: 8px;
  background: var(--color-bg-2);
  border: 1px solid var(--color-border-2);
  color: var(--color-text-1);
  white-space: pre-wrap;
  font-family: 'SFMono-Regular', Consolas, 'Liberation Mono', Menlo, monospace;
}
diff --git a/web/src/test/setup.ts b/web/src/test/setup.ts
new file mode 100644
index 0000000..a866f69
--- /dev/null
+++ b/web/src/test/setup.ts
@@ -0,0 +1,26 @@
+import '@testing-library/jest-dom/vitest'
+
+const storage = (() => {
+ const store = new Map()
+ return {
+ getItem: (key: string) => store.get(key) ?? null,
+ setItem: (key: string, value: string) => {
+ store.set(key, value)
+ },
+ removeItem: (key: string) => {
+ store.delete(key)
+ },
+ clear: () => {
+ store.clear()
+ },
+ key: (index: number) => Array.from(store.keys())[index] ?? null,
+ get length() {
+ return store.size
+ },
+ }
+})()
+
+Object.defineProperty(window, 'localStorage', {
+ value: storage,
+ configurable: true,
+})
diff --git a/web/src/types/api.ts b/web/src/types/api.ts
new file mode 100644
index 0000000..3a0df19
--- /dev/null
+++ b/web/src/types/api.ts
@@ -0,0 +1,10 @@
+export interface ApiResponse {
+ code: number;
+ message: string;
+ data: T;
+}
+
+export interface ApiErrorPayload {
+ code?: number;
+ message?: string;
+}
diff --git a/web/src/types/auth.ts b/web/src/types/auth.ts
new file mode 100644
index 0000000..8850e72
--- /dev/null
+++ b/web/src/types/auth.ts
@@ -0,0 +1,18 @@
/** Authenticated user profile as returned by the auth endpoints. */
export interface AuthUser {
  id: number;
  username: string;
  displayName: string;
  role: string; // e.g. 'admin' — full role set not visible here; confirm with backend
}

/** Credentials submitted to the login endpoint. */
export interface LoginPayload {
  username: string;
  password: string;
}

/** Successful login response: a bearer token plus the user it belongs to. */
export interface LoginResult {
  token: string;
  user: AuthUser;
}

/** Client-side auth lifecycle states. */
export type AuthStatus = 'idle' | 'bootstrapping' | 'authenticated' | 'anonymous';
diff --git a/web/src/types/backup-records.ts b/web/src/types/backup-records.ts
new file mode 100644
index 0000000..978794e
--- /dev/null
+++ b/web/src/types/backup-records.ts
@@ -0,0 +1,39 @@
/** Lifecycle states of a single backup run. */
export type BackupRecordStatus = 'running' | 'success' | 'failed'

/** One log line streamed for a backup record. */
export interface BackupLogEvent {
  recordId: number
  sequence: number // ordering key within a record's log stream — presumably monotonic; confirm
  level: string
  message: string
  timestamp: string
  completed: boolean // presumably true on the terminal event of the stream — confirm
  status: string
}

/** Row shape for the backup-record list views. */
export interface BackupRecordSummary {
  id: number
  taskId: number
  taskName: string
  storageTargetId: number
  storageTargetName: string
  status: BackupRecordStatus
  fileName: string
  fileSize: number // presumably bytes — confirm against API
  storagePath: string
  durationSeconds: number
  errorMessage: string
  startedAt: string
  completedAt?: string // absent while the run is still in progress
}

/** Summary plus the full log payload, for the record detail view. */
export interface BackupRecordDetail extends BackupRecordSummary {
  logContent: string
  logEvents?: BackupLogEvent[]
}

/** Optional filters for the record list query. */
export interface BackupRecordListFilter {
  taskId?: number
  status?: BackupRecordStatus | '' // '' presumably means "any status" — confirm
  dateFrom?: string
  dateTo?: string
}
diff --git a/web/src/types/backup-tasks.ts b/web/src/types/backup-tasks.ts
new file mode 100644
index 0000000..217beac
--- /dev/null
+++ b/web/src/types/backup-tasks.ts
@@ -0,0 +1,61 @@
/** Supported backup source kinds. */
export type BackupTaskType = 'file' | 'mysql' | 'sqlite' | 'postgresql'
/** Last-run state of a task. */
export type BackupTaskStatus = 'idle' | 'running' | 'success' | 'failed'
/** Archive compression choices. */
export type BackupCompression = 'gzip' | 'none'

/** Row shape for the task list. */
export interface BackupTaskSummary {
  id: number
  name: string
  type: BackupTaskType
  enabled: boolean
  cronExpr: string // schedule in cron syntax
  storageTargetId: number
  storageTargetName: string
  nodeId: number
  nodeName?: string
  tags: string // a single string — presumably delimiter-separated rather than an array; confirm
  retentionDays: number
  compression: BackupCompression
  encrypt: boolean
  maxBackups: number
  lastRunAt?: string
  lastStatus: BackupTaskStatus
  updatedAt: string
}

/** Full task shape for the detail/edit view; DB fields apply per task type — confirm which. */
export interface BackupTaskDetail extends BackupTaskSummary {
  sourcePath: string
  excludePatterns: string[]
  dbHost: string
  dbPort: number
  dbUser: string
  dbName: string
  dbPath: string // presumably the sqlite database file path — confirm
  maskedFields?: string[] // presumably keys whose values the server redacts — confirm
  createdAt: string
}

/** Create/update payload. Note dbPassword is write-only (absent from Detail). */
export interface BackupTaskPayload {
  name: string
  type: BackupTaskType
  enabled: boolean
  cronExpr: string
  sourcePath: string
  excludePatterns: string[]
  dbHost: string
  dbPort: number
  dbUser: string
  dbPassword: string
  dbName: string
  dbPath: string
  storageTargetId: number
  nodeId: number
  tags: string
  retentionDays: number
  compression: BackupCompression
  encrypt: boolean
  maxBackups: number
}

/** Partial payload for the enable/disable toggle endpoint. */
export interface BackupTaskTogglePayload {
  enabled?: boolean
}
diff --git a/web/src/types/dashboard.ts b/web/src/types/dashboard.ts
new file mode 100644
index 0000000..03cef09
--- /dev/null
+++ b/web/src/types/dashboard.ts
@@ -0,0 +1,25 @@
+import type { BackupRecordSummary } from './backup-records'
+
/** Aggregated stored size per storage target (for the usage chart). */
export interface DashboardStorageUsageItem {
  storageTargetId: number
  targetName: string
  totalSize: number // presumably bytes — confirm against API
}

/** One day's backup counts for the timeline chart. */
export interface BackupTimelinePoint {
  date: string
  total: number
  success: number
  failed: number
}

/** Payload of the dashboard stats endpoint. */
export interface DashboardStats {
  totalTasks: number
  enabledTasks: number
  totalRecords: number
  successRate: number // presumably a 0..1 ratio — confirm (cf. utils/format formatPercent)
  totalBackupBytes: number
  lastBackupAt?: string
  recentRecords: BackupRecordSummary[]
  storageUsage: DashboardStorageUsageItem[]
}
diff --git a/web/src/types/nodes.ts b/web/src/types/nodes.ts
new file mode 100644
index 0000000..c894b4c
--- /dev/null
+++ b/web/src/types/nodes.ts
@@ -0,0 +1,20 @@
/** A backup agent node as shown in the node list. */
export interface NodeSummary {
  id: number
  name: string
  hostname: string
  ipAddress: string
  status: 'online' | 'offline'
  isLocal: boolean // presumably the node embedded in the server process — confirm
  os: string
  arch: string
  agentVersion: string
  lastSeen: string
  createdAt: string
}

/** One entry returned by the directory-browse API. */
export interface DirEntry {
  name: string
  path: string
  isDir: boolean
  size: number // presumably bytes, likely 0 for directories — confirm
}
diff --git a/web/src/types/notifications.ts b/web/src/types/notifications.ts
new file mode 100644
index 0000000..287578a
--- /dev/null
+++ b/web/src/types/notifications.ts
@@ -0,0 +1,36 @@
+export type NotificationType = 'email' | 'webhook' | 'telegram'
+export type NotificationFieldType = 'input' | 'password' | 'number' | 'textarea'
+
+export interface NotificationSummary {
+ id: number
+ name: string
+ type: NotificationType
+ enabled: boolean
+ onSuccess: boolean
+ onFailure: boolean
+ updatedAt: string
+}
+
+export interface NotificationDetail extends NotificationSummary {
+ config: Record
+ maskedFields?: string[]
+}
+
+export interface NotificationPayload {
+ name: string
+ type: NotificationType
+ enabled: boolean
+ onSuccess: boolean
+ onFailure: boolean
+ config: Record
+}
+
+export interface NotificationFieldConfig {
+ key: string
+ label: string
+ type: NotificationFieldType
+ required?: boolean
+ placeholder?: string
+ description?: string
+ sensitive?: boolean
+}
diff --git a/web/src/types/storage-targets.ts b/web/src/types/storage-targets.ts
new file mode 100644
index 0000000..9f6cd0a
--- /dev/null
+++ b/web/src/types/storage-targets.ts
@@ -0,0 +1,54 @@
+export type StorageTargetType = 'local_disk' | 'google_drive' | 's3' | 'webdav' | 'aliyun_oss' | 'tencent_cos' | 'qiniu_kodo'
+export type StorageTestStatus = 'unknown' | 'success' | 'failed'
+export type StorageFieldType = 'input' | 'password' | 'switch'
+
+export interface StorageTargetSummary {
+ id: number
+ name: string
+ type: StorageTargetType
+ description: string
+ enabled: boolean
+ updatedAt: string
+ lastTestedAt?: string
+ lastTestStatus: StorageTestStatus
+ lastTestMessage?: string
+}
+
+export interface StorageTargetDetail extends StorageTargetSummary {
+ configVersion?: number
+ config: Record
+ maskedFields?: string[]
+}
+
+export interface StorageTargetPayload {
+ name: string
+ type: StorageTargetType
+ description: string
+ enabled: boolean
+ config: Record
+}
+
+export interface StorageConnectionTestResult {
+ success: boolean
+ message: string
+}
+
+export interface GoogleDriveAuthStartResult {
+ authUrl: string
+}
+
+export interface GoogleDriveCallbackResult {
+ success: boolean
+ message: string
+ target?: StorageTargetDetail
+}
+
+export interface StorageTargetFieldConfig {
+ key: string
+ label: string
+ type: StorageFieldType
+ required?: boolean
+ placeholder?: string
+ description?: string
+ sensitive?: boolean
+}
diff --git a/web/src/types/system.ts b/web/src/types/system.ts
new file mode 100644
index 0000000..99c81f6
--- /dev/null
+++ b/web/src/types/system.ts
@@ -0,0 +1,7 @@
/** Static runtime information shown on the system-info page. */
export interface SystemInfo {
  version: string;
  mode: string; // e.g. deployment/build mode — exact values not visible here; confirm
  startedAt: string;
  uptimeSeconds: number;
  databasePath: string;
}
diff --git a/web/src/utils/error.ts b/web/src/utils/error.ts
new file mode 100644
index 0000000..c109b90
--- /dev/null
+++ b/web/src/utils/error.ts
@@ -0,0 +1,11 @@
+import axios from 'axios'
+
+export function resolveErrorMessage(error: unknown, fallback = '请求失败,请稍后重试') {
+ if (axios.isAxiosError(error)) {
+ return error.response?.data?.message ?? fallback
+ }
+ if (error instanceof Error && error.message) {
+ return error.message
+ }
+ return fallback
+}
diff --git a/web/src/utils/format.test.ts b/web/src/utils/format.test.ts
new file mode 100644
index 0000000..c9598b2
--- /dev/null
+++ b/web/src/utils/format.test.ts
@@ -0,0 +1,16 @@
import { describe, expect, it } from 'vitest'
import { formatBytes, formatDuration, formatPercent } from './format'

// Unit tests for the pure formatting helpers in utils/format.ts.
describe('format utils', () => {
  it('formats bytes into readable units', () => {
    expect(formatBytes(0)).toBe('0 B')
    expect(formatBytes(1024)).toBe('1 KB')
    expect(formatBytes(1536)).toBe('1.5 KB') // one decimal below 10 in non-byte units
  })

  it('formats percent and duration', () => {
    expect(formatPercent(0.56)).toBe('56%') // input is a 0..1 ratio
    expect(formatDuration(45)).toBe('45 秒')
    expect(formatDuration(3661)).toBe('1 小时 1 分 1 秒')
  })
})
diff --git a/web/src/utils/format.ts b/web/src/utils/format.ts
new file mode 100644
index 0000000..884f235
--- /dev/null
+++ b/web/src/utils/format.ts
@@ -0,0 +1,56 @@
+export function formatDateTime(value?: string | Date | null) {
+ if (!value) {
+ return '-'
+ }
+ const date = value instanceof Date ? value : new Date(value)
+ if (Number.isNaN(date.getTime())) {
+ return '-'
+ }
+ return new Intl.DateTimeFormat('zh-CN', {
+ year: 'numeric',
+ month: '2-digit',
+ day: '2-digit',
+ hour: '2-digit',
+ minute: '2-digit',
+ second: '2-digit',
+ }).format(date)
+}
+
+export function formatBytes(value?: number | null) {
+ if (!value || value <= 0) {
+ return '0 B'
+ }
+ const units = ['B', 'KB', 'MB', 'GB', 'TB']
+ let current = value
+ let index = 0
+ while (current >= 1024 && index < units.length - 1) {
+ current /= 1024
+ index += 1
+ }
+ const digits = current >= 10 || index === 0 ? 0 : 1
+ const formatted = current.toFixed(digits).replace(/\.0$/, '')
+ return `${formatted} ${units[index]}`
+}
+
+export function formatPercent(value?: number | null) {
+ if (typeof value !== 'number' || Number.isNaN(value)) {
+ return '0%'
+ }
+ return `${(value * 100).toFixed(value >= 0.1 ? 0 : 1)}%`
+}
+
+export function formatDuration(seconds?: number | null) {
+ if (!seconds || seconds <= 0) {
+ return '0 秒'
+ }
+ if (seconds < 60) {
+ return `${seconds} 秒`
+ }
+ const hours = Math.floor(seconds / 3600)
+ const minutes = Math.floor((seconds % 3600) / 60)
+ const remainSeconds = seconds % 60
+ if (hours > 0) {
+ return `${hours} 小时 ${minutes} 分 ${remainSeconds} 秒`
+ }
+ return `${minutes} 分 ${remainSeconds} 秒`
+}
diff --git a/web/tsconfig.json b/web/tsconfig.json
new file mode 100644
index 0000000..05eda53
--- /dev/null
+++ b/web/tsconfig.json
@@ -0,0 +1,34 @@
+{
+ "compilerOptions": {
+ "target": "ES2020",
+ "useDefineForClassFields": true,
+ "lib": ["ES2020", "DOM", "DOM.Iterable"],
+ "allowJs": false,
+ "skipLibCheck": true,
+ "esModuleInterop": true,
+ "allowSyntheticDefaultImports": true,
+ "strict": true,
+ "forceConsistentCasingInFileNames": true,
+ "module": "ESNext",
+ "moduleResolution": "Bundler",
+ "resolveJsonModule": true,
+ "isolatedModules": true,
+ "noEmit": true,
+ "jsx": "react-jsx",
+ "types": ["vite/client"]
+ },
+ "include": ["src"],
+ "exclude": [
+ "src/**/*.test.ts",
+ "src/**/*.test.tsx",
+ "src/app.tsx",
+ "src/components/auth-guard.tsx",
+ "src/components/page-card.tsx",
+ "src/layouts/protected-layout.tsx",
+ "src/pages/dashboard/page.tsx",
+ "src/pages/login/page.tsx",
+ "src/pages/system-info/page.tsx",
+ "src/stores/auth.test.ts"
+ ],
+ "references": [{ "path": "./tsconfig.node.json" }]
+}
diff --git a/web/tsconfig.node.json b/web/tsconfig.node.json
new file mode 100644
index 0000000..c0a187b
--- /dev/null
+++ b/web/tsconfig.node.json
@@ -0,0 +1,12 @@
+{
+ "compilerOptions": {
+ "composite": true,
+ "target": "ES2020",
+ "lib": ["ES2020", "DOM"],
+ "module": "ESNext",
+ "moduleResolution": "Bundler",
+ "allowSyntheticDefaultImports": true,
+ "types": ["node"]
+ },
+ "include": ["vite.config.ts"]
+}
diff --git a/web/tsconfig.node.tsbuildinfo b/web/tsconfig.node.tsbuildinfo
new file mode 100644
index 0000000..52dc24c
--- /dev/null
+++ b/web/tsconfig.node.tsbuildinfo
@@ -0,0 +1 @@
+{"fileNames":["./node_modules/typescript/lib/lib.es5.d.ts","./node_modules/typescript/lib/lib.es2015.d.ts","./node_modules/typescript/lib/lib.es2016.d.ts","./node_modules/typescript/lib/lib.es2017.d.ts","./node_modules/typescript/lib/lib.es2018.d.ts","./node_modules/typescript/lib/lib.es2019.d.ts","./node_modules/typescript/lib/lib.es2020.d.ts","./node_modules/typescript/lib/lib.dom.d.ts","./node_modules/typescript/lib/lib.es2015.core.d.ts","./node_modules/typescript/lib/lib.es2015.collection.d.ts","./node_modules/typescript/lib/lib.es2015.generator.d.ts","./node_modules/typescript/lib/lib.es2015.iterable.d.ts","./node_modules/typescript/lib/lib.es2015.promise.d.ts","./node_modules/typescript/lib/lib.es2015.proxy.d.ts","./node_modules/typescript/lib/lib.es2015.reflect.d.ts","./node_modules/typescript/lib/lib.es2015.symbol.d.ts","./node_modules/typescript/lib/lib.es2015.symbol.wellknown.d.ts","./node_modules/typescript/lib/lib.es2016.array.include.d.ts","./node_modules/typescript/lib/lib.es2016.intl.d.ts","./node_modules/typescript/lib/lib.es2017.arraybuffer.d.ts","./node_modules/typescript/lib/lib.es2017.date.d.ts","./node_modules/typescript/lib/lib.es2017.object.d.ts","./node_modules/typescript/lib/lib.es2017.sharedmemory.d.ts","./node_modules/typescript/lib/lib.es2017.string.d.ts","./node_modules/typescript/lib/lib.es2017.intl.d.ts","./node_modules/typescript/lib/lib.es2017.typedarrays.d.ts","./node_modules/typescript/lib/lib.es2018.asyncgenerator.d.ts","./node_modules/typescript/lib/lib.es2018.asynciterable.d.ts","./node_modules/typescript/lib/lib.es2018.intl.d.ts","./node_modules/typescript/lib/lib.es2018.promise.d.ts","./node_modules/typescript/lib/lib.es2018.regexp.d.ts","./node_modules/typescript/lib/lib.es2019.array.d.ts","./node_modules/typescript/lib/lib.es2019.object.d.ts","./node_modules/typescript/lib/lib.es2019.string.d.ts","./node_modules/typescript/lib/lib.es2019.symbol.d.ts","./node_modules/typescript/lib/lib.es2019.intl.d.ts","./node_modules/types
cript/lib/lib.es2020.bigint.d.ts","./node_modules/typescript/lib/lib.es2020.date.d.ts","./node_modules/typescript/lib/lib.es2020.promise.d.ts","./node_modules/typescript/lib/lib.es2020.sharedmemory.d.ts","./node_modules/typescript/lib/lib.es2020.string.d.ts","./node_modules/typescript/lib/lib.es2020.symbol.wellknown.d.ts","./node_modules/typescript/lib/lib.es2020.intl.d.ts","./node_modules/typescript/lib/lib.es2020.number.d.ts","./node_modules/typescript/lib/lib.decorators.d.ts","./node_modules/typescript/lib/lib.decorators.legacy.d.ts","./node_modules/@vitest/spy/dist/index.d.ts","./node_modules/@vitest/pretty-format/dist/index.d.ts","./node_modules/@vitest/utils/dist/types.d.ts","./node_modules/@vitest/utils/dist/helpers.d.ts","./node_modules/tinyrainbow/dist/index-8b61d5bc.d.ts","./node_modules/tinyrainbow/dist/node.d.ts","./node_modules/@vitest/utils/dist/index.d.ts","./node_modules/@vitest/utils/dist/types.d-bcelap-c.d.ts","./node_modules/@vitest/utils/dist/diff.d.ts","./node_modules/@vitest/expect/dist/index.d.ts","./node_modules/@types/node/compatibility/disposable.d.ts","./node_modules/@types/node/compatibility/indexable.d.ts","./node_modules/@types/node/compatibility/iterators.d.ts","./node_modules/@types/node/compatibility/index.d.ts","./node_modules/@types/node/globals.typedarray.d.ts","./node_modules/@types/node/buffer.buffer.d.ts","./node_modules/@types/node/globals.d.ts","./node_modules/@types/node/web-globals/abortcontroller.d.ts","./node_modules/@types/node/web-globals/domexception.d.ts","./node_modules/@types/node/web-globals/events.d.ts","./node_modules/undici-types/header.d.ts","./node_modules/undici-types/readable.d.ts","./node_modules/undici-types/file.d.ts","./node_modules/undici-types/fetch.d.ts","./node_modules/undici-types/formdata.d.ts","./node_modules/undici-types/connector.d.ts","./node_modules/undici-types/client.d.ts","./node_modules/undici-types/errors.d.ts","./node_modules/undici-types/dispatcher.d.ts","./node_modules/undici-types/glo
bal-dispatcher.d.ts","./node_modules/undici-types/global-origin.d.ts","./node_modules/undici-types/pool-stats.d.ts","./node_modules/undici-types/pool.d.ts","./node_modules/undici-types/handlers.d.ts","./node_modules/undici-types/balanced-pool.d.ts","./node_modules/undici-types/agent.d.ts","./node_modules/undici-types/mock-interceptor.d.ts","./node_modules/undici-types/mock-agent.d.ts","./node_modules/undici-types/mock-client.d.ts","./node_modules/undici-types/mock-pool.d.ts","./node_modules/undici-types/mock-errors.d.ts","./node_modules/undici-types/proxy-agent.d.ts","./node_modules/undici-types/env-http-proxy-agent.d.ts","./node_modules/undici-types/retry-handler.d.ts","./node_modules/undici-types/retry-agent.d.ts","./node_modules/undici-types/api.d.ts","./node_modules/undici-types/interceptors.d.ts","./node_modules/undici-types/util.d.ts","./node_modules/undici-types/cookies.d.ts","./node_modules/undici-types/patch.d.ts","./node_modules/undici-types/websocket.d.ts","./node_modules/undici-types/eventsource.d.ts","./node_modules/undici-types/filereader.d.ts","./node_modules/undici-types/diagnostics-channel.d.ts","./node_modules/undici-types/content-type.d.ts","./node_modules/undici-types/cache.d.ts","./node_modules/undici-types/index.d.ts","./node_modules/@types/node/web-globals/fetch.d.ts","./node_modules/@types/node/web-globals/navigator.d.ts","./node_modules/@types/node/web-globals/storage.d.ts","./node_modules/@types/node/assert.d.ts","./node_modules/@types/node/assert/strict.d.ts","./node_modules/@types/node/async_hooks.d.ts","./node_modules/@types/node/buffer.d.ts","./node_modules/@types/node/child_process.d.ts","./node_modules/@types/node/cluster.d.ts","./node_modules/@types/node/console.d.ts","./node_modules/@types/node/constants.d.ts","./node_modules/@types/node/crypto.d.ts","./node_modules/@types/node/dgram.d.ts","./node_modules/@types/node/diagnostics_channel.d.ts","./node_modules/@types/node/dns.d.ts","./node_modules/@types/node/dns/promises.d.ts","./nod
e_modules/@types/node/domain.d.ts","./node_modules/@types/node/events.d.ts","./node_modules/@types/node/fs.d.ts","./node_modules/@types/node/fs/promises.d.ts","./node_modules/@types/node/http.d.ts","./node_modules/@types/node/http2.d.ts","./node_modules/@types/node/https.d.ts","./node_modules/@types/node/inspector.d.ts","./node_modules/@types/node/inspector.generated.d.ts","./node_modules/@types/node/module.d.ts","./node_modules/@types/node/net.d.ts","./node_modules/@types/node/os.d.ts","./node_modules/@types/node/path.d.ts","./node_modules/@types/node/perf_hooks.d.ts","./node_modules/@types/node/process.d.ts","./node_modules/@types/node/punycode.d.ts","./node_modules/@types/node/querystring.d.ts","./node_modules/@types/node/readline.d.ts","./node_modules/@types/node/readline/promises.d.ts","./node_modules/@types/node/repl.d.ts","./node_modules/@types/node/sea.d.ts","./node_modules/@types/node/sqlite.d.ts","./node_modules/@types/node/stream.d.ts","./node_modules/@types/node/stream/promises.d.ts","./node_modules/@types/node/stream/consumers.d.ts","./node_modules/@types/node/stream/web.d.ts","./node_modules/@types/node/string_decoder.d.ts","./node_modules/@types/node/test.d.ts","./node_modules/@types/node/timers.d.ts","./node_modules/@types/node/timers/promises.d.ts","./node_modules/@types/node/tls.d.ts","./node_modules/@types/node/trace_events.d.ts","./node_modules/@types/node/tty.d.ts","./node_modules/@types/node/url.d.ts","./node_modules/@types/node/util.d.ts","./node_modules/@types/node/v8.d.ts","./node_modules/@types/node/vm.d.ts","./node_modules/@types/node/wasi.d.ts","./node_modules/@types/node/worker_threads.d.ts","./node_modules/@types/node/zlib.d.ts","./node_modules/@types/node/index.d.ts","./node_modules/@types/estree/index.d.ts","./node_modules/rollup/dist/rollup.d.ts","./node_modules/rollup/dist/parseast.d.ts","./node_modules/vite/types/hmrpayload.d.ts","./node_modules/vite/types/customevent.d.ts","./node_modules/vite/types/hot.d.ts","./node_modules/vite/
dist/node/modulerunnertransport.d-dj_me5sf.d.ts","./node_modules/vite/dist/node/module-runner.d.ts","./node_modules/esbuild/lib/main.d.ts","./node_modules/source-map-js/source-map.d.ts","./node_modules/postcss/lib/previous-map.d.ts","./node_modules/postcss/lib/input.d.ts","./node_modules/postcss/lib/css-syntax-error.d.ts","./node_modules/postcss/lib/declaration.d.ts","./node_modules/postcss/lib/root.d.ts","./node_modules/postcss/lib/warning.d.ts","./node_modules/postcss/lib/lazy-result.d.ts","./node_modules/postcss/lib/no-work-result.d.ts","./node_modules/postcss/lib/processor.d.ts","./node_modules/postcss/lib/result.d.ts","./node_modules/postcss/lib/document.d.ts","./node_modules/postcss/lib/rule.d.ts","./node_modules/postcss/lib/node.d.ts","./node_modules/postcss/lib/comment.d.ts","./node_modules/postcss/lib/container.d.ts","./node_modules/postcss/lib/at-rule.d.ts","./node_modules/postcss/lib/list.d.ts","./node_modules/postcss/lib/postcss.d.ts","./node_modules/postcss/lib/postcss.d.mts","./node_modules/vite/types/internal/lightningcssoptions.d.ts","./node_modules/vite/types/internal/csspreprocessoroptions.d.ts","./node_modules/vite/types/importglob.d.ts","./node_modules/vite/types/metadata.d.ts","./node_modules/vite/dist/node/index.d.ts","./node_modules/@vitest/runner/dist/tasks.d-cksck4of.d.ts","./node_modules/@vitest/runner/dist/types.d.ts","./node_modules/@vitest/utils/dist/error.d.ts","./node_modules/@vitest/runner/dist/index.d.ts","./node_modules/vitest/optional-types.d.ts","./node_modules/vitest/dist/chunks/environment.d.cl3nlxbe.d.ts","./node_modules/@vitest/mocker/dist/registry.d-d765pazg.d.ts","./node_modules/@vitest/mocker/dist/types.d-d_arzrdy.d.ts","./node_modules/@vitest/mocker/dist/index.d.ts","./node_modules/@vitest/utils/dist/source-map.d.ts","./node_modules/vite-node/dist/trace-mapping.d-dlvdeqop.d.ts","./node_modules/vite-node/dist/index.d-dgmxd2u7.d.ts","./node_modules/vite-node/dist/index.d.ts","./node_modules/@vitest/snapshot/dist/environment.
d-dhdq1csl.d.ts","./node_modules/@vitest/snapshot/dist/rawsnapshot.d-lfsmjfud.d.ts","./node_modules/@vitest/snapshot/dist/index.d.ts","./node_modules/@vitest/snapshot/dist/environment.d.ts","./node_modules/vitest/dist/chunks/config.d.d2roskhv.d.ts","./node_modules/vitest/dist/chunks/worker.d.1gmbbd7g.d.ts","./node_modules/@types/deep-eql/index.d.ts","./node_modules/assertion-error/index.d.ts","./node_modules/@types/chai/index.d.ts","./node_modules/@vitest/runner/dist/utils.d.ts","./node_modules/tinybench/dist/index.d.ts","./node_modules/vitest/dist/chunks/benchmark.d.bwvbvtda.d.ts","./node_modules/vite-node/dist/client.d.ts","./node_modules/vitest/dist/chunks/coverage.d.s9rmnxie.d.ts","./node_modules/@vitest/snapshot/dist/manager.d.ts","./node_modules/vitest/dist/chunks/reporters.d.bflkqcl6.d.ts","./node_modules/vitest/dist/chunks/vite.d.cmlllifp.d.ts","./node_modules/vitest/dist/config.d.ts","./node_modules/vitest/config.d.ts","./node_modules/@babel/types/lib/index.d.ts","./node_modules/@types/babel__generator/index.d.ts","./node_modules/@babel/parser/typings/babel-parser.d.ts","./node_modules/@types/babel__template/index.d.ts","./node_modules/@types/babel__traverse/index.d.ts","./node_modules/@types/babel__core/index.d.ts","./node_modules/@vitejs/plugin-react/dist/index.d.ts","./vite.config.ts"],"fileIdsList":[[62,110,127,128,227],[62,110,127,128],[62,110,127,128,227,228,229,230,231],[62,110,127,128,227,229],[62,110,127,128,214,215],[62,107,108,110,127,128],[62,109,110,127,128],[110,127,128],[62,110,115,127,128,145],[62,110,111,116,121,127,128,130,142,153],[62,110,111,112,121,127,128,130],[57,58,59,62,110,127,128],[62,110,113,127,128,154],[62,110,114,115,122,127,128,131],[62,110,115,127,128,142,150],[62,110,116,118,121,127,128,130],[62,109,110,117,127,128],[62,110,118,119,127,128],[62,110,120,121,127,128],[62,109,110,121,127,128],[62,110,121,122,123,127,128,142,153],[62,110,121,122,123,127,128,137,142,145],[62,103,110,118,121,124,127,128,130,142,153],[62,110,121,1
22,124,125,127,128,130,142,150,153],[62,110,124,126,127,128,142,150,153],[60,61,62,63,64,65,66,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159],[62,110,121,127,128],[62,110,127,128,129,153],[62,110,118,121,127,128,130,142],[62,110,127,128,131],[62,110,127,128,132],[62,109,110,127,128,133],[62,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159],[62,110,127,128,135],[62,110,127,128,136],[62,110,121,127,128,137,138],[62,110,127,128,137,139,154,156],[62,110,122,127,128],[62,110,121,127,128,142,143,145],[62,110,127,128,144,145],[62,110,127,128,142,143],[62,110,127,128,145],[62,110,127,128,146],[62,107,110,127,128,142,147,153],[62,110,121,127,128,148,149],[62,110,127,128,148,149],[62,110,115,127,128,130,142,150],[62,110,127,128,151],[62,110,127,128,130,152],[62,110,124,127,128,136,153],[62,110,115,127,128,154],[62,110,127,128,142,155],[62,110,127,128,129,156],[62,110,127,128,157],[62,103,110,127,128],[62,103,110,121,123,127,128,133,142,145,153,155,156,158],[62,110,127,128,142,159],[62,110,127,128,194,224,232],[47,52,53,55,62,110,127,128],[62,110,127,128,201,202],[53,55,62,110,127,128,195,196,197],[53,62,110,127,128],[53,55,62,110,127,128,195],[53,62,110,127,128,195],[62,110,127,128,208],[48,62,110,127,128,208,209],[48,62,110,127,128,208],[48,54,62,110,127,128],[49,62,110,127,128],[48,49,50,52,62,110,127,128],[48,62,110,127,128],[62,110,127,128,185],[62,110,127,128,183,185],[62,110,127,128,174,182,183,184,186,188],[62,110,127,128,172],[62,110,127,128,175,180,185,188],[62,110,127,128,171,188],[62,110,127,128,175,176,179,180,181,188],[62,110,127,128,175,176,177,179,180,188],[62,110,127,128,172,173,174,175,176,180,181,182,184,185,186,188],[62,11
0,127,128,188],[62,110,127,128,170,172,173,174,175,176,177,179,180,181,182,183,184,185,186,187],[62,110,127,128,170,188],[62,110,127,128,175,177,178,180,181,188],[62,110,127,128,179,188],[62,110,127,128,180,181,185,188],[62,110,127,128,173,183],[62,110,127,128,162,193,194],[62,110,127,128,161,162],[51,62,110,127,128],[62,75,79,110,127,128,153],[62,75,110,127,128,142,153],[62,70,110,127,128],[62,72,75,110,127,128,150,153],[62,110,127,128,130,150],[62,110,127,128,160],[62,70,110,127,128,160],[62,72,75,110,127,128,130,153],[62,67,68,71,74,110,121,127,128,142,153],[62,75,82,110,127,128],[62,67,73,110,127,128],[62,75,96,97,110,127,128],[62,71,75,110,127,128,145,153,160],[62,96,110,127,128,160],[62,69,70,110,127,128,160],[62,75,110,127,128],[62,69,70,71,72,73,74,75,76,77,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,97,98,99,100,101,102,110,127,128],[62,75,90,110,127,128],[62,75,82,83,110,127,128],[62,73,75,83,84,110,127,128],[62,74,110,127,128],[62,67,70,75,110,127,128],[62,75,79,83,84,110,127,128],[62,79,110,127,128],[62,73,75,78,110,127,128,153],[62,67,72,75,82,110,127,128],[62,110,127,128,142],[62,70,75,96,110,127,128,158,160],[62,110,127,128,205,206],[62,110,127,128,205],[62,110,121,122,124,125,126,127,128,130,142,150,153,159,160,162,163,164,165,167,168,169,189,190,191,192,193,194],[62,110,127,128,164,165,166,167],[62,110,127,128,164],[62,110,127,128,165],[62,110,127,128,162,194],[56,62,110,127,128,225],[62,110,127,128,198,217,218],[48,55,62,110,127,128,198,210,211],[62,110,127,128,220],[62,110,127,128,199],[48,53,55,62,110,113,122,127,128,142,194,198,200,203,204,207,210,212,213,216,219,221,222,224],[62,110,127,128,194,223,224],[62,110,127,128,198,200,207,210,212],[48,53,55,62,110,113,122,127,128,142,194,198,199,200,203,204,207,210,211,212,213,216,217,218,219,220,221,222,223,224],[62,110,127,128,226,233]],"fileInfos":[{"version":"c430d44666289dae81f30fa7b2edebf186ecc91a2d4c71266ea6ae76388792e1","affectsGlobalScope":true,"impliedFormat":1},{"version":"45b7ab580de
ca34ae9729e97c13cfd999df04416a79116c3bfb483804f85ded4","impliedFormat":1},{"version":"3facaf05f0c5fc569c5649dd359892c98a85557e3e0c847964caeb67076f4d75","impliedFormat":1},{"version":"e44bb8bbac7f10ecc786703fe0a6a4b952189f908707980ba8f3c8975a760962","impliedFormat":1},{"version":"5e1c4c362065a6b95ff952c0eab010f04dcd2c3494e813b493ecfd4fcb9fc0d8","impliedFormat":1},{"version":"68d73b4a11549f9c0b7d352d10e91e5dca8faa3322bfb77b661839c42b1ddec7","impliedFormat":1},{"version":"5efce4fc3c29ea84e8928f97adec086e3dc876365e0982cc8479a07954a3efd4","impliedFormat":1},{"version":"080941d9f9ff9307f7e27a83bcd888b7c8270716c39af943532438932ec1d0b9","affectsGlobalScope":true,"impliedFormat":1},{"version":"c57796738e7f83dbc4b8e65132f11a377649c00dd3eee333f672b8f0a6bea671","affectsGlobalScope":true,"impliedFormat":1},{"version":"dc2df20b1bcdc8c2d34af4926e2c3ab15ffe1160a63e58b7e09833f616efff44","affectsGlobalScope":true,"impliedFormat":1},{"version":"515d0b7b9bea2e31ea4ec968e9edd2c39d3eebf4a2d5cbd04e88639819ae3b71","affectsGlobalScope":true,"impliedFormat":1},{"version":"0559b1f683ac7505ae451f9a96ce4c3c92bdc71411651ca6ddb0e88baaaad6a3","affectsGlobalScope":true,"impliedFormat":1},{"version":"0dc1e7ceda9b8b9b455c3a2d67b0412feab00bd2f66656cd8850e8831b08b537","affectsGlobalScope":true,"impliedFormat":1},{"version":"ce691fb9e5c64efb9547083e4a34091bcbe5bdb41027e310ebba8f7d96a98671","affectsGlobalScope":true,"impliedFormat":1},{"version":"8d697a2a929a5fcb38b7a65594020fcef05ec1630804a33748829c5ff53640d0","affectsGlobalScope":true,"impliedFormat":1},{"version":"4ff2a353abf8a80ee399af572debb8faab2d33ad38c4b4474cff7f26e7653b8d","affectsGlobalScope":true,"impliedFormat":1},{"version":"fb0f136d372979348d59b3f5020b4cdb81b5504192b1cacff5d1fbba29378aa1","affectsGlobalScope":true,"impliedFormat":1},{"version":"d15bea3d62cbbdb9797079416b8ac375ae99162a7fba5de2c6c505446486ac0a","affectsGlobalScope":true,"impliedFormat":1},{"version":"68d18b664c9d32a7336a70235958b8997ebc1c3b8505f4f1ae2b7e7753b87618","affectsGl
obalScope":true,"impliedFormat":1},{"version":"eb3d66c8327153d8fa7dd03f9c58d351107fe824c79e9b56b462935176cdf12a","affectsGlobalScope":true,"impliedFormat":1},{"version":"38f0219c9e23c915ef9790ab1d680440d95419ad264816fa15009a8851e79119","affectsGlobalScope":true,"impliedFormat":1},{"version":"69ab18c3b76cd9b1be3d188eaf8bba06112ebbe2f47f6c322b5105a6fbc45a2e","affectsGlobalScope":true,"impliedFormat":1},{"version":"a680117f487a4d2f30ea46f1b4b7f58bef1480456e18ba53ee85c2746eeca012","affectsGlobalScope":true,"impliedFormat":1},{"version":"2f11ff796926e0832f9ae148008138ad583bd181899ab7dd768a2666700b1893","affectsGlobalScope":true,"impliedFormat":1},{"version":"4de680d5bb41c17f7f68e0419412ca23c98d5749dcaaea1896172f06435891fc","affectsGlobalScope":true,"impliedFormat":1},{"version":"954296b30da6d508a104a3a0b5d96b76495c709785c1d11610908e63481ee667","affectsGlobalScope":true,"impliedFormat":1},{"version":"ac9538681b19688c8eae65811b329d3744af679e0bdfa5d842d0e32524c73e1c","affectsGlobalScope":true,"impliedFormat":1},{"version":"0a969edff4bd52585473d24995c5ef223f6652d6ef46193309b3921d65dd4376","affectsGlobalScope":true,"impliedFormat":1},{"version":"9e9fbd7030c440b33d021da145d3232984c8bb7916f277e8ffd3dc2e3eae2bdb","affectsGlobalScope":true,"impliedFormat":1},{"version":"811ec78f7fefcabbda4bfa93b3eb67d9ae166ef95f9bff989d964061cbf81a0c","affectsGlobalScope":true,"impliedFormat":1},{"version":"717937616a17072082152a2ef351cb51f98802fb4b2fdabd32399843875974ca","affectsGlobalScope":true,"impliedFormat":1},{"version":"d7e7d9b7b50e5f22c915b525acc5a49a7a6584cf8f62d0569e557c5cfc4b2ac2","affectsGlobalScope":true,"impliedFormat":1},{"version":"71c37f4c9543f31dfced6c7840e068c5a5aacb7b89111a4364b1d5276b852557","affectsGlobalScope":true,"impliedFormat":1},{"version":"576711e016cf4f1804676043e6a0a5414252560eb57de9faceee34d79798c850","affectsGlobalScope":true,"impliedFormat":1},{"version":"89c1b1281ba7b8a96efc676b11b264de7a8374c5ea1e6617f11880a13fc56dc6","affectsGlobalScope":true,"impliedFormat":
1},{"version":"74f7fa2d027d5b33eb0471c8e82a6c87216223181ec31247c357a3e8e2fddc5b","affectsGlobalScope":true,"impliedFormat":1},{"version":"d6d7ae4d1f1f3772e2a3cde568ed08991a8ae34a080ff1151af28b7f798e22ca","affectsGlobalScope":true,"impliedFormat":1},{"version":"063600664504610fe3e99b717a1223f8b1900087fab0b4cad1496a114744f8df","affectsGlobalScope":true,"impliedFormat":1},{"version":"934019d7e3c81950f9a8426d093458b65d5aff2c7c1511233c0fd5b941e608ab","affectsGlobalScope":true,"impliedFormat":1},{"version":"52ada8e0b6e0482b728070b7639ee42e83a9b1c22d205992756fe020fd9f4a47","affectsGlobalScope":true,"impliedFormat":1},{"version":"3bdefe1bfd4d6dee0e26f928f93ccc128f1b64d5d501ff4a8cf3c6371200e5e6","affectsGlobalScope":true,"impliedFormat":1},{"version":"59fb2c069260b4ba00b5643b907ef5d5341b167e7d1dbf58dfd895658bda2867","affectsGlobalScope":true,"impliedFormat":1},{"version":"639e512c0dfc3fad96a84caad71b8834d66329a1f28dc95e3946c9b58176c73a","affectsGlobalScope":true,"impliedFormat":1},{"version":"368af93f74c9c932edd84c58883e736c9e3d53cec1fe24c0b0ff451f529ceab1","affectsGlobalScope":true,"impliedFormat":1},{"version":"8e7f8264d0fb4c5339605a15daadb037bf238c10b654bb3eee14208f860a32ea","affectsGlobalScope":true,"impliedFormat":1},{"version":"782dec38049b92d4e85c1585fbea5474a219c6984a35b004963b00beb1aab538","affectsGlobalScope":true,"impliedFormat":1},{"version":"04471dc55f802c29791cc75edda8c4dd2a121f71c2401059da61eff83099e8ab","impliedFormat":99},{"version":"5c54a34e3d91727f7ae840bfe4d5d1c9a2f93c54cb7b6063d06ee4a6c3322656","impliedFormat":99},{"version":"db4da53b03596668cf6cc9484834e5de3833b9e7e64620cf08399fe069cd398d","impliedFormat":99},{"version":"ac7c28f153820c10850457994db1462d8c8e462f253b828ad942a979f726f2f9","impliedFormat":99},{"version":"f9b028d3c3891dd817e24d53102132b8f696269309605e6ed4f0db2c113bbd82","impliedFormat":99},{"version":"fb7c8d90e52e2884509166f96f3d591020c7b7977ab473b746954b0c8d100960","impliedFormat":99},{"version":"0bff51d6ed0c9093f6955b9d8258ce152ddb273359d5
0a897d8baabcb34de2c4","impliedFormat":99},{"version":"ef13c73d6157a32933c612d476c1524dd674cf5b9a88571d7d6a0d147544d529","impliedFormat":99},{"version":"13918e2b81c4288695f9b1f3dcc2468caf0f848d5c1f3dc00071c619d34ff63a","impliedFormat":99},{"version":"120a80aa556732f684db3ed61aeff1d6671e1655bd6cba0aa88b22b88ac9a6b1","affectsGlobalScope":true,"impliedFormat":99},{"version":"6c7176368037af28cb72f2392010fa1cef295d6d6744bca8cfb54985f3a18c3e","affectsGlobalScope":true,"impliedFormat":1},{"version":"ab41ef1f2cdafb8df48be20cd969d875602483859dc194e9c97c8a576892c052","affectsGlobalScope":true,"impliedFormat":1},{"version":"437e20f2ba32abaeb7985e0afe0002de1917bc74e949ba585e49feba65da6ca1","affectsGlobalScope":true,"impliedFormat":1},{"version":"21d819c173c0cf7cc3ce57c3276e77fd9a8a01d35a06ad87158781515c9a438a","impliedFormat":1},{"version":"98cffbf06d6bab333473c70a893770dbe990783904002c4f1a960447b4b53dca","affectsGlobalScope":true,"impliedFormat":1},{"version":"3af97acf03cc97de58a3a4bc91f8f616408099bc4233f6d0852e72a8ffb91ac9","affectsGlobalScope":true,"impliedFormat":1},{"version":"808069bba06b6768b62fd22429b53362e7af342da4a236ed2d2e1c89fcca3b4a","affectsGlobalScope":true,"impliedFormat":1},{"version":"1db0b7dca579049ca4193d034d835f6bfe73096c73663e5ef9a0b5779939f3d0","affectsGlobalScope":true,"impliedFormat":1},{"version":"9798340ffb0d067d69b1ae5b32faa17ab31b82466a3fc00d8f2f2df0c8554aaa","affectsGlobalScope":true,"impliedFormat":1},{"version":"f26b11d8d8e4b8028f1c7d618b22274c892e4b0ef5b3678a8ccbad85419aef43","affectsGlobalScope":true,"impliedFormat":1},{"version":"5929864ce17fba74232584d90cb721a89b7ad277220627cc97054ba15a98ea8f","impliedFormat":1},{"version":"763fe0f42b3d79b440a9b6e51e9ba3f3f91352469c1e4b3b67bfa4ff6352f3f4","impliedFormat":1},{"version":"25c8056edf4314820382a5fdb4bb7816999acdcb929c8f75e3f39473b87e85bc","impliedFormat":1},{"version":"c464d66b20788266e5353b48dc4aa6bc0dc4a707276df1e7152ab0c9ae21fad8","impliedFormat":1},{"version":"78d0d27c130d35c60b5e5566c9f1e5be77
caf39804636bc1a40133919a949f21","impliedFormat":1},{"version":"c6fd2c5a395f2432786c9cb8deb870b9b0e8ff7e22c029954fabdd692bff6195","impliedFormat":1},{"version":"1d6e127068ea8e104a912e42fc0a110e2aa5a66a356a917a163e8cf9a65e4a75","impliedFormat":1},{"version":"5ded6427296cdf3b9542de4471d2aa8d3983671d4cac0f4bf9c637208d1ced43","impliedFormat":1},{"version":"7f182617db458e98fc18dfb272d40aa2fff3a353c44a89b2c0ccb3937709bfb5","impliedFormat":1},{"version":"cadc8aced301244057c4e7e73fbcae534b0f5b12a37b150d80e5a45aa4bebcbd","impliedFormat":1},{"version":"385aab901643aa54e1c36f5ef3107913b10d1b5bb8cbcd933d4263b80a0d7f20","impliedFormat":1},{"version":"9670d44354bab9d9982eca21945686b5c24a3f893db73c0dae0fd74217a4c219","impliedFormat":1},{"version":"0b8a9268adaf4da35e7fa830c8981cfa22adbbe5b3f6f5ab91f6658899e657a7","impliedFormat":1},{"version":"11396ed8a44c02ab9798b7dca436009f866e8dae3c9c25e8c1fbc396880bf1bb","impliedFormat":1},{"version":"ba7bc87d01492633cb5a0e5da8a4a42a1c86270e7b3d2dea5d156828a84e4882","impliedFormat":1},{"version":"4893a895ea92c85345017a04ed427cbd6a1710453338df26881a6019432febdd","impliedFormat":1},{"version":"c21dc52e277bcfc75fac0436ccb75c204f9e1b3fa5e12729670910639f27343e","impliedFormat":1},{"version":"13f6f39e12b1518c6650bbb220c8985999020fe0f21d818e28f512b7771d00f9","impliedFormat":1},{"version":"9b5369969f6e7175740bf51223112ff209f94ba43ecd3bb09eefff9fd675624a","impliedFormat":1},{"version":"4fe9e626e7164748e8769bbf74b538e09607f07ed17c2f20af8d680ee49fc1da","impliedFormat":1},{"version":"24515859bc0b836719105bb6cc3d68255042a9f02a6022b3187948b204946bd2","impliedFormat":1},{"version":"ea0148f897b45a76544ae179784c95af1bd6721b8610af9ffa467a518a086a43","impliedFormat":1},{"version":"24c6a117721e606c9984335f71711877293a9651e44f59f3d21c1ea0856f9cc9","impliedFormat":1},{"version":"dd3273ead9fbde62a72949c97dbec2247ea08e0c6952e701a483d74ef92d6a17","impliedFormat":1},{"version":"405822be75ad3e4d162e07439bac80c6bcc6dbae1929e179cf467ec0b9ee4e2e","impliedFormat":1},{"version
":"0db18c6e78ea846316c012478888f33c11ffadab9efd1cc8bcc12daded7a60b6","impliedFormat":1},{"version":"e61be3f894b41b7baa1fbd6a66893f2579bfad01d208b4ff61daef21493ef0a8","impliedFormat":1},{"version":"bd0532fd6556073727d28da0edfd1736417a3f9f394877b6d5ef6ad88fba1d1a","impliedFormat":1},{"version":"89167d696a849fce5ca508032aabfe901c0868f833a8625d5a9c6e861ef935d2","impliedFormat":1},{"version":"615ba88d0128ed16bf83ef8ccbb6aff05c3ee2db1cc0f89ab50a4939bfc1943f","impliedFormat":1},{"version":"a4d551dbf8746780194d550c88f26cf937caf8d56f102969a110cfaed4b06656","impliedFormat":1},{"version":"8bd86b8e8f6a6aa6c49b71e14c4ffe1211a0e97c80f08d2c8cc98838006e4b88","impliedFormat":1},{"version":"317e63deeb21ac07f3992f5b50cdca8338f10acd4fbb7257ebf56735bf52ab00","impliedFormat":1},{"version":"4732aec92b20fb28c5fe9ad99521fb59974289ed1e45aecb282616202184064f","impliedFormat":1},{"version":"2e85db9e6fd73cfa3d7f28e0ab6b55417ea18931423bd47b409a96e4a169e8e6","impliedFormat":1},{"version":"c46e079fe54c76f95c67fb89081b3e399da2c7d109e7dca8e4b58d83e332e605","impliedFormat":1},{"version":"bf67d53d168abc1298888693338cb82854bdb2e69ef83f8a0092093c2d562107","impliedFormat":1},{"version":"b52476feb4a0cbcb25e5931b930fc73cb6643fb1a5060bf8a3dda0eeae5b4b68","affectsGlobalScope":true,"impliedFormat":1},{"version":"f9501cc13ce624c72b61f12b3963e84fad210fbdf0ffbc4590e08460a3f04eba","affectsGlobalScope":true,"impliedFormat":1},{"version":"e7721c4f69f93c91360c26a0a84ee885997d748237ef78ef665b153e622b36c1","affectsGlobalScope":true,"impliedFormat":1},{"version":"0fa06ada475b910e2106c98c68b10483dc8811d0c14a8a8dd36efb2672485b29","impliedFormat":1},{"version":"33e5e9aba62c3193d10d1d33ae1fa75c46a1171cf76fef750777377d53b0303f","impliedFormat":1},{"version":"2b06b93fd01bcd49d1a6bd1f9b65ddcae6480b9a86e9061634d6f8e354c1468f","impliedFormat":1},{"version":"6a0cd27e5dc2cfbe039e731cf879d12b0e2dded06d1b1dedad07f7712de0d7f4","affectsGlobalScope":true,"impliedFormat":1},{"version":"13f5c844119c43e51ce777c509267f14d6aaf31eafb2c2b002
ca35584cd13b29","impliedFormat":1},{"version":"e60477649d6ad21542bd2dc7e3d9ff6853d0797ba9f689ba2f6653818999c264","impliedFormat":1},{"version":"c2510f124c0293ab80b1777c44d80f812b75612f297b9857406468c0f4dafe29","affectsGlobalScope":true,"impliedFormat":1},{"version":"5524481e56c48ff486f42926778c0a3cce1cc85dc46683b92b1271865bcf015a","impliedFormat":1},{"version":"4c829ab315f57c5442c6667b53769975acbf92003a66aef19bce151987675bd1","affectsGlobalScope":true,"impliedFormat":1},{"version":"b2ade7657e2db96d18315694789eff2ddd3d8aea7215b181f8a0b303277cc579","impliedFormat":1},{"version":"9855e02d837744303391e5623a531734443a5f8e6e8755e018c41d63ad797db2","impliedFormat":1},{"version":"4d631b81fa2f07a0e63a9a143d6a82c25c5f051298651a9b69176ba28930756d","impliedFormat":1},{"version":"836a356aae992ff3c28a0212e3eabcb76dd4b0cc06bcb9607aeef560661b860d","impliedFormat":1},{"version":"1e0d1f8b0adfa0b0330e028c7941b5a98c08b600efe7f14d2d2a00854fb2f393","impliedFormat":1},{"version":"41670ee38943d9cbb4924e436f56fc19ee94232bc96108562de1a734af20dc2c","affectsGlobalScope":true,"impliedFormat":1},{"version":"c906fb15bd2aabc9ed1e3f44eb6a8661199d6c320b3aa196b826121552cb3695","impliedFormat":1},{"version":"22295e8103f1d6d8ea4b5d6211e43421fe4564e34d0dd8e09e520e452d89e659","impliedFormat":1},{"version":"58647d85d0f722a1ce9de50955df60a7489f0593bf1a7015521efe901c06d770","impliedFormat":1},{"version":"6b4e081d55ac24fc8a4631d5dd77fe249fa25900abd7d046abb87d90e3b45645","impliedFormat":1},{"version":"a10f0e1854f3316d7ee437b79649e5a6ae3ae14ffe6322b02d4987071a95362e","impliedFormat":1},{"version":"e208f73ef6a980104304b0d2ca5f6bf1b85de6009d2c7e404028b875020fa8f2","impliedFormat":1},{"version":"d163b6bc2372b4f07260747cbc6c0a6405ab3fbcea3852305e98ac43ca59f5bc","impliedFormat":1},{"version":"e6fa9ad47c5f71ff733744a029d1dc472c618de53804eae08ffc243b936f87ff","affectsGlobalScope":true,"impliedFormat":1},{"version":"83e63d6ccf8ec004a3bb6d58b9bb0104f60e002754b1e968024b320730cc5311","impliedFormat":1},{"version":"24826e
d94a78d5c64bd857570fdbd96229ad41b5cb654c08d75a9845e3ab7dde","impliedFormat":1},{"version":"8b479a130ccb62e98f11f136d3ac80f2984fdc07616516d29881f3061f2dd472","impliedFormat":1},{"version":"928af3d90454bf656a52a48679f199f64c1435247d6189d1caf4c68f2eaf921f","affectsGlobalScope":true,"impliedFormat":1},{"version":"bceb58df66ab8fb00170df20cd813978c5ab84be1d285710c4eb005d8e9d8efb","affectsGlobalScope":true,"impliedFormat":1},{"version":"3f16a7e4deafa527ed9995a772bb380eb7d3c2c0fd4ae178c5263ed18394db2c","impliedFormat":1},{"version":"933921f0bb0ec12ef45d1062a1fc0f27635318f4d294e4d99de9a5493e618ca2","impliedFormat":1},{"version":"71a0f3ad612c123b57239a7749770017ecfe6b66411488000aba83e4546fde25","impliedFormat":1},{"version":"77fbe5eecb6fac4b6242bbf6eebfc43e98ce5ccba8fa44e0ef6a95c945ff4d98","impliedFormat":1},{"version":"4f9d8ca0c417b67b69eeb54c7ca1bedd7b56034bb9bfd27c5d4f3bc4692daca7","impliedFormat":1},{"version":"814118df420c4e38fe5ae1b9a3bafb6e9c2aa40838e528cde908381867be6466","impliedFormat":1},{"version":"a3fc63c0d7b031693f665f5494412ba4b551fe644ededccc0ab5922401079c95","impliedFormat":1},{"version":"f27524f4bef4b6519c604bdb23bf4465bddcccbf3f003abb901acbd0d7404d99","impliedFormat":1},{"version":"37ba7b45141a45ce6e80e66f2a96c8a5ab1bcef0fc2d0f56bb58df96ec67e972","impliedFormat":1},{"version":"45650f47bfb376c8a8ed39d4bcda5902ab899a3150029684ee4c10676d9fbaee","impliedFormat":1},{"version":"6b039f55681caaf111d5eb84d292b9bee9e0131d0db1ad0871eef0964f533c73","affectsGlobalScope":true,"impliedFormat":1},{"version":"18fd40412d102c5564136f29735e5d1c3b455b8a37f920da79561f1fde068208","impliedFormat":1},{"version":"c8d3e5a18ba35629954e48c4cc8f11dc88224650067a172685c736b27a34a4dc","impliedFormat":1},{"version":"f0be1b8078cd549d91f37c30c222c2a187ac1cf981d994fb476a1adc61387b14","affectsGlobalScope":true,"impliedFormat":1},{"version":"0aaed1d72199b01234152f7a60046bc947f1f37d78d182e9ae09c4289e06a592","impliedFormat":1},{"version":"2b55d426ff2b9087485e52ac4bc7cfafe1dc420fc76dad926cd46526567
c501a","impliedFormat":1},{"version":"66ba1b2c3e3a3644a1011cd530fb444a96b1b2dfe2f5e837a002d41a1a799e60","impliedFormat":1},{"version":"7e514f5b852fdbc166b539fdd1f4e9114f29911592a5eb10a94bb3a13ccac3c4","impliedFormat":1},{"version":"5b7aa3c4c1a5d81b411e8cb302b45507fea9358d3569196b27eb1a27ae3a90ef","affectsGlobalScope":true,"impliedFormat":1},{"version":"5987a903da92c7462e0b35704ce7da94d7fdc4b89a984871c0e2b87a8aae9e69","affectsGlobalScope":true,"impliedFormat":1},{"version":"ea08a0345023ade2b47fbff5a76d0d0ed8bff10bc9d22b83f40858a8e941501c","impliedFormat":1},{"version":"47613031a5a31510831304405af561b0ffaedb734437c595256bb61a90f9311b","impliedFormat":1},{"version":"ae062ce7d9510060c5d7e7952ae379224fb3f8f2dd74e88959878af2057c143b","impliedFormat":1},{"version":"8a1a0d0a4a06a8d278947fcb66bf684f117bf147f89b06e50662d79a53be3e9f","affectsGlobalScope":true,"impliedFormat":1},{"version":"358765d5ea8afd285d4fd1532e78b88273f18cb3f87403a9b16fef61ac9fdcfe","impliedFormat":1},{"version":"9f55299850d4f0921e79b6bf344b47c420ce0f507b9dcf593e532b09ea7eeea1","impliedFormat":1},{"version":"151ff381ef9ff8da2da9b9663ebf657eac35c4c9a19183420c05728f31a6761d","impliedFormat":1},{"version":"ee70b8037ecdf0de6c04f35277f253663a536d7e38f1539d270e4e916d225a3f","affectsGlobalScope":true,"impliedFormat":1},{"version":"a660aa95476042d3fdcc1343cf6bb8fdf24772d31712b1db321c5a4dcc325434","impliedFormat":1},{"version":"a7ca8df4f2931bef2aa4118078584d84a0b16539598eaadf7dce9104dfaa381c","impliedFormat":1},{"version":"11443a1dcfaaa404c68d53368b5b818712b95dd19f188cab1669c39bee8b84b3","impliedFormat":1},{"version":"36977c14a7f7bfc8c0426ae4343875689949fb699f3f84ecbe5b300ebf9a2c55","impliedFormat":1},{"version":"035d0934d304483f07148427a5bd5b98ac265dae914a6b49749fe23fbd893ec7","impliedFormat":99},{"version":"e2ed5b81cbed3a511b21a18ab2539e79ac1f4bc1d1d28f8d35d8104caa3b429f","impliedFormat":99},{"version":"161c8e0690c46021506e32fda85956d785b70f309ae97011fd27374c065cac9b","affectsGlobalScope":true,"impliedFormat":1}
,{"version":"402e5c534fb2b85fa771170595db3ac0dd532112c8fa44fc23f233bc6967488b","impliedFormat":1},{"version":"7965dc3c7648e2a7a586d11781cabb43d4859920716bc2fdc523da912b06570d","impliedFormat":1},{"version":"90c2bd9a3e72fe08b8fa5982e78cb8dc855a1157b26e11e37a793283c52bf64b","impliedFormat":1},{"version":"a8122fe390a2a987079e06c573b1471296114677923c1c094c24a53ddd7344a2","impliedFormat":1},{"version":"70c2cb19c0c42061a39351156653aa0cf5ba1ecdc8a07424dd38e3a1f1e3c7f4","impliedFormat":1},{"version":"a8fb10fd8c7bc7d9b8f546d4d186d1027f8a9002a639bec689b5000dab68e35c","impliedFormat":1},{"version":"c9b467ea59b86bd27714a879b9ad43c16f186012a26d0f7110b1322025ceaa83","impliedFormat":1},{"version":"57ea19c2e6ba094d8087c721bac30ff1c681081dbd8b167ac068590ef633e7a5","impliedFormat":1},{"version":"cba81ec9ae7bc31a4dc56f33c054131e037649d6b9a2cfa245124c67e23e4721","impliedFormat":1},{"version":"ad193f61ba708e01218496f093c23626aa3808c296844a99189be7108a9c8343","impliedFormat":1},{"version":"a0544b3c8b70b2f319a99ea380b55ab5394ede9188cdee452a5d0ce264f258b2","impliedFormat":1},{"version":"8c654c17c334c7c168c1c36e5336896dc2c892de940886c1639bebd9fc7b9be4","impliedFormat":1},{"version":"6a4da742485d5c2eb6bcb322ae96993999ffecbd5660b0219a5f5678d8225bb0","impliedFormat":1},{"version":"c65ca21d7002bdb431f9ab3c7a6e765a489aa5196e7e0ef00aed55b1294df599","impliedFormat":1},{"version":"c8fc655c2c4bafc155ceee01c84ab3d6c03192ced5d3f2de82e20f3d1bd7f9fa","impliedFormat":1},{"version":"be5a7ff3b47f7e553565e9483bdcadb0ca2040ac9e5ec7b81c7e115a81059882","impliedFormat":1},{"version":"1a93f36ecdb60a95e3a3621b561763e2952da81962fae217ab5441ac1d77ffc5","impliedFormat":1},{"version":"2a771d907aebf9391ac1f50e4ad37952943515eeea0dcc7e78aa08f508294668","impliedFormat":1},{"version":"0146fd6262c3fd3da51cb0254bb6b9a4e42931eb2f56329edd4c199cb9aaf804","impliedFormat":1},{"version":"183f480885db5caa5a8acb833c2be04f98056bdcc5fb29e969ff86e07efe57ab","impliedFormat":99},{"version":"4ec16d7a4e366c06a4573d299e15fe6207fc080f41beac
5da06f4af33ea9761e","impliedFormat":1},{"version":"7870becb94cbc11d2d01b77c4422589adcba4d8e59f726246d40cd0d129784d8","affectsGlobalScope":true,"impliedFormat":1},{"version":"7f698624bbbb060ece7c0e51b7236520ebada74b747d7523c7df376453ed6fea","impliedFormat":1},{"version":"f70b8328a15ca1d10b1436b691e134a49bc30dcf3183a69bfaa7ba77e1b78ecd","impliedFormat":1},{"version":"683b035f752e318d02e303894e767a1ac16ac4493baa2b593195d7976e6b7310","impliedFormat":99},{"version":"45cec9a1ba6549060552eead8959d47226048e0b71c7d0702ae58b7e16a28912","impliedFormat":99},{"version":"6907b09850f86610e7a528348c15484c1e1c09a18a9c1e98861399dfe4b18b46","impliedFormat":99},{"version":"12deea8eaa7a4fc1a2908e67da99831e5c5a6b46ad4f4f948fd4759314ea2b80","impliedFormat":99},{"version":"f0a8b376568a18f9a4976ecb0855187672b16b96c4df1c183a7e52dc1b5d98e8","impliedFormat":99},{"version":"8124828a11be7db984fcdab052fd4ff756b18edcfa8d71118b55388176210923","impliedFormat":99},{"version":"092944a8c05f9b96579161e88c6f211d5304a76bd2c47f8d4c30053269146bc8","impliedFormat":99},{"version":"b34b5f6b506abb206b1ea73c6a332b9ee9c8c98be0f6d17cdbda9430ecc1efab","impliedFormat":99},{"version":"75d4c746c3d16af0df61e7b0afe9606475a23335d9f34fcc525d388c21e9058b","impliedFormat":99},{"version":"fa959bf357232201c32566f45d97e70538c75a093c940af594865d12f31d4912","impliedFormat":99},{"version":"d2c52abd76259fc39a30dfae70a2e5ce77fd23144457a7ff1b64b03de6e3aec7","impliedFormat":99},{"version":"e6233e1c976265e85aa8ad76c3881febe6264cb06ae3136f0257e1eab4a6cc5a","impliedFormat":99},{"version":"f73e2335e568014e279927321770da6fe26facd4ac96cdc22a56687f1ecbb58e","impliedFormat":99},{"version":"317878f156f976d487e21fd1d58ad0461ee0a09185d5b0a43eedf2a56eb7e4ea","impliedFormat":99},{"version":"324ac98294dab54fbd580c7d0e707d94506d7b2c3d5efe981a8495f02cf9ad96","impliedFormat":99},{"version":"9ec72eb493ff209b470467e24264116b6a8616484bca438091433a545dfba17e","impliedFormat":99},{"version":"d6ee22aba183d5fc0c7b8617f77ee82ecadc2c14359cc51271c135e23f6ed51f
","impliedFormat":99},{"version":"49747416f08b3ba50500a215e7a55d75268b84e31e896a40313c8053e8dec908","impliedFormat":99},{"version":"81e634f1c5e1ca309e7e3dc69e2732eea932ef07b8b34517d452e5a3e9a36fa3","impliedFormat":99},{"version":"34f39f75f2b5aa9c84a9f8157abbf8322e6831430e402badeaf58dd284f9b9a6","impliedFormat":99},{"version":"427fe2004642504828c1476d0af4270e6ad4db6de78c0b5da3e4c5ca95052a99","impliedFormat":1},{"version":"2eeffcee5c1661ddca53353929558037b8cf305ffb86a803512982f99bcab50d","impliedFormat":99},{"version":"9afb4cb864d297e4092a79ee2871b5d3143ea14153f62ef0bb04ede25f432030","affectsGlobalScope":true,"impliedFormat":99},{"version":"891694d3694abd66f0b8872997b85fd8e52bc51632ce0f8128c96962b443189f","impliedFormat":99},{"version":"69bf2422313487956e4dacf049f30cb91b34968912058d244cb19e4baa24da97","impliedFormat":99},{"version":"971a2c327ff166c770c5fb35699575ba2d13bba1f6d2757309c9be4b30036c8e","impliedFormat":99},{"version":"4f45e8effab83434a78d17123b01124259fbd1e335732135c213955d85222234","impliedFormat":99},{"version":"7bd51996fb7717941cbe094b05adc0d80b9503b350a77b789bbb0fc786f28053","impliedFormat":99},{"version":"b62006bbc815fe8190c7aee262aad6bff993e3f9ade70d7057dfceab6de79d2f","impliedFormat":99},{"version":"13497c0d73306e27f70634c424cd2f3b472187164f36140b504b3756b0ff476d","impliedFormat":99},{"version":"a23a08b626aa4d4a1924957bd8c4d38a7ffc032e21407bbd2c97413e1d8c3dbd","impliedFormat":99},{"version":"c320fe76361c53cad266b46986aac4e68d644acda1629f64be29c95534463d28","impliedFormat":99},{"version":"7bbff6783e96c691a41a7cf12dd5486b8166a01b0c57d071dbcfca55c9525ec4","impliedFormat":99},{"version":"556ccd493ec36c7d7cb130d51be66e147b91cc1415be383d71da0f1e49f742a9","impliedFormat":1},{"version":"b6d03c9cfe2cf0ba4c673c209fcd7c46c815b2619fd2aad59fc4229aaef2ed43","impliedFormat":1},{"version":"95aba78013d782537cc5e23868e736bec5d377b918990e28ed56110e3ae8b958","impliedFormat":1},{"version":"670a76db379b27c8ff42f1ba927828a22862e2ab0b0908e38b671f0e912cc5ed","impliedFormat":
1},{"version":"13b77ab19ef7aadd86a1e54f2f08ea23a6d74e102909e3c00d31f231ed040f62","impliedFormat":1},{"version":"069bebfee29864e3955378107e243508b163e77ab10de6a5ee03ae06939f0bb9","impliedFormat":1},{"version":"26e0ffceb2198feb1ef460d5d14111c69ad07d44c5a67fd4bfeb74c969aa9afb","impliedFormat":99},{"version":"161b7ff2c0b478686a09ee22372e2b3b9b3d08a25204e271a4581562a6764bac","signature":"4b96dd19fd2949d28ce80e913412b0026dc421e5bf6c31d87c7b5eb11b5753b4"}],"root":[234],"options":{"allowSyntheticDefaultImports":true,"composite":true,"module":99,"target":7},"referencedMap":[[229,1],[227,2],[232,3],[228,1],[230,4],[231,1],[216,5],[214,2],[161,2],[107,6],[108,6],[109,7],[62,8],[110,9],[111,10],[112,11],[57,2],[60,12],[58,2],[59,2],[113,13],[114,14],[115,15],[116,16],[117,17],[118,18],[119,18],[120,19],[121,20],[122,21],[123,22],[63,2],[61,2],[124,23],[125,24],[126,25],[160,26],[127,27],[128,2],[129,28],[130,29],[131,30],[132,31],[133,32],[134,33],[135,34],[136,35],[137,36],[138,36],[139,37],[140,2],[141,38],[142,39],[144,40],[143,41],[145,42],[146,43],[147,44],[148,45],[149,46],[150,47],[151,48],[152,49],[153,50],[154,51],[155,52],[156,53],[157,54],[64,2],[65,2],[66,2],[104,55],[105,2],[106,2],[158,56],[159,57],[233,58],[56,59],[203,60],[201,2],[202,2],[48,2],[198,61],[195,62],[196,63],[217,64],[208,2],[211,65],[210,66],[222,66],[209,67],[47,2],[55,68],[197,68],[50,69],[53,70],[204,69],[54,71],[49,2],[215,2],[169,2],[186,72],[184,73],[185,74],[173,75],[174,73],[181,76],[172,77],[177,78],[187,2],[178,79],[183,80],[189,81],[188,82],[171,83],[179,84],[180,85],[175,86],[182,72],[176,87],[163,88],[162,89],[170,2],[218,2],[51,2],[52,90],[45,2],[46,2],[8,2],[10,2],[9,2],[2,2],[11,2],[12,2],[13,2],[14,2],[15,2],[16,2],[17,2],[18,2],[3,2],[19,2],[20,2],[4,2],[21,2],[25,2],[22,2],[23,2],[24,2],[26,2],[27,2],[28,2],[5,2],[29,2],[30,2],[31,2],[32,2],[6,2],[36,2],[33,2],[34,2],[35,2],[37,2],[7,2],[38,2],[43,2],[44,2],[39,2],[40,2],[41,2],[42,2],[1,2],[82,91],[92,92],[81,91],[102,93],[73,94
],[72,95],[101,96],[95,97],[100,98],[75,99],[89,100],[74,101],[98,102],[70,103],[69,96],[99,104],[71,105],[76,106],[77,2],[80,106],[67,2],[103,107],[93,108],[84,109],[85,110],[87,111],[83,112],[86,113],[96,96],[78,114],[79,115],[88,116],[68,117],[91,108],[90,106],[94,2],[97,118],[220,119],[206,120],[207,119],[205,2],[194,121],[168,122],[167,123],[165,123],[164,2],[166,124],[192,2],[191,2],[190,2],[193,125],[226,126],[219,127],[212,128],[221,129],[200,130],[223,131],[224,132],[213,133],[225,134],[199,2],[234,135]],"semanticDiagnosticsPerFile":[[8,[{"start":930,"length":9,"messageText":"Definitions of the following identifiers conflict with those in another file: ImportExportKind, TableKind, ExportValue, Exports, ImportValue, Imports, ModuleImports, ValueType, name, AlgorithmIdentifier, AllowSharedBufferSource, BigInteger, BlobPart, BodyInit, BufferSource, CSSKeywordish, CSSNumberish, CSSPerspectiveValue, CSSUnparsedSegment, CanvasImageSource, CookieList, DOMHighResTimeStamp, EpochTimeStamp, EventListenerOrEventListenerObject, FileSystemWriteChunkType, Float32List, FormDataEntryValue, GLbitfield, GLboolean, GLclampf, GLenum, GLfloat, GLint, GLint64, GLintptr, GLsizei, GLsizeiptr, GLuint, GLuint64, HashAlgorithmIdentifier, HeadersInit, IDBValidKey, ImageBitmapSource, ImageBufferSource, ImageDataArray, Int32List, MessageEventSource, NamedCurve, OffscreenRenderingContext, OnErrorEventHandler, PerformanceEntryList, ReadableStreamController, ReadableStreamReadResult, ReadableStreamReader, ReportList, RequestInfo, TexImageSource, TimerHandler, Transferable, Uint32List, XMLHttpRequestBodyInit, AlphaOption, AudioSampleFormat, AvcBitstreamFormat, BinaryType, BitrateMode, CSSMathOperator, CSSNumericBaseType, CanvasDirection, CanvasFillRule, CanvasFontKerning, CanvasFontStretch, CanvasFontVariantCaps, CanvasLineCap, CanvasLineJoin, CanvasTextAlign, CanvasTextBaseline, CanvasTextRendering, ClientTypes, CodecState, ColorGamut, ColorSpaceConversion, CompressionFormat, 
CookieSameSite, DocumentVisibilityState, EncodedAudioChunkType, EncodedVideoChunkType, EndingType, FileSystemHandleKind, FontDisplay, FontFaceLoadStatus, FontFaceSetLoadStatus, GlobalCompositeOperation, HardwareAcceleration, HdrMetadataType, IDBCursorDirection, IDBRequestReadyState, IDBTransactionDurability, IDBTransactionMode, ImageOrientation, ImageSmoothingQuality, KeyFormat, KeyType, KeyUsage, LatencyMode, LockMode, MediaDecodingType, MediaEncodingType, MediaKeysRequirement, NotificationDirection, NotificationPermission, OffscreenRenderingContextId, OpusBitstreamFormat, PermissionName, PermissionState, PredefinedColorSpace, PremultiplyAlpha, PushEncryptionKeyName, RTCDataChannelState, RTCEncodedVideoFrameType, ReadableStreamReaderMode, ReadableStreamType, ReferrerPolicy, RequestCache, RequestCredentials, RequestDestination, RequestMode, RequestPriority, RequestRedirect, ResizeQuality, ResponseType, SecurityPolicyViolationEventDisposition, ServiceWorkerState, ServiceWorkerUpdateViaCache, TransferFunction, VideoColorPrimaries, VideoEncoderBitrateMode, VideoMatrixCoefficients, VideoPixelFormat, VideoTransferCharacteristics, WebGLPowerPreference, WebTransportCongestionControl, WebTransportErrorSource, WorkerType, WriteCommandType, XMLHttpRequestResponseType","category":1,"code":6200,"relatedInformation":[{"file":false,"start":930,"length":9,"messageText":"Conflicts are in this file.","category":3,"code":6201}]},{"start":194996,"length":33,"messageText":"Duplicate index signature for type 'number'.","category":1,"code":2374},{"start":283291,"length":39,"messageText":"Duplicate index signature for type 'number'.","category":1,"code":2374},{"start":287103,"length":36,"messageText":"Duplicate index signature for type 'number'.","category":1,"code":2374},{"start":378009,"length":24,"messageText":"Duplicate index signature for type 'number'.","category":1,"code":2374},{"start":487425,"length":22,"messageText":"Duplicate index signature for type 
'number'.","category":1,"code":2374},{"start":851009,"length":28,"messageText":"Duplicate index signature for type 'number'.","category":1,"code":2374}]]],"latestChangedDtsFile":"./vite.config.d.ts","version":"5.9.3"}
\ No newline at end of file
diff --git a/web/tsconfig.tsbuildinfo b/web/tsconfig.tsbuildinfo
new file mode 100644
index 0000000..31c3ec1
--- /dev/null
+++ b/web/tsconfig.tsbuildinfo
@@ -0,0 +1 @@
+{"root":["./src/rootapp.tsx","./src/main.tsx","./src/components/authbootstrap.tsx","./src/components/fullpageloading.tsx","./src/layouts/applayout.tsx","./src/pages/dashboard/dashboardpage.tsx","./src/pages/login/loginpage.tsx","./src/pages/system-info/systeminfopage.tsx","./src/router/protectedroute.tsx","./src/router/index.tsx","./src/services/auth.ts","./src/services/http.ts","./src/services/system.ts","./src/stores/auth.ts","./src/test/setup.ts","./src/types/api.ts","./src/types/auth.ts","./src/types/system.ts"],"version":"5.9.3"}
\ No newline at end of file
diff --git a/web/vite.config.d.ts b/web/vite.config.d.ts
new file mode 100644
index 0000000..340562a
--- /dev/null
+++ b/web/vite.config.d.ts
@@ -0,0 +1,2 @@
+declare const _default: import("vite").UserConfig;
+export default _default;
diff --git a/web/vite.config.js b/web/vite.config.js
new file mode 100644
index 0000000..c48ce53
--- /dev/null
+++ b/web/vite.config.js
@@ -0,0 +1,32 @@
+import { defineConfig } from 'vitest/config';
+import react from '@vitejs/plugin-react';
+export default defineConfig({
+ plugins: [react()],
+ server: {
+ port: 5173,
+ proxy: {
+ '/api': {
+ target: 'http://127.0.0.1:8340',
+ changeOrigin: true,
+ },
+ },
+ },
+ build: {
+ rollupOptions: {
+ output: {
+ manualChunks: {
+ 'vendor-react': ['react', 'react-dom', 'react-router-dom'],
+ 'vendor-arco': ['@arco-design/web-react'],
+ 'vendor-echarts': ['echarts', 'echarts-for-react'],
+ },
+ },
+ },
+ },
+ test: {
+ environment: 'jsdom',
+ setupFiles: './src/test/setup.ts',
+ globals: true,
+ include: ['src/**/*.test.ts', 'src/**/*.test.tsx'],
+ exclude: ['src/components/auth-guard.test.tsx', 'src/stores/auth.test.ts'],
+ },
+});
diff --git a/web/vite.config.ts b/web/vite.config.ts
new file mode 100644
index 0000000..bc83a8d
--- /dev/null
+++ b/web/vite.config.ts
@@ -0,0 +1,34 @@
+import { defineConfig } from 'vitest/config'
+import react from '@vitejs/plugin-react'
+
+export default defineConfig({
+ plugins: [react()],
+ server: {
+ port: 5173,
+ proxy: {
+ '/api': {
+ target: 'http://127.0.0.1:8340',
+ changeOrigin: true,
+ },
+ },
+ },
+ build: {
+ rollupOptions: {
+ output: {
+ manualChunks: {
+ 'vendor-react': ['react', 'react-dom', 'react-router-dom'],
+ 'vendor-arco': ['@arco-design/web-react'],
+ 'vendor-echarts': ['echarts', 'echarts-for-react'],
+ },
+ },
+ },
+ },
+ test: {
+ environment: 'jsdom',
+ setupFiles: './src/test/setup.ts',
+ globals: true,
+ include: ['src/**/*.test.ts', 'src/**/*.test.tsx'],
+ exclude: ['src/components/auth-guard.test.tsx', 'src/stores/auth.test.ts'],
+ },
+})
+