mirror of
https://github.com/amtoaer/bili-sync.git
synced 2026-05-08 20:22:40 +08:00
Compare commits
88 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2b046362d7 | ||
|
|
61c9e7de88 | ||
|
|
3d25c6b321 | ||
|
|
d35858790b | ||
|
|
b441f04cdf | ||
|
|
4db7e6763a | ||
|
|
bbbb7d0c5b | ||
|
|
210c94398a | ||
|
|
6c7d295fe6 | ||
|
|
71519af2f3 | ||
|
|
8ed2fbae24 | ||
|
|
fd90bc8b73 | ||
|
|
66bd3d6a41 | ||
|
|
5ef23a678f | ||
|
|
66079f3adc | ||
|
|
4f780faf64 | ||
|
|
dbcb1fa78b | ||
|
|
386dac7735 | ||
|
|
5537c621be | ||
|
|
c7978e20da | ||
|
|
6e4af47bda | ||
|
|
791e4997a0 | ||
|
|
05ab83fc93 | ||
|
|
18ed9e09b1 | ||
|
|
e196afa8ce | ||
|
|
9b2da75391 | ||
|
|
664e1d9f21 | ||
|
|
31c26f033e | ||
|
|
29d78dabdd | ||
|
|
87fb597ba4 | ||
|
|
c8f7a2267d | ||
|
|
2837bb5234 | ||
|
|
0990a276ff | ||
|
|
adc2e32e58 | ||
|
|
267e9373f9 | ||
|
|
dd23d1db58 | ||
|
|
cc25749445 | ||
|
|
655b4389b7 | ||
|
|
486dab5355 | ||
|
|
74a45526f0 | ||
|
|
ce60838244 | ||
|
|
35866888e8 | ||
|
|
fbb7623ee1 | ||
|
|
1affe4d594 | ||
|
|
7c73a2f01a | ||
|
|
a627584fb0 | ||
|
|
636a843bda | ||
|
|
7bb4e7bc44 | ||
|
|
e50318870e | ||
|
|
28971c3ff3 | ||
|
|
f47ce92a51 | ||
|
|
a35794ed7a | ||
|
|
bad00af147 | ||
|
|
4539e9379d | ||
|
|
a46c2572b1 | ||
|
|
a41efdbe78 | ||
|
|
a98e49347b | ||
|
|
586d5ec4ee | ||
|
|
65a047b0fa | ||
|
|
c0ed37750f | ||
|
|
0e98f484ef | ||
|
|
6226fa7c4d | ||
|
|
c528152986 | ||
|
|
45849957ff | ||
|
|
8510aa318e | ||
|
|
c07e475fe6 | ||
|
|
a574d005c3 | ||
|
|
e9d1c9eadb | ||
|
|
a9f604a07d | ||
|
|
6383730706 | ||
|
|
34d3e47b2d | ||
|
|
d7ec0584bc | ||
|
|
1ec015856b | ||
|
|
99d4d900e6 | ||
|
|
f85f105e69 | ||
|
|
8a1395458c | ||
|
|
bafb4af8dd | ||
|
|
f52724b974 | ||
|
|
4e1e0c40cf | ||
|
|
439513e5ab | ||
|
|
33a61ec08d | ||
|
|
a6d0d6b777 | ||
|
|
ae685cbe61 | ||
|
|
16e14fc371 | ||
|
|
b4a5dee236 | ||
|
|
2b3e6f9547 | ||
|
|
f8b93d2c76 | ||
|
|
94462ca706 |
2
.github/workflows/build-binary.yaml
vendored
2
.github/workflows/build-binary.yaml
vendored
@@ -68,6 +68,8 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Download Web Build Artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
|
||||
1
.github/workflows/commit-build.yaml
vendored
1
.github/workflows/commit-build.yaml
vendored
@@ -7,4 +7,5 @@ on:
|
||||
|
||||
jobs:
|
||||
build-binary:
|
||||
if: ${{ !startsWith(github.ref, 'refs/tags/') }}
|
||||
uses: amtoaer/bili-sync/.github/workflows/build-binary.yaml@main
|
||||
|
||||
37
.github/workflows/pr-check.yaml
vendored
37
.github/workflows/pr-check.yaml
vendored
@@ -5,7 +5,7 @@ on:
|
||||
branches:
|
||||
- main
|
||||
pull_request:
|
||||
types: ['opened', 'reopened', 'synchronize', 'ready_for_review']
|
||||
types: ["opened", "reopened", "synchronize", "ready_for_review"]
|
||||
|
||||
concurrency:
|
||||
# Allow only one workflow per any non-`main` branch.
|
||||
@@ -18,15 +18,15 @@ env:
|
||||
RUST_BACKTRACE: 1
|
||||
|
||||
jobs:
|
||||
tests:
|
||||
name: Run Clippy and tests
|
||||
check-backend:
|
||||
name: Run backend checks
|
||||
runs-on: ubuntu-24.04
|
||||
if: ${{ github.event_name == 'push' || !github.event.pull_request.draft }}
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- run: rustup default nightly && rustup component add rustfmt clippy
|
||||
- run: rustup default stable && rustup component add clippy && rustup component add rustfmt --toolchain nightly
|
||||
|
||||
- name: Cache dependencies
|
||||
uses: swatinem/rust-cache@v2
|
||||
@@ -34,10 +34,35 @@ jobs:
|
||||
save-if: ${{ github.ref == 'refs/heads/main' }}
|
||||
|
||||
- name: cargo fmt check
|
||||
run: cargo fmt --check
|
||||
run: cargo +nightly fmt --check
|
||||
|
||||
- name: cargo clippy
|
||||
run: cargo clippy
|
||||
run: cargo clippy -- -D warnings
|
||||
|
||||
- name: cargo test
|
||||
run: cargo test
|
||||
check-frontend:
|
||||
name: Run frontend checks
|
||||
runs-on: ubuntu-24.04
|
||||
if: ${{ github.event_name == 'push' || !github.event.pull_request.draft }}
|
||||
defaults:
|
||||
run:
|
||||
working-directory: web
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
- name: Setup bun
|
||||
uses: oven-sh/setup-bun@v2
|
||||
with:
|
||||
bun-version: latest
|
||||
- name: Install dependencies
|
||||
run: bun install --frozen-lockfile
|
||||
- name: Cache dependencies
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/.bun/install/cache
|
||||
key: ${{ runner.os }}-bun-${{ hashFiles('docs/bun.lockb') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-bun-
|
||||
- name: Check Frontend
|
||||
run: bun run lint
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -1,7 +1,6 @@
|
||||
**/target
|
||||
auth_data
|
||||
*.sqlite
|
||||
video
|
||||
debug*
|
||||
node_modules
|
||||
docs/.vitepress/cache
|
||||
|
||||
1547
Cargo.lock
generated
1547
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
75
Cargo.toml
75
Cargo.toml
@@ -4,45 +4,49 @@ default-members = ["crates/bili_sync"]
|
||||
resolver = "2"
|
||||
|
||||
[workspace.package]
|
||||
version = "2.4.0"
|
||||
version = "2.7.0"
|
||||
authors = ["amtoaer <amtoaer@gmail.com>"]
|
||||
license = "MIT"
|
||||
description = "由 Rust & Tokio 驱动的哔哩哔哩同步工具"
|
||||
edition = "2021"
|
||||
edition = "2024"
|
||||
publish = false
|
||||
|
||||
[workspace.dependencies]
|
||||
bili_sync_entity = { path = "crates/bili_sync_entity" }
|
||||
bili_sync_migration = { path = "crates/bili_sync_migration" }
|
||||
|
||||
anyhow = { version = "1.0.95", features = ["backtrace"] }
|
||||
anyhow = { version = "1.0.98", features = ["backtrace"] }
|
||||
arc-swap = { version = "1.7.1", features = ["serde"] }
|
||||
assert_matches = "1.5"
|
||||
async-std = { version = "1.13.0", features = ["attributes", "tokio1"] }
|
||||
assert_matches = "1.5.0"
|
||||
async-std = { version = "1.13.1", features = ["attributes", "tokio1"] }
|
||||
async-stream = "0.3.6"
|
||||
async-trait = "0.1.85"
|
||||
axum = { version = "0.8.1", features = ["macros"] }
|
||||
async-trait = "0.1.88"
|
||||
axum = { version = "0.8.4", features = ["macros", "ws"] }
|
||||
base64 = "0.22.1"
|
||||
built = { version = "0.7.7", features = ["git2", "chrono"] }
|
||||
chrono = { version = "0.4.39", features = ["serde"] }
|
||||
clap = { version = "4.5.26", features = ["env", "string"] }
|
||||
chrono = { version = "0.4.41", features = ["serde"] }
|
||||
clap = { version = "4.5.41", features = ["env", "string"] }
|
||||
cookie = "0.18.1"
|
||||
cow-utils = "0.1.3"
|
||||
dashmap = "6.1.0"
|
||||
derivative = "2.2.0"
|
||||
dirs = "6.0.0"
|
||||
enum_dispatch = "0.3.13"
|
||||
float-ord = "0.3.2"
|
||||
futures = "0.3.31"
|
||||
handlebars = "6.3.0"
|
||||
git2 = { version = "0.20.2", features = [], default-features = false }
|
||||
handlebars = "6.3.2"
|
||||
hex = "0.4.3"
|
||||
leaky-bucket = "1.1.2"
|
||||
md5 = "0.7.0"
|
||||
memchr = "2.7.4"
|
||||
mime_guess = "=2.0.5"
|
||||
once_cell = "1.20.2"
|
||||
prost = "0.13.4"
|
||||
quick-xml = { version = "0.37.2", features = ["async-tokio"] }
|
||||
rand = "0.8.5"
|
||||
md5 = "0.8.0"
|
||||
memchr = "2.7.5"
|
||||
once_cell = "1.21.3"
|
||||
parking_lot = "0.12.4"
|
||||
prost = "0.14.1"
|
||||
quick-xml = { version = "0.38.0", features = ["async-tokio"] }
|
||||
rand = "0.9.1"
|
||||
regex = "1.11.1"
|
||||
reqwest = { version = "0.12.12", features = [
|
||||
reqwest = { version = "0.12.22", features = [
|
||||
"charset",
|
||||
"cookies",
|
||||
"gzip",
|
||||
@@ -51,27 +55,30 @@ reqwest = { version = "0.12.12", features = [
|
||||
"rustls-tls",
|
||||
"stream",
|
||||
], default-features = false }
|
||||
rsa = { version = "0.9.7", features = ["sha2"] }
|
||||
rust-embed = "8.5.0"
|
||||
sea-orm = { version = "1.1.4", features = [
|
||||
rsa = { version = "0.10.0-rc.3", features = ["sha2"] }
|
||||
rust-embed-for-web = { git = "https://github.com/amtoaer/rust-embed-for-web", tag = "v1.0.0" }
|
||||
sea-orm = { version = "1.1.13", features = [
|
||||
"macros",
|
||||
"runtime-tokio-rustls",
|
||||
"sqlx-sqlite",
|
||||
] }
|
||||
sea-orm-migration = { version = "1.1.4", features = [] }
|
||||
serde = { version = "1.0.217", features = ["derive"] }
|
||||
serde_json = "1.0.135"
|
||||
sea-orm-migration = { version = "1.1.13", features = [] }
|
||||
serde = { version = "1.0.219", features = ["derive"] }
|
||||
serde_json = "1.0.140"
|
||||
serde_urlencoded = "0.7.1"
|
||||
strum = { version = "0.26.3", features = ["derive"] }
|
||||
thiserror = "2.0.11"
|
||||
tokio = { version = "1.43.0", features = ["full"] }
|
||||
tokio-util = { version = "0.7.13", features = ["io", "rt"] }
|
||||
toml = "0.8.19"
|
||||
strum = { version = "0.27.1", features = ["derive"] }
|
||||
sysinfo = "0.36.0"
|
||||
thiserror = "2.0.12"
|
||||
tokio = { version = "1.46.1", features = ["full"] }
|
||||
tokio-stream = { version = "0.1.17", features = ["sync"] }
|
||||
tokio-util = { version = "0.7.15", features = ["io", "rt"] }
|
||||
toml = "0.9.1"
|
||||
tower = "0.5.2"
|
||||
tracing = "0.1.41"
|
||||
tracing-subscriber = { version = "0.3.19", features = ["chrono"] }
|
||||
utoipa = { version = "5", features = ["axum_extras"] }
|
||||
utoipa-swagger-ui = { version = "9.0.0", features = ["axum", "vendored"] }
|
||||
tracing-subscriber = { version = "0.3.19", features = ["chrono", "json"] }
|
||||
ua_generator = "0.5.22"
|
||||
uuid = { version = "1.17.0", features = ["v4"] }
|
||||
validator = { version = "0.20.0", features = ["derive"] }
|
||||
|
||||
[workspace.metadata.release]
|
||||
release = false
|
||||
@@ -82,8 +89,12 @@ publish = false
|
||||
pre-release-replacements = [
|
||||
{ file = "../../docs/.vitepress/config.mts", search = "\"v[0-9\\.]+\"", replace = "\"v{{version}}\"", exactly = 1 },
|
||||
{ file = "../../docs/introduction.md", search = " v[0-9\\.]+,", replace = " v{{version}},", exactly = 1 },
|
||||
{ file = "../../web/package.json", search = "\"version\": \"[0-9\\.]+\"", replace = "\"version\": \"{{version}}\"", exactly = 1 },
|
||||
]
|
||||
|
||||
[profile.dev.package."*"]
|
||||
debug = false
|
||||
|
||||
[profile.release]
|
||||
strip = true
|
||||
lto = "thin"
|
||||
|
||||
13
README.md
13
README.md
@@ -9,10 +9,12 @@ bili-sync 是一款专为 NAS 用户编写的哔哩哔哩同步工具,由 Rust
|
||||
|
||||
## 效果演示
|
||||
|
||||
### 概览
|
||||

|
||||
### 详情
|
||||

|
||||
### 管理页
|
||||

|
||||
### 媒体库概览
|
||||

|
||||
### 媒体库详情
|
||||

|
||||
### 播放(使用 infuse)
|
||||

|
||||
### 文件排布
|
||||
@@ -33,7 +35,8 @@ bili-sync 是一款专为 NAS 用户编写的哔哩哔哩同步工具,由 Rust
|
||||
- [x] 支持对“稍后再看”内视频的自动扫描与下载
|
||||
- [x] 支持对 UP 主投稿视频的自动扫描与下载
|
||||
- [x] 支持限制任务的并行度和接口请求频率
|
||||
- [ ] 下载单个文件时支持断点续传与并发下载
|
||||
- [x] 支持单个文件的分块并行下载
|
||||
- [x] 支持使用 Web UI 配置,查看并管理视频、视频源
|
||||
|
||||
|
||||
## 参考与借鉴
|
||||
|
||||
BIN
assets/webui.webp
Normal file
BIN
assets/webui.webp
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 95 KiB |
@@ -14,12 +14,14 @@ anyhow = { workspace = true }
|
||||
arc-swap = { workspace = true }
|
||||
async-stream = { workspace = true }
|
||||
axum = { workspace = true }
|
||||
base64 = { workspace = true }
|
||||
bili_sync_entity = { workspace = true }
|
||||
bili_sync_migration = { workspace = true }
|
||||
chrono = { workspace = true }
|
||||
clap = { workspace = true }
|
||||
cookie = { workspace = true }
|
||||
cow-utils = { workspace = true }
|
||||
dashmap = { workspace = true }
|
||||
dirs = { workspace = true }
|
||||
enum_dispatch = { workspace = true }
|
||||
float-ord = { workspace = true }
|
||||
@@ -29,35 +31,39 @@ hex = { workspace = true }
|
||||
leaky-bucket = { workspace = true }
|
||||
md5 = { workspace = true }
|
||||
memchr = { workspace = true }
|
||||
mime_guess = { workspace = true }
|
||||
once_cell = { workspace = true }
|
||||
parking_lot = { workspace = true }
|
||||
prost = { workspace = true }
|
||||
quick-xml = { workspace = true }
|
||||
rand = { workspace = true }
|
||||
regex = { workspace = true }
|
||||
reqwest = { workspace = true }
|
||||
rsa = { workspace = true }
|
||||
rust-embed = { workspace = true }
|
||||
rust-embed-for-web = { workspace = true }
|
||||
sea-orm = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
serde_urlencoded = { workspace = true }
|
||||
strum = { workspace = true }
|
||||
sysinfo = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
tokio = { workspace = true }
|
||||
tokio-stream = { workspace = true }
|
||||
tokio-util = { workspace = true }
|
||||
toml = { workspace = true }
|
||||
tower = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
tracing-subscriber = { workspace = true }
|
||||
utoipa = { workspace = true }
|
||||
utoipa-swagger-ui = { workspace = true }
|
||||
ua_generator = { workspace = true }
|
||||
uuid = { workspace = true }
|
||||
validator = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
assert_matches = { workspace = true }
|
||||
|
||||
[build-dependencies]
|
||||
built = { workspace = true }
|
||||
git2 = { workspace = true }
|
||||
|
||||
[package.metadata.release]
|
||||
release = true
|
||||
|
||||
@@ -1,18 +1,25 @@
|
||||
use std::borrow::Cow;
|
||||
use std::path::Path;
|
||||
use std::pin::Pin;
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use anyhow::{Context, Result, ensure};
|
||||
use bili_sync_entity::rule::Rule;
|
||||
use bili_sync_entity::*;
|
||||
use chrono::Utc;
|
||||
use futures::Stream;
|
||||
use sea_orm::entity::prelude::*;
|
||||
use sea_orm::sea_query::{OnConflict, SimpleExpr};
|
||||
use sea_orm::ActiveValue::Set;
|
||||
use sea_orm::entity::prelude::*;
|
||||
use sea_orm::sea_query::SimpleExpr;
|
||||
use sea_orm::{DatabaseConnection, Unchanged};
|
||||
|
||||
use crate::adapter::{VideoSource, VideoSourceEnum, _ActiveModel};
|
||||
use crate::adapter::{_ActiveModel, VideoSource, VideoSourceEnum};
|
||||
use crate::bilibili::{BiliClient, Collection, CollectionItem, CollectionType, VideoInfo};
|
||||
|
||||
impl VideoSource for collection::Model {
|
||||
fn display_name(&self) -> Cow<'static, str> {
|
||||
format!("{}「{}」", CollectionType::from_expected(self.r#type), self.name).into()
|
||||
}
|
||||
|
||||
fn filter_expr(&self) -> SimpleExpr {
|
||||
video::Column::CollectionId.eq(self.id)
|
||||
}
|
||||
@@ -37,82 +44,71 @@ impl VideoSource for collection::Model {
|
||||
})
|
||||
}
|
||||
|
||||
fn log_refresh_video_start(&self) {
|
||||
info!("开始扫描{}「{}」..", CollectionType::from(self.r#type), self.name);
|
||||
fn should_take(&self, _release_datetime: &chrono::DateTime<Utc>, _latest_row_at: &chrono::DateTime<Utc>) -> bool {
|
||||
// collection(视频合集/视频列表)返回的内容似乎并非严格按照时间排序,并且不同 collection 的排序方式也不同
|
||||
// 为了保证程序正确性,collection 不根据时间提前 break,而是每次都全量拉取
|
||||
true
|
||||
}
|
||||
|
||||
fn log_refresh_video_end(&self, count: usize) {
|
||||
info!(
|
||||
"扫描{}「{}」完成,获取到 {} 条新视频",
|
||||
CollectionType::from(self.r#type),
|
||||
self.name,
|
||||
count,
|
||||
fn should_filter(
|
||||
&self,
|
||||
video_info: Result<VideoInfo, anyhow::Error>,
|
||||
latest_row_at: &chrono::DateTime<Utc>,
|
||||
) -> Option<VideoInfo> {
|
||||
// 由于 collection 的视频无固定时间顺序,should_take 无法提前中断拉取,因此 should_filter 环节需要进行额外过滤
|
||||
if let Ok(video_info) = video_info
|
||||
&& video_info.release_datetime() > latest_row_at
|
||||
{
|
||||
return Some(video_info);
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
fn rule(&self) -> &Option<Rule> {
|
||||
&self.rule
|
||||
}
|
||||
|
||||
async fn refresh<'a>(
|
||||
self,
|
||||
bili_client: &'a BiliClient,
|
||||
connection: &'a DatabaseConnection,
|
||||
) -> Result<(
|
||||
VideoSourceEnum,
|
||||
Pin<Box<dyn Stream<Item = Result<VideoInfo>> + Send + 'a>>,
|
||||
)> {
|
||||
let collection = Collection::new(
|
||||
bili_client,
|
||||
CollectionItem {
|
||||
sid: self.s_id.to_string(),
|
||||
mid: self.m_id.to_string(),
|
||||
collection_type: CollectionType::from_expected(self.r#type),
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
fn log_fetch_video_start(&self) {
|
||||
info!(
|
||||
"开始填充{}「{}」视频详情..",
|
||||
CollectionType::from(self.r#type),
|
||||
self.name
|
||||
let collection_info = collection.get_info().await?;
|
||||
ensure!(
|
||||
collection_info.sid == self.s_id
|
||||
&& collection_info.mid == self.m_id
|
||||
&& collection_info.collection_type == CollectionType::from_expected(self.r#type),
|
||||
"collection info mismatch: {:?} != {:?}",
|
||||
collection_info,
|
||||
collection.collection
|
||||
);
|
||||
}
|
||||
|
||||
fn log_fetch_video_end(&self) {
|
||||
info!("填充{}「{}」视频详情完成", CollectionType::from(self.r#type), self.name);
|
||||
}
|
||||
|
||||
fn log_download_video_start(&self) {
|
||||
info!("开始下载{}「{}」视频..", CollectionType::from(self.r#type), self.name);
|
||||
}
|
||||
|
||||
fn log_download_video_end(&self) {
|
||||
info!("下载{}「{}」视频完成", CollectionType::from(self.r#type), self.name);
|
||||
collection::ActiveModel {
|
||||
id: Unchanged(self.id),
|
||||
name: Set(collection_info.name.clone()),
|
||||
..Default::default()
|
||||
}
|
||||
.save(connection)
|
||||
.await?;
|
||||
Ok((
|
||||
collection::Entity::find()
|
||||
.filter(collection::Column::Id.eq(self.id))
|
||||
.one(connection)
|
||||
.await?
|
||||
.context("collection not found")?
|
||||
.into(),
|
||||
Box::pin(collection.into_video_stream()),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) async fn collection_from<'a>(
|
||||
collection_item: &'a CollectionItem,
|
||||
path: &Path,
|
||||
bili_client: &'a BiliClient,
|
||||
connection: &DatabaseConnection,
|
||||
) -> Result<(
|
||||
VideoSourceEnum,
|
||||
Pin<Box<dyn Stream<Item = Result<VideoInfo>> + 'a + Send>>,
|
||||
)> {
|
||||
let collection = Collection::new(bili_client, collection_item);
|
||||
let collection_info = collection.get_info().await?;
|
||||
collection::Entity::insert(collection::ActiveModel {
|
||||
s_id: Set(collection_info.sid),
|
||||
m_id: Set(collection_info.mid),
|
||||
r#type: Set(collection_info.collection_type.into()),
|
||||
name: Set(collection_info.name.clone()),
|
||||
path: Set(path.to_string_lossy().to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.on_conflict(
|
||||
OnConflict::columns([
|
||||
collection::Column::SId,
|
||||
collection::Column::MId,
|
||||
collection::Column::Type,
|
||||
])
|
||||
.update_columns([collection::Column::Name, collection::Column::Path])
|
||||
.to_owned(),
|
||||
)
|
||||
.exec(connection)
|
||||
.await?;
|
||||
Ok((
|
||||
collection::Entity::find()
|
||||
.filter(
|
||||
collection::Column::SId
|
||||
.eq(collection_item.sid.clone())
|
||||
.and(collection::Column::MId.eq(collection_item.mid.clone()))
|
||||
.and(collection::Column::Type.eq(Into::<i32>::into(collection_item.collection_type.clone()))),
|
||||
)
|
||||
.one(connection)
|
||||
.await?
|
||||
.context("collection not found")?
|
||||
.into(),
|
||||
Box::pin(collection.into_video_stream()),
|
||||
))
|
||||
}
|
||||
|
||||
@@ -1,18 +1,24 @@
|
||||
use std::borrow::Cow;
|
||||
use std::path::Path;
|
||||
use std::pin::Pin;
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use anyhow::{Context, Result, ensure};
|
||||
use bili_sync_entity::rule::Rule;
|
||||
use bili_sync_entity::*;
|
||||
use futures::Stream;
|
||||
use sea_orm::entity::prelude::*;
|
||||
use sea_orm::sea_query::{OnConflict, SimpleExpr};
|
||||
use sea_orm::ActiveValue::Set;
|
||||
use sea_orm::entity::prelude::*;
|
||||
use sea_orm::sea_query::SimpleExpr;
|
||||
use sea_orm::{DatabaseConnection, Unchanged};
|
||||
|
||||
use crate::adapter::{VideoSource, VideoSourceEnum, _ActiveModel};
|
||||
use crate::adapter::{_ActiveModel, VideoSource, VideoSourceEnum};
|
||||
use crate::bilibili::{BiliClient, FavoriteList, VideoInfo};
|
||||
|
||||
impl VideoSource for favorite::Model {
|
||||
fn display_name(&self) -> Cow<'static, str> {
|
||||
format!("收藏夹「{}」", self.name).into()
|
||||
}
|
||||
|
||||
fn filter_expr(&self) -> SimpleExpr {
|
||||
video::Column::FavoriteId.eq(self.id)
|
||||
}
|
||||
@@ -37,62 +43,41 @@ impl VideoSource for favorite::Model {
|
||||
})
|
||||
}
|
||||
|
||||
fn log_refresh_video_start(&self) {
|
||||
info!("开始扫描收藏夹「{}」..", self.name);
|
||||
fn rule(&self) -> &Option<Rule> {
|
||||
&self.rule
|
||||
}
|
||||
|
||||
fn log_refresh_video_end(&self, count: usize) {
|
||||
info!("扫描收藏夹「{}」完成,获取到 {} 条新视频", self.name, count);
|
||||
}
|
||||
|
||||
fn log_fetch_video_start(&self) {
|
||||
info!("开始填充收藏夹「{}」视频详情..", self.name);
|
||||
}
|
||||
|
||||
fn log_fetch_video_end(&self) {
|
||||
info!("填充收藏夹「{}」视频详情完成", self.name);
|
||||
}
|
||||
|
||||
fn log_download_video_start(&self) {
|
||||
info!("开始下载收藏夹「{}」视频..", self.name);
|
||||
}
|
||||
|
||||
fn log_download_video_end(&self) {
|
||||
info!("下载收藏夹「{}」视频完成", self.name);
|
||||
async fn refresh<'a>(
|
||||
self,
|
||||
bili_client: &'a BiliClient,
|
||||
connection: &'a DatabaseConnection,
|
||||
) -> Result<(
|
||||
VideoSourceEnum,
|
||||
Pin<Box<dyn Stream<Item = Result<VideoInfo>> + Send + 'a>>,
|
||||
)> {
|
||||
let favorite = FavoriteList::new(bili_client, self.f_id.to_string());
|
||||
let favorite_info = favorite.get_info().await?;
|
||||
ensure!(
|
||||
favorite_info.id == self.f_id,
|
||||
"favorite id mismatch: {} != {}",
|
||||
favorite_info.id,
|
||||
self.f_id
|
||||
);
|
||||
favorite::ActiveModel {
|
||||
id: Unchanged(self.id),
|
||||
name: Set(favorite_info.title.clone()),
|
||||
..Default::default()
|
||||
}
|
||||
.save(connection)
|
||||
.await?;
|
||||
Ok((
|
||||
favorite::Entity::find()
|
||||
.filter(favorite::Column::Id.eq(self.id))
|
||||
.one(connection)
|
||||
.await?
|
||||
.context("favorite not found")?
|
||||
.into(),
|
||||
Box::pin(favorite.into_video_stream()),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) async fn favorite_from<'a>(
|
||||
fid: &str,
|
||||
path: &Path,
|
||||
bili_client: &'a BiliClient,
|
||||
connection: &DatabaseConnection,
|
||||
) -> Result<(
|
||||
VideoSourceEnum,
|
||||
Pin<Box<dyn Stream<Item = Result<VideoInfo>> + 'a + Send>>,
|
||||
)> {
|
||||
let favorite = FavoriteList::new(bili_client, fid.to_owned());
|
||||
let favorite_info = favorite.get_info().await?;
|
||||
favorite::Entity::insert(favorite::ActiveModel {
|
||||
f_id: Set(favorite_info.id),
|
||||
name: Set(favorite_info.title.clone()),
|
||||
path: Set(path.to_string_lossy().to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.on_conflict(
|
||||
OnConflict::column(favorite::Column::FId)
|
||||
.update_columns([favorite::Column::Name, favorite::Column::Path])
|
||||
.to_owned(),
|
||||
)
|
||||
.exec(connection)
|
||||
.await?;
|
||||
Ok((
|
||||
favorite::Entity::find()
|
||||
.filter(favorite::Column::FId.eq(favorite_info.id))
|
||||
.one(connection)
|
||||
.await?
|
||||
.context("favorite not found")?
|
||||
.into(),
|
||||
Box::pin(favorite.into_video_stream()),
|
||||
))
|
||||
}
|
||||
|
||||
@@ -3,27 +3,27 @@ mod favorite;
|
||||
mod submission;
|
||||
mod watch_later;
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::path::Path;
|
||||
use std::pin::Pin;
|
||||
|
||||
use anyhow::Result;
|
||||
use chrono::Utc;
|
||||
use enum_dispatch::enum_dispatch;
|
||||
use futures::Stream;
|
||||
use sea_orm::ActiveValue::Set;
|
||||
use sea_orm::DatabaseConnection;
|
||||
use sea_orm::entity::prelude::*;
|
||||
use sea_orm::sea_query::SimpleExpr;
|
||||
use sea_orm::DatabaseConnection;
|
||||
|
||||
#[rustfmt::skip]
|
||||
use bili_sync_entity::collection::Model as Collection;
|
||||
use bili_sync_entity::favorite::Model as Favorite;
|
||||
use bili_sync_entity::rule::Rule;
|
||||
use bili_sync_entity::submission::Model as Submission;
|
||||
use bili_sync_entity::watch_later::Model as WatchLater;
|
||||
|
||||
use crate::adapter::collection::collection_from;
|
||||
use crate::adapter::favorite::favorite_from;
|
||||
use crate::adapter::submission::submission_from;
|
||||
use crate::adapter::watch_later::watch_later_from;
|
||||
use crate::bilibili::{BiliClient, CollectionItem, VideoInfo};
|
||||
use crate::bilibili::{BiliClient, VideoInfo};
|
||||
|
||||
#[enum_dispatch]
|
||||
pub enum VideoSourceEnum {
|
||||
@@ -35,6 +35,9 @@ pub enum VideoSourceEnum {
|
||||
|
||||
#[enum_dispatch(VideoSourceEnum)]
|
||||
pub trait VideoSource {
|
||||
/// 获取视频源的名称
|
||||
fn display_name(&self) -> Cow<'static, str>;
|
||||
|
||||
/// 获取特定视频列表的筛选条件
|
||||
fn filter_expr(&self) -> SimpleExpr;
|
||||
|
||||
@@ -52,48 +55,54 @@ pub trait VideoSource {
|
||||
/// Box<dyn ActiveModelTrait> 又提示 ActiveModelTrait 没有 object safety,因此手写一个 Enum 静态分发
|
||||
fn update_latest_row_at(&self, datetime: DateTime) -> _ActiveModel;
|
||||
|
||||
/// 开始刷新视频
|
||||
fn log_refresh_video_start(&self);
|
||||
|
||||
/// 结束刷新视频
|
||||
fn log_refresh_video_end(&self, count: usize);
|
||||
|
||||
/// 开始填充视频
|
||||
fn log_fetch_video_start(&self);
|
||||
|
||||
/// 结束填充视频
|
||||
fn log_fetch_video_end(&self);
|
||||
|
||||
/// 开始下载视频
|
||||
fn log_download_video_start(&self);
|
||||
|
||||
/// 结束下载视频
|
||||
fn log_download_video_end(&self);
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
pub enum Args<'a> {
|
||||
Favorite { fid: &'a str },
|
||||
Collection { collection_item: &'a CollectionItem },
|
||||
WatchLater,
|
||||
Submission { upper_id: &'a str },
|
||||
}
|
||||
|
||||
pub async fn video_source_from<'a>(
|
||||
args: Args<'a>,
|
||||
path: &Path,
|
||||
bili_client: &'a BiliClient,
|
||||
connection: &DatabaseConnection,
|
||||
) -> Result<(
|
||||
VideoSourceEnum,
|
||||
Pin<Box<dyn Stream<Item = Result<VideoInfo>> + 'a + Send>>,
|
||||
)> {
|
||||
match args {
|
||||
Args::Favorite { fid } => favorite_from(fid, path, bili_client, connection).await,
|
||||
Args::Collection { collection_item } => collection_from(collection_item, path, bili_client, connection).await,
|
||||
Args::WatchLater => watch_later_from(path, bili_client, connection).await,
|
||||
Args::Submission { upper_id } => submission_from(upper_id, path, bili_client, connection).await,
|
||||
// 判断是否应该继续拉取视频
|
||||
fn should_take(&self, release_datetime: &chrono::DateTime<Utc>, latest_row_at: &chrono::DateTime<Utc>) -> bool {
|
||||
release_datetime > latest_row_at
|
||||
}
|
||||
|
||||
fn should_filter(
|
||||
&self,
|
||||
video_info: Result<VideoInfo, anyhow::Error>,
|
||||
_latest_row_at: &chrono::DateTime<Utc>,
|
||||
) -> Option<VideoInfo> {
|
||||
// 视频按照时间顺序拉取,should_take 已经获取了所有需要处理的视频,should_filter 无需额外处理
|
||||
video_info.ok()
|
||||
}
|
||||
|
||||
fn rule(&self) -> &Option<Rule>;
|
||||
|
||||
fn log_refresh_video_start(&self) {
|
||||
info!("开始扫描{}..", self.display_name());
|
||||
}
|
||||
|
||||
fn log_refresh_video_end(&self, count: usize) {
|
||||
info!("扫描{}完成,获取到 {} 条新视频", self.display_name(), count);
|
||||
}
|
||||
|
||||
fn log_fetch_video_start(&self) {
|
||||
info!("开始填充{}视频详情..", self.display_name());
|
||||
}
|
||||
|
||||
fn log_fetch_video_end(&self) {
|
||||
info!("填充{}视频详情完成", self.display_name());
|
||||
}
|
||||
|
||||
fn log_download_video_start(&self) {
|
||||
info!("开始下载{}视频..", self.display_name());
|
||||
}
|
||||
|
||||
fn log_download_video_end(&self) {
|
||||
info!("下载{}视频完成", self.display_name());
|
||||
}
|
||||
|
||||
async fn refresh<'a>(
|
||||
self,
|
||||
bili_client: &'a BiliClient,
|
||||
connection: &'a DatabaseConnection,
|
||||
) -> Result<(
|
||||
VideoSourceEnum,
|
||||
Pin<Box<dyn Stream<Item = Result<VideoInfo>> + Send + 'a>>,
|
||||
)>;
|
||||
}
|
||||
|
||||
pub enum _ActiveModel {
|
||||
@@ -115,8 +124,13 @@ impl _ActiveModel {
|
||||
_ActiveModel::Submission(model) => {
|
||||
model.save(connection).await?;
|
||||
}
|
||||
_ActiveModel::WatchLater(model) => {
|
||||
model.save(connection).await?;
|
||||
_ActiveModel::WatchLater(mut model) => {
|
||||
if model.id.is_not_set() {
|
||||
model.id = Set(1);
|
||||
model.insert(connection).await?;
|
||||
} else {
|
||||
model.save(connection).await?;
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
|
||||
@@ -1,18 +1,23 @@
|
||||
use std::path::Path;
|
||||
use std::pin::Pin;
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use anyhow::{Context, Result, ensure};
|
||||
use bili_sync_entity::rule::Rule;
|
||||
use bili_sync_entity::*;
|
||||
use futures::Stream;
|
||||
use sea_orm::entity::prelude::*;
|
||||
use sea_orm::sea_query::{OnConflict, SimpleExpr};
|
||||
use sea_orm::ActiveValue::Set;
|
||||
use sea_orm::entity::prelude::*;
|
||||
use sea_orm::sea_query::SimpleExpr;
|
||||
use sea_orm::{DatabaseConnection, Unchanged};
|
||||
|
||||
use crate::adapter::{VideoSource, VideoSourceEnum, _ActiveModel};
|
||||
use crate::adapter::{_ActiveModel, VideoSource, VideoSourceEnum};
|
||||
use crate::bilibili::{BiliClient, Submission, VideoInfo};
|
||||
|
||||
impl VideoSource for submission::Model {
|
||||
fn display_name(&self) -> std::borrow::Cow<'static, str> {
|
||||
format!("「{}」投稿", self.upper_name).into()
|
||||
}
|
||||
|
||||
fn filter_expr(&self) -> SimpleExpr {
|
||||
video::Column::SubmissionId.eq(self.id)
|
||||
}
|
||||
@@ -37,62 +42,41 @@ impl VideoSource for submission::Model {
|
||||
})
|
||||
}
|
||||
|
||||
fn log_refresh_video_start(&self) {
|
||||
info!("开始扫描「{}」投稿..", self.upper_name);
|
||||
fn rule(&self) -> &Option<Rule> {
|
||||
&self.rule
|
||||
}
|
||||
|
||||
fn log_refresh_video_end(&self, count: usize) {
|
||||
info!("扫描「{}」投稿完成,获取到 {} 条新视频", self.upper_name, count,);
|
||||
}
|
||||
|
||||
fn log_fetch_video_start(&self) {
|
||||
info!("开始填充「{}」投稿视频详情..", self.upper_name);
|
||||
}
|
||||
|
||||
fn log_fetch_video_end(&self) {
|
||||
info!("填充「{}」投稿视频详情完成", self.upper_name);
|
||||
}
|
||||
|
||||
fn log_download_video_start(&self) {
|
||||
info!("开始下载「{}」投稿视频..", self.upper_name);
|
||||
}
|
||||
|
||||
fn log_download_video_end(&self) {
|
||||
info!("下载「{}」投稿视频完成", self.upper_name);
|
||||
async fn refresh<'a>(
|
||||
self,
|
||||
bili_client: &'a BiliClient,
|
||||
connection: &'a DatabaseConnection,
|
||||
) -> Result<(
|
||||
VideoSourceEnum,
|
||||
Pin<Box<dyn Stream<Item = Result<VideoInfo>> + Send + 'a>>,
|
||||
)> {
|
||||
let submission = Submission::new(bili_client, self.upper_id.to_string());
|
||||
let upper = submission.get_info().await?;
|
||||
ensure!(
|
||||
upper.mid == submission.upper_id,
|
||||
"submission upper id mismatch: {} != {}",
|
||||
upper.mid,
|
||||
submission.upper_id
|
||||
);
|
||||
submission::ActiveModel {
|
||||
id: Unchanged(self.id),
|
||||
upper_name: Set(upper.name),
|
||||
..Default::default()
|
||||
}
|
||||
.save(connection)
|
||||
.await?;
|
||||
Ok((
|
||||
submission::Entity::find()
|
||||
.filter(submission::Column::Id.eq(self.id))
|
||||
.one(connection)
|
||||
.await?
|
||||
.context("submission not found")?
|
||||
.into(),
|
||||
Box::pin(submission.into_video_stream()),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) async fn submission_from<'a>(
|
||||
upper_id: &str,
|
||||
path: &Path,
|
||||
bili_client: &'a BiliClient,
|
||||
connection: &DatabaseConnection,
|
||||
) -> Result<(
|
||||
VideoSourceEnum,
|
||||
Pin<Box<dyn Stream<Item = Result<VideoInfo>> + 'a + Send>>,
|
||||
)> {
|
||||
let submission = Submission::new(bili_client, upper_id.to_owned());
|
||||
let upper = submission.get_info().await?;
|
||||
submission::Entity::insert(submission::ActiveModel {
|
||||
upper_id: Set(upper.mid.parse()?),
|
||||
upper_name: Set(upper.name),
|
||||
path: Set(path.to_string_lossy().to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.on_conflict(
|
||||
OnConflict::column(submission::Column::UpperId)
|
||||
.update_columns([submission::Column::UpperName, submission::Column::Path])
|
||||
.to_owned(),
|
||||
)
|
||||
.exec(connection)
|
||||
.await?;
|
||||
Ok((
|
||||
submission::Entity::find()
|
||||
.filter(submission::Column::UpperId.eq(upper.mid))
|
||||
.one(connection)
|
||||
.await?
|
||||
.context("submission not found")?
|
||||
.into(),
|
||||
Box::pin(submission.into_video_stream()),
|
||||
))
|
||||
}
|
||||
|
||||
@@ -1,18 +1,23 @@
|
||||
use std::path::Path;
|
||||
use std::pin::Pin;
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use anyhow::Result;
|
||||
use bili_sync_entity::rule::Rule;
|
||||
use bili_sync_entity::*;
|
||||
use futures::Stream;
|
||||
use sea_orm::entity::prelude::*;
|
||||
use sea_orm::sea_query::{OnConflict, SimpleExpr};
|
||||
use sea_orm::ActiveValue::Set;
|
||||
use sea_orm::entity::prelude::*;
|
||||
use sea_orm::sea_query::SimpleExpr;
|
||||
use sea_orm::{DatabaseConnection, Unchanged};
|
||||
|
||||
use crate::adapter::{VideoSource, VideoSourceEnum, _ActiveModel};
|
||||
use crate::adapter::{_ActiveModel, VideoSource, VideoSourceEnum};
|
||||
use crate::bilibili::{BiliClient, VideoInfo, WatchLater};
|
||||
|
||||
impl VideoSource for watch_later::Model {
|
||||
fn display_name(&self) -> std::borrow::Cow<'static, str> {
|
||||
"稍后再看".into()
|
||||
}
|
||||
|
||||
fn filter_expr(&self) -> SimpleExpr {
|
||||
video::Column::WatchLaterId.eq(self.id)
|
||||
}
|
||||
@@ -37,59 +42,19 @@ impl VideoSource for watch_later::Model {
|
||||
})
|
||||
}
|
||||
|
||||
fn log_refresh_video_start(&self) {
|
||||
info!("开始扫描稍后再看..");
|
||||
fn rule(&self) -> &Option<Rule> {
|
||||
&self.rule
|
||||
}
|
||||
|
||||
fn log_refresh_video_end(&self, count: usize) {
|
||||
info!("扫描稍后再看完成,获取到 {} 条新视频", count);
|
||||
}
|
||||
|
||||
fn log_fetch_video_start(&self) {
|
||||
info!("开始填充稍后再看视频详情..");
|
||||
}
|
||||
|
||||
fn log_fetch_video_end(&self) {
|
||||
info!("填充稍后再看视频详情完成");
|
||||
}
|
||||
|
||||
fn log_download_video_start(&self) {
|
||||
info!("开始下载稍后再看视频..");
|
||||
}
|
||||
|
||||
fn log_download_video_end(&self) {
|
||||
info!("下载稍后再看视频完成");
|
||||
async fn refresh<'a>(
|
||||
self,
|
||||
bili_client: &'a BiliClient,
|
||||
_connection: &'a DatabaseConnection,
|
||||
) -> Result<(
|
||||
VideoSourceEnum,
|
||||
Pin<Box<dyn Stream<Item = Result<VideoInfo>> + Send + 'a>>,
|
||||
)> {
|
||||
let watch_later = WatchLater::new(bili_client);
|
||||
Ok((self.into(), Box::pin(watch_later.into_video_stream())))
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) async fn watch_later_from<'a>(
|
||||
path: &Path,
|
||||
bili_client: &'a BiliClient,
|
||||
connection: &DatabaseConnection,
|
||||
) -> Result<(
|
||||
VideoSourceEnum,
|
||||
Pin<Box<dyn Stream<Item = Result<VideoInfo>> + 'a + Send>>,
|
||||
)> {
|
||||
let watch_later = WatchLater::new(bili_client);
|
||||
watch_later::Entity::insert(watch_later::ActiveModel {
|
||||
id: Set(1),
|
||||
path: Set(path.to_string_lossy().to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.on_conflict(
|
||||
OnConflict::column(watch_later::Column::Id)
|
||||
.update_column(watch_later::Column::Path)
|
||||
.to_owned(),
|
||||
)
|
||||
.exec(connection)
|
||||
.await?;
|
||||
Ok((
|
||||
watch_later::Entity::find()
|
||||
.filter(watch_later::Column::Id.eq(1))
|
||||
.one(connection)
|
||||
.await?
|
||||
.context("watch_later not found")?
|
||||
.into(),
|
||||
Box::pin(watch_later.into_video_stream()),
|
||||
))
|
||||
}
|
||||
|
||||
@@ -1,40 +0,0 @@
|
||||
use axum::extract::Request;
|
||||
use axum::http::HeaderMap;
|
||||
use axum::middleware::Next;
|
||||
use axum::response::{IntoResponse, Response};
|
||||
use reqwest::StatusCode;
|
||||
use utoipa::openapi::security::{ApiKey, ApiKeyValue, SecurityScheme};
|
||||
use utoipa::Modify;
|
||||
|
||||
use crate::api::wrapper::ApiResponse;
|
||||
use crate::config::CONFIG;
|
||||
|
||||
pub async fn auth(headers: HeaderMap, request: Request, next: Next) -> Result<Response, StatusCode> {
|
||||
if request.uri().path().starts_with("/api/") && get_token(&headers) != CONFIG.auth_token {
|
||||
return Ok(ApiResponse::unauthorized(()).into_response());
|
||||
}
|
||||
Ok(next.run(request).await)
|
||||
}
|
||||
|
||||
fn get_token(headers: &HeaderMap) -> Option<String> {
|
||||
headers
|
||||
.get("Authorization")
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.map(Into::into)
|
||||
}
|
||||
|
||||
pub(super) struct OpenAPIAuth;
|
||||
|
||||
impl Modify for OpenAPIAuth {
|
||||
fn modify(&self, openapi: &mut utoipa::openapi::OpenApi) {
|
||||
if let Some(schema) = openapi.components.as_mut() {
|
||||
schema.add_security_scheme(
|
||||
"Token",
|
||||
SecurityScheme::ApiKey(ApiKey::Header(ApiKeyValue::with_description(
|
||||
"Authorization",
|
||||
"与配置文件中的 auth_token 相同",
|
||||
))),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -4,4 +4,6 @@ use thiserror::Error;
|
||||
pub enum InnerApiError {
|
||||
#[error("Primary key not found: {0}")]
|
||||
NotFound(i32),
|
||||
#[error("Bad request: {0}")]
|
||||
BadRequest(String),
|
||||
}
|
||||
|
||||
@@ -1,252 +0,0 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use axum::extract::{Extension, Path, Query};
|
||||
use bili_sync_entity::*;
|
||||
use bili_sync_migration::{Expr, OnConflict};
|
||||
use sea_orm::{
|
||||
ColumnTrait, DatabaseConnection, EntityTrait, IntoActiveModel, PaginatorTrait, QueryFilter, QueryOrder,
|
||||
QuerySelect, Set, TransactionTrait, Unchanged,
|
||||
};
|
||||
use utoipa::OpenApi;
|
||||
|
||||
use crate::api::auth::OpenAPIAuth;
|
||||
use crate::api::error::InnerApiError;
|
||||
use crate::api::request::VideosRequest;
|
||||
use crate::api::response::{
|
||||
PageInfo, ResetVideoResponse, VideoInfo, VideoResponse, VideoSource, VideoSourcesResponse, VideosResponse,
|
||||
};
|
||||
use crate::api::wrapper::{ApiError, ApiResponse};
|
||||
use crate::utils::status::{PageStatus, VideoStatus};
|
||||
|
||||
#[derive(OpenApi)]
|
||||
#[openapi(
|
||||
paths(get_video_sources, get_videos, get_video, reset_video),
|
||||
modifiers(&OpenAPIAuth),
|
||||
security(
|
||||
("Token" = []),
|
||||
)
|
||||
)]
|
||||
pub struct ApiDoc;
|
||||
|
||||
/// 列出所有视频来源
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/api/video-sources",
|
||||
responses(
|
||||
(status = 200, body = ApiResponse<VideoSourcesResponse>),
|
||||
)
|
||||
)]
|
||||
pub async fn get_video_sources(
|
||||
Extension(db): Extension<Arc<DatabaseConnection>>,
|
||||
) -> Result<ApiResponse<VideoSourcesResponse>, ApiError> {
|
||||
Ok(ApiResponse::ok(VideoSourcesResponse {
|
||||
collection: collection::Entity::find()
|
||||
.select_only()
|
||||
.columns([collection::Column::Id, collection::Column::Name])
|
||||
.into_model::<VideoSource>()
|
||||
.all(db.as_ref())
|
||||
.await?,
|
||||
favorite: favorite::Entity::find()
|
||||
.select_only()
|
||||
.columns([favorite::Column::Id, favorite::Column::Name])
|
||||
.into_model::<VideoSource>()
|
||||
.all(db.as_ref())
|
||||
.await?,
|
||||
submission: submission::Entity::find()
|
||||
.select_only()
|
||||
.column(submission::Column::Id)
|
||||
.column_as(submission::Column::UpperName, "name")
|
||||
.into_model::<VideoSource>()
|
||||
.all(db.as_ref())
|
||||
.await?,
|
||||
watch_later: watch_later::Entity::find()
|
||||
.select_only()
|
||||
.column(watch_later::Column::Id)
|
||||
.column_as(Expr::value("稍后再看"), "name")
|
||||
.into_model::<VideoSource>()
|
||||
.all(db.as_ref())
|
||||
.await?,
|
||||
}))
|
||||
}
|
||||
|
||||
/// 列出视频的基本信息,支持根据视频来源筛选、名称查找和分页
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/api/videos",
|
||||
params(
|
||||
VideosRequest,
|
||||
),
|
||||
responses(
|
||||
(status = 200, body = ApiResponse<VideosResponse>),
|
||||
)
|
||||
)]
|
||||
pub async fn get_videos(
|
||||
Extension(db): Extension<Arc<DatabaseConnection>>,
|
||||
Query(params): Query<VideosRequest>,
|
||||
) -> Result<ApiResponse<VideosResponse>, ApiError> {
|
||||
let mut query = video::Entity::find();
|
||||
for (field, column) in [
|
||||
(params.collection, video::Column::CollectionId),
|
||||
(params.favorite, video::Column::FavoriteId),
|
||||
(params.submission, video::Column::SubmissionId),
|
||||
(params.watch_later, video::Column::WatchLaterId),
|
||||
] {
|
||||
if let Some(id) = field {
|
||||
query = query.filter(column.eq(id));
|
||||
}
|
||||
}
|
||||
if let Some(query_word) = params.query {
|
||||
query = query.filter(video::Column::Name.contains(query_word));
|
||||
}
|
||||
let total_count = query.clone().count(db.as_ref()).await?;
|
||||
let (page, page_size) = if let (Some(page), Some(page_size)) = (params.page, params.page_size) {
|
||||
(page, page_size)
|
||||
} else {
|
||||
(1, 10)
|
||||
};
|
||||
Ok(ApiResponse::ok(VideosResponse {
|
||||
videos: query
|
||||
.order_by_desc(video::Column::Id)
|
||||
.select_only()
|
||||
.columns([
|
||||
video::Column::Id,
|
||||
video::Column::Name,
|
||||
video::Column::UpperName,
|
||||
video::Column::DownloadStatus,
|
||||
])
|
||||
.into_tuple::<(i32, String, String, u32)>()
|
||||
.paginate(db.as_ref(), page_size)
|
||||
.fetch_page(page)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(VideoInfo::from)
|
||||
.collect(),
|
||||
total_count,
|
||||
}))
|
||||
}
|
||||
|
||||
/// 获取视频详细信息,包括关联的所有 page
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/api/videos/{id}",
|
||||
responses(
|
||||
(status = 200, body = ApiResponse<VideoResponse>),
|
||||
)
|
||||
)]
|
||||
pub async fn get_video(
|
||||
Path(id): Path<i32>,
|
||||
Extension(db): Extension<Arc<DatabaseConnection>>,
|
||||
) -> Result<ApiResponse<VideoResponse>, ApiError> {
|
||||
let video_info = video::Entity::find_by_id(id)
|
||||
.select_only()
|
||||
.columns([
|
||||
video::Column::Id,
|
||||
video::Column::Name,
|
||||
video::Column::UpperName,
|
||||
video::Column::DownloadStatus,
|
||||
])
|
||||
.into_tuple::<(i32, String, String, u32)>()
|
||||
.one(db.as_ref())
|
||||
.await?
|
||||
.map(VideoInfo::from);
|
||||
let Some(video_info) = video_info else {
|
||||
return Err(InnerApiError::NotFound(id).into());
|
||||
};
|
||||
let pages = page::Entity::find()
|
||||
.filter(page::Column::VideoId.eq(id))
|
||||
.order_by_asc(page::Column::Pid)
|
||||
.select_only()
|
||||
.columns([
|
||||
page::Column::Id,
|
||||
page::Column::Pid,
|
||||
page::Column::Name,
|
||||
page::Column::DownloadStatus,
|
||||
])
|
||||
.into_tuple::<(i32, i32, String, u32)>()
|
||||
.all(db.as_ref())
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(PageInfo::from)
|
||||
.collect();
|
||||
Ok(ApiResponse::ok(VideoResponse {
|
||||
video: video_info,
|
||||
pages,
|
||||
}))
|
||||
}
|
||||
|
||||
/// 将某个视频与其所有分页的失败状态清空为未下载状态,这样在下次下载任务中会触发重试
|
||||
#[utoipa::path(
|
||||
post,
|
||||
path = "/api/videos/{id}/reset",
|
||||
responses(
|
||||
(status = 200, body = ApiResponse<ResetVideoResponse> ),
|
||||
)
|
||||
)]
|
||||
pub async fn reset_video(
|
||||
Path(id): Path<i32>,
|
||||
Extension(db): Extension<Arc<DatabaseConnection>>,
|
||||
) -> Result<ApiResponse<ResetVideoResponse>, ApiError> {
|
||||
let txn = db.begin().await?;
|
||||
let video_status: Option<u32> = video::Entity::find_by_id(id)
|
||||
.select_only()
|
||||
.column(video::Column::DownloadStatus)
|
||||
.into_tuple()
|
||||
.one(&txn)
|
||||
.await?;
|
||||
let Some(video_status) = video_status else {
|
||||
return Err(anyhow!(InnerApiError::NotFound(id)).into());
|
||||
};
|
||||
let resetted_pages_model: Vec<_> = page::Entity::find()
|
||||
.filter(page::Column::VideoId.eq(id))
|
||||
.all(&txn)
|
||||
.await?
|
||||
.into_iter()
|
||||
.filter_map(|mut model| {
|
||||
let mut page_status = PageStatus::from(model.download_status);
|
||||
if page_status.reset_failed() {
|
||||
model.download_status = page_status.into();
|
||||
Some(model)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
let mut video_status = VideoStatus::from(video_status);
|
||||
let mut should_update_video = video_status.reset_failed();
|
||||
if !resetted_pages_model.is_empty() {
|
||||
// 视频状态标志的第 5 位表示是否有分 P 下载失败,如果有需要重置的分页,需要同时重置视频的该状态
|
||||
video_status.set(4, 0);
|
||||
should_update_video = true;
|
||||
}
|
||||
if should_update_video {
|
||||
video::Entity::update(video::ActiveModel {
|
||||
id: Unchanged(id),
|
||||
download_status: Set(video_status.into()),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&txn)
|
||||
.await?;
|
||||
}
|
||||
let resetted_pages_id: Vec<_> = resetted_pages_model.iter().map(|model| model.id).collect();
|
||||
let resetted_pages_model: Vec<page::ActiveModel> = resetted_pages_model
|
||||
.into_iter()
|
||||
.map(|model| model.into_active_model())
|
||||
.collect();
|
||||
for page_trunk in resetted_pages_model.chunks(50) {
|
||||
page::Entity::insert_many(page_trunk.to_vec())
|
||||
.on_conflict(
|
||||
OnConflict::column(page::Column::Id)
|
||||
.update_column(page::Column::DownloadStatus)
|
||||
.to_owned(),
|
||||
)
|
||||
.exec(&txn)
|
||||
.await?;
|
||||
}
|
||||
txn.commit().await?;
|
||||
Ok(ApiResponse::ok(ResetVideoResponse {
|
||||
resetted: should_update_video,
|
||||
video: id,
|
||||
pages: resetted_pages_id,
|
||||
}))
|
||||
}
|
||||
83
crates/bili_sync/src/api/helper.rs
Normal file
83
crates/bili_sync/src/api/helper.rs
Normal file
@@ -0,0 +1,83 @@
|
||||
use std::borrow::Borrow;
|
||||
|
||||
use sea_orm::{ConnectionTrait, DatabaseTransaction};
|
||||
|
||||
use crate::api::response::{PageInfo, VideoInfo};
|
||||
|
||||
pub async fn update_video_download_status(
|
||||
txn: &DatabaseTransaction,
|
||||
videos: &[impl Borrow<VideoInfo>],
|
||||
batch_size: Option<usize>,
|
||||
) -> Result<(), sea_orm::DbErr> {
|
||||
if videos.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
let videos = videos.iter().map(|v| v.borrow()).collect::<Vec<_>>();
|
||||
if let Some(size) = batch_size {
|
||||
for chunk in videos.chunks(size) {
|
||||
execute_video_update_batch(txn, chunk).await?;
|
||||
}
|
||||
} else {
|
||||
execute_video_update_batch(txn, &videos).await?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn update_page_download_status(
|
||||
txn: &DatabaseTransaction,
|
||||
pages: &[impl Borrow<PageInfo>],
|
||||
batch_size: Option<usize>,
|
||||
) -> Result<(), sea_orm::DbErr> {
|
||||
if pages.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
let pages = pages.iter().map(|v| v.borrow()).collect::<Vec<_>>();
|
||||
if let Some(size) = batch_size {
|
||||
for chunk in pages.chunks(size) {
|
||||
execute_page_update_batch(txn, chunk).await?;
|
||||
}
|
||||
} else {
|
||||
execute_page_update_batch(txn, &pages).await?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn execute_video_update_batch(txn: &DatabaseTransaction, videos: &[&VideoInfo]) -> Result<(), sea_orm::DbErr> {
|
||||
if videos.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
let sql = format!(
|
||||
"WITH tempdata(id, download_status) AS (VALUES {}) \
|
||||
UPDATE video \
|
||||
SET download_status = tempdata.download_status \
|
||||
FROM tempdata \
|
||||
WHERE video.id = tempdata.id",
|
||||
videos
|
||||
.iter()
|
||||
.map(|v| format!("({}, {})", v.id, v.download_status))
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
);
|
||||
txn.execute_unprepared(&sql).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn execute_page_update_batch(txn: &DatabaseTransaction, pages: &[&PageInfo]) -> Result<(), sea_orm::DbErr> {
|
||||
if pages.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
let sql = format!(
|
||||
"WITH tempdata(id, download_status) AS (VALUES {}) \
|
||||
UPDATE page \
|
||||
SET download_status = tempdata.download_status \
|
||||
FROM tempdata \
|
||||
WHERE page.id = tempdata.id",
|
||||
pages
|
||||
.iter()
|
||||
.map(|p| format!("({}, {})", p.id, p.download_status))
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
);
|
||||
txn.execute_unprepared(&sql).await?;
|
||||
Ok(())
|
||||
}
|
||||
@@ -1,7 +1,8 @@
|
||||
pub mod auth;
|
||||
pub mod handler;
|
||||
|
||||
mod error;
|
||||
mod helper;
|
||||
mod request;
|
||||
mod response;
|
||||
mod routes;
|
||||
mod wrapper;
|
||||
|
||||
pub use routes::{LogHelper, MAX_HISTORY_LOGS, router};
|
||||
|
||||
@@ -1,7 +1,10 @@
|
||||
use bili_sync_entity::rule::Rule;
|
||||
use serde::Deserialize;
|
||||
use utoipa::IntoParams;
|
||||
use validator::Validate;
|
||||
|
||||
#[derive(Deserialize, IntoParams)]
|
||||
use crate::bilibili::CollectionType;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct VideosRequest {
|
||||
pub collection: Option<i32>,
|
||||
pub favorite: Option<i32>,
|
||||
@@ -11,3 +14,78 @@ pub struct VideosRequest {
|
||||
pub page: Option<u64>,
|
||||
pub page_size: Option<u64>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct ResetRequest {
|
||||
#[serde(default)]
|
||||
pub force: bool,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Validate)]
|
||||
pub struct StatusUpdate {
|
||||
#[validate(range(min = 0, max = 4))]
|
||||
pub status_index: usize,
|
||||
#[validate(custom(function = "crate::utils::validation::validate_status_value"))]
|
||||
pub status_value: u32,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Validate)]
|
||||
pub struct PageStatusUpdate {
|
||||
pub page_id: i32,
|
||||
#[validate(nested)]
|
||||
pub updates: Vec<StatusUpdate>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Validate)]
|
||||
pub struct UpdateVideoStatusRequest {
|
||||
#[serde(default)]
|
||||
#[validate(nested)]
|
||||
pub video_updates: Vec<StatusUpdate>,
|
||||
#[serde(default)]
|
||||
#[validate(nested)]
|
||||
pub page_updates: Vec<PageStatusUpdate>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct FollowedCollectionsRequest {
|
||||
pub page_num: Option<i32>,
|
||||
pub page_size: Option<i32>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct FollowedUppersRequest {
|
||||
pub page_num: Option<i32>,
|
||||
pub page_size: Option<i32>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Validate)]
|
||||
pub struct InsertFavoriteRequest {
|
||||
pub fid: i64,
|
||||
#[validate(custom(function = "crate::utils::validation::validate_path"))]
|
||||
pub path: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Validate)]
|
||||
pub struct InsertCollectionRequest {
|
||||
pub sid: i64,
|
||||
pub mid: i64,
|
||||
#[serde(default)]
|
||||
pub collection_type: CollectionType,
|
||||
#[validate(custom(function = "crate::utils::validation::validate_path"))]
|
||||
pub path: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Validate)]
|
||||
pub struct InsertSubmissionRequest {
|
||||
pub upper_id: i64,
|
||||
#[validate(custom(function = "crate::utils::validation::validate_path"))]
|
||||
pub path: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Validate)]
|
||||
pub struct UpdateVideoSourceRequest {
|
||||
#[validate(custom(function = "crate::utils::validation::validate_path"))]
|
||||
pub path: String,
|
||||
pub enabled: bool,
|
||||
pub rule: Option<Rule>,
|
||||
}
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
use sea_orm::FromQueryResult;
|
||||
use bili_sync_entity::rule::Rule;
|
||||
use bili_sync_entity::*;
|
||||
use sea_orm::{DerivePartialModel, FromQueryResult};
|
||||
use serde::Serialize;
|
||||
use utoipa::ToSchema;
|
||||
|
||||
use crate::utils::status::{PageStatus, VideoStatus};
|
||||
|
||||
#[derive(Serialize, ToSchema)]
|
||||
#[derive(Serialize)]
|
||||
pub struct VideoSourcesResponse {
|
||||
pub collection: Vec<VideoSource>,
|
||||
pub favorite: Vec<VideoSource>,
|
||||
@@ -12,65 +13,177 @@ pub struct VideoSourcesResponse {
|
||||
pub watch_later: Vec<VideoSource>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, ToSchema)]
|
||||
#[derive(Serialize)]
|
||||
pub struct VideosResponse {
|
||||
pub videos: Vec<VideoInfo>,
|
||||
pub total_count: u64,
|
||||
}
|
||||
|
||||
#[derive(Serialize, ToSchema)]
|
||||
#[derive(Serialize)]
|
||||
pub struct VideoResponse {
|
||||
pub video: VideoInfo,
|
||||
pub pages: Vec<PageInfo>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, ToSchema)]
|
||||
#[derive(Serialize)]
|
||||
pub struct ResetVideoResponse {
|
||||
pub resetted: bool,
|
||||
pub video: i32,
|
||||
pub pages: Vec<i32>,
|
||||
pub video: VideoInfo,
|
||||
pub pages: Vec<PageInfo>,
|
||||
}
|
||||
|
||||
#[derive(FromQueryResult, Serialize, ToSchema)]
|
||||
#[derive(Serialize)]
|
||||
pub struct ResetAllVideosResponse {
|
||||
pub resetted: bool,
|
||||
pub resetted_videos_count: usize,
|
||||
pub resetted_pages_count: usize,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct UpdateVideoStatusResponse {
|
||||
pub success: bool,
|
||||
pub video: VideoInfo,
|
||||
pub pages: Vec<PageInfo>,
|
||||
}
|
||||
|
||||
#[derive(FromQueryResult, Serialize)]
|
||||
pub struct VideoSource {
|
||||
id: i32,
|
||||
name: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, ToSchema)]
|
||||
pub struct PageInfo {
|
||||
pub id: i32,
|
||||
pub pid: i32,
|
||||
pub name: String,
|
||||
pub download_status: [u32; 5],
|
||||
}
|
||||
|
||||
impl From<(i32, i32, String, u32)> for PageInfo {
|
||||
fn from((id, pid, name, download_status): (i32, i32, String, u32)) -> Self {
|
||||
Self {
|
||||
id,
|
||||
pid,
|
||||
name,
|
||||
download_status: PageStatus::from(download_status).into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, ToSchema)]
|
||||
#[derive(Serialize, DerivePartialModel, FromQueryResult)]
|
||||
#[sea_orm(entity = "video::Entity")]
|
||||
pub struct VideoInfo {
|
||||
pub id: i32,
|
||||
pub bvid: String,
|
||||
pub name: String,
|
||||
pub upper_name: String,
|
||||
pub download_status: [u32; 5],
|
||||
pub should_download: bool,
|
||||
#[serde(serialize_with = "serde_video_download_status")]
|
||||
pub download_status: u32,
|
||||
}
|
||||
|
||||
impl From<(i32, String, String, u32)> for VideoInfo {
|
||||
fn from((id, name, upper_name, download_status): (i32, String, String, u32)) -> Self {
|
||||
Self {
|
||||
id,
|
||||
name,
|
||||
upper_name,
|
||||
download_status: VideoStatus::from(download_status).into(),
|
||||
}
|
||||
}
|
||||
#[derive(Serialize, DerivePartialModel, FromQueryResult)]
|
||||
#[sea_orm(entity = "page::Entity")]
|
||||
pub struct PageInfo {
|
||||
pub id: i32,
|
||||
pub video_id: i32,
|
||||
pub pid: i32,
|
||||
pub name: String,
|
||||
#[serde(serialize_with = "serde_page_download_status")]
|
||||
pub download_status: u32,
|
||||
}
|
||||
|
||||
fn serde_video_download_status<S>(status: &u32, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
let status: [u32; 5] = VideoStatus::from(*status).into();
|
||||
status.serialize(serializer)
|
||||
}
|
||||
|
||||
fn serde_page_download_status<S>(status: &u32, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
let status: [u32; 5] = PageStatus::from(*status).into();
|
||||
status.serialize(serializer)
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct FavoriteWithSubscriptionStatus {
|
||||
pub title: String,
|
||||
pub media_count: i64,
|
||||
pub fid: i64,
|
||||
pub mid: i64,
|
||||
pub subscribed: bool,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct CollectionWithSubscriptionStatus {
|
||||
pub title: String,
|
||||
pub sid: i64,
|
||||
pub mid: i64,
|
||||
pub invalid: bool,
|
||||
pub subscribed: bool,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct UpperWithSubscriptionStatus {
|
||||
pub mid: i64,
|
||||
pub uname: String,
|
||||
pub face: String,
|
||||
pub sign: String,
|
||||
pub invalid: bool,
|
||||
pub subscribed: bool,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct FavoritesResponse {
|
||||
pub favorites: Vec<FavoriteWithSubscriptionStatus>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct CollectionsResponse {
|
||||
pub collections: Vec<CollectionWithSubscriptionStatus>,
|
||||
pub total: i64,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct UppersResponse {
|
||||
pub uppers: Vec<UpperWithSubscriptionStatus>,
|
||||
pub total: i64,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct VideoSourcesDetailsResponse {
|
||||
pub collections: Vec<VideoSourceDetail>,
|
||||
pub favorites: Vec<VideoSourceDetail>,
|
||||
pub submissions: Vec<VideoSourceDetail>,
|
||||
pub watch_later: Vec<VideoSourceDetail>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, FromQueryResult)]
|
||||
pub struct DayCountPair {
|
||||
pub day: String,
|
||||
pub cnt: i64,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct DashBoardResponse {
|
||||
pub enabled_favorites: u64,
|
||||
pub enabled_collections: u64,
|
||||
pub enabled_submissions: u64,
|
||||
pub enable_watch_later: bool,
|
||||
pub videos_by_day: Vec<DayCountPair>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct SysInfo {
|
||||
pub total_memory: u64,
|
||||
pub used_memory: u64,
|
||||
pub process_memory: u64,
|
||||
pub used_cpu: f32,
|
||||
pub process_cpu: f32,
|
||||
pub total_disk: u64,
|
||||
pub available_disk: u64,
|
||||
}
|
||||
|
||||
#[derive(Serialize, FromQueryResult)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct VideoSourceDetail {
|
||||
pub id: i32,
|
||||
pub name: String,
|
||||
pub path: String,
|
||||
pub rule: Option<Rule>,
|
||||
#[serde(default)]
|
||||
pub rule_display: Option<String>,
|
||||
pub enabled: bool,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct UpdateVideoSourceResponse {
|
||||
pub rule_display: Option<String>,
|
||||
}
|
||||
|
||||
36
crates/bili_sync/src/api/routes/config/mod.rs
Normal file
36
crates/bili_sync/src/api/routes/config/mod.rs
Normal file
@@ -0,0 +1,36 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::Result;
|
||||
use axum::Router;
|
||||
use axum::extract::Extension;
|
||||
use axum::routing::get;
|
||||
use sea_orm::DatabaseConnection;
|
||||
|
||||
use crate::api::error::InnerApiError;
|
||||
use crate::api::wrapper::{ApiError, ApiResponse, ValidatedJson};
|
||||
use crate::config::{Config, VersionedConfig};
|
||||
use crate::utils::task_notifier::TASK_STATUS_NOTIFIER;
|
||||
|
||||
pub(super) fn router() -> Router {
|
||||
Router::new().route("/config", get(get_config).put(update_config))
|
||||
}
|
||||
|
||||
/// 获取全局配置
|
||||
pub async fn get_config() -> Result<ApiResponse<Arc<Config>>, ApiError> {
|
||||
Ok(ApiResponse::ok(VersionedConfig::get().load_full()))
|
||||
}
|
||||
|
||||
/// 更新全局配置
|
||||
pub async fn update_config(
|
||||
Extension(db): Extension<DatabaseConnection>,
|
||||
ValidatedJson(config): ValidatedJson<Config>,
|
||||
) -> Result<ApiResponse<Arc<Config>>, ApiError> {
|
||||
let Some(_lock) = TASK_STATUS_NOTIFIER.detect_running() else {
|
||||
// 简单避免一下可能的不一致现象
|
||||
return Err(InnerApiError::BadRequest("下载任务正在运行,无法修改配置".to_string()).into());
|
||||
};
|
||||
config.check()?;
|
||||
let new_config = VersionedConfig::get().update(config, &db).await?;
|
||||
drop(_lock);
|
||||
Ok(ApiResponse::ok(new_config))
|
||||
}
|
||||
65
crates/bili_sync/src/api/routes/dashboard/mod.rs
Normal file
65
crates/bili_sync/src/api/routes/dashboard/mod.rs
Normal file
@@ -0,0 +1,65 @@
|
||||
use axum::routing::get;
|
||||
use axum::{Extension, Router};
|
||||
use bili_sync_entity::*;
|
||||
use sea_orm::entity::prelude::*;
|
||||
use sea_orm::{FromQueryResult, Statement};
|
||||
|
||||
use crate::api::response::{DashBoardResponse, DayCountPair};
|
||||
use crate::api::wrapper::{ApiError, ApiResponse};
|
||||
|
||||
pub(super) fn router() -> Router {
|
||||
Router::new().route("/dashboard", get(get_dashboard))
|
||||
}
|
||||
|
||||
async fn get_dashboard(
|
||||
Extension(db): Extension<DatabaseConnection>,
|
||||
) -> Result<ApiResponse<DashBoardResponse>, ApiError> {
|
||||
let (enabled_favorites, enabled_collections, enabled_submissions, enabled_watch_later, videos_by_day) = tokio::try_join!(
|
||||
favorite::Entity::find()
|
||||
.filter(favorite::Column::Enabled.eq(true))
|
||||
.count(&db),
|
||||
collection::Entity::find()
|
||||
.filter(collection::Column::Enabled.eq(true))
|
||||
.count(&db),
|
||||
submission::Entity::find()
|
||||
.filter(submission::Column::Enabled.eq(true))
|
||||
.count(&db),
|
||||
watch_later::Entity::find()
|
||||
.filter(watch_later::Column::Enabled.eq(true))
|
||||
.count(&db),
|
||||
DayCountPair::find_by_statement(Statement::from_string(
|
||||
db.get_database_backend(),
|
||||
// 用 SeaORM 太复杂了,直接写个裸 SQL
|
||||
"
|
||||
SELECT
|
||||
dates.day AS day,
|
||||
COUNT(video.id) AS cnt
|
||||
FROM
|
||||
(
|
||||
SELECT
|
||||
STRFTIME('%Y-%m-%d', DATE('now', '-' || n || ' days', 'localtime')) AS day,
|
||||
DATETIME(DATE('now', '-' || n || ' days', 'localtime'), 'utc') AS start_utc_datetime,
|
||||
DATETIME(DATE('now', '-' || n || ' days', '+1 day', 'localtime'), 'utc') AS end_utc_datetime
|
||||
FROM
|
||||
(
|
||||
SELECT 0 AS n UNION ALL SELECT 1 UNION ALL SELECT 2 UNION ALL SELECT 3 UNION ALL SELECT 4 UNION ALL SELECT 5 UNION ALL SELECT 6
|
||||
)
|
||||
) AS dates
|
||||
LEFT JOIN
|
||||
video ON video.created_at >= dates.start_utc_datetime AND video.created_at < dates.end_utc_datetime
|
||||
GROUP BY
|
||||
dates.day
|
||||
ORDER BY
|
||||
dates.day;
|
||||
"
|
||||
))
|
||||
.all(&db),
|
||||
)?;
|
||||
Ok(ApiResponse::ok(DashBoardResponse {
|
||||
enabled_favorites,
|
||||
enabled_collections,
|
||||
enabled_submissions,
|
||||
enable_watch_later: enabled_watch_later > 0,
|
||||
videos_by_day,
|
||||
}))
|
||||
}
|
||||
146
crates/bili_sync/src/api/routes/me/mod.rs
Normal file
146
crates/bili_sync/src/api/routes/me/mod.rs
Normal file
@@ -0,0 +1,146 @@
|
||||
use std::collections::HashSet;
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::Result;
|
||||
use axum::Router;
|
||||
use axum::extract::{Extension, Query};
|
||||
use axum::routing::get;
|
||||
use bili_sync_entity::*;
|
||||
use sea_orm::{ColumnTrait, DatabaseConnection, EntityTrait, QueryFilter, QuerySelect};
|
||||
|
||||
use crate::api::request::{FollowedCollectionsRequest, FollowedUppersRequest};
|
||||
use crate::api::response::{
|
||||
CollectionWithSubscriptionStatus, CollectionsResponse, FavoriteWithSubscriptionStatus, FavoritesResponse,
|
||||
UpperWithSubscriptionStatus, UppersResponse,
|
||||
};
|
||||
use crate::api::wrapper::{ApiError, ApiResponse};
|
||||
use crate::bilibili::{BiliClient, Me};
|
||||
|
||||
pub(super) fn router() -> Router {
|
||||
Router::new()
|
||||
.route("/me/favorites", get(get_created_favorites))
|
||||
.route("/me/collections", get(get_followed_collections))
|
||||
.route("/me/uppers", get(get_followed_uppers))
|
||||
}
|
||||
|
||||
/// 获取当前用户创建的收藏夹
|
||||
pub async fn get_created_favorites(
|
||||
Extension(db): Extension<DatabaseConnection>,
|
||||
Extension(bili_client): Extension<Arc<BiliClient>>,
|
||||
) -> Result<ApiResponse<FavoritesResponse>, ApiError> {
|
||||
let me = Me::new(bili_client.as_ref());
|
||||
let bili_favorites = me.get_created_favorites().await?;
|
||||
|
||||
let favorites = if let Some(bili_favorites) = bili_favorites {
|
||||
// b 站收藏夹相关接口使用的所谓“fid”其实是该处的 id,即 fid + mid 后两位
|
||||
let bili_fids: Vec<_> = bili_favorites.iter().map(|fav| fav.id).collect();
|
||||
|
||||
let subscribed_fids: Vec<i64> = favorite::Entity::find()
|
||||
.select_only()
|
||||
.column(favorite::Column::FId)
|
||||
.filter(favorite::Column::FId.is_in(bili_fids))
|
||||
.into_tuple()
|
||||
.all(&db)
|
||||
.await?;
|
||||
let subscribed_set: HashSet<i64> = subscribed_fids.into_iter().collect();
|
||||
|
||||
bili_favorites
|
||||
.into_iter()
|
||||
.map(|fav| FavoriteWithSubscriptionStatus {
|
||||
title: fav.title,
|
||||
media_count: fav.media_count,
|
||||
// api 返回的 id 才是真实的 fid
|
||||
fid: fav.id,
|
||||
mid: fav.mid,
|
||||
subscribed: subscribed_set.contains(&fav.id),
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
vec![]
|
||||
};
|
||||
|
||||
Ok(ApiResponse::ok(FavoritesResponse { favorites }))
|
||||
}
|
||||
|
||||
/// 获取当前用户收藏的合集
|
||||
pub async fn get_followed_collections(
|
||||
Extension(db): Extension<DatabaseConnection>,
|
||||
Extension(bili_client): Extension<Arc<BiliClient>>,
|
||||
Query(params): Query<FollowedCollectionsRequest>,
|
||||
) -> Result<ApiResponse<CollectionsResponse>, ApiError> {
|
||||
let me = Me::new(bili_client.as_ref());
|
||||
let (page_num, page_size) = (params.page_num.unwrap_or(1), params.page_size.unwrap_or(50));
|
||||
let bili_collections = me.get_followed_collections(page_num, page_size).await?;
|
||||
|
||||
let collections = if let Some(collection_list) = bili_collections.list {
|
||||
let bili_sids: Vec<_> = collection_list.iter().map(|col| col.id).collect();
|
||||
|
||||
let subscribed_ids: Vec<i64> = collection::Entity::find()
|
||||
.select_only()
|
||||
.column(collection::Column::SId)
|
||||
.filter(collection::Column::SId.is_in(bili_sids))
|
||||
.into_tuple()
|
||||
.all(&db)
|
||||
.await?;
|
||||
let subscribed_set: HashSet<i64> = subscribed_ids.into_iter().collect();
|
||||
|
||||
collection_list
|
||||
.into_iter()
|
||||
.map(|col| CollectionWithSubscriptionStatus {
|
||||
title: col.title,
|
||||
sid: col.id,
|
||||
mid: col.mid,
|
||||
invalid: col.state == 1,
|
||||
subscribed: subscribed_set.contains(&col.id),
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
vec![]
|
||||
};
|
||||
|
||||
Ok(ApiResponse::ok(CollectionsResponse {
|
||||
collections,
|
||||
total: bili_collections.count,
|
||||
}))
|
||||
}
|
||||
|
||||
/// 获取当前用户关注的 UP 主
|
||||
pub async fn get_followed_uppers(
|
||||
Extension(db): Extension<DatabaseConnection>,
|
||||
Extension(bili_client): Extension<Arc<BiliClient>>,
|
||||
Query(params): Query<FollowedUppersRequest>,
|
||||
) -> Result<ApiResponse<UppersResponse>, ApiError> {
|
||||
let me = Me::new(bili_client.as_ref());
|
||||
let (page_num, page_size) = (params.page_num.unwrap_or(1), params.page_size.unwrap_or(20));
|
||||
let bili_uppers = me.get_followed_uppers(page_num, page_size).await?;
|
||||
|
||||
let bili_uid: Vec<_> = bili_uppers.list.iter().map(|upper| upper.mid).collect();
|
||||
|
||||
let subscribed_ids: Vec<i64> = submission::Entity::find()
|
||||
.select_only()
|
||||
.column(submission::Column::UpperId)
|
||||
.filter(submission::Column::UpperId.is_in(bili_uid))
|
||||
.into_tuple()
|
||||
.all(&db)
|
||||
.await?;
|
||||
let subscribed_set: HashSet<i64> = subscribed_ids.into_iter().collect();
|
||||
|
||||
let uppers = bili_uppers
|
||||
.list
|
||||
.into_iter()
|
||||
.map(|upper| UpperWithSubscriptionStatus {
|
||||
mid: upper.mid,
|
||||
// 官方没有提供字段,但是可以使用这种方式简单判断下
|
||||
invalid: upper.uname == "账号已注销" && upper.face == "https://i0.hdslb.com/bfs/face/member/noface.jpg",
|
||||
uname: upper.uname,
|
||||
face: upper.face,
|
||||
sign: upper.sign,
|
||||
subscribed: subscribed_set.contains(&upper.mid),
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(ApiResponse::ok(UppersResponse {
|
||||
uppers,
|
||||
total: bili_uppers.total,
|
||||
}))
|
||||
}
|
||||
58
crates/bili_sync/src/api/routes/mod.rs
Normal file
58
crates/bili_sync/src/api/routes/mod.rs
Normal file
@@ -0,0 +1,58 @@
|
||||
use axum::extract::Request;
|
||||
use axum::http::HeaderMap;
|
||||
use axum::middleware::Next;
|
||||
use axum::response::{IntoResponse, Response};
|
||||
use axum::{Router, middleware};
|
||||
use base64::Engine;
|
||||
use base64::prelude::BASE64_URL_SAFE_NO_PAD;
|
||||
use reqwest::StatusCode;
|
||||
|
||||
use crate::api::wrapper::ApiResponse;
|
||||
use crate::config::VersionedConfig;
|
||||
|
||||
mod config;
|
||||
mod dashboard;
|
||||
mod me;
|
||||
mod video_sources;
|
||||
mod videos;
|
||||
mod ws;
|
||||
|
||||
pub use ws::{LogHelper, MAX_HISTORY_LOGS};
|
||||
|
||||
pub fn router() -> Router {
|
||||
Router::new().nest(
|
||||
"/api",
|
||||
config::router()
|
||||
.merge(me::router())
|
||||
.merge(video_sources::router())
|
||||
.merge(videos::router())
|
||||
.merge(dashboard::router())
|
||||
.merge(ws::router())
|
||||
.layer(middleware::from_fn(auth)),
|
||||
)
|
||||
}
|
||||
|
||||
/// Middleware: authenticate each request against the configured auth token.
///
/// Two schemes are accepted:
/// 1. an `Authorization` header whose value equals the token verbatim;
/// 2. a `Sec-WebSocket-Protocol` header carrying the token base64-url-encoded
///    (no padding) — presumably for websocket clients that cannot set an
///    `Authorization` header; confirm against the frontend. The header value
///    is echoed back on the response so the websocket handshake completes.
pub async fn auth(mut headers: HeaderMap, request: Request, next: Next) -> Result<Response, StatusCode> {
    let config = VersionedConfig::get().load();
    let token = config.auth_token.as_str();
    if headers
        .get("Authorization")
        .and_then(|v| v.to_str().ok())
        .is_some_and(|s| s == token)
    {
        return Ok(next.run(request).await);
    }
    // NOTE(review): `remove` mutates only this extracted copy of the headers;
    // the inner `request` still carries its original Sec-WebSocket-Protocol.
    if let Some(protocol) = headers.remove("Sec-WebSocket-Protocol")
        && protocol
            .to_str()
            .ok()
            .and_then(|s| BASE64_URL_SAFE_NO_PAD.decode(s).ok())
            .is_some_and(|s| s == token.as_bytes())
    {
        let mut resp = next.run(request).await;
        // Echo the subprotocol back so the client accepts the handshake.
        resp.headers_mut().insert("Sec-WebSocket-Protocol", protocol);
        return Ok(resp);
    }
    Ok(ApiResponse::<()>::unauthorized("auth token does not match").into_response())
}
|
||||
363
crates/bili_sync/src/api/routes/video_sources/mod.rs
Normal file
363
crates/bili_sync/src/api/routes/video_sources/mod.rs
Normal file
@@ -0,0 +1,363 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::Result;
|
||||
use axum::Router;
|
||||
use axum::extract::{Extension, Path};
|
||||
use axum::routing::{get, post, put};
|
||||
use bili_sync_entity::rule::Rule;
|
||||
use bili_sync_entity::*;
|
||||
use bili_sync_migration::Expr;
|
||||
use sea_orm::ActiveValue::Set;
|
||||
use sea_orm::entity::prelude::*;
|
||||
use sea_orm::{ColumnTrait, DatabaseConnection, EntityTrait, QuerySelect, TransactionTrait};
|
||||
|
||||
use crate::adapter::_ActiveModel;
|
||||
use crate::api::error::InnerApiError;
|
||||
use crate::api::request::{
|
||||
InsertCollectionRequest, InsertFavoriteRequest, InsertSubmissionRequest, UpdateVideoSourceRequest,
|
||||
};
|
||||
use crate::api::response::{
|
||||
UpdateVideoSourceResponse, VideoSource, VideoSourceDetail, VideoSourcesDetailsResponse, VideoSourcesResponse,
|
||||
};
|
||||
use crate::api::wrapper::{ApiError, ApiResponse, ValidatedJson};
|
||||
use crate::bilibili::{BiliClient, Collection, CollectionItem, FavoriteList, Submission};
|
||||
use crate::utils::rule::FieldEvaluatable;
|
||||
|
||||
/// Routes for managing video sources: listing, details, updates, rule
/// evaluation, and creating new subscriptions.
pub(super) fn router() -> Router {
    Router::new()
        .route("/video-sources", get(get_video_sources))
        .route("/video-sources/details", get(get_video_sources_details))
        .route("/video-sources/{type}/{id}", put(update_video_source))
        .route("/video-sources/{type}/{id}/evaluate", post(evaluate_video_source))
        .route("/video-sources/favorites", post(insert_favorite))
        .route("/video-sources/collections", post(insert_collection))
        .route("/video-sources/submissions", post(insert_submission))
}
|
||||
|
||||
/// List every video source (id + display name only), grouped by kind.
pub async fn get_video_sources(
    Extension(db): Extension<DatabaseConnection>,
) -> Result<ApiResponse<VideoSourcesResponse>, ApiError> {
    // Run the four per-kind lookups concurrently.
    let (collection, favorite, submission, mut watch_later) = tokio::try_join!(
        collection::Entity::find()
            .select_only()
            .columns([collection::Column::Id, collection::Column::Name])
            .into_model::<VideoSource>()
            .all(&db),
        favorite::Entity::find()
            .select_only()
            .columns([favorite::Column::Id, favorite::Column::Name])
            .into_model::<VideoSource>()
            .all(&db),
        submission::Entity::find()
            .select_only()
            .column(submission::Column::Id)
            // Submissions display the uploader's name.
            .column_as(submission::Column::UpperName, "name")
            .into_model::<VideoSource>()
            .all(&db),
        watch_later::Entity::find()
            .select_only()
            .column(watch_later::Column::Id)
            // Watch-later has a fixed display name supplied as a constant.
            .column_as(Expr::value("稍后再看"), "name")
            .into_model::<VideoSource>()
            .all(&db)
    )?;
    // Watch-later is a special source: when no row exists yet, expose a
    // default placeholder entry so the frontend can still configure it.
    if watch_later.is_empty() {
        watch_later.push(VideoSource {
            id: 1,
            name: "稍后再看".to_string(),
        });
    }
    Ok(ApiResponse::ok(VideoSourcesResponse {
        collection,
        favorite,
        submission,
        watch_later,
    }))
}
|
||||
|
||||
/// Return full details (path, rule, enabled flag) for every video source,
/// grouped by kind, with each rule rendered into `rule_display`.
pub async fn get_video_sources_details(
    Extension(db): Extension<DatabaseConnection>,
) -> Result<ApiResponse<VideoSourcesDetailsResponse>, ApiError> {
    // Fetch all four source kinds concurrently.
    let (mut collections, mut favorites, mut submissions, mut watch_later) = tokio::try_join!(
        collection::Entity::find()
            .select_only()
            .columns([
                collection::Column::Id,
                collection::Column::Name,
                collection::Column::Path,
                collection::Column::Rule,
                collection::Column::Enabled
            ])
            .into_model::<VideoSourceDetail>()
            .all(&db),
        favorite::Entity::find()
            .select_only()
            .columns([
                favorite::Column::Id,
                favorite::Column::Name,
                favorite::Column::Path,
                favorite::Column::Rule,
                favorite::Column::Enabled
            ])
            .into_model::<VideoSourceDetail>()
            .all(&db),
        submission::Entity::find()
            .select_only()
            // Submissions display the uploader's name.
            .column_as(submission::Column::UpperName, "name")
            .columns([
                submission::Column::Id,
                submission::Column::Path,
                submission::Column::Enabled,
                submission::Column::Rule
            ])
            .into_model::<VideoSourceDetail>()
            .all(&db),
        watch_later::Entity::find()
            .select_only()
            // Watch-later has a fixed display name supplied as a constant.
            .column_as(Expr::value("稍后再看"), "name")
            .columns([
                watch_later::Column::Id,
                watch_later::Column::Path,
                watch_later::Column::Enabled,
                watch_later::Column::Rule
            ])
            .into_model::<VideoSourceDetail>()
            .all(&db)
    )?;
    // Watch-later is special: expose a disabled placeholder when no row exists.
    if watch_later.is_empty() {
        watch_later.push(VideoSourceDetail {
            id: 1,
            name: "稍后再看".to_string(),
            path: String::new(),
            rule: None,
            rule_display: None,
            enabled: false,
        })
    }
    // Render each source's rule into its human-readable form.
    for sources in [&mut collections, &mut favorites, &mut submissions, &mut watch_later] {
        sources.iter_mut().for_each(|item| {
            if let Some(rule) = &item.rule {
                item.rule_display = Some(rule.to_string());
            }
        });
    }
    Ok(ApiResponse::ok(VideoSourcesDetailsResponse {
        collections,
        favorites,
        submissions,
        watch_later,
    }))
}
|
||||
|
||||
/// Update one video source's path, enabled flag, and rule.
///
/// `source_type` is the plural route segment ("collections", "favorites",
/// "submissions", "watch_later"). Returns the rendered rule text so the
/// frontend can display it without re-parsing.
pub async fn update_video_source(
    Path((source_type, id)): Path<(String, i32)>,
    Extension(db): Extension<DatabaseConnection>,
    ValidatedJson(request): ValidatedJson<UpdateVideoSourceRequest>,
) -> Result<ApiResponse<UpdateVideoSourceResponse>, ApiError> {
    // Render the rule before `request.rule` is moved into an active model.
    let rule_display = request.rule.as_ref().map(|rule| rule.to_string());
    let active_model = match source_type.as_str() {
        "collections" => collection::Entity::find_by_id(id).one(&db).await?.map(|model| {
            let mut active_model: collection::ActiveModel = model.into();
            active_model.path = Set(request.path);
            active_model.enabled = Set(request.enabled);
            active_model.rule = Set(request.rule);
            _ActiveModel::Collection(active_model)
        }),
        "favorites" => favorite::Entity::find_by_id(id).one(&db).await?.map(|model| {
            let mut active_model: favorite::ActiveModel = model.into();
            active_model.path = Set(request.path);
            active_model.enabled = Set(request.enabled);
            active_model.rule = Set(request.rule);
            _ActiveModel::Favorite(active_model)
        }),
        "submissions" => submission::Entity::find_by_id(id).one(&db).await?.map(|model| {
            let mut active_model: submission::ActiveModel = model.into();
            active_model.path = Set(request.path);
            active_model.enabled = Set(request.enabled);
            active_model.rule = Set(request.rule);
            _ActiveModel::Submission(active_model)
        }),
        "watch_later" => match watch_later::Entity::find_by_id(id).one(&db).await? {
            // Watch-later needs special handling: the GET endpoints return a
            // fake record with id 1 when no row exists, so this request may be
            // either an update or an insert.
            Some(model) => {
                // A row exists — update it in place.
                let mut active_model: watch_later::ActiveModel = model.into();
                active_model.path = Set(request.path);
                active_model.enabled = Set(request.enabled);
                active_model.rule = Set(request.rule);
                Some(_ActiveModel::WatchLater(active_model))
            }
            None => {
                if id != 1 {
                    None
                } else {
                    // No row and id is 1 — insert a fresh watch-later record.
                    Some(_ActiveModel::WatchLater(watch_later::ActiveModel {
                        path: Set(request.path),
                        enabled: Set(request.enabled),
                        rule: Set(request.rule),
                        ..Default::default()
                    }))
                }
            }
        },
        _ => return Err(InnerApiError::BadRequest("Invalid video source type".to_string()).into()),
    };
    let Some(active_model) = active_model else {
        return Err(InnerApiError::NotFound(id).into());
    };
    active_model.save(&db).await?;
    Ok(ApiResponse::ok(UpdateVideoSourceResponse { rule_display }))
}
|
||||
|
||||
/// Re-evaluate a source's download rule against every video belonging to it,
/// rewriting each video's `should_download` flag in bulk.
pub async fn evaluate_video_source(
    Path((source_type, id)): Path<(String, i32)>,
    Extension(db): Extension<DatabaseConnection>,
) -> Result<ApiResponse<bool>, ApiError> {
    // Look up the source's rule and the filter that selects its videos.
    let (rule, filter_condition) = match source_type.as_str() {
        "collections" => (
            collection::Entity::find_by_id(id)
                .select_only()
                .column(collection::Column::Rule)
                .into_tuple::<Option<Rule>>()
                .one(&db)
                .await?
                // Flatten "row not found" and "rule column is NULL" into None.
                .and_then(|r| r),
            video::Column::CollectionId.eq(id),
        ),
        "favorites" => (
            favorite::Entity::find_by_id(id)
                .select_only()
                .column(favorite::Column::Rule)
                .into_tuple::<Option<Rule>>()
                .one(&db)
                .await?
                .and_then(|r| r),
            video::Column::FavoriteId.eq(id),
        ),
        "submissions" => (
            submission::Entity::find_by_id(id)
                .select_only()
                .column(submission::Column::Rule)
                .into_tuple::<Option<Rule>>()
                .one(&db)
                .await?
                .and_then(|r| r),
            video::Column::SubmissionId.eq(id),
        ),
        "watch_later" => (
            watch_later::Entity::find_by_id(id)
                .select_only()
                .column(watch_later::Column::Rule)
                .into_tuple::<Option<Rule>>()
                .one(&db)
                .await?
                .and_then(|r| r),
            video::Column::WatchLaterId.eq(id),
        ),
        _ => return Err(InnerApiError::BadRequest("Invalid video source type".to_string()).into()),
    };
    // Load every video of this source together with its pages, since the rule
    // is evaluated against both.
    let videos: Vec<(video::Model, Vec<page::Model>)> = video::Entity::find()
        .filter(filter_condition)
        .find_with_related(page::Entity)
        .all(&db)
        .await?;
    let video_should_download_pairs = videos
        .into_iter()
        .map(|(video, pages)| (video.id, rule.evaluate_model(&video, &pages)))
        .collect::<Vec<(i32, bool)>>();
    // Bulk-update in chunks inside one transaction. Only an i32 id and a bool
    // are interpolated into the SQL, so string injection is not possible here.
    let txn = db.begin().await?;
    for chunk in video_should_download_pairs.chunks(500) {
        let sql = format!(
            "WITH tempdata(id, should_download) AS (VALUES {}) \
            UPDATE video \
            SET should_download = tempdata.should_download \
            FROM tempdata \
            WHERE video.id = tempdata.id",
            chunk
                .iter()
                .map(|item| format!("({}, {})", item.0, item.1))
                .collect::<Vec<_>>()
                .join(", ")
        );
        txn.execute_unprepared(&sql).await?;
    }
    txn.commit().await?;
    Ok(ApiResponse::ok(true))
}
|
||||
|
||||
/// 新增收藏夹订阅
|
||||
pub async fn insert_favorite(
|
||||
Extension(db): Extension<DatabaseConnection>,
|
||||
Extension(bili_client): Extension<Arc<BiliClient>>,
|
||||
ValidatedJson(request): ValidatedJson<InsertFavoriteRequest>,
|
||||
) -> Result<ApiResponse<bool>, ApiError> {
|
||||
let favorite = FavoriteList::new(bili_client.as_ref(), request.fid.to_string());
|
||||
let favorite_info = favorite.get_info().await?;
|
||||
favorite::Entity::insert(favorite::ActiveModel {
|
||||
f_id: Set(favorite_info.id),
|
||||
name: Set(favorite_info.title.clone()),
|
||||
path: Set(request.path),
|
||||
enabled: Set(false),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&db)
|
||||
.await?;
|
||||
Ok(ApiResponse::ok(true))
|
||||
}
|
||||
|
||||
/// 新增合集/列表订阅
|
||||
pub async fn insert_collection(
|
||||
Extension(db): Extension<DatabaseConnection>,
|
||||
Extension(bili_client): Extension<Arc<BiliClient>>,
|
||||
ValidatedJson(request): ValidatedJson<InsertCollectionRequest>,
|
||||
) -> Result<ApiResponse<bool>, ApiError> {
|
||||
let collection = Collection::new(
|
||||
bili_client.as_ref(),
|
||||
CollectionItem {
|
||||
sid: request.sid.to_string(),
|
||||
mid: request.mid.to_string(),
|
||||
collection_type: request.collection_type,
|
||||
},
|
||||
);
|
||||
let collection_info = collection.get_info().await?;
|
||||
collection::Entity::insert(collection::ActiveModel {
|
||||
s_id: Set(collection_info.sid),
|
||||
m_id: Set(collection_info.mid),
|
||||
r#type: Set(collection_info.collection_type.into()),
|
||||
name: Set(collection_info.name.clone()),
|
||||
path: Set(request.path),
|
||||
enabled: Set(false),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&db)
|
||||
.await?;
|
||||
|
||||
Ok(ApiResponse::ok(true))
|
||||
}
|
||||
|
||||
/// 新增投稿订阅
|
||||
pub async fn insert_submission(
|
||||
Extension(db): Extension<DatabaseConnection>,
|
||||
Extension(bili_client): Extension<Arc<BiliClient>>,
|
||||
ValidatedJson(request): ValidatedJson<InsertSubmissionRequest>,
|
||||
) -> Result<ApiResponse<bool>, ApiError> {
|
||||
let submission = Submission::new(bili_client.as_ref(), request.upper_id.to_string());
|
||||
let upper = submission.get_info().await?;
|
||||
submission::Entity::insert(submission::ActiveModel {
|
||||
upper_id: Set(upper.mid.parse()?),
|
||||
upper_name: Set(upper.name),
|
||||
path: Set(request.path),
|
||||
enabled: Set(false),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&db)
|
||||
.await?;
|
||||
Ok(ApiResponse::ok(true))
|
||||
}
|
||||
259
crates/bili_sync/src/api/routes/videos/mod.rs
Normal file
259
crates/bili_sync/src/api/routes/videos/mod.rs
Normal file
@@ -0,0 +1,259 @@
|
||||
use std::collections::HashSet;
|
||||
|
||||
use anyhow::Result;
|
||||
use axum::extract::{Extension, Path, Query};
|
||||
use axum::routing::{get, post};
|
||||
use axum::{Json, Router};
|
||||
use bili_sync_entity::*;
|
||||
use sea_orm::{
|
||||
ColumnTrait, DatabaseConnection, EntityTrait, PaginatorTrait, QueryFilter, QueryOrder, TransactionTrait,
|
||||
};
|
||||
|
||||
use crate::api::error::InnerApiError;
|
||||
use crate::api::helper::{update_page_download_status, update_video_download_status};
|
||||
use crate::api::request::{ResetRequest, UpdateVideoStatusRequest, VideosRequest};
|
||||
use crate::api::response::{
|
||||
PageInfo, ResetAllVideosResponse, ResetVideoResponse, UpdateVideoStatusResponse, VideoInfo, VideoResponse,
|
||||
VideosResponse,
|
||||
};
|
||||
use crate::api::wrapper::{ApiError, ApiResponse, ValidatedJson};
|
||||
use crate::utils::status::{PageStatus, VideoStatus};
|
||||
|
||||
/// Routes for querying videos and mutating their download state.
pub(super) fn router() -> Router {
    Router::new()
        .route("/videos", get(get_videos))
        .route("/videos/{id}", get(get_video))
        .route("/videos/{id}/reset", post(reset_video))
        .route("/videos/reset-all", post(reset_all_videos))
        .route("/videos/{id}/update-status", post(update_video_status))
}
|
||||
|
||||
/// 列出视频的基本信息,支持根据视频来源筛选、名称查找和分页
|
||||
pub async fn get_videos(
|
||||
Extension(db): Extension<DatabaseConnection>,
|
||||
Query(params): Query<VideosRequest>,
|
||||
) -> Result<ApiResponse<VideosResponse>, ApiError> {
|
||||
let mut query = video::Entity::find();
|
||||
for (field, column) in [
|
||||
(params.collection, video::Column::CollectionId),
|
||||
(params.favorite, video::Column::FavoriteId),
|
||||
(params.submission, video::Column::SubmissionId),
|
||||
(params.watch_later, video::Column::WatchLaterId),
|
||||
] {
|
||||
if let Some(id) = field {
|
||||
query = query.filter(column.eq(id));
|
||||
}
|
||||
}
|
||||
if let Some(query_word) = params.query {
|
||||
query = query.filter(video::Column::Name.contains(query_word));
|
||||
}
|
||||
let total_count = query.clone().count(&db).await?;
|
||||
let (page, page_size) = if let (Some(page), Some(page_size)) = (params.page, params.page_size) {
|
||||
(page, page_size)
|
||||
} else {
|
||||
(0, 10)
|
||||
};
|
||||
Ok(ApiResponse::ok(VideosResponse {
|
||||
videos: query
|
||||
.order_by_desc(video::Column::Id)
|
||||
.into_partial_model::<VideoInfo>()
|
||||
.paginate(&db, page_size)
|
||||
.fetch_page(page)
|
||||
.await?,
|
||||
total_count,
|
||||
}))
|
||||
}
|
||||
|
||||
pub async fn get_video(
|
||||
Path(id): Path<i32>,
|
||||
Extension(db): Extension<DatabaseConnection>,
|
||||
) -> Result<ApiResponse<VideoResponse>, ApiError> {
|
||||
let (video_info, pages_info) = tokio::try_join!(
|
||||
video::Entity::find_by_id(id).into_partial_model::<VideoInfo>().one(&db),
|
||||
page::Entity::find()
|
||||
.filter(page::Column::VideoId.eq(id))
|
||||
.order_by_asc(page::Column::Cid)
|
||||
.into_partial_model::<PageInfo>()
|
||||
.all(&db)
|
||||
)?;
|
||||
let Some(video_info) = video_info else {
|
||||
return Err(InnerApiError::NotFound(id).into());
|
||||
};
|
||||
Ok(ApiResponse::ok(VideoResponse {
|
||||
video: video_info,
|
||||
pages: pages_info,
|
||||
}))
|
||||
}
|
||||
|
||||
/// Reset the failed download statuses of one video and its pages so they will
/// be retried. With `request.force`, force-resettable statuses are also
/// cleared. Returns which video/pages were actually reset.
pub async fn reset_video(
    Path(id): Path<i32>,
    Extension(db): Extension<DatabaseConnection>,
    Json(request): Json<ResetRequest>,
) -> Result<ApiResponse<ResetVideoResponse>, ApiError> {
    // Load the video and its pages concurrently.
    let (video_info, pages_info) = tokio::try_join!(
        video::Entity::find_by_id(id).into_partial_model::<VideoInfo>().one(&db),
        page::Entity::find()
            .filter(page::Column::VideoId.eq(id))
            .order_by_asc(page::Column::Cid)
            .into_partial_model::<PageInfo>()
            .all(&db)
    )?;
    let Some(mut video_info) = video_info else {
        return Err(InnerApiError::NotFound(id).into());
    };
    // Keep only the pages whose status actually changed.
    let resetted_pages_info = pages_info
        .into_iter()
        .filter_map(|mut page_info| {
            let mut page_status = PageStatus::from(page_info.download_status);
            // Short-circuit: force_reset_failed only runs when force is set.
            if (request.force && page_status.force_reset_failed()) || page_status.reset_failed() {
                page_info.download_status = page_status.into();
                Some(page_info)
            } else {
                None
            }
        })
        .collect::<Vec<_>>();
    let mut video_status = VideoStatus::from(video_info.download_status);
    let mut video_resetted = (request.force && video_status.force_reset_failed()) || video_status.reset_failed();
    if !resetted_pages_info.is_empty() {
        video_status.set(4, 0); // reset the "page download" sub-status to 0
        video_resetted = true;
    }
    let resetted_videos_info = if video_resetted {
        video_info.download_status = video_status.into();
        vec![&video_info]
    } else {
        vec![]
    };
    let resetted = !resetted_videos_info.is_empty() || !resetted_pages_info.is_empty();
    if resetted {
        // Persist video and page updates atomically.
        let txn = db.begin().await?;
        if !resetted_videos_info.is_empty() {
            // At most one element here, so no batching is needed.
            update_video_download_status(&txn, &resetted_videos_info, None).await?;
        }
        if !resetted_pages_info.is_empty() {
            update_page_download_status(&txn, &resetted_pages_info, Some(500)).await?;
        }
        txn.commit().await?;
    }
    Ok(ApiResponse::ok(ResetVideoResponse {
        resetted,
        video: video_info,
        pages: resetted_pages_info,
    }))
}
|
||||
|
||||
/// Reset the failed download statuses of ALL videos and pages. With
/// `request.force`, force-resettable statuses are also cleared. Returns the
/// counts of videos and pages that were reset.
pub async fn reset_all_videos(
    Extension(db): Extension<DatabaseConnection>,
    Json(request): Json<ResetRequest>,
) -> Result<ApiResponse<ResetAllVideosResponse>, ApiError> {
    // Load all videos and all pages first, concurrently.
    let (all_videos, all_pages) = tokio::try_join!(
        video::Entity::find().into_partial_model::<VideoInfo>().all(&db),
        page::Entity::find().into_partial_model::<PageInfo>().all(&db)
    )?;
    // Keep only the pages whose status actually changed.
    let resetted_pages_info = all_pages
        .into_iter()
        .filter_map(|mut page_info| {
            let mut page_status = PageStatus::from(page_info.download_status);
            if (request.force && page_status.force_reset_failed()) || page_status.reset_failed() {
                page_info.download_status = page_status.into();
                Some(page_info)
            } else {
                None
            }
        })
        .collect::<Vec<_>>();
    // Videos owning a reset page must also have their page-download slot cleared.
    let video_ids_with_resetted_pages: HashSet<i32> = resetted_pages_info.iter().map(|page| page.video_id).collect();
    let resetted_videos_info = all_videos
        .into_iter()
        .filter_map(|mut video_info| {
            let mut video_status = VideoStatus::from(video_info.download_status);
            let mut video_resetted =
                (request.force && video_status.force_reset_failed()) || video_status.reset_failed();
            if video_ids_with_resetted_pages.contains(&video_info.id) {
                video_status.set(4, 0); // reset the "page download" sub-status to 0
                video_resetted = true;
            }
            if video_resetted {
                video_info.download_status = video_status.into();
                Some(video_info)
            } else {
                None
            }
        })
        .collect::<Vec<_>>();
    let has_video_updates = !resetted_videos_info.is_empty();
    let has_page_updates = !resetted_pages_info.is_empty();
    if has_video_updates || has_page_updates {
        // Persist everything atomically, batched in chunks of 500 rows.
        let txn = db.begin().await?;
        if has_video_updates {
            update_video_download_status(&txn, &resetted_videos_info, Some(500)).await?;
        }
        if has_page_updates {
            update_page_download_status(&txn, &resetted_pages_info, Some(500)).await?;
        }
        txn.commit().await?;
    }
    Ok(ApiResponse::ok(ResetAllVideosResponse {
        resetted: has_video_updates || has_page_updates,
        resetted_videos_count: resetted_videos_info.len(),
        resetted_pages_count: resetted_pages_info.len(),
    }))
}
|
||||
|
||||
/// Apply explicit status overrides to one video and, optionally, to selected
/// pages of that video. Each update sets one indexed status slot to a value.
pub async fn update_video_status(
    Path(id): Path<i32>,
    Extension(db): Extension<DatabaseConnection>,
    ValidatedJson(request): ValidatedJson<UpdateVideoStatusRequest>,
) -> Result<ApiResponse<UpdateVideoStatusResponse>, ApiError> {
    // Load the video and its pages concurrently.
    let (video_info, mut pages_info) = tokio::try_join!(
        video::Entity::find_by_id(id).into_partial_model::<VideoInfo>().one(&db),
        page::Entity::find()
            .filter(page::Column::VideoId.eq(id))
            .order_by_asc(page::Column::Cid)
            .into_partial_model::<PageInfo>()
            .all(&db)
    )?;
    let Some(mut video_info) = video_info else {
        return Err(InnerApiError::NotFound(id).into());
    };
    // Apply all video-level slot updates in request order.
    let mut video_status = VideoStatus::from(video_info.download_status);
    for update in &request.video_updates {
        video_status.set(update.status_index, update.status_value);
    }
    video_info.download_status = video_status.into();
    let mut updated_pages_info = Vec::new();
    // Index pages by id; `remove` below also ensures each page is updated at
    // most once even if the request repeats a page_id.
    let mut page_id_map = pages_info
        .iter_mut()
        .map(|page| (page.id, page))
        .collect::<std::collections::HashMap<_, _>>();
    for page_update in &request.page_updates {
        if let Some(page_info) = page_id_map.remove(&page_update.page_id) {
            let mut page_status = PageStatus::from(page_info.download_status);
            for update in &page_update.updates {
                page_status.set(update.status_index, update.status_value);
            }
            page_info.download_status = page_status.into();
            updated_pages_info.push(page_info);
        }
    }
    let has_video_updates = !request.video_updates.is_empty();
    let has_page_updates = !updated_pages_info.is_empty();
    if has_video_updates || has_page_updates {
        // Persist both kinds of updates atomically.
        let txn = db.begin().await?;
        if has_video_updates {
            update_video_download_status(&txn, &[&video_info], None).await?;
        }
        if has_page_updates {
            update_page_download_status(&txn, &updated_pages_info, None).await?;
        }
        txn.commit().await?;
    }
    Ok(ApiResponse::ok(UpdateVideoStatusResponse {
        success: has_video_updates || has_page_updates,
        video: video_info,
        pages: pages_info,
    }))
}
|
||||
54
crates/bili_sync/src/api/routes/ws/log_helper.rs
Normal file
54
crates/bili_sync/src/api/routes/ws/log_helper.rs
Normal file
@@ -0,0 +1,54 @@
|
||||
use std::collections::VecDeque;
|
||||
use std::sync::Arc;
|
||||
|
||||
use parking_lot::Mutex;
|
||||
use tokio::sync::broadcast;
|
||||
use tracing_subscriber::fmt::MakeWriter;
|
||||
|
||||
/// Maximum number of log lines kept in the in-memory history buffer.
pub const MAX_HISTORY_LOGS: usize = 30;

/// LogHelper bundles the broadcast sender that pushes log lines to live
/// subscribers together with a bounded buffer of recent lines.
pub struct LogHelper {
    // Broadcast channel: each log subscriber obtains its own receiver.
    pub sender: broadcast::Sender<String>,
    // Shared queue of the most recent lines, capped at MAX_HISTORY_LOGS.
    pub log_history: Arc<Mutex<VecDeque<String>>>,
}

impl LogHelper {
    /// Create a helper from an existing sender and shared history buffer.
    pub fn new(sender: broadcast::Sender<String>, log_history: Arc<Mutex<VecDeque<String>>>) -> Self {
        LogHelper { sender, log_history }
    }
}
|
||||
|
||||
impl<'a> MakeWriter<'a> for LogHelper {
    type Writer = Self;

    /// tracing-subscriber requests a writer per event; hand out a clone
    /// (both fields are cheap handles — see the Clone impl below in this file).
    fn make_writer(&'a self) -> Self::Writer {
        self.clone()
    }
}
|
||||
|
||||
impl std::io::Write for LogHelper {
|
||||
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
|
||||
let log_message = String::from_utf8_lossy(buf).to_string();
|
||||
let _ = self.sender.send(log_message.clone());
|
||||
let mut history = self.log_history.lock();
|
||||
history.push_back(log_message);
|
||||
if history.len() > MAX_HISTORY_LOGS {
|
||||
history.pop_front();
|
||||
}
|
||||
Ok(buf.len())
|
||||
}
|
||||
|
||||
fn flush(&mut self) -> std::io::Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for LogHelper {
|
||||
fn clone(&self) -> Self {
|
||||
LogHelper {
|
||||
sender: self.sender.clone(),
|
||||
log_history: self.log_history.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
263
crates/bili_sync/src/api/routes/ws/mod.rs
Normal file
263
crates/bili_sync/src/api/routes/ws/mod.rs
Normal file
@@ -0,0 +1,263 @@
|
||||
mod log_helper;
|
||||
|
||||
use std::sync::{Arc, LazyLock};
|
||||
use std::time::Duration;
|
||||
|
||||
use axum::extract::WebSocketUpgrade;
|
||||
use axum::extract::ws::{Message, WebSocket};
|
||||
use axum::response::IntoResponse;
|
||||
use axum::routing::any;
|
||||
use axum::{Extension, Router};
|
||||
use dashmap::DashMap;
|
||||
use futures::stream::{SplitSink, SplitStream};
|
||||
use futures::{SinkExt, StreamExt, future};
|
||||
pub use log_helper::{LogHelper, MAX_HISTORY_LOGS};
|
||||
use parking_lot::RwLock;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sysinfo::{
|
||||
CpuRefreshKind, DiskRefreshKind, Disks, MemoryRefreshKind, ProcessRefreshKind, RefreshKind, System, get_current_pid,
|
||||
};
|
||||
use tokio::pin;
|
||||
use tokio::task::JoinHandle;
|
||||
use tokio_stream::wrappers::{BroadcastStream, IntervalStream, WatchStream};
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::api::response::SysInfo;
|
||||
use crate::utils::task_notifier::{TASK_STATUS_NOTIFIER, TaskStatus};
|
||||
|
||||
// Process-wide handler shared by every websocket connection so that the
// sysinfo polling work can be shared instead of duplicated per connection.
static WEBSOCKET_HANDLER: LazyLock<WebSocketHandler> = LazyLock::new(WebSocketHandler::new);

/// Route exposing the single websocket endpoint.
pub(super) fn router() -> Router {
    Router::new().route("/ws", any(websocket_handler))
}
|
||||
|
||||
/// Upgrade the HTTP request to a websocket and hand the connection to
/// `handle_socket`, passing along the shared log writer.
async fn websocket_handler(ws: WebSocketUpgrade, Extension(log_writer): Extension<LogHelper>) -> impl IntoResponse {
    ws.on_upgrade(|socket| handle_socket(socket, log_writer))
}
|
||||
|
||||
// Event categories a client may subscribe to or unsubscribe from.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
enum EventType {
    Logs,
    Tasks,
    SysInfo,
}

// Messages received from the client over the websocket.
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
enum ClientEvent {
    Subscribe(EventType),
    Unsubscribe(EventType),
}

// Messages pushed from the server to the client.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
enum ServerEvent {
    Logs(String),
    Tasks(Arc<TaskStatus>),
    SysInfo(Arc<SysInfo>),
}
|
||||
|
||||
/// Shared state for websocket handling.
struct WebSocketHandler {
    // Per-connection senders for pushing sysinfo events, keyed by connection uuid.
    sysinfo_subscribers: Arc<DashMap<Uuid, tokio::sync::mpsc::Sender<ServerEvent>>>,
    // Handle of the shared background sysinfo task, when one is running.
    sysinfo_handles: RwLock<Option<JoinHandle<()>>>,
}
|
||||
|
||||
impl WebSocketHandler {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
sysinfo_subscribers: Arc::new(DashMap::new()),
|
||||
sysinfo_handles: RwLock::new(None),
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_sender(
|
||||
&self,
|
||||
mut sender: SplitSink<WebSocket, Message>,
|
||||
mut rx: tokio::sync::mpsc::Receiver<ServerEvent>,
|
||||
) {
|
||||
while let Some(event) = rx.recv().await {
|
||||
match serde_json::to_string(&event) {
|
||||
Ok(text) => {
|
||||
if let Err(e) = sender.send(Message::Text(text.into())).await {
|
||||
error!("Failed to send message: {:?}", e);
|
||||
break;
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Failed to serialize event: {:?}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_receiver(
|
||||
&self,
|
||||
mut receiver: SplitStream<WebSocket>,
|
||||
tx: tokio::sync::mpsc::Sender<ServerEvent>,
|
||||
uuid: Uuid,
|
||||
log_writer: LogHelper,
|
||||
) {
|
||||
// 日志和任务状态的处理本身就是由 stream 驱动的,可以直接为每个 ws 连接维护独立的任务处理器
|
||||
// 系统信息是服务端轮询然后推送的,如果单独维护会导致每个连接都独立轮询系统信息,造成不必要的浪费
|
||||
// 因此采用了全局的订阅者管理,所有连接共享同一个系统信息轮询任务
|
||||
let (mut log_handle, mut task_handle) = (None, None);
|
||||
while let Some(Ok(msg)) = receiver.next().await {
|
||||
if let Message::Text(text) = msg {
|
||||
match serde_json::from_str::<ClientEvent>(&text) {
|
||||
Ok(ClientEvent::Subscribe(event_type)) => match event_type {
|
||||
EventType::Logs => {
|
||||
if log_handle.as_ref().is_none_or(|h: &JoinHandle<()>| h.is_finished()) {
|
||||
let log_writer_clone = log_writer.clone();
|
||||
let tx_clone = tx.clone();
|
||||
let history = log_writer_clone.log_history.lock();
|
||||
let history_logs: Vec<String> = history.iter().cloned().collect();
|
||||
drop(history);
|
||||
log_handle = Some(tokio::spawn(async move {
|
||||
let rx = log_writer_clone.sender.subscribe();
|
||||
let log_stream = futures::stream::iter(history_logs.into_iter())
|
||||
.chain(BroadcastStream::new(rx).filter_map(async |msg| msg.ok()))
|
||||
.map(ServerEvent::Logs);
|
||||
pin!(log_stream);
|
||||
while let Some(event) = log_stream.next().await {
|
||||
if let Err(e) = tx_clone.send(event).await {
|
||||
error!("Failed to send log event: {:?}", e);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}));
|
||||
}
|
||||
}
|
||||
EventType::Tasks => {
|
||||
if task_handle.as_ref().is_none_or(|h: &JoinHandle<()>| h.is_finished()) {
|
||||
let tx_clone = tx.clone();
|
||||
task_handle = Some(tokio::spawn(async move {
|
||||
let mut stream =
|
||||
WatchStream::new(TASK_STATUS_NOTIFIER.subscribe()).map(ServerEvent::Tasks);
|
||||
while let Some(event) = stream.next().await {
|
||||
if let Err(e) = tx_clone.send(event).await {
|
||||
error!("Failed to send task status: {:?}", e);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}));
|
||||
}
|
||||
}
|
||||
EventType::SysInfo => self.add_sysinfo_subscriber(uuid, tx.clone()).await,
|
||||
},
|
||||
Ok(ClientEvent::Unsubscribe(event_type)) => match event_type {
|
||||
EventType::Logs => {
|
||||
if let Some(handle) = log_handle.take() {
|
||||
handle.abort();
|
||||
}
|
||||
}
|
||||
EventType::Tasks => {
|
||||
if let Some(handle) = task_handle.take() {
|
||||
handle.abort();
|
||||
}
|
||||
}
|
||||
EventType::SysInfo => {
|
||||
self.remove_sysinfo_subscriber(uuid).await;
|
||||
}
|
||||
},
|
||||
Err(e) => {
|
||||
error!("Failed to parse client message: {:?}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if let Some(handle) = log_handle {
|
||||
handle.abort();
|
||||
}
|
||||
if let Some(handle) = task_handle {
|
||||
handle.abort();
|
||||
}
|
||||
self.remove_sysinfo_subscriber(uuid).await;
|
||||
}
|
||||
|
||||
// 添加订阅者
|
||||
async fn add_sysinfo_subscriber(&self, uuid: Uuid, sender: tokio::sync::mpsc::Sender<ServerEvent>) {
|
||||
self.sysinfo_subscribers.insert(uuid, sender);
|
||||
if !self.sysinfo_subscribers.is_empty()
|
||||
&& self
|
||||
.sysinfo_handles
|
||||
.read()
|
||||
.as_ref()
|
||||
.is_none_or(|h: &JoinHandle<()>| h.is_finished())
|
||||
{
|
||||
let sysinfo_subscribers = self.sysinfo_subscribers.clone();
|
||||
let mut write_guard = self.sysinfo_handles.write();
|
||||
if write_guard.as_ref().is_some_and(|h: &JoinHandle<()>| !h.is_finished()) {
|
||||
return;
|
||||
}
|
||||
*write_guard = Some(tokio::spawn(async move {
|
||||
let mut system = System::new();
|
||||
let mut disks = Disks::new();
|
||||
let sys_refresh_kind = sys_refresh_kind();
|
||||
let disk_refresh_kind = disk_refresh_kind();
|
||||
// 对于 linux/mac/windows 平台,该方法永远返回 Some(pid),expect 基本是安全的
|
||||
let self_pid = get_current_pid().expect("Unsupported platform");
|
||||
let mut stream =
|
||||
IntervalStream::new(tokio::time::interval(Duration::from_secs(2))).filter_map(move |_| {
|
||||
system.refresh_specifics(sys_refresh_kind);
|
||||
disks.refresh_specifics(true, disk_refresh_kind);
|
||||
let process = match system.process(self_pid) {
|
||||
Some(p) => p,
|
||||
None => return futures::future::ready(None),
|
||||
};
|
||||
futures::future::ready(Some(SysInfo {
|
||||
total_memory: system.total_memory(),
|
||||
used_memory: system.used_memory(),
|
||||
process_memory: process.memory(),
|
||||
used_cpu: system.global_cpu_usage(),
|
||||
process_cpu: process.cpu_usage() / system.cpus().len() as f32,
|
||||
total_disk: disks.iter().map(|d| d.total_space()).sum(),
|
||||
available_disk: disks.iter().map(|d| d.available_space()).sum(),
|
||||
}))
|
||||
});
|
||||
while let Some(sys_info) = stream.next().await {
|
||||
let sys_info = Arc::new(sys_info);
|
||||
future::join_all(sysinfo_subscribers.iter().map(async |subscriber| {
|
||||
if let Err(e) = subscriber.send(ServerEvent::SysInfo(sys_info.clone())).await {
|
||||
error!(
|
||||
"Failed to send sysinfo event to subscriber {}: {:?}",
|
||||
subscriber.key(),
|
||||
e
|
||||
);
|
||||
}
|
||||
}))
|
||||
.await;
|
||||
}
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
async fn remove_sysinfo_subscriber(&self, uuid: Uuid) {
|
||||
self.sysinfo_subscribers.remove(&uuid);
|
||||
if self.sysinfo_subscribers.is_empty()
|
||||
&& let Some(handle) = self.sysinfo_handles.write().take()
|
||||
{
|
||||
handle.abort();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_socket(socket: WebSocket, log_writer: LogHelper) {
|
||||
let (ws_sender, ws_receiver) = socket.split();
|
||||
let uuid = Uuid::new_v4();
|
||||
let (tx, rx) = tokio::sync::mpsc::channel(100);
|
||||
tokio::spawn(WEBSOCKET_HANDLER.handle_sender(ws_sender, rx));
|
||||
tokio::spawn(WEBSOCKET_HANDLER.handle_receiver(ws_receiver, tx, uuid, log_writer));
|
||||
}
|
||||
|
||||
fn sys_refresh_kind() -> RefreshKind {
|
||||
RefreshKind::nothing()
|
||||
.with_cpu(CpuRefreshKind::nothing().with_cpu_usage())
|
||||
.with_memory(MemoryRefreshKind::nothing().with_ram())
|
||||
.with_processes(ProcessRefreshKind::nothing().with_cpu().with_memory())
|
||||
}
|
||||
|
||||
fn disk_refresh_kind() -> DiskRefreshKind {
|
||||
DiskRefreshKind::nothing().with_storage()
|
||||
}
|
||||
@@ -1,33 +1,65 @@
|
||||
use std::borrow::Cow;
|
||||
|
||||
use anyhow::Error;
|
||||
use axum::response::IntoResponse;
|
||||
use axum::Json;
|
||||
use axum::extract::rejection::JsonRejection;
|
||||
use axum::extract::{FromRequest, Request};
|
||||
use axum::response::IntoResponse;
|
||||
use reqwest::StatusCode;
|
||||
use serde::Serialize;
|
||||
use utoipa::ToSchema;
|
||||
use serde::de::DeserializeOwned;
|
||||
use validator::Validate;
|
||||
|
||||
use crate::api::error::InnerApiError;
|
||||
|
||||
#[derive(ToSchema, Serialize)]
|
||||
#[derive(Serialize)]
|
||||
pub struct ApiResponse<T: Serialize> {
|
||||
status_code: u16,
|
||||
data: T,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
data: Option<T>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
message: Option<Cow<'static, str>>,
|
||||
}
|
||||
|
||||
impl<T: Serialize> ApiResponse<T> {
|
||||
pub fn ok(data: T) -> Self {
|
||||
Self { status_code: 200, data }
|
||||
Self {
|
||||
status_code: 200,
|
||||
data: Some(data),
|
||||
message: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn unauthorized(data: T) -> Self {
|
||||
Self { status_code: 401, data }
|
||||
pub fn bad_request(message: impl Into<Cow<'static, str>>) -> Self {
|
||||
Self {
|
||||
status_code: 400,
|
||||
data: None,
|
||||
message: Some(message.into()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn not_found(data: T) -> Self {
|
||||
Self { status_code: 404, data }
|
||||
pub fn unauthorized(message: impl Into<Cow<'static, str>>) -> Self {
|
||||
Self {
|
||||
status_code: 401,
|
||||
data: None,
|
||||
message: Some(message.into()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn internal_server_error(data: T) -> Self {
|
||||
Self { status_code: 500, data }
|
||||
pub fn not_found(message: impl Into<Cow<'static, str>>) -> Self {
|
||||
Self {
|
||||
status_code: 404,
|
||||
data: None,
|
||||
message: Some(message.into()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn internal_server_error(message: impl Into<Cow<'static, str>>) -> Self {
|
||||
Self {
|
||||
status_code: 500,
|
||||
data: None,
|
||||
message: Some(message.into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -56,9 +88,32 @@ impl IntoResponse for ApiError {
|
||||
fn into_response(self) -> axum::response::Response {
|
||||
if let Some(inner_error) = self.0.downcast_ref::<InnerApiError>() {
|
||||
match inner_error {
|
||||
InnerApiError::NotFound(_) => return ApiResponse::not_found(self.0.to_string()).into_response(),
|
||||
InnerApiError::NotFound(_) => return ApiResponse::<()>::not_found(self.0.to_string()).into_response(),
|
||||
InnerApiError::BadRequest(_) => {
|
||||
return ApiResponse::<()>::bad_request(self.0.to_string()).into_response();
|
||||
}
|
||||
}
|
||||
}
|
||||
ApiResponse::internal_server_error(self.0.to_string()).into_response()
|
||||
ApiResponse::<()>::internal_server_error(self.0.to_string()).into_response()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, Default)]
|
||||
pub struct ValidatedJson<T>(pub T);
|
||||
|
||||
impl<T, S> FromRequest<S> for ValidatedJson<T>
|
||||
where
|
||||
T: DeserializeOwned + Validate,
|
||||
S: Send + Sync,
|
||||
Json<T>: FromRequest<S, Rejection = JsonRejection>,
|
||||
{
|
||||
type Rejection = ApiError;
|
||||
|
||||
async fn from_request(req: Request, state: &S) -> Result<Self, Self::Rejection> {
|
||||
let Json(value) = Json::<T>::from_request(req, state).await?;
|
||||
value
|
||||
.validate()
|
||||
.map_err(|e| ApiError::from(InnerApiError::BadRequest(e.to_string())))?;
|
||||
Ok(ValidatedJson(value))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
use anyhow::{bail, Context, Result};
|
||||
use anyhow::{Context, Result, bail};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::bilibili::error::BiliError;
|
||||
use crate::config::VersionedConfig;
|
||||
|
||||
pub struct PageAnalyzer {
|
||||
info: serde_json::Value,
|
||||
}
|
||||
|
||||
#[derive(Debug, strum::FromRepr, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
|
||||
#[derive(Debug, strum::FromRepr, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Clone)]
|
||||
pub enum VideoQuality {
|
||||
Quality360p = 16,
|
||||
Quality480p = 32,
|
||||
@@ -53,7 +54,9 @@ impl AudioQuality {
|
||||
}
|
||||
|
||||
#[allow(clippy::upper_case_acronyms)]
|
||||
#[derive(Debug, strum::EnumString, strum::Display, strum::AsRefStr, PartialEq, PartialOrd, Serialize, Deserialize)]
|
||||
#[derive(
|
||||
Debug, strum::EnumString, strum::Display, strum::AsRefStr, PartialEq, PartialOrd, Serialize, Deserialize, Clone,
|
||||
)]
|
||||
pub enum VideoCodecs {
|
||||
#[strum(serialize = "hev")]
|
||||
HEV,
|
||||
@@ -63,8 +66,22 @@ pub enum VideoCodecs {
|
||||
AV1,
|
||||
}
|
||||
|
||||
impl TryFrom<u64> for VideoCodecs {
|
||||
type Error = anyhow::Error;
|
||||
|
||||
fn try_from(value: u64) -> std::result::Result<Self, Self::Error> {
|
||||
// https://socialsisteryi.github.io/bilibili-API-collect/docs/video/videostream_url.html#%E8%A7%86%E9%A2%91%E7%BC%96%E7%A0%81%E4%BB%A3%E7%A0%81
|
||||
match value {
|
||||
7 => Ok(Self::AVC),
|
||||
12 => Ok(Self::HEV),
|
||||
13 => Ok(Self::AV1),
|
||||
_ => bail!("invalid video codecs id: {}", value),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 视频流的筛选偏好
|
||||
#[derive(Serialize, Deserialize)]
|
||||
#[derive(Serialize, Deserialize, Clone)]
|
||||
pub struct FilterOption {
|
||||
pub video_max_quality: VideoQuality,
|
||||
pub video_min_quality: VideoQuality,
|
||||
@@ -101,24 +118,41 @@ pub enum Stream {
|
||||
EpisodeTryMp4(String),
|
||||
DashVideo {
|
||||
url: String,
|
||||
backup_url: Vec<String>,
|
||||
quality: VideoQuality,
|
||||
codecs: VideoCodecs,
|
||||
},
|
||||
DashAudio {
|
||||
url: String,
|
||||
backup_url: Vec<String>,
|
||||
quality: AudioQuality,
|
||||
},
|
||||
}
|
||||
|
||||
// 通用的获取流链接的方法,交由 Downloader 使用
|
||||
impl Stream {
|
||||
pub fn url(&self) -> &str {
|
||||
pub fn urls(&self) -> Vec<&str> {
|
||||
match self {
|
||||
Self::Flv(url) => url,
|
||||
Self::Html5Mp4(url) => url,
|
||||
Self::EpisodeTryMp4(url) => url,
|
||||
Self::DashVideo { url, .. } => url,
|
||||
Self::DashAudio { url, .. } => url,
|
||||
Self::Flv(url) | Self::Html5Mp4(url) | Self::EpisodeTryMp4(url) => vec![url],
|
||||
Self::DashVideo { url, backup_url, .. } | Self::DashAudio { url, backup_url, .. } => {
|
||||
let mut urls = std::iter::once(url.as_str())
|
||||
.chain(backup_url.iter().map(|s| s.as_str()))
|
||||
.collect::<Vec<_>>();
|
||||
if VersionedConfig::get().load().cdn_sorting {
|
||||
urls.sort_by_key(|u| {
|
||||
if u.contains("upos-") {
|
||||
0 // 服务商 cdn
|
||||
} else if u.contains("cn-") {
|
||||
1 // 自建 cdn
|
||||
} else if u.contains("mcdn") {
|
||||
2 // mcdn
|
||||
} else {
|
||||
3 // pcdn 或者其它
|
||||
}
|
||||
});
|
||||
}
|
||||
urls
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -180,25 +214,22 @@ impl PageAnalyzer {
|
||||
)]);
|
||||
}
|
||||
let mut streams: Vec<Stream> = Vec::new();
|
||||
for video in self.info["dash"]["video"]
|
||||
.as_array()
|
||||
for video in self
|
||||
.info
|
||||
.pointer_mut("/dash/video")
|
||||
.and_then(|v| v.as_array_mut())
|
||||
.ok_or(BiliError::RiskControlOccurred)?
|
||||
.iter()
|
||||
.iter_mut()
|
||||
{
|
||||
let (Some(url), Some(quality), Some(codecs)) = (
|
||||
let (Some(url), Some(quality), Some(codecs_id)) = (
|
||||
video["baseUrl"].as_str(),
|
||||
video["id"].as_u64(),
|
||||
video["codecs"].as_str(),
|
||||
video["codecid"].as_u64(),
|
||||
) else {
|
||||
continue;
|
||||
};
|
||||
let quality = VideoQuality::from_repr(quality as usize).context("invalid video stream quality")?;
|
||||
// 从视频流的 codecs 字段中获取编码格式,此处并非精确匹配而是判断包含,比如 codecs 是 av1.42c01e,需要匹配为 av1
|
||||
let Some(codecs) = [VideoCodecs::HEV, VideoCodecs::AVC, VideoCodecs::AV1]
|
||||
.into_iter()
|
||||
.find(|c| codecs.contains(c.as_ref()))
|
||||
else {
|
||||
// 少数情况会走到此处,如 codecs 为 dvh1.08.09、hvc1.2.4.L123.90 等,直接跳过,不影响流程
|
||||
let Ok(codecs) = codecs_id.try_into() else {
|
||||
continue;
|
||||
};
|
||||
if !filter_option.codecs.contains(&codecs)
|
||||
@@ -211,12 +242,13 @@ impl PageAnalyzer {
|
||||
}
|
||||
streams.push(Stream::DashVideo {
|
||||
url: url.to_string(),
|
||||
backup_url: serde_json::from_value(video["backupUrl"].take()).unwrap_or_default(),
|
||||
quality,
|
||||
codecs,
|
||||
});
|
||||
}
|
||||
if let Some(audios) = self.info["dash"]["audio"].as_array() {
|
||||
for audio in audios.iter() {
|
||||
if let Some(audios) = self.info.pointer_mut("/dash/audio").and_then(|a| a.as_array_mut()) {
|
||||
for audio in audios.iter_mut() {
|
||||
let (Some(url), Some(quality)) = (audio["baseUrl"].as_str(), audio["id"].as_u64()) else {
|
||||
continue;
|
||||
};
|
||||
@@ -226,12 +258,14 @@ impl PageAnalyzer {
|
||||
}
|
||||
streams.push(Stream::DashAudio {
|
||||
url: url.to_string(),
|
||||
backup_url: serde_json::from_value(audio["backupUrl"].take()).unwrap_or_default(),
|
||||
quality,
|
||||
});
|
||||
}
|
||||
}
|
||||
let flac = &self.info["dash"]["flac"]["audio"];
|
||||
if !(filter_option.no_hires || flac.is_null()) {
|
||||
if !filter_option.no_hires
|
||||
&& let Some(flac) = self.info.pointer_mut("/dash/flac/audio")
|
||||
{
|
||||
let (Some(url), Some(quality)) = (flac["baseUrl"].as_str(), flac["id"].as_u64()) else {
|
||||
bail!("invalid flac stream");
|
||||
};
|
||||
@@ -239,12 +273,17 @@ impl PageAnalyzer {
|
||||
if quality >= filter_option.audio_min_quality && quality <= filter_option.audio_max_quality {
|
||||
streams.push(Stream::DashAudio {
|
||||
url: url.to_string(),
|
||||
backup_url: serde_json::from_value(flac["backupUrl"].take()).unwrap_or_default(),
|
||||
quality,
|
||||
});
|
||||
}
|
||||
}
|
||||
let dolby_audio = &self.info["dash"]["dolby"]["audio"][0];
|
||||
if !(filter_option.no_dolby_audio || dolby_audio.is_null()) {
|
||||
if !filter_option.no_dolby_audio
|
||||
&& let Some(dolby_audio) = self
|
||||
.info
|
||||
.pointer_mut("/dash/dolby/audio/0")
|
||||
.and_then(|a| a.as_object_mut())
|
||||
{
|
||||
let (Some(url), Some(quality)) = (dolby_audio["baseUrl"].as_str(), dolby_audio["id"].as_u64()) else {
|
||||
bail!("invalid dolby audio stream");
|
||||
};
|
||||
@@ -252,6 +291,7 @@ impl PageAnalyzer {
|
||||
if quality >= filter_option.audio_min_quality && quality <= filter_option.audio_max_quality {
|
||||
streams.push(Stream::DashAudio {
|
||||
url: url.to_string(),
|
||||
backup_url: serde_json::from_value(dolby_audio["backupUrl"].take()).unwrap_or_default(),
|
||||
quality,
|
||||
});
|
||||
}
|
||||
@@ -270,32 +310,34 @@ impl PageAnalyzer {
|
||||
let (videos, audios): (Vec<Stream>, Vec<Stream>) =
|
||||
streams.into_iter().partition(|s| matches!(s, Stream::DashVideo { .. }));
|
||||
Ok(BestStream::VideoAudio {
|
||||
video: Iterator::max_by(videos.into_iter(), |a, b| match (a, b) {
|
||||
(
|
||||
Stream::DashVideo {
|
||||
quality: a_quality,
|
||||
codecs: a_codecs,
|
||||
..
|
||||
},
|
||||
Stream::DashVideo {
|
||||
quality: b_quality,
|
||||
codecs: b_codecs,
|
||||
..
|
||||
},
|
||||
) => {
|
||||
if a_quality != b_quality {
|
||||
return a_quality.cmp(b_quality);
|
||||
};
|
||||
filter_option
|
||||
.codecs
|
||||
.iter()
|
||||
.position(|c| c == b_codecs)
|
||||
.cmp(&filter_option.codecs.iter().position(|c| c == a_codecs))
|
||||
}
|
||||
_ => unreachable!(),
|
||||
})
|
||||
.context("no video stream found")?,
|
||||
audio: Iterator::max_by(audios.into_iter(), |a, b| match (a, b) {
|
||||
video: videos
|
||||
.into_iter()
|
||||
.max_by(|a, b| match (a, b) {
|
||||
(
|
||||
Stream::DashVideo {
|
||||
quality: a_quality,
|
||||
codecs: a_codecs,
|
||||
..
|
||||
},
|
||||
Stream::DashVideo {
|
||||
quality: b_quality,
|
||||
codecs: b_codecs,
|
||||
..
|
||||
},
|
||||
) => {
|
||||
if a_quality != b_quality {
|
||||
return a_quality.cmp(b_quality);
|
||||
};
|
||||
filter_option
|
||||
.codecs
|
||||
.iter()
|
||||
.position(|c| c == b_codecs)
|
||||
.cmp(&filter_option.codecs.iter().position(|c| c == a_codecs))
|
||||
}
|
||||
_ => unreachable!(),
|
||||
})
|
||||
.context("no video stream found")?,
|
||||
audio: audios.into_iter().max_by(|a, b| match (a, b) {
|
||||
(Stream::DashAudio { quality: a_quality, .. }, Stream::DashAudio { quality: b_quality, .. }) => {
|
||||
a_quality.cmp(b_quality)
|
||||
}
|
||||
@@ -309,31 +351,35 @@ impl PageAnalyzer {
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::bilibili::{BiliClient, Video};
|
||||
use crate::config::CONFIG;
|
||||
use crate::config::VersionedConfig;
|
||||
|
||||
#[test]
|
||||
fn test_quality_order() {
|
||||
assert!([
|
||||
VideoQuality::Quality360p,
|
||||
VideoQuality::Quality480p,
|
||||
VideoQuality::Quality720p,
|
||||
VideoQuality::Quality1080p,
|
||||
VideoQuality::Quality1080pPLUS,
|
||||
VideoQuality::Quality1080p60,
|
||||
VideoQuality::Quality4k,
|
||||
VideoQuality::QualityHdr,
|
||||
VideoQuality::QualityDolby,
|
||||
VideoQuality::Quality8k
|
||||
]
|
||||
.is_sorted());
|
||||
assert!([
|
||||
AudioQuality::Quality64k,
|
||||
AudioQuality::Quality132k,
|
||||
AudioQuality::Quality192k,
|
||||
AudioQuality::QualityDolby,
|
||||
AudioQuality::QualityHiRES,
|
||||
]
|
||||
.is_sorted());
|
||||
assert!(
|
||||
[
|
||||
VideoQuality::Quality360p,
|
||||
VideoQuality::Quality480p,
|
||||
VideoQuality::Quality720p,
|
||||
VideoQuality::Quality1080p,
|
||||
VideoQuality::Quality1080pPLUS,
|
||||
VideoQuality::Quality1080p60,
|
||||
VideoQuality::Quality4k,
|
||||
VideoQuality::QualityHdr,
|
||||
VideoQuality::QualityDolby,
|
||||
VideoQuality::Quality8k
|
||||
]
|
||||
.is_sorted()
|
||||
);
|
||||
assert!(
|
||||
[
|
||||
AudioQuality::Quality64k,
|
||||
AudioQuality::Quality132k,
|
||||
AudioQuality::Quality192k,
|
||||
AudioQuality::QualityDolby,
|
||||
AudioQuality::QualityHiRES,
|
||||
]
|
||||
.is_sorted()
|
||||
);
|
||||
}
|
||||
|
||||
#[ignore = "only for manual test"]
|
||||
@@ -344,18 +390,41 @@ mod tests {
|
||||
(
|
||||
"BV1xRChYUE2R",
|
||||
VideoQuality::Quality8k,
|
||||
VideoCodecs::HEV,
|
||||
Some(AudioQuality::QualityHiRES),
|
||||
),
|
||||
// 一个没有声音的纯视频
|
||||
("BV1J7411H7KQ", VideoQuality::Quality720p, None),
|
||||
("BV1J7411H7KQ", VideoQuality::Quality720p, VideoCodecs::HEV, None),
|
||||
// 一个杜比全景声的演示片
|
||||
(
|
||||
"BV1Mm4y1P7JV",
|
||||
VideoQuality::Quality4k,
|
||||
VideoQuality::QualityDolby,
|
||||
VideoCodecs::HEV,
|
||||
Some(AudioQuality::QualityDolby),
|
||||
),
|
||||
// 影视飓风的杜比视界视频
|
||||
(
|
||||
"BV1HEf2YWEvs",
|
||||
VideoQuality::QualityDolby,
|
||||
VideoCodecs::HEV,
|
||||
Some(AudioQuality::QualityDolby),
|
||||
),
|
||||
// 孤独摇滚的杜比视界 + hires + 杜比全景声视频
|
||||
(
|
||||
"BV1YDVYzeE39",
|
||||
VideoQuality::QualityDolby,
|
||||
VideoCodecs::HEV,
|
||||
Some(AudioQuality::QualityHiRES),
|
||||
),
|
||||
// 一个京紫的 HDR 视频
|
||||
(
|
||||
"BV1cZ4y1b7iB",
|
||||
VideoQuality::QualityHdr,
|
||||
VideoCodecs::HEV,
|
||||
Some(AudioQuality::Quality192k),
|
||||
),
|
||||
];
|
||||
for (bvid, video_quality, audio_quality) in testcases.into_iter() {
|
||||
for (bvid, video_quality, video_codec, audio_quality) in testcases.into_iter() {
|
||||
let client = BiliClient::new();
|
||||
let video = Video::new(&client, bvid.to_owned());
|
||||
let pages = video.get_pages().await.expect("failed to get pages");
|
||||
@@ -364,15 +433,16 @@ mod tests {
|
||||
.get_page_analyzer(&first_page)
|
||||
.await
|
||||
.expect("failed to get page analyzer")
|
||||
.best_stream(&CONFIG.filter_option)
|
||||
.best_stream(&VersionedConfig::get().load().filter_option)
|
||||
.expect("failed to get best stream");
|
||||
dbg!(bvid, &best_stream);
|
||||
match best_stream {
|
||||
BestStream::VideoAudio {
|
||||
video: Stream::DashVideo { quality, .. },
|
||||
video: Stream::DashVideo { quality, codecs, .. },
|
||||
audio,
|
||||
} => {
|
||||
assert_eq!(quality, video_quality);
|
||||
assert_eq!(codecs, video_codec);
|
||||
assert_eq!(
|
||||
audio.map(|audio_stream| match audio_stream {
|
||||
Stream::DashAudio { quality, .. } => quality,
|
||||
@@ -385,4 +455,27 @@ mod tests {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_url_sort() {
|
||||
let stream = Stream::DashVideo {
|
||||
url: "https://xy116x207x155x163xy240ey95dy1010y700yy8dxy.mcdn.bilivideo.cn:4483".to_owned(),
|
||||
backup_url: vec![
|
||||
"https://upos-sz-mirrorcos.bilivideo.com".to_owned(),
|
||||
"https://cn-tj-cu-01-11.bilivideo.com".to_owned(),
|
||||
"https://xxx.v1d.szbdys.com".to_owned(),
|
||||
],
|
||||
quality: VideoQuality::Quality1080p,
|
||||
codecs: VideoCodecs::AVC,
|
||||
};
|
||||
assert_eq!(
|
||||
stream.urls(),
|
||||
vec![
|
||||
"https://upos-sz-mirrorcos.bilivideo.com",
|
||||
"https://cn-tj-cu-01-11.bilivideo.com",
|
||||
"https://xy116x207x155x163xy240ey95dy1010y700yy8dxy.mcdn.bilivideo.cn:4483",
|
||||
"https://xxx.v1d.szbdys.com"
|
||||
]
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use anyhow::Result;
|
||||
use leaky_bucket::RateLimiter;
|
||||
use reqwest::{header, Method};
|
||||
use reqwest::{Method, header};
|
||||
use sea_orm::DatabaseConnection;
|
||||
use ua_generator::ua;
|
||||
|
||||
use crate::bilibili::credential::WbiImg;
|
||||
use crate::bilibili::Credential;
|
||||
use crate::config::{RateLimit, CONFIG};
|
||||
use crate::bilibili::credential::WbiImg;
|
||||
use crate::config::{RateLimit, VersionedCache, VersionedConfig};
|
||||
|
||||
// 一个对 reqwest::Client 的简单封装,用于 Bilibili 请求
|
||||
#[derive(Clone)]
|
||||
@@ -19,9 +20,7 @@ impl Client {
|
||||
let mut headers = header::HeaderMap::new();
|
||||
headers.insert(
|
||||
header::USER_AGENT,
|
||||
header::HeaderValue::from_static(
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36",
|
||||
),
|
||||
header::HeaderValue::from_static(ua::spoof_chrome_ua()),
|
||||
);
|
||||
headers.insert(
|
||||
header::REFERER,
|
||||
@@ -63,53 +62,54 @@ impl Default for Client {
|
||||
|
||||
pub struct BiliClient {
|
||||
pub client: Client,
|
||||
limiter: Option<RateLimiter>,
|
||||
limiter: VersionedCache<Option<RateLimiter>>,
|
||||
}
|
||||
|
||||
impl BiliClient {
|
||||
pub fn new() -> Self {
|
||||
let client = Client::new();
|
||||
let limiter = CONFIG
|
||||
.concurrent_limit
|
||||
.rate_limit
|
||||
.as_ref()
|
||||
.map(|RateLimit { limit, duration }| {
|
||||
RateLimiter::builder()
|
||||
.initial(*limit)
|
||||
.refill(*limit)
|
||||
.max(*limit)
|
||||
.interval(Duration::from_millis(*duration))
|
||||
.build()
|
||||
});
|
||||
let limiter = VersionedCache::new(|config| {
|
||||
Ok(config
|
||||
.concurrent_limit
|
||||
.rate_limit
|
||||
.as_ref()
|
||||
.map(|RateLimit { limit, duration }| {
|
||||
RateLimiter::builder()
|
||||
.initial(*limit)
|
||||
.refill(*limit)
|
||||
.max(*limit)
|
||||
.interval(Duration::from_millis(*duration))
|
||||
.build()
|
||||
}))
|
||||
})
|
||||
.expect("failed to create rate limiter");
|
||||
Self { client, limiter }
|
||||
}
|
||||
|
||||
/// 获取一个预构建的请求,通过该方法获取请求时会检查并等待速率限制
|
||||
pub async fn request(&self, method: Method, url: &str) -> reqwest::RequestBuilder {
|
||||
if let Some(limiter) = &self.limiter {
|
||||
if let Some(limiter) = self.limiter.load().as_ref() {
|
||||
limiter.acquire_one().await;
|
||||
}
|
||||
let credential = CONFIG.credential.load();
|
||||
self.client.request(method, url, credential.as_deref())
|
||||
let credential = &VersionedConfig::get().load().credential;
|
||||
self.client.request(method, url, Some(credential))
|
||||
}
|
||||
|
||||
pub async fn check_refresh(&self) -> Result<()> {
|
||||
let credential = CONFIG.credential.load();
|
||||
let Some(credential) = credential.as_deref() else {
|
||||
return Ok(());
|
||||
};
|
||||
pub async fn check_refresh(&self, connection: &DatabaseConnection) -> Result<()> {
|
||||
let credential = &VersionedConfig::get().load().credential;
|
||||
if !credential.need_refresh(&self.client).await? {
|
||||
return Ok(());
|
||||
}
|
||||
let new_credential = credential.refresh(&self.client).await?;
|
||||
CONFIG.credential.store(Some(Arc::new(new_credential)));
|
||||
CONFIG.save()
|
||||
VersionedConfig::get()
|
||||
.update_credential(new_credential, connection)
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// 获取 wbi img,用于生成请求签名
|
||||
pub async fn wbi_img(&self) -> Result<WbiImg> {
|
||||
let credential = CONFIG.credential.load();
|
||||
let credential = credential.as_deref().context("no credential found")?;
|
||||
let credential = &VersionedConfig::get().load().credential;
|
||||
credential.wbi_img(&self.client).await
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use std::fmt::{Display, Formatter};
|
||||
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use anyhow::{Context, Result, anyhow};
|
||||
use async_stream::try_stream;
|
||||
use futures::Stream;
|
||||
use reqwest::Method;
|
||||
@@ -8,14 +8,25 @@ use serde::Deserialize;
|
||||
use serde_json::Value;
|
||||
|
||||
use crate::bilibili::credential::encoded_query;
|
||||
use crate::bilibili::{BiliClient, Validate, VideoInfo, MIXIN_KEY};
|
||||
use crate::bilibili::{BiliClient, MIXIN_KEY, Validate, VideoInfo};
|
||||
|
||||
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
|
||||
#[derive(PartialEq, Eq, Hash, Clone, Debug, Default, Copy)]
|
||||
pub enum CollectionType {
|
||||
Series,
|
||||
#[default]
|
||||
Season,
|
||||
}
|
||||
|
||||
impl<'de> serde::Deserialize<'de> for CollectionType {
|
||||
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
let v = i32::deserialize(deserializer)?;
|
||||
CollectionType::try_from(v).map_err(serde::de::Error::custom)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<CollectionType> for i32 {
|
||||
fn from(v: CollectionType) -> Self {
|
||||
match v {
|
||||
@@ -25,16 +36,24 @@ impl From<CollectionType> for i32 {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<i32> for CollectionType {
|
||||
fn from(v: i32) -> Self {
|
||||
impl TryFrom<i32> for CollectionType {
|
||||
type Error = anyhow::Error;
|
||||
|
||||
fn try_from(v: i32) -> Result<Self, Self::Error> {
|
||||
match v {
|
||||
1 => CollectionType::Series,
|
||||
2 => CollectionType::Season,
|
||||
_ => panic!("invalid collection type"),
|
||||
1 => Ok(CollectionType::Series),
|
||||
2 => Ok(CollectionType::Season),
|
||||
v => Err(anyhow!("got invalid collection type {}", v)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl CollectionType {
|
||||
pub fn from_expected(v: i32) -> Self {
|
||||
Self::try_from(v).expect("invalid collection type")
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for CollectionType {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
let s = match self {
|
||||
@@ -54,7 +73,7 @@ pub struct CollectionItem {
|
||||
|
||||
pub struct Collection<'a> {
|
||||
client: &'a BiliClient,
|
||||
collection: &'a CollectionItem,
|
||||
pub collection: CollectionItem,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
@@ -93,7 +112,7 @@ impl<'de> Deserialize<'de> for CollectionInfo {
|
||||
}
|
||||
|
||||
impl<'a> Collection<'a> {
|
||||
pub fn new(client: &'a BiliClient, collection: &'a CollectionItem) -> Self {
|
||||
pub fn new(client: &'a BiliClient, collection: CollectionItem) -> Self {
|
||||
Self { client, collection }
|
||||
}
|
||||
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
use std::borrow::Cow;
|
||||
use std::collections::HashSet;
|
||||
|
||||
use anyhow::{bail, ensure, Context, Result};
|
||||
use anyhow::{Context, Result, bail, ensure};
|
||||
use cookie::Cookie;
|
||||
use cow_utils::CowUtils;
|
||||
use regex::Regex;
|
||||
use reqwest::{header, Method};
|
||||
use reqwest::{Method, header};
|
||||
use rsa::pkcs8::DecodePublicKey;
|
||||
use rsa::sha2::Sha256;
|
||||
use rsa::{Oaep, RsaPublicKey};
|
||||
@@ -100,7 +100,7 @@ JNrRuoEUXpabUzGB8QIDAQAB
|
||||
.expect("fail to decode public key");
|
||||
let ts = chrono::Local::now().timestamp_millis();
|
||||
let data = format!("refresh_{}", ts).into_bytes();
|
||||
let mut rng = rand::rngs::OsRng;
|
||||
let mut rng = rand::rng();
|
||||
let encrypted = key
|
||||
.encrypt(&mut rng, Oaep::new::<Sha256>(), &data)
|
||||
.expect("fail to encrypt");
|
||||
|
||||
@@ -88,14 +88,14 @@ impl fmt::Display for CanvasStyles {
|
||||
}
|
||||
}
|
||||
|
||||
pub struct AssWriter<W: AsyncWrite> {
|
||||
pub struct AssWriter<'a, W: AsyncWrite> {
|
||||
f: Pin<Box<BufWriter<W>>>,
|
||||
title: String,
|
||||
canvas_config: CanvasConfig,
|
||||
canvas_config: CanvasConfig<'a>,
|
||||
}
|
||||
|
||||
impl<W: AsyncWrite> AssWriter<W> {
|
||||
pub fn new(f: W, title: String, canvas_config: CanvasConfig) -> Self {
|
||||
impl<'a, W: AsyncWrite> AssWriter<'a, W> {
|
||||
pub fn new(f: W, title: String, canvas_config: CanvasConfig<'a>) -> Self {
|
||||
AssWriter {
|
||||
// 对于 HDD、docker 之类的场景,磁盘 IO 是非常大的瓶颈。使用大缓存
|
||||
f: Box::pin(BufWriter::with_capacity(10 << 20, f)),
|
||||
@@ -104,7 +104,7 @@ impl<W: AsyncWrite> AssWriter<W> {
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn construct(f: W, title: String, canvas_config: CanvasConfig) -> Result<Self> {
|
||||
pub async fn construct(f: W, title: String, canvas_config: CanvasConfig<'a>) -> Result<Self> {
|
||||
let mut res = Self::new(f, title, canvas_config);
|
||||
res.init().await?;
|
||||
Ok(res)
|
||||
@@ -184,7 +184,7 @@ impl<W: AsyncWrite> AssWriter<W> {
|
||||
}
|
||||
}
|
||||
|
||||
fn escape_text(text: &str) -> Cow<str> {
|
||||
fn escape_text(text: &'_ str) -> Cow<'_, str> {
|
||||
let text = text.trim();
|
||||
if memchr::memchr(b'\n', text.as_bytes()).is_some() {
|
||||
Cow::from(text.replace('\n', "\\N"))
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use crate::bilibili::danmaku::canvas::CanvasConfig;
|
||||
use crate::bilibili::danmaku::Danmu;
|
||||
use crate::bilibili::danmaku::canvas::CanvasConfig;
|
||||
|
||||
pub enum Collision {
|
||||
// 会越来越远
|
||||
@@ -18,7 +18,7 @@ pub struct Lane {
|
||||
}
|
||||
|
||||
impl Lane {
|
||||
pub fn draw(danmu: &Danmu, config: &CanvasConfig) -> Self {
|
||||
pub fn draw(danmu: &Danmu, config: &CanvasConfig<'_>) -> Self {
|
||||
Lane {
|
||||
last_shoot_time: danmu.timeline_s,
|
||||
last_length: danmu.length(config),
|
||||
@@ -26,7 +26,7 @@ impl Lane {
|
||||
}
|
||||
|
||||
/// 这个槽位是否可以发射另外一条弹幕,返回可能的情形
|
||||
pub fn available_for(&self, other: &Danmu, config: &CanvasConfig) -> Collision {
|
||||
pub fn available_for(&self, other: &Danmu, config: &CanvasConfig<'_>) -> Collision {
|
||||
#[allow(non_snake_case)]
|
||||
let T = config.danmaku_option.duration;
|
||||
#[allow(non_snake_case)]
|
||||
|
||||
@@ -5,12 +5,12 @@ use anyhow::Result;
|
||||
use float_ord::FloatOrd;
|
||||
use lane::Lane;
|
||||
|
||||
use crate::bilibili::PageInfo;
|
||||
use crate::bilibili::danmaku::canvas::lane::Collision;
|
||||
use crate::bilibili::danmaku::danmu::DanmuType;
|
||||
use crate::bilibili::danmaku::{Danmu, DrawEffect, Drawable};
|
||||
use crate::bilibili::PageInfo;
|
||||
|
||||
#[derive(Debug, serde::Deserialize, serde::Serialize)]
|
||||
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
|
||||
pub struct DanmakuOption {
|
||||
pub duration: f64,
|
||||
pub font: String,
|
||||
@@ -26,7 +26,7 @@ pub struct DanmakuOption {
|
||||
pub bottom_percentage: f64,
|
||||
/// 透明度(0-255)
|
||||
pub opacity: u8,
|
||||
/// 是否加粗,1代表是,0代表否
|
||||
/// 是否加粗,1 代表是,0 代表否
|
||||
pub bold: bool,
|
||||
/// 描边
|
||||
pub outline: f64,
|
||||
@@ -54,13 +54,13 @@ impl Default for DanmakuOption {
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct CanvasConfig {
|
||||
pub struct CanvasConfig<'a> {
|
||||
pub width: u64,
|
||||
pub height: u64,
|
||||
pub danmaku_option: &'static DanmakuOption,
|
||||
pub danmaku_option: &'a DanmakuOption,
|
||||
}
|
||||
impl CanvasConfig {
|
||||
pub fn new(danmaku_option: &'static DanmakuOption, page: &PageInfo) -> Self {
|
||||
impl<'a> CanvasConfig<'a> {
|
||||
pub fn new(danmaku_option: &'a DanmakuOption, page: &PageInfo) -> Self {
|
||||
let (width, height) = Self::dimension(page);
|
||||
Self {
|
||||
width,
|
||||
@@ -86,7 +86,7 @@ impl CanvasConfig {
|
||||
((720.0 / height as f64 * width as f64) as u64, 720)
|
||||
}
|
||||
|
||||
pub fn canvas(self) -> Canvas {
|
||||
pub fn canvas(self) -> Canvas<'a> {
|
||||
let float_lanes_cnt =
|
||||
(self.danmaku_option.float_percentage * self.height as f64 / self.danmaku_option.lane_size as f64) as usize;
|
||||
|
||||
@@ -97,12 +97,12 @@ impl CanvasConfig {
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Canvas {
|
||||
pub config: CanvasConfig,
|
||||
pub struct Canvas<'a> {
|
||||
pub config: CanvasConfig<'a>,
|
||||
pub float_lanes: Vec<Option<Lane>>,
|
||||
}
|
||||
|
||||
impl Canvas {
|
||||
impl<'a> Canvas<'a> {
|
||||
pub fn draw(&mut self, mut danmu: Danmu) -> Result<Option<Drawable>> {
|
||||
danmu.timeline_s += self.config.danmaku_option.time_offset;
|
||||
if danmu.timeline_s < 0.0 {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
//! 一个弹幕实例,但是没有位置信息
|
||||
use anyhow::{bail, Result};
|
||||
use anyhow::{Result, bail};
|
||||
|
||||
use crate::bilibili::danmaku::canvas::CanvasConfig;
|
||||
|
||||
@@ -39,8 +39,8 @@ pub struct Danmu {
|
||||
impl Danmu {
|
||||
/// 计算弹幕的“像素长度”,会乘上一个缩放因子
|
||||
///
|
||||
/// 汉字算一个全宽,英文算2/3宽
|
||||
pub fn length(&self, config: &CanvasConfig) -> f64 {
|
||||
/// 汉字算一个全宽,英文算 2/3 宽
|
||||
pub fn length(&self, config: &CanvasConfig<'_>) -> f64 {
|
||||
let pts = config.danmaku_option.font_size
|
||||
* self
|
||||
.content
|
||||
|
||||
@@ -3,10 +3,10 @@ use std::path::PathBuf;
|
||||
use anyhow::Result;
|
||||
use tokio::fs::{self, File};
|
||||
|
||||
use crate::bilibili::PageInfo;
|
||||
use crate::bilibili::danmaku::canvas::CanvasConfig;
|
||||
use crate::bilibili::danmaku::{AssWriter, Danmu};
|
||||
use crate::bilibili::PageInfo;
|
||||
use crate::config::CONFIG;
|
||||
use crate::config::VersionedConfig;
|
||||
|
||||
pub struct DanmakuWriter<'a> {
|
||||
page: &'a PageInfo,
|
||||
@@ -22,7 +22,8 @@ impl<'a> DanmakuWriter<'a> {
|
||||
if let Some(parent) = path.parent() {
|
||||
fs::create_dir_all(parent).await?;
|
||||
}
|
||||
let canvas_config = CanvasConfig::new(&CONFIG.danmaku_option, self.page);
|
||||
let config = VersionedConfig::get().load_full();
|
||||
let canvas_config = CanvasConfig::new(&config.danmaku_option, self.page);
|
||||
let mut writer =
|
||||
AssWriter::construct(File::create(path).await?, self.page.name.clone(), canvas_config.clone()).await?;
|
||||
let mut canvas = canvas_config.canvas();
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use anyhow::{Context, Result, anyhow};
|
||||
use async_stream::try_stream;
|
||||
use futures::Stream;
|
||||
use serde_json::Value;
|
||||
|
||||
115
crates/bili_sync/src/bilibili/me.rs
Normal file
115
crates/bili_sync/src/bilibili/me.rs
Normal file
@@ -0,0 +1,115 @@
|
||||
use anyhow::{Result, ensure};
|
||||
use reqwest::Method;
|
||||
|
||||
use crate::bilibili::{BiliClient, Validate};
|
||||
use crate::config::VersionedConfig;
|
||||
pub struct Me<'a> {
|
||||
client: &'a BiliClient,
|
||||
mid: String,
|
||||
}
|
||||
|
||||
impl<'a> Me<'a> {
|
||||
pub fn new(client: &'a BiliClient) -> Self {
|
||||
Self {
|
||||
client,
|
||||
mid: Self::my_id(),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_created_favorites(&self) -> Result<Option<Vec<FavoriteItem>>> {
|
||||
ensure!(!self.mid.is_empty(), "未获取到用户 ID,请确保填写设置中的 B 站认证信息");
|
||||
let mut resp = self
|
||||
.client
|
||||
.request(Method::GET, "https://api.bilibili.com/x/v3/fav/folder/created/list-all")
|
||||
.await
|
||||
.query(&[("up_mid", &self.mid)])
|
||||
.send()
|
||||
.await?
|
||||
.error_for_status()?
|
||||
.json::<serde_json::Value>()
|
||||
.await?
|
||||
.validate()?;
|
||||
Ok(serde_json::from_value(resp["data"]["list"].take())?)
|
||||
}
|
||||
|
||||
pub async fn get_followed_collections(&self, page_num: i32, page_size: i32) -> Result<Collections> {
|
||||
ensure!(!self.mid.is_empty(), "未获取到用户 ID,请确保填写设置中的 B 站认证信息");
|
||||
let mut resp = self
|
||||
.client
|
||||
.request(Method::GET, "https://api.bilibili.com/x/v3/fav/folder/collected/list")
|
||||
.await
|
||||
.query(&[
|
||||
("up_mid", self.mid.as_str()),
|
||||
("pn", page_num.to_string().as_str()),
|
||||
("ps", page_size.to_string().as_str()),
|
||||
("platform", "web"),
|
||||
])
|
||||
.send()
|
||||
.await?
|
||||
.error_for_status()?
|
||||
.json::<serde_json::Value>()
|
||||
.await?
|
||||
.validate()?;
|
||||
Ok(serde_json::from_value(resp["data"].take())?)
|
||||
}
|
||||
|
||||
pub async fn get_followed_uppers(&self, page_num: i32, page_size: i32) -> Result<FollowedUppers> {
|
||||
ensure!(!self.mid.is_empty(), "未获取到用户 ID,请确保填写设置中的 B 站认证信息");
|
||||
let mut resp = self
|
||||
.client
|
||||
.request(Method::GET, "https://api.bilibili.com/x/relation/followings")
|
||||
.await
|
||||
.query(&[
|
||||
("vmid", self.mid.as_str()),
|
||||
("pn", page_num.to_string().as_str()),
|
||||
("ps", page_size.to_string().as_str()),
|
||||
])
|
||||
.send()
|
||||
.await?
|
||||
.error_for_status()?
|
||||
.json::<serde_json::Value>()
|
||||
.await?
|
||||
.validate()?;
|
||||
Ok(serde_json::from_value(resp["data"].take())?)
|
||||
}
|
||||
|
||||
fn my_id() -> String {
|
||||
VersionedConfig::get().load().credential.dedeuserid.clone()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, serde::Deserialize)]
|
||||
pub struct FavoriteItem {
|
||||
pub title: String,
|
||||
pub media_count: i64,
|
||||
pub id: i64,
|
||||
pub mid: i64,
|
||||
}
|
||||
|
||||
#[derive(Debug, serde::Deserialize)]
|
||||
pub struct CollectionItem {
|
||||
pub id: i64,
|
||||
pub mid: i64,
|
||||
pub state: i32,
|
||||
pub title: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, serde::Deserialize)]
|
||||
pub struct Collections {
|
||||
pub count: i64,
|
||||
pub list: Option<Vec<CollectionItem>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, serde::Deserialize)]
|
||||
pub struct FollowedUppers {
|
||||
pub total: i64,
|
||||
pub list: Vec<FollowedUpper>,
|
||||
}
|
||||
|
||||
#[derive(Debug, serde::Deserialize)]
|
||||
pub struct FollowedUpper {
|
||||
pub mid: i64,
|
||||
pub uname: String,
|
||||
pub face: String,
|
||||
pub sign: String,
|
||||
}
|
||||
@@ -1,7 +1,7 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
pub use analyzer::{BestStream, FilterOption};
|
||||
use anyhow::{bail, ensure, Result};
|
||||
use anyhow::{Result, bail, ensure};
|
||||
use arc_swap::ArcSwapOption;
|
||||
use chrono::serde::ts_seconds;
|
||||
use chrono::{DateTime, Utc};
|
||||
@@ -12,6 +12,7 @@ pub use danmaku::DanmakuOption;
|
||||
pub use error::BiliError;
|
||||
pub use favorite_list::FavoriteList;
|
||||
use favorite_list::Upper;
|
||||
pub use me::Me;
|
||||
use once_cell::sync::Lazy;
|
||||
pub use submission::Submission;
|
||||
pub use video::{Dimension, PageInfo, Video};
|
||||
@@ -24,6 +25,7 @@ mod credential;
|
||||
mod danmaku;
|
||||
mod error;
|
||||
mod favorite_list;
|
||||
mod me;
|
||||
mod submission;
|
||||
mod subtitle;
|
||||
mod video;
|
||||
@@ -145,20 +147,21 @@ mod tests {
|
||||
#[ignore = "only for manual test"]
|
||||
#[tokio::test]
|
||||
async fn test_video_info_type() {
|
||||
init_logger("None,bili_sync=debug");
|
||||
init_logger("None,bili_sync=debug", None);
|
||||
let bili_client = BiliClient::new();
|
||||
// 请求 UP 主视频必须要获取 mixin key,使用 key 计算请求参数的签名,否则直接提示权限不足返回空
|
||||
let Ok(Some(mixin_key)) = bili_client.wbi_img().await.map(|wbi_img| wbi_img.into()) else {
|
||||
panic!("获取 mixin key 失败");
|
||||
};
|
||||
set_global_mixin_key(mixin_key);
|
||||
// 测试视频合集
|
||||
let collection_item = CollectionItem {
|
||||
mid: "521722088".to_string(),
|
||||
sid: "4523".to_string(),
|
||||
collection_type: CollectionType::Season,
|
||||
};
|
||||
let collection = Collection::new(&bili_client, &collection_item);
|
||||
let collection = Collection::new(
|
||||
&bili_client,
|
||||
CollectionItem {
|
||||
mid: "521722088".to_string(),
|
||||
sid: "4523".to_string(),
|
||||
collection_type: CollectionType::Season,
|
||||
},
|
||||
);
|
||||
let videos = collection
|
||||
.into_video_stream()
|
||||
.take(20)
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use arc_swap::access::Access;
|
||||
use anyhow::{Context, Result, anyhow};
|
||||
use async_stream::try_stream;
|
||||
use futures::Stream;
|
||||
use reqwest::Method;
|
||||
@@ -7,10 +6,10 @@ use serde_json::Value;
|
||||
|
||||
use crate::bilibili::credential::encoded_query;
|
||||
use crate::bilibili::favorite_list::Upper;
|
||||
use crate::bilibili::{BiliClient, Validate, VideoInfo, MIXIN_KEY};
|
||||
use crate::bilibili::{BiliClient, MIXIN_KEY, Validate, VideoInfo};
|
||||
pub struct Submission<'a> {
|
||||
client: &'a BiliClient,
|
||||
upper_id: String,
|
||||
pub upper_id: String,
|
||||
}
|
||||
|
||||
impl<'a> Submission<'a> {
|
||||
|
||||
@@ -29,7 +29,7 @@ pub struct SubTitleItem {
|
||||
impl SubTitleInfo {
|
||||
pub fn is_ai_sub(&self) -> bool {
|
||||
// ai: aisubtitle.hdslb.com/bfs/ai_subtitle/xxxx
|
||||
// 非 ai: aisubtitle.hdslb.com/bfs/subtitle/xxxx
|
||||
// 非 ai:aisubtitle.hdslb.com/bfs/subtitle/xxxx
|
||||
self.subtitle_url.contains("ai_subtitle")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use anyhow::{ensure, Result};
|
||||
use futures::stream::FuturesUnordered;
|
||||
use anyhow::{Context, Result, ensure};
|
||||
use futures::TryStreamExt;
|
||||
use futures::stream::FuturesUnordered;
|
||||
use prost::Message;
|
||||
use reqwest::Method;
|
||||
|
||||
@@ -9,36 +9,13 @@ use crate::bilibili::client::BiliClient;
|
||||
use crate::bilibili::credential::encoded_query;
|
||||
use crate::bilibili::danmaku::{DanmakuElem, DanmakuWriter, DmSegMobileReply};
|
||||
use crate::bilibili::subtitle::{SubTitle, SubTitleBody, SubTitleInfo, SubTitlesInfo};
|
||||
use crate::bilibili::{Validate, VideoInfo, MIXIN_KEY};
|
||||
|
||||
static MASK_CODE: u64 = 2251799813685247;
|
||||
static XOR_CODE: u64 = 23442827791579;
|
||||
static BASE: u64 = 58;
|
||||
static DATA: &[char] = &[
|
||||
'F', 'c', 'w', 'A', 'P', 'N', 'K', 'T', 'M', 'u', 'g', '3', 'G', 'V', '5', 'L', 'j', '7', 'E', 'J', 'n', 'H', 'p',
|
||||
'W', 's', 'x', '4', 't', 'b', '8', 'h', 'a', 'Y', 'e', 'v', 'i', 'q', 'B', 'z', '6', 'r', 'k', 'C', 'y', '1', '2',
|
||||
'm', 'U', 'S', 'D', 'Q', 'X', '9', 'R', 'd', 'o', 'Z', 'f',
|
||||
];
|
||||
use crate::bilibili::{MIXIN_KEY, Validate, VideoInfo};
|
||||
|
||||
pub struct Video<'a> {
|
||||
client: &'a BiliClient,
|
||||
pub aid: String,
|
||||
pub bvid: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, serde::Deserialize)]
|
||||
pub struct Tag {
|
||||
pub tag_name: String,
|
||||
}
|
||||
|
||||
impl serde::Serialize for Tag {
|
||||
fn serialize<S>(&self, serializer: S) -> core::result::Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
serializer.serialize_str(&self.tag_name)
|
||||
}
|
||||
}
|
||||
#[derive(Debug, serde::Deserialize, Default)]
|
||||
pub struct PageInfo {
|
||||
pub cid: i64,
|
||||
@@ -59,8 +36,7 @@ pub struct Dimension {
|
||||
|
||||
impl<'a> Video<'a> {
|
||||
pub fn new(client: &'a BiliClient, bvid: String) -> Self {
|
||||
let aid = bvid_to_aid(&bvid).to_string();
|
||||
Self { client, aid, bvid }
|
||||
Self { client, bvid }
|
||||
}
|
||||
|
||||
/// 直接调用视频信息接口获取详细的视频信息,视频信息中包含了视频的分页信息
|
||||
@@ -69,7 +45,7 @@ impl<'a> Video<'a> {
|
||||
.client
|
||||
.request(Method::GET, "https://api.bilibili.com/x/web-interface/view")
|
||||
.await
|
||||
.query(&[("aid", &self.aid), ("bvid", &self.bvid)])
|
||||
.query(&[("bvid", &self.bvid)])
|
||||
.send()
|
||||
.await?
|
||||
.error_for_status()?
|
||||
@@ -79,13 +55,13 @@ impl<'a> Video<'a> {
|
||||
Ok(serde_json::from_value(res["data"].take())?)
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
#[allow(dead_code)]
|
||||
pub async fn get_pages(&self) -> Result<Vec<PageInfo>> {
|
||||
let mut res = self
|
||||
.client
|
||||
.request(Method::GET, "https://api.bilibili.com/x/player/pagelist")
|
||||
.await
|
||||
.query(&[("aid", &self.aid), ("bvid", &self.bvid)])
|
||||
.query(&[("bvid", &self.bvid)])
|
||||
.send()
|
||||
.await?
|
||||
.error_for_status()?
|
||||
@@ -95,24 +71,29 @@ impl<'a> Video<'a> {
|
||||
Ok(serde_json::from_value(res["data"].take())?)
|
||||
}
|
||||
|
||||
pub async fn get_tags(&self) -> Result<Vec<Tag>> {
|
||||
let mut res = self
|
||||
pub async fn get_tags(&self) -> Result<Vec<String>> {
|
||||
let res = self
|
||||
.client
|
||||
.request(Method::GET, "https://api.bilibili.com/x/web-interface/view/detail/tag")
|
||||
.await
|
||||
.query(&[("aid", &self.aid), ("bvid", &self.bvid)])
|
||||
.query(&[("bvid", &self.bvid)])
|
||||
.send()
|
||||
.await?
|
||||
.error_for_status()?
|
||||
.json::<serde_json::Value>()
|
||||
.await?
|
||||
.validate()?;
|
||||
Ok(serde_json::from_value(res["data"].take())?)
|
||||
Ok(res["data"]
|
||||
.as_array()
|
||||
.context("tags is not an array")?
|
||||
.iter()
|
||||
.filter_map(|v| v["tag_name"].as_str().map(String::from))
|
||||
.collect())
|
||||
}
|
||||
|
||||
pub async fn get_danmaku_writer(&self, page: &'a PageInfo) -> Result<DanmakuWriter> {
|
||||
pub async fn get_danmaku_writer(&self, page: &'a PageInfo) -> Result<DanmakuWriter<'a>> {
|
||||
let tasks = FuturesUnordered::new();
|
||||
for i in 1..=(page.duration + 359) / 360 {
|
||||
for i in 1..=page.duration.div_ceil(360) {
|
||||
tasks.push(self.get_danmaku_segment(page, i as i64));
|
||||
}
|
||||
let result: Vec<Vec<DanmakuElem>> = tasks.try_collect().await?;
|
||||
@@ -148,7 +129,7 @@ impl<'a> Video<'a> {
|
||||
.await
|
||||
.query(&encoded_query(
|
||||
vec![
|
||||
("avid", self.aid.as_str()),
|
||||
("bvid", self.bvid.as_str()),
|
||||
("cid", page.cid.to_string().as_str()),
|
||||
("qn", "127"),
|
||||
("otype", "json"),
|
||||
@@ -172,7 +153,7 @@ impl<'a> Video<'a> {
|
||||
.request(Method::GET, "https://api.bilibili.com/x/player/wbi/v2")
|
||||
.await
|
||||
.query(&encoded_query(
|
||||
vec![("cid", &page.cid.to_string()), ("bvid", &self.bvid), ("aid", &self.aid)],
|
||||
vec![("cid", &page.cid.to_string()), ("bvid", &self.bvid)],
|
||||
MIXIN_KEY.load().as_deref(),
|
||||
))
|
||||
.send()
|
||||
@@ -182,14 +163,18 @@ impl<'a> Video<'a> {
|
||||
.await?
|
||||
.validate()?;
|
||||
// 接口返回的信息,包含了一系列的字幕,每个字幕包含了字幕的语言和 json 下载地址
|
||||
let subtitles_info: SubTitlesInfo = serde_json::from_value(res["data"]["subtitle"].take())?;
|
||||
let tasks = subtitles_info
|
||||
.subtitles
|
||||
.into_iter()
|
||||
.filter(|v| !v.is_ai_sub())
|
||||
.map(|v| self.get_subtitle(v))
|
||||
.collect::<FuturesUnordered<_>>();
|
||||
tasks.try_collect().await
|
||||
match serde_json::from_value::<Option<SubTitlesInfo>>(res["data"]["subtitle"].take())? {
|
||||
Some(subtitles_info) => {
|
||||
let tasks = subtitles_info
|
||||
.subtitles
|
||||
.into_iter()
|
||||
.filter(|v| !v.is_ai_sub())
|
||||
.map(|v| self.get_subtitle(v))
|
||||
.collect::<FuturesUnordered<_>>();
|
||||
tasks.try_collect().await
|
||||
}
|
||||
None => Ok(vec![]),
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_subtitle(&self, info: SubTitleInfo) -> Result<SubTitle> {
|
||||
@@ -206,26 +191,3 @@ impl<'a> Video<'a> {
|
||||
Ok(SubTitle { lan: info.lan, body })
|
||||
}
|
||||
}
|
||||
|
||||
fn bvid_to_aid(bvid: &str) -> u64 {
|
||||
let mut bvid = bvid.chars().collect::<Vec<_>>();
|
||||
(bvid[3], bvid[9]) = (bvid[9], bvid[3]);
|
||||
(bvid[4], bvid[7]) = (bvid[7], bvid[4]);
|
||||
let mut tmp = 0u64;
|
||||
for char in bvid.into_iter().skip(3) {
|
||||
let idx = DATA.iter().position(|&x| x == char).expect("invalid bvid");
|
||||
tmp = tmp * BASE + idx as u64;
|
||||
}
|
||||
(tmp & MASK_CODE) ^ XOR_CODE
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_bvid_to_aid() {
|
||||
assert_eq!(bvid_to_aid("BV1Tr421n746"), 1401752220u64);
|
||||
assert_eq!(bvid_to_aid("BV1sH4y1s7fe"), 1051892992u64);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use anyhow::{Context, Result, anyhow};
|
||||
use async_stream::try_stream;
|
||||
use futures::Stream;
|
||||
use serde_json::Value;
|
||||
|
||||
@@ -1,7 +1,12 @@
|
||||
use std::borrow::Cow;
|
||||
use std::sync::LazyLock;
|
||||
|
||||
use clap::Parser;
|
||||
|
||||
pub static ARGS: LazyLock<Args> = LazyLock::new(Args::parse);
|
||||
|
||||
#[derive(Parser)]
|
||||
#[command(name = "Bili-Sync", version = version(), about, long_about = None)]
|
||||
#[command(name = "Bili-Sync", version = detail_version(), about, long_about = None)]
|
||||
pub struct Args {
|
||||
#[arg(short, long, env = "SCAN_ONLY")]
|
||||
pub scan_only: bool,
|
||||
@@ -14,19 +19,22 @@ mod built_info {
|
||||
include!(concat!(env!("OUT_DIR"), "/built.rs"));
|
||||
}
|
||||
|
||||
fn version() -> String {
|
||||
let version = if let (Some(git_version), Some(git_dirty)) = (built_info::GIT_VERSION, built_info::GIT_DIRTY) {
|
||||
format!("{}{}", git_version, if git_dirty { "-dirty" } else { "" })
|
||||
pub fn version() -> Cow<'static, str> {
|
||||
if let (Some(git_version), Some(git_dirty)) = (built_info::GIT_VERSION, built_info::GIT_DIRTY) {
|
||||
Cow::Owned(format!("{}{}", git_version, if git_dirty { "-dirty" } else { "" }))
|
||||
} else {
|
||||
built_info::PKG_VERSION.to_owned()
|
||||
};
|
||||
Cow::Borrowed(built_info::PKG_VERSION)
|
||||
}
|
||||
}
|
||||
|
||||
fn detail_version() -> String {
|
||||
format!(
|
||||
"{}
|
||||
Architecture: {}-{}
|
||||
Author: {}
|
||||
Built Time: {}
|
||||
Rustc Version: {}",
|
||||
version,
|
||||
version(),
|
||||
built_info::CFG_OS,
|
||||
built_info::CFG_TARGET_ARCH,
|
||||
built_info::PKG_AUTHORS,
|
||||
129
crates/bili_sync/src/config/current.rs
Normal file
129
crates/bili_sync/src/config/current.rs
Normal file
@@ -0,0 +1,129 @@
|
||||
use std::path::PathBuf;
|
||||
use std::sync::LazyLock;
|
||||
|
||||
use anyhow::{Result, bail};
|
||||
use sea_orm::DatabaseConnection;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use validator::Validate;
|
||||
|
||||
use crate::bilibili::{Credential, DanmakuOption, FilterOption};
|
||||
use crate::config::LegacyConfig;
|
||||
use crate::config::default::{default_auth_token, default_bind_address, default_time_format};
|
||||
use crate::config::item::{ConcurrentLimit, NFOTimeType};
|
||||
use crate::utils::model::{load_db_config, save_db_config};
|
||||
|
||||
pub static CONFIG_DIR: LazyLock<PathBuf> =
|
||||
LazyLock::new(|| dirs::config_dir().expect("No config path found").join("bili-sync"));
|
||||
|
||||
#[derive(Serialize, Deserialize, Validate, Clone)]
|
||||
pub struct Config {
|
||||
pub auth_token: String,
|
||||
pub bind_address: String,
|
||||
pub credential: Credential,
|
||||
pub filter_option: FilterOption,
|
||||
pub danmaku_option: DanmakuOption,
|
||||
pub video_name: String,
|
||||
pub page_name: String,
|
||||
pub interval: u64,
|
||||
pub upper_path: PathBuf,
|
||||
pub nfo_time_type: NFOTimeType,
|
||||
pub concurrent_limit: ConcurrentLimit,
|
||||
pub time_format: String,
|
||||
pub cdn_sorting: bool,
|
||||
pub version: u64,
|
||||
}
|
||||
|
||||
impl Config {
|
||||
pub async fn load_from_database(connection: &DatabaseConnection) -> Result<Option<Result<Self>>> {
|
||||
load_db_config(connection).await
|
||||
}
|
||||
|
||||
pub async fn save_to_database(&self, connection: &DatabaseConnection) -> Result<()> {
|
||||
save_db_config(self, connection).await
|
||||
}
|
||||
|
||||
pub fn check(&self) -> Result<()> {
|
||||
let mut errors = Vec::new();
|
||||
if !self.upper_path.is_absolute() {
|
||||
errors.push("up 主头像保存的路径应为绝对路径");
|
||||
}
|
||||
if self.video_name.is_empty() {
|
||||
errors.push("未设置 video_name 模板");
|
||||
}
|
||||
if self.page_name.is_empty() {
|
||||
errors.push("未设置 page_name 模板");
|
||||
}
|
||||
let credential = &self.credential;
|
||||
if credential.sessdata.is_empty()
|
||||
|| credential.bili_jct.is_empty()
|
||||
|| credential.buvid3.is_empty()
|
||||
|| credential.dedeuserid.is_empty()
|
||||
|| credential.ac_time_value.is_empty()
|
||||
{
|
||||
errors.push("Credential 信息不完整,请确保填写完整");
|
||||
}
|
||||
if !(self.concurrent_limit.video > 0 && self.concurrent_limit.page > 0) {
|
||||
errors.push("video 和 page 允许的并发数必须大于 0");
|
||||
}
|
||||
if !errors.is_empty() {
|
||||
bail!(
|
||||
errors
|
||||
.into_iter()
|
||||
.map(|e| format!("- {}", e))
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n")
|
||||
);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(super) fn test_default() -> Self {
|
||||
Self {
|
||||
cdn_sorting: true,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Config {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
auth_token: default_auth_token(),
|
||||
bind_address: default_bind_address(),
|
||||
credential: Credential::default(),
|
||||
filter_option: FilterOption::default(),
|
||||
danmaku_option: DanmakuOption::default(),
|
||||
video_name: "{{title}}".to_owned(),
|
||||
page_name: "{{bvid}}".to_owned(),
|
||||
interval: 1200,
|
||||
upper_path: CONFIG_DIR.join("upper_face"),
|
||||
nfo_time_type: NFOTimeType::FavTime,
|
||||
concurrent_limit: ConcurrentLimit::default(),
|
||||
time_format: default_time_format(),
|
||||
cdn_sorting: false,
|
||||
version: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<LegacyConfig> for Config {
|
||||
fn from(legacy: LegacyConfig) -> Self {
|
||||
Self {
|
||||
auth_token: legacy.auth_token,
|
||||
bind_address: legacy.bind_address,
|
||||
credential: legacy.credential,
|
||||
filter_option: legacy.filter_option,
|
||||
danmaku_option: legacy.danmaku_option,
|
||||
video_name: legacy.video_name,
|
||||
page_name: legacy.page_name,
|
||||
interval: legacy.interval,
|
||||
upper_path: legacy.upper_path,
|
||||
nfo_time_type: legacy.nfo_time_type,
|
||||
concurrent_limit: legacy.concurrent_limit,
|
||||
time_format: legacy.time_format,
|
||||
cdn_sorting: legacy.cdn_sorting,
|
||||
version: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
18
crates/bili_sync/src/config/default.rs
Normal file
18
crates/bili_sync/src/config/default.rs
Normal file
@@ -0,0 +1,18 @@
|
||||
use rand::seq::IndexedRandom;
|
||||
|
||||
pub(super) fn default_time_format() -> String {
|
||||
"%Y-%m-%d".to_string()
|
||||
}
|
||||
|
||||
/// 默认的 auth_token 实现,生成随机 16 位字符串
|
||||
pub(super) fn default_auth_token() -> String {
|
||||
let byte_choices = b"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!@#$%^&*()_+-=";
|
||||
let mut rng = rand::rng();
|
||||
(0..16)
|
||||
.map(|_| *(byte_choices.choose(&mut rng).expect("choose byte failed")) as char)
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub(super) fn default_bind_address() -> String {
|
||||
"0.0.0.0:12345".to_string()
|
||||
}
|
||||
@@ -1,85 +0,0 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
use clap::Parser;
|
||||
use handlebars::handlebars_helper;
|
||||
use once_cell::sync::Lazy;
|
||||
|
||||
use crate::config::clap::Args;
|
||||
use crate::config::item::PathSafeTemplate;
|
||||
use crate::config::Config;
|
||||
|
||||
/// 全局的 CONFIG,可以从中读取配置信息
|
||||
pub static CONFIG: Lazy<Config> = Lazy::new(load_config);
|
||||
|
||||
/// 全局的 TEMPLATE,用来渲染 video_name 和 page_name 模板
|
||||
pub static TEMPLATE: Lazy<handlebars::Handlebars> = Lazy::new(|| {
|
||||
let mut handlebars = handlebars::Handlebars::new();
|
||||
handlebars_helper!(truncate: |s: String, len: usize| {
|
||||
if s.chars().count() > len {
|
||||
s.chars().take(len).collect::<String>()
|
||||
} else {
|
||||
s.to_string()
|
||||
}
|
||||
});
|
||||
handlebars.register_helper("truncate", Box::new(truncate));
|
||||
handlebars
|
||||
.path_safe_register("video", &CONFIG.video_name)
|
||||
.expect("failed to register video template");
|
||||
handlebars
|
||||
.path_safe_register("page", &CONFIG.page_name)
|
||||
.expect("failed to register page template");
|
||||
handlebars
|
||||
});
|
||||
|
||||
/// 全局的 ARGS,用来解析命令行参数
|
||||
pub static ARGS: Lazy<Args> = Lazy::new(Args::parse);
|
||||
|
||||
/// 全局的 CONFIG_DIR,表示配置文件夹的路径
|
||||
pub static CONFIG_DIR: Lazy<PathBuf> =
|
||||
Lazy::new(|| dirs::config_dir().expect("No config path found").join("bili-sync"));
|
||||
|
||||
#[cfg(not(test))]
|
||||
fn load_config() -> Config {
|
||||
let config = Config::load().unwrap_or_else(|err| {
|
||||
if err
|
||||
.downcast_ref::<std::io::Error>()
|
||||
.is_none_or(|e| e.kind() != std::io::ErrorKind::NotFound)
|
||||
{
|
||||
panic!("加载配置文件失败,错误为: {err}");
|
||||
}
|
||||
warn!("配置文件不存在,使用默认配置...");
|
||||
Config::default()
|
||||
});
|
||||
info!("配置文件加载完毕,覆盖刷新原有配置");
|
||||
config.save().expect("保存默认配置时遇到错误");
|
||||
info!("检查配置文件..");
|
||||
config.check();
|
||||
info!("配置文件检查通过");
|
||||
config
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
fn load_config() -> Config {
|
||||
let credential = match (
|
||||
std::env::var("TEST_SESSDATA"),
|
||||
std::env::var("TEST_BILI_JCT"),
|
||||
std::env::var("TEST_BUVID3"),
|
||||
std::env::var("TEST_DEDEUSERID"),
|
||||
std::env::var("TEST_AC_TIME_VALUE"),
|
||||
) {
|
||||
(Ok(sessdata), Ok(bili_jct), Ok(buvid3), Ok(dedeuserid), Ok(ac_time_value)) => {
|
||||
Some(std::sync::Arc::new(crate::bilibili::Credential {
|
||||
sessdata,
|
||||
bili_jct,
|
||||
buvid3,
|
||||
dedeuserid,
|
||||
ac_time_value,
|
||||
}))
|
||||
}
|
||||
_ => None,
|
||||
};
|
||||
Config {
|
||||
credential: arc_swap::ArcSwapOption::from(credential),
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
91
crates/bili_sync/src/config/handlebar.rs
Normal file
91
crates/bili_sync/src/config/handlebar.rs
Normal file
@@ -0,0 +1,91 @@
|
||||
use std::sync::LazyLock;
|
||||
|
||||
use anyhow::Result;
|
||||
use handlebars::handlebars_helper;
|
||||
|
||||
use crate::config::versioned_cache::VersionedCache;
|
||||
use crate::config::{Config, PathSafeTemplate};
|
||||
|
||||
pub static TEMPLATE: LazyLock<VersionedCache<handlebars::Handlebars<'static>>> =
|
||||
LazyLock::new(|| VersionedCache::new(create_template).expect("Failed to create handlebars template"));
|
||||
|
||||
fn create_template(config: &Config) -> Result<handlebars::Handlebars<'static>> {
|
||||
let mut handlebars = handlebars::Handlebars::new();
|
||||
handlebars.register_helper("truncate", Box::new(truncate));
|
||||
handlebars.path_safe_register("video", config.video_name.to_owned())?;
|
||||
handlebars.path_safe_register("page", config.page_name.to_owned())?;
|
||||
Ok(handlebars)
|
||||
}
|
||||
|
||||
handlebars_helper!(truncate: |s: String, len: usize| {
|
||||
if s.chars().count() > len {
|
||||
s.chars().take(len).collect::<String>()
|
||||
} else {
|
||||
s.to_string()
|
||||
}
|
||||
});
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use serde_json::json;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_template_usage() {
|
||||
let mut template = handlebars::Handlebars::new();
|
||||
template.register_helper("truncate", Box::new(truncate));
|
||||
let _ = template.path_safe_register("video", "test{{bvid}}test");
|
||||
let _ = template.path_safe_register("test_truncate", "哈哈,{{ truncate title 30 }}");
|
||||
let _ = template.path_safe_register("test_path_unix", "{{ truncate title 7 }}/test/a");
|
||||
let _ = template.path_safe_register("test_path_windows", r"{{ truncate title 7 }}\\test\\a");
|
||||
#[cfg(not(windows))]
|
||||
{
|
||||
assert_eq!(
|
||||
template
|
||||
.path_safe_render("test_path_unix", &json!({"title": "关注/永雏塔菲喵"}))
|
||||
.unwrap(),
|
||||
"关注_永雏塔菲/test/a"
|
||||
);
|
||||
assert_eq!(
|
||||
template
|
||||
.path_safe_render("test_path_windows", &json!({"title": "关注/永雏塔菲喵"}))
|
||||
.unwrap(),
|
||||
"关注_永雏塔菲_test_a"
|
||||
);
|
||||
}
|
||||
#[cfg(windows)]
|
||||
{
|
||||
assert_eq!(
|
||||
template
|
||||
.path_safe_render("test_path_unix", &json!({"title": "关注/永雏塔菲喵"}))
|
||||
.unwrap(),
|
||||
"关注_永雏塔菲_test_a"
|
||||
);
|
||||
assert_eq!(
|
||||
template
|
||||
.path_safe_render("test_path_windows", &json!({"title": "关注/永雏塔菲喵"}))
|
||||
.unwrap(),
|
||||
r"关注_永雏塔菲\\test\\a"
|
||||
);
|
||||
}
|
||||
assert_eq!(
|
||||
template
|
||||
.path_safe_render("video", &json!({"bvid": "BV1b5411h7g7"}))
|
||||
.unwrap(),
|
||||
"testBV1b5411h7g7test"
|
||||
);
|
||||
assert_eq!(
|
||||
template
|
||||
.path_safe_render(
|
||||
"test_truncate",
|
||||
&json!({"title": "你说得对,但是 Rust 是由 Mozilla 自主研发的一款全新的编译期格斗游戏。\
|
||||
编译将发生在一个被称作「Cargo」的构建系统中。在这里,被引用的指针将被授予「生命周期」之力,导引对象安全。\
|
||||
你将扮演一位名为「Rustacean」的神秘角色,在与「Rustc」的搏斗中邂逅各种骨骼惊奇的傲娇报错。\
|
||||
征服她们、通过编译同时,逐步发掘「C++」程序崩溃的真相。"})
|
||||
)
|
||||
.unwrap(),
|
||||
"哈哈,你说得对,但是 Rust 是由 Mozilla 自主研发的一"
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -1,12 +1,8 @@
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use anyhow::Result;
|
||||
use serde::de::{Deserializer, MapAccess, Visitor};
|
||||
use serde::ser::SerializeMap;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::bilibili::{CollectionItem, CollectionType};
|
||||
use crate::utils::filenamify::filenamify;
|
||||
|
||||
/// 稍后再看的配置
|
||||
@@ -17,7 +13,7 @@ pub struct WatchLaterConfig {
|
||||
}
|
||||
|
||||
/// NFO 文件使用的时间类型
|
||||
#[derive(Serialize, Deserialize, Default)]
|
||||
#[derive(Serialize, Deserialize, Default, Clone)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum NFOTimeType {
|
||||
#[default]
|
||||
@@ -26,14 +22,33 @@ pub enum NFOTimeType {
|
||||
}
|
||||
|
||||
/// 并发下载相关的配置
|
||||
#[derive(Serialize, Deserialize)]
|
||||
#[derive(Serialize, Deserialize, Clone)]
|
||||
pub struct ConcurrentLimit {
|
||||
pub video: usize,
|
||||
pub page: usize,
|
||||
pub rate_limit: Option<RateLimit>,
|
||||
#[serde(default)]
|
||||
pub download: ConcurrentDownloadLimit,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
#[derive(Serialize, Deserialize, Clone)]
|
||||
pub struct ConcurrentDownloadLimit {
|
||||
pub enable: bool,
|
||||
pub concurrency: usize,
|
||||
pub threshold: u64,
|
||||
}
|
||||
|
||||
impl Default for ConcurrentDownloadLimit {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
enable: true,
|
||||
concurrency: 4,
|
||||
threshold: 20 * (1 << 20), // 20 MB
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone)]
|
||||
pub struct RateLimit {
|
||||
pub limit: usize,
|
||||
pub duration: u64,
|
||||
@@ -49,18 +64,20 @@ impl Default for ConcurrentLimit {
|
||||
limit: 4,
|
||||
duration: 250,
|
||||
}),
|
||||
download: ConcurrentDownloadLimit::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub trait PathSafeTemplate {
|
||||
fn path_safe_register(&mut self, name: &'static str, template: &'static str) -> Result<()>;
|
||||
fn path_safe_register(&mut self, name: &'static str, template: impl Into<String>) -> Result<()>;
|
||||
fn path_safe_render(&self, name: &'static str, data: &serde_json::Value) -> Result<String>;
|
||||
}
|
||||
|
||||
/// 通过将模板字符串中的分隔符替换为自定义的字符串,使得模板字符串中的分隔符得以保留
|
||||
impl PathSafeTemplate for handlebars::Handlebars<'_> {
|
||||
fn path_safe_register(&mut self, name: &'static str, template: &'static str) -> Result<()> {
|
||||
fn path_safe_register(&mut self, name: &'static str, template: impl Into<String>) -> Result<()> {
|
||||
let template = template.into();
|
||||
Ok(self.register_template_string(name, template.replace(std::path::MAIN_SEPARATOR_STR, "__SEP__"))?)
|
||||
}
|
||||
|
||||
@@ -68,72 +85,3 @@ impl PathSafeTemplate for handlebars::Handlebars<'_> {
|
||||
Ok(filenamify(&self.render(name, data)?).replace("__SEP__", std::path::MAIN_SEPARATOR_STR))
|
||||
}
|
||||
}
|
||||
/* 后面是用于自定义 Collection 的序列化、反序列化的样板代码 */
|
||||
pub(super) fn serialize_collection_list<S>(
|
||||
collection_list: &HashMap<CollectionItem, PathBuf>,
|
||||
serializer: S,
|
||||
) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
let mut map = serializer.serialize_map(Some(collection_list.len()))?;
|
||||
for (k, v) in collection_list {
|
||||
let prefix = match k.collection_type {
|
||||
CollectionType::Series => "series",
|
||||
CollectionType::Season => "season",
|
||||
};
|
||||
map.serialize_entry(&[prefix, &k.mid, &k.sid].join(":"), v)?;
|
||||
}
|
||||
map.end()
|
||||
}
|
||||
|
||||
pub(super) fn deserialize_collection_list<'de, D>(deserializer: D) -> Result<HashMap<CollectionItem, PathBuf>, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
struct CollectionListVisitor;
|
||||
|
||||
impl<'de> Visitor<'de> for CollectionListVisitor {
|
||||
type Value = HashMap<CollectionItem, PathBuf>;
|
||||
|
||||
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
formatter.write_str("a map of collection list")
|
||||
}
|
||||
|
||||
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
|
||||
where
|
||||
A: MapAccess<'de>,
|
||||
{
|
||||
let mut collection_list = HashMap::new();
|
||||
while let Some((key, value)) = map.next_entry::<String, PathBuf>()? {
|
||||
let collection_item = match key.split(':').collect::<Vec<&str>>().as_slice() {
|
||||
[prefix, mid, sid] => {
|
||||
let collection_type = match *prefix {
|
||||
"series" => CollectionType::Series,
|
||||
"season" => CollectionType::Season,
|
||||
_ => {
|
||||
return Err(serde::de::Error::custom(
|
||||
"invalid collection type, should be series or season",
|
||||
))
|
||||
}
|
||||
};
|
||||
CollectionItem {
|
||||
mid: mid.to_string(),
|
||||
sid: sid.to_string(),
|
||||
collection_type,
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
return Err(serde::de::Error::custom(
|
||||
"invalid collection key, should be series:mid:sid or season:mid:sid",
|
||||
))
|
||||
}
|
||||
};
|
||||
collection_list.insert(collection_item, value);
|
||||
}
|
||||
Ok(collection_list)
|
||||
}
|
||||
}
|
||||
|
||||
deserializer.deserialize_map(CollectionListVisitor)
|
||||
}
|
||||
|
||||
134
crates/bili_sync/src/config/legacy.rs
Normal file
134
crates/bili_sync/src/config/legacy.rs
Normal file
@@ -0,0 +1,134 @@
|
||||
use std::collections::HashMap;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use anyhow::Result;
|
||||
use sea_orm::DatabaseConnection;
|
||||
use serde::de::{Deserializer, MapAccess, Visitor};
|
||||
use serde::ser::SerializeMap;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::bilibili::{CollectionItem, CollectionType, Credential, DanmakuOption, FilterOption};
|
||||
use crate::config::Config;
|
||||
use crate::config::default::{default_auth_token, default_bind_address, default_time_format};
|
||||
use crate::config::item::{ConcurrentLimit, NFOTimeType, WatchLaterConfig};
|
||||
use crate::utils::model::migrate_legacy_config;
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct LegacyConfig {
|
||||
#[serde(default = "default_auth_token")]
|
||||
pub auth_token: String,
|
||||
#[serde(default = "default_bind_address")]
|
||||
pub bind_address: String,
|
||||
pub credential: Credential,
|
||||
pub filter_option: FilterOption,
|
||||
#[serde(default)]
|
||||
pub danmaku_option: DanmakuOption,
|
||||
pub favorite_list: HashMap<String, PathBuf>,
|
||||
#[serde(
|
||||
default,
|
||||
serialize_with = "serialize_collection_list",
|
||||
deserialize_with = "deserialize_collection_list"
|
||||
)]
|
||||
pub collection_list: HashMap<CollectionItem, PathBuf>,
|
||||
#[serde(default)]
|
||||
pub submission_list: HashMap<String, PathBuf>,
|
||||
#[serde(default)]
|
||||
pub watch_later: WatchLaterConfig,
|
||||
pub video_name: String,
|
||||
pub page_name: String,
|
||||
pub interval: u64,
|
||||
pub upper_path: PathBuf,
|
||||
#[serde(default)]
|
||||
pub nfo_time_type: NFOTimeType,
|
||||
#[serde(default)]
|
||||
pub concurrent_limit: ConcurrentLimit,
|
||||
#[serde(default = "default_time_format")]
|
||||
pub time_format: String,
|
||||
#[serde(default)]
|
||||
pub cdn_sorting: bool,
|
||||
}
|
||||
|
||||
impl LegacyConfig {
|
||||
async fn load_from_file(path: &Path) -> Result<Self> {
|
||||
let legacy_config_str = tokio::fs::read_to_string(path).await?;
|
||||
Ok(toml::from_str(&legacy_config_str)?)
|
||||
}
|
||||
|
||||
pub async fn migrate_from_file(path: &Path, connection: &DatabaseConnection) -> Result<Config> {
|
||||
let legacy_config = Self::load_from_file(path).await?;
|
||||
migrate_legacy_config(&legacy_config, connection).await?;
|
||||
Ok(legacy_config.into())
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
后面是用于自定义 Collection 的序列化、反序列化的样板代码
|
||||
*/
|
||||
pub(super) fn serialize_collection_list<S>(
|
||||
collection_list: &HashMap<CollectionItem, PathBuf>,
|
||||
serializer: S,
|
||||
) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
let mut map = serializer.serialize_map(Some(collection_list.len()))?;
|
||||
for (k, v) in collection_list {
|
||||
let prefix = match k.collection_type {
|
||||
CollectionType::Series => "series",
|
||||
CollectionType::Season => "season",
|
||||
};
|
||||
map.serialize_entry(&[prefix, &k.mid, &k.sid].join(":"), v)?;
|
||||
}
|
||||
map.end()
|
||||
}
|
||||
|
||||
pub(super) fn deserialize_collection_list<'de, D>(deserializer: D) -> Result<HashMap<CollectionItem, PathBuf>, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
struct CollectionListVisitor;
|
||||
|
||||
impl<'de> Visitor<'de> for CollectionListVisitor {
|
||||
type Value = HashMap<CollectionItem, PathBuf>;
|
||||
|
||||
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
formatter.write_str("a map of collection list")
|
||||
}
|
||||
|
||||
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
|
||||
where
|
||||
A: MapAccess<'de>,
|
||||
{
|
||||
let mut collection_list = HashMap::new();
|
||||
while let Some((key, value)) = map.next_entry::<String, PathBuf>()? {
|
||||
let collection_item = match key.split(':').collect::<Vec<&str>>().as_slice() {
|
||||
[prefix, mid, sid] => {
|
||||
let collection_type = match *prefix {
|
||||
"series" => CollectionType::Series,
|
||||
"season" => CollectionType::Season,
|
||||
_ => {
|
||||
return Err(serde::de::Error::custom(
|
||||
"invalid collection type, should be series or season",
|
||||
));
|
||||
}
|
||||
};
|
||||
CollectionItem {
|
||||
mid: mid.to_string(),
|
||||
sid: sid.to_string(),
|
||||
collection_type,
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
return Err(serde::de::Error::custom(
|
||||
"invalid collection key, should be series:mid:sid or season:mid:sid",
|
||||
));
|
||||
}
|
||||
};
|
||||
collection_list.insert(collection_item, value);
|
||||
}
|
||||
Ok(collection_list)
|
||||
}
|
||||
}
|
||||
|
||||
deserializer.deserialize_map(CollectionListVisitor)
|
||||
}
|
||||
@@ -1,170 +1,16 @@
|
||||
use std::borrow::Cow;
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::Result;
|
||||
use arc_swap::ArcSwapOption;
|
||||
use rand::seq::SliceRandom;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
mod clap;
|
||||
mod global;
|
||||
mod args;
|
||||
mod current;
|
||||
mod default;
|
||||
mod handlebar;
|
||||
mod item;
|
||||
mod legacy;
|
||||
mod versioned_cache;
|
||||
mod versioned_config;
|
||||
|
||||
use crate::bilibili::{CollectionItem, Credential, DanmakuOption, FilterOption};
|
||||
pub use crate::config::global::{ARGS, CONFIG, CONFIG_DIR, TEMPLATE};
|
||||
use crate::config::item::{deserialize_collection_list, serialize_collection_list, ConcurrentLimit};
|
||||
pub use crate::config::item::{NFOTimeType, PathSafeTemplate, RateLimit, WatchLaterConfig};
|
||||
|
||||
fn default_time_format() -> String {
|
||||
"%Y-%m-%d".to_string()
|
||||
}
|
||||
|
||||
/// 默认的 auth_token 实现,生成随机 16 位字符串
|
||||
fn default_auth_token() -> Option<String> {
|
||||
let byte_choices = b"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!@#$%^&*()_+-=";
|
||||
let mut rng = rand::thread_rng();
|
||||
Some(
|
||||
(0..16)
|
||||
.map(|_| *(byte_choices.choose(&mut rng).expect("choose byte failed")) as char)
|
||||
.collect(),
|
||||
)
|
||||
}
|
||||
|
||||
fn default_bind_address() -> String {
|
||||
"0.0.0.0:12345".to_string()
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct Config {
|
||||
#[serde(default = "default_auth_token")]
|
||||
pub auth_token: Option<String>,
|
||||
#[serde(default = "default_bind_address")]
|
||||
pub bind_address: String,
|
||||
pub credential: ArcSwapOption<Credential>,
|
||||
pub filter_option: FilterOption,
|
||||
#[serde(default)]
|
||||
pub danmaku_option: DanmakuOption,
|
||||
pub favorite_list: HashMap<String, PathBuf>,
|
||||
#[serde(
|
||||
default,
|
||||
serialize_with = "serialize_collection_list",
|
||||
deserialize_with = "deserialize_collection_list"
|
||||
)]
|
||||
pub collection_list: HashMap<CollectionItem, PathBuf>,
|
||||
#[serde(default)]
|
||||
pub submission_list: HashMap<String, PathBuf>,
|
||||
#[serde(default)]
|
||||
pub watch_later: WatchLaterConfig,
|
||||
pub video_name: Cow<'static, str>,
|
||||
pub page_name: Cow<'static, str>,
|
||||
pub interval: u64,
|
||||
pub upper_path: PathBuf,
|
||||
#[serde(default)]
|
||||
pub nfo_time_type: NFOTimeType,
|
||||
#[serde(default)]
|
||||
pub concurrent_limit: ConcurrentLimit,
|
||||
#[serde(default = "default_time_format")]
|
||||
pub time_format: String,
|
||||
}
|
||||
|
||||
impl Default for Config {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
auth_token: default_auth_token(),
|
||||
bind_address: default_bind_address(),
|
||||
credential: ArcSwapOption::from(Some(Arc::new(Credential::default()))),
|
||||
filter_option: FilterOption::default(),
|
||||
danmaku_option: DanmakuOption::default(),
|
||||
favorite_list: HashMap::new(),
|
||||
collection_list: HashMap::new(),
|
||||
submission_list: HashMap::new(),
|
||||
watch_later: Default::default(),
|
||||
video_name: Cow::Borrowed("{{title}}"),
|
||||
page_name: Cow::Borrowed("{{bvid}}"),
|
||||
interval: 1200,
|
||||
upper_path: CONFIG_DIR.join("upper_face"),
|
||||
nfo_time_type: NFOTimeType::FavTime,
|
||||
concurrent_limit: ConcurrentLimit::default(),
|
||||
time_format: default_time_format(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Config {
|
||||
pub fn save(&self) -> Result<()> {
|
||||
let config_path = CONFIG_DIR.join("config.toml");
|
||||
std::fs::create_dir_all(&*CONFIG_DIR)?;
|
||||
std::fs::write(config_path, toml::to_string_pretty(self)?)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(not(test))]
|
||||
fn load() -> Result<Self> {
|
||||
let config_path = CONFIG_DIR.join("config.toml");
|
||||
let config_content = std::fs::read_to_string(config_path)?;
|
||||
Ok(toml::from_str(&config_content)?)
|
||||
}
|
||||
|
||||
#[cfg(not(test))]
|
||||
pub fn check(&self) {
|
||||
let mut ok = true;
|
||||
if self.favorite_list.is_empty() && self.collection_list.is_empty() && !self.watch_later.enabled {
|
||||
ok = false;
|
||||
error!("没有配置任何需要扫描的内容,程序空转没有意义");
|
||||
}
|
||||
if self.watch_later.enabled && !self.watch_later.path.is_absolute() {
|
||||
error!(
|
||||
"稍后再看保存的路径应为绝对路径,检测到:{}",
|
||||
self.watch_later.path.display()
|
||||
);
|
||||
}
|
||||
for path in self.favorite_list.values() {
|
||||
if !path.is_absolute() {
|
||||
ok = false;
|
||||
error!("收藏夹保存的路径应为绝对路径,检测到: {}", path.display());
|
||||
}
|
||||
}
|
||||
if !self.upper_path.is_absolute() {
|
||||
ok = false;
|
||||
error!("up 主头像保存的路径应为绝对路径");
|
||||
}
|
||||
if self.video_name.is_empty() {
|
||||
ok = false;
|
||||
error!("未设置 video_name 模板");
|
||||
}
|
||||
if self.page_name.is_empty() {
|
||||
ok = false;
|
||||
error!("未设置 page_name 模板");
|
||||
}
|
||||
let credential = self.credential.load();
|
||||
match credential.as_deref() {
|
||||
Some(credential) => {
|
||||
if credential.sessdata.is_empty()
|
||||
|| credential.bili_jct.is_empty()
|
||||
|| credential.buvid3.is_empty()
|
||||
|| credential.dedeuserid.is_empty()
|
||||
|| credential.ac_time_value.is_empty()
|
||||
{
|
||||
ok = false;
|
||||
error!("Credential 信息不完整,请确保填写完整");
|
||||
}
|
||||
}
|
||||
None => {
|
||||
ok = false;
|
||||
error!("未设置 Credential 信息");
|
||||
}
|
||||
}
|
||||
if !(self.concurrent_limit.video > 0 && self.concurrent_limit.page > 0) {
|
||||
ok = false;
|
||||
error!("video 和 page 允许的并发数必须大于 0");
|
||||
}
|
||||
if !ok {
|
||||
panic!(
|
||||
"位于 {} 的配置文件不合法,请参考提示信息修复后继续运行",
|
||||
CONFIG_DIR.join("config.toml").display()
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
pub use crate::config::args::{ARGS, version};
|
||||
pub use crate::config::current::{CONFIG_DIR, Config};
|
||||
pub use crate::config::handlebar::TEMPLATE;
|
||||
pub use crate::config::item::{NFOTimeType, PathSafeTemplate, RateLimit};
|
||||
pub use crate::config::legacy::LegacyConfig;
|
||||
pub use crate::config::versioned_cache::VersionedCache;
|
||||
pub use crate::config::versioned_config::VersionedConfig;
|
||||
|
||||
54
crates/bili_sync/src/config/versioned_cache.rs
Normal file
54
crates/bili_sync/src/config/versioned_cache.rs
Normal file
@@ -0,0 +1,54 @@
|
||||
use std::sync::Arc;
|
||||
use std::sync::atomic::{AtomicU64, Ordering};
|
||||
|
||||
use anyhow::Result;
|
||||
use arc_swap::{ArcSwap, Guard};
|
||||
|
||||
use crate::config::{Config, VersionedConfig};
|
||||
|
||||
pub struct VersionedCache<T> {
|
||||
inner: ArcSwap<T>,
|
||||
version: AtomicU64,
|
||||
builder: fn(&Config) -> Result<T>,
|
||||
mutex: parking_lot::Mutex<()>,
|
||||
}
|
||||
|
||||
impl<T> VersionedCache<T> {
|
||||
pub fn new(builder: fn(&Config) -> Result<T>) -> Result<Self> {
|
||||
let current_config = VersionedConfig::get().load();
|
||||
let current_version = current_config.version;
|
||||
let initial_value = builder(¤t_config)?;
|
||||
Ok(Self {
|
||||
inner: ArcSwap::from_pointee(initial_value),
|
||||
version: AtomicU64::new(current_version),
|
||||
builder,
|
||||
mutex: parking_lot::Mutex::new(()),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn load(&self) -> Guard<Arc<T>> {
|
||||
self.reload_if_needed();
|
||||
self.inner.load()
|
||||
}
|
||||
|
||||
fn reload_if_needed(&self) {
|
||||
let current_config = VersionedConfig::get().load();
|
||||
let current_version = current_config.version;
|
||||
let version = self.version.load(Ordering::Relaxed);
|
||||
if version < current_version {
|
||||
let _lock = self.mutex.lock();
|
||||
if self.version.load(Ordering::Relaxed) >= current_version {
|
||||
return;
|
||||
}
|
||||
match (self.builder)(¤t_config) {
|
||||
Err(e) => {
|
||||
error!("Failed to rebuild versioned cache: {:?}", e);
|
||||
}
|
||||
Ok(new_value) => {
|
||||
self.inner.store(Arc::new(new_value));
|
||||
self.version.store(current_version, Ordering::Relaxed);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
120
crates/bili_sync/src/config/versioned_config.rs
Normal file
120
crates/bili_sync/src/config/versioned_config.rs
Normal file
@@ -0,0 +1,120 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::{Result, anyhow, bail};
|
||||
use arc_swap::{ArcSwap, Guard};
|
||||
use sea_orm::DatabaseConnection;
|
||||
use tokio::sync::OnceCell;
|
||||
|
||||
use crate::bilibili::Credential;
|
||||
use crate::config::{CONFIG_DIR, Config, LegacyConfig};
|
||||
|
||||
pub static VERSIONED_CONFIG: OnceCell<VersionedConfig> = OnceCell::const_new();
|
||||
|
||||
pub struct VersionedConfig {
|
||||
inner: ArcSwap<Config>,
|
||||
update_lock: tokio::sync::Mutex<()>,
|
||||
}
|
||||
|
||||
impl VersionedConfig {
|
||||
/// 初始化全局的 `VersionedConfig`,初始化失败或者已初始化过则返回错误
|
||||
pub async fn init(connection: &DatabaseConnection) -> Result<()> {
|
||||
let mut config = match Config::load_from_database(connection).await? {
|
||||
Some(Ok(config)) => config,
|
||||
Some(Err(e)) => bail!("解析数据库配置失败: {}", e),
|
||||
None => {
|
||||
let config = match LegacyConfig::migrate_from_file(&CONFIG_DIR.join("config.toml"), connection).await {
|
||||
Ok(config) => config,
|
||||
Err(e) => {
|
||||
if e.downcast_ref::<std::io::Error>()
|
||||
.is_none_or(|e| e.kind() != std::io::ErrorKind::NotFound)
|
||||
{
|
||||
bail!("未成功读取并迁移旧版本配置:{:#}", e);
|
||||
} else {
|
||||
let config = Config::default();
|
||||
warn!(
|
||||
"生成 auth_token:{},可使用该 token 登录 web UI,该信息仅在首次运行时打印",
|
||||
config.auth_token
|
||||
);
|
||||
config
|
||||
}
|
||||
}
|
||||
};
|
||||
config.save_to_database(connection).await?;
|
||||
config
|
||||
}
|
||||
};
|
||||
// version 本身不具有实际意义,仅用于并发更新时的版本控制,在初始化时可以直接清空
|
||||
config.version = 0;
|
||||
let versioned_config = VersionedConfig::new(config);
|
||||
VERSIONED_CONFIG
|
||||
.set(versioned_config)
|
||||
.map_err(|e| anyhow!("VERSIONED_CONFIG has already been initialized: {}", e))?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
/// 单元测试直接使用测试专用的配置即可
|
||||
pub fn get() -> &'static VersionedConfig {
|
||||
use std::sync::LazyLock;
|
||||
static TEST_CONFIG: LazyLock<VersionedConfig> = LazyLock::new(|| VersionedConfig::new(Config::test_default()));
|
||||
return &TEST_CONFIG;
|
||||
}
|
||||
|
||||
#[cfg(not(test))]
|
||||
/// 获取全局的 `VersionedConfig`,如果未初始化则会 panic
|
||||
pub fn get() -> &'static VersionedConfig {
|
||||
VERSIONED_CONFIG.get().expect("VERSIONED_CONFIG is not initialized")
|
||||
}
|
||||
|
||||
pub fn new(config: Config) -> Self {
|
||||
Self {
|
||||
inner: ArcSwap::from_pointee(config),
|
||||
update_lock: tokio::sync::Mutex::new(()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn load(&self) -> Guard<Arc<Config>> {
|
||||
self.inner.load()
|
||||
}
|
||||
|
||||
pub fn load_full(&self) -> Arc<Config> {
|
||||
self.inner.load_full()
|
||||
}
|
||||
|
||||
pub async fn update_credential(&self, new_credential: Credential, connection: &DatabaseConnection) -> Result<()> {
|
||||
// 确保更新内容与写入数据库的操作是原子性的
|
||||
let _lock = self.update_lock.lock().await;
|
||||
loop {
|
||||
let old_config = self.inner.load();
|
||||
let mut new_config = old_config.as_ref().clone();
|
||||
new_config.credential = new_credential.clone();
|
||||
new_config.version += 1;
|
||||
if Arc::ptr_eq(
|
||||
&old_config,
|
||||
&self.inner.compare_and_swap(&old_config, Arc::new(new_config)),
|
||||
) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
self.inner.load().save_to_database(connection).await
|
||||
}
|
||||
|
||||
/// 外部 API 会调用这个方法,如果更新失败直接返回错误
|
||||
pub async fn update(&self, mut new_config: Config, connection: &DatabaseConnection) -> Result<Arc<Config>> {
|
||||
let _lock = self.update_lock.lock().await;
|
||||
let old_config = self.inner.load();
|
||||
if old_config.version != new_config.version {
|
||||
bail!("配置版本不匹配,请刷新页面修改后重新提交");
|
||||
}
|
||||
new_config.version += 1;
|
||||
let new_config = Arc::new(new_config);
|
||||
if !Arc::ptr_eq(
|
||||
&old_config,
|
||||
&self.inner.compare_and_swap(&old_config, new_config.clone()),
|
||||
) {
|
||||
bail!("配置版本不匹配,请刷新页面修改后重新提交");
|
||||
}
|
||||
new_config.save_to_database(connection).await?;
|
||||
Ok(new_config)
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,10 @@
|
||||
use anyhow::Result;
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use bili_sync_migration::{Migrator, MigratorTrait};
|
||||
use sea_orm::{ConnectOptions, Database, DatabaseConnection};
|
||||
use sea_orm::sqlx::sqlite::{SqliteConnectOptions, SqliteJournalMode, SqliteSynchronous};
|
||||
use sea_orm::sqlx::{ConnectOptions as SqlxConnectOptions, Sqlite};
|
||||
use sea_orm::{ConnectOptions, Database, DatabaseConnection, SqlxSqliteConnector};
|
||||
|
||||
use crate::config::CONFIG_DIR;
|
||||
|
||||
@@ -11,10 +15,24 @@ fn database_url() -> String {
|
||||
async fn database_connection() -> Result<DatabaseConnection> {
|
||||
let mut option = ConnectOptions::new(database_url());
|
||||
option
|
||||
.max_connections(100)
|
||||
.max_connections(50)
|
||||
.min_connections(5)
|
||||
.acquire_timeout(std::time::Duration::from_secs(90));
|
||||
Ok(Database::connect(option).await?)
|
||||
.acquire_timeout(Duration::from_secs(90));
|
||||
let connect_option = option
|
||||
.get_url()
|
||||
.parse::<SqliteConnectOptions>()
|
||||
.context("Failed to parse database URL")?
|
||||
.disable_statement_logging()
|
||||
.busy_timeout(Duration::from_secs(90))
|
||||
.journal_mode(SqliteJournalMode::Wal)
|
||||
.synchronous(SqliteSynchronous::Normal)
|
||||
.optimize_on_close(true, None);
|
||||
Ok(SqlxSqliteConnector::from_sqlx_sqlite_pool(
|
||||
option
|
||||
.sqlx_pool_options::<Sqlite>()
|
||||
.connect_with(connect_option)
|
||||
.await?,
|
||||
))
|
||||
}
|
||||
|
||||
async fn migrate_database() -> Result<()> {
|
||||
@@ -25,7 +43,10 @@ async fn migrate_database() -> Result<()> {
|
||||
}
|
||||
|
||||
/// 进行数据库迁移并获取数据库连接,供外部使用
|
||||
pub async fn setup_database() -> DatabaseConnection {
|
||||
migrate_database().await.expect("数据库迁移失败");
|
||||
database_connection().await.expect("获取数据库连接失败")
|
||||
pub async fn setup_database() -> Result<DatabaseConnection> {
|
||||
tokio::fs::create_dir_all(CONFIG_DIR.as_path()).await.context(
|
||||
"Failed to create config directory. Please check if you have granted necessary permissions to your folder.",
|
||||
)?;
|
||||
migrate_database().await.context("Failed to migrate database")?;
|
||||
database_connection().await.context("Failed to connect to database")
|
||||
}
|
||||
|
||||
@@ -1,14 +1,18 @@
|
||||
use core::str;
|
||||
use std::io::SeekFrom;
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::{bail, ensure, Result};
|
||||
use anyhow::{Context, Result, bail, ensure};
|
||||
use futures::TryStreamExt;
|
||||
use reqwest::Method;
|
||||
use tokio::fs::{self, File};
|
||||
use tokio::io::AsyncWriteExt;
|
||||
use reqwest::{Method, header};
|
||||
use tokio::fs::{self, File, OpenOptions};
|
||||
use tokio::io::{AsyncSeekExt, AsyncWriteExt};
|
||||
use tokio::task::JoinSet;
|
||||
use tokio_util::io::StreamReader;
|
||||
|
||||
use crate::bilibili::Client;
|
||||
use crate::config::VersionedConfig;
|
||||
pub struct Downloader {
|
||||
client: Client,
|
||||
}
|
||||
@@ -22,29 +26,132 @@ impl Downloader {
|
||||
}
|
||||
|
||||
pub async fn fetch(&self, url: &str, path: &Path) -> Result<()> {
|
||||
if let Some(parent) = path.parent() {
|
||||
fs::create_dir_all(parent).await?;
|
||||
if VersionedConfig::get().load().concurrent_limit.download.enable {
|
||||
self.fetch_parallel(url, path).await
|
||||
} else {
|
||||
self.fetch_serial(url, path).await
|
||||
}
|
||||
let mut file = File::create(path).await?;
|
||||
}
|
||||
|
||||
async fn fetch_serial(&self, url: &str, path: &Path) -> Result<()> {
|
||||
let resp = self
|
||||
.client
|
||||
.request(Method::GET, url, None)
|
||||
.send()
|
||||
.await?
|
||||
.error_for_status()?;
|
||||
let expected = resp.content_length().unwrap_or_default();
|
||||
let expected = resp.header_content_length();
|
||||
if let Some(parent) = path.parent() {
|
||||
fs::create_dir_all(parent).await?;
|
||||
}
|
||||
let mut file = File::create(path).await?;
|
||||
let mut stream_reader = StreamReader::new(resp.bytes_stream().map_err(std::io::Error::other));
|
||||
let received = tokio::io::copy(&mut stream_reader, &mut file).await?;
|
||||
file.flush().await?;
|
||||
ensure!(
|
||||
received >= expected,
|
||||
"received {} bytes, expected {} bytes",
|
||||
received,
|
||||
expected
|
||||
);
|
||||
if let Some(expected) = expected {
|
||||
ensure!(
|
||||
received == expected,
|
||||
"downloaded bytes mismatch: expected {}, got {}",
|
||||
expected,
|
||||
received
|
||||
);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn fetch_parallel(&self, url: &str, path: &Path) -> Result<()> {
|
||||
let (concurrency, threshold) = {
|
||||
let config = VersionedConfig::get().load();
|
||||
(
|
||||
config.concurrent_limit.download.concurrency,
|
||||
config.concurrent_limit.download.threshold,
|
||||
)
|
||||
};
|
||||
let resp = self
|
||||
.client
|
||||
.request(Method::HEAD, url, None)
|
||||
.send()
|
||||
.await?
|
||||
.error_for_status()?;
|
||||
let file_size = resp.header_content_length().unwrap_or_default();
|
||||
let chunk_size = file_size / concurrency as u64;
|
||||
if resp
|
||||
.headers()
|
||||
.get(header::ACCEPT_RANGES)
|
||||
.is_none_or(|v| v.to_str().unwrap_or_default() == "none") // https://developer.mozilla.org/en-US/docs/Web/HTTP/Reference/Headers/Accept-Ranges#none
|
||||
|| chunk_size < threshold
|
||||
{
|
||||
return self.fetch_serial(url, path).await;
|
||||
}
|
||||
if let Some(parent) = path.parent() {
|
||||
fs::create_dir_all(parent).await?;
|
||||
}
|
||||
let file = File::create(path).await?;
|
||||
file.set_len(file_size).await?;
|
||||
drop(file);
|
||||
let mut tasks = JoinSet::new();
|
||||
let url = Arc::new(url.to_string());
|
||||
let path = Arc::new(path.to_path_buf());
|
||||
for i in 0..concurrency {
|
||||
let start = i as u64 * chunk_size;
|
||||
let end = if i == concurrency - 1 {
|
||||
file_size
|
||||
} else {
|
||||
start + chunk_size
|
||||
} - 1;
|
||||
let (url_clone, path_clone, client_clone) = (url.clone(), path.clone(), self.client.clone());
|
||||
tasks.spawn(async move {
|
||||
let mut file = OpenOptions::new().write(true).open(path_clone.as_ref()).await?;
|
||||
file.seek(SeekFrom::Start(start)).await?;
|
||||
let range_header = format!("bytes={}-{}", start, end);
|
||||
let resp = client_clone
|
||||
.request(Method::GET, &url_clone, None)
|
||||
.header(header::RANGE, &range_header)
|
||||
.send()
|
||||
.await?
|
||||
.error_for_status()?;
|
||||
if let Some(content_length) = resp.header_content_length() {
|
||||
ensure!(
|
||||
content_length == end - start + 1,
|
||||
"content length mismatch: expected {}, got {}",
|
||||
end - start + 1,
|
||||
content_length
|
||||
);
|
||||
}
|
||||
let mut stream_reader = StreamReader::new(resp.bytes_stream().map_err(std::io::Error::other));
|
||||
let received = tokio::io::copy(&mut stream_reader, &mut file).await?;
|
||||
file.flush().await?;
|
||||
ensure!(
|
||||
received == end - start + 1,
|
||||
"downloaded bytes mismatch: expected {}, got {}",
|
||||
end - start + 1,
|
||||
received,
|
||||
);
|
||||
Ok(())
|
||||
});
|
||||
}
|
||||
while let Some(res) = tasks.join_next().await {
|
||||
res??;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn fetch_with_fallback(&self, urls: &[&str], path: &Path) -> Result<()> {
|
||||
if urls.is_empty() {
|
||||
bail!("no urls provided");
|
||||
}
|
||||
let mut res = Ok(());
|
||||
for url in urls {
|
||||
match self.fetch(url, path).await {
|
||||
Ok(_) => return Ok(()),
|
||||
Err(err) => {
|
||||
res = Err(err);
|
||||
}
|
||||
}
|
||||
}
|
||||
res.context("failed to download file")
|
||||
}
|
||||
|
||||
pub async fn merge(&self, video_path: &Path, audio_path: &Path, output_path: &Path) -> Result<()> {
|
||||
let output = tokio::process::Command::new("ffmpeg")
|
||||
.args([
|
||||
@@ -54,14 +161,32 @@ impl Downloader {
|
||||
audio_path.to_string_lossy().as_ref(),
|
||||
"-c",
|
||||
"copy",
|
||||
"-strict",
|
||||
"unofficial",
|
||||
"-y",
|
||||
output_path.to_string_lossy().as_ref(),
|
||||
])
|
||||
.output()
|
||||
.await?;
|
||||
.await
|
||||
.context("failed to run ffmpeg")?;
|
||||
if !output.status.success() {
|
||||
bail!("ffmpeg error: {}", str::from_utf8(&output.stderr).unwrap_or("unknown"));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// reqwest.content_length() 居然指的是 body_size 而非 content-length header,没办法自己实现一下
|
||||
/// https://github.com/seanmonstar/reqwest/issues/1814
|
||||
trait ResponseExt {
|
||||
fn header_content_length(&self) -> Option<u64>;
|
||||
}
|
||||
|
||||
impl ResponseExt for reqwest::Response {
|
||||
fn header_content_length(&self) -> Option<u64> {
|
||||
self.headers()
|
||||
.get(header::CONTENT_LENGTH)
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.and_then(|s| s.parse::<u64>().ok())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -26,20 +26,25 @@ impl From<Result<ExecutionStatus>> for ExecutionStatus {
|
||||
match res {
|
||||
Ok(status) => status,
|
||||
Err(err) => {
|
||||
if let Some(error) = err.downcast_ref::<io::Error>() {
|
||||
let error_kind = error.kind();
|
||||
if error_kind == io::ErrorKind::PermissionDenied
|
||||
|| (error_kind == io::ErrorKind::Other
|
||||
&& error.get_ref().is_some_and(|e| {
|
||||
e.downcast_ref::<reqwest::Error>()
|
||||
.is_some_and(|e| e.is_decode() || e.is_body() || e.is_timeout())
|
||||
}))
|
||||
{
|
||||
return ExecutionStatus::Ignored(err);
|
||||
for cause in err.chain() {
|
||||
if let Some(io_err) = cause.downcast_ref::<io::Error>() {
|
||||
// 权限错误
|
||||
if io_err.kind() == io::ErrorKind::PermissionDenied {
|
||||
return ExecutionStatus::Ignored(err);
|
||||
}
|
||||
// 使用 io::Error 包裹的 reqwest::Error
|
||||
if io_err.kind() == io::ErrorKind::Other
|
||||
&& io_err.get_ref().is_some_and(|e| {
|
||||
e.downcast_ref::<reqwest::Error>().is_some_and(is_ignored_reqwest_error)
|
||||
})
|
||||
{
|
||||
return ExecutionStatus::Ignored(err);
|
||||
}
|
||||
}
|
||||
}
|
||||
if let Some(error) = err.downcast_ref::<reqwest::Error>() {
|
||||
if error.is_decode() || error.is_body() || error.is_timeout() {
|
||||
// 未包裹的 reqwest::Error
|
||||
if let Some(error) = cause.downcast_ref::<reqwest::Error>()
|
||||
&& is_ignored_reqwest_error(error)
|
||||
{
|
||||
return ExecutionStatus::Ignored(err);
|
||||
}
|
||||
}
|
||||
@@ -48,3 +53,7 @@ impl From<Result<ExecutionStatus>> for ExecutionStatus {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn is_ignored_reqwest_error(err: &reqwest::Error) -> bool {
|
||||
err.is_decode() || err.is_body() || err.is_timeout()
|
||||
}
|
||||
|
||||
@@ -12,32 +12,49 @@ mod task;
|
||||
mod utils;
|
||||
mod workflow;
|
||||
|
||||
use std::collections::VecDeque;
|
||||
use std::fmt::Debug;
|
||||
use std::future::Future;
|
||||
use std::sync::Arc;
|
||||
|
||||
use once_cell::sync::Lazy;
|
||||
use bilibili::BiliClient;
|
||||
use parking_lot::Mutex;
|
||||
use sea_orm::DatabaseConnection;
|
||||
use task::{http_server, video_downloader};
|
||||
use tokio_util::sync::CancellationToken;
|
||||
use tokio_util::task::TaskTracker;
|
||||
|
||||
use crate::config::{ARGS, CONFIG};
|
||||
use crate::api::{LogHelper, MAX_HISTORY_LOGS};
|
||||
use crate::config::{ARGS, VersionedConfig};
|
||||
use crate::database::setup_database;
|
||||
use crate::utils::init_logger;
|
||||
use crate::utils::signal::terminate;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() {
|
||||
init();
|
||||
let connection = Arc::new(setup_database().await);
|
||||
let (connection, log_writer) = init().await;
|
||||
let bili_client = Arc::new(BiliClient::new());
|
||||
|
||||
let token = CancellationToken::new();
|
||||
let tracker = TaskTracker::new();
|
||||
|
||||
spawn_task("HTTP 服务", http_server(connection.clone()), &tracker, token.clone());
|
||||
spawn_task("定时下载", video_downloader(connection), &tracker, token.clone());
|
||||
spawn_task(
|
||||
"HTTP 服务",
|
||||
http_server(connection.clone(), bili_client.clone(), log_writer),
|
||||
&tracker,
|
||||
token.clone(),
|
||||
);
|
||||
if !cfg!(debug_assertions) {
|
||||
spawn_task(
|
||||
"定时下载",
|
||||
video_downloader(connection.clone(), bili_client),
|
||||
&tracker,
|
||||
token.clone(),
|
||||
);
|
||||
}
|
||||
|
||||
tracker.close();
|
||||
handle_shutdown(tracker, token).await
|
||||
handle_shutdown(connection, tracker, token).await
|
||||
}
|
||||
|
||||
fn spawn_task(
|
||||
@@ -59,23 +76,38 @@ fn spawn_task(
|
||||
});
|
||||
}
|
||||
|
||||
/// 初始化日志系统,加载命令行参数和配置文件
|
||||
fn init() {
|
||||
Lazy::force(&ARGS);
|
||||
init_logger(&ARGS.log_level);
|
||||
Lazy::force(&CONFIG);
|
||||
/// 初始化日志系统、打印欢迎信息,初始化数据库连接和全局配置
|
||||
async fn init() -> (DatabaseConnection, LogHelper) {
|
||||
let (tx, _rx) = tokio::sync::broadcast::channel(30);
|
||||
let log_history = Arc::new(Mutex::new(VecDeque::with_capacity(MAX_HISTORY_LOGS + 1)));
|
||||
let log_writer = LogHelper::new(tx, log_history.clone());
|
||||
|
||||
init_logger(&ARGS.log_level, Some(log_writer.clone()));
|
||||
info!("欢迎使用 Bili-Sync,当前程序版本:{}", config::version());
|
||||
info!("项目地址:https://github.com/amtoaer/bili-sync");
|
||||
let connection = setup_database().await.expect("数据库初始化失败");
|
||||
info!("数据库初始化完成");
|
||||
VersionedConfig::init(&connection).await.expect("配置初始化失败");
|
||||
info!("配置初始化完成");
|
||||
|
||||
(connection, log_writer)
|
||||
}
|
||||
|
||||
async fn handle_shutdown(tracker: TaskTracker, token: CancellationToken) {
|
||||
async fn handle_shutdown(connection: DatabaseConnection, tracker: TaskTracker, token: CancellationToken) {
|
||||
tokio::select! {
|
||||
_ = tracker.wait() => {
|
||||
error!("所有任务均已终止,程序退出")
|
||||
error!("所有任务均已终止..")
|
||||
}
|
||||
_ = terminate() => {
|
||||
info!("接收到终止信号,正在终止任务..");
|
||||
info!("接收到终止信号,开始终止任务..");
|
||||
token.cancel();
|
||||
tracker.wait().await;
|
||||
info!("所有任务均已终止,程序退出");
|
||||
info!("所有任务均已终止..");
|
||||
}
|
||||
}
|
||||
info!("正在关闭数据库连接..");
|
||||
match connection.close().await {
|
||||
Ok(()) => info!("数据库连接已关闭,程序结束"),
|
||||
Err(e) => error!("关闭数据库连接时遇到错误:{:#},程序异常结束", e),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,61 +1,98 @@
|
||||
use std::collections::HashSet;
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use axum::extract::Request;
|
||||
use axum::http::{header, Uri};
|
||||
use axum::http::header;
|
||||
use axum::response::IntoResponse;
|
||||
use axum::routing::{get, post};
|
||||
use axum::{middleware, Extension, Router, ServiceExt};
|
||||
use axum::routing::get;
|
||||
use axum::{Extension, ServiceExt};
|
||||
use reqwest::StatusCode;
|
||||
use rust_embed::Embed;
|
||||
use rust_embed_for_web::{EmbedableFile, RustEmbed};
|
||||
use sea_orm::DatabaseConnection;
|
||||
use utoipa::OpenApi;
|
||||
use utoipa_swagger_ui::{Config, SwaggerUi};
|
||||
|
||||
use crate::api::auth;
|
||||
use crate::api::handler::{get_video, get_video_sources, get_videos, reset_video, ApiDoc};
|
||||
use crate::config::CONFIG;
|
||||
use crate::api::{LogHelper, router};
|
||||
use crate::bilibili::BiliClient;
|
||||
use crate::config::VersionedConfig;
|
||||
|
||||
#[derive(Embed)]
|
||||
#[derive(RustEmbed)]
|
||||
#[preserve_source = false]
|
||||
#[folder = "../../web/build"]
|
||||
struct Asset;
|
||||
|
||||
pub async fn http_server(database_connection: Arc<DatabaseConnection>) -> Result<()> {
|
||||
let app = Router::new()
|
||||
.route("/api/video-sources", get(get_video_sources))
|
||||
.route("/api/videos", get(get_videos))
|
||||
.route("/api/videos/{id}", get(get_video))
|
||||
.route("/api/videos/{id}/reset", post(reset_video))
|
||||
.merge(
|
||||
SwaggerUi::new("/swagger-ui/")
|
||||
.url("/api-docs/openapi.json", ApiDoc::openapi())
|
||||
.config(
|
||||
Config::default()
|
||||
.try_it_out_enabled(true)
|
||||
.persist_authorization(true)
|
||||
.validator_url("none"),
|
||||
),
|
||||
)
|
||||
.fallback_service(get(frontend_files))
|
||||
pub async fn http_server(
|
||||
database_connection: DatabaseConnection,
|
||||
bili_client: Arc<BiliClient>,
|
||||
log_writer: LogHelper,
|
||||
) -> Result<()> {
|
||||
let app = router()
|
||||
.fallback_service(get(frontend_files).head(frontend_files))
|
||||
.layer(Extension(database_connection))
|
||||
.layer(middleware::from_fn(auth::auth));
|
||||
let listener = tokio::net::TcpListener::bind(&CONFIG.bind_address)
|
||||
.layer(Extension(bili_client))
|
||||
.layer(Extension(log_writer));
|
||||
let config = VersionedConfig::get().load_full();
|
||||
let listener = tokio::net::TcpListener::bind(&config.bind_address)
|
||||
.await
|
||||
.context("bind address failed")?;
|
||||
info!("开始监听 http 服务: http://{}", CONFIG.bind_address);
|
||||
info!("开始运行管理页:http://{}", config.bind_address);
|
||||
Ok(axum::serve(listener, ServiceExt::<Request>::into_make_service(app)).await?)
|
||||
}
|
||||
|
||||
async fn frontend_files(uri: Uri) -> impl IntoResponse {
|
||||
let mut path = uri.path().trim_start_matches('/');
|
||||
if path.is_empty() {
|
||||
async fn frontend_files(request: Request) -> impl IntoResponse {
|
||||
let mut path = request.uri().path().trim_start_matches('/');
|
||||
if path.is_empty() || Asset::get(path).is_none() {
|
||||
path = "index.html";
|
||||
}
|
||||
match Asset::get(path) {
|
||||
Some(content) => {
|
||||
let mime = mime_guess::from_path(path).first_or_octet_stream();
|
||||
([(header::CONTENT_TYPE, mime.as_ref())], content.data).into_response()
|
||||
}
|
||||
None => (StatusCode::NOT_FOUND, "404 Not Found").into_response(),
|
||||
let Some(content) = Asset::get(path) else {
|
||||
return (StatusCode::NOT_FOUND, "404 Not Found").into_response();
|
||||
};
|
||||
let mime_type = content.mime_type();
|
||||
let content_type = mime_type.as_deref().unwrap_or("application/octet-stream");
|
||||
let default_headers = [
|
||||
(header::CONTENT_TYPE, content_type),
|
||||
(header::CACHE_CONTROL, "no-cache"),
|
||||
(header::ETAG, &content.hash()),
|
||||
];
|
||||
if let Some(if_none_match) = request.headers().get(header::IF_NONE_MATCH)
|
||||
&& let Ok(client_etag) = if_none_match.to_str()
|
||||
&& client_etag == content.hash()
|
||||
{
|
||||
return (StatusCode::NOT_MODIFIED, default_headers).into_response();
|
||||
}
|
||||
|
||||
if request.method() == axum::http::Method::HEAD {
|
||||
return (StatusCode::OK, default_headers).into_response();
|
||||
}
|
||||
if cfg!(debug_assertions) {
|
||||
// safety: `RustEmbed` returns uncompressed files directly from the filesystem in debug mode
|
||||
return (StatusCode::OK, default_headers, content.data().unwrap()).into_response();
|
||||
}
|
||||
let accepted_encodings = request
|
||||
.headers()
|
||||
.get(header::ACCEPT_ENCODING)
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.map(|s| s.split(',').map(str::trim).collect::<HashSet<_>>())
|
||||
.unwrap_or_default();
|
||||
for (encoding, data) in [("br", content.data_br()), ("gzip", content.data_gzip())] {
|
||||
if accepted_encodings.contains(encoding)
|
||||
&& let Some(data) = data
|
||||
{
|
||||
return (
|
||||
StatusCode::OK,
|
||||
[
|
||||
(header::CONTENT_TYPE, content_type),
|
||||
(header::CACHE_CONTROL, "no-cache"),
|
||||
(header::ETAG, &content.hash()),
|
||||
(header::CONTENT_ENCODING, encoding),
|
||||
],
|
||||
data,
|
||||
)
|
||||
.into_response();
|
||||
}
|
||||
}
|
||||
(
|
||||
StatusCode::NOT_ACCEPTABLE,
|
||||
"Client must support gzip or brotli compression",
|
||||
)
|
||||
.into_response()
|
||||
}
|
||||
|
||||
@@ -1,21 +1,27 @@
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use sea_orm::DatabaseConnection;
|
||||
use tokio::time;
|
||||
|
||||
use crate::adapter::Args;
|
||||
use crate::adapter::VideoSource;
|
||||
use crate::bilibili::{self, BiliClient};
|
||||
use crate::config::CONFIG;
|
||||
use crate::config::VersionedConfig;
|
||||
use crate::utils::model::get_enabled_video_sources;
|
||||
use crate::utils::task_notifier::TASK_STATUS_NOTIFIER;
|
||||
use crate::workflow::process_video_source;
|
||||
|
||||
/// 启动周期下载视频的任务
|
||||
pub async fn video_downloader(connection: Arc<DatabaseConnection>) {
|
||||
pub async fn video_downloader(connection: DatabaseConnection, bili_client: Arc<BiliClient>) {
|
||||
let mut anchor = chrono::Local::now().date_naive();
|
||||
let bili_client = BiliClient::new();
|
||||
let params = collect_task_params();
|
||||
loop {
|
||||
info!("开始执行本轮视频下载任务..");
|
||||
let _lock = TASK_STATUS_NOTIFIER.start_running().await;
|
||||
let config = VersionedConfig::get().load_full();
|
||||
'inner: {
|
||||
if let Err(e) = config.check() {
|
||||
error!("配置检查失败,跳过本轮执行:\n{:#}", e);
|
||||
break 'inner;
|
||||
}
|
||||
match bili_client.wbi_img().await.map(|wbi_img| wbi_img.into()) {
|
||||
Ok(Some(mixin_key)) => bilibili::set_global_mixin_key(mixin_key),
|
||||
Ok(_) => {
|
||||
@@ -28,40 +34,29 @@ pub async fn video_downloader(connection: Arc<DatabaseConnection>) {
|
||||
}
|
||||
};
|
||||
if anchor != chrono::Local::now().date_naive() {
|
||||
if let Err(e) = bili_client.check_refresh().await {
|
||||
if let Err(e) = bili_client.check_refresh(&connection).await {
|
||||
error!("检查刷新 Credential 遇到错误:{:#},等待下一轮执行", e);
|
||||
break 'inner;
|
||||
}
|
||||
anchor = chrono::Local::now().date_naive();
|
||||
}
|
||||
for (args, path) in ¶ms {
|
||||
if let Err(e) = process_video_source(*args, &bili_client, path, &connection).await {
|
||||
error!("处理过程遇到错误:{:#}", e);
|
||||
let Ok(video_sources) = get_enabled_video_sources(&connection).await else {
|
||||
error!("获取视频源列表失败,等待下一轮执行");
|
||||
break 'inner;
|
||||
};
|
||||
if video_sources.is_empty() {
|
||||
info!("没有可用的视频源,等待下一轮执行");
|
||||
break 'inner;
|
||||
}
|
||||
for video_source in video_sources {
|
||||
let display_name = video_source.display_name();
|
||||
if let Err(e) = process_video_source(video_source, &bili_client, &connection).await {
|
||||
error!("处理 {} 时遇到错误:{:#},等待下一轮执行", display_name, e);
|
||||
}
|
||||
}
|
||||
info!("本轮任务执行完毕,等待下一轮执行");
|
||||
}
|
||||
time::sleep(time::Duration::from_secs(CONFIG.interval)).await;
|
||||
TASK_STATUS_NOTIFIER.finish_running(_lock);
|
||||
time::sleep(time::Duration::from_secs(config.interval)).await;
|
||||
}
|
||||
}
|
||||
|
||||
/// 构造下载视频任务执行所需的参数(下载类型和保存路径)
|
||||
fn collect_task_params() -> Vec<(Args<'static>, &'static PathBuf)> {
|
||||
let mut params = Vec::new();
|
||||
CONFIG
|
||||
.favorite_list
|
||||
.iter()
|
||||
.for_each(|(fid, path)| params.push((Args::Favorite { fid }, path)));
|
||||
CONFIG
|
||||
.collection_list
|
||||
.iter()
|
||||
.for_each(|(collection_item, path)| params.push((Args::Collection { collection_item }, path)));
|
||||
if CONFIG.watch_later.enabled {
|
||||
params.push((Args::WatchLater, &CONFIG.watch_later.path));
|
||||
}
|
||||
CONFIG
|
||||
.submission_list
|
||||
.iter()
|
||||
.for_each(|(upper_id, path)| params.push((Args::Submission { upper_id }, path)));
|
||||
params
|
||||
}
|
||||
|
||||
@@ -124,7 +124,7 @@ impl VideoInfo {
|
||||
ctime: Set(ctime.naive_utc()),
|
||||
pubtime: Set(pubtime.naive_utc()),
|
||||
favtime: if base_model.favtime != NaiveDateTime::default() {
|
||||
NotSet // 之前设置了 favtime,不覆盖
|
||||
Set(base_model.favtime) // 之前设置了 favtime,使用之前的值(等价于 unset,但设置上以支持后续的规则匹配)
|
||||
} else {
|
||||
Set(pubtime.naive_utc()) // 未设置过 favtime,使用 pubtime 填充
|
||||
},
|
||||
@@ -152,10 +152,7 @@ impl VideoInfo {
|
||||
}
|
||||
|
||||
impl PageInfo {
|
||||
pub fn into_active_model(
|
||||
self,
|
||||
video_model: &bili_sync_entity::video::Model,
|
||||
) -> bili_sync_entity::page::ActiveModel {
|
||||
pub fn into_active_model(self, video_model_id: i32) -> bili_sync_entity::page::ActiveModel {
|
||||
let (width, height) = match &self.dimension {
|
||||
Some(d) => {
|
||||
if d.rotate == 0 {
|
||||
@@ -167,7 +164,7 @@ impl PageInfo {
|
||||
None => (None, None),
|
||||
};
|
||||
bili_sync_entity::page::ActiveModel {
|
||||
video_id: Set(video_model.id),
|
||||
video_id: Set(video_model_id),
|
||||
cid: Set(self.cid),
|
||||
pid: Set(self.page),
|
||||
name: Set(self.name),
|
||||
|
||||
@@ -1,15 +1,16 @@
|
||||
use serde_json::json;
|
||||
|
||||
use crate::config::CONFIG;
|
||||
use crate::config::VersionedConfig;
|
||||
|
||||
pub fn video_format_args(video_model: &bili_sync_entity::video::Model) -> serde_json::Value {
|
||||
let config = VersionedConfig::get().load();
|
||||
json!({
|
||||
"bvid": &video_model.bvid,
|
||||
"title": &video_model.name,
|
||||
"upper_name": &video_model.upper_name,
|
||||
"upper_mid": &video_model.upper_id,
|
||||
"pubtime": &video_model.pubtime.and_utc().format(&CONFIG.time_format).to_string(),
|
||||
"fav_time": &video_model.favtime.and_utc().format(&CONFIG.time_format).to_string(),
|
||||
"pubtime": &video_model.pubtime.and_utc().format(&config.time_format).to_string(),
|
||||
"fav_time": &video_model.favtime.and_utc().format(&config.time_format).to_string(),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -17,6 +18,7 @@ pub fn page_format_args(
|
||||
video_model: &bili_sync_entity::video::Model,
|
||||
page_model: &bili_sync_entity::page::Model,
|
||||
) -> serde_json::Value {
|
||||
let config = VersionedConfig::get().load();
|
||||
json!({
|
||||
"bvid": &video_model.bvid,
|
||||
"title": &video_model.name,
|
||||
@@ -24,7 +26,7 @@ pub fn page_format_args(
|
||||
"upper_mid": &video_model.upper_id,
|
||||
"ptitle": &page_model.name,
|
||||
"pid": page_model.pid,
|
||||
"pubtime": video_model.pubtime.and_utc().format(&CONFIG.time_format).to_string(),
|
||||
"fav_time": video_model.favtime.and_utc().format(&CONFIG.time_format).to_string(),
|
||||
"pubtime": video_model.pubtime.and_utc().format(&config.time_format).to_string(),
|
||||
"fav_time": video_model.favtime.and_utc().format(&config.time_format).to_string(),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -3,20 +3,40 @@ pub mod filenamify;
|
||||
pub mod format_arg;
|
||||
pub mod model;
|
||||
pub mod nfo;
|
||||
pub mod rule;
|
||||
pub mod signal;
|
||||
pub mod status;
|
||||
|
||||
pub mod task_notifier;
|
||||
pub mod validation;
|
||||
use tracing_subscriber::fmt;
|
||||
use tracing_subscriber::layer::SubscriberExt;
|
||||
use tracing_subscriber::util::SubscriberInitExt;
|
||||
|
||||
pub fn init_logger(log_level: &str) {
|
||||
tracing_subscriber::fmt::Subscriber::builder()
|
||||
use crate::api::LogHelper;
|
||||
|
||||
pub fn init_logger(log_level: &str, log_writer: Option<LogHelper>) {
|
||||
let log = tracing_subscriber::fmt::Subscriber::builder()
|
||||
.compact()
|
||||
.with_env_filter(tracing_subscriber::EnvFilter::builder().parse_lossy(log_level))
|
||||
.with_target(false)
|
||||
.with_timer(tracing_subscriber::fmt::time::ChronoLocal::new(
|
||||
"%b %d %H:%M:%S".to_owned(),
|
||||
))
|
||||
.finish()
|
||||
.finish();
|
||||
if let Some(writer) = log_writer {
|
||||
log.with(
|
||||
fmt::layer()
|
||||
.with_ansi(false)
|
||||
.with_timer(tracing_subscriber::fmt::time::ChronoLocal::new(
|
||||
"%b %d %H:%M:%S".to_owned(),
|
||||
))
|
||||
.json()
|
||||
.flatten_event(true)
|
||||
.with_writer(writer),
|
||||
)
|
||||
.try_init()
|
||||
.expect("初始化日志失败");
|
||||
} else {
|
||||
log.try_init().expect("初始化日志失败");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
use anyhow::{Context, Result};
|
||||
use anyhow::{Context, Result, anyhow};
|
||||
use bili_sync_entity::*;
|
||||
use sea_orm::ActiveValue::Set;
|
||||
use sea_orm::entity::prelude::*;
|
||||
use sea_orm::sea_query::{OnConflict, SimpleExpr};
|
||||
use sea_orm::DatabaseTransaction;
|
||||
use sea_orm::{DatabaseTransaction, TransactionTrait};
|
||||
|
||||
use crate::adapter::{VideoSource, VideoSourceEnum};
|
||||
use crate::bilibili::{PageInfo, VideoInfo};
|
||||
use crate::bilibili::VideoInfo;
|
||||
use crate::config::{Config, LegacyConfig};
|
||||
use crate::utils::status::STATUS_COMPLETED;
|
||||
|
||||
/// 筛选未填充的视频
|
||||
@@ -39,6 +41,7 @@ pub async fn filter_unhandled_video_pages(
|
||||
.and(video::Column::DownloadStatus.lt(STATUS_COMPLETED))
|
||||
.and(video::Column::Category.eq(2))
|
||||
.and(video::Column::SinglePage.is_not_null())
|
||||
.and(video::Column::ShouldDownload.eq(true))
|
||||
.and(additional_expr),
|
||||
)
|
||||
.find_with_related(page::Entity)
|
||||
@@ -71,16 +74,8 @@ pub async fn create_videos(
|
||||
}
|
||||
|
||||
/// 尝试创建 Page Model,如果发生冲突则忽略
|
||||
pub async fn create_pages(
|
||||
pages_info: Vec<PageInfo>,
|
||||
video_model: &bili_sync_entity::video::Model,
|
||||
connection: &DatabaseTransaction,
|
||||
) -> Result<()> {
|
||||
let page_models = pages_info
|
||||
.into_iter()
|
||||
.map(|p| p.into_active_model(video_model))
|
||||
.collect::<Vec<page::ActiveModel>>();
|
||||
for page_chunk in page_models.chunks(50) {
|
||||
pub async fn create_pages(pages_model: Vec<page::ActiveModel>, connection: &DatabaseTransaction) -> Result<()> {
|
||||
for page_chunk in pages_model.chunks(200) {
|
||||
page::Entity::insert_many(page_chunk.to_vec())
|
||||
.on_conflict(
|
||||
OnConflict::columns([page::Column::VideoId, page::Column::Pid])
|
||||
@@ -117,3 +112,123 @@ pub async fn update_pages_model(pages: Vec<page::ActiveModel>, connection: &Data
|
||||
query.exec(connection).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// 获取所有已经启用的视频源
|
||||
pub async fn get_enabled_video_sources(connection: &DatabaseConnection) -> Result<Vec<VideoSourceEnum>> {
|
||||
let (favorite, watch_later, submission, collection) = tokio::try_join!(
|
||||
favorite::Entity::find()
|
||||
.filter(favorite::Column::Enabled.eq(true))
|
||||
.all(connection),
|
||||
watch_later::Entity::find()
|
||||
.filter(watch_later::Column::Enabled.eq(true))
|
||||
.all(connection),
|
||||
submission::Entity::find()
|
||||
.filter(submission::Column::Enabled.eq(true))
|
||||
.all(connection),
|
||||
collection::Entity::find()
|
||||
.filter(collection::Column::Enabled.eq(true))
|
||||
.all(connection),
|
||||
)?;
|
||||
let mut sources = Vec::with_capacity(favorite.len() + watch_later.len() + submission.len() + collection.len());
|
||||
sources.extend(favorite.into_iter().map(VideoSourceEnum::from));
|
||||
sources.extend(watch_later.into_iter().map(VideoSourceEnum::from));
|
||||
sources.extend(submission.into_iter().map(VideoSourceEnum::from));
|
||||
sources.extend(collection.into_iter().map(VideoSourceEnum::from));
|
||||
Ok(sources)
|
||||
}
|
||||
|
||||
/// 从数据库中加载配置
|
||||
pub async fn load_db_config(connection: &DatabaseConnection) -> Result<Option<Result<Config>>> {
|
||||
Ok(bili_sync_entity::config::Entity::find_by_id(1)
|
||||
.one(connection)
|
||||
.await?
|
||||
.map(|model| {
|
||||
serde_json::from_str(&model.data).map_err(|e| anyhow!("Failed to deserialize config data: {}", e))
|
||||
}))
|
||||
}
|
||||
|
||||
/// 保存配置到数据库
|
||||
pub async fn save_db_config(config: &Config, connection: &DatabaseConnection) -> Result<()> {
|
||||
let data = serde_json::to_string(config).context("Failed to serialize config data")?;
|
||||
let model = bili_sync_entity::config::ActiveModel {
|
||||
id: Set(1),
|
||||
data: Set(data),
|
||||
..Default::default()
|
||||
};
|
||||
bili_sync_entity::config::Entity::insert(model)
|
||||
.on_conflict(
|
||||
OnConflict::column(bili_sync_entity::config::Column::Id)
|
||||
.update_column(bili_sync_entity::config::Column::Data)
|
||||
.to_owned(),
|
||||
)
|
||||
.exec(connection)
|
||||
.await
|
||||
.context("Failed to save config to database")?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// 迁移旧版本配置(即将所有相关联的内容设置为 enabled)
|
||||
pub async fn migrate_legacy_config(config: &LegacyConfig, connection: &DatabaseConnection) -> Result<()> {
|
||||
let transaction = connection.begin().await.context("Failed to begin transaction")?;
|
||||
tokio::try_join!(
|
||||
migrate_favorite(config, &transaction),
|
||||
migrate_watch_later(config, &transaction),
|
||||
migrate_submission(config, &transaction),
|
||||
migrate_collection(config, &transaction)
|
||||
)?;
|
||||
transaction.commit().await.context("Failed to commit transaction")?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn migrate_favorite(config: &LegacyConfig, connection: &DatabaseTransaction) -> Result<()> {
|
||||
favorite::Entity::update_many()
|
||||
.filter(favorite::Column::FId.is_in(config.favorite_list.keys().collect::<Vec<_>>()))
|
||||
.col_expr(favorite::Column::Enabled, Expr::value(true))
|
||||
.exec(connection)
|
||||
.await
|
||||
.context("Failed to migrate favorite config")?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn migrate_watch_later(config: &LegacyConfig, connection: &DatabaseTransaction) -> Result<()> {
|
||||
if config.watch_later.enabled {
|
||||
watch_later::Entity::update_many()
|
||||
.col_expr(watch_later::Column::Enabled, Expr::value(true))
|
||||
.exec(connection)
|
||||
.await
|
||||
.context("Failed to migrate watch later config")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn migrate_submission(config: &LegacyConfig, connection: &DatabaseTransaction) -> Result<()> {
|
||||
submission::Entity::update_many()
|
||||
.filter(submission::Column::UpperId.is_in(config.submission_list.keys().collect::<Vec<_>>()))
|
||||
.col_expr(submission::Column::Enabled, Expr::value(true))
|
||||
.exec(connection)
|
||||
.await
|
||||
.context("Failed to migrate submission config")?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn migrate_collection(config: &LegacyConfig, connection: &DatabaseTransaction) -> Result<()> {
|
||||
let tuples: Vec<(i64, i64, i32)> = config
|
||||
.collection_list
|
||||
.keys()
|
||||
.filter_map(|key| Some((key.sid.parse().ok()?, key.mid.parse().ok()?, key.collection_type.into())))
|
||||
.collect();
|
||||
collection::Entity::update_many()
|
||||
.filter(
|
||||
Expr::tuple([
|
||||
Expr::column(collection::Column::SId),
|
||||
Expr::column(collection::Column::MId),
|
||||
Expr::column(collection::Column::Type),
|
||||
])
|
||||
.in_tuples(tuples),
|
||||
)
|
||||
.col_expr(collection::Column::Enabled, Expr::value(true))
|
||||
.exec(connection)
|
||||
.await
|
||||
.context("Failed to migrate collection config")?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1,213 +1,242 @@
|
||||
use anyhow::Result;
|
||||
use bili_sync_entity::*;
|
||||
use chrono::NaiveDateTime;
|
||||
use quick_xml::Error;
|
||||
use quick_xml::events::{BytesCData, BytesText};
|
||||
use quick_xml::writer::Writer;
|
||||
use quick_xml::Error;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
use tokio::io::{AsyncWriteExt, BufWriter};
|
||||
|
||||
use crate::config::NFOTimeType;
|
||||
use crate::config::{NFOTimeType, VersionedConfig};
|
||||
|
||||
#[allow(clippy::upper_case_acronyms)]
|
||||
pub enum NFOMode {
|
||||
MOVIE,
|
||||
TVSHOW,
|
||||
EPOSODE,
|
||||
UPPER,
|
||||
pub enum NFO<'a> {
|
||||
Movie(Movie<'a>),
|
||||
TVShow(TVShow<'a>),
|
||||
Upper(Upper),
|
||||
Episode(Episode<'a>),
|
||||
}
|
||||
|
||||
pub enum ModelWrapper<'a> {
|
||||
Video(&'a video::Model),
|
||||
Page(&'a page::Model),
|
||||
pub struct Movie<'a> {
|
||||
pub name: &'a str,
|
||||
pub intro: &'a str,
|
||||
pub bvid: &'a str,
|
||||
pub upper_id: i64,
|
||||
pub upper_name: &'a str,
|
||||
pub aired: NaiveDateTime,
|
||||
pub tags: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
pub struct NFOSerializer<'a>(pub ModelWrapper<'a>, pub NFOMode);
|
||||
pub struct TVShow<'a> {
|
||||
pub name: &'a str,
|
||||
pub intro: &'a str,
|
||||
pub bvid: &'a str,
|
||||
pub upper_id: i64,
|
||||
pub upper_name: &'a str,
|
||||
pub aired: NaiveDateTime,
|
||||
pub tags: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
/// serde xml 似乎不太好用,先这么裸着写
|
||||
/// (真是又臭又长啊
|
||||
impl NFOSerializer<'_> {
|
||||
pub async fn generate_nfo(self, nfo_time_type: &NFOTimeType) -> Result<String> {
|
||||
pub struct Upper {
|
||||
pub upper_id: String,
|
||||
pub pubtime: NaiveDateTime,
|
||||
}
|
||||
|
||||
pub struct Episode<'a> {
|
||||
pub name: &'a str,
|
||||
pub pid: String,
|
||||
}
|
||||
|
||||
impl NFO<'_> {
|
||||
pub async fn generate_nfo(self) -> Result<String> {
|
||||
let mut buffer = r#"<?xml version="1.0" encoding="utf-8" standalone="yes"?>
|
||||
"#
|
||||
.as_bytes()
|
||||
.to_vec();
|
||||
let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer);
|
||||
let mut writer = Writer::new_with_indent(&mut tokio_buffer, b' ', 4);
|
||||
let mut tokio_buffer = BufWriter::new(&mut buffer);
|
||||
let writer = Writer::new_with_indent(&mut tokio_buffer, b' ', 4);
|
||||
match self {
|
||||
NFOSerializer(ModelWrapper::Video(v), NFOMode::MOVIE) => {
|
||||
let nfo_time = match nfo_time_type {
|
||||
NFOTimeType::FavTime => v.favtime,
|
||||
NFOTimeType::PubTime => v.pubtime,
|
||||
};
|
||||
writer
|
||||
.create_element("movie")
|
||||
.write_inner_content_async::<_, _, Error>(|writer| async move {
|
||||
writer
|
||||
.create_element("plot")
|
||||
.write_cdata_content_async(BytesCData::new(Self::format_plot(v)))
|
||||
.await?;
|
||||
writer.create_element("outline").write_empty_async().await?;
|
||||
writer
|
||||
.create_element("title")
|
||||
.write_text_content_async(BytesText::new(&v.name))
|
||||
.await?;
|
||||
writer
|
||||
.create_element("actor")
|
||||
.write_inner_content_async::<_, _, Error>(|writer| async move {
|
||||
writer
|
||||
.create_element("name")
|
||||
.write_text_content_async(BytesText::new(&v.upper_id.to_string()))
|
||||
.await?;
|
||||
writer
|
||||
.create_element("role")
|
||||
.write_text_content_async(BytesText::new(&v.upper_name))
|
||||
.await?;
|
||||
Ok(writer)
|
||||
})
|
||||
.await?;
|
||||
writer
|
||||
.create_element("year")
|
||||
.write_text_content_async(BytesText::new(&nfo_time.format("%Y").to_string()))
|
||||
.await?;
|
||||
if let Some(tags) = &v.tags {
|
||||
let tags: Vec<String> = serde_json::from_value(tags.clone()).unwrap_or_default();
|
||||
for tag in tags {
|
||||
writer
|
||||
.create_element("genre")
|
||||
.write_text_content_async(BytesText::new(&tag))
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
writer
|
||||
.create_element("uniqueid")
|
||||
.with_attribute(("type", "bilibili"))
|
||||
.write_text_content_async(BytesText::new(&v.bvid))
|
||||
.await?;
|
||||
writer
|
||||
.create_element("aired")
|
||||
.write_text_content_async(BytesText::new(&nfo_time.format("%Y-%m-%d").to_string()))
|
||||
.await?;
|
||||
Ok(writer)
|
||||
})
|
||||
.await?;
|
||||
NFO::Movie(movie) => {
|
||||
Self::write_movie_nfo(writer, movie).await?;
|
||||
}
|
||||
NFOSerializer(ModelWrapper::Video(v), NFOMode::TVSHOW) => {
|
||||
let nfo_time = match nfo_time_type {
|
||||
NFOTimeType::FavTime => v.favtime,
|
||||
NFOTimeType::PubTime => v.pubtime,
|
||||
};
|
||||
writer
|
||||
.create_element("tvshow")
|
||||
.write_inner_content_async::<_, _, Error>(|writer| async move {
|
||||
writer
|
||||
.create_element("plot")
|
||||
.write_cdata_content_async(BytesCData::new(Self::format_plot(v)))
|
||||
.await?;
|
||||
writer.create_element("outline").write_empty_async().await?;
|
||||
writer
|
||||
.create_element("title")
|
||||
.write_text_content_async(BytesText::new(&v.name))
|
||||
.await?;
|
||||
writer
|
||||
.create_element("actor")
|
||||
.write_inner_content_async::<_, _, Error>(|writer| async move {
|
||||
writer
|
||||
.create_element("name")
|
||||
.write_text_content_async(BytesText::new(&v.upper_id.to_string()))
|
||||
.await?;
|
||||
writer
|
||||
.create_element("role")
|
||||
.write_text_content_async(BytesText::new(&v.upper_name))
|
||||
.await?;
|
||||
Ok(writer)
|
||||
})
|
||||
.await?;
|
||||
writer
|
||||
.create_element("year")
|
||||
.write_text_content_async(BytesText::new(&nfo_time.format("%Y").to_string()))
|
||||
.await?;
|
||||
if let Some(tags) = &v.tags {
|
||||
let tags: Vec<String> = serde_json::from_value(tags.clone()).unwrap_or_default();
|
||||
for tag in tags {
|
||||
writer
|
||||
.create_element("genre")
|
||||
.write_text_content_async(BytesText::new(&tag))
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
writer
|
||||
.create_element("uniqueid")
|
||||
.with_attribute(("type", "bilibili"))
|
||||
.write_text_content_async(BytesText::new(&v.bvid))
|
||||
.await?;
|
||||
writer
|
||||
.create_element("aired")
|
||||
.write_text_content_async(BytesText::new(&nfo_time.format("%Y-%m-%d").to_string()))
|
||||
.await?;
|
||||
Ok(writer)
|
||||
})
|
||||
.await?;
|
||||
NFO::TVShow(tvshow) => {
|
||||
Self::write_tvshow_nfo(writer, tvshow).await?;
|
||||
}
|
||||
NFOSerializer(ModelWrapper::Video(v), NFOMode::UPPER) => {
|
||||
writer
|
||||
.create_element("person")
|
||||
.write_inner_content_async::<_, _, Error>(|writer| async move {
|
||||
writer.create_element("plot").write_empty_async().await?;
|
||||
writer.create_element("outline").write_empty_async().await?;
|
||||
writer
|
||||
.create_element("lockdata")
|
||||
.write_text_content_async(BytesText::new("false"))
|
||||
.await?;
|
||||
writer
|
||||
.create_element("dateadded")
|
||||
.write_text_content_async(BytesText::new(
|
||||
&v.pubtime.format("%Y-%m-%d %H:%M:%S").to_string(),
|
||||
))
|
||||
.await?;
|
||||
writer
|
||||
.create_element("title")
|
||||
.write_text_content_async(BytesText::new(&v.upper_id.to_string()))
|
||||
.await?;
|
||||
writer
|
||||
.create_element("sorttitle")
|
||||
.write_text_content_async(BytesText::new(&v.upper_id.to_string()))
|
||||
.await?;
|
||||
Ok(writer)
|
||||
})
|
||||
.await?;
|
||||
NFO::Upper(upper) => {
|
||||
Self::write_upper_nfo(writer, upper).await?;
|
||||
}
|
||||
NFOSerializer(ModelWrapper::Page(p), NFOMode::EPOSODE) => {
|
||||
writer
|
||||
.create_element("episodedetails")
|
||||
.write_inner_content_async::<_, _, Error>(|writer| async move {
|
||||
writer.create_element("plot").write_empty_async().await?;
|
||||
writer.create_element("outline").write_empty_async().await?;
|
||||
writer
|
||||
.create_element("title")
|
||||
.write_text_content_async(BytesText::new(&p.name))
|
||||
.await?;
|
||||
writer
|
||||
.create_element("season")
|
||||
.write_text_content_async(BytesText::new("1"))
|
||||
.await?;
|
||||
writer
|
||||
.create_element("episode")
|
||||
.write_text_content_async(BytesText::new(&p.pid.to_string()))
|
||||
.await?;
|
||||
Ok(writer)
|
||||
})
|
||||
.await?;
|
||||
NFO::Episode(episode) => {
|
||||
Self::write_episode_nfo(writer, episode).await?;
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
tokio_buffer.flush().await?;
|
||||
Ok(String::from_utf8(buffer)?)
|
||||
}
|
||||
|
||||
async fn write_movie_nfo(mut writer: Writer<&mut BufWriter<&mut Vec<u8>>>, movie: Movie<'_>) -> Result<()> {
|
||||
writer
|
||||
.create_element("movie")
|
||||
.write_inner_content_async::<_, _, Error>(|writer| async move {
|
||||
writer
|
||||
.create_element("plot")
|
||||
.write_cdata_content_async(BytesCData::new(Self::format_plot(movie.bvid, movie.intro)))
|
||||
.await?;
|
||||
writer.create_element("outline").write_empty_async().await?;
|
||||
writer
|
||||
.create_element("title")
|
||||
.write_text_content_async(BytesText::new(movie.name))
|
||||
.await?;
|
||||
writer
|
||||
.create_element("actor")
|
||||
.write_inner_content_async::<_, _, Error>(|writer| async move {
|
||||
writer
|
||||
.create_element("name")
|
||||
.write_text_content_async(BytesText::new(&movie.upper_id.to_string()))
|
||||
.await?;
|
||||
writer
|
||||
.create_element("role")
|
||||
.write_text_content_async(BytesText::new(movie.upper_name))
|
||||
.await?;
|
||||
Ok(writer)
|
||||
})
|
||||
.await?;
|
||||
writer
|
||||
.create_element("year")
|
||||
.write_text_content_async(BytesText::new(&movie.aired.format("%Y").to_string()))
|
||||
.await?;
|
||||
if let Some(tags) = movie.tags {
|
||||
for tag in tags {
|
||||
writer
|
||||
.create_element("genre")
|
||||
.write_text_content_async(BytesText::new(&tag))
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
writer
|
||||
.create_element("uniqueid")
|
||||
.with_attribute(("type", "bilibili"))
|
||||
.write_text_content_async(BytesText::new(movie.bvid))
|
||||
.await?;
|
||||
writer
|
||||
.create_element("aired")
|
||||
.write_text_content_async(BytesText::new(&movie.aired.format("%Y-%m-%d").to_string()))
|
||||
.await?;
|
||||
Ok(writer)
|
||||
})
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn write_tvshow_nfo(mut writer: Writer<&mut BufWriter<&mut Vec<u8>>>, tvshow: TVShow<'_>) -> Result<()> {
|
||||
writer
|
||||
.create_element("tvshow")
|
||||
.write_inner_content_async::<_, _, Error>(|writer| async move {
|
||||
writer
|
||||
.create_element("plot")
|
||||
.write_cdata_content_async(BytesCData::new(Self::format_plot(tvshow.bvid, tvshow.intro)))
|
||||
.await?;
|
||||
writer.create_element("outline").write_empty_async().await?;
|
||||
writer
|
||||
.create_element("title")
|
||||
.write_text_content_async(BytesText::new(tvshow.name))
|
||||
.await?;
|
||||
writer
|
||||
.create_element("actor")
|
||||
.write_inner_content_async::<_, _, Error>(|writer| async move {
|
||||
writer
|
||||
.create_element("name")
|
||||
.write_text_content_async(BytesText::new(&tvshow.upper_id.to_string()))
|
||||
.await?;
|
||||
writer
|
||||
.create_element("role")
|
||||
.write_text_content_async(BytesText::new(tvshow.upper_name))
|
||||
.await?;
|
||||
Ok(writer)
|
||||
})
|
||||
.await?;
|
||||
writer
|
||||
.create_element("year")
|
||||
.write_text_content_async(BytesText::new(&tvshow.aired.format("%Y").to_string()))
|
||||
.await?;
|
||||
if let Some(tags) = tvshow.tags {
|
||||
for tag in tags {
|
||||
writer
|
||||
.create_element("genre")
|
||||
.write_text_content_async(BytesText::new(&tag))
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
writer
|
||||
.create_element("uniqueid")
|
||||
.with_attribute(("type", "bilibili"))
|
||||
.write_text_content_async(BytesText::new(tvshow.bvid))
|
||||
.await?;
|
||||
writer
|
||||
.create_element("aired")
|
||||
.write_text_content_async(BytesText::new(&tvshow.aired.format("%Y-%m-%d").to_string()))
|
||||
.await?;
|
||||
Ok(writer)
|
||||
})
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn write_upper_nfo(mut writer: Writer<&mut BufWriter<&mut Vec<u8>>>, upper: Upper) -> Result<()> {
|
||||
writer
|
||||
.create_element("person")
|
||||
.write_inner_content_async::<_, _, Error>(|writer| async move {
|
||||
writer.create_element("plot").write_empty_async().await?;
|
||||
writer.create_element("outline").write_empty_async().await?;
|
||||
writer
|
||||
.create_element("lockdata")
|
||||
.write_text_content_async(BytesText::new("false"))
|
||||
.await?;
|
||||
writer
|
||||
.create_element("dateadded")
|
||||
.write_text_content_async(BytesText::new(&upper.pubtime.format("%Y-%m-%d %H:%M:%S").to_string()))
|
||||
.await?;
|
||||
writer
|
||||
.create_element("title")
|
||||
.write_text_content_async(BytesText::new(&upper.upper_id))
|
||||
.await?;
|
||||
writer
|
||||
.create_element("sorttitle")
|
||||
.write_text_content_async(BytesText::new(&upper.upper_id))
|
||||
.await?;
|
||||
Ok(writer)
|
||||
})
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn write_episode_nfo(mut writer: Writer<&mut BufWriter<&mut Vec<u8>>>, episode: Episode<'_>) -> Result<()> {
|
||||
writer
|
||||
.create_element("episodedetails")
|
||||
.write_inner_content_async::<_, _, Error>(|writer| async move {
|
||||
writer.create_element("plot").write_empty_async().await?;
|
||||
writer.create_element("outline").write_empty_async().await?;
|
||||
writer
|
||||
.create_element("title")
|
||||
.write_text_content_async(BytesText::new(episode.name))
|
||||
.await?;
|
||||
writer
|
||||
.create_element("season")
|
||||
.write_text_content_async(BytesText::new("1"))
|
||||
.await?;
|
||||
writer
|
||||
.create_element("episode")
|
||||
.write_text_content_async(BytesText::new(&episode.pid))
|
||||
.await?;
|
||||
Ok(writer)
|
||||
})
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn format_plot(model: &video::Model) -> String {
|
||||
fn format_plot(bvid: &str, intro: &str) -> String {
|
||||
format!(
|
||||
r#"原始视频:<a href="https://www.bilibili.com/video/{}/">{}</a><br/><br/>{}"#,
|
||||
model.bvid, model.bvid, model.intro
|
||||
bvid, bvid, intro,
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -232,14 +261,11 @@ mod tests {
|
||||
chrono::NaiveTime::from_hms_opt(3, 3, 3).unwrap(),
|
||||
),
|
||||
bvid: "BV1nWcSeeEkV".to_string(),
|
||||
tags: Some(serde_json::json!(["tag1", "tag2"])),
|
||||
tags: Some(vec!["tag1".to_owned(), "tag2".to_owned()].into()),
|
||||
..Default::default()
|
||||
};
|
||||
assert_eq!(
|
||||
NFOSerializer(ModelWrapper::Video(&video), NFOMode::MOVIE)
|
||||
.generate_nfo(&NFOTimeType::PubTime)
|
||||
.await
|
||||
.unwrap(),
|
||||
NFO::Movie((&video).into()).generate_nfo().await.unwrap(),
|
||||
r#"<?xml version="1.0" encoding="utf-8" standalone="yes"?>
|
||||
<movie>
|
||||
<plot><![CDATA[原始视频:<a href="https://www.bilibili.com/video/BV1nWcSeeEkV/">BV1nWcSeeEkV</a><br/><br/>intro]]></plot>
|
||||
@@ -249,18 +275,15 @@ mod tests {
|
||||
<name>1</name>
|
||||
<role>upper_name</role>
|
||||
</actor>
|
||||
<year>2033</year>
|
||||
<year>2022</year>
|
||||
<genre>tag1</genre>
|
||||
<genre>tag2</genre>
|
||||
<uniqueid type="bilibili">BV1nWcSeeEkV</uniqueid>
|
||||
<aired>2033-03-03</aired>
|
||||
<aired>2022-02-02</aired>
|
||||
</movie>"#,
|
||||
);
|
||||
assert_eq!(
|
||||
NFOSerializer(ModelWrapper::Video(&video), NFOMode::TVSHOW)
|
||||
.generate_nfo(&NFOTimeType::FavTime)
|
||||
.await
|
||||
.unwrap(),
|
||||
NFO::TVShow((&video).into()).generate_nfo().await.unwrap(),
|
||||
r#"<?xml version="1.0" encoding="utf-8" standalone="yes"?>
|
||||
<tvshow>
|
||||
<plot><![CDATA[原始视频:<a href="https://www.bilibili.com/video/BV1nWcSeeEkV/">BV1nWcSeeEkV</a><br/><br/>intro]]></plot>
|
||||
@@ -278,10 +301,7 @@ mod tests {
|
||||
</tvshow>"#,
|
||||
);
|
||||
assert_eq!(
|
||||
NFOSerializer(ModelWrapper::Video(&video), NFOMode::UPPER)
|
||||
.generate_nfo(&NFOTimeType::FavTime)
|
||||
.await
|
||||
.unwrap(),
|
||||
NFO::Upper((&video).into()).generate_nfo().await.unwrap(),
|
||||
r#"<?xml version="1.0" encoding="utf-8" standalone="yes"?>
|
||||
<person>
|
||||
<plot/>
|
||||
@@ -298,10 +318,7 @@ mod tests {
|
||||
..Default::default()
|
||||
};
|
||||
assert_eq!(
|
||||
NFOSerializer(ModelWrapper::Page(&page), NFOMode::EPOSODE)
|
||||
.generate_nfo(&NFOTimeType::FavTime)
|
||||
.await
|
||||
.unwrap(),
|
||||
NFO::Episode((&page).into()).generate_nfo().await.unwrap(),
|
||||
r#"<?xml version="1.0" encoding="utf-8" standalone="yes"?>
|
||||
<episodedetails>
|
||||
<plot/>
|
||||
@@ -313,3 +330,55 @@ mod tests {
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a video::Model> for Movie<'a> {
|
||||
fn from(video: &'a video::Model) -> Self {
|
||||
Self {
|
||||
name: &video.name,
|
||||
intro: &video.intro,
|
||||
bvid: &video.bvid,
|
||||
upper_id: video.upper_id,
|
||||
upper_name: &video.upper_name,
|
||||
aired: match VersionedConfig::get().load().nfo_time_type {
|
||||
NFOTimeType::FavTime => video.favtime,
|
||||
NFOTimeType::PubTime => video.pubtime,
|
||||
},
|
||||
tags: video.tags.as_ref().map(|tags| tags.clone().into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a video::Model> for TVShow<'a> {
|
||||
fn from(video: &'a video::Model) -> Self {
|
||||
Self {
|
||||
name: &video.name,
|
||||
intro: &video.intro,
|
||||
bvid: &video.bvid,
|
||||
upper_id: video.upper_id,
|
||||
upper_name: &video.upper_name,
|
||||
aired: match VersionedConfig::get().load().nfo_time_type {
|
||||
NFOTimeType::FavTime => video.favtime,
|
||||
NFOTimeType::PubTime => video.pubtime,
|
||||
},
|
||||
tags: video.tags.as_ref().map(|tags| tags.clone().into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a video::Model> for Upper {
|
||||
fn from(video: &'a video::Model) -> Self {
|
||||
Self {
|
||||
upper_id: video.upper_id.to_string(),
|
||||
pubtime: video.pubtime,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a page::Model> for Episode<'a> {
|
||||
fn from(page: &'a page::Model) -> Self {
|
||||
Self {
|
||||
name: &page.name,
|
||||
pid: page.pid.to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
267
crates/bili_sync/src/utils/rule.rs
Normal file
267
crates/bili_sync/src/utils/rule.rs
Normal file
@@ -0,0 +1,267 @@
|
||||
use bili_sync_entity::rule::{AndGroup, Condition, Rule, RuleTarget};
|
||||
use bili_sync_entity::{page, video};
|
||||
use chrono::{Local, NaiveDateTime};
|
||||
|
||||
pub(crate) trait Evaluatable<T> {
|
||||
fn evaluate(&self, value: T) -> bool;
|
||||
}
|
||||
|
||||
pub(crate) trait FieldEvaluatable {
|
||||
fn evaluate(&self, video: &video::ActiveModel, pages: &[page::ActiveModel]) -> bool;
|
||||
fn evaluate_model(&self, video: &video::Model, pages: &[page::Model]) -> bool;
|
||||
}
|
||||
|
||||
impl Evaluatable<&str> for Condition<String> {
|
||||
fn evaluate(&self, value: &str) -> bool {
|
||||
match self {
|
||||
Condition::Equals(expected) => expected == value,
|
||||
Condition::Contains(substring) => value.contains(substring),
|
||||
Condition::Prefix(prefix) => value.starts_with(prefix),
|
||||
Condition::Suffix(suffix) => value.ends_with(suffix),
|
||||
Condition::MatchesRegex(_, regex) => regex.is_match(value),
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Evaluatable<usize> for Condition<usize> {
|
||||
fn evaluate(&self, value: usize) -> bool {
|
||||
match self {
|
||||
Condition::Equals(expected) => *expected == value,
|
||||
Condition::GreaterThan(threshold) => value > *threshold,
|
||||
Condition::LessThan(threshold) => value < *threshold,
|
||||
Condition::Between(start, end) => value > *start && value < *end,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Evaluatable<&NaiveDateTime> for Condition<NaiveDateTime> {
|
||||
fn evaluate(&self, value: &NaiveDateTime) -> bool {
|
||||
match self {
|
||||
Condition::Equals(expected) => expected == value,
|
||||
Condition::GreaterThan(threshold) => value > threshold,
|
||||
Condition::LessThan(threshold) => value < threshold,
|
||||
Condition::Between(start, end) => value > start && value < end,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FieldEvaluatable for RuleTarget {
|
||||
/// 修改模型后进行评估,此时能访问的是未保存的 activeModel,就地使用 activeModel 评估
|
||||
fn evaluate(&self, video: &video::ActiveModel, pages: &[page::ActiveModel]) -> bool {
|
||||
match self {
|
||||
RuleTarget::Title(cond) => video.name.try_as_ref().is_some_and(|title| cond.evaluate(title)),
|
||||
// 目前的所有条件都是分别针对全体标签进行 any 评估的,例如 Prefix("a") && Suffix("b") 意味着 any(tag.Prefix("a")) && any(tag.Suffix("b")) 而非 any(tag.Prefix("a") && tag.Suffix("b"))
|
||||
// 这可能不满足用户预期,但应该问题不大,如果真有很多人用复杂标签筛选再单独改
|
||||
RuleTarget::Tags(cond) => video
|
||||
.tags
|
||||
.try_as_ref()
|
||||
.and_then(|t| t.as_ref())
|
||||
.is_some_and(|tags| tags.0.iter().any(|tag| cond.evaluate(tag))),
|
||||
RuleTarget::FavTime(cond) => video
|
||||
.favtime
|
||||
.try_as_ref()
|
||||
.map(|fav_time| fav_time.and_utc().with_timezone(&Local).naive_local()) // 数据库中保存的一律是 utc 时间,转换为 local 时间再比较
|
||||
.is_some_and(|fav_time| cond.evaluate(&fav_time)),
|
||||
RuleTarget::PubTime(cond) => video
|
||||
.pubtime
|
||||
.try_as_ref()
|
||||
.map(|pub_time| pub_time.and_utc().with_timezone(&Local).naive_local())
|
||||
.is_some_and(|pub_time| cond.evaluate(&pub_time)),
|
||||
RuleTarget::PageCount(cond) => cond.evaluate(pages.len()),
|
||||
RuleTarget::Not(inner) => !inner.evaluate(video, pages),
|
||||
}
|
||||
}
|
||||
|
||||
/// 手动触发对历史视频的评估,拿到的是原始 Model,直接使用
|
||||
fn evaluate_model(&self, video: &video::Model, pages: &[page::Model]) -> bool {
|
||||
match self {
|
||||
RuleTarget::Title(cond) => cond.evaluate(&video.name),
|
||||
// 目前的所有条件都是分别针对全体标签进行 any 评估的,例如 Prefix("a") && Suffix("b") 意味着 any(tag.Prefix("a")) && any(tag.Suffix("b")) 而非 any(tag.Prefix("a") && tag.Suffix("b"))
|
||||
// 这可能不满足用户预期,但应该问题不大,如果真有很多人用复杂标签筛选再单独改
|
||||
RuleTarget::Tags(cond) => video
|
||||
.tags
|
||||
.as_ref()
|
||||
.is_some_and(|tags| tags.0.iter().any(|tag| cond.evaluate(tag))),
|
||||
RuleTarget::FavTime(cond) => cond.evaluate(&video.favtime.and_utc().with_timezone(&Local).naive_local()),
|
||||
RuleTarget::PubTime(cond) => cond.evaluate(&video.pubtime.and_utc().with_timezone(&Local).naive_local()),
|
||||
RuleTarget::PageCount(cond) => cond.evaluate(pages.len()),
|
||||
RuleTarget::Not(inner) => !inner.evaluate_model(video, pages),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FieldEvaluatable for AndGroup {
|
||||
fn evaluate(&self, video: &video::ActiveModel, pages: &[page::ActiveModel]) -> bool {
|
||||
self.iter().all(|target| target.evaluate(video, pages))
|
||||
}
|
||||
|
||||
fn evaluate_model(&self, video: &video::Model, pages: &[page::Model]) -> bool {
|
||||
self.iter().all(|target| target.evaluate_model(video, pages))
|
||||
}
|
||||
}
|
||||
|
||||
impl FieldEvaluatable for Rule {
|
||||
fn evaluate(&self, video: &video::ActiveModel, pages: &[page::ActiveModel]) -> bool {
|
||||
if self.0.is_empty() {
|
||||
return true;
|
||||
}
|
||||
self.0.iter().any(|group| group.evaluate(video, pages))
|
||||
}
|
||||
|
||||
fn evaluate_model(&self, video: &video::Model, pages: &[page::Model]) -> bool {
|
||||
if self.0.is_empty() {
|
||||
return true;
|
||||
}
|
||||
self.0.iter().any(|group| group.evaluate_model(video, pages))
|
||||
}
|
||||
}
|
||||
|
||||
/// 对于 Option<Rule> 如果 rule 不存在应该被认为是通过评估
|
||||
impl FieldEvaluatable for Option<Rule> {
|
||||
fn evaluate(&self, video: &video::ActiveModel, pages: &[page::ActiveModel]) -> bool {
|
||||
self.as_ref().is_none_or(|rule| rule.evaluate(video, pages))
|
||||
}
|
||||
|
||||
fn evaluate_model(&self, video: &video::Model, pages: &[page::Model]) -> bool {
|
||||
self.as_ref().is_none_or(|rule| rule.evaluate_model(video, pages))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use bili_sync_entity::page;
|
||||
use chrono::NaiveDate;
|
||||
use sea_orm::ActiveValue::Set;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_display() {
|
||||
let test_cases = vec![
|
||||
(
|
||||
Rule(vec![vec![RuleTarget::Title(Condition::Contains("唐氏".to_string()))]]),
|
||||
"「(标题包含“唐氏”)」",
|
||||
),
|
||||
(
|
||||
Rule(vec![vec![
|
||||
RuleTarget::Title(Condition::Prefix("街霸".to_string())),
|
||||
RuleTarget::Tags(Condition::Contains("套路".to_string())),
|
||||
]]),
|
||||
"「(标题以“街霸”开头)且(标签包含“套路”)」",
|
||||
),
|
||||
(
|
||||
Rule(vec![
|
||||
vec![
|
||||
RuleTarget::Title(Condition::Contains("Rust".to_string())),
|
||||
RuleTarget::PageCount(Condition::GreaterThan(5)),
|
||||
],
|
||||
vec![
|
||||
RuleTarget::Tags(Condition::Suffix("入门".to_string())),
|
||||
RuleTarget::PubTime(Condition::GreaterThan(
|
||||
NaiveDate::from_ymd_opt(2023, 1, 1)
|
||||
.unwrap()
|
||||
.and_hms_opt(0, 0, 0)
|
||||
.unwrap(),
|
||||
)),
|
||||
],
|
||||
]),
|
||||
"「(标题包含“Rust”)且(视频分页数量大于“5”)」或「(标签以“入门”结尾)且(发布时间大于“2023-01-01 00:00:00”)」",
|
||||
),
|
||||
(
|
||||
Rule(vec![vec![
|
||||
RuleTarget::Not(Box::new(RuleTarget::Title(Condition::Contains("广告".to_string())))),
|
||||
RuleTarget::PageCount(Condition::LessThan(10)),
|
||||
]]),
|
||||
"「(标题不包含“广告”)且(视频分页数量小于“10”)」",
|
||||
),
|
||||
(
|
||||
Rule(vec![vec![
|
||||
RuleTarget::FavTime(Condition::Between(
|
||||
NaiveDate::from_ymd_opt(2023, 6, 1)
|
||||
.unwrap()
|
||||
.and_hms_opt(0, 0, 0)
|
||||
.unwrap(),
|
||||
NaiveDate::from_ymd_opt(2023, 12, 31)
|
||||
.unwrap()
|
||||
.and_hms_opt(23, 59, 59)
|
||||
.unwrap(),
|
||||
)),
|
||||
// autocorrect-disable
|
||||
RuleTarget::Tags(Condition::MatchesRegex(
|
||||
"技术|教程".to_string(),
|
||||
regex::Regex::new("技术|教程").unwrap(),
|
||||
)),
|
||||
]]),
|
||||
"「(收藏时间在“2023-06-01 00:00:00”和“2023-12-31 23:59:59”之间)且(标签匹配“技术|教程”)」",
|
||||
// autocorrect-enable
|
||||
),
|
||||
];
|
||||
|
||||
for (rule, expected) in test_cases {
|
||||
assert_eq!(rule.to_string(), expected);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_evaluate() {
|
||||
let test_cases = vec![
|
||||
(
|
||||
(
|
||||
video::ActiveModel {
|
||||
name: Set("骂谁唐氏呢!!!".to_string()),
|
||||
..Default::default()
|
||||
},
|
||||
vec![],
|
||||
),
|
||||
Rule(vec![vec![RuleTarget::Title(Condition::Contains("唐氏".to_string()))]]),
|
||||
true,
|
||||
),
|
||||
(
|
||||
(
|
||||
video::ActiveModel::default(),
|
||||
vec![page::ActiveModel::default(); 2],
|
||||
),
|
||||
Rule(vec![vec![RuleTarget::PageCount(Condition::Equals(1))]]),
|
||||
false,
|
||||
),
|
||||
(
|
||||
(
|
||||
video::ActiveModel{
|
||||
tags: Set(Some(vec!["原神".to_owned(),"永雏塔菲".to_owned(),"虚拟主播".to_owned()].into())),
|
||||
..Default::default()
|
||||
},
|
||||
vec![],
|
||||
),
|
||||
Rule (vec![vec![RuleTarget::Not(Box::new(RuleTarget::Tags(Condition::Equals(
|
||||
"原神".to_string(),
|
||||
))))]],
|
||||
),
|
||||
false,
|
||||
),
|
||||
(
|
||||
(
|
||||
video::ActiveModel {
|
||||
name: Set(
|
||||
"万字怒扒网易《归唐》底裤!中国首款大厂买断制单机,靠谱吗?——全网最全!官方非独家幕后!关于《归唐》PV 的所有秘密~都在这里了~".to_owned(),
|
||||
),
|
||||
..Default::default()
|
||||
},
|
||||
vec![],
|
||||
),
|
||||
Rule(vec![vec![RuleTarget::Not(Box::new(RuleTarget::Title(Condition::MatchesRegex(
|
||||
r"^\S+字(解析|怒扒|拆解)".to_owned(),
|
||||
regex::Regex::new(r"^\S+字(解析|怒扒)").unwrap(),
|
||||
))))]],
|
||||
),
|
||||
false,
|
||||
),
|
||||
];
|
||||
|
||||
for ((video, pages), rule, expected) in test_cases {
|
||||
assert_eq!(rule.evaluate(&video, &pages), expected);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,6 @@
|
||||
use crate::error::ExecutionStatus;
|
||||
|
||||
pub static STATUS_NOT_STARTED: u32 = 0b000;
|
||||
pub(super) static STATUS_MAX_RETRY: u32 = 0b100;
|
||||
pub static STATUS_OK: u32 = 0b111;
|
||||
pub static STATUS_COMPLETED: u32 = 1 << 31;
|
||||
@@ -34,11 +35,19 @@ impl<const N: usize> Status<N> {
|
||||
for i in 0..N {
|
||||
let status = self.get_status(i);
|
||||
if !(status < STATUS_MAX_RETRY || status == STATUS_OK) {
|
||||
self.set_status(i, 0);
|
||||
self.set_status(i, STATUS_NOT_STARTED);
|
||||
changed = true;
|
||||
}
|
||||
}
|
||||
// 理论上 changed 可以直接从上面的循环中得到,因为 completed 标志位的改变是由子任务状态的改变引起的,子任务没有改变则 completed 也不会改变
|
||||
changed
|
||||
}
|
||||
|
||||
/// 重置所有失败的状态,将状态设置为 0b000,返回值表示 status 是否发生了变化
|
||||
/// force 版本在普通版本的基础上,会额外检查是否存在需要运行的任务,如果存在则修正 completed 标记位为“未完成”
|
||||
/// 这个方法的典型用例是在引入新的任务状态后重置历史视频,允许历史视频执行新引入的任务
|
||||
pub fn force_reset_failed(&mut self) -> bool {
|
||||
let mut changed = self.reset_failed();
|
||||
// 理论上上面的 changed 就足够了,因为 completed 标志位的改变是由子任务状态的改变引起的,子任务没有改变则 completed 也不会改变
|
||||
// 但考虑特殊情况,新版本引入了一个新的子任务项,此时会出现明明有子任务未执行,但 completed 标记位仍然为 true 的情况
|
||||
// 当然可以在新版本迁移文件中全局重置 completed 标记位,但这样影响范围太大感觉不太好
|
||||
// 在后面进行这部分额外判断可以兼容这种情况,在由用户手动触发的 reset_failed 调用中修正 completed 标记位
|
||||
@@ -160,14 +169,14 @@ impl<const N: usize> From<[u32; N]> for Status<N> {
|
||||
}
|
||||
}
|
||||
|
||||
/// 包含五个子任务,从前到后依次是:视频封面、视频信息、Up 主头像、Up 主信息、分 P 下载
|
||||
/// 包含五个子任务,从前到后依次是:视频封面、视频信息、Up 主头像、Up 主信息、分页下载
|
||||
pub type VideoStatus = Status<5>;
|
||||
|
||||
/// 包含五个子任务,从前到后分别是:视频封面、视频内容、视频信息、视频弹幕、视频字幕
|
||||
pub type PageStatus = Status<5>;
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
mod tests {
|
||||
use anyhow::anyhow;
|
||||
|
||||
use super::*;
|
||||
@@ -232,10 +241,13 @@ mod test {
|
||||
assert!(status.reset_failed());
|
||||
assert!(!status.get_completed());
|
||||
assert_eq!(<[u32; 3]>::from(status), [3, 0, 7]);
|
||||
// 没有内容需要重置,但 completed 标记位是错误的(模拟新增一个子任务状态的情况),此时 reset_failed 会修正 completed 标记位
|
||||
// 没有内容需要重置,但 completed 标记位是错误的(模拟新增一个子任务状态的情况)
|
||||
// 此时 reset_failed 不会修正 completed 标记位,而 force_reset_failed 会
|
||||
status.set_completed(true);
|
||||
assert!(status.get_completed());
|
||||
assert!(status.reset_failed());
|
||||
assert!(!status.reset_failed());
|
||||
assert!(status.get_completed());
|
||||
assert!(status.force_reset_failed());
|
||||
assert!(!status.get_completed());
|
||||
// 重置一个已经成功的任务,没有改变状态,也不会修改标记位
|
||||
let mut status = Status::<3>::from([7, 7, 7]);
|
||||
|
||||
68
crates/bili_sync/src/utils/task_notifier.rs
Normal file
68
crates/bili_sync/src/utils/task_notifier.rs
Normal file
@@ -0,0 +1,68 @@
|
||||
use std::sync::{Arc, LazyLock};
|
||||
|
||||
use serde::Serialize;
|
||||
use tokio::sync::MutexGuard;
|
||||
|
||||
use crate::config::VersionedConfig;
|
||||
|
||||
pub static TASK_STATUS_NOTIFIER: LazyLock<TaskStatusNotifier> = LazyLock::new(TaskStatusNotifier::new);
|
||||
|
||||
#[derive(Serialize, Default)]
|
||||
pub struct TaskStatus {
|
||||
is_running: bool,
|
||||
last_run: Option<chrono::DateTime<chrono::Local>>,
|
||||
last_finish: Option<chrono::DateTime<chrono::Local>>,
|
||||
next_run: Option<chrono::DateTime<chrono::Local>>,
|
||||
}
|
||||
|
||||
pub struct TaskStatusNotifier {
|
||||
mutex: tokio::sync::Mutex<()>,
|
||||
tx: tokio::sync::watch::Sender<Arc<TaskStatus>>,
|
||||
rx: tokio::sync::watch::Receiver<Arc<TaskStatus>>,
|
||||
}
|
||||
|
||||
impl TaskStatusNotifier {
|
||||
pub fn new() -> Self {
|
||||
let (tx, rx) = tokio::sync::watch::channel(Arc::new(TaskStatus::default()));
|
||||
Self {
|
||||
mutex: tokio::sync::Mutex::const_new(()),
|
||||
tx,
|
||||
rx,
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn start_running(&'_ self) -> MutexGuard<'_, ()> {
|
||||
let lock = self.mutex.lock().await;
|
||||
let _ = self.tx.send(Arc::new(TaskStatus {
|
||||
is_running: true,
|
||||
last_run: Some(chrono::Local::now()),
|
||||
last_finish: None,
|
||||
next_run: None,
|
||||
}));
|
||||
lock
|
||||
}
|
||||
|
||||
pub fn finish_running(&self, _lock: MutexGuard<()>) {
|
||||
let last_status = self.tx.borrow();
|
||||
let last_run = last_status.last_run;
|
||||
drop(last_status);
|
||||
let config = VersionedConfig::get().load();
|
||||
let now = chrono::Local::now();
|
||||
|
||||
let _ = self.tx.send(Arc::new(TaskStatus {
|
||||
is_running: false,
|
||||
last_run,
|
||||
last_finish: Some(now),
|
||||
next_run: now.checked_add_signed(chrono::Duration::seconds(config.interval as i64)),
|
||||
}));
|
||||
}
|
||||
|
||||
/// 精确探测任务执行状态,保证如果读取到“未运行”,那么在锁释放之前任务不会被执行
|
||||
pub fn detect_running(&self) -> Option<MutexGuard<'_, ()>> {
|
||||
self.mutex.try_lock().ok()
|
||||
}
|
||||
|
||||
pub fn subscribe(&self) -> tokio::sync::watch::Receiver<Arc<TaskStatus>> {
|
||||
self.rx.clone()
|
||||
}
|
||||
}
|
||||
23
crates/bili_sync/src/utils/validation.rs
Normal file
23
crates/bili_sync/src/utils/validation.rs
Normal file
@@ -0,0 +1,23 @@
|
||||
use std::path::Path;
|
||||
|
||||
use validator::ValidationError;
|
||||
|
||||
use crate::utils::status::{STATUS_NOT_STARTED, STATUS_OK};
|
||||
|
||||
pub fn validate_status_value(value: u32) -> Result<(), ValidationError> {
|
||||
if value == STATUS_OK || value == STATUS_NOT_STARTED {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(ValidationError::new(
|
||||
"status_value must be either STATUS_OK or STATUS_NOT_STARTED",
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn validate_path(path: &str) -> Result<(), ValidationError> {
|
||||
if path.is_empty() || !Path::new(path).is_absolute() {
|
||||
Err(ValidationError::new("path must be a non-empty absolute path"))
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -2,19 +2,19 @@ use std::collections::HashSet;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::pin::Pin;
|
||||
|
||||
use anyhow::{anyhow, bail, Context, Result};
|
||||
use anyhow::{Context, Result, anyhow, bail};
|
||||
use bili_sync_entity::*;
|
||||
use futures::stream::{FuturesOrdered, FuturesUnordered};
|
||||
use futures::{Future, Stream, StreamExt, TryStreamExt};
|
||||
use sea_orm::entity::prelude::*;
|
||||
use futures::stream::FuturesUnordered;
|
||||
use futures::{Stream, StreamExt, TryStreamExt};
|
||||
use sea_orm::ActiveValue::Set;
|
||||
use sea_orm::TransactionTrait;
|
||||
use sea_orm::entity::prelude::*;
|
||||
use tokio::fs;
|
||||
use tokio::sync::Semaphore;
|
||||
|
||||
use crate::adapter::{video_source_from, Args, VideoSource, VideoSourceEnum};
|
||||
use crate::adapter::{VideoSource, VideoSourceEnum};
|
||||
use crate::bilibili::{BestStream, BiliClient, BiliError, Dimension, PageInfo, Video, VideoInfo};
|
||||
use crate::config::{PathSafeTemplate, ARGS, CONFIG, TEMPLATE};
|
||||
use crate::config::{ARGS, PathSafeTemplate, TEMPLATE, VersionedConfig};
|
||||
use crate::downloader::Downloader;
|
||||
use crate::error::{DownloadAbortError, ExecutionStatus, ProcessPageError};
|
||||
use crate::utils::format_arg::{page_format_args, video_format_args};
|
||||
@@ -22,18 +22,18 @@ use crate::utils::model::{
|
||||
create_pages, create_videos, filter_unfilled_videos, filter_unhandled_video_pages, update_pages_model,
|
||||
update_videos_model,
|
||||
};
|
||||
use crate::utils::nfo::{ModelWrapper, NFOMode, NFOSerializer};
|
||||
use crate::utils::status::{PageStatus, VideoStatus, STATUS_OK};
|
||||
use crate::utils::nfo::NFO;
|
||||
use crate::utils::rule::FieldEvaluatable;
|
||||
use crate::utils::status::{PageStatus, STATUS_OK, VideoStatus};
|
||||
|
||||
/// 完整地处理某个视频来源
|
||||
pub async fn process_video_source(
|
||||
args: Args<'_>,
|
||||
video_source: VideoSourceEnum,
|
||||
bili_client: &BiliClient,
|
||||
path: &Path,
|
||||
connection: &DatabaseConnection,
|
||||
) -> Result<()> {
|
||||
// 从参数中获取视频列表的 Model 与视频流
|
||||
let (video_source, video_streams) = video_source_from(args, path, bili_client, connection).await?;
|
||||
let (video_source, video_streams) = video_source.refresh(bili_client, connection).await?;
|
||||
// 从视频流中获取新视频的简要信息,写入数据库
|
||||
refresh_video_source(&video_source, video_streams, connection).await?;
|
||||
// 单独请求视频详情接口,获取视频的详情信息与所有的分页,写入数据库
|
||||
@@ -72,11 +72,11 @@ pub async fn refresh_video_source<'a>(
|
||||
if release_datetime > &max_datetime {
|
||||
max_datetime = *release_datetime;
|
||||
}
|
||||
futures::future::ready(release_datetime > &latest_row_at)
|
||||
futures::future::ready(video_source.should_take(release_datetime, &latest_row_at))
|
||||
}
|
||||
}
|
||||
})
|
||||
.filter_map(|res| futures::future::ready(res.ok()))
|
||||
.filter_map(|res| futures::future::ready(video_source.should_filter(res, &latest_row_at)))
|
||||
.chunks(10);
|
||||
let mut count = 0;
|
||||
while let Some(videos_info) = video_streams.next().await {
|
||||
@@ -103,39 +103,52 @@ pub async fn fetch_video_details(
|
||||
) -> Result<()> {
|
||||
video_source.log_fetch_video_start();
|
||||
let videos_model = filter_unfilled_videos(video_source.filter_expr(), connection).await?;
|
||||
for video_model in videos_model {
|
||||
let video = Video::new(bili_client, video_model.bvid.clone());
|
||||
let info: Result<_> = async { Ok((video.get_tags().await?, video.get_view_info().await?)) }.await;
|
||||
match info {
|
||||
Err(e) => {
|
||||
error!(
|
||||
"获取视频 {} - {} 的详细信息失败,错误为:{:#}",
|
||||
&video_model.bvid, &video_model.name, e
|
||||
);
|
||||
if let Some(BiliError::RequestFailed(-404, _)) = e.downcast_ref::<BiliError>() {
|
||||
let mut video_active_model: bili_sync_entity::video::ActiveModel = video_model.into();
|
||||
video_active_model.valid = Set(false);
|
||||
video_active_model.save(connection).await?;
|
||||
let semaphore = Semaphore::new(VersionedConfig::get().load().concurrent_limit.video);
|
||||
let semaphore_ref = &semaphore;
|
||||
let tasks = videos_model
|
||||
.into_iter()
|
||||
.map(|video_model| async move {
|
||||
let _permit = semaphore_ref.acquire().await.context("acquire semaphore failed")?;
|
||||
let video = Video::new(bili_client, video_model.bvid.clone());
|
||||
let info: Result<_> = async { Ok((video.get_tags().await?, video.get_view_info().await?)) }.await;
|
||||
match info {
|
||||
Err(e) => {
|
||||
error!(
|
||||
"获取视频 {} - {} 的详细信息失败,错误为:{:#}",
|
||||
&video_model.bvid, &video_model.name, e
|
||||
);
|
||||
if let Some(BiliError::RequestFailed(-404, _)) = e.downcast_ref::<BiliError>() {
|
||||
let mut video_active_model: bili_sync_entity::video::ActiveModel = video_model.into();
|
||||
video_active_model.valid = Set(false);
|
||||
video_active_model.save(connection).await?;
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok((tags, mut view_info)) => {
|
||||
let VideoInfo::Detail { pages, .. } = &mut view_info else {
|
||||
unreachable!()
|
||||
};
|
||||
let pages = std::mem::take(pages);
|
||||
let pages_len = pages.len();
|
||||
let txn = connection.begin().await?;
|
||||
// 将分页信息写入数据库
|
||||
create_pages(pages, &video_model, &txn).await?;
|
||||
let mut video_active_model = view_info.into_detail_model(video_model);
|
||||
video_source.set_relation_id(&mut video_active_model);
|
||||
video_active_model.single_page = Set(Some(pages_len == 1));
|
||||
video_active_model.tags = Set(Some(serde_json::to_value(tags)?));
|
||||
video_active_model.save(&txn).await?;
|
||||
txn.commit().await?;
|
||||
}
|
||||
};
|
||||
}
|
||||
Ok((tags, mut view_info)) => {
|
||||
let VideoInfo::Detail { pages, .. } = &mut view_info else {
|
||||
unreachable!()
|
||||
};
|
||||
// 构造 page model
|
||||
let pages = std::mem::take(pages);
|
||||
let pages = pages
|
||||
.into_iter()
|
||||
.map(|p| p.into_active_model(video_model.id))
|
||||
.collect::<Vec<page::ActiveModel>>();
|
||||
// 更新 video model 的各项有关属性
|
||||
let mut video_active_model = view_info.into_detail_model(video_model);
|
||||
video_source.set_relation_id(&mut video_active_model);
|
||||
video_active_model.single_page = Set(Some(pages.len() == 1));
|
||||
video_active_model.tags = Set(Some(tags.into()));
|
||||
video_active_model.should_download = Set(video_source.rule().evaluate(&video_active_model, &pages));
|
||||
let txn = connection.begin().await?;
|
||||
create_pages(pages, &txn).await?;
|
||||
video_active_model.save(&txn).await?;
|
||||
txn.commit().await?;
|
||||
}
|
||||
};
|
||||
Ok::<_, anyhow::Error>(())
|
||||
})
|
||||
.collect::<FuturesUnordered<_>>();
|
||||
tasks.try_collect::<Vec<_>>().await?;
|
||||
video_source.log_fetch_video_end();
|
||||
Ok(())
|
||||
}
|
||||
@@ -147,7 +160,7 @@ pub async fn download_unprocessed_videos(
|
||||
connection: &DatabaseConnection,
|
||||
) -> Result<()> {
|
||||
video_source.log_download_video_start();
|
||||
let semaphore = Semaphore::new(CONFIG.concurrent_limit.video);
|
||||
let semaphore = Semaphore::new(VersionedConfig::get().load().concurrent_limit.video);
|
||||
let downloader = Downloader::new(bili_client.client.clone());
|
||||
let unhandled_videos_pages = filter_unhandled_video_pages(video_source.filter_expr(), connection).await?;
|
||||
let mut assigned_upper = HashSet::new();
|
||||
@@ -208,58 +221,63 @@ pub async fn download_video_pages(
|
||||
let _permit = semaphore.acquire().await.context("acquire semaphore failed")?;
|
||||
let mut status = VideoStatus::from(video_model.download_status);
|
||||
let separate_status = status.should_run();
|
||||
let base_path = video_source
|
||||
.path()
|
||||
.join(TEMPLATE.path_safe_render("video", &video_format_args(&video_model))?);
|
||||
let base_path = video_source.path().join(
|
||||
TEMPLATE
|
||||
.load()
|
||||
.path_safe_render("video", &video_format_args(&video_model))?,
|
||||
);
|
||||
let upper_id = video_model.upper_id.to_string();
|
||||
let base_upper_path = &CONFIG
|
||||
let base_upper_path = VersionedConfig::get()
|
||||
.load()
|
||||
.upper_path
|
||||
.join(upper_id.chars().next().context("upper_id is empty")?.to_string())
|
||||
.join(upper_id);
|
||||
let is_single_page = video_model.single_page.context("single_page is null")?;
|
||||
// 对于单页视频,page 的下载已经足够
|
||||
// 对于多页视频,page 下载仅包含了分集内容,需要额外补上视频的 poster 的 tvshow.nfo
|
||||
let tasks: Vec<Pin<Box<dyn Future<Output = Result<ExecutionStatus>> + Send>>> = vec![
|
||||
let (res_1, res_2, res_3, res_4, res_5) = tokio::join!(
|
||||
// 下载视频封面
|
||||
Box::pin(fetch_video_poster(
|
||||
fetch_video_poster(
|
||||
separate_status[0] && !is_single_page,
|
||||
&video_model,
|
||||
downloader,
|
||||
base_path.join("poster.jpg"),
|
||||
base_path.join("fanart.jpg"),
|
||||
)),
|
||||
),
|
||||
// 生成视频信息的 nfo
|
||||
Box::pin(generate_video_nfo(
|
||||
generate_video_nfo(
|
||||
separate_status[1] && !is_single_page,
|
||||
&video_model,
|
||||
base_path.join("tvshow.nfo"),
|
||||
)),
|
||||
),
|
||||
// 下载 Up 主头像
|
||||
Box::pin(fetch_upper_face(
|
||||
fetch_upper_face(
|
||||
separate_status[2] && should_download_upper,
|
||||
&video_model,
|
||||
downloader,
|
||||
base_upper_path.join("folder.jpg"),
|
||||
)),
|
||||
),
|
||||
// 生成 Up 主信息的 nfo
|
||||
Box::pin(generate_upper_nfo(
|
||||
generate_upper_nfo(
|
||||
separate_status[3] && should_download_upper,
|
||||
&video_model,
|
||||
base_upper_path.join("person.nfo"),
|
||||
)),
|
||||
// 分发并执行分 P 下载的任务
|
||||
Box::pin(dispatch_download_page(
|
||||
),
|
||||
// 分发并执行分页下载的任务
|
||||
dispatch_download_page(
|
||||
separate_status[4],
|
||||
bili_client,
|
||||
&video_model,
|
||||
pages,
|
||||
connection,
|
||||
downloader,
|
||||
&base_path,
|
||||
)),
|
||||
];
|
||||
let tasks: FuturesOrdered<_> = tasks.into_iter().collect();
|
||||
let results: Vec<ExecutionStatus> = tasks.collect::<Vec<_>>().await.into_iter().map(Into::into).collect();
|
||||
&base_path
|
||||
)
|
||||
);
|
||||
let results = [res_1, res_2, res_3, res_4, res_5]
|
||||
.into_iter()
|
||||
.map(Into::into)
|
||||
.collect::<Vec<_>>();
|
||||
status.update_status(&results);
|
||||
results
|
||||
.iter()
|
||||
@@ -270,18 +288,18 @@ pub async fn download_video_pages(
|
||||
ExecutionStatus::Succeeded => info!("处理视频「{}」{}成功", &video_model.name, task_name),
|
||||
ExecutionStatus::Ignored(e) => {
|
||||
error!(
|
||||
"处理视频「{}」{}出现常见错误,已忽略: {:#}",
|
||||
"处理视频「{}」{}出现常见错误,已忽略:{:#}",
|
||||
&video_model.name, task_name, e
|
||||
)
|
||||
}
|
||||
ExecutionStatus::Failed(e) | ExecutionStatus::FixedFailed(_, e) => {
|
||||
error!("处理视频「{}」{}失败: {:#}", &video_model.name, task_name, e)
|
||||
error!("处理视频「{}」{}失败:{:#}", &video_model.name, task_name, e)
|
||||
}
|
||||
});
|
||||
if let ExecutionStatus::Failed(e) = results.into_iter().nth(4).context("page download result not found")? {
|
||||
if e.downcast_ref::<DownloadAbortError>().is_some() {
|
||||
return Err(e);
|
||||
}
|
||||
if let ExecutionStatus::Failed(e) = results.into_iter().nth(4).context("page download result not found")?
|
||||
&& e.downcast_ref::<DownloadAbortError>().is_some()
|
||||
{
|
||||
return Err(e);
|
||||
}
|
||||
let mut video_active_model: video::ActiveModel = video_model.into();
|
||||
video_active_model.download_status = Set(status.into());
|
||||
@@ -302,7 +320,7 @@ pub async fn dispatch_download_page(
|
||||
if !should_run {
|
||||
return Ok(ExecutionStatus::Skipped);
|
||||
}
|
||||
let child_semaphore = Semaphore::new(CONFIG.concurrent_limit.page);
|
||||
let child_semaphore = Semaphore::new(VersionedConfig::get().load().concurrent_limit.page);
|
||||
let tasks = pages
|
||||
.into_iter()
|
||||
.map(|page_model| {
|
||||
@@ -321,10 +339,10 @@ pub async fn dispatch_download_page(
|
||||
.take_while(|res| {
|
||||
match res {
|
||||
Ok(model) => {
|
||||
// 该视频的所有分页的下载状态都会在此返回,需要根据这些状态确认视频层“分 P 下载”子任务的状态
|
||||
// 该视频的所有分页的下载状态都会在此返回,需要根据这些状态确认视频层“分页下载”子任务的状态
|
||||
// 在过去的实现中,此处仅仅根据 page_download_status 的最高标志位来判断,如果最高标志位是 true 则认为完成
|
||||
// 这样会导致即使分页中有失败到 MAX_RETRY 的情况,视频层的分 P 下载状态也会被认为是 Succeeded,不够准确
|
||||
// 新版本实现会将此处取值为所有子任务状态的最小值,这样只有所有分页的子任务全部成功时才会认为视频层的分 P 下载状态是 Succeeded
|
||||
// 这样会导致即使分页中有失败到 MAX_RETRY 的情况,视频层的分页下载状态也会被认为是 Succeeded,不够准确
|
||||
// 新版本实现会将此处取值为所有子任务状态的最小值,这样只有所有分页的子任务全部成功时才会认为视频层的分页下载状态是 Succeeded
|
||||
let page_download_status = model.download_status.try_as_ref().expect("download_status must be set");
|
||||
let separate_status: [u32; 5] = PageStatus::from(*page_download_status).into();
|
||||
for status in separate_status {
|
||||
@@ -368,7 +386,9 @@ pub async fn download_page(
|
||||
let mut status = PageStatus::from(page_model.download_status);
|
||||
let separate_status = status.should_run();
|
||||
let is_single_page = video_model.single_page.context("single_page is null")?;
|
||||
let base_name = TEMPLATE.path_safe_render("page", &page_format_args(video_model, &page_model))?;
|
||||
let base_name = TEMPLATE
|
||||
.load()
|
||||
.path_safe_render("page", &page_format_args(video_model, &page_model))?;
|
||||
let (poster_path, video_path, nfo_path, danmaku_path, fanart_path, subtitle_path) = if is_single_page {
|
||||
(
|
||||
base_path.join(format!("{}-poster.jpg", &base_name)),
|
||||
@@ -413,46 +433,36 @@ pub async fn download_page(
|
||||
dimension,
|
||||
..Default::default()
|
||||
};
|
||||
let tasks: Vec<Pin<Box<dyn Future<Output = Result<ExecutionStatus>> + Send>>> = vec![
|
||||
Box::pin(fetch_page_poster(
|
||||
let (res_1, res_2, res_3, res_4, res_5) = tokio::join!(
|
||||
// 下载分页封面
|
||||
fetch_page_poster(
|
||||
separate_status[0],
|
||||
video_model,
|
||||
&page_model,
|
||||
downloader,
|
||||
poster_path,
|
||||
fanart_path,
|
||||
)),
|
||||
Box::pin(fetch_page_video(
|
||||
fanart_path
|
||||
),
|
||||
// 下载分页视频
|
||||
fetch_page_video(
|
||||
separate_status[1],
|
||||
bili_client,
|
||||
video_model,
|
||||
downloader,
|
||||
&page_info,
|
||||
&video_path,
|
||||
)),
|
||||
Box::pin(generate_page_nfo(
|
||||
separate_status[2],
|
||||
video_model,
|
||||
&page_model,
|
||||
nfo_path,
|
||||
)),
|
||||
Box::pin(fetch_page_danmaku(
|
||||
separate_status[3],
|
||||
bili_client,
|
||||
video_model,
|
||||
&page_info,
|
||||
danmaku_path,
|
||||
)),
|
||||
Box::pin(fetch_page_subtitle(
|
||||
separate_status[4],
|
||||
bili_client,
|
||||
video_model,
|
||||
&page_info,
|
||||
&subtitle_path,
|
||||
)),
|
||||
];
|
||||
let tasks: FuturesOrdered<_> = tasks.into_iter().collect();
|
||||
let results: Vec<ExecutionStatus> = tasks.collect::<Vec<_>>().await.into_iter().map(Into::into).collect();
|
||||
&video_path
|
||||
),
|
||||
// 生成分页视频信息的 nfo
|
||||
generate_page_nfo(separate_status[2], video_model, &page_model, nfo_path),
|
||||
// 下载分页弹幕
|
||||
fetch_page_danmaku(separate_status[3], bili_client, video_model, &page_info, danmaku_path),
|
||||
// 下载分页字幕
|
||||
fetch_page_subtitle(separate_status[4], bili_client, video_model, &page_info, &subtitle_path)
|
||||
);
|
||||
let results = [res_1, res_2, res_3, res_4, res_5]
|
||||
.into_iter()
|
||||
.map(Into::into)
|
||||
.collect::<Vec<_>>();
|
||||
status.update_status(&results);
|
||||
results
|
||||
.iter()
|
||||
@@ -468,20 +478,20 @@ pub async fn download_page(
|
||||
),
|
||||
ExecutionStatus::Ignored(e) => {
|
||||
error!(
|
||||
"处理视频「{}」第 {} 页{}出现常见错误,已忽略: {:#}",
|
||||
"处理视频「{}」第 {} 页{}出现常见错误,已忽略:{:#}",
|
||||
&video_model.name, page_model.pid, task_name, e
|
||||
)
|
||||
}
|
||||
ExecutionStatus::Failed(e) | ExecutionStatus::FixedFailed(_, e) => error!(
|
||||
"处理视频「{}」第 {} 页{}失败: {:#}",
|
||||
"处理视频「{}」第 {} 页{}失败:{:#}",
|
||||
&video_model.name, page_model.pid, task_name, e
|
||||
),
|
||||
});
|
||||
// 如果下载视频时触发风控,直接返回 DownloadAbortError
|
||||
if let ExecutionStatus::Failed(e) = results.into_iter().nth(1).context("video download result not found")? {
|
||||
if let Ok(BiliError::RiskControlOccurred) = e.downcast::<BiliError>() {
|
||||
bail!(DownloadAbortError());
|
||||
}
|
||||
if let ExecutionStatus::Failed(e) = results.into_iter().nth(1).context("video download result not found")?
|
||||
&& let Ok(BiliError::RiskControlOccurred) = e.downcast::<BiliError>()
|
||||
{
|
||||
bail!(DownloadAbortError());
|
||||
}
|
||||
let mut page_active_model: page::ActiveModel = page_model.into();
|
||||
page_active_model.download_status = Set(status.into());
|
||||
@@ -533,13 +543,13 @@ pub async fn fetch_page_video(
|
||||
let streams = bili_video
|
||||
.get_page_analyzer(page_info)
|
||||
.await?
|
||||
.best_stream(&CONFIG.filter_option)?;
|
||||
.best_stream(&VersionedConfig::get().load().filter_option)?;
|
||||
match streams {
|
||||
BestStream::Mixed(mix_stream) => downloader.fetch(mix_stream.url(), page_path).await?,
|
||||
BestStream::Mixed(mix_stream) => downloader.fetch_with_fallback(&mix_stream.urls(), page_path).await?,
|
||||
BestStream::VideoAudio {
|
||||
video: video_stream,
|
||||
audio: None,
|
||||
} => downloader.fetch(video_stream.url(), page_path).await?,
|
||||
} => downloader.fetch_with_fallback(&video_stream.urls(), page_path).await?,
|
||||
BestStream::VideoAudio {
|
||||
video: video_stream,
|
||||
audio: Some(audio_stream),
|
||||
@@ -549,8 +559,12 @@ pub async fn fetch_page_video(
|
||||
page_path.with_extension("tmp_audio"),
|
||||
);
|
||||
let res = async {
|
||||
downloader.fetch(video_stream.url(), &tmp_video_path).await?;
|
||||
downloader.fetch(audio_stream.url(), &tmp_audio_path).await?;
|
||||
downloader
|
||||
.fetch_with_fallback(&video_stream.urls(), &tmp_video_path)
|
||||
.await?;
|
||||
downloader
|
||||
.fetch_with_fallback(&audio_stream.urls(), &tmp_audio_path)
|
||||
.await?;
|
||||
downloader.merge(&tmp_video_path, &tmp_audio_path, page_path).await
|
||||
}
|
||||
.await;
|
||||
@@ -614,12 +628,12 @@ pub async fn generate_page_nfo(
|
||||
return Ok(ExecutionStatus::Skipped);
|
||||
}
|
||||
let single_page = video_model.single_page.context("single_page is null")?;
|
||||
let nfo_serializer = if single_page {
|
||||
NFOSerializer(ModelWrapper::Video(video_model), NFOMode::MOVIE)
|
||||
let nfo = if single_page {
|
||||
NFO::Movie(video_model.into())
|
||||
} else {
|
||||
NFOSerializer(ModelWrapper::Page(page_model), NFOMode::EPOSODE)
|
||||
NFO::Episode(page_model.into())
|
||||
};
|
||||
generate_nfo(nfo_serializer, nfo_path).await?;
|
||||
generate_nfo(nfo, nfo_path).await?;
|
||||
Ok(ExecutionStatus::Succeeded)
|
||||
}
|
||||
|
||||
@@ -659,8 +673,7 @@ pub async fn generate_upper_nfo(
|
||||
if !should_run {
|
||||
return Ok(ExecutionStatus::Skipped);
|
||||
}
|
||||
let nfo_serializer = NFOSerializer(ModelWrapper::Video(video_model), NFOMode::UPPER);
|
||||
generate_nfo(nfo_serializer, nfo_path).await?;
|
||||
generate_nfo(NFO::Upper(video_model.into()), nfo_path).await?;
|
||||
Ok(ExecutionStatus::Succeeded)
|
||||
}
|
||||
|
||||
@@ -672,93 +685,15 @@ pub async fn generate_video_nfo(
|
||||
if !should_run {
|
||||
return Ok(ExecutionStatus::Skipped);
|
||||
}
|
||||
let nfo_serializer = NFOSerializer(ModelWrapper::Video(video_model), NFOMode::TVSHOW);
|
||||
generate_nfo(nfo_serializer, nfo_path).await?;
|
||||
generate_nfo(NFO::TVShow(video_model.into()), nfo_path).await?;
|
||||
Ok(ExecutionStatus::Succeeded)
|
||||
}
|
||||
|
||||
/// 创建 nfo_path 的父目录,然后写入 nfo 文件
|
||||
async fn generate_nfo(serializer: NFOSerializer<'_>, nfo_path: PathBuf) -> Result<()> {
|
||||
async fn generate_nfo(nfo: NFO<'_>, nfo_path: PathBuf) -> Result<()> {
|
||||
if let Some(parent) = nfo_path.parent() {
|
||||
fs::create_dir_all(parent).await?;
|
||||
}
|
||||
fs::write(
|
||||
nfo_path,
|
||||
serializer.generate_nfo(&CONFIG.nfo_time_type).await?.as_bytes(),
|
||||
)
|
||||
.await?;
|
||||
fs::write(nfo_path, nfo.generate_nfo().await?.as_bytes()).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use handlebars::handlebars_helper;
|
||||
use serde_json::json;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_template_usage() {
|
||||
let mut template = handlebars::Handlebars::new();
|
||||
handlebars_helper!(truncate: |s: String, len: usize| {
|
||||
if s.chars().count() > len {
|
||||
s.chars().take(len).collect::<String>()
|
||||
} else {
|
||||
s.to_string()
|
||||
}
|
||||
});
|
||||
template.register_helper("truncate", Box::new(truncate));
|
||||
let _ = template.path_safe_register("video", "test{{bvid}}test");
|
||||
let _ = template.path_safe_register("test_truncate", "哈哈,{{ truncate title 30 }}");
|
||||
let _ = template.path_safe_register("test_path_unix", "{{ truncate title 7 }}/test/a");
|
||||
let _ = template.path_safe_register("test_path_windows", r"{{ truncate title 7 }}\\test\\a");
|
||||
#[cfg(not(windows))]
|
||||
{
|
||||
assert_eq!(
|
||||
template
|
||||
.path_safe_render("test_path_unix", &json!({"title": "关注/永雏塔菲喵"}))
|
||||
.unwrap(),
|
||||
"关注_永雏塔菲/test/a"
|
||||
);
|
||||
assert_eq!(
|
||||
template
|
||||
.path_safe_render("test_path_windows", &json!({"title": "关注/永雏塔菲喵"}))
|
||||
.unwrap(),
|
||||
"关注_永雏塔菲_test_a"
|
||||
);
|
||||
}
|
||||
#[cfg(windows)]
|
||||
{
|
||||
assert_eq!(
|
||||
template
|
||||
.path_safe_render("test_path_unix", &json!({"title": "关注/永雏塔菲喵"}))
|
||||
.unwrap(),
|
||||
"关注_永雏塔菲_test_a"
|
||||
);
|
||||
assert_eq!(
|
||||
template
|
||||
.path_safe_render("test_path_windows", &json!({"title": "关注/永雏塔菲喵"}))
|
||||
.unwrap(),
|
||||
r"关注_永雏塔菲\\test\\a"
|
||||
);
|
||||
}
|
||||
assert_eq!(
|
||||
template
|
||||
.path_safe_render("video", &json!({"bvid": "BV1b5411h7g7"}))
|
||||
.unwrap(),
|
||||
"testBV1b5411h7g7test"
|
||||
);
|
||||
assert_eq!(
|
||||
template
|
||||
.path_safe_render(
|
||||
"test_truncate",
|
||||
&json!({"title": "你说得对,但是 Rust 是由 Mozilla 自主研发的一款全新的编译期格斗游戏。\
|
||||
编译将发生在一个被称作「Cargo」的构建系统中。在这里,被引用的指针将被授予「生命周期」之力,导引对象安全。\
|
||||
你将扮演一位名为「Rustacean」的神秘角色, 在与「Rustc」的搏斗中邂逅各种骨骼惊奇的傲娇报错。\
|
||||
征服她们、通过编译同时,逐步发掘「C++」程序崩溃的真相。"})
|
||||
)
|
||||
.unwrap(),
|
||||
"哈哈,你说得对,但是 Rust 是由 Mozilla 自主研发的一"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,5 +5,8 @@ edition = { workspace = true }
|
||||
publish = { workspace = true }
|
||||
|
||||
[dependencies]
|
||||
derivative = { workspace = true }
|
||||
sea-orm = { workspace = true }
|
||||
regex = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
|
||||
2
crates/bili_sync_entity/src/custom_type/mod.rs
Normal file
2
crates/bili_sync_entity/src/custom_type/mod.rs
Normal file
@@ -0,0 +1,2 @@
|
||||
pub mod rule;
|
||||
pub mod string_vec;
|
||||
120
crates/bili_sync_entity/src/custom_type/rule.rs
Normal file
120
crates/bili_sync_entity/src/custom_type/rule.rs
Normal file
@@ -0,0 +1,120 @@
|
||||
use std::fmt::Display;
|
||||
|
||||
use derivative::Derivative;
|
||||
use sea_orm::FromJsonQueryResult;
|
||||
use sea_orm::prelude::DateTime;
|
||||
use serde::{Deserialize, Deserializer, Serialize, Serializer};
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, Derivative)]
|
||||
#[derivative(PartialEq, Eq)]
|
||||
#[serde(rename_all = "camelCase", tag = "operator", content = "value")]
|
||||
pub enum Condition<T: Serialize + Display> {
|
||||
Equals(T),
|
||||
Contains(T),
|
||||
#[serde(deserialize_with = "deserialize_regex", serialize_with = "serialize_regex")]
|
||||
MatchesRegex(String, #[derivative(PartialEq = "ignore")] regex::Regex),
|
||||
Prefix(T),
|
||||
Suffix(T),
|
||||
GreaterThan(T),
|
||||
LessThan(T),
|
||||
Between(T, T),
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
|
||||
#[serde(rename_all = "camelCase", tag = "field", content = "rule")]
|
||||
pub enum RuleTarget {
|
||||
Title(Condition<String>),
|
||||
Tags(Condition<String>),
|
||||
FavTime(Condition<DateTime>),
|
||||
PubTime(Condition<DateTime>),
|
||||
PageCount(Condition<usize>),
|
||||
Not(Box<RuleTarget>),
|
||||
}
|
||||
|
||||
pub type AndGroup = Vec<RuleTarget>;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
|
||||
pub struct Rule(pub Vec<AndGroup>);
|
||||
|
||||
impl<T: Serialize + Display> Display for Condition<T> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Condition::Equals(v) => write!(f, "等于“{}”", v),
|
||||
Condition::Contains(v) => write!(f, "包含“{}”", v),
|
||||
Condition::MatchesRegex(pat, _) => write!(f, "匹配“{}”", pat),
|
||||
Condition::Prefix(v) => write!(f, "以“{}”开头", v),
|
||||
Condition::Suffix(v) => write!(f, "以“{}”结尾", v),
|
||||
Condition::GreaterThan(v) => write!(f, "大于“{}”", v),
|
||||
Condition::LessThan(v) => write!(f, "小于“{}”", v),
|
||||
Condition::Between(start, end) => write!(f, "在“{}”和“{}”之间", start, end),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for RuleTarget {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
fn get_field_name(rt: &RuleTarget, depth: usize) -> &'static str {
|
||||
match rt {
|
||||
RuleTarget::Title(_) => "标题",
|
||||
RuleTarget::Tags(_) => "标签",
|
||||
RuleTarget::FavTime(_) => "收藏时间",
|
||||
RuleTarget::PubTime(_) => "发布时间",
|
||||
RuleTarget::PageCount(_) => "视频分页数量",
|
||||
RuleTarget::Not(inner) => {
|
||||
if depth == 0 {
|
||||
get_field_name(inner, depth + 1)
|
||||
} else {
|
||||
"格式化失败"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
let field_name = get_field_name(self, 0);
|
||||
match self {
|
||||
RuleTarget::Not(inner) => match inner.as_ref() {
|
||||
RuleTarget::Title(cond) | RuleTarget::Tags(cond) => write!(f, "{}不{}", field_name, cond),
|
||||
RuleTarget::FavTime(cond) | RuleTarget::PubTime(cond) => {
|
||||
write!(f, "{}不{}", field_name, cond)
|
||||
}
|
||||
RuleTarget::PageCount(cond) => write!(f, "{}不{}", field_name, cond),
|
||||
RuleTarget::Not(_) => write!(f, "格式化失败"),
|
||||
},
|
||||
RuleTarget::Title(cond) | RuleTarget::Tags(cond) => write!(f, "{}{}", field_name, cond),
|
||||
RuleTarget::FavTime(cond) | RuleTarget::PubTime(cond) => {
|
||||
write!(f, "{}{}", field_name, cond)
|
||||
}
|
||||
RuleTarget::PageCount(cond) => write!(f, "{}{}", field_name, cond),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Rule {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let groups: Vec<String> = self
|
||||
.0
|
||||
.iter()
|
||||
.map(|group| {
|
||||
let conditions: Vec<String> = group.iter().map(|target| format!("({})", target)).collect();
|
||||
format!("「{}」", conditions.join("且"))
|
||||
})
|
||||
.collect();
|
||||
write!(f, "{}", groups.join("或"))
|
||||
}
|
||||
}
|
||||
|
||||
fn deserialize_regex<'de, D>(deserializer: D) -> Result<(String, regex::Regex), D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
let pattern = String::deserialize(deserializer)?;
|
||||
// 反序列化时预编译 regex,优化性能
|
||||
let regex = regex::Regex::new(&pattern).map_err(serde::de::Error::custom)?;
|
||||
Ok((pattern, regex))
|
||||
}
|
||||
|
||||
fn serialize_regex<S>(pattern: &str, _regex: ®ex::Regex, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
serializer.serialize_str(pattern)
|
||||
}
|
||||
20
crates/bili_sync_entity/src/custom_type/string_vec.rs
Normal file
20
crates/bili_sync_entity/src/custom_type/string_vec.rs
Normal file
@@ -0,0 +1,20 @@
|
||||
use sea_orm::FromJsonQueryResult;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
// reference: https://www.sea-ql.org/SeaORM/docs/generate-entity/column-types/#json-column
|
||||
// 在 entity 中使用裸 Vec 仅在 postgres 中支持,sea-orm 会将其映射为 postgres array
|
||||
// 如果需要实现跨数据库的 array,必须将其包裹在 wrapper type 中
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
|
||||
pub struct StringVec(pub Vec<String>);
|
||||
|
||||
impl From<Vec<String>> for StringVec {
|
||||
fn from(value: Vec<String>) -> Self {
|
||||
Self(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<StringVec> for Vec<String> {
|
||||
fn from(value: StringVec) -> Self {
|
||||
value.0
|
||||
}
|
||||
}
|
||||
@@ -2,6 +2,8 @@
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
use crate::rule::Rule;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)]
|
||||
#[sea_orm(table_name = "collection")]
|
||||
pub struct Model {
|
||||
@@ -14,6 +16,8 @@ pub struct Model {
|
||||
pub path: String,
|
||||
pub created_at: String,
|
||||
pub latest_row_at: DateTime,
|
||||
pub rule: Option<Rule>,
|
||||
pub enabled: bool,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
|
||||
17
crates/bili_sync_entity/src/entities/config.rs
Normal file
17
crates/bili_sync_entity/src/entities/config.rs
Normal file
@@ -0,0 +1,17 @@
|
||||
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.15
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)]
|
||||
#[sea_orm(table_name = "config")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: i32,
|
||||
pub data: String,
|
||||
pub created_at: String,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
@@ -2,6 +2,8 @@
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
use crate::rule::Rule;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)]
|
||||
#[sea_orm(table_name = "favorite")]
|
||||
pub struct Model {
|
||||
@@ -13,6 +15,8 @@ pub struct Model {
|
||||
pub path: String,
|
||||
pub created_at: String,
|
||||
pub latest_row_at: DateTime,
|
||||
pub rule: Option<Rule>,
|
||||
pub enabled: bool,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
pub mod prelude;
|
||||
|
||||
pub mod collection;
|
||||
pub mod config;
|
||||
pub mod favorite;
|
||||
pub mod page;
|
||||
pub mod submission;
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
use crate::rule::Rule;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)]
|
||||
#[sea_orm(table_name = "submission")]
|
||||
pub struct Model {
|
||||
@@ -12,6 +14,8 @@ pub struct Model {
|
||||
pub path: String,
|
||||
pub created_at: String,
|
||||
pub latest_row_at: DateTime,
|
||||
pub rule: Option<Rule>,
|
||||
pub enabled: bool,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
use crate::string_vec::StringVec;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Default)]
|
||||
#[sea_orm(table_name = "video")]
|
||||
pub struct Model {
|
||||
@@ -25,7 +27,8 @@ pub struct Model {
|
||||
pub favtime: DateTime,
|
||||
pub download_status: u32,
|
||||
pub valid: bool,
|
||||
pub tags: Option<serde_json::Value>,
|
||||
pub should_download: bool,
|
||||
pub tags: Option<StringVec>,
|
||||
pub single_page: Option<bool>,
|
||||
pub created_at: String,
|
||||
}
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
use crate::rule::Rule;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)]
|
||||
#[sea_orm(table_name = "watch_later")]
|
||||
pub struct Model {
|
||||
@@ -10,6 +12,8 @@ pub struct Model {
|
||||
pub path: String,
|
||||
pub created_at: String,
|
||||
pub latest_row_at: DateTime,
|
||||
pub rule: Option<Rule>,
|
||||
pub enabled: bool,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
|
||||
@@ -1,2 +1,5 @@
|
||||
mod custom_type;
|
||||
mod entities;
|
||||
|
||||
pub use custom_type::*;
|
||||
pub use entities::*;
|
||||
|
||||
@@ -5,6 +5,10 @@ mod m20240505_130850_add_collection;
|
||||
mod m20240709_130914_watch_later;
|
||||
mod m20240724_161008_submission;
|
||||
mod m20250122_062926_add_latest_row_at;
|
||||
mod m20250612_090826_add_enabled;
|
||||
mod m20250613_043257_add_config;
|
||||
mod m20250712_080013_add_video_created_at_index;
|
||||
mod m20250903_094454_add_rule_and_should_download;
|
||||
|
||||
pub struct Migrator;
|
||||
|
||||
@@ -17,6 +21,10 @@ impl MigratorTrait for Migrator {
|
||||
Box::new(m20240709_130914_watch_later::Migration),
|
||||
Box::new(m20240724_161008_submission::Migration),
|
||||
Box::new(m20250122_062926_add_latest_row_at::Migration),
|
||||
Box::new(m20250612_090826_add_enabled::Migration),
|
||||
Box::new(m20250613_043257_add_config::Migration),
|
||||
Box::new(m20250712_080013_add_video_created_at_index::Migration),
|
||||
Box::new(m20250903_094454_add_rule_and_should_download::Migration),
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
101
crates/bili_sync_migration/src/m20250612_090826_add_enabled.rs
Normal file
101
crates/bili_sync_migration/src/m20250612_090826_add_enabled.rs
Normal file
@@ -0,0 +1,101 @@
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
#[derive(DeriveMigrationName)]
|
||||
pub struct Migration;
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(WatchLater::Table)
|
||||
.add_column(ColumnDef::new(WatchLater::Enabled).boolean().not_null().default(false))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(Submission::Table)
|
||||
.add_column(ColumnDef::new(Submission::Enabled).boolean().not_null().default(false))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(Favorite::Table)
|
||||
.add_column(ColumnDef::new(Favorite::Enabled).boolean().not_null().default(false))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(Collection::Table)
|
||||
.add_column(ColumnDef::new(Collection::Enabled).boolean().not_null().default(false))
|
||||
.to_owned(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(WatchLater::Table)
|
||||
.drop_column(WatchLater::Enabled)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(Submission::Table)
|
||||
.drop_column(Submission::Enabled)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(Favorite::Table)
|
||||
.drop_column(Favorite::Enabled)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(Collection::Table)
|
||||
.drop_column(Collection::Enabled)
|
||||
.to_owned(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(DeriveIden)]
|
||||
enum WatchLater {
|
||||
Table,
|
||||
Enabled,
|
||||
}
|
||||
|
||||
#[derive(DeriveIden)]
|
||||
enum Submission {
|
||||
Table,
|
||||
Enabled,
|
||||
}
|
||||
|
||||
#[derive(DeriveIden)]
|
||||
enum Favorite {
|
||||
Table,
|
||||
Enabled,
|
||||
}
|
||||
|
||||
#[derive(DeriveIden)]
|
||||
enum Collection {
|
||||
Table,
|
||||
Enabled,
|
||||
}
|
||||
@@ -0,0 +1,44 @@
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
#[derive(DeriveMigrationName)]
|
||||
pub struct Migration;
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.create_table(
|
||||
Table::create()
|
||||
.table(Config::Table)
|
||||
.if_not_exists()
|
||||
.col(
|
||||
ColumnDef::new(Config::Id)
|
||||
.integer()
|
||||
.not_null()
|
||||
.auto_increment()
|
||||
.primary_key(),
|
||||
)
|
||||
.col(ColumnDef::new(Config::Data).text().not_null())
|
||||
.col(
|
||||
ColumnDef::new(Config::CreatedAt)
|
||||
.timestamp()
|
||||
.default(Expr::current_timestamp())
|
||||
.not_null(),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager.drop_table(Table::drop().table(Config::Table).to_owned()).await
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(DeriveIden)]
|
||||
enum Config {
|
||||
Table,
|
||||
Id,
|
||||
Data,
|
||||
CreatedAt,
|
||||
}
|
||||
@@ -0,0 +1,36 @@
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
#[derive(DeriveMigrationName)]
|
||||
pub struct Migration;
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.create_index(
|
||||
Index::create()
|
||||
.table(Video::Table)
|
||||
.name("video_created_at_index")
|
||||
.col(Video::CreatedAt)
|
||||
.to_owned(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.drop_index(
|
||||
Index::drop()
|
||||
.table(Video::Table)
|
||||
.name("video_created_at_index")
|
||||
.to_owned(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(DeriveIden)]
|
||||
enum Video {
|
||||
Table,
|
||||
CreatedAt,
|
||||
}
|
||||
@@ -0,0 +1,124 @@
|
||||
use sea_orm_migration::prelude::*;
|
||||
use sea_orm_migration::schema::*;
|
||||
|
||||
#[derive(DeriveMigrationName)]
|
||||
pub struct Migration;
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(Video::Table)
|
||||
.add_column(boolean(Video::ShouldDownload).default(true))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(WatchLater::Table)
|
||||
.add_column(text_null(WatchLater::Rule))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(Submission::Table)
|
||||
.add_column(text_null(Submission::Rule))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(Favorite::Table)
|
||||
.add_column(text_null(Favorite::Rule))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(Collection::Table)
|
||||
.add_column(text_null(Collection::Rule))
|
||||
.to_owned(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(Video::Table)
|
||||
.drop_column(Video::ShouldDownload)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(WatchLater::Table)
|
||||
.drop_column(WatchLater::Rule)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(Submission::Table)
|
||||
.drop_column(Submission::Rule)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(Favorite::Table)
|
||||
.drop_column(Favorite::Rule)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(Collection::Table)
|
||||
.drop_column(Collection::Rule)
|
||||
.to_owned(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(DeriveIden)]
|
||||
enum Video {
|
||||
Table,
|
||||
ShouldDownload,
|
||||
}
|
||||
|
||||
#[derive(DeriveIden)]
|
||||
enum WatchLater {
|
||||
Table,
|
||||
Rule,
|
||||
}
|
||||
|
||||
#[derive(DeriveIden)]
|
||||
enum Submission {
|
||||
Table,
|
||||
Rule,
|
||||
}
|
||||
|
||||
#[derive(DeriveIden)]
|
||||
enum Favorite {
|
||||
Table,
|
||||
Rule,
|
||||
}
|
||||
|
||||
#[derive(DeriveIden)]
|
||||
enum Collection {
|
||||
Table,
|
||||
Rule,
|
||||
}
|
||||
@@ -21,7 +21,7 @@ export default defineConfig({
|
||||
nav: [
|
||||
{ text: "主页", link: "/" },
|
||||
{
|
||||
text: "v2.4.0",
|
||||
text: "v2.7.0",
|
||||
items: [
|
||||
{
|
||||
text: "程序更新",
|
||||
@@ -45,7 +45,7 @@ export default defineConfig({
|
||||
{
|
||||
text: "细节",
|
||||
items: [
|
||||
{ text: "配置文件", link: "/configuration" },
|
||||
{ text: "配置说明", link: "/configuration" },
|
||||
{ text: "命令行参数", link: "/args" },
|
||||
{ text: "工作原理", link: "/design" },
|
||||
],
|
||||
@@ -55,10 +55,10 @@ export default defineConfig({
|
||||
items: [
|
||||
{ text: "获取收藏夹信息", link: "/favorite" },
|
||||
{
|
||||
text: "获取视频合集/视频列表信息",
|
||||
text: "获取合集/列表信息",
|
||||
link: "/collection",
|
||||
},
|
||||
{ text: "获取投稿信息", link: "/submission" },
|
||||
{ text: "获取用户投稿信息", link: "/submission" },
|
||||
],
|
||||
},
|
||||
{
|
||||
|
||||
BIN
docs/assets/config.webp
Normal file
BIN
docs/assets/config.webp
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 78 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 167 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 78 KiB |
BIN
docs/assets/webui.webp
Normal file
BIN
docs/assets/webui.webp
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 95 KiB |
@@ -1,10 +1,10 @@
|
||||
# 获取视频合集/视频列表信息
|
||||
# 获取合集/列表信息
|
||||
|
||||
视频合集和视频列表虽然在哔哩哔哩网站交互上行为类似,但在接口层级是两个不同的概念,程序配置中需要对两者做出区分。
|
||||
|
||||
## 配置形式与区分方法
|
||||
目前 B 站绝大部分内容都是视频合集(Season),视频列表(Series)是古早的功能,现在已经不常见了。
|
||||
|
||||
在 bili-sync 的设计中,视频合集的 key 为 `season:{mid}:{season_id}`,而视频列表的 key 为 `series:{mid}:{series_id}`。
|
||||
## 配置形式与区分方法
|
||||
|
||||
新版本 b 站网页端已经对两种类型做了初步整合,将需要的参数展示在了视频合集/视频列表的 URL 中,不再需要手动查看接口。URL 的路径格式为:
|
||||
|
||||
@@ -13,16 +13,16 @@
|
||||
/{mid}/lists/{id}?type={season/series}
|
||||
```
|
||||
|
||||
点开你想要订阅的视频合集/视频列表详情,查看 URL 即可拼接出对应的 key。
|
||||
点开你想要订阅的视频合集/视频列表详情,对照查看 URL 即可获取所需参数。
|
||||
|
||||
### 视频合集
|
||||
|
||||

|
||||
|
||||
该视频合集的 key 为 `season:521722088:1987140`。
|
||||
类型为 `合集(Season)`,用户 ID 为 `521722088`,合集 ID 为 `1987140`。
|
||||
|
||||
### 视频列表
|
||||
|
||||

|
||||
|
||||
该视频列表的 key 为 `series:521722088:387214`。
|
||||
类型为 `列表(Series)`,用户 ID 为 `521722088`,列表 ID 为 `387214`。
|
||||
|
||||
@@ -1,10 +1,20 @@
|
||||
# 配置文件
|
||||
# 配置说明
|
||||
|
||||
默认的配置文件已经在[快速开始](/quick-start)中给出,该文档对配置文件的各个参数依次详细解释。
|
||||
## 基本设置
|
||||
|
||||
## video_name 与 page_name
|
||||
### 绑定地址
|
||||
|
||||
`video_name` 与 `page_name` 用于设置下载文件的命名规则,对于所有下载的内容,将会维持如下的目录结构:
|
||||
程序 Web Server 监听的地址,程序启动时会监听该地址,成功后可通过 `http://${bind_address}` 访问管理页。
|
||||
|
||||
该配置会在程序重启时生效。
|
||||
|
||||
### 同步间隔(秒)
|
||||
|
||||
表示程序每次执行扫描下载的间隔时间,单位为秒。
|
||||
|
||||
### 视频名称模板、分页名称模板
|
||||
|
||||
视频名称模板(`video_name`)和分页名称模板(`page_name`)用于设置下载文件的命名规则。对于所有下载的内容,将会维持如下的目录结构:
|
||||
|
||||
1. 单页视频:
|
||||
|
||||
@@ -30,7 +40,7 @@
|
||||
│ └── tvshow.nfo
|
||||
```
|
||||
|
||||
这两个参数支持使用模板,其中用 <code v-pre>{{ }}</code> 包裹的模板变量在执行时会被动态替换为对应的内容。
|
||||
这两个模板参数会在运行时解析,其中用 <code v-pre>{{ }}</code> 包裹的模板变量会被动态替换为对应的内容。
|
||||
|
||||
对于 `video_name`,支持设置 bvid(视频编号)、title(视频标题)、upper_name(up 主名称)、upper_mid(up 主 id)、pubtime(视频发布时间)、fav_time(视频收藏时间)。
|
||||
|
||||
@@ -40,7 +50,7 @@
|
||||
|
||||
> [!TIP]
|
||||
> 1. 仅收藏夹视频会区分 `fav_time` 和 `pubtime`,其它类型下载两者的取值是完全相同的;
|
||||
> 2. `fav_time` 和 `pubtime` 的格式受 `time_format` 参数控制,详情可参考 [time_format 小节](#time-format)。
|
||||
> 2. `fav_time` 和 `pubtime` 的格式受[时间格式](#时间格式)控制。
|
||||
|
||||
此外,`video_name` 和 `page_name` 还支持使用路径分割符,如 <code v-pre>{{ upper_mid }}/{{ title }}_{{ pubtime }}</code> 表示视频会根据 UP 主 id 将视频分到不同的文件夹中。
|
||||
|
||||
@@ -49,65 +59,44 @@
|
||||
> [!CAUTION]
|
||||
> **路径分隔符**在不同平台定义不同,Windows 下为 `\`,MacOS 和 Linux 下为 `/`。
|
||||
|
||||
## `auth_token`
|
||||
|
||||
表示调用程序管理 API 需要的身份凭据,程序会检查 API 请求 Header 中是否包含正确的 `Authorization` 字段。
|
||||
|
||||
内置管理页前端提供了 `auth_token` 的输入框,填写后即可成功调用 API 使用管理页。
|
||||
|
||||
## `bind_address`
|
||||
|
||||
程序 Web Server 监听的地址,程序启动时会监听该地址,成功后可通过 `http://${bind_address}` 访问管理页。
|
||||
|
||||
## `interval`
|
||||
|
||||
表示程序每次执行扫描下载的间隔时间,单位为秒。
|
||||
|
||||
## `upper_path`
|
||||
### UP 主头像保存路径
|
||||
|
||||
UP 主头像和信息的保存位置。对于使用 Emby、Jellyfin 媒体服务器的用户,需确保此处路径指向 Emby、Jellyfin 配置中的 `/metadata/people/` 才能够正常在媒体服务器中显示 UP 主的头像。
|
||||
|
||||
## `nfo_time_type`
|
||||
### 时间格式
|
||||
|
||||
表示在视频信息中使用的时间类型,可选值为 `favtime`(收藏时间)和 `pubtime`(发布时间)。
|
||||
用于设置 `fav_time` 和 `pubtime` 在视频名称模板、分页名称模板中使用时的显示格式,支持的格式符号可以参考 [chrono strftime 文档](https://docs.rs/chrono/latest/chrono/format/strftime/index.html)。
|
||||
|
||||
仅收藏夹视频会区分 `fav_time` 和 `pubtime`,其它类型下载两者取值相同。
|
||||
### 后端 API 认证 Token
|
||||
|
||||
## `time_format`
|
||||
表示调用程序管理 API 需要的身份凭据,程序会对 API 请求进行身份验证,身份验证不通过会拒绝访问。
|
||||
|
||||
时间格式,用于设置 `fav_time` 和 `pubtime` 在 `video_name`、 `page_name` 中使用时的显示格式,支持的格式符号可以参考 [chrono strftime 文档](https://docs.rs/chrono/latest/chrono/format/strftime/index.html)。
|
||||
在修改该 Token 后需要对应修改前端保存的 Token,才能正常访问管理页面。
|
||||
|
||||
## `credential`
|
||||
### 启动 CDN 排序
|
||||
|
||||
哔哩哔哩账号的身份凭据,请参考[凭据获取流程](https://nemo2011.github.io/bilibili-api/#/get-credential)获取并对应填写至配置文件中,后续 bili-sync 会在必要时自动刷新身份凭据,不再需要手动管理。
|
||||
一般情况下,b 站会为视频、音频流提供一个 baseUrl 与多个 backupUrl,程序默认会按照 baseUrl -> backupUrl 的顺序请求,依次尝试下载。
|
||||
|
||||
如果启用 CDN 排序,那么程序不再使用默认顺序,而是将所有 url 放到一起统一排序来决定请求顺序。排序优先级从高到低为:
|
||||
|
||||
1. 服务商 CDN:`upos-sz-mirrorxxxx.bilivideo.com`
|
||||
2. 自建 CDN:`cn-xxxx-dx-v-xxxx.bilivideo.com`
|
||||
3. MCDN:`xxxx.mcdn.bilivideo.com`
|
||||
4. PCDN:`xxxx.v1d.szbdyd.com`
|
||||
|
||||
这会让程序优先请求质量更高的 CDN,可能会提高下载速度并增加成功率,但效果因地区、网络环境而异。
|
||||
|
||||
## B 站认证
|
||||
|
||||
哔哩哔哩账号的身份凭据,请参考[凭据获取流程](https://nemo2011.github.io/bilibili-api/#/get-credential)获取并对应填写,后续 bili-sync 会在必要时自动刷新身份凭据,不再需要手动管理。
|
||||
|
||||
推荐使用匿名窗口获取,避免潜在的冲突。
|
||||
|
||||
## `filter_option`
|
||||
## 视频质量
|
||||
|
||||
过滤选项,用于设置程序的过滤规则,程序会从过滤结果中选择最优的视频、音频流下载。
|
||||
该页配置大部分都是显而易见的,仅对视频编码格式偏好进行说明。
|
||||
|
||||
这些内容的可选值可前往 [analyzer.rs](https://github.com/amtoaer/bili-sync/blob/24d0da0bf3ea65fd45d07587e4dcdbb24d11a589/crates/bili_sync/src/bilibili/analyzer.rs#L10-L55) 中查看。
|
||||
|
||||
注意将过滤范围设置过小可能导致筛选不到符合要求的流导致下载失败,建议谨慎修改。
|
||||
|
||||
### `video_max_quality`
|
||||
|
||||
视频流允许的最高质量。
|
||||
|
||||
### `video_min_quality`
|
||||
|
||||
视频流允许的最低质量。
|
||||
|
||||
### `audio_max_quality`
|
||||
|
||||
音频流允许的最高质量。
|
||||
|
||||
### `audio_min_quality`
|
||||
|
||||
音频流允许的最低质量。
|
||||
|
||||
### `codecs`
|
||||
### 视频编码格式偏好
|
||||
|
||||
这是 bili-sync 选择视频编码的优先级顺序,优先级按顺序从高到低。此处对编码格式做一个简单说明:
|
||||
|
||||
@@ -119,130 +108,100 @@ UP 主头像和信息的保存位置。对于使用 Emby、Jellyfin 媒体服务
|
||||
|
||||
而如果你的设备不支持,或者单纯懒得查询,那么推荐将 AVC 放在第一位以获得最好的兼容性。
|
||||
|
||||
### `no_dolby_video`
|
||||
|
||||
是否禁用杜比视频流。
|
||||
|
||||
### `no_dolby_audio`
|
||||
|
||||
是否禁用杜比音频流。
|
||||
|
||||
### `no_hdr`
|
||||
|
||||
是否禁用 HDR 视频流。
|
||||
|
||||
### `no_hires`
|
||||
|
||||
是否禁用 Hi-Res 音频流。
|
||||
|
||||
## `danmaku_option`
|
||||
## 弹幕渲染
|
||||
|
||||
弹幕的设置选项,用于设置下载弹幕的样式,几乎全部取自[上游仓库](https://github.com/gwy15/danmu2ass)。
|
||||
|
||||
### `duration`
|
||||
### 弹幕持续时间(秒)
|
||||
|
||||
弹幕在屏幕上的持续时间,单位为秒。
|
||||
|
||||
### `font`
|
||||
### 字体
|
||||
|
||||
弹幕的字体。
|
||||
弹幕使用的字体。
|
||||
|
||||
### `font_size`
|
||||
### 字体大小
|
||||
|
||||
弹幕的字体大小。
|
||||
|
||||
### `width_ratio`
|
||||
### 宽度比例
|
||||
|
||||
计算弹幕宽度的比例,为避免重叠可以调大这个数值。
|
||||
|
||||
### `horizontal_gap`
|
||||
### 水平间距
|
||||
|
||||
两条弹幕之间最小的水平距离。
|
||||
|
||||
### `lane_size`
|
||||
### 轨道大小
|
||||
|
||||
弹幕所占据的高度,即“行高度/行间距”。
|
||||
|
||||
### `float_percentage`
|
||||
### 滚动弹幕高度百分比
|
||||
|
||||
屏幕上滚动弹幕最多高度百分比。
|
||||
|
||||
### `bottom_percentage`
|
||||
### 底部弹幕高度百分比
|
||||
|
||||
屏幕上底部弹幕最多高度百分比。
|
||||
|
||||
### `opacity`
|
||||
### 透明度(0-255)
|
||||
|
||||
透明度,取值范围为 0-255。透明度可以通过 opacity / 255 计算得到。
|
||||
透明度,取值范围为 0-255。实际透明度百分比为 `透明度 / 255`。
|
||||
|
||||
### `bold`
|
||||
|
||||
是否加粗。
|
||||
### 描边宽度
|
||||
|
||||
### `outline`
|
||||
弹幕的描边宽度。
|
||||
|
||||
描边宽度。
|
||||
|
||||
### `time_offset`
|
||||
### 时间偏移(秒)
|
||||
|
||||
时间轴偏移,>0 会让弹幕延后,<0 会让弹幕提前,单位为秒。
|
||||
|
||||
## `favorite_list`
|
||||
### 粗体显示
|
||||
|
||||
你想要下载的收藏夹与想要保存的位置。简单示例:
|
||||
```toml
|
||||
3115878158 = "/home/amtoaer/Downloads/bili-sync/测试收藏夹"
|
||||
```
|
||||
收藏夹 ID 的获取方式可以参考[这里](/favorite)。
|
||||
弹幕是否加粗。
|
||||
|
||||
## `collection_list`
|
||||
## 高级设置
|
||||
|
||||
你想要下载的视频合集/视频列表与想要保存的位置。注意“视频合集”与“视频列表”是两种不同的类型。在配置文件中需要做区分:
|
||||
```toml
|
||||
"series:387051756:432248" = "/home/amtoaer/Downloads/bili-sync/测试视频列表"
|
||||
"season:1728547:101343" = "/home/amtoaer/Downloads/bili-sync/测试合集"
|
||||
```
|
||||
|
||||
具体说明可以参考[这里](/collection)。
|
||||
|
||||
## `submission_list`
|
||||
|
||||
你想要下载的 UP 主投稿与想要保存的位置。简单示例:
|
||||
```toml
|
||||
9183758 = "/home/amtoaer/Downloads/bili-sync/测试投稿"
|
||||
```
|
||||
UP 主 ID 的获取方式可以参考[这里](/submission)。
|
||||
|
||||
## `watch_later`
|
||||
|
||||
设置稍后再看的扫描开关与保存位置。
|
||||
|
||||
如果你希望下载稍后再看列表中的视频,可以将 `enabled` 设置为 `true`,并填写 `path`。
|
||||
|
||||
```toml
|
||||
enabled = true
|
||||
path = "/home/amtoaer/Downloads/bili-sync/稍后再看"
|
||||
```
|
||||
|
||||
## `concurrent_limit`
|
||||
|
||||
对 bili-sync 的并发下载进行多方面的限制,避免 api 请求过于频繁导致的风控。其中 video 和 page 表示下载任务的并发数,rate_limit 表示 api 请求的流量限制。默认取值为:
|
||||
```toml
|
||||
[concurrent_limit]
|
||||
video = 3
|
||||
page = 2
|
||||
|
||||
[concurrent_limit.rate_limit]
|
||||
limit = 4
|
||||
duration = 250
|
||||
```
|
||||
|
||||
具体来说,程序的处理逻辑是严格从上到下的,即程序会首先并发处理多个 video,每个 video 内再并发处理多个 page,程序的并行度可以简单衡量为 `video * page`(很多 video 都只有单个 page,实际会更接近 `video * 1`),配置项中的 `video` 和 `page` 两个参数就是控制此处的,调节这两个参数可以宏观上控制程序的并行度。
|
||||
|
||||
另一方面,每个执行的任务内部都会发起若干 api 请求以获取信息,这些请求的整体频率受到 `rate_limit` 的限制,使用漏桶算法实现。如默认配置表示的是每 250ms 允许 4 个 api 请求,超过这个频率的请求会被暂时阻塞,直到漏桶中有空间为止。调节 `rate_limit` 可以从微观上控制程序的并行度,同时也是最直接、最显著的控制 api 请求频率的方法。
|
||||
|
||||
据观察 b 站风控限制大多集中在主站,因此目前 `rate_limit` 仅作用于主站的各类请求,如请求各类视频列表、视频信息、获取流下载地址等,对实际的视频、图片下载过程不做限制。
|
||||
该页主要用于调整程序的请求与下载行为。
|
||||
|
||||
> [!TIP]
|
||||
> 1. 一般来说,`video` 和 `page` 的值不需要过大;
|
||||
> 2. `rate_limit` 的值可以根据网络环境和 api 请求频率进行调整,如果经常遇到风控可以优先调小 limit。
|
||||
> 1. 一般来说,视频、分页的并发数不需要过大;
|
||||
> 2. 请求频率限制可以根据网络环境和 api 请求频率进行调整,如果经常遇到风控可以优先调小该值。
|
||||
|
||||
### 视频并发数、分页并发数
|
||||
|
||||
视频并发数(video)和分页并发数(page)是控制 bili-sync 视频下载任务并发度的配置项。
|
||||
|
||||
程序的处理逻辑是严格从上到下的,即程序会首先并发处理多个 video,每个 video 内再并发处理多个 page,程序的并发度可以简单衡量为 `video * page`(很多 video 都只有单个 page,实际会更接近 `video * 1`),`video` 和 `page` 两个参数就是控制此处的,调节这两个参数可以宏观上控制程序的并发度。
|
||||
|
||||
### NFO 时间类型
|
||||
|
||||
表示在视频 NFO 文件中使用的时间类型,可选值为收藏时间和发布时间。
|
||||
|
||||
仅收藏夹视频会对这两项进行区分,其它类型的视频这两者取值完全相同。
|
||||
|
||||
### 请求频率限制
|
||||
|
||||
每个执行的任务内部都会发起若干 api 请求以获取信息,这些请求的整体频率受到请求频率的限制,使用漏桶算法实现。超过这个频率的请求会被暂时阻塞,直到漏桶中有空间为止。
|
||||
|
||||
时间间隔(毫秒)与限制请求数共同表明的意思时:程序在每个时间间隔内最多允许多少个请求。调节这一项可以从微观上控制程序的并行度,同时也是最直接、最显著的控制 api 请求频率的方法。
|
||||
|
||||
据观察 b 站风控限制大多集中在主站,因此目前请求频率限制仅作用于主站的各类请求,如请求各类视频列表、视频信息、获取流下载地址等,对实际的视频、图片下载过程不做限制。
|
||||
|
||||
### 单文件分块下载
|
||||
|
||||
单文件分块下载是指将单个视频文件分成多个小块进行下载,这可能有助于提高下载速度。
|
||||
|
||||
程序会首先为这个文件预分配空间,接着将文件分成若干个大小相同的块,为每个块启动单独的异步任务并行下载。
|
||||
|
||||
|
||||
#### 下载分块数
|
||||
|
||||
表示单个文件分成多少个小块,默认值为 4。
|
||||
|
||||
#### 启动分块下载的文件大小阈值(字节)
|
||||
|
||||
表示当单个文件大小超过多少字节时,才会启动分块下载。默认值为 20971520(20 MB)。
|
||||
|
||||
如果文件过小,分块成本可能会超过分块下载带来的收益,因此使用该阈值决定下载策略。
|
||||
@@ -32,7 +32,7 @@
|
||||
EMBY 的一般结构是: `媒体库 - 文件夹 - 电影/电视剧 - 分季/分集`,方便起见,我采用了如下的对应关系:
|
||||
|
||||
1. **文件夹**:对应 b 站的 video source;
|
||||
2. **电视剧**: 对应 b 站的 video;
|
||||
2. **电视剧**:对应 b 站的 video;
|
||||
3. **第一季的所有分集**:对应 b 站的 page。
|
||||
|
||||
特别的,当 video 仅有一个 page 时,为了避免过多的层级,bili-sync 会将 page 展开到第二层级,变成与电视剧同级的电影。
|
||||
|
||||
@@ -1,13 +1,5 @@
|
||||
# 管理页
|
||||
|
||||
在 2.4.0 版本,bili-sync 提供了一个内置的管理页,可以使用浏览器访问,实现一些简单的预览和重置操作。
|
||||
自 2.6.0 版本开始,bili-sync 的配置文件已经完全迁移至数据库中,程序的所有操作都可以通过 WebUI 管理页进行。
|
||||
|
||||
由于作者的前端水平有限,网页使用 90% AI + 10% 人工实现,问题会比较多,欢迎前端大能 PR(应该说比起 PR 缝缝补补不如直接重写 XD)。
|
||||
|
||||

|
||||
|
||||
# API
|
||||
|
||||
后端提供的 API 可以通过 `/swagger-ui/` 访问:
|
||||
|
||||

|
||||

|
||||
@@ -1,7 +1,7 @@
|
||||
# bili-sync 是什么?
|
||||
|
||||
> [!TIP]
|
||||
> 当前最新程序版本为 v2.4.0,文档将始终与最新程序版本保持一致。
|
||||
> 当前最新程序版本为 v2.7.0,文档将始终与最新程序版本保持一致。
|
||||
|
||||
bili-sync 是一款专为 NAS 用户编写的哔哩哔哩同步工具。
|
||||
|
||||
@@ -39,4 +39,5 @@ bili-sync 是一款专为 NAS 用户编写的哔哩哔哩同步工具。
|
||||
- [x] 支持对“稍后再看”内视频的自动扫描与下载
|
||||
- [x] 支持对 UP 主投稿视频的自动扫描与下载
|
||||
- [x] 支持限制任务的并行度和接口请求频率
|
||||
- [ ] 下载单个文件时支持断点续传与并发下载
|
||||
- [x] 支持单个文件的分块并行下载
|
||||
- [x] 支持使用 Web UI 配置,查看并管理视频、视频源
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# 常见问题
|
||||
|
||||
## 各种文件找不到问题,如运行后找不到初始 `config.toml`、提示成功下载但看不到视频文件等。
|
||||
## 各种文件找不到问题,如运行后找不到初始 `data.sqlite`、提示成功下载但看不到视频文件等。
|
||||
|
||||
请检查挂载位置与配置文件填写是否正确,需要理解的是:
|
||||
1. 容器挂载是把宿主机的 `/A` 挂载到容器内的 `/B`;
|
||||
@@ -22,4 +22,6 @@ bili-sync 在 2.4.0 版本引入了一个改动,不将此错误计入错误次
|
||||
|
||||
## 有些视频已经达到了最大重试次数还没有成功,我可以手动重试吗?
|
||||
|
||||
2.4.0 版本引入了一个简陋的[管理页](/frontend)来支持这个功能,你可以查询特定视频并点击重置,这样在下次下载任务触发时就会重试这个任务了。
|
||||
可以在 WebUI 中查找对应的视频源并点击“重置”,这会将所有失败的子任务重置为未下载状态,下一次视频下载任务就会开始重试。
|
||||
|
||||
此外还可以进入视频详情点击“编辑状态”,这允许用户自行修改每个子任务的状态。
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user