Mirror of https://github.com/amtoaer/bili-sync.git, synced 2026-05-08 20:52:40 +08:00
Compare commits
12 Commits
- d7ec0584bc
- 1ec015856b
- 99d4d900e6
- f85f105e69
- 8a1395458c
- bafb4af8dd
- f52724b974
- 4e1e0c40cf
- 439513e5ab
- 33a61ec08d
- a6d0d6b777
- ae685cbe61
.github/workflows/commit-build.yaml (vendored, 1 change)
```diff
@@ -7,4 +7,5 @@ on:
jobs:
  build-binary:
    if: ${{ !startsWith(github.ref, 'refs/tags/') }}
    uses: amtoaer/bili-sync/.github/workflows/build-binary.yaml@main
```
Cargo.lock (generated, 605 changes): file diff suppressed because it is too large.
Cargo.toml (43 changes)
```diff
@@ -4,7 +4,7 @@ default-members = ["crates/bili_sync"]
resolver = "2"

[workspace.package]
version = "2.4.1"
version = "2.5.1"
authors = ["amtoaer <amtoaer@gmail.com>"]
license = "MIT"
description = "由 Rust & Tokio 驱动的哔哩哔哩同步工具"
@@ -15,34 +15,35 @@ publish = false
bili_sync_entity = { path = "crates/bili_sync_entity" }
bili_sync_migration = { path = "crates/bili_sync_migration" }

anyhow = { version = "1.0.96", features = ["backtrace"] }
anyhow = { version = "1.0.98", features = ["backtrace"] }
arc-swap = { version = "1.7.1", features = ["serde"] }
assert_matches = "1.5.0"
async-std = { version = "1.13.0", features = ["attributes", "tokio1"] }
async-std = { version = "1.13.1", features = ["attributes", "tokio1"] }
async-stream = "0.3.6"
async-trait = "0.1.86"
axum = { version = "0.8.1", features = ["macros"] }
async-trait = "0.1.88"
axum = { version = "0.8.4", features = ["macros"] }
built = { version = "0.7.7", features = ["git2", "chrono"] }
chrono = { version = "0.4.39", features = ["serde"] }
clap = { version = "4.5.30", features = ["env", "string"] }
chrono = { version = "0.4.41", features = ["serde"] }
clap = { version = "4.5.38", features = ["env", "string"] }
cookie = "0.18.1"
cow-utils = "0.1.3"
dirs = "6.0.0"
enum_dispatch = "0.3.13"
float-ord = "0.3.2"
futures = "0.3.31"
handlebars = "6.3.1"
git2 = { version = "0.20.2", features = [], default-features = false }
handlebars = "6.3.2"
hex = "0.4.3"
leaky-bucket = "1.1.2"
md5 = "0.7.0"
memchr = "2.7.4"
mime_guess = "2.0.5"
once_cell = "1.20.3"
once_cell = "1.21.3"
prost = "0.13.5"
quick-xml = { version = "0.37.2", features = ["async-tokio"] }
quick-xml = { version = "0.37.5", features = ["async-tokio"] }
rand = "0.8.5"
regex = "1.11.1"
reqwest = { version = "0.12.12", features = [
reqwest = { version = "0.12.15", features = [
    "charset",
    "cookies",
    "gzip",
@@ -51,22 +52,22 @@ reqwest = { version = "0.12.12", features = [
    "rustls-tls",
    "stream",
], default-features = false }
rsa = { version = "0.9.7", features = ["sha2"] }
rust-embed = "8.5.0"
sea-orm = { version = "1.1.5", features = [
rsa = { version = "0.9.8", features = ["sha2"] }
rust-embed = "8.7.2"
sea-orm = { version = "1.1.11", features = [
    "macros",
    "runtime-tokio-rustls",
    "sqlx-sqlite",
] }
sea-orm-migration = { version = "1.1.5", features = [] }
serde = { version = "1.0.218", features = ["derive"] }
serde_json = "1.0.139"
sea-orm-migration = { version = "1.1.11", features = [] }
serde = { version = "1.0.219", features = ["derive"] }
serde_json = "1.0.140"
serde_urlencoded = "0.7.1"
strum = { version = "0.27.1", features = ["derive"] }
thiserror = "2.0.11"
tokio = { version = "1.43.0", features = ["full"] }
tokio-util = { version = "0.7.13", features = ["io", "rt"] }
toml = "0.8.20"
thiserror = "2.0.12"
tokio = { version = "1.45.0", features = ["full"] }
tokio-util = { version = "0.7.15", features = ["io", "rt"] }
toml = "0.8.22"
tower = "0.5.2"
tracing = "0.1.41"
tracing-subscriber = { version = "0.3.19", features = ["chrono"] }
```
```diff
@@ -58,6 +58,7 @@ assert_matches = { workspace = true }

[build-dependencies]
built = { workspace = true }
git2 = { workspace = true }

[package.metadata.release]
release = true
```
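For context: `built` with its `git2` feature is normally wired up through a small `build.rs`. The project's actual build script is not part of this diff, so the following is only a typical sketch of how these two build-dependencies are used together.

```rust
// build.rs (illustrative sketch, not taken from this diff): the `built` crate
// collects build-time metadata, including git information via the git2 feature,
// and writes it to $OUT_DIR/built.rs for the main crate to include.
fn main() {
    built::write_built_file().expect("failed to collect build-time information");
}
```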
```diff
@@ -3,6 +3,7 @@ use std::pin::Pin;

use anyhow::{Context, Result};
use bili_sync_entity::*;
use chrono::Utc;
use futures::Stream;
use sea_orm::ActiveValue::Set;
use sea_orm::entity::prelude::*;
```
```diff
@@ -37,13 +38,19 @@ impl VideoSource for collection::Model {
        })
    }

    fn should_take(&self, _release_datetime: &chrono::DateTime<Utc>, _latest_row_at: &chrono::DateTime<Utc>) -> bool {
        // collection(视频合集/视频列表)返回的内容似乎并非严格按照时间排序,并且不同 collection 的排序方式也不同
        // 为了保证程序正确性,collection 不根据时间提前 break,而是每次都全量拉取
        true
    }

    fn log_refresh_video_start(&self) {
        info!("开始扫描{}「{}」..", CollectionType::from(self.r#type), self.name);
    }

    fn log_refresh_video_end(&self, count: usize) {
        info!(
            "扫描{}「{}」完成,获取到 {} 条新视频",
            "扫描{}「{}」完成,已拉取 {} 条视频",
            CollectionType::from(self.r#type),
            self.name,
            count,
```
```diff
@@ -7,6 +7,7 @@ use std::path::Path;
use std::pin::Pin;

use anyhow::Result;
use chrono::Utc;
use enum_dispatch::enum_dispatch;
use futures::Stream;
use sea_orm::DatabaseConnection;
@@ -52,6 +53,11 @@ pub trait VideoSource {
    /// Box<dyn ActiveModelTrait> 又提示 ActiveModelTrait 没有 object safety,因此手写一个 Enum 静态分发
    fn update_latest_row_at(&self, datetime: DateTime) -> _ActiveModel;

    // 判断是否应该继续拉取视频
    fn should_take(&self, release_datetime: &chrono::DateTime<Utc>, latest_row_at: &chrono::DateTime<Utc>) -> bool {
        release_datetime > latest_row_at
    }

    /// 开始刷新视频
    fn log_refresh_video_start(&self);
```
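The default `should_take` above is what keeps refreshes incremental: a source is scanned newest-first and the scan stops at the first video that is not newer than `latest_row_at`, unless a source (such as collections) opts out by always returning `true`. A self-contained sketch of that gating with hypothetical data, not the project's actual fetch loop:

```rust
use chrono::{Duration, Utc};
use futures::{StreamExt, stream};

#[tokio::main]
async fn main() {
    let latest_row_at = Utc::now() - Duration::hours(1);
    // Publish times as the remote API is assumed to return them, newest first.
    let publish_times = stream::iter(vec![
        Utc::now(),
        Utc::now() - Duration::minutes(30),
        Utc::now() - Duration::hours(2), // older than latest_row_at: stop here
        Utc::now() - Duration::hours(3),
    ]);
    // Mirrors the default should_take: keep taking while the video is newer than the last sync point.
    let new_videos: Vec<_> = publish_times
        .take_while(|publish_time| futures::future::ready(*publish_time > latest_row_at))
        .collect()
        .await;
    assert_eq!(new_videos.len(), 2);
}
```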
```diff
@@ -2,6 +2,7 @@ use anyhow::{Context, Result, bail};
use serde::{Deserialize, Serialize};

use crate::bilibili::error::BiliError;
use crate::config::CONFIG;

pub struct PageAnalyzer {
    info: serde_json::Value,
@@ -63,6 +64,20 @@ pub enum VideoCodecs {
    AV1,
}

impl TryFrom<u64> for VideoCodecs {
    type Error = anyhow::Error;

    fn try_from(value: u64) -> std::result::Result<Self, Self::Error> {
        // https://socialsisteryi.github.io/bilibili-API-collect/docs/video/videostream_url.html#%E8%A7%86%E9%A2%91%E7%BC%96%E7%A0%81%E4%BB%A3%E7%A0%81
        match value {
            7 => Ok(Self::AVC),
            12 => Ok(Self::HEV),
            13 => Ok(Self::AV1),
            _ => bail!("invalid video codecs id: {}", value),
        }
    }
}

// 视频流的筛选偏好
#[derive(Serialize, Deserialize)]
pub struct FilterOption {
```
```diff
@@ -101,24 +116,41 @@ pub enum Stream {
    EpisodeTryMp4(String),
    DashVideo {
        url: String,
        backup_url: Vec<String>,
        quality: VideoQuality,
        codecs: VideoCodecs,
    },
    DashAudio {
        url: String,
        backup_url: Vec<String>,
        quality: AudioQuality,
    },
}

// 通用的获取流链接的方法,交由 Downloader 使用
impl Stream {
    pub fn url(&self) -> &str {
    pub fn urls(&self) -> Vec<&str> {
        match self {
            Self::Flv(url) => url,
            Self::Html5Mp4(url) => url,
            Self::EpisodeTryMp4(url) => url,
            Self::DashVideo { url, .. } => url,
            Self::DashAudio { url, .. } => url,
            Self::Flv(url) | Self::Html5Mp4(url) | Self::EpisodeTryMp4(url) => vec![url],
            Self::DashVideo { url, backup_url, .. } | Self::DashAudio { url, backup_url, .. } => {
                let mut urls = std::iter::once(url.as_str())
                    .chain(backup_url.iter().map(|s| s.as_str()))
                    .collect::<Vec<_>>();
                if CONFIG.cdn_sorting {
                    urls.sort_by_key(|u| {
                        if u.contains("upos-") {
                            0 // 服务商 cdn
                        } else if u.contains("cn-") {
                            1 // 自建 cdn
                        } else if u.contains("mcdn") {
                            2 // mcdn
                        } else {
                            3 // pcdn 或者其它
                        }
                    });
                }
                urls
            }
        }
    }
}
```
```diff
@@ -180,25 +212,22 @@ impl PageAnalyzer {
            )]);
        }
        let mut streams: Vec<Stream> = Vec::new();
        for video in self.info["dash"]["video"]
            .as_array()
        for video in self
            .info
            .pointer_mut("/dash/video")
            .and_then(|v| v.as_array_mut())
            .ok_or(BiliError::RiskControlOccurred)?
            .iter()
            .iter_mut()
        {
            let (Some(url), Some(quality), Some(codecs)) = (
            let (Some(url), Some(quality), Some(codecs_id)) = (
                video["baseUrl"].as_str(),
                video["id"].as_u64(),
                video["codecs"].as_str(),
                video["codecid"].as_u64(),
            ) else {
                continue;
            };
            let quality = VideoQuality::from_repr(quality as usize).context("invalid video stream quality")?;
            // 从视频流的 codecs 字段中获取编码格式,此处并非精确匹配而是判断包含,比如 codecs 是 av1.42c01e,需要匹配为 av1
            let Some(codecs) = [VideoCodecs::HEV, VideoCodecs::AVC, VideoCodecs::AV1]
                .into_iter()
                .find(|c| codecs.contains(c.as_ref()))
            else {
                // 少数情况会走到此处,如 codecs 为 dvh1.08.09、hvc1.2.4.L123.90 等,直接跳过,不影响流程
            let Ok(codecs) = codecs_id.try_into() else {
                continue;
            };
            if !filter_option.codecs.contains(&codecs)
```
```diff
@@ -211,12 +240,13 @@ impl PageAnalyzer {
            }
            streams.push(Stream::DashVideo {
                url: url.to_string(),
                backup_url: serde_json::from_value(video["backupUrl"].take()).unwrap_or_default(),
                quality,
                codecs,
            });
        }
        if let Some(audios) = self.info["dash"]["audio"].as_array() {
            for audio in audios.iter() {
        if let Some(audios) = self.info.pointer_mut("/dash/audio").and_then(|a| a.as_array_mut()) {
            for audio in audios.iter_mut() {
                let (Some(url), Some(quality)) = (audio["baseUrl"].as_str(), audio["id"].as_u64()) else {
                    continue;
                };
```
```diff
@@ -226,34 +256,44 @@ impl PageAnalyzer {
                }
                streams.push(Stream::DashAudio {
                    url: url.to_string(),
                    backup_url: serde_json::from_value(audio["backupUrl"].take()).unwrap_or_default(),
                    quality,
                });
            }
        }
        let flac = &self.info["dash"]["flac"]["audio"];
        if !(filter_option.no_hires || flac.is_null()) {
            let (Some(url), Some(quality)) = (flac["baseUrl"].as_str(), flac["id"].as_u64()) else {
                bail!("invalid flac stream");
            };
            let quality = AudioQuality::from_repr(quality as usize).context("invalid flac stream quality")?;
            if quality >= filter_option.audio_min_quality && quality <= filter_option.audio_max_quality {
                streams.push(Stream::DashAudio {
                    url: url.to_string(),
                    quality,
                });
        if !filter_option.no_hires {
            if let Some(flac) = self.info.pointer_mut("/dash/flac/audio") {
                let (Some(url), Some(quality)) = (flac["baseUrl"].as_str(), flac["id"].as_u64()) else {
                    bail!("invalid flac stream");
                };
                let quality = AudioQuality::from_repr(quality as usize).context("invalid flac stream quality")?;
                if quality >= filter_option.audio_min_quality && quality <= filter_option.audio_max_quality {
                    streams.push(Stream::DashAudio {
                        url: url.to_string(),
                        backup_url: serde_json::from_value(flac["backupUrl"].take()).unwrap_or_default(),
                        quality,
                    });
                }
            }
        }
        let dolby_audio = &self.info["dash"]["dolby"]["audio"][0];
        if !(filter_option.no_dolby_audio || dolby_audio.is_null()) {
            let (Some(url), Some(quality)) = (dolby_audio["baseUrl"].as_str(), dolby_audio["id"].as_u64()) else {
                bail!("invalid dolby audio stream");
            };
            let quality = AudioQuality::from_repr(quality as usize).context("invalid dolby audio stream quality")?;
            if quality >= filter_option.audio_min_quality && quality <= filter_option.audio_max_quality {
                streams.push(Stream::DashAudio {
                    url: url.to_string(),
                    quality,
                });
        if !filter_option.no_dolby_audio {
            if let Some(dolby_audio) = self
                .info
                .pointer_mut("/dash/dolby/audio/0")
                .and_then(|a| a.as_object_mut())
            {
                let (Some(url), Some(quality)) = (dolby_audio["baseUrl"].as_str(), dolby_audio["id"].as_u64()) else {
                    bail!("invalid dolby audio stream");
                };
                let quality =
                    AudioQuality::from_repr(quality as usize).context("invalid dolby audio stream quality")?;
                if quality >= filter_option.audio_min_quality && quality <= filter_option.audio_max_quality {
                    streams.push(Stream::DashAudio {
                        url: url.to_string(),
                        backup_url: serde_json::from_value(dolby_audio["backupUrl"].take()).unwrap_or_default(),
                        quality,
                    });
                }
            }
        }
        Ok(streams)
```
```diff
@@ -270,32 +310,34 @@ impl PageAnalyzer {
        let (videos, audios): (Vec<Stream>, Vec<Stream>) =
            streams.into_iter().partition(|s| matches!(s, Stream::DashVideo { .. }));
        Ok(BestStream::VideoAudio {
            video: Iterator::max_by(videos.into_iter(), |a, b| match (a, b) {
                (
                    Stream::DashVideo {
                        quality: a_quality,
                        codecs: a_codecs,
                        ..
                    },
                    Stream::DashVideo {
                        quality: b_quality,
                        codecs: b_codecs,
                        ..
                    },
                ) => {
                    if a_quality != b_quality {
                        return a_quality.cmp(b_quality);
                    };
                    filter_option
                        .codecs
                        .iter()
                        .position(|c| c == b_codecs)
                        .cmp(&filter_option.codecs.iter().position(|c| c == a_codecs))
                }
                _ => unreachable!(),
            })
            .context("no video stream found")?,
            audio: Iterator::max_by(audios.into_iter(), |a, b| match (a, b) {
            video: videos
                .into_iter()
                .max_by(|a, b| match (a, b) {
                    (
                        Stream::DashVideo {
                            quality: a_quality,
                            codecs: a_codecs,
                            ..
                        },
                        Stream::DashVideo {
                            quality: b_quality,
                            codecs: b_codecs,
                            ..
                        },
                    ) => {
                        if a_quality != b_quality {
                            return a_quality.cmp(b_quality);
                        };
                        filter_option
                            .codecs
                            .iter()
                            .position(|c| c == b_codecs)
                            .cmp(&filter_option.codecs.iter().position(|c| c == a_codecs))
                    }
                    _ => unreachable!(),
                })
                .context("no video stream found")?,
            audio: audios.into_iter().max_by(|a, b| match (a, b) {
                (Stream::DashAudio { quality: a_quality, .. }, Stream::DashAudio { quality: b_quality, .. }) => {
                    a_quality.cmp(b_quality)
                }
```
```diff
@@ -348,18 +390,41 @@ mod tests {
            (
                "BV1xRChYUE2R",
                VideoQuality::Quality8k,
                VideoCodecs::HEV,
                Some(AudioQuality::QualityHiRES),
            ),
            // 一个没有声音的纯视频
            ("BV1J7411H7KQ", VideoQuality::Quality720p, None),
            ("BV1J7411H7KQ", VideoQuality::Quality720p, VideoCodecs::HEV, None),
            // 一个杜比全景声的演示片
            (
                "BV1Mm4y1P7JV",
                VideoQuality::Quality4k,
                VideoQuality::QualityDolby,
                VideoCodecs::HEV,
                Some(AudioQuality::QualityDolby),
            ),
            // 影视飓风的杜比视界视频
            (
                "BV1HEf2YWEvs",
                VideoQuality::QualityDolby,
                VideoCodecs::HEV,
                Some(AudioQuality::QualityDolby),
            ),
            // 孤独摇滚的杜比视界 + hires + 杜比全景声视频
            (
                "BV1YDVYzeE39",
                VideoQuality::QualityDolby,
                VideoCodecs::HEV,
                Some(AudioQuality::QualityHiRES),
            ),
            // 一个京紫的 HDR 视频
            (
                "BV1cZ4y1b7iB",
                VideoQuality::QualityHdr,
                VideoCodecs::HEV,
                Some(AudioQuality::Quality192k),
            ),
        ];
        for (bvid, video_quality, audio_quality) in testcases.into_iter() {
        for (bvid, video_quality, video_codec, audio_quality) in testcases.into_iter() {
            let client = BiliClient::new();
            let video = Video::new(&client, bvid.to_owned());
            let pages = video.get_pages().await.expect("failed to get pages");
```
```diff
@@ -373,10 +438,11 @@ mod tests {
            dbg!(bvid, &best_stream);
            match best_stream {
                BestStream::VideoAudio {
                    video: Stream::DashVideo { quality, .. },
                    video: Stream::DashVideo { quality, codecs, .. },
                    audio,
                } => {
                    assert_eq!(quality, video_quality);
                    assert_eq!(codecs, video_codec);
                    assert_eq!(
                        audio.map(|audio_stream| match audio_stream {
                            Stream::DashAudio { quality, .. } => quality,
```
```diff
@@ -389,4 +455,27 @@ mod tests {
            }
        }
    }

    #[test]
    fn test_url_sort() {
        let stream = Stream::DashVideo {
            url: "https://xy116x207x155x163xy240ey95dy1010y700yy8dxy.mcdn.bilivideo.cn:4483".to_owned(),
            backup_url: vec![
                "https://upos-sz-mirrorcos.bilivideo.com".to_owned(),
                "https://cn-tj-cu-01-11.bilivideo.com".to_owned(),
                "https://xxx.v1d.szbdys.com".to_owned(),
            ],
            quality: VideoQuality::Quality1080p,
            codecs: VideoCodecs::AVC,
        };
        assert_eq!(
            stream.urls(),
            vec![
                "https://upos-sz-mirrorcos.bilivideo.com",
                "https://cn-tj-cu-01-11.bilivideo.com",
                "https://xy116x207x155x163xy240ey95dy1010y700yy8dxy.mcdn.bilivideo.cn:4483",
                "https://xxx.v1d.szbdys.com"
            ]
        );
    }
}
```
```diff
@@ -1,5 +1,7 @@
use std::sync::Arc;

#[cfg(test)]
pub use analyzer::VideoCodecs;
pub use analyzer::{BestStream, FilterOption};
use anyhow::{Result, bail, ensure};
use arc_swap::ArcSwapOption;
```
```diff
@@ -112,7 +112,7 @@ impl<'a> Video<'a> {

    pub async fn get_danmaku_writer(&self, page: &'a PageInfo) -> Result<DanmakuWriter> {
        let tasks = FuturesUnordered::new();
        for i in 1..=(page.duration + 359) / 360 {
        for i in 1..=page.duration.div_ceil(360) {
            tasks.push(self.get_danmaku_segment(page, i as i64));
        }
        let result: Vec<Vec<DanmakuElem>> = tasks.try_collect().await?;
```
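The rewritten loop bound is just ceiling division: a page is split into 360-second danmaku segments and the last partial segment still needs a request. A quick check of the equivalence (standalone values, not the project's types):

```rust
fn main() {
    // 900 seconds of video -> 3 danmaku segments of up to 360 seconds each.
    let duration: u32 = 900;
    assert_eq!((duration + 359) / 360, 3);
    assert_eq!(duration.div_ceil(360), 3); // same result, clearer intent
}
```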
```diff
@@ -61,6 +61,8 @@ fn load_config() -> Config {

#[cfg(test)]
fn load_config() -> Config {
    use crate::bilibili::{FilterOption, VideoCodecs};

    let credential = match (
        std::env::var("TEST_SESSDATA"),
        std::env::var("TEST_BILI_JCT"),
@@ -81,6 +83,11 @@ fn load_config() -> Config {
    };
    Config {
        credential: arc_swap::ArcSwapOption::from(credential),
        cdn_sorting: true,
        filter_option: FilterOption {
            codecs: vec![VideoCodecs::HEV, VideoCodecs::AV1, VideoCodecs::AVC],
            ..Default::default()
        },
        ..Default::default()
    }
}
```
```diff
@@ -69,6 +69,8 @@ pub struct Config {
    pub concurrent_limit: ConcurrentLimit,
    #[serde(default = "default_time_format")]
    pub time_format: String,
    #[serde(default)]
    pub cdn_sorting: bool,
}

impl Default for Config {
@@ -90,6 +92,7 @@ impl Default for Config {
            nfo_time_type: NFOTimeType::FavTime,
            concurrent_limit: ConcurrentLimit::default(),
            time_format: default_time_format(),
            cdn_sorting: false,
        }
    }
}
```
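Because the new field is marked `#[serde(default)]`, configuration files written before this release still deserialize and the feature stays off. A standalone sketch of that behaviour, using a stripped-down struct rather than the real `Config`:

```rust
use serde::Deserialize;

#[derive(Deserialize)]
struct DemoConfig {
    #[serde(default)]
    cdn_sorting: bool,
}

fn main() {
    // An old config without the key still parses; the flag falls back to `false`.
    let config: DemoConfig = toml::from_str("").expect("valid config");
    assert!(!config.cdn_sorting);
}
```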
```diff
@@ -1,7 +1,7 @@
use core::str;
use std::path::Path;

use anyhow::{Result, bail, ensure};
use anyhow::{Context, Result, bail, ensure};
use futures::TryStreamExt;
use reqwest::Method;
use tokio::fs::{self, File};
```
```diff
@@ -45,6 +45,22 @@ impl Downloader {
        Ok(())
    }

    pub async fn fetch_with_fallback(&self, urls: &[&str], path: &Path) -> Result<()> {
        if urls.is_empty() {
            bail!("no urls provided");
        }
        let mut res = Ok(());
        for url in urls {
            match self.fetch(url, path).await {
                Ok(_) => return Ok(()),
                Err(err) => {
                    res = Err(err);
                }
            }
        }
        res.with_context(|| format!("failed to download from {:?}", urls))
    }

    pub async fn merge(&self, video_path: &Path, audio_path: &Path, output_path: &Path) -> Result<()> {
        let output = tokio::process::Command::new("ffmpeg")
            .args([
@@ -54,6 +70,8 @@ impl Downloader {
                audio_path.to_string_lossy().as_ref(),
                "-c",
                "copy",
                "-strict",
                "unofficial",
                "-y",
                output_path.to_string_lossy().as_ref(),
            ])
```
```diff
@@ -26,21 +26,26 @@ impl From<Result<ExecutionStatus>> for ExecutionStatus {
        match res {
            Ok(status) => status,
            Err(err) => {
                if let Some(error) = err.downcast_ref::<io::Error>() {
                    let error_kind = error.kind();
                    if error_kind == io::ErrorKind::PermissionDenied
                        || (error_kind == io::ErrorKind::Other
                            && error.get_ref().is_some_and(|e| {
                                e.downcast_ref::<reqwest::Error>()
                                    .is_some_and(|e| e.is_decode() || e.is_body() || e.is_timeout())
                            }))
                    {
                        return ExecutionStatus::Ignored(err);
                for cause in err.chain() {
                    if let Some(io_err) = cause.downcast_ref::<io::Error>() {
                        // 权限错误
                        if io_err.kind() == io::ErrorKind::PermissionDenied {
                            return ExecutionStatus::Ignored(err);
                        }
                        // 使用 io::Error 包裹的 reqwest::Error
                        if io_err.kind() == io::ErrorKind::Other
                            && io_err.get_ref().is_some_and(|e| {
                                e.downcast_ref::<reqwest::Error>().is_some_and(is_ignored_reqwest_error)
                            })
                        {
                            return ExecutionStatus::Ignored(err);
                        }
                    }
                }
                if let Some(error) = err.downcast_ref::<reqwest::Error>() {
                    if error.is_decode() || error.is_body() || error.is_timeout() {
                        return ExecutionStatus::Ignored(err);
                    // 未包裹的 reqwest::Error
                    if let Some(error) = cause.downcast_ref::<reqwest::Error>() {
                        if is_ignored_reqwest_error(error) {
                            return ExecutionStatus::Ignored(err);
                        }
                    }
                }
                ExecutionStatus::Failed(err)
@@ -48,3 +53,7 @@ impl From<Result<ExecutionStatus>> for ExecutionStatus {
            }
        }
    }
}

fn is_ignored_reqwest_error(err: &reqwest::Error) -> bool {
    err.is_decode() || err.is_body() || err.is_timeout()
}
```
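The key change above is walking `err.chain()` instead of downcasting only the top-level error, so an ignorable cause is still found when extra `context()` layers have been added on top. A minimal, self-contained illustration of that pattern (not the project's code):

```rust
use std::io;

use anyhow::{Context, Result};

fn download() -> Result<()> {
    // An io::Error buried under two layers of anyhow context, as a downloader might produce.
    Err(io::Error::new(io::ErrorKind::PermissionDenied, "read-only filesystem"))
        .context("failed to write temporary file")
        .context("failed to fetch video stream")
}

fn main() {
    let err = download().unwrap_err();
    // A plain downcast_ref on `err` only sees the outermost context, but walking the
    // chain still reaches the root io::Error so it can be classified as ignorable.
    let ignorable = err.chain().any(|cause| {
        cause
            .downcast_ref::<io::Error>()
            .is_some_and(|e| e.kind() == io::ErrorKind::PermissionDenied)
    });
    assert!(ignorable);
}
```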
```diff
@@ -72,7 +72,7 @@ pub async fn refresh_video_source<'a>(
                if release_datetime > &max_datetime {
                    max_datetime = *release_datetime;
                }
                futures::future::ready(release_datetime > &latest_row_at)
                futures::future::ready(video_source.should_take(release_datetime, &latest_row_at))
            }
        }
    })
```
```diff
@@ -535,11 +535,11 @@ pub async fn fetch_page_video(
        .await?
        .best_stream(&CONFIG.filter_option)?;
    match streams {
        BestStream::Mixed(mix_stream) => downloader.fetch(mix_stream.url(), page_path).await?,
        BestStream::Mixed(mix_stream) => downloader.fetch_with_fallback(&mix_stream.urls(), page_path).await?,
        BestStream::VideoAudio {
            video: video_stream,
            audio: None,
        } => downloader.fetch(video_stream.url(), page_path).await?,
        } => downloader.fetch_with_fallback(&video_stream.urls(), page_path).await?,
        BestStream::VideoAudio {
            video: video_stream,
            audio: Some(audio_stream),
@@ -549,8 +549,12 @@ pub async fn fetch_page_video(
                page_path.with_extension("tmp_audio"),
            );
            let res = async {
                downloader.fetch(video_stream.url(), &tmp_video_path).await?;
                downloader.fetch(audio_stream.url(), &tmp_audio_path).await?;
                downloader
                    .fetch_with_fallback(&video_stream.urls(), &tmp_video_path)
                    .await?;
                downloader
                    .fetch_with_fallback(&audio_stream.urls(), &tmp_audio_path)
                    .await?;
                downloader.merge(&tmp_video_path, &tmp_audio_path, page_path).await
            }
            .await;
```
```diff
@@ -21,7 +21,7 @@ export default defineConfig({
    nav: [
      { text: "主页", link: "/" },
      {
        text: "v2.4.1",
        text: "v2.5.1",
        items: [
          {
            text: "程序更新",
```
```diff
@@ -77,6 +77,22 @@ UP 主头像和信息的保存位置。对于使用 Emby、Jellyfin 媒体服务

时间格式,用于设置 `fav_time` 和 `pubtime` 在 `video_name`、 `page_name` 中使用时的显示格式,支持的格式符号可以参考 [chrono strftime 文档](https://docs.rs/chrono/latest/chrono/format/strftime/index.html)。

## `cdn_sorting`

一般情况下,b 站会为视频、音频流提供一个 baseUrl 与多个 backupUrl,程序默认会按照 baseUrl -> backupUrl 的顺序请求,依次尝试下载。

如果将 `cdn_sorting` 设置为 `true`,程序不再使用默认顺序,而是将所有 url 放到一起统一排序来决定请求顺序。排序优先级从高到低为:

1. 服务商 CDN:`upos-sz-mirrorxxxx.bilivideo.com`

2. 自建 CDN:`cn-xxxx-dx-v-xxxx.bilivideo.com`

3. MCDN:`xxxx.mcdn.bilivideo.com`

4. PCDN:`xxxx.v1d.szbdyd.com`

这会让程序优先请求质量更高的 CDN,可能会提高下载速度并增加成功率,但效果因地区、网络环境而异。

## `credential`

哔哩哔哩账号的身份凭据,请参考[凭据获取流程](https://nemo2011.github.io/bilibili-api/#/get-credential)获取并对应填写至配置文件中,后续 bili-sync 会在必要时自动刷新身份凭据,不再需要手动管理。
```
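In short: by default each stream's baseUrl is tried before its backupUrls; with `cdn_sorting = true` all candidate URLs are re-ordered so that vendor CDN hosts (`upos-`) come first, then self-hosted CDN hosts (`cn-`), then MCDN, with PCDN and anything else last. A small sketch of that ranking, mirroring the `urls()` change earlier in this diff (simplified, with hypothetical host names):

```rust
/// Lower rank is requested first when `cdn_sorting = true`.
fn cdn_rank(url: &str) -> u8 {
    if url.contains("upos-") {
        0 // vendor CDN, e.g. upos-sz-mirrorcos.bilivideo.com
    } else if url.contains("cn-") {
        1 // self-hosted CDN, e.g. cn-tj-cu-01-11.bilivideo.com
    } else if url.contains("mcdn") {
        2 // MCDN, e.g. xxxx.mcdn.bilivideo.cn
    } else {
        3 // PCDN or anything else, e.g. xxxx.v1d.szbdyd.com
    }
}

fn main() {
    let mut urls = vec![
        "https://xxxx.mcdn.bilivideo.cn:4483/video.m4s",
        "https://upos-sz-mirrorcos.bilivideo.com/video.m4s",
        "https://xxxx.v1d.szbdyd.com/video.m4s",
        "https://cn-tj-cu-01-11.bilivideo.com/video.m4s",
    ];
    // Stable sort keeps the original order within the same rank.
    urls.sort_by_key(|u| cdn_rank(u));
    assert!(urls[0].contains("upos-"));
    assert!(urls[1].contains("cn-"));
}
```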
```diff
@@ -1,7 +1,7 @@
# bili-sync 是什么?

> [!TIP]
> 当前最新程序版本为 v2.4.1,文档将始终与最新程序版本保持一致。
> 当前最新程序版本为 v2.5.1,文档将始终与最新程序版本保持一致。

bili-sync 是一款专为 NAS 用户编写的哔哩哔哩同步工具。
```
```diff
@@ -81,6 +81,7 @@ interval = 1200
upper_path = "/Users/amtoaer/Library/Application Support/bili-sync/upper_face"
nfo_time_type = "favtime"
time_format = "%Y-%m-%d"
cdn_sorting = false

[credential]
sessdata = ""
```