Compare commits


9 Commits
1.1.6 ... 1.1.7

Author SHA1 Message Date
amtoaer
e25ed452b4 chore: bump version from 1.1.6 to 1.1.7 2024-02-25 01:11:53 +08:00
amtoaer
2f36220582 chore: add a one-click release make command 2024-02-25 01:11:13 +08:00
amtoaer
f6a5238b6e fix: fix an execution error 2024-02-25 00:47:03 +08:00
amtoaer
ec5776a0ed feat: adapt recheck for multi-page videos; specify batch_size for all bulk database operations 2024-02-24 21:37:34 +08:00
ᴀᴍᴛᴏᴀᴇʀ
c21da25c6f feat: extract some of the stream-selection parameters used when downloading videos into the config (#47) 2024-02-24 17:36:56 +08:00
amtoaer
bde142a896 doc: fix some wording 2024-02-24 03:52:58 +08:00
amtoaer
af8cd0d819 refactor: save files asynchronously in refresh 2024-02-24 03:49:28 +08:00
ᴀᴍᴛᴏᴀᴇʀ
a4c362d8ab feat: support downloading multi-page videos, pending further testing (#24) 2024-02-24 03:38:08 +08:00
amtoaer
1dd760d445 chore: switch code formatter, remove unused dependencies 2024-02-21 23:54:39 +08:00
15 changed files with 672 additions and 538 deletions

Makefile

@@ -1,4 +1,4 @@
-.PHONY: install fmt start-daemon start-once db-init db-migrate db-upgrade sync-conf
+.PHONY: install fmt start-daemon start-once db-init db-migrate db-upgrade sync-conf release
 install:
 	@echo "Installing dependencies..."
@@ -6,8 +6,8 @@ install:
 fmt:
 	@echo "Formatting..."
-	@poetry run black .
-	@poetry run ruff --fix .
+	@poetry run ruff format .
+	@poetry run ruff check --fix .
 start-daemon:
 	@poetry run python entry.py
@@ -28,4 +28,12 @@ sync-conf:
 	@echo "Syncing config..."
 	@cp ${CONFIG_SRC} ./config/
 	@cp ${DB_SRC} ./data/
 	@echo "Done."
+
+release:
+	@echo "Releasing..."
+	@git checkout main
+	@bump-my-version bump patch
+	@git push origin main
+	@git push origin --tags
+	@echo "Done."
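For readers following along in Python, here is a rough equivalent of what the new `release` target runs (illustrative only; the Makefile itself just chains these commands, and `bump-my-version bump patch` commits and tags according to the `[tool.bumpversion]` settings in pyproject.toml below):

```python
# Rough Python equivalent of the `make release` target, for illustration only.
import subprocess

def release() -> None:
    for cmd in (
        ["git", "checkout", "main"],
        ["bump-my-version", "bump", "patch"],  # bumps the version, commits, and tags
        ["git", "push", "origin", "main"],
        ["git", "push", "origin", "--tags"],
    ):
        subprocess.run(cmd, check=True)  # stop at the first failing step

if __name__ == "__main__":
    release()
```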

README.md

@@ -56,7 +56,7 @@ services:
   bili-sync:
     image: amtoaer/bili-sync:latest
     user: 1000:1000 # Which user to run as; defaults to root if omitted. Setting it is recommended.
-    tty: true # Add this line to make the logs colored
+    tty: true # Add this line so supporting terminals render the logs in color (remove it if the logs come out garbled)
     volumes:
       - /home/amtoaer/Videos/Bilibilis/:/Videos/Bilibilis/ # video files
      - /home/amtoaer/.config/nas/bili-sync/config/:/app/config/ # config files
@@ -129,7 +129,7 @@
 - [x] Credential authentication
 - [x] Best-quality stream selection
 - [x] Video download
 - [x] Concurrent downloads
 - [x] Run as a daemon
 - [x] Generate nfo and poster files so videos can be imported into emby as single episodes
 - [x] Paginate through favorites to download the full history

commands.py

@@ -13,30 +13,32 @@ from utils import aexists, aremove
 async def recheck():
     """Refresh the status of videos in the database: if a file is missing, mark the video as not downloaded so the next run re-downloads it. Call this after manually deleting files."""
+    async def is_ok(item: FavoriteItem) -> bool:
+        if len(item.pages):
+            # A multi-page video only counts as existing if all of its pages exist
+            return all(await asyncio.gather(*[aexists(page.video_path) for page in item.pages]))
+        return await aexists(item.video_path)
+
     items = await FavoriteItem.filter(
-        type=MediaType.VIDEO,
-        status=MediaStatus.NORMAL,
-        downloaded=True,
-    )
-    exists = await asyncio.gather(*[aexists(item.video_path) for item in items])
-    for item, exist in zip(items, exists):
-        if isinstance(exist, Exception):
-            logger.error(
-                "Error when checking file {} {}: {}",
-                item.bvid,
-                item.name,
-                exist,
-            )
+        type=MediaType.VIDEO, status=MediaStatus.NORMAL, downloaded=True
+    ).prefetch_related("pages")
+    items_to_update = []
+    for item in items:
+        for page in item.pages:
+            # Looks like a tortoise bug: prefetch_related does not populate the reverse reference, so set it manually
+            page.favorite_item = item
+    items_ok = await asyncio.gather(*[is_ok(item) for item in items], return_exceptions=True)
+    for item, ok in zip(items, items_ok):
+        if isinstance(ok, Exception):
+            logger.error("Error when checking file {} {}: {}.", item.bvid, item.name, ok)
             continue
-        if not exist:
-            logger.info(
-                "File {} {} not exists, mark as not downloaded.",
-                item.bvid,
-                item.name,
-            )
+        if not ok:
+            logger.info("Lack of file detected for {} {}, mark as not downloaded.", item.bvid, item.name)
             item.downloaded = False
+            items_to_update.append(item)
     logger.info("Updating database...")
-    await FavoriteItem.bulk_update(items, fields=["downloaded"])
+    await FavoriteItem.bulk_update(items_to_update, fields=["downloaded"], batch_size=300)
     logger.info("Database updated.")
@@ -52,10 +54,7 @@ async def _refresh_favorite_item_info(
     items = await FavoriteItem.filter(downloaded=True).prefetch_related("upper")
     if force:
         # On a forced refresh, delete everything that currently exists first
-        await asyncio.gather(
-            *[aremove(path) for item in items for path in path_getter(item)],
-            return_exceptions=True,
-        )
+        await asyncio.gather(*[aremove(path) for item in items for path in path_getter(item)], return_exceptions=True)
     await asyncio.gather(
         *[
             process_favorite_item(
@@ -72,30 +71,14 @@
     )
-refresh_nfo = functools.partial(
-    _refresh_favorite_item_info, lambda item: [item.nfo_path], process_nfo=True
-)
+refresh_nfo = functools.partial(_refresh_favorite_item_info, lambda item: [item.nfo_path], process_nfo=True)
-refresh_poster = functools.partial(
-    _refresh_favorite_item_info,
-    lambda item: [item.poster_path],
-    process_poster=True,
-)
+refresh_poster = functools.partial(_refresh_favorite_item_info, lambda item: [item.poster_path], process_poster=True)
-refresh_video = functools.partial(
-    _refresh_favorite_item_info,
-    lambda item: [item.video_path],
-    process_video=True,
-)
+refresh_video = functools.partial(_refresh_favorite_item_info, lambda item: [item.video_path], process_video=True)
-refresh_upper = functools.partial(
-    _refresh_favorite_item_info,
-    lambda item: item.upper_path,
-    process_upper=True,
-)
+refresh_upper = functools.partial(_refresh_favorite_item_info, lambda item: item.upper_path, process_upper=True)
 refresh_subtitle = functools.partial(
-    _refresh_favorite_item_info,
-    lambda item: [item.subtitle_path],
-    process_subtitle=True,
+    _refresh_favorite_item_info, lambda item: [item.subtitle_path], process_subtitle=True
 )
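The reworked recheck leans on `asyncio.gather(..., return_exceptions=True)`, which returns raised exceptions as ordinary values instead of failing the whole batch. A minimal standalone illustration of that behaviour (file names are made up):

```python
import asyncio

async def check(path: str) -> bool:
    if path == "boom":
        raise OSError(path)
    return path.endswith(".mp4")

async def main() -> None:
    results = await asyncio.gather(check("a.mp4"), check("boom"), return_exceptions=True)
    for path, result in zip(("a.mp4", "boom"), results):
        if isinstance(result, Exception):
            print(f"{path}: failed with {result!r}")  # exception captured, not raised
        else:
            print(f"{path}: exists={result}")

asyncio.run(main())
```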

constants.py

@@ -4,11 +4,7 @@ from pathlib import Path
 def get_base(dir_name: str) -> Path:
-    path = (
-        Path(base)
-        if (base := os.getenv(f"{dir_name.upper()}_PATH"))
-        else Path(__file__).parent / dir_name
-    )
+    path = Path(base) if (base := os.getenv(f"{dir_name.upper()}_PATH")) else Path(__file__).parent / dir_name
     path.mkdir(parents=True, exist_ok=True)
     return path
@@ -37,20 +33,18 @@ class MediaStatus(IntEnum):
     @property
     def text(self) -> str:
-        return {
-            MediaStatus.NORMAL: "normal",
-            MediaStatus.INVISIBLE: "invisible",
-            MediaStatus.DELETED: "deleted",
-        }[self]
+        return {MediaStatus.NORMAL: "normal", MediaStatus.INVISIBLE: "invisible", MediaStatus.DELETED: "deleted"}[self]
+
+
+class NfoMode(IntEnum):
+    MOVIE = 1
+    TVSHOW = 2
+    EPISODE = 3
+    UPPER = 4
 TORTOISE_ORM = {
     "connections": {"default": f"sqlite://{DEFAULT_DATABASE_PATH}"},
-    "apps": {
-        "models": {
-            "models": ["models", "aerich.models"],
-            "default_connection": "default",
-        },
-    },
+    "apps": {"models": {"models": ["models", "aerich.models"], "default_connection": "default"}},
     "use_tz": True,
 }

credential.py

@@ -6,28 +6,18 @@ from settings import settings
 class PersistedCredential(Credential):
     def __init__(self) -> None:
         super().__init__(
-            settings.sessdata,
-            settings.bili_jct,
-            settings.buvid3,
-            settings.dedeuserid,
-            settings.ac_time_value,
+            settings.sessdata, settings.bili_jct, settings.buvid3, settings.dedeuserid, settings.ac_time_value
         )
     async def refresh(self) -> None:
         await super().refresh()
-        (
-            settings.sessdata,
-            settings.bili_jct,
-            settings.dedeuserid,
-            settings.ac_time_value,
-        ) = (
+        (settings.sessdata, settings.bili_jct, settings.dedeuserid, settings.ac_time_value) = (
             self.sessdata,
             self.bili_jct,
             self.dedeuserid,
             self.ac_time_value,
         )
-        # Use a synchronous call for now
-        settings.save()
+        await settings.asave()
 credential = PersistedCredential()

entry.py

@@ -6,14 +6,7 @@ import sys
 import uvloop
 from loguru import logger
-from commands import (
-    recheck,
-    refresh_nfo,
-    refresh_poster,
-    refresh_subtitle,
-    refresh_upper,
-    refresh_video,
-)
+from commands import recheck, refresh_nfo, refresh_poster, refresh_subtitle, refresh_upper, refresh_video
 from models import init_model
 from processor import cleanup, process
 from settings import settings

migrations (new file)

@@ -0,0 +1,21 @@
+from tortoise import BaseDBAsyncClient
+
+
+async def upgrade(db: BaseDBAsyncClient) -> str:
+    return """
+        CREATE TABLE IF NOT EXISTS "favoriteitempage" (
+            "id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+            "cid" INT NOT NULL,
+            "page" INT NOT NULL,
+            "name" VARCHAR(255) NOT NULL,
+            "image" TEXT NOT NULL,
+            "status" SMALLINT NOT NULL DEFAULT 1 /* NORMAL: 1\nINVISIBLE: 2\nDELETED: 3 */,
+            "downloaded" INT NOT NULL DEFAULT 0,
+            "favorite_item_id" INT NOT NULL REFERENCES "favoriteitem" ("id") ON DELETE CASCADE,
+            CONSTRAINT "uid_favoriteite_favorit_c3b50e" UNIQUE ("favorite_item_id", "page")
+        ) /* Pages of a favorite item */;"""
+
+
+async def downgrade(db: BaseDBAsyncClient) -> str:
+    return """
+        DROP TABLE IF EXISTS "favoriteitempage";"""
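The new table enforces uniqueness of `(favorite_item_id, page)` at the database level, which is what lets the later `bulk_create(..., on_conflict=...)` calls behave as upserts. A quick demonstration against an in-memory SQLite database with a trimmed-down schema:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute(
    'CREATE TABLE "favoriteitempage" ('
    '"id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,'
    '"favorite_item_id" INT NOT NULL,'
    '"page" INT NOT NULL,'
    'CONSTRAINT "uid" UNIQUE ("favorite_item_id", "page"))'
)
conn.execute('INSERT INTO "favoriteitempage" ("favorite_item_id", "page") VALUES (1, 1)')
try:
    # Inserting the same (favorite_item_id, page) pair again is rejected
    conn.execute('INSERT INTO "favoriteitempage" ("favorite_item_id", "page") VALUES (1, 1)')
except sqlite3.IntegrityError as e:
    print(e)  # UNIQUE constraint failed
```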

models.py

@@ -3,17 +3,11 @@ from asyncio import create_subprocess_exec
 from pathlib import Path
 from tortoise import Tortoise, fields
+from tortoise.fields import Field
 from tortoise.models import Model
-from constants import (
-    DEFAULT_THUMB_PATH,
-    MIGRATE_COMMAND,
-    TORTOISE_ORM,
-    MediaStatus,
-    MediaType,
-)
+from constants import DEFAULT_THUMB_PATH, MIGRATE_COMMAND, TORTOISE_ORM, MediaStatus, MediaType
 from settings import settings
-from utils import aopen
 from version import VERSION
@@ -47,22 +41,6 @@ class Upper(Model):
     def meta_path(self) -> Path:
         return DEFAULT_THUMB_PATH / str(self.mid)[0] / f"{self.mid}" / "person.nfo"
-    async def save_metadata(self):
-        async with aopen(self.meta_path, "w") as f:
-            await f.write(
-                f"""
-<?xml version="1.0" encoding="utf-8" standalone="yes"?>
-<person>
-  <plot />
-  <outline />
-  <lockdata>false</lockdata>
-  <dateadded>{self.created_at.strftime("%Y-%m-%d %H:%M:%S")}</dateadded>
-  <title>{self.mid}</title>
-  <sorttitle>{self.mid}</sorttitle>
-</person>
-""".strip()
-            )
 class FavoriteItem(Model):
     """A favorite-list entry"""
@@ -75,8 +53,8 @@ class FavoriteItem(Model):
     desc = fields.TextField()
     cover = fields.TextField()
     tags = fields.JSONField(null=True)
-    favorite_list = fields.ForeignKeyField("models.FavoriteList", related_name="items")
-    upper = fields.ForeignKeyField("models.Upper", related_name="uploads")
+    favorite_list: Field[FavoriteList] = fields.ForeignKeyField("models.FavoriteList", related_name="items")
+    upper: Field[Upper] = fields.ForeignKeyField("models.Upper", related_name="uploads")
     ctime = fields.DatetimeField()
     pubtime = fields.DatetimeField()
     fav_time = fields.DatetimeField()
@@ -113,15 +91,92 @@ class FavoriteItem(Model):
     @property
     def upper_path(self) -> list[Path]:
-        return [
-            self.upper.thumb_path,
-            self.upper.meta_path,
-        ]
+        return [self.upper.thumb_path, self.upper.meta_path]
     @property
     def subtitle_path(self) -> Path:
         return Path(settings.path_mapper[self.favorite_list_id]) / f"{self.bvid}.zh-CN.default.ass"
+    @property
+    def tvshow_nfo_path(self) -> Path:
+        """Used for multi-page videos"""
+        return Path(settings.path_mapper[self.favorite_list_id]) / self.bvid / "tvshow.nfo"
+    @property
+    def tvshow_poster_path(self) -> Path:
+        """Used for multi-page videos"""
+        return Path(settings.path_mapper[self.favorite_list_id]) / self.bvid / "poster.jpg"
+class FavoriteItemPage(Model):
+    """A page (part) of a favorite item"""
+    id = fields.IntField(pk=True)
+    favorite_item: Field[FavoriteItem] = fields.ForeignKeyField("models.FavoriteItem", related_name="pages")
+    cid = fields.IntField()
+    page = fields.IntField()
+    name = fields.CharField(max_length=255)
+    image = fields.TextField()
+    status = fields.IntEnumField(enum_type=MediaStatus, default=MediaStatus.NORMAL)
+    downloaded = fields.BooleanField(default=False)
+    class Meta:
+        unique_together = (("favorite_item_id", "page"),)
+    @property
+    def tmp_video_path(self) -> Path:
+        return (
+            Path(settings.path_mapper[self.favorite_item.favorite_list_id])
+            / self.favorite_item.bvid
+            / "Season 1"
+            / f"tmp_{self.favorite_item.bvid} - S01E{f'{self.page:02d}'}_video"
+        )
+    @property
+    def tmp_audio_path(self) -> Path:
+        return (
+            Path(settings.path_mapper[self.favorite_item.favorite_list_id])
+            / self.favorite_item.bvid
+            / "Season 1"
+            / f"tmp_{self.favorite_item.bvid} - S01E{f'{self.page:02d}'}_audio"
+        )
+    @property
+    def video_path(self) -> Path:
+        return (
+            Path(settings.path_mapper[self.favorite_item.favorite_list_id])
+            / self.favorite_item.bvid
+            / "Season 1"
+            / f"{self.favorite_item.bvid} - S01E{f'{self.page:02d}'}.mp4"
+        )
+    @property
+    def nfo_path(self) -> Path:
+        return (
+            Path(settings.path_mapper[self.favorite_item.favorite_list_id])
+            / self.favorite_item.bvid
+            / "Season 1"
+            / f"{self.favorite_item.bvid} - S01E{f'{self.page:02d}'}.nfo"
+        )
+    @property
+    def poster_path(self) -> Path:
+        return (
+            Path(settings.path_mapper[self.favorite_item.favorite_list_id])
+            / self.favorite_item.bvid
+            / "Season 1"
+            / f"{self.favorite_item.bvid} - S01E{f'{self.page:02d}'}-thumb.jpg"
+        )
+    @property
+    def subtitle_path(self) -> Path:
+        return (
+            Path(settings.path_mapper[self.favorite_item.favorite_list_id])
+            / self.favorite_item.bvid
+            / "Season 1"
+            / f"{self.favorite_item.bvid} - S01E{f'{self.page:02d}'}.zh-CN.default.ass"
+        )
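For orientation, the properties above put every page of a multi-page video under a `Season 1` directory keyed by bvid, in Emby's TV-show naming scheme. A plain-pathlib sketch of the resulting layout (the bvid and base directory here are made up):

```python
from pathlib import Path

base, bvid, page = Path("/Videos/Bilibilis"), "BV1xx411c7mD", 2
season = base / bvid / "Season 1"
stem = f"{bvid} - S01E{page:02d}"
print(season / f"{stem}.mp4")        # page video
print(season / f"{stem}.nfo")        # episode nfo
print(season / f"{stem}-thumb.jpg")  # episode poster
print(base / bvid / "tvshow.nfo")    # tvshow nfo for the whole item
```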
 class Program(Model):
     id = fields.IntField(pk=True)
@@ -131,17 +186,11 @@ class Program(Model):
 async def init_model() -> None:
     await Tortoise.init(config=TORTOISE_ORM)
     migrate_commands = (
-        [MIGRATE_COMMAND, "upgrade"]
-        if os.getenv("BILI_IN_DOCKER")
-        else ["poetry", "run", MIGRATE_COMMAND, "upgrade"]
+        [MIGRATE_COMMAND, "upgrade"] if os.getenv("BILI_IN_DOCKER") else ["poetry", "run", MIGRATE_COMMAND, "upgrade"]
     )
     process = await create_subprocess_exec(*migrate_commands)
     await process.communicate()
-    program, created = await Program.get_or_create(
-        defaults={
-            "version": VERSION,
-        }
-    )
+    program, created = await Program.get_or_create(defaults={"version": VERSION})
     if created or program.version != VERSION:
         # Put migration logic for new versions here
         pass
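init_model shells out to the migration command with asyncio's subprocess API. A minimal standalone sketch of that pattern, substituting `echo` for the real aerich command:

```python
import asyncio

async def run(*cmd: str) -> int:
    process = await asyncio.create_subprocess_exec(*cmd)
    await process.communicate()  # wait for the command to finish
    return process.returncode

print(asyncio.run(run("echo", "upgrade complete")))  # prints 0 on success
```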

nfo.py

@@ -1,28 +1,73 @@
 import datetime
+from abc import abstractmethod
 from dataclasses import dataclass
 from pathlib import Path
+from models import FavoriteItem, FavoriteItemPage, Upper
 from utils import aopen
+@dataclass
+class Base:
+    """Base class with a shared helper method"""
+    @abstractmethod
+    def to_xml(self) -> str:
+        ...
+    async def to_file(self, path: Path) -> None:
+        """Write the xml to a file"""
+        async with aopen(path, "w", encoding="utf-8") as f:
+            await f.write(self.to_xml())
+@dataclass
+class EpisodeInfo(Base):
+    """Episode info for one page of a multi-page video"""
+    title: str
+    season: int
+    episode: int
+    @staticmethod
+    def from_favorite_item_page(page: FavoriteItemPage) -> "EpisodeInfo":
+        return EpisodeInfo(title=page.name, season=1, episode=page.page)
+    def to_xml(self) -> str:
+        return f"""
+<?xml version="1.0" encoding="utf-8" standalone="yes"?>
+<episodedetails>
+  <plot />
+  <outline />
+  <title>{self.title}</title>
+  <season>{self.season}</season>
+  <episode>{self.episode}</episode>
+</episodedetails>
+""".strip()
 @dataclass
-class Actor:
+class Actor(Base):
     name: str
     role: str
+    @staticmethod
+    def from_upper(upper: Upper) -> "Actor":
+        return Actor(name=upper.mid, role=upper.name)
     def to_xml(self) -> str:
         return f"""
 <actor>
   <name>{self.name}</name>
   <role>{self.role}</role>
 </actor>
-""".strip(
-            "\n"
-        )
+""".strip()
 @dataclass
-class EpisodeInfo:
+class MovieInfo(Base):
+    """Info for a single-page video"""
     title: str
     plot: str
     tags: list[str]
@@ -30,20 +75,23 @@
     bvid: str
     aired: datetime.datetime
-    async def write_nfo(self, path: Path) -> None:
-        async with aopen(path, "w", encoding="utf-8") as f:
-            await f.write(self.to_xml())
+    @staticmethod
+    def from_favorite_item(fav_item: FavoriteItem) -> "MovieInfo":
+        return MovieInfo(
+            title=fav_item.name,
+            plot=fav_item.desc,
+            actor=[Actor.from_upper(fav_item.upper)],
+            tags=fav_item.tags,
+            bvid=fav_item.bvid,
+            aired=fav_item.ctime,
+        )
     def to_xml(self) -> str:
         actor = "\n".join(_.to_xml() for _ in self.actor)
-        tags = (
-            "\n".join(f"  <genre>{_}</genre>" for _ in self.tags)
-            if isinstance(self.tags, list)
-            else ""
-        )
+        tags = "\n".join(f"  <genre>{_}</genre>" for _ in self.tags) if isinstance(self.tags, list) else ""
         return f"""
 <?xml version="1.0" encoding="utf-8" standalone="yes"?>
-<episodedetails>
+<movie>
 <plot><![CDATA[{self.plot}]]></plot>
 <outline />
 <title>{self.title}</title>
@@ -52,7 +100,65 @@
 {tags}
 <uniqueid type="bilibili">{self.bvid}</uniqueid>
 <aired>{self.aired.strftime("%Y-%m-%d")}</aired>
-</episodedetails>
-""".strip(
-            "\n"
-        )
+</movie>
+""".strip()
+@dataclass
+class TVShowInfo(Base):
+    title: str
+    plot: str
+    tags: list[str]
+    actor: list[Actor]
+    bvid: str
+    aired: datetime.datetime
+    @staticmethod
+    def from_favorite_item(fav_item: FavoriteItem) -> "TVShowInfo":
+        return TVShowInfo(
+            title=fav_item.name,
+            plot=fav_item.desc,
+            actor=[Actor.from_upper(fav_item.upper)],
+            tags=fav_item.tags,
+            bvid=fav_item.bvid,
+            aired=fav_item.ctime,
+        )
+    def to_xml(self) -> str:
+        actor = "\n".join(_.to_xml() for _ in self.actor)
+        tags = "\n".join(f"  <genre>{_}</genre>" for _ in self.tags) if isinstance(self.tags, list) else ""
+        return f"""
+<?xml version="1.0" encoding="utf-8" standalone="yes"?>
+<tvshow>
+<plot><![CDATA[{self.plot}]]></plot>
+<outline />
+<title>{self.title}</title>
+{actor}
+<year>{self.aired.year}</year>
+{tags}
+<uniqueid type="bilibili">{self.bvid}</uniqueid>
+<aired>{self.aired.strftime("%Y-%m-%d")}</aired>
+</tvshow>
+""".strip()
+@dataclass
+class UpperInfo(Base):
+    mid: int
+    created_at: datetime.datetime
+    @staticmethod
+    def from_upper(upper: Upper) -> "UpperInfo":
+        return UpperInfo(mid=upper.mid, created_at=upper.created_at)
+    def to_xml(self) -> str:
+        return f"""
+<?xml version="1.0" encoding="utf-8" standalone="yes"?>
+<person>
+  <plot />
+  <outline />
+  <lockdata>false</lockdata>
+  <dateadded>{self.created_at.strftime("%Y-%m-%d %H:%M:%S")}</dateadded>
+  <title>{self.mid}</title>
+  <sorttitle>{self.mid}</sorttitle>
+</person>
+""".strip()
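A short usage sketch of the classes above, runnable within the repo (the values are made up; `to_file` comes from `Base`):

```python
import asyncio
from pathlib import Path

from nfo import EpisodeInfo

info = EpisodeInfo(title="Part 2", season=1, episode=2)
print(info.to_xml())  # renders the <episodedetails> block shown above
asyncio.run(info.to_file(Path("episode.nfo")))  # written asynchronously via Base.to_file
```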

poetry.lock (generated)

@@ -317,46 +317,6 @@ url = "https://github.com/Nemo2011/bilibili-api.git"
 reference = "16.2.0b2"
 resolved_reference = "d2e53b1f993e4e6777849a232ef076b73ee8ca7c"
-[[package]]
-name = "black"
-version = "23.11.0"
-description = "The uncompromising code formatter."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "black-23.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dbea0bb8575c6b6303cc65017b46351dc5953eea5c0a59d7b7e3a2d2f433a911"},
-    {file = "black-23.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:412f56bab20ac85927f3a959230331de5614aecda1ede14b373083f62ec24e6f"},
-    {file = "black-23.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d136ef5b418c81660ad847efe0e55c58c8208b77a57a28a503a5f345ccf01394"},
-    {file = "black-23.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:6c1cac07e64433f646a9a838cdc00c9768b3c362805afc3fce341af0e6a9ae9f"},
-    {file = "black-23.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cf57719e581cfd48c4efe28543fea3d139c6b6f1238b3f0102a9c73992cbb479"},
-    {file = "black-23.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:698c1e0d5c43354ec5d6f4d914d0d553a9ada56c85415700b81dc90125aac244"},
-    {file = "black-23.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:760415ccc20f9e8747084169110ef75d545f3b0932ee21368f63ac0fee86b221"},
-    {file = "black-23.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:58e5f4d08a205b11800332920e285bd25e1a75c54953e05502052738fe16b3b5"},
-    {file = "black-23.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:45aa1d4675964946e53ab81aeec7a37613c1cb71647b5394779e6efb79d6d187"},
-    {file = "black-23.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c44b7211a3a0570cc097e81135faa5f261264f4dfaa22bd5ee2875a4e773bd6"},
-    {file = "black-23.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a9acad1451632021ee0d146c8765782a0c3846e0e0ea46659d7c4f89d9b212b"},
-    {file = "black-23.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:fc7f6a44d52747e65a02558e1d807c82df1d66ffa80a601862040a43ec2e3142"},
-    {file = "black-23.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7f622b6822f02bfaf2a5cd31fdb7cd86fcf33dab6ced5185c35f5db98260b055"},
-    {file = "black-23.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:250d7e60f323fcfc8ea6c800d5eba12f7967400eb6c2d21ae85ad31c204fb1f4"},
-    {file = "black-23.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5133f5507007ba08d8b7b263c7aa0f931af5ba88a29beacc4b2dc23fcefe9c06"},
-    {file = "black-23.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:421f3e44aa67138ab1b9bfbc22ee3780b22fa5b291e4db8ab7eee95200726b07"},
-    {file = "black-23.11.0-py3-none-any.whl", hash = "sha256:54caaa703227c6e0c87b76326d0862184729a69b73d3b7305b6288e1d830067e"},
-    {file = "black-23.11.0.tar.gz", hash = "sha256:4c68855825ff432d197229846f971bc4d6666ce90492e5b02013bcaca4d9ab05"},
-]
-[package.dependencies]
-click = ">=8.0.0"
-mypy-extensions = ">=0.4.3"
-packaging = ">=22.0"
-pathspec = ">=0.9.0"
-platformdirs = ">=2"
-[package.extras]
-colorama = ["colorama (>=0.4.3)"]
-d = ["aiohttp (>=3.7.4)"]
-jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
-uvloop = ["uvloop (>=0.15.2)"]
 [[package]]
 name = "brotli"
 version = "1.1.0"
@@ -609,21 +569,6 @@ files = [
     {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
 ]
-[[package]]
-name = "dataclasses-json"
-version = "0.6.2"
-description = "Easily serialize dataclasses to and from JSON."
-optional = false
-python-versions = ">=3.7,<4.0"
-files = [
-    {file = "dataclasses_json-0.6.2-py3-none-any.whl", hash = "sha256:71816ced3d0f55a2c5bc1a813ace1b8d4234e79a08744269a7cf84d6f7c06e99"},
-    {file = "dataclasses_json-0.6.2.tar.gz", hash = "sha256:1b934c1bd63e775880946b8361a902d7de86e894bab8098eab27c010f95724d1"},
-]
-[package.dependencies]
-marshmallow = ">=3.18.0,<4.0.0"
-typing-inspect = ">=0.4.0,<1"
 [[package]]
 name = "decorator"
 version = "5.1.1"
@@ -1045,26 +990,6 @@ profiling = ["gprof2dot"]
 rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
 testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
-[[package]]
-name = "marshmallow"
-version = "3.20.1"
-description = "A lightweight library for converting complex datatypes to and from native Python datatypes."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "marshmallow-3.20.1-py3-none-any.whl", hash = "sha256:684939db93e80ad3561392f47be0230743131560a41c5110684c16e21ade0a5c"},
-    {file = "marshmallow-3.20.1.tar.gz", hash = "sha256:5d2371bbe42000f2b3fb5eaa065224df7d8f8597bc19a1bbfa5bfe7fba8da889"},
-]
-[package.dependencies]
-packaging = ">=17.0"
-[package.extras]
-dev = ["flake8 (==6.0.0)", "flake8-bugbear (==23.7.10)", "mypy (==1.4.1)", "pre-commit (>=2.4,<4.0)", "pytest", "pytz", "simplejson", "tox"]
-docs = ["alabaster (==0.7.13)", "autodocsumm (==0.2.11)", "sphinx (==7.0.1)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"]
-lint = ["flake8 (==6.0.0)", "flake8-bugbear (==23.7.10)", "mypy (==1.4.1)", "pre-commit (>=2.4,<4.0)"]
-tests = ["pytest", "pytz", "simplejson"]
 [[package]]
 name = "matplotlib-inline"
 version = "0.1.6"
@@ -1189,28 +1114,6 @@ files = [
     {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"},
 ]
-[[package]]
-name = "mypy-extensions"
-version = "1.0.0"
-description = "Type system extensions for programs checked with the mypy type checker."
-optional = false
-python-versions = ">=3.5"
-files = [
-    {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
-    {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
-]
-[[package]]
-name = "packaging"
-version = "23.2"
-description = "Core utilities for Python packages"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
-    {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
-]
 [[package]]
 name = "parso"
 version = "0.8.3"
@@ -1226,17 +1129,6 @@ files = [
 qa = ["flake8 (==3.8.3)", "mypy (==0.782)"]
 testing = ["docopt", "pytest (<6.0.0)"]
-[[package]]
-name = "pathspec"
-version = "0.11.2"
-description = "Utility library for gitignore style pattern matching of file paths."
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"},
-    {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"},
-]
 [[package]]
 name = "pexpect"
 version = "4.8.0"
@@ -1318,21 +1210,6 @@ files = [
 docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"]
 tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"]
-[[package]]
-name = "platformdirs"
-version = "4.0.0"
-description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "platformdirs-4.0.0-py3-none-any.whl", hash = "sha256:118c954d7e949b35437270383a3f2531e99dd93cf7ce4dc8340d3356d30f173b"},
-    {file = "platformdirs-4.0.0.tar.gz", hash = "sha256:cb633b2bcf10c51af60beb0ab06d2f1d69064b43abf4c185ca6b28865f3f9731"},
-]
-[package.extras]
-docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"]
-test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"]
 [[package]]
 name = "prompt-toolkit"
 version = "3.0.41"
@@ -1821,28 +1698,28 @@ pyasn1 = ">=0.1.3"
 [[package]]
 name = "ruff"
-version = "0.1.6"
+version = "0.2.2"
 description = "An extremely fast Python linter and code formatter, written in Rust."
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "ruff-0.1.6-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:88b8cdf6abf98130991cbc9f6438f35f6e8d41a02622cc5ee130a02a0ed28703"},
-    {file = "ruff-0.1.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5c549ed437680b6105a1299d2cd30e4964211606eeb48a0ff7a93ef70b902248"},
-    {file = "ruff-0.1.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cf5f701062e294f2167e66d11b092bba7af6a057668ed618a9253e1e90cfd76"},
-    {file = "ruff-0.1.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:05991ee20d4ac4bb78385360c684e4b417edd971030ab12a4fbd075ff535050e"},
-    {file = "ruff-0.1.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87455a0c1f739b3c069e2f4c43b66479a54dea0276dd5d4d67b091265f6fd1dc"},
-    {file = "ruff-0.1.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:683aa5bdda5a48cb8266fcde8eea2a6af4e5700a392c56ea5fb5f0d4bfdc0240"},
-    {file = "ruff-0.1.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:137852105586dcbf80c1717facb6781555c4e99f520c9c827bd414fac67ddfb6"},
-    {file = "ruff-0.1.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd98138a98d48a1c36c394fd6b84cd943ac92a08278aa8ac8c0fdefcf7138f35"},
-    {file = "ruff-0.1.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a0cd909d25f227ac5c36d4e7e681577275fb74ba3b11d288aff7ec47e3ae745"},
-    {file = "ruff-0.1.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e8fd1c62a47aa88a02707b5dd20c5ff20d035d634aa74826b42a1da77861b5ff"},
-    {file = "ruff-0.1.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fd89b45d374935829134a082617954120d7a1470a9f0ec0e7f3ead983edc48cc"},
-    {file = "ruff-0.1.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:491262006e92f825b145cd1e52948073c56560243b55fb3b4ecb142f6f0e9543"},
-    {file = "ruff-0.1.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ea284789861b8b5ca9d5443591a92a397ac183d4351882ab52f6296b4fdd5462"},
-    {file = "ruff-0.1.6-py3-none-win32.whl", hash = "sha256:1610e14750826dfc207ccbcdd7331b6bd285607d4181df9c1c6ae26646d6848a"},
-    {file = "ruff-0.1.6-py3-none-win_amd64.whl", hash = "sha256:4558b3e178145491e9bc3b2ee3c4b42f19d19384eaa5c59d10acf6e8f8b57e33"},
-    {file = "ruff-0.1.6-py3-none-win_arm64.whl", hash = "sha256:03910e81df0d8db0e30050725a5802441c2022ea3ae4fe0609b76081731accbc"},
-    {file = "ruff-0.1.6.tar.gz", hash = "sha256:1b09f29b16c6ead5ea6b097ef2764b42372aebe363722f1605ecbcd2b9207184"},
+    {file = "ruff-0.2.2-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0a9efb032855ffb3c21f6405751d5e147b0c6b631e3ca3f6b20f917572b97eb6"},
+    {file = "ruff-0.2.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d450b7fbff85913f866a5384d8912710936e2b96da74541c82c1b458472ddb39"},
+    {file = "ruff-0.2.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecd46e3106850a5c26aee114e562c329f9a1fbe9e4821b008c4404f64ff9ce73"},
+    {file = "ruff-0.2.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e22676a5b875bd72acd3d11d5fa9075d3a5f53b877fe7b4793e4673499318ba"},
+    {file = "ruff-0.2.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1695700d1e25a99d28f7a1636d85bafcc5030bba9d0578c0781ba1790dbcf51c"},
+    {file = "ruff-0.2.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:b0c232af3d0bd8f521806223723456ffebf8e323bd1e4e82b0befb20ba18388e"},
+    {file = "ruff-0.2.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f63d96494eeec2fc70d909393bcd76c69f35334cdbd9e20d089fb3f0640216ca"},
+    {file = "ruff-0.2.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a61ea0ff048e06de273b2e45bd72629f470f5da8f71daf09fe481278b175001"},
+    {file = "ruff-0.2.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e1439c8f407e4f356470e54cdecdca1bd5439a0673792dbe34a2b0a551a2fe3"},
+    {file = "ruff-0.2.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:940de32dc8853eba0f67f7198b3e79bc6ba95c2edbfdfac2144c8235114d6726"},
+    {file = "ruff-0.2.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:0c126da55c38dd917621552ab430213bdb3273bb10ddb67bc4b761989210eb6e"},
+    {file = "ruff-0.2.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:3b65494f7e4bed2e74110dac1f0d17dc8e1f42faaa784e7c58a98e335ec83d7e"},
+    {file = "ruff-0.2.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1ec49be4fe6ddac0503833f3ed8930528e26d1e60ad35c2446da372d16651ce9"},
+    {file = "ruff-0.2.2-py3-none-win32.whl", hash = "sha256:d920499b576f6c68295bc04e7b17b6544d9d05f196bb3aac4358792ef6f34325"},
+    {file = "ruff-0.2.2-py3-none-win_amd64.whl", hash = "sha256:cc9a91ae137d687f43a44c900e5d95e9617cb37d4c989e462980ba27039d239d"},
+    {file = "ruff-0.2.2-py3-none-win_arm64.whl", hash = "sha256:c9d15fc41e6054bfc7200478720570078f0b41c9ae4f010bcc16bd6f4d1aacdd"},
+    {file = "ruff-0.2.2.tar.gz", hash = "sha256:e62ed7f36b3068a30ba39193a14274cd706bc486fad521276458022f7bccb31d"},
 ]
 [[package]]
@@ -1979,21 +1856,6 @@ files = [
     {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"},
 ]
-[[package]]
-name = "typing-inspect"
-version = "0.9.0"
-description = "Runtime inspection utilities for typing module."
-optional = false
-python-versions = "*"
-files = [
-    {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"},
-    {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"},
-]
-[package.dependencies]
-mypy-extensions = ">=0.3.0"
-typing-extensions = ">=3.7.4"
 [[package]]
 name = "tzdata"
 version = "2023.4"
@@ -2214,4 +2076,4 @@ multidict = ">=4.0"
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.11"
-content-hash = "12a8fd1ae9e27d9a2a85f6fb28a39a3fe73ee8ecbc31ba3a9400943b85573501"
+content-hash = "e460803a11bacc655d364566b994ba3b038889c0b1b8aaf16492f302184e9eb7"

processor.py

@@ -1,18 +1,21 @@
 import asyncio
+import contextlib
 import datetime
-import itertools
 from asyncio import Semaphore, create_subprocess_exec
-from asyncio.subprocess import DEVNULL
+from asyncio.subprocess import PIPE
+from pathlib import Path
 from bilibili_api import ass, favorite_list, video
 from bilibili_api.exceptions import ResponseCodeException
 from loguru import logger
 from tortoise.connection import connections
+from tortoise.models import Model
-from constants import FFMPEG_COMMAND, MediaStatus, MediaType
+from constants import FFMPEG_COMMAND, MediaStatus, MediaType, NfoMode
 from credential import credential
-from models import FavoriteItem, FavoriteList, Upper
+from models import FavoriteItem, FavoriteItemPage, FavoriteList, Upper
-from nfo import Actor, EpisodeInfo
+from nfo import Base as NfoBase
+from nfo import EpisodeInfo, MovieInfo, TVShowInfo, UpperInfo
 from settings import settings
 from utils import aexists, amakedirs, client, download_content
@@ -25,6 +28,7 @@ async def cleanup() -> None:
 def concurrent_decorator(concurrency: int) -> callable:
+    """A simple concurrency limiter: at most `concurrency` calls of the decorated function run at once"""
     sem = Semaphore(value=concurrency)
     def decorator(func: callable) -> callable:
@@ -37,16 +41,12 @@ def concurrent_decorator(concurrency: int) -> callable:
     return decorator
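The decorator body is unchanged by this release and mostly elided from the hunk above. A sketch of how such a semaphore-based limiter is typically written — an assumption about the unshown body, not the repo's exact code:

```python
import asyncio
import functools
from collections.abc import Awaitable, Callable

def concurrent_decorator(concurrency: int) -> Callable:
    """Limit the decorated coroutine function to `concurrency` concurrent executions."""
    sem = asyncio.Semaphore(value=concurrency)

    def decorator(func: Callable[..., Awaitable]) -> Callable[..., Awaitable]:
        @functools.wraps(func)
        async def wrapper(*args, **kwargs):
            async with sem:  # wait for a free slot before running
                return await func(*args, **kwargs)

        return wrapper

    return decorator
```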
-async def manage_model(medias: list[dict], fav_list: FavoriteList) -> None:
+async def update_favorite_item(medias: list[dict], fav_list: FavoriteList) -> None:
+    """Update database records from the favorite list's video list"""
     uppers = [
-        Upper(
-            mid=media["upper"]["mid"],
-            name=media["upper"]["name"],
-            thumb=media["upper"]["face"],
-        )
-        for media in medias
+        Upper(mid=media["upper"]["mid"], name=media["upper"]["name"], thumb=media["upper"]["face"]) for media in medias
     ]
-    await Upper.bulk_create(uppers, on_conflict=["mid"], update_fields=["name", "thumb"])
+    await Upper.bulk_create(uppers, on_conflict=["mid"], update_fields=["name", "thumb"], batch_size=300)
     items = [
         FavoriteItem(
             name=media["title"],
@@ -66,15 +66,8 @@ async def manage_model(medias: list[dict], fav_list: FavoriteList) -> None:
     await FavoriteItem.bulk_create(
         items,
         on_conflict=["bvid", "favorite_list_id"],
-        update_fields=[
-            "name",
-            "type",
-            "desc",
-            "cover",
-            "ctime",
-            "pubtime",
-            "fav_time",
-        ],
+        update_fields=["name", "type", "desc", "cover", "ctime", "pubtime", "fav_time"],
+        batch_size=300,
     )
@@ -100,11 +93,7 @@ async def process_favorite(favorite_id: int) -> None:
         favorite_id, page=1, credential=credential
     )
     title = favorite_video_list["info"]["title"]
-    logger.info(
-        "Start to process favorite {}: {}",
-        favorite_id,
-        title,
-    )
+    logger.info("Start to process favorite {}: {}.", favorite_id, title)
     fav_list, _ = await FavoriteList.get_or_create(
         id=favorite_id, defaults={"name": favorite_video_list["info"]["title"]}
     )
@@ -118,32 +107,23 @@ async def process_favorite(favorite_id: int) -> None:
     )
     # First check whether records with these bvids already exist
     existed_items = await FavoriteItem.filter(
-        favorite_list=fav_list,
-        bvid__in=[media["bvid"] for media in favorite_video_list["medias"]],
+        favorite_list=fav_list, bvid__in=[media["bvid"] for media in favorite_video_list["medias"]]
     )
     # Record the bvid and fav_time of each entry in the fetched list
     media_info = {(media["bvid"], media["fav_time"]) for media in favorite_video_list["medias"]}
     # A record matching both bvid and fav_time means we have reached the position processed last time
-    continue_flag = not media_info & {
-        (item.bvid, int(item.fav_time.timestamp())) for item in existed_items
-    }
-    await manage_model(favorite_video_list["medias"], fav_list)
+    continue_flag = not media_info & {(item.bvid, int(item.fav_time.timestamp())) for item in existed_items}
+    await update_favorite_item(favorite_video_list["medias"], fav_list)
     if not (continue_flag and favorite_video_list["has_more"]):
         break
     all_unprocessed_items = await FavoriteItem.filter(
-        favorite_list=fav_list,
-        type=MediaType.VIDEO,
-        status=MediaStatus.NORMAL,
-        downloaded=False,
+        favorite_list=fav_list, type=MediaType.VIDEO, status=MediaStatus.NORMAL, downloaded=False
     ).prefetch_related("upper")
-    await asyncio.gather(
-        *[process_favorite_item(item) for item in all_unprocessed_items],
-        return_exceptions=True,
-    )
-    logger.info("Favorite {} {} processed successfully.", favorite_id, title)
+    await asyncio.gather(*[process_favorite_item(item) for item in all_unprocessed_items], return_exceptions=True)
+    logger.info("Favorite {} {} has been processed.", favorite_id, title)
-@concurrent_decorator(4)
+@concurrent_decorator(concurrency=4)
 async def process_favorite_item(
     fav_item: FavoriteItem,
     process_poster=True,
@@ -152,201 +132,323 @@ async def process_favorite_item(
     process_upper=True,
     process_subtitle=True,
 ) -> None:
-    logger.info("Start to process video {} {}", fav_item.bvid, fav_item.name)
-    if fav_item.type != MediaType.VIDEO:
-        logger.warning("Media {} is not a video, skipped.", fav_item.name)
-        return
-    v = video.Video(fav_item.bvid, credential=credential)
-    # If tags were never fetched, try to fetch them
-    try:
-        if fav_item.tags is None:
-            fav_item.tags = [_["tag_name"] for _ in await v.get_tags()]
-    except Exception:
-        logger.exception(
-            "Failed to get tags of video {} {}",
-            fav_item.bvid,
-            fav_item.name,
-        )
-    if process_upper:
-        try:
-            if not all(
-                await asyncio.gather(
-                    aexists(fav_item.upper.thumb_path),
-                    aexists(fav_item.upper.meta_path),
-                )
-            ):
-                await amakedirs(fav_item.upper.thumb_path.parent, exist_ok=True)
-                await asyncio.gather(
-                    fav_item.upper.save_metadata(),
-                    download_content(fav_item.upper.thumb, fav_item.upper.thumb_path),
-                    return_exceptions=True,
-                )
-            else:
-                logger.info(
-                    "Upper {} {} already exists, skipped.",
-                    fav_item.upper.mid,
-                    fav_item.upper.name,
-                )
-        except Exception:
-            logger.exception(
-                "Failed to process upper {} {}",
-                fav_item.upper.mid,
-                fav_item.upper.name,
-            )
-    if process_nfo:
-        try:
-            if not await aexists(fav_item.nfo_path):
-                await EpisodeInfo(
-                    title=fav_item.name,
-                    plot=fav_item.desc,
-                    actor=[
-                        Actor(
-                            name=fav_item.upper.mid,
-                            role=fav_item.upper.name,
-                        )
-                    ],
-                    tags=fav_item.tags,
-                    bvid=fav_item.bvid,
-                    aired=fav_item.ctime,
-                ).write_nfo(fav_item.nfo_path)
-            else:
-                logger.info(
-                    "NFO of {} {} already exists, skipped.",
-                    fav_item.bvid,
-                    fav_item.name,
-                )
-        except Exception:
-            logger.exception(
-                "Failed to process nfo of video {} {}",
-                fav_item.bvid,
-                fav_item.name,
-            )
-    if process_poster:
-        try:
-            if not await aexists(fav_item.poster_path):
-                try:
-                    await download_content(fav_item.cover, fav_item.poster_path)
-                except Exception:
-                    logger.exception(
-                        "Failed to download poster of video {} {}",
-                        fav_item.bvid,
-                        fav_item.name,
-                    )
-            else:
-                logger.info(
-                    "Poster of {} {} already exists, skipped.",
-                    fav_item.bvid,
-                    fav_item.name,
-                )
-        except Exception:
-            logger.exception(
-                "Failed to process poster of video {} {}",
-                fav_item.bvid,
-                fav_item.name,
-            )
-    if process_subtitle:
-        try:
-            if not await aexists(fav_item.subtitle_path):
-                await ass.make_ass_file_danmakus_protobuf(
-                    v,
-                    0,
-                    str(fav_item.subtitle_path.resolve()),
-                    credential=credential,
-                    font_name=settings.subtitle.font_name,
-                    font_size=settings.subtitle.font_size,
-                    alpha=settings.subtitle.alpha,
-                    fly_time=settings.subtitle.fly_time,
-                    static_time=settings.subtitle.static_time,
-                )
-            else:
-                logger.info(
-                    "Subtitle of {} {} already exists, skipped.",
-                    fav_item.bvid,
-                    fav_item.name,
-                )
-        except Exception:
-            logger.exception(
-                "Failed to process subtitle of video {} {}",
-                fav_item.bvid,
-                fav_item.name,
-            )
-    if process_video:
-        try:
-            if await aexists(fav_item.video_path):
-                fav_item.downloaded = True
-                logger.info(
-                    "Video {} {} already exists, skipped.",
-                    fav_item.bvid,
-                    fav_item.name,
-                )
-            else:
-                # Start processing the video content
-                detector = video.VideoDownloadURLDataDetecter(
-                    await v.get_download_url(page_index=0)
-                )
-                streams = detector.detect_best_streams(codecs=settings.codec)
-                if detector.check_flv_stream():
-                    await download_content(streams[0].url, fav_item.tmp_video_path)
-                    process = await create_subprocess_exec(
-                        FFMPEG_COMMAND,
-                        "-i",
-                        fav_item.tmp_video_path,
-                        fav_item.video_path,
-                        stdout=DEVNULL,
-                        stderr=DEVNULL,
-                    )
-                    await process.communicate()
-                    fav_item.tmp_video_path.unlink()
-                else:
-                    paths, tasks = [fav_item.tmp_video_path], [
-                        download_content(streams[0].url, fav_item.tmp_video_path)
-                    ]
-                    if streams[1]:
-                        paths.append(fav_item.tmp_audio_path)
-                        tasks.append(download_content(streams[1].url, fav_item.tmp_audio_path))
-                    await asyncio.gather(*tasks)
-                    process = await create_subprocess_exec(
-                        FFMPEG_COMMAND,
-                        *list(itertools.chain(*zip(["-i"] * len(paths), paths))),
-                        "-c",
-                        "copy",
-                        fav_item.video_path,
-                        stdout=DEVNULL,
-                        stderr=DEVNULL,
-                    )
-                    await process.communicate()
-                    for path in paths:
-                        path.unlink()
-                fav_item.downloaded = True
-        except ResponseCodeException as e:
-            match e.code:
-                case 62002:
-                    fav_item.status = MediaStatus.INVISIBLE
-                case -404:
-                    fav_item.status = MediaStatus.DELETED
-                case _:
-                    logger.exception(
-                        "Failed to process video {} {}, error_code: {}",
-                        fav_item.bvid,
-                        fav_item.name,
-                        e.code,
-                    )
-            if fav_item.status != MediaStatus.NORMAL:
-                logger.error(
-                    "Video {} {} is not available, marked as {}",
-                    fav_item.bvid,
-                    fav_item.name,
-                    fav_item.status.text,
-                )
-        except Exception:
-            logger.exception("Failed to process video {} {}", fav_item.bvid, fav_item.name)
-    await fav_item.save()
-    logger.info(
-        "{} {} is processed successfully.",
-        fav_item.bvid,
-        fav_item.name,
-    )
+    logger.info("Start to process video {} {}.", fav_item.bvid, fav_item.name)
+    if fav_item.type != MediaType.VIDEO:
+        logger.warning("Media {} {} is not a video, skipped.", fav_item.bvid, fav_item.name)
+        return
+    v = video.Video(fav_item.bvid, credential=credential)
+    # If tags were never fetched, try to fetch them (non-critical, errors are ignored)
+    with contextlib.suppress(Exception):
+        if fav_item.tags is None:
+            fav_item.tags = [_["tag_name"] for _ in await v.get_tags()]
+    # Handling uploader info is independent of pagination, so do it first
+    if process_upper:
+        result = await asyncio.gather(
+            get_file(fav_item.upper.thumb, fav_item.upper.thumb_path),
+            get_nfo(fav_item.upper.meta_path, obj=fav_item.upper, mode=NfoMode.UPPER),
+            return_exceptions=True,
+        )
+        if any(isinstance(_, FileExistsError) for _ in result):
+            logger.info("Upper {} {} already exists, skipped.", fav_item.upper.mid, fav_item.upper.name)
+        elif any(isinstance(_, Exception) for _ in result):
+            logger.exception("Failed to process upper {} {}.", fav_item.upper.mid, fav_item.upper.name)
+    single_page = False
+    if settings.paginated_video:
+        pages = None
+        try:
+            pages = await v.get_pages()
+            pages = [
+                FavoriteItemPage(
+                    favorite_item=fav_item,
+                    cid=page["cid"],
+                    page=page["page"],
+                    name=page["part"],
+                    image=page["first_frame"],
+                )
+                for page in pages
+            ]
+        except Exception:
+            logger.exception("Failed to get pages of video {} {}.", fav_item.bvid, fav_item.name)
+        if pages:
+            if len(pages) == 1:
+                single_page = True
+            else:
+                # For multiple pages, create the records first
+                await FavoriteItemPage.bulk_create(
+                    pages,
+                    on_conflict=["favorite_item_id", "page"],
+                    update_fields=["cid", "name", "image"],
+                    batch_size=300,
+                )
+                # Re-fetch the rows; bulk_create's return value can't be used because it doesn't populate primary keys
+                pages = await FavoriteItemPage.filter(favorite_item=fav_item).order_by("page")
+                for page in pages:
+                    page.favorite_item = fav_item
+                if process_nfo:
+                    try:
+                        await get_nfo(fav_item.tvshow_nfo_path, obj=fav_item, mode=NfoMode.TVSHOW)
+                    except FileExistsError:
+                        logger.info("Nfo of {} {} already exists, skipped.", fav_item.bvid, fav_item.name)
+                    except Exception:
+                        logger.exception("Failed to process nfo of video {} {}.", fav_item.bvid, fav_item.name)
+                if process_poster:
+                    try:
+                        await get_file(fav_item.cover, fav_item.tvshow_poster_path)
+                    except FileExistsError:
+                        logger.info("Poster of {} {} already exists, skipped.", fav_item.bvid, fav_item.name)
+                    except Exception:
+                        logger.exception("Failed to process poster of video {} {}.", fav_item.bvid, fav_item.name)
+                await asyncio.gather(
+                    *[
+                        process_favorite_item_page(
+                            page, v, process_poster, process_video, process_nfo, process_subtitle
+                        )
+                        for page in pages
+                    ],
+                    return_exceptions=True,
+                )
+                fav_item.downloaded = all(page.downloaded for page in pages)
+                page_status = {page.status for page in pages}
+                if MediaStatus.INVISIBLE in page_status:
+                    fav_item.status = MediaStatus.INVISIBLE
+                elif MediaStatus.DELETED in page_status:
+                    fav_item.status = MediaStatus.DELETED
+                else:
+                    fav_item.status = MediaStatus.NORMAL
+    if single_page or not settings.paginated_video:
+        if process_nfo:
+            try:
+                await get_nfo(fav_item.nfo_path, obj=fav_item, mode=NfoMode.MOVIE)
+            except FileExistsError:
+                logger.info("NFO of {} {} already exists, skipped.", fav_item.bvid, fav_item.name)
+            except Exception:
+                logger.exception("Failed to process nfo of video {} {}.", fav_item.bvid, fav_item.name)
+        if process_poster:
+            try:
+                await get_file(fav_item.cover, fav_item.poster_path)
+            except FileExistsError:
+                logger.info("Poster of {} {} already exists, skipped.", fav_item.bvid, fav_item.name)
+            except Exception:
+                logger.exception("Failed to process poster of video {} {}.", fav_item.bvid, fav_item.name)
+        if process_subtitle:
+            try:
+                await get_subtitle(v, 0, fav_item.subtitle_path)
+            except FileExistsError:
+                logger.info("Subtitle of {} {} already exists, skipped.", fav_item.bvid, fav_item.name)
+            except Exception:
+                logger.exception("Failed to process subtitle of video {} {}.", fav_item.bvid, fav_item.name)
+        if process_video:
+            try:
+                await get_video(v, 0, fav_item.tmp_video_path, fav_item.tmp_audio_path, fav_item.video_path)
+                fav_item.downloaded = True
+            except FileExistsError:
+                logger.info("Video {} {} already exists, skipped.", fav_item.bvid, fav_item.name)
+                fav_item.downloaded = True
+            except Exception as e:
+                errcode_status = {62002: MediaStatus.INVISIBLE, -404: MediaStatus.DELETED}
+                if not (isinstance(e, ResponseCodeException) and (status := errcode_status.get(e.code))):
+                    logger.exception("Failed to process video {} {}.", fav_item.bvid, fav_item.name)
+                else:
+                    fav_item.status = status
+                    logger.error(
+                        "Video {} {} is not available, marked as {}.",
+                        fav_item.bvid,
+                        fav_item.name,
+                        fav_item.status.text,
+                    )
+    await fav_item.save()
+    logger.info("{} {} has been processed.", fav_item.bvid, fav_item.name)
+
+
+@concurrent_decorator(concurrency=4)
+async def process_favorite_item_page(
+    fav_page: FavoriteItemPage,
+    v: video.Video,
+    process_poster=True,
+    process_video=True,
+    process_nfo=True,
+    process_subtitle=True,
+):
+    logger.info(
+        "Start to process video {} {} page {}.", fav_page.favorite_item.bvid, fav_page.favorite_item.name, fav_page.page
+    )
+    if process_nfo:
+        try:
+            await get_nfo(fav_page.nfo_path, obj=fav_page, mode=NfoMode.EPISODE)
+        except FileExistsError:
+            logger.info(
+                "NFO of {} {} page {} already exists, skipped.",
+                fav_page.favorite_item.bvid,
+                fav_page.favorite_item.name,
+                fav_page.page,
+            )
+        except Exception:
+            logger.exception(
+                "Failed to process nfo of video {} {} page {}.",
+                fav_page.favorite_item.bvid,
+                fav_page.favorite_item.name,
+                fav_page.page,
+            )
+    if process_poster:
+        try:
+            await get_file(fav_page.image, fav_page.poster_path)
+        except FileExistsError:
+            logger.info(
+                "Poster of {} {} page {} already exists, skipped.",
+                fav_page.favorite_item.bvid,
+                fav_page.favorite_item.name,
+                fav_page.page,
+            )
+        except Exception:
+            logger.exception(
+                "Failed to process poster of video {} {} page {}.",
+                fav_page.favorite_item.bvid,
+                fav_page.favorite_item.name,
+                fav_page.page,
+            )
+    if process_subtitle:
+        try:
+            await get_subtitle(v, fav_page.page - 1, fav_page.subtitle_path)
+        except FileExistsError:
+            logger.info(
+                "Subtitle of {} {} page {} already exists, skipped.",
+                fav_page.favorite_item.bvid,
+                fav_page.favorite_item.name,
+                fav_page.page,
+            )
+        except Exception:
+            logger.exception(
+                "Failed to process subtitle of video {} {} page {}.",
+                fav_page.favorite_item.bvid,
+                fav_page.favorite_item.name,
+                fav_page.page,
+            )
+    if process_video:
+        try:
+            await get_video(v, fav_page.page - 1, fav_page.tmp_video_path, fav_page.tmp_audio_path, fav_page.video_path)
+            fav_page.downloaded = True
+        except FileExistsError:
+            logger.info(
+                "Video {} {} page {} already exists, skipped.",
+                fav_page.favorite_item.bvid,
+                fav_page.favorite_item.name,
+                fav_page.page,
+            )
+            fav_page.downloaded = True
+        except Exception as e:
+            errcode_status = {62002: MediaStatus.INVISIBLE, -404: MediaStatus.DELETED}
+            if not (isinstance(e, ResponseCodeException) and (status := errcode_status.get(e.code))):
+                logger.exception(
+                    "Failed to process video {} {} page {}.",
+                    fav_page.favorite_item.bvid,
+                    fav_page.favorite_item.name,
+                    fav_page.page,
+                )
+            else:
+                fav_page.status = status
+                logger.error(
+                    "Video {} {} page {} is not available, marked as {}.",
+                    fav_page.favorite_item.bvid,
+                    fav_page.favorite_item.name,
+                    fav_page.page,
+                    fav_page.status.text,
+                )
+    await fav_page.save()
+    logger.info(
+        "{} {} page {} has been processed.", fav_page.favorite_item.bvid, fav_page.favorite_item.name, fav_page.page
+    )
+
+
+async def get_video(v: video.Video, page_id: int, tmp_video_path: Path, tmp_audio_path: Path, video_path: Path) -> None:
+    """Download one page of a video, given temp video/audio paths and the target video path"""
+    if await aexists(video_path):
+        # The target video already exists, skip it
+        raise FileExistsError
+    await amakedirs(video_path.parent, exist_ok=True)
+    # Detect the streams of this page
+    detector = video.VideoDownloadURLDataDetecter(await v.get_download_url(page_index=page_id))
+    streams = detector.detect_best_streams(**settings.stream.model_dump())
+    if detector.check_flv_stream():
+        # flv streams are downloaded directly
+        await download_content(streams[0].url, tmp_video_path)
+        process = await create_subprocess_exec(
+            FFMPEG_COMMAND, "-i", tmp_video_path, video_path, stdout=PIPE, stderr=PIPE
+        )
+        stdout, stderr = await process.communicate()
+        tmp_video_path.unlink(missing_ok=True)
+    else:
+        # otherwise, download the video stream first
+        paths, tasks = ([tmp_video_path], [download_content(streams[0].url, tmp_video_path)])
+        if streams[1]:
+            # and the audio stream, if there is one
+            paths.append(tmp_audio_path)
+            tasks.append(download_content(streams[1].url, tmp_audio_path))
+        await asyncio.gather(*tasks)
+        process = await create_subprocess_exec(
+            FFMPEG_COMMAND,
+            *sum([["-i", path] for path in paths], []),
+            "-c",
+            "copy",
+            video_path,
+            stdout=PIPE,
+            stderr=PIPE,
+        )
+        stdout, stderr = await process.communicate()
+        for path in paths:
+            path.unlink(missing_ok=True)
+    if process.returncode != 0:
+        raise RuntimeError(
+            f"{FFMPEG_COMMAND} exited with non-zero code {process.returncode}."
+            f"\nstdout:\n{stdout.decode()}"
+            f"\nstderr:\n{stderr.decode()}"
+        )
+
+
+async def get_file(url: str, path: Path) -> None:
+    """A thin download wrapper for covers and similar files"""
+    if await aexists(path):
+        # The target file already exists, skip it
+        raise FileExistsError
+    await amakedirs(path.parent, exist_ok=True)
+    await download_content(url, path)
+
+
+async def get_subtitle(v: video.Video, page_id: int, subtitle_path: Path) -> None:
+    """Download the danmaku subtitle for one page of a video into the given subtitle file"""
+    if await aexists(subtitle_path):
+        # The target subtitle already exists, skip it
+        raise FileExistsError
+    await amakedirs(subtitle_path.parent, exist_ok=True)
+    await ass.make_ass_file_danmakus_protobuf(
+        v,
+        page_id,
+        str(subtitle_path.resolve()),
+        credential=credential,
+        font_name=settings.subtitle.font_name,
+        font_size=settings.subtitle.font_size,
+        alpha=settings.subtitle.alpha,
+        fly_time=settings.subtitle.fly_time,
+        static_time=settings.subtitle.static_time,
+    )
+
+
+async def get_nfo(nfo_path: Path, *, obj: Model, mode: NfoMode) -> None:
+    """Write the nfo for the given object and mode to the given path"""
+    if await aexists(nfo_path):
+        # The target nfo already exists, skip it
+        raise FileExistsError
+    await amakedirs(nfo_path.parent, exist_ok=True)
+    # Build a different nfo depending on the mode
+    nfo: NfoBase = None
+    match obj, mode:
+        case FavoriteItem(), NfoMode.MOVIE:
+            nfo = MovieInfo.from_favorite_item(obj)
+        case FavoriteItem(), NfoMode.TVSHOW:
+            nfo = TVShowInfo.from_favorite_item(obj)
+        case FavoriteItemPage(), NfoMode.EPISODE:
+            nfo = EpisodeInfo.from_favorite_item_page(obj)
+        case Upper(), NfoMode.UPPER:
+            nfo = UpperInfo.from_upper(obj)
+        case _:
+            raise ValueError
+    await nfo.to_file(nfo_path)
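get_nfo dispatches on the `(obj, mode)` pair with structural pattern matching. A self-contained illustration of that dispatch style, using stand-in classes instead of the real models (all names here are hypothetical):

```python
from enum import IntEnum

class Mode(IntEnum):
    MOVIE = 1
    EPISODE = 3

class Item: ...
class Page: ...

def pick(obj, mode: Mode) -> str:
    # `Item()` in a case pattern matches any instance of Item, like isinstance()
    match obj, mode:
        case Item(), Mode.MOVIE:
            return "movie.nfo"
        case Page(), Mode.EPISODE:
            return "episode.nfo"
        case _:
            raise ValueError(f"unsupported combination: {type(obj).__name__}, {mode!r}")

print(pick(Item(), Mode.MOVIE))  # movie.nfo
```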

pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "bili-sync"
-version = "1.1.6"
+version = "1.1.7"
 description = ""
 authors = ["amtoaer <amtoaer@gmail.com>"]
 license = "GPL-3.0"
@@ -11,24 +11,22 @@ python = "^3.11"
 aerich = "0.7.2"
 aiofiles = "23.2.1"
 bilibili-api-python = {git = "https://github.com/Nemo2011/bilibili-api.git", rev = "16.2.0b2"}
-dataclasses-json = "0.6.2"
 loguru = "0.7.2"
 pydantic = "2.5.3"
 tortoise-orm = "0.20.0"
 uvloop = "0.19.0"
 [tool.poetry.group.dev.dependencies]
-black = "23.11.0"
 bump-my-version = "0.15.4"
 ipython = "8.17.2"
-ruff = "0.1.6"
+ruff = "0.2.2"
 [tool.black]
 line-length = 100
 [tool.ruff]
-line-length = 100
-select = [
+line-length = 120
+lint.select = [
     "F", # https://beta.ruff.rs/docs/rules/#pyflakes-f
     "E",
     "W", # https://beta.ruff.rs/docs/rules/#pycodestyle-e-w
@@ -52,9 +50,11 @@
     "NPY", # https://beta.ruff.rs/docs/rules/#numpy-specific-rules-npy
     "RUF100", # https://beta.ruff.rs/docs/configuration/#automatic-noqa-management
 ]
-ignore = [
+lint.ignore = [
     "A003", # Class attribute `id` is shadowing a Python builtin
 ]
+lint.isort.split-on-trailing-comma = false
+format.skip-magic-trailing-comma = true
 exclude = ["migrations"]
 [tool.aerich]
@@ -68,7 +68,7 @@ message = "chore: bump version from {current_version} to {new_version}"
 tag = true
 tag_name = "{new_version}"
 tag_message = ""
-current_version = "1.1.6"
+current_version = "1.1.7"
 parse = "(?P<major>\\d+)\\.(?P<minor>\\d+)\\.(?P<patch>\\d+)"
 [[tool.bumpversion.files]]
@@ -81,6 +81,7 @@ filename = "pyproject.toml"
 [build-system]
 requires = ["poetry-core"]
 build-backend = "poetry.core.masonry.api"

settings.py

@@ -1,11 +1,12 @@
 from pathlib import Path
-from bilibili_api.video import VideoCodecs
+from bilibili_api.video import AudioQuality, VideoCodecs, VideoQuality
-from pydantic import BaseModel, Field, field_validator
+from pydantic import BaseModel, Field, field_validator, root_validator
 from pydantic_core import PydanticCustomError
 from typing_extensions import Annotated
 from constants import DEFAULT_CONFIG_PATH
+from utils import amakedirs, aopen
 class SubtitleConfig(BaseModel):
@@ -16,6 +17,26 @@ class SubtitleConfig(BaseModel):
     static_time: float = 10  # how long static danmaku stay on screen
+class StreamConfig(BaseModel):
+    video_max_quality: VideoQuality = VideoQuality._8K
+    audio_max_quality: AudioQuality = AudioQuality._192K
+    video_min_quality: VideoQuality = VideoQuality._360P
+    audio_min_quality: AudioQuality = AudioQuality._64K
+    codecs: list[VideoCodecs] = Field(
+        default_factory=lambda: [VideoCodecs.AV1, VideoCodecs.AVC, VideoCodecs.HEV], min_length=1
+    )
+    no_dolby_video: bool = False
+    no_dolby_audio: bool = False
+    no_hdr: bool = False
+    no_hires: bool = False
+    @field_validator("codecs", mode="after")
+    def codec_validator(cls, codecs: list[VideoCodecs]) -> list[VideoCodecs]:
+        if len(codecs) != len(set(codecs)):
+            raise PydanticCustomError("unique_list", "List must be unique")
+        return codecs
 class Config(BaseModel):
     sessdata: Annotated[str, Field(min_length=1)] = ""
     bili_jct: Annotated[str, Field(min_length=1)] = ""
@@ -25,20 +46,15 @@ class Config(BaseModel):
     interval: int = 20
     path_mapper: dict[int, str] = Field(default_factory=dict)
     subtitle: SubtitleConfig = Field(default_factory=SubtitleConfig)
-    codec: list[VideoCodecs] = Field(
-        default_factory=lambda: [
-            VideoCodecs.AV1,
-            VideoCodecs.AVC,
-            VideoCodecs.HEV,
-        ],
-        min_length=1,
-    )
+    stream: StreamConfig = Field(default_factory=StreamConfig)
+    paginated_video: bool = False
-    @field_validator("codec", mode="after")
-    def codec_validator(cls, codecs: list[VideoCodecs]) -> list[VideoCodecs]:
-        if len(codecs) != len(set(codecs)):
-            raise PydanticCustomError("unique_list", "List must be unique")
-        return codecs
+    @root_validator(pre=True)
+    def migrate(cls, values: dict) -> dict:
+        # Migrate the old top-level codec option into stream.codecs
+        if "codec" in values and "stream" not in values:
+            values["stream"] = {"codecs": values.pop("codec")}
+        return values
     @staticmethod
     def load(path: Path | None = None) -> "Config":
@@ -61,6 +77,17 @@ class Config(BaseModel):
         except Exception as e:
             raise RuntimeError(f"Failed to save config file: {path}") from e
+    async def asave(self, path: Path | None = None) -> "Config":
+        if not path:
+            path = DEFAULT_CONFIG_PATH
+        try:
+            await amakedirs(path.parent, exist_ok=True)
+            async with aopen(path, "w") as f:
+                await f.write(Config.model_dump_json(self, indent=4))
+            return self
+        except Exception as e:
+            raise RuntimeError(f"Failed to save config file: {path}") from e
 def init_settings() -> Config:
     if not DEFAULT_CONFIG_PATH.exists():
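The pre-mode root validator keeps old config files loadable after the rename. A minimal check of that migration, assuming `Config` is importable from the repo's settings module:

```python
from bilibili_api.video import VideoCodecs
from settings import Config

# An old-style config with the top-level "codec" key...
old = {"codec": [VideoCodecs.HEV]}
cfg = Config.model_validate(old)
# ...comes out with the key relocated into the new stream section,
# while the other stream options fall back to their defaults.
assert cfg.stream.codecs == [VideoCodecs.HEV]
```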

utils.py

@@ -27,9 +27,7 @@ async def amakedirs(path: Path, exist_ok=False) -> None:
     await makedirs(path, exist_ok=exist_ok)
-def aopen(
-    path: Path, mode: str = "r", **kwargs
-) -> AiofilesContextManager[None, None, AsyncTextIOWrapper]:
+def aopen(path: Path, mode: str = "r", **kwargs) -> AiofilesContextManager[None, None, AsyncTextIOWrapper]:
     return aiofiles.open(path, mode, **kwargs)

version.py

@@ -1 +1 @@
-VERSION = "1.1.6"
+VERSION = "1.1.7"