Mirror of https://github.com/amtoaer/bili-sync.git
Synced 2026-05-09 11:22:41 +08:00

Compare commits
9 Commits
| SHA1 |
|---|
| e25ed452b4 |
| 2f36220582 |
| f6a5238b6e |
| ec5776a0ed |
| c21da25c6f |
| bde142a896 |
| af8cd0d819 |
| a4c362d8ab |
| 1dd760d445 |
Makefile (16 changed lines)

@@ -1,4 +1,4 @@
-.PHONY: install fmt start-daemon start-once db-init db-migrate db-upgrade sync-conf
+.PHONY: install fmt start-daemon start-once db-init db-migrate db-upgrade sync-conf release

 install:
 	@echo "Installing dependencies..."
@@ -6,8 +6,8 @@ install:

 fmt:
 	@echo "Formatting..."
-	@poetry run black .
-	@poetry run ruff --fix .
+	@poetry run ruff format .
+	@poetry run ruff check --fix .

 start-daemon:
 	@poetry run python entry.py
@@ -28,4 +28,12 @@ sync-conf:
 	@echo "Syncing config..."
 	@cp ${CONFIG_SRC} ./config/
 	@cp ${DB_SRC} ./data/
 	@echo "Done."
+
+release:
+	@echo "Releasing..."
+	@git checkout main
+	@bump-my-version bump patch
+	@git push origin main
+	@git push origin --tags
+	@echo "Done."
README.md

@@ -56,7 +56,7 @@ services:
   bili-sync:
     image: amtoaer/bili-sync:latest
     user: 1000:1000 # Which user to run as; defaults to root if omitted. Setting this is recommended.
-    tty: true # Add this line to make the logs colored
+    tty: true # Add this line so terminals that support it show colored logs (remove it if the logs look garbled)
     volumes:
       - /home/amtoaer/Videos/Bilibilis/:/Videos/Bilibilis/ # video files
       - /home/amtoaer/.config/nas/bili-sync/config/:/app/config/ # config files
@@ -129,7 +129,7 @@ services:
 - [x] Credential authentication
 - [x] Best-quality stream selection
 - [x] Video download
-- [x] Support for parallel download
+- [x] Support for concurrent download
 - [x] Support for running as a daemon
 - [x] Generate nfo and poster files, so videos import into Emby as single episodes
 - [x] Support favorite-list pagination, downloading all historical videos
commands.py (73 changed lines)

@@ -13,30 +13,32 @@ from utils import aexists, aremove

 async def recheck():
     """Refresh the status of videos in the database: if a file turns out to be missing, mark the video as not downloaded so the next run re-downloads it. Call this after manually deleting files."""

+    async def is_ok(item: FavoriteItem) -> bool:
+        if len(item.pages):
+            # A multi-page video only counts as present if every page exists
+            return all(await asyncio.gather(*[aexists(page.video_path) for page in item.pages]))
+        return await aexists(item.video_path)
+
     items = await FavoriteItem.filter(
-        type=MediaType.VIDEO,
-        status=MediaStatus.NORMAL,
-        downloaded=True,
-    )
-    exists = await asyncio.gather(*[aexists(item.video_path) for item in items])
-    for item, exist in zip(items, exists):
-        if isinstance(exist, Exception):
-            logger.error(
-                "Error when checking file {} {}: {}",
-                item.bvid,
-                item.name,
-                exist,
-            )
+        type=MediaType.VIDEO, status=MediaStatus.NORMAL, downloaded=True
+    ).prefetch_related("pages")
+    items_to_update = []
+    for item in items:
+        for page in item.pages:
+            # Likely a tortoise bug: prefetch_related does not populate the reverse reference, so set it manually
+            page.favorite_item = item
+    items_ok = await asyncio.gather(*[is_ok(item) for item in items], return_exceptions=True)
+    for item, ok in zip(items, items_ok):
+        if isinstance(ok, Exception):
+            logger.error("Error when checking file {} {}: {}.", item.bvid, item.name, ok)
             continue
-        if not exist:
-            logger.info(
-                "File {} {} not exists, mark as not downloaded.",
-                item.bvid,
-                item.name,
-            )
+        if not ok:
+            logger.info("Lack of file detected for {} {}, mark as not downloaded.", item.bvid, item.name)
             item.downloaded = False
+            items_to_update.append(item)
     logger.info("Updating database...")
-    await FavoriteItem.bulk_update(items, fields=["downloaded"])
+    await FavoriteItem.bulk_update(items_to_update, fields=["downloaded"], batch_size=300)
     logger.info("Database updated.")
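The rewritten loop relies on one property of asyncio.gather: with return_exceptions=True it preserves input order and hands exceptions back as values, so zip can pair each result with its item and failures can be skipped per item. A minimal standalone sketch of that pattern (the names here are illustrative, not from the repo):

```python
import asyncio


async def check(n: int) -> bool:
    # Stand-in for aexists()-style checks; one input fails on purpose.
    if n == 2:
        raise OSError("stat failed")
    return n % 2 == 0


async def main() -> None:
    items = [1, 2, 3, 4]
    # Order of results matches order of inputs, exceptions become values.
    results = await asyncio.gather(*[check(i) for i in items], return_exceptions=True)
    for item, ok in zip(items, results):
        if isinstance(ok, Exception):
            print(f"{item}: error ({ok})")
        elif not ok:
            print(f"{item}: would be marked as not downloaded")


asyncio.run(main())
```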
@@ -52,10 +54,7 @@ async def _refresh_favorite_item_info(
     items = await FavoriteItem.filter(downloaded=True).prefetch_related("upper")
     if force:
         # On a forced refresh, delete all existing content first
-        await asyncio.gather(
-            *[aremove(path) for item in items for path in path_getter(item)],
-            return_exceptions=True,
-        )
+        await asyncio.gather(*[aremove(path) for item in items for path in path_getter(item)], return_exceptions=True)
     await asyncio.gather(
         *[
             process_favorite_item(
@@ -72,30 +71,14 @@ async def _refresh_favorite_item_info(
     )


-refresh_nfo = functools.partial(
-    _refresh_favorite_item_info, lambda item: [item.nfo_path], process_nfo=True
-)
+refresh_nfo = functools.partial(_refresh_favorite_item_info, lambda item: [item.nfo_path], process_nfo=True)

-refresh_poster = functools.partial(
-    _refresh_favorite_item_info,
-    lambda item: [item.poster_path],
-    process_poster=True,
-)
+refresh_poster = functools.partial(_refresh_favorite_item_info, lambda item: [item.poster_path], process_poster=True)

-refresh_video = functools.partial(
-    _refresh_favorite_item_info,
-    lambda item: [item.video_path],
-    process_video=True,
-)
+refresh_video = functools.partial(_refresh_favorite_item_info, lambda item: [item.video_path], process_video=True)

-refresh_upper = functools.partial(
-    _refresh_favorite_item_info,
-    lambda item: item.upper_path,
-    process_upper=True,
-)
+refresh_upper = functools.partial(_refresh_favorite_item_info, lambda item: item.upper_path, process_upper=True)

 refresh_subtitle = functools.partial(
-    _refresh_favorite_item_info,
-    lambda item: [item.subtitle_path],
-    process_subtitle=True,
+    _refresh_favorite_item_info, lambda item: [item.subtitle_path], process_subtitle=True
 )
constants.py (26 changed lines)

@@ -4,11 +4,7 @@ from pathlib import Path


 def get_base(dir_name: str) -> Path:
-    path = (
-        Path(base)
-        if (base := os.getenv(f"{dir_name.upper()}_PATH"))
-        else Path(__file__).parent / dir_name
-    )
+    path = Path(base) if (base := os.getenv(f"{dir_name.upper()}_PATH")) else Path(__file__).parent / dir_name
     path.mkdir(parents=True, exist_ok=True)
     return path

@@ -37,20 +33,18 @@ class MediaStatus(IntEnum):

     @property
     def text(self) -> str:
-        return {
-            MediaStatus.NORMAL: "normal",
-            MediaStatus.INVISIBLE: "invisible",
-            MediaStatus.DELETED: "deleted",
-        }[self]
+        return {MediaStatus.NORMAL: "normal", MediaStatus.INVISIBLE: "invisible", MediaStatus.DELETED: "deleted"}[self]


+class NfoMode(IntEnum):
+    MOVIE = 1
+    TVSHOW = 2
+    EPISODE = 3
+    UPPER = 4
+
+
 TORTOISE_ORM = {
     "connections": {"default": f"sqlite://{DEFAULT_DATABASE_PATH}"},
-    "apps": {
-        "models": {
-            "models": ["models", "aerich.models"],
-            "default_connection": "default",
-        },
-    },
+    "apps": {"models": {"models": ["models", "aerich.models"], "default_connection": "default"}},
     "use_tz": True,
 }
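The collapsed get_base one-liner keeps the same behavior: a `<DIRNAME>_PATH` environment variable overrides the default directory next to the module. A small usage sketch of that walrus pattern (the environment value here is made up for illustration):

```python
import os
from pathlib import Path


def get_base(dir_name: str) -> Path:
    # If THUMB_PATH is set (for dir_name="thumb"), use it; otherwise fall back to ./thumb next to this file.
    path = Path(base) if (base := os.getenv(f"{dir_name.upper()}_PATH")) else Path(__file__).parent / dir_name
    path.mkdir(parents=True, exist_ok=True)
    return path


os.environ["THUMB_PATH"] = "/tmp/thumbs"  # illustrative override
print(get_base("thumb"))  # /tmp/thumbs
```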
credential.py

@@ -6,28 +6,18 @@ from settings import settings
 class PersistedCredential(Credential):
     def __init__(self) -> None:
         super().__init__(
-            settings.sessdata,
-            settings.bili_jct,
-            settings.buvid3,
-            settings.dedeuserid,
-            settings.ac_time_value,
+            settings.sessdata, settings.bili_jct, settings.buvid3, settings.dedeuserid, settings.ac_time_value
         )

     async def refresh(self) -> None:
         await super().refresh()
-        (
-            settings.sessdata,
-            settings.bili_jct,
-            settings.dedeuserid,
-            settings.ac_time_value,
-        ) = (
+        (settings.sessdata, settings.bili_jct, settings.dedeuserid, settings.ac_time_value) = (
             self.sessdata,
             self.bili_jct,
             self.dedeuserid,
             self.ac_time_value,
         )
-        # Use a synchronous call for now
-        settings.save()
+        await settings.asave()


 credential = PersistedCredential()
entry.py (9 changed lines)

@@ -6,14 +6,7 @@ import sys
 import uvloop
 from loguru import logger

-from commands import (
-    recheck,
-    refresh_nfo,
-    refresh_poster,
-    refresh_subtitle,
-    refresh_upper,
-    refresh_video,
-)
+from commands import recheck, refresh_nfo, refresh_poster, refresh_subtitle, refresh_upper, refresh_video
 from models import init_model
 from processor import cleanup, process
 from settings import settings
migrations/models/4_20240224020723_update.py (21 lines, new file)

@@ -0,0 +1,21 @@
+from tortoise import BaseDBAsyncClient
+
+
+async def upgrade(db: BaseDBAsyncClient) -> str:
+    return """
+        CREATE TABLE IF NOT EXISTS "favoriteitempage" (
+    "id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+    "cid" INT NOT NULL,
+    "page" INT NOT NULL,
+    "name" VARCHAR(255) NOT NULL,
+    "image" TEXT NOT NULL,
+    "status" SMALLINT NOT NULL DEFAULT 1 /* NORMAL: 1\nINVISIBLE: 2\nDELETED: 3 */,
+    "downloaded" INT NOT NULL DEFAULT 0,
+    "favorite_item_id" INT NOT NULL REFERENCES "favoriteitem" ("id") ON DELETE CASCADE,
+    CONSTRAINT "uid_favoriteite_favorit_c3b50e" UNIQUE ("favorite_item_id", "page")
+) /* Pages of a favorite item */;"""
+
+
+async def downgrade(db: BaseDBAsyncClient) -> str:
+    return """
+        DROP TABLE IF EXISTS "favoriteitempage";"""
models.py (125 changed lines)

@@ -3,17 +3,11 @@ from asyncio import create_subprocess_exec
 from pathlib import Path

 from tortoise import Tortoise, fields
+from tortoise.fields import Field
 from tortoise.models import Model

-from constants import (
-    DEFAULT_THUMB_PATH,
-    MIGRATE_COMMAND,
-    TORTOISE_ORM,
-    MediaStatus,
-    MediaType,
-)
+from constants import DEFAULT_THUMB_PATH, MIGRATE_COMMAND, TORTOISE_ORM, MediaStatus, MediaType
 from settings import settings
 from utils import aopen
 from version import VERSION

@@ -47,22 +41,6 @@ class Upper(Model):
     def meta_path(self) -> Path:
         return DEFAULT_THUMB_PATH / str(self.mid)[0] / f"{self.mid}" / "person.nfo"

-    async def save_metadata(self):
-        async with aopen(self.meta_path, "w") as f:
-            await f.write(
-                f"""
-<?xml version="1.0" encoding="utf-8" standalone="yes"?>
-<person>
-    <plot />
-    <outline />
-    <lockdata>false</lockdata>
-    <dateadded>{self.created_at.strftime("%Y-%m-%d %H:%M:%S")}</dateadded>
-    <title>{self.mid}</title>
-    <sorttitle>{self.mid}</sorttitle>
-</person>
-""".strip()
-            )
-

 class FavoriteItem(Model):
     """Favorite item"""
@@ -75,8 +53,8 @@ class FavoriteItem(Model):
     desc = fields.TextField()
     cover = fields.TextField()
     tags = fields.JSONField(null=True)
-    favorite_list = fields.ForeignKeyField("models.FavoriteList", related_name="items")
-    upper = fields.ForeignKeyField("models.Upper", related_name="uploads")
+    favorite_list: Field[FavoriteList] = fields.ForeignKeyField("models.FavoriteList", related_name="items")
+    upper: Field[Upper] = fields.ForeignKeyField("models.Upper", related_name="uploads")
     ctime = fields.DatetimeField()
     pubtime = fields.DatetimeField()
     fav_time = fields.DatetimeField()
@@ -113,15 +91,92 @@ class FavoriteItem(Model):

     @property
     def upper_path(self) -> list[Path]:
-        return [
-            self.upper.thumb_path,
-            self.upper.meta_path,
-        ]
+        return [self.upper.thumb_path, self.upper.meta_path]

     @property
     def subtitle_path(self) -> Path:
         return Path(settings.path_mapper[self.favorite_list_id]) / f"{self.bvid}.zh-CN.default.ass"

+    @property
+    def tvshow_nfo_path(self) -> Path:
+        """Used for multi-page videos"""
+        return Path(settings.path_mapper[self.favorite_list_id]) / self.bvid / "tvshow.nfo"
+
+    @property
+    def tvshow_poster_path(self) -> Path:
+        """Used for multi-page videos"""
+        return Path(settings.path_mapper[self.favorite_list_id]) / self.bvid / "poster.jpg"
+
+
+class FavoriteItemPage(Model):
+    """Pages of a favorite item"""
+
+    id = fields.IntField(pk=True)
+    favorite_item: Field[FavoriteItem] = fields.ForeignKeyField("models.FavoriteItem", related_name="pages")
+    cid = fields.IntField()
+    page = fields.IntField()
+    name = fields.CharField(max_length=255)
+    image = fields.TextField()
+    status = fields.IntEnumField(enum_type=MediaStatus, default=MediaStatus.NORMAL)
+    downloaded = fields.BooleanField(default=False)
+
+    class Meta:
+        unique_together = (("favorite_item_id", "page"),)
+
+    @property
+    def tmp_video_path(self) -> Path:
+        return (
+            Path(settings.path_mapper[self.favorite_item.favorite_list_id])
+            / self.favorite_item.bvid
+            / "Season 1"
+            / f"tmp_{self.favorite_item.bvid} - S01E{f'{self.page:02d}'}_video"
+        )
+
+    @property
+    def tmp_audio_path(self) -> Path:
+        return (
+            Path(settings.path_mapper[self.favorite_item.favorite_list_id])
+            / self.favorite_item.bvid
+            / "Season 1"
+            / f"tmp_{self.favorite_item.bvid} - S01E{f'{self.page:02d}'}_audio"
+        )
+
+    @property
+    def video_path(self) -> Path:
+        return (
+            Path(settings.path_mapper[self.favorite_item.favorite_list_id])
+            / self.favorite_item.bvid
+            / "Season 1"
+            / f"{self.favorite_item.bvid} - S01E{f'{self.page:02d}'}.mp4"
+        )
+
+    @property
+    def nfo_path(self) -> Path:
+        return (
+            Path(settings.path_mapper[self.favorite_item.favorite_list_id])
+            / self.favorite_item.bvid
+            / "Season 1"
+            / f"{self.favorite_item.bvid} - S01E{f'{self.page:02d}'}.nfo"
+        )
+
+    @property
+    def poster_path(self) -> Path:
+        return (
+            Path(settings.path_mapper[self.favorite_item.favorite_list_id])
+            / self.favorite_item.bvid
+            / "Season 1"
+            / f"{self.favorite_item.bvid} - S01E{f'{self.page:02d}'}-thumb.jpg"
+        )
+
+    @property
+    def subtitle_path(self) -> Path:
+        return (
+            Path(settings.path_mapper[self.favorite_item.favorite_list_id])
+            / self.favorite_item.bvid
+            / "Season 1"
+            / f"{self.favorite_item.bvid} - S01E{f'{self.page:02d}'}.zh-CN.default.ass"
+        )
+

 class Program(Model):
     id = fields.IntField(pk=True)
@@ -131,17 +186,11 @@ class Program(Model):

 async def init_model() -> None:
     await Tortoise.init(config=TORTOISE_ORM)
     migrate_commands = (
-        [MIGRATE_COMMAND, "upgrade"]
-        if os.getenv("BILI_IN_DOCKER")
-        else ["poetry", "run", MIGRATE_COMMAND, "upgrade"]
+        [MIGRATE_COMMAND, "upgrade"] if os.getenv("BILI_IN_DOCKER") else ["poetry", "run", MIGRATE_COMMAND, "upgrade"]
     )
     process = await create_subprocess_exec(*migrate_commands)
     await process.communicate()
-    program, created = await Program.get_or_create(
-        defaults={
-            "version": VERSION,
-        }
-    )
+    program, created = await Program.get_or_create(defaults={"version": VERSION})
     if created or program.version != VERSION:
         # Put migration logic for new versions here
         pass
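All the FavoriteItemPage path properties build the same Emby-style layout, {mapped dir}/{bvid}/Season 1/{bvid} - S01E{page:02d}.{suffix}; note the nested f-string `{f'{self.page:02d}'}` is equivalent to a plain `{self.page:02d}`. A quick sketch with hypothetical values (not repo code) of what these properties resolve to:

```python
# Hypothetical values, just to show the layout the properties above produce.
from pathlib import Path

mapped_dir = Path("/Videos/Bilibilis")  # stands in for settings.path_mapper[favorite_list_id]
bvid, page = "BV1xx411c7mD", 2

video = mapped_dir / bvid / "Season 1" / f"{bvid} - S01E{page:02d}.mp4"
poster = mapped_dir / bvid / "Season 1" / f"{bvid} - S01E{page:02d}-thumb.jpg"
print(video)   # /Videos/Bilibilis/BV1xx411c7mD/Season 1/BV1xx411c7mD - S01E02.mp4
print(poster)  # /Videos/Bilibilis/BV1xx411c7mD/Season 1/BV1xx411c7mD - S01E02-thumb.jpg
```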
nfo.py (140 changed lines)

@@ -1,28 +1,73 @@
 import datetime
+from abc import abstractmethod
 from dataclasses import dataclass
 from pathlib import Path

+from models import FavoriteItem, FavoriteItemPage, Upper
 from utils import aopen


 @dataclass
-class Actor:
+class Base:
+    """Base class with a small utility method"""
+
+    @abstractmethod
+    def to_xml(self) -> str:
+        ...
+
+    async def to_file(self, path: Path) -> None:
+        """Write the XML to a file"""
+        async with aopen(path, "w", encoding="utf-8") as f:
+            await f.write(self.to_xml())
+
+
+@dataclass
+class EpisodeInfo(Base):
+    """Per-episode info for a single page"""
+
+    title: str
+    season: int
+    episode: int
+
+    @staticmethod
+    def from_favorite_item_page(page: FavoriteItemPage) -> "EpisodeInfo":
+        return EpisodeInfo(title=page.name, season=1, episode=page.page)
+
+    def to_xml(self) -> str:
+        return f"""
+<?xml version="1.0" encoding="utf-8" standalone="yes"?>
+<episodedetails>
+  <plot />
+  <outline />
+  <title>{self.title}</title>
+  <season>{self.season}</season>
+  <episode>{self.episode}</episode>
+</episodedetails>
+""".strip()
+
+
+@dataclass
+class Actor(Base):
     name: str
     role: str

+    @staticmethod
+    def from_upper(upper: Upper) -> "Actor":
+        return Actor(name=upper.mid, role=upper.name)
+
     def to_xml(self) -> str:
         return f"""
 <actor>
     <name>{self.name}</name>
     <role>{self.role}</role>
 </actor>
-""".strip(
-            "\n"
-        )
+""".strip()


 @dataclass
-class EpisodeInfo:
+class MovieInfo(Base):
+    """Video info for a single-page video"""
+
     title: str
     plot: str
     tags: list[str]
@@ -30,20 +75,23 @@ class EpisodeInfo:
     bvid: str
     aired: datetime.datetime

-    async def write_nfo(self, path: Path) -> None:
-        async with aopen(path, "w", encoding="utf-8") as f:
-            await f.write(self.to_xml())
+    @staticmethod
+    def from_favorite_item(fav_item: FavoriteItem) -> "MovieInfo":
+        return MovieInfo(
+            title=fav_item.name,
+            plot=fav_item.desc,
+            actor=[Actor.from_upper(fav_item.upper)],
+            tags=fav_item.tags,
+            bvid=fav_item.bvid,
+            aired=fav_item.ctime,
+        )

     def to_xml(self) -> str:
         actor = "\n".join(_.to_xml() for _ in self.actor)
-        tags = (
-            "\n".join(f"  <genre>{_}</genre>" for _ in self.tags)
-            if isinstance(self.tags, list)
-            else ""
-        )
+        tags = "\n".join(f"  <genre>{_}</genre>" for _ in self.tags) if isinstance(self.tags, list) else ""
         return f"""
 <?xml version="1.0" encoding="utf-8" standalone="yes"?>
-<episodedetails>
+<movie>
   <plot><![CDATA[{self.plot}]]></plot>
   <outline />
   <title>{self.title}</title>
@@ -52,7 +100,65 @@ class EpisodeInfo:
 {tags}
   <uniqueid type="bilibili">{self.bvid}</uniqueid>
   <aired>{self.aired.strftime("%Y-%m-%d")}</aired>
-</episodedetails>
-""".strip(
-            "\n"
-        )
+</movie>
+""".strip()
+
+
+@dataclass
+class TVShowInfo(Base):
+    title: str
+    plot: str
+    tags: list[str]
+    actor: list[Actor]
+    bvid: str
+    aired: datetime.datetime
+
+    @staticmethod
+    def from_favorite_item(fav_item: FavoriteItem) -> "TVShowInfo":
+        return TVShowInfo(
+            title=fav_item.name,
+            plot=fav_item.desc,
+            actor=[Actor.from_upper(fav_item.upper)],
+            tags=fav_item.tags,
+            bvid=fav_item.bvid,
+            aired=fav_item.ctime,
+        )
+
+    def to_xml(self) -> str:
+        actor = "\n".join(_.to_xml() for _ in self.actor)
+        tags = "\n".join(f"  <genre>{_}</genre>" for _ in self.tags) if isinstance(self.tags, list) else ""
+        return f"""
+<?xml version="1.0" encoding="utf-8" standalone="yes"?>
+<tvshow>
+  <plot><![CDATA[{self.plot}]]></plot>
+  <outline />
+  <title>{self.title}</title>
+{actor}
+  <year>{self.aired.year}</year>
+{tags}
+  <uniqueid type="bilibili">{self.bvid}</uniqueid>
+  <aired>{self.aired.strftime("%Y-%m-%d")}</aired>
+</tvshow>
+""".strip()
+
+
+@dataclass
+class UpperInfo(Base):
+    mid: int
+    created_at: datetime.datetime
+
+    def from_upper(upper: Upper) -> "UpperInfo":
+        return UpperInfo(mid=upper.mid, created_at=upper.created_at)
+
+    def to_xml(self) -> str:
+        return f"""
+<?xml version="1.0" encoding="utf-8" standalone="yes"?>
+<person>
+  <plot />
+  <outline />
+  <lockdata>false</lockdata>
+  <dateadded>{self.created_at.strftime("%Y-%m-%d %H:%M:%S")}</dateadded>
+  <title>{self.mid}</title>
+  <sorttitle>{self.mid}</sorttitle>
+</person>
+""".strip()
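One subtlety in the new Base class: @abstractmethod only enforces anything when the class uses ABCMeta, and Base is a plain dataclass, so here the decorator is effectively documentation rather than a guard. A minimal illustration of the difference (standalone sketch, not repo code):

```python
from abc import ABC, abstractmethod


class Loose:  # like nfo.Base: plain class, the decorator is advisory only
    @abstractmethod
    def to_xml(self) -> str: ...


class Strict(ABC):  # with ABC, instantiating without an override fails
    @abstractmethod
    def to_xml(self) -> str: ...


Loose()  # works: nothing enforces the override
try:
    Strict()
except TypeError as e:
    print(e)  # Can't instantiate abstract class Strict ...
```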
poetry.lock (176 changed lines, generated)

@@ -317,46 +317,6 @@ url = "https://github.com/Nemo2011/bilibili-api.git"
 reference = "16.2.0b2"
 resolved_reference = "d2e53b1f993e4e6777849a232ef076b73ee8ca7c"

-[[package]]
-name = "black"
-version = "23.11.0"
-description = "The uncompromising code formatter."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "black-23.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dbea0bb8575c6b6303cc65017b46351dc5953eea5c0a59d7b7e3a2d2f433a911"},
-    {file = "black-23.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:412f56bab20ac85927f3a959230331de5614aecda1ede14b373083f62ec24e6f"},
-    {file = "black-23.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d136ef5b418c81660ad847efe0e55c58c8208b77a57a28a503a5f345ccf01394"},
-    {file = "black-23.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:6c1cac07e64433f646a9a838cdc00c9768b3c362805afc3fce341af0e6a9ae9f"},
-    {file = "black-23.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cf57719e581cfd48c4efe28543fea3d139c6b6f1238b3f0102a9c73992cbb479"},
-    {file = "black-23.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:698c1e0d5c43354ec5d6f4d914d0d553a9ada56c85415700b81dc90125aac244"},
-    {file = "black-23.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:760415ccc20f9e8747084169110ef75d545f3b0932ee21368f63ac0fee86b221"},
-    {file = "black-23.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:58e5f4d08a205b11800332920e285bd25e1a75c54953e05502052738fe16b3b5"},
-    {file = "black-23.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:45aa1d4675964946e53ab81aeec7a37613c1cb71647b5394779e6efb79d6d187"},
-    {file = "black-23.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c44b7211a3a0570cc097e81135faa5f261264f4dfaa22bd5ee2875a4e773bd6"},
-    {file = "black-23.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a9acad1451632021ee0d146c8765782a0c3846e0e0ea46659d7c4f89d9b212b"},
-    {file = "black-23.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:fc7f6a44d52747e65a02558e1d807c82df1d66ffa80a601862040a43ec2e3142"},
-    {file = "black-23.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7f622b6822f02bfaf2a5cd31fdb7cd86fcf33dab6ced5185c35f5db98260b055"},
-    {file = "black-23.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:250d7e60f323fcfc8ea6c800d5eba12f7967400eb6c2d21ae85ad31c204fb1f4"},
-    {file = "black-23.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5133f5507007ba08d8b7b263c7aa0f931af5ba88a29beacc4b2dc23fcefe9c06"},
-    {file = "black-23.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:421f3e44aa67138ab1b9bfbc22ee3780b22fa5b291e4db8ab7eee95200726b07"},
-    {file = "black-23.11.0-py3-none-any.whl", hash = "sha256:54caaa703227c6e0c87b76326d0862184729a69b73d3b7305b6288e1d830067e"},
-    {file = "black-23.11.0.tar.gz", hash = "sha256:4c68855825ff432d197229846f971bc4d6666ce90492e5b02013bcaca4d9ab05"},
-]
-
-[package.dependencies]
-click = ">=8.0.0"
-mypy-extensions = ">=0.4.3"
-packaging = ">=22.0"
-pathspec = ">=0.9.0"
-platformdirs = ">=2"
-
-[package.extras]
-colorama = ["colorama (>=0.4.3)"]
-d = ["aiohttp (>=3.7.4)"]
-jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
-uvloop = ["uvloop (>=0.15.2)"]
-
 [[package]]
 name = "brotli"
 version = "1.1.0"
@@ -609,21 +569,6 @@ files = [
     {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
 ]

-[[package]]
-name = "dataclasses-json"
-version = "0.6.2"
-description = "Easily serialize dataclasses to and from JSON."
-optional = false
-python-versions = ">=3.7,<4.0"
-files = [
-    {file = "dataclasses_json-0.6.2-py3-none-any.whl", hash = "sha256:71816ced3d0f55a2c5bc1a813ace1b8d4234e79a08744269a7cf84d6f7c06e99"},
-    {file = "dataclasses_json-0.6.2.tar.gz", hash = "sha256:1b934c1bd63e775880946b8361a902d7de86e894bab8098eab27c010f95724d1"},
-]
-
-[package.dependencies]
-marshmallow = ">=3.18.0,<4.0.0"
-typing-inspect = ">=0.4.0,<1"
-
 [[package]]
 name = "decorator"
 version = "5.1.1"
@@ -1045,26 +990,6 @@ profiling = ["gprof2dot"]
 rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
 testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]

-[[package]]
-name = "marshmallow"
-version = "3.20.1"
-description = "A lightweight library for converting complex datatypes to and from native Python datatypes."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "marshmallow-3.20.1-py3-none-any.whl", hash = "sha256:684939db93e80ad3561392f47be0230743131560a41c5110684c16e21ade0a5c"},
-    {file = "marshmallow-3.20.1.tar.gz", hash = "sha256:5d2371bbe42000f2b3fb5eaa065224df7d8f8597bc19a1bbfa5bfe7fba8da889"},
-]
-
-[package.dependencies]
-packaging = ">=17.0"
-
-[package.extras]
-dev = ["flake8 (==6.0.0)", "flake8-bugbear (==23.7.10)", "mypy (==1.4.1)", "pre-commit (>=2.4,<4.0)", "pytest", "pytz", "simplejson", "tox"]
-docs = ["alabaster (==0.7.13)", "autodocsumm (==0.2.11)", "sphinx (==7.0.1)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"]
-lint = ["flake8 (==6.0.0)", "flake8-bugbear (==23.7.10)", "mypy (==1.4.1)", "pre-commit (>=2.4,<4.0)"]
-tests = ["pytest", "pytz", "simplejson"]
-
 [[package]]
 name = "matplotlib-inline"
 version = "0.1.6"
@@ -1189,28 +1114,6 @@ files = [
     {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"},
 ]

-[[package]]
-name = "mypy-extensions"
-version = "1.0.0"
-description = "Type system extensions for programs checked with the mypy type checker."
-optional = false
-python-versions = ">=3.5"
-files = [
-    {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
-    {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
-]
-
-[[package]]
-name = "packaging"
-version = "23.2"
-description = "Core utilities for Python packages"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
-    {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
-]
-
 [[package]]
 name = "parso"
 version = "0.8.3"
@@ -1226,17 +1129,6 @@ files = [
 qa = ["flake8 (==3.8.3)", "mypy (==0.782)"]
 testing = ["docopt", "pytest (<6.0.0)"]

-[[package]]
-name = "pathspec"
-version = "0.11.2"
-description = "Utility library for gitignore style pattern matching of file paths."
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"},
-    {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"},
-]
-
 [[package]]
 name = "pexpect"
 version = "4.8.0"
@@ -1318,21 +1210,6 @@ files = [
 docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"]
 tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"]

-[[package]]
-name = "platformdirs"
-version = "4.0.0"
-description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "platformdirs-4.0.0-py3-none-any.whl", hash = "sha256:118c954d7e949b35437270383a3f2531e99dd93cf7ce4dc8340d3356d30f173b"},
-    {file = "platformdirs-4.0.0.tar.gz", hash = "sha256:cb633b2bcf10c51af60beb0ab06d2f1d69064b43abf4c185ca6b28865f3f9731"},
-]
-
-[package.extras]
-docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"]
-test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"]
-
 [[package]]
 name = "prompt-toolkit"
 version = "3.0.41"
@@ -1821,28 +1698,28 @@ pyasn1 = ">=0.1.3"

 [[package]]
 name = "ruff"
-version = "0.1.6"
+version = "0.2.2"
 description = "An extremely fast Python linter and code formatter, written in Rust."
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "ruff-0.1.6-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:88b8cdf6abf98130991cbc9f6438f35f6e8d41a02622cc5ee130a02a0ed28703"},
-    {file = "ruff-0.1.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5c549ed437680b6105a1299d2cd30e4964211606eeb48a0ff7a93ef70b902248"},
-    {file = "ruff-0.1.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cf5f701062e294f2167e66d11b092bba7af6a057668ed618a9253e1e90cfd76"},
-    {file = "ruff-0.1.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:05991ee20d4ac4bb78385360c684e4b417edd971030ab12a4fbd075ff535050e"},
-    {file = "ruff-0.1.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87455a0c1f739b3c069e2f4c43b66479a54dea0276dd5d4d67b091265f6fd1dc"},
-    {file = "ruff-0.1.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:683aa5bdda5a48cb8266fcde8eea2a6af4e5700a392c56ea5fb5f0d4bfdc0240"},
-    {file = "ruff-0.1.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:137852105586dcbf80c1717facb6781555c4e99f520c9c827bd414fac67ddfb6"},
-    {file = "ruff-0.1.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd98138a98d48a1c36c394fd6b84cd943ac92a08278aa8ac8c0fdefcf7138f35"},
-    {file = "ruff-0.1.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a0cd909d25f227ac5c36d4e7e681577275fb74ba3b11d288aff7ec47e3ae745"},
-    {file = "ruff-0.1.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e8fd1c62a47aa88a02707b5dd20c5ff20d035d634aa74826b42a1da77861b5ff"},
-    {file = "ruff-0.1.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fd89b45d374935829134a082617954120d7a1470a9f0ec0e7f3ead983edc48cc"},
-    {file = "ruff-0.1.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:491262006e92f825b145cd1e52948073c56560243b55fb3b4ecb142f6f0e9543"},
-    {file = "ruff-0.1.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ea284789861b8b5ca9d5443591a92a397ac183d4351882ab52f6296b4fdd5462"},
-    {file = "ruff-0.1.6-py3-none-win32.whl", hash = "sha256:1610e14750826dfc207ccbcdd7331b6bd285607d4181df9c1c6ae26646d6848a"},
-    {file = "ruff-0.1.6-py3-none-win_amd64.whl", hash = "sha256:4558b3e178145491e9bc3b2ee3c4b42f19d19384eaa5c59d10acf6e8f8b57e33"},
-    {file = "ruff-0.1.6-py3-none-win_arm64.whl", hash = "sha256:03910e81df0d8db0e30050725a5802441c2022ea3ae4fe0609b76081731accbc"},
-    {file = "ruff-0.1.6.tar.gz", hash = "sha256:1b09f29b16c6ead5ea6b097ef2764b42372aebe363722f1605ecbcd2b9207184"},
+    {file = "ruff-0.2.2-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0a9efb032855ffb3c21f6405751d5e147b0c6b631e3ca3f6b20f917572b97eb6"},
+    {file = "ruff-0.2.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d450b7fbff85913f866a5384d8912710936e2b96da74541c82c1b458472ddb39"},
+    {file = "ruff-0.2.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecd46e3106850a5c26aee114e562c329f9a1fbe9e4821b008c4404f64ff9ce73"},
+    {file = "ruff-0.2.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e22676a5b875bd72acd3d11d5fa9075d3a5f53b877fe7b4793e4673499318ba"},
+    {file = "ruff-0.2.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1695700d1e25a99d28f7a1636d85bafcc5030bba9d0578c0781ba1790dbcf51c"},
+    {file = "ruff-0.2.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:b0c232af3d0bd8f521806223723456ffebf8e323bd1e4e82b0befb20ba18388e"},
+    {file = "ruff-0.2.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f63d96494eeec2fc70d909393bcd76c69f35334cdbd9e20d089fb3f0640216ca"},
+    {file = "ruff-0.2.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a61ea0ff048e06de273b2e45bd72629f470f5da8f71daf09fe481278b175001"},
+    {file = "ruff-0.2.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e1439c8f407e4f356470e54cdecdca1bd5439a0673792dbe34a2b0a551a2fe3"},
+    {file = "ruff-0.2.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:940de32dc8853eba0f67f7198b3e79bc6ba95c2edbfdfac2144c8235114d6726"},
+    {file = "ruff-0.2.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:0c126da55c38dd917621552ab430213bdb3273bb10ddb67bc4b761989210eb6e"},
+    {file = "ruff-0.2.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:3b65494f7e4bed2e74110dac1f0d17dc8e1f42faaa784e7c58a98e335ec83d7e"},
+    {file = "ruff-0.2.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1ec49be4fe6ddac0503833f3ed8930528e26d1e60ad35c2446da372d16651ce9"},
+    {file = "ruff-0.2.2-py3-none-win32.whl", hash = "sha256:d920499b576f6c68295bc04e7b17b6544d9d05f196bb3aac4358792ef6f34325"},
+    {file = "ruff-0.2.2-py3-none-win_amd64.whl", hash = "sha256:cc9a91ae137d687f43a44c900e5d95e9617cb37d4c989e462980ba27039d239d"},
+    {file = "ruff-0.2.2-py3-none-win_arm64.whl", hash = "sha256:c9d15fc41e6054bfc7200478720570078f0b41c9ae4f010bcc16bd6f4d1aacdd"},
+    {file = "ruff-0.2.2.tar.gz", hash = "sha256:e62ed7f36b3068a30ba39193a14274cd706bc486fad521276458022f7bccb31d"},
 ]

 [[package]]
@@ -1979,21 +1856,6 @@ files = [
     {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"},
 ]

-[[package]]
-name = "typing-inspect"
-version = "0.9.0"
-description = "Runtime inspection utilities for typing module."
-optional = false
-python-versions = "*"
-files = [
-    {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"},
-    {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"},
-]
-
-[package.dependencies]
-mypy-extensions = ">=0.3.0"
-typing-extensions = ">=3.7.4"
-
 [[package]]
 name = "tzdata"
 version = "2023.4"
@@ -2214,4 +2076,4 @@ multidict = ">=4.0"
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.11"
-content-hash = "12a8fd1ae9e27d9a2a85f6fb28a39a3fe73ee8ecbc31ba3a9400943b85573501"
+content-hash = "e460803a11bacc655d364566b994ba3b038889c0b1b8aaf16492f302184e9eb7"
processor.py (524 changed lines)

@@ -1,18 +1,21 @@
 import asyncio
+import contextlib
 import datetime
 import itertools
 from asyncio import Semaphore, create_subprocess_exec
-from asyncio.subprocess import DEVNULL
+from asyncio.subprocess import PIPE
 from pathlib import Path

 from bilibili_api import ass, favorite_list, video
 from bilibili_api.exceptions import ResponseCodeException
 from loguru import logger
 from tortoise.connection import connections
+from tortoise.models import Model

-from constants import FFMPEG_COMMAND, MediaStatus, MediaType
+from constants import FFMPEG_COMMAND, MediaStatus, MediaType, NfoMode
 from credential import credential
-from models import FavoriteItem, FavoriteList, Upper
-from nfo import Actor, EpisodeInfo
+from models import FavoriteItem, FavoriteItemPage, FavoriteList, Upper
+from nfo import Base as NfoBase
+from nfo import EpisodeInfo, MovieInfo, TVShowInfo, UpperInfo
 from settings import settings
 from utils import aexists, amakedirs, client, download_content
@@ -25,6 +28,7 @@ async def cleanup() -> None:


 def concurrent_decorator(concurrency: int) -> callable:
+    """A simple concurrency-limiting decorator: at most `concurrency` calls of the decorated function run at once"""
    sem = Semaphore(value=concurrency)

     def decorator(func: callable) -> callable:
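The hunk only shows the decorator's header and docstring; the wrapper body sits outside the diff context. A hedged sketch of how a semaphore-based limiter of this shape is typically written (my reconstruction under that assumption, not the repo's exact body):

```python
import asyncio
from asyncio import Semaphore
from functools import wraps


def concurrent_decorator(concurrency: int):
    """Allow at most `concurrency` concurrent executions of the wrapped coroutine."""
    sem = Semaphore(value=concurrency)

    def decorator(func):
        @wraps(func)
        async def wrapper(*args, **kwargs):
            async with sem:  # blocks while `concurrency` calls are already in flight
                return await func(*args, **kwargs)

        return wrapper

    return decorator


@concurrent_decorator(concurrency=2)
async def job(i: int) -> None:
    await asyncio.sleep(0.1)
    print(f"job {i} done")


async def main() -> None:
    # Five jobs, but only two run at any moment.
    await asyncio.gather(*[job(i) for i in range(5)])


asyncio.run(main())
```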
@@ -37,16 +41,12 @@ def concurrent_decorator(concurrency: int) -> callable:
     return decorator


-async def manage_model(medias: list[dict], fav_list: FavoriteList) -> None:
+async def update_favorite_item(medias: list[dict], fav_list: FavoriteList) -> None:
+    """Update database records from the video list of a favorite folder"""
     uppers = [
-        Upper(
-            mid=media["upper"]["mid"],
-            name=media["upper"]["name"],
-            thumb=media["upper"]["face"],
-        )
-        for media in medias
+        Upper(mid=media["upper"]["mid"], name=media["upper"]["name"], thumb=media["upper"]["face"]) for media in medias
     ]
-    await Upper.bulk_create(uppers, on_conflict=["mid"], update_fields=["name", "thumb"])
+    await Upper.bulk_create(uppers, on_conflict=["mid"], update_fields=["name", "thumb"], batch_size=300)
     items = [
         FavoriteItem(
             name=media["title"],
@@ -66,15 +66,8 @@ async def manage_model(medias: list[dict], fav_list: FavoriteList) -> None:
     await FavoriteItem.bulk_create(
         items,
         on_conflict=["bvid", "favorite_list_id"],
-        update_fields=[
-            "name",
-            "type",
-            "desc",
-            "cover",
-            "ctime",
-            "pubtime",
-            "fav_time",
-        ],
+        update_fields=["name", "type", "desc", "cover", "ctime", "pubtime", "fav_time"],
+        batch_size=300,
     )
@@ -100,11 +93,7 @@ async def process_favorite(favorite_id: int) -> None:
         favorite_id, page=1, credential=credential
     )
     title = favorite_video_list["info"]["title"]
-    logger.info(
-        "Start to process favorite {}: {}",
-        favorite_id,
-        title,
-    )
+    logger.info("Start to process favorite {}: {}.", favorite_id, title)
     fav_list, _ = await FavoriteList.get_or_create(
         id=favorite_id, defaults={"name": favorite_video_list["info"]["title"]}
     )
@@ -118,32 +107,23 @@
         )
         # First check whether records for these bvids already exist
         existed_items = await FavoriteItem.filter(
-            favorite_list=fav_list,
-            bvid__in=[media["bvid"] for media in favorite_video_list["medias"]],
+            favorite_list=fav_list, bvid__in=[media["bvid"] for media in favorite_video_list["medias"]]
         )
         # Record the bvid and fav_time of every entry in the fetched list
         media_info = {(media["bvid"], media["fav_time"]) for media in favorite_video_list["medias"]}
         # A record matching on both bvid and fav_time means we have reached the position processed last time
-        continue_flag = not media_info & {
-            (item.bvid, int(item.fav_time.timestamp())) for item in existed_items
-        }
-        await manage_model(favorite_video_list["medias"], fav_list)
+        continue_flag = not media_info & {(item.bvid, int(item.fav_time.timestamp())) for item in existed_items}
+        await update_favorite_item(favorite_video_list["medias"], fav_list)
         if not (continue_flag and favorite_video_list["has_more"]):
             break
     all_unprocessed_items = await FavoriteItem.filter(
-        favorite_list=fav_list,
-        type=MediaType.VIDEO,
-        status=MediaStatus.NORMAL,
-        downloaded=False,
+        favorite_list=fav_list, type=MediaType.VIDEO, status=MediaStatus.NORMAL, downloaded=False
     ).prefetch_related("upper")
-    await asyncio.gather(
-        *[process_favorite_item(item) for item in all_unprocessed_items],
-        return_exceptions=True,
-    )
-    logger.info("Favorite {} {} processed successfully.", favorite_id, title)
+    await asyncio.gather(*[process_favorite_item(item) for item in all_unprocessed_items], return_exceptions=True)
+    logger.info("Favorite {} {} has been processed.", favorite_id, title)
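The continue_flag logic stops pagination by intersecting two sets of (bvid, fav_time) pairs: if the freshly fetched page shares any pair with what the database already holds, the previous sync point has been reached and paging can stop. A standalone illustration with made-up values:

```python
# Made-up values; the real code builds these from the API response and the DB rows.
fetched = {("BV1a", 1700000000), ("BV1b", 1700000100)}
stored = {("BV1b", 1700000100), ("BV1c", 1690000000)}

continue_flag = not fetched & stored  # a shared pair means: stop paginating
print(fetched & stored)  # {('BV1b', 1700000100)}
print(continue_flag)     # False: this page overlaps what we already have
```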
@concurrent_decorator(4)
|
||||
@concurrent_decorator(concurrency=4)
|
||||
async def process_favorite_item(
|
||||
fav_item: FavoriteItem,
|
||||
process_poster=True,
|
||||
@@ -152,201 +132,323 @@ async def process_favorite_item(
|
||||
process_upper=True,
|
||||
process_subtitle=True,
|
||||
) -> None:
|
||||
logger.info("Start to process video {} {}", fav_item.bvid, fav_item.name)
|
||||
logger.info("Start to process video {} {}.", fav_item.bvid, fav_item.name)
|
||||
if fav_item.type != MediaType.VIDEO:
|
||||
logger.warning("Media {} is not a video, skipped.", fav_item.name)
|
||||
logger.warning("Media {} {} is not a video, skipped.", fav_item.bvid, fav_item.name)
|
||||
return
|
||||
v = video.Video(fav_item.bvid, credential=credential)
|
||||
# 如果没有获取过 tags,那么尝试获取一下
|
||||
try:
|
||||
# 如果没有获取过 tags,那么尝试获取一下(不关键,忽略掉错误)
|
||||
with contextlib.suppress(Exception):
|
||||
if fav_item.tags is None:
|
||||
fav_item.tags = [_["tag_name"] for _ in await v.get_tags()]
|
||||
except Exception:
|
||||
logger.exception(
|
||||
"Failed to get tags of video {} {}",
|
||||
fav_item.bvid,
|
||||
fav_item.name,
|
||||
)
|
||||
|
||||
# 处理 up 主信息和是否分 p 无关,放到前面
|
||||
if process_upper:
|
||||
result = await asyncio.gather(
|
||||
get_file(fav_item.upper.thumb, fav_item.upper.thumb_path),
|
||||
get_nfo(fav_item.upper.meta_path, obj=fav_item.upper, mode=NfoMode.UPPER),
|
||||
return_exceptions=True,
|
||||
)
|
||||
if any(isinstance(_, FileExistsError) for _ in result):
|
||||
logger.info("Upper {} {} already exists, skipped.", fav_item.upper.mid, fav_item.upper.name)
|
||||
elif any(isinstance(_, Exception) for _ in result):
|
||||
logger.exception("Failed to process upper {} {}.", fav_item.upper.mid, fav_item.upper.name)
|
||||
single_page = False
|
||||
if settings.paginated_video:
|
||||
pages = None
|
||||
try:
|
||||
if not all(
|
||||
await asyncio.gather(
|
||||
aexists(fav_item.upper.thumb_path),
|
||||
aexists(fav_item.upper.meta_path),
|
||||
pages = await v.get_pages()
|
||||
pages = [
|
||||
FavoriteItemPage(
|
||||
favorite_item=fav_item,
|
||||
cid=page["cid"],
|
||||
page=page["page"],
|
||||
name=page["part"],
|
||||
image=page["first_frame"],
|
||||
)
|
||||
):
|
||||
await amakedirs(fav_item.upper.thumb_path.parent, exist_ok=True)
|
||||
for page in pages
|
||||
]
|
||||
except Exception:
|
||||
logger.exception("Failed to get pages of video {} {}.", fav_item.bvid, fav_item.name)
|
||||
if pages:
|
||||
if len(pages) == 1:
|
||||
single_page = True
|
||||
else:
|
||||
# 如果有多个分 p,那么先创建记录
|
||||
await FavoriteItemPage.bulk_create(
|
||||
pages,
|
||||
on_conflict=["favorite_item_id", "page"],
|
||||
update_fields=["cid", "name", "image"],
|
||||
batch_size=300,
|
||||
)
|
||||
# 重新拉一下数据,不能用 bulk create 的返回值,因为 bulk_create 不会填充主键
|
||||
pages = await FavoriteItemPage.filter(favorite_item=fav_item).order_by("page")
|
||||
for page in pages:
|
||||
page.favorite_item = fav_item
|
||||
if process_nfo:
|
||||
try:
|
||||
await get_nfo(fav_item.tvshow_nfo_path, obj=fav_item, mode=NfoMode.TVSHOW)
|
||||
except FileExistsError:
|
||||
logger.info("Nfo of {} {} already exists, skipped.", fav_item.bvid, fav_item.name)
|
||||
except Exception:
|
||||
logger.exception("Failed to process nfo of video {} {}.", fav_item.bvid, fav_item.name)
|
||||
if process_poster:
|
||||
try:
|
||||
await get_file(fav_item.cover, fav_item.tvshow_poster_path)
|
||||
except FileExistsError:
|
||||
logger.info("Poster of {} {} already exists, skipped.", fav_item.bvid, fav_item.name)
|
||||
except Exception:
|
||||
logger.exception("Failed to process poster of video {} {}.", fav_item.bvid, fav_item.name)
|
||||
await asyncio.gather(
|
||||
fav_item.upper.save_metadata(),
|
||||
download_content(fav_item.upper.thumb, fav_item.upper.thumb_path),
|
||||
*[
|
||||
process_favorite_item_page(
|
||||
page, v, process_poster, process_video, process_nfo, process_subtitle
|
||||
)
|
||||
for page in pages
|
||||
],
|
||||
return_exceptions=True,
|
||||
)
|
||||
else:
|
||||
logger.info(
|
||||
"Upper {} {} already exists, skipped.",
|
||||
fav_item.upper.mid,
|
||||
fav_item.upper.name,
|
||||
)
|
||||
except Exception:
|
||||
logger.exception(
|
||||
"Failed to process upper {} {}",
|
||||
fav_item.upper.mid,
|
||||
fav_item.upper.name,
|
||||
)
|
||||
|
||||
if process_nfo:
|
||||
try:
|
||||
if not await aexists(fav_item.nfo_path):
|
||||
await EpisodeInfo(
|
||||
title=fav_item.name,
|
||||
plot=fav_item.desc,
|
||||
actor=[
|
||||
Actor(
|
||||
name=fav_item.upper.mid,
|
||||
role=fav_item.upper.name,
|
||||
)
|
||||
],
|
||||
tags=fav_item.tags,
|
||||
bvid=fav_item.bvid,
|
||||
aired=fav_item.ctime,
|
||||
).write_nfo(fav_item.nfo_path)
|
||||
else:
|
||||
logger.info(
|
||||
"NFO of {} {} already exists, skipped.",
|
||||
fav_item.bvid,
|
||||
fav_item.name,
|
||||
)
|
||||
except Exception:
|
||||
logger.exception(
|
||||
"Failed to process nfo of video {} {}",
|
||||
fav_item.bvid,
|
||||
fav_item.name,
|
||||
)
|
||||
|
||||
if process_poster:
|
||||
try:
|
||||
if not await aexists(fav_item.poster_path):
|
||||
try:
|
||||
await download_content(fav_item.cover, fav_item.poster_path)
|
||||
except Exception:
|
||||
logger.exception(
|
||||
"Failed to download poster of video {} {}",
|
||||
fav_item.downloaded = all(page.downloaded for page in pages)
|
||||
page_status = {page.status for page in pages}
|
||||
if MediaStatus.INVISIBLE in page_status:
|
||||
fav_item.status = MediaStatus.INVISIBLE
|
||||
elif MediaStatus.DELETED in page_status:
|
||||
fav_item.status = MediaStatus.DELETED
|
||||
else:
|
||||
fav_item.status = MediaStatus.NORMAL
|
||||
if single_page or not settings.paginated_video:
|
||||
if process_nfo:
|
||||
try:
|
||||
await get_nfo(fav_item.nfo_path, obj=fav_item, mode=NfoMode.MOVIE)
|
||||
except FileExistsError:
|
||||
logger.info("NFO of {} {} already exists, skipped.", fav_item.bvid, fav_item.name)
|
||||
except Exception:
|
||||
logger.exception("Failed to process nfo of video {} {}.", fav_item.bvid, fav_item.name)
|
||||
if process_poster:
|
||||
try:
|
||||
await get_file(fav_item.cover, fav_item.poster_path)
|
||||
except FileExistsError:
|
||||
logger.info("Poster of {} {} already exists, skipped.", fav_item.bvid, fav_item.name)
|
||||
except Exception:
|
||||
logger.exception("Failed to process poster of video {} {}.", fav_item.bvid, fav_item.name)
|
||||
if process_subtitle:
|
||||
try:
|
||||
await get_subtitle(v, 0, fav_item.subtitle_path)
|
||||
except FileExistsError:
|
||||
logger.info("Subtitle of {} {} already exists, skipped.", fav_item.bvid, fav_item.name)
|
||||
except Exception:
|
||||
logger.exception("Failed to process subtitle of video {} {}.", fav_item.bvid, fav_item.name)
|
||||
if process_video:
|
||||
try:
|
||||
await get_video(v, 0, fav_item.tmp_video_path, fav_item.tmp_audio_path, fav_item.video_path)
|
||||
fav_item.downloaded = True
|
||||
except FileExistsError:
|
||||
logger.info("Video {} {} already exists, skipped.", fav_item.bvid, fav_item.name)
|
||||
fav_item.downloaded = True
|
||||
except Exception as e:
|
||||
errcode_status = {62002: MediaStatus.INVISIBLE, -404: MediaStatus.DELETED}
|
||||
if not (isinstance(e, ResponseCodeException) and (status := errcode_status.get(e.code))):
|
||||
logger.exception("Failed to process video {} {}.", fav_item.bvid, fav_item.name)
|
||||
else:
|
||||
fav_item.status = status
|
||||
logger.error(
|
||||
"Video {} {} is not available, marked as {}.",
|
||||
fav_item.bvid,
|
||||
fav_item.name,
|
||||
fav_item.status.text,
|
||||
)
|
||||
else:
|
||||
logger.info(
|
||||
"Poster of {} {} already exists, skipped.",
|
||||
fav_item.bvid,
|
||||
fav_item.name,
|
||||
)
|
||||
await fav_item.save()
|
||||
logger.info("{} {} has been processed.", fav_item.bvid, fav_item.name)
|
||||
|
||||
|
||||
@concurrent_decorator(concurrency=4)
|
||||
async def process_favorite_item_page(
|
||||
fav_page: FavoriteItemPage,
|
||||
v: video.Video,
|
||||
process_poster=True,
|
||||
process_video=True,
|
||||
process_nfo=True,
|
||||
process_subtitle=True,
|
||||
):
|
||||
logger.info(
|
||||
"Start to process video {} {} page {}.", fav_page.favorite_item.bvid, fav_page.favorite_item.name, fav_page.page
|
||||
)
|
||||
if process_nfo:
|
||||
try:
|
||||
await get_nfo(fav_page.nfo_path, obj=fav_page, mode=NfoMode.EPISODE)
|
||||
except FileExistsError:
|
||||
logger.info(
|
||||
"NFO of {} {} page {} already exists, skipped.",
|
||||
fav_page.favorite_item.bvid,
|
||||
fav_page.favorite_item.name,
|
||||
fav_page.page,
|
||||
)
|
||||
except Exception:
|
||||
logger.exception(
|
||||
"Failed to process poster of video {} {}",
|
||||
fav_item.bvid,
|
||||
fav_item.name,
|
||||
"Failed to process nfo of video {} {} page {}.",
|
||||
fav_page.favorite_item.bvid,
|
||||
fav_page.favorite_item.name,
|
||||
fav_page.page,
|
||||
)
|
||||
if process_poster:
|
||||
try:
|
||||
await get_file(fav_page.image, fav_page.poster_path)
|
||||
except FileExistsError:
|
||||
logger.info(
|
||||
"Poster of {} {} page {} already exists, skipped.",
|
||||
fav_page.favorite_item.bvid,
|
||||
fav_page.favorite_item.name,
|
||||
fav_page.page,
|
||||
)
|
||||
except Exception:
|
||||
logger.exception(
|
||||
"Failed to process poster of video {} {} page {}.",
|
||||
fav_page.favorite_item.bvid,
|
||||
fav_page.favorite_item.name,
|
||||
fav_page.page,
|
||||
)
|
||||
|
||||
if process_subtitle:
|
||||
try:
|
||||
if not await aexists(fav_item.subtitle_path):
|
||||
await ass.make_ass_file_danmakus_protobuf(
|
||||
v,
|
||||
0,
|
||||
str(fav_item.subtitle_path.resolve()),
|
||||
credential=credential,
|
||||
font_name=settings.subtitle.font_name,
|
||||
font_size=settings.subtitle.font_size,
|
||||
alpha=settings.subtitle.alpha,
|
||||
fly_time=settings.subtitle.fly_time,
|
||||
static_time=settings.subtitle.static_time,
|
||||
)
|
||||
else:
|
||||
logger.info(
|
||||
"Subtitle of {} {} already exists, skipped.",
|
||||
fav_item.bvid,
|
||||
fav_item.name,
|
||||
)
|
||||
await get_subtitle(v, fav_page.page - 1, fav_page.subtitle_path)
|
||||
except FileExistsError:
|
||||
logger.info(
|
||||
"Subtitle of {} {} page {} already exists, skipped.",
|
||||
fav_page.favorite_item.bvid,
|
||||
fav_page.favorite_item.name,
|
||||
fav_page.page,
|
||||
)
|
||||
except Exception:
|
||||
logger.exception(
|
||||
"Failed to process subtitle of video {} {}",
|
||||
fav_item.bvid,
|
||||
fav_item.name,
|
||||
"Failed to process subtitle of video {} {} page {}.",
|
||||
fav_page.favorite_item.bvid,
|
||||
fav_page.favorite_item.name,
|
||||
fav_page.page,
|
||||
)
|
||||
if process_video:
|
||||
try:
|
||||
if await aexists(fav_item.video_path):
|
||||
fav_item.downloaded = True
|
||||
logger.info(
|
||||
"Video {} {} already exists, skipped.",
|
||||
fav_item.bvid,
|
||||
fav_item.name,
|
||||
await get_video(v, fav_page.page - 1, fav_page.tmp_video_path, fav_page.tmp_audio_path, fav_page.video_path)
|
||||
fav_page.downloaded = True
|
||||
except FileExistsError:
|
||||
logger.info(
|
||||
"Video {} {} page {} already exists, skipped.",
|
||||
fav_page.favorite_item.bvid,
|
||||
fav_page.favorite_item.name,
|
||||
fav_page.page,
|
||||
)
|
||||
fav_page.downloaded = True
|
||||
except Exception as e:
|
||||
errcode_status = {62002: MediaStatus.INVISIBLE, -404: MediaStatus.DELETED}
|
||||
if not (isinstance(e, ResponseCodeException) and (status := errcode_status.get(e.code))):
|
||||
logger.exception(
|
||||
"Failed to process video {} {} page {}.",
|
||||
fav_page.favorite_item.bvid,
|
||||
fav_page.favorite_item.name,
|
||||
fav_page.page,
|
||||
)
|
||||
else:
|
||||
# 开始处理视频内容
|
||||
detector = video.VideoDownloadURLDataDetecter(
|
||||
await v.get_download_url(page_index=0)
|
||||
)
|
||||
streams = detector.detect_best_streams(codecs=settings.codec)
|
||||
if detector.check_flv_stream():
|
||||
await download_content(streams[0].url, fav_item.tmp_video_path)
|
||||
process = await create_subprocess_exec(
|
||||
FFMPEG_COMMAND,
|
||||
"-i",
|
||||
fav_item.tmp_video_path,
|
||||
fav_item.video_path,
|
||||
stdout=DEVNULL,
|
||||
stderr=DEVNULL,
|
||||
)
|
||||
await process.communicate()
|
||||
fav_item.tmp_video_path.unlink()
|
||||
else:
|
||||
paths, tasks = [fav_item.tmp_video_path], [
|
||||
download_content(streams[0].url, fav_item.tmp_video_path)
|
||||
]
|
||||
if streams[1]:
|
||||
paths.append(fav_item.tmp_audio_path)
|
||||
tasks.append(download_content(streams[1].url, fav_item.tmp_audio_path))
|
||||
await asyncio.gather(*tasks)
|
||||
process = await create_subprocess_exec(
|
||||
FFMPEG_COMMAND,
|
||||
*list(itertools.chain(*zip(["-i"] * len(paths), paths))),
|
||||
"-c",
|
||||
"copy",
|
||||
fav_item.video_path,
|
||||
stdout=DEVNULL,
|
||||
stderr=DEVNULL,
|
||||
)
|
||||
await process.communicate()
|
||||
for path in paths:
|
||||
path.unlink()
|
||||
fav_item.downloaded = True
|
||||
except ResponseCodeException as e:
|
||||
match e.code:
|
||||
case 62002:
|
||||
fav_item.status = MediaStatus.INVISIBLE
|
||||
case -404:
|
||||
fav_item.status = MediaStatus.DELETED
|
||||
case _:
|
||||
logger.exception(
|
||||
"Failed to process video {} {}, error_code: {}",
|
||||
fav_item.bvid,
|
||||
fav_item.name,
|
||||
e.code,
|
||||
)
|
||||
if fav_item.status != MediaStatus.NORMAL:
|
||||
fav_page.status = status
|
||||
logger.error(
|
||||
"Video {} {} is not available, marked as {}",
|
||||
fav_item.bvid,
|
||||
fav_item.name,
|
||||
fav_item.status.text,
|
||||
"Video {} {} page {} is not available, marked as {}.",
|
||||
fav_page.favorite_item.bvid,
|
||||
fav_page.favorite_item.name,
|
||||
fav_page.page,
|
||||
fav_page.status.text,
|
||||
)
|
||||
except Exception:
|
||||
logger.exception("Failed to process video {} {}", fav_item.bvid, fav_item.name)
|
||||
await fav_item.save()
|
||||
await fav_page.save()
|
||||
logger.info(
|
||||
"{} {} is processed successfully.",
|
||||
fav_item.bvid,
|
||||
fav_item.name,
|
||||
"{} {} page {} has been processed.", fav_page.favorite_item.bvid, fav_page.favorite_item.name, fav_page.page
|
||||
)
|
||||
|
||||
|
||||
async def get_video(v: video.Video, page_id: int, tmp_video_path: Path, tmp_audio_path: Path, video_path: Path) -> None:
|
||||
"""指定临时视频、音频和目标视频目录,下载视频的某个分p"""
|
||||
if await aexists(video_path):
|
||||
# 目标视频已经存在,忽略掉
|
||||
raise FileExistsError
|
||||
await amakedirs(video_path.parent, exist_ok=True)
|
||||
# 分析对应分p的视频流
|
||||
detector = video.VideoDownloadURLDataDetecter(await v.get_download_url(page_index=page_id))
    streams = detector.detect_best_streams(**settings.stream.model_dump())
    if detector.check_flv_stream():
        # for flv, download it directly
        await download_content(streams[0].url, tmp_video_path)
        process = await create_subprocess_exec(
            FFMPEG_COMMAND, "-i", tmp_video_path, video_path, stdout=PIPE, stderr=PIPE
        )
        stdout, stderr = await process.communicate()
        tmp_video_path.unlink(missing_ok=True)
    else:
        # for non-flv, download the video stream first
        paths, tasks = ([tmp_video_path], [download_content(streams[0].url, tmp_video_path)])
        if streams[1]:
            # if there is a separate audio stream, download it as well
            paths.append(tmp_audio_path)
            tasks.append(download_content(streams[1].url, tmp_audio_path))
        await asyncio.gather(*tasks)
        process = await create_subprocess_exec(
            FFMPEG_COMMAND,
            *sum([["-i", path] for path in paths], []),
            "-c",
            "copy",
            video_path,
            stdout=PIPE,
            stderr=PIPE,
        )
        stdout, stderr = await process.communicate()
        for path in paths:
            path.unlink(missing_ok=True)
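    # both branches leave process/stdout/stderr set; surface any ffmpeg failure to the caller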
    if process.returncode != 0:
        raise RuntimeError(
            f"{FFMPEG_COMMAND} exited with non-zero code {process.returncode}."
            f"\nstdout:\n{stdout.decode()}"
            f"\nstderr:\n{stderr.decode()}"
        )


async def get_file(url: str, path: Path) -> None:
    """A simple download wrapper, used for covers and similar assets."""
    if await aexists(path):
        # the target file already exists, skip it
        raise FileExistsError
    await amakedirs(path.parent, exist_ok=True)
    await download_content(url, path)


async def get_subtitle(v: video.Video, page_id: int, subtitle_path: Path) -> None:
    """Given the target subtitle path, download the danmaku subtitle of a single page of the video."""
    if await aexists(subtitle_path):
        # the target subtitle already exists, skip it
        raise FileExistsError
    await amakedirs(subtitle_path.parent, exist_ok=True)
    await ass.make_ass_file_danmakus_protobuf(
        v,
        page_id,
        str(subtitle_path.resolve()),
        credential=credential,
        font_name=settings.subtitle.font_name,
        font_size=settings.subtitle.font_size,
        alpha=settings.subtitle.alpha,
        fly_time=settings.subtitle.fly_time,
        static_time=settings.subtitle.static_time,
    )


async def get_nfo(nfo_path: Path, *, obj: Model, mode: NfoMode) -> None:
    """Given the nfo path, the object and the mode, write the corresponding nfo information to a file."""
    if await aexists(nfo_path):
        # the target nfo already exists, skip it
        raise FileExistsError
    await amakedirs(nfo_path.parent, exist_ok=True)
    # build a different nfo depending on the mode
    nfo: NfoBase | None = None
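    # dispatch on the (model type, mode) pair to pick the matching nfo builder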
    match obj, mode:
        case FavoriteItem(), NfoMode.MOVIE:
            nfo = MovieInfo.from_favorite_item(obj)
        case FavoriteItem(), NfoMode.TVSHOW:
            nfo = TVShowInfo.from_favorite_item(obj)
        case FavoriteItemPage(), NfoMode.EPISODE:
            nfo = EpisodeInfo.from_favorite_item_page(obj)
        case Upper(), NfoMode.UPPER:
            nfo = UpperInfo.from_upper(obj)
        case _:
            raise ValueError
    await nfo.to_file(nfo_path)

pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "bili-sync"
version = "1.1.6"
version = "1.1.7"
description = ""
authors = ["amtoaer <amtoaer@gmail.com>"]
license = "GPL-3.0"
@@ -11,24 +11,22 @@ python = "^3.11"
aerich = "0.7.2"
aiofiles = "23.2.1"
bilibili-api-python = {git = "https://github.com/Nemo2011/bilibili-api.git", rev = "16.2.0b2"}
dataclasses-json = "0.6.2"
loguru = "0.7.2"
pydantic = "2.5.3"
tortoise-orm = "0.20.0"
uvloop = "0.19.0"

[tool.poetry.group.dev.dependencies]
black = "23.11.0"
bump-my-version = "0.15.4"
ipython = "8.17.2"
ruff = "0.1.6"
ruff = "0.2.2"

[tool.black]
line-length = 100

[tool.ruff]
line-length = 100
select = [
line-length = 120
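# ruff 0.2 moved lint settings under the "lint." namespace, hence the renames below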
lint.select = [
    "F", # https://beta.ruff.rs/docs/rules/#pyflakes-f
    "E",
    "W", # https://beta.ruff.rs/docs/rules/#pycodestyle-e-w
@@ -52,9 +50,11 @@ select = [
    "NPY", # https://beta.ruff.rs/docs/rules/#numpy-specific-rules-npy
    "RUF100", # https://beta.ruff.rs/docs/configuration/#automatic-noqa-management
]
ignore = [
lint.ignore = [
    "A003", # Class attribute `id` is shadowing a Python builtin
]
lint.isort.split-on-trailing-comma = false
format.skip-magic-trailing-comma = true
exclude = ["migrations"]

[tool.aerich]
@@ -68,7 +68,7 @@ message = "chore: bump version from {current_version} to {new_version}"
tag = true
tag_name = "{new_version}"
tag_message = ""
current_version = "1.1.6"
current_version = "1.1.7"
parse = "(?P<major>\\d+)\\.(?P<minor>\\d+)\\.(?P<patch>\\d+)"

[[tool.bumpversion.files]]
@@ -81,6 +81,7 @@ filename = "pyproject.toml"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

57
settings.py
57
settings.py
@@ -1,11 +1,12 @@
from pathlib import Path

from bilibili_api.video import VideoCodecs
from pydantic import BaseModel, Field, field_validator
from bilibili_api.video import AudioQuality, VideoCodecs, VideoQuality
from pydantic import BaseModel, Field, field_validator, root_validator
from pydantic_core import PydanticCustomError
from typing_extensions import Annotated

from constants import DEFAULT_CONFIG_PATH
from utils import amakedirs, aopen


class SubtitleConfig(BaseModel):
@@ -16,6 +17,26 @@ class SubtitleConfig(BaseModel):
    static_time: float = 10  # display duration of static danmakus


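# every field below is forwarded verbatim to VideoDownloadURLDataDetecter.detect_best_streams via model_dump()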
class StreamConfig(BaseModel):
    video_max_quality: VideoQuality = VideoQuality._8K
    audio_max_quality: AudioQuality = AudioQuality._192K
    video_min_quality: VideoQuality = VideoQuality._360P
    audio_min_quality: AudioQuality = AudioQuality._64K
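    # candidate codecs in order of preference; must be non-empty and unique (enforced below)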
    codecs: list[VideoCodecs] = Field(
        default_factory=lambda: [VideoCodecs.AV1, VideoCodecs.AVC, VideoCodecs.HEV], min_length=1
    )
    no_dolby_video: bool = False
    no_dolby_audio: bool = False
    no_hdr: bool = False
    no_hires: bool = False

    @field_validator("codecs", mode="after")
    def codec_validator(cls, codecs: list[VideoCodecs]) -> list[VideoCodecs]:
        if len(codecs) != len(set(codecs)):
            raise PydanticCustomError("unique_list", "List must be unique")
        return codecs


class Config(BaseModel):
    sessdata: Annotated[str, Field(min_length=1)] = ""
    bili_jct: Annotated[str, Field(min_length=1)] = ""
@@ -25,20 +46,15 @@ class Config(BaseModel):
    interval: int = 20
    path_mapper: dict[int, str] = Field(default_factory=dict)
    subtitle: SubtitleConfig = Field(default_factory=SubtitleConfig)
    codec: list[VideoCodecs] = Field(
        default_factory=lambda: [
            VideoCodecs.AV1,
            VideoCodecs.AVC,
            VideoCodecs.HEV,
        ],
        min_length=1,
    )
    stream: StreamConfig = Field(default_factory=StreamConfig)
    paginated_video: bool = False

    @field_validator("codec", mode="after")
    def codec_validator(cls, codecs: list[VideoCodecs]) -> list[VideoCodecs]:
        if len(codecs) != len(set(codecs)):
            raise PydanticCustomError("unique_list", "List must be unique")
        return codecs
    @root_validator(pre=True)
    def migrate(cls, values: dict) -> dict:
        # migrate the legacy top-level codec option into stream.codecs
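        # e.g. {"codec": ["av1"]} is rewritten to {"stream": {"codecs": ["av1"]}}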
        if "codec" in values and "stream" not in values:
            values["stream"] = {"codecs": values.pop("codec")}
        return values

    @staticmethod
    def load(path: Path | None = None) -> "Config":
@@ -61,6 +77,17 @@ class Config(BaseModel):
        except Exception as e:
            raise RuntimeError(f"Failed to save config file: {path}") from e

    async def asave(self, path: Path | None = None) -> "Config":
        if not path:
            path = DEFAULT_CONFIG_PATH
        try:
            await amakedirs(path.parent, exist_ok=True)
            async with aopen(path, "w") as f:
                await f.write(Config.model_dump_json(self, indent=4))
            return self
        except Exception as e:
            raise RuntimeError(f"Failed to save config file: {path}") from e


def init_settings() -> Config:
    if not DEFAULT_CONFIG_PATH.exists():

4
utils.py
4
utils.py
@@ -27,9 +27,7 @@ async def amakedirs(path: Path, exist_ok=False) -> None:
    await makedirs(path, exist_ok=exist_ok)


def aopen(
    path: Path, mode: str = "r", **kwargs
) -> AiofilesContextManager[None, None, AsyncTextIOWrapper]:
def aopen(path: Path, mode: str = "r", **kwargs) -> AiofilesContextManager[None, None, AsyncTextIOWrapper]:
    return aiofiles.open(path, mode, **kwargs)


constants.py
@@ -1 +1 @@
VERSION = "1.1.6"
VERSION = "1.1.7"