fix: 预检查收藏夹内容类型,支持写入 nfo

This commit is contained in:
amtoaer
2023-11-22 23:38:34 +08:00
parent 8a6f05da9c
commit 2d28cf096d
4 changed files with 70 additions and 3 deletions

3
.gitignore vendored
View File

@@ -2,4 +2,5 @@
__pycache__
debug.py
videos
config.test.json
config.test.json
example.json

View File

@@ -1,5 +1,6 @@
from pathlib import Path
import os
from enum import IntEnum
DEFAULT_CONFIG_PATH = (
Path(__file__).parent / "config.json"
@@ -8,3 +9,9 @@ DEFAULT_CONFIG_PATH = (
)
FFMPEG_COMMAND = "ffmpeg"
class MediaType(IntEnum):
    """Content type of a Bilibili favorite-list entry.

    Compared against ``media["type"]`` when processing a favorite list so
    that non-video entries can be skipped before download. The numeric
    values are presumably those returned by the Bilibili API — TODO confirm
    against the API response.
    """

    VIDEO = 2
    AUDIO = 12
    VIDEO_COLLECTION = 21

47
nfo.py Normal file
View File

@@ -0,0 +1,47 @@
from dataclasses import dataclass
import datetime
from pathlib import Path
@dataclass
class Actor:
    """A single ``<actor>`` entry in an NFO document."""

    # Display name of the actor/uploader; XML-escaped when rendered.
    name: str

    def to_xml(self) -> str:
        """Return the ``<actor>`` XML fragment for this actor.

        The name is XML-escaped so characters such as ``&`` and ``<``
        cannot produce malformed output (the original interpolated the
        raw name into the f-string).
        """
        # Local stdlib import keeps the module's import header unchanged.
        from xml.sax.saxutils import escape

        return "\n".join(
            (
                "<actor>",
                f"<name>{escape(self.name)}</name>",
                "</actor>",
            )
        )
@dataclass
class EpisodeInfo:
    """Metadata for one downloaded video, serializable to an
    ``<episodedetails>`` NFO document (Kodi/Jellyfin-style scraper file).
    """

    title: str                # video title; XML-escaped on output
    plot: str                 # description; emitted inside a CDATA section
    actor: list[Actor]        # uploader(s); rendered via Actor.to_xml()
    bvid: str                 # Bilibili video id (used as the unique id)
    aired: datetime.datetime  # publish time; only year and date are emitted

    def write_nfo(self, path: Path) -> None:
        """Serialize this entry and write it to *path* as UTF-8."""
        path.write_text(self.to_xml(), encoding="utf-8")

    def to_xml(self) -> str:
        """Return the complete NFO XML document as a string.

        Fixes over the original: the title is XML-escaped, and a literal
        ``]]>`` inside the plot no longer terminates the CDATA section
        early (split across two CDATA sections, the standard workaround).
        """
        # Local stdlib import keeps the module's import header unchanged.
        from xml.sax.saxutils import escape

        actors = "\n".join(a.to_xml() for a in self.actor)
        safe_plot = self.plot.replace("]]>", "]]]]><![CDATA[>")
        return f"""
<?xml version="1.0" encoding="utf-8" standalone="yes"?>
<episodedetails>
<plot><![CDATA[{safe_plot}]]></plot>
<outline />
<title>{escape(self.title)}</title>
{actors}
<year>{self.aired.year}</year>
<uniqueid type="bilibili">{self.bvid}</uniqueid>
<aired>{self.aired.strftime("%Y-%m-%d")}</aired>
</episodedetails>
""".strip("\n")

View File

@@ -1,4 +1,5 @@
from constants import FFMPEG_COMMAND
from constants import FFMPEG_COMMAND, MediaType
from nfo import Actor, EpisodeInfo
from settings import settings
from credential import credential
from bilibili_api import favorite_list, video, HEADERS
@@ -57,7 +58,7 @@ async def process_favorite(favorite_id: int) -> None:
favorite_id, credential=credential
)
logger.info("start to process favorite {}", favorite_video_list["info"]["title"])
medias = favorite_video_list["medias"][:12]
medias = favorite_video_list["medias"][:4]
tasks = [process_video(save_path, media) for media in medias]
video_result = await asyncio.gather(*tasks, return_exceptions=True)
for idx, result in enumerate(video_result):
@@ -67,11 +68,22 @@ async def process_favorite(favorite_id: int) -> None:
async def process_video(save_path: Path, media: dict) -> None:
title = media["title"]
if media["type"] != MediaType.VIDEO:
logger.warning("Media {} is not a video, skipped.", title)
return
logger.info("start to process video {}", title)
final_path = save_path / f"{title}.mp4"
if final_path.exists():
logger.info(f"{final_path} already exists, skipped.")
return
nfo_path = save_path / f"{title}.nfo"
EpisodeInfo(
title=title,
plot=media["intro"],
actor=[Actor(f"{media['upper']['mid']} - {media['upper']['name']}")],
bvid=media["bvid"],
aired=datetime.datetime.fromtimestamp(media["ctime"]),
).write_nfo(nfo_path)
v = video.Video(media["bvid"], credential=credential)
detector = video.VideoDownloadURLDataDetecter(
await v.get_download_url(page_index=0)