mirror of
https://github.com/amtoaer/bili-sync.git
synced 2026-05-07 16:53:40 +08:00
Compare commits
19 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
46d1810e7c | ||
|
|
89e2567fef | ||
|
|
38caf1f0d6 | ||
|
|
6877171f4d | ||
|
|
29d06a040b | ||
|
|
ceec5d6780 | ||
|
|
650498d4a1 | ||
|
|
96ff84391d | ||
|
|
44e8a2c97d | ||
|
|
c3bfb3c2e5 | ||
|
|
ec91cbf3ed | ||
|
|
f174a3b898 | ||
|
|
c8fca7fcca | ||
|
|
6ef25d6409 | ||
|
|
f10fc9dd97 | ||
|
|
d21f14d851 | ||
|
|
012b3f9f31 | ||
|
|
bbde9d6ba6 | ||
|
|
e040ab2d75 |
29
.github/workflows/docker-image-debug.yml
vendored
Normal file
29
.github/workflows/docker-image-debug.yml
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
# Debug image pipeline: builds and pushes a ":debug"-tagged image on every
# push to main. Kept separate from the release workflow so debug images never
# overwrite release tags.
name: Docker Image CI (DEBUG)

on:
  push:
    branches:
      - main

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      -
        name: Checkout
        uses: actions/checkout@v3
      -
        name: Login to DockerHub
        # Credentials are supplied via repository secrets; the same
        # DOCKERHUB_USERNAME is reused below to namespace the image tag.
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      -
        name: Build and push images
        uses: docker/build-push-action@v5
        with:
          context: .
          file: ./Dockerfile
          push: true
          tags: |
            ${{ secrets.DOCKERHUB_USERNAME }}/bili-sync:debug
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -4,7 +4,7 @@ debug.py
|
||||
videos
|
||||
config.test.json
|
||||
database.test.db*
|
||||
example.json
|
||||
example*.json
|
||||
thumbs.test
|
||||
config
|
||||
data
|
||||
|
||||
36
README.md
36
README.md
@@ -1,4 +1,6 @@
|
||||
# bili-sync
|
||||

|
||||
|
||||
## 简介
|
||||
|
||||
为 NAS 用户编写的 BILIBILI 收藏夹同步工具,可方便导入 EMBY 等媒体库工具浏览。
|
||||
|
||||
@@ -37,15 +39,6 @@ class Config(DataClassJsonMixin):
|
||||
|
||||
即:我们可以通过运行一次程序,等程序写入初始配置并提示配置错误终止后编辑 `config.json` 文件,编辑后即可重新运行。
|
||||
|
||||
## 关于 UP 头像
|
||||
|
||||
目前开放全局的环境变量 `THUMB_PATH` 作为 up 主头像的存储位置。
|
||||
|
||||
在下载某条视频时,如果 UP 的头像还不存在,就会将 UP 的头像下载至 `THUMB_PATH`,同时在视频的 NFO 文件中写入 UP 头像的绝对路径。
|
||||
|
||||
但实际测试下来,EMBY 似乎无法正常读取 NFO 文件中的本地头像路径,待找到处理办法后再修复。
|
||||
|
||||
> 虽然但是,一个基本的逻辑是,如果期望 `bili-sync` 在 NFO 中写入的头像绝对路径能够被 EMBY 读取到,那么两个容器中头像的绝对路径必须完全相同。因此虽然头像还没办法正常加载,但为后续考虑,还是推荐将 THUMB_PATH 填写上,并确保该路径在 `bili-sync` 和 `emby` 两个容器中指向的是相同的文件夹(也就是把一个文件夹同时挂载到 `bili-sync` 和 `emby` 的 THUMB_PATH 下)。
|
||||
|
||||
## Docker 运行示例
|
||||
|
||||
@@ -60,17 +53,16 @@ services:
|
||||
- /home/amtoaer/Videos/Bilibilis/:/Videos/Bilibilis/ # 视频文件
|
||||
- /home/amtoaer/.config/nas/bili-sync/config/:/app/config/ # 配置文件
|
||||
- /home/amtoaer/.config/nas/bili-sync/data/:/app/data/ # 数据库
|
||||
# 注:如需在 emby 内查看 up 主头像,需要将 emby 的 metadata/people/ 配置目录挂载至容器的 /app/thumb/
|
||||
- /home/amtoaer/.config/nas/emby/metadata/people/:/app/thumb/
|
||||
environment:
|
||||
- THUMB_PATH=/Videos/Bilibilis/thumb/ # 将头像放到视频文件的 thumb 文件夹下
|
||||
- TZ=Asia/Shanghai
|
||||
restart: always
|
||||
network_mode: bridge
|
||||
hostname: bili-sync
|
||||
container_name: bili-sync
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "30m"
|
||||
|
||||
driver: "local"
|
||||
```
|
||||
|
||||
对应的配置文件:
|
||||
@@ -92,9 +84,19 @@ services:
|
||||
}
|
||||
```
|
||||
|
||||
## 目前的问题
|
||||
## 支持的额外命令
|
||||
|
||||
- [ ] 研究一下 NFO,看看怎么正常读取本地的演员头像
|
||||
为满足需要,该应用包含几个单独的命令,可在程序目录下使用 `python entry.py ${command name}` 运行。
|
||||
|
||||
1. `once`
|
||||
|
||||
处理收藏夹,和一般定时任务触发时执行的操作完全相同,但仅运行一次。
|
||||
2. `recheck`
|
||||
|
||||
将本地不存在的视频文件标记成未下载,下次定时任务触发时将一并下载。
|
||||
3. `upper_thumb`
|
||||
|
||||
手动触发全量下载 up 主头像,为使用老版本时下载的没有 up 头像的视频添加头像。
|
||||
|
||||
## 路线图
|
||||
|
||||
|
||||
60
commands.py
60
commands.py
@@ -1,10 +1,11 @@
|
||||
import asyncio
|
||||
|
||||
from aiofiles.os import path
|
||||
from loguru import logger
|
||||
|
||||
from constants import MediaStatus, MediaType
|
||||
from models import FavoriteItem
|
||||
from models import FavoriteItem, Upper
|
||||
from processor import download_content, process_video
|
||||
from utils import aexists, amakedirs, aremove
|
||||
|
||||
|
||||
async def recheck():
|
||||
@@ -14,9 +15,7 @@ async def recheck():
|
||||
status=MediaStatus.NORMAL,
|
||||
downloaded=True,
|
||||
)
|
||||
exists = await asyncio.gather(
|
||||
*[path.exists(item.video_path) for item in items]
|
||||
)
|
||||
exists = await asyncio.gather(*[aexists(item.video_path) for item in items])
|
||||
for item, exist in zip(items, exists):
|
||||
if isinstance(exist, Exception):
|
||||
logger.error(
|
||||
@@ -36,3 +35,54 @@ async def recheck():
|
||||
logger.info("Updating database...")
|
||||
await FavoriteItem.bulk_update(items, fields=["downloaded"])
|
||||
logger.info("Database updated.")
|
||||
|
||||
|
||||
async def upper_thumb():
    """Batch-download uploader (up 主) avatars and write their metadata files.

    Intended to be run manually once after upgrading from a version that did
    not support uploader avatars: for every known Upper, ensure both the
    avatar image (thumb_path) and the person.nfo (meta_path) exist on disk.
    """
    makedir_tasks = []
    other_tasks = []
    for upper in await Upper.all():
        if all(
            await asyncio.gather(
                aexists(upper.thumb_path), aexists(upper.meta_path)
            )
        ):
            logger.info(
                "Upper {} {} already exists, skipped.", upper.mid, upper.name
            )
            # Bug fix: without this `continue` the "skipped" upper still fell
            # through and was re-downloaded below, contradicting the log line.
            continue
        makedir_tasks.append(amakedirs(upper.thumb_path.parent, exist_ok=True))
        logger.info("Saving metadata for upper {} {}...", upper.mid, upper.name)
        other_tasks.extend(
            [
                upper.save_metadata(),
                download_content(upper.thumb, upper.thumb_path),
            ]
        )
    # Create all target directories first, then write metadata/avatars,
    # so no writer races against a missing parent directory.
    await asyncio.gather(*makedir_tasks)
    await asyncio.gather(*other_tasks)
    logger.info("All done.")
|
||||
|
||||
|
||||
async def refresh_tags():
    """Backfill tags on already-downloaded videos.

    Run manually once after upgrading from a version without tag support:
    deletes each item's stale .nfo file, then regenerates only the NFO part
    of every affected favorite item.
    """
    pending = await FavoriteItem.filter(
        downloaded=True,
        tags=None,
    ).prefetch_related("upper")
    # Remove the outdated NFO files first; a missing file is not fatal,
    # hence return_exceptions=True.
    removals = [aremove(entry.nfo_path) for entry in pending]
    await asyncio.gather(*removals, return_exceptions=True)
    # Regenerate the NFO only — poster, video and upper handling are all
    # explicitly switched off.
    regenerations = [
        process_video(
            entry,
            process_poster=False,
            process_video=False,
            process_nfo=True,
            process_upper=False,
        )
        for entry in pending
    ]
    await asyncio.gather(*regenerations, return_exceptions=True)
|
||||
|
||||
22
entry.py
22
entry.py
@@ -4,7 +4,7 @@ import sys
|
||||
import uvloop
|
||||
from loguru import logger
|
||||
|
||||
from commands import recheck
|
||||
from commands import recheck, refresh_tags, upper_thumb
|
||||
from models import init_model
|
||||
from processor import cleanup, process
|
||||
from settings import settings
|
||||
@@ -14,16 +14,16 @@ asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
|
||||
|
||||
async def entry() -> None:
|
||||
await init_model()
|
||||
if any("once" in _ for _ in sys.argv):
|
||||
# 单次运行
|
||||
logger.info("Running once...")
|
||||
await process()
|
||||
return
|
||||
if any("recheck" in _ for _ in sys.argv):
|
||||
# 重新检查
|
||||
logger.info("Rechecking...")
|
||||
await recheck()
|
||||
return
|
||||
for command, func in [
|
||||
("once", process),
|
||||
("recheck", recheck),
|
||||
("upper_thumb", upper_thumb),
|
||||
("refresh_tags", refresh_tags),
|
||||
]:
|
||||
if any(command in _ for _ in sys.argv):
|
||||
logger.info("Running {}...", command)
|
||||
await func()
|
||||
return
|
||||
logger.info("Running daemon...")
|
||||
while True:
|
||||
await process()
|
||||
|
||||
11
migrations/models/2_20231204003326_update.py
Normal file
11
migrations/models/2_20231204003326_update.py
Normal file
@@ -0,0 +1,11 @@
|
||||
from tortoise import BaseDBAsyncClient


async def upgrade(db: BaseDBAsyncClient) -> str:
    # aerich migration: add the nullable JSON "tags" column used for
    # per-video tag support. The returned string is executed as raw SQL.
    return """
        ALTER TABLE "favoriteitem" ADD "tags" JSON;"""


async def downgrade(db: BaseDBAsyncClient) -> str:
    # Exact inverse of upgrade(): drop the "tags" column again.
    return """
        ALTER TABLE "favoriteitem" DROP COLUMN "tags";"""
|
||||
28
models.py
28
models.py
@@ -13,6 +13,7 @@ from constants import (
|
||||
MediaType,
|
||||
)
|
||||
from settings import settings
|
||||
from utils import aopen
|
||||
|
||||
|
||||
class FavoriteList(Model):
|
||||
@@ -39,7 +40,31 @@ class Upper(Model):
|
||||
|
||||
@property
|
||||
def thumb_path(self) -> Path:
|
||||
return DEFAULT_THUMB_PATH / f"{self.mid}.jpg"
|
||||
return (
|
||||
DEFAULT_THUMB_PATH / str(self.mid)[0] / f"{self.mid}" / "folder.jpg"
|
||||
)
|
||||
|
||||
    @property
    def meta_path(self) -> Path:
        """Path of this uploader's person.nfo metadata file.

        Sharded by the first character of the mid — presumably to mirror
        Emby's metadata/people/<initial>/<name>/ layout; confirm against
        the Emby mount documented in the README.
        """
        return (
            DEFAULT_THUMB_PATH / str(self.mid)[0] / f"{self.mid}" / "person.nfo"
        )
|
||||
|
||||
    async def save_metadata(self):
        """Write an Emby-style person.nfo for this uploader to meta_path.

        The mid is used for both <title> and <sorttitle>. The leading
        newline of the triple-quoted literal is removed by .strip().
        """
        async with aopen(self.meta_path, "w") as f:
            await f.write(
                f"""
<?xml version="1.0" encoding="utf-8" standalone="yes"?>
<person>
<plot />
<outline />
<lockdata>false</lockdata>
<dateadded>{self.created_at.strftime("%Y-%m-%d %H:%M:%S")}</dateadded>
<title>{self.mid}</title>
<sorttitle>{self.mid}</sorttitle>
</person>
""".strip()
            )
|
||||
|
||||
|
||||
class FavoriteItem(Model):
|
||||
@@ -54,6 +79,7 @@ class FavoriteItem(Model):
|
||||
bvid = fields.CharField(max_length=255)
|
||||
desc = fields.TextField()
|
||||
cover = fields.TextField()
|
||||
tags = fields.JSONField(null=True)
|
||||
favorite_list = fields.ForeignKeyField(
|
||||
"models.FavoriteList", related_name="items"
|
||||
)
|
||||
|
||||
17
nfo.py
17
nfo.py
@@ -2,19 +2,19 @@ import datetime
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
|
||||
from utils import aopen
|
||||
|
||||
|
||||
@dataclass
|
||||
class Actor:
|
||||
name: str
|
||||
role: str
|
||||
thumb: Path
|
||||
|
||||
def to_xml(self) -> str:
|
||||
return f"""
|
||||
<actor>
|
||||
<name>{self.name}</name>
|
||||
<role>{self.role}</role>
|
||||
<thumb>{self.thumb.resolve()}</thumb>
|
||||
</actor>
|
||||
""".strip(
|
||||
"\n"
|
||||
@@ -25,16 +25,22 @@ class Actor:
|
||||
class EpisodeInfo:
|
||||
title: str
|
||||
plot: str
|
||||
tags: list[str]
|
||||
actor: list[Actor]
|
||||
bvid: str
|
||||
aired: datetime.datetime
|
||||
|
||||
def write_nfo(self, path: Path) -> None:
|
||||
with path.open("w", encoding="utf-8") as f:
|
||||
f.write(self.to_xml())
|
||||
    async def write_nfo(self, path: Path) -> None:
        """Asynchronously serialize this episode to the .nfo file at *path*.

        Overwrites any existing file; content comes from to_xml().
        """
        async with aopen(path, "w", encoding="utf-8") as f:
            await f.write(self.to_xml())
|
||||
|
||||
def to_xml(self) -> str:
|
||||
actor = "\n".join(_.to_xml() for _ in self.actor)
|
||||
tags = (
|
||||
"\n".join(f" <genre>{_}</genre>" for _ in self.tags)
|
||||
if isinstance(self.tags, list)
|
||||
else ""
|
||||
)
|
||||
return f"""
|
||||
<?xml version="1.0" encoding="utf-8" standalone="yes"?>
|
||||
<episodedetails>
|
||||
@@ -43,6 +49,7 @@ class EpisodeInfo:
|
||||
<title>{self.title}</title>
|
||||
{actor}
|
||||
<year>{self.aired.year}</year>
|
||||
{tags}
|
||||
<uniqueid type="bilibili">{self.bvid}</uniqueid>
|
||||
<aired>{self.aired.strftime("%Y-%m-%d")}</aired>
|
||||
</episodedetails>
|
||||
|
||||
238
processor.py
238
processor.py
@@ -2,11 +2,8 @@ import asyncio
|
||||
import datetime
|
||||
from asyncio import Semaphore, create_subprocess_exec
|
||||
from asyncio.subprocess import DEVNULL
|
||||
from pathlib import Path
|
||||
|
||||
import aiofiles
|
||||
import httpx
|
||||
from bilibili_api import HEADERS, favorite_list, video
|
||||
from bilibili_api import favorite_list, video
|
||||
from bilibili_api.exceptions import ResponseCodeException
|
||||
from loguru import logger
|
||||
from tortoise import Tortoise
|
||||
@@ -16,8 +13,9 @@ from credential import credential
|
||||
from models import FavoriteItem, FavoriteList, Upper
|
||||
from nfo import Actor, EpisodeInfo
|
||||
from settings import settings
|
||||
from utils import aexists, amakedirs, client, download_content
|
||||
|
||||
client = httpx.AsyncClient(headers=HEADERS)
|
||||
anchor = datetime.date.today()
|
||||
|
||||
|
||||
async def cleanup() -> None:
|
||||
@@ -38,16 +36,6 @@ def concurrent_decorator(concurrency: int) -> callable:
|
||||
return decorator
|
||||
|
||||
|
||||
async def download_content(url: str, path: Path) -> None:
|
||||
async with client.stream("GET", url) as resp, aiofiles.open(
|
||||
path, "wb"
|
||||
) as f:
|
||||
async for chunk in resp.aiter_bytes(40960):
|
||||
if not chunk:
|
||||
return
|
||||
await f.write(chunk)
|
||||
|
||||
|
||||
async def manage_model(medias: list[dict], fav_list: FavoriteList) -> None:
|
||||
uppers = [
|
||||
Upper(
|
||||
@@ -93,16 +81,16 @@ async def manage_model(medias: list[dict], fav_list: FavoriteList) -> None:
|
||||
|
||||
async def process() -> None:
|
||||
global anchor
|
||||
if not await credential.check_valid():
|
||||
logger.error("Credential is invalid, skipped.")
|
||||
return
|
||||
if await credential.check_refresh():
|
||||
try:
|
||||
await credential.refresh()
|
||||
logger.info("Credential refreshed.")
|
||||
except Exception:
|
||||
logger.exception("Failed to refresh credential.")
|
||||
return
|
||||
if (today := datetime.date.today()) > anchor:
|
||||
anchor = today
|
||||
logger.info("Check credential.")
|
||||
if await credential.check_refresh():
|
||||
try:
|
||||
await credential.refresh()
|
||||
logger.info("Credential refreshed.")
|
||||
except Exception:
|
||||
logger.exception("Failed to refresh credential.")
|
||||
return
|
||||
for favorite_id in settings.favorite_ids:
|
||||
if favorite_id not in settings.path_mapper:
|
||||
logger.warning(
|
||||
@@ -168,82 +156,141 @@ async def process_favorite(favorite_id: int) -> None:
|
||||
|
||||
|
||||
@concurrent_decorator(4)
|
||||
async def process_video(fav_item: FavoriteItem) -> None:
|
||||
async def process_video(
|
||||
fav_item: FavoriteItem,
|
||||
process_poster=True,
|
||||
process_video=True,
|
||||
process_nfo=True,
|
||||
process_upper=True,
|
||||
) -> None:
|
||||
logger.info("Start to process video {} {}", fav_item.bvid, fav_item.name)
|
||||
if fav_item.type != MediaType.VIDEO:
|
||||
logger.warning("Media {} is not a video, skipped.", fav_item.name)
|
||||
return
|
||||
v = video.Video(fav_item.bvid, credential=credential)
|
||||
try:
|
||||
if fav_item.video_path.exists():
|
||||
fav_item.downloaded = True
|
||||
await fav_item.save()
|
||||
logger.info(
|
||||
"{} {} already exists, skipped.", fav_item.bvid, fav_item.name
|
||||
)
|
||||
return
|
||||
# 写入 up 主头像
|
||||
if not fav_item.upper.thumb_path.exists():
|
||||
await download_content(
|
||||
fav_item.upper.thumb, fav_item.upper.thumb_path
|
||||
)
|
||||
# 写入 nfo
|
||||
EpisodeInfo(
|
||||
title=fav_item.name,
|
||||
plot=fav_item.desc,
|
||||
actor=[
|
||||
Actor(
|
||||
name=fav_item.upper.mid,
|
||||
role=fav_item.upper.name,
|
||||
thumb=fav_item.upper.thumb_path,
|
||||
if process_upper:
|
||||
# 写入 up 主头像
|
||||
if not all(
|
||||
await asyncio.gather(
|
||||
aexists(fav_item.upper.thumb_path),
|
||||
aexists(fav_item.upper.meta_path),
|
||||
)
|
||||
],
|
||||
bvid=fav_item.bvid,
|
||||
aired=fav_item.ctime,
|
||||
).write_nfo(fav_item.nfo_path)
|
||||
# 写入 poster
|
||||
await download_content(fav_item.cover, fav_item.poster_path)
|
||||
# 开始处理视频内容
|
||||
v = video.Video(fav_item.bvid, credential=credential)
|
||||
detector = video.VideoDownloadURLDataDetecter(
|
||||
await v.get_download_url(page_index=0)
|
||||
)
|
||||
streams = detector.detect_best_streams()
|
||||
if detector.check_flv_stream():
|
||||
await download_content(streams[0].url, fav_item.tmp_video_path)
|
||||
process = await create_subprocess_exec(
|
||||
FFMPEG_COMMAND,
|
||||
"-i",
|
||||
str(fav_item.tmp_video_path),
|
||||
str(fav_item.video_path),
|
||||
stdout=DEVNULL,
|
||||
stderr=DEVNULL,
|
||||
)
|
||||
await process.communicate()
|
||||
fav_item.tmp_video_path.unlink()
|
||||
else:
|
||||
await asyncio.gather(
|
||||
download_content(streams[0].url, fav_item.tmp_video_path),
|
||||
download_content(streams[1].url, fav_item.tmp_audio_path),
|
||||
)
|
||||
process = await create_subprocess_exec(
|
||||
FFMPEG_COMMAND,
|
||||
"-i",
|
||||
str(fav_item.tmp_video_path),
|
||||
"-i",
|
||||
str(fav_item.tmp_audio_path),
|
||||
"-c",
|
||||
"copy",
|
||||
str(fav_item.video_path),
|
||||
stdout=DEVNULL,
|
||||
stderr=DEVNULL,
|
||||
)
|
||||
await process.communicate()
|
||||
fav_item.tmp_video_path.unlink()
|
||||
fav_item.tmp_audio_path.unlink()
|
||||
fav_item.downloaded = True
|
||||
await fav_item.save()
|
||||
):
|
||||
await amakedirs(fav_item.upper.thumb_path.parent, exist_ok=True)
|
||||
await asyncio.gather(
|
||||
fav_item.upper.save_metadata(),
|
||||
download_content(
|
||||
fav_item.upper.thumb, fav_item.upper.thumb_path
|
||||
),
|
||||
return_exceptions=True,
|
||||
)
|
||||
else:
|
||||
logger.info(
|
||||
"Upper {} {} already exists, skipped.",
|
||||
fav_item.upper.mid,
|
||||
fav_item.upper.name,
|
||||
)
|
||||
if process_nfo:
|
||||
if not await aexists(fav_item.nfo_path):
|
||||
if fav_item.tags is None:
|
||||
try:
|
||||
fav_item.tags = [
|
||||
_["tag_name"] for _ in await v.get_tags()
|
||||
]
|
||||
except Exception:
|
||||
logger.exception(
|
||||
"Failed to get tags of video {} {}",
|
||||
fav_item.bvid,
|
||||
fav_item.name,
|
||||
)
|
||||
# 写入 nfo
|
||||
await EpisodeInfo(
|
||||
title=fav_item.name,
|
||||
plot=fav_item.desc,
|
||||
actor=[
|
||||
Actor(
|
||||
name=fav_item.upper.mid,
|
||||
role=fav_item.upper.name,
|
||||
)
|
||||
],
|
||||
tags=fav_item.tags,
|
||||
bvid=fav_item.bvid,
|
||||
aired=fav_item.ctime,
|
||||
).write_nfo(fav_item.nfo_path)
|
||||
else:
|
||||
logger.info(
|
||||
"NFO of {} {} already exists, skipped.",
|
||||
fav_item.bvid,
|
||||
fav_item.name,
|
||||
)
|
||||
if process_poster:
|
||||
# 写入 poster
|
||||
if not await aexists(fav_item.poster_path):
|
||||
await download_content(fav_item.cover, fav_item.poster_path)
|
||||
else:
|
||||
logger.info(
|
||||
"Poster of {} {} already exists, skipped.",
|
||||
fav_item.bvid,
|
||||
fav_item.name,
|
||||
)
|
||||
if process_video:
|
||||
if await aexists(fav_item.video_path):
|
||||
fav_item.downloaded = True
|
||||
logger.info(
|
||||
"Video {} {} already exists, skipped.",
|
||||
fav_item.bvid,
|
||||
fav_item.name,
|
||||
)
|
||||
else:
|
||||
# 开始处理视频内容
|
||||
detector = video.VideoDownloadURLDataDetecter(
|
||||
await v.get_download_url(page_index=0)
|
||||
)
|
||||
streams = detector.detect_best_streams()
|
||||
if detector.check_flv_stream():
|
||||
await download_content(
|
||||
streams[0].url, fav_item.tmp_video_path
|
||||
)
|
||||
process = await create_subprocess_exec(
|
||||
FFMPEG_COMMAND,
|
||||
"-i",
|
||||
str(fav_item.tmp_video_path),
|
||||
str(fav_item.video_path),
|
||||
stdout=DEVNULL,
|
||||
stderr=DEVNULL,
|
||||
)
|
||||
await process.communicate()
|
||||
fav_item.tmp_video_path.unlink()
|
||||
else:
|
||||
await asyncio.gather(
|
||||
download_content(
|
||||
streams[0].url, fav_item.tmp_video_path
|
||||
),
|
||||
download_content(
|
||||
streams[1].url, fav_item.tmp_audio_path
|
||||
),
|
||||
)
|
||||
process = await create_subprocess_exec(
|
||||
FFMPEG_COMMAND,
|
||||
"-i",
|
||||
str(fav_item.tmp_video_path),
|
||||
"-i",
|
||||
str(fav_item.tmp_audio_path),
|
||||
"-c",
|
||||
"copy",
|
||||
str(fav_item.video_path),
|
||||
stdout=DEVNULL,
|
||||
stderr=DEVNULL,
|
||||
)
|
||||
await process.communicate()
|
||||
fav_item.tmp_video_path.unlink()
|
||||
fav_item.tmp_audio_path.unlink()
|
||||
fav_item.downloaded = True
|
||||
logger.info(
|
||||
"{} {} processed successfully.", fav_item.bvid, fav_item.name
|
||||
"{} {} processed successfully.",
|
||||
fav_item.bvid,
|
||||
fav_item.name,
|
||||
)
|
||||
except ResponseCodeException as e:
|
||||
match e.code:
|
||||
@@ -259,7 +306,6 @@ async def process_video(fav_item: FavoriteItem) -> None:
|
||||
e.code,
|
||||
)
|
||||
return
|
||||
await fav_item.save()
|
||||
logger.error(
|
||||
"Video {} {} is not available, marked as {}",
|
||||
fav_item.bvid,
|
||||
@@ -270,3 +316,5 @@ async def process_video(fav_item: FavoriteItem) -> None:
|
||||
logger.exception(
|
||||
"Failed to process video {} {}", fav_item.bvid, fav_item.name
|
||||
)
|
||||
finally:
|
||||
await fav_item.save()
|
||||
|
||||
37
utils.py
Normal file
37
utils.py
Normal file
@@ -0,0 +1,37 @@
|
||||
from pathlib import Path
|
||||
|
||||
import aiofiles
|
||||
import httpx
|
||||
from aiofiles.base import AiofilesContextManager
|
||||
from aiofiles.os import makedirs, remove
|
||||
from aiofiles.ospath import exists
|
||||
from aiofiles.threadpool.text import AsyncTextIOWrapper
|
||||
from bilibili_api import HEADERS
|
||||
|
||||
client = httpx.AsyncClient(headers=HEADERS)
|
||||
|
||||
|
||||
async def download_content(url: str, path: Path) -> None:
    """Stream *url* to *path* in 40 KiB chunks using the module-level client.

    The destination is opened in binary write mode, truncating any existing
    file. The whole response is never buffered in memory.
    """
    async with client.stream("GET", url) as resp, aopen(path, "wb") as f:
        async for chunk in resp.aiter_bytes(40960):
            # Bug fix: the original did `if not chunk: return`, aborting the
            # download on the first empty chunk. httpx may yield empty byte
            # chunks mid-stream (e.g. around decoder flushes), which would
            # silently truncate the file. Skip empty chunks instead.
            if chunk:
                await f.write(chunk)
|
||||
|
||||
|
||||
async def aexists(path: Path) -> bool:
    """Async counterpart of os.path.exists (via aiofiles.ospath)."""
    return await exists(path)
|
||||
|
||||
|
||||
async def amakedirs(path: Path, exist_ok: bool = False) -> None:
    """Async counterpart of os.makedirs; exist_ok has the same semantics."""
    await makedirs(path, exist_ok=exist_ok)
|
||||
|
||||
|
||||
def aopen(
    path: Path, mode: str = "r", **kwargs
) -> AiofilesContextManager[None, None, AsyncTextIOWrapper]:
    """Typed alias for aiofiles.open.

    NOTE: not a coroutine — it returns an async context manager intended for
    ``async with``. The return annotation assumes text mode; binary modes
    ("rb"/"wb", as used by download_content) yield a binary wrapper instead.
    """
    return aiofiles.open(path, mode, **kwargs)
|
||||
|
||||
|
||||
async def aremove(path: Path) -> None:
    """Async counterpart of os.remove; raises FileNotFoundError if absent."""
    await remove(path)
|
||||
Reference in New Issue
Block a user