Compare commits

...

44 Commits

Author SHA1 Message Date
jxxghp
c8d0c14ebc 更新 plex.py 2025-08-11 13:57:03 +08:00
jxxghp
6ac8455c74 fix 2025-08-11 13:30:15 +08:00
jxxghp
143b21631f Merge pull request #4737 from baozaodetudou/nginx 2025-08-11 13:27:23 +08:00
doumao
d760facad8 nginx cache js bug 2025-08-11 13:13:29 +08:00
jxxghp
3a1a4c5cfe 更新 download.py 2025-08-10 22:15:30 +08:00
jxxghp
c3045e2cd4 更新 mtorrent.py 2025-08-10 22:10:11 +08:00
jxxghp
1efb9af7ab 更新 nginx.common.conf 2025-08-10 21:32:53 +08:00
jxxghp
e03471159a 更新 version.py 2025-08-10 18:45:40 +08:00
jxxghp
a92e493742 fix README 2025-08-10 14:01:26 +08:00
jxxghp
225d413ed1 fix README 2025-08-10 13:52:35 +08:00
jxxghp
184e4ba7d5 fix 插件Release安装逻辑 2025-08-10 13:26:22 +08:00
jxxghp
917cae27b1 更新插件release安装逻辑 2025-08-10 13:06:03 +08:00
jxxghp
60e0463051 fix 2025-08-10 12:53:42 +08:00
jxxghp
c15022c7d5 fix:插件通过release安装 2025-08-10 12:45:38 +08:00
jxxghp
2a84e3a606 feat: 插件异步安装 2025-08-10 10:10:30 +08:00
jxxghp
fddbbd5714 feat:插件通过release安装 2025-08-10 10:00:13 +08:00
jxxghp
51b8f7c713 fix #4721 2025-08-10 09:11:44 +08:00
jxxghp
e97c246741 try fix #4716 2025-08-10 09:04:20 +08:00
jxxghp
9a81f55ac0 fix #4510 2025-08-10 08:51:52 +08:00
jxxghp
a38b702acc fix alist 2025-08-10 08:46:29 +08:00
jxxghp
e4e0605e92 更新 metavideo.py 2025-08-08 10:19:21 +08:00
jxxghp
8875a8f12c 更新 nginx.common.conf 2025-08-07 11:42:52 +08:00
jxxghp
4dd1deefa5 Merge pull request #4709 from wikrin/v2 2025-08-07 06:54:24 +08:00
Attente
1f6dc93ea3 fix(transfer): 修复目录监控下意外删除未完成种子的问题
- 如果种子尚未下载完成,则直接返回 False
2025-08-06 23:13:01 +08:00
jxxghp
426e920fff fix log 2025-08-06 16:54:24 +08:00
jxxghp
1f6bbce326 fix:优化重试识别次数限制 2025-08-06 16:48:37 +08:00
jxxghp
41f89a35fa 切换v2 release为最新 2025-08-06 16:36:29 +08:00
jxxghp
099d7874d7 - 修复日志滚动问题 2025-08-06 16:32:54 +08:00
jxxghp
e2367103a1 - 修复日志滚动问题 2025-08-06 16:29:51 +08:00
jxxghp
37f8ba7d72 fix #4705 2025-08-06 16:24:47 +08:00
jxxghp
c20bd84edd fix plex error 2025-08-06 12:21:02 +08:00
jxxghp
b4ee0d2487 Merge remote-tracking branch 'origin/v2' into v2 2025-08-05 20:14:06 +08:00
jxxghp
420fa7645f mask key 2025-08-05 20:14:00 +08:00
jxxghp
5bb1e72760 Update README.md 2025-08-05 19:37:51 +08:00
jxxghp
e2a007b62a Update README.md 2025-08-05 19:37:33 +08:00
jxxghp
210813367f fix #4694 2025-08-04 20:50:06 +08:00
jxxghp
770a50764e 更新 transferhistory.py 2025-08-04 19:39:51 +08:00
jxxghp
e339a22aa4 更新 version.py 2025-08-04 19:04:32 +08:00
jxxghp
913afed378 fix #4700 2025-08-04 12:19:24 +08:00
jxxghp
db3efb4452 fix SiteStatistic 2025-08-04 08:34:31 +08:00
jxxghp
840351acb7 fix Subscribe api 2025-08-04 07:05:23 +08:00
jxxghp
da76a7f299 Merge pull request #4693 from wumode/fix_4691 2025-08-03 15:40:19 +08:00
wumode
cbd999f88d Update app/modules/qbittorrent/qbittorrent.py
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-08-03 14:12:57 +08:00
wumode
2fa8a266c5 fix:#4691 2025-08-03 13:56:58 +08:00
27 changed files with 589 additions and 210 deletions

View File

@@ -92,6 +92,6 @@ jobs:
body: ${{ env.RELEASE_BODY }}
draft: false
prerelease: false
make_latest: false
make_latest: true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -18,17 +18,19 @@
## 主要特性
- 前后端分离基于FastApi + Vue3,前端项目地址:[MoviePilot-Frontend](https://github.com/jxxghp/MoviePilot-Frontend)APIhttp://localhost:3001/docs
- 前后端分离基于FastApi + Vue3
- 聚焦核心需求,简化功能和设置,部分设置项可直接使用默认值。
- 重新设计了用户界面,更加美观易用。
## 安装使用
访问官方Wikihttps://wiki.movie-pilot.org
官方Wikihttps://wiki.movie-pilot.org
## 参与开发
需要 `Python 3.12``Node JS v20.12.1`
API文档https://api.movie-pilot.org
本地运行需要 `Python 3.12``Node JS v20.12.1`
- 克隆主项目 [MoviePilot](https://github.com/jxxghp/MoviePilot)
```shell
@@ -54,6 +56,20 @@ yarn dev
```
- 参考 [插件开发指引](https://wiki.movie-pilot.org/zh/plugindev) 在 `app/plugins` 目录下开发插件代码
## 相关项目
- [MoviePilot-Frontend](https://github.com/jxxghp/MoviePilot-Frontend)
- [MoviePilot-Resources](https://github.com/jxxghp/MoviePilot-Resources)
- [MoviePilot-Plugins](https://github.com/jxxghp/MoviePilot-Plugins)
- [MoviePilot-Server](https://github.com/jxxghp/MoviePilot-Server)
- [MoviePilot-Wiki](https://github.com/jxxghp/MoviePilot-Wiki)
## 免责申明
- 本软件仅供学习交流使用,任何人不得将本软件用于商业用途,任何人不得将本软件用于违法犯罪活动,软件对用户行为不知情,一切责任由使用者承担。
- 本软件代码开源,基于开源代码进行修改,人为去除相关限制导致软件被分发、传播并造成责任事件的,需由代码修改发布者承担全部责任,不建议对用户认证机制进行规避或修改并公开发布。
- 本项目不接受捐赠,没有在任何地方发布捐赠信息页面,软件本身不收费也不提供任何收费相关服务,请仔细辨别避免误导。
## 贡献者
<a href="https://github.com/jxxghp/MoviePilot/graphs/contributors">

View File

@@ -102,7 +102,8 @@ async def search(title: str,
for index, source in enumerate(setting_order):
sort_order[source] = index
result = sorted(result, key=lambda x: sort_order.get(__get_source(x), 4))
return result[(page - 1) * count:page * count]
return result[(page - 1) * count:page * count]
return []
@router.post("/scrape/{storage}", summary="刮削媒体信息", response_model=schemas.Response)

View File

@@ -66,7 +66,7 @@ async def get_web_message(_: schemas.TokenPayload = Depends(verify_token),
获取WEB消息列表
"""
ret_messages = []
messages = Message.async_list_by_page(db, page=page, count=count)
messages = await Message.async_list_by_page(db, page=page, count=count)
for message in messages:
try:
ret_messages.append(message.to_dict())

View File

@@ -5,6 +5,7 @@ from typing import Annotated, Any, List, Optional
import aiofiles
from aiopath import AsyncPath
from fastapi import APIRouter, Depends, Header, HTTPException
from fastapi.concurrency import run_in_threadpool
from starlette import status
from starlette.responses import StreamingResponse
@@ -216,10 +217,10 @@ def reload_plugin(plugin_id: str, _: User = Depends(get_current_active_superuser
@router.get("/install/{plugin_id}", summary="安装插件", response_model=schemas.Response)
def install(plugin_id: str,
repo_url: Optional[str] = "",
force: Optional[bool] = False,
_: User = Depends(get_current_active_superuser)) -> Any:
async def install(plugin_id: str,
repo_url: Optional[str] = "",
force: Optional[bool] = False,
_: User = Depends(get_current_active_superuser_async)) -> Any:
"""
安装插件
"""
@@ -228,11 +229,11 @@ def install(plugin_id: str,
# 首先检查插件是否已经存在,并且是否强制安装,否则只进行安装统计
plugin_helper = PluginHelper()
if not force and plugin_id in PluginManager().get_plugin_ids():
plugin_helper.install_reg(pid=plugin_id)
await plugin_helper.async_install_reg(pid=plugin_id)
else:
# 插件不存在或需要强制安装,下载安装并注册插件
if repo_url:
state, msg = plugin_helper.install(pid=plugin_id, repo_url=repo_url)
state, msg = await plugin_helper.async_install(pid=plugin_id, repo_url=repo_url)
# 安装失败则直接响应
if not state:
return schemas.Response(success=False, message=msg)
@@ -243,14 +244,14 @@ def install(plugin_id: str,
if plugin_id not in install_plugins:
install_plugins.append(plugin_id)
# 保存设置
SystemConfigOper().set(SystemConfigKey.UserInstalledPlugins, install_plugins)
await SystemConfigOper().async_set(SystemConfigKey.UserInstalledPlugins, install_plugins)
# 重新加载插件
reload_plugin(plugin_id)
await run_in_threadpool(reload_plugin, plugin_id)
return schemas.Response(success=True)
@router.get("/remotes", summary="获取插件联邦组件列表", response_model=List[dict])
def remotes(token: str) -> Any:
async def remotes(token: str) -> Any:
"""
获取插件联邦组件列表
"""

View File

@@ -79,9 +79,9 @@ async def create_subscribe(
# 订阅用户
subscribe_in.username = current_user.name
sid, message = await SubscribeChain().async_add(mtype=mtype,
title=title,
exist_ok=True,
**subscribe_in.dict())
title=title,
exist_ok=True,
**subscribe_in.dict())
return schemas.Response(
success=bool(sid), message=message, data={"id": sid}
)
@@ -117,12 +117,13 @@ async def update_subscribe(
subscribe_dict["manual_total_episode"] = 1
# 更新到数据库
await subscribe.async_update(db, subscribe_dict)
# 重新获取更新后的订阅数据
updated_subscribe = await Subscribe.async_get(db, subscribe_in.id)
# 发送订阅调整事件
subscribe = await subscribe.async_get(db, subscribe_in.id)
await eventmanager.async_send_event(EventType.SubscribeModified, {
"subscribe_id": subscribe_in.id,
"old_subscribe_info": old_subscribe_dict,
"subscribe_info": subscribe.to_dict(),
"subscribe_info": updated_subscribe.to_dict() if updated_subscribe else {},
})
return schemas.Response(success=True)
@@ -146,11 +147,13 @@ async def update_subscribe_status(
await subscribe.async_update(db, {
"state": state
})
# 重新获取更新后的订阅数据
updated_subscribe = await Subscribe.async_get(db, subid)
# 发送订阅调整事件
await eventmanager.async_send_event(EventType.SubscribeModified, {
"subscribe_id": subscribe.id,
"subscribe_id": subid,
"old_subscribe_info": old_subscribe_dict,
"subscribe_info": subscribe.to_dict(),
"subscribe_info": updated_subscribe.to_dict() if updated_subscribe else {},
})
return schemas.Response(success=True)
@@ -219,17 +222,21 @@ async def reset_subscribes(
"""
subscribe = await Subscribe.async_get(db, subid)
if subscribe:
# 在更新之前获取旧数据
old_subscribe_dict = subscribe.to_dict()
# 更新订阅
await subscribe.async_update(db, {
"note": [],
"lack_episode": subscribe.total_episode,
"state": "R"
})
# 重新获取更新后的订阅数据
updated_subscribe = await Subscribe.async_get(db, subid)
# 发送订阅调整事件
await eventmanager.async_send_event(EventType.SubscribeModified, {
"subscribe_id": subscribe.id,
"subscribe_id": subid,
"old_subscribe_info": old_subscribe_dict,
"subscribe_info": subscribe.to_dict(),
"subscribe_info": updated_subscribe.to_dict() if updated_subscribe else {},
})
return schemas.Response(success=True)
return schemas.Response(success=False, message="订阅不存在")
@@ -313,11 +320,14 @@ async def delete_subscribe_by_mediaid(
if subscribe:
delete_subscribes.append(subscribe)
for subscribe in delete_subscribes:
await Subscribe.async_delete(db, subscribe.id)
# 在删除之前获取订阅信息
subscribe_info = subscribe.to_dict()
subscribe_id = subscribe.id
await Subscribe.async_delete(db, subscribe_id)
# 发送事件
await eventmanager.async_send_event(EventType.SubscribeDeleted, {
"subscribe_id": subscribe.id,
"subscribe_info": subscribe.to_dict()
"subscribe_id": subscribe_id,
"subscribe_info": subscribe_info
})
return schemas.Response(success=True)
@@ -596,11 +606,13 @@ async def delete_subscribe(
"""
subscribe = await Subscribe.async_get(db, subscribe_id)
if subscribe:
# 在删除之前获取订阅信息
subscribe_info = subscribe.to_dict()
await Subscribe.async_delete(db, subscribe_id)
# 发送事件
await eventmanager.async_send_event(EventType.SubscribeDeleted, {
"subscribe_id": subscribe_id,
"subscribe_info": subscribe.to_dict()
"subscribe_info": subscribe_info
})
# 统计订阅
SubscribeHelper().sub_done_async({

View File

@@ -188,7 +188,8 @@ def get_global_setting(token: str):
# FIXME: 新增敏感配置项时要在此处添加排除项
info = settings.dict(
exclude={"SECRET_KEY", "RESOURCE_SECRET_KEY", "API_TOKEN", "TMDB_API_KEY", "TVDB_API_KEY", "FANART_API_KEY",
"COOKIECLOUD_KEY", "COOKIECLOUD_PASSWORD", "GITHUB_TOKEN", "REPO_GITHUB_TOKEN"}
"COOKIECLOUD_KEY", "COOKIECLOUD_PASSWORD", "GITHUB_TOKEN", "REPO_GITHUB_TOKEN", "U115_APP_ID",
"ALIPAN_APP_ID", "TVDB_V4_API_KEY", "TVDB_V4_API_PIN"}
)
# 追加用户唯一ID和订阅分享管理权限
share_admin = SubscribeHelper().is_admin_user()

View File

@@ -60,6 +60,8 @@ class DownloadChain(ChainBase):
# 是否使用cookie
if not req_params.get('cookie'):
cookie = None
# 代理
proxy = req_params.get('proxy')
# 请求头
if req_params.get('header'):
headers = req_params.get('header')
@@ -70,14 +72,16 @@ class DownloadChain(ChainBase):
res = RequestUtils(
ua=ua,
cookies=cookie,
headers=headers
headers=headers,
proxies=settings.PROXY if proxy else None
).get_res(url, params=req_params.get('params'))
else:
# POST请求
res = RequestUtils(
ua=ua,
cookies=cookie,
headers=headers
headers=headers,
proxies=settings.PROXY if proxy else None
).post_res(url, params=req_params.get('params'))
if not res:
return None

View File

@@ -801,15 +801,27 @@ class SubscribeChain(ChainBase):
for context in contexts:
if global_vars.is_system_stopped:
break
# 如果种子未识别,尝试识别
if not context.media_info or (not context.media_info.tmdb_id
and not context.media_info.douban_id):
# 如果种子未识别且失败次数未超过3次,尝试识别
if (not context.media_info or (not context.media_info.tmdb_id
and not context.media_info.douban_id)) and context.media_recognize_fail_count < 3:
logger.debug(
f'尝试重新识别种子:{context.torrent_info.title},当前失败次数:{context.media_recognize_fail_count}/3')
re_mediainfo = self.recognize_media(meta=context.meta_info)
if re_mediainfo:
# 清理多余信息
re_mediainfo.clear()
# 更新种子缓存
context.media_info = re_mediainfo
# 重置失败次数
context.media_recognize_fail_count = 0
logger.debug(f'种子 {context.torrent_info.title} 重新识别成功')
else:
# 识别失败,增加失败次数
context.media_recognize_fail_count += 1
logger.debug(
f'种子 {context.torrent_info.title} 媒体识别失败,失败次数:{context.media_recognize_fail_count}/3')
elif context.media_recognize_fail_count >= 3:
logger.debug(f'种子 {context.torrent_info.title} 已达到最大识别失败次数(3次),跳过识别')
# 添加已预处理
processed_torrents[domain].append(context)
@@ -912,7 +924,7 @@ class SubscribeChain(ChainBase):
# 如果仍然没有识别到媒体信息,尝试标题匹配
if not torrent_mediainfo or (
not torrent_mediainfo.tmdb_id and not torrent_mediainfo.douban_id):
logger.info(
logger.debug(
f'{torrent_info.site_name} - {torrent_info.title} 重新识别失败,尝试通过标题匹配...')
if torrenthelper.match_torrent(mediainfo=mediainfo,
torrent_meta=torrent_meta,

View File

@@ -56,9 +56,14 @@ class TorrentsChain(ChainBase):
# 读取缓存
if stype == 'spider':
return self.load_cache(self._spider_file) or {}
torrents_cache = self.load_cache(self._spider_file) or {}
else:
return self.load_cache(self._rss_file) or {}
torrents_cache = self.load_cache(self._rss_file) or {}
# 兼容性处理为旧版本的Context对象添加失败次数字段
self._ensure_context_compatibility(torrents_cache)
return torrents_cache
async def async_get_torrents(self, stype: Optional[str] = None) -> Dict[str, List[Context]]:
"""
@@ -71,9 +76,14 @@ class TorrentsChain(ChainBase):
# 异步读取缓存
if stype == 'spider':
return await self.async_load_cache(self._spider_file) or {}
torrents_cache = await self.async_load_cache(self._spider_file) or {}
else:
return await self.async_load_cache(self._rss_file) or {}
torrents_cache = await self.async_load_cache(self._rss_file) or {}
# 兼容性处理为旧版本的Context对象添加失败次数字段
self._ensure_context_compatibility(torrents_cache)
return torrents_cache
def clear_torrents(self):
"""
@@ -274,6 +284,9 @@ class TorrentsChain(ChainBase):
mediainfo.clear()
# 上下文
context = Context(meta_info=meta, media_info=mediainfo, torrent_info=torrent)
# 如果未识别到媒体信息设置初始失败次数为1
if not mediainfo or (not mediainfo.tmdb_id and not mediainfo.douban_id):
context.media_recognize_fail_count = 1
# 添加到缓存
if not torrents_cache.get(domain):
torrents_cache[domain] = [context]
@@ -300,6 +313,21 @@ class TorrentsChain(ChainBase):
return torrents_cache
@staticmethod
def _ensure_context_compatibility(torrents_cache: Dict[str, List[Context]]):
"""
确保Context对象的兼容性为旧版本添加缺失的字段
"""
for domain, contexts in torrents_cache.items():
for context in contexts:
# 如果Context对象没有media_recognize_fail_count字段添加默认值
if not hasattr(context, 'media_recognize_fail_count'):
context.media_recognize_fail_count = 0
# 如果媒体信息未识别,设置初始失败次数
if (not context.media_info or
(not context.media_info.tmdb_id and not context.media_info.douban_id)):
context.media_recognize_fail_count = 1
def __renew_rss_url(self, domain: str, site: dict):
"""
保留原配置生成新的rss地址

View File

@@ -1450,6 +1450,10 @@ class TransferChain(ChainBase, metaclass=Singleton):
if not torrents:
return False
# 未下载完成
if torrents[0].progress < 100:
return False
# 获取种子文件列表
torrent_files = self.torrent_files(download_hash, downloader)
if not torrent_files:

View File

@@ -814,6 +814,8 @@ class Context:
media_info: MediaInfo = None
# 种子信息
torrent_info: TorrentInfo = None
# 媒体识别失败次数
media_recognize_fail_count: int = 0
def to_dict(self):
"""
@@ -822,5 +824,6 @@ class Context:
return {
"meta_info": self.meta_info.to_dict() if self.meta_info else None,
"torrent_info": self.torrent_info.to_dict() if self.torrent_info else None,
"media_info": self.media_info.to_dict() if self.media_info else None
"media_info": self.media_info.to_dict() if self.media_info else None,
"media_recognize_fail_count": self.media_recognize_fail_count
}

View File

@@ -428,8 +428,17 @@ class EventManager(metaclass=Singleton):
if not handlers:
logger.debug(f"No handlers found for broadcast event: {event}")
return
# 为每个处理器提供独立的事件实例,防止某个处理器对 event_data 的修改影响其他处理器
for handler_id, handler in handlers.items():
self.__executor.submit(self.__safe_invoke_handler, handler, event)
# 仅浅拷贝顶层字典,避免不必要的深拷贝开销;这样可以隔离键级别的替换/赋值
if isinstance(event.event_data, dict):
event_data_copy = event.event_data.copy()
else:
event_data_copy = event.event_data
isolated_event = Event(event_type=event.event_type,
event_data=event_data_copy,
priority=event.priority)
self.__executor.submit(self.__safe_invoke_handler, handler, isolated_event)
def __safe_invoke_handler(self, handler: Callable, event: Event):
"""

View File

@@ -200,7 +200,7 @@ class MetaVideo(MetaBase):
name = re.sub(r'%s' % self._name_nostring_re, '', name,
flags=re.IGNORECASE).strip()
name = re.sub(r'\s+', ' ', name)
if name.isdigit() \
if name.isdecimal() \
and int(name) < 1800 \
and not self.year \
and not self.begin_season \

View File

@@ -186,7 +186,7 @@ class TransferHistory(Base):
result = await db.execute(
select(sub_query.c.date, func.count(sub_query.c.id)).group_by(sub_query.c.date)
)
return result.scalars().all()
return result.all()
@classmethod
@db_query

View File

@@ -198,13 +198,17 @@ class SiteOper(DbOper):
lst_date = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
sta = SiteStatistic.get_by_domain(self._db, domain)
if sta:
avg_seconds, note = None, sta.note or {}
# 使用深复制确保 note 是全新的字典对象
note = dict(sta.note) if sta.note else {}
avg_seconds = None
if seconds is not None:
note[lst_date] = seconds or 1
avg_times = len(note.keys())
if avg_times > 10:
note = dict(sorted(note.items(), key=lambda x: x[0], reverse=True)[:10])
avg_seconds = sum([v for v in note.values()]) // avg_times
sta.update(self._db, {
"success": sta.success + 1,
"seconds": avg_seconds or sta.seconds,
@@ -256,13 +260,17 @@ class SiteOper(DbOper):
lst_date = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
sta = await SiteStatistic.async_get_by_domain(self._db, domain)
if sta:
avg_seconds, note = None, sta.note or {}
# 使用深复制确保 note 是全新的字典对象
note = dict(sta.note) if sta.note else {}
avg_seconds = None
if seconds is not None:
note[lst_date] = seconds or 1
avg_times = len(note.keys())
if avg_times > 10:
note = dict(sorted(note.items(), key=lambda x: x[0], reverse=True)[:10])
avg_seconds = sum([v for v in note.values()]) // avg_times
await sta.async_update(self._db, {
"success": sta.success + 1,
"seconds": avg_seconds or sta.seconds,

View File

@@ -5,7 +5,9 @@ import site
import sys
import traceback
from pathlib import Path
from typing import Dict, List, Optional, Tuple, Set
from typing import Dict, List, Optional, Tuple, Set, Callable, Awaitable
import zipfile
import io
import aiofiles
import aioshutil
@@ -234,62 +236,31 @@ class PluginHelper(metaclass=WeakSingleton):
else:
logger.debug(f"{pid} 从 package.{package_version}.json 中找到适用于当前版本的插件")
# 2. 获取插件文件列表(包括 requirements.txt
file_list, msg = self.__get_file_list(pid.lower(), user_repo, package_version)
if not file_list:
return False, msg
# 2. 决定安装方式release 或 文件列表)并执行统一安装流程
meta = self.__get_plugin_meta(pid, repo_url, package_version)
# 是否release打包
is_release = meta.get("release")
# 插件版本号
plugin_version = meta.get("version")
if is_release:
# 使用 插件ID_插件版本号 作为 Release tag
if not plugin_version:
return False, f"未在插件清单中找到 {pid} 的版本号,无法进行 Release 安装"
# 拼接 release_tag
release_tag = f"{pid}_v{plugin_version}"
# 使用 release 进行安装
def prepare_release() -> Tuple[bool, str]:
return self.__install_from_release(
pid.lower(), user_repo, release_tag
)
# 3. 删除旧的插件目录,如果不强制安装则备份
backup_dir = None
if not force_install:
backup_dir = self.__backup_plugin(pid.lower())
self.__remove_old_plugin(pid.lower())
# 4. 查找并安装 requirements.txt 中的依赖,确保插件环境的依赖尽可能完整。依赖安装可能失败且不影响插件安装,目前只记录日志
requirements_file_info = next((f for f in file_list if f.get("name") == "requirements.txt"), None)
if requirements_file_info:
logger.debug(f"{pid} 发现 requirements.txt提前下载并预安装依赖")
success, message = self.__download_and_install_requirements(requirements_file_info,
pid, user_repo)
if not success:
logger.debug(f"{pid} 依赖预安装失败:{message}")
else:
logger.debug(f"{pid} 依赖预安装成功")
# 5. 下载插件的其他文件
logger.info(f"{pid} 准备开始下载插件文件")
success, message = self.__download_files(pid.lower(), file_list, user_repo, package_version, True)
if not success:
logger.error(f"{pid} 下载插件文件失败:{message}")
if backup_dir:
self.__restore_plugin(pid.lower(), backup_dir)
logger.warning(f"{pid} 插件安装失败,已还原备份插件")
else:
self.__remove_old_plugin(pid.lower())
logger.warning(f"{pid} 已清理对应插件目录,请尝试重新安装")
return False, message
return self.__install_flow_sync(pid.lower(), force_install, prepare_release)
else:
logger.info(f"{pid} 下载插件文件成功")
# 如果 release_tag 不存在,说明插件没有发布版本,使用文件列表方式安装
def prepare_filelist() -> Tuple[bool, str]:
return self.__prepare_content_via_filelist_sync(pid.lower(), user_repo, package_version)
# 6. 插件文件安装成功后,再次尝试安装依赖,避免因为遗漏依赖导致的插件运行问题,目前依旧只记录日志
dependencies_exist, success, message = self.__install_dependencies_if_required(pid)
if dependencies_exist:
if not success:
logger.error(f"{pid} 依赖安装失败:{message}")
if backup_dir:
self.__restore_plugin(pid.lower(), backup_dir)
logger.warning(f"{pid} 插件安装失败,已还原备份插件")
else:
self.__remove_old_plugin(pid.lower())
logger.warning(f"{pid} 已清理对应插件目录,请尝试重新安装")
else:
logger.info(f"{pid} 依赖安装成功")
# 插件安装成功后,统计安装信息
self.install_reg(pid)
return True, ""
return self.__install_flow_sync(pid.lower(), force_install, prepare_filelist)
def __get_file_list(self, pid: str, user_repo: str, package_version: Optional[str] = None) -> \
Tuple[Optional[list], Optional[str]]:
@@ -561,6 +532,126 @@ class PluginHelper(metaclass=WeakSingleton):
logger.error(f"[GitHub] 所有策略均请求失败URL: {url},请检查网络连接或 GitHub 配置")
return None
def __get_plugin_meta(self, pid: str, repo_url: str,
package_version: Optional[str]) -> dict:
try:
plugins = (
self.get_plugins(repo_url) if not package_version
else self.get_plugins(repo_url, package_version)
) or {}
meta = plugins.get(pid)
return meta if isinstance(meta, dict) else {}
except Exception as e:
logger.error(f"获取插件 {pid} 元数据失败:{e}")
return {}
def __install_flow_sync(self, pid_lower: str, force_install: bool,
prepare_content: Callable[[], Tuple[bool, str]]) -> Tuple[bool, str]:
"""
同步安装统一流程:备份→清理→准备内容→安装依赖→上报
prepare_content 负责把插件文件放到 app/plugins/{pid}
"""
backup_dir = None
if not force_install:
backup_dir = self.__backup_plugin(pid_lower)
self.__remove_old_plugin(pid_lower)
success, message = prepare_content()
if not success:
logger.error(f"{pid_lower} 准备插件内容失败:{message}")
if backup_dir:
self.__restore_plugin(pid_lower, backup_dir)
logger.warning(f"{pid_lower} 插件安装失败,已还原备份插件")
else:
self.__remove_old_plugin(pid_lower)
logger.warning(f"{pid_lower} 已清理对应插件目录,请尝试重新安装")
return False, message
dependencies_exist, dep_ok, dep_msg = self.__install_dependencies_if_required(pid_lower)
if dependencies_exist and not dep_ok:
logger.error(f"{pid_lower} 依赖安装失败:{dep_msg}")
if backup_dir:
self.__restore_plugin(pid_lower, backup_dir)
logger.warning(f"{pid_lower} 插件安装失败,已还原备份插件")
else:
self.__remove_old_plugin(pid_lower)
logger.warning(f"{pid_lower} 已清理对应插件目录,请尝试重新安装")
return False, dep_msg
self.install_reg(pid_lower)
return True, ""
def __install_from_release(self, pid: str, user_repo: str, release_tag: str) -> Tuple[bool, str]:
"""
通过 GitHub Release 资产文件安装插件。
规范release 中存在名为 "{pid}_v{version}.zip" 的资产zip 根即插件文件;
将其全部解压到 app/plugins/{pid}
"""
# 拼接资产文件名
asset_name = f"{release_tag.lower()}.zip"
release_api = f"https://api.github.com/repos/{user_repo}/releases/tags/{release_tag}"
rel_res = self.__request_with_fallback(
release_api,
headers=settings.REPO_GITHUB_HEADERS(repo=user_repo),
timeout=30,
is_api=True,
)
if rel_res is None or rel_res.status_code != 200:
return False, f"获取 Release 信息失败:{rel_res.status_code if rel_res else '连接失败'}"
try:
rel_json = rel_res.json()
assets = rel_json.get("assets") or []
asset = next((a for a in assets if a.get("name") == asset_name), None)
if not asset:
return False, f"未找到资产文件:{asset_name}"
download_url = asset.get("browser_download_url")
if not download_url:
return False, "资产缺少下载地址"
except Exception as e:
logger.error(f"解析 Release 信息失败:{e}")
return False, f"解析 Release 信息失败:{e}"
res = self.__request_with_fallback(download_url, headers=settings.REPO_GITHUB_HEADERS(repo=user_repo))
if res is None or res.status_code != 200:
return False, f"下载资产失败:{res.status_code if res else '连接失败'}"
try:
with zipfile.ZipFile(io.BytesIO(res.content)) as zf:
namelist = zf.namelist()
if not namelist:
return False, "压缩包内容为空"
# 若所有条目均在同一顶层目录下(如 pid/),则剥离这一层,避免出现双层目录
names_with_slash = [n for n in namelist if '/' in n]
base_prefix = ''
if names_with_slash and len(names_with_slash) == len(namelist):
first_seg = names_with_slash[0].split('/')[0]
if all(n.startswith(first_seg + '/') for n in namelist):
base_prefix = first_seg + '/'
dest_base = Path(settings.ROOT_PATH) / "app" / "plugins" / pid.lower()
wrote_any = False
for name in namelist:
rel_path = name[len(base_prefix):]
if not rel_path:
continue
if rel_path.endswith('/'):
(dest_base / rel_path.rstrip('/')).mkdir(parents=True, exist_ok=True)
continue
dest_path = dest_base / rel_path
dest_path.parent.mkdir(parents=True, exist_ok=True)
with zf.open(name, 'r') as src, open(dest_path, 'wb') as dst:
dst.write(src.read())
wrote_any = True
if not wrote_any:
return False, "压缩包中无可写入文件"
return True, ""
except Exception as e:
logger.error(f"解压 Release 压缩包失败:{e}")
return False, f"解压 Release 压缩包失败:{e}"
def find_missing_dependencies(self) -> List[str]:
"""
收集所有需要安装或更新的依赖项
@@ -1040,8 +1131,7 @@ class PluginHelper(metaclass=WeakSingleton):
return str(backup_dir) if await backup_dir.exists() else None
@staticmethod
async def __async_restore_plugin(pid: str, backup_dir: str):
async def __async_restore_plugin(self, pid: str, backup_dir: str):
"""
异步还原旧插件目录
:param pid: 插件 ID
@@ -1054,7 +1144,7 @@ class PluginHelper(metaclass=WeakSingleton):
backup_path = AsyncPath(backup_dir)
if await backup_path.exists():
await PluginHelper._async_copytree(backup_path, plugin_dir)
await self._async_copytree(src=backup_path, dst=plugin_dir)
logger.debug(f"{pid} 已还原插件目录 {plugin_dir}")
await aioshutil.rmtree(backup_path, ignore_errors=True)
logger.debug(f"{pid} 已删除备份目录 {backup_dir}")
@@ -1291,59 +1381,188 @@ class PluginHelper(metaclass=WeakSingleton):
else:
logger.debug(f"{pid} 从 package.{package_version}.json 中找到适用于当前版本的插件")
# 2. 获取插件文件列表(包括 requirements.txt
file_list, msg = await self.__async_get_file_list(pid.lower(), user_repo, package_version)
if not file_list:
return False, msg
# 2. 统一异步安装流程release 或 文件列表
meta = await self.__async_get_plugin_meta(pid, repo_url, package_version)
# 是否release打包
is_release = meta.get("release")
# 插件版本号
plugin_version = meta.get("version")
if is_release:
# 使用 插件ID_插件版本号 作为 Release tag
if not plugin_version:
return False, f"未在插件清单中找到 {pid} 的版本号,无法进行 Release 安装"
# 拼接 release_tag
release_tag = f"{pid}_v{plugin_version}"
# 使用 release 进行安装
async def prepare_release() -> Tuple[bool, str]:
return await self.__async_install_from_release(
pid.lower(), user_repo, release_tag
)
# 3. 删除旧的插件目录,如果不强制安装则备份
return await self.__install_flow_async(pid.lower(), force_install, prepare_release)
else:
# 如果没有 release_tag则使用文件列表安装方式
async def prepare_filelist() -> Tuple[bool, str]:
return await self.__prepare_content_via_filelist_async(pid.lower(), user_repo, package_version)
return await self.__install_flow_async(pid.lower(), force_install, prepare_filelist)
async def __async_get_plugin_meta(self, pid: str, repo_url: str,
package_version: Optional[str]) -> dict:
try:
plugins = (
await self.async_get_plugins(repo_url) if not package_version
else await self.async_get_plugins(repo_url, package_version)
) or {}
meta = plugins.get(pid)
return meta if isinstance(meta, dict) else {}
except Exception as e:
logger.warn(f"获取插件 {pid} 元数据失败:{e}")
return {}
async def __install_flow_async(self, pid_lower: str, force_install: bool,
prepare_content: Callable[[], Awaitable[Tuple[bool, str]]]) -> Tuple[bool, str]:
"""
异步安装流程,处理插件内容准备、依赖安装和注册
"""
backup_dir = None
if not force_install:
backup_dir = await self.__async_backup_plugin(pid.lower())
backup_dir = await self.__async_backup_plugin(pid_lower)
await self.__async_remove_old_plugin(pid.lower())
await self.__async_remove_old_plugin(pid_lower)
# 4. 查找并安装 requirements.txt 中的依赖,确保插件环境的依赖尽可能完整。依赖安装可能失败且不影响插件安装,目前只记录日志
success, message = await prepare_content()
if not success:
logger.error(f"{pid_lower} 准备插件内容失败:{message}")
if backup_dir:
await self.__async_restore_plugin(pid_lower, backup_dir)
logger.warning(f"{pid_lower} 插件安装失败,已还原备份插件")
else:
await self.__async_remove_old_plugin(pid_lower)
logger.warning(f"{pid_lower} 已清理对应插件目录,请尝试重新安装")
return False, message
dependencies_exist, dep_ok, dep_msg = await self.__async_install_dependencies_if_required(pid_lower)
if dependencies_exist and not dep_ok:
logger.error(f"{pid_lower} 依赖安装失败:{dep_msg}")
if backup_dir:
await self.__async_restore_plugin(pid_lower, backup_dir)
logger.warning(f"{pid_lower} 插件安装失败,已还原备份插件")
else:
await self.__async_remove_old_plugin(pid_lower)
logger.warning(f"{pid_lower} 已清理对应插件目录,请尝试重新安装")
return False, dep_msg
await self.async_install_reg(pid_lower)
return True, ""
def __prepare_content_via_filelist_sync(self, pid_lower: str, user_repo: str,
package_version: Optional[str]) -> Tuple[bool, str]:
"""
同步准备插件内容,通过文件列表获取插件文件和依赖
"""
file_list, msg = self.__get_file_list(pid_lower, user_repo, package_version)
if not file_list:
return False, msg
requirements_file_info = next((f for f in file_list if f.get("name") == "requirements.txt"), None)
if requirements_file_info:
logger.debug(f"{pid} 发现 requirements.txt提前下载并预安装依赖")
success, message = await self.__async_download_and_install_requirements(requirements_file_info,
pid, user_repo)
if not success:
logger.debug(f"{pid} 依赖预安装失败:{message}")
ok, m = self.__download_and_install_requirements(requirements_file_info, pid_lower, user_repo)
if not ok:
logger.debug(f"{pid_lower} 依赖预安装失败:{m}")
else:
logger.debug(f"{pid} 依赖预安装成功")
# 5. 下载插件的其他文件
logger.info(f"{pid} 准备开始下载插件文件")
success, message = await self.__async_download_files(pid.lower(), file_list, user_repo, package_version, True)
if not success:
logger.error(f"{pid} 下载插件文件失败:{message}")
if backup_dir:
await self.__async_restore_plugin(pid.lower(), backup_dir)
logger.warning(f"{pid} 插件安装失败,已还原备份插件")
else:
await self.__async_remove_old_plugin(pid.lower())
logger.warning(f"{pid} 已清理对应插件目录,请尝试重新安装")
return False, message
else:
logger.info(f"{pid} 下载插件文件成功")
# 6. 插件文件安装成功后,再次尝试安装依赖,避免因为遗漏依赖导致的插件运行问题,目前依旧只记录日志
dependencies_exist, success, message = await self.__async_install_dependencies_if_required(pid)
if dependencies_exist:
if not success:
logger.error(f"{pid} 依赖安装失败:{message}")
if backup_dir:
await self.__async_restore_plugin(pid.lower(), backup_dir)
logger.warning(f"{pid} 插件安装失败,已还原备份插件")
else:
await self.__async_remove_old_plugin(pid.lower())
logger.warning(f"{pid} 已清理对应插件目录,请尝试重新安装")
else:
logger.info(f"{pid} 依赖安装成功")
# 插件安装成功后,统计安装信息
await self.async_install_reg(pid)
logger.debug(f"{pid_lower} 依赖预安装成功")
ok, m = self.__download_files(pid_lower, file_list, user_repo, package_version, True)
if not ok:
return False, m
return True, ""
async def __prepare_content_via_filelist_async(self, pid_lower: str, user_repo: str,
package_version: Optional[str]) -> Tuple[bool, str]:
"""
异步准备插件内容,通过文件列表获取插件文件和依赖
"""
file_list, msg = await self.__async_get_file_list(pid_lower, user_repo, package_version)
if not file_list:
return False, msg
requirements_file_info = next((f for f in file_list if f.get("name") == "requirements.txt"), None)
if requirements_file_info:
ok, m = await self.__async_download_and_install_requirements(requirements_file_info, pid_lower, user_repo)
if not ok:
logger.debug(f"{pid_lower} 依赖预安装失败:{m}")
else:
logger.debug(f"{pid_lower} 依赖预安装成功")
ok, m = await self.__async_download_files(pid_lower, file_list, user_repo, package_version, True)
if not ok:
return False, m
return True, ""
async def __async_install_from_release(self, pid: str, user_repo: str, release_tag: str) -> Tuple[bool, str]:
    """
    Install a plugin from a GitHub Release asset (async).

    Convention: the release carries an asset named "{release_tag}.zip"
    (i.e. "{pid}_v{version}.zip"); the zip root holds the plugin files,
    which are all extracted into app/plugins/{pid}.

    Security: entry names come from an untrusted archive, so absolute
    paths, backslash-separated names and ".." segments are rejected to
    prevent zip-slip path traversal outside the plugin directory.

    :param pid: plugin identifier
    :param user_repo: "owner/repo" of the plugin repository
    :param release_tag: tag of the release whose asset is downloaded
    :return: (success, error message)
    """
    # Asset file name is derived from the (lower-cased) release tag
    asset_name = f"{release_tag.lower()}.zip"
    release_api = f"https://api.github.com/repos/{user_repo}/releases/tags/{release_tag}"
    rel_res = await self.__async_request_with_fallback(
        release_api,
        headers=settings.REPO_GITHUB_HEADERS(repo=user_repo),
        timeout=30,
        is_api=True,
    )
    if rel_res is None or rel_res.status_code != 200:
        return False, f"获取 Release 信息失败:{rel_res.status_code if rel_res else '连接失败'}"
    try:
        rel_json = rel_res.json()
        assets = rel_json.get("assets") or []
        asset = next((a for a in assets if a.get("name") == asset_name), None)
        if not asset:
            return False, f"未找到资产文件:{asset_name}"
        download_url = asset.get("browser_download_url")
        if not download_url:
            return False, "资产缺少下载地址"
    except Exception as e:
        logger.error(f"解析 Release 信息失败:{e}")
        return False, f"解析 Release 信息失败:{e}"
    # Download the asset binary
    res = await self.__async_request_with_fallback(download_url, headers=settings.REPO_GITHUB_HEADERS(repo=user_repo))
    if res is None or res.status_code != 200:
        return False, f"下载资产失败:{res.status_code if res else '连接失败'}"
    try:
        with zipfile.ZipFile(io.BytesIO(res.content)) as zf:
            namelist = zf.namelist()
            if not namelist:
                return False, "压缩包内容为空"
            # If every entry lives under one common top-level folder,
            # strip that folder so the zip root maps to the plugin root
            names_with_slash = [n for n in namelist if '/' in n]
            base_prefix = ''
            if names_with_slash and len(names_with_slash) == len(namelist):
                first_seg = names_with_slash[0].split('/')[0]
                if all(n.startswith(first_seg + '/') for n in namelist):
                    base_prefix = first_seg + '/'
            dest_base = AsyncPath(settings.ROOT_PATH) / "app" / "plugins" / pid.lower()
            wrote_any = False
            for name in namelist:
                rel_path = name[len(base_prefix):]
                if not rel_path:
                    continue
                # Zip-slip guard: skip absolute paths, backslash separators
                # and any ".." segment so no file escapes dest_base
                if rel_path.startswith('/') or '\\' in rel_path \
                        or any(seg == '..' for seg in rel_path.split('/')):
                    logger.warning(f"跳过不安全的压缩包条目:{name}")
                    continue
                if rel_path.endswith('/'):
                    # Directory entry: just ensure it exists
                    await (dest_base / rel_path.rstrip('/')).mkdir(parents=True, exist_ok=True)
                    continue
                dest_path = dest_base / rel_path
                await dest_path.parent.mkdir(parents=True, exist_ok=True)
                # NOTE(review): zipfile reads are blocking; entries are
                # presumably small plugin files, so this is acceptable here
                with zf.open(name, 'r') as src:
                    data = src.read()
                async with aiofiles.open(dest_path, 'wb') as dst:
                    await dst.write(data)
                wrote_any = True
            if not wrote_any:
                return False, "压缩包中无可写入文件"
            return True, ""
    except Exception as e:
        logger.error(f"解压 Release 压缩包失败:{e}")
        return False, f"解压 Release 压缩包失败:{e}"

View File

@@ -6,6 +6,7 @@ import threading
import time
from concurrent.futures import ThreadPoolExecutor
from datetime import datetime
from logging.handlers import RotatingFileHandler
from pathlib import Path
from typing import Dict, Any, Optional
@@ -117,10 +118,11 @@ class LogEntry:
class NonBlockingFileHandler:
"""
非阻塞文件处理器 - 透明地处理协程环境中的文件写入
非阻塞文件处理器 - 使用RotatingFileHandler实现日志滚动
"""
_instance = None
_lock = threading.Lock()
_rotating_handlers = {}
def __new__(cls):
if cls._instance is None:
@@ -137,14 +139,36 @@ class NonBlockingFileHandler:
self._write_queue = queue.Queue(maxsize=log_settings.ASYNC_FILE_QUEUE_SIZE)
self._executor = ThreadPoolExecutor(max_workers=log_settings.ASYNC_FILE_WORKERS,
thread_name_prefix="LogWriter")
self._batch_buffer = {}
self._last_flush = {}
self._running = True
# 启动后台写入线程
self._write_thread = threading.Thread(target=self._batch_writer, daemon=True)
self._write_thread.start()
def _get_rotating_handler(self, file_path: Path) -> RotatingFileHandler:
"""
获取或创建RotatingFileHandler实例
"""
if file_path not in self._rotating_handlers:
# 确保目录存在
file_path.parent.mkdir(parents=True, exist_ok=True)
# 创建RotatingFileHandler
handler = RotatingFileHandler(
filename=str(file_path),
maxBytes=log_settings.LOG_MAX_FILE_SIZE_BYTES,
backupCount=log_settings.LOG_BACKUP_COUNT,
encoding='utf-8'
)
# 设置格式化器
formatter = logging.Formatter(log_settings.LOG_FILE_FORMAT)
handler.setFormatter(formatter)
self._rotating_handlers[file_path] = handler
return self._rotating_handlers[file_path]
def write_log(self, level: str, message: str, file_path: Path):
"""
写入日志 - 自动检测协程环境并使用合适的方式
@@ -186,16 +210,20 @@ class NonBlockingFileHandler:
同步写入日志
"""
try:
# 确保目录存在
entry.file_path.parent.mkdir(parents=True, exist_ok=True)
# 获取RotatingFileHandler实例
handler = NonBlockingFileHandler()._get_rotating_handler(entry.file_path)
# 格式化时间戳
timestamp = entry.timestamp.strftime('%Y-%m-%d %H:%M:%S,') + f"{entry.timestamp.microsecond // 1000:03d}"
line = f"{entry.level.upper()}{timestamp} - {entry.message}\n"
# 写入文件
with open(entry.file_path, 'a', encoding='utf-8') as f:
f.write(line)
# 使用RotatingFileHandler的emit方法只传递原始消息
handler.emit(logging.LogRecord(
name='',
level=getattr(logging, entry.level.upper(), logging.INFO),
pathname='',
lineno=0,
msg=entry.message,
args=(),
exc_info=None,
created=entry.timestamp.timestamp()
))
except Exception as e:
# 如果文件写入失败,至少输出到控制台
print(f"日志写入失败 {entry.file_path}: {e}")
@@ -240,16 +268,22 @@ class NonBlockingFileHandler:
# 批量写入每个文件
for file_path, entries in file_groups.items():
try:
# 确保目录存在
file_path.parent.mkdir(parents=True, exist_ok=True)
# 获取RotatingFileHandler
handler = self._get_rotating_handler(file_path)
# 批量写入
with open(file_path, 'a', encoding='utf-8') as f:
for entry in entries:
timestamp = entry.timestamp.strftime(
'%Y-%m-%d %H:%M:%S,') + f"{entry.timestamp.microsecond // 1000:03d}"
line = f"{entry.level.upper()}{timestamp} - {entry.message}\n"
f.write(line)
for entry in entries:
# 使用RotatingFileHandler的emit方法只传递原始消息
handler.emit(logging.LogRecord(
name='',
level=getattr(logging, entry.level.upper(), logging.INFO),
pathname='',
lineno=0,
msg=entry.message,
args=(),
exc_info=None,
created=entry.timestamp.timestamp()
))
except Exception as e:
print(f"批量写入失败 {file_path}: {e}")
# 回退到逐个写入
@@ -266,6 +300,9 @@ class NonBlockingFileHandler:
if self._executor:
self._executor.shutdown(wait=True)
# 清理缓存
self._rotating_handlers.clear()
class LoggerManager:
"""

View File

@@ -65,9 +65,8 @@ class Alist(StorageBase, metaclass=WeakSingleton):
如果设置永久令牌则返回永久令牌
否则使用账号密码生成临时令牌
"""
return self.__generate_token
return self.__generate_token()
@property
@cached(maxsize=1, ttl=60 * 60 * 24 * 2 - 60 * 5, skip_empty=True)
def __generate_token(self) -> str:
"""
@@ -129,7 +128,7 @@ class Alist(StorageBase, metaclass=WeakSingleton):
"""
检查存储是否可用
"""
return True if self.__generate_token else False
return True if self.__generate_token() else False
def list(
self,

View File

@@ -251,6 +251,7 @@ class MTorrentSpider:
'Accept': 'application/json, text/plain, */*',
'x-api-key': self._apikey
},
'proxy': True if self._proxy else False,
'result': 'data'
}
# base64编码

View File

@@ -170,9 +170,9 @@ class Plex:
sections = self._plex.library.sections()
movie_count = tv_count = episode_count = 0
# 媒体库白名单
allow_library = [lib.id for lib in self.get_librarys(hidden=True)]
allow_library = [str(lib.id) for lib in self.get_librarys(hidden=True)]
for sec in sections:
if sec.key not in allow_library:
if str(sec.key) not in allow_library:
continue
if sec.type == "movie":
movie_count += sec.totalSize
@@ -379,7 +379,10 @@ class Plex:
file_path = item.target_path
lib_key, path = self.__find_librarie(file_path, self._libraries)
# 如果存在同一剧集的多集,key(path)相同会合并
result_dict[path.as_posix()] = lib_key
if path:
result_dict[path.as_posix()] = lib_key
else:
result_dict[""] = lib_key
if "" in result_dict:
# 如果有匹配失败的,刷新整个库
self._plex.library.update()

View File

@@ -23,6 +23,7 @@ class Qbittorrent:
"""
若不设置参数,则创建配置文件设置的下载器
"""
self.qbc = None
if host and port:
self._host, self._port = host, port
elif host:
@@ -97,7 +98,7 @@ class Qbittorrent:
if tags:
results = []
if not isinstance(tags, list):
tags = [tags]
tags = tags.split(',')
try:
for torrent in torrents:
torrent_tags = [str(tag).strip() for tag in torrent.get("tags").split(',')]

View File

@@ -657,6 +657,17 @@ class TheMovieDbModule(_ModuleBase):
return [MediaInfo(tmdb_info=info) for info in results]
return []
async def async_search_collections(self, name: str) -> Optional[List[MediaInfo]]:
"""
异步搜索集合信息
"""
if not name:
return []
results = await self.tmdb.async_search_collections(name)
if results:
return [MediaInfo(tmdb_info=info) for info in results]
return []
def tmdb_collection(self, collection_id: int) -> Optional[List[MediaInfo]]:
"""
根据合集ID查询集合

View File

@@ -139,7 +139,7 @@ class TransmissionModule(_ModuleBase, _DownloaderBase[Transmission]):
if label:
labels = label.split(',')
elif settings.TORRENT_TAG:
labels = [settings.TORRENT_TAG]
labels = settings.TORRENT_TAG.split(',')
else:
labels = None
# 添加任务
@@ -253,7 +253,8 @@ class TransmissionModule(_ModuleBase, _DownloaderBase[Transmission]):
path=Path(torrent.download_dir) / torrent.name,
hash=torrent.hashString,
size=torrent.total_size,
tags=",".join(torrent.labels or [])
tags=",".join(torrent.labels or []),
progress=torrent.progress
))
finally:
torrents.clear()

View File

@@ -24,6 +24,7 @@ class Transmission:
"""
若不设置参数,则创建配置文件设置的下载器
"""
self.trc = None
if host and port:
self._protocol, self._host, self._port = kwargs.get("protocol", "http"), host, port
elif host:
@@ -91,7 +92,7 @@ class Transmission:
if status and not isinstance(status, list):
status = [status]
if tags and not isinstance(tags, list):
tags = [tags]
tags = tags.split(',')
ret_torrents = []
try:
for torrent in torrents:

View File

@@ -9,35 +9,6 @@ location / {
try_files $uri $uri/ /index.html;
}
# 图片类静态资源
location ~* \.(png|jpg|jpeg|gif|ico|svg)$ {
expires 1y;
add_header Cache-Control "public, immutable";
}
# assets目录
location /assets {
expires 1y;
add_header Cache-Control "public, immutable";
}
# 站点图标
location /api/v1/site/icon/ {
# 站点图标缓存
proxy_cache my_cache;
# 缓存响应码为200和302的请求1小时
proxy_cache_valid 200 302 1h;
# 缓存其他响应码的请求5分钟
proxy_cache_valid any 5m;
# 缓存键的生成规则
proxy_cache_key "$scheme$request_method$host$request_uri";
proxy_cache_use_stale error timeout updating http_500 http_502 http_503 http_504;
# 向后端API转发请求
proxy_pass http://backend_api;
}
# 本地CookieCloud
location /cookiecloud {
proxy_pass http://backend_api;
@@ -97,4 +68,40 @@ location /api {
# 超时设置
proxy_read_timeout 600s;
}
# 图片类静态资源
location ~* \.(png|jpg|jpeg|gif|ico|svg)$ {
expires 1y;
add_header Cache-Control "public, immutable";
}
# JS 和 CSS 静态资源缓存(排除 /api/v1 路径)
location ~* ^/(?!api/v1).*\.(js|css)$ {
try_files $uri =404;
expires 30d;
add_header Cache-Control "public";
add_header Vary Accept-Encoding;
}
# assets目录
location /assets {
expires 1y;
add_header Cache-Control "public, immutable";
}
# 站点图标
location /api/v1/site/icon/ {
# 站点图标缓存
proxy_cache my_cache;
# 缓存响应码为200和302的请求1小时
proxy_cache_valid 200 302 1h;
# 缓存其他响应码的请求5分钟
proxy_cache_valid any 5m;
# 缓存键的生成规则
proxy_cache_key "$scheme$request_method$host$request_uri";
proxy_cache_use_stale error timeout updating http_500 http_502 http_503 http_504;
# 向后端API转发请求
proxy_pass http://backend_api;
}

View File

@@ -1,2 +1,2 @@
APP_VERSION = 'v2.6.9'
FRONTEND_VERSION = 'v2.6.9'
APP_VERSION = 'v2.7.0'
FRONTEND_VERSION = 'v2.7.0'