Compare commits

...

71 Commits
v2.7.6 ... v1

Author SHA1 Message Date
jxxghp
1336b2136d Merge pull request #4340 from jtcymc/main 2025-05-25 07:59:41 +08:00
shaw
b20e21e700 fix(SearchChain): with 关闭线程池
- 使用 with 语句管理 ThreadPoolExecutor,确保线程池正确关闭
2025-05-25 00:50:34 +08:00
jxxghp
c27ab4a4c7 v1.9.19
- 默认关闭自动升级
2025-05-12 16:41:57 +08:00
jxxghp
d9e6532325 更新 version.py 2025-04-10 14:55:24 +08:00
jxxghp
049f16ba01 Merge pull request #4130 from cddjr/fix_v1_mteam 2025-04-10 14:36:21 +08:00
景大侠
6541458326 backport: 适配馒头API变动 2025-04-10 14:18:04 +08:00
jxxghp
9f2912426b Merge pull request #2833 from wikrin/main 2024-10-10 22:49:12 +08:00
Attente
fde33d267a fix: 修正重复的特殊字符
将重复的特殊字符 `—`[U+2014](https://symbl.cc/cn/2014/) 修改为 `―`[U+2015](https://symbl.cc/cn/2015/)
2024-10-10 22:23:43 +08:00
jxxghp
ef7f0afa37 v1.9.17
- 修复115扫码登录问题
- 索引站点新增支持 `PTLGS`
2024-09-18 17:52:40 +08:00
jxxghp
bea77a8243 fix 115 2024-09-18 13:39:39 +08:00
jxxghp
b984b83870 v1.9.16
- 修复了有些情况下新增目录类型为全部时不生效的问题
2024-09-08 13:11:59 +08:00
jxxghp
2153ad48db v1.9.15
- 修复部分通知消息查看详细链接错误的问题
- 修复了麒麟无法索引综艺的问题
- 修复了插件无升级提示图标的问题
2024-08-28 15:01:46 +08:00
jxxghp
c9c43fde74 Merge pull request #2638 from Linvery/main 2024-08-13 12:41:51 +08:00
Linvery
e2c9742f64 fix: 解决推送消息错误的url路径 2024-08-13 12:40:33 +08:00
jxxghp
3d459a40f7 - 仅调整了插件页面的UI 2024-08-13 11:46:01 +08:00
jxxghp
5675cd5b11 v1.9.13
- 修复已知问题
2024-07-30 06:36:30 +08:00
jxxghp
74a4d0bd66 Merge pull request #2614 from InfinityPacer/main 2024-07-30 06:33:00 +08:00
InfinityPacer
2b8c313019 refactor(event): 事件处理调整为深复制,避免多线程环境下数据异常 2024-07-29 23:18:58 +08:00
jxxghp
62fb6b80a3 Merge pull request #2612 from audichuang/main 2024-07-28 18:48:06 +08:00
audichuang
eea86528d8 Modifying the Bilingual Subtitle Matching Specification 2024-07-28 18:07:28 +08:00
jxxghp
84e6abb659 Merge pull request #2600 from InfinityPacer/main 2024-07-23 19:45:18 +08:00
InfinityPacer
da2c755b6d fix(Plugin): 重置插件时初始化调整为reload,以保留默认配置 2024-07-23 19:41:29 +08:00
jxxghp
51f39be9bc Merge pull request #2590 from Akimio521/main 2024-07-20 20:15:04 +08:00
Akimio521
21b762e75c perfect(releasegroup):完善anime字幕组 2024-07-20 20:05:05 +08:00
jxxghp
54095074b6 v1.9.12
- 新增认证站点:`ROUSI`
- 修复Telegram部分消息格式异常问题
2024-07-16 08:07:24 +08:00
jxxghp
33525730b5 fix #2515 2024-07-16 08:04:48 +08:00
jxxghp
71260f04b5 Merge pull request #2560 from InfinityPacer/main 2024-07-12 17:03:55 +08:00
InfinityPacer
e2acec321d fix tips 2024-07-12 16:48:44 +08:00
InfinityPacer
74a462a09f fix SitesHelper import tips 2024-07-12 16:30:06 +08:00
jxxghp
ad9e1a5da6 Merge pull request #2552 from BrettDean/main 2024-07-11 21:26:04 +08:00
Dean
d90e3c29a5 优化微信文本消息发送:支持长文本分块发送 2024-07-11 20:30:09 +08:00
jxxghp
19165eff75 Merge pull request #2537 from InfinityPacer/main 2024-07-09 11:01:53 +08:00
jxxghp
52d0703812 v1.9.11
- 支持环境变量配置DOH域名和DNS服务器
- 问题修复
2024-07-09 08:09:58 +08:00
InfinityPacer
1431a5e82a fix #2518 移除不必要的debug日志 2024-07-09 01:40:37 +08:00
jxxghp
23fe643526 Merge pull request #2534 from InfinityPacer/main 2024-07-08 12:23:16 +08:00
jxxghp
545b3c0482 Merge pull request #2527 from s0urcelab/main 2024-07-08 12:22:50 +08:00
InfinityPacer
f102119eef fix #2526 Backdrop优先调整为取art 2024-07-07 23:03:34 +08:00
s0urce
9bb3d707c9 feat: history query add title field 2024-07-07 18:27:06 +08:00
jxxghp
b892ef50dc Merge pull request #2526 from InfinityPacer/main 2024-07-07 16:49:11 +08:00
InfinityPacer
41e2907168 fix jxxghp/MoviePilot-Plugins#38 2024-07-07 16:32:37 +08:00
jxxghp
14e28ed693 Merge pull request #2518 from InfinityPacer/main 2024-07-07 08:58:39 +08:00
InfinityPacer
79393c21ff feat: 支持插件进行私钥认证 2024-07-06 20:03:49 +08:00
InfinityPacer
cafa4d217c feat: 增加指定的仓库Github token 2024-07-06 16:07:46 +08:00
jxxghp
2b9e69b112 Merge pull request #2515 from BrettDean/main 2024-07-06 07:50:59 +08:00
Dean
3ffcea70a7 Fixed parsing of Telegram entities 2024-07-06 01:44:51 +08:00
jxxghp
ffc72ba6fe fix #2508 2024-07-05 17:03:00 +08:00
jxxghp
848becd946 Merge pull request #2506 from Akimio521/main 2024-07-05 14:44:50 +08:00
Akimio521
71fe96d7f9 feat: 添加 DOH 解析服务器列表至配置文件实现自定义 DOH 服务器 2024-07-05 13:55:48 +08:00
jxxghp
35c7238ede Merge pull request #2503 from Akimio521/main 2024-07-05 11:31:21 +08:00
Akimio521
3578204508 style 2024-07-05 10:52:39 +08:00
Akimio521
c11cf17f62 style:app.core.config.settings 2024-07-05 10:34:57 +08:00
Akimio521
5a59652684 feat:将使用 DOH 域名解析的域名添加至 app.core.config.settings 2024-07-05 09:31:41 +08:00
jxxghp
7f5f31f143 Merge pull request #2484 from InfinityPacer/main 2024-07-02 06:12:13 +08:00
InfinityPacer
dc1cee80b1 fix plugin install and reg 2024-07-02 01:11:42 +08:00
jxxghp
92cb066748 更新 version.py 2024-07-01 21:46:07 +08:00
jxxghp
6c8ef4122b fix e5ec02e043 2024-07-01 12:23:02 +08:00
jxxghp
971b02ac8c - 重新兼容了v1.9.1之前的版本直接升级
- 索引站点新增支持`HDVBits`
- 自定义重命名新增季年份`season_year`占位符
- 修复了普通用户搜索越权问题
2024-07-01 10:46:29 +08:00
jxxghp
d4a9643f47 Merge pull request #2463 from InfinityPacer/main
处理链run_module支持raise_exception
2024-07-01 10:33:55 +08:00
InfinityPacer
e56d31fedc fix exception 2024-06-30 11:50:26 +08:00
InfinityPacer
b9d91c5cd7 feat: DoubanModule触发限流时支持立即抛出限流异常 2024-06-30 11:48:29 +08:00
InfinityPacer
57cdb57331 feat: retry支持立即抛出异常 2024-06-30 11:47:30 +08:00
InfinityPacer
0f7a7ef44f feat: 添加ImmediateException 2024-06-30 11:47:00 +08:00
InfinityPacer
6267b3f670 feat: run_module支持raise_exception 2024-06-30 11:41:00 +08:00
jxxghp
82f77b4729 Merge pull request #2456 from AisukaYuki/main 2024-06-30 09:09:36 +08:00
jxxghp
58da0ebb4f Merge pull request #2460 from thsrite/main 2024-06-30 09:08:35 +08:00
thsrite
7a43e43478 fix 删除文件未删除thumb.jpg 2024-06-29 20:12:26 +08:00
AisukaYuki
e5ec02e043 add 自定义重命名新增季年份season_year 2024-06-29 13:50:40 +08:00
jxxghp
2944c343a8 Merge pull request #2432 from InfinityPacer/main 2024-06-26 18:21:00 +08:00
InfinityPacer
940cc566c8 fix douban rate_limit tips 2024-06-26 18:17:31 +08:00
jxxghp
db7b2cdcac fix error 2024-06-26 17:42:08 +08:00
jxxghp
8111cf5dc8 - 站点索引及用户认证新增支持海胆之家 2024-06-26 16:18:14 +08:00
57 changed files with 1276 additions and 164 deletions

View File

@@ -1,6 +1,11 @@
name: MoviePilot Builder
on:
workflow_dispatch:
push:
branches:
- main
paths:
- version.py
jobs:
Docker-build:

3
.gitignore vendored
View File

@@ -16,4 +16,5 @@ config/sites/**
*.pyc
*.log
.vscode
venv
venv
.DS_Store

View File

@@ -11,7 +11,7 @@ ENV LANG="C.UTF-8" \
PORT=3001 \
NGINX_PORT=3000 \
PROXY_HOST="" \
MOVIEPILOT_AUTO_UPDATE=release \
MOVIEPILOT_AUTO_UPDATE=false \
AUTH_SITE="iyuu" \
IYUU_SIGN=""
WORKDIR "/app"

View File

@@ -108,13 +108,19 @@ def install(plugin_id: str,
"""
# 已安装插件
install_plugins = SystemConfigOper().get(SystemConfigKey.UserInstalledPlugins) or []
# 如果是非本地插件,或者强制安装时,则需要下载安装
if repo_url and (force or plugin_id not in PluginManager().get_plugin_ids()):
# 下载安装
state, msg = PluginHelper().install(pid=plugin_id, repo_url=repo_url)
if not state:
# 安装失败
return schemas.Response(success=False, message=msg)
# 首先检查插件是否已经存在,并且是否强制安装,否则只进行安装统计
if not force and plugin_id in PluginManager().get_plugin_ids():
PluginHelper().install_reg(pid=plugin_id)
else:
# 插件不存在或需要强制安装,下载安装并注册插件
if repo_url:
state, msg = PluginHelper().install(pid=plugin_id, repo_url=repo_url)
# 安装失败则直接响应
if not state:
return schemas.Response(success=False, message=msg)
else:
# repo_url 为空时,也直接响应
return schemas.Response(success=False, message="没有传入仓库地址,无法正确安装插件,请检查配置")
# 安装插件
if plugin_id not in install_plugins:
install_plugins.append(plugin_id)
@@ -186,10 +192,7 @@ def reset_plugin(plugin_id: str, _: schemas.TokenPayload = Depends(verify_token)
# 删除插件所有数据
PluginManager().delete_plugin_data(plugin_id)
# 重新生效插件
PluginManager().init_plugin(plugin_id, {
"enabled": False,
"enable": False
})
PluginManager().reload_plugin(plugin_id)
# 注册插件服务
Scheduler().update_plugin_job(plugin_id)
# 注册插件API

View File

@@ -10,8 +10,7 @@ from ruamel.yaml import CommentedMap
from transmission_rpc import File
from app.core.config import settings
from app.core.context import Context
from app.core.context import MediaInfo, TorrentInfo
from app.core.context import Context, MediaInfo, TorrentInfo
from app.core.event import EventManager
from app.core.meta import MetaBase
from app.core.module import ModuleManager
@@ -79,6 +78,7 @@ class ChainBase(metaclass=ABCMeta):
def run_module(self, method: str, *args, **kwargs) -> Any:
"""
运行包含该方法的所有模块,然后返回结果
当kwargs包含命名参数raise_exception时如模块方法抛出异常且raise_exception为True则同步抛出异常
"""
def is_result_empty(ret):
@@ -117,6 +117,8 @@ class ChainBase(metaclass=ABCMeta):
# 中止继续执行
break
except Exception as err:
if kwargs.get("raise_exception"):
raise
logger.error(
f"运行模块 {module_id}.{method} 出错:{str(err)}\n{traceback.format_exc()}")
self.messagehelper.put(title=f"{module_name}发生了错误",
@@ -166,7 +168,8 @@ class ChainBase(metaclass=ABCMeta):
tmdbid=tmdbid, doubanid=doubanid, bangumiid=bangumiid, cache=cache)
def match_doubaninfo(self, name: str, imdbid: str = None,
mtype: MediaType = None, year: str = None, season: int = None) -> Optional[dict]:
mtype: MediaType = None, year: str = None, season: int = None,
raise_exception: bool = False) -> Optional[dict]:
"""
搜索和匹配豆瓣信息
:param name: 标题
@@ -174,9 +177,10 @@ class ChainBase(metaclass=ABCMeta):
:param mtype: 类型
:param year: 年份
:param season: 季
:param raise_exception: 触发速率限制时是否抛出异常
"""
return self.run_module("match_doubaninfo", name=name, imdbid=imdbid,
mtype=mtype, year=year, season=season)
mtype=mtype, year=year, season=season, raise_exception=raise_exception)
def match_tmdbinfo(self, name: str, mtype: MediaType = None,
year: str = None, season: int = None) -> Optional[dict]:
@@ -214,14 +218,15 @@ class ChainBase(metaclass=ABCMeta):
image_prefix=image_prefix, image_type=image_type,
season=season, episode=episode)
def douban_info(self, doubanid: str, mtype: MediaType = None) -> Optional[dict]:
def douban_info(self, doubanid: str, mtype: MediaType = None, raise_exception: bool = False) -> Optional[dict]:
"""
获取豆瓣信息
:param doubanid: 豆瓣ID
:param mtype: 媒体类型
:return: 豆瓣信息
:param raise_exception: 触发速率限制时是否抛出异常
"""
return self.run_module("douban_info", doubanid=doubanid, mtype=mtype)
return self.run_module("douban_info", doubanid=doubanid, mtype=mtype, raise_exception=raise_exception)
def tvdb_info(self, tvdbid: int) -> Optional[dict]:
"""

View File

@@ -76,6 +76,8 @@ class DownloadChain(ChainBase):
msg_text = f"{msg_text}\n促销:{torrent.volume_factor}"
if torrent.hit_and_run:
msg_text = f"{msg_text}\nHit&Run"
if torrent.labels:
msg_text = f"{msg_text}\n标签:{' '.join(torrent.labels)}"
if torrent.description:
html_re = re.compile(r'<[^>]+>', re.S)
description = html_re.sub('', torrent.description)

View File

@@ -316,34 +316,34 @@ class SearchChain(ChainBase):
self.progress.update(value=0,
text=f"开始搜索,共 {total_num} 个站点 ...",
key=ProgressKey.Search)
# 多线程
executor = ThreadPoolExecutor(max_workers=len(indexer_sites))
all_task = []
for site in indexer_sites:
if area == "imdbid":
# 搜索IMDBID
task = executor.submit(self.search_torrents, site=site,
keywords=[mediainfo.imdb_id] if mediainfo else None,
mtype=mediainfo.type if mediainfo else None,
page=page)
else:
# 搜索标题
task = executor.submit(self.search_torrents, site=site,
keywords=keywords,
mtype=mediainfo.type if mediainfo else None,
page=page)
all_task.append(task)
# 结果集
results = []
for future in as_completed(all_task):
finish_count += 1
result = future.result()
if result:
results.extend(result)
logger.info(f"站点搜索进度:{finish_count} / {total_num}")
self.progress.update(value=finish_count / total_num * 100,
text=f"正在搜索{keywords or ''},已完成 {finish_count} / {total_num} 个站点 ...",
key=ProgressKey.Search)
# 多线程
with ThreadPoolExecutor(max_workers=len(indexer_sites)) as executor:
all_task = []
for site in indexer_sites:
if area == "imdbid":
# 搜索IMDBID
task = executor.submit(self.search_torrents, site=site,
keywords=[mediainfo.imdb_id] if mediainfo else None,
mtype=mediainfo.type if mediainfo else None,
page=page)
else:
# 搜索标题
task = executor.submit(self.search_torrents, site=site,
keywords=keywords,
mtype=mediainfo.type if mediainfo else None,
page=page)
all_task.append(task)
for future in as_completed(all_task):
finish_count += 1
result = future.result()
if result:
results.extend(result)
logger.info(f"站点搜索进度:{finish_count} / {total_num}")
self.progress.update(value=finish_count / total_num * 100,
text=f"正在搜索{keywords or ''},已完成 {finish_count} / {total_num} 个站点 ...",
key=ProgressKey.Search)
# 计算耗时
end_time = datetime.now()
# 更新进度

View File

@@ -110,30 +110,24 @@ class SiteChain(ChainBase):
domain = StringUtils.get_url_domain(site.url)
url = f"https://api.{domain}/api/member/profile"
headers = {
"Content-Type": "application/json",
"User-Agent": user_agent,
"Accept": "application/json, text/plain, */*",
"Authorization": site.token
"x-api-key": site.apikey,
}
res = RequestUtils(
headers=headers,
proxies=settings.PROXY if site.proxy else None,
timeout=site.timeout or 15
).post_res(url=url)
if res and res.status_code == 200:
user_info = res.json()
if user_info and user_info.get("data"):
# 更新最后访问时间
res = RequestUtils(headers=headers,
timeout=site.timeout or 15,
proxies=settings.PROXY if site.proxy else None,
referer=f"{site.url}index"
).post_res(url=f"https://api.{domain}/api/member/updateLastBrowse")
if res:
return True, "连接成功"
else:
return True, f"连接成功,但更新状态失败"
return False, "鉴权已过期或无效"
if res is None:
return False, "无法打开网站!"
if res.status_code == 200:
user_info = res.json() or {}
if user_info.get("data"):
return True, "连接成功"
return False, user_info.get("message", "鉴权已过期或无效")
else:
return False, f"错误:{res.status_code} {res.reason}"
@staticmethod
def __yema_test(site: Site) -> Tuple[bool, str]:

View File

@@ -179,9 +179,9 @@ class SubscribeChain(ChainBase):
text = f"评分:{mediainfo.vote_average}"
# 群发
if mediainfo.type == MediaType.TV:
link = settings.MP_DOMAIN('#/subscribe-tv?tab=mysub')
link = settings.MP_DOMAIN('#/subscribe/tv?tab=mysub')
else:
link = settings.MP_DOMAIN('#/subscribe-movie?tab=mysub')
link = settings.MP_DOMAIN('#/subscribe/movie?tab=mysub')
self.post_message(Notification(mtype=NotificationType.Subscribe,
title=f"{mediainfo.title_year} {metainfo.season} 已添加订阅",
text=text,
@@ -922,9 +922,9 @@ class SubscribeChain(ChainBase):
self.subscribeoper.delete(subscribe.id)
# 发送通知
if mediainfo.type == MediaType.TV:
link = settings.MP_DOMAIN('#/subscribe-tv?tab=mysub')
link = settings.MP_DOMAIN('#/subscribe/tv?tab=mysub')
else:
link = settings.MP_DOMAIN('#/subscribe-movie?tab=mysub')
link = settings.MP_DOMAIN('#/subscribe/movie?tab=mysub')
self.post_message(Notification(mtype=NotificationType.Subscribe,
title=f'{mediainfo.title_year} {meta.season} 已完成{msgstr}',
image=mediainfo.get_message_image(),

View File

@@ -949,6 +949,11 @@ class TransferChain(ChainBase):
for file in files:
Path(file).unlink()
logger.warn(f"文件 {path} 已删除")
# 删除thumb图片
thumb_file = path.parent / (path.stem + "-thumb.jpg")
if thumb_file.exists():
thumb_file.unlink()
logger.info(f"文件 {thumb_file} 已删除")
# 需要删除父目录
elif str(path.parent) == str(path.root):
# 根目录,不删除

View File

@@ -1,3 +1,4 @@
import copy
import importlib
import threading
import traceback
@@ -11,8 +12,7 @@ from app.chain.subscribe import SubscribeChain
from app.chain.system import SystemChain
from app.chain.transfer import TransferChain
from app.core.config import settings
from app.core.event import Event as ManagerEvent
from app.core.event import eventmanager, EventManager
from app.core.event import Event as ManagerEvent, eventmanager, EventManager
from app.core.plugin import PluginManager
from app.helper.message import MessageHelper
from app.helper.thread import ThreadHelper
@@ -194,7 +194,7 @@ class Command(metaclass=Singleton):
# 插件事件
self.threader.submit(
self.pluginmanager.run_plugin_method,
class_name, method_name, event
class_name, method_name, copy.deepcopy(event)
)
else:
@@ -217,7 +217,7 @@ class Command(metaclass=Singleton):
if hasattr(class_obj, method_name):
self.threader.submit(
getattr(class_obj, method_name),
event
copy.deepcopy(event)
)
except Exception as e:
logger.error(f"事件处理出错:{str(e)} - {traceback.format_exc()}")

View File

@@ -224,14 +224,20 @@ class Settings(BaseSettings):
PLUGIN_MARKET: str = "https://github.com/jxxghp/MoviePilot-Plugins,https://github.com/thsrite/MoviePilot-Plugins,https://github.com/honue/MoviePilot-Plugins,https://github.com/InfinityPacer/MoviePilot-Plugins"
# Github token提高请求api限流阈值 ghp_****
GITHUB_TOKEN: Optional[str] = None
# 指定的仓库Github token多个仓库使用,分隔,格式:{user1}/{repo1}:ghp_****,{user2}/{repo2}:github_pat_****
REPO_GITHUB_TOKEN: Optional[str] = None
# Github代理服务器格式https://mirror.ghproxy.com/
GITHUB_PROXY: Optional[str] = ''
# 自动检查和更新站点资源包(站点索引、认证等)
AUTO_UPDATE_RESOURCE: bool = True
AUTO_UPDATE_RESOURCE: bool = False
# 元数据识别缓存过期时间(小时)
META_CACHE_EXPIRE: int = 0
# 是否启用DOH解析域名
DOH_ENABLE: bool = True
# 使用 DOH 解析的域名列表
DOH_DOMAINS: str = "api.themoviedb.org,api.tmdb.org,webservice.fanart.tv,api.github.com,github.com,raw.githubusercontent.com,api.telegram.org"
# DOH 解析服务器列表
DOH_RESOLVERS: str = "1.0.0.1,1.1.1.1,9.9.9.9,149.112.112.112"
# 搜索多个名称
SEARCH_MULTIPLE_NAME: bool = False
# 订阅数据共享
@@ -241,6 +247,29 @@ class Settings(BaseSettings):
# 服务器地址,对应 https://github.com/jxxghp/MoviePilot-Server 项目
MP_SERVER_HOST: str = "https://movie-pilot.org"
# 【已弃用】刮削入库的媒体文件
SCRAP_METADATA: bool = True
# 【已弃用】下载保存目录,容器内映射路径需要一致
DOWNLOAD_PATH: Optional[str] = None
# 【已弃用】电影下载保存目录,容器内映射路径需要一致
DOWNLOAD_MOVIE_PATH: Optional[str] = None
# 【已弃用】电视剧下载保存目录,容器内映射路径需要一致
DOWNLOAD_TV_PATH: Optional[str] = None
# 【已弃用】动漫下载保存目录,容器内映射路径需要一致
DOWNLOAD_ANIME_PATH: Optional[str] = None
# 【已弃用】下载目录二级分类
DOWNLOAD_CATEGORY: bool = False
# 【已弃用】媒体库目录,多个目录使用,分隔
LIBRARY_PATH: Optional[str] = None
# 【已弃用】电影媒体库目录名
LIBRARY_MOVIE_NAME: str = "电影"
# 【已弃用】电视剧媒体库目录名
LIBRARY_TV_NAME: str = "电视剧"
# 【已弃用】动漫媒体库目录名,不设置时使用电视剧目录
LIBRARY_ANIME_NAME: Optional[str] = None
# 【已弃用】二级分类
LIBRARY_CATEGORY: bool = True
@validator("SUBSCRIBE_RSS_INTERVAL",
"COOKIECLOUD_INTERVAL",
"MEDIASERVER_SYNC_INTERVAL",
@@ -335,6 +364,37 @@ class Settings(BaseSettings):
}
return {}
def REPO_GITHUB_HEADERS(self, repo: str = None):
"""
Github指定的仓库请求头
:param repo: 指定的仓库名称,格式为 "user/repo"。如果为空,或者没有找到指定仓库请求头,则返回默认的请求头信息
:return: Github请求头
"""
# 如果没有传入指定的仓库名称或没有配置指定的仓库Token则返回默认的请求头信息
if not repo or not self.REPO_GITHUB_TOKEN:
return self.GITHUB_HEADERS
headers = {}
# 格式:{user1}/{repo1}:ghp_****,{user2}/{repo2}:github_pat_****
token_pairs = self.REPO_GITHUB_TOKEN.split(",")
for token_pair in token_pairs:
try:
parts = token_pair.split(":")
if len(parts) != 2:
print(f"无效的令牌格式: {token_pair}")
continue
repo_info = parts[0].strip()
token = parts[1].strip()
if not repo_info or not token:
print(f"无效的令牌或仓库信息: {token_pair}")
continue
headers[repo_info] = {
"Authorization": f"Bearer {token}"
}
except Exception as e:
print(f"处理令牌对 '{token_pair}' 时出错: {e}")
# 如果传入了指定的仓库名称,则返回该仓库的请求头信息,否则返回默认请求头
return headers.get(repo, self.GITHUB_HEADERS)
@property
def DEFAULT_DOWNLOADER(self):
"""

View File

@@ -347,10 +347,10 @@ class MediaInfo:
return [], []
directors = []
actors = []
for cast in _credits.get("cast"):
for cast in _credits.get("cast") or []:
if cast.get("known_for_department") == "Acting":
actors.append(cast)
for crew in _credits.get("crew"):
for crew in _credits.get("crew") or []:
if crew.get("job") in ["Director", "Writer", "Editor", "Producer"]:
directors.append(crew)
return directors, actors

View File

@@ -71,7 +71,10 @@ class ReleaseGroupsMatcher(metaclass=Singleton):
"ultrahd": [],
"others": ['B(?:MDru|eyondHD|TN)', 'C(?:fandora|trlhd|MRG)', 'DON', 'EVO', 'FLUX', 'HONE(?:|yG)',
'N(?:oGroup|T(?:b|G))', 'PandaMoon', 'SMURF', 'T(?:EPES|aengoo|rollHD )'],
"anime": ['ANi', 'HYSUB', 'KTXP', 'LoliHouse', 'MCE', 'Nekomoe kissaten', '(?:Lilith|NC)-Raws', '织梦字幕组']
"anime": ['ANi', 'HYSUB', 'KTXP', 'LoliHouse', 'MCE', 'Nekomoe kissaten', 'SweetSub', 'MingY',
'(?:Lilith|NC)-Raws', '织梦字幕组', '枫叶字幕组', '猎户手抄部', '喵萌奶茶屋', '漫猫字幕社',
'霜庭云花Sub', '北宇治字幕组', '氢气烤肉架', '云歌字幕组', '萌樱字幕组','极影字幕社','悠哈璃羽字幕社',
'❀拨雪寻春❀', '沸羊羊(?:制作|字幕组)', '(?:桜|樱)都字幕组',]
}
def __init__(self):

View File

@@ -1,11 +1,13 @@
import concurrent
import concurrent.futures
import importlib.util
import inspect
import os
import threading
import time
import traceback
from pathlib import Path
from typing import Any, Callable, Dict, List, Optional, Tuple
from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union
from watchdog.events import FileSystemEventHandler
from watchdog.observers import Observer
@@ -20,6 +22,7 @@ from app.helper.plugin import PluginHelper
from app.helper.sites import SitesHelper
from app.log import logger
from app.schemas.types import SystemConfigKey
from app.utils.crypto import RSAUtils
from app.utils.object import ObjectUtils
from app.utils.singleton import Singleton
from app.utils.string import StringUtils
@@ -158,11 +161,12 @@ class PluginManager(metaclass=Singleton):
if pid and plugin_id != pid:
continue
try:
# 如果插件具有认证级别且当前认证级别不足,则不进行实例化
if hasattr(plugin, "auth_level"):
plugin.auth_level = plugin.auth_level
if self.siteshelper.auth_level < plugin.auth_level:
continue
# 判断插件是否满足认证要求,如不满足则不进行实例化
if not self.__set_and_check_auth_level(plugin=plugin):
# 如果是插件热更新实例,这里则进行替换
if plugin_id in self._plugins:
self._plugins[plugin_id] = plugin
continue
# 存储Class
self._plugins[plugin_id] = plugin
# 未安装的不加载
@@ -220,8 +224,6 @@ class PluginManager(metaclass=Singleton):
# 清空指定插件
if pid in self._running_plugins:
self._running_plugins.pop(pid)
if pid in self._plugins:
self._plugins.pop(pid)
else:
# 清空
self._plugins = {}
@@ -602,11 +604,12 @@ class PluginManager(metaclass=Singleton):
if plugin_obj and hasattr(plugin_obj, "get_page"):
if ObjectUtils.check_method(plugin_obj.get_page):
plugin.has_page = True
# 公钥
if plugin_info.get("key"):
plugin.plugin_public_key = plugin_info.get("key")
# 权限
if plugin_info.get("level"):
plugin.auth_level = plugin_info.get("level")
if self.siteshelper.auth_level < plugin.auth_level:
continue
if not self.__set_and_check_auth_level(plugin=plugin, source=plugin_info):
continue
# 名称
if plugin_info.get("name"):
plugin.plugin_name = plugin_info.get("name")
@@ -709,11 +712,12 @@ class PluginManager(metaclass=Singleton):
plugin.has_page = True
else:
plugin.has_page = False
# 公钥
if hasattr(plugin_class, "plugin_public_key"):
plugin.plugin_public_key = plugin_class.plugin_public_key
# 权限
if hasattr(plugin_class, "auth_level"):
plugin.auth_level = plugin_class.auth_level
if self.siteshelper.auth_level < plugin.auth_level:
continue
if not self.__set_and_check_auth_level(plugin=plugin, source=plugin_class):
continue
# 名称
if hasattr(plugin_class, "plugin_name"):
plugin.plugin_name = plugin_class.plugin_name
@@ -748,10 +752,70 @@ class PluginManager(metaclass=Singleton):
@staticmethod
def is_plugin_exists(pid: str) -> bool:
"""
判断插件是否在本地文件系统存在
判断插件是否在本地包中存在
:param pid: 插件ID
"""
if not pid:
return False
plugin_dir = settings.ROOT_PATH / "app" / "plugins" / pid.lower()
return plugin_dir.exists()
try:
# 构建包名
package_name = f"app.plugins.{pid.lower()}"
# 检查包是否存在
package_exists = importlib.util.find_spec(package_name) is not None
logger.debug(f"{pid} exists: {package_exists}")
return package_exists
except Exception as e:
logger.debug(f"获取插件是否在本地包中存在失败,{e}")
return False
def __set_and_check_auth_level(self, plugin: Union[schemas.Plugin, Type[Any]],
source: Optional[Union[dict, Type[Any]]] = None) -> bool:
"""
设置并检查插件的认证级别
:param plugin: 插件对象或包含 auth_level 属性的对象
:param source: 可选的字典对象或类对象,可能包含 "level" 或 "auth_level"
:return: 如果插件的认证级别有效且当前环境的认证级别满足要求,返回 True,否则返回 False
"""
# 检查并赋值 source 中的 level 或 auth_level
if source:
if isinstance(source, dict) and "level" in source:
plugin.auth_level = source.get("level")
elif hasattr(source, "auth_level"):
plugin.auth_level = source.auth_level
# 如果 source 为空且 plugin 本身没有 auth_level直接返回 True
elif not hasattr(plugin, "auth_level"):
return True
# auth_level 级别说明
# 1 - 所有用户可见
# 2 - 站点认证用户可见
# 3 - 站点&密钥认证可见
# 99 - 站点&特殊密钥认证可见
# 如果当前站点认证级别大于 1 且插件级别为 99并存在插件公钥说明为特殊密钥认证通过密钥匹配进行认证
if self.siteshelper.auth_level > 1 and plugin.auth_level == 99 and hasattr(plugin, "plugin_public_key"):
plugin_id = plugin.id if isinstance(plugin, schemas.Plugin) else plugin.__name__
public_key = plugin.plugin_public_key
if public_key:
private_key = PluginManager.__get_plugin_private_key(plugin_id)
verify = RSAUtils.verify_rsa_keys(public_key=public_key, private_key=private_key)
return verify
# 如果当前站点认证级别小于插件级别,则返回 False
if self.siteshelper.auth_level < plugin.auth_level:
return False
return True
@staticmethod
def __get_plugin_private_key(plugin_id: str) -> Optional[str]:
"""
根据插件标识获取对应的私钥
:param plugin_id: 插件标识
:return: 对应的插件私钥,如果未找到则返回 None
"""
try:
# 将插件标识转换为大写并构建环境变量名称
env_var_name = f"PLUGIN_{plugin_id.upper()}_PRIVATE_KEY"
private_key = os.environ.get(env_var_name)
return private_key
except Exception as e:
logger.debug(f"获取插件 {plugin_id} 的私钥时发生错误:{e}")
return None

View File

@@ -57,6 +57,7 @@ class TransferHistory(Base):
).offset((page - 1) * count).limit(count).all()
else:
result = db.query(TransferHistory).filter(or_(
TransferHistory.title.like(f'%{title}%'),
TransferHistory.src.like(f'%{title}%'),
TransferHistory.dest.like(f'%{title}%'),
)).order_by(
@@ -128,6 +129,7 @@ class TransferHistory(Base):
return db.query(func.count(TransferHistory.id)).filter(TransferHistory.status == status).first()[0]
else:
return db.query(func.count(TransferHistory.id)).filter(or_(
TransferHistory.title.like(f'%{title}%'),
TransferHistory.src.like(f'%{title}%'),
TransferHistory.dest.like(f'%{title}%')
)).first()[0]

View File

@@ -15,16 +15,6 @@ from typing import Dict, Optional
from app.core.config import settings
from app.log import logger
# 定义一个全局集合来存储注册的主机
_registered_hosts = {
'api.themoviedb.org',
'api.tmdb.org',
'webservice.fanart.tv',
'api.github.com',
'github.com',
'raw.githubusercontent.com',
'api.telegram.org'
}
# 定义一个全局线程池执行器
_executor = concurrent.futures.ThreadPoolExecutor()
@@ -32,21 +22,13 @@ _executor = concurrent.futures.ThreadPoolExecutor()
# 定义默认的DoH配置
_doh_timeout = 5
_doh_cache: Dict[str, str] = {}
_doh_resolvers = [
# https://developers.cloudflare.com/1.1.1.1/encryption/dns-over-https
"1.0.0.1",
"1.1.1.1",
# https://support.quad9.net/hc/en-us
"9.9.9.9",
"149.112.112.112"
]
def _patched_getaddrinfo(host, *args, **kwargs):
"""
socket.getaddrinfo的补丁版本。
"""
if host not in _registered_hosts:
if host not in settings.DOH_DOMAINS.split(","):
return _orig_getaddrinfo(host, *args, **kwargs)
# 检查主机是否已解析
@@ -57,7 +39,7 @@ def _patched_getaddrinfo(host, *args, **kwargs):
# 使用DoH解析主机
futures = []
for resolver in _doh_resolvers:
for resolver in settings.DOH_RESOLVERS.split(","):
futures.append(_executor.submit(_doh_query, resolver, host))
for future in concurrent.futures.as_completed(futures):

View File

@@ -51,7 +51,8 @@ class PluginHelper(metaclass=Singleton):
if not user or not repo:
return {}
raw_url = self._base_url % (user, repo)
res = RequestUtils(proxies=self.proxies, headers=settings.GITHUB_HEADERS,
res = RequestUtils(proxies=self.proxies,
headers=settings.REPO_GITHUB_HEADERS(repo=f"{user}/{repo}"),
timeout=10).get_res(f"{raw_url}package.json")
if res:
try:
@@ -137,12 +138,16 @@ class PluginHelper(metaclass=Singleton):
if not user or not repo:
return False, "不支持的插件仓库地址格式"
user_repo = f"{user}/{repo}"
def __get_filelist(_p: str) -> Tuple[Optional[list], Optional[str]]:
"""
获取插件的文件列表
"""
file_api = f"https://api.github.com/repos/{user}/{repo}/contents/plugins/{_p}"
r = RequestUtils(proxies=settings.PROXY, headers=settings.GITHUB_HEADERS, timeout=30).get_res(file_api)
file_api = f"https://api.github.com/repos/{user_repo}/contents/plugins/{_p}"
r = RequestUtils(proxies=settings.PROXY,
headers=settings.REPO_GITHUB_HEADERS(repo=user_repo),
timeout=30).get_res(file_api)
if r is None:
return None, "连接仓库失败"
elif r.status_code != 200:
@@ -164,7 +169,8 @@ class PluginHelper(metaclass=Singleton):
download_url = f"{settings.GITHUB_PROXY}{item.get('download_url')}"
# 下载插件文件
res = RequestUtils(proxies=self.proxies,
headers=settings.GITHUB_HEADERS, timeout=60).get_res(download_url)
headers=settings.REPO_GITHUB_HEADERS(repo=user_repo),
timeout=60).get_res(download_url)
if not res:
return False, f"文件 {item.get('name')} 下载失败!"
elif res.status_code != 200:

View File

@@ -20,12 +20,20 @@ if SystemUtils.is_frozen():
from app.core.config import settings, global_vars
from app.core.module import ModuleManager
# SitesHelper涉及资源包拉取提前引入并容错提示
try:
from app.helper.sites import SitesHelper
except ImportError as e:
error_message = f"错误: {str(e)}\n站点认证及索引相关资源导入失败,请尝试重建容器或手动拉取资源"
print(error_message, file=sys.stderr)
sys.exit(1)
from app.core.plugin import PluginManager
from app.db.init import init_db, update_db, init_super_user
from app.helper.thread import ThreadHelper
from app.helper.display import DisplayHelper
from app.helper.resource import ResourceHelper
from app.helper.sites import SitesHelper
from app.helper.message import MessageHelper
from app.scheduler import Scheduler
from app.command import Command, CommandChian

View File

@@ -15,6 +15,7 @@ from app.modules.douban.apiv2 import DoubanApi
from app.modules.douban.douban_cache import DoubanCache
from app.modules.douban.scraper import DoubanScraper
from app.schemas import MediaPerson
from app.schemas.exception import APIRateLimitException
from app.schemas.types import MediaType
from app.utils.common import retry
from app.utils.http import RequestUtils
@@ -147,11 +148,12 @@ class DoubanModule(_ModuleBase):
return None
def douban_info(self, doubanid: str, mtype: MediaType = None) -> Optional[dict]:
def douban_info(self, doubanid: str, mtype: MediaType = None, raise_exception: bool = True) -> Optional[dict]:
"""
获取豆瓣信息
:param doubanid: 豆瓣ID
:param mtype: 媒体类型
:param raise_exception: 触发速率限制时是否抛出异常
:return: 豆瓣信息
"""
"""
@@ -427,7 +429,10 @@ class DoubanModule(_ModuleBase):
info = self.doubanapi.tv_detail(doubanid)
if info:
if "subject_ip_rate_limit" in info.get("msg", ""):
logger.warn(f"触发豆瓣IP速率限制错误信息{info} ...")
msg = f"触发豆瓣IP速率限制错误信息{info} ..."
logger.warn(msg)
if raise_exception:
raise APIRateLimitException(msg)
return None
celebrities = self.doubanapi.tv_celebrities(doubanid)
if celebrities:
@@ -442,7 +447,10 @@ class DoubanModule(_ModuleBase):
info = self.doubanapi.movie_detail(doubanid)
if info:
if "subject_ip_rate_limit" in info.get("msg", ""):
logger.warn(f"触发豆瓣IP速率限制错误信息{info} ...")
msg = f"触发豆瓣IP速率限制错误信息{info} ..."
logger.warn(msg)
if raise_exception:
raise APIRateLimitException(msg)
return None
celebrities = self.doubanapi.movie_celebrities(doubanid)
if celebrities:
@@ -601,7 +609,8 @@ class DoubanModule(_ModuleBase):
@retry(Exception, 5, 3, 3, logger=logger)
def match_doubaninfo(self, name: str, imdbid: str = None,
mtype: MediaType = None, year: str = None, season: int = None) -> dict:
mtype: MediaType = None, year: str = None, season: int = None,
raise_exception: bool = False) -> dict:
"""
搜索和匹配豆瓣信息
:param name: 名称
@@ -609,6 +618,7 @@ class DoubanModule(_ModuleBase):
:param mtype: 类型
:param year: 年份
:param season: 季号
:param raise_exception: 触发速率限制时是否抛出异常
"""
if imdbid:
# 优先使用IMDBID查询
@@ -624,13 +634,19 @@ class DoubanModule(_ModuleBase):
# 搜索
logger.info(f"开始使用名称 {name} 匹配豆瓣信息 ...")
result = self.doubanapi.search(f"{name} {year or ''}".strip())
if not result or not result.get("items"):
if not result:
logger.warn(f"未找到 {name} 的豆瓣信息")
return {}
# 触发rate limit
if "search_access_rate_limit" in result.values():
logger.warn(f"触发豆瓣API速率限制 错误信息 {result} ...")
raise Exception("触发豆瓣API速率限制")
msg = f"触发豆瓣API速率限制错误信息{result} ..."
logger.warn(msg)
if raise_exception:
raise APIRateLimitException(msg)
return {}
if not result.get("items"):
logger.warn(f"未找到 {name} 的豆瓣信息")
return {}
for item_obj in result.get("items"):
type_name = item_obj.get("type_name")
if type_name not in [MediaType.TV.value, MediaType.MOVIE.value]:

View File

@@ -219,12 +219,13 @@ class FileTransferModule(_ModuleBase):
"""
# 字幕正则式
_zhcn_sub_re = r"([.\[(](((zh[-_])?(cn|ch[si]|sg|sc))|zho?" \
r"|chinese|(cn|ch[si]|sg|zho?|eng)[-_&](cn|ch[si]|sg|zho?|eng)" \
r"|chinese|(cn|ch[si]|sg|zho?|eng)[-_&]?(cn|ch[si]|sg|zho?|eng)" \
r"|简[体中]?)[.\])])" \
r"|([\u4e00-\u9fa5]{0,3}[中双][\u4e00-\u9fa5]{0,2}[字文语][\u4e00-\u9fa5]{0,3})" \
r"|简体|简中|JPSC" \
r"|(?<![a-z0-9])gb(?![a-z0-9])"
_zhtw_sub_re = r"([.\[(](((zh[-_])?(hk|tw|cht|tc))" \
r"|(cht|eng)[-_&]?(cht|eng)" \
r"|繁[体中]?)[.\])])" \
r"|繁体中[文字]|中[文字]繁体|繁体|JPTC" \
r"|(?<![a-z0-9])big5(?![a-z0-9])"
@@ -690,6 +691,10 @@ class FileTransferModule(_ModuleBase):
"doubanid": mediainfo.douban_id,
# 季号
"season": meta.season_seq,
# 季年份根据season值获取
"season_year": mediainfo.season_years.get(
int(meta.season_seq),
None) if (mediainfo.season_years and meta.season_seq) else None,
# 集号
"episode": meta.episode_seqs,
# 季集 SxxExx

View File

@@ -193,7 +193,6 @@ class MTorrentSpider:
'id': torrent_id
},
'header': {
'Content-Type': 'application/json',
'User-Agent': f'{self._ua}',
'Accept': 'application/json, text/plain, */*',
'x-api-key': self._apikey

View File

@@ -265,25 +265,59 @@ class Plex:
season_episodes[episode.seasonNumber].append(episode.index)
return videos.key, season_episodes
def get_remote_image_by_id(self, item_id: str, image_type: str) -> Optional[str]:
def get_remote_image_by_id(self, item_id: str, image_type: str, depth: int = 0) -> Optional[str]:
"""
根据ItemId从Plex查询图片地址
:param item_id: 在Emby中的ID
:param item_id: 在Plex中的ID
:param image_type: 图片的类型Poster或者Backdrop等
:param depth: 当前递归深度默认为0
:return: 图片对应在TMDB中的URL
"""
if not self._plex:
if not self._plex or depth > 2 or not item_id:
return None
try:
if image_type == "Poster":
images = self._plex.fetchItems('/library/metadata/%s/posters' % item_id,
cls=media.Poster)
image_url = None
ekey = f"/library/metadata/{item_id}"
item = self._plex.fetchItem(ekey=ekey)
if not item:
return None
# 如果配置了外网播放地址以及Token则默认从Plex媒体服务器获取图片否则返回有外网地址的图片资源
if settings.PLEX_PLAY_HOST and settings.PLEX_TOKEN:
query = {"X-Plex-Token": settings.PLEX_TOKEN}
if image_type == "Poster":
if item.thumb:
image_url = RequestUtils.combine_url(host=settings.PLEX_PLAY_HOST, path=item.thumb, query=query)
else:
# 默认使用art也就是Backdrop进行处理
if item.art:
image_url = RequestUtils.combine_url(host=settings.PLEX_PLAY_HOST, path=item.art, query=query)
# 这里对episode进行特殊处理实际上episode的Backdrop是Poster
# 也有个别情况比如机智的凡人小子episode就是Poster因此这里把episode的优先级降低默认还是取art
if not image_url and item.TYPE == "episode" and item.thumb:
image_url = RequestUtils.combine_url(host=settings.PLEX_PLAY_HOST, path=item.thumb, query=query)
else:
images = self._plex.fetchItems('/library/metadata/%s/arts' % item_id,
cls=media.Art)
for image in images:
if hasattr(image, 'key') and image.key.startswith('http'):
return image.key
if image_type == "Poster":
images = self._plex.fetchItems(ekey=f"{ekey}/posters",
cls=media.Poster)
else:
# 默认使用art也就是Backdrop进行处理
images = self._plex.fetchItems(ekey=f"{ekey}/arts",
cls=media.Art)
# 这里对episode进行特殊处理实际上episode的Backdrop是Poster
# 也有个别情况比如机智的凡人小子episode就是Poster因此这里把episode的优先级降低默认还是取art
if not images and item.TYPE == "episode":
images = self._plex.fetchItems(ekey=f"{ekey}/posters",
cls=media.Poster)
for image in images:
if hasattr(image, "key") and image.key.startswith("http"):
image_url = image.key
break
# 如果最后还是找不到,则递归父级进行查找
if not image_url and hasattr(item, "parentRatingKey"):
return self.get_remote_image_by_id(item_id=item.parentRatingKey,
image_type=image_type,
depth=depth + 1)
return image_url
except Exception as e:
logger.error(f"获取封面出错:" + str(e))
return None

View File

@@ -1,5 +1,6 @@
import re
import threading
import uuid
from pathlib import Path
from threading import Event
from typing import Optional, List, Dict
@@ -87,6 +88,8 @@ class Telegram:
try:
if text:
# 对text进行Markdown特殊字符转义
text = re.sub(r"([_`])", r"\\\1", text)
caption = f"*{title}*\n{text}"
else:
caption = f"*{title}*"
@@ -199,13 +202,15 @@ class Telegram:
"""
if image:
req = RequestUtils(proxies=settings.PROXY).get_res(image)
if req is None:
res = RequestUtils(proxies=settings.PROXY).get_res(image)
if res is None:
raise Exception("获取图片失败")
if req.content:
image_file = Path(settings.TEMP_PATH) / Path(image).name
image_file.write_bytes(req.content)
if res.content:
# 使用随机标识构建图片文件的完整路径,并写入图片内容到文件
image_file = Path(settings.TEMP_PATH) / str(uuid.uuid4())
image_file.write_bytes(res.content)
photo = InputFile(image_file)
# 发送图片到Telegram
ret = self._bot.send_photo(chat_id=userid or self._telegram_chat_id,
photo=photo,
caption=caption,

View File

@@ -414,9 +414,9 @@ class TheMovieDbModule(_ModuleBase):
:param season: 季
"""
season_info = self.tmdb.get_tv_season_detail(tmdbid=tmdbid, season=season)
if not season_info:
if not season_info or not season_info.get("episodes"):
return []
return [schemas.TmdbEpisode(**episode) for episode in season_info.get("episodes", [])]
return [schemas.TmdbEpisode(**episode) for episode in season_info.get("episodes")]
def scheduler_job(self) -> None:
"""

View File

@@ -100,28 +100,57 @@ class WeChat:
"""
message_url = self._send_msg_url % self.__get_access_token()
if text:
conent = "%s\n%s" % (title, text.replace("\n\n", "\n"))
content = "%s\n%s" % (title, text.replace("\n\n", "\n"))
else:
conent = title
content = title
if link:
conent = f"{conent}\n点击查看:{link}"
content = f"{content}\n点击查看:{link}"
if not userid:
userid = "@all"
req_json = {
"touser": userid,
"msgtype": "text",
"agentid": self._appid,
"text": {
"content": conent
},
"safe": 0,
"enable_id_trans": 0,
"enable_duplicate_check": 0
}
return self.__post_request(message_url, req_json)
# Check if content exceeds 2048 bytes and split if necessary
if len(content.encode('utf-8')) > 2048:
content_chunks = []
current_chunk = ""
for line in content.splitlines():
if len(current_chunk.encode('utf-8')) + len(line.encode('utf-8')) > 2048:
content_chunks.append(current_chunk.strip())
current_chunk = ""
current_chunk += line + "\n"
if current_chunk:
content_chunks.append(current_chunk.strip())
# Send each chunk as a separate message
for chunk in content_chunks:
req_json = {
"touser": userid,
"msgtype": "text",
"agentid": self._appid,
"text": {
"content": chunk
},
"safe": 0,
"enable_id_trans": 0,
"enable_duplicate_check": 0
}
result = self.__post_request(message_url, req_json)
else:
req_json = {
"touser": userid,
"msgtype": "text",
"agentid": self._appid,
"text": {
"content": content
},
"safe": 0,
"enable_id_trans": 0,
"enable_duplicate_check": 0
}
return self.__post_request(message_url, req_json)
return result
def __send_image_message(self, title: str, text: str, image_url: str,
userid: str = None, link: str = None) -> Optional[bool]:

View File

@@ -15,3 +15,4 @@ from .tmdb import *
from .transfer import *
from .file import *
from .filetransfer import *
from .exception import *

14
app/schemas/exception.py Normal file
View File

@@ -0,0 +1,14 @@
class ImmediateException(Exception):
    """Exception that must be raised immediately, bypassing any retry logic.

    Raise this when an operation should fail fast instead of being retried.
    """
class APIRateLimitException(ImmediateException):
    """Raised when an API call hits a rate limit.

    Subclasses ImmediateException so retry wrappers abort at once and report
    the error instead of retrying.
    """

View File

@@ -46,6 +46,8 @@ class Plugin(BaseModel):
history: Optional[dict] = {}
# 添加时间,值越小表示越靠后发布
add_time: Optional[int] = 0
# 插件公钥
plugin_public_key: Optional[str] = None
class PluginDashboard(Plugin):

View File

@@ -6,6 +6,8 @@ from typing import Any
from Crypto import Random
from Crypto.Cipher import AES
from app.schemas.exception import ImmediateException
def retry(ExceptionToCheck: Any,
tries: int = 3, delay: int = 3, backoff: int = 2, logger: Any = None):
@@ -23,6 +25,8 @@ def retry(ExceptionToCheck: Any,
while mtries > 1:
try:
return f(*args, **kwargs)
except ImmediateException:
raise
except ExceptionToCheck as e:
msg = f"{str(e)}, {mdelay} 秒后重试 ..."
if logger:

91
app/utils/crypto.py Normal file
View File

@@ -0,0 +1,91 @@
import base64
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization, hashes
from cryptography.hazmat.primitives.asymmetric import rsa, padding
class RSAUtils:
    """Helpers for generating and validating Base64-encoded DER RSA key pairs."""

    @staticmethod
    def generate_rsa_key_pair() -> tuple[str, str]:
        """
        Generate an RSA key pair and return both keys Base64 encoded (DER format).

        :return: (private_key_b64, public_key_b64) — note the PRIVATE key
                 comes first (the previous docstring stated the reverse order).
        """
        # Generate a 2048-bit RSA key pair
        private_key = rsa.generate_private_key(
            public_exponent=65537,
            key_size=2048,
        )
        public_key = private_key.public_key()
        # Export the private key as unencrypted PKCS#8 DER
        private_key_der = private_key.private_bytes(
            encoding=serialization.Encoding.DER,
            format=serialization.PrivateFormat.PKCS8,
            encryption_algorithm=serialization.NoEncryption()
        )
        # Export the public key as SubjectPublicKeyInfo DER
        public_key_der = public_key.public_bytes(
            encoding=serialization.Encoding.DER,
            format=serialization.PublicFormat.SubjectPublicKeyInfo
        )
        # Base64-encode the DER blobs so they can be stored as plain strings
        private_key_b64 = base64.b64encode(private_key_der).decode('utf-8')
        public_key_b64 = base64.b64encode(public_key_der).decode('utf-8')
        return private_key_b64, public_key_b64

    @staticmethod
    def verify_rsa_keys(private_key: str, public_key: str) -> bool:
        """
        Verify that an RSA public/private key pair match via an encrypt/decrypt round trip.

        :param private_key: Base64-encoded DER private key (no PEM markers)
        :param public_key: Base64-encoded DER public key (no PEM markers)
        :return: True if the keys match, False otherwise
        """
        if not private_key or not public_key:
            return False
        try:
            # Decode the Base64 payloads back to DER bytes
            public_key_bytes = base64.b64decode(public_key)
            private_key_bytes = base64.b64decode(private_key)
            # Load key objects under new names (previously these reassigned and
            # shadowed the str parameters)
            pub = serialization.load_der_public_key(public_key_bytes, backend=default_backend())
            priv = serialization.load_der_private_key(private_key_bytes, password=None,
                                                      backend=default_backend())
            # Round-trip a test message with a single shared OAEP configuration
            message = b'test'
            oaep = padding.OAEP(
                mgf=padding.MGF1(algorithm=hashes.SHA256()),
                algorithm=hashes.SHA256(),
                label=None
            )
            encrypted_message = pub.encrypt(message, oaep)
            decrypted_message = priv.decrypt(encrypted_message, oaep)
            return message == decrypted_message
        except Exception as e:
            # NOTE(review): module has no logger in scope, hence print; consider logging
            print(f"RSA 密钥验证失败: {e}")
            return False

View File

@@ -1,5 +1,5 @@
from typing import Union, Any, Optional
from urllib.parse import urljoin
from urllib.parse import urljoin, urlparse, parse_qs, urlencode, urlunparse
import requests
import urllib3
@@ -255,3 +255,37 @@ class RequestUtils:
return endpoint
host = RequestUtils.standardize_base_url(host)
return urljoin(host, endpoint) if host else endpoint
@staticmethod
def combine_url(host: str, path: Optional[str] = None, query: Optional[dict] = None) -> Optional[str]:
"""
使用给定的主机头、路径和查询参数组合生成完整的URL。
:param host: str, 主机头,例如 https://example.com
:param path: Optional[str], 包含路径和可能已经包含的查询参数的端点,例如 /path/to/resource?current=1
:param query: Optional[dict], 可选,额外的查询参数,例如 {"key": "value"}
:return: str, 完整的请求URL字符串
"""
try:
# 如果路径为空,则默认为 '/'
if path is None:
path = '/'
host = RequestUtils.standardize_base_url(host)
# 使用 urljoin 合并 host 和 path
url = urljoin(host, path)
# 解析当前 URL 的组成部分
url_parts = urlparse(url)
# 解析已存在的查询参数,并与额外的查询参数合并
query_params = parse_qs(url_parts.query)
if query:
for key, value in query.items():
query_params[key] = value
# 重新构建查询字符串
query_string = urlencode(query_params, doseq=True)
# 构建完整的 URL
new_url_parts = url_parts._replace(query=query_string)
complete_url = urlunparse(new_url_parts)
return str(complete_url)
except Exception as e:
logger.debug(f"Error combining URL: {e}")
return None

View File

@@ -186,7 +186,7 @@ class StringUtils:
忽略特殊字符
"""
# 需要忽略的特殊字符
CONVERT_EMPTY_CHARS = r"[、.。,,·:;!'\"“”()\[\]【】「」\-—\+\|\\_/&#~]"
CONVERT_EMPTY_CHARS = r"[、.。,,·:;!'\"“”()\[\]【】「」\-—\+\|\\_/&#~]"
if not text:
return text
if not isinstance(text, list):

View File

@@ -13,10 +13,14 @@ SUPERUSER=admin
BIG_MEMORY_MODE=false
# 是否启用DOH域名解析启用后对于api.themovie.org等域名通过DOH解析避免域名DNS被污染
DOH_ENABLE=true
# 使用 DOH 解析的域名列表,多个域名使用`,`分隔
DOH_DOMAINS=api.themoviedb.org,api.tmdb.org,webservice.fanart.tv,api.github.com,github.com,raw.githubusercontent.com,api.telegram.org
# DOH 解析服务器列表,多个服务器使用`,`分隔
DOH_RESOLVERS=1.0.0.1,1.1.1.1,9.9.9.9,149.112.112.112
# 元数据识别缓存过期时间,数字型,单位:小时,0为系统默认(大内存模式为7天,否则为3天),调大该值可减少themoviedb的访问次数
META_CACHE_EXPIRE=0
# 自动检查和更新站点资源包(索引、认证等)
AUTO_UPDATE_RESOURCE=true
AUTO_UPDATE_RESOURCE=false
# 【*】API密钥建议更换复杂字符串有Jellyseerr/Overseerr、媒体服务器Webhook等配置以及部分支持API_TOKEN的API中使用
API_TOKEN=moviepilot
# 登录页面电影海报tmdb/bingtmdb要求能正常连接api.themoviedb.org

View File

@@ -0,0 +1,30 @@
"""1.0.11
Revision ID: 06abf3e7090b
Revises: d633ca6cd572
Create Date: 2023-10-27 12:22:56.213376
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '06abf3e7090b'
down_revision = 'd633ca6cd572'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add the nullable ``username`` column to ``downloadhistory``.

    Failures are deliberately swallowed so the migration is best-effort
    (NOTE(review): presumably tolerates an already-existing column — confirm).
    """
    try:
        with op.batch_alter_table("downloadhistory") as batch_op:
            batch_op.add_column(sa.Column('username', sa.String, nullable=True))
    except Exception:
        # previously bound the exception to an unused name `e`
        pass


def downgrade() -> None:
    """No-op: the schema change is not reverted."""
    pass

View File

@@ -0,0 +1,30 @@
"""1.0.13
Revision ID: 127a25fdf0e8
Revises: d71e624f0208
Create Date: 2024-02-24 03:11:32.005540
"""
import contextlib
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '127a25fdf0e8'
down_revision = 'd71e624f0208'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add the nullable ``search_imdbid`` column to the ``subscribe`` table."""
    # Combined with-statement: any failure is silently ignored, exactly as in
    # the nested suppress/batch form.
    with contextlib.suppress(Exception), op.batch_alter_table("subscribe") as batch_op:
        batch_op.add_column(sa.Column('search_imdbid', sa.Integer, nullable=True))


def downgrade() -> None:
    """Downgrade is intentionally a no-op."""
    pass

View File

@@ -0,0 +1,31 @@
"""1.0.1
Revision ID: 14f1813ae8e3
Revises: 9f4edd55c2d4
Create Date: 2023-07-27 12:34:57.839443
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '14f1813ae8e3'
down_revision = '9f4edd55c2d4'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add nullable ``best_version`` and ``current_priority`` columns to ``subscribe``.

    Best-effort: failures are swallowed on purpose.
    """
    try:
        with op.batch_alter_table("subscribe") as batch_op:
            batch_op.add_column(sa.Column('best_version', sa.Integer, nullable=True))
            batch_op.add_column(sa.Column('current_priority', sa.Integer, nullable=True))
    except Exception:
        # previously bound the exception to an unused name `e`
        pass


def downgrade() -> None:
    """No-op downgrade."""
    pass

View File

@@ -0,0 +1,30 @@
"""1.0.4
Revision ID: 1e169250e949
Revises: 52ab4930be04
Create Date: 2023-09-01 09:56:33.907661
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '1e169250e949'
down_revision = '52ab4930be04'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add the nullable ``date`` column to ``subscribe`` (best-effort)."""
    try:
        with op.batch_alter_table("subscribe") as batch_op:
            batch_op.add_column(sa.Column('date', sa.String, nullable=True))
    except Exception:
        # previously bound the exception to an unused name `e`
        pass


def downgrade() -> None:
    """No-op downgrade."""
    pass

View File

@@ -0,0 +1,38 @@
"""1.0.6
Revision ID: 232dfa044617
Revises: e734c7fe6056
Create Date: 2023-09-19 21:34:41.994617
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = '232dfa044617'
down_revision = 'e734c7fe6056'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Split the single filter-rule configs into per-scenario filter rules.

    Copies the old 'FilterRules'/'FilterRules2' values into the new
    search/subscribe/best-version keys, then removes the old keys.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Search priority: seed from the old FilterRules value
    op.execute("delete from systemconfig where key = 'SearchFilterRules';")
    op.execute(
        "insert into systemconfig(key, value) VALUES('SearchFilterRules', (select value from systemconfig where key= 'FilterRules'));")
    # Subscription priority: also seeded from FilterRules
    op.execute("delete from systemconfig where key = 'SubscribeFilterRules';")
    op.execute(
        "insert into systemconfig(key, value) VALUES('SubscribeFilterRules', (select value from systemconfig where key= 'FilterRules'));")
    # Best-version ("upgrade to better quality") priority: seeded from FilterRules2
    op.execute("delete from systemconfig where key = 'BestVersionFilterRules';")
    op.execute(
        "insert into systemconfig(key, value) VALUES('BestVersionFilterRules', (select value from systemconfig where key= 'FilterRules2'));")
    # Drop the superseded keys
    op.execute("delete from systemconfig where key = 'FilterRules';")
    op.execute("delete from systemconfig where key = 'FilterRules2';")
    # ### end Alembic commands ###


def downgrade() -> None:
    """No-op: the old keys are not restored."""
    pass

View File

@@ -0,0 +1,27 @@
"""1.0.7
Revision ID: 30329639c12b
Revises: 232dfa044617
Create Date: 2023-09-23 08:25:59.776488
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = '30329639c12b'
down_revision = '232dfa044617'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Rename the 'DefaultIncludeExcludeFilter' config to 'DefaultFilterRules'.

    Copies the old value under the new key, then deletes the old entry.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.execute("delete from systemconfig where key = 'DefaultFilterRules';")
    op.execute(
        "insert into systemconfig(key, value) VALUES('DefaultFilterRules', (select value from systemconfig where key= 'DefaultIncludeExcludeFilter'));")
    op.execute("delete from systemconfig where key = 'DefaultIncludeExcludeFilter';")
    # ### end Alembic commands ###


def downgrade() -> None:
    """No-op: the old key is not restored."""
    pass

View File

@@ -0,0 +1,28 @@
"""1_0_3
Revision ID: 52ab4930be04
Revises: ec5fb51fc300
Create Date: 2023-08-28 13:21:45.152012
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = '52ab4930be04'
down_revision = 'ec5fb51fc300'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Seed 'RssSites' from 'IndexerSites' and clear cached 'SearchResults'."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.execute("delete from systemconfig where key = 'RssSites';")
    op.execute("insert into systemconfig(key, value) VALUES('RssSites', (select value from systemconfig where key= 'IndexerSites'));")
    op.execute("delete from systemconfig where key = 'SearchResults';")
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    # Intentionally a no-op.
    pass
    # ### end Alembic commands ###

View File

@@ -0,0 +1,34 @@
"""1.0.15
Revision ID: 5813aaa7cb3a
Revises: f94cd1217fd7
Create Date: 2024-03-17 09:04:51.785716
"""
import contextlib
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '5813aaa7cb3a'
down_revision = 'f94cd1217fd7'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add ``message.note`` and an index on ``message.reg_time`` (best-effort)."""
    with contextlib.suppress(Exception):
        with op.batch_alter_table("message") as batch_op:
            batch_op.add_column(sa.Column('note', sa.String, nullable=True))
    try:
        op.create_index('ix_message_reg_time', 'message', ['reg_time'], unique=False)
    except Exception:
        # previously bound the exception to an unused name `err`
        # NOTE(review): presumably tolerates an already-existing index — confirm
        pass


def downgrade() -> None:
    """No-op downgrade."""
    pass

View File

@@ -0,0 +1,31 @@
"""1.0.18
Revision ID: 735c01e0453d
Revises: 9cb3993e340e
Create Date: 2024-04-29 19:40:38.375072
"""
import contextlib
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '735c01e0453d'
down_revision = '9cb3993e340e'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add ``apikey`` and ``token`` VARCHAR columns to the ``site`` table."""
    # Combined with-statement; failures are silently ignored as before.
    with contextlib.suppress(Exception), op.batch_alter_table("site") as batch_op:
        batch_op.add_column(sa.Column('apikey', sa.VARCHAR))
        batch_op.add_column(sa.Column('token', sa.VARCHAR))


def downgrade() -> None:
    """Downgrade is intentionally a no-op."""
    pass

View File

@@ -0,0 +1,30 @@
"""1_0_17
Revision ID: 9cb3993e340e
Revises: d146dea51516
Create Date: 2024-03-28 14:36:35.588392
"""
import contextlib
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '9cb3993e340e'
down_revision = 'd146dea51516'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add the OTP columns (``is_otp``, ``otp_secret``) to the ``user`` table."""
    # Combined with-statement; failures are silently ignored as before.
    with contextlib.suppress(Exception), op.batch_alter_table("user") as batch_op:
        batch_op.add_column(sa.Column('is_otp', sa.BOOLEAN, server_default='0'))
        batch_op.add_column(sa.Column('otp_secret', sa.VARCHAR))


def downgrade() -> None:
    """Downgrade is intentionally a no-op."""
    pass

View File

@@ -0,0 +1,32 @@
"""1.0.0
Revision ID: 9f4edd55c2d4
Revises:
Create Date: 2023-07-13 12:27:26.402317
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '9f4edd55c2d4'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add the nullable ``sites`` text column to ``subscribe`` (best-effort)."""
    try:
        with op.batch_alter_table("subscribe") as batch_op:
            batch_op.add_column(sa.Column('sites', sa.Text, nullable=True))
    except Exception:
        # previously bound the exception to an unused name `e`
        pass


def downgrade() -> None:
    """No-op downgrade."""
    pass

View File

@@ -0,0 +1,118 @@
"""1.0.20
Revision ID: a40261701909
Revises: ae9d8ed8df97
Create Date: 2024-05-22 19:16:21.374806
"""
import json
from pathlib import Path
from alembic import op
from app.core.config import Settings
# revision identifiers, used by Alembic.
revision = 'a40261701909'
down_revision = 'ae9d8ed8df97'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """
    Upgrade the legacy directory configuration.

    Reads the old env-file based DOWNLOAD_*/LIBRARY_* settings from app.env
    and persists them as JSON lists under the 'DownloadDirectories' and
    'LibraryDirectories' keys of the systemconfig table.
    """
    # Instantiate a Settings object backed by the user's app.env file
    _settings = Settings(
        _env_file=Settings().CONFIG_PATH / "app.env",
        _env_file_encoding="utf-8"
    )
    # --- download directory configuration upgrade ---
    download_dirs = []
    if _settings.DOWNLOAD_MOVIE_PATH:
        download_dirs.append({
            "type": "download",
            "name": "电影目录",
            "path": _settings.DOWNLOAD_MOVIE_PATH,
            "media_type": "电影",
            "category": "",
            "auto_category": True if _settings.DOWNLOAD_CATEGORY else False,
            "priority": 1
        })
    if _settings.DOWNLOAD_TV_PATH:
        download_dirs.append({
            "type": "download",
            "name": "电视剧目录",
            "path": _settings.DOWNLOAD_TV_PATH,
            "media_type": "电视剧",
            "category": "",
            "auto_category": True if _settings.DOWNLOAD_CATEGORY else False,
            "priority": 2
        })
    if _settings.DOWNLOAD_PATH:
        # Generic catch-all download dir gets the lowest priority (4)
        download_dirs.append({
            "type": "download",
            "name": "下载目录",
            "path": _settings.DOWNLOAD_PATH,
            "media_type": "",
            "category": "",
            "auto_category": True if _settings.DOWNLOAD_CATEGORY else False,
            "priority": 4
        })
    # Insert into the database; on failure fall back to an update
    # (NOTE(review): presumably the insert fails when the key already exists — confirm).
    # NOTE(review): the JSON value is interpolated into the SQL literally; a value
    # containing a single quote would break the statement.
    if download_dirs:
        download_dirs_value = json.dumps(download_dirs)
        try:
            op.execute(f"INSERT INTO systemconfig (key, value) VALUES ('DownloadDirectories', '{download_dirs_value}');")
        except Exception as e:
            op.execute(f"UPDATE systemconfig SET value = '{download_dirs_value}' WHERE key = 'DownloadDirectories';")
    # --- media library directory configuration upgrade ---
    library_dirs = []
    if _settings.LIBRARY_PATH:
        # LIBRARY_PATH may hold several comma-separated roots
        for library_path in _settings.LIBRARY_PATH.split(","):
            if _settings.LIBRARY_MOVIE_NAME:
                library_dirs.append({
                    "type": "library",
                    "name": "电影目录",
                    "path": str(Path(library_path) / _settings.LIBRARY_MOVIE_NAME),
                    "media_type": "电影",
                    "category": "",
                    "auto_category": True if _settings.LIBRARY_CATEGORY else False,
                    "scrape": True if _settings.SCRAP_METADATA else False,
                    "priority": 1
                })
            if _settings.LIBRARY_TV_NAME:
                library_dirs.append({
                    "type": "library",
                    "name": "电视剧目录",
                    "path": str(Path(library_path) / _settings.LIBRARY_TV_NAME),
                    "media_type": "电视剧",
                    "category": "",
                    "auto_category": True if _settings.LIBRARY_CATEGORY else False,
                    "scrape": True if _settings.SCRAP_METADATA else False,
                    "priority": 2
                })
            # Generic catch-all library dir per root, lowest priority (4)
            library_dirs.append({
                "type": "library",
                "name": "媒体库目录",
                "path": library_path,
                "media_type": "",
                "category": "",
                "auto_category": True if _settings.LIBRARY_CATEGORY else False,
                "scrape": True if _settings.SCRAP_METADATA else False,
                "priority": 4
            })
    # Insert into the database; on failure fall back to an update (see note above
    # in this function about quoting)
    if library_dirs:
        library_dirs_value = json.dumps(library_dirs)
        try:
            op.execute(f"INSERT INTO systemconfig (key, value) VALUES ('LibraryDirectories', '{library_dirs_value}');")
        except Exception as e:
            op.execute(f"UPDATE systemconfig SET value = '{library_dirs_value}' WHERE key = 'LibraryDirectories';")


def downgrade() -> None:
    """No-op: the legacy settings are left untouched."""
    pass

View File

@@ -0,0 +1,30 @@
"""1_0_9
Revision ID: a521fbc28b18
Revises: b2f011d3a8b7
Create Date: 2023-09-28 13:37:16.479360
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'a521fbc28b18'
down_revision = 'b2f011d3a8b7'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add nullable ``date`` and ``channel`` columns to ``downloadhistory`` (best-effort)."""
    try:
        with op.batch_alter_table("downloadhistory") as batch_op:
            batch_op.add_column(sa.Column('date', sa.String, nullable=True))
            batch_op.add_column(sa.Column('channel', sa.String, nullable=True))
    except Exception:
        # previously bound the exception to an unused name `e`
        pass


def downgrade() -> None:
    """No-op downgrade."""
    pass

View File

@@ -0,0 +1,30 @@
"""1.0.19
Revision ID: ae9d8ed8df97
Revises: 735c01e0453d
Create Date: 2024-05-16 14:21:46.108359
"""
import contextlib
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'ae9d8ed8df97'
down_revision = '735c01e0453d'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add the ``timeout`` integer column to the ``site`` table."""
    # Combined with-statement; failures are silently ignored as before.
    with contextlib.suppress(Exception), op.batch_alter_table("site") as batch_op:
        batch_op.add_column(sa.Column('timeout', sa.INTEGER))


def downgrade() -> None:
    """Downgrade is intentionally a no-op."""
    pass

View File

@@ -0,0 +1,29 @@
"""1_0_8
Revision ID: b2f011d3a8b7
Revises: 30329639c12b
Create Date: 2023-09-28 10:15:58.410003
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'b2f011d3a8b7'
down_revision = '30329639c12b'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add the nullable ``userid`` column to ``downloadhistory`` (best-effort)."""
    try:
        with op.batch_alter_table("downloadhistory") as batch_op:
            batch_op.add_column(sa.Column('userid', sa.String, nullable=True))
    except Exception:
        # previously bound the exception to an unused name `e`
        pass


def downgrade() -> None:
    """No-op downgrade."""
    pass

View File

@@ -0,0 +1,34 @@
"""1.0.16
Revision ID: d146dea51516
Revises: 5813aaa7cb3a
Create Date: 2024-03-18 18:13:38.099531
"""
import contextlib
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd146dea51516'
down_revision = '5813aaa7cb3a'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add ``subscribe.bangumiid`` and an index on it (best-effort)."""
    with contextlib.suppress(Exception):
        with op.batch_alter_table("subscribe") as batch_op:
            batch_op.add_column(sa.Column('bangumiid', sa.Integer, nullable=True))
    try:
        op.create_index('ix_subscribe_bangumiid', 'subscribe', ['bangumiid'], unique=False)
    except Exception:
        # previously bound the exception to an unused name `err`
        # NOTE(review): presumably tolerates an already-existing index — confirm
        pass


def downgrade() -> None:
    """No-op downgrade."""
    pass

View File

@@ -0,0 +1,32 @@
"""1.0.10
Revision ID: d633ca6cd572
Revises: a521fbc28b18
Create Date: 2023-10-12 08:54:49.728638
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd633ca6cd572'
down_revision = 'a521fbc28b18'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add nullable ``quality``/``resolution``/``effect`` columns to ``subscribe`` (best-effort)."""
    try:
        with op.batch_alter_table("subscribe") as batch_op:
            batch_op.add_column(sa.Column('quality', sa.String, nullable=True))
            batch_op.add_column(sa.Column('resolution', sa.String, nullable=True))
            batch_op.add_column(sa.Column('effect', sa.String, nullable=True))
    except Exception:
        # previously bound the exception to an unused name `e`
        pass


def downgrade() -> None:
    """No-op downgrade."""
    pass

View File

@@ -0,0 +1,30 @@
"""1_0_12
Revision ID: d71e624f0208
Revises: 06abf3e7090b
Create Date: 2023-12-12 13:26:34.039497
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd71e624f0208'
down_revision = '06abf3e7090b'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add the nullable ``save_path`` column to ``subscribe`` (best-effort)."""
    try:
        with op.batch_alter_table("subscribe") as batch_op:
            batch_op.add_column(sa.Column('save_path', sa.String, nullable=True))
    except Exception:
        # previously bound the exception to an unused name `e`
        pass


def downgrade() -> None:
    """No-op downgrade."""
    pass

View File

@@ -0,0 +1,27 @@
"""1.0.5
Revision ID: e734c7fe6056
Revises: 1e169250e949
Create Date: 2023-09-07 18:19:41.250957
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = 'e734c7fe6056'
down_revision = '1e169250e949'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the ``ix_transferhistory_tmdbid`` index (best-effort)."""
    try:
        op.create_index('ix_transferhistory_tmdbid', 'transferhistory', ['tmdbid'], unique=False)
    except Exception:
        # previously bound the exception to an unused name `err`
        # NOTE(review): presumably tolerates an already-existing index — confirm
        pass


def downgrade() -> None:
    """No-op downgrade."""
    pass

View File

@@ -0,0 +1,32 @@
"""1.0.2
Revision ID: ec5fb51fc300
Revises: 14f1813ae8e3
Create Date: 2023-08-12 17:55:06.509548
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'ec5fb51fc300'
down_revision = '14f1813ae8e3'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add ``transferhistory.files`` and ``rss.filter`` columns (best-effort)."""
    try:
        with op.batch_alter_table("transferhistory") as batch_op:
            batch_op.add_column(sa.Column('files', sa.String, nullable=True))
        with op.batch_alter_table("rss") as batch_op:
            batch_op.add_column(sa.Column('filter', sa.Integer, nullable=True))
    except Exception:
        # previously bound the exception to an unused name `e`
        pass


def downgrade() -> None:
    """No-op downgrade."""
    pass

View File

@@ -0,0 +1,31 @@
"""1_0_14
Revision ID: f94cd1217fd7
Revises: 127a25fdf0e8
Create Date: 2024-03-06 19:19:33.053186
"""
import contextlib
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'f94cd1217fd7'
down_revision = '127a25fdf0e8'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add the nullable ``manual_total_episode`` column to ``subscribe``."""
    # Combined with-statement; failures are silently ignored as before.
    with contextlib.suppress(Exception), op.batch_alter_table("subscribe") as batch_op:
        batch_op.add_column(sa.Column('manual_total_episode', sa.Integer, nullable=True))


def downgrade() -> None:
    """Downgrade is intentionally a no-op."""
    pass

View File

@@ -58,5 +58,5 @@ pystray~=0.19.5
pyotp~=2.9.0
Pinyin2Hanzi~=0.1.1
pywebpush~=2.0.0
py115~=0.0.4
py115j~=0.0.6
oss2~=2.18.6

View File

@@ -1 +1 @@
APP_VERSION = 'v2.0.0-alpha'
APP_VERSION = 'v1.9.19'