Mirror of https://github.com/jxxghp/MoviePilot.git (synced 2026-05-08 01:03:08 +08:00)
Compare commits
133 Commits
| SHA1 |
|---|
| 499bdf9b48 |
| 41cd1ccda1 |
| b9521cb3a9 |
| 1f40663b90 |
| 5261ed7c4c |
| aa8768b18a |
| aad07433f4 |
| 4a7630079b |
| 44a6ee1994 |
| 56bd6e69ed |
| d1e04588d0 |
| 21cdaef6d5 |
| a1723d18fb |
| 9e065138e9 |
| 1c73c92bfd |
| bcd560d74e |
| 02339562ed |
| e5804378c2 |
| da1c8a162d |
| d457a23a1f |
| b6154e58b8 |
| 5f18776c61 |
| 68b0b9ec7a |
| 0f5036972e |
| 0b199b8421 |
| a59730f6eb |
| c6c84fe65b |
| 03c757bba6 |
| bfeb8d238a |
| daf0c08c4b |
| d12c1b9ac4 |
| bc242f4fd4 |
| a240c1bca9 |
| 219aa6c574 |
| abca1b481a |
| db72fd2ef5 |
| 31cca58943 |
| c06a4b759c |
| f05a23a490 |
| 1e0f2ffde0 |
| 06df42ee3d |
| 65ee1638f7 |
| 87eefe7673 |
| 5c124d3988 |
| 8c69ce624f |
| bb73acdde5 |
| 993bc3775b |
| 3d2ff28bcd |
| 9b78deb802 |
| dadc525d0b |
| 22b2140c94 |
| f07496a4a0 |
| 1b2938cbc8 |
| d4d2f58830 |
| b3113e13ec |
| 055c8e26f0 |
| 2a7a7239d7 |
| 2fa40dac3f |
| 6b4fbd7dc2 |
| 5b0bb19717 |
| 843dfc430a |
| 69cb07c527 |
| 89e8a64734 |
| 5eb2dec32d |
| db0ea7d6c4 |
| 1eb85003de |
| cca170f84a |
| c8c016caa8 |
| 45d5874026 |
| 69b1ce60ff |
| 3ff3e4b106 |
| dc50a68b01 |
| 968cfd8654 |
| cf28d93be6 |
| be08d6ebb5 |
| 4bc24f3b00 |
| 15833f94cf |
| aeb297efcf |
| d48c6b98e8 |
| b79ccfafed |
| c87ba59552 |
| 91fd71c858 |
| 6f64e67538 |
| bd7a0b072f |
| 01ca001c97 |
| 324ad2a87c |
| d9ad2630f0 |
| 83958a4a48 |
| f6a6efdc42 |
| 1bbe7657b9 |
| 38189753b5 |
| 5b0e658617 |
| b6cf54d57f |
| e8058c8813 |
| 784868048d |
| 2bf9779f2f |
| d98ceea381 |
| 1ab2da74b9 |
| 086b1f1403 |
| 19608fa98e |
| b0d17deda1 |
| 4c979c458e |
| c5e93169ad |
| 1e2ca294de |
| 7165c4a275 |
| cbe81ba33c |
| fdbfae953d |
| c7ba274877 |
| 8b15a16ca1 |
| 9f2c8d3811 |
| 7343dfbed8 |
| 90f74d8d2b |
| 7e3e0e1178 |
| d890e38a10 |
| e505b5c85f |
| 6230f55116 |
| c8d0c14ebc |
| 6ac8455c74 |
| 143b21631f |
| d760facad8 |
| 3a1a4c5cfe |
| c3045e2cd4 |
| 1efb9af7ab |
| e03471159a |
| a92e493742 |
| 225d413ed1 |
| 184e4ba7d5 |
| 917cae27b1 |
| 60e0463051 |
| c15022c7d5 |
| 2a84e3a606 |
| fddbbd5714 |
| 51b8f7c713 |
@@ -1,3 +1,84 @@
# Ignore git
# Git
.github
.git
.git
.gitignore

# Documentation
docs/
README.md
LICENSE

# Development files
.pylintrc
*.pyc
__pycache__/
*.pyo
*.pyd
.Python
*.so
.pytest_cache/
.coverage
htmlcov/
.tox/
.nox/
.hypothesis/
.mypy_cache/
.dmypy.json
dmypy.json

# Virtual environments
venv/
env/
ENV/
env.bak/
venv.bak/

# IDE
.vscode/
.idea/
*.swp
*.swo
*~

# OS
.DS_Store
.DS_Store?
._*
.Spotlight-V100
.Trashes
ehthumbs.db
Thumbs.db

# Logs
*.log
logs/

# Temporary files
*.tmp
*.temp
tmp/
temp/

# Database
*.db
*.sqlite
*.sqlite3

# Test files
tests/
test_*
*_test.py

# Build artifacts
build/
dist/
*.egg-info/

# Docker
Dockerfile*
docker-compose*
.dockerignore

# Other
app.ico
frozen.spec
60 .github/workflows/beta.yml vendored Normal file
@@ -0,0 +1,60 @@
name: MoviePilot Builder Beta
on:
  workflow_dispatch:

jobs:
  Docker-build:
    runs-on: ubuntu-latest
    name: Build Docker Image
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Release version
        id: release_version
        run: |
          app_version=$(cat version.py |sed -ne "s/APP_VERSION\s=\s'v\(.*\)'/\1/gp")
          echo "app_version=$app_version" >> $GITHUB_ENV

      - name: Docker Meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            ${{ secrets.DOCKER_USERNAME }}/moviepilot-v2
            ghcr.io/${{ github.repository }}
          tags: |
            type=raw,value=beta

      - name: Set Up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set Up Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Login GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build Image
        uses: docker/build-push-action@v5
        with:
          context: .
          file: docker/Dockerfile
          platforms: |
            linux/amd64
            linux/arm64/v8
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha, scope=${{ github.workflow }}-docker
          cache-to: type=gha, scope=${{ github.workflow }}-docker
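The `Release version` step above extracts the application version from `version.py` with `sed`. As a rough illustration of what that pattern matches (a sketch only; the sample `version.py` content is an assumption, since the file itself is not part of this diff):

```python
import re

# Hypothetical contents of version.py; the real file is not shown in this diff.
version_py = "APP_VERSION = 'v2.0.0'\n"

# Mirrors the sed expression s/APP_VERSION\s=\s'v\(.*\)'/\1/p:
# capture everything after the leading "v" inside the quotes.
match = re.search(r"APP_VERSION\s=\s'v(.*)'", version_py)
app_version = match.group(1) if match else ""
print(app_version)  # -> 2.0.0
```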
1 .github/workflows/build.yml vendored
@@ -27,6 +27,7 @@ jobs:
        with:
          images: |
            ${{ secrets.DOCKER_USERNAME }}/moviepilot-v2
            ${{ secrets.DOCKER_USERNAME }}/moviepilot
            ghcr.io/${{ github.repository }}
          tags: |
            type=raw,value=${{ env.app_version }}
22 README.md
@@ -18,7 +18,7 @@

## Key Features

- Separated front end and back end, built on FastAPI + Vue3; front-end project: [MoviePilot-Frontend](https://github.com/jxxghp/MoviePilot-Frontend)
- Separated front end and back end, built on FastAPI + Vue3.
- Focuses on core needs, simplifying features and settings; some options can simply be left at their default values.
- Redesigned user interface that is better looking and easier to use.

@@ -26,11 +26,11 @@

Official wiki: https://wiki.movie-pilot.org

API docs: https://api.movie-pilot.org

## Contributing

Requires `Python 3.12` and `Node JS v20.12.1`
API docs: https://api.movie-pilot.org

Running locally requires `Python 3.12` and `Node JS v20.12.1`

- Clone the main project [MoviePilot](https://github.com/jxxghp/MoviePilot)
```shell
@@ -56,6 +56,20 @@ yarn dev
```
- Follow the [plugin development guide](https://wiki.movie-pilot.org/zh/plugindev) to develop plugin code under the `app/plugins` directory (a minimal sketch follows below)
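The plugin development guide linked above covers the real interface; purely as an illustrative sketch (the base class, attribute, and hook names here are assumptions modelled on typical existing plugins, not taken from this diff), a plugin is a class placed in its own package under `app/plugins`:

```python
# app/plugins/myplugin/__init__.py  (hypothetical example)
from app.log import logger
from app.plugins import _PluginBase  # assumed import path; see the bundled plugins


class MyPlugin(_PluginBase):
    # Descriptive metadata shown in the plugin market (attribute names assumed)
    plugin_name = "My Plugin"
    plugin_desc = "Minimal example plugin"
    plugin_version = "1.0"
    plugin_author = "example"

    def init_plugin(self, config: dict = None):
        # Called when the plugin is loaded or re-enabled with its saved config
        logger.info(f"MyPlugin initialised with config: {config}")

    def get_state(self) -> bool:
        # Whether the plugin is currently enabled
        return True

    def stop_service(self):
        # Release any resources (schedulers, threads) on shutdown
        pass
```

The guide and the plugins already shipped with the project remain the authoritative reference for the exact attribute and method names.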
## Related Projects

- [MoviePilot-Frontend](https://github.com/jxxghp/MoviePilot-Frontend)
- [MoviePilot-Resources](https://github.com/jxxghp/MoviePilot-Resources)
- [MoviePilot-Plugins](https://github.com/jxxghp/MoviePilot-Plugins)
- [MoviePilot-Server](https://github.com/jxxghp/MoviePilot-Server)
- [MoviePilot-Wiki](https://github.com/jxxghp/MoviePilot-Wiki)

## Disclaimer

- This software is for learning and exchange only; it must not be used for commercial purposes or for illegal or criminal activity. The software has no knowledge of user behaviour, and all responsibility lies with the user.
- The code is open source. Anyone who modifies it to remove the built-in restrictions and then distributes the result, causing a liability incident, bears full responsibility as the publisher of the modified code; circumventing or modifying the user authentication mechanism and publicly releasing the result is not recommended.
- This project does not accept donations and has not published a donation page anywhere; the software itself is free of charge and offers no paid services, so please check carefully to avoid being misled.

## Contributors

<a href="https://github.com/jxxghp/MoviePilot/graphs/contributors">
@@ -90,7 +90,7 @@ def delete_transfer_history(history_in: schemas.TransferHistory,
    # 册除媒体库文件
    if deletedest and history.dest_fileitem:
        dest_fileitem = schemas.FileItem(**history.dest_fileitem)
        StorageChain().delete_media_file(fileitem=dest_fileitem, mtype=MediaType(history.type))
        StorageChain().delete_media_file(dest_fileitem)

    # 删除源文件
    if deletesrc and history.src_fileitem:
@@ -106,7 +106,7 @@ def wechat_verify(echostr: str, msg_signature: str, timestamp: Union[str, int],
        return str(err)


async def vocechat_verify() -> Any:
def vocechat_verify() -> Any:
    """
    VoceChat验证响应
    """
@@ -3,8 +3,9 @@ import shutil
from typing import Annotated, Any, List, Optional

import aiofiles
from aiopath import AsyncPath
from anyio import Path as AsyncPath
from fastapi import APIRouter, Depends, Header, HTTPException
from fastapi.concurrency import run_in_threadpool
from starlette import status
from starlette.responses import StreamingResponse
@@ -216,10 +217,10 @@ def reload_plugin(plugin_id: str, _: User = Depends(get_current_active_superuser
|
||||
|
||||
|
||||
@router.get("/install/{plugin_id}", summary="安装插件", response_model=schemas.Response)
|
||||
def install(plugin_id: str,
|
||||
repo_url: Optional[str] = "",
|
||||
force: Optional[bool] = False,
|
||||
_: User = Depends(get_current_active_superuser)) -> Any:
|
||||
async def install(plugin_id: str,
|
||||
repo_url: Optional[str] = "",
|
||||
force: Optional[bool] = False,
|
||||
_: User = Depends(get_current_active_superuser_async)) -> Any:
|
||||
"""
|
||||
安装插件
|
||||
"""
|
||||
@@ -228,11 +229,11 @@ def install(plugin_id: str,
|
||||
# 首先检查插件是否已经存在,并且是否强制安装,否则只进行安装统计
|
||||
plugin_helper = PluginHelper()
|
||||
if not force and plugin_id in PluginManager().get_plugin_ids():
|
||||
plugin_helper.install_reg(pid=plugin_id)
|
||||
await plugin_helper.async_install_reg(pid=plugin_id)
|
||||
else:
|
||||
# 插件不存在或需要强制安装,下载安装并注册插件
|
||||
if repo_url:
|
||||
state, msg = plugin_helper.install(pid=plugin_id, repo_url=repo_url)
|
||||
state, msg = await plugin_helper.async_install(pid=plugin_id, repo_url=repo_url)
|
||||
# 安装失败则直接响应
|
||||
if not state:
|
||||
return schemas.Response(success=False, message=msg)
|
||||
@@ -243,14 +244,14 @@ def install(plugin_id: str,
|
||||
if plugin_id not in install_plugins:
|
||||
install_plugins.append(plugin_id)
|
||||
# 保存设置
|
||||
SystemConfigOper().set(SystemConfigKey.UserInstalledPlugins, install_plugins)
|
||||
await SystemConfigOper().async_set(SystemConfigKey.UserInstalledPlugins, install_plugins)
|
||||
# 重新加载插件
|
||||
reload_plugin(plugin_id)
|
||||
await run_in_threadpool(reload_plugin, plugin_id)
|
||||
return schemas.Response(success=True)
|
||||
|
||||
|
||||
@router.get("/remotes", summary="获取插件联邦组件列表", response_model=List[dict])
|
||||
def remotes(token: str) -> Any:
|
||||
async def remotes(token: str) -> Any:
|
||||
"""
|
||||
获取插件联邦组件列表
|
||||
"""
|
||||
|
||||
@@ -78,10 +78,14 @@ async def create_subscribe(
|
||||
title = None
|
||||
# 订阅用户
|
||||
subscribe_in.username = current_user.name
|
||||
# 转化为字典
|
||||
subscribe_dict = subscribe_in.dict()
|
||||
if subscribe_in.id:
|
||||
subscribe_dict.pop("id", None)
|
||||
sid, message = await SubscribeChain().async_add(mtype=mtype,
|
||||
title=title,
|
||||
exist_ok=True,
|
||||
**subscribe_in.dict())
|
||||
**subscribe_dict)
|
||||
return schemas.Response(
|
||||
success=bool(sid), message=message, data={"id": sid}
|
||||
)
|
||||
|
||||
@@ -4,19 +4,20 @@ import json
|
||||
import re
|
||||
from collections import deque
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Optional, Union, Annotated
|
||||
|
||||
import aiofiles
|
||||
import pillow_avif # noqa 用于自动注册AVIF支持
|
||||
from PIL import Image
|
||||
from aiopath import AsyncPath
|
||||
from app.helper.sites import SitesHelper # noqa # noqa
|
||||
from anyio import Path as AsyncPath
|
||||
from fastapi import APIRouter, Body, Depends, HTTPException, Header, Request, Response
|
||||
from fastapi.responses import StreamingResponse
|
||||
|
||||
from app import schemas
|
||||
from app.chain.search import SearchChain
|
||||
from app.chain.system import SystemChain
|
||||
from app.core.cache import AsyncFileCache
|
||||
from app.core.config import global_vars, settings
|
||||
from app.core.event import eventmanager
|
||||
from app.core.metainfo import MetaInfo
|
||||
@@ -24,11 +25,13 @@ from app.core.module import ModuleManager
|
||||
from app.core.security import verify_apitoken, verify_resource_token, verify_token
|
||||
from app.db.models import User
|
||||
from app.db.systemconfig_oper import SystemConfigOper
|
||||
from app.db.user_oper import get_current_active_superuser, get_current_active_superuser_async
|
||||
from app.db.user_oper import get_current_active_superuser, get_current_active_superuser_async, \
|
||||
get_current_active_user_async
|
||||
from app.helper.mediaserver import MediaServerHelper
|
||||
from app.helper.message import MessageHelper
|
||||
from app.helper.progress import ProgressHelper
|
||||
from app.helper.rule import RuleHelper
|
||||
from app.helper.sites import SitesHelper # noqa # noqa
|
||||
from app.helper.subscribe import SubscribeHelper
|
||||
from app.helper.system import SystemHelper
|
||||
from app.log import logger
|
||||
@@ -47,7 +50,7 @@ router = APIRouter()
|
||||
async def fetch_image(
|
||||
url: str,
|
||||
proxy: bool = False,
|
||||
use_disk_cache: bool = False,
|
||||
use_cache: bool = False,
|
||||
if_none_match: Optional[str] = None,
|
||||
allowed_domains: Optional[set[str]] = None) -> Response:
|
||||
"""
|
||||
@@ -63,37 +66,31 @@ async def fetch_image(
|
||||
if not SecurityUtils.is_safe_url(url, allowed_domains):
|
||||
raise HTTPException(status_code=404, detail="Unsafe URL")
|
||||
|
||||
# 后续观察系统性能表现,如果发现磁盘缓存和HTTP缓存无法满足高并发情况下的响应速度需求,可以考虑重新引入内存缓存
|
||||
cache_path: Optional[AsyncPath] = None
|
||||
if use_disk_cache:
|
||||
# 生成缓存路径
|
||||
base_path = AsyncPath(settings.CACHE_PATH)
|
||||
sanitized_path = SecurityUtils.sanitize_url_path(url)
|
||||
cache_path = base_path / "images" / sanitized_path
|
||||
|
||||
# 缓存路径
|
||||
sanitized_path = SecurityUtils.sanitize_url_path(url)
|
||||
cache_path = Path("images") / sanitized_path
|
||||
if not cache_path.suffix:
|
||||
# 没有文件类型,则添加后缀,在恶意文件类型和实际需求下的折衷选择
|
||||
if not cache_path.suffix:
|
||||
cache_path = cache_path.with_suffix(".jpg")
|
||||
cache_path = cache_path.with_suffix(".jpg")
|
||||
|
||||
# 确保缓存路径和文件类型合法
|
||||
if not await SecurityUtils.async_is_safe_path(base_path=base_path,
|
||||
user_path=cache_path,
|
||||
allowed_suffixes=settings.SECURITY_IMAGE_SUFFIXES):
|
||||
raise HTTPException(status_code=400, detail="Invalid cache path or file type")
|
||||
# 缓存对像,缓存过期时间为全局图片缓存天数
|
||||
cache_backend = AsyncFileCache(base=settings.CACHE_PATH,
|
||||
ttl=settings.GLOBAL_IMAGE_CACHE_DAYS * 24 * 3600)
|
||||
|
||||
# 目前暂不考虑磁盘缓存文件是否过期,后续通过缓存清理机制处理
|
||||
if cache_path and await cache_path.exists():
|
||||
try:
|
||||
async with cache_path.open('rb') as f:
|
||||
content = await f.read()
|
||||
etag = HashUtils.md5(content)
|
||||
headers = RequestUtils.generate_cache_headers(etag, max_age=86400 * 7)
|
||||
if if_none_match == etag:
|
||||
return Response(status_code=304, headers=headers)
|
||||
return Response(content=content, media_type="image/jpeg", headers=headers)
|
||||
except Exception as e:
|
||||
# 如果读取磁盘缓存发生异常,这里仅记录日志,尝试再次请求远端进行处理
|
||||
logger.debug(f"Failed to read cache file {cache_path}: {e}")
|
||||
if use_cache:
|
||||
content = await cache_backend.get(cache_path.as_posix(), region="images")
|
||||
if content:
|
||||
# 检查 If-None-Match
|
||||
etag = HashUtils.md5(content)
|
||||
headers = RequestUtils.generate_cache_headers(etag, max_age=86400 * 7)
|
||||
if if_none_match == etag:
|
||||
return Response(status_code=304, headers=headers)
|
||||
# 返回缓存图片
|
||||
return Response(
|
||||
content=content,
|
||||
media_type=UrlUtils.get_mime_type(url, "image/jpeg"),
|
||||
headers=headers
|
||||
)
|
||||
|
||||
# 请求远程图片
|
||||
referer = "https://movie.douban.com/" if "doubanio.com" in url else None
|
||||
@@ -111,22 +108,15 @@ async def fetch_image(
|
||||
logger.debug(f"Invalid image format for URL {url}: {e}")
|
||||
raise HTTPException(status_code=502, detail="Invalid image format")
|
||||
|
||||
# 获取请求响应头
|
||||
response_headers = response.headers
|
||||
|
||||
cache_control_header = response_headers.get("Cache-Control", "")
|
||||
cache_directive, max_age = RequestUtils.parse_cache_control(cache_control_header)
|
||||
|
||||
# 如果需要使用磁盘缓存,则保存到磁盘
|
||||
if use_disk_cache and cache_path:
|
||||
try:
|
||||
if not await cache_path.parent.exists():
|
||||
await cache_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
async with aiofiles.tempfile.NamedTemporaryFile(dir=cache_path.parent, delete=False) as tmp_file:
|
||||
await tmp_file.write(content)
|
||||
temp_path = AsyncPath(tmp_file.name)
|
||||
await temp_path.replace(cache_path)
|
||||
except Exception as e:
|
||||
logger.debug(f"Failed to write cache file {cache_path}: {e}")
|
||||
# 保存缓存
|
||||
if use_cache:
|
||||
await cache_backend.set(cache_path.as_posix(), content, region="images")
|
||||
logger.debug(f"Image cached at {cache_path.as_posix()}")
|
||||
|
||||
# 检查 If-None-Match
|
||||
etag = HashUtils.md5(content)
|
||||
@@ -134,8 +124,8 @@ async def fetch_image(
|
||||
headers = RequestUtils.generate_cache_headers(etag, cache_directive, max_age)
|
||||
return Response(status_code=304, headers=headers)
|
||||
|
||||
# 响应
|
||||
headers = RequestUtils.generate_cache_headers(etag, cache_directive, max_age)
|
||||
|
||||
return Response(
|
||||
content=content,
|
||||
media_type=response_headers.get("Content-Type") or UrlUtils.get_mime_type(url, "image/jpeg"),
|
||||
@@ -158,7 +148,7 @@ async def proxy_img(
|
||||
hosts = [config.config.get("host") for config in MediaServerHelper().get_configs().values() if
|
||||
config and config.config and config.config.get("host")]
|
||||
allowed_domains = set(settings.SECURITY_IMAGE_DOMAINS) | set(hosts)
|
||||
return await fetch_image(url=imgurl, proxy=proxy, use_disk_cache=cache,
|
||||
return await fetch_image(url=imgurl, proxy=proxy, use_cache=cache,
|
||||
if_none_match=if_none_match, allowed_domains=allowed_domains)
|
||||
|
||||
|
||||
@@ -173,7 +163,7 @@ async def cache_img(
|
||||
"""
|
||||
# 如果没有启用全局图片缓存,则不使用磁盘缓存
|
||||
proxy = "doubanio.com" not in url
|
||||
return await fetch_image(url=url, proxy=proxy, use_disk_cache=settings.GLOBAL_IMAGE_CACHE,
|
||||
return await fetch_image(url=url, proxy=proxy, use_cache=settings.GLOBAL_IMAGE_CACHE,
|
||||
if_none_match=if_none_match)
|
||||
|
||||
|
||||
@@ -203,7 +193,7 @@ def get_global_setting(token: str):
|
||||
|
||||
|
||||
@router.get("/env", summary="查询系统配置", response_model=schemas.Response)
|
||||
async def get_env_setting(_: User = Depends(get_current_active_superuser_async)):
|
||||
async def get_env_setting(_: User = Depends(get_current_active_user_async)):
|
||||
"""
|
||||
查询系统环境变量,包括当前版本号(仅管理员)
|
||||
"""
|
||||
@@ -282,7 +272,7 @@ async def get_progress(request: Request, process_type: str, _: schemas.TokenPayl
|
||||
|
||||
@router.get("/setting/{key}", summary="查询系统设置", response_model=schemas.Response)
|
||||
async def get_setting(key: str,
|
||||
_: User = Depends(get_current_active_superuser_async)):
|
||||
_: User = Depends(get_current_active_user_async)):
|
||||
"""
|
||||
查询系统设置(仅管理员)
|
||||
"""
|
||||
@@ -381,7 +371,7 @@ async def get_logging(request: Request, length: Optional[int] = 50, logfile: Opt
|
||||
file_size = file_stat.st_size
|
||||
|
||||
# 读取历史日志
|
||||
async with log_path.open(mode="r", encoding="utf-8", errors="ignore") as f:
|
||||
async with aiofiles.open(log_path, mode="r", encoding="utf-8", errors="ignore") as f:
|
||||
# 优化大文件读取策略
|
||||
if file_size > 100 * 1024:
|
||||
# 只读取最后100KB的内容
|
||||
@@ -408,7 +398,7 @@ async def get_logging(request: Request, length: Optional[int] = 50, logfile: Opt
|
||||
yield f"data: {line}\n\n"
|
||||
|
||||
# 实时监听新日志
|
||||
async with log_path.open(mode="r", encoding="utf-8", errors="ignore") as f:
|
||||
async with aiofiles.open(log_path, mode="r", encoding="utf-8", errors="ignore") as f:
|
||||
# 移动文件指针到文件末尾,继续监听新增内容
|
||||
await f.seek(0, 2)
|
||||
# 记录初始文件大小
|
||||
@@ -445,7 +435,7 @@ async def get_logging(request: Request, length: Optional[int] = 50, logfile: Opt
|
||||
return Response(content="日志文件不存在!", media_type="text/plain")
|
||||
try:
|
||||
# 使用 aiofiles 异步读取文件
|
||||
async with log_path.open(mode="r", encoding="utf-8", errors="ignore") as file:
|
||||
async with aiofiles.open(log_path, mode="r", encoding="utf-8", errors="ignore") as file:
|
||||
text = await file.read()
|
||||
# 倒序输出
|
||||
text = "\n".join(text.split("\n")[::-1])
|
||||
|
||||
@@ -135,8 +135,8 @@ def refresh_cache(_: User = Depends(get_current_active_superuser)):
|
||||
|
||||
@router.post("/cache/reidentify/{domain}/{torrent_hash}", summary="重新识别种子", response_model=schemas.Response)
|
||||
async def reidentify_cache(domain: str, torrent_hash: str,
|
||||
tmdbid: Optional[int] = None, doubanid: Optional[str] = None,
|
||||
_: User = Depends(get_current_active_superuser_async)):
|
||||
tmdbid: Optional[int] = None, doubanid: Optional[str] = None,
|
||||
_: User = Depends(get_current_active_superuser_async)):
|
||||
"""
|
||||
重新识别指定的种子
|
||||
:param domain: 站点域名
|
||||
|
||||
@@ -109,7 +109,7 @@ def manual_transfer(transer_item: ManualTransferItem,
|
||||
if history.dest_fileitem:
|
||||
# 删除旧的已整理文件
|
||||
dest_fileitem = FileItem(**history.dest_fileitem)
|
||||
state = StorageChain().delete_media_file(dest_fileitem, mtype=MediaType(history.type))
|
||||
state = StorageChain().delete_media_file(dest_fileitem)
|
||||
if not state:
|
||||
return schemas.Response(success=False, message=f"{dest_fileitem.path} 删除失败")
|
||||
|
||||
|
||||
@@ -2,7 +2,8 @@ import gzip
import json
from typing import Annotated, Callable, Any, Dict, Optional

from aiopath import AsyncPath
import aiofiles
from anyio import Path as AsyncPath
from fastapi import APIRouter, Depends, HTTPException, Path, Request, Response
from fastapi.responses import PlainTextResponse
from fastapi.routing import APIRoute
@@ -67,9 +68,9 @@ async def update_cookie(req: schemas.CookieData):
    """
    file_path = AsyncPath(settings.COOKIE_PATH) / f"{req.uuid}.json"
    content = json.dumps({"encrypted": req.encrypted})
    async with file_path.open(encoding="utf-8", mode="w") as file:
    async with aiofiles.open(file_path, encoding="utf-8", mode="w") as file:
        await file.write(content)
    async with file_path.open(encoding="utf-8", mode="r") as file:
    async with aiofiles.open(file_path, encoding="utf-8", mode="r") as file:
        read_content = await file.read()
    if read_content == content:
        return {"action": "done"}
@@ -88,7 +89,7 @@ async def load_encrypt_data(uuid: str) -> Dict[str, Any]:
        raise HTTPException(status_code=404, detail="Item not found")

    # 读取文件
    async with file_path.open(encoding="utf-8", mode="r") as file:
    async with aiofiles.open(file_path, encoding="utf-8", mode="r") as file:
        read_content = await file.read()
    data = json.loads(read_content.encode("utf-8"))
    return data
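The hunks above swap `aiopath`'s `AsyncPath.open` for `aiofiles.open` when reading and writing the cookie files. A minimal standalone sketch of the same write-then-read-back round trip (the file name and payload are made up for illustration; only the `aiofiles` package is assumed):

```python
import asyncio
import json

import aiofiles


async def save_and_verify(file_path: str, payload: dict) -> bool:
    content = json.dumps(payload)
    # Write the JSON document asynchronously
    async with aiofiles.open(file_path, mode="w", encoding="utf-8") as f:
        await f.write(content)
    # Read it back and confirm the write landed intact
    async with aiofiles.open(file_path, mode="r", encoding="utf-8") as f:
        read_back = await f.read()
    return read_back == content


if __name__ == "__main__":
    ok = asyncio.run(save_and_verify("cookie_test.json", {"encrypted": "abc123"}))
    print("done" if ok else "mismatch")
```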
@@ -8,12 +8,10 @@ from pathlib import Path
|
||||
from typing import Optional, Any, Tuple, List, Set, Union, Dict
|
||||
|
||||
from fastapi.concurrency import run_in_threadpool
|
||||
|
||||
import aiofiles
|
||||
from aiopath import AsyncPath
|
||||
from qbittorrentapi import TorrentFilesList
|
||||
from transmission_rpc import File
|
||||
|
||||
from app.core.cache import FileCache, AsyncFileCache
|
||||
from app.core.config import settings
|
||||
from app.core.context import Context, MediaInfo, TorrentInfo
|
||||
from app.core.event import EventManager
|
||||
@@ -48,78 +46,66 @@ class ChainBase(metaclass=ABCMeta):
|
||||
send_callback=self.run_module
|
||||
)
|
||||
self.pluginmanager = PluginManager()
|
||||
self.filecache = FileCache()
|
||||
self.async_filecache = AsyncFileCache()
|
||||
|
||||
@staticmethod
|
||||
def load_cache(filename: str) -> Any:
|
||||
def load_cache(self, filename: str) -> Any:
|
||||
"""
|
||||
从本地加载缓存
|
||||
加载缓存
|
||||
"""
|
||||
cache_path = settings.TEMP_PATH / filename
|
||||
if cache_path.exists():
|
||||
try:
|
||||
with open(cache_path, 'rb') as f:
|
||||
return pickle.load(f)
|
||||
except Exception as err:
|
||||
logger.error(f"加载缓存 {filename} 出错:{str(err)}")
|
||||
return None
|
||||
content = self.filecache.get(filename)
|
||||
if not content:
|
||||
return None
|
||||
try:
|
||||
return pickle.loads(content)
|
||||
except Exception as err:
|
||||
logger.error(f"加载缓存 {filename} 出错:{str(err)}")
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
async def async_load_cache(filename: str) -> Any:
|
||||
async def async_load_cache(self, filename: str) -> Any:
|
||||
"""
|
||||
异步从本地加载缓存
|
||||
异步加载缓存
|
||||
"""
|
||||
cache_path = settings.TEMP_PATH / filename
|
||||
if cache_path.exists():
|
||||
try:
|
||||
async with aiofiles.open(cache_path, 'rb') as f:
|
||||
content = await f.read()
|
||||
return pickle.loads(content)
|
||||
except Exception as err:
|
||||
logger.error(f"加载缓存 {filename} 出错:{str(err)}")
|
||||
return None
|
||||
content = await self.async_filecache.get(filename)
|
||||
if not content:
|
||||
return None
|
||||
try:
|
||||
return pickle.loads(content)
|
||||
except Exception as err:
|
||||
logger.error(f"异步加载缓存 {filename} 出错:{str(err)}")
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
async def async_save_cache(cache: Any, filename: str) -> None:
|
||||
async def async_save_cache(self, cache: Any, filename: str) -> None:
|
||||
"""
|
||||
异步保存缓存到本地
|
||||
异步保存缓存
|
||||
"""
|
||||
try:
|
||||
async with aiofiles.open(settings.TEMP_PATH / filename, 'wb') as f:
|
||||
await f.write(pickle.dumps(cache))
|
||||
await self.async_filecache.set(filename, pickle.dumps(cache))
|
||||
except Exception as err:
|
||||
logger.error(f"保存缓存 {filename} 出错:{str(err)}")
|
||||
logger.error(f"异步保存缓存 {filename} 出错:{str(err)}")
|
||||
return
|
||||
|
||||
@staticmethod
|
||||
def save_cache(cache: Any, filename: str) -> None:
|
||||
def save_cache(self, cache: Any, filename: str) -> None:
|
||||
"""
|
||||
保存缓存到本地
|
||||
保存缓存
|
||||
"""
|
||||
try:
|
||||
with open(settings.TEMP_PATH / filename, 'wb') as f:
|
||||
pickle.dump(cache, f) # noqa
|
||||
self.filecache.set(filename, pickle.dumps(cache))
|
||||
except Exception as err:
|
||||
logger.error(f"保存缓存 {filename} 出错:{str(err)}")
|
||||
return
|
||||
|
||||
@staticmethod
|
||||
def remove_cache(filename: str) -> None:
|
||||
def remove_cache(self, filename: str) -> None:
|
||||
"""
|
||||
删除本地缓存
|
||||
删除缓存,同时删除Redis和本地缓存
|
||||
"""
|
||||
cache_path = settings.TEMP_PATH / filename
|
||||
if cache_path.exists():
|
||||
cache_path.unlink()
|
||||
self.filecache.delete(filename)
|
||||
|
||||
@staticmethod
|
||||
async def async_remove_cache(filename: str) -> None:
|
||||
async def async_remove_cache(self, filename: str) -> None:
|
||||
"""
|
||||
异步删除本地缓存
|
||||
异步删除缓存,同时删除Redis和本地缓存
|
||||
"""
|
||||
cache_path = AsyncPath(settings.TEMP_PATH) / filename
|
||||
if await cache_path.exists():
|
||||
try:
|
||||
await cache_path.unlink()
|
||||
except Exception as err:
|
||||
logger.error(f"异步删除缓存 {filename} 出错:{str(err)}")
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
def __is_valid_empty(ret):
|
||||
@@ -700,13 +686,13 @@ class ChainBase(metaclass=ABCMeta):
|
||||
return self.run_module("filter_torrents", rule_groups=rule_groups,
|
||||
torrent_list=torrent_list, mediainfo=mediainfo)
|
||||
|
||||
def download(self, content: Union[Path, str], download_dir: Path, cookie: str,
|
||||
def download(self, content: Union[Path, str, bytes], download_dir: Path, cookie: str,
|
||||
episodes: Set[int] = None, category: Optional[str] = None, label: Optional[str] = None,
|
||||
downloader: Optional[str] = None
|
||||
) -> Optional[Tuple[Optional[str], Optional[str], Optional[str], str]]:
|
||||
"""
|
||||
根据种子文件,选择并添加下载任务
|
||||
:param content: 种子文件地址或者磁力链接
|
||||
:param content: 种子文件地址或者磁力链接或者种子内容
|
||||
:param download_dir: 下载目录
|
||||
:param cookie: cookie
|
||||
:param episodes: 需要下载的集数
|
||||
@@ -719,15 +705,16 @@ class ChainBase(metaclass=ABCMeta):
|
||||
cookie=cookie, episodes=episodes, category=category, label=label,
|
||||
downloader=downloader)
|
||||
|
||||
def download_added(self, context: Context, download_dir: Path, torrent_path: Path = None) -> None:
|
||||
def download_added(self, context: Context, download_dir: Path, torrent_content: Union[str, bytes] = None) -> None:
|
||||
"""
|
||||
添加下载任务成功后,从站点下载字幕,保存到下载目录
|
||||
:param context: 上下文,包括识别信息、媒体信息、种子信息
|
||||
:param download_dir: 下载目录
|
||||
:param torrent_path: 种子文件地址
|
||||
:param torrent_content: 种子内容,如果有则直接使用该内容,否则从context中获取种子文件路径
|
||||
:return: None,该方法可被多个模块同时处理
|
||||
"""
|
||||
return self.run_module("download_added", context=context, torrent_path=torrent_path,
|
||||
return self.run_module("download_added", context=context,
|
||||
torrent_content=torrent_content,
|
||||
download_dir=download_dir)
|
||||
|
||||
def list_torrents(self, status: TorrentStatus = None,
|
||||
@@ -923,12 +910,12 @@ class ChainBase(metaclass=ABCMeta):
|
||||
immediately=True if message.userid else False)
|
||||
|
||||
async def async_post_message(self,
|
||||
message: Optional[Notification] = None,
|
||||
meta: Optional[MetaBase] = None,
|
||||
mediainfo: Optional[MediaInfo] = None,
|
||||
torrentinfo: Optional[TorrentInfo] = None,
|
||||
transferinfo: Optional[TransferInfo] = None,
|
||||
**kwargs) -> None:
|
||||
message: Optional[Notification] = None,
|
||||
meta: Optional[MetaBase] = None,
|
||||
mediainfo: Optional[MediaInfo] = None,
|
||||
torrentinfo: Optional[TorrentInfo] = None,
|
||||
transferinfo: Optional[TransferInfo] = None,
|
||||
**kwargs) -> None:
|
||||
"""
|
||||
异步发送消息
|
||||
:param message: Notification实例
|
||||
@@ -991,15 +978,16 @@ class ChainBase(metaclass=ABCMeta):
|
||||
break
|
||||
# 按设定发送
|
||||
await self.eventmanager.async_send_event(etype=EventType.NoticeMessage,
|
||||
data={**send_message.dict(), "type": send_message.mtype})
|
||||
data={**send_message.dict(), "type": send_message.mtype})
|
||||
await self.messagequeue.async_send_message("post_message", message=send_message)
|
||||
if not send_orignal:
|
||||
return
|
||||
# 发送消息事件
|
||||
await self.eventmanager.async_send_event(etype=EventType.NoticeMessage, data={**message.dict(), "type": message.mtype})
|
||||
await self.eventmanager.async_send_event(etype=EventType.NoticeMessage,
|
||||
data={**message.dict(), "type": message.mtype})
|
||||
# 按原消息发送
|
||||
await self.messagequeue.async_send_message("post_message", message=message,
|
||||
immediately=True if message.userid else False)
|
||||
immediately=True if message.userid else False)
|
||||
|
||||
def post_medias_message(self, message: Notification, medias: List[MediaInfo]) -> None:
|
||||
"""
|
||||
|
||||
@@ -8,6 +8,7 @@ from typing import List, Optional, Tuple, Set, Dict, Union
|
||||
|
||||
from app import schemas
|
||||
from app.chain import ChainBase
|
||||
from app.core.cache import FileCache
|
||||
from app.core.config import settings, global_vars
|
||||
from app.core.context import MediaInfo, TorrentInfo, Context
|
||||
from app.core.event import eventmanager, Event
|
||||
@@ -35,10 +36,10 @@ class DownloadChain(ChainBase):
|
||||
channel: MessageChannel = None,
|
||||
source: Optional[str] = None,
|
||||
userid: Union[str, int] = None
|
||||
) -> Tuple[Optional[Union[Path, str]], str, list]:
|
||||
) -> Tuple[Optional[Union[str, bytes]], str, list]:
|
||||
"""
|
||||
下载种子文件,如果是磁力链,会返回磁力链接本身
|
||||
:return: 种子路径,种子目录名,种子文件清单
|
||||
:return: 种子内容,种子目录名,种子文件清单
|
||||
"""
|
||||
|
||||
def __get_redict_url(url: str, ua: Optional[str] = None, cookie: Optional[str] = None) -> Optional[str]:
|
||||
@@ -60,6 +61,8 @@ class DownloadChain(ChainBase):
|
||||
# 是否使用cookie
|
||||
if not req_params.get('cookie'):
|
||||
cookie = None
|
||||
# 代理
|
||||
proxy = req_params.get('proxy')
|
||||
# 请求头
|
||||
if req_params.get('header'):
|
||||
headers = req_params.get('header')
|
||||
@@ -70,14 +73,16 @@ class DownloadChain(ChainBase):
|
||||
res = RequestUtils(
|
||||
ua=ua,
|
||||
cookies=cookie,
|
||||
headers=headers
|
||||
headers=headers,
|
||||
proxies=settings.PROXY if proxy else None
|
||||
).get_res(url, params=req_params.get('params'))
|
||||
else:
|
||||
# POST请求
|
||||
res = RequestUtils(
|
||||
ua=ua,
|
||||
cookies=cookie,
|
||||
headers=headers
|
||||
headers=headers,
|
||||
proxies=settings.PROXY if proxy else None
|
||||
).post_res(url, params=req_params.get('params'))
|
||||
if not res:
|
||||
return None
|
||||
@@ -113,7 +118,7 @@ class DownloadChain(ChainBase):
|
||||
logger.error(f"{torrent.title} 无法获取下载地址:{torrent.enclosure}!")
|
||||
return None, "", []
|
||||
# 下载种子文件
|
||||
torrent_file, content, download_folder, files, error_msg = TorrentHelper().download_torrent(
|
||||
_, content, download_folder, files, error_msg = TorrentHelper().download_torrent(
|
||||
url=torrent_url,
|
||||
cookie=site_cookie,
|
||||
ua=torrent.site_ua or settings.USER_AGENT,
|
||||
@@ -123,7 +128,7 @@ class DownloadChain(ChainBase):
|
||||
# 磁力链
|
||||
return content, "", []
|
||||
|
||||
if not torrent_file:
|
||||
if not content:
|
||||
logger.error(f"下载种子文件失败:{torrent.title} - {torrent_url}")
|
||||
self.post_message(Notification(
|
||||
channel=channel,
|
||||
@@ -135,9 +140,11 @@ class DownloadChain(ChainBase):
|
||||
return None, "", []
|
||||
|
||||
# 返回 种子文件路径,种子目录名,种子文件清单
|
||||
return torrent_file, download_folder, files
|
||||
return content, download_folder, files
|
||||
|
||||
def download_single(self, context: Context, torrent_file: Path = None,
|
||||
def download_single(self, context: Context,
|
||||
torrent_file: Path = None,
|
||||
torrent_content: Optional[Union[str, bytes]] = None,
|
||||
episodes: Set[int] = None,
|
||||
channel: MessageChannel = None,
|
||||
source: Optional[str] = None,
|
||||
@@ -150,6 +157,7 @@ class DownloadChain(ChainBase):
|
||||
下载及发送通知
|
||||
:param context: 资源上下文
|
||||
:param torrent_file: 种子文件路径
|
||||
:param torrent_content: 种子内容(磁力链或种子文件内容)
|
||||
:param episodes: 需要下载的集数
|
||||
:param channel: 通知渠道
|
||||
:param source: 来源(消息通知、Subscribe、Manual等)
|
||||
@@ -203,18 +211,26 @@ class DownloadChain(ChainBase):
|
||||
# 实际下载的集数
|
||||
download_episodes = StringUtils.format_ep(list(episodes)) if episodes else None
|
||||
_folder_name = ""
|
||||
if not torrent_file:
|
||||
if not torrent_file and not torrent_content:
|
||||
# 下载种子文件,得到的可能是文件也可能是磁力链
|
||||
content, _folder_name, _file_list = self.download_torrent(_torrent,
|
||||
channel=channel,
|
||||
source=source,
|
||||
userid=userid)
|
||||
if not content:
|
||||
return None
|
||||
else:
|
||||
content = torrent_file
|
||||
# 获取种子文件的文件夹名和文件清单
|
||||
_folder_name, _file_list = TorrentHelper().get_torrent_info(torrent_file)
|
||||
torrent_content, _folder_name, _file_list = self.download_torrent(_torrent,
|
||||
channel=channel,
|
||||
source=source,
|
||||
userid=userid)
|
||||
elif torrent_file:
|
||||
if torrent_file.exists():
|
||||
torrent_content = torrent_file.read_bytes()
|
||||
else:
|
||||
# 缓存处理器
|
||||
cache_backend = FileCache()
|
||||
# 读取缓存的种子文件
|
||||
torrent_content = cache_backend.get(torrent_file.as_posix(), region="torrents")
|
||||
|
||||
if not torrent_content:
|
||||
return None
|
||||
|
||||
# 获取种子文件的文件夹名和文件清单
|
||||
_folder_name, _file_list = TorrentHelper().get_fileinfo_from_torrent_content(torrent_content)
|
||||
|
||||
# 下载目录
|
||||
if save_path:
|
||||
@@ -245,7 +261,7 @@ class DownloadChain(ChainBase):
|
||||
return None
|
||||
|
||||
# 添加下载
|
||||
result: Optional[tuple] = self.download(content=content,
|
||||
result: Optional[tuple] = self.download(content=torrent_content,
|
||||
cookie=_torrent.site_cookie,
|
||||
episodes=episodes,
|
||||
download_dir=download_dir,
|
||||
@@ -342,7 +358,7 @@ class DownloadChain(ChainBase):
|
||||
username=username,
|
||||
)
|
||||
# 下载成功后处理
|
||||
self.download_added(context=context, download_dir=download_dir, torrent_path=torrent_file)
|
||||
self.download_added(context=context, download_dir=download_dir, torrent_content=torrent_content)
|
||||
# 广播事件
|
||||
self.eventmanager.send_event(EventType.DownloadAdded, {
|
||||
"hash": _hash,
|
||||
@@ -556,7 +572,7 @@ class DownloadChain(ChainBase):
|
||||
logger.info(f"开始下载 {torrent.title} ...")
|
||||
download_id = self.download_single(
|
||||
context=context,
|
||||
torrent_file=content if isinstance(content, Path) else None,
|
||||
torrent_content=content,
|
||||
save_path=save_path,
|
||||
channel=channel,
|
||||
source=source,
|
||||
@@ -723,7 +739,7 @@ class DownloadChain(ChainBase):
|
||||
logger.info(f"开始下载 {torrent.title} ...")
|
||||
download_id = self.download_single(
|
||||
context=context,
|
||||
torrent_file=content if isinstance(content, Path) else None,
|
||||
torrent_content=content,
|
||||
episodes=selected_episodes,
|
||||
save_path=save_path,
|
||||
channel=channel,
|
||||
|
||||
@@ -318,11 +318,17 @@ class MediaChain(ChainBase):
|
||||
if not event:
|
||||
return
|
||||
event_data = event.event_data or {}
|
||||
# 媒体根目录
|
||||
fileitem: FileItem = event_data.get("fileitem")
|
||||
# 媒体文件列表
|
||||
file_list: List[str] = event_data.get("file_list", [])
|
||||
# 媒体元数据
|
||||
meta: MetaBase = event_data.get("meta")
|
||||
# 媒体信息
|
||||
mediainfo: MediaInfo = event_data.get("mediainfo")
|
||||
# 是否覆盖
|
||||
overwrite = event_data.get("overwrite", False)
|
||||
# 检查媒体根目录
|
||||
if not fileitem:
|
||||
return
|
||||
|
||||
@@ -342,31 +348,62 @@ class MediaChain(ChainBase):
|
||||
parent=storagechain.get_parent_item(fileitem),
|
||||
overwrite=overwrite)
|
||||
else:
|
||||
# 检查目的目录下是否已经有nfo刮削文件
|
||||
has_nfo_file = storagechain.any_files(fileitem, extensions=['.nfo'])
|
||||
if has_nfo_file and file_list:
|
||||
logger.info(f"目录 {fileitem.path} 已有NFO文件,开始增量刮削...")
|
||||
for file_path in file_list:
|
||||
file_item = storagechain.get_file_item(storage=fileitem.storage,
|
||||
path=Path(file_path))
|
||||
if file_item:
|
||||
# 对于电视剧文件,应该保存到与视频文件相同的目录
|
||||
# 而不是电视剧根目录
|
||||
self.scrape_metadata(fileitem=file_item,
|
||||
if file_list:
|
||||
# 1. 收集fileitem和file_list中每个文件之间所有子目录
|
||||
all_dirs = set()
|
||||
root_path = Path(fileitem.path)
|
||||
|
||||
logger.debug(f"开始收集目录,根目录:{root_path}")
|
||||
# 收集根目录
|
||||
all_dirs.add(root_path)
|
||||
|
||||
# 收集所有目录(包括所有层级)
|
||||
for sub_file in file_list:
|
||||
sub_path = Path(sub_file)
|
||||
# 收集从根目录到文件的所有父目录
|
||||
current_path = sub_path.parent
|
||||
while current_path != root_path and current_path.is_relative_to(root_path):
|
||||
all_dirs.add(current_path)
|
||||
current_path = current_path.parent
|
||||
|
||||
logger.debug(f"共收集到 {len(all_dirs)} 个目录")
|
||||
|
||||
# 2. 初始化一遍子目录,但不处理文件
|
||||
for sub_dir in all_dirs:
|
||||
sub_dir_item = storagechain.get_file_item(storage=fileitem.storage, path=sub_dir)
|
||||
if sub_dir_item:
|
||||
logger.info(f"为目录生成海报和nfo:{sub_dir}")
|
||||
# 初始化目录元数据,但不处理文件
|
||||
self.scrape_metadata(fileitem=sub_dir_item,
|
||||
mediainfo=mediainfo,
|
||||
init_folder=True,
|
||||
recursive=False,
|
||||
overwrite=overwrite)
|
||||
else:
|
||||
logger.warn(f"无法获取目录项:{sub_dir}")
|
||||
|
||||
# 3. 刮削每个文件
|
||||
logger.info(f"开始刮削 {len(file_list)} 个文件")
|
||||
for sub_file_path in file_list:
|
||||
sub_file_item = storagechain.get_file_item(storage=fileitem.storage,
|
||||
path=Path(sub_file_path))
|
||||
if sub_file_item:
|
||||
self.scrape_metadata(fileitem=sub_file_item,
|
||||
mediainfo=mediainfo,
|
||||
init_folder=False,
|
||||
parent=None, # 让函数内部自动获取正确的父目录
|
||||
overwrite=overwrite)
|
||||
else:
|
||||
logger.warn(f"无法获取文件项:{sub_file_path}")
|
||||
else:
|
||||
# 执行全量刮削
|
||||
logger.info(f"开始全量刮削目录 {fileitem.path} ...")
|
||||
logger.info(f"开始刮削目录 {fileitem.path} ...")
|
||||
self.scrape_metadata(fileitem=fileitem, meta=meta, init_folder=True,
|
||||
mediainfo=mediainfo, overwrite=overwrite)
|
||||
|
||||
def scrape_metadata(self, fileitem: schemas.FileItem,
|
||||
meta: MetaBase = None, mediainfo: MediaInfo = None,
|
||||
init_folder: bool = True, parent: schemas.FileItem = None,
|
||||
overwrite: bool = False):
|
||||
overwrite: bool = False, recursive: bool = True):
|
||||
"""
|
||||
手动刮削媒体信息
|
||||
:param fileitem: 刮削目录或文件
|
||||
@@ -375,6 +412,7 @@ class MediaChain(ChainBase):
|
||||
:param init_folder: 是否刮削根目录
|
||||
:param parent: 上级目录
|
||||
:param overwrite: 是否覆盖已有文件
|
||||
:param recursive: 是否递归处理目录内文件
|
||||
"""
|
||||
|
||||
storagechain = StorageChain()
|
||||
@@ -481,31 +519,33 @@ class MediaChain(ChainBase):
|
||||
logger.info("电影NFO刮削已关闭,跳过")
|
||||
else:
|
||||
# 电影目录
|
||||
if is_bluray_folder(fileitem):
|
||||
# 原盘目录
|
||||
if scraping_switchs.get('movie_nfo', True):
|
||||
nfo_path = filepath / (filepath.name + ".nfo")
|
||||
if overwrite or not storagechain.get_file_item(storage=fileitem.storage, path=nfo_path):
|
||||
# 生成原盘nfo
|
||||
movie_nfo = self.metadata_nfo(meta=meta, mediainfo=mediainfo)
|
||||
if movie_nfo:
|
||||
# 保存或上传nfo文件到当前目录
|
||||
__save_file(_fileitem=fileitem, _path=nfo_path, _content=movie_nfo)
|
||||
if recursive:
|
||||
# 处理文件
|
||||
if is_bluray_folder(fileitem):
|
||||
# 原盘目录
|
||||
if scraping_switchs.get('movie_nfo', True):
|
||||
nfo_path = filepath / (filepath.name + ".nfo")
|
||||
if overwrite or not storagechain.get_file_item(storage=fileitem.storage, path=nfo_path):
|
||||
# 生成原盘nfo
|
||||
movie_nfo = self.metadata_nfo(meta=meta, mediainfo=mediainfo)
|
||||
if movie_nfo:
|
||||
# 保存或上传nfo文件到当前目录
|
||||
__save_file(_fileitem=fileitem, _path=nfo_path, _content=movie_nfo)
|
||||
else:
|
||||
logger.warn(f"{filepath.name} nfo文件生成失败!")
|
||||
else:
|
||||
logger.warn(f"{filepath.name} nfo文件生成失败!")
|
||||
logger.info(f"已存在nfo文件:{nfo_path}")
|
||||
else:
|
||||
logger.info(f"已存在nfo文件:{nfo_path}")
|
||||
logger.info("电影NFO刮削已关闭,跳过")
|
||||
else:
|
||||
logger.info("电影NFO刮削已关闭,跳过")
|
||||
else:
|
||||
# 处理目录内的文件
|
||||
files = __list_files(_fileitem=fileitem)
|
||||
for file in files:
|
||||
self.scrape_metadata(fileitem=file,
|
||||
mediainfo=mediainfo,
|
||||
init_folder=False,
|
||||
parent=fileitem,
|
||||
overwrite=overwrite)
|
||||
# 处理目录内的文件
|
||||
files = __list_files(_fileitem=fileitem)
|
||||
for file in files:
|
||||
self.scrape_metadata(fileitem=file,
|
||||
mediainfo=mediainfo,
|
||||
init_folder=False,
|
||||
parent=fileitem,
|
||||
overwrite=overwrite)
|
||||
# 生成目录内图片文件
|
||||
if init_folder:
|
||||
# 图片
|
||||
@@ -597,13 +637,14 @@ class MediaChain(ChainBase):
|
||||
logger.info("集缩略图刮削已关闭,跳过")
|
||||
else:
|
||||
# 当前为电视剧目录,处理目录内的文件
|
||||
files = __list_files(_fileitem=fileitem)
|
||||
for file in files:
|
||||
self.scrape_metadata(fileitem=file,
|
||||
mediainfo=mediainfo,
|
||||
parent=fileitem if file.type == "file" else None,
|
||||
init_folder=True if file.type == "dir" else False,
|
||||
overwrite=overwrite)
|
||||
if recursive:
|
||||
files = __list_files(_fileitem=fileitem)
|
||||
for file in files:
|
||||
self.scrape_metadata(fileitem=file,
|
||||
mediainfo=mediainfo,
|
||||
parent=fileitem if file.type == "file" else None,
|
||||
init_folder=True if file.type == "dir" else False,
|
||||
overwrite=overwrite)
|
||||
# 生成目录的nfo和图片
|
||||
if init_folder:
|
||||
# 识别文件夹名称
|
||||
|
||||
@@ -1,48 +1,142 @@
|
||||
import asyncio
|
||||
import io
|
||||
from pathlib import Path
|
||||
from typing import List, Optional
|
||||
|
||||
import aiofiles
|
||||
import pillow_avif # noqa 用于自动注册AVIF支持
|
||||
from PIL import Image
|
||||
from aiopath import AsyncPath
|
||||
|
||||
from app.chain import ChainBase
|
||||
from app.chain.bangumi import BangumiChain
|
||||
from app.chain.douban import DoubanChain
|
||||
from app.chain.tmdb import TmdbChain
|
||||
from app.core.cache import cache_backend, cached
|
||||
from app.core.cache import cached, FileCache
|
||||
from app.core.config import settings, global_vars
|
||||
from app.log import logger
|
||||
from app.schemas import MediaType
|
||||
from app.utils.asyncio import AsyncUtils
|
||||
from app.utils.common import log_execution_time
|
||||
from app.utils.http import AsyncRequestUtils
|
||||
from app.utils.http import RequestUtils
|
||||
from app.utils.security import SecurityUtils
|
||||
from app.utils.singleton import Singleton
|
||||
|
||||
# 推荐相关的专用缓存
|
||||
recommend_ttl = 24 * 3600
|
||||
recommend_cache_region = "recommend"
|
||||
|
||||
|
||||
class RecommendChain(ChainBase, metaclass=Singleton):
|
||||
"""
|
||||
推荐处理链,单例运行
|
||||
"""
|
||||
|
||||
# 推荐数据的缓存页数
|
||||
# 推荐缓存时间
|
||||
recommend_ttl = 24 * 3600
|
||||
# 推荐缓存页数
|
||||
cache_max_pages = 5
|
||||
# 推荐缓存区域
|
||||
recommend_cache_region = "recommend"
|
||||
|
||||
def refresh_recommend(self):
|
||||
"""
|
||||
刷新推荐数据 - 同步包装器
|
||||
刷新推荐
|
||||
"""
|
||||
logger.debug("Starting to refresh Recommend data.")
|
||||
|
||||
# 推荐来源方法
|
||||
recommend_methods = [
|
||||
self.tmdb_movies,
|
||||
self.tmdb_tvs,
|
||||
self.tmdb_trending,
|
||||
self.bangumi_calendar,
|
||||
self.douban_movie_showing,
|
||||
self.douban_movies,
|
||||
self.douban_tvs,
|
||||
self.douban_movie_top250,
|
||||
self.douban_tv_weekly_chinese,
|
||||
self.douban_tv_weekly_global,
|
||||
self.douban_tv_animation,
|
||||
self.douban_movie_hot,
|
||||
self.douban_tv_hot,
|
||||
]
|
||||
|
||||
# 缓存并刷新所有推荐数据
|
||||
recommends = []
|
||||
# 记录哪些方法已完成
|
||||
methods_finished = set()
|
||||
# 这里避免区间内连续调用相同来源,因此遍历方案为每页遍历所有推荐来源,再进行页数遍历
|
||||
for page in range(1, self.cache_max_pages + 1):
|
||||
for method in recommend_methods:
|
||||
if global_vars.is_system_stopped:
|
||||
return
|
||||
if method in methods_finished:
|
||||
continue
|
||||
logger.debug(f"Fetch {method.__name__} data for page {page}.")
|
||||
data = method(page=page)
|
||||
if not data:
|
||||
logger.debug("All recommendation methods have finished fetching data. Ending pagination early.")
|
||||
methods_finished.add(method)
|
||||
continue
|
||||
recommends.extend(data)
|
||||
# 如果所有方法都已经完成,提前结束循环
|
||||
if len(methods_finished) == len(recommend_methods):
|
||||
break
|
||||
|
||||
# 缓存收集到的海报
|
||||
self.__cache_posters(recommends)
|
||||
logger.debug("Recommend data refresh completed.")
|
||||
|
||||
def __cache_posters(self, datas: List[dict]):
|
||||
"""
|
||||
提取 poster_path 并缓存图片
|
||||
:param datas: 数据列表
|
||||
"""
|
||||
if not settings.GLOBAL_IMAGE_CACHE:
|
||||
return
|
||||
|
||||
for data in datas:
|
||||
if global_vars.is_system_stopped:
|
||||
return
|
||||
poster_path = data.get("poster_path")
|
||||
if poster_path:
|
||||
poster_url = poster_path.replace("original", "w500")
|
||||
logger.debug(f"Caching poster image: {poster_url}")
|
||||
self.__fetch_and_save_image(poster_url)
|
||||
|
||||
@staticmethod
|
||||
def __fetch_and_save_image(url: str):
|
||||
"""
|
||||
请求并保存图片
|
||||
:param url: 图片路径
|
||||
"""
|
||||
# 生成缓存路径
|
||||
sanitized_path = SecurityUtils.sanitize_url_path(url)
|
||||
cache_path = Path("images") / sanitized_path
|
||||
# 没有文件类型,则添加后缀,在恶意文件类型和实际需求下的折衷选择
|
||||
if not cache_path.suffix:
|
||||
cache_path = cache_path.with_suffix(".jpg")
|
||||
|
||||
# 获取缓存后端,并设置缓存时间为全局配置的缓存天数
|
||||
cache_backend = FileCache(base=settings.CACHE_PATH,
|
||||
ttl=settings.GLOBAL_IMAGE_CACHE_DAYS * 24 * 3600)
|
||||
|
||||
# 本地存在缓存图片,则直接跳过
|
||||
if cache_backend.get(cache_path.as_posix(), region="images"):
|
||||
logger.debug(f"Cache hit: Image already exists at {cache_path}")
|
||||
return
|
||||
|
||||
# 请求远程图片
|
||||
referer = "https://movie.douban.com/" if "doubanio.com" in url else None
|
||||
proxies = settings.PROXY if not referer else None
|
||||
response = RequestUtils(ua=settings.NORMAL_USER_AGENT, proxies=proxies, referer=referer).get_res(url=url)
|
||||
if not response:
|
||||
logger.debug(f"Empty response for URL: {url}")
|
||||
return
|
||||
|
||||
# 验证下载的内容是否为有效图片
|
||||
try:
|
||||
AsyncUtils.run_async(self.async_refresh_recommend())
|
||||
Image.open(io.BytesIO(response.content)).verify()
|
||||
except Exception as e:
|
||||
logger.error(f"刷新推荐数据失败:{str(e)}")
|
||||
raise
|
||||
logger.debug(f"Invalid image format for URL {url}: {e}")
|
||||
return
|
||||
|
||||
# 保存缓存
|
||||
cache_backend.set(cache_path.as_posix(), response.content, region="images")
|
||||
logger.debug(f"Successfully cached image at {cache_path} for URL: {url}")
|
||||
|
||||
@log_execution_time(logger=logger)
|
||||
@cached(ttl=recommend_ttl, region=recommend_cache_region)
|
||||
@@ -199,162 +293,6 @@ class RecommendChain(ChainBase, metaclass=Singleton):
|
||||
tvs = DoubanChain().tv_hot(page=page, count=count)
|
||||
return [media.to_dict() for media in tvs] if tvs else []
|
||||
|
||||
# 异步版本的方法
|
||||
async def async_refresh_recommend(self):
|
||||
"""
|
||||
异步刷新推荐
|
||||
"""
|
||||
logger.debug("Starting to async refresh Recommend data.")
|
||||
cache_backend.clear(region=recommend_cache_region)
|
||||
logger.debug("Recommend Cache has been cleared.")
|
||||
|
||||
# 推荐来源方法
|
||||
recommend_methods = [
|
||||
self.async_tmdb_movies,
|
||||
self.async_tmdb_tvs,
|
||||
self.async_tmdb_trending,
|
||||
self.async_bangumi_calendar,
|
||||
self.async_douban_movie_showing,
|
||||
self.async_douban_movies,
|
||||
self.async_douban_tvs,
|
||||
self.async_douban_movie_top250,
|
||||
self.async_douban_tv_weekly_chinese,
|
||||
self.async_douban_tv_weekly_global,
|
||||
self.async_douban_tv_animation,
|
||||
self.async_douban_movie_hot,
|
||||
self.async_douban_tv_hot,
|
||||
]
|
||||
|
||||
# 缓存并刷新所有推荐数据
|
||||
recommends = []
|
||||
# 记录哪些方法已完成
|
||||
methods_finished = set()
|
||||
# 这里避免区间内连续调用相同来源,因此遍历方案为每页遍历所有推荐来源,再进行页数遍历
|
||||
for page in range(1, self.cache_max_pages + 1):
|
||||
# 为每个页面并发执行所有方法
|
||||
tasks = []
|
||||
for method in recommend_methods:
|
||||
if global_vars.is_system_stopped:
|
||||
return
|
||||
if method in methods_finished:
|
||||
continue
|
||||
tasks.append(self._async_fetch_method_data(method, page, methods_finished))
|
||||
|
||||
# 并发执行所有任务
|
||||
if tasks:
|
||||
results = await asyncio.gather(*tasks, return_exceptions=True)
|
||||
for result in results:
|
||||
if isinstance(result, list) and result:
|
||||
recommends.extend(result)
|
||||
|
||||
# 如果所有方法都已经完成,提前结束循环
|
||||
if len(methods_finished) == len(recommend_methods):
|
||||
break
|
||||
|
||||
# 缓存收集到的海报
|
||||
await self.__async_cache_posters(recommends)
|
||||
logger.debug("Async recommend data refresh completed.")
|
||||
|
||||
@staticmethod
|
||||
async def _async_fetch_method_data(method, page: int, methods_finished: set):
|
||||
"""
|
||||
异步获取方法数据的辅助函数
|
||||
"""
|
||||
try:
|
||||
logger.debug(f"Async fetch {method.__name__} data for page {page}.")
|
||||
data = await method(page=page)
|
||||
if not data:
|
||||
logger.debug(f"Method {method.__name__} finished fetching data. Ending pagination early.")
|
||||
methods_finished.add(method)
|
||||
return []
|
||||
return data
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching data from {method.__name__}: {e}")
|
||||
methods_finished.add(method)
|
||||
return []
|
||||
|
||||
async def __async_cache_posters(self, datas: List[dict]):
|
||||
"""
|
||||
异步提取 poster_path 并缓存图片
|
||||
:param datas: 数据列表
|
||||
"""
|
||||
if not settings.GLOBAL_IMAGE_CACHE:
|
||||
return
|
||||
|
||||
tasks = []
|
||||
for data in datas:
|
||||
if global_vars.is_system_stopped:
|
||||
return
|
||||
poster_path = data.get("poster_path")
|
||||
if poster_path:
|
||||
poster_url = poster_path.replace("original", "w500")
|
||||
logger.debug(f"Async caching poster image: {poster_url}")
|
||||
tasks.append(self.__async_fetch_and_save_image(poster_url))
|
||||
|
||||
# 并发缓存图片
|
||||
if tasks:
|
||||
await asyncio.gather(*tasks, return_exceptions=True)
|
||||
|
||||
@staticmethod
|
||||
async def __async_fetch_and_save_image(url: str):
|
||||
"""
|
||||
异步请求并保存图片
|
||||
:param url: 图片路径
|
||||
"""
|
||||
if not settings.GLOBAL_IMAGE_CACHE or not url:
|
||||
return
|
||||
|
||||
# 生成缓存路径
|
||||
base_path = AsyncPath(settings.CACHE_PATH)
|
||||
sanitized_path = SecurityUtils.sanitize_url_path(url)
|
||||
cache_path = base_path / "images" / sanitized_path
|
||||
|
||||
# 没有文件类型,则添加后缀,在恶意文件类型和实际需求下的折衷选择
|
||||
if not cache_path.suffix:
|
||||
cache_path = cache_path.with_suffix(".jpg")
|
||||
|
||||
# 确保缓存路径和文件类型合法
|
||||
if not await SecurityUtils.async_is_safe_path(base_path=base_path,
|
||||
user_path=cache_path,
|
||||
allowed_suffixes=settings.SECURITY_IMAGE_SUFFIXES):
|
||||
logger.debug(f"Invalid cache path or file type for URL: {url}, sanitized path: {sanitized_path}")
|
||||
return
|
||||
|
||||
# 本地存在缓存图片,则直接跳过
|
||||
if await cache_path.exists():
|
||||
logger.debug(f"Cache hit: Image already exists at {cache_path}")
|
||||
return
|
||||
|
||||
# 请求远程图片
|
||||
referer = "https://movie.douban.com/" if "doubanio.com" in url else None
|
||||
proxies = settings.PROXY if not referer else None
|
||||
response = await AsyncRequestUtils(ua=settings.NORMAL_USER_AGENT,
|
||||
proxies=proxies, referer=referer).get_res(url=url)
|
||||
if not response:
|
||||
logger.debug(f"Empty response for URL: {url}")
|
||||
return
|
||||
|
||||
# 验证下载的内容是否为有效图片
|
||||
try:
|
||||
Image.open(io.BytesIO(response.content)).verify()
|
||||
except Exception as e:
|
||||
logger.debug(f"Invalid image format for URL {url}: {e}")
|
||||
return
|
||||
|
||||
if not cache_path:
|
||||
return
|
||||
|
||||
try:
|
||||
if not await cache_path.parent.exists():
|
||||
await cache_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
async with aiofiles.tempfile.NamedTemporaryFile(dir=cache_path.parent, delete=False) as tmp_file:
|
||||
await tmp_file.write(response.content)
|
||||
temp_path = AsyncPath(tmp_file.name)
|
||||
await temp_path.replace(cache_path)
|
||||
logger.debug(f"Successfully cached image at {cache_path} for URL: {url}")
|
||||
except Exception as e:
|
||||
logger.debug(f"Failed to write cache file {cache_path} for URL {url}: {e}")
|
||||
|
||||
@log_execution_time(logger=logger)
|
||||
@cached(ttl=recommend_ttl, region=recommend_cache_region)
|
||||
async def async_tmdb_movies(self, sort_by: Optional[str] = "popularity.desc",
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
import asyncio
|
||||
import pickle
|
||||
import random
|
||||
import time
|
||||
import traceback
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||
from datetime import datetime
|
||||
from typing import Dict, Tuple
|
||||
@@ -59,7 +57,7 @@ class SearchChain(ChainBase):
|
||||
results = self.process(mediainfo=mediainfo, sites=sites, area=area, no_exists=no_exists)
|
||||
# 保存到本地文件
|
||||
if cache_local:
|
||||
self.save_cache(pickle.dumps(results), self.__result_temp_file)
|
||||
self.save_cache(results, self.__result_temp_file)
|
||||
return results
|
||||
|
||||
def search_by_title(self, title: str, page: Optional[int] = 0,
|
||||
@@ -85,36 +83,20 @@ class SearchChain(ChainBase):
|
||||
torrent_info=torrent) for torrent in torrents]
|
||||
# 保存到本地文件
|
||||
if cache_local:
|
||||
self.save_cache(pickle.dumps(contexts), self.__result_temp_file)
|
||||
self.save_cache(contexts, self.__result_temp_file)
|
||||
return contexts
|
||||
|
||||
def last_search_results(self) -> List[Context]:
|
||||
"""
|
||||
获取上次搜索结果
|
||||
"""
|
||||
# 读取本地文件缓存
|
||||
content = self.load_cache(self.__result_temp_file)
|
||||
if not content:
|
||||
return []
|
||||
try:
|
||||
return pickle.loads(content)
|
||||
except Exception as e:
|
||||
logger.error(f'加载搜索结果失败:{str(e)} - {traceback.format_exc()}')
|
||||
return []
|
||||
return self.load_cache(self.__result_temp_file)
|
||||
|
||||
async def async_last_search_results(self) -> List[Context]:
|
||||
"""
|
||||
异步获取上次搜索结果
|
||||
"""
|
||||
# 读取本地文件缓存
|
||||
content = await self.async_load_cache(self.__result_temp_file)
|
||||
if not content:
|
||||
return []
|
||||
try:
|
||||
return pickle.loads(content)
|
||||
except Exception as e:
|
||||
logger.error(f'加载搜索结果失败:{str(e)} - {traceback.format_exc()}')
|
||||
return []
|
||||
return await self.async_load_cache(self.__result_temp_file)
|
||||
|
||||
async def async_search_by_id(self, tmdbid: Optional[int] = None, doubanid: Optional[str] = None,
|
||||
mtype: MediaType = None, area: Optional[str] = "title", season: Optional[int] = None,
|
||||
@@ -143,7 +125,7 @@ class SearchChain(ChainBase):
|
||||
results = await self.async_process(mediainfo=mediainfo, sites=sites, area=area, no_exists=no_exists)
|
||||
# 保存到本地文件
|
||||
if cache_local:
|
||||
await self.async_save_cache(pickle.dumps(results), self.__result_temp_file)
|
||||
await self.async_save_cache(results, self.__result_temp_file)
|
||||
return results
|
||||
|
||||
async def async_search_by_title(self, title: str, page: Optional[int] = 0,
|
||||
@@ -169,7 +151,7 @@ class SearchChain(ChainBase):
|
||||
torrent_info=torrent) for torrent in torrents]
|
||||
# 保存到本地文件
|
||||
if cache_local:
|
||||
await self.async_save_cache(pickle.dumps(contexts), self.__result_temp_file)
|
||||
await self.async_save_cache(contexts, self.__result_temp_file)
|
||||
return contexts
|
||||
|
||||
@staticmethod
|
||||
|
||||
@@ -317,7 +317,7 @@ class SiteChain(ChainBase):
|
||||
indexer = siteshelper.get_indexer(domain)
|
||||
# 数据库的站点信息
|
||||
site_info = siteoper.get_by_domain(domain)
|
||||
if site_info and site_info.is_active == 1:
|
||||
if site_info and site_info.is_active:
|
||||
# 站点已存在,检查站点连通性
|
||||
status, msg = self.test(domain)
|
||||
# 更新站点Cookie
|
||||
@@ -330,7 +330,8 @@ class SiteChain(ChainBase):
|
||||
url=site_info.url,
|
||||
cookie=cookie,
|
||||
ua=site_info.ua or settings.USER_AGENT,
|
||||
proxy=True if site_info.proxy else False
|
||||
proxy=True if site_info.proxy else False,
|
||||
timeout=site_info.timeout
|
||||
)
|
||||
if rss_url:
|
||||
logger.info(f"更新站点 {domain} RSS地址 ...")
|
||||
@@ -558,13 +559,15 @@ class SiteChain(ChainBase):
|
||||
public = site_info.public
|
||||
proxies = settings.PROXY if site_info.proxy else None
|
||||
proxy_server = settings.PROXY_SERVER if site_info.proxy else None
|
||||
timeout = site_info.timeout or 60
|
||||
|
||||
# 访问链接
|
||||
if render:
|
||||
page_source = PlaywrightHelper().get_page_source(url=site_url,
|
||||
cookies=site_cookie,
|
||||
ua=ua,
|
||||
proxies=proxy_server)
|
||||
proxies=proxy_server,
|
||||
timeout=timeout)
|
||||
if not public and not SiteUtils.is_logged_in(page_source):
|
||||
if under_challenge(page_source):
|
||||
return False, f"无法通过Cloudflare!"
|
||||
@@ -697,7 +700,8 @@ class SiteChain(ChainBase):
|
||||
username=username,
|
||||
password=password,
|
||||
two_step_code=two_step_code,
|
||||
proxies=settings.PROXY_HOST if site_info.proxy else None
|
||||
proxies=settings.PROXY_SERVER if site_info.proxy else None,
|
||||
timeout=site_info.timeout or 60
|
||||
)
|
||||
if result:
|
||||
cookie, ua, msg = result
|
||||
|
||||
@@ -6,7 +6,6 @@ from app.chain import ChainBase
|
||||
from app.core.config import settings
|
||||
from app.helper.directory import DirectoryHelper
|
||||
from app.log import logger
|
||||
from app.schemas import MediaType
|
||||
|
||||
|
||||
class StorageChain(ChainBase):
|
||||
@@ -134,8 +133,7 @@ class StorageChain(ChainBase):
|
||||
"""
|
||||
return self.run_module("support_transtype", storage=storage)
|
||||
|
||||
def delete_media_file(self, fileitem: schemas.FileItem,
|
||||
mtype: MediaType = None, delete_self: bool = True) -> bool:
|
||||
def delete_media_file(self, fileitem: schemas.FileItem, delete_self: bool = True) -> bool:
|
||||
"""
|
||||
删除媒体文件,以及不含媒体文件的目录
|
||||
"""
|
||||
@@ -152,7 +150,8 @@ class StorageChain(ChainBase):
|
||||
return False
|
||||
|
||||
media_exts = settings.RMT_MEDIAEXT + settings.DOWNLOAD_TMPEXT
|
||||
if fileitem.path == "/" or len(Path(fileitem.path).parts) <= 2:
|
||||
fileitem_path = Path(fileitem.path) if fileitem.path else Path("")
|
||||
if len(fileitem_path.parts) <= 2:
|
||||
logger.warn(f"【{fileitem.storage}】{fileitem.path} 根目录或一级目录不允许删除")
|
||||
return False
|
||||
if fileitem.type == "dir":
|
||||
@@ -162,13 +161,7 @@ class StorageChain(ChainBase):
|
||||
if not self.delete_file(fileitem):
|
||||
logger.warn(f"【{fileitem.storage}】{fileitem.path} 删除失败")
|
||||
return False
|
||||
elif self.any_files(fileitem, extensions=media_exts) is False:
|
||||
logger.warn(f"【{fileitem.storage}】{fileitem.path} 不存在其它媒体文件,正在删除空目录")
|
||||
if not self.delete_file(fileitem):
|
||||
logger.warn(f"【{fileitem.storage}】{fileitem.path} 删除失败")
|
||||
return False
|
||||
# 不处理父目录
|
||||
return True
|
||||
|
||||
elif delete_self:
|
||||
# 本身是文件,需要删除文件
|
||||
logger.warn(f"正在删除文件【{fileitem.storage}】{fileitem.path}")
|
||||
@@ -176,35 +169,43 @@ class StorageChain(ChainBase):
|
||||
logger.warn(f"【{fileitem.storage}】{fileitem.path} 删除失败")
|
||||
return False
|
||||
|
||||
if mtype:
|
||||
# 重命名格式
|
||||
rename_format = settings.RENAME_FORMAT(mtype)
|
||||
media_path = DirectoryHelper.get_media_root_path(
|
||||
rename_format, rename_path=Path(fileitem.path)
|
||||
)
|
||||
if not media_path:
|
||||
return True
|
||||
# 处理媒体文件根目录
|
||||
dir_item = self.get_file_item(storage=fileitem.storage, path=media_path)
|
||||
else:
|
||||
# 处理上级目录
|
||||
dir_item = self.get_parent_item(fileitem)
|
||||
# 检查和删除上级空目录
|
||||
dir_item = fileitem if fileitem.type == "dir" else self.get_parent_item(fileitem)
|
||||
if not dir_item:
|
||||
logger.warn(f"【{fileitem.storage}】{fileitem.path} 上级目录不存在")
|
||||
return False
|
||||
|
||||
# 检查和删除上级目录
|
||||
if dir_item and len(Path(dir_item.path).parts) > 2:
|
||||
# 如何目录是所有下载目录、媒体库目录的上级,则不处理
|
||||
for d in DirectoryHelper().get_dirs():
|
||||
if d.download_path and Path(d.download_path).is_relative_to(Path(dir_item.path)):
|
||||
logger.debug(f"【{dir_item.storage}】{dir_item.path} 是下载目录本级或上级目录,不删除")
|
||||
return True
|
||||
if d.library_path and Path(d.library_path).is_relative_to(Path(dir_item.path)):
|
||||
logger.debug(f"【{dir_item.storage}】{dir_item.path} 是媒体库目录本级或上级目录,不删除")
|
||||
return True
|
||||
# 不存在其他媒体文件,删除空目录
|
||||
if self.any_files(dir_item, extensions=media_exts) is False:
|
||||
logger.warn(f"【{dir_item.storage}】{dir_item.path} 不存在其它媒体文件,正在删除空目录")
|
||||
if not self.delete_file(dir_item):
|
||||
logger.warn(f"【{dir_item.storage}】{dir_item.path} 删除失败")
|
||||
return False
|
||||
# 查找操作文件项匹配的配置目录(资源目录、媒体库目录)
associated_dir = max(
(
Path(p)
for d in DirectoryHelper().get_dirs()
for p in (d.download_path, d.library_path)
if p and fileitem_path.is_relative_to(p)
),
key=lambda path: len(path.parts),
default=None,
)

while dir_item and len(Path(dir_item.path).parts) > 2:
# 目录是资源目录、媒体库目录的上级,则不处理
if associated_dir and associated_dir.is_relative_to(Path(dir_item.path)):
logger.debug(f"【{dir_item.storage}】{dir_item.path} 位于资源或媒体库目录结构中,不删除")
break

elif not associated_dir and self.list_files(dir_item, recursion=False):
logger.debug(f"【{dir_item.storage}】{dir_item.path} 不是空目录,不删除")
break

if self.any_files(dir_item, extensions=media_exts) is not False:
logger.debug(f"【{dir_item.storage}】{dir_item.path} 存在媒体文件,不删除")
break

# 删除空目录并继续处理父目录
logger.warn(f"【{dir_item.storage}】{dir_item.path} 不存在其它媒体文件,正在删除空目录")
if not self.delete_file(dir_item):
logger.warn(f"【{dir_item.storage}】{dir_item.path} 删除失败")
return False
dir_item = self.get_parent_item(dir_item)

return True

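The rewritten cleanup first locates the deepest configured download or library directory that contains the file, then walks up from the file's directory, deleting parents only while they hold no media and stopping short of any configured root. A schematic sketch of that walk (plain `pathlib`; `is_empty` stands in for the chain's `any_files(...) is False` check, and the helper names are illustrative):

```python
from pathlib import Path
from typing import Callable, Iterable, Optional


def find_associated_root(file_path: Path, roots: Iterable[str]) -> Optional[Path]:
    """Deepest configured root (download/library directory) containing file_path."""
    candidates = [Path(r) for r in roots if r and file_path.is_relative_to(r)]
    return max(candidates, key=lambda p: len(p.parts), default=None)


def cleanup_empty_parents(file_path: Path, roots: Iterable[str],
                          is_empty: Callable[[Path], bool]) -> None:
    """Walk upwards from file_path, removing directories that hold no media."""
    associated = find_associated_root(file_path, roots)
    current = file_path if file_path.is_dir() else file_path.parent
    while len(current.parts) > 2:
        # Never touch a directory that a configured root still lives under
        if associated and associated.is_relative_to(current):
            break
        if not is_empty(current):
            break
        current.rmdir()
        current = current.parent
```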
||||
@@ -340,7 +340,8 @@ class TorrentsChain(ChainBase):
url=site.get("url"),
cookie=site.get("cookie"),
ua=site.get("ua") or settings.USER_AGENT,
proxy=True if site.get("proxy") else False
proxy=True if site.get("proxy") else False,
timeout=site.get("timeout"),
)
if rss_url:
# 获取新的日期的passkey

||||
@@ -501,7 +501,8 @@ class TransferChain(ChainBase, metaclass=Singleton):
|
||||
# 获取整理屏蔽词
|
||||
transfer_exclude_words = SystemConfigOper().get(SystemConfigKey.TransferExcludeWords)
|
||||
for t in tasks:
|
||||
if t.download_hash and self._can_delete_torrent(t.download_hash, t.downloader, transfer_exclude_words):
|
||||
if t.download_hash and self._can_delete_torrent(t.download_hash, t.downloader,
|
||||
transfer_exclude_words):
|
||||
if self.remove_torrents(t.download_hash, downloader=t.downloader):
|
||||
logger.info(f"移动模式删除种子成功:{t.download_hash}")
|
||||
if t.fileitem:
|
||||
@@ -1435,7 +1436,6 @@ class TransferChain(ChainBase, metaclass=Singleton):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _can_delete_torrent(self, download_hash: str, downloader: str, transfer_exclude_words) -> bool:
|
||||
"""
|
||||
检查是否可以删除种子文件
|
||||
@@ -1468,11 +1468,11 @@ class TransferChain(ChainBase, metaclass=Singleton):
|
||||
file_path = save_path / file.name
|
||||
# 如果存在未被屏蔽的媒体文件,则不删除种子
|
||||
if (
|
||||
file_path.suffix in self.all_exts
|
||||
and not self._is_blocked_by_exclude_words(
|
||||
str(file_path), transfer_exclude_words
|
||||
)
|
||||
and file_path.exists()
|
||||
file_path.suffix in self.all_exts
|
||||
and not self._is_blocked_by_exclude_words(
|
||||
str(file_path), transfer_exclude_words
|
||||
)
|
||||
and file_path.exists()
|
||||
):
|
||||
return False
|
||||
|
||||
|
||||
File diff suppressed because it is too large
@@ -8,6 +8,7 @@ import sys
|
||||
import threading
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional, Tuple, Type
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from dotenv import set_key
|
||||
from pydantic import BaseModel, BaseSettings, validator, Field
|
||||
@@ -41,10 +42,6 @@ class SystemConfModel(BaseModel):
|
||||
scheduler: int = 0
|
||||
# 线程池大小
|
||||
threadpool: int = 0
|
||||
# 数据库连接池大小
|
||||
dbpool: int = 0
|
||||
# 数据库连接池溢出数量
|
||||
dbpooloverflow: int = 0
|
||||
|
||||
|
||||
class ConfigModel(BaseModel):
|
||||
@@ -55,6 +52,7 @@ class ConfigModel(BaseModel):
|
||||
class Config:
|
||||
extra = "ignore" # 忽略未定义的配置项
|
||||
|
||||
# ==================== 基础应用配置 ====================
|
||||
# 项目名称
|
||||
PROJECT_NAME: str = "MoviePilot"
|
||||
# 域名 格式;https://movie-pilot.org
|
||||
@@ -63,6 +61,22 @@ class ConfigModel(BaseModel):
|
||||
API_V1_STR: str = "/api/v1"
|
||||
# 前端资源路径
|
||||
FRONTEND_PATH: str = "/public"
|
||||
# 时区
|
||||
TZ: str = "Asia/Shanghai"
|
||||
# API监听地址
|
||||
HOST: str = "0.0.0.0"
|
||||
# API监听端口
|
||||
PORT: int = 3001
|
||||
# 前端监听端口
|
||||
NGINX_PORT: int = 3000
|
||||
# 配置文件目录
|
||||
CONFIG_DIR: Optional[str] = None
|
||||
# 是否调试模式
|
||||
DEBUG: bool = False
|
||||
# 是否开发模式
|
||||
DEV: bool = False
|
||||
|
||||
# ==================== 安全认证配置 ====================
|
||||
# 密钥
|
||||
SECRET_KEY: str = secrets.token_urlsafe(32)
|
||||
# RESOURCE密钥
|
||||
@@ -73,20 +87,24 @@ class ConfigModel(BaseModel):
|
||||
ACCESS_TOKEN_EXPIRE_MINUTES: int = 60 * 24 * 8
|
||||
# RESOURCE_TOKEN过期时间
|
||||
RESOURCE_ACCESS_TOKEN_EXPIRE_SECONDS: int = 60 * 30
|
||||
# 时区
|
||||
TZ: str = "Asia/Shanghai"
|
||||
# API监听地址
|
||||
HOST: str = "0.0.0.0"
|
||||
# API监听端口
|
||||
PORT: int = 3001
|
||||
# 前端监听端口
|
||||
NGINX_PORT: int = 3000
|
||||
# 是否调试模式
|
||||
DEBUG: bool = False
|
||||
# 是否开发模式
|
||||
DEV: bool = False
|
||||
# 超级管理员
|
||||
SUPERUSER: str = "admin"
|
||||
# 辅助认证,允许通过外部服务进行认证、单点登录以及自动创建用户
|
||||
AUXILIARY_AUTH_ENABLE: bool = False
|
||||
# API密钥,需要更换
|
||||
API_TOKEN: Optional[str] = None
|
||||
# 用户认证站点
|
||||
AUTH_SITE: str = ""
|
||||
|
||||
# ==================== 数据库配置 ====================
|
||||
# 数据库类型,支持 sqlite 和 postgresql,默认使用 sqlite
|
||||
DB_TYPE: str = "sqlite"
|
||||
# 是否在控制台输出 SQL 语句,默认关闭
|
||||
DB_ECHO: bool = False
|
||||
# 数据库连接超时时间(秒),默认为 60 秒
|
||||
DB_TIMEOUT: int = 60
|
||||
# 是否启用 WAL 模式,仅适用于SQLite,默认开启
|
||||
DB_WAL_ENABLE: bool = True
|
||||
# 数据库连接池类型,QueuePool, NullPool
|
||||
DB_POOL_TYPE: str = "QueuePool"
|
||||
# 是否在获取连接时进行预先 ping 操作
|
||||
@@ -95,71 +113,44 @@ class ConfigModel(BaseModel):
|
||||
DB_POOL_RECYCLE: int = 300
|
||||
# 数据库连接池获取连接的超时时间(秒)
|
||||
DB_POOL_TIMEOUT: int = 30
|
||||
# SQLite 的 busy_timeout 参数,默认为 60 秒
|
||||
DB_TIMEOUT: int = 60
|
||||
# SQLite 是否启用 WAL 模式,默认开启
|
||||
DB_WAL_ENABLE: bool = True
|
||||
# SQLite 连接池大小
|
||||
DB_SQLITE_POOL_SIZE: int = 30
|
||||
# SQLite 连接池溢出数量
|
||||
DB_SQLITE_MAX_OVERFLOW: int = 50
|
||||
# PostgreSQL 主机地址
|
||||
DB_POSTGRESQL_HOST: str = "localhost"
|
||||
# PostgreSQL 端口
|
||||
DB_POSTGRESQL_PORT: int = 5432
|
||||
# PostgreSQL 数据库名
|
||||
DB_POSTGRESQL_DATABASE: str = "moviepilot"
|
||||
# PostgreSQL 用户名
|
||||
DB_POSTGRESQL_USERNAME: str = "moviepilot"
|
||||
# PostgreSQL 密码
|
||||
DB_POSTGRESQL_PASSWORD: str = "moviepilot"
|
||||
# PostgreSQL 连接池大小
|
||||
DB_POSTGRESQL_POOL_SIZE: int = 30
|
||||
# PostgreSQL 连接池溢出数量
|
||||
DB_POSTGRESQL_MAX_OVERFLOW: int = 50
|
||||
|
||||
# ==================== 缓存配置 ====================
|
||||
# 缓存类型,支持 cachetools 和 redis,默认使用 cachetools
|
||||
CACHE_BACKEND_TYPE: str = "cachetools"
|
||||
# 缓存连接字符串,仅外部缓存(如 Redis、Memcached)需要
|
||||
CACHE_BACKEND_URL: Optional[str] = None
|
||||
CACHE_BACKEND_URL: Optional[str] = "redis://localhost:6379"
|
||||
# Redis 缓存最大内存限制,未配置时,如开启大内存模式时为 "1024mb",未开启时为 "256mb"
|
||||
CACHE_REDIS_MAXMEMORY: Optional[str] = None
|
||||
# 配置文件目录
|
||||
CONFIG_DIR: Optional[str] = None
|
||||
# 超级管理员
|
||||
SUPERUSER: str = "admin"
|
||||
# 辅助认证,允许通过外部服务进行认证、单点登录以及自动创建用户
|
||||
AUXILIARY_AUTH_ENABLE: bool = False
|
||||
# API密钥,需要更换
|
||||
API_TOKEN: Optional[str] = None
|
||||
# 全局图片缓存,将媒体图片缓存到本地
|
||||
GLOBAL_IMAGE_CACHE: bool = False
|
||||
# 全局图片缓存保留天数
|
||||
GLOBAL_IMAGE_CACHE_DAYS: int = 7
|
||||
# 临时文件保留天数
|
||||
TEMP_FILE_DAYS: int = 3
|
||||
# 元数据识别缓存过期时间(小时),0为自动
|
||||
META_CACHE_EXPIRE: int = 0
|
||||
|
||||
# ==================== 网络代理配置 ====================
|
||||
# 网络代理服务器地址
|
||||
PROXY_HOST: Optional[str] = None
|
||||
# 登录页面电影海报,tmdb/bing/mediaserver
|
||||
WALLPAPER: str = "tmdb"
|
||||
# 自定义壁纸api地址
|
||||
CUSTOMIZE_WALLPAPER_API_URL: Optional[str] = None
|
||||
# 媒体搜索来源 themoviedb/douban/bangumi,多个用,分隔
|
||||
SEARCH_SOURCE: str = "themoviedb,douban,bangumi"
|
||||
# 媒体识别来源 themoviedb/douban
|
||||
RECOGNIZE_SOURCE: str = "themoviedb"
|
||||
# 刮削来源 themoviedb/douban
|
||||
SCRAP_SOURCE: str = "themoviedb"
|
||||
# 新增已入库媒体是否跟随TMDB信息变化
|
||||
SCRAP_FOLLOW_TMDB: bool = True
|
||||
# TMDB图片地址
|
||||
TMDB_IMAGE_DOMAIN: str = "image.tmdb.org"
|
||||
# TMDB API地址
|
||||
TMDB_API_DOMAIN: str = "api.themoviedb.org"
|
||||
# TMDB元数据语言
|
||||
TMDB_LOCALE: str = "zh"
|
||||
# 刮削使用TMDB原始语种图片
|
||||
TMDB_SCRAP_ORIGINAL_IMAGE: bool = False
|
||||
# TMDB API Key
|
||||
TMDB_API_KEY: str = "db55323b8d3e4154498498a75642b381"
|
||||
# TVDB API Key
|
||||
TVDB_V4_API_KEY: str = "ed2aa66b-7899-4677-92a7-67bc9ce3d93a"
|
||||
TVDB_V4_API_PIN: str = ""
|
||||
# Fanart开关
|
||||
FANART_ENABLE: bool = True
|
||||
# Fanart语言
|
||||
FANART_LANG: str = "zh,en"
|
||||
# Fanart API Key
|
||||
FANART_API_KEY: str = "d2d31f9ecabea050fc7d68aa3146015f"
|
||||
# 115 AppId
|
||||
U115_APP_ID: str = "100196807"
|
||||
# Alipan AppId
|
||||
ALIPAN_APP_ID: str = "ac1bf04dc9fd4d9aaabb65b4a668d403"
|
||||
# 元数据识别缓存过期时间(小时)
|
||||
META_CACHE_EXPIRE: int = 0
|
||||
# 电视剧动漫的分类genre_ids
|
||||
ANIME_GENREIDS: List[int] = Field(default=[16])
|
||||
# 用户认证站点
|
||||
AUTH_SITE: str = ""
|
||||
# 重启自动升级
|
||||
MOVIEPILOT_AUTO_UPDATE: str = 'release'
|
||||
# 自动检查和更新站点资源包(站点索引、认证等)
|
||||
AUTO_UPDATE_RESOURCE: bool = True
|
||||
# 是否启用DOH解析域名
|
||||
DOH_ENABLE: bool = False
|
||||
# 使用 DOH 解析的域名列表
|
||||
@@ -173,6 +164,55 @@ class ConfigModel(BaseModel):
|
||||
"api.telegram.org")
|
||||
# DOH 解析服务器列表
|
||||
DOH_RESOLVERS: str = "1.0.0.1,1.1.1.1,9.9.9.9,149.112.112.112"
|
||||
|
||||
# ==================== 媒体元数据配置 ====================
|
||||
# 媒体搜索来源 themoviedb/douban/bangumi,多个用,分隔
|
||||
SEARCH_SOURCE: str = "themoviedb,douban,bangumi"
|
||||
# 媒体识别来源 themoviedb/douban
|
||||
RECOGNIZE_SOURCE: str = "themoviedb"
|
||||
# 刮削来源 themoviedb/douban
|
||||
SCRAP_SOURCE: str = "themoviedb"
|
||||
# 电视剧动漫的分类genre_ids
|
||||
ANIME_GENREIDS: List[int] = Field(default=[16])
|
||||
|
||||
# ==================== TMDB配置 ====================
|
||||
# TMDB图片地址
|
||||
TMDB_IMAGE_DOMAIN: str = "image.tmdb.org"
|
||||
# TMDB API地址
|
||||
TMDB_API_DOMAIN: str = "api.themoviedb.org"
|
||||
# TMDB元数据语言
|
||||
TMDB_LOCALE: str = "zh"
|
||||
# 刮削使用TMDB原始语种图片
|
||||
TMDB_SCRAP_ORIGINAL_IMAGE: bool = False
|
||||
# TMDB API Key
|
||||
TMDB_API_KEY: str = "db55323b8d3e4154498498a75642b381"
|
||||
|
||||
# ==================== TVDB配置 ====================
|
||||
# TVDB API Key
|
||||
TVDB_V4_API_KEY: str = "ed2aa66b-7899-4677-92a7-67bc9ce3d93a"
|
||||
TVDB_V4_API_PIN: str = ""
|
||||
|
||||
# ==================== Fanart配置 ====================
|
||||
# Fanart开关
|
||||
FANART_ENABLE: bool = True
|
||||
# Fanart语言
|
||||
FANART_LANG: str = "zh,en"
|
||||
# Fanart API Key
|
||||
FANART_API_KEY: str = "d2d31f9ecabea050fc7d68aa3146015f"
|
||||
|
||||
# ==================== 云盘配置 ====================
|
||||
# 115 AppId
|
||||
U115_APP_ID: str = "100196807"
|
||||
# Alipan AppId
|
||||
ALIPAN_APP_ID: str = "ac1bf04dc9fd4d9aaabb65b4a668d403"
|
||||
|
||||
# ==================== 系统升级配置 ====================
|
||||
# 重启自动升级
|
||||
MOVIEPILOT_AUTO_UPDATE: str = 'release'
|
||||
# 自动检查和更新站点资源包(站点索引、认证等)
|
||||
AUTO_UPDATE_RESOURCE: bool = True
|
||||
|
||||
# ==================== 媒体文件格式配置 ====================
|
||||
# 支持的后缀格式
|
||||
RMT_MEDIAEXT: list = Field(
|
||||
default_factory=lambda: ['.mp4', '.mkv', '.ts', '.iso',
|
||||
@@ -195,10 +235,12 @@ class ConfigModel(BaseModel):
|
||||
'.aifc', '.aiff', '.alac', '.adif', '.adts',
|
||||
'.flac', '.midi', '.opus', '.sfalc']
|
||||
)
|
||||
# 下载器临时文件后缀
|
||||
DOWNLOAD_TMPEXT: list = Field(default_factory=lambda: ['.!qb', '.part'])
|
||||
|
||||
# ==================== 媒体服务器配置 ====================
|
||||
# 媒体服务器同步间隔(小时)
|
||||
MEDIASERVER_SYNC_INTERVAL: int = 6
|
||||
|
||||
# ==================== 订阅配置 ====================
|
||||
# 订阅模式
|
||||
SUBSCRIBE_MODE: str = "spider"
|
||||
# RSS订阅模式刷新时间间隔(分钟)
|
||||
@@ -209,22 +251,38 @@ class ConfigModel(BaseModel):
|
||||
SUBSCRIBE_SEARCH: bool = False
|
||||
# 检查本地媒体库是否存在资源开关
|
||||
LOCAL_EXISTS_SEARCH: bool = False
|
||||
# 搜索多个名称
|
||||
SEARCH_MULTIPLE_NAME: bool = False
|
||||
# 最大搜索名称数量
|
||||
MAX_SEARCH_NAME_LIMIT: int = 2
|
||||
|
||||
# ==================== 站点配置 ====================
|
||||
# 站点数据刷新间隔(小时)
|
||||
SITEDATA_REFRESH_INTERVAL: int = 6
|
||||
# 读取和发送站点消息
|
||||
SITE_MESSAGE: bool = True
|
||||
# 不能缓存站点资源的站点域名,多个使用,分隔
|
||||
NO_CACHE_SITE_KEY: str = "m-team"
|
||||
# OCR服务器地址,用于识别站点验证码
|
||||
OCR_HOST: str = "https://movie-pilot.org"
|
||||
# 仿真类型:playwright 或 flaresolverr
|
||||
BROWSER_EMULATION: str = "playwright"
|
||||
# FlareSolverr 服务地址,例如 http://127.0.0.1:8191
|
||||
FLARESOLVERR_URL: Optional[str] = None
|
||||
|
||||
# ==================== 搜索配置 ====================
|
||||
# 搜索多个名称
|
||||
SEARCH_MULTIPLE_NAME: bool = False
|
||||
# 最大搜索名称数量
|
||||
MAX_SEARCH_NAME_LIMIT: int = 2
|
||||
|
||||
# ==================== 下载配置 ====================
|
||||
# 种子标签
|
||||
TORRENT_TAG: str = "MOVIEPILOT"
|
||||
# 下载站点字幕
|
||||
DOWNLOAD_SUBTITLE: bool = True
|
||||
# 交互搜索自动下载用户ID,使用,分割
|
||||
AUTO_DOWNLOAD_USER: Optional[str] = None
|
||||
# 下载器临时文件后缀
|
||||
DOWNLOAD_TMPEXT: list = Field(default_factory=lambda: ['.!qb', '.part'])
|
||||
|
||||
# ==================== CookieCloud配置 ====================
|
||||
# CookieCloud是否启动本地服务
|
||||
COOKIECLOUD_ENABLE_LOCAL: Optional[bool] = False
|
||||
# CookieCloud服务器地址
|
||||
@@ -237,6 +295,8 @@ class ConfigModel(BaseModel):
|
||||
COOKIECLOUD_INTERVAL: Optional[int] = 60 * 24
|
||||
# CookieCloud同步黑名单,多个域名,分割
|
||||
COOKIECLOUD_BLACKLIST: Optional[str] = None
|
||||
|
||||
# ==================== 整理配置 ====================
|
||||
# 电影重命名格式
|
||||
MOVIE_RENAME_FORMAT: str = "{{title}}{% if year %} ({{year}}){% endif %}" \
|
||||
"/{{title}}{% if year %} ({{year}}){% endif %}{% if part %}-{{part}}{% endif %}{% if videoFormat %} - {{videoFormat}}{% endif %}" \
|
||||
@@ -246,10 +306,24 @@ class ConfigModel(BaseModel):
|
||||
"/Season {{season}}" \
|
||||
"/{{title}} - {{season_episode}}{% if part %}-{{part}}{% endif %}{% if episode %} - 第 {{episode}} 集{% endif %}" \
|
||||
"{{fileExt}}"
|
||||
# OCR服务器地址
|
||||
OCR_HOST: str = "https://movie-pilot.org"
|
||||
# 重命名时支持的S0别名
|
||||
RENAME_FORMAT_S0_NAMES: list = Field(default=["Specials", "SPs"])
|
||||
# 为指定默认字幕添加.default后缀
|
||||
DEFAULT_SUB: Optional[str] = "zh-cn"
|
||||
# 新增已入库媒体是否跟随TMDB信息变化
|
||||
SCRAP_FOLLOW_TMDB: bool = True
|
||||
|
||||
# ==================== 服务地址配置 ====================
|
||||
# 服务器地址,对应 https://github.com/jxxghp/MoviePilot-Server 项目
|
||||
MP_SERVER_HOST: str = "https://movie-pilot.org"
|
||||
|
||||
# ==================== 个性化 ====================
|
||||
# 登录页面电影海报,tmdb/bing/mediaserver
|
||||
WALLPAPER: str = "tmdb"
|
||||
# 自定义壁纸api地址
|
||||
CUSTOMIZE_WALLPAPER_API_URL: Optional[str] = None
|
||||
|
||||
# ==================== 插件配置 ====================
|
||||
# 插件市场仓库地址,多个地址使用,分隔,地址以/结尾
|
||||
PLUGIN_MARKET: str = ("https://github.com/jxxghp/MoviePilot-Plugins,"
|
||||
"https://github.com/thsrite/MoviePilot-Plugins,"
|
||||
@@ -270,6 +344,8 @@ class ConfigModel(BaseModel):
|
||||
PLUGIN_STATISTIC_SHARE: bool = True
|
||||
# 是否开启插件热加载
|
||||
PLUGIN_AUTO_RELOAD: bool = False
|
||||
|
||||
# ==================== Github & PIP ====================
|
||||
# Github token,提高请求api限流阈值 ghp_****
|
||||
GITHUB_TOKEN: Optional[str] = None
|
||||
# Github代理服务器,格式:https://mirror.ghproxy.com/
|
||||
@@ -278,16 +354,18 @@ class ConfigModel(BaseModel):
|
||||
PIP_PROXY: Optional[str] = ''
|
||||
# 指定的仓库Github token,多个仓库使用,分隔,格式:{user1}/{repo1}:ghp_****,{user2}/{repo2}:github_pat_****
|
||||
REPO_GITHUB_TOKEN: Optional[str] = None
|
||||
|
||||
# ==================== 性能配置 ====================
|
||||
# 大内存模式
|
||||
BIG_MEMORY_MODE: bool = False
|
||||
# FastApi性能监控
|
||||
PERFORMANCE_MONITOR_ENABLE: bool = False
|
||||
# 全局图片缓存,将媒体图片缓存到本地
|
||||
GLOBAL_IMAGE_CACHE: bool = False
|
||||
# 是否启用编码探测的性能模式
|
||||
ENCODING_DETECTION_PERFORMANCE_MODE: bool = True
|
||||
# 编码探测的最低置信度阈值
|
||||
ENCODING_DETECTION_MIN_CONFIDENCE: float = 0.8
|
||||
|
||||
# ==================== 安全配置 ====================
|
||||
# 允许的图片缓存域名
|
||||
SECURITY_IMAGE_DOMAINS: list = Field(default=[
|
||||
"image.tmdb.org",
|
||||
@@ -307,19 +385,21 @@ class ConfigModel(BaseModel):
|
||||
])
|
||||
# 允许的图片文件后缀格式
|
||||
SECURITY_IMAGE_SUFFIXES: list = Field(default=[".jpg", ".jpeg", ".png", ".webp", ".gif", ".svg", ".avif"])
|
||||
# 重命名时支持的S0别名
|
||||
RENAME_FORMAT_S0_NAMES: list = Field(default=["Specials", "SPs"])
|
||||
# 为指定默认字幕添加.default后缀
|
||||
DEFAULT_SUB: Optional[str] = "zh-cn"
|
||||
# Docker Client API地址
|
||||
DOCKER_CLIENT_API: Optional[str] = "tcp://127.0.0.1:38379"
|
||||
|
||||
# ==================== 工作流配置 ====================
|
||||
# 工作流数据共享
|
||||
WORKFLOW_STATISTIC_SHARE: bool = True
|
||||
|
||||
# ==================== 存储配置 ====================
|
||||
# 对rclone进行快照对比时,是否检查文件夹的修改时间
|
||||
RCLONE_SNAPSHOT_CHECK_FOLDER_MODTIME = True
|
||||
# 对OpenList进行快照对比时,是否检查文件夹的修改时间
|
||||
OPENLIST_SNAPSHOT_CHECK_FOLDER_MODTIME = True
|
||||
|
||||
# ==================== Docker配置 ====================
|
||||
# Docker Client API地址
|
||||
DOCKER_CLIENT_API: Optional[str] = "tcp://127.0.0.1:38379"
|
||||
|
||||
|
||||
class Settings(BaseSettings, ConfigModel, LogConfigModel):
|
||||
"""
|
||||
@@ -585,9 +665,7 @@ class Settings(BaseSettings, ConfigModel, LogConfigModel):
|
||||
fanart=512,
|
||||
meta=(self.META_CACHE_EXPIRE or 24) * 3600,
|
||||
scheduler=100,
|
||||
threadpool=100,
|
||||
dbpool=100,
|
||||
dbpooloverflow=50
|
||||
threadpool=100
|
||||
)
|
||||
return SystemConfModel(
|
||||
torrents=100,
|
||||
@@ -598,9 +676,7 @@ class Settings(BaseSettings, ConfigModel, LogConfigModel):
|
||||
fanart=128,
|
||||
meta=(self.META_CACHE_EXPIRE or 2) * 3600,
|
||||
scheduler=50,
|
||||
threadpool=50,
|
||||
dbpool=50,
|
||||
dbpooloverflow=20
|
||||
threadpool=50
|
||||
)
|
||||
|
||||
@property
|
||||
@@ -615,9 +691,22 @@ class Settings(BaseSettings, ConfigModel, LogConfigModel):
@property
def PROXY_SERVER(self):
if self.PROXY_HOST:
return {
"server": self.PROXY_HOST
}
try:
parsed = urlparse(self.PROXY_HOST)
if not parsed.scheme:
return {"server": self.PROXY_HOST}
host = parsed.hostname or ""
port = f":{parsed.port}" if parsed.port else ""
server = f"{parsed.scheme}://{host}{port}"
proxy = {"server": server}
if parsed.username:
proxy["username"] = parsed.username
if parsed.password:
proxy["password"] = parsed.password
return proxy
except Exception as err:
logger.error(f"解析代理服务器地址 '{self.PROXY_HOST}' 时出错: {err}")
return {"server": self.PROXY_HOST}
return None

@property

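`PROXY_SERVER` now splits credentials embedded in `PROXY_HOST` into the separate `username`/`password` fields that Playwright expects, instead of passing the raw URL through. A small standalone version of the same parsing (illustrative name; it falls back to the raw value when no scheme is present):

```python
from typing import Optional
from urllib.parse import urlparse


def proxy_server_from_host(proxy_host: Optional[str]) -> Optional[dict]:
    """Turn a PROXY_HOST URL into the proxy settings dict Playwright expects."""
    if not proxy_host:
        return None
    parsed = urlparse(proxy_host)
    if not parsed.scheme:
        # No scheme: pass the raw value through unchanged
        return {"server": proxy_host}
    server = f"{parsed.scheme}://{parsed.hostname or ''}"
    if parsed.port:
        server += f":{parsed.port}"
    proxy = {"server": server}
    # Credentials embedded in the URL become separate fields
    if parsed.username:
        proxy["username"] = parsed.username
    if parsed.password:
        proxy["password"] = parsed.password
    return proxy


# "http://user:pass@127.0.0.1:7890" ->
# {"server": "http://127.0.0.1:7890", "username": "user", "password": "pass"}
```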
||||
@@ -483,7 +483,7 @@ class MediaInfo:
|
||||
continue
|
||||
if current_value is None:
|
||||
setattr(self, key, value)
|
||||
elif type(current_value) == type(value):
|
||||
elif type(current_value) is type(value):
|
||||
setattr(self, key, value)
|
||||
|
||||
def set_douban_info(self, info: dict):
|
||||
@@ -624,7 +624,7 @@ class MediaInfo:
|
||||
continue
|
||||
if current_value is None:
|
||||
setattr(self, key, value)
|
||||
elif type(current_value) == type(value):
|
||||
elif type(current_value) is type(value):
|
||||
setattr(self, key, value)
|
||||
|
||||
def set_bangumi_info(self, info: dict):
|
||||
|
||||
@@ -105,10 +105,11 @@ class ReleaseGroupsMatcher(metaclass=Singleton):
else:
groups = self.__release_groups
title = f"{title} "
groups_re = re.compile(r"(?<=[-@\[£【&])(?:%s)(?=[@.\s\S\]\[】&])" % groups, re.I)
# 处理一个制作组识别多次的情况,保留顺序
groups_re = re.compile(r"(?<=[-@\[£【&])(?:(?:%s))(?=[@.\s\S\]\[】&])" % groups, re.I)
unique_groups = []
for item in re.findall(groups_re, title):
if item not in unique_groups:
unique_groups.append(item)
item_str = item[0] if isinstance(item, tuple) else item
if item_str not in unique_groups:
unique_groups.append(item_str)

return "@".join(unique_groups)

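The matcher change deduplicates release-group hits while preserving the order in which they appear in the title, and copes with `re.findall` returning tuples once the pattern contains nested capturing groups. A reduced sketch of that loop (the pattern handling is illustrative):

```python
import re
from typing import List


def matched_groups_in_order(title: str, groups_pattern: str) -> str:
    """Join release-group matches with '@', keeping first-seen order, no repeats."""
    groups_re = re.compile(groups_pattern, re.I)
    unique: List[str] = []
    for item in groups_re.findall(f"{title} "):
        # findall yields tuples once the pattern contains capturing groups
        text = item[0] if isinstance(item, tuple) else item
        if text and text not in unique:
            unique.append(text)
    return "@".join(unique)
```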
||||
@@ -1,19 +1,43 @@
|
||||
import asyncio
|
||||
from typing import Any, Generator, List, Optional, Self, Tuple, AsyncGenerator, Sequence, Union
|
||||
from typing import Any, Generator, List, Optional, Self, Tuple, AsyncGenerator, Union
|
||||
|
||||
from sqlalchemy import NullPool, QueuePool, and_, create_engine, inspect, text, select, delete
|
||||
from sqlalchemy import NullPool, QueuePool, and_, create_engine, inspect, text, select, delete, Column, Integer, \
|
||||
Sequence, Identity
|
||||
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
|
||||
from sqlalchemy.orm import Session, as_declarative, declared_attr, scoped_session, sessionmaker
|
||||
|
||||
from app.core.config import settings
|
||||
|
||||
|
||||
def get_id_column():
"""
根据数据库类型返回合适的ID列定义
"""
if settings.DB_TYPE.lower() == "postgresql":
# PostgreSQL使用SERIAL类型,让数据库自动处理序列
return Column(Integer, Identity(start=1, cycle=True), primary_key=True, index=True)
else:
# SQLite使用Sequence
return Column(Integer, Sequence('id'), primary_key=True, index=True)

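`get_id_column()` keeps the model definitions dialect-agnostic: PostgreSQL gets a server-managed `Identity` column, while SQLite keeps the explicit `Sequence` used before this change. A self-contained sketch of how a model consumes it (here `DB_TYPE` stands in for `settings.DB_TYPE`, and the model is illustrative):

```python
from sqlalchemy import Column, Identity, Integer, Sequence, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()
DB_TYPE = "sqlite"  # stands in for settings.DB_TYPE


def get_id_column() -> Column:
    """Primary-key column definition matching the active database dialect."""
    if DB_TYPE.lower() == "postgresql":
        # PostgreSQL: server-managed identity column
        return Column(Integer, Identity(start=1, cycle=True), primary_key=True, index=True)
    # SQLite keeps the explicit Sequence used before this change
    return Column(Integer, Sequence("id"), primary_key=True, index=True)


class ExampleModel(Base):
    __tablename__ = "example"
    id = get_id_column()
    name = Column(String, index=True)
```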
||||
|
||||
def _get_database_engine(is_async: bool = False):
|
||||
"""
|
||||
获取数据库连接参数并设置WAL模式
|
||||
:param is_async: 是否创建异步引擎,True - 异步引擎, False - 同步引擎
|
||||
:return: 返回对应的数据库引擎
|
||||
"""
|
||||
# 根据数据库类型选择连接方式
|
||||
if settings.DB_TYPE.lower() == "postgresql":
|
||||
return _get_postgresql_engine(is_async)
|
||||
else:
|
||||
return _get_sqlite_engine(is_async)
|
||||
|
||||
|
||||
def _get_sqlite_engine(is_async: bool = False):
|
||||
"""
|
||||
获取SQLite数据库引擎
|
||||
"""
|
||||
# 连接参数
|
||||
_connect_args = {
|
||||
"timeout": settings.DB_TIMEOUT,
|
||||
@@ -40,9 +64,9 @@ def _get_database_engine(is_async: bool = False):
|
||||
# 当使用 QueuePool 时,添加 QueuePool 特有的参数
|
||||
if _pool_class == QueuePool:
|
||||
_db_kwargs.update({
|
||||
"pool_size": settings.CONF.dbpool,
|
||||
"pool_size": settings.DB_SQLITE_POOL_SIZE,
|
||||
"pool_timeout": settings.DB_POOL_TIMEOUT,
|
||||
"max_overflow": settings.CONF.dbpooloverflow
|
||||
"max_overflow": settings.DB_SQLITE_MAX_OVERFLOW
|
||||
})
|
||||
|
||||
# 创建数据库引擎
|
||||
@@ -52,7 +76,7 @@ def _get_database_engine(is_async: bool = False):
|
||||
_journal_mode = "WAL" if settings.DB_WAL_ENABLE else "DELETE"
|
||||
with engine.connect() as connection:
|
||||
current_mode = connection.execute(text(f"PRAGMA journal_mode={_journal_mode};")).scalar()
|
||||
print(f"Database journal mode set to: {current_mode}")
|
||||
print(f"SQLite database journal mode set to: {current_mode}")
|
||||
|
||||
return engine
|
||||
else:
|
||||
@@ -78,12 +102,73 @@ def _get_database_engine(is_async: bool = False):
|
||||
async with async_engine.connect() as _connection:
|
||||
result = await _connection.execute(text(f"PRAGMA journal_mode={_journal_mode};"))
|
||||
_current_mode = result.scalar()
|
||||
print(f"Async database journal mode set to: {_current_mode}")
|
||||
print(f"Async SQLite database journal mode set to: {_current_mode}")
|
||||
|
||||
try:
|
||||
asyncio.run(set_async_wal_mode())
|
||||
except Exception as e:
|
||||
print(f"Failed to set async WAL mode: {e}")
|
||||
print(f"Failed to set async SQLite WAL mode: {e}")
|
||||
|
||||
return async_engine
|
||||
|
||||
|
||||
def _get_postgresql_engine(is_async: bool = False):
|
||||
"""
|
||||
获取PostgreSQL数据库引擎
|
||||
"""
|
||||
# 构建PostgreSQL连接URL
|
||||
if settings.DB_POSTGRESQL_PASSWORD:
|
||||
db_url = f"postgresql://{settings.DB_POSTGRESQL_USERNAME}:{settings.DB_POSTGRESQL_PASSWORD}@{settings.DB_POSTGRESQL_HOST}:{settings.DB_POSTGRESQL_PORT}/{settings.DB_POSTGRESQL_DATABASE}"
|
||||
else:
|
||||
db_url = f"postgresql://{settings.DB_POSTGRESQL_USERNAME}@{settings.DB_POSTGRESQL_HOST}:{settings.DB_POSTGRESQL_PORT}/{settings.DB_POSTGRESQL_DATABASE}"
|
||||
|
||||
# PostgreSQL连接参数
|
||||
_connect_args = {}
|
||||
|
||||
# 创建同步引擎
|
||||
if not is_async:
|
||||
# 根据池类型设置 poolclass 和相关参数
|
||||
_pool_class = NullPool if settings.DB_POOL_TYPE == "NullPool" else QueuePool
|
||||
|
||||
# 数据库参数
|
||||
_db_kwargs = {
|
||||
"url": db_url,
|
||||
"pool_pre_ping": settings.DB_POOL_PRE_PING,
|
||||
"echo": settings.DB_ECHO,
|
||||
"poolclass": _pool_class,
|
||||
"pool_recycle": settings.DB_POOL_RECYCLE,
|
||||
"connect_args": _connect_args
|
||||
}
|
||||
|
||||
# 当使用 QueuePool 时,添加 QueuePool 特有的参数
|
||||
if _pool_class == QueuePool:
|
||||
_db_kwargs.update({
|
||||
"pool_size": settings.DB_POSTGRESQL_POOL_SIZE,
|
||||
"pool_timeout": settings.DB_POOL_TIMEOUT,
|
||||
"max_overflow": settings.DB_POSTGRESQL_MAX_OVERFLOW
|
||||
})
|
||||
|
||||
# 创建数据库引擎
|
||||
engine = create_engine(**_db_kwargs)
|
||||
print(f"PostgreSQL database connected to {settings.DB_POSTGRESQL_HOST}:{settings.DB_POSTGRESQL_PORT}/{settings.DB_POSTGRESQL_DATABASE}")
|
||||
|
||||
return engine
|
||||
else:
|
||||
# 构建异步PostgreSQL连接URL
|
||||
async_db_url = f"postgresql+asyncpg://{settings.DB_POSTGRESQL_USERNAME}:{settings.DB_POSTGRESQL_PASSWORD}@{settings.DB_POSTGRESQL_HOST}:{settings.DB_POSTGRESQL_PORT}/{settings.DB_POSTGRESQL_DATABASE}"
|
||||
|
||||
# 数据库参数,只能使用 NullPool
|
||||
_db_kwargs = {
|
||||
"url": async_db_url,
|
||||
"pool_pre_ping": settings.DB_POOL_PRE_PING,
|
||||
"echo": settings.DB_ECHO,
|
||||
"poolclass": NullPool,
|
||||
"pool_recycle": settings.DB_POOL_RECYCLE,
|
||||
"connect_args": _connect_args
|
||||
}
|
||||
# 创建异步数据库引擎
|
||||
async_engine = create_async_engine(**_db_kwargs)
|
||||
print(f"Async PostgreSQL database connected to {settings.DB_POSTGRESQL_HOST}:{settings.DB_POSTGRESQL_PORT}/{settings.DB_POSTGRESQL_DATABASE}")
|
||||
|
||||
return async_engine
|
||||
|
||||
|
||||
@@ -18,12 +18,22 @@ def update_db():
|
||||
"""
|
||||
更新数据库
|
||||
"""
|
||||
db_location = settings.CONFIG_PATH / 'user.db'
|
||||
script_location = settings.ROOT_PATH / 'database'
|
||||
try:
|
||||
alembic_cfg = Config()
|
||||
alembic_cfg.set_main_option('script_location', str(script_location))
|
||||
alembic_cfg.set_main_option('sqlalchemy.url', f"sqlite:///{db_location}")
|
||||
|
||||
# 根据数据库类型设置不同的URL
|
||||
if settings.DB_TYPE.lower() == "postgresql":
|
||||
if settings.DB_POSTGRESQL_PASSWORD:
|
||||
db_url = f"postgresql://{settings.DB_POSTGRESQL_USERNAME}:{settings.DB_POSTGRESQL_PASSWORD}@{settings.DB_POSTGRESQL_HOST}:{settings.DB_POSTGRESQL_PORT}/{settings.DB_POSTGRESQL_DATABASE}"
|
||||
else:
|
||||
db_url = f"postgresql://{settings.DB_POSTGRESQL_USERNAME}@{settings.DB_POSTGRESQL_HOST}:{settings.DB_POSTGRESQL_PORT}/{settings.DB_POSTGRESQL_DATABASE}"
|
||||
else:
|
||||
db_location = settings.CONFIG_PATH / 'user.db'
|
||||
db_url = f"sqlite:///{db_location}"
|
||||
|
||||
alembic_cfg.set_main_option('sqlalchemy.url', db_url)
|
||||
upgrade(alembic_cfg, 'head')
|
||||
except Exception as e:
|
||||
logger.error(f'数据库更新失败:{str(e)}')
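`update_db` now derives the Alembic URL from the configured backend rather than assuming SQLite. A hedged sketch of the same idea, with `cfg` standing in for the relevant settings fields (host, port, credentials, SQLite path):

```python
from alembic.command import upgrade
from alembic.config import Config


def build_db_url(db_type: str, cfg) -> str:
    """Pick the migration URL for the configured backend (cfg is illustrative)."""
    if db_type.lower() == "postgresql":
        auth = cfg.user if not cfg.password else f"{cfg.user}:{cfg.password}"
        return f"postgresql://{auth}@{cfg.host}:{cfg.port}/{cfg.database}"
    return f"sqlite:///{cfg.sqlite_path}"


def run_migrations(script_location: str, db_url: str) -> None:
    # Programmatic equivalent of `alembic upgrade head`
    alembic_cfg = Config()
    alembic_cfg.set_main_option("script_location", script_location)
    alembic_cfg.set_main_option("sqlalchemy.url", db_url)
    upgrade(alembic_cfg, "head")
```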
|
||||
|
||||
@@ -1,18 +1,18 @@
|
||||
import time
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import Column, Integer, String, Sequence, JSON, select
|
||||
from sqlalchemy import Column, Integer, String, JSON, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.db import db_query, db_update, Base, async_db_query
|
||||
from app.db import db_query, db_update, get_id_column, Base, async_db_query
|
||||
|
||||
|
||||
class DownloadHistory(Base):
|
||||
"""
|
||||
下载历史记录
|
||||
"""
|
||||
id = Column(Integer, Sequence('id'), primary_key=True, index=True)
|
||||
id = get_id_column()
|
||||
# 保存路径
|
||||
path = Column(String, nullable=False, index=True)
|
||||
# 类型 电影/电视剧
|
||||
@@ -188,7 +188,7 @@ class DownloadFiles(Base):
|
||||
"""
|
||||
下载文件记录
|
||||
"""
|
||||
id = Column(Integer, Sequence('id'), primary_key=True, index=True)
|
||||
id = get_id_column()
|
||||
# 下载器
|
||||
downloader = Column(String)
|
||||
# 下载任务Hash
|
||||
|
||||
@@ -1,19 +1,19 @@
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import Column, Integer, String, Sequence, JSON
|
||||
from sqlalchemy import Column, Integer, String, JSON
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.db import db_query, db_update, async_db_query, Base
|
||||
from app.db import db_query, db_update, get_id_column, async_db_query, Base
|
||||
|
||||
|
||||
class MediaServerItem(Base):
|
||||
"""
|
||||
媒体服务器媒体条目表
|
||||
"""
|
||||
id = Column(Integer, Sequence('id'), primary_key=True, index=True)
|
||||
id = get_id_column()
|
||||
# 服务器类型
|
||||
server = Column(String)
|
||||
# 媒体库ID
|
||||
|
||||
@@ -1,17 +1,17 @@
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import Column, Integer, String, Sequence, JSON, select
|
||||
from sqlalchemy import Column, Integer, String, JSON, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.db import db_query, Base, async_db_query
|
||||
from app.db import db_query, Base, get_id_column, async_db_query
|
||||
|
||||
|
||||
class Message(Base):
|
||||
"""
|
||||
消息表
|
||||
"""
|
||||
id = Column(Integer, Sequence('id'), primary_key=True, index=True)
|
||||
id = get_id_column()
|
||||
# 消息渠道
|
||||
channel = Column(String)
|
||||
# 消息来源
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
from sqlalchemy import Column, Integer, String, Sequence, JSON
|
||||
from sqlalchemy import Column, String, JSON
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.db import db_query, db_update, Base
|
||||
from app.db import db_query, db_update, get_id_column, Base
|
||||
|
||||
|
||||
class PluginData(Base):
|
||||
"""
|
||||
插件数据表
|
||||
"""
|
||||
id = Column(Integer, Sequence('id'), primary_key=True, index=True)
|
||||
id = get_id_column()
|
||||
plugin_id = Column(String, nullable=False, index=True)
|
||||
key = Column(String, index=True, nullable=False)
|
||||
value = Column(JSON)
|
||||
|
||||
@@ -1,17 +1,17 @@
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import Boolean, Column, Integer, String, Sequence, JSON, select, delete
|
||||
from sqlalchemy import Boolean, Column, Integer, String, JSON, select, delete
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.db import db_query, db_update, Base, async_db_query, async_db_update
|
||||
from app.db import db_query, db_update, Base, async_db_query, async_db_update, get_id_column
|
||||
|
||||
|
||||
class Site(Base):
|
||||
"""
|
||||
站点表
|
||||
"""
|
||||
id = Column(Integer, Sequence('id'), primary_key=True, index=True)
|
||||
id = get_id_column()
|
||||
# 站点名
|
||||
name = Column(String, nullable=False)
|
||||
# 域名Key
|
||||
@@ -69,12 +69,12 @@ class Site(Base):
|
||||
@classmethod
|
||||
@db_query
|
||||
def get_actives(cls, db: Session):
|
||||
return db.query(cls).filter(cls.is_active == 1).all()
|
||||
return db.query(cls).filter(cls.is_active).all()
|
||||
|
||||
@classmethod
|
||||
@async_db_query
|
||||
async def async_get_actives(cls, db: AsyncSession):
|
||||
result = await db.execute(select(cls).where(cls.is_active == 1))
|
||||
result = await db.execute(select(cls).where(cls.is_active))
|
||||
return result.scalars().all()
|
||||
|
||||
@classmethod
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
from sqlalchemy import Column, Integer, String, Sequence, select
|
||||
from sqlalchemy import Column, String, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.db import db_query, Base, async_db_query
|
||||
from app.db import db_query, Base, get_id_column, async_db_query
|
||||
|
||||
|
||||
class SiteIcon(Base):
|
||||
"""
|
||||
站点图标表
|
||||
"""
|
||||
id = Column(Integer, Sequence('id'), primary_key=True, index=True)
|
||||
id = get_id_column()
|
||||
# 站点名称
|
||||
name = Column(String, nullable=False)
|
||||
# 域名Key
|
||||
|
||||
@@ -1,17 +1,17 @@
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import Column, Integer, String, Sequence, JSON, select
|
||||
from sqlalchemy import Column, Integer, String, JSON, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.db import db_query, db_update, Base, async_db_query
|
||||
from app.db import db_query, db_update, get_id_column, Base, async_db_query
|
||||
|
||||
|
||||
class SiteStatistic(Base):
|
||||
"""
|
||||
站点统计表
|
||||
"""
|
||||
id = Column(Integer, Sequence('id'), primary_key=True, index=True)
|
||||
id = get_id_column()
|
||||
# 域名Key
|
||||
domain = Column(String, index=True)
|
||||
# 成功次数
|
||||
|
||||
@@ -1,18 +1,18 @@
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import Column, Integer, String, Sequence, Float, JSON, func, or_, select
|
||||
from sqlalchemy import Column, Integer, String, Float, JSON, func, or_, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.db import db_query, Base, async_db_query
|
||||
from app.db import db_query, Base, get_id_column, async_db_query
|
||||
|
||||
|
||||
class SiteUserData(Base):
|
||||
"""
|
||||
站点数据表
|
||||
"""
|
||||
id = Column(Integer, Sequence('id'), primary_key=True, index=True)
|
||||
id = get_id_column()
|
||||
# 站点域名
|
||||
domain = Column(String, index=True)
|
||||
# 站点名称
|
||||
|
||||
@@ -1,18 +1,18 @@
|
||||
import time
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import Column, Integer, String, Sequence, Float, JSON, select
|
||||
from sqlalchemy import Column, Integer, String, Float, JSON, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.db import db_query, db_update, Base, async_db_query, async_db_update
|
||||
from app.db import db_query, db_update, get_id_column, Base, async_db_query, async_db_update
|
||||
|
||||
|
||||
class Subscribe(Base):
|
||||
"""
|
||||
订阅表
|
||||
"""
|
||||
id = Column(Integer, Sequence('id'), primary_key=True, index=True)
|
||||
id = get_id_column()
|
||||
# 标题
|
||||
name = Column(String, nullable=False, index=True)
|
||||
# 年份
|
||||
|
||||
@@ -1,17 +1,17 @@
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import Column, Integer, String, Sequence, Float, JSON, select
|
||||
from sqlalchemy import Column, Integer, String, Float, JSON, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.db import db_query, Base, async_db_query
|
||||
from app.db import db_query, Base, get_id_column, async_db_query
|
||||
|
||||
|
||||
class SubscribeHistory(Base):
|
||||
"""
|
||||
订阅历史表
|
||||
"""
|
||||
id = Column(Integer, Sequence('id'), primary_key=True, index=True)
|
||||
id = get_id_column()
|
||||
# 标题
|
||||
name = Column(String, nullable=False, index=True)
|
||||
# 年份
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
from sqlalchemy import Column, Integer, String, Sequence, JSON, select
|
||||
from sqlalchemy import Column, String, JSON, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.db import db_query, db_update, Base, async_db_query
|
||||
from app.db import db_query, db_update, Base, async_db_query, get_id_column
|
||||
|
||||
|
||||
class SystemConfig(Base):
|
||||
"""
|
||||
配置表
|
||||
"""
|
||||
id = Column(Integer, Sequence('id'), primary_key=True, index=True)
|
||||
id = get_id_column()
|
||||
# 主键
|
||||
key = Column(String, index=True)
|
||||
# 值
|
||||
|
||||
@@ -1,18 +1,18 @@
|
||||
import time
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import Column, Integer, String, Sequence, Boolean, func, or_, JSON, select
|
||||
from sqlalchemy import Column, Integer, String, Boolean, func, or_, JSON, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.db import db_query, db_update, Base, async_db_query
|
||||
from app.db import db_query, db_update, get_id_column, Base, async_db_query
|
||||
|
||||
|
||||
class TransferHistory(Base):
|
||||
"""
|
||||
整理记录
|
||||
"""
|
||||
id = Column(Integer, Sequence('id'), primary_key=True, index=True)
|
||||
id = get_id_column()
|
||||
# 源路径
|
||||
src = Column(String, index=True)
|
||||
# 源存储
|
||||
@@ -65,76 +65,92 @@ class TransferHistory(Base):
|
||||
def list_by_title(cls, db: Session, title: str, page: Optional[int] = 1, count: Optional[int] = 30,
|
||||
status: bool = None):
|
||||
if status is not None:
|
||||
return db.query(cls).filter(
|
||||
query = db.query(cls).filter(
|
||||
cls.status == status
|
||||
).order_by(
|
||||
cls.date.desc()
|
||||
).offset((page - 1) * count).limit(count).all()
|
||||
)
|
||||
else:
|
||||
return db.query(cls).filter(or_(
|
||||
query = db.query(cls).filter(or_(
|
||||
cls.title.like(f'%{title}%'),
|
||||
cls.src.like(f'%{title}%'),
|
||||
cls.dest.like(f'%{title}%'),
|
||||
)).order_by(
|
||||
cls.date.desc()
|
||||
).offset((page - 1) * count).limit(count).all()
|
||||
)
|
||||
|
||||
# 当count为负数时,不限制页数查询所有
|
||||
if count >= 0:
|
||||
query = query.offset((page - 1) * count).limit(count)
|
||||
|
||||
return query.all()
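The history query builders now treat a negative `count` as "no pagination": the statement is assembled first and `offset`/`limit` are applied only when `count >= 0`. A compact sketch of that pattern with a SQLAlchemy 2.0-style `select` (the model is passed in for illustration):

```python
from sqlalchemy import select


def history_page_query(model, page: int = 1, count: int = 30, status=None):
    """Build a history query; a negative count returns every row (no paging)."""
    stmt = select(model)
    if status is not None:
        stmt = stmt.where(model.status == status)
    stmt = stmt.order_by(model.date.desc())
    # Negative count means "no limit": skip offset/limit entirely
    if count >= 0:
        stmt = stmt.offset((page - 1) * count).limit(count)
    return stmt
```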
|
||||
|
||||
@classmethod
|
||||
@async_db_query
|
||||
async def async_list_by_title(cls, db: AsyncSession, title: str, page: Optional[int] = 1, count: Optional[int] = 30,
|
||||
status: bool = None):
|
||||
if status is not None:
|
||||
result = await db.execute(
|
||||
select(cls).filter(
|
||||
cls.status == status
|
||||
).order_by(
|
||||
cls.date.desc()
|
||||
).offset((page - 1) * count).limit(count)
|
||||
query = select(cls).filter(
|
||||
cls.status == status
|
||||
).order_by(
|
||||
cls.date.desc()
|
||||
)
|
||||
else:
|
||||
result = await db.execute(
|
||||
select(cls).filter(or_(
|
||||
cls.title.like(f'%{title}%'),
|
||||
cls.src.like(f'%{title}%'),
|
||||
cls.dest.like(f'%{title}%'),
|
||||
)).order_by(
|
||||
cls.date.desc()
|
||||
).offset((page - 1) * count).limit(count)
|
||||
query = select(cls).filter(or_(
|
||||
cls.title.like(f'%{title}%'),
|
||||
cls.src.like(f'%{title}%'),
|
||||
cls.dest.like(f'%{title}%'),
|
||||
)).order_by(
|
||||
cls.date.desc()
|
||||
)
|
||||
|
||||
# 当count为负数时,不限制页数查询所有
|
||||
if count >= 0:
|
||||
query = query.offset((page - 1) * count).limit(count)
|
||||
|
||||
result = await db.execute(query)
|
||||
return result.scalars().all()
|
||||
|
||||
@classmethod
|
||||
@db_query
|
||||
def list_by_page(cls, db: Session, page: Optional[int] = 1, count: Optional[int] = 30, status: bool = None):
|
||||
if status is not None:
|
||||
return db.query(cls).filter(
|
||||
query = db.query(cls).filter(
|
||||
cls.status == status
|
||||
).order_by(
|
||||
cls.date.desc()
|
||||
).offset((page - 1) * count).limit(count).all()
|
||||
)
|
||||
else:
|
||||
return db.query(cls).order_by(
|
||||
query = db.query(cls).order_by(
|
||||
cls.date.desc()
|
||||
).offset((page - 1) * count).limit(count).all()
|
||||
)
|
||||
|
||||
# 当count为负数时,不限制页数查询所有
|
||||
if count >= 0:
|
||||
query = query.offset((page - 1) * count).limit(count)
|
||||
|
||||
return query.all()
|
||||
|
||||
@classmethod
|
||||
@async_db_query
|
||||
async def async_list_by_page(cls, db: AsyncSession, page: Optional[int] = 1, count: Optional[int] = 30,
|
||||
status: bool = None):
|
||||
if status is not None:
|
||||
result = await db.execute(
|
||||
select(cls).filter(
|
||||
cls.status == status
|
||||
).order_by(
|
||||
cls.date.desc()
|
||||
).offset((page - 1) * count).limit(count)
|
||||
query = select(cls).filter(
|
||||
cls.status == status
|
||||
).order_by(
|
||||
cls.date.desc()
|
||||
)
|
||||
else:
|
||||
result = await db.execute(
|
||||
select(cls).order_by(
|
||||
cls.date.desc()
|
||||
).offset((page - 1) * count).limit(count)
|
||||
query = select(cls).order_by(
|
||||
cls.date.desc()
|
||||
)
|
||||
|
||||
# 当count为负数时,不限制页数查询所有
|
||||
if count >= 0:
|
||||
query = query.offset((page - 1) * count).limit(count)
|
||||
|
||||
result = await db.execute(query)
|
||||
return result.scalars().all()
|
||||
|
||||
@classmethod
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
from sqlalchemy import Boolean, Column, Integer, JSON, Sequence, String, select
|
||||
from sqlalchemy import Boolean, Column, JSON, String, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.db import Base, db_query, db_update, async_db_query, async_db_update
|
||||
from app.db import Base, db_query, db_update, async_db_query, async_db_update, get_id_column
|
||||
|
||||
|
||||
class User(Base):
|
||||
@@ -10,7 +10,7 @@ class User(Base):
|
||||
用户表
|
||||
"""
|
||||
# ID
|
||||
id = Column(Integer, Sequence('id'), primary_key=True, index=True)
|
||||
id = get_id_column()
|
||||
# 用户名,唯一值
|
||||
name = Column(String, index=True, nullable=False)
|
||||
# 邮箱
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
from sqlalchemy import Column, Integer, String, Sequence, UniqueConstraint, Index, JSON
|
||||
from sqlalchemy import Column, String, UniqueConstraint, Index, JSON
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.db import db_query, db_update, Base
|
||||
from app.db import db_query, db_update, get_id_column, Base
|
||||
|
||||
|
||||
class UserConfig(Base):
|
||||
"""
|
||||
用户配置表
|
||||
"""
|
||||
id = Column(Integer, Sequence('id'), primary_key=True, index=True)
|
||||
id = get_id_column()
|
||||
# 用户名
|
||||
username = Column(String, index=True)
|
||||
# 配置键
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import Column, Integer, JSON, Sequence, String, and_, or_, select
|
||||
from sqlalchemy import Column, Integer, JSON, String, and_, or_, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.db import Base, db_query, db_update, async_db_query, async_db_update
|
||||
from app.db import Base, db_query, get_id_column, db_update, async_db_query, async_db_update
|
||||
|
||||
|
||||
class Workflow(Base):
|
||||
@@ -12,7 +12,7 @@ class Workflow(Base):
|
||||
工作流表
|
||||
"""
|
||||
# ID
|
||||
id = Column(Integer, Sequence('id'), primary_key=True, index=True)
|
||||
id = get_id_column()
|
||||
# 名称
|
||||
name = Column(String, index=True, nullable=False)
|
||||
# 描述
|
||||
|
||||
@@ -108,7 +108,7 @@ class SubscribeOper(DbOper):
|
||||
"""
|
||||
获取订阅
|
||||
"""
|
||||
return await Subscribe.async_get(self._db, id=sid)
|
||||
return await Subscribe.async_get(self._db, rid=sid)
|
||||
|
||||
def list(self, state: Optional[str] = None) -> List[Subscribe]:
|
||||
"""
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
import uuid
|
||||
from typing import Callable, Any, Optional
|
||||
|
||||
from cf_clearance import sync_cf_retry, sync_stealth
|
||||
from playwright.sync_api import sync_playwright, Page
|
||||
|
||||
from app.core.config import settings
|
||||
from app.log import logger
|
||||
from app.utils.http import RequestUtils, cookie_parse
|
||||
|
||||
|
||||
class PlaywrightHelper:
|
||||
@@ -19,13 +22,120 @@ class PlaywrightHelper:
|
||||
page.goto(url)
|
||||
return sync_cf_retry(page)[0]
|
||||
|
||||
@staticmethod
|
||||
def __fs_cookie_str(cookies: list) -> str:
|
||||
if not cookies:
|
||||
return ""
|
||||
return "; ".join([f"{c.get('name')}={c.get('value')}" for c in cookies if c and c.get('name') is not None])
|
||||
|
||||
@staticmethod
|
||||
def __flaresolverr_request(url: str,
|
||||
cookies: Optional[str] = None,
|
||||
proxy_config: Optional[dict] = None,
|
||||
timeout: Optional[int] = 60) -> Optional[dict]:
|
||||
"""
|
||||
调用 FlareSolverr 解决 Cloudflare 并返回 solution 结果
|
||||
参考: https://github.com/FlareSolverr/FlareSolverr
|
||||
"""
|
||||
if not settings.FLARESOLVERR_URL:
|
||||
logger.warn("未配置 FLARESOLVERR_URL,无法使用 FlareSolverr")
|
||||
return None
|
||||
|
||||
fs_api = settings.FLARESOLVERR_URL.rstrip("/") + "/v1"
|
||||
session_id = None
|
||||
|
||||
try:
|
||||
# 检查是否需要代理认证
|
||||
need_proxy_auth = (proxy_config and proxy_config.get("server") and
|
||||
(proxy_config.get("username") or proxy_config.get("password")))
|
||||
|
||||
if need_proxy_auth:
|
||||
# 使用 session 模式支持代理认证
|
||||
logger.debug("检测到flaresolverr代理需要认证,使用 session 模式")
|
||||
|
||||
# 1. 创建会话
|
||||
session_id = str(uuid.uuid4())
|
||||
create_payload: dict = {
|
||||
"cmd": "sessions.create",
|
||||
"session": session_id
|
||||
}
|
||||
|
||||
# 添加代理配置到会话创建请求
|
||||
if proxy_config and proxy_config.get("server"):
|
||||
proxy_payload: dict = {"url": proxy_config["server"]}
|
||||
if proxy_config.get("username"):
|
||||
proxy_payload["username"] = proxy_config["username"]
|
||||
if proxy_config.get("password"):
|
||||
proxy_payload["password"] = proxy_config["password"]
|
||||
create_payload["proxy"] = proxy_payload
|
||||
|
||||
# 创建会话
|
||||
create_result = RequestUtils(content_type="application/json",
|
||||
timeout=timeout or 60).post_json(url=fs_api, json=create_payload)
|
||||
if not create_result or create_result.get("status") != "ok":
|
||||
logger.error(
|
||||
f"创建 FlareSolverr 会话失败: {create_result.get('message') if create_result else '无响应'}")
|
||||
return None
|
||||
|
||||
# 2. 使用会话发送请求
|
||||
request_payload = {
|
||||
"cmd": "request.get",
|
||||
"url": url,
|
||||
"session": session_id,
|
||||
"maxTimeout": int(timeout or 60) * 1000,
|
||||
}
|
||||
else:
|
||||
# 使用普通模式(无代理认证)
|
||||
request_payload = {
|
||||
"cmd": "request.get",
|
||||
"url": url,
|
||||
"maxTimeout": int(timeout or 60) * 1000,
|
||||
}
|
||||
# 添加代理配置(仅 URL,无认证)
|
||||
if proxy_config and proxy_config.get("server"):
|
||||
request_payload["proxy"] = {"url": proxy_config["server"]}
|
||||
|
||||
# 将 cookies 以数组形式传递给 FlareSolverr
|
||||
if cookies:
|
||||
try:
|
||||
request_payload["cookies"] = cookie_parse(cookies, array=True)
|
||||
except Exception as e:
|
||||
logger.debug(f"解析 cookies 失败,忽略: {str(e)}")
|
||||
|
||||
# 发送请求
|
||||
data = RequestUtils(content_type="application/json",
|
||||
timeout=timeout or 60).post_json(url=fs_api, json=request_payload)
|
||||
if not data:
|
||||
logger.error("FlareSolverr 返回空响应")
|
||||
return None
|
||||
if data.get("status") != "ok":
|
||||
logger.error(f"FlareSolverr 调用失败: {data.get('message')}")
|
||||
return None
|
||||
return data.get("solution")
|
||||
except Exception as e:
|
||||
logger.error(f"调用 FlareSolverr 失败: {str(e)}")
|
||||
return None
|
||||
finally:
|
||||
# 清理会话
|
||||
if session_id:
|
||||
try:
|
||||
destroy_payload = {
|
||||
"cmd": "sessions.destroy",
|
||||
"session": session_id
|
||||
}
|
||||
RequestUtils(content_type="application/json",
|
||||
timeout=10).post_json(url=fs_api, json=destroy_payload)
|
||||
logger.debug(f"已清理 FlareSolverr 会话: {session_id}")
|
||||
except Exception as e:
|
||||
logger.warning(f"清理 FlareSolverr 会话失败: {str(e)}")
|
||||
|
||||
def action(self, url: str,
|
||||
callback: Callable,
|
||||
cookies: Optional[str] = None,
|
||||
ua: Optional[str] = None,
|
||||
proxies: Optional[dict] = None,
|
||||
headless: Optional[bool] = False,
|
||||
timeout: Optional[int] = 30) -> Any:
|
||||
timeout: Optional[int] = 60) -> Any:
|
||||
"""
|
||||
访问网页,接收Page对象并执行操作
|
||||
:param url: 网页地址
|
||||
@@ -43,15 +153,30 @@ class PlaywrightHelper:
|
||||
context = None
|
||||
page = None
|
||||
try:
|
||||
# 如果配置使用 FlareSolverr,先通过其获取清除后的 cookies 与 UA
|
||||
fs_cookie_header = None
|
||||
fs_ua = None
|
||||
if settings.BROWSER_EMULATION == "flaresolverr":
|
||||
solution = self.__flaresolverr_request(url=url, cookies=cookies,
|
||||
proxy_config=proxies, timeout=timeout)
|
||||
if solution:
|
||||
fs_cookie_header = self.__fs_cookie_str(solution.get("cookies", []))
|
||||
fs_ua = solution.get("userAgent")
|
||||
|
||||
browser = playwright[self.browser_type].launch(headless=headless)
|
||||
context = browser.new_context(user_agent=ua, proxy=proxies)
|
||||
context = browser.new_context(user_agent=fs_ua or ua, proxy=proxies)
|
||||
page = context.new_page()
|
||||
|
||||
if cookies:
|
||||
page.set_extra_http_headers({"cookie": cookies})
|
||||
# 优先使用 FlareSolverr 返回,其次使用入参
|
||||
merged_cookie = fs_cookie_header or cookies
|
||||
if merged_cookie:
|
||||
page.set_extra_http_headers({"cookie": merged_cookie})
|
||||
|
||||
if not self.__pass_cloudflare(url, page):
|
||||
logger.warn("cloudflare challenge fail!")
|
||||
if settings.BROWSER_EMULATION == "playwright":
|
||||
if not self.__pass_cloudflare(url, page):
|
||||
logger.warn("cloudflare challenge fail!")
|
||||
else:
|
||||
page.goto(url)
|
||||
page.wait_for_load_state("networkidle", timeout=timeout * 1000)
|
||||
|
||||
# 回调函数
|
||||
@@ -76,7 +201,7 @@ class PlaywrightHelper:
|
||||
ua: Optional[str] = None,
|
||||
proxies: Optional[dict] = None,
|
||||
headless: Optional[bool] = False,
|
||||
timeout: Optional[int] = 20) -> Optional[str]:
|
||||
timeout: Optional[int] = 60) -> Optional[str]:
|
||||
"""
|
||||
获取网页源码
|
||||
:param url: 网页地址
|
||||
@@ -87,6 +212,15 @@ class PlaywrightHelper:
|
||||
:param timeout: 超时时间
|
||||
"""
|
||||
source = None
|
||||
# 如果配置为 FlareSolverr,则直接调用获取页面源码
|
||||
if settings.BROWSER_EMULATION == "flaresolverr":
|
||||
try:
|
||||
solution = self.__flaresolverr_request(url=url, cookies=cookies,
|
||||
proxy_config=proxies, timeout=timeout)
|
||||
if solution:
|
||||
return solution.get("response")
|
||||
except Exception as e:
|
||||
logger.error(f"FlareSolverr 获取源码失败: {str(e)}")
|
||||
try:
|
||||
with sync_playwright() as playwright:
|
||||
browser = None
|
||||
@@ -121,13 +255,3 @@ class PlaywrightHelper:
|
||||
logger.error(f"Playwright初始化失败: {str(e)}")
|
||||
|
||||
return source
|
||||
|
||||
|
||||
# 示例用法
|
||||
if __name__ == "__main__":
|
||||
utils = PlaywrightHelper()
|
||||
test_url = "https://piggo.me"
|
||||
test_cookies = ""
|
||||
test_user_agent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.63 Safari/537.36"
|
||||
source_code = utils.get_page_source(test_url, cookies=test_cookies, ua=test_user_agent)
|
||||
print(source_code)
|
||||
|
||||
@@ -74,7 +74,8 @@ class CookieHelper:
|
||||
username: str,
|
||||
password: str,
|
||||
two_step_code: Optional[str] = None,
|
||||
proxies: Optional[dict] = None) -> Tuple[Optional[str], Optional[str], str]:
|
||||
proxies: Optional[dict] = None,
|
||||
timeout: int = None) -> Tuple[Optional[str], Optional[str], str]:
|
||||
"""
|
||||
获取站点cookie和ua
|
||||
:param url: 站点地址
|
||||
@@ -82,6 +83,7 @@ class CookieHelper:
|
||||
:param password: 密码
|
||||
:param two_step_code: 二步验证码或密钥
|
||||
:param proxies: 代理
|
||||
:param timeout: 超时时间
|
||||
:return: cookie、ua、message
|
||||
"""
|
||||
|
||||
@@ -230,7 +232,8 @@ class CookieHelper:
|
||||
|
||||
return PlaywrightHelper().action(url=url,
|
||||
callback=__page_handler,
|
||||
proxies=proxies)
|
||||
proxies=proxies,
|
||||
timeout=timeout)
|
||||
|
||||
@staticmethod
|
||||
def __get_captcha_text(cookie: str, ua: str, code_url: str) -> str:
|
||||
|
||||
@@ -10,9 +10,9 @@ from datetime import datetime
from typing import Any, Literal, Optional, List, Dict, Union
from typing import Callable

from cachetools import TTLCache
from jinja2 import Template

from app.core.cache import TTLCache
from app.core.config import global_vars
from app.core.context import MediaInfo, TorrentInfo
from app.core.meta import MetaBase
@@ -307,7 +307,7 @@ class TemplateHelper(metaclass=SingletonClass):

def __init__(self):
self.builder = TemplateContextBuilder()
self.cache = TTLCache(maxsize=100, ttl=600)
self.cache = TTLCache(region="notification", maxsize=100, ttl=600)

@staticmethod
def _generate_cache_key(cuntent: Union[str, dict]) -> str:
@@ -471,6 +471,13 @@ class TemplateHelper(metaclass=SingletonClass):
except json.JSONDecodeError:
return rendered

def close(self):
"""
清理资源
"""
if self.cache:
self.cache.close()


class MessageTemplateHelper:
"""
@@ -704,6 +711,7 @@ class MessageQueueManager(metaclass=SingletonClass):
停止队列管理器
"""
self._running = False
logger.info("正在停止消息队列...")
self.thread.join()


@@ -765,3 +773,13 @@ class MessageHelper(metaclass=Singleton):
if not self.user_queue.empty():
return self.user_queue.get(block=False)
return None


def stop_message():
"""
停止消息服务
"""
# 停止消息队列
MessageQueueManager().stop()
# 关闭消息渲染器
TemplateHelper().close()

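For reference, a minimal sketch of how the region-aware cache introduced above can be exercised on its own. The constructor arguments and the close() call are taken from this diff; treat the exact API surface as an assumption rather than documented behaviour.

    from app.core.cache import TTLCache

    cache = TTLCache(region="notification", maxsize=100, ttl=600)
    cache.set("last_rendered", {"title": "demo"})   # entry expires after ttl seconds
    print(cache.get("last_rendered"))
    cache.close()                                   # release backend resources, as TemplateHelper.close() does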
@@ -1,16 +1,18 @@
import importlib
import io
import json
import shutil
import site
import sys
import traceback
import zipfile
from pathlib import Path
from typing import Dict, List, Optional, Tuple, Set
from typing import Dict, List, Optional, Tuple, Set, Callable, Awaitable

import aiofiles
import aioshutil
import httpx
from aiopath import AsyncPath
from anyio import Path as AsyncPath
from packaging.specifiers import SpecifierSet, InvalidSpecifier
from packaging.version import Version, InvalidVersion
from pkg_resources import Requirement, working_set
@@ -234,62 +236,32 @@ class PluginHelper(metaclass=WeakSingleton):
|
||||
else:
|
||||
logger.debug(f"{pid} 从 package.{package_version}.json 中找到适用于当前版本的插件")
|
||||
|
||||
# 2. 获取插件文件列表(包括 requirements.txt)
|
||||
file_list, msg = self.__get_file_list(pid.lower(), user_repo, package_version)
|
||||
if not file_list:
|
||||
return False, msg
|
||||
# 2. 决定安装方式(release 或 文件列表)并执行统一安装流程
|
||||
meta = self.__get_plugin_meta(pid, repo_url, package_version)
|
||||
# 是否release打包
|
||||
is_release = meta.get("release")
|
||||
# 插件版本号
|
||||
plugin_version = meta.get("version")
|
||||
if is_release:
|
||||
# 使用 插件ID_插件版本号 作为 Release tag
|
||||
if not plugin_version:
|
||||
return False, f"未在插件清单中找到 {pid} 的版本号,无法进行 Release 安装"
|
||||
# 拼接 release_tag
|
||||
release_tag = f"{pid}_v{plugin_version}"
|
||||
|
||||
# 3. 删除旧的插件目录,如果不强制安装则备份
|
||||
backup_dir = None
|
||||
if not force_install:
|
||||
backup_dir = self.__backup_plugin(pid.lower())
|
||||
# 使用 release 进行安装
|
||||
def prepare_release() -> Tuple[bool, str]:
|
||||
return self.__install_from_release(
|
||||
pid.lower(), user_repo, release_tag
|
||||
)
|
||||
|
||||
self.__remove_old_plugin(pid.lower())
|
||||
|
||||
# 4. 查找并安装 requirements.txt 中的依赖,确保插件环境的依赖尽可能完整。依赖安装可能失败且不影响插件安装,目前只记录日志
|
||||
requirements_file_info = next((f for f in file_list if f.get("name") == "requirements.txt"), None)
|
||||
if requirements_file_info:
|
||||
logger.debug(f"{pid} 发现 requirements.txt,提前下载并预安装依赖")
|
||||
success, message = self.__download_and_install_requirements(requirements_file_info,
|
||||
pid, user_repo)
|
||||
if not success:
|
||||
logger.debug(f"{pid} 依赖预安装失败:{message}")
|
||||
else:
|
||||
logger.debug(f"{pid} 依赖预安装成功")
|
||||
|
||||
# 5. 下载插件的其他文件
|
||||
logger.info(f"{pid} 准备开始下载插件文件")
|
||||
success, message = self.__download_files(pid.lower(), file_list, user_repo, package_version, True)
|
||||
if not success:
|
||||
logger.error(f"{pid} 下载插件文件失败:{message}")
|
||||
if backup_dir:
|
||||
self.__restore_plugin(pid.lower(), backup_dir)
|
||||
logger.warning(f"{pid} 插件安装失败,已还原备份插件")
|
||||
else:
|
||||
self.__remove_old_plugin(pid.lower())
|
||||
logger.warning(f"{pid} 已清理对应插件目录,请尝试重新安装")
|
||||
|
||||
return False, message
|
||||
return self.__install_flow_sync(pid.lower(), force_install, prepare_release)
|
||||
else:
|
||||
logger.info(f"{pid} 下载插件文件成功")
|
||||
# 如果 release_tag 不存在,说明插件没有发布版本,使用文件列表方式安装
|
||||
def prepare_filelist() -> Tuple[bool, str]:
|
||||
return self.__prepare_content_via_filelist_sync(pid.lower(), user_repo, package_version)
|
||||
|
||||
# 6. 插件文件安装成功后,再次尝试安装依赖,避免因为遗漏依赖导致的插件运行问题,目前依旧只记录日志
|
||||
dependencies_exist, success, message = self.__install_dependencies_if_required(pid)
|
||||
if dependencies_exist:
|
||||
if not success:
|
||||
logger.error(f"{pid} 依赖安装失败:{message}")
|
||||
if backup_dir:
|
||||
self.__restore_plugin(pid.lower(), backup_dir)
|
||||
logger.warning(f"{pid} 插件安装失败,已还原备份插件")
|
||||
else:
|
||||
self.__remove_old_plugin(pid.lower())
|
||||
logger.warning(f"{pid} 已清理对应插件目录,请尝试重新安装")
|
||||
else:
|
||||
logger.info(f"{pid} 依赖安装成功")
|
||||
|
||||
# 插件安装成功后,统计安装信息
|
||||
self.install_reg(pid)
|
||||
return True, ""
|
||||
return self.__install_flow_sync(pid.lower(), force_install, prepare_filelist)
|
||||
|
||||
def __get_file_list(self, pid: str, user_repo: str, package_version: Optional[str] = None) -> \
|
||||
Tuple[Optional[list], Optional[str]]:
|
||||
@@ -561,6 +533,126 @@ class PluginHelper(metaclass=WeakSingleton):
|
||||
logger.error(f"[GitHub] 所有策略均请求失败,URL: {url},请检查网络连接或 GitHub 配置")
|
||||
return None
|
||||
|
||||
def __get_plugin_meta(self, pid: str, repo_url: str,
|
||||
package_version: Optional[str]) -> dict:
|
||||
try:
|
||||
plugins = (
|
||||
self.get_plugins(repo_url) if not package_version
|
||||
else self.get_plugins(repo_url, package_version)
|
||||
) or {}
|
||||
meta = plugins.get(pid)
|
||||
return meta if isinstance(meta, dict) else {}
|
||||
except Exception as e:
|
||||
logger.error(f"获取插件 {pid} 元数据失败:{e}")
|
||||
return {}
|
||||
|
||||
def __install_flow_sync(self, pid_lower: str, force_install: bool,
|
||||
prepare_content: Callable[[], Tuple[bool, str]]) -> Tuple[bool, str]:
|
||||
"""
|
||||
同步安装统一流程:备份→清理→准备内容→安装依赖→上报
|
||||
prepare_content 负责把插件文件放到 app/plugins/{pid}
|
||||
"""
|
||||
backup_dir = None
|
||||
if not force_install:
|
||||
backup_dir = self.__backup_plugin(pid_lower)
|
||||
|
||||
self.__remove_old_plugin(pid_lower)
|
||||
|
||||
success, message = prepare_content()
|
||||
if not success:
|
||||
logger.error(f"{pid_lower} 准备插件内容失败:{message}")
|
||||
if backup_dir:
|
||||
self.__restore_plugin(pid_lower, backup_dir)
|
||||
logger.warning(f"{pid_lower} 插件安装失败,已还原备份插件")
|
||||
else:
|
||||
self.__remove_old_plugin(pid_lower)
|
||||
logger.warning(f"{pid_lower} 已清理对应插件目录,请尝试重新安装")
|
||||
return False, message
|
||||
|
||||
dependencies_exist, dep_ok, dep_msg = self.__install_dependencies_if_required(pid_lower)
|
||||
if dependencies_exist and not dep_ok:
|
||||
logger.error(f"{pid_lower} 依赖安装失败:{dep_msg}")
|
||||
if backup_dir:
|
||||
self.__restore_plugin(pid_lower, backup_dir)
|
||||
logger.warning(f"{pid_lower} 插件安装失败,已还原备份插件")
|
||||
else:
|
||||
self.__remove_old_plugin(pid_lower)
|
||||
logger.warning(f"{pid_lower} 已清理对应插件目录,请尝试重新安装")
|
||||
return False, dep_msg
|
||||
|
||||
self.install_reg(pid_lower)
|
||||
return True, ""
|
||||
|
||||
def __install_from_release(self, pid: str, user_repo: str, release_tag: str) -> Tuple[bool, str]:
|
||||
"""
|
||||
通过 GitHub Release 资产文件安装插件。
|
||||
规范:release 中存在名为 "{pid}_v{version}.zip" 的资产,zip 根即插件文件;
|
||||
将其全部解压到 app/plugins/{pid}
|
||||
"""
|
||||
# 拼接资产文件名
|
||||
asset_name = f"{release_tag.lower()}.zip"
|
||||
|
||||
release_api = f"https://api.github.com/repos/{user_repo}/releases/tags/{release_tag}"
|
||||
rel_res = self.__request_with_fallback(
|
||||
release_api,
|
||||
headers=settings.REPO_GITHUB_HEADERS(repo=user_repo),
|
||||
timeout=30,
|
||||
is_api=True,
|
||||
)
|
||||
if rel_res is None or rel_res.status_code != 200:
|
||||
return False, f"获取 Release 信息失败:{rel_res.status_code if rel_res else '连接失败'}"
|
||||
|
||||
try:
|
||||
rel_json = rel_res.json()
|
||||
assets = rel_json.get("assets") or []
|
||||
asset = next((a for a in assets if a.get("name") == asset_name), None)
|
||||
if not asset:
|
||||
return False, f"未找到资产文件:{asset_name}"
|
||||
download_url = asset.get("browser_download_url")
|
||||
if not download_url:
|
||||
return False, "资产缺少下载地址"
|
||||
except Exception as e:
|
||||
logger.error(f"解析 Release 信息失败:{e}")
|
||||
return False, f"解析 Release 信息失败:{e}"
|
||||
|
||||
res = self.__request_with_fallback(download_url, headers=settings.REPO_GITHUB_HEADERS(repo=user_repo))
|
||||
if res is None or res.status_code != 200:
|
||||
return False, f"下载资产失败:{res.status_code if res else '连接失败'}"
|
||||
|
||||
try:
|
||||
with zipfile.ZipFile(io.BytesIO(res.content)) as zf:
|
||||
namelist = zf.namelist()
|
||||
if not namelist:
|
||||
return False, "压缩包内容为空"
|
||||
# 若所有条目均在同一顶层目录下(如 pid/),则剥离这一层,避免出现双层目录
|
||||
names_with_slash = [n for n in namelist if '/' in n]
|
||||
base_prefix = ''
|
||||
if names_with_slash and len(names_with_slash) == len(namelist):
|
||||
first_seg = names_with_slash[0].split('/')[0]
|
||||
if all(n.startswith(first_seg + '/') for n in namelist):
|
||||
base_prefix = first_seg + '/'
|
||||
|
||||
dest_base = Path(settings.ROOT_PATH) / "app" / "plugins" / pid.lower()
|
||||
wrote_any = False
|
||||
for name in namelist:
|
||||
rel_path = name[len(base_prefix):]
|
||||
if not rel_path:
|
||||
continue
|
||||
if rel_path.endswith('/'):
|
||||
(dest_base / rel_path.rstrip('/')).mkdir(parents=True, exist_ok=True)
|
||||
continue
|
||||
dest_path = dest_base / rel_path
|
||||
dest_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
with zf.open(name, 'r') as src, open(dest_path, 'wb') as dst:
|
||||
dst.write(src.read())
|
||||
wrote_any = True
|
||||
if not wrote_any:
|
||||
return False, "压缩包中无可写入文件"
|
||||
return True, ""
|
||||
except Exception as e:
|
||||
logger.error(f"解压 Release 压缩包失败:{e}")
|
||||
return False, f"解压 Release 压缩包失败:{e}"
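The zip handling above strips a single shared top-level folder before writing files, so a release archive packed either as flat files or as "pid/..." lands in the same place. A small standalone sketch of that logic (the function name and destination path are illustrative only, not part of PluginHelper):

    import io
    import zipfile
    from pathlib import Path


    def extract_zip_stripping_root(zip_bytes: bytes, dest_base: Path) -> bool:
        """Unpack zip_bytes into dest_base, dropping one common top-level folder if every entry shares it."""
        with zipfile.ZipFile(io.BytesIO(zip_bytes)) as zf:
            namelist = zf.namelist()
            if not namelist:
                return False
            # Detect a shared top-level directory such as "myplugin/"
            base_prefix = ""
            names_with_slash = [n for n in namelist if "/" in n]
            if names_with_slash and len(names_with_slash) == len(namelist):
                first_seg = names_with_slash[0].split("/")[0]
                if all(n.startswith(first_seg + "/") for n in namelist):
                    base_prefix = first_seg + "/"
            wrote_any = False
            for name in namelist:
                rel_path = name[len(base_prefix):]
                if not rel_path:
                    continue
                if rel_path.endswith("/"):
                    (dest_base / rel_path.rstrip("/")).mkdir(parents=True, exist_ok=True)
                    continue
                dest_path = dest_base / rel_path
                dest_path.parent.mkdir(parents=True, exist_ok=True)
                dest_path.write_bytes(zf.read(name))
                wrote_any = True
            return wrote_any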
|
||||
|
||||
def find_missing_dependencies(self) -> List[str]:
|
||||
"""
|
||||
收集所有需要安装或更新的依赖项
|
||||
@@ -1040,8 +1132,7 @@ class PluginHelper(metaclass=WeakSingleton):
|
||||
|
||||
return str(backup_dir) if await backup_dir.exists() else None
|
||||
|
||||
@staticmethod
|
||||
async def __async_restore_plugin(pid: str, backup_dir: str):
|
||||
async def __async_restore_plugin(self, pid: str, backup_dir: str):
|
||||
"""
|
||||
异步还原旧插件目录
|
||||
:param pid: 插件 ID
|
||||
@@ -1054,7 +1145,7 @@ class PluginHelper(metaclass=WeakSingleton):
|
||||
|
||||
backup_path = AsyncPath(backup_dir)
|
||||
if await backup_path.exists():
|
||||
await PluginHelper._async_copytree(backup_path, plugin_dir)
|
||||
await self._async_copytree(src=backup_path, dst=plugin_dir)
|
||||
logger.debug(f"{pid} 已还原插件目录 {plugin_dir}")
|
||||
await aioshutil.rmtree(backup_path, ignore_errors=True)
|
||||
logger.debug(f"{pid} 已删除备份目录 {backup_dir}")
|
||||
@@ -1291,59 +1382,190 @@ class PluginHelper(metaclass=WeakSingleton):
|
||||
else:
|
||||
logger.debug(f"{pid} 从 package.{package_version}.json 中找到适用于当前版本的插件")
|
||||
|
||||
# 2. 获取插件文件列表(包括 requirements.txt)
|
||||
file_list, msg = await self.__async_get_file_list(pid.lower(), user_repo, package_version)
|
||||
if not file_list:
|
||||
return False, msg
|
||||
# 2. 统一异步安装流程(release 或 文件列表)
|
||||
meta = await self.__async_get_plugin_meta(pid, repo_url, package_version)
|
||||
# 是否release打包
|
||||
is_release = meta.get("release")
|
||||
# 插件版本号
|
||||
plugin_version = meta.get("version")
|
||||
if is_release:
|
||||
# 使用 插件ID_插件版本号 作为 Release tag
|
||||
if not plugin_version:
|
||||
return False, f"未在插件清单中找到 {pid} 的版本号,无法进行 Release 安装"
|
||||
# 拼接 release_tag
|
||||
release_tag = f"{pid}_v{plugin_version}"
|
||||
|
||||
# 3. 删除旧的插件目录,如果不强制安装则备份
|
||||
# 使用 release 进行安装
|
||||
async def prepare_release() -> Tuple[bool, str]:
|
||||
return await self.__async_install_from_release(
|
||||
pid.lower(), user_repo, release_tag
|
||||
)
|
||||
|
||||
return await self.__install_flow_async(pid.lower(), force_install, prepare_release)
|
||||
else:
|
||||
# 如果没有 release_tag,则使用文件列表安装方式
|
||||
async def prepare_filelist() -> Tuple[bool, str]:
|
||||
return await self.__prepare_content_via_filelist_async(pid.lower(), user_repo, package_version)
|
||||
|
||||
return await self.__install_flow_async(pid.lower(), force_install, prepare_filelist)
|
||||
|
||||
async def __async_get_plugin_meta(self, pid: str, repo_url: str,
|
||||
package_version: Optional[str]) -> dict:
|
||||
try:
|
||||
plugins = (
|
||||
await self.async_get_plugins(repo_url) if not package_version
|
||||
else await self.async_get_plugins(repo_url, package_version)
|
||||
) or {}
|
||||
meta = plugins.get(pid)
|
||||
return meta if isinstance(meta, dict) else {}
|
||||
except Exception as e:
|
||||
logger.warn(f"获取插件 {pid} 元数据失败:{e}")
|
||||
return {}
|
||||
|
||||
async def __install_flow_async(self, pid_lower: str, force_install: bool,
|
||||
prepare_content: Callable[[], Awaitable[Tuple[bool, str]]]) -> Tuple[bool, str]:
|
||||
"""
|
||||
异步安装流程,处理插件内容准备、依赖安装和注册
|
||||
"""
|
||||
backup_dir = None
|
||||
if not force_install:
|
||||
backup_dir = await self.__async_backup_plugin(pid.lower())
|
||||
backup_dir = await self.__async_backup_plugin(pid_lower)
|
||||
|
||||
await self.__async_remove_old_plugin(pid.lower())
|
||||
await self.__async_remove_old_plugin(pid_lower)
|
||||
|
||||
# 4. 查找并安装 requirements.txt 中的依赖,确保插件环境的依赖尽可能完整。依赖安装可能失败且不影响插件安装,目前只记录日志
|
||||
success, message = await prepare_content()
|
||||
if not success:
|
||||
logger.error(f"{pid_lower} 准备插件内容失败:{message}")
|
||||
if backup_dir:
|
||||
await self.__async_restore_plugin(pid_lower, backup_dir)
|
||||
logger.warning(f"{pid_lower} 插件安装失败,已还原备份插件")
|
||||
else:
|
||||
await self.__async_remove_old_plugin(pid_lower)
|
||||
logger.warning(f"{pid_lower} 已清理对应插件目录,请尝试重新安装")
|
||||
return False, message
|
||||
|
||||
dependencies_exist, dep_ok, dep_msg = await self.__async_install_dependencies_if_required(pid_lower)
|
||||
if dependencies_exist and not dep_ok:
|
||||
logger.error(f"{pid_lower} 依赖安装失败:{dep_msg}")
|
||||
if backup_dir:
|
||||
await self.__async_restore_plugin(pid_lower, backup_dir)
|
||||
logger.warning(f"{pid_lower} 插件安装失败,已还原备份插件")
|
||||
else:
|
||||
await self.__async_remove_old_plugin(pid_lower)
|
||||
logger.warning(f"{pid_lower} 已清理对应插件目录,请尝试重新安装")
|
||||
return False, dep_msg
|
||||
|
||||
await self.async_install_reg(pid_lower)
|
||||
return True, ""
|
||||
|
||||
def __prepare_content_via_filelist_sync(self, pid_lower: str, user_repo: str,
|
||||
package_version: Optional[str]) -> Tuple[bool, str]:
|
||||
"""
|
||||
同步准备插件内容,通过文件列表获取插件文件和依赖
|
||||
"""
|
||||
file_list, msg = self.__get_file_list(pid_lower, user_repo, package_version)
|
||||
if not file_list:
|
||||
return False, msg
|
||||
requirements_file_info = next((f for f in file_list if f.get("name") == "requirements.txt"), None)
|
||||
if requirements_file_info:
|
||||
logger.debug(f"{pid} 发现 requirements.txt,提前下载并预安装依赖")
|
||||
success, message = await self.__async_download_and_install_requirements(requirements_file_info,
|
||||
pid, user_repo)
|
||||
if not success:
|
||||
logger.debug(f"{pid} 依赖预安装失败:{message}")
|
||||
ok, m = self.__download_and_install_requirements(requirements_file_info, pid_lower, user_repo)
|
||||
if not ok:
|
||||
logger.debug(f"{pid_lower} 依赖预安装失败:{m}")
|
||||
else:
|
||||
logger.debug(f"{pid} 依赖预安装成功")
|
||||
|
||||
# 5. 下载插件的其他文件
|
||||
logger.info(f"{pid} 准备开始下载插件文件")
|
||||
success, message = await self.__async_download_files(pid.lower(), file_list, user_repo, package_version, True)
|
||||
if not success:
|
||||
logger.error(f"{pid} 下载插件文件失败:{message}")
|
||||
if backup_dir:
|
||||
await self.__async_restore_plugin(pid.lower(), backup_dir)
|
||||
logger.warning(f"{pid} 插件安装失败,已还原备份插件")
|
||||
else:
|
||||
await self.__async_remove_old_plugin(pid.lower())
|
||||
logger.warning(f"{pid} 已清理对应插件目录,请尝试重新安装")
|
||||
|
||||
return False, message
|
||||
else:
|
||||
logger.info(f"{pid} 下载插件文件成功")
|
||||
|
||||
# 6. 插件文件安装成功后,再次尝试安装依赖,避免因为遗漏依赖导致的插件运行问题,目前依旧只记录日志
|
||||
dependencies_exist, success, message = await self.__async_install_dependencies_if_required(pid)
|
||||
if dependencies_exist:
|
||||
if not success:
|
||||
logger.error(f"{pid} 依赖安装失败:{message}")
|
||||
if backup_dir:
|
||||
await self.__async_restore_plugin(pid.lower(), backup_dir)
|
||||
logger.warning(f"{pid} 插件安装失败,已还原备份插件")
|
||||
else:
|
||||
await self.__async_remove_old_plugin(pid.lower())
|
||||
logger.warning(f"{pid} 已清理对应插件目录,请尝试重新安装")
|
||||
else:
|
||||
logger.info(f"{pid} 依赖安装成功")
|
||||
|
||||
# 插件安装成功后,统计安装信息
|
||||
await self.async_install_reg(pid)
|
||||
logger.debug(f"{pid_lower} 依赖预安装成功")
|
||||
ok, m = self.__download_files(pid_lower, file_list, user_repo, package_version, True)
|
||||
if not ok:
|
||||
return False, m
|
||||
return True, ""
|
||||
|
||||
async def __prepare_content_via_filelist_async(self, pid_lower: str, user_repo: str,
|
||||
package_version: Optional[str]) -> Tuple[bool, str]:
|
||||
"""
|
||||
异步准备插件内容,通过文件列表获取插件文件和依赖
|
||||
"""
|
||||
file_list, msg = await self.__async_get_file_list(pid_lower, user_repo, package_version)
|
||||
if not file_list:
|
||||
return False, msg
|
||||
requirements_file_info = next((f for f in file_list if f.get("name") == "requirements.txt"), None)
|
||||
if requirements_file_info:
|
||||
ok, m = await self.__async_download_and_install_requirements(requirements_file_info, pid_lower, user_repo)
|
||||
if not ok:
|
||||
logger.debug(f"{pid_lower} 依赖预安装失败:{m}")
|
||||
else:
|
||||
logger.debug(f"{pid_lower} 依赖预安装成功")
|
||||
ok, m = await self.__async_download_files(pid_lower, file_list, user_repo, package_version, True)
|
||||
if not ok:
|
||||
return False, m
|
||||
return True, ""
|
||||
|
||||
async def __async_install_from_release(self, pid: str, user_repo: str, release_tag: str) -> Tuple[bool, str]:
|
||||
"""
|
||||
通过 GitHub Release 资产文件安装插件(异步)。
|
||||
规范:release 中存在名为 "{pid}_v{version}.zip" 的资产,zip 根即插件文件;
|
||||
将其全部解压到 app/plugins/{pid}
|
||||
"""
|
||||
# 拼接资产文件名
|
||||
asset_name = f"{release_tag.lower()}.zip"
|
||||
|
||||
release_api = f"https://api.github.com/repos/{user_repo}/releases/tags/{release_tag}"
|
||||
rel_res = await self.__async_request_with_fallback(
|
||||
release_api,
|
||||
headers=settings.REPO_GITHUB_HEADERS(repo=user_repo),
|
||||
timeout=30,
|
||||
is_api=True,
|
||||
)
|
||||
if rel_res is None or rel_res.status_code != 200:
|
||||
return False, f"获取 Release 信息失败:{rel_res.status_code if rel_res else '连接失败'}"
|
||||
|
||||
try:
|
||||
rel_json = rel_res.json()
|
||||
assets = rel_json.get("assets") or []
|
||||
asset = next((a for a in assets if a.get("name") == asset_name), None)
|
||||
if not asset:
|
||||
return False, f"未找到资产文件:{asset_name}"
|
||||
download_url = asset.get("browser_download_url")
|
||||
if not download_url:
|
||||
return False, "资产缺少下载地址"
|
||||
except Exception as e:
|
||||
logger.error(f"解析 Release 信息失败:{e}")
|
||||
return False, f"解析 Release 信息失败:{e}"
|
||||
|
||||
res = await self.__async_request_with_fallback(download_url,
|
||||
headers=settings.REPO_GITHUB_HEADERS(repo=user_repo))
|
||||
if res is None or res.status_code != 200:
|
||||
return False, f"下载资产失败:{res.status_code if res else '连接失败'}"
|
||||
|
||||
try:
|
||||
with zipfile.ZipFile(io.BytesIO(res.content)) as zf:
|
||||
namelist = zf.namelist()
|
||||
if not namelist:
|
||||
return False, "压缩包内容为空"
|
||||
names_with_slash = [n for n in namelist if '/' in n]
|
||||
base_prefix = ''
|
||||
if names_with_slash and len(names_with_slash) == len(namelist):
|
||||
first_seg = names_with_slash[0].split('/')[0]
|
||||
if all(n.startswith(first_seg + '/') for n in namelist):
|
||||
base_prefix = first_seg + '/'
|
||||
|
||||
dest_base = AsyncPath(settings.ROOT_PATH) / "app" / "plugins" / pid.lower()
|
||||
wrote_any = False
|
||||
for name in namelist:
|
||||
rel_path = name[len(base_prefix):]
|
||||
if not rel_path:
|
||||
continue
|
||||
if rel_path.endswith('/'):
|
||||
await (dest_base / rel_path.rstrip('/')).mkdir(parents=True, exist_ok=True)
|
||||
continue
|
||||
dest_path = dest_base / rel_path
|
||||
await dest_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
with zf.open(name, 'r') as src:
|
||||
data = src.read()
|
||||
async with aiofiles.open(dest_path, 'wb') as dst:
|
||||
await dst.write(data)
|
||||
wrote_any = True
|
||||
if not wrote_any:
|
||||
return False, "压缩包中无可写入文件"
|
||||
return True, ""
|
||||
except Exception as e:
|
||||
logger.error(f"解压 Release 压缩包失败:{e}")
|
||||
return False, f"解压 Release 压缩包失败:{e}"
|
||||
|
||||
app/helper/redis.py (new file, 519 lines)
@@ -0,0 +1,519 @@
|
||||
import json
|
||||
import pickle
|
||||
from typing import Any, Optional, Generator, Tuple, AsyncGenerator
|
||||
from urllib.parse import quote
|
||||
|
||||
import redis
|
||||
from redis.asyncio import Redis
|
||||
|
||||
from app.core.config import settings
|
||||
from app.core.event import eventmanager, Event
|
||||
from app.log import logger
|
||||
from app.schemas import ConfigChangeEventData
|
||||
from app.schemas.types import EventType
|
||||
from app.utils.singleton import Singleton
|
||||
|
||||
# 类型缓存集合,针对非容器简单类型
|
||||
_complex_serializable_types = set()
|
||||
_simple_serializable_types = set()
|
||||
|
||||
|
||||
def serialize(value: Any) -> bytes:
|
||||
"""
|
||||
将值序列化为二进制数据,根据序列化方式标识格式
|
||||
"""
|
||||
|
||||
def _is_container_type(t):
|
||||
"""
|
||||
判断是否为容器类型
|
||||
"""
|
||||
return t in (list, dict, tuple, set)
|
||||
|
||||
vt = type(value)
|
||||
# 针对非容器类型使用缓存策略
|
||||
if not _is_container_type(vt):
|
||||
# 如果已知需要复杂序列化
|
||||
if vt in _complex_serializable_types:
|
||||
return b"PICKLE" + b"\x00" + pickle.dumps(value)
|
||||
# 如果已知可以简单序列化
|
||||
if vt in _simple_serializable_types:
|
||||
json_data = json.dumps(value).encode("utf-8")
|
||||
return b"JSON" + b"\x00" + json_data
|
||||
# 对于未知的非容器类型,尝试简单序列化,如抛出异常,再使用复杂序列化
|
||||
try:
|
||||
json_data = json.dumps(value).encode("utf-8")
|
||||
_simple_serializable_types.add(vt)
|
||||
return b"JSON" + b"\x00" + json_data
|
||||
except TypeError:
|
||||
_complex_serializable_types.add(vt)
|
||||
return b"PICKLE" + b"\x00" + pickle.dumps(value)
|
||||
else:
|
||||
# 针对容器类型,每次尝试简单序列化,不使用缓存
|
||||
try:
|
||||
json_data = json.dumps(value).encode("utf-8")
|
||||
return b"JSON" + b"\x00" + json_data
|
||||
except TypeError:
|
||||
return b"PICKLE" + b"\x00" + pickle.dumps(value)


def deserialize(value: bytes) -> Any:
"""
将二进制数据反序列化为原始值,根据格式标识区分序列化方式
"""
format_marker, data = value.split(b"\x00", 1)
if format_marker == b"JSON":
return json.loads(data.decode("utf-8"))
elif format_marker == b"PICKLE":
return pickle.loads(data)
else:
raise ValueError("Unknown serialization format")

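A minimal, self-contained sketch of the same JSON/PICKLE round-trip (the per-type caching of _simple_serializable_types / _complex_serializable_types is omitted; the demo_ names are illustrative and not part of this module):

    import json
    import pickle
    from typing import Any


    def demo_serialize(value: Any) -> bytes:
        # Prefer JSON for simple values; fall back to pickle for anything JSON cannot encode
        try:
            return b"JSON" + b"\x00" + json.dumps(value).encode("utf-8")
        except TypeError:
            return b"PICKLE" + b"\x00" + pickle.dumps(value)


    def demo_deserialize(blob: bytes) -> Any:
        # The marker before the NUL separator selects the decoder
        marker, data = blob.split(b"\x00", 1)
        if marker == b"JSON":
            return json.loads(data.decode("utf-8"))
        if marker == b"PICKLE":
            return pickle.loads(data)
        raise ValueError("Unknown serialization format")


    if __name__ == "__main__":
        payload = {"id": 42, "title": "demo"}
        blob = demo_serialize(payload)
        assert blob.startswith(b"JSON\x00")
        assert demo_deserialize(blob) == payload

        blob = demo_serialize({1, 2, 3})          # sets are not JSON-serializable
        assert blob.startswith(b"PICKLE\x00")
        assert demo_deserialize(blob) == {1, 2, 3}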
class RedisHelper(metaclass=Singleton):
|
||||
"""
|
||||
Redis连接和操作助手类,单例模式
|
||||
|
||||
特性:
|
||||
- 管理Redis连接池和客户端
|
||||
- 提供序列化和反序列化功能
|
||||
- 支持内存限制和淘汰策略设置
|
||||
- 提供键名生成和区域管理功能
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
"""
|
||||
初始化Redis助手实例
|
||||
"""
|
||||
self.redis_url = settings.CACHE_BACKEND_URL
|
||||
self.client = None
|
||||
|
||||
def _connect(self):
|
||||
"""
|
||||
建立Redis连接
|
||||
"""
|
||||
try:
|
||||
if self.client is None:
|
||||
self.client = redis.Redis.from_url(
|
||||
self.redis_url,
|
||||
decode_responses=False,
|
||||
socket_timeout=30,
|
||||
socket_connect_timeout=5,
|
||||
health_check_interval=60,
|
||||
)
|
||||
# 测试连接,确保Redis可用
|
||||
self.client.ping()
|
||||
logger.info(f"Successfully connected to Redis:{self.redis_url}")
|
||||
self.set_memory_limit()
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to connect to Redis: {e}")
|
||||
self.client = None
|
||||
raise RuntimeError("Redis connection failed") from e
|
||||
|
||||
@eventmanager.register(EventType.ConfigChanged)
|
||||
def handle_config_changed(self, event: Event):
|
||||
"""
|
||||
处理配置变更事件,更新Redis设置
|
||||
:param event: 事件对象
|
||||
"""
|
||||
if not event:
|
||||
return
|
||||
event_data: ConfigChangeEventData = event.event_data
|
||||
if event_data.key not in ['CACHE_BACKEND_TYPE', 'CACHE_BACKEND_URL', 'CACHE_REDIS_MAXMEMORY']:
|
||||
return
|
||||
logger.info("配置变更,重连Redis...")
|
||||
self.close()
|
||||
self._connect()
|
||||
|
||||
def set_memory_limit(self, policy: Optional[str] = "allkeys-lru"):
|
||||
"""
|
||||
动态设置Redis最大内存和内存淘汰策略
|
||||
|
||||
:param policy: 淘汰策略(如'allkeys-lru')
|
||||
"""
|
||||
try:
|
||||
# 如果有显式值,则直接使用,为0时说明不限制,如果未配置,开启BIG_MEMORY_MODE时为"1024mb",未开启时为"256mb"
|
||||
maxmemory = settings.CACHE_REDIS_MAXMEMORY or ("1024mb" if settings.BIG_MEMORY_MODE else "256mb")
|
||||
self.client.config_set("maxmemory", maxmemory)
|
||||
self.client.config_set("maxmemory-policy", policy)
|
||||
logger.debug(f"Redis maxmemory set to {maxmemory}, policy: {policy}")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to set Redis maxmemory or policy: {e}")
|
||||
|
||||
@staticmethod
|
||||
def get_region(region: Optional[str] = "DEFAULT"):
|
||||
"""
|
||||
获取缓存的区
|
||||
"""
|
||||
return f"region:{region}" if region else "region:default"
|
||||
|
||||
def get_redis_key(self, region: str, key: str) -> str:
|
||||
"""
|
||||
获取缓存Key
|
||||
"""
|
||||
# 使用region作为缓存键的一部分
|
||||
region = self.get_region(quote(region))
|
||||
return f"{region}:key:{quote(key)}"
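As a concrete example of the key layout this produces (region and key values here are hypothetical):

    from urllib.parse import quote

    region = f"region:{quote('SITE')}"
    print(f"{region}:key:{quote('site:iyuu')}")   # -> region:SITE:key:site%3Aiyuu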
|
||||
|
||||
def set(self, key: str, value: Any, ttl: Optional[int] = None,
|
||||
region: Optional[str] = "DEFAULT", **kwargs) -> None:
|
||||
"""
|
||||
设置缓存
|
||||
|
||||
:param key: 缓存的键
|
||||
:param value: 缓存的值
|
||||
:param ttl: 缓存的存活时间,单位秒
|
||||
:param region: 缓存的区
|
||||
:param kwargs: 其他参数
|
||||
"""
|
||||
try:
|
||||
self._connect()
|
||||
redis_key = self.get_redis_key(region, key)
|
||||
# 对值进行序列化
|
||||
serialized_value = serialize(value)
|
||||
kwargs.pop("maxsize", None)
|
||||
self.client.set(redis_key, serialized_value, ex=ttl, **kwargs)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to set key: {key} in region: {region}, error: {e}")
|
||||
|
||||
def exists(self, key: str, region: Optional[str] = "DEFAULT") -> bool:
|
||||
"""
|
||||
判断缓存键是否存在
|
||||
|
||||
:param key: 缓存的键
|
||||
:param region: 缓存的区
|
||||
:return: 存在返回True,否则返回False
|
||||
"""
|
||||
try:
|
||||
self._connect()
|
||||
redis_key = self.get_redis_key(region, key)
|
||||
return self.client.exists(redis_key) == 1
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to exists key: {key} region: {region}, error: {e}")
|
||||
return False
|
||||
|
||||
def get(self, key: str, region: Optional[str] = "DEFAULT") -> Optional[Any]:
|
||||
"""
|
||||
获取缓存的值
|
||||
|
||||
:param key: 缓存的键
|
||||
:param region: 缓存的区
|
||||
:return: 返回缓存的值,如果缓存不存在返回None
|
||||
"""
|
||||
try:
|
||||
self._connect()
|
||||
redis_key = self.get_redis_key(region, key)
|
||||
value = self.client.get(redis_key)
|
||||
if value is not None:
|
||||
return deserialize(value)
|
||||
return None
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get key: {key} in region: {region}, error: {e}")
|
||||
return None
|
||||
|
||||
def delete(self, key: str, region: Optional[str] = "DEFAULT") -> None:
|
||||
"""
|
||||
删除缓存
|
||||
|
||||
:param key: 缓存的键
|
||||
:param region: 缓存的区
|
||||
"""
|
||||
try:
|
||||
self._connect()
|
||||
redis_key = self.get_redis_key(region, key)
|
||||
self.client.delete(redis_key)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to delete key: {key} in region: {region}, error: {e}")
|
||||
|
||||
def clear(self, region: Optional[str] = None) -> None:
|
||||
"""
|
||||
清除指定区域的缓存或全部缓存
|
||||
|
||||
:param region: 缓存的区
|
||||
"""
|
||||
try:
|
||||
self._connect()
|
||||
if region:
|
||||
cache_region = self.get_region(quote(region))
|
||||
redis_key = f"{cache_region}:key:*"
|
||||
with self.client.pipeline() as pipe:
|
||||
for key in self.client.scan_iter(redis_key):
|
||||
pipe.delete(key)
|
||||
pipe.execute()
|
||||
logger.info(f"Cleared Redis cache for region: {region}")
|
||||
else:
|
||||
self.client.flushdb()
|
||||
logger.info("Cleared all Redis cache")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to clear cache, region: {region}, error: {e}")
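Outside the helper, the same scan-and-pipeline deletion pattern with redis-py looks roughly like this (the connection URL and key pattern are placeholders):

    import redis

    client = redis.Redis.from_url("redis://localhost:6379/0", decode_responses=False)
    with client.pipeline() as pipe:
        # Queue a delete for every key in the region, then execute them in one round trip
        for key in client.scan_iter("region:DEFAULT:key:*"):
            pipe.delete(key)
        pipe.execute()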
|
||||
|
||||
def items(self, region: Optional[str] = None) -> Generator[Tuple[str, Any], None, None]:
|
||||
"""
|
||||
获取指定区域的所有缓存键值对
|
||||
|
||||
:param region: 缓存的区
|
||||
:return: 返回键值对生成器
|
||||
"""
|
||||
try:
|
||||
self._connect()
|
||||
if region:
|
||||
cache_region = self.get_region(quote(region))
|
||||
redis_key = f"{cache_region}:key:*"
|
||||
for key in self.client.scan_iter(redis_key):
|
||||
value = self.client.get(key)
|
||||
if value is not None:
|
||||
yield key, deserialize(value)
|
||||
else:
|
||||
for key in self.client.scan_iter("*"):
|
||||
value = self.client.get(key)
|
||||
if value is not None:
|
||||
yield key, deserialize(value)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get items from Redis, region: {region}, error: {e}")
|
||||
|
||||
def test(self) -> bool:
|
||||
"""
|
||||
测试Redis连接性
|
||||
"""
|
||||
try:
|
||||
self._connect()
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Redis connection test failed: {e}")
|
||||
return False
|
||||
|
||||
def close(self) -> None:
|
||||
"""
|
||||
关闭Redis客户端的连接池
|
||||
"""
|
||||
if self.client:
|
||||
self.client.close()
|
||||
self.client = None
|
||||
logger.debug("Redis connection closed")
|
||||
|
||||
|
||||
class AsyncRedisHelper(metaclass=Singleton):
|
||||
"""
|
||||
异步Redis连接和操作助手类,单例模式
|
||||
|
||||
特性:
|
||||
- 管理异步Redis连接池和客户端
|
||||
- 提供序列化和反序列化功能
|
||||
- 支持内存限制和淘汰策略设置
|
||||
- 提供键名生成和区域管理功能
|
||||
- 所有操作都是异步的
|
||||
"""
|
||||
|
||||
# 类型缓存集合,针对非容器简单类型
|
||||
_complex_serializable_types = set()
|
||||
_simple_serializable_types = set()
|
||||
|
||||
def __init__(self):
|
||||
"""
|
||||
初始化异步Redis助手实例
|
||||
"""
|
||||
self.redis_url = settings.CACHE_BACKEND_URL
|
||||
self.client: Optional[Redis] = None
|
||||
|
||||
async def _connect(self):
|
||||
"""
|
||||
建立异步Redis连接
|
||||
"""
|
||||
try:
|
||||
if self.client is None:
|
||||
self.client = Redis.from_url(
|
||||
self.redis_url,
|
||||
decode_responses=False,
|
||||
socket_timeout=30,
|
||||
socket_connect_timeout=5,
|
||||
health_check_interval=60,
|
||||
)
|
||||
# 测试连接,确保Redis可用
|
||||
await self.client.ping()
|
||||
logger.info(f"Successfully connected to Redis (async):{self.redis_url}")
|
||||
await self.set_memory_limit()
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to connect to Redis (async): {e}")
|
||||
self.client = None
|
||||
raise RuntimeError("Redis async connection failed") from e
|
||||
|
||||
@eventmanager.register(EventType.ConfigChanged)
|
||||
async def handle_config_changed(self, event: Event):
|
||||
"""
|
||||
处理配置变更事件,更新Redis设置
|
||||
:param event: 事件对象
|
||||
"""
|
||||
if not event:
|
||||
return
|
||||
event_data: ConfigChangeEventData = event.event_data
|
||||
if event_data.key not in ['CACHE_BACKEND_TYPE', 'CACHE_BACKEND_URL', 'CACHE_REDIS_MAXMEMORY']:
|
||||
return
|
||||
logger.info("配置变更,重连Redis (async)...")
|
||||
await self.close()
|
||||
await self._connect()
|
||||
|
||||
async def set_memory_limit(self, policy: Optional[str] = "allkeys-lru"):
|
||||
"""
|
||||
动态设置Redis最大内存和内存淘汰策略
|
||||
|
||||
:param policy: 淘汰策略(如'allkeys-lru')
|
||||
"""
|
||||
try:
|
||||
# 如果有显式值,则直接使用,为0时说明不限制,如果未配置,开启BIG_MEMORY_MODE时为"1024mb",未开启时为"256mb"
|
||||
maxmemory = settings.CACHE_REDIS_MAXMEMORY or ("1024mb" if settings.BIG_MEMORY_MODE else "256mb")
|
||||
await self.client.config_set("maxmemory", maxmemory)
|
||||
await self.client.config_set("maxmemory-policy", policy)
|
||||
logger.debug(f"Redis maxmemory set to {maxmemory}, policy: {policy} (async)")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to set Redis maxmemory or policy (async): {e}")
|
||||
|
||||
@staticmethod
|
||||
def get_region(region: Optional[str] = "DEFAULT"):
|
||||
"""
|
||||
获取缓存的区
|
||||
"""
|
||||
return f"region:{region}" if region else "region:default"
|
||||
|
||||
def get_redis_key(self, region: str, key: str) -> str:
|
||||
"""
|
||||
获取缓存Key
|
||||
"""
|
||||
# 使用region作为缓存键的一部分
|
||||
region = self.get_region(quote(region))
|
||||
return f"{region}:key:{quote(key)}"
|
||||
|
||||
async def set(self, key: str, value: Any, ttl: Optional[int] = None,
|
||||
region: Optional[str] = "DEFAULT", **kwargs) -> None:
|
||||
"""
|
||||
异步设置缓存
|
||||
|
||||
:param key: 缓存的键
|
||||
:param value: 缓存的值
|
||||
:param ttl: 缓存的存活时间,单位秒
|
||||
:param region: 缓存的区
|
||||
:param kwargs: 其他参数
|
||||
"""
|
||||
try:
|
||||
await self._connect()
|
||||
redis_key = self.get_redis_key(region, key)
|
||||
# 对值进行序列化
|
||||
serialized_value = serialize(value)
|
||||
kwargs.pop("maxsize", None)
|
||||
await self.client.set(redis_key, serialized_value, ex=ttl, **kwargs)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to set key (async): {key} in region: {region}, error: {e}")
|
||||
|
||||
async def exists(self, key: str, region: Optional[str] = "DEFAULT") -> bool:
|
||||
"""
|
||||
异步判断缓存键是否存在
|
||||
|
||||
:param key: 缓存的键
|
||||
:param region: 缓存的区
|
||||
:return: 存在返回True,否则返回False
|
||||
"""
|
||||
try:
|
||||
await self._connect()
|
||||
redis_key = self.get_redis_key(region, key)
|
||||
result = await self.client.exists(redis_key)
|
||||
return result == 1
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to exists key (async): {key} region: {region}, error: {e}")
|
||||
return False
|
||||
|
||||
async def get(self, key: str, region: Optional[str] = "DEFAULT") -> Optional[Any]:
|
||||
"""
|
||||
异步获取缓存的值
|
||||
|
||||
:param key: 缓存的键
|
||||
:param region: 缓存的区
|
||||
:return: 返回缓存的值,如果缓存不存在返回None
|
||||
"""
|
||||
try:
|
||||
await self._connect()
|
||||
redis_key = self.get_redis_key(region, key)
|
||||
value = await self.client.get(redis_key)
|
||||
if value is not None:
|
||||
return deserialize(value)
|
||||
return None
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get key (async): {key} in region: {region}, error: {e}")
|
||||
return None
|
||||
|
||||
async def delete(self, key: str, region: Optional[str] = "DEFAULT") -> None:
|
||||
"""
|
||||
异步删除缓存
|
||||
|
||||
:param key: 缓存的键
|
||||
:param region: 缓存的区
|
||||
"""
|
||||
try:
|
||||
await self._connect()
|
||||
redis_key = self.get_redis_key(region, key)
|
||||
await self.client.delete(redis_key)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to delete key (async): {key} in region: {region}, error: {e}")
|
||||
|
||||
async def clear(self, region: Optional[str] = None) -> None:
|
||||
"""
|
||||
异步清除指定区域的缓存或全部缓存
|
||||
|
||||
:param region: 缓存的区
|
||||
"""
|
||||
try:
|
||||
await self._connect()
|
||||
if region:
|
||||
cache_region = self.get_region(quote(region))
|
||||
redis_key = f"{cache_region}:key:*"
|
||||
async with self.client.pipeline() as pipe:
|
||||
async for key in self.client.scan_iter(redis_key):
|
||||
await pipe.delete(key)
|
||||
await pipe.execute()
|
||||
logger.info(f"Cleared Redis cache for region (async): {region}")
|
||||
else:
|
||||
await self.client.flushdb()
|
||||
logger.info("Cleared all Redis cache (async)")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to clear cache (async), region: {region}, error: {e}")
|
||||
|
||||
async def items(self, region: Optional[str] = None) -> AsyncGenerator[Tuple[str, Any], None]:
|
||||
"""
|
||||
获取指定区域的所有缓存键值对
|
||||
|
||||
:param region: 缓存的区
|
||||
:return: 返回键值对生成器
|
||||
"""
|
||||
try:
|
||||
await self._connect()
|
||||
if region:
|
||||
cache_region = self.get_region(quote(region))
|
||||
redis_key = f"{cache_region}:key:*"
|
||||
async for key in self.client.scan_iter(redis_key):
|
||||
value = await self.client.get(key)
|
||||
if value is not None:
|
||||
yield key, deserialize(value)
|
||||
else:
|
||||
async for key in self.client.scan_iter("*"):
|
||||
value = await self.client.get(key)
|
||||
if value is not None:
|
||||
yield key, deserialize(value)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get items from Redis, region: {region}, error: {e}")
|
||||
|
||||
async def test(self) -> bool:
|
||||
"""
|
||||
异步测试Redis连接性
|
||||
"""
|
||||
try:
|
||||
await self._connect()
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Redis async connection test failed: {e}")
|
||||
return False
|
||||
|
||||
async def close(self) -> None:
|
||||
"""
|
||||
关闭异步Redis客户端的连接池
|
||||
"""
|
||||
if self.client:
|
||||
await self.client.close()
|
||||
self.client = None
|
||||
logger.debug("Redis async connection closed")
|
||||
@@ -429,13 +429,14 @@ class RssHelper:

return ret_array

def get_rss_link(self, url: str, cookie: str, ua: str, proxy: bool = False) -> Tuple[str, str]:
def get_rss_link(self, url: str, cookie: str, ua: str, proxy: bool = False, timeout: int = None) -> Tuple[str, str]:
"""
获取站点rss地址
:param url: 站点地址
:param cookie: 站点cookie
:param ua: 站点ua
:param proxy: 是否使用代理
:param timeout: 请求超时时间
:return: rss地址、错误信息
"""
try:
@@ -453,12 +454,13 @@ class RssHelper:
url=rss_url,
cookies=cookie,
ua=ua,
proxies=settings.PROXY if proxy else None
proxies=settings.PROXY_SERVER if proxy else None,
timeout=timeout or 60
)
else:
res = RequestUtils(
cookies=cookie,
timeout=60,
timeout=timeout or 30,
ua=ua,
proxies=settings.PROXY if proxy else None
).post_res(url=rss_url, data=rss_params)

@@ -1,7 +1,7 @@
|
||||
from threading import Thread
|
||||
from typing import List, Tuple, Optional
|
||||
|
||||
from app.core.cache import cached, cache_backend
|
||||
from app.core.cache import cached
|
||||
from app.core.config import settings
|
||||
from app.db.subscribe_oper import SubscribeOper
|
||||
from app.db.systemconfig_oper import SystemConfigOper
|
||||
@@ -111,7 +111,12 @@ class SubscribeHelper(metaclass=WeakSingleton):
|
||||
if res and res.status_code == 200:
|
||||
# 清除缓存
|
||||
if clear_cache:
|
||||
cache_backend.clear(region=self._shares_cache_region)
|
||||
self.get_shares.cache_clear()
|
||||
self.get_statistic.cache_clear()
|
||||
self.get_share_statistics.cache_clear()
|
||||
self.async_get_shares.cache_clear()
|
||||
self.async_get_statistic.cache_clear()
|
||||
self.async_get_share_statistics.cache_clear()
|
||||
return True, ""
|
||||
else:
|
||||
return False, res.json().get("message")
|
||||
|
||||
@@ -6,6 +6,7 @@ from urllib.parse import unquote
|
||||
|
||||
from torrentool.api import Torrent
|
||||
|
||||
from app.core.cache import FileCache
|
||||
from app.core.config import settings
|
||||
from app.core.context import Context, TorrentInfo, MediaInfo
|
||||
from app.core.meta import MetaBase
|
||||
@@ -35,27 +36,29 @@ class TorrentHelper(metaclass=WeakSingleton):
|
||||
-> Tuple[Optional[Path], Optional[Union[str, bytes]], Optional[str], Optional[list], Optional[str]]:
|
||||
"""
|
||||
把种子下载到本地
|
||||
:return: 种子保存路径、种子内容、种子主目录、种子文件清单、错误信息
|
||||
:return: 种子缓存相对路径【用于索引缓存】, 种子内容、种子主目录、种子文件清单、错误信息
|
||||
"""
|
||||
if url.startswith("magnet:"):
|
||||
return None, url, "", [], f"磁力链接"
|
||||
# 构建 torrent 种子文件的存储路径
|
||||
file_path = (Path(settings.TEMP_PATH) / StringUtils.md5_hash(url)).with_suffix(".torrent")
|
||||
if file_path.exists():
|
||||
# 构建 torrent 种子文件的缓存路径
|
||||
cache_path = Path(StringUtils.md5_hash(url)).with_suffix(".torrent")
|
||||
# 缓存处理器
|
||||
cache_backend = FileCache()
|
||||
# 读取缓存的种子文件
|
||||
torrent_content = cache_backend.get(cache_path.as_posix(), region="torrents")
|
||||
if torrent_content:
|
||||
# 缓存已存在
|
||||
try:
|
||||
# 获取种子目录和文件清单
|
||||
folder_name, file_list = self.get_torrent_info(file_path)
|
||||
folder_name, file_list = self.get_fileinfo_from_torrent_content(torrent_content)
|
||||
# 无法获取信息,则认为缓存文件无效
|
||||
if not folder_name and not file_list:
|
||||
raise ValueError("无效的缓存种子文件")
|
||||
# 获取种子数据
|
||||
content = file_path.read_bytes()
|
||||
# 成功拿到种子数据
|
||||
return file_path, content, folder_name, file_list, ""
|
||||
return cache_path, torrent_content, folder_name, file_list, ""
|
||||
except Exception as err:
|
||||
logger.error(f"处理缓存的种子文件 {file_path} 时出错: {err},将重新下载")
|
||||
file_path.unlink(missing_ok=True)
|
||||
# 请求种子文件
|
||||
logger.error(f"处理缓存的种子文件 {cache_path} 时出错: {err},将重新下载")
|
||||
# 下载种子文件
|
||||
req = RequestUtils(
|
||||
ua=ua,
|
||||
cookies=cookie,
|
||||
@@ -74,11 +77,11 @@ class TorrentHelper(metaclass=WeakSingleton):
|
||||
).get_res(url=url, allow_redirects=False)
|
||||
if req and req.status_code == 200:
|
||||
if not req.content:
|
||||
return None, None, "", [], "未下载到种子数据"
|
||||
return cache_path, None, "", [], "未下载到种子数据"
|
||||
# 解析内容格式
|
||||
if req.content.startswith(b"magnet:"):
|
||||
# 磁力链接
|
||||
return None, req.text, "", [], f"获取到磁力链接"
|
||||
return cache_path, req.text, "", [], f"获取到磁力链接"
|
||||
if "下载种子文件".encode("utf-8") in req.content:
|
||||
# 首次下载提示页面
|
||||
skip_flag = False
|
||||
@@ -116,34 +119,34 @@ class TorrentHelper(metaclass=WeakSingleton):
|
||||
except Exception as err:
|
||||
logger.warn(f"触发了站点首次种子下载,尝试自动跳过时出现错误:{str(err)},链接:{url}")
|
||||
if not skip_flag:
|
||||
return None, None, "", [], "种子数据有误,请确认链接是否正确,如为PT站点则需手工在站点下载一次种子"
|
||||
return cache_path, None, "", [], "种子数据有误,请确认链接是否正确,如为PT站点则需手工在站点下载一次种子"
|
||||
# 种子内容
|
||||
if req.content:
|
||||
# 检查是不是种子文件,如果不是仍然抛出异常
|
||||
try:
|
||||
# 保存到文件
|
||||
file_path.write_bytes(req.content)
|
||||
# 获取种子目录和文件清单
|
||||
folder_name, file_list = self.get_torrent_info(file_path)
|
||||
folder_name, file_list = self.get_fileinfo_from_torrent_content(req.content)
|
||||
if file_list:
|
||||
# 保存到缓存
|
||||
cache_backend.set(cache_path.as_posix(), req.content, region="torrents")
|
||||
# 成功拿到种子数据
|
||||
return file_path, req.content, folder_name, file_list, ""
|
||||
return cache_path, req.content, folder_name, file_list, ""
|
||||
except Exception as err:
|
||||
logger.error(f"种子文件解析失败:{str(err)}")
|
||||
# 种子数据仍然错误
|
||||
return None, None, "", [], "种子数据有误,请确认链接是否正确"
|
||||
return cache_path, None, "", [], "种子数据有误,请确认链接是否正确"
|
||||
# 返回失败
|
||||
return None, None, "", [], ""
|
||||
return cache_path, None, "", [], ""
|
||||
elif req is None:
|
||||
return None, None, "", [], "无法打开链接"
|
||||
return cache_path, None, "", [], "无法打开链接"
|
||||
elif req.status_code == 429:
|
||||
return None, None, "", [], "触发站点流控,请稍后重试"
|
||||
return cache_path, None, "", [], "触发站点流控,请稍后重试"
|
||||
else:
|
||||
# 把错误的种子记下来,避免重复使用
|
||||
self.add_invalid(url)
|
||||
return None, None, "", [], f"下载种子出错,状态码:{req.status_code}"
|
||||
return cache_path, None, "", [], f"下载种子出错,状态码:{req.status_code}"
|
||||
|
||||
@staticmethod
|
||||
def get_torrent_info(torrent_path: Path) -> Tuple[str, List[str]]:
|
||||
def get_torrent_info(self, torrent_path: Path) -> Tuple[str, List[str]]:
|
||||
"""
|
||||
获取种子文件的文件夹名和文件清单
|
||||
:param torrent_path: 种子文件路径
|
||||
@@ -154,32 +157,59 @@ class TorrentHelper(metaclass=WeakSingleton):
|
||||
try:
|
||||
torrentinfo = Torrent.from_file(torrent_path)
|
||||
# 获取文件清单
|
||||
if (not torrentinfo.files
|
||||
or (len(torrentinfo.files) == 1
|
||||
and torrentinfo.files[0].name == torrentinfo.name)):
|
||||
# 单文件种子目录名返回空
|
||||
folder_name = ""
|
||||
# 单文件种子
|
||||
file_list = [torrentinfo.name]
|
||||
else:
|
||||
# 目录名
|
||||
folder_name = torrentinfo.name
|
||||
# 文件清单,如果一级目录与种子名相同则去掉
|
||||
file_list = []
|
||||
for fileinfo in torrentinfo.files:
|
||||
file_path = Path(fileinfo.name)
|
||||
# 根路径
|
||||
root_path = file_path.parts[0]
|
||||
if root_path == folder_name:
|
||||
file_list.append(str(file_path.relative_to(root_path)))
|
||||
else:
|
||||
file_list.append(fileinfo.name)
|
||||
logger.debug(f"解析种子:{torrent_path.name} => 目录:{folder_name},文件清单:{file_list}")
|
||||
return folder_name, file_list
|
||||
return self.get_fileinfo_from_torrent(torrentinfo)
|
||||
except Exception as err:
|
||||
logger.error(f"种子文件解析失败:{str(err)}")
|
||||
return "", []
|
||||
|
||||
@staticmethod
|
||||
def get_fileinfo_from_torrent(torrent: Torrent) -> Tuple[str, List[str]]:
|
||||
"""
|
||||
从种子文件中获取文件清单
|
||||
:param torrent: 种子文件对象
|
||||
:return: 文件夹名、文件清单,单文件种子返回空文件夹名
|
||||
"""
|
||||
if not torrent or not torrent.files:
|
||||
return "", []
|
||||
# 获取文件清单
|
||||
if len(torrent.files) == 1 and torrent.files[0].name == torrent.name:
|
||||
# 单文件种子目录名返回空
|
||||
folder_name = ""
|
||||
# 单文件种子
|
||||
file_list = [torrent.name]
|
||||
else:
|
||||
# 目录名
|
||||
folder_name = torrent.name
|
||||
# 文件清单,如果一级目录与种子名相同则去掉
|
||||
file_list = []
|
||||
for fileinfo in torrent.files:
|
||||
file_path = Path(fileinfo.name)
|
||||
# 根路径
|
||||
root_path = file_path.parts[0]
|
||||
if root_path == folder_name:
|
||||
file_list.append(str(file_path.relative_to(root_path)))
|
||||
else:
|
||||
file_list.append(fileinfo.name)
|
||||
logger.debug(f"解析种子:{torrent.name} => 目录:{folder_name},文件清单:{file_list}")
|
||||
return folder_name, file_list
|
||||
|
||||
def get_fileinfo_from_torrent_content(self, torrent_content: Union[str, bytes]) -> Tuple[str, List[str]]:
|
||||
"""
|
||||
从种子内容中获取文件夹名和文件清单
|
||||
:param torrent_content: 种子内容
|
||||
:return: 文件夹名、文件清单,单文件种子返回空文件夹名
|
||||
"""
|
||||
if not torrent_content:
|
||||
return "", []
|
||||
try:
|
||||
# 解析种子内容
|
||||
torrentinfo = Torrent.from_string(torrent_content)
|
||||
# 获取文件清单
|
||||
return self.get_fileinfo_from_torrent(torrentinfo)
|
||||
except Exception as err:
|
||||
logger.error(f"种子内容解析失败:{str(err)}")
|
||||
return "", []
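A quick usage sketch of the new content-based parsing path; the import path and the sample file name are assumptions for illustration only:

    from pathlib import Path

    from app.helper.torrent import TorrentHelper

    helper = TorrentHelper()
    content = Path("example.torrent").read_bytes()   # any torrent file available locally
    folder, files = helper.get_fileinfo_from_torrent_content(content)
    print(folder or "<single-file torrent>")
    print(files)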
|
||||
|
||||
@staticmethod
|
||||
def get_url_filename(req: Any, url: str) -> str:
|
||||
"""
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import json
|
||||
from typing import List, Tuple, Optional
|
||||
|
||||
from app.core.cache import cached, cache_backend
|
||||
from app.core.cache import cached
|
||||
from app.core.config import settings
|
||||
from app.db.models import Workflow
|
||||
from app.db.workflow_oper import WorkflowOper
|
||||
@@ -89,7 +89,8 @@ class WorkflowHelper(metaclass=WeakSingleton):
|
||||
if success:
|
||||
# 清除缓存
|
||||
if clear_cache:
|
||||
cache_backend.clear(region=self._shares_cache_region)
|
||||
self.get_shares.cache_clear()
|
||||
self.async_get_shares.cache_clear()
|
||||
return True, ""
|
||||
else:
|
||||
try:
|
||||
|
||||
@@ -1,23 +1,19 @@
|
||||
import pickle
|
||||
import random
|
||||
import time
|
||||
import traceback
|
||||
from pathlib import Path
|
||||
from threading import RLock
|
||||
from typing import Optional
|
||||
|
||||
from app.core.cache import TTLCache
|
||||
from app.core.config import settings
|
||||
from app.core.meta import MetaBase
|
||||
from app.core.metainfo import MetaInfo
|
||||
from app.log import logger
|
||||
from app.utils.singleton import WeakSingleton
|
||||
from app.schemas.types import MediaType
|
||||
from app.utils.singleton import WeakSingleton
|
||||
|
||||
lock = RLock()
|
||||
|
||||
CACHE_EXPIRE_TIMESTAMP_STR = "cache_expire_timestamp"
|
||||
EXPIRE_TIMESTAMP = settings.CONF.meta
|
||||
|
||||
|
||||
class DoubanCache(metaclass=WeakSingleton):
|
||||
"""
|
||||
@@ -30,18 +26,26 @@ class DoubanCache(metaclass=WeakSingleton):
|
||||
}
|
||||
"""
|
||||
# TMDB缓存过期
|
||||
_tmdb_cache_expire: bool = True
|
||||
_douban_cache_expire: bool = True
|
||||
|
||||
def __init__(self):
|
||||
self._meta_path = settings.TEMP_PATH / "__douban_cache__"
|
||||
self._meta_data = self.__load(self._meta_path)
|
||||
self.maxsize = settings.CONF.douban
|
||||
self.ttl = settings.CONF.meta
|
||||
self.region = "__douban_cache__"
|
||||
self._meta_filepath = settings.TEMP_PATH / self.region
|
||||
# 初始化缓存
|
||||
self._cache = TTLCache(region=self.region, maxsize=self.maxsize, ttl=self.ttl)
|
||||
# 非Redis加载本地缓存数据
|
||||
if not self._cache.is_redis():
|
||||
for key, value in self.__load(self._meta_filepath).items():
|
||||
self._cache.set(key, value)
|
||||
|
||||
def clear(self):
|
||||
"""
|
||||
清空所有TMDB缓存
|
||||
清空所有豆瓣缓存
|
||||
"""
|
||||
with lock:
|
||||
self._meta_data = {}
|
||||
self._cache.clear()
|
||||
|
||||
@staticmethod
|
||||
def __get_key(meta: MetaBase) -> str:
|
||||
@@ -57,15 +61,7 @@ class DoubanCache(metaclass=WeakSingleton):
|
||||
"""
|
||||
key = self.__get_key(meta)
|
||||
with lock:
|
||||
info: dict = self._meta_data.get(key)
|
||||
if info:
|
||||
expire = info.get(CACHE_EXPIRE_TIMESTAMP_STR)
|
||||
if not expire or int(time.time()) < expire:
|
||||
info[CACHE_EXPIRE_TIMESTAMP_STR] = int(time.time()) + EXPIRE_TIMESTAMP
|
||||
self._meta_data[key] = info
|
||||
elif expire and self._tmdb_cache_expire:
|
||||
self.delete(key)
|
||||
return info or {}
|
||||
return self._cache.get(key) or {}
|
||||
|
||||
def delete(self, key: str) -> dict:
|
||||
"""
|
||||
@@ -74,38 +70,26 @@ class DoubanCache(metaclass=WeakSingleton):
|
||||
@return: 被删除的缓存内容
|
||||
"""
|
||||
with lock:
|
||||
return self._meta_data.pop(key, {})
|
||||
|
||||
def delete_by_doubanid(self, doubanid: str) -> None:
|
||||
"""
|
||||
清空对应豆瓣ID的所有缓存记录,以强制更新TMDB中最新的数据
|
||||
"""
|
||||
for key in list(self._meta_data):
|
||||
if self._meta_data.get(key, {}).get("id") == doubanid:
|
||||
with lock:
|
||||
self._meta_data.pop(key)
|
||||
|
||||
def delete_unknown(self) -> None:
|
||||
"""
|
||||
清除未识别的缓存记录,以便重新搜索TMDB
|
||||
"""
|
||||
for key in list(self._meta_data):
|
||||
if self._meta_data.get(key, {}).get("id") == "0":
|
||||
with lock:
|
||||
self._meta_data.pop(key)
|
||||
redis_data = self._cache.get(key)
|
||||
if redis_data:
|
||||
self._cache.delete(key)
|
||||
return redis_data
|
||||
return {}
|
||||
|
||||
def modify(self, key: str, title: str) -> dict:
|
||||
"""
|
||||
删除缓存信息
|
||||
修改缓存信息
|
||||
@param key: 缓存key
|
||||
@param title: 标题
|
||||
@return: 被修改后缓存内容
|
||||
"""
|
||||
with lock:
|
||||
if self._meta_data.get(key):
|
||||
self._meta_data[key]['title'] = title
|
||||
self._meta_data[key][CACHE_EXPIRE_TIMESTAMP_STR] = int(time.time()) + EXPIRE_TIMESTAMP
|
||||
return self._meta_data.get(key)
|
||||
redis_data = self._cache.get(key)
|
||||
if redis_data:
|
||||
redis_data["title"] = title
|
||||
self._cache.set(key, redis_data)
|
||||
return redis_data
|
||||
return {}
|
||||
|
||||
@staticmethod
|
||||
def __load(path: Path) -> dict:
|
||||
@@ -117,119 +101,72 @@ class DoubanCache(metaclass=WeakSingleton):
|
||||
with open(path, 'rb') as f:
|
||||
data = pickle.load(f)
|
||||
return data
|
||||
return {}
|
||||
except Exception as e:
|
||||
logger.error(f"加载缓存失败: {str(e)} - {traceback.format_exc()}")
|
||||
return {}
|
||||
return {}
|
||||
|
||||
def update(self, meta: MetaBase, info: dict) -> None:
|
||||
"""
|
||||
新增或更新缓存条目
|
||||
"""
|
||||
with lock:
|
||||
if info:
|
||||
# 缓存标题
|
||||
cache_title = info.get("title")
|
||||
# 缓存年份
|
||||
cache_year = info.get('year')
|
||||
# 类型
|
||||
if isinstance(info.get('media_type'), MediaType):
|
||||
mtype = info.get('media_type')
|
||||
elif info.get("type"):
|
||||
mtype = MediaType.MOVIE if info.get("type") == "movie" else MediaType.TV
|
||||
if info:
|
||||
# 缓存标题
|
||||
cache_title = info.get("title")
|
||||
# 缓存年份
|
||||
cache_year = info.get('year')
|
||||
# 类型
|
||||
if isinstance(info.get('media_type'), MediaType):
|
||||
mtype = info.get('media_type')
|
||||
elif info.get("type"):
|
||||
mtype = MediaType.MOVIE if info.get("type") == "movie" else MediaType.TV
|
||||
else:
|
||||
meta = MetaInfo(cache_title)
|
||||
if meta.begin_season:
|
||||
mtype = MediaType.TV
|
||||
else:
|
||||
meta = MetaInfo(cache_title)
|
||||
if meta.begin_season:
|
||||
mtype = MediaType.TV
|
||||
else:
|
||||
mtype = MediaType.MOVIE
|
||||
# 海报
|
||||
poster_path = info.get("pic", {}).get("large")
|
||||
if not poster_path and info.get("cover_url"):
|
||||
poster_path = info.get("cover_url")
|
||||
if not poster_path and info.get("cover"):
|
||||
poster_path = info.get("cover").get("url")
|
||||
mtype = MediaType.MOVIE
|
||||
# 海报
|
||||
poster_path = info.get("pic", {}).get("large")
|
||||
if not poster_path and info.get("cover_url"):
|
||||
poster_path = info.get("cover_url")
|
||||
if not poster_path and info.get("cover"):
|
||||
poster_path = info.get("cover").get("url")
|
||||
|
||||
self._meta_data[self.__get_key(meta)] = {
|
||||
"id": info.get("id"),
|
||||
"type": mtype,
|
||||
"year": cache_year,
|
||||
"title": cache_title,
|
||||
"poster_path": poster_path,
|
||||
CACHE_EXPIRE_TIMESTAMP_STR: int(time.time()) + EXPIRE_TIMESTAMP
|
||||
}
|
||||
elif info is not None:
|
||||
# None时不缓存,此时代表网络错误,允许重复请求
|
||||
self._meta_data[self.__get_key(meta)] = {'id': "0"}
|
||||
with lock:
|
||||
self._cache.set(self.__get_key(meta), {
|
||||
"id": info.get("id"),
|
||||
"type": mtype,
|
||||
"year": cache_year,
|
||||
"title": cache_title,
|
||||
"poster_path": poster_path
|
||||
})
|
||||
|
||||
elif info is not None:
|
||||
# None时不缓存,此时代表网络错误,允许重复请求
|
||||
with lock:
|
||||
self._cache.set(self.__get_key(meta), {
|
||||
"id": 0
|
||||
})
|
||||
|
||||
def save(self, force: Optional[bool] = False) -> None:
|
||||
"""
|
||||
保存缓存数据到文件
|
||||
"""
|
||||
# Redis不需要保存到本地文件
|
||||
if self._cache.is_redis():
|
||||
return
|
||||
|
||||
meta_data = self.__load(self._meta_path)
|
||||
new_meta_data = {k: v for k, v in self._meta_data.items() if v.get("id")}
|
||||
# 本地文件
|
||||
meta_data = self.__load(self._meta_filepath)
|
||||
# 当前缓存数据(去除无法识别)
|
||||
new_meta_data = {k: v for k, v in self._cache.items() if v.get("id")}
|
||||
|
||||
if not force \
|
||||
and not self._random_sample(new_meta_data) \
|
||||
and meta_data.keys() == new_meta_data.keys():
|
||||
return
|
||||
|
||||
with open(self._meta_path, 'wb') as f:
|
||||
pickle.dump(new_meta_data, f, pickle.HIGHEST_PROTOCOL) # noqa
|
||||
|
||||
def _random_sample(self, new_meta_data: dict) -> bool:
|
||||
"""
|
||||
采样分析是否需要保存
|
||||
"""
|
||||
ret = False
|
||||
if len(new_meta_data) < 25:
|
||||
keys = list(new_meta_data.keys())
|
||||
for k in keys:
|
||||
info = new_meta_data.get(k)
|
||||
expire = info.get(CACHE_EXPIRE_TIMESTAMP_STR)
|
||||
if not expire:
|
||||
ret = True
|
||||
info[CACHE_EXPIRE_TIMESTAMP_STR] = int(time.time()) + EXPIRE_TIMESTAMP
|
||||
elif int(time.time()) >= expire:
|
||||
ret = True
|
||||
if self._tmdb_cache_expire:
|
||||
new_meta_data.pop(k)
|
||||
else:
|
||||
count = 0
|
||||
keys = random.sample(sorted(new_meta_data.keys()), 25)
|
||||
for k in keys:
|
||||
info = new_meta_data.get(k)
|
||||
expire = info.get(CACHE_EXPIRE_TIMESTAMP_STR)
|
||||
if not expire:
|
||||
ret = True
|
||||
info[CACHE_EXPIRE_TIMESTAMP_STR] = int(time.time()) + EXPIRE_TIMESTAMP
|
||||
elif int(time.time()) >= expire:
|
||||
ret = True
|
||||
if self._tmdb_cache_expire:
|
||||
new_meta_data.pop(k)
|
||||
count += 1
|
||||
if count >= 5:
|
||||
ret |= self._random_sample(new_meta_data)
|
||||
return ret
|
||||
|
||||
def get_title(self, key: str) -> Optional[str]:
|
||||
"""
|
||||
获取缓存的标题
|
||||
"""
|
||||
cache_media_info = self._meta_data.get(key)
|
||||
if not cache_media_info or not cache_media_info.get("id"):
|
||||
return None
|
||||
return cache_media_info.get("title")
|
||||
|
||||
def set_title(self, key: str, cn_title: str) -> None:
|
||||
"""
|
||||
重新设置缓存标题
|
||||
"""
|
||||
cache_media_info = self._meta_data.get(key)
|
||||
if not cache_media_info:
|
||||
return
|
||||
self._meta_data[key]['title'] = cn_title
|
||||
# 写入本地
|
||||
with open(self._meta_filepath, 'wb') as f:
|
||||
pickle.dump(new_meta_data, f, pickle.HIGHEST_PROTOCOL) # noqa
|
||||
|
||||
def __del__(self):
|
||||
self.save()
|
||||
|
||||
@@ -282,9 +282,8 @@ class EmbyModule(_ModuleBase, _MediaServerBase[Emby]):
|
||||
episodes=episodes
|
||||
) for season, episodes in seasoninfo.items()]
|
||||
|
||||
def mediaserver_playing(self, server: str,
|
||||
count: Optional[int] = 20, username: Optional[str] = None) -> List[
|
||||
schemas.MediaServerPlayItem]:
|
||||
def mediaserver_playing(self, server: str, count: Optional[int] = 20,
|
||||
username: Optional[str] = None) -> List[schemas.MediaServerPlayItem]:
|
||||
"""
|
||||
获取媒体服务器正在播放信息
|
||||
"""
|
||||
@@ -302,9 +301,8 @@ class EmbyModule(_ModuleBase, _MediaServerBase[Emby]):
|
||||
return None
|
||||
return server_obj.get_play_url(item_id)
|
||||
|
||||
def mediaserver_latest(self, server: Optional[str] = None,
|
||||
count: Optional[int] = 20, username: Optional[str] = None) -> List[
|
||||
schemas.MediaServerPlayItem]:
|
||||
def mediaserver_latest(self, server: Optional[str] = None, count: Optional[int] = 20,
|
||||
username: Optional[str] = None) -> List[schemas.MediaServerPlayItem]:
|
||||
"""
|
||||
获取媒体服务器最新入库条目
|
||||
"""
|
||||
|
||||
@@ -167,7 +167,7 @@ class Emby:
|
||||
image=image,
|
||||
link=f'{self._playhost or self._host}web/index.html'
|
||||
f'#!/videos?serverId={self.serverid}&parentId={library.get("Id")}',
|
||||
server_type= "emby"
|
||||
server_type="emby"
|
||||
)
|
||||
)
|
||||
return libraries
|
||||
@@ -497,7 +497,7 @@ class Emby:
|
||||
logger.info(f"影片图片链接:{res.url}")
|
||||
return res.url
|
||||
else:
|
||||
logger.error("Items/Id/Images 未获取到返回数据或无该影片{}图片".format(image_type))
|
||||
logger.info("Items/Id/Images 未获取到返回数据或无该影片{}图片".format(image_type))
|
||||
return None
|
||||
except Exception as e:
|
||||
logger.error(f"连接Items/Id/Images出错:" + str(e))
|
||||
|
||||
@@ -183,8 +183,11 @@ class HddolbySpider:
|
||||
timeout=self._timeout
|
||||
).post_res(url=self._searchurl, json=params)
|
||||
if res and res.status_code == 200:
|
||||
results = res.json().get('data', []) or []
|
||||
return False, self.__parse_result(results)
|
||||
result = res.json()
|
||||
if result.get("error"):
|
||||
logger.warn(f"{self._name} 搜索失败,错误信息:{result.get('error').get('message')}")
|
||||
return True, []
|
||||
return False, self.__parse_result(result.get('data'))
|
||||
elif res is not None:
|
||||
logger.warn(f"{self._name} 搜索失败,错误码:{res.status_code}")
|
||||
return True, []
|
||||
@@ -212,8 +215,11 @@ class HddolbySpider:
|
||||
timeout=self._timeout
|
||||
).post_res(url=self._searchurl, json=params)
|
||||
if res and res.status_code == 200:
|
||||
results = res.json().get('data', []) or []
|
||||
return False, self.__parse_result(results)
|
||||
result = res.json()
|
||||
if result.get("error"):
|
||||
logger.warn(f"{self._name} 搜索失败,错误信息:{result.get('error').get('message')}")
|
||||
return True, []
|
||||
return False, self.__parse_result(result.get('data'))
|
||||
elif res is not None:
|
||||
logger.warn(f"{self._name} 搜索失败,错误码:{res.status_code}")
|
||||
return True, []
|
||||
|
||||
@@ -249,9 +249,9 @@ class MTorrentSpider:
|
||||
'header': {
|
||||
'User-Agent': f'{self._ua}',
|
||||
'Accept': 'application/json, text/plain, */*',
|
||||
'x-api-key': self._apikey,
|
||||
'proxies': self._proxy,
|
||||
'x-api-key': self._apikey
|
||||
},
|
||||
'proxy': True if self._proxy else False,
|
||||
'result': 'data'
|
||||
}
|
||||
# base64编码
|
||||
|
||||
@@ -5,13 +5,14 @@ from app.core.cache import cached
|
||||
from app.core.config import settings
|
||||
from app.log import logger
|
||||
from app.utils.http import RequestUtils, AsyncRequestUtils
|
||||
from app.utils.singleton import Singleton
|
||||
from app.utils.singleton import SingletonClass
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class TNodeSpider(metaclass=Singleton):
|
||||
class TNodeSpider(metaclass=SingletonClass):
|
||||
_size = 100
|
||||
_timeout = 15
|
||||
_proxy = None
|
||||
_baseurl = "%sapi/torrent/advancedSearch"
|
||||
_downloadurl = "%sapi/torrent/download/%s"
|
||||
_pageurl = "%storrent/info/%s"
|
||||
@@ -53,7 +54,7 @@ class TNodeSpider(metaclass=Singleton):
|
||||
if res and res.status_code == 200:
|
||||
csrf_token = re.search(r'<meta name="x-csrf-token" content="(.+?)">', res.text)
|
||||
if csrf_token:
|
||||
_token = csrf_token.group(1)
|
||||
return csrf_token.group(1)
|
||||
return None
|
||||
|
||||
def __get_params(self, keyword: str = None, page: Optional[int] = 0) -> dict:
|
||||
@@ -154,7 +155,7 @@ class TNodeSpider(metaclass=Singleton):
|
||||
# 发送请求
|
||||
res = await AsyncRequestUtils(
|
||||
headers={
|
||||
'X-CSRF-TOKEN': _token,
|
||||
'x-csrf-token': _token,
|
||||
"Content-Type": "application/json; charset=utf-8",
|
||||
"User-Agent": f"{self._ua}"
|
||||
},
|
||||
|
||||
@@ -170,9 +170,9 @@ class Plex:
|
||||
sections = self._plex.library.sections()
|
||||
movie_count = tv_count = episode_count = 0
|
||||
# 媒体库白名单
|
||||
allow_library = [lib.id for lib in self.get_librarys(hidden=True)]
|
||||
allow_library = [str(lib.id) for lib in self.get_librarys(hidden=True)]
|
||||
for sec in sections:
|
||||
if sec.key not in allow_library:
|
||||
if str(sec.key) not in allow_library:
|
||||
continue
|
||||
if sec.type == "movie":
|
||||
movie_count += sec.totalSize
|
||||
|
||||
app/modules/postgresql/__init__.py (new file, 63 lines)
@@ -0,0 +1,63 @@
from typing import Tuple, Union

from app.core.config import settings
from app.db import SessionFactory
from app.modules import _ModuleBase
from app.schemas.types import ModuleType, OtherModulesType
from sqlalchemy import text


class PostgreSQLModule(_ModuleBase):
    """
    PostgreSQL 数据库模块
    """

    def init_module(self) -> None:
        pass

    @staticmethod
    def get_name() -> str:
        return "PostgreSQL"

    @staticmethod
    def get_type() -> ModuleType:
        """
        获取模块类型
        """
        return ModuleType.Other

    @staticmethod
    def get_subtype() -> OtherModulesType:
        """
        获取模块子类型
        """
        return OtherModulesType.PostgreSQL

    @staticmethod
    def get_priority() -> int:
        """
        获取模块优先级,数字越小优先级越高,只有同一接口下优先级才生效
        """
        return 0

    def init_setting(self) -> Tuple[str, Union[str, bool]]:
        pass

    def stop(self) -> None:
        pass

    def test(self):
        """
        测试模块连接性
        """
        if settings.DB_TYPE != "postgresql":
            return None
        # 测试数据库连接
        db = SessionFactory()
        try:
            db.execute(text("SELECT 1"))
        except Exception as e:
            return False, f"PostgreSQL连接失败:{e}"
        finally:
            db.close()
        return True, ""

@@ -5,9 +5,10 @@ from qbittorrentapi import TorrentFilesList
|
||||
from torrentool.torrent import Torrent
|
||||
|
||||
from app import schemas
|
||||
from app.core.cache import FileCache
|
||||
from app.core.config import settings
|
||||
from app.core.metainfo import MetaInfo
|
||||
from app.core.event import eventmanager, Event
|
||||
from app.core.metainfo import MetaInfo
|
||||
from app.log import logger
|
||||
from app.modules import _ModuleBase, _DownloaderBase
|
||||
from app.modules.qbittorrent.qbittorrent import Qbittorrent
|
||||
@@ -92,12 +93,12 @@ class QbittorrentModule(_ModuleBase, _DownloaderBase[Qbittorrent]):
|
||||
logger.info(f"Qbittorrent下载器 {name} 连接断开,尝试重连 ...")
|
||||
server.reconnect()
|
||||
|
||||
def download(self, content: Union[Path, str], download_dir: Path, cookie: str,
|
||||
def download(self, content: Union[Path, str, bytes], download_dir: Path, cookie: str,
|
||||
episodes: Set[int] = None, category: Optional[str] = None, label: Optional[str] = None,
|
||||
downloader: Optional[str] = None) -> Optional[Tuple[Optional[str], Optional[str], Optional[str], str]]:
|
||||
"""
|
||||
根据种子文件,选择并添加下载任务
|
||||
:param content: 种子文件地址或者磁力链接
|
||||
:param content: 种子文件地址或者磁力链接或者种子内容
|
||||
:param download_dir: 下载目录
|
||||
:param cookie: cookie
|
||||
:param episodes: 需要下载的集数
|
||||
@@ -107,25 +108,38 @@ class QbittorrentModule(_ModuleBase, _DownloaderBase[Qbittorrent]):
|
||||
:return: 下载器名称、种子Hash、种子文件布局、错误原因
|
||||
"""
|
||||
|
||||
def __get_torrent_info() -> Tuple[str, int]:
|
||||
def __get_torrent_info() -> Tuple[Optional[Torrent], Optional[bytes]]:
|
||||
"""
|
||||
获取种子名称
|
||||
"""
|
||||
torrent_info, torrent_content = None, None
|
||||
try:
|
||||
if isinstance(content, Path):
|
||||
torrentinfo = Torrent.from_file(content)
|
||||
if content.exists():
|
||||
torrent_content = content.read_bytes()
|
||||
else:
|
||||
# 缓存处理器
|
||||
cache_backend = FileCache()
|
||||
# 读取缓存的种子文件
|
||||
torrent_content = cache_backend.get(content.as_posix(), region="torrents")
|
||||
else:
|
||||
torrentinfo = Torrent.from_string(content)
|
||||
return torrentinfo.name, torrentinfo.total_size
|
||||
torrent_content = content
|
||||
|
||||
if torrent_content:
|
||||
torrent_info = Torrent.from_string(torrent_content)
|
||||
|
||||
return torrent_info, torrent_content
|
||||
except Exception as e:
|
||||
logger.error(f"获取种子名称失败:{e}")
|
||||
return "", 0
|
||||
return None, None
|
||||
|
||||
if not content:
|
||||
return None, None, None, "下载内容为空"
|
||||
if isinstance(content, Path) and not content.exists():
|
||||
logger.error(f"种子文件不存在:{content}")
|
||||
return None, None, None, f"种子文件不存在:{content}"
|
||||
|
||||
# 读取种子的名称
|
||||
torrent, content = __get_torrent_info()
|
||||
if not torrent:
|
||||
return None, None, None, f"添加种子任务失败:无法读取种子文件"
|
||||
|
||||
# 获取下载器
|
||||
server: Qbittorrent = self.get_instance(downloader)
|
||||
@@ -144,7 +158,7 @@ class QbittorrentModule(_ModuleBase, _DownloaderBase[Qbittorrent]):
|
||||
is_paused = True if episodes else False
|
||||
# 添加任务
|
||||
state = server.add_torrent(
|
||||
content=content.read_bytes() if isinstance(content, Path) else content,
|
||||
content=content,
|
||||
download_dir=str(download_dir),
|
||||
is_paused=is_paused,
|
||||
tag=tags,
|
||||
@@ -157,10 +171,6 @@ class QbittorrentModule(_ModuleBase, _DownloaderBase[Qbittorrent]):
|
||||
torrent_layout = server.get_content_layout()
|
||||
|
||||
if not state:
|
||||
# 读取种子的名称
|
||||
torrent_name, torrent_size = __get_torrent_info()
|
||||
if not torrent_name:
|
||||
return None, None, None, f"添加种子任务失败:无法读取种子文件"
|
||||
# 查询所有下载器的种子
|
||||
torrents, error = server.get_torrents()
|
||||
if error:
|
||||
@@ -169,7 +179,8 @@ class QbittorrentModule(_ModuleBase, _DownloaderBase[Qbittorrent]):
|
||||
try:
|
||||
for torrent in torrents:
|
||||
# 名称与大小相等则认为是同一个种子
|
||||
if torrent.get("name") == torrent_name and torrent.get("total_size") == torrent_size:
|
||||
if torrent.get("name") == torrent.name \
|
||||
and torrent.get("total_size") == torrent.total_size:
|
||||
torrent_hash = torrent.get("hash")
|
||||
torrent_tags = [str(tag).strip() for tag in torrent.get("tags").split(',')]
|
||||
logger.warn(f"下载器中已存在该种子任务:{torrent_hash} - {torrent.get('name')}")
|
||||
@@ -326,7 +337,7 @@ class QbittorrentModule(_ModuleBase, _DownloaderBase[Qbittorrent]):
|
||||
del torrents
|
||||
else:
|
||||
return None
|
||||
return ret_torrents # noqa
|
||||
return ret_torrents # noqa
|
||||
|
||||
def transfer_completed(self, hashs: str, downloader: Optional[str] = None) -> None:
|
||||
"""
|
||||
|
||||
@@ -23,6 +23,7 @@ class Qbittorrent:
|
||||
"""
|
||||
若不设置参数,则创建配置文件设置的下载器
|
||||
"""
|
||||
self.qbc = None
|
||||
if host and port:
|
||||
self._host, self._port = host, port
|
||||
elif host:
|
||||
|
||||
app/modules/redis/__init__.py (new file, 60 lines)
@@ -0,0 +1,60 @@
from typing import Tuple, Union

from app.core.config import settings
from app.helper.redis import RedisHelper
from app.modules import _ModuleBase
from app.schemas.types import ModuleType, OtherModulesType


class RedisModule(_ModuleBase):
    """
    Redis 数据库模块
    """

    def init_module(self) -> None:
        pass

    @staticmethod
    def get_name() -> str:
        return "Redis缓存"

    @staticmethod
    def get_type() -> ModuleType:
        """
        获取模块类型
        """
        return ModuleType.Other

    @staticmethod
    def get_subtype() -> OtherModulesType:
        """
        获取模块子类型
        """
        return OtherModulesType.Redis

    @staticmethod
    def get_priority() -> int:
        """
        获取模块优先级,数字越小优先级越高,只有同一接口下优先级才生效
        """
        return 0

    def init_setting(self) -> Tuple[str, Union[str, bool]]:
        pass

    def stop(self) -> None:
        pass

    def test(self):
        """
        测试模块连接性
        """
        if settings.CACHE_BACKEND_TYPE != "redis":
            return None
        redis_helper = RedisHelper()
        try:
            if redis_helper.test():
                return True, ""
            return False, "Redis连接失败,请检查配置"
        finally:
            redis_helper.close()

@@ -76,7 +76,7 @@ class SlackModule(_ModuleBase, _MessageBase[Slack]):
|
||||
for name, client in self.get_instances().items():
|
||||
state = client.get_state()
|
||||
if not state:
|
||||
return False, f"Slack {name} 未就续"
|
||||
return False, f"Slack {name} 未就绪"
|
||||
return True, ""
|
||||
|
||||
def init_setting(self) -> Tuple[str, Union[str, bool]]:
|
||||
|
||||
@@ -63,19 +63,19 @@ class SubtitleModule(_ModuleBase):
|
||||
def test(self):
|
||||
pass
|
||||
|
||||
def download_added(self, context: Context, download_dir: Path, torrent_path: Path = None) -> None:
|
||||
def download_added(self, context: Context, download_dir: Path, torrent_content: Union[str, bytes] = None):
|
||||
"""
|
||||
添加下载任务成功后,从站点下载字幕,保存到下载目录
|
||||
:param context: 上下文,包括识别信息、媒体信息、种子信息
|
||||
:param download_dir: 下载目录
|
||||
:param torrent_path: 种子文件地址
|
||||
:param torrent_content: 种子内容,如果是种子文件,则为文件内容,否则为种子字符串
|
||||
:return: None,该方法可被多个模块同时处理
|
||||
"""
|
||||
if not settings.DOWNLOAD_SUBTITLE:
|
||||
return None
|
||||
return
|
||||
|
||||
# 没有种子文件不处理
|
||||
if not torrent_path:
|
||||
if not torrent_content:
|
||||
return
|
||||
|
||||
# 没有详情页不处理
|
||||
@@ -85,7 +85,7 @@ class SubtitleModule(_ModuleBase):
|
||||
# 字幕下载目录
|
||||
logger.info("开始从站点下载字幕:%s" % torrent.page_url)
|
||||
# 获取种子信息
|
||||
folder_name, _ = TorrentHelper.get_torrent_info(torrent_path)
|
||||
folder_name, _ = TorrentHelper().get_fileinfo_from_torrent_content(torrent_content)
|
||||
# 文件保存目录,如果是单文件种子,则folder_name是空,此时文件保存目录就是下载目录
|
||||
download_dir = download_dir / folder_name
|
||||
# 等待目录存在
|
||||
|
||||
@@ -70,7 +70,7 @@ class SynologyChatModule(_ModuleBase, _MessageBase[SynologyChat]):
|
||||
for name, client in self.get_instances().items():
|
||||
state = client.get_state()
|
||||
if not state:
|
||||
return False, f"Synology Chat {name} 未就续"
|
||||
return False, f"Synology Chat {name} 未就绪"
|
||||
return True, ""
|
||||
|
||||
def init_setting(self) -> Tuple[str, Union[str, bool]]:
|
||||
|
||||
@@ -81,7 +81,7 @@ class TelegramModule(_ModuleBase, _MessageBase[Telegram]):
|
||||
for name, client in self.get_instances().items():
|
||||
state = client.get_state()
|
||||
if not state:
|
||||
return False, f"Telegram {name} 未就续"
|
||||
return False, f"Telegram {name} 未就绪"
|
||||
return True, ""
|
||||
|
||||
def init_setting(self) -> Tuple[str, Union[str, bool]]:
|
||||
|
||||
@@ -127,7 +127,7 @@ class CategoryHelper(metaclass=WeakSingleton):
|
||||
continue
|
||||
elif attr == "production_countries":
|
||||
# 制片国家
|
||||
info_values = [str(val.get("iso_3166_1")).upper() for val in info_value] # type: ignore
|
||||
info_values = [str(val.get("iso_3166_1")).upper() for val in info_value] # type: ignore
|
||||
else:
|
||||
if isinstance(info_value, list):
|
||||
info_values = [str(val).upper() for val in info_value]
|
||||
|
||||
@@ -1,22 +1,17 @@
|
||||
import pickle
|
||||
import random
|
||||
import time
|
||||
import traceback
|
||||
from pathlib import Path
|
||||
from threading import RLock
|
||||
from typing import Optional
|
||||
|
||||
from app.core.cache import TTLCache
|
||||
from app.core.config import settings
|
||||
from app.core.meta import MetaBase
|
||||
from app.log import logger
|
||||
from app.utils.singleton import WeakSingleton
|
||||
from app.schemas.types import MediaType
|
||||
from app.utils.singleton import WeakSingleton
|
||||
|
||||
lock = RLock()
|
||||
|
||||
CACHE_EXPIRE_TIMESTAMP_STR = "cache_expire_timestamp"
|
||||
EXPIRE_TIMESTAMP = settings.CONF.meta
|
||||
|
||||
|
||||
class TmdbCache(metaclass=WeakSingleton):
|
||||
"""
|
||||
@@ -32,15 +27,23 @@ class TmdbCache(metaclass=WeakSingleton):
|
||||
_tmdb_cache_expire: bool = True
|
||||
|
||||
def __init__(self):
|
||||
self._meta_path = settings.TEMP_PATH / "__tmdb_cache__"
|
||||
self._meta_data = self.__load(self._meta_path)
|
||||
self.maxsize = settings.CONF.douban
|
||||
self.ttl = settings.CONF.meta
|
||||
self.region = "__tmdb_cache__"
|
||||
self._meta_filepath = settings.TEMP_PATH / self.region
|
||||
# 初始化缓存
|
||||
self._cache = TTLCache(region=self.region, maxsize=self.maxsize, ttl=self.ttl)
|
||||
# 非Redis加载本地缓存数据
|
||||
if not self._cache.is_redis():
|
||||
for key, value in self.__load(self._meta_filepath).items():
|
||||
self._cache.set(key, value)
|
||||
|
||||
def clear(self):
|
||||
"""
|
||||
清空所有TMDB缓存
|
||||
"""
|
||||
with lock:
|
||||
self._meta_data = {}
|
||||
self._cache.clear()
|
||||
|
||||
@staticmethod
|
||||
def __get_key(meta: MetaBase) -> str:
|
||||
@@ -54,16 +57,9 @@ class TmdbCache(metaclass=WeakSingleton):
|
||||
根据KEY值获取缓存值
|
||||
"""
|
||||
key = self.__get_key(meta)
|
||||
|
||||
with lock:
|
||||
info: dict = self._meta_data.get(key)
|
||||
if info:
|
||||
expire = info.get(CACHE_EXPIRE_TIMESTAMP_STR)
|
||||
if not expire or int(time.time()) < expire:
|
||||
info[CACHE_EXPIRE_TIMESTAMP_STR] = int(time.time()) + EXPIRE_TIMESTAMP
|
||||
self._meta_data[key] = info
|
||||
elif expire and self._tmdb_cache_expire:
|
||||
self.delete(key)
|
||||
return info or {}
|
||||
return self._cache.get(key) or {}
|
||||
|
||||
def delete(self, key: str) -> dict:
|
||||
"""
|
||||
@@ -72,38 +68,26 @@ class TmdbCache(metaclass=WeakSingleton):
|
||||
@return: 被删除的缓存内容
|
||||
"""
|
||||
with lock:
|
||||
return self._meta_data.pop(key, {})
|
||||
|
||||
def delete_by_tmdbid(self, tmdbid: int) -> None:
|
||||
"""
|
||||
清空对应TMDBID的所有缓存记录,以强制更新TMDB中最新的数据
|
||||
"""
|
||||
for key in list(self._meta_data):
|
||||
if self._meta_data.get(key, {}).get("id") == tmdbid:
|
||||
with lock:
|
||||
self._meta_data.pop(key)
|
||||
|
||||
def delete_unknown(self) -> None:
|
||||
"""
|
||||
清除未识别的缓存记录,以便重新搜索TMDB
|
||||
"""
|
||||
for key in list(self._meta_data):
|
||||
if self._meta_data.get(key, {}).get("id") == 0:
|
||||
with lock:
|
||||
self._meta_data.pop(key)
|
||||
redis_data = self._cache.get(key)
|
||||
if redis_data:
|
||||
self._cache.delete(key)
|
||||
return redis_data
|
||||
return {}
|
||||
|
||||
def modify(self, key: str, title: str) -> dict:
|
||||
"""
|
||||
删除缓存信息
|
||||
修改缓存信息
|
||||
@param key: 缓存key
|
||||
@param title: 标题
|
||||
@return: 被修改后缓存内容
|
||||
"""
|
||||
with lock:
|
||||
if self._meta_data.get(key):
|
||||
self._meta_data[key]['title'] = title
|
||||
self._meta_data[key][CACHE_EXPIRE_TIMESTAMP_STR] = int(time.time()) + EXPIRE_TIMESTAMP
|
||||
return self._meta_data.get(key)
|
||||
redis_data = self._cache.get(key)
|
||||
if redis_data:
|
||||
redis_data['title'] = title
|
||||
self._cache.set(key, redis_data)
|
||||
return redis_data
|
||||
return {}
|
||||
|
||||
@staticmethod
|
||||
def __load(path: Path) -> dict:
|
||||
@@ -115,106 +99,61 @@ class TmdbCache(metaclass=WeakSingleton):
|
||||
with open(path, 'rb') as f:
|
||||
data = pickle.load(f)
|
||||
return data
|
||||
return {}
|
||||
except Exception as e:
|
||||
logger.error(f'加载缓存失败:{str(e)} - {traceback.format_exc()}')
|
||||
return {}
|
||||
return {}
|
||||
|
||||
def update(self, meta: MetaBase, info: dict) -> None:
|
||||
"""
|
||||
新增或更新缓存条目
|
||||
"""
|
||||
with lock:
|
||||
if info:
|
||||
# 缓存标题
|
||||
cache_title = info.get("title") \
|
||||
if info.get("media_type") == MediaType.MOVIE else info.get("name")
|
||||
# 缓存年份
|
||||
cache_year = info.get('release_date') \
|
||||
if info.get("media_type") == MediaType.MOVIE else info.get('first_air_date')
|
||||
if cache_year:
|
||||
cache_year = cache_year[:4]
|
||||
self._meta_data[self.__get_key(meta)] = {
|
||||
key = self.__get_key(meta)
|
||||
if info:
|
||||
# 缓存标题
|
||||
cache_title = info.get("title") \
|
||||
if info.get("media_type") == MediaType.MOVIE else info.get("name")
|
||||
# 缓存年份
|
||||
cache_year = info.get('release_date') \
|
||||
if info.get("media_type") == MediaType.MOVIE else info.get('first_air_date')
|
||||
if cache_year:
|
||||
cache_year = cache_year[:4]
|
||||
|
||||
with lock:
|
||||
# 缓存数据
|
||||
cache_data = {
|
||||
"id": info.get("id"),
|
||||
"type": info.get("media_type"),
|
||||
"year": cache_year,
|
||||
"title": cache_title,
|
||||
"poster_path": info.get("poster_path"),
|
||||
"backdrop_path": info.get("backdrop_path"),
|
||||
CACHE_EXPIRE_TIMESTAMP_STR: int(time.time()) + EXPIRE_TIMESTAMP
|
||||
"backdrop_path": info.get("backdrop_path")
|
||||
}
|
||||
elif info is not None:
|
||||
# None时不缓存,此时代表网络错误,允许重复请求
|
||||
self._meta_data[self.__get_key(meta)] = {'id': 0}
|
||||
self._cache.set(key, cache_data)
|
||||
|
||||
elif info is not None:
|
||||
# None时不缓存,此时代表网络错误,允许重复请求
|
||||
with lock:
|
||||
self._cache.set(key, {"id": 0})
|
||||
|
||||
def save(self, force: bool = False) -> None:
|
||||
"""
|
||||
保存缓存数据到文件
|
||||
"""
|
||||
# Redis不需要保存到本地文件
|
||||
if self._cache.is_redis():
|
||||
return
|
||||
|
||||
meta_data = self.__load(self._meta_path)
|
||||
new_meta_data = {k: v for k, v in self._meta_data.items() if v.get("id")}
|
||||
# Redis不可用时,保存到本地文件
|
||||
meta_data = self.__load(self._meta_filepath)
|
||||
# 当前缓存,去除无法识别
|
||||
new_meta_data = {k: v for k, v in self._cache.items() if v.get("id")}
|
||||
|
||||
if not force \
|
||||
and not self._random_sample(new_meta_data) \
|
||||
and meta_data.keys() == new_meta_data.keys():
|
||||
return
|
||||
|
||||
with open(self._meta_path, 'wb') as f:
|
||||
with open(self._meta_filepath, 'wb') as f:
|
||||
pickle.dump(new_meta_data, f, pickle.HIGHEST_PROTOCOL) # type: ignore
|
||||
|
||||
def _random_sample(self, new_meta_data: dict) -> bool:
|
||||
"""
|
||||
采样分析是否需要保存
|
||||
"""
|
||||
ret = False
|
||||
if len(new_meta_data) < 25:
|
||||
keys = list(new_meta_data.keys())
|
||||
for k in keys:
|
||||
info = new_meta_data.get(k)
|
||||
expire = info.get(CACHE_EXPIRE_TIMESTAMP_STR)
|
||||
if not expire:
|
||||
ret = True
|
||||
info[CACHE_EXPIRE_TIMESTAMP_STR] = int(time.time()) + EXPIRE_TIMESTAMP
|
||||
elif int(time.time()) >= expire:
|
||||
ret = True
|
||||
if self._tmdb_cache_expire:
|
||||
new_meta_data.pop(k)
|
||||
else:
|
||||
count = 0
|
||||
keys = random.sample(sorted(new_meta_data.keys()), 25)
|
||||
for k in keys:
|
||||
info = new_meta_data.get(k)
|
||||
expire = info.get(CACHE_EXPIRE_TIMESTAMP_STR)
|
||||
if not expire:
|
||||
ret = True
|
||||
info[CACHE_EXPIRE_TIMESTAMP_STR] = int(time.time()) + EXPIRE_TIMESTAMP
|
||||
elif int(time.time()) >= expire:
|
||||
ret = True
|
||||
if self._tmdb_cache_expire:
|
||||
new_meta_data.pop(k)
|
||||
count += 1
|
||||
if count >= 5:
|
||||
ret |= self._random_sample(new_meta_data)
|
||||
return ret
|
||||
|
||||
def get_title(self, key: str) -> Optional[str]:
|
||||
"""
|
||||
获取缓存的标题
|
||||
"""
|
||||
cache_media_info = self._meta_data.get(key)
|
||||
if not cache_media_info or not cache_media_info.get("id"):
|
||||
return None
|
||||
return cache_media_info.get("title")
|
||||
|
||||
def set_title(self, key: str, cn_title: str) -> None:
|
||||
"""
|
||||
重新设置缓存标题
|
||||
"""
|
||||
cache_media_info = self._meta_data.get(key)
|
||||
if not cache_media_info:
|
||||
return
|
||||
self._meta_data[key]['title'] = cn_title
|
||||
|
||||
def __del__(self):
|
||||
self.save()
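
For context on the logic above: every cached entry carries an absolute expiry timestamp, and save() rewrites the local pickle only when forced, when the key set changed, or when a random sample finds entries that are missing the timestamp or already past it. A simplified sketch of that per-entry decision (EXPIRE_TIMESTAMP really comes from settings.CONF.meta; the constant below is only a placeholder):

```python
import time

CACHE_EXPIRE_TIMESTAMP_STR = "cache_expire_timestamp"
EXPIRE_TIMESTAMP = 7 * 24 * 3600   # placeholder; the real value is settings.CONF.meta

def entry_triggers_save(entry: dict) -> bool:
    """Mirror of the per-entry check in _random_sample(): stamp unstamped entries, flag stale ones."""
    now = int(time.time())
    expire = entry.get(CACHE_EXPIRE_TIMESTAMP_STR)
    if not expire:
        # never stamped: stamp it and force a save so the timestamp gets persisted
        entry[CACHE_EXPIRE_TIMESTAMP_STR] = now + EXPIRE_TIMESTAMP
        return True
    if now >= expire:
        # past its expiry: force a save (the caller may also drop the entry)
        return True
    return False
```
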
|
||||
|
||||
@@ -348,9 +348,13 @@ class TmdbApi:
|
||||
处理网站搜索得到的链接
|
||||
"""
|
||||
if len(tmdb_links) == 1:
|
||||
tmdbid = self._parse_tmdb_id_from_link(tmdb_links[0])
|
||||
if not tmdbid:
|
||||
logger.warn(f"无法从链接解析TMDBID:{tmdb_links[0]}")
|
||||
return {}
|
||||
tmdbinfo = get_info_func(
|
||||
mtype=MediaType.TV if tmdb_links[0].startswith("/tv") else MediaType.MOVIE,
|
||||
tmdbid=tmdb_links[0].split("/")[-1])
|
||||
tmdbid=tmdbid)
|
||||
if tmdbinfo:
|
||||
if mtype == MediaType.TV and tmdbinfo.get('media_type') != MediaType.TV:
|
||||
return {}
|
||||
@@ -368,9 +372,13 @@ class TmdbApi:
|
||||
处理网站搜索得到的链接(异步版本)
|
||||
"""
|
||||
if len(tmdb_links) == 1:
|
||||
tmdbid = self._parse_tmdb_id_from_link(tmdb_links[0])
|
||||
if not tmdbid:
|
||||
logger.warn(f"无法从链接解析TMDBID:{tmdb_links[0]}")
|
||||
return {}
|
||||
tmdbinfo = await self.async_get_info(
|
||||
mtype=MediaType.TV if tmdb_links[0].startswith("/tv") else MediaType.MOVIE,
|
||||
tmdbid=int(tmdb_links[0].split("/")[-1]))
|
||||
tmdbid=tmdbid)
|
||||
if tmdbinfo:
|
||||
if mtype == MediaType.TV and tmdbinfo.get('media_type') != MediaType.TV:
|
||||
return {}
|
||||
@@ -382,6 +390,23 @@ class TmdbApi:
|
||||
logger.info("%s TMDB网站未查询到媒体信息!" % name)
|
||||
return {}
|
||||
|
||||
@staticmethod
|
||||
def _parse_tmdb_id_from_link(link: str) -> Optional[int]:
|
||||
"""
|
||||
从 TMDB 相对链接中解析数值 ID。
|
||||
兼容格式:/movie/1195631-william-tell、/tv/65942-re、/tv/79744-the-rookie
|
||||
"""
|
||||
if not link:
|
||||
return None
|
||||
match = re.match(r"^/[^/]+/(\d+)", link)
|
||||
if match:
|
||||
try:
|
||||
return int(match.group(1))
|
||||
except Exception as err:
|
||||
logger.debug(f"解析TMDBID失败:{str(err)} - {traceback.format_exc()}")
|
||||
return None
|
||||
return None
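
A quick sanity check of the pattern above: the regex only consumes the numeric segment that follows the type prefix, so the slug after the dash is ignored. A minimal standalone sketch (the helper name here is illustrative, not part of TmdbApi) using the exact regex from this method:

```python
import re

def parse_tmdb_id(link: str):
    """Extract the numeric TMDB id from a relative link such as /movie/1195631-william-tell."""
    match = re.match(r"^/[^/]+/(\d+)", link or "")
    return int(match.group(1)) if match else None

assert parse_tmdb_id("/movie/1195631-william-tell") == 1195631
assert parse_tmdb_id("/tv/79744-the-rookie") == 79744
assert parse_tmdb_id("/tv/65942-re") == 65942
assert parse_tmdb_id("/person/no-numeric-id") is None   # no digits after the prefix -> None
```
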
|
||||
|
||||
@staticmethod
|
||||
def __get_names(tmdb_info: dict) -> List[str]:
|
||||
"""
|
||||
@@ -808,7 +833,6 @@ class TmdbApi:
|
||||
return None
|
||||
# dict[地区:分级]
|
||||
ratings = {}
|
||||
results = []
|
||||
if results := (tmdb_info.get("release_dates") or {}).get("results"):
|
||||
"""
|
||||
[
|
||||
|
||||
@@ -5,9 +5,10 @@ from torrentool.torrent import Torrent
|
||||
from transmission_rpc import File
|
||||
|
||||
from app import schemas
|
||||
from app.core.cache import FileCache
|
||||
from app.core.config import settings
|
||||
from app.core.metainfo import MetaInfo
|
||||
from app.core.event import eventmanager, Event
|
||||
from app.core.metainfo import MetaInfo
|
||||
from app.log import logger
|
||||
from app.modules import _ModuleBase, _DownloaderBase
|
||||
from app.modules.transmission.transmission import Transmission
|
||||
@@ -93,12 +94,12 @@ class TransmissionModule(_ModuleBase, _DownloaderBase[Transmission]):
|
||||
logger.info(f"Transmission下载器 {name} 连接断开,尝试重连 ...")
|
||||
server.reconnect()
|
||||
|
||||
def download(self, content: Union[Path, str], download_dir: Path, cookie: str,
|
||||
def download(self, content: Union[Path, str, bytes], download_dir: Path, cookie: str,
|
||||
episodes: Set[int] = None, category: Optional[str] = None, label: Optional[str] = None,
|
||||
downloader: Optional[str] = None) -> Optional[Tuple[Optional[str], Optional[str], Optional[str], str]]:
|
||||
"""
|
||||
根据种子文件,选择并添加下载任务
|
||||
:param content: 种子文件地址或者磁力链接
|
||||
:param content: 种子文件地址或者磁力链接或种子内容
|
||||
:param download_dir: 下载目录
|
||||
:param cookie: cookie
|
||||
:param episodes: 需要下载的集数
|
||||
@@ -108,24 +109,38 @@ class TransmissionModule(_ModuleBase, _DownloaderBase[Transmission]):
|
||||
:return: 下载器名称、种子Hash、种子文件布局、错误原因
|
||||
"""
|
||||
|
||||
def __get_torrent_info() -> Tuple[str, int]:
|
||||
def __get_torrent_info() -> Tuple[Optional[Torrent], Optional[bytes]]:
|
||||
"""
|
||||
获取种子名称
|
||||
"""
|
||||
torrent_info, torrent_content = None, None
|
||||
try:
|
||||
if isinstance(content, Path):
|
||||
torrentinfo = Torrent.from_file(content)
|
||||
if content.exists():
|
||||
torrent_content = content.read_bytes()
|
||||
else:
|
||||
# 缓存处理器
|
||||
cache_backend = FileCache()
|
||||
# 读取缓存的种子文件
|
||||
torrent_content = cache_backend.get(content.as_posix(), region="torrents")
|
||||
else:
|
||||
torrentinfo = Torrent.from_string(content)
|
||||
return torrentinfo.name, torrentinfo.total_size
|
||||
torrent_content = content
|
||||
|
||||
if torrent_content:
|
||||
torrent_info = Torrent.from_string(torrent_content)
|
||||
|
||||
return torrent_info, torrent_content
|
||||
except Exception as e:
|
||||
logger.error(f"获取种子名称失败:{e}")
|
||||
return "", 0
|
||||
return None, None
|
||||
|
||||
if not content:
|
||||
return None, None, None, "下载内容为空"
|
||||
if isinstance(content, Path) and not content.exists():
|
||||
return None, None, None, f"种子文件不存在:{content}"
|
||||
|
||||
# 读取种子的名称
|
||||
torrent, content = __get_torrent_info()
|
||||
if not torrent:
|
||||
return None, None, None, f"添加种子任务失败:无法读取种子文件"
|
||||
|
||||
# 获取下载器
|
||||
server: Transmission = self.get_instance(downloader)
|
||||
@@ -144,7 +159,7 @@ class TransmissionModule(_ModuleBase, _DownloaderBase[Transmission]):
|
||||
labels = None
|
||||
# 添加任务
|
||||
torrent = server.add_torrent(
|
||||
content=content.read_bytes() if isinstance(content, Path) else content,
|
||||
content=content,
|
||||
download_dir=str(download_dir),
|
||||
is_paused=is_paused,
|
||||
labels=labels,
|
||||
@@ -154,10 +169,6 @@ class TransmissionModule(_ModuleBase, _DownloaderBase[Transmission]):
|
||||
torrent_layout = "Original"
|
||||
|
||||
if not torrent:
|
||||
# 读取种子的名称
|
||||
torrent_name, torrent_size = __get_torrent_info()
|
||||
if not torrent_name:
|
||||
return None, None, None, f"添加种子任务失败:无法读取种子文件"
|
||||
# 查询所有下载器的种子
|
||||
torrents, error = server.get_torrents()
|
||||
if error:
|
||||
@@ -166,7 +177,7 @@ class TransmissionModule(_ModuleBase, _DownloaderBase[Transmission]):
|
||||
try:
|
||||
for torrent in torrents:
|
||||
# 名称与大小相等则认为是同一个种子
|
||||
if torrent.name == torrent_name and torrent.total_size == torrent_size:
|
||||
if torrent.name == torrent.name and torrent.total_size == torrent.total_size:
|
||||
torrent_hash = torrent.hashString
|
||||
logger.warn(f"下载器中已存在该种子任务:{torrent_hash} - {torrent.name}")
|
||||
# 给种子打上标签
|
||||
@@ -314,7 +325,7 @@ class TransmissionModule(_ModuleBase, _DownloaderBase[Transmission]):
|
||||
del torrents
|
||||
else:
|
||||
return None
|
||||
return ret_torrents # noqa
|
||||
return ret_torrents # noqa
|
||||
|
||||
def transfer_completed(self, hashs: str, downloader: Optional[str] = None) -> None:
|
||||
"""
|
||||
|
||||
@@ -24,6 +24,7 @@ class Transmission:
|
||||
"""
|
||||
若不设置参数,则创建配置文件设置的下载器
|
||||
"""
|
||||
self.trc = None
|
||||
if host and port:
|
||||
self._protocol, self._host, self._port = kwargs.get("protocol", "http"), host, port
|
||||
elif host:
|
||||
|
||||
@@ -71,7 +71,7 @@ class VoceChatModule(_ModuleBase, _MessageBase[VoceChat]):
|
||||
for name, client in self.get_instances().items():
|
||||
state = client.get_state()
|
||||
if not state:
|
||||
return False, f"VoceChat {name} 未就续"
|
||||
return False, f"VoceChat {name} 未就绪"
|
||||
return True, ""
|
||||
|
||||
def init_setting(self) -> Tuple[str, Union[str, bool]]:
|
||||
|
||||
@@ -75,7 +75,7 @@ class WechatModule(_ModuleBase, _MessageBase[WeChat]):
|
||||
for name, client in self.get_instances().items():
|
||||
state = client.get_state()
|
||||
if not state:
|
||||
return False, f"企业微信 {name} 未就续"
|
||||
return False, f"企业微信 {name} 未就绪"
|
||||
return True, ""
|
||||
|
||||
def init_setting(self) -> Tuple[str, Union[str, bool]]:
|
||||
|
||||
@@ -10,7 +10,7 @@ from threading import Lock
|
||||
from typing import Any, Optional, Dict, List
|
||||
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from cachetools import TTLCache
|
||||
from app.core.cache import TTLCache
|
||||
from watchdog.events import FileSystemEventHandler, FileSystemMovedEvent, FileSystemEvent
|
||||
from watchdog.observers.polling import PollingObserver
|
||||
|
||||
@@ -72,7 +72,7 @@ class Monitor(metaclass=Singleton):
|
||||
# 存储过照间隔(分钟)
|
||||
self._snapshot_interval = 5
|
||||
# TTL缓存,10秒钟有效
|
||||
self._cache = TTLCache(maxsize=1024, ttl=10)
|
||||
self._cache = TTLCache(region="monitor", maxsize=1024, ttl=10)
|
||||
# 监控的文件扩展名
|
||||
self.all_exts = settings.RMT_MEDIAEXT
|
||||
# 初始化快照缓存目录
|
||||
@@ -768,7 +768,7 @@ class Monitor(metaclass=Singleton):
|
||||
|
||||
def stop(self):
|
||||
"""
|
||||
退出插件
|
||||
退出监控
|
||||
"""
|
||||
self._event.set()
|
||||
if self._observers:
|
||||
@@ -791,4 +791,6 @@ class Monitor(metaclass=Singleton):
|
||||
except Exception as e:
|
||||
logger.error(f"停止定时服务出现了错误:{e}")
|
||||
self._scheduler = None
|
||||
if self._cache:
|
||||
self._cache.close()
|
||||
self._event.clear()
|
||||
|
||||
@@ -18,7 +18,7 @@ from app.chain.subscribe import SubscribeChain
|
||||
from app.chain.transfer import TransferChain
|
||||
from app.chain.workflow import WorkflowChain
|
||||
from app.core.config import settings
|
||||
from app.core.event import EventManager, eventmanager, Event
|
||||
from app.core.event import eventmanager, Event
|
||||
from app.core.plugin import PluginManager
|
||||
from app.db.systemconfig_oper import SystemConfigOper
|
||||
from app.helper.message import MessageHelper
|
||||
@@ -390,7 +390,7 @@ class Scheduler(metaclass=Singleton):
|
||||
if not job:
|
||||
return None
|
||||
if job.get("running"):
|
||||
logger.warning(f"定时任务 {job_id} - {job.get("name")} 正在运行 ...")
|
||||
logger.warning(f"定时任务 {job_id} - {job.get('name')} 正在运行 ...")
|
||||
return None
|
||||
self._jobs[job_id]["running"] = True
|
||||
return job
|
||||
|
||||
@@ -108,7 +108,7 @@ class TransferInfo(BaseModel):
|
||||
success: bool = True
|
||||
# 整理⼁路径
|
||||
fileitem: Optional[FileItem] = None
|
||||
# 转移后的目录项
|
||||
# 转移后的目录项,媒体的根目录
|
||||
target_diritem: Optional[FileItem] = None
|
||||
# 转移后路径
|
||||
target_item: Optional[FileItem] = None
|
||||
|
||||
@@ -294,20 +294,6 @@ class MediaRecognizeType(Enum):
|
||||
Bangumi = "Bangumi"
|
||||
|
||||
|
||||
# 其他杂项模块类型
|
||||
class OtherModulesType(Enum):
|
||||
# 字幕
|
||||
Subtitle = "站点字幕"
|
||||
# Fanart
|
||||
Fanart = "Fanart"
|
||||
# 文件整理
|
||||
FileManager = "文件整理"
|
||||
# 过滤器
|
||||
Filter = "过滤器"
|
||||
# 站点索引
|
||||
Indexer = "站点索引"
|
||||
|
||||
|
||||
# 用户配置Key字典
|
||||
class UserConfigKey(Enum):
|
||||
# 监控面板
|
||||
@@ -339,3 +325,21 @@ class ModuleType(Enum):
|
||||
Indexer = "indexer"
|
||||
# 其它
|
||||
Other = "other"
|
||||
|
||||
|
||||
# 其他杂项模块类型
|
||||
class OtherModulesType(Enum):
|
||||
# 字幕
|
||||
Subtitle = "站点字幕"
|
||||
# Fanart
|
||||
Fanart = "Fanart"
|
||||
# 文件整理
|
||||
FileManager = "文件整理"
|
||||
# 过滤器
|
||||
Filter = "过滤器"
|
||||
# 站点索引
|
||||
Indexer = "站点索引"
|
||||
# PostgreSQL
|
||||
PostgreSQL = "PostgreSQL"
|
||||
# Redis
|
||||
Redis = "Redis"
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
import sys
|
||||
|
||||
from app.helper.redis import RedisHelper, AsyncRedisHelper
|
||||
|
||||
# SitesHelper涉及资源包拉取,提前引入并容错提示
|
||||
try:
|
||||
from app.helper.sites import SitesHelper # noqa
|
||||
@@ -12,14 +14,13 @@ except ImportError as e:
|
||||
from app.utils.system import SystemUtils
|
||||
from app.log import logger
|
||||
from app.core.config import settings
|
||||
from app.core.cache import close_cache
|
||||
from app.core.module import ModuleManager
|
||||
from app.core.event import EventManager
|
||||
from app.helper.thread import ThreadHelper
|
||||
from app.helper.display import DisplayHelper
|
||||
from app.helper.doh import DohHelper
|
||||
from app.helper.resource import ResourceHelper
|
||||
from app.helper.message import MessageHelper
|
||||
from app.helper.message import MessageHelper, stop_message
|
||||
from app.helper.subscribe import SubscribeHelper
|
||||
from app.db import close_database
|
||||
from app.db.systemconfig_oper import SystemConfigOper
|
||||
@@ -68,9 +69,9 @@ def clear_temp():
|
||||
清理临时文件和图片缓存
|
||||
"""
|
||||
# 清理临时目录中3天前的文件
|
||||
SystemUtils.clear(settings.TEMP_PATH, days=3)
|
||||
SystemUtils.clear(settings.TEMP_PATH, days=settings.TEMP_FILE_DAYS)
|
||||
# 清理图片缓存目录中7天前的文件
|
||||
SystemUtils.clear(settings.CACHE_PATH / "images", days=7)
|
||||
SystemUtils.clear(settings.CACHE_PATH / "images", days=settings.GLOBAL_IMAGE_CACHE_DAYS)
|
||||
|
||||
|
||||
def user_auth():
|
||||
@@ -117,8 +118,11 @@ async def stop_modules():
|
||||
DisplayHelper().stop()
|
||||
# 停止线程池
|
||||
ThreadHelper().shutdown()
|
||||
# 停止缓存连接
|
||||
close_cache()
|
||||
# 停止消息服务
|
||||
stop_message()
|
||||
# 关闭Redis缓存连接
|
||||
RedisHelper().close()
|
||||
await AsyncRedisHelper().close()
|
||||
# 停止数据库连接
|
||||
await close_database()
|
||||
# 停止前端服务
|
||||
|
||||
@@ -2,7 +2,7 @@ import re
|
||||
import sys
|
||||
from contextlib import contextmanager, asynccontextmanager
|
||||
from pathlib import Path
|
||||
from typing import Any, Optional, Union
|
||||
from typing import Any, Optional, Tuple, Union
|
||||
|
||||
import chardet
|
||||
import httpx
|
||||
@@ -395,7 +395,7 @@ class RequestUtils:
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def parse_cache_control(header: str) -> (str, int):
|
||||
def parse_cache_control(header: str) -> Tuple[str, Optional[int]]:
|
||||
"""
|
||||
解析 Cache-Control 头,返回 cache_directive 和 max_age
|
||||
:param header: Cache-Control 头部的字符串
|
||||
|
||||
@@ -3,7 +3,7 @@ from pathlib import Path
|
||||
from typing import List, Optional, Set, Union
|
||||
from urllib.parse import quote, urlparse
|
||||
|
||||
from aiopath import AsyncPath
|
||||
from anyio import Path as AsyncPath
|
||||
|
||||
from app.log import logger
|
||||
|
||||
|
||||
@@ -1,17 +1 @@
|
||||
#######################################################################################################
|
||||
# V2版本中大部分设置可通过后台设置界面进行配置,本文件仅展示界面无法配置的项, 这些项同样可以通过环境变量进行设置 #
|
||||
#######################################################################################################
|
||||
# 【*】API监听地址(注意不是前端访问地址)
|
||||
HOST=0.0.0.0
|
||||
# 【*】超级管理员,设置后一但重启将固化到数据库中,修改将无效(初始化超级管理员密码仅会生成一次,请在日志中查看并自行登录系统修改)
|
||||
SUPERUSER=admin
|
||||
# 开发调试模式,仅开发人员使用,打开后将停止后台服务
|
||||
DEV=false
|
||||
# 为指定字幕添加.default后缀设置为默认字幕,支持为'zh-cn','zh-tw','eng'添加默认字幕,未定义或设置为None则不添加
|
||||
DEFAULT_SUB=zh-cn
|
||||
# 是否启用内存监控,开启后将定期生成内存快照文件
|
||||
MEMORY_ANALYSIS=false
|
||||
# 内存快照间隔(分钟)
|
||||
MEMORY_SNAPSHOT_INTERVAL=30
|
||||
# 保留的内存快照文件数量
|
||||
MEMORY_SNAPSHOT_KEEP_COUNT=20
|
||||
# MoviePilot V2版本,大部分设置可通过后台设置界面进行配置,仅个别配置需要通过环境变量或本配置文件配置,所有可配置项参考:https://wiki.movie-pilot.org/zh/configuration
|
||||
@@ -40,13 +40,25 @@ def run_migrations_offline() -> None:
|
||||
|
||||
"""
|
||||
url = config.get_main_option("sqlalchemy.url")
|
||||
context.configure(
|
||||
url=url,
|
||||
target_metadata=target_metadata,
|
||||
literal_binds=True,
|
||||
dialect_opts={"paramstyle": "named"},
|
||||
render_as_batch=True
|
||||
)
|
||||
|
||||
# 根据数据库类型配置不同的参数
|
||||
if url and "postgresql" in url:
|
||||
# PostgreSQL配置
|
||||
context.configure(
|
||||
url=url,
|
||||
target_metadata=target_metadata,
|
||||
literal_binds=True,
|
||||
dialect_opts={"paramstyle": "named"},
|
||||
)
|
||||
else:
|
||||
# SQLite配置
|
||||
context.configure(
|
||||
url=url,
|
||||
target_metadata=target_metadata,
|
||||
literal_binds=True,
|
||||
dialect_opts={"paramstyle": "named"},
|
||||
render_as_batch=True
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
@@ -66,9 +78,22 @@ def run_migrations_online() -> None:
|
||||
)
|
||||
|
||||
with connectable.connect() as connection:
|
||||
context.configure(
|
||||
connection=connection, target_metadata=target_metadata
|
||||
)
|
||||
url = config.get_main_option("sqlalchemy.url")
|
||||
|
||||
# 根据数据库类型配置不同的参数
|
||||
if url and "postgresql" in url:
|
||||
# PostgreSQL配置
|
||||
context.configure(
|
||||
connection=connection,
|
||||
target_metadata=target_metadata
|
||||
)
|
||||
else:
|
||||
# SQLite配置
|
||||
context.configure(
|
||||
connection=connection,
|
||||
target_metadata=target_metadata,
|
||||
render_as_batch=True
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
database/versions/5b3355c964bb_2_2_0.py (new file, 117 lines)
@@ -0,0 +1,117 @@
|
||||
"""2.2.0
|
||||
|
||||
Revision ID: 5b3355c964bb
|
||||
Revises: d58298a0879f
|
||||
Create Date: 2025-08-19 12:27:08.451371
|
||||
|
||||
"""
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
from app.core.config import settings
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '5b3355c964bb'
|
||||
down_revision = 'd58298a0879f'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
if settings.DB_TYPE.lower() == "postgresql":
|
||||
# 将SQLite的Sequence转换为PostgreSQL的Identity
|
||||
fix_postgresql_sequences()
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def fix_postgresql_sequences():
|
||||
"""
|
||||
修复PostgreSQL数据库中的序列问题
|
||||
将SQLite迁移过来的Sequence转换为PostgreSQL的Identity
|
||||
"""
|
||||
connection = op.get_bind()
|
||||
|
||||
# 获取所有表名
|
||||
result = connection.execute(sa.text("""
|
||||
SELECT table_name
|
||||
FROM information_schema.tables
|
||||
WHERE table_schema = 'public'
|
||||
AND table_type = 'BASE TABLE'
|
||||
"""))
|
||||
tables = [row[0] for row in result.fetchall()]
|
||||
|
||||
print(f"发现 {len(tables)} 个表需要检查序列")
|
||||
|
||||
for table_name in tables:
|
||||
fix_table_sequence(connection, table_name)
|
||||
|
||||
|
||||
def fix_table_sequence(connection, table_name):
|
||||
"""
|
||||
修复单个表的序列
|
||||
"""
|
||||
try:
|
||||
# 跳过alembic_version表,它没有id列
|
||||
if table_name == 'alembic_version':
|
||||
print(f"跳过表 {table_name},这是Alembic版本表")
|
||||
return
|
||||
|
||||
# 检查表是否有id列
|
||||
result = connection.execute(sa.text(f"""
|
||||
SELECT is_identity, column_default
|
||||
FROM information_schema.columns
|
||||
WHERE table_name = '{table_name}'
|
||||
AND column_name = 'id'
|
||||
"""))
|
||||
|
||||
id_column = result.fetchone()
|
||||
if not id_column:
|
||||
print(f"表 {table_name} 没有id列,跳过")
|
||||
return
|
||||
|
||||
is_identity, column_default = id_column
|
||||
|
||||
# 检查是否已经是Identity类型
|
||||
if is_identity == 'YES' or (column_default and 'GENERATED BY DEFAULT AS IDENTITY' in column_default):
|
||||
print(f"表 {table_name} 的id列已经是Identity类型,跳过")
|
||||
return
|
||||
|
||||
# 检查是否有序列
|
||||
print(f"表 {table_name} 存在序列,需要修复")
|
||||
convert_to_identity(connection, table_name)
|
||||
|
||||
except Exception as e:
|
||||
print(f"修复表 {table_name} 序列时出错: {e}")
|
||||
# 回滚当前事务,避免影响后续操作
|
||||
connection.rollback()
|
||||
|
||||
|
||||
def convert_to_identity(connection, table_name):
|
||||
"""
|
||||
将序列转换为Identity,保持原有约束不变
|
||||
"""
|
||||
try:
|
||||
# 获取当前序列的最大值
|
||||
result = connection.execute(sa.text(f"""
|
||||
SELECT COALESCE(MAX(id), 0) + 1 as next_value
|
||||
FROM "{table_name}"
|
||||
"""))
|
||||
next_value = result.fetchone()[0]
|
||||
|
||||
# 直接修改列属性,添加Identity,保持其他约束不变
|
||||
# 这种方式不会删除主键约束和索引
|
||||
connection.execute(sa.text(f"""
|
||||
ALTER TABLE "{table_name}"
|
||||
ALTER COLUMN id ADD GENERATED BY DEFAULT AS IDENTITY (START WITH {next_value})
|
||||
"""))
|
||||
|
||||
print(f"表 {table_name} 序列已转换为Identity,起始值为 {next_value}")
|
||||
|
||||
except Exception as e:
|
||||
print(f"转换表 {table_name} 序列时出错: {e}")
|
||||
# 如果是已经存在的Identity错误,则忽略
|
||||
if "already an identity column" in str(e):
|
||||
print(f"表 {table_name} 的id列已经是Identity类型,忽略此错误")
|
||||
return
|
||||
raise
|
||||
database/versions/d58298a0879f_2_1_9.py (new file, 21 lines)
@@ -0,0 +1,21 @@
"""2.1.9

Revision ID: d58298a0879f
Revises: 4666ce24a443
Create Date: 2025-08-19 11:56:39.652032

"""

# revision identifiers, used by Alembic.
revision = 'd58298a0879f'
down_revision = '4666ce24a443'
branch_labels = None
depends_on = None


def upgrade() -> None:
    pass


def downgrade() -> None:
    pass

@@ -1,4 +1,9 @@
|
||||
FROM python:3.12.8-slim-bookworm
|
||||
FROM python:3.12.8-slim-bookworm AS base
|
||||
|
||||
|
||||
# 准备软件包
|
||||
FROM base AS prepare_package
|
||||
|
||||
ENV LANG="C.UTF-8" \
|
||||
TZ="Asia/Shanghai" \
|
||||
HOME="/moviepilot" \
|
||||
@@ -7,59 +12,101 @@ ENV LANG="C.UTF-8" \
|
||||
DISPLAY=:987 \
|
||||
PUID=0 \
|
||||
PGID=0 \
|
||||
UMASK=000
|
||||
WORKDIR "/app"
|
||||
RUN apt-get update -y \
|
||||
&& apt-get upgrade -y \
|
||||
&& apt-get -y install \
|
||||
musl-dev \
|
||||
nginx \
|
||||
gettext-base \
|
||||
locales \
|
||||
procps \
|
||||
gosu \
|
||||
bash \
|
||||
wget \
|
||||
curl \
|
||||
busybox \
|
||||
dumb-init \
|
||||
jq \
|
||||
fuse3 \
|
||||
rsync \
|
||||
ffmpeg \
|
||||
nano \
|
||||
UMASK=000 \
|
||||
VENV_PATH="/opt/venv"
|
||||
|
||||
ENV PATH="${VENV_PATH}/bin:${PATH}"
|
||||
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
nginx \
|
||||
gettext-base \
|
||||
locales \
|
||||
procps \
|
||||
gosu \
|
||||
bash \
|
||||
curl \
|
||||
wget \
|
||||
busybox \
|
||||
dumb-init \
|
||||
jq \
|
||||
fuse3 \
|
||||
rsync \
|
||||
ffmpeg \
|
||||
nano \
|
||||
&& dpkg-reconfigure --frontend noninteractive tzdata \
|
||||
&& \
|
||||
if [ "$(uname -m)" = "x86_64" ]; \
|
||||
then ln -s /usr/lib/x86_64-linux-musl/libc.so /lib/libc.musl-x86_64.so.1; \
|
||||
elif [ "$(uname -m)" = "aarch64" ]; \
|
||||
then ln -s /usr/lib/aarch64-linux-musl/libc.so /lib/libc.musl-aarch64.so.1; \
|
||||
fi \
|
||||
&& curl https://rclone.org/install.sh | bash \
|
||||
&& apt-get autoremove -y \
|
||||
&& apt-get clean -y \
|
||||
&& apt-get clean \
|
||||
&& rm -rf \
|
||||
/tmp/* \
|
||||
/moviepilot/.cache \
|
||||
/var/lib/apt/lists/* \
|
||||
/var/tmp/*
|
||||
COPY ../requirements.in requirements.in
|
||||
RUN apt-get update -y \
|
||||
&& apt-get install -y build-essential \
|
||||
/tmp/* \
|
||||
/var/lib/apt/lists/* \
|
||||
/var/tmp/*
|
||||
|
||||
|
||||
# 准备 python 环境
|
||||
FROM base AS prepare_venv
|
||||
|
||||
# 设置环境变量
|
||||
ENV LANG="C.UTF-8" \
|
||||
TZ="Asia/Shanghai" \
|
||||
VENV_PATH="/opt/venv"
|
||||
|
||||
ENV PATH="${VENV_PATH}/bin:${PATH}"
|
||||
|
||||
# 安装系统构建依赖
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
build-essential \
|
||||
curl \
|
||||
busybox \
|
||||
jq \
|
||||
wget
|
||||
|
||||
# 安装 Python 构建依赖并创建虚拟环境
|
||||
WORKDIR /app
|
||||
COPY requirements.in requirements.in
|
||||
RUN python3 -m venv ${VENV_PATH} \
|
||||
&& pip install --upgrade pip \
|
||||
&& pip install Cython pip-tools \
|
||||
&& pip-compile requirements.in \
|
||||
&& pip install -r requirements.txt \
|
||||
&& playwright install-deps chromium \
|
||||
&& apt-get remove -y build-essential \
|
||||
&& pip install -r requirements.txt
|
||||
|
||||
# 下载准备代码
|
||||
FROM prepare_package AS prepare_code
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY . .
|
||||
RUN FRONTEND_VERSION=$(sed -n "s/^FRONTEND_VERSION\s*=\s*'\([^']*\)'/\1/p" /app/version.py) \
|
||||
&& curl -sL "https://github.com/jxxghp/MoviePilot-Frontend/releases/download/${FRONTEND_VERSION}/dist.zip" | busybox unzip -d / - \
|
||||
&& mv /dist /public \
|
||||
&& curl -sL "https://github.com/jxxghp/MoviePilot-Plugins/archive/refs/heads/main.zip" | busybox unzip -d /tmp - \
|
||||
&& mv -f /tmp/MoviePilot-Plugins-main/plugins.v2/* /app/app/plugins/ \
|
||||
&& cat /tmp/MoviePilot-Plugins-main/package.json | jq -r 'to_entries[] | select(.value.v2 == true) | .key' | awk '{print tolower($0)}' | \
|
||||
while read -r i; do if [ ! -d "/app/app/plugins/$i" ]; then mv "/tmp/MoviePilot-Plugins-main/plugins/$i" "/app/app/plugins/"; else echo "跳过 $i"; fi; done \
|
||||
&& curl -sL "https://github.com/jxxghp/MoviePilot-Resources/archive/refs/heads/main.zip" | busybox unzip -d /tmp - \
|
||||
&& mv -f /tmp/MoviePilot-Resources-main/resources.v2/* /app/app/helper/
|
||||
|
||||
# final 阶段: 安装运行时依赖和配置最终镜像
|
||||
FROM prepare_package AS final
|
||||
|
||||
# python 环境
|
||||
COPY --from=prepare_venv --chmod=777 ${VENV_PATH} ${VENV_PATH}
|
||||
|
||||
# playwright 环境
|
||||
RUN playwright install-deps chromium \
|
||||
&& apt-get autoremove -y \
|
||||
&& apt-get clean -y \
|
||||
&& apt-get clean \
|
||||
&& rm -rf \
|
||||
/tmp/* \
|
||||
/moviepilot/.cache \
|
||||
/var/lib/apt/lists/* \
|
||||
/var/tmp/*
|
||||
COPY .. .
|
||||
/tmp/* \
|
||||
/var/lib/apt/lists/* \
|
||||
/var/tmp/*
|
||||
|
||||
# 准备运行代码
|
||||
WORKDIR /app
|
||||
|
||||
COPY --from=prepare_code /app /app
|
||||
COPY --from=prepare_code /public /public
|
||||
|
||||
RUN cp -f /app/docker/nginx.common.conf /etc/nginx/common.conf \
|
||||
&& cp -f /app/docker/nginx.template.conf /etc/nginx/nginx.template.conf \
|
||||
&& cp -f /app/docker/update.sh /usr/local/bin/mp_update.sh \
|
||||
@@ -70,20 +117,11 @@ RUN cp -f /app/docker/nginx.common.conf /etc/nginx/common.conf \
|
||||
&& groupadd -r moviepilot -g 918 \
|
||||
&& useradd -r moviepilot -g moviepilot -d ${HOME} -s /bin/bash -u 918 \
|
||||
&& python_ver=$(python3 -V | awk '{print $2}') \
|
||||
&& echo "/app/" > /usr/local/lib/python${python_ver%.*}/site-packages/app.pth \
|
||||
&& echo "/app/" > ${VENV_PATH}/lib/python${python_ver%.*}/site-packages/app.pth \
|
||||
&& echo 'fs.inotify.max_user_watches=5242880' >> /etc/sysctl.conf \
|
||||
&& echo 'fs.inotify.max_user_instances=5242880' >> /etc/sysctl.conf \
|
||||
&& locale-gen zh_CN.UTF-8 \
|
||||
&& FRONTEND_VERSION=$(sed -n "s/^FRONTEND_VERSION\s*=\s*'\([^']*\)'/\1/p" /app/version.py) \
|
||||
&& curl -sL "https://github.com/jxxghp/MoviePilot-Frontend/releases/download/${FRONTEND_VERSION}/dist.zip" | busybox unzip -d / - \
|
||||
&& mv /dist /public \
|
||||
&& curl -sL "https://github.com/jxxghp/MoviePilot-Plugins/archive/refs/heads/main.zip" | busybox unzip -d /tmp - \
|
||||
&& mv -f /tmp/MoviePilot-Plugins-main/plugins.v2/* /app/app/plugins/ \
|
||||
&& cat /tmp/MoviePilot-Plugins-main/package.json | jq -r 'to_entries[] | select(.value.v2 == true) | .key' | awk '{print tolower($0)}' | \
|
||||
while read -r i; do if [ ! -d "/app/app/plugins/$i" ]; then mv "/tmp/MoviePilot-Plugins-main/plugins/$i" "/app/app/plugins/"; else echo "跳过 $i"; fi; done \
|
||||
&& curl -sL "https://github.com/jxxghp/MoviePilot-Resources/archive/refs/heads/main.zip" | busybox unzip -d /tmp - \
|
||||
&& mv -f /tmp/MoviePilot-Resources-main/resources.v2/* /app/app/helper/ \
|
||||
&& rm -rf /tmp/*
|
||||
&& locale-gen zh_CN.UTF-8
|
||||
|
||||
EXPOSE 3000
|
||||
VOLUME [ "${CONFIG_DIR}" ]
|
||||
ENTRYPOINT [ "/entrypoint.sh" ]
|
||||
|
||||
@@ -20,6 +20,10 @@ function WARN() {
|
||||
echo -e "${WARN} ${1}"
|
||||
}
|
||||
|
||||
# 设置虚拟环境路径(兼容群晖等系统必须这样配置)
|
||||
VENV_PATH="${VENV_PATH:-/opt/venv}"
|
||||
export PATH="${VENV_PATH}/bin:$PATH"
|
||||
|
||||
# 校正设置目录
|
||||
CONFIG_DIR="${CONFIG_DIR:-/config}"
|
||||
|
||||
@@ -43,6 +47,16 @@ function load_config_from_app_env() {
|
||||
["GITHUB_TOKEN"]=""
|
||||
["MOVIEPILOT_AUTO_UPDATE"]="release"
|
||||
|
||||
# database
|
||||
["DB_TYPE"]="sqlite"
|
||||
["DB_POSTGRESQL_HOST"]="localhost"
|
||||
["DB_POSTGRESQL_PORT"]="5432"
|
||||
["DB_POSTGRESQL_DATABASE"]="moviepilot"
|
||||
["DB_POSTGRESQL_USERNAME"]="moviepilot"
|
||||
["DB_POSTGRESQL_PASSWORD"]="moviepilot"
|
||||
["DB_POSTGRESQL_POOL_SIZE"]="20"
|
||||
["DB_POSTGRESQL_MAX_OVERFLOW"]="30"
|
||||
|
||||
# cert
|
||||
["ENABLE_SSL"]="false"
|
||||
["SSL_DOMAIN"]=""
|
||||
@@ -195,13 +209,16 @@ fi
|
||||
|
||||
# 使用 `envsubst` 将模板文件中的 ${NGINX_PORT} 替换为实际的环境变量值
|
||||
envsubst '${NGINX_PORT}${PORT}${NGINX_CLIENT_MAX_BODY_SIZE}${ENABLE_SSL}${HTTPS_SERVER_CONF}' < /etc/nginx/nginx.template.conf > /etc/nginx/nginx.conf
|
||||
|
||||
# 自动更新
|
||||
cd /
|
||||
source /usr/local/bin/mp_update.sh
|
||||
cd /app || exit
|
||||
|
||||
# 更改 moviepilot userid 和 groupid
|
||||
groupmod -o -g "${PGID}" moviepilot
|
||||
usermod -o -u "${PUID}" moviepilot
|
||||
|
||||
# 更改文件权限
|
||||
chown -R moviepilot:moviepilot \
|
||||
"${HOME}" \
|
||||
@@ -211,17 +228,21 @@ chown -R moviepilot:moviepilot \
|
||||
/var/lib/nginx \
|
||||
/var/log/nginx
|
||||
chown moviepilot:moviepilot /etc/hosts /tmp
|
||||
|
||||
# 下载浏览器内核
|
||||
if [[ "$HTTPS_PROXY" =~ ^https?:// ]] || [[ "$HTTPS_PROXY" =~ ^https?:// ]] || [[ "$PROXY_HOST" =~ ^https?:// ]]; then
|
||||
HTTPS_PROXY="${HTTPS_PROXY:-${https_proxy:-$PROXY_HOST}}" gosu moviepilot:moviepilot playwright install chromium
|
||||
else
|
||||
gosu moviepilot:moviepilot playwright install chromium
|
||||
fi
|
||||
|
||||
# 证书管理
|
||||
source /app/docker/cert.sh
|
||||
|
||||
# 启动前端nginx服务
|
||||
INFO "→ 启动前端nginx服务..."
|
||||
nginx
|
||||
|
||||
# 启动docker http proxy nginx
|
||||
if [ -S "/var/run/docker.sock" ]; then
|
||||
INFO "→ 启动 Docker Proxy..."
|
||||
@@ -231,6 +252,7 @@ if [ -S "/var/run/docker.sock" ]; then
|
||||
/var/lib/nginx \
|
||||
/var/log/nginx
|
||||
fi
|
||||
|
||||
# 设置后端服务权限掩码
|
||||
umask "${UMASK}"
|
||||
|
||||
@@ -252,4 +274,4 @@ fi
|
||||
|
||||
# 启动后端服务
|
||||
INFO "→ 启动后端服务..."
|
||||
exec dumb-init gosu moviepilot:moviepilot python3 app/main.py
|
||||
exec dumb-init gosu moviepilot:moviepilot ${VENV_PATH}/bin/python3 app/main.py
|
||||
|
||||
@@ -9,42 +9,6 @@ location / {
|
||||
try_files $uri $uri/ /index.html;
|
||||
}
|
||||
|
||||
# 图片类静态资源
|
||||
location ~* \.(png|jpg|jpeg|gif|ico|svg)$ {
|
||||
expires 1y;
|
||||
add_header Cache-Control "public, immutable";
|
||||
}
|
||||
|
||||
# JS和CSS静态资源缓存
|
||||
location ~* \.(js|css)$ {
|
||||
expires 30d;
|
||||
add_header Cache-Control "public";
|
||||
add_header Vary Accept-Encoding;
|
||||
}
|
||||
|
||||
# assets目录
|
||||
location /assets {
|
||||
expires 1y;
|
||||
add_header Cache-Control "public, immutable";
|
||||
}
|
||||
|
||||
# 站点图标
|
||||
location /api/v1/site/icon/ {
|
||||
# 站点图标缓存
|
||||
proxy_cache my_cache;
|
||||
# 缓存响应码为200和302的请求1小时
|
||||
proxy_cache_valid 200 302 1h;
|
||||
# 缓存其他响应码的请求5分钟
|
||||
proxy_cache_valid any 5m;
|
||||
# 缓存键的生成规则
|
||||
proxy_cache_key "$scheme$request_method$host$request_uri";
|
||||
proxy_cache_use_stale error timeout updating http_500 http_502 http_503 http_504;
|
||||
|
||||
# 向后端API转发请求
|
||||
proxy_pass http://backend_api;
|
||||
}
|
||||
|
||||
|
||||
# 本地CookieCloud
|
||||
location /cookiecloud {
|
||||
proxy_pass http://backend_api;
|
||||
@@ -104,4 +68,40 @@ location /api {
|
||||
|
||||
# 超时设置
|
||||
proxy_read_timeout 600s;
|
||||
}
|
||||
|
||||
# 图片类静态资源
|
||||
location ~* \.(png|jpg|jpeg|gif|ico|svg)$ {
|
||||
expires 1y;
|
||||
add_header Cache-Control "public, immutable";
|
||||
}
|
||||
|
||||
# JS 和 CSS 静态资源缓存(排除 /api/v1 路径)
|
||||
location ~* ^/(?!api/v1).*\.(js|css)$ {
|
||||
try_files $uri =404;
|
||||
expires 30d;
|
||||
add_header Cache-Control "public";
|
||||
add_header Vary Accept-Encoding;
|
||||
}
|
||||
|
||||
# assets目录
|
||||
location /assets {
|
||||
expires 1y;
|
||||
add_header Cache-Control "public, immutable";
|
||||
}
|
||||
|
||||
# 站点图标
|
||||
location /api/v1/site/icon/ {
|
||||
# 站点图标缓存
|
||||
proxy_cache my_cache;
|
||||
# 缓存响应码为200和302的请求1小时
|
||||
proxy_cache_valid 200 302 1h;
|
||||
# 缓存其他响应码的请求5分钟
|
||||
proxy_cache_valid any 5m;
|
||||
# 缓存键的生成规则
|
||||
proxy_cache_key "$scheme$request_method$host$request_uri";
|
||||
proxy_cache_use_stale error timeout updating http_500 http_502 http_503 http_504;
|
||||
|
||||
# 向后端API转发请求
|
||||
proxy_pass http://backend_api;
|
||||
}
|
||||
@@ -20,6 +20,10 @@ function WARN() {
echo -e "${WARN} ${1}"
}

# Set the virtual environment path (required for compatibility with Synology and similar systems)
VENV_PATH="${VENV_PATH:-/opt/venv}"
export PATH="${VENV_PATH}/bin:$PATH"

# Download and unzip
function download_and_unzip() {
local retries=0
@@ -54,16 +58,36 @@ function install_backend_and_download_resources() {
return 1
fi
INFO "Backend downloaded successfully"
INFO "→ Installing dependencies..."
if ! pip install ${PIP_OPTIONS} --upgrade --root-user-action=ignore pip > /dev/null; then
ERROR "pip upgrade failed, please pull the image again"
return 1

# Check whether the dependencies have changed
INFO "→ Checking for dependency changes..."
if [ -f "${TMP_PATH}/App/requirements.in" ]; then
if ! cmp -s /app/requirements.in "${TMP_PATH}/App/requirements.in"; then
INFO "Dependency changes detected, updating the virtual environment..."
# Back up the current requirements.txt
cp /app/requirements.txt /tmp/requirements.txt.backup
# Copy the new requirements.in
cp "${TMP_PATH}/App/requirements.in" /app/requirements.in
# Recompile the dependencies
if ! ${VENV_PATH}/bin/pip-compile /app/requirements.in; then
ERROR "Dependency compilation failed, restoring the original dependencies"
cp /tmp/requirements.txt.backup /app/requirements.txt
return 1
fi
# Install the new dependencies
if ! ${VENV_PATH}/bin/pip install ${PIP_OPTIONS} --root-user-action=ignore -r /app/requirements.txt; then
ERROR "Dependency installation failed, restoring the original dependencies"
cp /tmp/requirements.txt.backup /app/requirements.txt
return 1
fi
INFO "Dependencies updated successfully"
else
INFO "No dependency changes, skipping the dependency update"
fi
else
WARN "requirements.in not found, skipping the dependency check"
fi
if ! pip install ${PIP_OPTIONS} --root-user-action=ignore -r ${TMP_PATH}/App/requirements.txt > /dev/null; then
ERROR "Dependency installation failed, please pull the image again"
return 1
fi
INFO "Dependencies installed successfully"

# If the ref is "heads/v2.zip", look up the latest version number starting with v2
if [[ "${1}" == "heads/v2.zip" ]]; then
INFO "→ Fetching the latest frontend version number..."
@@ -134,11 +158,11 @@ function install_backend_and_download_resources() {
}

function test_connectivity_pip() {
pip uninstall -y pip-hello-world > /dev/null 2>&1
${VENV_PATH}/bin/pip uninstall -y pip-hello-world > /dev/null 2>&1
case "$1" in
0)
if [[ -n "${PIP_PROXY}" ]]; then
if pip install -i ${PIP_PROXY} pip-hello-world > /dev/null 2>&1; then
if ${VENV_PATH}/bin/pip install -i ${PIP_PROXY} pip-hello-world > /dev/null 2>&1; then
PIP_OPTIONS="-i ${PIP_PROXY}"
PIP_LOG="mirror proxy mode"
return 0
@@ -148,7 +172,7 @@ function test_connectivity_pip() {
;;
1)
if [[ -n "${PROXY_HOST}" ]]; then
if pip install --proxy=${PROXY_HOST} pip-hello-world > /dev/null 2>&1; then
if ${VENV_PATH}/bin/pip install --proxy=${PROXY_HOST} pip-hello-world > /dev/null 2>&1; then
PIP_OPTIONS="--proxy=${PROXY_HOST}"
PIP_LOG="global proxy mode"
return 0

220
docs/postgresql-setup.md
Normal file
@@ -0,0 +1,220 @@
# PostgreSQL Database Configuration Guide

MoviePilot now supports PostgreSQL, so you can choose between SQLite and PostgreSQL as needed.

## Configuration Options

### 1. Database Type Selection

Set this in the `config/app.env` file:

```bash
# Use SQLite (default)
DB_TYPE=sqlite

# Use PostgreSQL
DB_TYPE=postgresql
```

### 2. PostgreSQL Configuration Parameters

When `DB_TYPE=postgresql`, the following settings take effect:

```bash
# PostgreSQL host address
DB_POSTGRESQL_HOST=localhost

# PostgreSQL port
DB_POSTGRESQL_PORT=5432

# PostgreSQL database name
DB_POSTGRESQL_DATABASE=moviepilot

# PostgreSQL username
DB_POSTGRESQL_USERNAME=moviepilot

# PostgreSQL password
DB_POSTGRESQL_PASSWORD=moviepilot

# PostgreSQL connection pool size
DB_POSTGRESQL_POOL_SIZE=20

# PostgreSQL connection pool max overflow
DB_POSTGRESQL_MAX_OVERFLOW=30
```

## Docker Deployment

### Using the Built-in PostgreSQL

If you deploy with Docker, the MoviePilot container ships with a built-in PostgreSQL service:

#### Using Docker Compose (recommended)

1. Create a `docker-compose.yml` file:
```yaml
version: '3.8'

services:
  moviepilot:
    image: jxxghp/moviepilot:latest
    container_name: moviepilot
    restart: unless-stopped
    ports:
      - "3000:3000"   # Frontend port
      - "3001:3001"   # API port
    environment:
      - DB_TYPE=postgresql
      - DB_POSTGRESQL_HOST=localhost
      - DB_POSTGRESQL_PORT=5432
      - DB_POSTGRESQL_DATABASE=moviepilot
      - DB_POSTGRESQL_USERNAME=moviepilot
      - DB_POSTGRESQL_PASSWORD=moviepilot
    volumes:
      - ./config:/config
```

2. Start the service:
```bash
docker-compose up -d
```

#### Using the Docker Command

1. Set the environment variable:
```bash
DB_TYPE=postgresql
```

2. When the container starts, the PostgreSQL service will automatically:
   - Create a `postgresql/` subdirectory under the config directory as the data directory
   - Initialize the PostgreSQL data directory
   - Start the PostgreSQL service
   - Create the database and user
   - Configure connection permissions

3. Data persistence:
   - PostgreSQL data is stored in the `${CONFIG_DIR}/postgresql/` directory
   - Log files are stored in the `${CONFIG_DIR}/postgresql/logs/` directory
   - Both directories are persisted through the Docker volume mapping

### Using an External PostgreSQL

If you want to use an external PostgreSQL service:

1. Make sure the external PostgreSQL service is running and reachable
2. Point the environment variables at the external service:
```bash
DB_TYPE=postgresql
DB_POSTGRESQL_HOST=your-postgresql-host
DB_POSTGRESQL_PORT=5432
DB_POSTGRESQL_DATABASE=moviepilot
DB_POSTGRESQL_USERNAME=your-username
DB_POSTGRESQL_PASSWORD=your-password
```

## Data Migration

### Migrating from SQLite to PostgreSQL

1. Back up the existing SQLite database file (`config/user.db`)
2. Change the configuration to PostgreSQL
3. Start the application; the database tables are created automatically
4. Use a database migration tool, or import the data manually (see the sketch below)
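
MoviePilot does not ship its own migration tool for step 4. A minimal sketch, assuming you use the third-party `pgloader` tool (not bundled with MoviePilot, installed separately) and the default credentials from this guide; adjust paths and connection details to match your `DB_POSTGRESQL_*` settings, and verify the resulting schema and data before relying on them:

```bash
# Hedged example: copy the data from the old SQLite file into PostgreSQL with pgloader.
# pgloader is a third-party tool and is NOT part of MoviePilot; install it yourself first.
pgloader ./config/user.db \
  postgresql://moviepilot:moviepilot@localhost:5432/moviepilot
```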

### Migrating from PostgreSQL to SQLite

1. Export the PostgreSQL data (see the sketch below)
2. Change the configuration to SQLite
3. Start the application; the database tables are created automatically
4. Import the data into SQLite
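
There is no single-command path back to SQLite either. A rough sketch, assuming plain `INSERT` statements exported from PostgreSQL are replayed into the SQLite file created by the application; type and syntax differences between the two engines may require editing the dump by hand:

```bash
# Hedged example: export data-only INSERT statements from PostgreSQL ...
pg_dump -h localhost -U moviepilot -d moviepilot \
  --data-only --column-inserts --no-owner > /config/pg_data.sql

# ... then, after MoviePilot has created the empty SQLite schema
# (start it once with DB_TYPE=sqlite), replay the statements into config/user.db.
# Fix any statements that sqlite3 rejects before re-running.
sqlite3 config/user.db < /config/pg_data.sql
```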

## Data Backup

### PostgreSQL Data Backup

PostgreSQL data is stored in the `${CONFIG_DIR}/postgresql/` directory. You can back it up in the following ways:

#### 1. File-level Backup
```bash
# Back up the entire PostgreSQL data directory
tar -czf postgresql_backup_$(date +%Y%m%d_%H%M%S).tar.gz config/postgresql/
```

#### 2. Database-level Backup
```bash
# Enter the container
docker exec -it moviepilot bash

# Back up with pg_dump
pg_dump -h localhost -U moviepilot -d moviepilot > /config/moviepilot_backup.sql

# Or back up all databases with pg_dumpall
pg_dumpall -h localhost -U moviepilot > /config/all_databases_backup.sql
```

#### 3. Restoring Data
```bash
# Restore a single database
psql -h localhost -U moviepilot -d moviepilot < /config/moviepilot_backup.sql

# Restore all databases
psql -h localhost -U moviepilot < /config/all_databases_backup.sql
```

## Performance Optimization

### PostgreSQL Tuning Suggestions

1. **Connection pool settings**:
   - Adjust `DB_POSTGRESQL_POOL_SIZE` to match the application load
   - Set a suitable `DB_POSTGRESQL_MAX_OVERFLOW`

2. **Database settings** (example values in the sketch below):
   - Tune `shared_buffers`
   - Configure `work_mem`
   - Set a suitable `maintenance_work_mem`

3. **Index optimization**:
   - Add indexes on frequently queried columns
   - Run `VACUUM` and `ANALYZE` regularly
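
As a concrete starting point, a hedged example of the values mentioned above; the numbers are illustrative only, not tuned recommendations, and the `postgresql.conf` location assumes the built-in instance's data directory described earlier:

```bash
# MoviePilot side (config/app.env): size the pool for your load.
DB_POSTGRESQL_POOL_SIZE=20
DB_POSTGRESQL_MAX_OVERFLOW=30

# PostgreSQL side: example settings for a small host. Edit postgresql.conf under
# ${CONFIG_DIR}/postgresql/ for the built-in instance, then restart the container:
#   shared_buffers = 1GB
#   work_mem = 16MB
#   maintenance_work_mem = 256MB

# Routine maintenance, run from inside the container:
psql -h localhost -U moviepilot -d moviepilot -c "VACUUM ANALYZE;"
```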

## Troubleshooting

### Common Issues

1. **Connection failures** (see the checks below):
   - Check whether the PostgreSQL service is running
   - Verify that the connection parameters are correct
   - Confirm network connectivity and firewall settings

2. **Permission problems**:
   - Make sure the user has sufficient database privileges
   - Check the `pg_hba.conf` configuration

3. **Performance problems**:
   - Monitor connection pool usage
   - Check the slow query log
   - Optimize the database configuration
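
For connection and permission problems, a minimal set of checks, assuming the built-in instance and the data directory layout described above; the `pg_hba.conf` entry shown is only an example of the file format, not the exact rule your setup needs:

```bash
# Is the server accepting connections at all?
pg_isready -h localhost -p 5432

# Can the configured user log in and run a query?
psql -h localhost -p 5432 -U moviepilot -d moviepilot -c "SELECT 1;"

# Inspect the active client authentication rules; a typical local entry looks like:
#   host    moviepilot    moviepilot    127.0.0.1/32    md5
grep -v '^#' "${CONFIG_DIR}/postgresql/pg_hba.conf"
```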

### Viewing Logs

PostgreSQL logs can be found in the following locations (see the example below):

- Docker container: `${CONFIG_DIR}/postgresql/logs/`
- System log: `journalctl -u postgresql`
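
For example, to follow the logs live; the log file name pattern of the built-in instance is an assumption and may differ on your setup:

```bash
# Built-in instance inside the container
tail -f "${CONFIG_DIR}"/postgresql/logs/*.log

# External instance managed by systemd
journalctl -u postgresql -f
```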

## Notes

1. **Compatibility**: PostgreSQL is supported starting from MoviePilot v2.0
2. **Backups**: back up the database regularly
3. **Version**: PostgreSQL 12 or later is recommended
4. **Character set**: make sure the UTF-8 character set is used

## Support

If you run into problems:

1. Check the application logs
2. Check the PostgreSQL logs
3. Report the issue on GitHub Issues
@@ -60,11 +60,11 @@ pystray~=0.19.5
pyotp~=2.9.0
Pinyin2Hanzi~=0.1.1
pywebpush~=2.0.3
python-cookietools==0.0.4
aiofiles~=24.1.0
aiopath~=0.7.7
aiopathlib~=0.6.0
asynctempfile~=0.5.0
aiosqlite~=0.21.0
psycopg2-binary~=2.9.10
asyncpg~=0.30.0
jieba~=0.42.1
rsa~=4.9
redis~=6.2.0
@@ -78,4 +78,4 @@ smbprotocol~=1.15.0
setproctitle~=1.3.6
httpx[socks]~=0.28.1
prometheus-client~=0.22.1
prometheus-fastapi-instrumentator~=7.1.0
prometheus-fastapi-instrumentator~=7.1.0

@@ -1,2 +1,2 @@
APP_VERSION = 'v2.6.9-2'
FRONTEND_VERSION = 'v2.6.9'
APP_VERSION = 'v2.7.4'
FRONTEND_VERSION = 'v2.7.4'
