Compare commits

...

134 Commits

Author SHA1 Message Date
jxxghp
607eb4b4aa v2.1.8
- 修复已知问题,优化UI细节
2025-01-04 14:20:57 +08:00
jxxghp
846b4e645c Merge pull request #3664 from Aqr-K/feature/log 2025-01-03 13:38:18 +08:00
Aqr-K
3775e99b02 Remove: del Todo 2025-01-03 13:17:46 +08:00
Aqr-K
cea77bddee feat(log): log supports hot update. 2025-01-03 06:08:29 +08:00
jxxghp
8ac0d169d2 fix 目录监控蓝光原盘 2025-01-02 13:30:59 +08:00
jxxghp
d5ac9f65f6 fix bug 2025-01-01 10:50:14 +08:00
jxxghp
4b3f04c73f fix 目录监控控重 2024-12-31 12:42:28 +08:00
jxxghp
bb478c949a 更新 version.py 2024-12-31 07:15:18 +08:00
jxxghp
11b1003d4d fix 加入队列等待时间,以便聚合消息发送 2024-12-30 19:25:29 +08:00
jxxghp
c0ad5f2970 fix 整理队列锁 2024-12-30 19:02:16 +08:00
jxxghp
54c98cf3a1 fix 目录监控消息重复发送 2024-12-30 18:59:20 +08:00
jxxghp
dfbe8a2c0e fix 目录监控消息重复发送 2024-12-30 18:57:45 +08:00
jxxghp
873f80d534 fix 重复添加队列任务 2024-12-30 18:42:36 +08:00
jxxghp
089992db74 Merge pull request #3640 from InfinityPacer/feature/transfer 2024-12-30 07:00:53 +08:00
jxxghp
f07ab73fde Merge pull request #3639 from InfinityPacer/feature/site 2024-12-30 06:59:02 +08:00
InfinityPacer
166674bfe7 feat(transfer): match source dir in subdirs or prioritize same drive 2024-12-30 02:11:48 +08:00
InfinityPacer
adb4a8fe01 feat(site): add proxy support for site sync 2024-12-30 00:37:54 +08:00
jxxghp
c49e79dda3 rollback #3584 2024-12-29 14:41:55 +08:00
jxxghp
a3b5e51356 fix encoding 2024-12-29 12:54:36 +08:00
jxxghp
8f91e23208 Merge pull request #3634 from InfinityPacer/feature/subscribe 2024-12-29 07:54:12 +08:00
InfinityPacer
b768929cd8 fix(transfer): handle task removal on media info failure 2024-12-29 02:26:30 +08:00
jxxghp
49d5e5b953 v2.1.6 2024-12-28 20:10:34 +08:00
jxxghp
ce4792e87b Merge pull request #3632 from wikrin/v2 2024-12-28 20:07:10 +08:00
Attente
3ea0b1f36b refactor(app): improve code readability and consistency in FileMonitorHandler
- Rename 'size' parameter to 'file_size' in on_created and on_moved methods
- This change enhances code clarity and maintains consistency with other parts of the codebase
2024-12-28 20:05:47 +08:00
jxxghp
51c7852b77 更新 transfer.py 2024-12-28 15:58:07 +08:00
jxxghp
7947f10579 fix size 2024-12-28 14:37:21 +08:00
DDSRem
fca9297fa7 Revert "Merge branch 'rfc-python-bump-312' into v2"
This reverts commit 0ec5e3b365, reversing
changes made to c18937ecc7.
2024-12-28 11:56:54 +08:00
DDSRem
0ec5e3b365 Merge branch 'rfc-python-bump-312' into v2 2024-12-28 11:55:39 +08:00
jxxghp
c18937ecc7 fix bug 2024-12-28 11:00:12 +08:00
jxxghp
8b962757b7 fix bug 2024-12-28 10:57:40 +08:00
jxxghp
2b40e42965 fix bug 2024-12-27 21:16:38 +08:00
jxxghp
0eac7816bc fix bug 2024-12-27 18:36:49 +08:00
jxxghp
e3552d4086 feat:识别支持后台处理 2024-12-27 17:45:04 +08:00
jxxghp
75bb52ccca fix 统一整理记录名称 2024-12-27 07:58:58 +08:00
jxxghp
22c485d177 fix 2024-12-26 21:19:18 +08:00
jxxghp
78dab5038c fix transfer apis 2024-12-26 19:58:23 +08:00
jxxghp
15cc02b083 fix transfer count 2024-12-26 19:25:23 +08:00
jxxghp
419f2e90ce Merge pull request #3621 from InfinityPacer/feature/subscribe 2024-12-26 17:25:05 +08:00
jxxghp
a29e3c23fe Merge pull request #3619 from InfinityPacer/feature/module 2024-12-26 17:24:49 +08:00
InfinityPacer
aa9ae4dd09 feat(TMDB): add episode_type field to TmdbEpisode 2024-12-26 16:39:01 +08:00
InfinityPacer
d02bf33345 feat(config): add TOKENIZED_SEARCH 2024-12-26 13:56:08 +08:00
InfinityPacer
0a1dc1724c chore(deps): add jieba~=0.42.1 for tokenization 2024-12-26 13:55:04 +08:00
jxxghp
80b866e135 Merge remote-tracking branch 'origin/v2' into v2 2024-12-26 13:29:48 +08:00
jxxghp
e7030c734e add remove queue api 2024-12-26 13:29:34 +08:00
jxxghp
e5458ee127 Merge pull request #3615 from wikrin/del_bdmv 2024-12-26 09:25:28 +08:00
Attente
3f60cb3f7d fix(storage): delete Blu-ray directory when removing movie file
- Add logic to delete `BDMV` and `CERTIFICATE` directories when a movie file is removed
- This ensures that empty Blu-ray folders are also cleaned up during the deletion process
2024-12-26 09:00:04 +08:00
jxxghp
8c800836d5 add remove queue api 2024-12-26 08:12:59 +08:00
jxxghp
abfc146335 更新 transfer.py 2024-12-26 07:13:37 +08:00
jxxghp
dd4ff03b08 Merge pull request #3614 from wikrin/v2 2024-12-26 06:59:52 +08:00
jxxghp
be792cb40a Merge pull request #3613 from InfinityPacer/feature/recommend 2024-12-26 06:59:11 +08:00
Attente
cec5cf22de feat(transfer): Update file_items filtering logic to allow bluray directories 2024-12-26 02:41:49 +08:00
InfinityPacer
6ec5f3b98b feat(recommend): support caching by page 2024-12-25 23:07:56 +08:00
jxxghp
0ac43fd3c7 feat:手动整理API支持后台 2024-12-25 20:38:00 +08:00
jxxghp
a600f2f05b Merge pull request #3611 from InfinityPacer/feature/recommend 2024-12-25 19:31:20 +08:00
InfinityPacer
0c0a1c1dad feat(recommend): support caching poster images 2024-12-25 19:24:32 +08:00
jxxghp
c69df36b98 add transfer queue api 2024-12-25 18:11:57 +08:00
jxxghp
20ac9fbfbe fix transfer log 2024-12-25 12:59:43 +08:00
jxxghp
b9756db115 fix jobview 2024-12-25 08:24:57 +08:00
jxxghp
5bfa36418b Merge pull request #3608 from wikrin/split_episode 2024-12-25 07:01:24 +08:00
Attente
30c696adfe fix(format): evaluate offset for start and end episodes 2024-12-25 05:07:54 +08:00
Attente
31887ab4b1 fix(format): improve episode parsing logic 2024-12-25 04:50:23 +08:00
jxxghp
3678de09bf 更新 transfer.py 2024-12-24 21:51:48 +08:00
jxxghp
3f9172146d fix MediaServerSeasonInfo 2024-12-24 21:16:56 +08:00
jxxghp
fc4480644a fix bug 2024-12-24 21:07:12 +08:00
jxxghp
2062214a3b fix bug 2024-12-24 14:17:35 +08:00
jxxghp
01487cfdf6 fix transfer 2024-12-24 14:08:47 +08:00
jxxghp
a2c913a5b2 fix transfer 2024-12-24 14:06:45 +08:00
jxxghp
84f5d1c879 fix bug 2024-12-24 13:31:58 +08:00
jxxghp
48c289edf2 feat: 后台整理队列 2024-12-24 13:14:17 +08:00
jxxghp
c9949581ef Merge pull request #3604 from InfinityPacer/feature/module 2024-12-24 10:49:43 +08:00
InfinityPacer
b4e3dc275d fix(proxy): add proxy for MP_SERVER_HOST 2024-12-24 10:10:19 +08:00
jxxghp
00f85836fa 更新 transfer.py 2024-12-23 22:02:45 +08:00
jxxghp
c4300332c9 TODO 后台整理队列 2024-12-23 21:46:59 +08:00
jxxghp
10f8efc457 TODO 后台整理队列 2024-12-23 18:59:36 +08:00
jxxghp
1b48eb8959 fix ide warnings 2024-12-23 16:58:49 +08:00
jxxghp
61d7374d95 fix ide warnings 2024-12-23 16:58:04 +08:00
jxxghp
baa48610ea refactor:Command提到上层 2024-12-23 13:38:02 +08:00
jxxghp
ece8d0368b Merge remote-tracking branch 'origin/v2' into v2 2024-12-23 12:40:42 +08:00
jxxghp
a9ffebb3ea fix schemas 2024-12-23 12:40:32 +08:00
jxxghp
b6c043aae9 Merge pull request #3598 from InfinityPacer/feature/recommend 2024-12-23 12:09:59 +08:00
jxxghp
d45d49edbd fix schemas default_factory 2024-12-23 11:35:38 +08:00
jxxghp
27f474b192 fix setup 2024-12-23 11:10:08 +08:00
InfinityPacer
544119c49f Revert "feat(recommend): add semaphore to limit concurrent requests"
This reverts commit 33de1c3618.
2024-12-23 10:29:37 +08:00
jxxghp
800a66dc99 Merge pull request #3596 from InfinityPacer/feature/module 2024-12-23 06:54:38 +08:00
InfinityPacer
33de1c3618 feat(recommend): add semaphore to limit concurrent requests 2024-12-23 02:51:23 +08:00
InfinityPacer
6fec16d78a fix(cache): include method name and default parameters in cache key 2024-12-23 01:39:34 +08:00
InfinityPacer
a5d6062aa8 feat(recommend): add job to refresh recommend cache 2024-12-23 01:32:17 +08:00
InfinityPacer
de532f47fb feat(auth): add logging for site auth 2024-12-23 00:20:03 +08:00
jxxghp
60bcc802cf Merge pull request #3593 from wikrin/v2 2024-12-22 10:40:23 +08:00
jxxghp
c143545ef9 Merge pull request #3591 from InfinityPacer/feature/module 2024-12-22 10:28:15 +08:00
Attente
0e8fdac6d6 fix(filemanager): correct season_episode metadata mapping
- Update season_episode field in FileManagerModule to use meta.episode instead of meta.episodes
- This change ensures accurate season and episode information is displayed
2024-12-22 10:24:40 +08:00
jxxghp
45e6dd1561 Merge pull request #3590 from InfinityPacer/feature/recommend 2024-12-22 09:11:51 +08:00
jxxghp
23c37c9a81 Merge pull request #3588 from wikrin/v2 2024-12-22 09:08:11 +08:00
InfinityPacer
098279ceb6 fix #3565 2024-12-22 02:04:36 +08:00
InfinityPacer
1fb791455e chore(recommend): update comment 2024-12-22 01:37:25 +08:00
InfinityPacer
3339bbca50 feat(recommend): switch API calls to use RecommendChain 2024-12-22 01:27:11 +08:00
InfinityPacer
ec77213ca6 feat(recommend): add cached_with_empty_check decorator 2024-12-22 01:09:06 +08:00
InfinityPacer
de1c2c98d2 feat(recommend): add log_execution_time decorator to RecommendChain methods 2024-12-22 01:03:44 +08:00
InfinityPacer
98247fa47a feat: add log_execution_time decorator 2024-12-22 01:02:07 +08:00
InfinityPacer
1eef95421a feat(recommend): add RecommendChain 2024-12-22 01:00:47 +08:00
Attente
b8de563a45 refactor(app): optimize download path logic
- Simplify download path determination logic
- Remove redundant code for save path calculation
2024-12-21 23:56:44 +08:00
jxxghp
fd5fbd779b Merge pull request #3584 from zhzero-hub/v2 2024-12-21 20:15:39 +08:00
zhzero
cb07550388 TorrentSpider添加encoding key 2024-12-21 14:51:55 +08:00
jxxghp
a51632c0a3 Merge pull request #3583 from wikrin/torrent_layout 2024-12-21 07:58:46 +08:00
Attente
9756bf6ac8 refactor(downloader): 新增支持种子文件布局处理
- 在 `DownloadChain` 中根据`种子文件布局`拼接`savepath`
- 在 `QbittorrentModule` 和 `TransmissionModule` 中添加种子文件布局信息
- 修改 `download` 方法的返回值,增加种子文件布局参数
2024-12-21 04:50:10 +08:00
DDSRem
aaa96cff87 Merge pull request #3582 from Aqr-K/patch-1
revert
2024-12-20 23:27:32 +08:00
Aqr-K
a50959d254 revert 2024-12-20 23:26:55 +08:00
DDSRem
b1bd858df1 chore(deps): update dependency python-115 to v0.0.9.8.8.4 2024-12-20 23:21:59 +08:00
DDSRem
c2d6d9b1ac chore(deps): update dependency python-115 to v0.0.9.8.8.4 2024-12-20 23:18:04 +08:00
DDSRem
7288dd24e0 Merge pull request #3580 from jxxghp/v2
Sync
2024-12-20 23:16:30 +08:00
jxxghp
8f05ea581c v2.1.5 2024-12-20 15:40:36 +08:00
jxxghp
03a0bc907b Merge pull request #3569 from yubanmeiqin9048/patch-1 2024-12-19 22:16:27 +08:00
yubanmeiqin9048
5ce4c8a055 feat(filemanager): 增加字幕正则式 2024-12-19 22:01:06 +08:00
jxxghp
b04181fed9 更新 version.py 2024-12-19 20:24:11 +08:00
jxxghp
eee843bafd Merge pull request #3567 from InfinityPacer/feature/cache 2024-12-19 20:21:00 +08:00
InfinityPacer
134fd0761d refactor(cache): split douban cache into recommend and search 2024-12-19 20:00:29 +08:00
InfinityPacer
669481af06 feat(cache): unify bangumi cache strategy 2024-12-19 19:42:17 +08:00
jxxghp
b5640b3179 Merge pull request #3564 from InfinityPacer/feature/subscribe 2024-12-19 16:17:14 +08:00
InfinityPacer
9abb305dbb fix(subscribe): ensure best version is empty set 2024-12-19 15:41:51 +08:00
InfinityPacer
0fd4791479 fix(event): align field names with SubscribeComplete 2024-12-19 10:58:11 +08:00
jxxghp
ce2ecdf44c Merge pull request #3562 from InfinityPacer/feature/subscribe 2024-12-19 07:02:26 +08:00
InfinityPacer
949c0d3b76 feat(subscribe): optimize best version to support multiple states 2024-12-19 00:51:53 +08:00
jxxghp
316915842a Merge pull request #3559 from InfinityPacer/feature/site 2024-12-18 19:24:34 +08:00
jxxghp
1dd7dc36c3 Merge pull request #3557 from InfinityPacer/feature/subscribe 2024-12-18 19:24:00 +08:00
InfinityPacer
fca763b814 fix(site): avoid err_msg cannot be updated when it's None 2024-12-18 16:39:14 +08:00
InfinityPacer
9311125c72 fix(subscribe): avoid reinitializing the dictionary 2024-12-18 15:49:21 +08:00
InfinityPacer
3f1d4933c1 Merge pull request #3553 from InfinityPacer/feature/subscribe
fix(dependencies): pin python-115 version
2024-12-18 12:47:51 +08:00
InfinityPacer
7fb23b5069 fix(dependencies): pin python-115 version 2024-12-18 12:46:28 +08:00
DDSRem
d74ad343f1 Merge pull request #3551 from InfinityPacer/feature/subscribe
Revert "chore(deps): update dependency python-115 to v0.0.9.8.8.3"
2024-12-18 10:42:17 +08:00
InfinityPacer
c0a8351e58 Revert "chore(deps): update dependency python-115 to v0.0.9.8.8.3"
This reverts commit d182a7079d.
2024-12-18 10:39:37 +08:00
DDSRem
a3c048b9c8 chore(deps): upgrade beautifulsoup4 4.12.2 to 4.12.3 2024-12-16 21:40:27 +08:00
DDSRem
3c08054234 chore(ci): beta image only provides amd64 architecture 2024-12-16 21:30:41 +08:00
DDSRem
07e91d4eb1 chore(deps): playwright 1.37.0 to 1.49.1
fix `greenlet==2.0.2` build error
2024-12-16 21:29:44 +08:00
DDSRem
c104498b43 chore(deps): environment and dependency upgrades 2024-12-16 21:11:14 +08:00
72 changed files with 2015 additions and 1191 deletions

View File

@@ -4,6 +4,7 @@ from fastapi import APIRouter, Depends
from app import schemas
from app.chain.bangumi import BangumiChain
from app.chain.recommend import RecommendChain
from app.core.context import MediaInfo
from app.core.security import verify_token
@@ -17,10 +18,7 @@ def calendar(page: int = 1,
"""
浏览Bangumi每日放送
"""
medias = BangumiChain().calendar()
if medias:
return [media.to_dict() for media in medias[(page - 1) * count: page * count]]
return []
return RecommendChain().bangumi_calendar(page=page, count=count)
@router.get("/credits/{bangumiid}", summary="查询Bangumi演职员表", response_model=List[schemas.MediaPerson])

View File

@@ -4,6 +4,7 @@ from fastapi import APIRouter, Depends
from app import schemas
from app.chain.douban import DoubanChain
from app.chain.recommend import RecommendChain
from app.core.context import MediaInfo
from app.core.security import verify_token
from app.schemas import MediaType
@@ -40,10 +41,7 @@ def movie_showing(page: int = 1,
"""
浏览豆瓣正在热映
"""
movies = DoubanChain().movie_showing(page=page, count=count)
if movies:
return [media.to_dict() for media in movies]
return []
return RecommendChain().douban_movie_showing(page=page, count=count)
@router.get("/movies", summary="豆瓣电影", response_model=List[schemas.MediaInfo])
@@ -55,11 +53,7 @@ def douban_movies(sort: str = "R",
"""
浏览豆瓣电影信息
"""
movies = DoubanChain().douban_discover(mtype=MediaType.MOVIE,
sort=sort, tags=tags, page=page, count=count)
if movies:
return [media.to_dict() for media in movies]
return []
return RecommendChain().douban_movies(sort=sort, tags=tags, page=page, count=count)
@router.get("/tvs", summary="豆瓣剧集", response_model=List[schemas.MediaInfo])
@@ -71,11 +65,7 @@ def douban_tvs(sort: str = "R",
"""
浏览豆瓣剧集信息
"""
tvs = DoubanChain().douban_discover(mtype=MediaType.TV,
sort=sort, tags=tags, page=page, count=count)
if tvs:
return [media.to_dict() for media in tvs]
return []
return RecommendChain().douban_tvs(sort=sort, tags=tags, page=page, count=count)
@router.get("/movie_top250", summary="豆瓣电影TOP250", response_model=List[schemas.MediaInfo])
@@ -85,10 +75,7 @@ def movie_top250(page: int = 1,
"""
浏览豆瓣剧集信息
"""
movies = DoubanChain().movie_top250(page=page, count=count)
if movies:
return [media.to_dict() for media in movies]
return []
return RecommendChain().douban_movie_top250(page=page, count=count)
@router.get("/tv_weekly_chinese", summary="豆瓣国产剧集周榜", response_model=List[schemas.MediaInfo])
@@ -98,10 +85,7 @@ def tv_weekly_chinese(page: int = 1,
"""
中国每周剧集口碑榜
"""
tvs = DoubanChain().tv_weekly_chinese(page=page, count=count)
if tvs:
return [media.to_dict() for media in tvs]
return []
return RecommendChain().douban_tv_weekly_chinese(page=page, count=count)
@router.get("/tv_weekly_global", summary="豆瓣全球剧集周榜", response_model=List[schemas.MediaInfo])
@@ -111,10 +95,7 @@ def tv_weekly_global(page: int = 1,
"""
全球每周剧集口碑榜
"""
tvs = DoubanChain().tv_weekly_global(page=page, count=count)
if tvs:
return [media.to_dict() for media in tvs]
return []
return RecommendChain().douban_tv_weekly_global(page=page, count=count)
@router.get("/tv_animation", summary="豆瓣动画剧集", response_model=List[schemas.MediaInfo])
@@ -124,10 +105,7 @@ def tv_animation(page: int = 1,
"""
热门动画剧集
"""
tvs = DoubanChain().tv_animation(page=page, count=count)
if tvs:
return [media.to_dict() for media in tvs]
return []
return RecommendChain().douban_tv_animation(page=page, count=count)
@router.get("/movie_hot", summary="豆瓣热门电影", response_model=List[schemas.MediaInfo])
@@ -137,10 +115,7 @@ def movie_hot(page: int = 1,
"""
热门电影
"""
movies = DoubanChain().movie_hot(page=page, count=count)
if movies:
return [media.to_dict() for media in movies]
return []
return RecommendChain().douban_movie_hot(page=page, count=count)
@router.get("/tv_hot", summary="豆瓣热门电视剧", response_model=List[schemas.MediaInfo])
@@ -150,10 +125,7 @@ def tv_hot(page: int = 1,
"""
热门电视剧
"""
tvs = DoubanChain().tv_hot(page=page, count=count)
if tvs:
return [media.to_dict() for media in tvs]
return []
return RecommendChain().douban_tv_hot(page=page, count=count)
@router.get("/credits/{doubanid}/{type_name}", summary="豆瓣演员阵容", response_model=List[schemas.MediaPerson])

View File

@@ -1,10 +1,12 @@
from typing import List, Any
import jieba
from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session
from app import schemas
from app.chain.storage import StorageChain
from app.core.config import settings
from app.core.event import eventmanager
from app.core.security import verify_token
from app.db import get_db
@@ -39,7 +41,7 @@ def delete_download_history(history_in: schemas.DownloadHistory,
return schemas.Response(success=True)
@router.get("/transfer", summary="查询转移历史记录", response_model=schemas.Response)
@router.get("/transfer", summary="查询整理记录", response_model=schemas.Response)
def transfer_history(title: str = None,
page: int = 1,
count: int = 30,
@@ -47,7 +49,7 @@ def transfer_history(title: str = None,
db: Session = Depends(get_db),
_: schemas.TokenPayload = Depends(verify_token)) -> Any:
"""
查询转移历史记录
查询整理记录
"""
if title == "失败":
title = None
@@ -57,6 +59,9 @@ def transfer_history(title: str = None,
status = True
if title:
if settings.TOKENIZED_SEARCH:
words = jieba.cut(title, HMM=False)
title = "%".join(words)
total = TransferHistory.count_by_title(db, title=title, status=status)
result = TransferHistory.list_by_title(db, title=title, page=page,
count=count, status=status)
@@ -71,14 +76,14 @@ def transfer_history(title: str = None,
})
@router.delete("/transfer", summary="删除转移历史记录", response_model=schemas.Response)
@router.delete("/transfer", summary="删除整理记录", response_model=schemas.Response)
def delete_transfer_history(history_in: schemas.TransferHistory,
deletesrc: bool = False,
deletedest: bool = False,
db: Session = Depends(get_db),
_: schemas.TokenPayload = Depends(get_current_active_superuser)) -> Any:
"""
删除转移历史记录
删除整理记录
"""
history: TransferHistory = TransferHistory.get(db, history_in.id)
if not history:
@@ -86,9 +91,7 @@ def delete_transfer_history(history_in: schemas.TransferHistory,
# 册除媒体库文件
if deletedest and history.dest_fileitem:
dest_fileitem = schemas.FileItem(**history.dest_fileitem)
state = StorageChain().delete_media_file(fileitem=dest_fileitem, mtype=MediaType(history.type))
if not state:
return schemas.Response(success=False, message=f"{dest_fileitem.path} 删除失败")
StorageChain().delete_media_file(fileitem=dest_fileitem, mtype=MediaType(history.type))
# 删除源文件
if deletesrc and history.src_fileitem:
@@ -109,11 +112,11 @@ def delete_transfer_history(history_in: schemas.TransferHistory,
return schemas.Response(success=True)
@router.get("/empty/transfer", summary="清空转移历史记录", response_model=schemas.Response)
@router.get("/empty/transfer", summary="清空整理记录", response_model=schemas.Response)
def delete_transfer_history(db: Session = Depends(get_db),
_: User = Depends(get_current_active_superuser)) -> Any:
"""
清空转移历史记录
清空整理记录
"""
TransferHistory.truncate(db)
return schemas.Response(success=True)

View File

@@ -3,7 +3,7 @@ from typing import Annotated, Any, List, Optional
from fastapi import APIRouter, Depends, Header
from app import schemas
from app.chain.command import CommandChain
from app.command import Command
from app.core.config import settings
from app.core.plugin import PluginManager
from app.core.security import verify_apikey, verify_token
@@ -212,7 +212,7 @@ def install(plugin_id: str,
# 注册插件服务
Scheduler().update_plugin_job(plugin_id)
# 注册菜单命令
CommandChain().init_commands(plugin_id)
Command().init_commands(plugin_id)
# 注册插件API
register_plugin_api(plugin_id)
return schemas.Response(success=True)
@@ -280,7 +280,7 @@ def reset_plugin(plugin_id: str,
# 注册插件服务
Scheduler().update_plugin_job(plugin_id)
# 注册菜单命令
CommandChain().init_commands(plugin_id)
Command().init_commands(plugin_id)
# 注册插件API
register_plugin_api(plugin_id)
return schemas.Response(success=True)
@@ -308,7 +308,7 @@ def set_plugin_config(plugin_id: str, conf: dict,
# 注册插件服务
Scheduler().update_plugin_job(plugin_id)
# 注册菜单命令
CommandChain().init_commands(plugin_id)
Command().init_commands(plugin_id)
# 注册插件API
register_plugin_api(plugin_id)
return schemas.Response(success=True)

View File

@@ -294,7 +294,7 @@ def delete_subscribe_by_mediaid(
# 发送事件
eventmanager.send_event(EventType.SubscribeDeleted, {
"subscribe_id": subscribe.id,
"subscribe": subscribe.to_dict()
"subscribe_info": subscribe.to_dict()
})
return schemas.Response(success=True)
@@ -521,7 +521,7 @@ def delete_subscribe(
# 发送事件
eventmanager.send_event(EventType.SubscribeDeleted, {
"subscribe_id": subscribe_id,
"subscribe": subscribe.to_dict()
"subscribe_info": subscribe.to_dict()
})
# 统计订阅
SubscribeHelper().sub_done_async({

View File

@@ -3,6 +3,7 @@ from typing import List, Any
from fastapi import APIRouter, Depends
from app import schemas
from app.chain.recommend import RecommendChain
from app.chain.tmdb import TmdbChain
from app.core.security import verify_token
from app.schemas.types import MediaType
@@ -108,14 +109,10 @@ def tmdb_movies(sort_by: str = "popularity.desc",
"""
浏览TMDB电影信息
"""
movies = TmdbChain().tmdb_discover(mtype=MediaType.MOVIE,
sort_by=sort_by,
with_genres=with_genres,
with_original_language=with_original_language,
page=page)
if not movies:
return []
return [movie.to_dict() for movie in movies]
return RecommendChain().tmdb_movies(sort_by=sort_by,
with_genres=with_genres,
with_original_language=with_original_language,
page=page)
@router.get("/tvs", summary="TMDB剧集", response_model=List[schemas.MediaInfo])
@@ -127,26 +124,19 @@ def tmdb_tvs(sort_by: str = "popularity.desc",
"""
浏览TMDB剧集信息
"""
tvs = TmdbChain().tmdb_discover(mtype=MediaType.TV,
sort_by=sort_by,
with_genres=with_genres,
with_original_language=with_original_language,
page=page)
if not tvs:
return []
return [tv.to_dict() for tv in tvs]
return RecommendChain().tmdb_tvs(sort_by=sort_by,
with_genres=with_genres,
with_original_language=with_original_language,
page=page)
@router.get("/trending", summary="TMDB流行趋势", response_model=List[schemas.MediaInfo])
def tmdb_trending(page: int = 1,
_: schemas.TokenPayload = Depends(verify_token)) -> Any:
"""
浏览TMDB剧集信息
TMDB流行趋势
"""
infos = TmdbChain().tmdb_trending(page=page)
if not infos:
return []
return [info.to_dict() for info in infos]
return RecommendChain().tmdb_trending(page=page)
@router.get("/{tmdbid}/{season}", summary="TMDB季所有集", response_model=List[schemas.TmdbEpisode])

View File

@@ -1,5 +1,5 @@
from pathlib import Path
from typing import Any
from typing import Any, List
from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session
@@ -47,13 +47,35 @@ def query_name(path: str, filetype: str,
})
@router.get("/queue", summary="查询整理队列", response_model=List[schemas.TransferJob])
def query_queue(_: schemas.TokenPayload = Depends(verify_token)) -> Any:
"""
查询整理队列
:param _: Token校验
"""
return TransferChain().get_queue_tasks()
@router.delete("/queue", summary="从整理队列中删除任务", response_model=schemas.Response)
def remove_queue(fileitem: schemas.FileItem, _: schemas.TokenPayload = Depends(verify_token)) -> Any:
"""
查询整理队列
:param fileitem: 文件项
:param _: Token校验
"""
TransferChain().remove_from_queue(fileitem)
return schemas.Response(success=True)
@router.post("/manual", summary="手动转移", response_model=schemas.Response)
def manual_transfer(transer_item: ManualTransferItem,
background: bool = False,
db: Session = Depends(get_db),
_: schemas.TokenPayload = Depends(get_current_active_superuser)) -> Any:
"""
手动转移,文件或历史记录,支持自定义剧集识别格式
:param transer_item: 手工整理项
:param background: 后台运行
:param db: 数据库
:param _: Token校验
"""
@@ -63,7 +85,7 @@ def manual_transfer(transer_item: ManualTransferItem,
# 查询历史记录
history: TransferHistory = TransferHistory.get(db, transer_item.logid)
if not history:
return schemas.Response(success=False, message=f"历史记录不存在ID{transer_item.logid}")
return schemas.Response(success=False, message=f"整理记录不存在ID{transer_item.logid}")
# 强制转移
force = True
if history.status and ("move" in history.mode):
@@ -130,7 +152,8 @@ def manual_transfer(transer_item: ManualTransferItem,
scrape=transer_item.scrape,
library_type_folder=transer_item.library_type_folder,
library_category_folder=transer_item.library_category_folder,
force=force
force=force,
background=background
)
# 失败
if not state:

View File

@@ -342,7 +342,7 @@ class ChainBase(metaclass=ABCMeta):
def download(self, content: Union[Path, str], download_dir: Path, cookie: str,
episodes: Set[int] = None, category: str = None,
downloader: str = None
) -> Optional[Tuple[Optional[str], Optional[str], str]]:
) -> Optional[Tuple[Optional[str], Optional[str], Optional[str], str]]:
"""
根据种子文件,选择并添加下载任务
:param content: 种子文件地址或者磁力链接
@@ -351,7 +351,7 @@ class ChainBase(metaclass=ABCMeta):
:param episodes: 需要下载的集数
:param category: 种子分类
:param downloader: 下载器
:return: 下载器名称、种子Hash、错误信息
:return: 下载器名称、种子Hash、种子文件布局、错误原因
"""
return self.run_module("download", content=content, download_dir=download_dir,
cookie=cookie, episodes=episodes, category=category,

View File

@@ -19,8 +19,7 @@ from app.helper.directory import DirectoryHelper
from app.helper.message import MessageHelper
from app.helper.torrent import TorrentHelper
from app.log import logger
from app.schemas import ExistMediaInfo, NotExistMediaInfo, DownloadingTorrent, Notification
from app.schemas.event import ResourceSelectionEventData, ResourceDownloadEventData
from app.schemas import ExistMediaInfo, NotExistMediaInfo, DownloadingTorrent, Notification, ResourceSelectionEventData, ResourceDownloadEventData
from app.schemas.types import MediaType, TorrentStatus, EventType, MessageChannel, NotificationType, ChainEventType
from app.utils.http import RequestUtils
from app.utils.string import StringUtils
@@ -313,16 +312,23 @@ class DownloadChain(ChainBase):
category=_media.category,
downloader=downloader or _site_downloader)
if result:
_downloader, _hash, error_msg = result
_downloader, _hash, _layout, error_msg = result
else:
_downloader, _hash, error_msg = None, None, "未找到下载器"
_downloader, _hash, _layout, error_msg = None, None, None, "未找到下载器"
if _hash:
# 下载文件路径
if _folder_name:
download_path = download_dir / _folder_name
else:
# `不创建子文件夹` 或 `不存在子文件夹`
if _layout == "NoSubfolder" or not _folder_name:
# 下载路径记录至文件
download_path = download_dir / _file_list[0] if _file_list else download_dir
# 原始布局
elif _folder_name:
download_path = download_dir / _folder_name
# 创建子文件夹
else:
download_path = download_dir / Path(_file_list[0]).stem if _file_list else download_dir
# 文件保存路径
_save_path = download_dir if _layout == "NoSubfolder" or not _folder_name else download_path
# 登记下载记录
self.downloadhis.add(
@@ -337,6 +343,7 @@ class DownloadChain(ChainBase):
seasons=_meta.season,
episodes=download_episodes or _meta.episode,
image=_media.get_backdrop_image(),
downloader=_downloader,
download_hash=_hash,
torrent_name=_torrent.title,
torrent_description=_torrent.description,
@@ -365,8 +372,8 @@ class DownloadChain(ChainBase):
files_to_add.append({
"download_hash": _hash,
"downloader": _downloader,
"fullpath": str(download_dir / _folder_name / file),
"savepath": str(download_dir / _folder_name),
"fullpath": str(_save_path / file),
"savepath": str(_save_path),
"filepath": file,
"torrentname": _meta.org_string,
})
@@ -520,8 +527,8 @@ class DownloadChain(ChainBase):
downloaded_list.append(context)
# 电视剧整季匹配
logger.info(f"开始匹配电视剧整季:{no_exists}")
if no_exists:
logger.info(f"开始匹配电视剧整季:{no_exists}")
# 先把整季缺失的拿出来,看是否刚好有所有季都满足的种子 {tmdbid: [seasons]}
need_seasons: Dict[int, list] = {}
for need_mid, need_tv in no_exists.items():
@@ -624,8 +631,8 @@ class DownloadChain(ChainBase):
# 全部下载完成
break
# 电视剧季内的集匹配
logger.info(f"开始电视剧完整集匹配:{no_exists}")
if no_exists:
logger.info(f"开始电视剧完整集匹配:{no_exists}")
# TMDBID列表
need_tv_list = list(no_exists)
for need_mid in need_tv_list:
@@ -694,8 +701,8 @@ class DownloadChain(ChainBase):
logger.info(f"{need_season} 剩余需要集:{need_episodes}")
# 仍然缺失的剧集从整季中选择需要的集数文件下载仅支持QB和TR
logger.info(f"开始电视剧多集拆包匹配:{no_exists}")
if no_exists:
logger.info(f"开始电视剧多集拆包匹配:{no_exists}")
# TMDBID列表
no_exists_list = list(no_exists)
for need_mid in no_exists_list:

319
app/chain/recommend.py Normal file
View File

@@ -0,0 +1,319 @@
import inspect
import io
import tempfile
from functools import wraps
from pathlib import Path
from typing import Any, Callable, List
from PIL import Image
from cachetools import TTLCache
from cachetools.keys import hashkey
from app.chain import ChainBase
from app.chain.bangumi import BangumiChain
from app.chain.douban import DoubanChain
from app.chain.tmdb import TmdbChain
from app.core.config import settings
from app.log import logger
from app.schemas import MediaType
from app.utils.common import log_execution_time
from app.utils.http import RequestUtils
from app.utils.security import SecurityUtils
from app.utils.singleton import Singleton
# Dedicated cache for recommendation data: entries live for 24 hours so that
# stale lists are eventually refreshed even without an explicit refresh job.
recommend_ttl = 24 * 3600
recommend_cache = TTLCache(maxsize=256, ttl=recommend_ttl)
# Recommend-cache decorator: only non-empty results are cached, so a transient
# empty fetch (e.g. a network hiccup) does not poison the page for a whole TTL.
def cached_with_empty_check(func: Callable):
    """
    Cache ``func``'s return value in ``recommend_cache``, skipping empty results.

    The cache key is built from the fully bound call (defaults applied), so
    equivalent invocations — ``f(5)`` vs ``f(page=5)`` vs omitting a defaulted
    argument — all map to the same entry.  The previous implementation hashed
    raw ``*args`` while *also* injecting the declared default for any parameter
    not passed by keyword, so positional and keyword calls produced different
    cache keys for the same logical call.

    :param func: the function to wrap
    :return: the wrapped function
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        # Bind the actual arguments to the signature and fill in defaults so
        # the key is canonical regardless of how the caller spelled the call.
        bound = inspect.signature(func).bind(*args, **kwargs)
        bound.apply_defaults()
        cache_key = f"{func.__name__}_{hashkey(*bound.args, **bound.kwargs)}"
        if cache_key in recommend_cache:
            return recommend_cache[cache_key]
        result = func(*args, **kwargs)
        # Empty payloads (None / [] / {}) are returned but never cached.
        if result in [None, [], {}]:
            return result
        recommend_cache[cache_key] = result
        return result

    return wrapper
class RecommendChain(ChainBase, metaclass=Singleton):
"""
推荐处理链,单例运行
"""
def __init__(self):
    """Initialise the chain together with its upstream data-source chains."""
    super().__init__()
    # Number of pages pre-cached per recommendation source by refresh_recommend.
    self.cache_max_pages = 5
    # Upstream chains that actually fetch the recommendation data.
    self.bangumichain = BangumiChain()
    self.doubanchain = DoubanChain()
    self.tmdbchain = TmdbChain()
def refresh_recommend(self):
    """
    Refresh all recommendation data.

    Clears the recommend cache, then re-fetches every source page by page
    (page-major across sources, so the same upstream is not hit with many
    consecutive requests) and finally re-caches the collected poster images.
    """
    logger.debug("Starting to refresh Recommend data.")
    recommend_cache.clear()
    logger.debug("Recommend Cache has been cleared.")
    # All recommendation sources; each accepts a `page` keyword argument.
    recommend_methods = [
        self.tmdb_movies,
        self.tmdb_tvs,
        self.tmdb_trending,
        self.bangumi_calendar,
        self.douban_movie_showing,
        self.douban_movies,
        self.douban_tvs,
        self.douban_movie_top250,
        self.douban_tv_weekly_chinese,
        self.douban_tv_weekly_global,
        self.douban_tv_animation,
        self.douban_movie_hot,
        self.douban_tv_hot,
    ]
    # Collected media dicts, used below to warm the poster image cache.
    recommends = []
    # Sources that have run out of data and need no further pages.
    methods_finished = set()
    for page in range(1, self.cache_max_pages + 1):
        for method in recommend_methods:
            if method in methods_finished:
                continue
            logger.debug(f"Fetch {method.__name__} data for page {page}.")
            data = method(page=page)
            if not data:
                # BUGFIX: the original logged "All recommendation methods have
                # finished..." here, i.e. for every single exhausted source.
                # Log the specific source instead.
                logger.debug(f"{method.__name__} returned no data for page {page}; "
                             f"stopping pagination for this source.")
                methods_finished.add(method)
                continue
            recommends.extend(data)
        # Stop early once every source has been exhausted.
        if len(methods_finished) == len(recommend_methods):
            logger.debug("All recommendation methods have finished fetching data. Ending pagination early.")
            break
    # Warm the poster image cache with everything collected above.
    self.__cache_posters(recommends)
    logger.debug("Recommend data refresh completed.")
def __cache_posters(self, datas: List[dict]):
    """
    Extract every item's ``poster_path`` and cache the image locally.

    :param datas: media dicts as produced by the recommendation methods
    """
    # Poster caching is an opt-in feature.
    if not settings.GLOBAL_IMAGE_CACHE:
        return
    for item in datas:
        path = item.get("poster_path")
        if not path:
            continue
        # Request the smaller w500 rendition rather than the original size.
        poster_url = path.replace("original", "w500")
        logger.debug(f"Caching poster image: {poster_url}")
        self.__fetch_and_save_image(poster_url)
@staticmethod
def __fetch_and_save_image(url: str):
    """
    Download an image and store it in the local image cache.

    :param url: remote image URL; no-op when empty or when image caching
                is disabled
    """
    if not settings.GLOBAL_IMAGE_CACHE or not url:
        return
    # Build a sanitized cache path under CACHE_PATH/images.
    sanitized_path = SecurityUtils.sanitize_url_path(url)
    cache_path = settings.CACHE_PATH / "images" / sanitized_path
    # Path-traversal / file-type guard: the target must stay inside
    # CACHE_PATH and carry an allowed image suffix.
    if not SecurityUtils.is_safe_path(settings.CACHE_PATH, cache_path, settings.SECURITY_IMAGE_SUFFIXES):
        logger.debug(f"Invalid cache path or file type for URL: {url}, sanitized path: {sanitized_path}")
        return
    # Already cached — nothing to do.
    if cache_path.exists():
        logger.debug(f"Cache hit: Image already exists at {cache_path}")
        return
    # Douban images need a Referer header and are fetched directly; all
    # other hosts go through the configured proxy (if any).
    referer = "https://movie.douban.com/" if "doubanio.com" in url else None
    proxies = settings.PROXY if not referer else None
    response = RequestUtils(ua=settings.USER_AGENT, proxies=proxies, referer=referer).get_res(url=url)
    if not response:
        logger.debug(f"Empty response for URL: {url}")
        return
    # Verify the payload really is an image before writing it to disk.
    try:
        Image.open(io.BytesIO(response.content)).verify()
    except Exception as e:
        logger.debug(f"Invalid image format for URL {url}: {e}")
        return
    # NOTE: a dead `if not cache_path: return` guard was removed here —
    # cache_path is always a truthy Path by this point.
    try:
        if not cache_path.parent.exists():
            cache_path.parent.mkdir(parents=True, exist_ok=True)
        # Write to a temp file in the same directory, close it, then replace
        # atomically so readers never observe a partially written image.
        with tempfile.NamedTemporaryFile(dir=cache_path.parent, delete=False) as tmp_file:
            tmp_file.write(response.content)
            temp_path = Path(tmp_file.name)
        temp_path.replace(cache_path)
        logger.debug(f"Successfully cached image at {cache_path} for URL: {url}")
    except Exception as e:
        logger.debug(f"Failed to write cache file {cache_path} for URL {url}: {e}")
@log_execution_time(logger=logger)
@cached_with_empty_check
def tmdb_movies(self, sort_by: str = "popularity.desc", with_genres: str = "",
with_original_language: str = "", page: int = 1) -> Any:
"""
TMDB热门电影
"""
movies = self.tmdbchain.tmdb_discover(mtype=MediaType.MOVIE,
sort_by=sort_by,
with_genres=with_genres,
with_original_language=with_original_language,
page=page)
return [movie.to_dict() for movie in movies] if movies else []
@log_execution_time(logger=logger)
@cached_with_empty_check
def tmdb_tvs(self, sort_by: str = "popularity.desc", with_genres: str = "",
with_original_language: str = "zh|en|ja|ko", page: int = 1) -> Any:
"""
TMDB热门电视剧
"""
tvs = self.tmdbchain.tmdb_discover(mtype=MediaType.TV,
sort_by=sort_by,
with_genres=with_genres,
with_original_language=with_original_language,
page=page)
return [tv.to_dict() for tv in tvs] if tvs else []
@log_execution_time(logger=logger)
@cached_with_empty_check
def tmdb_trending(self, page: int = 1) -> Any:
"""
TMDB流行趋势
"""
infos = self.tmdbchain.tmdb_trending(page=page)
return [info.to_dict() for info in infos] if infos else []
@log_execution_time(logger=logger)
@cached_with_empty_check
def bangumi_calendar(self, page: int = 1, count: int = 30) -> Any:
"""
Bangumi每日放送
"""
medias = self.bangumichain.calendar()
return [media.to_dict() for media in medias[(page - 1) * count: page * count]] if medias else []
@log_execution_time(logger=logger)
@cached_with_empty_check
def douban_movie_showing(self, page: int = 1, count: int = 30) -> Any:
"""
豆瓣正在热映
"""
movies = self.doubanchain.movie_showing(page=page, count=count)
return [media.to_dict() for media in movies] if movies else []
@log_execution_time(logger=logger)
@cached_with_empty_check
def douban_movies(self, sort: str = "R", tags: str = "", page: int = 1, count: int = 30) -> Any:
"""
豆瓣最新电影
"""
movies = self.doubanchain.douban_discover(mtype=MediaType.MOVIE,
sort=sort, tags=tags, page=page, count=count)
return [media.to_dict() for media in movies] if movies else []
@log_execution_time(logger=logger)
@cached_with_empty_check
def douban_tvs(self, sort: str = "R", tags: str = "", page: int = 1, count: int = 30) -> Any:
"""
豆瓣最新电视剧
"""
tvs = self.doubanchain.douban_discover(mtype=MediaType.TV,
sort=sort, tags=tags, page=page, count=count)
return [media.to_dict() for media in tvs] if tvs else []
@log_execution_time(logger=logger)
@cached_with_empty_check
def douban_movie_top250(self, page: int = 1, count: int = 30) -> Any:
"""
豆瓣电影TOP250
"""
movies = self.doubanchain.movie_top250(page=page, count=count)
return [media.to_dict() for media in movies] if movies else []
@log_execution_time(logger=logger)
@cached_with_empty_check
def douban_tv_weekly_chinese(self, page: int = 1, count: int = 30) -> Any:
"""
豆瓣国产剧集榜
"""
tvs = self.doubanchain.tv_weekly_chinese(page=page, count=count)
return [media.to_dict() for media in tvs] if tvs else []
@log_execution_time(logger=logger)
@cached_with_empty_check
def douban_tv_weekly_global(self, page: int = 1, count: int = 30) -> Any:
"""
豆瓣全球剧集榜
"""
tvs = self.doubanchain.tv_weekly_global(page=page, count=count)
return [media.to_dict() for media in tvs] if tvs else []
@log_execution_time(logger=logger)
@cached_with_empty_check
def douban_tv_animation(self, page: int = 1, count: int = 30) -> Any:
"""
豆瓣热门动漫
"""
tvs = self.doubanchain.tv_animation(page=page, count=count)
return [media.to_dict() for media in tvs] if tvs else []
@log_execution_time(logger=logger)
@cached_with_empty_check
def douban_movie_hot(self, page: int = 1, count: int = 30) -> Any:
"""
豆瓣热门电影
"""
movies = self.doubanchain.movie_hot(page=page, count=count)
return [media.to_dict() for media in movies] if movies else []
@log_execution_time(logger=logger)
@cached_with_empty_check
def douban_tv_hot(self, page: int = 1, count: int = 30) -> Any:
"""
豆瓣热门电视剧
"""
tvs = self.doubanchain.tv_hot(page=page, count=count)
return [media.to_dict() for media in tvs] if tvs else []

View File

@@ -319,6 +319,7 @@ class SiteChain(ChainBase):
continue
# 新增站点
domain_url = __indexer_domain(inx=indexer, sub_domain=domain)
proxy = False
res = RequestUtils(cookies=cookie,
ua=settings.USER_AGENT
).get_res(url=domain_url)
@@ -336,16 +337,37 @@ class SiteChain(ChainBase):
logger.warn(f"站点 {indexer.get('name')} 连接状态码:{res.status_code},无法添加站点")
continue
else:
_fail_count += 1
logger.warn(f"站点 {indexer.get('name')} 连接失败,无法添加站点")
continue
if not settings.PROXY_HOST:
_fail_count += 1
logger.warn(f"站点 {indexer.get('name')} 连接失败,无法添加站点")
continue
else:
# 如果配置了代理,尝试通过代理重试
logger.info(f"站点 {indexer.get('name')} 初次连接失败,尝试通过代理重试...")
proxy = True
res = RequestUtils(cookies=cookie,
ua=settings.USER_AGENT,
proxies=settings.PROXY
).get_res(url=domain_url)
if res and res.status_code in [200, 500, 403]:
if not indexer.get("public") and not SiteUtils.is_logged_in(res.text):
logger.warn(f"站点 {indexer.get('name')} 登录失败,即使通过代理,无法添加站点")
_fail_count += 1
continue
logger.info(f"站点 {indexer.get('name')} 通过代理连接成功")
else:
logger.warn(f"站点 {indexer.get('name')} 通过代理连接失败,无法添加站点")
_fail_count += 1
continue
# 获取rss地址
rss_url = None
if not indexer.get("public") and domain_url:
# 自动生成rss地址
rss_url, errmsg = self.rsshelper.get_rss_link(url=domain_url,
cookie=cookie,
ua=settings.USER_AGENT)
ua=settings.USER_AGENT,
proxy=proxy)
if errmsg:
logger.warn(errmsg)
# 插入数据库
@@ -355,6 +377,7 @@ class SiteChain(ChainBase):
domain=domain,
cookie=cookie,
rss=rss_url,
proxy=1 if proxy else 0,
public=1 if indexer.get("public") else 0)
_add_count += 1

View File

@@ -84,6 +84,12 @@ class StorageChain(ChainBase):
"""
return self.run_module("rename_file", fileitem=fileitem, name=name)
def get_item(self, fileitem: schemas.FileItem) -> Optional[schemas.FileItem]:
"""
查询目录或文件
"""
return self.get_file_item(storage=fileitem.storage, path=Path(fileitem.path))
def get_file_item(self, storage: str, path: Path) -> Optional[schemas.FileItem]:
"""
根据路径获取文件项
@@ -125,6 +131,12 @@ class StorageChain(ChainBase):
return False
if fileitem.type == "dir":
# 本身是目录
if _blue_dir := self.list_files(fileitem=fileitem, recursion=False):
# 删除蓝光目录
for _f in _blue_dir:
if _f.type == "dir" and _f.name in ["BDMV", "CERTIFICATE"]:
logger.warn(f"{fileitem.storage}{_f.path} 删除蓝光目录")
self.delete_file(_f)
if self.any_files(fileitem, extensions=media_exts) is False:
logger.warn(f"{fileitem.storage}{fileitem.path} 不存在其它媒体文件,删除空目录")
return self.delete_file(fileitem)

View File

@@ -395,15 +395,17 @@ class SubscribeChain(ChainBase, metaclass=Singleton):
return
# 当前下载资源的优先级
priority = max([item.torrent_info.pri_order for item in downloads])
# 订阅存在待定策略,不管是否已完成,均需更新订阅信息
self.subscribeoper.update(subscribe.id, {
"current_priority": priority,
"last_update": datetime.now().strftime('%Y-%m-%d %H:%M:%S')
})
if priority == 100:
# 洗版完成
self.__finish_subscribe(subscribe=subscribe, meta=meta, mediainfo=mediainfo, bestversion=True)
self.__finish_subscribe(subscribe=subscribe, meta=meta, mediainfo=mediainfo)
else:
# 正在洗版,更新资源优先级
logger.info(f'{mediainfo.title_year} 正在洗版,更新资源优先级为 {priority}')
self.subscribeoper.update(subscribe.id, {
"current_priority": priority
})
def finish_subscribe_or_not(self, subscribe: Subscribe, meta: MetaInfo, mediainfo: MediaInfo,
downloads: List[Context] = None,
@@ -432,9 +434,12 @@ class SubscribeChain(ChainBase, metaclass=Singleton):
# 未下载到内容且不完整
logger.info(f'{mediainfo.title_year} 未下载完整,继续订阅 ...')
elif downloads:
# 洗板,下载到了内容,更新资源优先级
# 洗下载到了内容,更新资源优先级
self.update_subscribe_priority(subscribe=subscribe, meta=meta,
mediainfo=mediainfo, downloads=downloads)
elif subscribe.current_priority == 100:
# 洗版完成
self.__finish_subscribe(subscribe=subscribe, meta=meta, mediainfo=mediainfo)
else:
# 洗版,未下载到内容
logger.info(f'{mediainfo.title_year} 继续洗版 ...')
@@ -818,6 +823,8 @@ class SubscribeChain(ChainBase, metaclass=Singleton):
"""
获取已下载过的集数或电影
"""
if subscribe.best_version:
return []
note = subscribe.note or []
if not note:
return []
@@ -861,13 +868,12 @@ class SubscribeChain(ChainBase, metaclass=Singleton):
lack_episode = len(left_episodes)
logger.info(f"{mediainfo.title_year}{season} 更新缺失集数为{lack_episode} ...")
break
update_data = {"lack_episode": lack_episode}
update_data["lack_episode"] = lack_episode
# 更新数据库
if update_data:
self.subscribeoper.update(subscribe.id, update_data)
def __finish_subscribe(self, subscribe: Subscribe, mediainfo: MediaInfo,
meta: MetaBase, bestversion: bool = False):
def __finish_subscribe(self, subscribe: Subscribe, mediainfo: MediaInfo, meta: MetaBase):
"""
完成订阅
"""
@@ -875,9 +881,7 @@ class SubscribeChain(ChainBase, metaclass=Singleton):
if subscribe.state == "P":
return
# 完成订阅
msgstr = "订阅"
if bestversion:
msgstr = "洗版"
msgstr = "订阅" if not subscribe.best_version else "洗版"
logger.info(f'{mediainfo.title_year} 完成{msgstr}')
# 新增订阅历史
self.subscribeoper.add_history(**subscribe.to_dict())
@@ -1291,25 +1295,30 @@ class SubscribeChain(ChainBase, metaclass=Singleton):
totals=totals
)
else:
# 洗版
exist_flag = False
if meta.type == MediaType.TV:
# 对于电视剧,构造缺失的媒体信息
no_exists = {
mediakey: {
subscribe.season: NotExistMediaInfo(
season=subscribe.season,
episodes=[],
total_episode=subscribe.total_episode,
start_episode=subscribe.start_episode or 1)
}
}
else:
# 洗版,如果已经满足了优先级,则认为已经洗版完成
if subscribe.current_priority == 100:
exist_flag = True
no_exists = {}
else:
exist_flag = False
if meta.type == MediaType.TV:
# 对于电视剧,构造缺失的媒体信息
no_exists = {
mediakey: {
subscribe.season: NotExistMediaInfo(
season=subscribe.season,
episodes=[],
total_episode=subscribe.total_episode,
start_episode=subscribe.start_episode or 1)
}
}
else:
no_exists = {}
# 如果媒体已存在,执行订阅完成操作
if exist_flag:
logger.info(f'{mediainfo.title_year} 媒体库中已存在')
if not subscribe.best_version:
logger.info(f'{mediainfo.title_year} 媒体库中已存在')
self.finish_subscribe_or_not(subscribe=subscribe, meta=meta, mediainfo=mediainfo, force=True)
return True, no_exists

File diff suppressed because it is too large Load Diff

View File

@@ -7,7 +7,7 @@ from app.core.security import get_password_hash, verify_password
from app.db.models.user import User
from app.db.user_oper import UserOper
from app.log import logger
from app.schemas.event import AuthCredentials, AuthInterceptCredentials
from app.schemas import AuthCredentials, AuthInterceptCredentials
from app.schemas.types import ChainEventType
from app.utils.otp import OtpUtils
from app.utils.singleton import Singleton

View File

@@ -15,15 +15,18 @@ from app.helper.message import MessageHelper
from app.helper.thread import ThreadHelper
from app.log import logger
from app.scheduler import Scheduler
from app.schemas import Notification
from app.schemas.event import CommandRegisterEventData
from app.schemas import Notification, CommandRegisterEventData
from app.schemas.types import EventType, MessageChannel, ChainEventType
from app.utils.object import ObjectUtils
from app.utils.singleton import Singleton
from app.utils.structures import DictUtils
class CommandChain(ChainBase, metaclass=Singleton):
class CommandChain(ChainBase):
pass
class Command(metaclass=Singleton):
"""
全局命令管理消费事件
"""
@@ -210,7 +213,7 @@ class CommandChain(ChainBase, metaclass=Singleton):
if filtered_initial_commands != self._registered_commands or force_register:
logger.debug("Command set has changed or force registration is enabled.")
self._registered_commands = filtered_initial_commands
super().register_commands(commands=filtered_initial_commands)
CommandChain().register_commands(commands=filtered_initial_commands)
else:
logger.debug("Command set unchanged, skipping broadcast registration.")
except Exception as e:
@@ -248,7 +251,7 @@ class CommandChain(ChainBase, metaclass=Singleton):
event = eventmanager.send_event(ChainEventType.CommandRegister, event_data)
return event, commands
def __build_plugin_commands(self, pid: Optional[str] = None) -> Dict[str, dict]:
def __build_plugin_commands(self, _: Optional[str] = None) -> Dict[str, dict]:
"""
构建插件命令
"""
@@ -277,7 +280,7 @@ class CommandChain(ChainBase, metaclass=Singleton):
if command.get("type") == "scheduler":
# 定时服务
if userid:
self.post_message(
CommandChain().post_message(
Notification(
channel=channel,
source=source,
@@ -290,7 +293,7 @@ class CommandChain(ChainBase, metaclass=Singleton):
self.scheduler.start(job_id=command.get("id"))
if userid:
self.post_message(
CommandChain().post_message(
Notification(
channel=channel,
source=source,

View File

@@ -10,7 +10,7 @@ from typing import Any, Dict, List, Optional, Tuple, Type
from dotenv import set_key
from pydantic import BaseModel, BaseSettings, validator, Field
from app.log import logger
from app.log import logger, log_settings, LogConfigModel
from app.utils.system import SystemUtils
from app.utils.url import UrlUtils
@@ -240,9 +240,11 @@ class ConfigModel(BaseModel):
RENAME_FORMAT_S0_NAMES: List[str] = Field(
default_factory=lambda: ["Specials", "SPs"]
)
# 启用分词搜索
TOKENIZED_SEARCH: bool = False
class Settings(BaseSettings, ConfigModel):
class Settings(BaseSettings, ConfigModel, LogConfigModel):
"""
系统配置类
"""
@@ -404,6 +406,8 @@ class Settings(BaseSettings, ConfigModel):
# 仅成功更新配置时,才更新内存
if success:
setattr(self, key, converted_value)
if hasattr(log_settings, key):
setattr(log_settings, key, converted_value)
return success, message
return True, ""
except Exception as e:
@@ -414,8 +418,14 @@ class Settings(BaseSettings, ConfigModel):
更新多个配置项
"""
results = {}
log_updated = False
for k, v in env.items():
results[k] = self.update_setting(k, v)
if hasattr(log_settings, k):
log_updated = True
# 本次更新存在日志配置项更新,需要重新加载日志配置
if log_updated:
logger.update_loggers()
return results
@property
@@ -481,6 +491,7 @@ class Settings(BaseSettings, ConfigModel):
"refresh": 100,
"tmdb": 1024,
"douban": 512,
"bangumi": 512,
"fanart": 512,
"meta": (self.META_CACHE_EXPIRE or 24) * 3600
}
@@ -489,6 +500,7 @@ class Settings(BaseSettings, ConfigModel):
"refresh": 50,
"tmdb": 256,
"douban": 256,
"bangumi": 256,
"fanart": 128,
"meta": (self.META_CACHE_EXPIRE or 2) * 3600
}

View File

@@ -1,5 +1,5 @@
import re
from dataclasses import dataclass, field, asdict
from dataclasses import dataclass, field
from datetime import datetime
from typing import List, Dict, Any, Tuple
@@ -142,7 +142,7 @@ class TorrentInfo:
"""
返回字典
"""
dicts = asdict(self)
dicts = vars(self).copy()
dicts["volume_factor"] = self.volume_factor
dicts["freedate_diff"] = self.freedate_diff
return dicts
@@ -740,7 +740,7 @@ class MediaInfo:
"""
返回字典
"""
dicts = asdict(self)
dicts = vars(self).copy()
dicts["type"] = self.type.value if self.type else None
dicts["detail_link"] = self.detail_link
dicts["title_year"] = self.title_year

View File

@@ -13,7 +13,7 @@ from typing import Callable, Dict, List, Optional, Union
from app.helper.message import MessageHelper
from app.helper.thread import ThreadHelper
from app.log import logger
from app.schemas.event import ChainEventData
from app.schemas import ChainEventData
from app.schemas.types import ChainEventType, EventType
from app.utils.limit import ExponentialBackoffRateLimiter
from app.utils.singleton import Singleton
@@ -438,12 +438,15 @@ class EventManager(metaclass=Singleton):
# 如果类不在全局变量中,尝试动态导入模块并创建实例
try:
# 导入模块除了插件只有chain能响应事件
if not class_name.endswith("Chain"):
if class_name == "Command":
module_name = "app.command"
module = importlib.import_module(module_name)
elif class_name.endswith("Chain"):
module_name = f"app.chain.{class_name[:-5].lower()}"
module = importlib.import_module(module_name)
else:
logger.debug(f"事件处理出错:无效的 Chain 类名: {class_name},类名必须以 'Chain' 结尾")
return None
module_name = f"app.chain.{class_name[:-5].lower()}"
module = importlib.import_module(module_name)
if hasattr(module, class_name):
class_obj = getattr(module, class_name)()
return class_obj

View File

@@ -1,13 +1,13 @@
import traceback
from dataclasses import dataclass, asdict
from dataclasses import dataclass
from typing import Union, Optional, List, Self
import cn2an
import regex as re
from app.log import logger
from app.utils.string import StringUtils
from app.schemas.types import MediaType
from app.utils.string import StringUtils
@dataclass
@@ -589,9 +589,10 @@ class MetaBase(object):
"""
转为字典
"""
dicts = asdict(self)
dicts = vars(self).copy()
dicts["type"] = self.type.value if self.type else None
dicts["season_episode"] = self.season_episode
dicts["edition"] = self.edition
dicts["name"] = self.name
dicts["episode_list"] = self.episode_list
return dicts

View File

@@ -286,7 +286,7 @@ def decrypt(data: bytes, key: bytes) -> Optional[bytes]:
return None
def encrypt_message(message: str, key: bytes):
def encrypt_message(message: str, key: bytes) -> str:
"""
使用给定的key对消息进行加密并返回加密后的字符串
"""
@@ -295,14 +295,14 @@ def encrypt_message(message: str, key: bytes):
return encrypted_message.decode()
def hash_sha256(message):
def hash_sha256(message: str) -> str:
"""
对字符串做hash运算
"""
return hashlib.sha256(message.encode()).hexdigest()
def aes_decrypt(data, key):
def aes_decrypt(data: str, key: str) -> str:
"""
AES解密
"""
@@ -322,7 +322,7 @@ def aes_decrypt(data, key):
return result.decode('utf-8')
def aes_encrypt(data, key):
def aes_encrypt(data: str, key: str) -> str:
"""
AES加密
"""
@@ -338,7 +338,7 @@ def aes_encrypt(data, key):
return base64.b64encode(cipher.iv + result).decode('utf-8')
def nexusphp_encrypt(data_str: str, key):
def nexusphp_encrypt(data_str: str, key: bytes) -> str:
"""
NexusPHP加密
"""

View File

@@ -13,7 +13,7 @@ connect_args = {
# 启用 WAL 模式时的额外配置
if settings.DB_WAL_ENABLE:
connect_args["check_same_thread"] = False
kwargs = {
db_kwargs = {
"url": f"sqlite:///{settings.CONFIG_PATH}/user.db",
"pool_pre_ping": settings.DB_POOL_PRE_PING,
"echo": settings.DB_ECHO,
@@ -23,13 +23,13 @@ kwargs = {
}
# 当使用 QueuePool 时,添加 QueuePool 特有的参数
if pool_class == QueuePool:
kwargs.update({
db_kwargs.update({
"pool_size": settings.DB_POOL_SIZE,
"pool_timeout": settings.DB_POOL_TIMEOUT,
"max_overflow": settings.DB_MAX_OVERFLOW
})
# 创建数据库引擎
Engine = create_engine(**kwargs)
Engine = create_engine(**db_kwargs)
# 根据配置设置日志模式
journal_mode = "WAL" if settings.DB_WAL_ENABLE else "DELETE"
with Engine.connect() as connection:
@@ -198,7 +198,7 @@ class Base:
@classmethod
@db_query
def get(cls, db: Session, rid: int) -> Self:
return db.query(cls).filter(cls.id == rid).first()
return db.query(cls).filter(and_(cls.id == rid)).first()
@db_update
def update(self, db: Session, payload: dict):

View File

@@ -29,6 +29,8 @@ class DownloadHistory(Base):
episodes = Column(String)
# 海报
image = Column(String)
# 下载器
downloader = Column(String)
# 下载任务Hash
download_hash = Column(String, index=True)
# 种子名称
@@ -168,10 +170,10 @@ class DownloadFiles(Base):
下载文件记录
"""
id = Column(Integer, Sequence('id'), primary_key=True, index=True)
# 下载任务Hash
download_hash = Column(String, index=True)
# 下载器
downloader = Column(String)
# 下载任务Hash
download_hash = Column(String, index=True)
# 完整路径
fullpath = Column(String, index=True)
# 保存路径

View File

@@ -1,6 +1,6 @@
from datetime import datetime
from sqlalchemy import Column, Integer, String, Sequence, Float, JSON, func
from sqlalchemy import Column, Integer, String, Sequence, Float, JSON, func, or_
from sqlalchemy.orm import Session
from app.db import db_query, Base
@@ -81,7 +81,7 @@ class SiteUserData(Base):
func.max(SiteUserData.updated_day).label('latest_update_day')
)
.group_by(SiteUserData.domain)
.filter(SiteUserData.err_msg.is_(None))
.filter(or_(SiteUserData.err_msg.is_(None), SiteUserData.err_msg == ""))
.subquery()
)

View File

@@ -8,7 +8,7 @@ from app.db import db_query, db_update, Base
class TransferHistory(Base):
"""
转移历史记录
整理记录
"""
id = Column(Integer, Sequence('id'), primary_key=True, index=True)
# 源路径
@@ -43,6 +43,8 @@ class TransferHistory(Base):
episodes = Column(String)
# 海报
image = Column(String)
# 下载器
downloader = Column(String)
# 下载器hash
download_hash = Column(String, index=True)
# 转移成功状态

View File

@@ -114,7 +114,8 @@ class SiteOper(DbOper):
"domain": domain,
"name": name,
"updated_day": current_day,
"updated_time": current_time
"updated_time": current_time,
"err_msg": payload.get("err_msg") or ""
})
# 按站点+天判断是否存在数据
siteuserdatas = SiteUserData.get_by_domain(self._db, domain=domain, workdate=current_day)

View File

@@ -120,7 +120,7 @@ class TransferHistoryOper(DbOper):
def add_success(self, fileitem: FileItem, mode: str, meta: MetaBase,
mediainfo: MediaInfo, transferinfo: TransferInfo,
download_hash: str = None):
downloader: str = None, download_hash: str = None):
"""
新增转移成功历史记录
"""
@@ -143,13 +143,14 @@ class TransferHistoryOper(DbOper):
seasons=meta.season,
episodes=meta.episode,
image=mediainfo.get_poster_image(),
downloader=downloader,
download_hash=download_hash,
status=1,
files=transferinfo.file_list
)
def add_fail(self, fileitem: FileItem, mode: str, meta: MetaBase, mediainfo: MediaInfo = None,
transferinfo: TransferInfo = None, download_hash: str = None):
transferinfo: TransferInfo = None, downloader: str = None, download_hash: str = None):
"""
新增转移失败历史记录
"""
@@ -173,6 +174,7 @@ class TransferHistoryOper(DbOper):
seasons=meta.season,
episodes=meta.episode,
image=mediainfo.get_poster_image(),
downloader=downloader,
download_hash=download_hash,
status=0,
errmsg=transferinfo.message or '未知错误',
@@ -188,6 +190,7 @@ class TransferHistoryOper(DbOper):
mode=mode,
seasons=meta.season,
episodes=meta.episode,
downloader=downloader,
download_hash=download_hash,
status=0,
errmsg="未识别到媒体信息"

View File

@@ -68,10 +68,16 @@ class DirectoryHelper:
# 电影/电视剧
media_type = media.type.value
dirs = self.get_dirs()
# 如果存在源目录,并源目录为任一下载目录的子目录时,则进行源目录匹配,否则,允许源目录按同盘优先的逻辑匹配
matching_dirs = [d for d in dirs if src_path.is_relative_to(d.download_path)] if src_path else []
# 根据是否有匹配的源目录,决定要考虑的目录集合
dirs_to_consider = matching_dirs if matching_dirs else dirs
# 已匹配的目录
matched_dirs: List[schemas.TransferDirectoryConf] = []
# 按照配置顺序查找
for d in dirs:
for d in dirs_to_consider:
# 没有启用整理的目录
if not d.monitor_type and not include_unsorted:
continue
@@ -81,9 +87,6 @@ class DirectoryHelper:
# 目标存储类型不匹配
if target_storage and d.library_storage != target_storage:
continue
# 有源目录时,源目录不匹配下载目录
if src_path and not src_path.is_relative_to(d.download_path):
continue
# 有目标目录时,目标目录不匹配媒体库目录
if dest_path and dest_path != Path(d.library_path):
continue

View File

@@ -84,22 +84,33 @@ class FormatParser(object):
拆分集数返回开始集数结束集数Part信息
"""
# 指定的具体集数,直接返回
if self._start_ep is not None and self._start_ep == self._end_ep:
if isinstance(self._start_ep, str):
s, e = self._start_ep.split("-")
start_ep = self.__offset.replace("EP", s)
end_ep = self.__offset.replace("EP", e)
if int(s) == int(e):
if self._start_ep is not None:
if self._start_ep == self._end_ep:
# `details` 格式为 `X-X` 或者 `X`
if isinstance(self._start_ep, str):
# `details` 格式为 `X-X`
s, e = self._start_ep.split("-")
start_ep = self.__offset.replace("EP", s)
end_ep = self.__offset.replace("EP", e)
if int(s) == int(e):
return int(eval(start_ep)), None, self.part
return int(eval(start_ep)), int(eval(end_ep)), self.part
else:
# `details` 格式为 `X`
start_ep = self.__offset.replace("EP", str(self._start_ep))
return int(eval(start_ep)), None, self.part
return int(eval(start_ep)), int(eval(end_ep)), self.part
else:
# `details` 格式为 `X,X`
start_ep = self.__offset.replace("EP", str(self._start_ep))
return int(eval(start_ep)), None, self.part
end_ep = self.__offset.replace("EP", str(self._end_ep))
return int(eval(start_ep)), int(eval(end_ep)), self.part
if not self._format:
# 未填入`集数定位` 且没有`指定集数` 仅处理`集数偏移`
start_ep = eval(self.__offset.replace("EP", str(file_meta.begin_episode))) if file_meta.begin_episode else None
end_ep = eval(self.__offset.replace("EP", str(file_meta.end_episode))) if file_meta.end_episode else None
return int(start_ep) if start_ep else None, int(end_ep) if end_ep else None, self.part
else:
# 有`集数定位`
s, e = self.__handle_single(file_name)
start_ep = self.__offset.replace("EP", str(s)) if s else None
end_ep = self.__offset.replace("EP", str(e)) if e else None

View File

@@ -120,7 +120,7 @@ class PluginHelper(metaclass=Singleton):
"""
if not settings.PLUGIN_STATISTIC_SHARE:
return {}
res = RequestUtils(timeout=10).get_res(self._install_statistic)
res = RequestUtils(proxies=settings.PROXY, timeout=10).get_res(self._install_statistic)
if res and res.status_code == 200:
return res.json()
return {}
@@ -134,7 +134,7 @@ class PluginHelper(metaclass=Singleton):
if not pid:
return False
install_reg_url = self._install_reg.format(pid=pid)
res = RequestUtils(timeout=5).get_res(install_reg_url)
res = RequestUtils(proxies=settings.PROXY, timeout=5).get_res(install_reg_url)
if res and res.status_code == 200:
return True
return False
@@ -148,7 +148,8 @@ class PluginHelper(metaclass=Singleton):
plugins = self.systemconfig.get(SystemConfigKey.UserInstalledPlugins)
if not plugins:
return False
res = RequestUtils(content_type="application/json",
res = RequestUtils(proxies=settings.PROXY,
content_type="application/json",
timeout=5).post(self._install_report,
json={"plugins": [{"plugin_id": plugin} for plugin in plugins]})
return True if res else False

View File

@@ -44,7 +44,7 @@ class SubscribeHelper(metaclass=Singleton):
"""
if not settings.SUBSCRIBE_STATISTIC_SHARE:
return []
res = RequestUtils(timeout=15).get_res(self._sub_statistic, params={
res = RequestUtils(proxies=settings.PROXY, timeout=15).get_res(self._sub_statistic, params={
"stype": stype,
"page": page,
"count": count
@@ -59,7 +59,7 @@ class SubscribeHelper(metaclass=Singleton):
"""
if not settings.SUBSCRIBE_STATISTIC_SHARE:
return False
res = RequestUtils(timeout=5, headers={
res = RequestUtils(proxies=settings.PROXY, timeout=5, headers={
"Content-Type": "application/json"
}).post_res(self._sub_reg, json=sub)
if res and res.status_code == 200:
@@ -72,7 +72,7 @@ class SubscribeHelper(metaclass=Singleton):
"""
if not settings.SUBSCRIBE_STATISTIC_SHARE:
return False
res = RequestUtils(timeout=5, headers={
res = RequestUtils(proxies=settings.PROXY, timeout=5, headers={
"Content-Type": "application/json"
}).post_res(self._sub_done, json=sub)
if res and res.status_code == 200:
@@ -104,7 +104,7 @@ class SubscribeHelper(metaclass=Singleton):
subscribes = SubscribeOper().list()
if not subscribes:
return True
res = RequestUtils(content_type="application/json",
res = RequestUtils(proxies=settings.PROXY, content_type="application/json",
timeout=10).post(self._sub_report,
json={
"subscribes": [
@@ -125,7 +125,7 @@ class SubscribeHelper(metaclass=Singleton):
return False, "订阅不存在"
subscribe_dict = subscribe.to_dict()
subscribe_dict.pop("id")
res = RequestUtils(content_type="application/json",
res = RequestUtils(proxies=settings.PROXY, content_type="application/json",
timeout=10).post(self._sub_share,
json={
"share_title": share_title,
@@ -146,7 +146,7 @@ class SubscribeHelper(metaclass=Singleton):
"""
if not settings.SUBSCRIBE_STATISTIC_SHARE:
return False, "当前没有开启订阅数据共享功能"
res = RequestUtils(timeout=5, headers={
res = RequestUtils(proxies=settings.PROXY, timeout=5, headers={
"Content-Type": "application/json"
}).get_res(self._sub_fork % share_id)
if res is None:
@@ -163,7 +163,7 @@ class SubscribeHelper(metaclass=Singleton):
"""
if not settings.SUBSCRIBE_STATISTIC_SHARE:
return []
res = RequestUtils(timeout=15).get_res(self._sub_shares, params={
res = RequestUtils(proxies=settings.PROXY, timeout=15).get_res(self._sub_shares, params={
"name": name,
"page": page,
"count": count

View File

@@ -5,15 +5,19 @@ from pathlib import Path
from typing import Dict, Any, Optional
import click
from pydantic import BaseSettings
from pydantic import BaseSettings, BaseModel
from app.utils.system import SystemUtils
class LogSettings(BaseSettings):
class LogConfigModel(BaseModel):
"""
日志设置
Pydantic 配置模型,描述所有配置项及其类型和默认值
"""
class Config:
extra = "ignore" # 忽略未定义的配置项
# 配置文件目录
CONFIG_DIR: Optional[str] = None
# 是否为调试模式
@@ -29,6 +33,12 @@ class LogSettings(BaseSettings):
# 文件日志格式
LOG_FILE_FORMAT: str = "%(levelname)s%(asctime)s - %(message)s"
class LogSettings(BaseSettings, LogConfigModel):
"""
日志设置类
"""
@property
def CONFIG_PATH(self):
return SystemUtils.get_config_path(self.CONFIG_DIR)
@@ -124,7 +134,8 @@ class LoggerManager:
def __setup_logger(log_file: str):
"""
设置日志
log_file日志文件相对路径
:param log_file日志文件相对路径
"""
log_file_path = log_settings.LOG_PATH / log_file
log_file_path.parent.mkdir(parents=True, exist_ok=True)
@@ -134,6 +145,8 @@ class LoggerManager:
if log_settings.DEBUG:
_logger.setLevel(logging.DEBUG)
# 全局日志等级
else:
loglevel = getattr(logging, log_settings.LOG_LEVEL.upper(), logging.INFO)
_logger.setLevel(loglevel)
@@ -162,6 +175,21 @@ class LoggerManager:
return _logger
def update_loggers(self):
"""
更新日志实例
"""
_new_loggers: Dict[str, Any] = {}
for log_file, _logger in self._loggers.items():
# 移除已有的 handler避免重复添加
for handler in _logger.handlers:
_logger.removeHandler(handler)
# 重新设置日志实例
_new_logger = self.__setup_logger(log_file=log_file)
_new_loggers[log_file] = _new_logger
self._loggers = _new_loggers
def logger(self, method: str, msg: str, *args, **kwargs):
"""
获取模块的logger
@@ -181,7 +209,7 @@ class LoggerManager:
# 获取调用者的模块的logger
_logger = self._loggers.get(logfile)
if not _logger:
_logger = self.__setup_logger(logfile)
_logger = self.__setup_logger(log_file=logfile)
self._loggers[logfile] = _logger
# 调用logger的方法打印日志
if hasattr(_logger, method):
@@ -210,7 +238,7 @@ class LoggerManager:
"""
输出警告级别日志(兼容)
"""
self.logger("warning", msg, *args, **kwargs)
self.warning(msg, *args, **kwargs)
def error(self, msg: str, *args, **kwargs):
"""

View File

@@ -1,8 +1,9 @@
from datetime import datetime
from functools import lru_cache
import requests
from cachetools import TTLCache, cached
from app.core.config import settings
from app.utils.http import RequestUtils
@@ -28,7 +29,7 @@ class BangumiApi(object):
pass
@classmethod
@lru_cache(maxsize=128)
@cached(cache=TTLCache(maxsize=settings.CACHE_CONF["bangumi"], ttl=settings.CACHE_CONF["meta"]))
def __invoke(cls, url, **kwargs):
req_url = cls._base_url + url
params = {}

View File

@@ -175,6 +175,19 @@ class DoubanApi(metaclass=Singleton):
).decode()
@cached(cache=TTLCache(maxsize=settings.CACHE_CONF["douban"], ttl=settings.CACHE_CONF["meta"]))
def __invoke_recommend(self, url: str, **kwargs) -> dict:
"""
推荐/发现类API
"""
return self.__invoke(url, **kwargs)
@cached(cache=TTLCache(maxsize=settings.CACHE_CONF["douban"], ttl=settings.CACHE_CONF["meta"]))
def __invoke_search(self, url: str, **kwargs) -> dict:
"""
搜索类API
"""
return self.__invoke(url, **kwargs)
def __invoke(self, url: str, **kwargs) -> dict:
"""
GET请求
@@ -244,189 +257,189 @@ class DoubanApi(metaclass=Singleton):
"""
关键字搜索
"""
return self.__invoke(self._urls["search"], q=keyword,
start=start, count=count, _ts=ts)
return self.__invoke_search(self._urls["search"], q=keyword,
start=start, count=count, _ts=ts)
def movie_search(self, keyword: str, start: int = 0, count: int = 20,
ts=datetime.strftime(datetime.now(), '%Y%m%d')):
"""
电影搜索
"""
return self.__invoke(self._urls["movie_search"], q=keyword,
start=start, count=count, _ts=ts)
return self.__invoke_search(self._urls["movie_search"], q=keyword,
start=start, count=count, _ts=ts)
def tv_search(self, keyword: str, start: int = 0, count: int = 20,
ts=datetime.strftime(datetime.now(), '%Y%m%d')):
"""
电视搜索
"""
return self.__invoke(self._urls["tv_search"], q=keyword,
start=start, count=count, _ts=ts)
return self.__invoke_search(self._urls["tv_search"], q=keyword,
start=start, count=count, _ts=ts)
def book_search(self, keyword: str, start: int = 0, count: int = 20,
ts=datetime.strftime(datetime.now(), '%Y%m%d')):
"""
书籍搜索
"""
return self.__invoke(self._urls["book_search"], q=keyword,
start=start, count=count, _ts=ts)
return self.__invoke_search(self._urls["book_search"], q=keyword,
start=start, count=count, _ts=ts)
def group_search(self, keyword: str, start: int = 0, count: int = 20,
ts=datetime.strftime(datetime.now(), '%Y%m%d')):
"""
小组搜索
"""
return self.__invoke(self._urls["group_search"], q=keyword,
start=start, count=count, _ts=ts)
return self.__invoke_search(self._urls["group_search"], q=keyword,
start=start, count=count, _ts=ts)
def person_search(self, keyword: str, start: int = 0, count: int = 20,
ts=datetime.strftime(datetime.now(), '%Y%m%d')):
"""
人物搜索
"""
return self.__invoke(self._urls["search_subject"], type="person", q=keyword,
start=start, count=count, _ts=ts)
return self.__invoke_search(self._urls["search_subject"], type="person", q=keyword,
start=start, count=count, _ts=ts)
def movie_showing(self, start: int = 0, count: int = 20,
ts=datetime.strftime(datetime.now(), '%Y%m%d')):
"""
正在热映
"""
return self.__invoke(self._urls["movie_showing"],
start=start, count=count, _ts=ts)
return self.__invoke_recommend(self._urls["movie_showing"],
start=start, count=count, _ts=ts)
def movie_soon(self, start: int = 0, count: int = 20,
ts=datetime.strftime(datetime.now(), '%Y%m%d')):
"""
即将上映
"""
return self.__invoke(self._urls["movie_soon"],
start=start, count=count, _ts=ts)
return self.__invoke_recommend(self._urls["movie_soon"],
start=start, count=count, _ts=ts)
def movie_hot_gaia(self, start: int = 0, count: int = 20,
ts=datetime.strftime(datetime.now(), '%Y%m%d')):
"""
热门电影
"""
return self.__invoke(self._urls["movie_hot_gaia"],
start=start, count=count, _ts=ts)
return self.__invoke_recommend(self._urls["movie_hot_gaia"],
start=start, count=count, _ts=ts)
def tv_hot(self, start: int = 0, count: int = 20,
ts=datetime.strftime(datetime.now(), '%Y%m%d')):
"""
热门剧集
"""
return self.__invoke(self._urls["tv_hot"],
start=start, count=count, _ts=ts)
return self.__invoke_recommend(self._urls["tv_hot"],
start=start, count=count, _ts=ts)
def tv_animation(self, start: int = 0, count: int = 20,
ts=datetime.strftime(datetime.now(), '%Y%m%d')):
"""
动画
"""
return self.__invoke(self._urls["tv_animation"],
start=start, count=count, _ts=ts)
return self.__invoke_recommend(self._urls["tv_animation"],
start=start, count=count, _ts=ts)
def tv_variety_show(self, start: int = 0, count: int = 20,
ts=datetime.strftime(datetime.now(), '%Y%m%d')):
"""
综艺
"""
return self.__invoke(self._urls["tv_variety_show"],
start=start, count=count, _ts=ts)
return self.__invoke_recommend(self._urls["tv_variety_show"],
start=start, count=count, _ts=ts)
def tv_rank_list(self, start: int = 0, count: int = 20,
ts=datetime.strftime(datetime.now(), '%Y%m%d')):
"""
电视剧排行榜
"""
return self.__invoke(self._urls["tv_rank_list"],
start=start, count=count, _ts=ts)
return self.__invoke_recommend(self._urls["tv_rank_list"],
start=start, count=count, _ts=ts)
def show_hot(self, start: int = 0, count: int = 20,
ts=datetime.strftime(datetime.now(), '%Y%m%d')):
"""
综艺热门
"""
return self.__invoke(self._urls["show_hot"],
start=start, count=count, _ts=ts)
return self.__invoke_recommend(self._urls["show_hot"],
start=start, count=count, _ts=ts)
def movie_detail(self, subject_id: str):
"""
电影详情
"""
return self.__invoke(self._urls["movie_detail"] + subject_id)
return self.__invoke_search(self._urls["movie_detail"] + subject_id)
def movie_celebrities(self, subject_id: str):
"""
电影演职员
"""
return self.__invoke(self._urls["movie_celebrities"] % subject_id)
return self.__invoke_search(self._urls["movie_celebrities"] % subject_id)
def tv_detail(self, subject_id: str):
"""
电视剧详情
"""
return self.__invoke(self._urls["tv_detail"] + subject_id)
return self.__invoke_search(self._urls["tv_detail"] + subject_id)
def tv_celebrities(self, subject_id: str):
"""
电视剧演职员
"""
return self.__invoke(self._urls["tv_celebrities"] % subject_id)
return self.__invoke_search(self._urls["tv_celebrities"] % subject_id)
def book_detail(self, subject_id: str):
"""
书籍详情
"""
return self.__invoke(self._urls["book_detail"] + subject_id)
return self.__invoke_search(self._urls["book_detail"] + subject_id)
def movie_top250(self, start: int = 0, count: int = 20,
ts=datetime.strftime(datetime.now(), '%Y%m%d')):
"""
电影TOP250
"""
return self.__invoke(self._urls["movie_top250"],
start=start, count=count, _ts=ts)
return self.__invoke_recommend(self._urls["movie_top250"],
start=start, count=count, _ts=ts)
def movie_recommend(self, tags='', sort='R', start: int = 0, count: int = 20,
ts=datetime.strftime(datetime.now(), '%Y%m%d')):
"""
电影探索
"""
return self.__invoke(self._urls["movie_recommend"], tags=tags, sort=sort,
start=start, count=count, _ts=ts)
return self.__invoke_recommend(self._urls["movie_recommend"], tags=tags, sort=sort,
start=start, count=count, _ts=ts)
def tv_recommend(self, tags='', sort='R', start: int = 0, count: int = 20,
ts=datetime.strftime(datetime.now(), '%Y%m%d')):
"""
电视剧探索
"""
return self.__invoke(self._urls["tv_recommend"], tags=tags, sort=sort,
start=start, count=count, _ts=ts)
return self.__invoke_recommend(self._urls["tv_recommend"], tags=tags, sort=sort,
start=start, count=count, _ts=ts)
def tv_chinese_best_weekly(self, start: int = 0, count: int = 20,
ts=datetime.strftime(datetime.now(), '%Y%m%d')):
"""
华语口碑周榜
"""
return self.__invoke(self._urls["tv_chinese_best_weekly"],
start=start, count=count, _ts=ts)
return self.__invoke_recommend(self._urls["tv_chinese_best_weekly"],
start=start, count=count, _ts=ts)
def tv_global_best_weekly(self, start: int = 0, count: int = 20,
ts=datetime.strftime(datetime.now(), '%Y%m%d')):
"""
全球口碑周榜
"""
return self.__invoke(self._urls["tv_global_best_weekly"],
start=start, count=count, _ts=ts)
return self.__invoke_recommend(self._urls["tv_global_best_weekly"],
start=start, count=count, _ts=ts)
def doulist_detail(self, subject_id: str):
"""
豆列详情
:param subject_id: 豆列id
"""
return self.__invoke(self._urls["doulist"] + subject_id)
return self.__invoke_search(self._urls["doulist"] + subject_id)
def doulist_items(self, subject_id: str, start: int = 0, count: int = 20,
ts=datetime.strftime(datetime.now(), '%Y%m%d')):
@@ -437,8 +450,8 @@ class DoubanApi(metaclass=Singleton):
:param count: 数量
:param ts: 时间戳
"""
return self.__invoke(self._urls["doulist_items"] % subject_id,
start=start, count=count, _ts=ts)
return self.__invoke_search(self._urls["doulist_items"] % subject_id,
start=start, count=count, _ts=ts)
def movie_recommendations(self, subject_id: str, start: int = 0, count: int = 20,
ts=datetime.strftime(datetime.now(), '%Y%m%d')):
@@ -449,8 +462,8 @@ class DoubanApi(metaclass=Singleton):
:param count: 数量
:param ts: 时间戳
"""
return self.__invoke(self._urls["movie_recommendations"] % subject_id,
start=start, count=count, _ts=ts)
return self.__invoke_recommend(self._urls["movie_recommendations"] % subject_id,
start=start, count=count, _ts=ts)
def tv_recommendations(self, subject_id: str, start: int = 0, count: int = 20,
ts=datetime.strftime(datetime.now(), '%Y%m%d')):
@@ -461,8 +474,8 @@ class DoubanApi(metaclass=Singleton):
:param count: 数量
:param ts: 时间戳
"""
return self.__invoke(self._urls["tv_recommendations"] % subject_id,
start=start, count=count, _ts=ts)
return self.__invoke_recommend(self._urls["tv_recommendations"] % subject_id,
start=start, count=count, _ts=ts)
def movie_photos(self, subject_id: str, start: int = 0, count: int = 20,
ts=datetime.strftime(datetime.now(), '%Y%m%d')):
@@ -473,8 +486,8 @@ class DoubanApi(metaclass=Singleton):
:param count: 数量
:param ts: 时间戳
"""
return self.__invoke(self._urls["movie_photos"] % subject_id,
start=start, count=count, _ts=ts)
return self.__invoke_search(self._urls["movie_photos"] % subject_id,
start=start, count=count, _ts=ts)
def tv_photos(self, subject_id: str, start: int = 0, count: int = 20,
ts=datetime.strftime(datetime.now(), '%Y%m%d')):
@@ -485,8 +498,8 @@ class DoubanApi(metaclass=Singleton):
:param count: 数量
:param ts: 时间戳
"""
return self.__invoke(self._urls["tv_photos"] % subject_id,
start=start, count=count, _ts=ts)
return self.__invoke_search(self._urls["tv_photos"] % subject_id,
start=start, count=count, _ts=ts)
def person_detail(self, subject_id: int):
"""
@@ -494,7 +507,7 @@ class DoubanApi(metaclass=Singleton):
:param subject_id: 人物 id
:return:
"""
return self.__invoke(self._urls["person_detail"] + str(subject_id))
return self.__invoke_search(self._urls["person_detail"] + str(subject_id))
def person_work(self, subject_id: int, start: int = 0, count: int = 20, sort_by: str = "time",
collection_title: str = "影视",
@@ -509,14 +522,16 @@ class DoubanApi(metaclass=Singleton):
:param ts: 时间戳
:return:
"""
return self.__invoke(self._urls["person_work"] % subject_id, sortby=sort_by, collection_title=collection_title,
start=start, count=count, _ts=ts)
return self.__invoke_search(self._urls["person_work"] % subject_id, sortby=sort_by,
collection_title=collection_title,
start=start, count=count, _ts=ts)
def clear_cache(self):
"""
清空LRU缓存
"""
self.__invoke.cache_clear()
# 尚未支持缓存清理
pass
def close(self):
if self._session:

View File

@@ -6,7 +6,6 @@ from app.core.event import eventmanager
from app.log import logger
from app.modules import _MediaServerBase, _ModuleBase
from app.modules.emby.emby import Emby
from app.schemas.event import AuthCredentials, AuthInterceptCredentials
from app.schemas.types import MediaType, ModuleType, ChainEventType, MediaServerType
@@ -73,8 +72,8 @@ class EmbyModule(_ModuleBase, _MediaServerBase[Emby]):
logger.info(f"Emby服务器 {name} 连接断开,尝试重连 ...")
server.reconnect()
def user_authenticate(self, credentials: AuthCredentials, service_name: Optional[str] = None) \
-> Optional[AuthCredentials]:
def user_authenticate(self, credentials: schemas.AuthCredentials, service_name: Optional[str] = None) \
-> Optional[schemas.AuthCredentials]:
"""
使用Emby用户辅助完成用户认证
:param credentials: 认证数据
@@ -96,11 +95,11 @@ class EmbyModule(_ModuleBase, _MediaServerBase[Emby]):
# 触发认证拦截事件
intercept_event = eventmanager.send_event(
etype=ChainEventType.AuthIntercept,
data=AuthInterceptCredentials(username=credentials.username, channel=self.get_name(),
service=name, status="triggered")
data=schemas.AuthInterceptCredentials(username=credentials.username, channel=self.get_name(),
service=name, status="triggered")
)
if intercept_event and intercept_event.event_data:
intercept_data: AuthInterceptCredentials = intercept_event.event_data
intercept_data: schemas.AuthInterceptCredentials = intercept_event.event_data
if intercept_data.cancel:
continue
token = server.authenticate(credentials.username, credentials.password)

View File

@@ -16,8 +16,7 @@ from app.helper.module import ModuleHelper
from app.log import logger
from app.modules import _ModuleBase
from app.modules.filemanager.storages import StorageBase
from app.schemas import TransferInfo, ExistMediaInfo, TmdbEpisode, TransferDirectoryConf, FileItem, StorageUsage
from app.schemas.event import TransferRenameEventData
from app.schemas import TransferInfo, ExistMediaInfo, TmdbEpisode, TransferDirectoryConf, FileItem, StorageUsage, TransferRenameEventData
from app.schemas.types import MediaType, ModuleType, ChainEventType, OtherModulesType
from app.utils.system import SystemUtils
@@ -609,12 +608,12 @@ class FileManagerModule(_ModuleBase):
r"|chinese|(cn|ch[si]|sg|zho?|eng)[-_&]?(cn|ch[si]|sg|zho?|eng)" \
r"|简[体中]?)[.\])])" \
r"|([\u4e00-\u9fa5]{0,3}[中双][\u4e00-\u9fa5]{0,2}[字文语][\u4e00-\u9fa5]{0,3})" \
r"|简体|简中|JPSC" \
r"|简体|简中|JPSC|sc_jp" \
r"|(?<![a-z0-9])gb(?![a-z0-9])"
_zhtw_sub_re = r"([.\[(](((zh[-_])?(hk|tw|cht|tc))" \
r"|(cht|eng)[-_&]?(cht|eng)" \
r"|繁[体中]?)[.\])])" \
r"|繁体中[文字]|中[文字]繁体|繁体|JPTC" \
r"|繁体中[文字]|中[文字]繁体|繁体|JPTC|tc_jp" \
r"|(?<![a-z0-9])big5(?![a-z0-9])"
_eng_sub_re = r"[.\[(]eng[.\])]"
@@ -1185,7 +1184,7 @@ class FileManagerModule(_ModuleBase):
# 集号
"episode": meta.episode_seqs,
# 季集 SxxExx
"season_episode": "%s%s" % (meta.season, meta.episodes),
"season_episode": "%s%s" % (meta.season, meta.episode),
# 段/节
"part": meta.part,
# 剧集标题

View File

@@ -1,6 +1,6 @@
from abc import ABCMeta, abstractmethod
from pathlib import Path
from typing import Optional, List, Union, Dict, Tuple
from typing import Optional, List, Dict, Tuple
from app import schemas
from app.helper.storage import StorageHelper

View File

@@ -282,6 +282,6 @@ class IndexerModule(_ModuleBase):
leeching_size=site_obj.leeching_size,
message_unread=site_obj.message_unread,
message_unread_contents=site_obj.message_unread_contents or [],
updated_at=datetime.now().strftime('%Y-%m-%d'),
updated_day=datetime.now().strftime('%Y-%m-%d'),
err_msg=site_obj.err_msg
)

View File

@@ -672,7 +672,7 @@ class TorrentSpider:
elif method_name == "appendleft":
text = f"{args}{text}"
elif method_name == "querystring":
parsed_url = urlparse(text)
parsed_url = urlparse(str(text))
query_params = parse_qs(parsed_url.query)
param_value = query_params.get(args)
text = param_value[0] if param_value else ''

View File

@@ -6,7 +6,7 @@ from app.core.event import eventmanager
from app.log import logger
from app.modules import _MediaServerBase, _ModuleBase
from app.modules.jellyfin.jellyfin import Jellyfin
from app.schemas.event import AuthCredentials, AuthInterceptCredentials
from app.schemas import AuthCredentials, AuthInterceptCredentials
from app.schemas.types import MediaType, ModuleType, ChainEventType, MediaServerType

View File

@@ -6,7 +6,7 @@ from app.core.event import eventmanager
from app.log import logger
from app.modules import _ModuleBase, _MediaServerBase
from app.modules.plex.plex import Plex
from app.schemas.event import AuthCredentials, AuthInterceptCredentials
from app.schemas import AuthCredentials, AuthInterceptCredentials
from app.schemas.types import MediaType, ModuleType, ChainEventType, MediaServerType

View File

@@ -79,7 +79,7 @@ class QbittorrentModule(_ModuleBase, _DownloaderBase[Qbittorrent]):
def download(self, content: Union[Path, str], download_dir: Path, cookie: str,
episodes: Set[int] = None, category: str = None,
downloader: str = None) -> Optional[Tuple[Optional[str], Optional[str], str]]:
downloader: str = None) -> Optional[Tuple[Optional[str], Optional[str], Optional[str], str]]:
"""
根据种子文件,选择并添加下载任务
:param content: 种子文件地址或者磁力链接
@@ -88,7 +88,7 @@ class QbittorrentModule(_ModuleBase, _DownloaderBase[Qbittorrent]):
:param episodes: 需要下载的集数
:param category: 分类
:param downloader: 下载器
:return: 种子Hash错误信息
:return: 下载器名称、种子Hash、种子文件布局、错误原因
"""
def __get_torrent_info() -> Tuple[str, int]:
@@ -106,10 +106,10 @@ class QbittorrentModule(_ModuleBase, _DownloaderBase[Qbittorrent]):
return "", 0
if not content:
return None, None, "下载内容为空"
return None, None, None, "下载内容为空"
if isinstance(content, Path) and not content.exists():
logger.error(f"种子文件不存在:{content}")
return None, None, f"种子文件不存在:{content}"
return None, None, None, f"种子文件不存在:{content}"
# 获取下载器
server: Qbittorrent = self.get_instance(downloader)
@@ -134,15 +134,20 @@ class QbittorrentModule(_ModuleBase, _DownloaderBase[Qbittorrent]):
category=category,
ignore_category_check=False
)
# 获取下载器全局设置
application = server.qbc.application.preferences
# 获取种子内容布局: `Original: 原始, Subfolder: 创建子文件夹, NoSubfolder: 不创建子文件夹`
torrent_layout = application.get("torrent_content_layout", "Original")
if not state:
# 读取种子的名称
torrent_name, torrent_size = __get_torrent_info()
if not torrent_name:
return None, None, f"添加种子任务失败:无法读取种子文件"
return None, None, None, f"添加种子任务失败:无法读取种子文件"
# 查询所有下载器的种子
torrents, error = server.get_torrents()
if error:
return None, None, "无法连接qbittorrent下载器"
return None, None, None, "无法连接qbittorrent下载器"
if torrents:
for torrent in torrents:
# 名称与大小相等则认为是同一个种子
@@ -156,19 +161,19 @@ class QbittorrentModule(_ModuleBase, _DownloaderBase[Qbittorrent]):
if settings.TORRENT_TAG and settings.TORRENT_TAG not in torrent_tags:
logger.info(f"给种子 {torrent_hash} 打上标签:{settings.TORRENT_TAG}")
server.set_torrents_tag(ids=torrent_hash, tags=[settings.TORRENT_TAG])
return downloader or self.get_default_config_name(), torrent_hash, f"下载任务已存在"
return None, None, f"添加种子任务失败:{content}"
return downloader or self.get_default_config_name(), torrent_hash, torrent_layout, f"下载任务已存在"
return None, None, None, f"添加种子任务失败:{content}"
else:
# 获取种子Hash
torrent_hash = server.get_torrent_id_by_tag(tags=tag)
if not torrent_hash:
return None, None, f"下载任务添加成功但获取Qbittorrent任务信息失败{content}"
return None, None, None, f"下载任务添加成功但获取Qbittorrent任务信息失败{content}"
else:
if is_paused:
# 种子文件
torrent_files = server.get_files(torrent_hash)
if not torrent_files:
return downloader or self.get_default_config_name(), torrent_hash, "获取种子文件失败,下载任务可能在暂停状态"
return downloader or self.get_default_config_name(), torrent_hash, torrent_layout, "获取种子文件失败,下载任务可能在暂停状态"
# 不需要的文件ID
file_ids = []
@@ -193,11 +198,11 @@ class QbittorrentModule(_ModuleBase, _DownloaderBase[Qbittorrent]):
server.torrents_set_force_start(torrent_hash)
else:
server.start_torrents(torrent_hash)
return downloader or self.get_default_config_name(), torrent_hash, f"添加下载成功,已选择集数:{sucess_epidised}"
return downloader or self.get_default_config_name(), torrent_hash, torrent_layout, f"添加下载成功,已选择集数:{sucess_epidised}"
else:
if server.is_force_resume():
server.torrents_set_force_start(torrent_hash)
return downloader or self.get_default_config_name(), torrent_hash, "添加下载成功"
return downloader or self.get_default_config_name(), torrent_hash, torrent_layout, "添加下载成功"
def list_torrents(self, status: TorrentStatus = None,
hashs: Union[list, str] = None,

View File

@@ -7,8 +7,7 @@ from app.core.event import eventmanager
from app.log import logger
from app.modules import _ModuleBase, _MessageBase
from app.modules.telegram.telegram import Telegram
from app.schemas import MessageChannel, CommingMessage, Notification
from app.schemas.event import CommandRegisterEventData
from app.schemas import MessageChannel, CommingMessage, Notification, CommandRegisterEventData
from app.schemas.types import ModuleType, ChainEventType
from app.utils.structures import DictUtils

View File

@@ -80,7 +80,7 @@ class TransmissionModule(_ModuleBase, _DownloaderBase[Transmission]):
def download(self, content: Union[Path, str], download_dir: Path, cookie: str,
episodes: Set[int] = None, category: str = None,
downloader: str = None) -> Optional[Tuple[Optional[str], Optional[str], str]]:
downloader: str = None) -> Optional[Tuple[Optional[str], Optional[str], Optional[str], str]]:
"""
根据种子文件,选择并添加下载任务
:param content: 种子文件地址或者磁力链接
@@ -89,7 +89,7 @@ class TransmissionModule(_ModuleBase, _DownloaderBase[Transmission]):
:param episodes: 需要下载的集数
:param category: 分类TR中未使用
:param downloader: 下载器
:return: 下载器名称、种子Hash、错误原因
:return: 下载器名称、种子Hash、种子文件布局、错误原因
"""
def __get_torrent_info() -> Tuple[str, int]:
@@ -107,9 +107,9 @@ class TransmissionModule(_ModuleBase, _DownloaderBase[Transmission]):
return "", 0
if not content:
return None, None, "下载内容为空"
return None, None, None, "下载内容为空"
if isinstance(content, Path) and not content.exists():
return None, None, f"种子文件不存在:{content}"
return None, None, None, f"种子文件不存在:{content}"
# 获取下载器
server: Transmission = self.get_instance(downloader)
@@ -131,15 +131,18 @@ class TransmissionModule(_ModuleBase, _DownloaderBase[Transmission]):
labels=labels,
cookie=cookie
)
# TR 始终使用原始种子布局, 返回"Original"
torrent_layout = "Original"
if not torrent:
# 读取种子的名称
torrent_name, torrent_size = __get_torrent_info()
if not torrent_name:
return None, None, f"添加种子任务失败:无法读取种子文件"
return None, None, None, f"添加种子任务失败:无法读取种子文件"
# 查询所有下载器的种子
torrents, error = server.get_torrents()
if error:
return None, None, "无法连接transmission下载器"
return None, None, None, "无法连接transmission下载器"
if torrents:
for torrent in torrents:
# 名称与大小相等则认为是同一个种子
@@ -158,15 +161,15 @@ class TransmissionModule(_ModuleBase, _DownloaderBase[Transmission]):
if settings.TORRENT_TAG and settings.TORRENT_TAG not in labels:
labels.append(settings.TORRENT_TAG)
server.set_torrent_tag(ids=torrent_hash, tags=labels)
return downloader or self.get_default_config_name(), torrent_hash, f"下载任务已存在"
return None, None, f"添加种子任务失败:{content}"
return downloader or self.get_default_config_name(), torrent_hash, torrent_layout, f"下载任务已存在"
return None, None, None, f"添加种子任务失败:{content}"
else:
torrent_hash = torrent.hashString
if is_paused:
# 选择文件
torrent_files = server.get_files(torrent_hash)
if not torrent_files:
return downloader or self.get_default_config_name(), torrent_hash, "获取种子文件失败,下载任务可能在暂停状态"
return downloader or self.get_default_config_name(), torrent_hash, torrent_layout, "获取种子文件失败,下载任务可能在暂停状态"
# 需要的文件信息
file_ids = []
unwanted_file_ids = []
@@ -187,9 +190,9 @@ class TransmissionModule(_ModuleBase, _DownloaderBase[Transmission]):
server.set_unwanted_files(torrent_hash, unwanted_file_ids)
# 开始任务
server.start_torrents(torrent_hash)
return downloader or self.get_default_config_name(), torrent_hash, "添加下载任务成功"
return downloader or self.get_default_config_name(), torrent_hash, torrent_layout, "添加下载任务成功"
else:
return downloader or self.get_default_config_name(), torrent_hash, "添加下载任务成功"
return downloader or self.get_default_config_name(), torrent_hash, torrent_layout, "添加下载任务成功"
def list_torrents(self, status: TorrentStatus = None,
hashs: Union[list, str] = None,

View File

@@ -126,7 +126,7 @@ class Transmission:
return None
try:
torrents, error = self.get_torrents(ids=ids,
status=["downloading", "download_pending", "stopped"],
status=["downloading", "download_pending"],
tags=tags)
return None if error else torrents or []
except Exception as err:

View File

@@ -8,8 +8,7 @@ from app.log import logger
from app.modules import _ModuleBase, _MessageBase
from app.modules.wechat.WXBizMsgCrypt3 import WXBizMsgCrypt
from app.modules.wechat.wechat import WeChat
from app.schemas import MessageChannel, CommingMessage, Notification
from app.schemas.event import CommandRegisterEventData
from app.schemas import MessageChannel, CommingMessage, Notification, CommandRegisterEventData
from app.schemas.types import ModuleType, ChainEventType
from app.utils.dom import DomUtils
from app.utils.structures import DictUtils

View File

@@ -1,37 +1,26 @@
import datetime
import platform
import queue
import re
import threading
import traceback
from pathlib import Path
from queue import Queue
from threading import Lock
from typing import Any
from typing import Any, Optional
from apscheduler.schedulers.background import BackgroundScheduler
from cachetools import TTLCache
from watchdog.events import FileSystemEventHandler, FileSystemMovedEvent, FileSystemEvent
from watchdog.observers.polling import PollingObserver
from app.chain import ChainBase
from app.chain.media import MediaChain
from app.chain.storage import StorageChain
from app.chain.tmdb import TmdbChain
from app.chain.transfer import TransferChain
from app.core.config import settings
from app.core.context import MediaInfo
from app.core.event import EventManager
from app.core.metainfo import MetaInfoPath
from app.db.downloadhistory_oper import DownloadHistoryOper
from app.db.systemconfig_oper import SystemConfigOper
from app.db.transferhistory_oper import TransferHistoryOper
from app.helper.directory import DirectoryHelper
from app.helper.message import MessageHelper
from app.log import logger
from app.schemas import FileItem, TransferInfo, Notification
from app.schemas.types import SystemConfigKey, MediaType, NotificationType, EventType
from app.schemas import FileItem
from app.utils.singleton import Singleton
from app.utils.string import StringUtils
lock = Lock()
snapshot_lock = Lock()
@@ -52,12 +41,12 @@ class FileMonitorHandler(FileSystemEventHandler):
self.callback = callback
def on_created(self, event: FileSystemEvent):
self.callback.event_handler(event=event, text="创建",
mon_path=self._watch_path, event_path=Path(event.src_path))
self.callback.event_handler(event=event, text="创建", event_path=event.src_path,
file_size=Path(event.src_path).stat().st_size)
def on_moved(self, event: FileSystemMovedEvent):
self.callback.event_handler(event=event, text="移动",
mon_path=self._watch_path, event_path=Path(event.dest_path))
self.callback.event_handler(event=event, text="移动", event_path=event.dest_path,
file_size=Path(event.dest_path).stat().st_size)
class Monitor(metaclass=Singleton):
@@ -80,29 +69,12 @@ class Monitor(metaclass=Singleton):
# 存储过照间隔(分钟)
_snapshot_interval = 5
# 待整理任务队列
_queue = Queue()
# 文件整理线程
_transfer_thread = None
# 文件整理间隔(秒)
_transfer_interval = 60
# 消息汇总
_msg_medias = {}
# 消息汇总间隔(秒)
_msg_interval = 60
# TTL缓存10秒钟有效
_cache = TTLCache(maxsize=1024, ttl=10)
def __init__(self):
super().__init__()
self.chain = MonitorChain()
self.transferhis = TransferHistoryOper()
self.transferchain = TransferChain()
self.downloadhis = DownloadHistoryOper()
self.mediaChain = MediaChain()
self.tmdbchain = TmdbChain()
self.storagechain = StorageChain()
self.directoryhelper = DirectoryHelper()
self.systemmessage = MessageHelper()
@@ -120,10 +92,6 @@ class Monitor(metaclass=Singleton):
# 停止现有任务
self.stop()
# 启动文件整理线程
self._transfer_thread = threading.Thread(target=self.__start_transfer, daemon=True)
self._transfer_thread.start()
# 读取目录配置
monitor_dirs = self.directoryhelper.get_download_dirs()
if not monitor_dirs:
@@ -183,9 +151,6 @@ class Monitor(metaclass=Singleton):
'storage': mon_dir.storage,
'mon_path': mon_path
})
# 追加入库消息统一发送服务
self._scheduler.add_job(self.__send_msg, trigger='interval', seconds=15)
# 启动定时服务
if self._scheduler.get_jobs():
self._scheduler.print_jobs()
@@ -212,16 +177,6 @@ class Monitor(metaclass=Singleton):
logger.warn(f"导入模块错误:{error},将使用 PollingObserver 监控目录")
return PollingObserver()
def put_to_queue(self, storage: str, filepath: Path, mon_path: Path):
"""
添加到待整理队列
"""
self._queue.put({
"storage": storage,
"filepath": filepath,
"mon_path": mon_path
})
def polling_observer(self, storage: str, mon_path: Path):
"""
轮询监控
@@ -237,48 +192,42 @@ class Monitor(metaclass=Singleton):
new_files = new_snapshot.keys() - old_snapshot.keys()
for new_file in new_files:
# 添加到待整理队列
self.put_to_queue(storage=storage, filepath=Path(new_file), mon_path=mon_path)
self.__handle_file(storage=storage, event_path=Path(new_file),
file_size=new_snapshot.get(new_file))
# 更新快照
self._storage_snapshot[storage] = new_snapshot
def event_handler(self, event, mon_path: Path, text: str, event_path: Path):
def event_handler(self, event, text: str, event_path: str, file_size: float = None):
"""
处理文件变化
:param event: 事件
:param mon_path: 监控目录
:param text: 事件描述
:param event_path: 事件文件路径
:param file_size: 文件大小
"""
if not event.is_directory:
# 文件发生变化
logger.debug(f"文件 {event_path} 发生了 {text}")
# 添加到待整理队列
self.put_to_queue(storage="local", filepath=event_path, mon_path=mon_path)
# 整理文件
self.__handle_file(storage="local", event_path=Path(event_path), file_size=file_size)
def __start_transfer(self):
"""
整理队列中的文件
"""
while not self._event.is_set():
try:
item = self._queue.get(timeout=self._transfer_interval)
if item:
self.__handle_file(storage=item.get("storage"), event_path=item.get("filepath"))
except queue.Empty:
continue
except Exception as e:
logger.error(f"整理队列处理出现错误:{e}")
def __handle_file(self, storage: str, event_path: Path):
def __handle_file(self, storage: str, event_path: Path, file_size: float = None):
"""
整理一个文件
:param storage: 存储
:param event_path: 事件文件路径
:param file_size: 文件大小
"""
def __get_bluray_dir(_path: Path):
def __is_bluray_sub(_path: Path) -> bool:
"""
获取BDMV目录的上级目录
判断是否蓝光原盘目录内的子目录或文件
"""
return True if re.search(r"BDMV[/\\]STREAM", str(_path), re.IGNORECASE) else False
def __get_bluray_dir(_path: Path) -> Optional[Path]:
"""
获取蓝光原盘BDMV目录的上级目录
"""
for p in _path.parents:
if p.name == "BDMV":
@@ -287,311 +236,33 @@ class Monitor(metaclass=Singleton):
# 全程加锁
with lock:
# 蓝光原盘文件处理
if __is_bluray_sub(event_path):
event_path = __get_bluray_dir(event_path)
if not event_path:
return
# TTL缓存控重
if self._cache.get(str(event_path)):
return
self._cache[str(event_path)] = True
try:
# 回收站及隐藏的文件不处
if str(event_path).find('/@Recycle/') != -1 \
or str(event_path).find('/#recycle/') != -1 \
or str(event_path).find('/.') != -1 \
or str(event_path).find('/@eaDir') != -1:
logger.debug(f"{event_path} 是回收站或隐藏的文件")
return
# 不是媒体文件不处理
if event_path.suffix.lower() not in self.all_exts:
logger.debug(f"{event_path} 不是媒体文件")
return
# 整理屏蔽词不处理
transfer_exclude_words = self.systemconfig.get(SystemConfigKey.TransferExcludeWords)
if transfer_exclude_words:
for keyword in transfer_exclude_words:
if not keyword:
continue
if keyword and re.search(r"%s" % keyword, str(event_path), re.IGNORECASE):
logger.info(f"{event_path} 命中整理屏蔽词 {keyword},不处理")
return
# 判断是不是蓝光目录
bluray_flag = False
if re.search(r"BDMV[/\\]STREAM", str(event_path), re.IGNORECASE):
bluray_flag = True
# 截取BDMV前面的路径
event_path = __get_bluray_dir(event_path)
logger.info(f"{event_path} 是蓝光原盘目录,更正文件路径为:{event_path}")
# 查询历史记录,已转移的不处理
if self.transferhis.get_by_src(str(event_path), storage=storage):
logger.info(f"{event_path} 已经整理过了")
return
# 元数据
file_meta = MetaInfoPath(event_path)
if not file_meta.name:
logger.error(f"{event_path.name} 无法识别有效信息")
return
# 根据父路径获取下载历史
download_history = None
if bluray_flag:
# 蓝光原盘,按目录名查询
download_history = self.downloadhis.get_by_path(str(event_path))
else:
# 按文件全路径查询
download_file = self.downloadhis.get_file_by_fullpath(str(event_path))
if download_file:
download_history = self.downloadhis.get_by_hash(download_file.download_hash)
# 获取下载Hash
download_hash = None
if download_history:
download_hash = download_history.download_hash
# 识别媒体信息
if download_history and (download_history.tmdbid or download_history.doubanid):
# 下载记录中已存在识别信息
mediainfo: MediaInfo = self.mediaChain.recognize_media(mtype=MediaType(download_history.type),
tmdbid=download_history.tmdbid,
doubanid=download_history.doubanid)
if mediainfo:
# 更新自定义媒体类别
if download_history.media_category:
mediainfo.category = download_history.media_category
else:
mediainfo: MediaInfo = self.mediaChain.recognize_by_meta(file_meta)
if not mediainfo:
logger.warn(f'未识别到媒体信息,标题:{file_meta.name}')
# 新增转移失败历史记录
his = self.transferhis.add_fail(
fileitem=FileItem(
storage=storage,
type="file",
path=str(event_path),
name=event_path.name,
basename=event_path.stem,
extension=event_path.suffix[1:],
),
mode='',
meta=file_meta,
download_hash=download_hash
# 开始整
self.transferchain.do_transfer(
fileitem=FileItem(
storage=storage,
path=str(event_path),
type="file",
name=event_path.name,
basename=event_path.stem,
extension=event_path.suffix[1:],
size=file_size
)
self.chain.post_message(Notification(
mtype=NotificationType.Manual,
title=f"{event_path.name} 未识别到媒体信息,无法入库!",
text=f"回复:```\n/redo {his.id} [tmdbid]|[类型]\n``` 手动识别转移。",
link=settings.MP_DOMAIN('#/history')
))
return
# 查询转移目的目录
dir_info = self.directoryhelper.get_dir(mediainfo, storage=storage, src_path=event_path)
if not dir_info:
logger.warn(f"{event_path.name} 未找到对应的目标目录")
return
# 查找这个文件项
file_item = self.storagechain.get_file_item(storage=storage, path=event_path)
if not file_item:
logger.warn(f"{event_path.name} 未找到对应的文件")
return
# 如果未开启新增已入库媒体是否跟随TMDB信息变化则根据tmdbid查询之前的title
if not settings.SCRAP_FOLLOW_TMDB:
transfer_history = self.transferhis.get_by_type_tmdbid(tmdbid=mediainfo.tmdb_id,
mtype=mediainfo.type.value)
if transfer_history:
mediainfo.title = transfer_history.title
logger.info(f"{event_path.name} 识别为:{mediainfo.type.value} {mediainfo.title_year}")
# 更新媒体图片
self.chain.obtain_images(mediainfo=mediainfo)
# 获取集数据
if mediainfo.type == MediaType.TV:
episodes_info = self.tmdbchain.tmdb_episodes(tmdbid=mediainfo.tmdb_id,
season=file_meta.begin_season or 1)
else:
episodes_info = None
# 转移
transferinfo: TransferInfo = self.chain.transfer(fileitem=file_item,
meta=file_meta,
mediainfo=mediainfo,
target_directory=dir_info,
episodes_info=episodes_info)
if not transferinfo:
logger.error("文件转移模块运行失败")
return
if not transferinfo.success:
# 转移失败
logger.warn(f"{event_path.name} 入库失败:{transferinfo.message}")
# 新增转移失败历史记录
self.transferhis.add_fail(
fileitem=file_item,
mode=transferinfo.transfer_type if transferinfo else '',
download_hash=download_hash,
meta=file_meta,
mediainfo=mediainfo,
transferinfo=transferinfo
)
# 发送失败消息
self.chain.post_message(Notification(
mtype=NotificationType.Manual,
title=f"{mediainfo.title_year} {file_meta.season_episode} 入库失败!",
text=f"原因:{transferinfo.message or '未知'}",
image=mediainfo.get_message_image(),
link=settings.MP_DOMAIN('#/history')
))
return
# 转移成功
logger.info(f"{event_path.name} 入库成功:{transferinfo.target_diritem.path}")
# 新增转移成功历史记录
self.transferhis.add_success(
fileitem=file_item,
mode=transferinfo.transfer_type if transferinfo else '',
download_hash=download_hash,
meta=file_meta,
mediainfo=mediainfo,
transferinfo=transferinfo
)
# 汇总刮削
if transferinfo.need_scrape:
self.mediaChain.scrape_metadata(fileitem=transferinfo.target_diritem,
meta=file_meta,
mediainfo=mediainfo)
# 广播事件
EventManager().send_event(EventType.TransferComplete, {
'fileitem': file_item,
'meta': file_meta,
'mediainfo': mediainfo,
'transferinfo': transferinfo
})
# 发送消息汇总
if transferinfo.need_notify:
self.__collect_msg_medias(mediainfo=mediainfo, file_meta=file_meta, transferinfo=transferinfo)
# 移动模式删除空目录
if transferinfo.transfer_type in ["move"]:
self.storagechain.delete_media_file(file_item, delete_self=False)
except Exception as e:
logger.error("目录监控发生错误:%s - %s" % (str(e), traceback.format_exc()))
def __collect_msg_medias(self, mediainfo: MediaInfo, file_meta: MetaInfoPath, transferinfo: TransferInfo):
"""
收集媒体处理完的消息
"""
media_list = self._msg_medias.get(mediainfo.title_year + " " + file_meta.season) or {}
if media_list:
media_files = media_list.get("files") or []
if media_files:
file_exists = False
for file in media_files:
if str(transferinfo.fileitem.path) == file.get("path"):
file_exists = True
break
if not file_exists:
media_files.append({
"path": str(transferinfo.fileitem.path),
"mediainfo": mediainfo,
"file_meta": file_meta,
"transferinfo": transferinfo
})
else:
media_files = [
{
"path": str(transferinfo.fileitem.path),
"mediainfo": mediainfo,
"file_meta": file_meta,
"transferinfo": transferinfo
}
]
media_list = {
"files": media_files,
"time": datetime.datetime.now()
}
else:
media_list = {
"files": [
{
"path": str(transferinfo.fileitem.path),
"mediainfo": mediainfo,
"file_meta": file_meta,
"transferinfo": transferinfo
}
],
"time": datetime.datetime.now()
}
self._msg_medias[mediainfo.title_year + " " + file_meta.season] = media_list
    def __send_msg(self):
        """
        Periodically check whether any media batches have finished processing
        and send one aggregated notification per media/season instead of one
        message per file.

        Runs on a timer; reads and mutates ``self._msg_medias``, which is
        populated by ``__collect_msg_medias``.
        """
        if not self._msg_medias or not self._msg_medias.keys():
            return
        # Iterate over a snapshot of the keys so entries can be deleted
        # safely while looping
        for medis_title_year_season in list(self._msg_medias.keys()):
            media_list = self._msg_medias.get(medis_title_year_season)
            logger.info(f"开始处理媒体 {medis_title_year_season} 消息")
            if not media_list:
                continue
            # Timestamp of the last file added to this batch
            last_update_time = media_list.get("time")
            media_files = media_list.get("files")
            if not last_update_time or not media_files:
                continue
            # Use the first collected file as the template for the message
            transferinfo = media_files[0].get("transferinfo")
            file_meta = media_files[0].get("file_meta")
            mediainfo = media_files[0].get("mediainfo")
            # Flush when the batch has been idle longer than the configured
            # interval, or immediately for movies (single-file media)
            if (datetime.datetime.now() - last_update_time).total_seconds() > int(self._msg_interval) \
                    or mediainfo.type == MediaType.MOVIE:
                # Aggregate the total size of all processed files
                total_size = 0
                file_count = 0
                # Episode numbers collected for the season summary
                episodes = []
                for file in media_files:
                    transferinfo = file.get("transferinfo")
                    total_size += transferinfo.total_size
                    file_count += 1
                    file_meta = file.get("file_meta")
                    if file_meta and file_meta.begin_episode:
                        episodes.append(file_meta.begin_episode)
                # NOTE(review): the last file's TransferInfo is mutated in
                # place to carry the aggregated totals into the message
                transferinfo.total_size = total_size
                # Aggregate the number of processed files
                transferinfo.file_count = file_count
                # Season/episode text, e.g. "S01 E01-E04" or "S01 E01、E02、E04"
                season_episode = None
                # Multiple files means a TV series: show a season-level summary
                if mediainfo.type == MediaType.TV:
                    # Season/episode summary text
                    season_episode = f"{file_meta.season} {StringUtils.format_ep(episodes)}"
                # Send the aggregated notification
                self.transferchain.send_transfer_message(meta=file_meta,
                                                         mediainfo=mediainfo,
                                                         transferinfo=transferinfo,
                                                         season_episode=season_episode)
                # Drop the key once the message has been sent
                del self._msg_medias[medis_title_year_season]
            continue
def stop(self):
"""
退出插件

View File

@@ -11,6 +11,7 @@ from apscheduler.schedulers.background import BackgroundScheduler
from app import schemas
from app.chain import ChainBase
from app.chain.mediaserver import MediaServerChain
from app.chain.recommend import RecommendChain
from app.chain.site import SiteChain
from app.chain.subscribe import SubscribeChain
from app.chain.tmdb import TmdbChain
@@ -121,6 +122,11 @@ class Scheduler(metaclass=Singleton):
"name": "站点数据刷新",
"func": SiteChain().refresh_userdatas,
"running": False,
},
"recommend_refresh": {
"name": "推荐缓存",
"func": RecommendChain().refresh_recommend,
"running": False,
}
}
@@ -310,6 +316,19 @@ class Scheduler(metaclass=Singleton):
}
)
# 推荐缓存
self._scheduler.add_job(
self.start,
"interval",
id="recommend_refresh",
name="推荐缓存",
hours=24,
next_run_time=datetime.now(pytz.timezone(settings.TZ)) + timedelta(seconds=3),
kwargs={
'job_id': 'recommend_refresh'
}
)
self.init_plugin_jobs()
# 打印服务

View File

@@ -17,3 +17,5 @@ from .rule import *
from .system import *
from .file import *
from .exception import *
from .system import *
from .event import *

View File

@@ -1,6 +1,6 @@
from typing import Optional, Dict, List, Union
from pydantic import BaseModel
from pydantic import BaseModel, Field
class MetaInfo(BaseModel):
@@ -39,6 +39,8 @@ class MetaInfo(BaseModel):
end_episode: Optional[int] = None
# SxxExx
season_episode: Optional[str] = None
# 集列表
episode_list: Optional[List[int]] = Field(default_factory=list)
# Partx Cd Dvd Disk Disc
part: Optional[str] = None
# 识别的资源类型
@@ -104,56 +106,56 @@ class MediaInfo(BaseModel):
# 二级分类
category: Optional[str] = ""
# 季季集清单
seasons: Optional[Dict[int, list]] = {}
seasons: Optional[Dict[int, list]] = Field(default_factory=dict)
# 季详情
season_info: Optional[List[dict]] = []
season_info: Optional[List[dict]] = Field(default_factory=list)
# 别名和译名
names: Optional[list] = []
names: Optional[list] = Field(default_factory=list)
# 演员
actors: Optional[list] = []
actors: Optional[list] = Field(default_factory=list)
# 导演
directors: Optional[list] = []
directors: Optional[list] = Field(default_factory=list)
# 详情链接
detail_link: Optional[str] = None
# 其它TMDB属性
# 是否成人内容
adult: Optional[bool] = False
# 创建人
created_by: Optional[list] = []
created_by: Optional[list] = Field(default_factory=list)
# 集时长
episode_run_time: Optional[list] = []
episode_run_time: Optional[list] = Field(default_factory=list)
# 风格
genres: Optional[List[dict]] = []
genres: Optional[List[dict]] = Field(default_factory=list)
# 首播日期
first_air_date: Optional[str] = None
# 首页
homepage: Optional[str] = None
# 语种
languages: Optional[list] = []
languages: Optional[list] = Field(default_factory=list)
# 最后上映日期
last_air_date: Optional[str] = None
# 流媒体平台
networks: Optional[list] = []
networks: Optional[list] = Field(default_factory=list)
# 集数
number_of_episodes: Optional[int] = 0
# 季数
number_of_seasons: Optional[int] = 0
# 原产国
origin_country: Optional[list] = []
origin_country: Optional[list] = Field(default_factory=list)
# 原名
original_name: Optional[str] = None
# 出品公司
production_companies: Optional[list] = []
production_companies: Optional[list] = Field(default_factory=list)
# 出品国
production_countries: Optional[list] = []
production_countries: Optional[list] = Field(default_factory=list)
# 语种
spoken_languages: Optional[list] = []
spoken_languages: Optional[list] = Field(default_factory=list)
# 状态
status: Optional[str] = None
# 标签
tagline: Optional[str] = None
# 风格ID
genre_ids: Optional[list] = []
genre_ids: Optional[list] = Field(default_factory=list)
# 评价数量
vote_count: Optional[int] = 0
# 流行度
@@ -161,7 +163,7 @@ class MediaInfo(BaseModel):
# 时长
runtime: Optional[int] = None
# 下一集
next_episode_to_air: Optional[dict] = {}
next_episode_to_air: Optional[dict] = Field(default_factory=dict)
class TorrentInfo(BaseModel):
@@ -213,7 +215,7 @@ class TorrentInfo(BaseModel):
# HR
hit_and_run: Optional[bool] = False
# 种子标签
labels: Optional[list] = []
labels: Optional[list] = Field(default_factory=list)
# 种子优先级
pri_order: Optional[int] = 0
# 促销
@@ -245,13 +247,13 @@ class MediaPerson(BaseModel):
type: Optional[Union[str, int]] = 1
name: Optional[str] = None
character: Optional[str] = None
images: Optional[dict] = {}
images: Optional[dict] = Field(default_factory=dict)
# themoviedb
profile_path: Optional[str] = None
gender: Optional[Union[str, int]] = None
original_name: Optional[str] = None
credit_id: Optional[str] = None
also_known_as: Optional[list] = []
also_known_as: Optional[list] = Field(default_factory=list)
birthday: Optional[str] = None
deathday: Optional[str] = None
imdb_id: Optional[str] = None
@@ -260,11 +262,11 @@ class MediaPerson(BaseModel):
popularity: Optional[float] = None
biography: Optional[str] = None
# douban
roles: Optional[list] = []
roles: Optional[list] = Field(default_factory=list)
title: Optional[str] = None
url: Optional[str] = None
avatar: Optional[Union[str, dict]] = None
latin_name: Optional[str] = None
# bangumi
career: Optional[list] = []
career: Optional[list] = Field(default_factory=list)
relation: Optional[str] = None

View File

@@ -3,7 +3,6 @@ from typing import Optional, Dict, Any, List, Set
from pydantic import BaseModel, Field, root_validator
from app.core.context import Context
from app.schemas import MessageChannel
@@ -46,9 +45,9 @@ class AuthCredentials(ChainEventData):
# 输出参数
# grant_type 为 authorization_code 时,输出参数包括 username、token、channel、service
token: Optional[str] = Field(None, description="认证令牌")
channel: Optional[str] = Field(None, description="认证渠道")
service: Optional[str] = Field(None, description="服务名称")
token: Optional[str] = Field(default=None, description="认证令牌")
channel: Optional[str] = Field(default=None, description="认证渠道")
service: Optional[str] = Field(default=None, description="服务名称")
@root_validator(pre=True)
def check_fields_based_on_grant_type(cls, values):
@@ -89,11 +88,11 @@ class AuthInterceptCredentials(ChainEventData):
channel: str = Field(..., description="认证渠道")
service: str = Field(..., description="服务名称")
status: str = Field(..., description="认证状态, 包含 'triggered' 表示认证触发,'completed' 表示认证成功")
token: Optional[str] = Field(None, description="认证令牌")
token: Optional[str] = Field(default=None, description="认证令牌")
# 输出参数
source: str = Field("未知拦截源", description="拦截源")
cancel: bool = Field(False, description="是否取消认证")
source: str = Field(default="未知拦截源", description="拦截源")
cancel: bool = Field(default=False, description="是否取消认证")
class CommandRegisterEventData(ChainEventData):
@@ -116,8 +115,8 @@ class CommandRegisterEventData(ChainEventData):
service: Optional[str] = Field(..., description="服务名称")
# 输出参数
cancel: bool = Field(False, description="是否取消注册")
source: str = Field("未知拦截源", description="拦截源")
cancel: bool = Field(default=False, description="是否取消注册")
source: str = Field(default="未知拦截源", description="拦截源")
class TransferRenameEventData(ChainEventData):
@@ -143,9 +142,9 @@ class TransferRenameEventData(ChainEventData):
render_str: str = Field(..., description="渲染生成的字符串")
# 输出参数
updated: bool = Field(False, description="是否已更新")
updated_str: Optional[str] = Field(None, description="更新后的字符串")
source: Optional[str] = Field("未知拦截源", description="拦截源")
updated: bool = Field(default=False, description="是否已更新")
updated_str: Optional[str] = Field(default=None, description="更新后的字符串")
source: Optional[str] = Field(default="未知拦截源", description="拦截源")
class ResourceSelectionEventData(BaseModel):
@@ -168,9 +167,9 @@ class ResourceSelectionEventData(BaseModel):
origin: Optional[str] = Field(None, description="来源")
# 输出参数
updated: bool = Field(False, description="是否已更新")
updated_contexts: Optional[List[Context]] = Field(None, description="已更新的资源上下文列表")
source: Optional[str] = Field("未知拦截源", description="拦截源")
updated: bool = Field(default=False, description="是否已更新")
updated_contexts: Optional[List[Any]] = Field(default=None, description="已更新的资源上下文列表")
source: Optional[str] = Field(default="未知拦截源", description="拦截源")
class ResourceDownloadEventData(ChainEventData):
@@ -200,6 +199,6 @@ class ResourceDownloadEventData(ChainEventData):
options: Optional[dict] = Field(None, description="其他参数")
# 输出参数
cancel: bool = Field(False, description="是否取消下载")
source: str = Field("未知拦截源", description="拦截源")
reason: str = Field("", description="拦截原因")
cancel: bool = Field(default=False, description="是否取消下载")
source: str = Field(default="未知拦截源", description="拦截源")
reason: str = Field(default="", description="拦截原因")

View File

@@ -1,6 +1,6 @@
from typing import Optional
from pydantic import BaseModel
from pydantic import BaseModel, Field
class FileItem(BaseModel):
@@ -21,7 +21,7 @@ class FileItem(BaseModel):
# 修改时间
modify_time: Optional[float] = None
# 子节点
children: Optional[list] = []
children: Optional[list] = Field(default_factory=list)
# ID
fileid: Optional[str] = None
# 父ID
@@ -45,4 +45,4 @@ class StorageUsage(BaseModel):
class StorageTransType(BaseModel):
# 传输类型
transtype: Optional[dict] = {}
transtype: Optional[dict] = Field(default_factory=dict)

View File

@@ -46,6 +46,8 @@ class DownloadHistory(BaseModel):
date: Optional[str] = None
# 备注
note: Optional[Any] = None
# 自定义媒体类别
media_category: Optional[str] = None
class Config:
orm_mode = True

View File

@@ -1,7 +1,7 @@
from pathlib import Path
from typing import Optional, Dict, Union, List, Any
from pydantic import BaseModel
from pydantic import BaseModel, Field
from app.schemas.types import MediaType
@@ -13,7 +13,7 @@ class ExistMediaInfo(BaseModel):
# 类型 电影、电视剧
type: Optional[MediaType]
# 季
seasons: Optional[Dict[int, list]] = {}
seasons: Optional[Dict[int, list]] = Field(default_factory=dict)
# 媒体服务器类型plex、jellyfin、emby
server_type: Optional[str] = None
# 媒体服务器名称
@@ -29,7 +29,7 @@ class NotExistMediaInfo(BaseModel):
# 季
season: Optional[int] = None
# 剧集列表
episodes: Optional[list] = []
episodes: Optional[list] = Field(default_factory=list)
# 总集数
total_episode: Optional[int] = 0
# 开始集
@@ -132,7 +132,7 @@ class MediaServerSeasonInfo(BaseModel):
媒体服务器媒体剧集信息
"""
season: Optional[int] = None
episodes: Optional[List[int]] = []
episodes: Optional[List[int]] = Field(default_factory=list)
class WebhookEventInfo(BaseModel):
@@ -173,4 +173,4 @@ class MediaServerPlayItem(BaseModel):
image: Optional[str] = None
link: Optional[str] = None
percent: Optional[float] = None
BackdropImageTags: Optional[list] = []
BackdropImageTags: Optional[list] = Field(default_factory=list)

View File

@@ -1,6 +1,6 @@
from typing import Optional, Union
from pydantic import BaseModel
from pydantic import BaseModel, Field
from app.schemas.types import NotificationType, MessageChannel
@@ -101,7 +101,7 @@ class Subscription(BaseModel):
客户端消息订阅
"""
endpoint: Optional[str]
keys: Optional[dict] = {}
keys: Optional[dict] = Field(default_factory=dict)
class SubscriptionMessage(BaseModel):
@@ -112,4 +112,4 @@ class SubscriptionMessage(BaseModel):
body: Optional[str]
icon: Optional[str]
url: Optional[str]
data: Optional[dict] = {}
data: Optional[dict] = Field(default_factory=dict)

View File

@@ -1,6 +1,6 @@
from typing import Optional, List
from pydantic import BaseModel
from pydantic import BaseModel, Field
class Plugin(BaseModel):
@@ -43,7 +43,7 @@ class Plugin(BaseModel):
# 安装次数
install_count: Optional[int] = 0
# 更新记录
history: Optional[dict] = {}
history: Optional[dict] = Field(default_factory=dict)
# 添加时间,值越小表示越靠后发布
add_time: Optional[int] = 0
# 插件公钥
@@ -60,8 +60,8 @@ class PluginDashboard(Plugin):
# 仪表板key
key: Optional[str] = None
# 全局配置
attrs: Optional[dict] = {}
attrs: Optional[dict] = Field(default_factory=dict)
# col列数
cols: Optional[dict] = {}
cols: Optional[dict] = Field(default_factory=dict)
# 页面元素
elements: Optional[List[dict]] = []
elements: Optional[List[dict]] = Field(default_factory=list)

View File

@@ -1,6 +1,6 @@
from typing import Optional, Union
from pydantic import BaseModel
from pydantic import BaseModel, Field
class Response(BaseModel):
@@ -9,4 +9,4 @@ class Response(BaseModel):
# 消息文本
message: Optional[str] = None
# 数据
data: Optional[Union[dict, list]] = {}
data: Optional[Union[dict, list]] = Field(default_factory=dict)

View File

@@ -1,57 +1,59 @@
from typing import Optional
from pydantic import BaseModel
from pydantic import BaseModel, Field
class RadarrMovie(BaseModel):
id: Optional[int]
title: Optional[str]
year: Optional[str]
id: Optional[int] = None
title: Optional[str] = None
year: Optional[str] = None
isAvailable: bool = False
monitored: bool = False
tmdbId: Optional[int]
imdbId: Optional[str]
titleSlug: Optional[str]
folderName: Optional[str]
path: Optional[str]
profileId: Optional[int]
qualityProfileId: Optional[int]
added: Optional[str]
tmdbId: Optional[int] = None
imdbId: Optional[str] = None
titleSlug: Optional[str] = None
folderName: Optional[str] = None
path: Optional[str] = None
profileId: Optional[int] = None
qualityProfileId: Optional[int] = None
added: Optional[str] = None
hasFile: bool = False
class SonarrSeries(BaseModel):
id: Optional[int]
title: Optional[str]
sortTitle: Optional[str]
seasonCount: Optional[int]
status: Optional[str]
overview: Optional[str]
network: Optional[str]
airTime: Optional[str]
images: list = []
remotePoster: Optional[str]
seasons: list = []
year: Optional[str]
path: Optional[str]
profileId: Optional[int]
languageProfileId: Optional[int]
id: Optional[int] = None
title: Optional[str] = None
sortTitle: Optional[str] = None
seasonCount: Optional[int] = None
status: Optional[str] = None
overview: Optional[str] = None
network: Optional[str] = None
airTime: Optional[str] = None
images: list = Field(default_factory=list)
remotePoster: Optional[str] = None
seasons: list = Field(default_factory=list)
year: Optional[str] = None
path: Optional[str] = None
profileId: Optional[int] = None
languageProfileId: Optional[int] = None
seasonFolder: bool = False
monitored: bool = False
useSceneNumbering: bool = False
runtime: Optional[int]
tmdbId: Optional[int]
imdbId: Optional[str]
tvdbId: Optional[int]
tvRageId: Optional[int]
tvMazeId: Optional[int]
firstAired: Optional[str]
seriesType: Optional[str]
cleanTitle: Optional[str]
titleSlug: Optional[str]
certification: Optional[str]
genres: list = []
tags: list = []
added: Optional[str]
ratings: Optional[dict]
qualityProfileId: Optional[int]
statistics: dict = {}
runtime: Optional[int] = None
tmdbId: Optional[int] = None
imdbId: Optional[str] = None
tvdbId: Optional[int] = None
tvRageId: Optional[int] = None
tvMazeId: Optional[int] = None
firstAired: Optional[str] = None
seriesType: Optional[str] = None
cleanTitle: Optional[str] = None
titleSlug: Optional[str] = None
certification: Optional[str] = None
genres: list = Field(default_factory=list)
tags: list = Field(default_factory=list)
added: Optional[str] = None
ratings: Optional[dict] = None
qualityProfileId: Optional[int] = None
statistics: dict = Field(default_factory=dict)
isAvailable: Optional[bool] = False
hasFile: Optional[bool] = False

View File

@@ -1,17 +1,17 @@
from typing import Optional, Any, Union, Dict
from pydantic import BaseModel
from pydantic import BaseModel, Field
class Site(BaseModel):
# ID
id: Optional[int]
id: Optional[int] = None
# 站点名称
name: Optional[str]
name: Optional[str] = None
# 站点主域名Key
domain: Optional[str]
domain: Optional[str] = None
# 站点地址
url: Optional[str]
url: Optional[str] = None
# 站点优先级
pri: Optional[int] = 0
# RSS地址
@@ -53,7 +53,7 @@ class Site(BaseModel):
class SiteStatistic(BaseModel):
# 站点ID
domain: Optional[str]
domain: Optional[str] = None
# 成功次数
success: Optional[int] = 0
# 失败次数
@@ -63,7 +63,7 @@ class SiteStatistic(BaseModel):
# 最后状态
lst_state: Optional[int] = 0
# 最后修改时间
lst_mod_date: Optional[str]
lst_mod_date: Optional[str] = None
# 备注
note: Optional[Any] = None
@@ -73,15 +73,15 @@ class SiteStatistic(BaseModel):
class SiteUserData(BaseModel):
# 站点域名
domain: Optional[str]
domain: Optional[str] = None
# 用户名
username: Optional[str]
username: Optional[str] = None
# 用户ID
userid: Optional[Union[int, str]]
userid: Optional[Union[int, str]] = None
# 用户等级
user_level: Optional[str]
user_level: Optional[str] = None
# 加入时间
join_at: Optional[str]
join_at: Optional[str] = None
# 积分
bonus: Optional[float] = 0.0
# 上传量
@@ -99,11 +99,11 @@ class SiteUserData(BaseModel):
# 下载体积
leeching_size: Optional[int] = 0
# 做种人数, 种子大小
seeding_info: Optional[list] = []
seeding_info: Optional[list] = Field(default_factory=list)
# 未读消息
message_unread: Optional[int] = 0
# 未读消息内容
message_unread_contents: Optional[list] = []
message_unread_contents: Optional[list] = Field(default_factory=list)
# 错误信息
err_msg: Optional[str] = None
# 更新日期
@@ -114,4 +114,4 @@ class SiteUserData(BaseModel):
class SiteAuth(BaseModel):
site: Optional[str] = None
params: Optional[Dict[str, Union[int, str]]] = {}
params: Optional[Dict[str, Union[int, str]]] = Field(default_factory=dict)

View File

@@ -1,6 +1,6 @@
from typing import Optional, List, Dict, Any
from pydantic import BaseModel
from pydantic import BaseModel, Field
class Subscribe(BaseModel):
@@ -53,7 +53,7 @@ class Subscribe(BaseModel):
# 订阅用户
username: Optional[str] = None
# 订阅站点
sites: Optional[List[int]] = []
sites: Optional[List[int]] = Field(default_factory=list)
# 下载器
downloader: Optional[str] = None
# 是否洗版
@@ -71,7 +71,7 @@ class Subscribe(BaseModel):
# 自定义媒体类别
media_category: Optional[str] = None
# 过滤规则组
filter_groups: Optional[List[str]] = []
filter_groups: Optional[List[str]] = Field(default_factory=list)
class Config:
orm_mode = True
@@ -157,13 +157,13 @@ class SubscribeEpisodeInfo(BaseModel):
# 背景图
backdrop: Optional[str] = None
# 下载文件信息
download: Optional[List[SubscribeDownloadFileInfo]] = []
download: Optional[List[SubscribeDownloadFileInfo]] = Field(default_factory=list)
# 媒体库文件信息
library: Optional[List[SubscribeLibraryFileInfo]] = []
library: Optional[List[SubscribeLibraryFileInfo]] = Field(default_factory=list)
class SubscrbieInfo(BaseModel):
# 订阅信息
subscribe: Optional[Subscribe] = None
# 集信息 {集号: {download: 文件路径library: 文件路径, backdrop: url, title: 标题, description: 描述}}
episodes: Optional[Dict[int, SubscribeEpisodeInfo]] = {}
episodes: Optional[Dict[int, SubscribeEpisodeInfo]] = Field(default_factory=dict)

View File

@@ -1,7 +1,7 @@
from dataclasses import dataclass
from typing import Optional, Any
from pydantic import BaseModel
from pydantic import BaseModel, Field
@dataclass
@@ -30,11 +30,11 @@ class MediaServerConf(BaseModel):
# 类型 emby/jellyfin/plex
type: Optional[str] = None
# 配置
config: Optional[dict] = {}
config: Optional[dict] = Field(default_factory=dict)
# 是否启用
enabled: Optional[bool] = False
# 同步媒体体库列表
sync_libraries: Optional[list] = []
sync_libraries: Optional[list] = Field(default_factory=list)
class DownloaderConf(BaseModel):
@@ -48,7 +48,7 @@ class DownloaderConf(BaseModel):
# 是否默认
default: Optional[bool] = False
# 配置
config: Optional[dict] = {}
config: Optional[dict] = Field(default_factory=dict)
# 是否启用
enabled: Optional[bool] = False
@@ -62,9 +62,9 @@ class NotificationConf(BaseModel):
# 类型 telegram/wechat/vocechat/synologychat/slack/webpush
type: Optional[str] = None
# 配置
config: Optional[dict] = {}
config: Optional[dict] = Field(default_factory=dict)
# 场景开关
switchs: Optional[list] = []
switchs: Optional[list] = Field(default_factory=list)
# 是否启用
enabled: Optional[bool] = False
@@ -88,7 +88,7 @@ class StorageConf(BaseModel):
# 名称
name: Optional[str] = None
# 配置
config: Optional[dict] = {}
config: Optional[dict] = Field(default_factory=dict)
class TransferDirectoryConf(BaseModel):

View File

@@ -1,6 +1,6 @@
from typing import Optional
from pydantic import BaseModel
from pydantic import BaseModel, Field
class TmdbSeason(BaseModel):
@@ -22,11 +22,12 @@ class TmdbEpisode(BaseModel):
"""
air_date: Optional[str] = None
episode_number: Optional[int] = None
episode_type: Optional[str] = None
name: Optional[str] = None
overview: Optional[str] = None
runtime: Optional[int] = None
season_number: Optional[int] = None
still_path: Optional[str] = None
vote_average: Optional[float] = None
crew: Optional[list] = []
guest_stars: Optional[list] = []
crew: Optional[list] = Field(default_factory=list)
guest_stars: Optional[list] = Field(default_factory=list)

View File

@@ -1,9 +1,12 @@
from pathlib import Path
from typing import Optional
from typing import Optional, List, Any, Callable
from pydantic import BaseModel
from pydantic import BaseModel, Field
from app.schemas import TmdbEpisode, DownloadHistory
from app.schemas.file import FileItem
from app.schemas.system import TransferDirectoryConf
from schemas import MediaInfo, MetaInfo
class TransferTorrent(BaseModel):
@@ -34,15 +37,66 @@ class DownloadingTorrent(BaseModel):
state: Optional[str] = 'downloading'
upspeed: Optional[str] = None
dlspeed: Optional[str] = None
media: Optional[dict] = {}
media: Optional[dict] = Field(default_factory=dict)
userid: Optional[str] = None
username: Optional[str] = None
left_time: Optional[str] = None
class TransferTask(BaseModel):
"""
文件整理任务
"""
fileitem: FileItem = None
meta: Any = None
mediainfo: Optional[Any] = None
target_directory: Optional[TransferDirectoryConf] = None
target_storage: Optional[str] = None
target_path: Optional[Path] = None
transfer_type: Optional[str] = None
scrape: Optional[bool] = False
library_type_folder: Optional[bool] = False
library_category_folder: Optional[bool] = False
episodes_info: Optional[List[TmdbEpisode]] = None
downloader: Optional[str] = None
download_hash: Optional[str] = None
download_history: Optional[DownloadHistory] = None
def to_dict(self):
"""
返回字典
"""
dicts = vars(self).copy()
dicts["fileitem"] = self.fileitem.dict() if self.fileitem else None
dicts["meta"] = self.meta.dict() if self.meta else None
dicts["mediainfo"] = self.mediainfo.dict() if self.mediainfo else None
dicts["target_directory"] = self.target_directory.dict() if self.target_directory else None
return dicts
class TransferJobTask(BaseModel):
"""
文件整理作业任务
"""
fileitem: Optional[FileItem] = None
meta: Optional[MetaInfo] = None
state: Optional[str] = None
downloader: Optional[str] = None
download_hash: Optional[str] = None
class TransferJob(BaseModel):
"""
文件整理作业
"""
media: Optional[MediaInfo] = None
season: Optional[int] = None
tasks: Optional[List[TransferJobTask]] = Field(default_factory=list)
class TransferInfo(BaseModel):
"""
文件转移结果信息
文件整理结果
"""
# 是否成功标志
success: bool = True
@@ -57,13 +111,13 @@ class TransferInfo(BaseModel):
# 处理文件数
file_count: Optional[int] = 0
# 处理文件清单
file_list: Optional[list] = []
file_list: Optional[list] = Field(default_factory=list)
# 目标文件清单
file_list_new: Optional[list] = []
file_list_new: Optional[list] = Field(default_factory=list)
# 总文件大小
total_size: Optional[float] = 0
# 失败清单
fail_list: Optional[list] = []
fail_list: Optional[list] = Field(default_factory=list)
# 错误信息
message: Optional[str] = None
# 是否需要刮削
@@ -81,6 +135,18 @@ class TransferInfo(BaseModel):
return dicts
class TransferQueue(BaseModel):
"""
异步整理队列信息
"""
# 任务信息
task: Optional[TransferTask] = None
# 回调函数
callback: Optional[Callable] = None
# 整理结果
result: Optional[TransferInfo] = None
class EpisodeFormat(BaseModel):
"""
剧集自定义识别格式

View File

@@ -1,6 +1,6 @@
from typing import Optional
from pydantic import BaseModel
from pydantic import BaseModel, Field
# Shared properties
@@ -18,9 +18,9 @@ class UserBase(BaseModel):
# 是否开启二次验证
is_otp: Optional[bool] = False
# 权限
permissions: Optional[dict] = {}
permissions: Optional[dict] = Field(default_factory=dict)
# 个性化设置
settings: Optional[dict] = {}
settings: Optional[dict] = Field(default_factory=dict)
class Config:
orm_mode = True
@@ -31,7 +31,7 @@ class UserCreate(UserBase):
name: str
email: Optional[str] = None
password: Optional[str] = None
settings: Optional[dict] = {}
settings: Optional[dict] = Field(default_factory=dict)
# Properties to receive via API on update
@@ -40,7 +40,7 @@ class UserUpdate(UserBase):
name: str
email: Optional[str] = None
password: Optional[str] = None
settings: Optional[dict] = {}
settings: Optional[dict] = Field(default_factory=dict)
class UserInDBBase(UserBase):

View File

@@ -4,12 +4,14 @@ from fastapi import FastAPI
from app.core.config import global_vars, settings
from app.core.module import ModuleManager
from app.log import logger
from app.utils.system import SystemUtils
# SitesHelper涉及资源包拉取提前引入并容错提示
try:
from app.helper.sites import SitesHelper
except ImportError as e:
SitesHelper = None
error_message = f"错误: {str(e)}\n站点认证及索引相关资源导入失败,请尝试重建容器或手动拉取资源"
print(error_message, file=sys.stderr)
sys.exit(1)
@@ -26,7 +28,7 @@ from app.schemas import Notification, NotificationType
from app.schemas.types import SystemConfigKey
from app.db import close_database
from app.db.systemconfig_oper import SystemConfigOper
from app.chain.command import CommandChain
from app.command import Command, CommandChain
def start_frontend():
@@ -78,13 +80,15 @@ def user_auth():
"""
用户认证检查
"""
if SitesHelper().auth_level >= 2:
sites_helper = SitesHelper()
if sites_helper.auth_level >= 2:
return
auth_conf = SystemConfigOper().get(SystemConfigKey.UserSiteAuthParams)
if auth_conf:
SitesHelper().check_user(**auth_conf)
status, msg = sites_helper.check_user(**auth_conf) if auth_conf else sites_helper.check_user()
if status:
logger.info(f"{msg} 用户认证成功")
else:
SitesHelper().check_user()
logger.info(f"用户认证失败:{msg}")
def check_auth():
@@ -156,7 +160,7 @@ def start_modules(_: FastAPI):
# 启动定时服务
Scheduler()
# 加载命令
CommandChain()
Command()
# 启动前端服务
start_frontend()
# 检查认证状态

View File

@@ -1,6 +1,6 @@
import asyncio
from app.chain.command import CommandChain
from app.command import Command
from app.core.plugin import PluginManager
from app.log import logger
from app.scheduler import Scheduler
@@ -14,7 +14,7 @@ async def init_plugins_async():
loop = asyncio.get_event_loop()
plugin_manager = PluginManager()
scheduler = Scheduler()
command = CommandChain()
command = Command()
sync_result = await execute_task(loop, plugin_manager.sync, "插件同步到本地")
resolved_dependencies = await execute_task(loop, plugin_manager.install_plugin_missing_dependencies,

View File

@@ -1,5 +1,6 @@
import time
from typing import Any
from functools import wraps
from typing import Any, Callable
from app.schemas import ImmediateException
@@ -36,3 +37,27 @@ def retry(ExceptionToCheck: Any,
return f_retry
return deco_retry
def log_execution_time(logger: Any = None):
"""
记录函数执行时间的装饰器
:param logger: 日志记录器对象,用于记录异常信息
"""
def decorator(func: Callable):
@wraps(func)
def wrapper(*args, **kwargs):
start_time = time.time()
result = func(*args, **kwargs)
end_time = time.time()
msg = f"{func.__name__} execution time: {end_time - start_time:.2f} seconds"
if logger:
logger.debug(msg)
else:
print(msg)
return result
return wrapper
return decorator

View File

@@ -0,0 +1,30 @@
"""2.0.9
Revision ID: 55390f1f77c1
Revises: bf28a012734c
Create Date: 2024-12-24 13:29:32.225532
"""
import contextlib
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import sqlite
# revision identifiers, used by Alembic.
revision = '55390f1f77c1'
down_revision = 'bf28a012734c'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
# 整理历史记录 增加下载器字段
with contextlib.suppress(Exception):
op.add_column('transferhistory', sa.Column('downloader', sa.String(), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
pass

View File

@@ -5,9 +5,6 @@ Revises: ecf3c693fdf3
Create Date: 2024-11-14 12:49:13.838120
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import sqlite
from app.db.systemconfig_oper import SystemConfigOper
from app.schemas.types import SystemConfigKey

View File

@@ -0,0 +1,29 @@
"""2.0.8
Revision ID: bf28a012734c
Revises: eaf9cbc49027
Create Date: 2024-12-23 18:29:31.202143
"""
import contextlib
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = 'bf28a012734c'
down_revision = 'eaf9cbc49027'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
# 下载历史记录 增加下载器字段
with contextlib.suppress(Exception):
op.add_column('downloadhistory', sa.Column('downloader', sa.String(), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
pass

View File

@@ -58,6 +58,9 @@ pystray~=0.19.5
pyotp~=2.9.0
Pinyin2Hanzi~=0.1.1
pywebpush~=2.0.0
python-115~=0.0.9.8.8.3
python-115==0.0.9.8.8.2
p115client==0.0.3.8.3.3
python-cookietools==0.0.2.1
aligo~=6.2.4
aiofiles~=24.1.0
jieba~=0.42.1

View File

@@ -1,2 +1,2 @@
APP_VERSION = 'v2.1.3'
FRONTEND_VERSION = 'v2.1.4'
APP_VERSION = 'v2.1.8'
FRONTEND_VERSION = 'v2.1.8'