diff --git a/domain/backup/api.py b/domain/backup/api.py
index 7f847b2..aab995c 100644
--- a/domain/backup/api.py
+++ b/domain/backup/api.py
@@ -28,7 +28,7 @@ async def export_backup(
     data = await BackupService.export_data(sections=sections)
     timestamp = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
     headers = {"Content-Disposition": f"attachment; filename=foxel_backup_{timestamp}.json"}
-    return JSONResponse(content=data.model_dump(), headers=headers)
+    return JSONResponse(content=data.model_dump(mode="json"), headers=headers)
 
 
 @router.post("/import", summary="导入数据")
diff --git a/domain/backup/service.py b/domain/backup/service.py
index f09d632..150b843 100644
--- a/domain/backup/service.py
+++ b/domain/backup/service.py
@@ -82,6 +82,9 @@ class BackupService:
         share_links = cls._serialize_datetime_fields(
             shares, ["created_at", "expires_at"]
         )
+        user_accounts = cls._serialize_datetime_fields(
+            users, ["created_at", "last_login"]
+        )
         ai_providers = cls._serialize_datetime_fields(
             providers, ["created_at", "updated_at"]
         )
@@ -99,7 +102,7 @@ class BackupService:
             version=VERSION,
             sections=sections,
             storage_adapters=list(adapters),
-            user_accounts=list(users),
+            user_accounts=user_accounts,
             automation_tasks=list(tasks),
             share_links=share_links,
             configurations=list(configs),
@@ -132,6 +135,11 @@ class BackupService:
             if payload.share_links
             else []
         )
+        user_accounts = (
+            cls._parse_datetime_fields(payload.user_accounts, ["created_at", "last_login"])
+            if payload.user_accounts
+            else []
+        )
         ai_providers = (
             cls._parse_datetime_fields(payload.ai_providers, ["created_at", "updated_at"])
             if payload.ai_providers
@@ -189,10 +197,10 @@ class BackupService:
 
         if "user_accounts" in sections and payload.user_accounts:
             if mode == "merge":
-                await cls._merge_records(UserAccount, payload.user_accounts, conn)
+                await cls._merge_records(UserAccount, user_accounts, conn)
             else:
                 await UserAccount.bulk_create(
-                    [UserAccount(**user) for user in payload.user_accounts],
+                    [UserAccount(**user) for user in user_accounts],
                     using_db=conn,
                 )
 