fix: export backup via JSON-mode model_dump and serialize user-account datetime fields

This commit is contained in:
shiyu
2026-02-09 15:26:48 +08:00
parent c456a208de
commit c473251926
2 changed files with 12 additions and 4 deletions

View File

@@ -28,7 +28,7 @@ async def export_backup(
data = await BackupService.export_data(sections=sections)
timestamp = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
headers = {"Content-Disposition": f"attachment; filename=foxel_backup_{timestamp}.json"}
return JSONResponse(content=data.model_dump(), headers=headers)
return JSONResponse(content=data.model_dump(mode="json"), headers=headers)
@router.post("/import", summary="导入数据")

View File

@@ -82,6 +82,9 @@ class BackupService:
share_links = cls._serialize_datetime_fields(
shares, ["created_at", "expires_at"]
)
user_accounts = cls._serialize_datetime_fields(
users, ["created_at", "last_login"]
)
ai_providers = cls._serialize_datetime_fields(
providers, ["created_at", "updated_at"]
)
@@ -99,7 +102,7 @@ class BackupService:
version=VERSION,
sections=sections,
storage_adapters=list(adapters),
user_accounts=list(users),
user_accounts=user_accounts,
automation_tasks=list(tasks),
share_links=share_links,
configurations=list(configs),
@@ -132,6 +135,11 @@ class BackupService:
if payload.share_links
else []
)
user_accounts = (
cls._parse_datetime_fields(payload.user_accounts, ["created_at", "last_login"])
if payload.user_accounts
else []
)
ai_providers = (
cls._parse_datetime_fields(payload.ai_providers, ["created_at", "updated_at"])
if payload.ai_providers
@@ -189,10 +197,10 @@ class BackupService:
if "user_accounts" in sections and payload.user_accounts:
if mode == "merge":
await cls._merge_records(UserAccount, payload.user_accounts, conn)
await cls._merge_records(UserAccount, user_accounts, conn)
else:
await UserAccount.bulk_create(
[UserAccount(**user) for user in payload.user_accounts],
[UserAccount(**user) for user in user_accounts],
using_db=conn,
)