Mirror of https://github.com/DrizzleTime/Foxel.git, synced 2026-05-11 18:10:10 +08:00.
Commit: feat: extend BackupData model
This commit is contained in:
@@ -1,4 +1,5 @@
|
||||
import json
|
||||
from datetime import datetime
|
||||
|
||||
from fastapi import HTTPException
|
||||
from tortoise.transactions import in_transaction
|
||||
@@ -6,8 +7,12 @@ from tortoise.transactions import in_transaction
|
||||
from domain.backup.types import BackupData
|
||||
from domain.config.service import VERSION
|
||||
from models.database import (
|
||||
AIDefaultModel,
|
||||
AIModel,
|
||||
AIProvider,
|
||||
AutomationTask,
|
||||
Configuration,
|
||||
Plugin,
|
||||
ShareLink,
|
||||
StorageAdapter,
|
||||
UserAccount,
|
||||
@@ -23,22 +28,38 @@ class BackupService:
|
||||
tasks = await AutomationTask.all().values()
|
||||
shares = await ShareLink.all().values()
|
||||
configs = await Configuration.all().values()
|
||||
providers = await AIProvider.all().values()
|
||||
models = await AIModel.all().values()
|
||||
default_models = await AIDefaultModel.all().values()
|
||||
plugins = await Plugin.all().values()
|
||||
|
||||
for share in shares:
|
||||
share["created_at"] = (
|
||||
share["created_at"].isoformat() if share.get("created_at") else None
|
||||
)
|
||||
share["expires_at"] = (
|
||||
share["expires_at"].isoformat() if share.get("expires_at") else None
|
||||
)
|
||||
share_links = cls._serialize_datetime_fields(
|
||||
shares, ["created_at", "expires_at"]
|
||||
)
|
||||
ai_providers = cls._serialize_datetime_fields(
|
||||
providers, ["created_at", "updated_at"]
|
||||
)
|
||||
ai_models = cls._serialize_datetime_fields(
|
||||
models, ["created_at", "updated_at"]
|
||||
)
|
||||
ai_default_models = cls._serialize_datetime_fields(
|
||||
default_models, ["created_at", "updated_at"]
|
||||
)
|
||||
plugin_items = cls._serialize_datetime_fields(
|
||||
plugins, ["created_at", "updated_at"]
|
||||
)
|
||||
|
||||
return BackupData(
|
||||
version=VERSION,
|
||||
storage_adapters=list(adapters),
|
||||
user_accounts=list(users),
|
||||
automation_tasks=list(tasks),
|
||||
share_links=list(shares),
|
||||
share_links=share_links,
|
||||
configurations=list(configs),
|
||||
ai_providers=ai_providers,
|
||||
ai_models=ai_models,
|
||||
ai_default_models=ai_default_models,
|
||||
plugins=plugin_items,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
@@ -59,6 +80,10 @@ class BackupService:
|
||||
await StorageAdapter.all().using_db(conn).delete()
|
||||
await UserAccount.all().using_db(conn).delete()
|
||||
await Configuration.all().using_db(conn).delete()
|
||||
await AIDefaultModel.all().using_db(conn).delete()
|
||||
await AIModel.all().using_db(conn).delete()
|
||||
await AIProvider.all().using_db(conn).delete()
|
||||
await Plugin.all().using_db(conn).delete()
|
||||
|
||||
if payload.configurations:
|
||||
await Configuration.bulk_create(
|
||||
@@ -86,6 +111,93 @@ class BackupService:
|
||||
|
||||
if payload.share_links:
|
||||
await ShareLink.bulk_create(
|
||||
[ShareLink(**share) for share in payload.share_links],
|
||||
[
|
||||
ShareLink(**share)
|
||||
for share in cls._parse_datetime_fields(
|
||||
payload.share_links, ["created_at", "expires_at"]
|
||||
)
|
||||
],
|
||||
using_db=conn,
|
||||
)
|
||||
|
||||
if payload.ai_providers:
|
||||
await AIProvider.bulk_create(
|
||||
[
|
||||
AIProvider(**item)
|
||||
for item in cls._parse_datetime_fields(
|
||||
payload.ai_providers, ["created_at", "updated_at"]
|
||||
)
|
||||
],
|
||||
using_db=conn,
|
||||
)
|
||||
|
||||
if payload.ai_models:
|
||||
await AIModel.bulk_create(
|
||||
[
|
||||
AIModel(**item)
|
||||
for item in cls._parse_datetime_fields(
|
||||
payload.ai_models, ["created_at", "updated_at"]
|
||||
)
|
||||
],
|
||||
using_db=conn,
|
||||
)
|
||||
|
||||
if payload.ai_default_models:
|
||||
await AIDefaultModel.bulk_create(
|
||||
[
|
||||
AIDefaultModel(**item)
|
||||
for item in cls._parse_datetime_fields(
|
||||
payload.ai_default_models, ["created_at", "updated_at"]
|
||||
)
|
||||
],
|
||||
using_db=conn,
|
||||
)
|
||||
|
||||
if payload.plugins:
|
||||
await Plugin.bulk_create(
|
||||
[
|
||||
Plugin(**item)
|
||||
for item in cls._parse_datetime_fields(
|
||||
payload.plugins, ["created_at", "updated_at"]
|
||||
)
|
||||
],
|
||||
using_db=conn,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _serialize_datetime_fields(
|
||||
records: list[dict], fields: list[str]
|
||||
) -> list[dict]:
|
||||
serialized: list[dict] = []
|
||||
for record in records:
|
||||
item = dict(record)
|
||||
for field in fields:
|
||||
value = item.get(field)
|
||||
if isinstance(value, datetime):
|
||||
item[field] = value.isoformat()
|
||||
serialized.append(item)
|
||||
return serialized
|
||||
|
||||
@staticmethod
def _parse_datetime_fields(
    records: list[dict], fields: list[str]
) -> list[dict]:
    """Return copies of *records* with the named string fields parsed to datetimes.

    Inverse of ``_serialize_datetime_fields``: each record is shallow-copied
    and any string value in *fields* is run through ``BackupService._from_iso``
    (which may raise an HTTP 400 for malformed input). Non-string values pass
    through unchanged.
    """
    restored: list[dict] = []
    for entry in records:
        clone = {**entry}
        for name in fields:
            raw = clone.get(name)
            if isinstance(raw, str):
                clone[name] = BackupService._from_iso(raw)
        restored.append(clone)
    return restored
|
||||
|
||||
@staticmethod
|
||||
def _from_iso(value: str) -> datetime | None:
|
||||
if not value:
|
||||
return None
|
||||
normalized = value.replace("Z", "+00:00")
|
||||
try:
|
||||
return datetime.fromisoformat(normalized)
|
||||
except ValueError as exc: # noqa: BLE001
|
||||
raise HTTPException(status_code=400, detail="无效的日期格式") from exc
|
||||
|
||||
@@ -10,3 +10,7 @@ class BackupData(BaseModel):
|
||||
automation_tasks: list[dict[str, Any]] = Field(default_factory=list)
|
||||
share_links: list[dict[str, Any]] = Field(default_factory=list)
|
||||
configurations: list[dict[str, Any]] = Field(default_factory=list)
|
||||
ai_providers: list[dict[str, Any]] = Field(default_factory=list)
|
||||
ai_models: list[dict[str, Any]] = Field(default_factory=list)
|
||||
ai_default_models: list[dict[str, Any]] = Field(default_factory=list)
|
||||
plugins: list[dict[str, Any]] = Field(default_factory=list)
|
||||
|
||||
Reference in New Issue
Block a user