mirror of
https://github.com/cnlimiter/codex-register.git
synced 2026-05-07 05:32:48 +08:00
feat(accounts): 添加CPA上传功能及相关字段
- 在Account模型和响应中添加cpa_uploaded和cpa_uploaded_at字段 - 新增批量导出和CPA上传API端点 - 实现数据库迁移功能自动添加缺失列 - 扩展账号管理API支持CPA相关操作
This commit is contained in:
@@ -50,6 +50,8 @@ class Account(Base):
|
||||
expires_at = Column(DateTime) # Token 过期时间
|
||||
status = Column(String(20), default='active') # 'active', 'expired', 'banned', 'failed'
|
||||
extra_data = Column(JSONEncodedDict) # 额外信息存储
|
||||
cpa_uploaded = Column(Boolean, default=False) # 是否已上传到 CPA
|
||||
cpa_uploaded_at = Column(DateTime) # 上传时间
|
||||
created_at = Column(DateTime, default=datetime.utcnow)
|
||||
updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
|
||||
|
||||
@@ -68,6 +70,8 @@ class Account(Base):
|
||||
'expires_at': self.expires_at.isoformat() if self.expires_at else None,
|
||||
'status': self.status,
|
||||
'proxy_used': self.proxy_used,
|
||||
'cpa_uploaded': self.cpa_uploaded,
|
||||
'cpa_uploaded_at': self.cpa_uploaded_at.isoformat() if self.cpa_uploaded_at else None,
|
||||
'created_at': self.created_at.isoformat() if self.created_at else None,
|
||||
'updated_at': self.updated_at.isoformat() if self.updated_at else None
|
||||
}
|
||||
|
||||
@@ -4,13 +4,16 @@
|
||||
|
||||
from contextlib import contextmanager
|
||||
from typing import Generator
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy import create_engine, text
|
||||
from sqlalchemy.orm import sessionmaker, Session
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
import os
|
||||
import logging
|
||||
|
||||
from .models import Base
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DatabaseSessionManager:
|
||||
"""数据库会话管理器"""
|
||||
@@ -77,6 +80,40 @@ class DatabaseSessionManager:
|
||||
"""删除所有表(谨慎使用)"""
|
||||
Base.metadata.drop_all(bind=self.engine)
|
||||
|
||||
def migrate_tables(self):
    """Apply a lightweight, additive schema migration (SQLite only).

    Adds any missing columns to existing tables without touching stored
    data. Non-SQLite backends are skipped because the column-existence
    check relies on SQLite's ``pragma_table_info`` table-valued function;
    migrate those with a dedicated tool instead.
    """
    if not self.database_url.startswith("sqlite"):
        logger.info("非 SQLite 数据库,跳过自动迁移")
        return

    # Columns to check for and add when absent:
    # (table name, column name, SQL column type / default clause)
    migrations = [
        ("accounts", "cpa_uploaded", "BOOLEAN DEFAULT 0"),
        ("accounts", "cpa_uploaded_at", "DATETIME"),
    ]

    with self.engine.connect() as conn:
        for table_name, column_name, column_type in migrations:
            try:
                # Bind the table/column values as parameters instead of
                # interpolating them into the SQL string;
                # pragma_table_info() accepts its argument as an ordinary
                # bound value.
                result = conn.execute(
                    text(
                        "SELECT * FROM pragma_table_info(:tbl) "
                        "WHERE name = :col"
                    ),
                    {"tbl": table_name, "col": column_name},
                )
                if result.fetchone() is None:
                    # Column is missing -- add it. Identifiers cannot be
                    # bound as parameters, but these come from the
                    # hard-coded list above, so interpolation is safe.
                    logger.info(f"添加列 {table_name}.{column_name}")
                    conn.execute(text(
                        f"ALTER TABLE {table_name} ADD COLUMN {column_name} {column_type}"
                    ))
                    conn.commit()
                    logger.info(f"成功添加列 {table_name}.{column_name}")
            except Exception as e:
                # Best-effort: a failure on one column must not block the
                # remaining migrations or application startup.
                logger.warning(f"迁移列 {table_name}.{column_name} 时出错: {e}")
|
||||
|
||||
|
||||
# 全局数据库会话管理器实例
|
||||
_db_manager: DatabaseSessionManager = None
|
||||
@@ -90,6 +127,8 @@ def init_database(database_url: str = None) -> DatabaseSessionManager:
|
||||
if _db_manager is None:
|
||||
_db_manager = DatabaseSessionManager(database_url)
|
||||
_db_manager.create_tables()
|
||||
# 执行数据库迁移
|
||||
_db_manager.migrate_tables()
|
||||
return _db_manager
|
||||
|
||||
|
||||
|
||||
@@ -37,6 +37,8 @@ class AccountResponse(BaseModel):
|
||||
expires_at: Optional[str] = None
|
||||
status: str
|
||||
proxy_used: Optional[str] = None
|
||||
cpa_uploaded: bool = False
|
||||
cpa_uploaded_at: Optional[str] = None
|
||||
created_at: Optional[str] = None
|
||||
updated_at: Optional[str] = None
|
||||
|
||||
@@ -84,6 +86,8 @@ def account_to_response(account: Account) -> AccountResponse:
|
||||
expires_at=account.expires_at.isoformat() if account.expires_at else None,
|
||||
status=account.status,
|
||||
proxy_used=account.proxy_used,
|
||||
cpa_uploaded=account.cpa_uploaded or False,
|
||||
cpa_uploaded_at=account.cpa_uploaded_at.isoformat() if account.cpa_uploaded_at else None,
|
||||
created_at=account.created_at.isoformat() if account.created_at else None,
|
||||
updated_at=account.updated_at.isoformat() if account.updated_at else None,
|
||||
)
|
||||
@@ -249,21 +253,16 @@ async def batch_update_accounts(request: BatchUpdateRequest):
|
||||
}
|
||||
|
||||
|
||||
@router.get("/export/json")
|
||||
async def export_accounts_json(
|
||||
status: Optional[str] = Query(None, description="状态筛选"),
|
||||
email_service: Optional[str] = Query(None, description="邮箱服务筛选"),
|
||||
):
|
||||
class BatchExportRequest(BaseModel):
    """Batch export request body: the database ids of accounts to export."""
    # Primary-key ids of the Account rows to include in the export.
    ids: List[int]
|
||||
|
||||
|
||||
@router.post("/export/json")
|
||||
async def export_accounts_json(request: BatchExportRequest):
|
||||
"""导出账号为 JSON 格式"""
|
||||
with get_db() as db:
|
||||
query = db.query(Account)
|
||||
|
||||
if status:
|
||||
query = query.filter(Account.status == status)
|
||||
if email_service:
|
||||
query = query.filter(Account.email_service == email_service)
|
||||
|
||||
accounts = query.all()
|
||||
accounts = db.query(Account).filter(Account.id.in_(request.ids)).all()
|
||||
|
||||
export_data = []
|
||||
for acc in accounts:
|
||||
@@ -289,7 +288,6 @@ async def export_accounts_json(
|
||||
filename = f"accounts_{timestamp}.json"
|
||||
|
||||
# 返回 JSON 响应
|
||||
import io
|
||||
content = json.dumps(export_data, ensure_ascii=False, indent=2)
|
||||
|
||||
return StreamingResponse(
|
||||
@@ -299,24 +297,14 @@ async def export_accounts_json(
|
||||
)
|
||||
|
||||
|
||||
@router.get("/export/csv")
|
||||
async def export_accounts_csv(
|
||||
status: Optional[str] = Query(None, description="状态筛选"),
|
||||
email_service: Optional[str] = Query(None, description="邮箱服务筛选"),
|
||||
):
|
||||
@router.post("/export/csv")
|
||||
async def export_accounts_csv(request: BatchExportRequest):
|
||||
"""导出账号为 CSV 格式"""
|
||||
import csv
|
||||
import io
|
||||
|
||||
with get_db() as db:
|
||||
query = db.query(Account)
|
||||
|
||||
if status:
|
||||
query = query.filter(Account.status == status)
|
||||
if email_service:
|
||||
query = query.filter(Account.email_service == email_service)
|
||||
|
||||
accounts = query.all()
|
||||
accounts = db.query(Account).filter(Account.id.in_(request.ids)).all()
|
||||
|
||||
# 创建 CSV 内容
|
||||
output = io.StringIO()
|
||||
@@ -361,6 +349,31 @@ async def export_accounts_csv(
|
||||
)
|
||||
|
||||
|
||||
@router.post("/export/cpa")
async def export_accounts_cpa(request: BatchExportRequest):
    """Export the selected accounts as a CPA token JSON file.

    Looks up the accounts by the ids in the request body, converts each to
    the CPA token format, and streams the result back as a timestamped
    JSON attachment.
    """
    from ...core.cpa_upload import generate_token_json

    with get_db() as db:
        accounts = db.query(Account).filter(Account.id.in_(request.ids)).all()

        # Build the CPA-format token array while the session is still open,
        # so lazy-loaded attributes remain accessible.
        export_data = [generate_token_json(acc) for acc in accounts]

    # Timestamped download filename.
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    filename = f"cpa_tokens_{timestamp}.json"

    content = json.dumps(export_data, ensure_ascii=False, indent=2)

    return StreamingResponse(
        iter([content]),
        media_type="application/json",
        # Fix: interpolate the generated filename into the header; it was
        # previously built but never used.
        headers={"Content-Disposition": f"attachment; filename={filename}"}
    )
|
||||
|
||||
|
||||
@router.get("/stats/summary")
|
||||
async def get_accounts_stats():
|
||||
"""获取账号统计信息"""
|
||||
@@ -515,3 +528,71 @@ async def batch_validate_tokens(request: BatchValidateRequest):
|
||||
})
|
||||
|
||||
return results
|
||||
|
||||
|
||||
# ============== CPA 上传相关 ==============
|
||||
|
||||
class CPAUploadRequest(BaseModel):
    """Request body for uploading a single account to CPA."""
    # Optional proxy URL; when omitted, callers fall back to the global
    # proxy configuration.
    proxy: Optional[str] = None
|
||||
|
||||
|
||||
class BatchCPAUploadRequest(BaseModel):
    """Request body for uploading several accounts to CPA in one call."""
    # Primary-key ids of the Account rows to upload.
    ids: List[int]
    # Optional proxy URL; when omitted, callers fall back to the global
    # proxy configuration.
    proxy: Optional[str] = None
|
||||
|
||||
|
||||
@router.post("/{account_id}/upload-cpa")
async def upload_account_to_cpa(account_id: int, request: Optional[CPAUploadRequest] = None):
    """Upload a single account's token to CPA.

    On success the account is marked as uploaded (``cpa_uploaded`` /
    ``cpa_uploaded_at``) and committed. Upload failures are reported in the
    response payload rather than raised; only a missing account yields an
    HTTP 404.

    Note: the ``request`` parameter previously used the implicit-Optional
    form (``CPAUploadRequest = None``), which PEP 484 deprecates; the
    annotation is now explicit and behavior is unchanged.
    """
    from ...core.cpa_upload import upload_to_cpa, generate_token_json

    # Prefer a proxy supplied in the request body; fall back to the global
    # proxy configuration.
    proxy = request.proxy if request and request.proxy else get_settings().proxy_url

    with get_db() as db:
        account = crud.get_account_by_id(db, account_id)
        if not account:
            raise HTTPException(status_code=404, detail="账号不存在")

        if not account.access_token:
            return {
                "success": False,
                "error": "账号缺少 Token,无法上传"
            }

        # Build the CPA-format token payload from the account record.
        token_data = generate_token_json(account)

        # Perform the upload (network call, possibly through the proxy).
        success, message = upload_to_cpa(token_data, proxy)

        if success:
            # Persist the upload status so the account is not re-uploaded.
            account.cpa_uploaded = True
            account.cpa_uploaded_at = datetime.utcnow()
            db.commit()

            return {
                "success": True,
                "message": message
            }
        else:
            return {
                "success": False,
                "error": message
            }
|
||||
|
||||
|
||||
@router.post("/batch-upload-cpa")
async def batch_upload_accounts_to_cpa(request: BatchCPAUploadRequest):
    """Upload several accounts to CPA in one batch call."""
    from ...core.cpa_upload import batch_upload_to_cpa

    # A proxy supplied in the request wins; otherwise use the globally
    # configured one.
    proxy = request.proxy or get_settings().proxy_url

    return batch_upload_to_cpa(request.ids, proxy)
|
||||
|
||||
Reference in New Issue
Block a user