Merge branch 'master' into fix/worker-mail-otp-extraction

This commit is contained in:
kailian zhou
2026-03-24 17:58:01 +08:00
committed by GitHub
53 changed files with 5999 additions and 409 deletions

View File

@@ -9,7 +9,7 @@ ENV PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1 \
# WebUI 默认配置
WEBUI_HOST=0.0.0.0 \
WEBUI_PORT=1455 \
WEBUI_PORT=15555 \
LOG_LEVEL=info \
DEBUG=0
@@ -30,7 +30,7 @@ RUN pip install --no-cache-dir --upgrade pip \
COPY . .
# 暴露端口
EXPOSE 1455
EXPOSE 15555
# 启动 WebUI
CMD ["python", "webui.py"]

View File

@@ -146,9 +146,9 @@ docker-compose up -d
```bash
docker run -d \
-p 1455:1455 \
-p 15555:15555 \
-e WEBUI_HOST=0.0.0.0 \
-e WEBUI_PORT=1455 \
-e WEBUI_PORT=15555 \
-e WEBUI_ACCESS_PASSWORD=your_secure_password \
-v $(pwd)/data:/app/data \
--name codex-register \
@@ -157,7 +157,7 @@ docker run -d \
环境变量说明:
- `WEBUI_HOST`: 监听的主机地址 (默认 `0.0.0.0`)
- `WEBUI_PORT`: 监听的端口 (默认 `1455`)
- `WEBUI_PORT`: 监听的端口 (默认 `15555`)
- `WEBUI_ACCESS_PASSWORD`: 设置 Web UI 的访问密码
- `DEBUG`: 设为 `1` 或 `true` 开启调试模式
- `LOG_LEVEL`: 日志级别,如 `info`, `debug`
@@ -373,8 +373,7 @@ docker-compose build --no-cache
- CPA / Sub2API / Team Manager 上传始终直连,不走代理;其中 CPA 可选把账号记录的代理写入 auth file 的 `proxy_url`
- 注册时自动随机生成用户名和生日(年龄范围 18-45 岁)
- 支付链接生成使用账号 access_token 鉴权,走全局代理配置
- 无痕浏览器优先使用 playwright(注入 cookie 直达支付页);未安装时降级为系统 Chrome/Edge 无痕模式
- 安装完整支付功能:`pip install ".[payment]" && playwright install chromium`(可选)
- 无痕打开支付页默认调用系统 Chrome/Edge 的隐私模式
- 订阅状态自动检测调用 `chatgpt.com/backend-api/me`,走全局代理
- 批量注册并发数上限为 50线程池大小已相应调整

155
check_otp_timing.py Normal file
View File

@@ -0,0 +1,155 @@
#!/usr/bin/env python3
"""
离线验证 TempmailService 的 OTP 时间锚点过滤行为。
场景 1:
- 30 秒内先后收到两封邮件
- 在两封邮件之间设置新的 otp_sent_at
- 期望过滤第一封,命中第二封
场景 2:
- 第二封邮件已经入箱后才刷新 otp_sent_at
- 期望复现严格时间过滤导致第二封也被排除的窗口
"""
from __future__ import annotations
from dataclasses import dataclass
import logging
from typing import Any, Dict, List, Optional
import src.services.tempmail as tempmail_module
from src.services.tempmail import TempmailService
@dataclass(frozen=True)
class Scenario:
    # One OTP-anchor test case: where the anchor sits relative to the two
    # fake inbox emails, and what the service is expected to return.
    name: str
    anchor_offset_seconds: int  # anchor = base_timestamp + this offset
    expected_code: Optional[str]  # None means "no OTP should be returned"
    expected_message: str  # human-readable note printed in the report
class FakeResponse:
    """Minimal HTTP-response stand-in: a canned JSON payload plus a status."""

    def __init__(self, payload: Dict[str, Any], status_code: int = 200):
        self.status_code = status_code
        self._payload = payload

    def json(self) -> Dict[str, Any]:
        """Return the canned payload, mimicking httpx/requests ``json()``."""
        return self._payload
class FakeHTTPClient:
    """Records every GET it receives and answers with one canned payload."""

    def __init__(self, payload: Dict[str, Any]):
        self.payload = payload
        # One entry per request, in call order: {"url": ..., "kwargs": ...}.
        self.calls: List[Dict[str, Any]] = []

    def get(self, url: str, **kwargs: Any) -> FakeResponse:
        """Log the request and return a 200 response with the payload."""
        call_record = {"url": url, "kwargs": kwargs}
        self.calls.append(call_record)
        return FakeResponse(self.payload)
class FakeClock:
    """Deterministic clock whose ``sleep`` advances time instead of waiting."""

    def __init__(self, start: float):
        self.current = float(start)

    def time(self) -> float:
        return self.current

    def sleep(self, seconds: float) -> None:
        # Advancing instead of blocking makes the polling loop run instantly.
        self.current = self.current + float(seconds)
def build_inbox_payload(base_timestamp: int) -> Dict[str, Any]:
    """Fake inbox with two OpenAI OTP mails, 10 s and 20 s after *base_timestamp*."""
    specs = [
        ("mail-1", 10, "First OTP", "111111"),
        ("mail-2", 20, "Second OTP", "222222"),
    ]
    emails = [
        {
            "id": mail_id,
            "received_at": base_timestamp + offset,
            "from": "noreply@openai.com",
            "subject": subject,
            "body": body,
        }
        for mail_id, offset, subject, body in specs
    ]
    return {"emails": emails}
def run_scenario(scenario: Scenario) -> Dict[str, Any]:
    """Execute one anchor-placement scenario against a faked TempmailService.

    The service's HTTP client and the tempmail module's time functions are
    replaced with deterministic fakes so the polling loop runs instantly.
    Returns a result dict with the returned code, a pass/fail flag, and the
    number of inbox polls performed.
    """
    base_timestamp = 1_700_000_000
    service = TempmailService({"base_url": "https://api.tempmail.test"})
    # Pre-seed the token cache so no real inbox has to be created.
    service._email_cache["tester@example.com"] = {"token": "token-1"}
    service.http_client = FakeHTTPClient(build_inbox_payload(base_timestamp))
    # The clock starts exactly at the scenario's anchor offset; the anchor
    # passed to get_verification_code equals the clock's starting time.
    fake_clock = FakeClock(start=base_timestamp + scenario.anchor_offset_seconds)
    anchor_timestamp = fake_clock.time()
    # Monkeypatch the module-level time functions and restore them in the
    # finally block so later scenarios see the real clock again.
    original_time = tempmail_module.time.time
    original_sleep = tempmail_module.time.sleep
    try:
        tempmail_module.time.time = fake_clock.time
        tempmail_module.time.sleep = fake_clock.sleep
        code = service.get_verification_code(
            email="tester@example.com",
            timeout=1,
            otp_sent_at=anchor_timestamp,
        )
    finally:
        tempmail_module.time.time = original_time
        tempmail_module.time.sleep = original_sleep
    passed = code == scenario.expected_code
    return {
        "name": scenario.name,
        "anchor_timestamp": anchor_timestamp,
        "code": code,
        "passed": passed,
        "http_calls": len(service.http_client.calls),
        "message": scenario.expected_message,
    }
def main() -> int:
    """Run every scenario, print a PASS/FAIL report, return 0 iff all passed."""
    # Silence the service's own logging so only the report is printed.
    logging.getLogger("src.services.tempmail").setLevel(logging.ERROR)
    scenarios = [
        Scenario(
            name="anchor_between_two_emails",
            anchor_offset_seconds=15,
            expected_code="222222",
            expected_message="新锚点位于两封邮件之间,第一封应被过滤,第二封应被命中。",
        ),
        Scenario(
            name="anchor_set_after_second_email",
            anchor_offset_seconds=21,
            expected_code=None,
            expected_message="锚点晚于第二封邮件时,严格大于过滤会把第二封也排除,复现登录阶段的竞态窗口。",
        ),
    ]
    print("Tempmail OTP timing check")
    print("=========================")
    failure_count = 0
    for spec in scenarios:
        outcome = run_scenario(spec)
        ok = outcome["passed"]
        print(f"{'PASS' if ok else 'FAIL'} {outcome['name']}")
        print(f" anchor_timestamp={outcome['anchor_timestamp']}")
        print(f" returned_code={outcome['code']}")
        print(f" inbox_polls={outcome['http_calls']}")
        print(f" note={outcome['message']}")
        if not ok:
            failure_count += 1
    return 1 if failure_count else 0
# Script entry point: propagate main()'s exit status to the shell.
if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -1,19 +1,26 @@
version: '3.8'
services:
webui:
build: .
ports:
- "1455:1455"
- "15555:15555"
environment:
- WEBUI_HOST=0.0.0.0
- WEBUI_PORT=1455
- WEBUI_PORT=15555
- WEBUI_ACCESS_PASSWORD=admin123
- DEBUG=0
- LOG_LEVEL=info
# 如果需要访问密码,可以在这里取消注释并设置
# - WEBUI_ACCESS_PASSWORD=your_secure_password
volumes:
# 挂载数据目录以持久化数据库和日志
- ./data:/app/data
- ./logs:/app/logs
restart: unless-stopped
healthcheck:
test:
- CMD
- python
- -c
- import urllib.request; urllib.request.urlopen('http://127.0.0.1:15555/', timeout=5).read()
interval: 10s
timeout: 5s
retries: 5
start_period: 20s
restart: unless-stopped

View File

@@ -0,0 +1,259 @@
# RegistrationEngine 深度架构审计与失败日志合并报告
日期: 2026-03-23
范围:
- 代码审计: `src/core/register.py`, `src/core/http_client.py`, `src/services/tempmail.py`, `src/web/routes/registration.py`
- 日志样本: `logs/app.log` 中最近 100 个失败任务
## 1. 执行摘要
结论分两层:
1. `RegistrationEngine` 当前是一个集中式顺序控制器,控制面、数据面、状态面耦合在单类内部,闭环存在“观测粗、控制粗、误差分类弱”的结构性问题。
2. 最近 100 个失败任务中,`Timeout` 是主导故障,`429` 是次级但高度集中,`403` 是低频单点。按失败任务计数:
- Timeout: 44 次
- 429: 11 次
- 403: 1 次
- 其他: 44 次
就统计意义而言,`Timeout` 占比 44%,95% Wilson 区间为 34.7% 到 53.8%,显著高于 `429` 的 11% 和 `403` 的 1%。这说明当前首要瓶颈不是 Cloudflare 封锁,也不是邮箱创建限流,而是 OTP/授权后半程的时滞与恢复路径失真。
## 2. CSE 闭环审计
### 2.1 控制拓扑
- Plant:
- OpenAI 授权链路
- 临时邮箱供应商
- 代理网络
- 本地数据库与任务状态
- Controller:
- `RegistrationEngine.run()` 的顺序式流程控制
- `src/web/routes/registration.py` 的邮箱服务切换与任务终态写回
- Sensors:
- `_log()` 输出
- `logs/app.log`
- 数据库任务日志
- `TaskManager` 内存状态
- Actuators:
- HTTP 请求
- 邮箱创建与轮询
- OAuth 重入
- 邮箱服务 failover
- 代理选择
### 2.2 闭环优点
- 主流程步骤清晰,按阶段推进,适合做阶段化观测,入口位于 [register.py](/Volumes/Work/code/codex-manager/src/core/register.py#L1015)。
- 路由层已经有邮箱服务候选集与限流熔断雏形,见 [registration.py](/Volumes/Work/code/codex-manager/src/web/routes/registration.py#L403) 和 [registration.py](/Volumes/Work/code/codex-manager/src/web/routes/registration.py#L457)。
- `Tempmail` 429 已有最小检测链路,日志可追踪到供应商限流,见 [tempmail.py](/Volumes/Work/code/codex-manager/src/services/tempmail.py#L81)。
### 2.3 闭环缺陷
#### 1. 控制器过大,导致误差无法在局部收敛
`run()` 集成了 IP 检查、邮箱供应商交互、OpenAI 授权、OTP、Workspace、OAuth 回调与结果持久化前状态填充,单方法过长,且依赖大量实例可变状态,见 [register.py](/Volumes/Work/code/codex-manager/src/core/register.py#L1015)。
后果:
- 任一阶段失败都被压平为布尔值返回。
- 控制输入只能“继续/返回失败”,无法做细粒度补偿。
- 失败分类被终态字符串覆盖,真实物理故障被折叠成“获取 Workspace ID 失败”等代理错误。
#### 2. 传感器语义不足,导致 Timeout 被错误归类到 Workspace
OTP 拉取失败只返回 `None``run()` 再把后续失败归并到 Workspace 路径,见 [register.py](/Volumes/Work/code/codex-manager/src/core/register.py#L442) 和 [register.py](/Volumes/Work/code/codex-manager/src/core/register.py#L1080)。
日志证据:
- [app.log](/Volumes/Work/code/codex-manager/logs/app.log#L54897) 到 [app.log](/Volumes/Work/code/codex-manager/logs/app.log#L54904) 显示先发生“等待验证码超时”,终态却写成“获取 Workspace ID 失败 (含降级补偿)”。
这会让控制器错误地把邮箱时滞问题当作授权后段问题处理。
#### 3. 控制面只对邮箱服务 429 做局部闭环,未覆盖代理与授权面
路由层只在 `RateLimitedEmailServiceError` 场景下切换邮箱服务,见 [registration.py](/Volumes/Work/code/codex-manager/src/web/routes/registration.py#L457)。但最近 100 个失败任务里,真正占大头的是 OTP Timeout 与 Workspace 缺失,而这两个问题都没有对应的控制输入:
- 没有代理信誉降级
- 没有 OAuth/Workspace 阶段的代理切换
- 没有 OTP 第二阶段的独立重试预算
#### 4. HTTP 客户端重试策略与故障形态不匹配
`HTTPClient.request()` 只对 `>=500` 做重试,不对 429 做退避,也不区分 403/429/401 的控制意义,见 [http_client.py](/Volumes/Work/code/codex-manager/src/core/http_client.py#L112)。
后果:
- 429 会直接回传业务层,业务层只能失败或靠外层熔断。
- 403 无法触发代理信誉降级。
- 401 无法触发登录流重建。
#### 5. 状态面与观测面有重复副作用
`_log()` 同时写内存、回调、数据库、全局日志,见 [register.py](/Volumes/Work/code/codex-manager/src/core/register.py#L139)。这让传感器与状态面耦合:
- 日志故障可能反噬主流程
- 同一事件被多次展开,难以统一结构化分析
- 控制器无法只输出“事件”,必须直接决定落盘方式
## 3. Clean Code 审计
### 3.1 主要坏味道
- God Object: `RegistrationEngine` 同时承担编排器、网络客户端协调器、状态容器、日志器和部分持久化语义。
- Primitive Obsession: 大量 `bool` / `Optional[str]` 返回值承载复杂故障。
- Duplicate Logic: 登录密码提交拆成两个几乎重复的方法,见 [register.py](/Volumes/Work/code/codex-manager/src/core/register.py#L747) 和 [register.py](/Volumes/Work/code/codex-manager/src/core/register.py#L793)。
- Temporal Coupling: `self.email`, `self.password`, `self.oauth_start`, `self.session`, `self._otp_sent_at` 必须按隐含顺序写入,稍有偏差就会失真。
- Error Flattening: `创建用户账户失败``获取 Workspace ID 失败` 等终态过于粗糙,无法直接反映物理根因。
- Mixed Concerns: 任务路由函数 `_run_sync_registration_task()` 同时做代理选择、邮箱服务选择、引擎执行、自动上传和数据库状态收口,见 [registration.py](/Volumes/Work/code/codex-manager/src/web/routes/registration.py#L362)。
### 3.2 冗余与可收敛点
- `_submit_login_password_step()``_submit_login_password_step_and_get_continue_url()` 可以合并为一个返回结构化结果的方法。
- `run()` 中多个阶段共享“发请求 -> 记录状态码 -> 解析错误 -> 决定控制动作”的模板,可抽成 phase runner。
- `_log()` 的数据库写入应从引擎剥离到事件订阅层。
- `TempmailService.get_verification_code()` 明确写明 `otp_sent_at` 暂不使用,见 [tempmail.py](/Volumes/Work/code/codex-manager/src/services/tempmail.py#L121)。这与双阶段 OTP 场景存在直接脱节。
## 4. 最近 100 个失败任务的物理分布
样本窗口:
- 起点: [app.log](/Volumes/Work/code/codex-manager/logs/app.log#L25252)
- 终点: [app.log](/Volumes/Work/code/codex-manager/logs/app.log#L60766)
分类结果:
| 类别 | 次数 | 占比 | 95% Wilson 区间 |
| --- | ---: | ---: | --- |
| Timeout | 44 | 44% | 34.7% - 53.8% |
| 429 | 11 | 11% | 6.3% - 18.6% |
| 403 | 1 | 1% | 0.2% - 5.4% |
| 其他 | 44 | 44% | - |
### 4.1 Timeout
核心事实:
- 44 个 Timeout 中,43 个都表现为“等待验证码超时 -> 终态记为获取 Workspace ID 失败 (含降级补偿)”。
- 代表日志见 [app.log](/Volumes/Work/code/codex-manager/logs/app.log#L54897) 到 [app.log](/Volumes/Work/code/codex-manager/logs/app.log#L54904)。
解释:
- 这不是纯 Workspace 故障,而是第二阶段 OTP 没有在邮箱侧及时可见。
- `TempmailService.get_verification_code()` 轮询固定 120 秒,且不使用 `otp_sent_at` 做新旧邮件裁剪,见 [tempmail.py](/Volumes/Work/code/codex-manager/src/services/tempmail.py#L121)。
- 因为控制器把 OTP 超时后的降级流和 Workspace 解析串在一起,最终把上游时滞扭曲成下游授权失败。
统计结论:
- Timeout 是主导根因,且占比显著高于 429。
- 从控制论角度,这是“传感器滞后 + 误差归因错误”而不是单纯接口失败。
### 4.2 429
核心事实:
- 11 个 429 全部落在 `Tempmail.lol /inbox/create`,即邮箱创建阶段。
- 代表日志见 [app.log](/Volumes/Work/code/codex-manager/logs/app.log#L52280) 到 [app.log](/Volumes/Work/code/codex-manager/logs/app.log#L52282)。
解释:
- 这是单供应商、单接口、单阶段的集中限流,不是全链路随机波动。
- `HTTPClient` 不对 429 做退避重试,见 [http_client.py](/Volumes/Work/code/codex-manager/src/core/http_client.py#L117)。
- 路由层虽然有邮箱服务熔断与切换框架,但在这些失败样本里仍然表现为直接失败,说明供应商多样性或默认候选配置仍不足。
统计结论:
- 429 是第二优先级问题。
- 其特征是“集中、可隔离、可通过供应商调度降低”。
### 4.3 403
核心事实:
- 最近 100 个失败任务里只有 1 个 403。
- 代表日志见 [app.log](/Volumes/Work/code/codex-manager/logs/app.log#L55373) 到 [app.log](/Volumes/Work/code/codex-manager/logs/app.log#L55374)。
- 响应体是 Cloudflare `Just a moment...` 页面,不是业务 JSON。
解释:
- 这是代理信誉或指纹挑战问题,不是注册表单协议错误。
- 403 是低频离群点,不能作为当前主优化方向。
统计结论:
- 403 目前不构成主导故障模式。
- 但应该进入代理评分与预检体系,避免在高价值任务上触发。
## 5. 代码收敛方案
### 5.1 第一阶段: 拆控制器,不改行为
-`run()` 拆成显式 phase:
- `ip_check`
- `email_prepare`
- `signup`
- `otp_primary`
- `account_create`
- `oauth_reenter`
- `otp_secondary`
- `workspace_resolve`
- `oauth_callback`
- 每个 phase 返回统一的 `PhaseResult`:
- `success`
- `phase`
- `error_code`
- `http_status`
- `retryable`
- `next_action`
目标:
- 保持现有输入输出不变。
- 先让误差可观测,再谈策略优化。
### 5.2 第二阶段: 分离控制面与执行面
- `RegistrationEngine` 只保留编排。
- HTTP 请求、OTP 拉取、Workspace 解析、OAuth 回调分别下沉为独立 executor。
- `_log()` 改成事件发布,不在引擎内部直接写数据库。
目标:
- 控制器只负责状态跃迁。
- 执行器只负责副作用。
- 观测器统一消费事件。
### 5.3 第三阶段: 建立真实失败分类
- 终态错误码至少拆出:
- `OTP_TIMEOUT_PRIMARY`
- `OTP_TIMEOUT_SECONDARY`
- `EMAIL_CREATE_RATE_LIMITED`
- `SIGNUP_FORBIDDEN_CLOUDFLARE`
- `WORKSPACE_COOKIE_MISSING`
- `LOGIN_PASSWORD_401`
- `REGISTRATION_DISALLOWED`
- 路由层不要再把多种物理根因压成 `获取 Workspace ID 失败`
### 5.4 第四阶段: 压缩重复逻辑
- 合并两个登录密码提交方法。
- 抽象“请求 + 状态码记录 + 错误解析”模板。
-`_run_sync_registration_task()` 里的自动上传流程拆到 post-success hook避免任务执行与外部同步混在一个函数里。
## 6. 针对这 100 次失败的物理优化策略
### 6.1 Timeout 优化
- 把 OTP 第二阶段单独建预算,不复用第一阶段固定 120 秒。
- `TempmailService.get_verification_code()` 使用 `otp_sent_at` 过滤旧邮件,避免第二次 OTP 被第一次邮件污染。
- 第二次 OTP 超时后,先做邮箱供应商刷新或换供应商,再做 Workspace 解析;不要直接进入 Workspace 失败终态。
- 记录每个域名、每个供应商的 OTP 到达延迟分位数,按 P50/P95 选择优先级。
- 对“OTP 二次等待”引入更短轮询间隔和更快刷新,而不是简单把总 timeout 拉长。
### 6.2 429 优化
- 为邮箱创建接口单独做 429 退避,不依赖通用 HTTP 客户端的 5xx 逻辑。
-`retry_after`、冷却结束时间和供应商失败率持久化,不只保存在进程内。
- 默认配置至少准备两个可切换邮箱供应商,不让单一 `Tempmail.lol` 成为硬依赖。
- 在批量模式下给邮箱创建阶段加令牌桶,平滑 03:11 到 03:27 的创建尖峰。
### 6.3 403 优化
- 在真正启动注册前,对代理做一次低成本授权页预探测;若命中 Cloudflare challenge直接换代理。
- 给代理建立信誉分403 一次即降权,不再继续分配到注册主链。
- 403 不需要扩大主流程重试次数,应该做代理层淘汰。
## 7. 最终判断
本轮审计的核心判断是:
- 代码层面,真正需要收敛的不是“再补几个 if”而是把 `RegistrationEngine` 从大一统顺序脚本收敛成阶段化控制器。
- 物理层面,最近 100 次失败的首要矛盾是 OTP/降级链路的时滞失真,其次才是邮箱创建 429403 目前只是低频外部扰动。
如果只优化 429 或 403而不重构 Timeout 的归因与控制输入,失败面不会明显下降。

View File

@@ -0,0 +1,165 @@
# 功能可用性实测报告
日期: 2026-03-24
范围:
- 启动命令: `uv run python -m src.web.app`
- 监听地址: `http://127.0.0.1:15555`
- 隔离数据库: `tests_runtime/e2e_runtime_1774308869.db`
- 实测脚本: `tests/e2e/runtime_functionality_check.py`
## 1. 执行摘要
本次按真实服务链路完成了以下验证:
- 服务存活检查通过。
- `POST /api/registration/create` 可创建受控模拟任务。
- `GET /api/ws/task/{task_uuid}` 可实时推送日志与状态,任务完成时收到 `completed`
- 任务完成后数据库状态符合 Task 1、Task 5 预期。
- 批量计数探针通过 `/api/registration/batch/{batch_id}` 验证,符合 Task 2 预期。
- 重启后僵尸任务被自动标记失败,符合 Task 4 预期。
结论:
- 本次新增的真实服务验证 harness 可用。
- Task 1-5 中本次可通过真实服务直接观测的加固点均已生效。
## 2. 实测过程
### 2.1 端口处理
`15555` 端口初始被已有容器 `codex-manager-webui-1` 占用。为执行指定启动命令,先停止该容器,实测结束后已恢复。
### 2.2 执行命令
1. 静态验证
- `uv run python -m pytest tests/test_account_token_sync_status.py tests/test_batch_task_manager.py tests/test_task_manager_status_broadcast.py tests/test_task_recovery.py tests/test_registration_email_service_failover.py`
- 结果: `16 passed`
- `uv run python -m py_compile src/web/app.py src/web/routes/registration.py tests/e2e/runtime_functionality_check.py`
- 结果: 退出码 `0`
2. 真实服务启动
- `APP_DATABASE_URL='sqlite:////Volumes/Work/code/codex-manager/tests_runtime/e2e_runtime_1774308869.db' APP_HOST='127.0.0.1' APP_PORT='15555' uv run python -m src.web.app`
3. live 实测
- `uv run python tests/e2e/runtime_functionality_check.py --mode live --base-url http://127.0.0.1:15555 --ws-url ws://127.0.0.1:15555 --db-path /Volumes/Work/code/codex-manager/tests_runtime/e2e_runtime_1774308869.db --report-path /Volumes/Work/code/codex-manager/tests_runtime/runtime_functionality_report_1774308869.json`
4. recovery 准备
- `uv run python tests/e2e/runtime_functionality_check.py --mode prepare-recovery --db-path /Volumes/Work/code/codex-manager/tests_runtime/e2e_runtime_1774308869.db --state-path /Volumes/Work/code/codex-manager/tests_runtime/runtime_recovery_state_1774308869.json`
5. 服务重启后 recovery 实测
- `uv run python tests/e2e/runtime_functionality_check.py --mode verify-recovery --base-url http://127.0.0.1:15555 --db-path /Volumes/Work/code/codex-manager/tests_runtime/e2e_runtime_1774308869.db --state-path /Volumes/Work/code/codex-manager/tests_runtime/runtime_recovery_state_1774308869.json --report-path /Volumes/Work/code/codex-manager/tests_runtime/runtime_recovery_report_1774308869.json`
## 3. 验证结果
### 3.1 服务存活
- `GET /api/registration/tasks?page=1&page_size=1` 返回 `200`
### 3.2 模拟任务创建与 WebSocket
- 创建任务 UUID: `a8f4da41-354c-4d89-9634-c582a032c70b`
- 批量探针 ID: `2e8cfce4-bf20-4f0b-8839-a94e8e141472`
- WebSocket 收到 3 条状态消息:
- `pending`
- `running`
- `completed`
- WebSocket 收到 6 条实时日志,包含:
- Token 同步探针写库
- OTP 超时退避 3 次
- 批量计数探针完成
判定:
- 日志不是任务结束后一次性补发,而是在运行过程中实时推送。
### 3.3 Task 1 验证: Token 同步
数据库中以下账号状态正确:
- `mock-seeded-a8f4da41@example.test`
- `access_token` 已保存
- `refresh_token` 已保存
- `token_sync_status = pending`
- `mock-tokenless-a8f4da41@example.test`
- 先创建无 token再更新 `access_token`
- `token_sync_status = pending`
- `mock-partial-a8f4da41@example.test`
- 清空 `refresh_token` 后仍保留 `access_token`
- `token_sync_status = pending`
Outlook 配置探针:
- `mock-outlook-a8f4da41@example.test`
- `refresh_token` 已从 `old-second` 更新为 `new-second`
### 3.4 Task 2 验证: 批量计数
`GET /api/registration/batch/2e8cfce4-bf20-4f0b-8839-a94e8e141472` 返回:
- `total = 3`
- `completed = 3`
- `success = 2`
- `failed = 1`
- `finished = true`
- `progress = 3/3`
判定:
- 批量计数与任务结果一致,收口正确。
### 3.5 Task 3 验证: 单任务状态广播
任务完成时WebSocket 最后一条状态消息为:
- `status = completed`
- `email = mock-seeded-a8f4da41@example.test`
- `email_service = tempmail`
判定:
- 单任务状态广播已生效。
### 3.6 Task 4 验证: 僵尸任务恢复
重启前手工插入任务:
- `stale-e738842e-74d8-400d-859e-1b283eab1a95`
- 初始状态: `running`
重启后观测结果:
- 状态变为 `failed`
- `error_message = 服务启动时检测到未完成的历史任务,已标记失败,请重新发起。`
- `logs` 已追加系统收敛日志
- `completed_at` 已写入
服务启动日志同时出现:
- `已收敛 1 个僵尸任务: stale-e7`
### 3.7 Task 5 验证: OTP 超时退避
模拟任务内部连续触发 3 次二阶段 OTP 超时,记录到任务结果:
- 第 1 次: `failures = 1`, `delay_seconds = 30`
- 第 2 次: `failures = 2`, `delay_seconds = 60`
- 第 3 次: `failures = 3`, `delay_seconds = 3600`
判定:
- 深度冷却逻辑已生效。
## 4. 产物
- `tests/e2e/runtime_functionality_check.py`
- `tests_runtime/runtime_functionality_report_1774308869.json`
- `tests_runtime/runtime_recovery_report_1774308869.json`
- `tests_runtime/runtime_recovery_state_1774308869.json`
- `tests_runtime/e2e_runtime_1774308869.db`
## 5. 观察到的问题
- `python -m src.web.app` 启动时会出现 `runpy` 的重复导入告警,但不影响服务启动与本次验证结果。
- 启动日志中打印的 `host``database` 仍显示为数据库配置值 `0.0.0.0 / sqlite:///data/database.db`,与本次通过环境变量注入的真实运行参数不一致。实测链路实际使用了隔离数据库,但日志口径存在偏差。

View File

@@ -0,0 +1,29 @@
# Task 5 Validation - 2026-03-23
## Scope
- Task 5
- OTP timeout backoff handling
- Registration controller backoff state persistence
## Commands
1. `./.venv/bin/python -m pytest tests/test_registration_email_service_failover.py tests/test_registration_otp_phase.py`
- exit code: `0`
- result: `4 passed`
- notes: 存在项目既有的 SQLAlchemy / Pydantic / FastAPI deprecation warnings本次任务未改动相关代码路径。
2. `./.venv/bin/ruff check src/services/base.py src/web/routes/registration.py tests/test_registration_email_service_failover.py`
- exit code: `127`
- result: failed
- notes: `.venv/bin/ruff` 不存在。
3. `./.venv/bin/python -m ruff check src/services/base.py src/web/routes/registration.py tests/test_registration_email_service_failover.py`
- exit code: `1`
- result: failed
- notes: 虚拟环境未安装 `ruff` 模块,未完成 lint 校验。
## Summary
- 回归测试通过,覆盖 `OTP_TIMEOUT_SECONDARY` 连续 3 次失败进入 `3600s` 深度冷却。
- Lint 校验因环境缺少 `ruff` 未执行。

103
probe_tempmail.py Normal file
View File

@@ -0,0 +1,103 @@
#!/usr/bin/env python3
"""
Tempmail.lol API 探针。
用途:
1. 创建测试收件箱或复用现有 token。
2. 拉取 /inbox 原始 JSON 并原样打印。
3. 检查邮件对象里是否存在 received_at/date 等时间字段。
"""
import argparse
import json
import sys
import time
from typing import Any, Dict, Iterable
import httpx
# Tempmail.lol v2 API root used when --base-url is not supplied.
DEFAULT_BASE_URL = "https://api.tempmail.lol/v2"
# Candidate timestamp-like keys to look for on each email object.
TIME_FIELDS = ("received_at", "date", "created_at", "createdAt", "timestamp")
def parse_args() -> argparse.Namespace:
    """Build and evaluate the command line for the inbox probe."""
    cli = argparse.ArgumentParser(description="抓取 Tempmail.lol 收件箱原始 JSON")
    cli.add_argument("--base-url", default=DEFAULT_BASE_URL, help="Tempmail API 基础地址")
    cli.add_argument("--token", help="已有 inbox token未提供时自动创建新邮箱")
    cli.add_argument("--poll-count", type=int, default=1, help="轮询次数")
    cli.add_argument("--poll-interval", type=float, default=3.0, help="轮询间隔秒数")
    cli.add_argument("--timeout", type=float, default=20.0, help="HTTP 超时时间")
    return cli.parse_args()
def dump_json(title: str, payload: Dict[str, Any]) -> None:
    """Pretty-print *payload* under a banner line carrying *title*."""
    banner = f"\n===== {title} ====="
    rendered = json.dumps(payload, ensure_ascii=False, indent=2, sort_keys=True)
    print(banner)
    print(rendered)
def summarize_time_fields(emails: Iterable[Dict[str, Any]]) -> None:
    """Print, for each email, whichever known time fields it carries."""
    for index, message in enumerate(emails, start=1):
        found = {}
        for field in TIME_FIELDS:
            if field in message:
                found[field] = message.get(field)
        print(f"email[{index}] 时间字段: {json.dumps(found, ensure_ascii=False, default=str)}")
def create_inbox(client: httpx.Client, base_url: str) -> Dict[str, Any]:
    """POST /inbox/create, dump the raw response, and return its JSON body."""
    request_headers = {
        "Accept": "application/json",
        "Content-Type": "application/json",
    }
    response = client.post(
        f"{base_url}/inbox/create",
        headers=request_headers,
        json={},
    )
    print(f"CREATE_STATUS {response.status_code}")
    response.raise_for_status()
    body = response.json()
    dump_json("CREATE_RESPONSE", body)
    return body
def fetch_inbox(client: httpx.Client, base_url: str, token: str) -> Dict[str, Any]:
    """GET /inbox for *token*, dump the raw JSON, and summarize time fields."""
    response = client.get(
        f"{base_url}/inbox",
        params={"token": token},
        headers={"Accept": "application/json"},
    )
    print(f"INBOX_STATUS {response.status_code}")
    response.raise_for_status()
    body = response.json()
    dump_json("INBOX_RESPONSE", body)
    mails = body.get("emails", []) if isinstance(body, dict) else []
    if not isinstance(mails, list):
        print(f"emails 字段不是列表: {type(mails).__name__}")
    else:
        summarize_time_fields([mail for mail in mails if isinstance(mail, dict)])
    return body
def main() -> int:
    """Drive the probe: ensure we hold a token, then poll the inbox N times."""
    args = parse_args()
    with httpx.Client(timeout=args.timeout) as client:
        token = args.token
        if not token:
            # No token supplied: create a fresh inbox and use its token.
            inbox = create_inbox(client, args.base_url)
            token = str(inbox.get("token", "")).strip()
            address = str(inbox.get("address", "")).strip()
            print(f"ADDRESS {address}")
            print(f"TOKEN {token}")
        if not token:
            print("未拿到 token无法继续拉取 inbox", file=sys.stderr)
            return 1
        for poll_index in range(1, args.poll_count + 1):
            print(f"\n----- poll {poll_index}/{args.poll_count} -----")
            fetch_inbox(client, args.base_url, token)
            # Sleep between polls, but not after the final one.
            if poll_index < args.poll_count:
                time.sleep(args.poll_interval)
        return 0
# Script entry point: propagate main()'s exit status to the shell.
if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -23,9 +23,6 @@ dev = [
"pytest>=7.0.0",
"httpx>=0.24.0",
]
payment = [
"playwright>=1.40.0",
]
[project.scripts]
codex-webui = "webui:main"

View File

@@ -11,5 +11,3 @@ python-multipart>=0.0.6
sqlalchemy>=2.0.0
aiosqlite>=0.19.0
psycopg[binary]>=3.1.18
# 可选:无痕打开支付页需要 playwrightpip install playwright && playwright install chromium
# playwright>=1.40.0

View File

@@ -56,7 +56,7 @@ APP_DESCRIPTION = "自动注册 OpenAI/Codex CLI 账号的系统"
OAUTH_CLIENT_ID = "app_EMoamEEZ73f0CkXaXp7hrann"
OAUTH_AUTH_URL = "https://auth.openai.com/oauth/authorize"
OAUTH_TOKEN_URL = "https://auth.openai.com/oauth/token"
OAUTH_REDIRECT_URI = "http://localhost:1455/auth/callback"
OAUTH_REDIRECT_URI = "http://localhost:15555/auth/callback"
OAUTH_SCOPE = "openid email profile offline_access"
# OpenAI API 端点
@@ -267,7 +267,7 @@ DEFAULT_SETTINGS = [
("registration.timeout", "120", "超时时间(秒)", "registration"),
("registration.default_password_length", "12", "默认密码长度", "registration"),
("webui.host", "0.0.0.0", "Web UI 监听主机", "webui"),
("webui.port", "8000", "Web UI 监听端口", "webui"),
("webui.port", "15555", "Web UI 监听端口", "webui"),
("webui.debug", "true", "调试模式", "webui"),
]

View File

@@ -76,7 +76,7 @@ SETTING_DEFINITIONS: Dict[str, SettingDefinition] = {
),
"webui_port": SettingDefinition(
db_key="webui.port",
default_value=8000,
default_value=15555,
category=SettingCategory.WEBUI,
description="Web UI 监听端口"
),
@@ -609,7 +609,7 @@ class Settings(BaseModel):
# Web UI 配置
webui_host: str = "0.0.0.0"
webui_port: int = 8000
webui_port: int = 15555
webui_secret_key: SecretStr = SecretStr("your-secret-key-change-in-production")
webui_access_password: SecretStr = SecretStr("admin123")

457
src/core/login.py Normal file
View File

@@ -0,0 +1,457 @@
"""
登录流程引擎
从 register.py 中拆分的登录专属方法
"""
import urllib.parse
import base64
import json as json_module
import time
from datetime import datetime
from typing import Optional, Dict, Any
from .register import RegistrationEngine, RegistrationResult
from ..config.constants import OPENAI_API_ENDPOINTS
class LoginEngine(RegistrationEngine):
    """Login-flow engine.

    Extends RegistrationEngine with the login-specific steps:
    - _follow_login_redirects
    - _submit_login_form
    - _send_verification_code_passwordless
    - _get_workspace_id
    - _select_workspace
    - _follow_redirects
    - _handle_oauth_callback
    """
    def _follow_login_redirects(self, start_url: str) -> bool:
        """Follow the redirect chain from *start_url* until a 200 page lands.

        Returns True once a 200 response is seen (the login page was
        reached); False if the chain ends without one or on any error.
        """
        try:
            current_url = start_url
            max_redirects = 6
            for i in range(max_redirects):
                self._log(f"重定向 {i+1}/{max_redirects}: {current_url[:100]}...")
                response = self.session.get(
                    current_url,
                    allow_redirects=False,
                    timeout=15
                )
                location = response.headers.get("Location") or ""
                # If this is not a redirect status code, stop.
                # NOTE(review): only exactly 200 is treated as success here;
                # other non-redirect statuses (4xx/5xx) fall through to the
                # missing-Location branch below — confirm that is intended.
                if response.status_code == 200:
                    self._log(f"非重定向状态码: {response.status_code}")
                    return True
                if not location:
                    self._log("重定向响应缺少 Location 头")
                    break
                # Build the next URL (Location may be relative).
                next_url = urllib.parse.urljoin(current_url, location)
                # Check whether the hop carries OAuth callback parameters.
                if "code=" in next_url and "state=" in next_url:
                    self._log(f"找到回调 URL: {next_url[:100]}...")
                current_url = next_url
            self._log("未能在重定向链中找到最终 URL")
            return False
        except Exception as e:
            self._log(f"跟随重定向失败: {e}", "error")
            return False
    def _submit_login_form(self, did: str, sen_token: Optional[str]) -> bool:
        """Submit the passwordless (email-only) login form.

        did: device id embedded in the sentinel token payload.
        sen_token: optional Sentinel challenge token; when present it is
        wrapped into the ``openai-sentinel-token`` header.
        Returns True only on an HTTP 200 response.
        """
        try:
            self._log("处理免密登录...")
            login_body = f'{{"username":{{"value":"{self.email}","kind":"email"}}}}'
            headers = {
                "referer": "https://auth.openai.com/log-in",
                "accept": "application/json",
                "content-type": "application/json",
            }
            if sen_token:
                sentinel = (
                    f'{{"p": "", "t": "", "c": "{sen_token}", '
                    f'"id": "{did}", "flow": "authorize_continue"}}'
                )
                headers["openai-sentinel-token"] = sentinel
            response = self.session.post(
                OPENAI_API_ENDPOINTS["signup"],
                headers=headers,
                data=login_body,
            )
            self._log(f"提交登录表单状态: {response.status_code}")
            if response.status_code == 200:
                return True
            return False
        except Exception as e:
            self._log(f"处理登录失败: {e}", "error")
            return False
    def _send_verification_code_passwordless(self) -> bool:
        """Ask the passwordless endpoint to email a one-time code.

        Records ``self._otp_sent_at`` *before* the request so the mailbox
        poller can use it as a freshness anchor for incoming mail.
        """
        try:
            self._otp_sent_at = time.time()
            response = self.session.post(
                OPENAI_API_ENDPOINTS["passwordless_send_otp"],
                headers={
                    "referer": "https://auth.openai.com/log-in/password",
                    "accept": "application/json"
                }
            )
            self._log(f"验证码发送状态: {response.status_code}")
            return response.status_code == 200
        except Exception as e:
            self._log(f"发送验证码失败: {e}", "error")
            return False
    def _decode_workspace_id(self, auth_cookie: str) -> str:
        """Decode the first workspace id from the auth-session cookie.

        The cookie is dot-separated; the first segment is URL-safe base64
        encoded JSON carrying a ``workspaces`` list.

        Raises:
            ValueError: if the cookie cannot be parsed or holds no workspace.
        """
        segments = auth_cookie.split(".")
        # NOTE(review): str.split always returns at least one element, so
        # this guard can never fire — confirm whether `< 2` was intended.
        if len(segments) < 1:
            raise ValueError("授权 Cookie 格式错误")
        payload = segments[0]
        # Restore base64 padding before decoding.
        pad = "=" * ((4 - (len(payload) % 4)) % 4)
        decoded = base64.urlsafe_b64decode((payload + pad).encode("ascii"))
        auth_json = json_module.loads(decoded.decode("utf-8"))
        workspaces = auth_json.get("workspaces") or []
        if not workspaces:
            raise ValueError("授权 Cookie 里没有 workspace 信息")
        workspace_id = str((workspaces[0] or {}).get("id") or "").strip()
        if not workspace_id:
            raise ValueError("无法解析 workspace_id")
        return workspace_id
    def _get_workspace_id(self) -> Optional[str]:
        """Read the workspace id from the session cookie, with retries.

        Retries up to 4 times total with 1/2/4-second backoff between
        attempts. Returns None after all attempts fail.
        """
        backoff_seconds = (1, 2, 4)
        max_attempts = len(backoff_seconds) + 1
        for attempt in range(1, max_attempts + 1):
            try:
                auth_cookie = self.session.cookies.get("oai-client-auth-session")
                if auth_cookie:
                    workspace_id = self._decode_workspace_id(auth_cookie)
                    self._log(f"Workspace ID: {workspace_id}")
                    return workspace_id
                raise ValueError("未能获取到授权 Cookie")
            except Exception as e:
                # Only the final attempt escalates to an error-level log.
                level = "warning" if attempt < max_attempts else "error"
                self._log(
                    f"获取 Workspace ID 失败: {e} (第 {attempt}/{max_attempts} 次)",
                    level,
                )
                if attempt < max_attempts:
                    wait_seconds = backoff_seconds[attempt - 1]
                    self._log(f"等待 {wait_seconds} 秒后重试 Workspace ID", "warning")
                    time.sleep(wait_seconds)
        return None
    def _select_workspace(self, workspace_id: str) -> Optional[str]:
        """POST the workspace choice; return the server's ``continue_url``.

        Returns None on non-200 responses, a missing continue_url, or any
        exception.
        """
        try:
            select_body = f'{{"workspace_id":"{workspace_id}"}}'
            response = self.session.post(
                OPENAI_API_ENDPOINTS["select_workspace"],
                headers={
                    "referer": "https://auth.openai.com/sign-in-with-chatgpt/codex/consent",
                    "content-type": "application/json",
                },
                data=select_body,
            )
            if response.status_code != 200:
                self._log(f"选择 workspace 失败: {response.status_code}", "error")
                self._log(f"响应: {response.text[:200]}", "warning")
                return None
            continue_url = str((response.json() or {}).get("continue_url") or "").strip()
            if not continue_url:
                self._log("workspace/select 响应里缺少 continue_url", "error")
                return None
            self._log(f"Continue URL: {continue_url[:100]}...")
            return continue_url
        except Exception as e:
            self._log(f"选择 Workspace 失败: {e}", "error")
            return None
    def _follow_redirects(self, start_url: str) -> Optional[str]:
        """Follow the redirect chain looking for the OAuth callback URL.

        Returns the first URL containing both ``code=`` and ``state=``, or
        None if the chain ends (non-redirect status, missing Location, or
        the 6-hop budget is exhausted) without finding one.
        """
        try:
            current_url = start_url
            max_redirects = 6
            for i in range(max_redirects):
                self._log(f"重定向 {i+1}/{max_redirects}: {current_url[:100]}...")
                response = self.session.get(
                    current_url,
                    allow_redirects=False,
                    timeout=15
                )
                location = response.headers.get("Location") or ""
                # If this is not a redirect status code, stop.
                if response.status_code not in [301, 302, 303, 307, 308]:
                    self._log(f"非重定向状态码: {response.status_code}")
                    break
                if not location:
                    self._log("重定向响应缺少 Location 头")
                    break
                # Build the next URL (Location may be relative).
                next_url = urllib.parse.urljoin(current_url, location)
                # Check whether the hop carries OAuth callback parameters.
                if "code=" in next_url and "state=" in next_url:
                    self._log(f"找到回调 URL: {next_url[:100]}...")
                    return next_url
                current_url = next_url
            self._log("未能在重定向链中找到回调 URL", "error")
            return None
        except Exception as e:
            self._log(f"跟随重定向失败: {e}", "error")
            return None
    def _handle_oauth_callback(self, callback_url: str) -> Optional[Dict[str, Any]]:
        """Exchange the OAuth callback for tokens via ``oauth_manager``.

        Returns the token payload dict, or None when the OAuth flow was
        never started or the exchange fails.
        """
        try:
            if not self.oauth_start:
                self._log("OAuth 流程未初始化", "error")
                return None
            self._log("处理 OAuth 回调...")
            token_info = self.oauth_manager.handle_callback(
                callback_url=callback_url,
                expected_state=self.oauth_start.state,
                code_verifier=self.oauth_start.code_verifier
            )
            self._log("OAuth 授权成功")
            return token_info
        except Exception as e:
            self._log(f"处理 OAuth 回调失败: {e}", "error")
            return None
    def run(self) -> RegistrationResult:
        """Execute the full registration flow with a login-style tail.

        Per the original notes, this supports auto-login for registered
        accounts:
        - when the email is detected as already registered, switch to login
        - registered accounts skip: set password, send code, create account
        - shared steps: fetch code, verify code, workspace and OAuth callback

        NOTE(review): the body below actually returns early with an error
        when ``self._is_existing_account`` is set (step 8) rather than
        switching to a login path — confirm against the docstring's claim.

        Returns:
            RegistrationResult: the registration outcome.
        """
        result = RegistrationResult(success=False, logs=self.logs)
        try:
            self._log("=" * 60)
            self._log("开始注册流程")
            self._log("=" * 60)
            self._log("1. 检查 IP 地理位置...")
            ip_ok, location = self._check_ip_location()
            if not ip_ok:
                result.error_message = f"IP 地理位置不支持: {location}"
                self._log(f"IP 检查失败: {location}", "error")
                return result
            self._log(f"IP 位置: {location}")
            self._log("2. 创建邮箱...")
            if not self._create_email():
                result.error_message = "创建邮箱失败"
                return result
            result.email = self.email
            self._log("3. 初始化会话...")
            if not self._init_session():
                result.error_message = "初始化会话失败"
                return result
            self._log("4. 开始 OAuth 授权流程...")
            if not self._start_oauth():
                result.error_message = "开始 OAuth 流程失败"
                return result
            self._log("5. 获取 Device ID...")
            did = self._get_device_id()
            if not did:
                result.error_message = "获取 Device ID 失败"
                return result
            self._log("6. 检查 Sentinel 拦截...")
            sen_token = self._check_sentinel(did)
            if sen_token:
                self._log("Sentinel 检查通过")
            else:
                self._log("Sentinel 检查失败或未启用", "warning")
            self._log("7. 提交注册表单...")
            signup_result = self._submit_signup_form(did, sen_token)
            if not signup_result.success:
                result.error_message = f"提交注册表单失败: {signup_result.error_message}"
                return result
            if self._is_existing_account:
                self._log(f"8. 邮箱 {self.email} 在 OpenAI 已注册,跳过注册流程", "warning")
                result.error_message = f"邮箱 {self.email} 已在 OpenAI 注册"
                return result
            self._log("8. 注册密码...")
            password_ok, password = self._register_password()
            if not password_ok:
                result.error_message = "注册密码失败"
                return result
            self._log("9. 发送验证码...")
            if not self._send_verification_code():
                result.error_message = "发送验证码失败"
                return result
            self._log("10. 等待验证码...")
            code = self._get_verification_code()
            if not code:
                # One re-send before giving up on the first OTP.
                self._log("10. 验证码超时,重新发送...")
                if self._send_verification_code():
                    code = self._get_verification_code()
            if not code:
                result.error_message = "获取验证码失败"
                return result
            self._log("11. 验证验证码...")
            if not self._validate_verification_code(code):
                result.error_message = "验证验证码失败"
                return result
            self._log("12. 创建用户账户...")
            if not self._create_user_account():
                result.error_message = "创建用户账户失败"
                return result
            self._log("13-1. 结束注册,启用登录流程...")
            if not self._follow_login_redirects(self.oauth_start.auth_url):
                result.error_message = "跟随重定向链失败"
                return result
            self._log("13-2. 提交登陆表单")
            if not self._submit_login_form(did, sen_token):
                result.error_message = "提交登陆表单失败"
                return result
            self._log("14. 发送验证码...")
            if not self._send_verification_code_passwordless():
                result.error_message = "发送验证码失败"
                return result
            self._log("15. 等待验证码...")
            code = self._get_verification_code()
            if not code:
                # One re-send before giving up on the second (login) OTP.
                self._log("15. 验证码超时,重新发送...")
                if self._send_verification_code_passwordless():
                    code = self._get_verification_code()
            if not code:
                result.error_message = "获取验证码失败"
                return result
            self._log("16. 验证验证码...")
            if not self._validate_verification_code(code):
                result.error_message = "验证验证码失败"
                return result
            self._log("17. 获取 Workspace ID...")
            workspace_id = self._get_workspace_id()
            if not workspace_id:
                result.error_message = "获取 Workspace ID 失败"
                return result
            result.workspace_id = workspace_id
            self._log("18. 选择 Workspace...")
            continue_url = self._select_workspace(workspace_id)
            if not continue_url:
                result.error_message = "选择 Workspace 失败"
                return result
            self._log("19. 跟随重定向链...")
            callback_url = self._follow_redirects(continue_url)
            if not callback_url:
                result.error_message = "跟随重定向链失败"
                return result
            self._log("20. 处理 OAuth 回调...")
            token_info = self._handle_oauth_callback(callback_url)
            if not token_info:
                result.error_message = "处理 OAuth 回调失败"
                return result
            result.account_id = token_info.get("account_id", "")
            result.access_token = token_info.get("access_token", "")
            result.refresh_token = token_info.get("refresh_token", "")
            result.id_token = token_info.get("id_token", "")
            result.password = self.password or ""
            result.source = "register"
            session_cookie = self.session.cookies.get("__Secure-next-auth.session-token")
            if session_cookie:
                self.session_token = session_cookie
                result.session_token = session_cookie
                self._log("获取到 Session Token")
            self._log("=" * 60)
            self._log("注册成功!")
            self._log(f"邮箱: {result.email}")
            self._log(f"Account ID: {result.account_id}")
            self._log(f"Workspace ID: {result.workspace_id}")
            self._log("=" * 60)
            result.success = True
            result.metadata = {
                "email_service": self.email_service.service_type.value,
                "proxy_used": self.proxy_url,
                "registered_at": datetime.now().isoformat(),
            }
            return result
        except Exception as e:
            self._log(f"注册过程中发生未预期错误: {e}", "error")
            result.error_message = str(e)
            return result
        finally:
            # Always release the session/engine resources, success or not.
            self.close()

File diff suppressed because it is too large Load Diff

View File

@@ -2,14 +2,24 @@
数据库 CRUD 操作
"""
from typing import List, Optional, Dict, Any, Union
from typing import List, Optional, Dict, Any, Union, Iterable, Set
from datetime import datetime, timedelta
from sqlalchemy.orm import Session
from sqlalchemy.orm.attributes import flag_modified
from sqlalchemy import and_, or_, desc, asc, func
from .models import Account, EmailService, RegistrationTask, Setting, Proxy, CpaService, Sub2ApiService
TOKEN_FIELD_NAMES = ("access_token", "refresh_token", "id_token", "session_token")
def _default_token_sync_status(token_values: Dict[str, Any]) -> str:
"""根据当前持久化的 token 内容推导同步状态。"""
has_token = any(bool(token_values.get(field)) for field in TOKEN_FIELD_NAMES)
return "pending" if has_token else "not_ready"
# ============================================================================
# 账户 CRUD
# ============================================================================
@@ -27,13 +37,21 @@ def create_account(
access_token: Optional[str] = None,
refresh_token: Optional[str] = None,
id_token: Optional[str] = None,
cookies: Optional[str] = None,
proxy_used: Optional[str] = None,
expires_at: Optional['datetime'] = None,
extra_data: Optional[Dict[str, Any]] = None,
status: Optional[str] = None,
source: Optional[str] = None
source: Optional[str] = None,
token_sync_status: Optional[str] = None,
) -> Account:
"""创建新账户"""
token_values = {
"access_token": access_token,
"refresh_token": refresh_token,
"id_token": id_token,
"session_token": session_token,
}
db_account = Account(
email=email,
password=password,
@@ -46,12 +64,15 @@ def create_account(
access_token=access_token,
refresh_token=refresh_token,
id_token=id_token,
cookies=cookies,
proxy_used=proxy_used,
expires_at=expires_at,
extra_data=extra_data or {},
status=status or 'active',
source=source or 'register',
registered_at=datetime.utcnow()
registered_at=datetime.utcnow(),
token_sync_status=token_sync_status or _default_token_sync_status(token_values),
token_sync_updated_at=datetime.utcnow(),
)
db.add(db_account)
db.commit()
@@ -108,6 +129,14 @@ def update_account(
if not db_account:
return None
touches_token = any(field in kwargs for field in TOKEN_FIELD_NAMES)
if touches_token:
persisted_token_values = {
field: kwargs.get(field, getattr(db_account, field))
for field in TOKEN_FIELD_NAMES
}
kwargs.setdefault("token_sync_status", _default_token_sync_status(persisted_token_values))
kwargs["token_sync_updated_at"] = datetime.utcnow()
for key, value in kwargs.items():
if hasattr(db_account, key) and value is not None:
setattr(db_account, key, value)
@@ -326,6 +355,34 @@ def delete_registration_task(db: Session, task_uuid: str) -> bool:
return True
def fail_incomplete_registration_tasks(db: Session, error_message: str) -> List[str]:
"""将服务重启后遗留的未完成任务标记为失败"""
tasks = db.query(RegistrationTask).filter(
RegistrationTask.status.in_(("pending", "running"))
).all()
if not tasks:
return []
now = datetime.utcnow()
cleaned_task_ids: List[str] = []
cleanup_log = f"[系统] {error_message}"
for task in tasks:
task.status = "failed"
task.error_message = error_message
task.completed_at = now
if task.logs:
if cleanup_log not in task.logs:
task.logs = f"{task.logs}\n{cleanup_log}"
else:
task.logs = cleanup_log
cleaned_task_ids.append(task.task_uuid)
db.commit()
return cleaned_task_ids
# 为 API 路由添加别名
get_account = get_account_by_id
get_registration_task = get_registration_task_by_uuid
@@ -437,9 +494,13 @@ def get_proxies(
return query.all()
def get_enabled_proxies(db: Session) -> List[Proxy]:
def get_enabled_proxies(db: Session, exclude_ids: Optional[Iterable[int]] = None) -> List[Proxy]:
"""获取所有启用的代理"""
return db.query(Proxy).filter(Proxy.enabled == True).all()
query = db.query(Proxy).filter(Proxy.enabled == True)
excluded: Set[int] = {int(proxy_id) for proxy_id in (exclude_ids or [])}
if excluded:
query = query.filter(~Proxy.id.in_(excluded))
return query.all()
def update_proxy(
@@ -483,14 +544,18 @@ def update_proxy_last_used(db: Session, proxy_id: int) -> bool:
return True
def get_random_proxy(db: Session) -> Optional[Proxy]:
def get_random_proxy(db: Session, exclude_ids: Optional[Iterable[int]] = None) -> Optional[Proxy]:
"""随机获取一个启用的代理,优先返回 is_default=True 的代理"""
import random
excluded: Set[int] = {int(proxy_id) for proxy_id in (exclude_ids or [])}
# 优先返回默认代理
default_proxy = db.query(Proxy).filter(Proxy.enabled == True, Proxy.is_default == True).first()
default_query = db.query(Proxy).filter(Proxy.enabled == True, Proxy.is_default == True)
if excluded:
default_query = default_query.filter(~Proxy.id.in_(excluded))
default_proxy = default_query.first()
if default_proxy:
return default_proxy
proxies = get_enabled_proxies(db)
proxies = get_enabled_proxies(db, exclude_ids=excluded)
if not proxies:
return None
return random.choice(proxies)
@@ -713,4 +778,38 @@ def delete_tm_service(db: Session, service_id: int) -> bool:
return False
db.delete(svc)
db.commit()
return True
return True
def update_outlook_refresh_token(db: Session, service_id: int, email: str, new_refresh_token: str):
"""更新 EmailService.config 中指定邮箱的 refresh_token"""
service = db.query(EmailService).filter(EmailService.id == service_id).first()
if not service or not isinstance(service.config, dict):
return
normalized_email = (email or "").strip().lower()
if not normalized_email or not isinstance(new_refresh_token, str) or not new_refresh_token:
return
config = dict(service.config)
updated = False
# 单账户格式
if str(config.get("email", "")).lower() == normalized_email:
config["refresh_token"] = new_refresh_token
updated = True
# 多账户列表格式
for acc in config.get("accounts", []):
if not isinstance(acc, dict):
continue
if str(acc.get("email", "")).lower() == normalized_email:
acc["refresh_token"] = new_refresh_token
updated = True
if not updated:
return
service.config = config
flag_modified(service, "config")
db.commit()

View File

@@ -39,6 +39,8 @@ class Account(Base):
refresh_token = Column(Text)
id_token = Column(Text)
session_token = Column(Text) # 会话令牌(优先刷新方式)
token_sync_status = Column(String(20), default='not_ready') # 'not_ready', 'pending', 'synced'
token_sync_updated_at = Column(DateTime, default=datetime.utcnow)
client_id = Column(String(255)) # OAuth Client ID
account_id = Column(String(255))
workspace_id = Column(String(255))
@@ -80,7 +82,9 @@ class Account(Base):
'subscription_type': self.subscription_type,
'subscription_at': self.subscription_at.isoformat() if self.subscription_at else None,
'created_at': self.created_at.isoformat() if self.created_at else None,
'updated_at': self.updated_at.isoformat() if self.updated_at else None
'updated_at': self.updated_at.isoformat() if self.updated_at else None,
'token_sync_status': self.token_sync_status,
'token_sync_updated_at': self.token_sync_updated_at.isoformat() if self.token_sync_updated_at else None,
}
@@ -227,4 +231,4 @@ class Proxy(Base):
if self.username and self.password:
auth = f"{self.username}:{self.password}@"
return f"{scheme}://{auth}{self.host}:{self.port}"
return f"{scheme}://{auth}{self.host}:{self.port}"

View File

@@ -45,7 +45,7 @@ class DatabaseSessionManager:
self.database_url = _build_sqlalchemy_url(database_url)
self.engine = create_engine(
self.database_url,
connect_args={"check_same_thread": False} if self.database_url.startswith("sqlite") else {},
connect_args={"check_same_thread": False, "timeout": 30} if self.database_url.startswith("sqlite") else {},
echo=False, # 设置为 True 可以查看所有 SQL 语句
pool_pre_ping=True # 连接池预检查
)
@@ -110,6 +110,8 @@ class DatabaseSessionManager:
("accounts", "subscription_type", "VARCHAR(20)"),
("accounts", "subscription_at", "DATETIME"),
("accounts", "cookies", "TEXT"),
("accounts", "token_sync_status", "VARCHAR(20) DEFAULT 'not_ready'"),
("accounts", "token_sync_updated_at", "DATETIME"),
("proxies", "is_default", "BOOLEAN DEFAULT 0"),
("cpa_services", "include_proxy_url", "BOOLEAN DEFAULT 0"),
]

View File

@@ -6,6 +6,8 @@
import abc
import logging
import re
import time
from dataclasses import dataclass
from typing import Optional, Dict, Any, List
from enum import Enum
@@ -14,12 +16,109 @@ from ..config.constants import EmailServiceType, OTP_CODE_PATTERN, OTP_CODE_SEMA
logger = logging.getLogger(__name__)
EMAIL_PROVIDER_BACKOFF_BASE_SECONDS = 30
EMAIL_PROVIDER_BACKOFF_MAX_SECONDS = 3600
OTP_TIMEOUT_ERROR_PREFIX = "OTP_TIMEOUT"
@dataclass(frozen=True)
class EmailProviderBackoffState:
"""邮箱供应商退避状态"""
failures: int = 0
delay_seconds: int = 0
opened_until: float = 0.0
retry_after: Optional[int] = None
last_error: Optional[str] = None
def is_open(self, now: Optional[float] = None) -> bool:
now_ts = now if now is not None else time.time()
return self.opened_until > now_ts
def to_dict(self) -> Dict[str, Any]:
return {
"failures": self.failures,
"delay_seconds": self.delay_seconds,
"opened_until": self.opened_until,
"retry_after": self.retry_after,
"last_error": self.last_error,
}
def calculate_adaptive_backoff_delay(
failures: int,
base_delay: int = EMAIL_PROVIDER_BACKOFF_BASE_SECONDS,
max_delay: int = EMAIL_PROVIDER_BACKOFF_MAX_SECONDS,
is_timeout: bool = False,
) -> int:
"""根据连续失败次数计算指数退避时长"""
normalized_failures = max(0, failures)
if is_timeout and normalized_failures >= 3:
return max_delay
exponent = max(0, normalized_failures - 1)
return min(base_delay * (2 ** exponent), max_delay)
def is_otp_timeout_error(error: object) -> bool:
"""识别 OTP 超时类错误码。"""
if error is None:
return False
if isinstance(error, OTPTimeoutEmailServiceError):
return True
error_code = getattr(error, "error_code", "")
if isinstance(error_code, str) and error_code.startswith(OTP_TIMEOUT_ERROR_PREFIX):
return True
return False
def apply_adaptive_backoff(
current_state: Optional[EmailProviderBackoffState],
error: "EmailServiceError",
now: Optional[float] = None,
) -> EmailProviderBackoffState:
"""在限流场景下推进邮箱供应商退避状态"""
state = current_state or EmailProviderBackoffState()
now_ts = now if now is not None else time.time()
next_failures = state.failures + 1
delay_seconds = calculate_adaptive_backoff_delay(
next_failures,
is_timeout=is_otp_timeout_error(error),
)
return EmailProviderBackoffState(
failures=next_failures,
delay_seconds=delay_seconds,
opened_until=now_ts + delay_seconds,
retry_after=getattr(error, "retry_after", None),
last_error=str(error),
)
def reset_adaptive_backoff() -> EmailProviderBackoffState:
"""重置邮箱供应商退避状态"""
return EmailProviderBackoffState()
class EmailServiceError(Exception):
"""邮箱服务异常"""
pass
class RateLimitedEmailServiceError(EmailServiceError):
"""邮箱服务被限流"""
def __init__(self, message: str, retry_after: Optional[int] = None):
super().__init__(message)
self.retry_after = retry_after
class OTPTimeoutEmailServiceError(EmailServiceError):
"""OTP 验证码等待超时。"""
def __init__(self, message: str, error_code: str = OTP_TIMEOUT_ERROR_PREFIX):
super().__init__(message)
self.error_code = error_code
class EmailServiceStatus(Enum):
"""邮箱服务状态"""
HEALTHY = "healthy"
@@ -46,6 +145,7 @@ class BaseEmailService(abc.ABC):
self.name = name or f"{service_type.value}_service"
self._status = EmailServiceStatus.HEALTHY
self._last_error = None
self._provider_backoff = reset_adaptive_backoff()
_EMAIL_ADDRESS_PATTERN = re.compile(r"[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}")
@@ -59,6 +159,15 @@ class BaseEmailService(abc.ABC):
"""获取最后一次错误信息"""
return self._last_error
@property
def provider_backoff_state(self) -> EmailProviderBackoffState:
"""获取当前邮箱供应商退避状态"""
return self._provider_backoff
def apply_provider_backoff_state(self, state: Optional[EmailProviderBackoffState]) -> None:
"""注入外部持久化的邮箱供应商退避状态"""
self._provider_backoff = state or reset_adaptive_backoff()
@abc.abstractmethod
def create_email(self, config: Dict[str, Any] = None) -> Dict[str, Any]:
"""
@@ -95,7 +204,7 @@ class BaseEmailService(abc.ABC):
email_id: 邮箱服务中的 ID如果需要
timeout: 超时时间(秒)
pattern: 验证码正则表达式
otp_sent_at: OTP 发送时间戳,用于过滤旧邮件
otp_sent_at: OTP 发送时间戳,只允许使用严格晚于该锚点的邮件
Returns:
验证码字符串,如果超时或未找到返回 None
@@ -309,8 +418,16 @@ class BaseEmailService(abc.ABC):
if success:
self._status = EmailServiceStatus.HEALTHY
self._last_error = None
self._provider_backoff = reset_adaptive_backoff()
else:
self._status = EmailServiceStatus.DEGRADED
if isinstance(error, RateLimitedEmailServiceError) or is_otp_timeout_error(error):
self._status = EmailServiceStatus.UNAVAILABLE
self._provider_backoff = apply_adaptive_backoff(
self._provider_backoff,
error,
)
else:
self._status = EmailServiceStatus.DEGRADED
if error:
self._last_error = str(error)

View File

@@ -12,7 +12,7 @@ from datetime import datetime, timezone
from html import unescape
from typing import Any, Dict, List, Optional
from .base import BaseEmailService, EmailServiceError, EmailServiceType
from .base import BaseEmailService, EmailServiceError, EmailServiceType, RateLimitedEmailServiceError
from ..config.constants import OTP_CODE_PATTERN
from ..core.http_client import HTTPClient, RequestConfig
@@ -102,7 +102,19 @@ class DuckMailService(BaseEmailService):
error_message = f"{error_message} - {error_payload}"
except Exception:
error_message = f"{error_message} - {response.text[:200]}"
raise EmailServiceError(error_message)
retry_after = None
if response.status_code == 429:
retry_after_header = response.headers.get("Retry-After")
if retry_after_header:
try:
retry_after = max(1, int(retry_after_header))
except ValueError:
retry_after = None
error = RateLimitedEmailServiceError(error_message, retry_after=retry_after)
else:
error = EmailServiceError(error_message)
self.update_status(False, error)
raise error
try:
return response.json()

View File

@@ -10,7 +10,7 @@ import random
import string
from typing import Optional, Dict, Any, List
from .base import BaseEmailService, EmailServiceError, EmailServiceType
from .base import BaseEmailService, EmailServiceError, EmailServiceType, RateLimitedEmailServiceError
from ..core.http_client import HTTPClient, RequestConfig
from ..config.constants import OTP_CODE_PATTERN
@@ -96,8 +96,19 @@ class FreemailService(BaseEmailService):
error_msg = f"{error_msg} - {error_data}"
except Exception:
error_msg = f"{error_msg} - {response.text[:200]}"
self.update_status(False, EmailServiceError(error_msg))
raise EmailServiceError(error_msg)
retry_after = None
if response.status_code == 429:
retry_after_header = response.headers.get("Retry-After")
if retry_after_header:
try:
retry_after = max(1, int(retry_after_header))
except ValueError:
retry_after = None
error = RateLimitedEmailServiceError(error_msg, retry_after=retry_after)
else:
error = EmailServiceError(error_msg)
self.update_status(False, error)
raise error
try:
return response.json()

View File

@@ -10,7 +10,7 @@ import logging
from typing import Optional, Dict, Any, List
from urllib.parse import urljoin
from .base import BaseEmailService, EmailServiceError, EmailServiceType
from .base import BaseEmailService, EmailServiceError, EmailServiceType, RateLimitedEmailServiceError
from ..core.http_client import HTTPClient, RequestConfig
from ..config.constants import OTP_CODE_PATTERN
@@ -148,8 +148,20 @@ class MeoMailEmailService(BaseEmailService):
except:
error_msg = f"{error_msg} - {response.text[:200]}"
self.update_status(False, EmailServiceError(error_msg))
raise EmailServiceError(error_msg)
retry_after = None
if response.status_code == 429:
retry_after_header = response.headers.get("Retry-After")
if retry_after_header:
try:
retry_after = max(1, int(retry_after_header))
except ValueError:
retry_after = None
error = RateLimitedEmailServiceError(error_msg, retry_after=retry_after)
else:
error = EmailServiceError(error_msg)
self.update_status(False, error)
raise error
# 解析响应
try:
@@ -553,4 +565,4 @@ class MeoMailEmailService(BaseEmailService):
"system_config": config,
"cached_emails_count": len(self._emails_cache),
"status": self.status.value,
}
}

View File

@@ -15,7 +15,7 @@ from email.policy import default as email_policy
from html import unescape
from typing import Optional, Dict, Any, List
from .base import BaseEmailService, EmailServiceError, EmailServiceType
from .base import BaseEmailService, EmailServiceError, EmailServiceType, RateLimitedEmailServiceError
from ..core.http_client import HTTPClient, RequestConfig
from ..config.constants import OTP_CODE_PATTERN
@@ -200,8 +200,19 @@ class TempMailService(BaseEmailService):
error_msg = f"{error_msg} - {error_data}"
except Exception:
error_msg = f"{error_msg} - {response.text[:200]}"
self.update_status(False, EmailServiceError(error_msg))
raise EmailServiceError(error_msg)
retry_after = None
if response.status_code == 429:
retry_after_header = response.headers.get("Retry-After")
if retry_after_header:
try:
retry_after = max(1, int(retry_after_header))
except ValueError:
retry_after = None
error = RateLimitedEmailServiceError(error_msg, retry_after=retry_after)
else:
error = EmailServiceError(error_msg)
self.update_status(False, error)
raise error
try:
return response.json()

View File

@@ -6,9 +6,7 @@ import re
import time
import logging
from typing import Optional, Dict, Any, List
import json
from curl_cffi import requests as cffi_requests
from datetime import datetime, timezone
from .base import BaseEmailService, EmailServiceError, EmailServiceType
from ..core.http_client import HTTPClient, RequestConfig
@@ -17,6 +15,8 @@ from ..config.constants import OTP_CODE_PATTERN
logger = logging.getLogger(__name__)
OTP_SENT_AT_TOLERANCE_SECONDS = 2
class TempmailService(BaseEmailService):
"""
@@ -58,10 +58,65 @@ class TempmailService(BaseEmailService):
config=http_config
)
# 状态变量
# 状态变量(内存缓存,重启后从 DB 按需查询)
self._email_cache: Dict[str, Dict[str, Any]] = {}
self._last_check_time: float = 0
def _parse_message_time(self, value: Any) -> Optional[float]:
"""解析 Tempmail 邮件时间,兼容 Unix 时间戳与 ISO 8601。"""
if value is None or value == "":
return None
if isinstance(value, (int, float)):
timestamp = float(value)
else:
text = str(value).strip()
if not text:
return None
try:
timestamp = float(text)
except ValueError:
try:
normalized = text.replace("Z", "+00:00")
timestamp = datetime.fromisoformat(normalized).astimezone(timezone.utc).timestamp()
except Exception:
return None
while timestamp > 1e11:
timestamp /= 1000.0
return timestamp if timestamp > 0 else None
def _get_received_timestamp(self, message: Dict[str, Any]) -> Optional[float]:
"""返回 Tempmail 邮件的接收时间戳。"""
for field_name in ("received_at", "date", "created_at", "createdAt", "timestamp"):
timestamp = self._parse_message_time(message.get(field_name))
if timestamp is not None:
return timestamp
return None
def _save_token_to_db(self, email: str, token: str) -> None:
"""将邮箱 token 持久化到 Setting 表key=tempmail_token:{email}"""
try:
from ..database.session import get_db
from ..database.crud import set_setting
with get_db() as db:
set_setting(db, f"tempmail_token:{email}", token, category="tempmail")
except Exception as e:
logger.warning(f"保存 Tempmail token 到数据库失败: {e}")
def _load_token_from_db(self, email: str) -> Optional[str]:
"""从 Setting 表读取邮箱 token"""
try:
from ..database.session import get_db
from ..database.crud import get_setting
with get_db() as db:
setting = get_setting(db, f"tempmail_token:{email}")
return setting.value if setting else None
except Exception as e:
logger.warning(f"从数据库读取 Tempmail token 失败: {e}")
return None
def create_email(self, config: Dict[str, Any] = None) -> Dict[str, Any]:
"""
创建新的临时邮箱
@@ -107,6 +162,7 @@ class TempmailService(BaseEmailService):
"created_at": time.time(),
}
self._email_cache[email] = email_info
self._save_token_to_db(email, token)
logger.info(f"成功创建 Tempmail.lol 邮箱: {email}")
self.update_status(True)
@@ -134,19 +190,21 @@ class TempmailService(BaseEmailService):
email_id: 邮箱 token如果不提供从缓存中查找
timeout: 超时时间(秒)
pattern: 验证码正则表达式
otp_sent_at: OTP 发送时间戳Tempmail 服务暂不使用此参数)
otp_sent_at: OTP 发送时间戳,只允许使用严格晚于该锚点减去容差后的邮件
Returns:
验证码字符串,如果超时或未找到返回 None
"""
token = email_id
if not token:
# 缓存查找 token
# 先从内存缓存查找,再从数据库查找
if email in self._email_cache:
token = self._email_cache[email].get("token")
else:
logger.warning(f"未找到邮箱 {email} 的 token无法获取验证码")
return None
if not token:
token = self._load_token_from_db(email)
if not token:
logger.warning(f"未找到邮箱 {email} 的 token无法获取验证码")
return None
if not token:
logger.warning(f"邮箱 {email} 没有 token无法获取验证码")
@@ -187,11 +245,21 @@ class TempmailService(BaseEmailService):
if not isinstance(msg, dict):
continue
# 使用 date 作为唯一标识
msg_date = msg.get("date", 0)
if not msg_date or msg_date in seen_ids:
msg_timestamp = self._get_received_timestamp(msg)
if otp_sent_at is not None:
min_allowed_timestamp = otp_sent_at - OTP_SENT_AT_TOLERANCE_SECONDS
if msg_timestamp is None or msg_timestamp <= min_allowed_timestamp:
continue
message_id = str(
msg.get("id")
or msg.get("date")
or msg.get("createdAt")
or f"{msg.get('from', '')}:{msg.get('subject', '')}:{msg_timestamp}"
).strip()
if not message_id or message_id in seen_ids:
continue
seen_ids.add(msg_date)
seen_ids.add(message_id)
sender = str(msg.get("from", "")).lower()
subject = str(msg.get("subject", ""))
@@ -397,4 +465,4 @@ class TempmailService(BaseEmailService):
"email": email,
"message": "等待验证码超时"
})
return None
return None

View File

@@ -15,9 +15,11 @@ from fastapi import FastAPI, Request, Form
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import HTMLResponse, RedirectResponse
from fastapi.responses import FileResponse, HTMLResponse, RedirectResponse
from ..config.settings import get_settings
from ..database import crud
from ..database.session import get_db
from .routes import api_router
from .routes.websocket import router as ws_router
from .task_manager import task_manager
@@ -31,6 +33,11 @@ if getattr(sys, 'frozen', False):
else:
_RESOURCE_ROOT = Path(__file__).parent.parent.parent
if __name__ == "__main__":
from webui import setup_application as _setup_application
_setup_application()
# 静态文件和模板目录
STATIC_DIR = _RESOURCE_ROOT / "static"
TEMPLATES_DIR = _RESOURCE_ROOT / "templates"
@@ -108,8 +115,9 @@ def create_app() -> FastAPI:
async def login_page(request: Request, next: Optional[str] = "/"):
"""登录页面"""
return templates.TemplateResponse(
"login.html",
{"request": request, "error": "", "next": next or "/"}
request=request,
name="login.html",
context={"request": request, "error": "", "next": next or "/"}
)
@app.post("/login")
@@ -118,8 +126,9 @@ def create_app() -> FastAPI:
expected = get_settings().webui_access_password.get_secret_value()
if not secrets.compare_digest(password, expected):
return templates.TemplateResponse(
"login.html",
{"request": request, "error": "密码错误", "next": next or "/"},
request=request,
name="login.html",
context={"request": request, "error": "密码错误", "next": next or "/"},
status_code=401
)
@@ -134,38 +143,48 @@ def create_app() -> FastAPI:
response.delete_cookie("webui_auth")
return response
@app.get("/favicon.ico", include_in_schema=False)
async def favicon_ico():
"""兼容浏览器对根路径 favicon 的默认请求。"""
return FileResponse(STATIC_DIR / "favicon.svg", media_type="image/svg+xml")
@app.get("/favicon.svg", include_in_schema=False)
async def favicon_svg():
"""提供统一的站点图标资源。"""
return FileResponse(STATIC_DIR / "favicon.svg", media_type="image/svg+xml")
@app.get("/", response_class=HTMLResponse)
async def index(request: Request):
"""首页 - 注册页面"""
if not _is_authenticated(request):
return _redirect_to_login(request)
return templates.TemplateResponse("index.html", {"request": request})
return templates.TemplateResponse(request=request, name="index.html", context={"request": request})
@app.get("/accounts", response_class=HTMLResponse)
async def accounts_page(request: Request):
"""账号管理页面"""
if not _is_authenticated(request):
return _redirect_to_login(request)
return templates.TemplateResponse("accounts.html", {"request": request})
return templates.TemplateResponse(request=request, name="accounts.html", context={"request": request})
@app.get("/email-services", response_class=HTMLResponse)
async def email_services_page(request: Request):
"""邮箱服务管理页面"""
if not _is_authenticated(request):
return _redirect_to_login(request)
return templates.TemplateResponse("email_services.html", {"request": request})
return templates.TemplateResponse(request=request, name="email_services.html", context={"request": request})
@app.get("/settings", response_class=HTMLResponse)
async def settings_page(request: Request):
"""设置页面"""
if not _is_authenticated(request):
return _redirect_to_login(request)
return templates.TemplateResponse("settings.html", {"request": request})
return templates.TemplateResponse(request=request, name="settings.html", context={"request": request})
@app.get("/payment", response_class=HTMLResponse)
async def payment_page(request: Request):
"""支付页面"""
return templates.TemplateResponse("payment.html", {"request": request})
return templates.TemplateResponse(request=request, name="payment.html", context={"request": request})
@app.on_event("startup")
async def startup_event():
@@ -183,6 +202,12 @@ def create_app() -> FastAPI:
loop = asyncio.get_event_loop()
task_manager.set_loop(loop)
stale_error = "服务启动时检测到未完成的历史任务,已标记失败,请重新发起。"
with get_db() as db:
stale_tasks = crud.fail_incomplete_registration_tasks(db, stale_error)
if stale_tasks:
logger.warning("已收敛 %s 个僵尸任务: %s", len(stale_tasks), ", ".join(task[:8] for task in stale_tasks))
logger.info("=" * 50)
logger.info(f"{settings.app_name} v{settings.app_version} 启动中...")
logger.info(f"调试模式: {settings.debug}")
@@ -199,3 +224,23 @@ def create_app() -> FastAPI:
# 创建全局应用实例
app = create_app()
if __name__ == "__main__":
import uvicorn
settings = get_settings()
logger.info(
"通过模块入口启动 Web UI: http://%s:%s",
settings.webui_host,
settings.webui_port,
)
uvicorn.run(
app,
host=settings.webui_host,
port=settings.webui_port,
reload=False,
log_level="info" if settings.debug else "warning",
access_log=settings.debug,
ws="websockets",
)

File diff suppressed because it is too large Load Diff

View File

@@ -7,12 +7,33 @@ import asyncio
import logging
from fastapi import APIRouter, WebSocket, WebSocketDisconnect
from ...database import crud
from ...database.session import get_db
from ..task_manager import task_manager
logger = logging.getLogger(__name__)
router = APIRouter()
def _restore_task_snapshot(task_uuid: str) -> tuple[dict, list[str]]:
"""从数据库恢复任务状态和历史日志,解决服务重启后的监控空白。"""
with get_db() as db:
task = crud.get_registration_task(db, task_uuid)
if not task:
return {}, []
status = {"status": task.status}
if task.result and task.result.get("email"):
status["email"] = task.result["email"]
if task.error_message:
status["error"] = task.error_message
logs = task.logs.splitlines() if task.logs else []
task_manager.sync_task_state(task_uuid, status=status, logs=logs)
return status, logs
@router.websocket("/ws/task/{task_uuid}")
async def task_websocket(websocket: WebSocket, task_uuid: str):
"""
@@ -25,14 +46,15 @@ async def task_websocket(websocket: WebSocket, task_uuid: str):
- 客户端发送: {"type": "cancel"} - 取消任务
"""
await websocket.accept()
restored_status, restored_logs = _restore_task_snapshot(task_uuid)
# 注册连接(会记录当前日志数量,避免重复发送历史日志)
task_manager.register_websocket(task_uuid, websocket)
# 注册连接,并取得注册时刻的历史日志快照,避免与后续实时推送串扰
history_logs = task_manager.register_websocket(task_uuid, websocket)
logger.info(f"WebSocket 连接已建立: {task_uuid}")
try:
# 发送当前状态
status = task_manager.get_status(task_uuid)
status = task_manager.get_status(task_uuid) or restored_status
if status:
await websocket.send_json({
"type": "status",
@@ -40,9 +62,8 @@ async def task_websocket(websocket: WebSocket, task_uuid: str):
**status
})
# 发送历史日志(只发送注册时已存在的日志,避免与实时推送重复)
history_logs = task_manager.get_unsent_logs(task_uuid, websocket)
for log in history_logs:
# 发送历史日志。服务重启后 _restore_task_snapshot 会先把数据库快照回填到内存。
for log in history_logs or restored_logs:
await websocket.send_json({
"type": "log",
"task_uuid": task_uuid,
@@ -107,8 +128,8 @@ async def batch_websocket(websocket: WebSocket, batch_id: str):
"""
await websocket.accept()
# 注册连接(会记录当前日志数量,避免重复发送历史日志)
task_manager.register_batch_websocket(batch_id, websocket)
# 注册连接,并取得注册时刻的历史日志快照,避免漏发/重复发送
history_logs = task_manager.register_batch_websocket(batch_id, websocket)
logger.info(f"批量任务 WebSocket 连接已建立: {batch_id}")
try:
@@ -121,8 +142,6 @@ async def batch_websocket(websocket: WebSocket, batch_id: str):
**status
})
# 发送历史日志(只发送注册时已存在的日志,避免与实时推送重复)
history_logs = task_manager.get_unsent_batch_logs(batch_id, websocket)
for log in history_logs:
await websocket.send_json({
"type": "log",

View File

@@ -144,20 +144,22 @@ class TaskManager:
except Exception as e:
logger.warning(f"WebSocket 发送状态失败: {e}")
def register_websocket(self, task_uuid: str, websocket):
"""注册 WebSocket 连接"""
def register_websocket(self, task_uuid: str, websocket) -> List[str]:
"""注册 WebSocket 连接,并返回注册时刻的历史日志快照"""
history_logs: List[str] = []
with _ws_lock:
if task_uuid not in _ws_connections:
_ws_connections[task_uuid] = []
# 避免重复注册同一个连接
if websocket not in _ws_connections[task_uuid]:
_ws_connections[task_uuid].append(websocket)
# 记录已发送的日志数量,用于发送历史日志时避免重复
with _get_log_lock(task_uuid):
_ws_sent_index[task_uuid][id(websocket)] = len(_log_queues.get(task_uuid, []))
history_logs = _log_queues.get(task_uuid, []).copy()
_ws_sent_index[task_uuid][id(websocket)] = len(history_logs)
_ws_connections[task_uuid].append(websocket)
logger.info(f"WebSocket 连接已注册: {task_uuid}")
else:
logger.warning(f"WebSocket 连接已存在,跳过重复注册: {task_uuid}")
return history_logs
def get_unsent_logs(self, task_uuid: str, websocket) -> List[str]:
"""获取未发送给该 WebSocket 的日志"""
@@ -190,6 +192,24 @@ class TaskManager:
with _get_log_lock(task_uuid):
return _log_queues.get(task_uuid, []).copy()
def sync_task_state(
self,
task_uuid: str,
status: Optional[dict] = None,
logs: Optional[List[str]] = None
):
"""将数据库中的任务快照回填到内存态,便于重连恢复。"""
if status:
current_status = _task_status.get(task_uuid, {}).copy()
current_status.update(status)
_task_status[task_uuid] = current_status
if logs is not None:
with _get_log_lock(task_uuid):
cached_logs = _log_queues.get(task_uuid, [])
if len(logs) >= len(cached_logs):
_log_queues[task_uuid] = list(logs)
def update_status(self, task_uuid: str, status: str, **kwargs):
"""更新任务状态"""
if task_uuid not in _task_status:
@@ -198,6 +218,15 @@ class TaskManager:
_task_status[task_uuid]["status"] = status
_task_status[task_uuid].update(kwargs)
if self._loop and self._loop.is_running():
try:
asyncio.run_coroutine_threadsafe(
self.broadcast_status(task_uuid, status, **kwargs),
self._loop
)
except Exception as e:
logger.warning(f"广播任务状态失败: {e}")
def get_status(self, task_uuid: str) -> Optional[dict]:
"""获取任务状态"""
return _task_status.get(task_uuid)
@@ -211,18 +240,25 @@ class TaskManager:
# ============== 批量任务管理 ==============
def init_batch(self, batch_id: str, total: int):
def init_batch(self, batch_id: str, total: int, **kwargs):
"""初始化批量任务"""
_batch_status[batch_id] = {
"status": "running",
"total": total,
"completed": 0,
"success": 0,
"failed": 0,
"skipped": 0,
"current_index": 0,
"finished": False
}
with _get_batch_lock(batch_id):
previous = _batch_status.get(batch_id, {})
status = {
"status": "running",
"total": total,
"completed": 0,
"success": 0,
"failed": 0,
"skipped": previous.get("skipped", 0),
"cancelled": previous.get("cancelled", False),
"current_index": 0,
"finished": False,
}
status.update(previous)
status.update(kwargs)
status["total"] = total
_batch_status[batch_id] = status
logger.info(f"批量任务 {batch_id} 已初始化,总数: {total}")
def add_batch_log(self, batch_id: str, log_message: str):
@@ -266,11 +302,11 @@ class TaskManager:
def update_batch_status(self, batch_id: str, **kwargs):
"""更新批量任务状态"""
if batch_id not in _batch_status:
logger.warning(f"批量任务 {batch_id} 不存在")
return
_batch_status[batch_id].update(kwargs)
with _get_batch_lock(batch_id):
if batch_id not in _batch_status:
logger.warning(f"批量任务 {batch_id} 不存在")
return
_batch_status[batch_id].update(kwargs)
# 异步广播状态更新
if self._loop and self._loop.is_running():
@@ -302,7 +338,9 @@ class TaskManager:
def get_batch_status(self, batch_id: str) -> Optional[dict]:
"""获取批量任务状态"""
return _batch_status.get(batch_id)
with _get_batch_lock(batch_id):
status = _batch_status.get(batch_id)
return status.copy() if status is not None else None
def get_batch_logs(self, batch_id: str) -> List[str]:
"""获取批量任务日志"""
@@ -316,26 +354,29 @@ class TaskManager:
def cancel_batch(self, batch_id: str):
"""取消批量任务"""
if batch_id in _batch_status:
_batch_status[batch_id]["cancelled"] = True
_batch_status[batch_id]["status"] = "cancelling"
logger.info(f"批量任务 {batch_id} 已标记为取消")
with _get_batch_lock(batch_id):
if batch_id in _batch_status:
_batch_status[batch_id]["cancelled"] = True
_batch_status[batch_id]["status"] = "cancelling"
logger.info(f"批量任务 {batch_id} 已标记为取消")
def register_batch_websocket(self, batch_id: str, websocket):
"""注册批量任务 WebSocket 连接"""
def register_batch_websocket(self, batch_id: str, websocket) -> List[str]:
"""注册批量任务 WebSocket 连接,并返回注册时刻的历史日志快照"""
key = f"batch_{batch_id}"
history_logs: List[str] = []
with _ws_lock:
if key not in _ws_connections:
_ws_connections[key] = []
# 避免重复注册同一个连接
if websocket not in _ws_connections[key]:
_ws_connections[key].append(websocket)
# 记录已发送的日志数量,用于发送历史日志时避免重复
with _get_batch_lock(batch_id):
_ws_sent_index[key][id(websocket)] = len(_batch_logs.get(batch_id, []))
history_logs = _batch_logs.get(batch_id, []).copy()
_ws_sent_index[key][id(websocket)] = len(history_logs)
_ws_connections[key].append(websocket)
logger.info(f"批量任务 WebSocket 连接已注册: {batch_id}")
else:
logger.warning(f"批量任务 WebSocket 连接已存在,跳过重复注册: {batch_id}")
return history_logs
def get_unsent_batch_logs(self, batch_id: str, websocket) -> List[str]:
"""获取未发送给该 WebSocket 的批量任务日志"""

5
static/favicon.svg Normal file
View File

@@ -0,0 +1,5 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 64 64">
<rect width="64" height="64" rx="14" fill="#111827"/>
<path d="M18 20h28v6H18zm0 10h20v6H18zm0 10h28v6H18z" fill="#f9fafb"/>
<circle cx="46" cy="33" r="6" fill="#22c55e"/>
</svg>

After

Width:  |  Height:  |  Size: 246 B

View File

@@ -552,6 +552,12 @@ function connectWebSocket(taskUuid) {
const logType = getLogType(data.message);
addLog(logType, data.message);
} else if (data.type === 'status') {
if (data.email) {
elements.taskEmail.textContent = data.email;
}
if (data.email_service) {
elements.taskService.textContent = getServiceTypeText(data.email_service);
}
updateTaskStatus(data.status);
// 检查是否完成
@@ -1306,7 +1312,7 @@ function connectBatchWebSocket(batchId) {
if (shouldPoll && currentBatch) {
console.log('切换到轮询模式');
startOutlookBatchPolling(currentBatch.batch_id);
startCurrentBatchPolling(currentBatch.batch_id);
}
};
@@ -1314,12 +1320,12 @@ function connectBatchWebSocket(batchId) {
console.error('批量任务 WebSocket 错误:', error);
stopBatchWebSocketHeartbeat();
// 切换到轮询
startOutlookBatchPolling(batchId);
startCurrentBatchPolling(batchId);
};
} catch (error) {
console.error('批量任务 WebSocket 连接失败:', error);
startOutlookBatchPolling(batchId);
startCurrentBatchPolling(batchId);
}
}
@@ -1332,6 +1338,15 @@ function disconnectBatchWebSocket() {
}
}
function startCurrentBatchPolling(batchId) {
if (isOutlookBatchMode) {
startOutlookBatchPolling(batchId);
return;
}
startBatchPolling(batchId);
}
// 开始批量任务心跳
function startBatchWebSocketHeartbeat() {
stopBatchWebSocketHeartbeat();

View File

@@ -5,7 +5,7 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>账号管理 - OpenAI 注册系统</title>
<link rel="stylesheet" href="/static/css/style.css?v={{ static_version }}">
<link rel="icon" href="data:image/svg+xml,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 100 100'><text y='.9em' font-size='90'>📋</text></svg>">
<link rel="icon" type="image/svg+xml" href="/static/favicon.svg?v={{ static_version }}">
<style>
.password-cell {
font-family: var(--font-mono);

View File

@@ -5,7 +5,7 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>邮箱服务 - OpenAI 注册系统</title>
<link rel="stylesheet" href="/static/css/style.css?v={{ static_version }}">
<link rel="icon" href="data:image/svg+xml,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 100 100'><text y='.9em' font-size='90'>📧</text></svg>">
<link rel="icon" type="image/svg+xml" href="/static/favicon.svg?v={{ static_version }}">
</head>
<body>
<div class="container">

View File

@@ -5,7 +5,7 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>注册控制台 - OpenAI 注册系统</title>
<link rel="stylesheet" href="/static/css/style.css?v={{ static_version }}">
<link rel="icon" href="data:image/svg+xml,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 100 100'><text y='.9em' font-size='90'>🚀</text></svg>">
<link rel="icon" type="image/svg+xml" href="/static/favicon.svg?v={{ static_version }}">
<style>
/* 两栏布局 */
.two-column-layout {

View File

@@ -5,7 +5,7 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>访问验证 - OpenAI 注册系统</title>
<link rel="stylesheet" href="/static/css/style.css?v={{ static_version }}">
<link rel="icon" href="data:image/svg+xml,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 100 100'><text y='.9em' font-size='90'>🔒</text></svg>">
<link rel="icon" type="image/svg+xml" href="/static/favicon.svg?v={{ static_version }}">
<style>
.login-wrap {
max-width: 420px;

View File

@@ -5,7 +5,7 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>支付升级 - OpenAI 注册系统</title>
<link rel="stylesheet" href="/static/css/style.css?v={{ static_version }}">
<link rel="icon" href="data:image/svg+xml,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 100 100'><text y='.9em' font-size='90'>💳</text></svg>">
<link rel="icon" type="image/svg+xml" href="/static/favicon.svg?v={{ static_version }}">
<style>
.plan-cards {
display: grid;

View File

@@ -5,7 +5,7 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>系统设置 - OpenAI 注册系统</title>
<link rel="stylesheet" href="/static/css/style.css?v={{ static_version }}">
<link rel="icon" href="data:image/svg+xml,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 100 100'><text y='.9em' font-size='90'>⚙️</text></svg>">
<link rel="icon" type="image/svg+xml" href="/static/favicon.svg?v={{ static_version }}">
</head>
<body>
<div class="container">

View File

@@ -0,0 +1,278 @@
import argparse
import asyncio
import json
import sqlite3
import time
import uuid
from pathlib import Path
from typing import Any, Dict, List
import httpx
import websockets
# Canonical error message the service writes onto stale "running" tasks it
# fails over during startup; verify-recovery mode matches against it exactly.
STALE_ERROR = "服务启动时检测到未完成的历史任务,已标记失败,请重新发起。"
def _write_json(path: Path, payload: Dict[str, Any]) -> None:
path.parent.mkdir(parents=True, exist_ok=True)
path.write_text(json.dumps(payload, ensure_ascii=False, indent=2), encoding="utf-8")
def _load_json(path: Path) -> Dict[str, Any]:
return json.loads(path.read_text(encoding="utf-8"))
def _connect_db(db_path: Path) -> sqlite3.Connection:
return sqlite3.connect(db_path, timeout=5)
def _fetchone_dict(conn: sqlite3.Connection, sql: str, params: tuple[Any, ...]) -> Dict[str, Any]:
conn.row_factory = sqlite3.Row
row = conn.execute(sql, params).fetchone()
return dict(row) if row else {}
def _assert(condition: bool, message: str) -> None:
if not condition:
raise AssertionError(message)
def _health_check(client: httpx.Client, report: Dict[str, Any]) -> None:
    """Probe the task-list endpoint and record the outcome under report["health"]."""
    resp = client.get("/api/registration/tasks", params={"page": 1, "page_size": 1})
    health_snapshot = {"status_code": resp.status_code, "body": resp.json()}
    report["health"] = health_snapshot
    _assert(resp.status_code == 200, "健康检查失败")
async def _collect_task_websocket(ws_url: str, task_uuid: str) -> Dict[str, Any]:
    """Subscribe to the task WebSocket and collect messages until a terminal status.

    Listens for at most ~30 seconds of wall clock (each recv capped at 10s) and
    stops early once a status message reports "completed" or "failed". Returns
    the raw messages plus the summary counters asserted by live mode.
    """
    endpoint = f"{ws_url}/api/ws/task/{task_uuid}"
    messages: List[Dict[str, Any]] = []
    started_at = time.time()
    async with websockets.connect(endpoint, open_timeout=10, close_timeout=5) as websocket:
        while time.time() - started_at < 30:
            raw_message = await asyncio.wait_for(websocket.recv(), timeout=10)
            payload = json.loads(raw_message)
            messages.append(payload)
            # Terminal status ends collection early.
            if payload.get("type") == "status" and payload.get("status") in {"completed", "failed"}:
                break
    logs = [message for message in messages if message.get("type") == "log"]
    statuses = [message for message in messages if message.get("type") == "status"]
    return {
        "messages": messages,
        "log_count": len(logs),
        "status_count": len(statuses),
        # NOTE(review): assumes live broadcasts carry a "timestamp" key while
        # replayed history logs do not — confirm against the server's log schema.
        "live_log_count": sum(1 for message in logs if "timestamp" in message),
        "final_status": statuses[-1]["status"] if statuses else None,
    }
def _poll_task_completion(client: httpx.Client, task_uuid: str) -> Dict[str, Any]:
    """Poll the task detail endpoint until it reports a terminal status.

    Returns the task payload once its status is "completed" or "failed";
    raises TimeoutError after roughly 20 seconds of 0.2s-interval polling.
    """
    give_up_at = time.time() + 20
    while time.time() < give_up_at:
        resp = client.get(f"/api/registration/tasks/{task_uuid}")
        resp.raise_for_status()
        body = resp.json()
        if body["status"] in {"completed", "failed"}:
            return body
        time.sleep(0.2)
    raise TimeoutError(f"任务未在预期时间内结束: {task_uuid}")
def _validate_live_database(
    db_path: Path,
    task_uuid: str,
    batch_id: str,
    checks: Dict[str, Any],
    report: Dict[str, Any],
) -> None:
    """Cross-check the live run's persisted side effects directly in SQLite.

    Verifies token-sync flags on the three seeded accounts, the task's terminal
    state and logs, the Outlook refresh-token rotation, the batch counter
    snapshot, and the OTP deep-cooldown progression recorded in the task
    result. Raises AssertionError on the first mismatch; on success appends the
    collected rows under report["database"].
    """
    with _connect_db(db_path) as conn:
        # Fetch every row of interest under one connection before asserting.
        seeded = _fetchone_dict(
            conn,
            "SELECT email, access_token, refresh_token, token_sync_status FROM accounts WHERE email = ?",
            (checks["seeded_account_email"],),
        )
        tokenless = _fetchone_dict(
            conn,
            "SELECT email, access_token, refresh_token, token_sync_status FROM accounts WHERE email = ?",
            (checks["tokenless_account_email"],),
        )
        partial = _fetchone_dict(
            conn,
            "SELECT email, access_token, refresh_token, token_sync_status FROM accounts WHERE email = ?",
            (checks["partial_account_email"],),
        )
        task_row = _fetchone_dict(
            conn,
            "SELECT task_uuid, status, logs, result FROM registration_tasks WHERE task_uuid = ?",
            (task_uuid,),
        )
        outlook_row = _fetchone_dict(
            conn,
            "SELECT config FROM email_services WHERE id = ?",
            (checks["outlook_service_id"],),
        )
    # Account token-sync bookkeeping.
    _assert(seeded.get("token_sync_status") == "pending", "seeded 账号 token_sync_status 异常")
    _assert(tokenless.get("access_token") == "mock-access-token-updated", "tokenless 账号 access_token 未写入")
    _assert(tokenless.get("token_sync_status") == "pending", "tokenless 账号 token_sync_status 异常")
    _assert(partial.get("access_token") == "mock-access-token-partial", "partial 账号 access_token 丢失")
    _assert(partial.get("refresh_token") == "", "partial 账号 refresh_token 未清空")
    _assert(partial.get("token_sync_status") == "pending", "partial 账号 token_sync_status 异常")
    # Task row itself.
    _assert(task_row.get("status") == "completed", "模拟任务数据库状态不是 completed")
    _assert(task_row.get("logs"), "模拟任务日志未落库")
    task_result = json.loads(task_row["result"]) if task_row.get("result") else {}
    outlook_config = json.loads(outlook_row["config"]) if outlook_row.get("config") else {}
    # Locate the rotated Outlook account inside the nested service config.
    second_account = next(
        account for account in outlook_config.get("accounts", [])
        if account.get("email") == checks["outlook_account_email"]
    )
    batch_snapshot = task_result["hardening_checks"]["batch_counter"]["snapshot"]
    backoff_states = task_result["hardening_checks"]["otp_timeout_backoff"]["states"]
    _assert(second_account["refresh_token"] == "new-second", "Outlook refresh_token 未更新")
    # Batch counters: 3 tasks total, 2 success + 1 failure, cleanly finished.
    _assert(batch_snapshot["completed"] == 3, "批量 completed 计数异常")
    _assert(batch_snapshot["success"] == 2, "批量 success 计数异常")
    _assert(batch_snapshot["failed"] == 1, "批量 failed 计数异常")
    _assert(batch_snapshot["status"] == "completed", "批量状态异常")
    _assert(batch_snapshot["finished"] is True, "批量 finished 标记异常")
    # OTP backoff must have escalated to the 1-hour deep cooldown on failure #3.
    _assert(backoff_states[-1]["delay_seconds"] == 3600, "OTP 深度冷却未生效")
    _assert(backoff_states[-1]["failures"] == 3, "OTP 连续失败次数异常")
    report["database"] = {
        "task_uuid": task_uuid,
        "batch_id": batch_id,
        "seeded_account": seeded,
        "tokenless_account": tokenless,
        "partial_account": partial,
        "task_result": task_result,
        "outlook_second_account": second_account,
    }
def run_live_mode(base_url: str, ws_url: str, db_path: Path, report_path: Path) -> None:
    """End-to-end check against a running service instance.

    Flow: health check -> create a tempmail registration task -> collect its
    WebSocket stream until terminal -> poll REST until completion -> validate
    the batch counters through the API -> cross-check persisted side effects
    in SQLite. Writes the accumulated report to *report_path* and echoes it.
    """
    report: Dict[str, Any] = {"mode": "live", "base_url": base_url, "db_path": str(db_path)}
    with httpx.Client(base_url=base_url, timeout=httpx.Timeout(10, read=30)) as client:
        _health_check(client, report)
        create_response = client.post(
            "/api/registration/create",
            json={
                "email_service_type": "tempmail",
                # Delays give the WebSocket subscriber time to attach before logs flow.
                "start_delay_ms": 600,
                "log_delay_ms": 150,
            },
        )
        create_response.raise_for_status()
        created = create_response.json()
        task_uuid = created["task"]["task_uuid"]
        batch_id = created["batch_id"]
        checks = created["checks"]
        report["create"] = created
        ws_report = asyncio.run(_collect_task_websocket(ws_url, task_uuid))
        report["websocket"] = ws_report
        _assert(ws_report["final_status"] == "completed", "WebSocket 未收到 completed 状态")
        _assert(ws_report["log_count"] >= 4, "WebSocket 日志数量不足")
        _assert(ws_report["live_log_count"] >= 1, "未捕获到实时日志广播")
        task_payload = _poll_task_completion(client, task_uuid)
        report["task"] = task_payload
        # Service IDs only become known after the run; fold them into the checks.
        runtime_checks = {
            **checks,
            "outlook_service_id": task_payload["result"]["hardening_checks"]["outlook_refresh"]["service_id"],
            "backoff_service_id": task_payload["result"]["hardening_checks"]["otp_timeout_backoff"]["service_id"],
        }
        batch_response = client.get(f"/api/registration/batch/{batch_id}")
        batch_response.raise_for_status()
        report["batch_api"] = batch_response.json()
        _assert(report["batch_api"]["completed"] == 3, "批量状态 API completed 异常")
        _assert(report["batch_api"]["success"] == 2, "批量状态 API success 异常")
        _assert(report["batch_api"]["failed"] == 1, "批量状态 API failed 异常")
        _assert(report["batch_api"]["finished"] is True, "批量状态 API finished 异常")
    # DB validation needs no HTTP client, so it runs after the session closes.
    _validate_live_database(db_path, task_uuid, batch_id, runtime_checks, report)
    _write_json(report_path, report)
    print(json.dumps(report, ensure_ascii=False, indent=2))
def run_prepare_recovery_mode(db_path: Path, state_path: Path) -> None:
    """Seed a fake stuck 'running' task so a service restart can be verified later.

    Inserts one registration_tasks row with status 'running', records its UUID
    and timestamp into *state_path* for the verify-recovery step, and echoes
    the state payload to stdout.
    """
    stale_uuid = f"stale-{uuid.uuid4()}"
    prepared_at = time.strftime("%Y-%m-%d %H:%M:%S")
    with _connect_db(db_path) as conn:
        conn.execute(
            """
            INSERT INTO registration_tasks (task_uuid, status, logs, created_at, started_at)
            VALUES (?, 'running', '[00:00:00] stale task', ?, ?)
            """,
            (stale_uuid, prepared_at, prepared_at),
        )
        conn.commit()
    state_payload = {
        "stale_task_uuid": stale_uuid,
        "db_path": str(db_path),
        "prepared_at": prepared_at,
    }
    _write_json(state_path, state_payload)
    print(json.dumps(state_payload, ensure_ascii=False, indent=2))
def run_verify_recovery_mode(base_url: str, db_path: Path, state_path: Path, report_path: Path) -> None:
    """Verify that the pre-seeded stale 'running' task was failed over on restart.

    Reads the state file written by prepare-recovery mode, confirms the service
    is healthy, then asserts the stale task row is now failed with the canonical
    STALE_ERROR message, an appended log line, and a completion timestamp.
    """
    state = _load_json(state_path)
    report: Dict[str, Any] = {
        "mode": "verify-recovery",
        "base_url": base_url,
        "db_path": str(db_path),
        "state": state,
    }
    with httpx.Client(base_url=base_url, timeout=httpx.Timeout(10, read=30)) as client:
        _health_check(client, report)
    with _connect_db(db_path) as conn:
        stale_task = _fetchone_dict(
            conn,
            "SELECT task_uuid, status, error_message, logs, completed_at FROM registration_tasks WHERE task_uuid = ?",
            (state["stale_task_uuid"],),
        )
    _assert(stale_task.get("status") == "failed", "僵尸任务未在重启后标记为 failed")
    _assert(stale_task.get("error_message") == STALE_ERROR, "僵尸任务 error_message 不匹配")
    _assert(STALE_ERROR in (stale_task.get("logs") or ""), "僵尸任务日志未追加系统收敛说明")
    _assert(bool(stale_task.get("completed_at")), "僵尸任务 completed_at 缺失")
    report["recovery"] = stale_task
    _write_json(report_path, report)
    print(json.dumps(report, ensure_ascii=False, indent=2))
def main() -> None:
    """CLI entry point: parse arguments and dispatch to the requested mode."""
    parser = argparse.ArgumentParser(description="真实服务功能可用性验证脚本")
    parser.add_argument("--mode", choices=["live", "prepare-recovery", "verify-recovery"], required=True)
    parser.add_argument("--base-url", default="http://127.0.0.1:15555")
    parser.add_argument("--ws-url", default="ws://127.0.0.1:15555")
    parser.add_argument("--db-path", required=True)
    parser.add_argument("--report-path", default="tests_runtime/runtime_functionality_report.json")
    parser.add_argument("--state-path", default="tests_runtime/runtime_recovery_state.json")
    args = parser.parse_args()
    resolved_db = Path(args.db_path).resolve()
    resolved_report = Path(args.report_path).resolve()
    resolved_state = Path(args.state_path).resolve()
    if args.mode == "live":
        run_live_mode(args.base_url, args.ws_url, resolved_db, resolved_report)
    elif args.mode == "prepare-recovery":
        run_prepare_recovery_mode(resolved_db, resolved_state)
    else:
        run_verify_recovery_mode(args.base_url, resolved_db, resolved_state, resolved_report)
# Script entry point: dispatch to the selected verification mode.
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,106 @@
from src.database import crud
from src.database.session import DatabaseSessionManager
def test_create_account_marks_token_sync_pending_when_tokens_persist(tmp_path):
    """Creating an account with both tokens should immediately flag it for sync."""
    db = DatabaseSessionManager(f"sqlite:///{tmp_path}/test.db")
    db.create_tables()
    db.migrate_tables()
    with db.session_scope() as session:
        created = crud.create_account(
            session,
            email="sync@example.com",
            email_service="tempmail",
            access_token="access-token",
            refresh_token="refresh-token",
        )
        assert created.token_sync_status == "pending"
        assert created.token_sync_updated_at is not None
def test_update_account_marks_token_sync_pending_when_tokens_change(tmp_path):
    """A token-less account flips from not_ready to pending once a token is written."""
    db = DatabaseSessionManager(f"sqlite:///{tmp_path}/test.db")
    db.create_tables()
    db.migrate_tables()
    with db.session_scope() as session:
        created = crud.create_account(
            session,
            email="nosync@example.com",
            email_service="tempmail",
        )
        assert created.token_sync_status == "not_ready"
        refreshed = crud.update_account(
            session,
            created.id,
            access_token="new-access-token",
        )
        assert refreshed is not None
        assert refreshed.token_sync_status == "pending"
        assert refreshed.token_sync_updated_at is not None
def test_update_account_preserves_pending_status_when_other_tokens_remain(tmp_path):
    """Clearing one token keeps the account pending while another token remains."""
    db = DatabaseSessionManager(f"sqlite:///{tmp_path}/test.db")
    db.create_tables()
    db.migrate_tables()
    with db.session_scope() as session:
        created = crud.create_account(
            session,
            email="partial-sync@example.com",
            email_service="tempmail",
            access_token="access-token",
            refresh_token="refresh-token",
        )
        refreshed = crud.update_account(
            session,
            created.id,
            refresh_token="",
        )
        assert refreshed is not None
        assert refreshed.access_token == "access-token"
        assert refreshed.refresh_token == ""
        assert refreshed.token_sync_status == "pending"
        assert refreshed.token_sync_updated_at is not None
def test_update_outlook_refresh_token_persists_nested_config_changes(tmp_path):
    """Rotating one nested account's refresh_token must survive a session round trip."""
    db = DatabaseSessionManager(f"sqlite:///{tmp_path}/test.db")
    db.create_tables()
    db.migrate_tables()
    with db.session_scope() as session:
        outlook_service = crud.create_email_service(
            session,
            service_type="outlook",
            name="outlook-service",
            config={
                "accounts": [
                    {"email": "first@example.com", "refresh_token": "old-first"},
                    {"email": "second@example.com", "refresh_token": "old-second"},
                ]
            },
        )
        service_id = outlook_service.id
        crud.update_outlook_refresh_token(
            session,
            service_id=service_id,
            email="second@example.com",
            new_refresh_token="new-second",
        )
    # A fresh session proves the nested JSON mutation was actually flushed.
    with db.session_scope() as session:
        persisted = crud.get_email_service_by_id(session, service_id)
        assert persisted is not None
        accounts = persisted.config["accounts"]
        assert accounts[0]["refresh_token"] == "old-first"
        assert accounts[1]["refresh_token"] == "new-second"

View File

@@ -0,0 +1,86 @@
import asyncio
from contextlib import contextmanager
from types import SimpleNamespace
from src.web.routes import registration as registration_routes
from src.web.task_manager import task_manager
def test_init_batch_state_persists_state_in_task_manager():
    """_init_batch_state should seed the shared task_manager batch snapshot."""
    batch_id = "batch-sync-init"
    uuids = ["task-1", "task-2", "task-3"]
    registration_routes._init_batch_state(batch_id, uuids)
    snapshot = task_manager.get_batch_status(batch_id)
    assert snapshot is not None
    assert snapshot["task_uuids"] == uuids
    assert snapshot["total"] == 3
    assert snapshot["completed"] == 0
    assert snapshot["success"] == 0
    assert snapshot["failed"] == 0
    assert snapshot["finished"] is False
    assert snapshot["status"] == "running"
    assert task_manager.get_batch_logs(batch_id) == []
def test_run_batch_parallel_keeps_counter_updates_in_sync(monkeypatch):
    """run_batch_parallel must tally success/failed/completed from DB task rows.

    Stubs the actual registration coroutine and the DB layer so only the
    counter bookkeeping inside run_batch_parallel is exercised; terminal
    statuses are dictated by the task_statuses table below.
    """
    batch_id = "batch-sync-parallel"
    task_uuids = ["task-ok-1", "task-fail-1", "task-ok-2"]
    # Desired terminal status per task, as later reported by the fake CRUD layer.
    task_statuses = {
        "task-ok-1": "completed",
        "task-fail-1": "failed",
        "task-ok-2": "completed",
    }

    async def fake_run_registration_task(
        task_uuid,
        email_service_type,
        proxy,
        email_service_config,
        email_service_id,
        log_prefix="",
        batch_id="",
        auto_upload_cpa=False,
        cpa_service_ids=None,
        auto_upload_sub2api=False,
        sub2api_service_ids=None,
        auto_upload_tm=False,
        tm_service_ids=None,
    ):
        # No-op worker: only check it is invoked with a known task.
        assert task_uuid in task_statuses

    @contextmanager
    def fake_get_db():
        # Yield a dummy "session"; the fake CRUD below never touches it.
        yield object()

    def fake_get_registration_task(db, task_uuid):
        status = task_statuses[task_uuid]
        error_message = None if status == "completed" else f"{task_uuid}-error"
        return SimpleNamespace(status=status, error_message=error_message)

    monkeypatch.setattr(registration_routes, "run_registration_task", fake_run_registration_task)
    monkeypatch.setattr(registration_routes, "get_db", fake_get_db)
    monkeypatch.setattr(registration_routes.crud, "get_registration_task", fake_get_registration_task)
    asyncio.run(
        registration_routes.run_batch_parallel(
            batch_id=batch_id,
            task_uuids=task_uuids,
            email_service_type="tempmail",
            proxy=None,
            email_service_config=None,
            email_service_id=None,
            concurrency=2,
        )
    )
    # Final snapshot: 3 completed (2 success + 1 failure) and cleanly finished.
    manager_snapshot = task_manager.get_batch_status(batch_id)
    assert manager_snapshot is not None
    assert manager_snapshot["completed"] == 3
    assert manager_snapshot["success"] == 2
    assert manager_snapshot["failed"] == 1
    assert manager_snapshot["finished"] is True
    assert manager_snapshot["status"] == "completed"

View File

@@ -0,0 +1,133 @@
// Unit tests for the batch-monitor WebSocket fallback logic in static/js/app.js.
const test = require('node:test');
const assert = require('node:assert/strict');
const fs = require('node:fs');
const vm = require('node:vm');
// NOTE(review): machine-specific absolute path — this breaks on any other
// checkout/CI; consider deriving it from __dirname relative to the repo root.
const APP_JS_PATH = '/Users/zhoukailian/.config/superpowers/worktrees/codex-manager/repro-batch-monitor/static/js/app.js';
// Minimal DOM-element stand-in: inert properties plus no-op methods, with
// query helpers that keep returning further stubs so chained lookups work.
function createElementStub() {
  const stub = {
    style: {},
    dataset: {},
    value: '',
    checked: false,
    disabled: false,
    innerHTML: '',
    textContent: '',
    className: '',
    appendChild() {},
    addEventListener() {},
    removeEventListener() {},
    querySelector: () => createElementStub(),
    querySelectorAll: () => [],
    closest: () => null,
  };
  return stub;
}
// Build an isolated VM context exposing just enough browser/application
// surface (document, sessionStorage, toast, api, window.location) for app.js
// to load, evaluate app.js inside it, and return the populated sandbox.
function createSandbox() {
  const sandbox = {
    console,
    setTimeout,
    clearTimeout,
    // Recurring timers are stubbed so app.js cannot schedule real work.
    setInterval: () => 1,
    clearInterval: () => {},
    Event: class Event {
      constructor(type) {
        this.type = type;
      }
    },
    document: {
      getElementById() {
        return createElementStub();
      },
      createElement() {
        return createElementStub();
      },
      addEventListener() {},
      querySelector() {
        return createElementStub();
      },
      querySelectorAll() {
        return [];
      },
    },
    sessionStorage: {
      getItem() {
        return null;
      },
      setItem() {},
      removeItem() {},
    },
    toast: {
      info() {},
      success() {},
      warning() {},
      error() {},
    },
    api: {
      // Network access is forbidden in these tests; fail loudly if attempted.
      get() {
        throw new Error('api.get should not be called in this test');
      },
      post() {
        throw new Error('api.post should not be called in this test');
      },
    },
    window: null,
    WebSocket: null,
  };
  // app.js expects `window` to be the global object itself.
  sandbox.window = sandbox;
  sandbox.window.location = { protocol: 'http:', host: '127.0.0.1:8003' };
  vm.createContext(sandbox);
  vm.runInContext(fs.readFileSync(APP_JS_PATH, 'utf8'), sandbox, { filename: 'app.js' });
  return sandbox;
}
// Drive connectBatchWebSocket inside a sandbox whose WebSocket constructor
// fails asynchronously, then report which polling fallback it invoked.
// mode === 'outlook' toggles isOutlookBatchMode; resolves to the recorded
// calls as [[kind, batchId], ...].
async function runFallback(mode) {
  const sandbox = createSandbox();
  vm.runInContext(
    `
var __calls = [];
currentBatch = { batch_id: 'test-batch' };
isOutlookBatchMode = ${mode === 'outlook' ? 'true' : 'false'};
batchCompleted = false;
batchFinalStatus = null;
startOutlookBatchPolling = function(batchId) { __calls.push(['outlook', batchId]); };
startBatchPolling = function(batchId) { __calls.push(['batch', batchId]); };
WebSocket = function(url) {
  this.url = url;
  this.readyState = 0;
  setTimeout(() => {
    if (this.onerror) {
      this.onerror({ type: 'error' });
    }
  }, 0);
};
WebSocket.OPEN = 1;
connectBatchWebSocket('test-batch');
`,
    sandbox,
  );
  // Let the queued onerror callback fire before reading the recorded calls.
  await new Promise((resolve) => setTimeout(resolve, 20));
  return JSON.parse(vm.runInContext('JSON.stringify(__calls)', sandbox));
}
// Regression: a failed batch WebSocket must fall back to the polling mode
// that matches the batch type (standard vs Outlook).
test('normal batch websocket fallback uses standard batch polling', async () => {
  const calls = await runFallback('batch');
  assert.deepEqual(calls, [['batch', 'test-batch']]);
});

test('outlook batch websocket fallback uses outlook polling', async () => {
  const calls = await runFallback('outlook');
  assert.deepEqual(calls, [['outlook', 'test-batch']]);
});

View File

@@ -0,0 +1,103 @@
from src.services.base import (
BaseEmailService,
EmailProviderBackoffState,
EmailServiceType,
OTPTimeoutEmailServiceError,
RateLimitedEmailServiceError,
apply_adaptive_backoff,
calculate_adaptive_backoff_delay,
)
class DummyEmailService(BaseEmailService):
    """Concrete BaseEmailService stub used to exercise backoff bookkeeping only."""

    def __init__(self):
        super().__init__(EmailServiceType.DUCK_MAIL, "dummy")

    def create_email(self, config=None):
        """Not exercised by these tests."""
        raise NotImplementedError

    def get_verification_code(
        self,
        email,
        email_id=None,
        timeout=120,
        pattern=r"(?<!\d)(\d{6})(?!\d)",
        otp_sent_at=None,
    ):
        """Not exercised by these tests."""
        raise NotImplementedError

    def list_emails(self, **kwargs):
        return []

    def delete_email(self, email_id: str) -> bool:
        return False

    def check_health(self) -> bool:
        return True
def test_calculate_adaptive_backoff_delay_uses_failure_count_progression():
    """Delay ladder: 30s for 0-1 failures, then doubling per additional failure."""
    expected_by_failures = {0: 30, 1: 30, 2: 60, 3: 120}
    for failures, expected_delay in expected_by_failures.items():
        assert calculate_adaptive_backoff_delay(failures) == expected_delay
def test_apply_adaptive_backoff_tracks_timeout_failures_to_one_hour():
    """Three consecutive OTP timeouts escalate 30s -> 60s -> 3600s deep cooldown."""
    state = EmailProviderBackoffState()
    first = apply_adaptive_backoff(
        state,
        OTPTimeoutEmailServiceError("等待验证码超时", error_code="OTP_TIMEOUT_SECONDARY"),
        now=1000.0,
    )
    second = apply_adaptive_backoff(
        first,
        OTPTimeoutEmailServiceError("等待验证码超时", error_code="OTP_TIMEOUT_SECONDARY"),
        now=1031.0,
    )
    third = apply_adaptive_backoff(
        second,
        OTPTimeoutEmailServiceError("等待验证码超时", error_code="OTP_TIMEOUT_SECONDARY"),
        now=1092.0,
    )
    # First timeout: standard 30s window (1000 + 30).
    assert first.failures == 1
    assert first.delay_seconds == 30
    assert first.opened_until == 1030.0
    # Second timeout: doubled to 60s (1031 + 60).
    assert second.failures == 2
    assert second.delay_seconds == 60
    assert second.opened_until == 1091.0
    # Third timeout: jumps to the one-hour deep cooldown (1092 + 3600).
    assert third.failures == 3
    assert third.delay_seconds == 3600
    assert third.opened_until == 4692.0
def test_apply_adaptive_backoff_keeps_normal_rate_limit_on_exponential_curve():
    """A 429 after two prior failures lands on the 120s step and keeps retry_after."""
    prior = EmailProviderBackoffState(failures=2, delay_seconds=60, opened_until=1060.0)
    rate_limit_error = RateLimitedEmailServiceError("请求失败: 429", retry_after=7)
    updated = apply_adaptive_backoff(prior, rate_limit_error, now=1100.0)
    assert updated.failures == 3
    assert updated.delay_seconds == 120
    assert updated.opened_until == 1220.0
    assert updated.retry_after == 7
def test_update_status_resets_provider_backoff_after_success():
    """One failure opens a backoff window; a subsequent success clears it entirely."""
    service = DummyEmailService()
    service.update_status(False, RateLimitedEmailServiceError("请求失败: 429"))
    opened_state = service.provider_backoff_state
    assert opened_state.failures == 1
    assert opened_state.delay_seconds == 30
    service.update_status(True)
    assert service.provider_backoff_state == EmailProviderBackoffState()

View File

@@ -0,0 +1,82 @@
import json
from types import SimpleNamespace
import src.core.register as register_module
from src.config.constants import OPENAI_PAGE_TYPES
from src.core.register import RegistrationEngine
from src.services import EmailServiceType
class DummySettings:
    # Stand-in for get_settings(): only the OAuth fields RegistrationEngine reads.
    openai_client_id = "client-id"
    openai_auth_url = "https://auth.example.test"
    openai_token_url = "https://token.example.test"
    openai_redirect_uri = "https://callback.example.test"
    openai_scope = "openid profile email"
class FakeResponse:
    """Minimal stand-in for an HTTP response: canned status, JSON payload, text."""

    def __init__(self, status_code=200, payload=None, text=""):
        self.status_code = status_code
        self.text = text
        # Falsy payloads (including None) collapse to an empty dict, as callers expect.
        self._payload = payload or {}

    def json(self):
        """Return the canned JSON payload."""
        return self._payload
class FakeSession:
    """Records every post() call and always replies with one canned response."""

    def __init__(self, response):
        self.response = response
        self.calls = []

    def post(self, url, **kwargs):
        """Capture the request (url plus all keyword args) and return the canned response."""
        call_record = {"url": url, **kwargs}
        self.calls.append(call_record)
        return self.response
def _build_engine(monkeypatch):
    """Build a RegistrationEngine with settings stubbed and a minimal email service."""
    monkeypatch.setattr(register_module, "get_settings", lambda: DummySettings())
    # Only service_type is read by the engine paths under test.
    email_service = SimpleNamespace(service_type=EmailServiceType.DUCK_MAIL)
    return RegistrationEngine(email_service=email_service)
def test_submit_signup_form_uses_stable_protocol_body(monkeypatch):
    """The signup POST body must stay byte-stable for protocol compatibility."""
    engine = _build_engine(monkeypatch)
    fake_session = FakeSession(FakeResponse(
        status_code=200,
        payload={"page": {"type": OPENAI_PAGE_TYPES["PASSWORD_REGISTRATION"]}},
    ))
    engine.session = fake_session
    engine.email = "tester@example.com"
    outcome = engine._submit_signup_form("did-1", None)
    assert outcome.success is True
    assert outcome.is_existing_account is False
    expected_body = '{"username":{"value":"tester@example.com","kind":"email"},"screen_hint":"signup"}'
    assert fake_session.calls[0]["data"] == expected_body
def test_register_password_uses_stable_protocol_body(monkeypatch):
    """The password-registration POST body must stay byte-stable."""
    engine = _build_engine(monkeypatch)
    fake_session = FakeSession(FakeResponse(status_code=200))
    engine.session = fake_session
    engine.email = "tester@example.com"
    monkeypatch.setattr(engine, "_generate_password", lambda length=0: "Pass12345")
    success, password = engine._register_password()
    assert success is True
    assert password == "Pass12345"
    expected_body = json.dumps({"password": "Pass12345", "username": "tester@example.com"})
    assert fake_session.calls[0]["data"] == expected_body

View File

@@ -0,0 +1,540 @@
from contextlib import contextmanager
from pathlib import Path
import threading
from types import SimpleNamespace
import src.services.base as base_module
from src.core.register import (
ERROR_OTP_TIMEOUT_SECONDARY,
PhaseResult,
RegistrationResult,
)
from src.database.models import Base, EmailService, RegistrationTask
from src.database.session import DatabaseSessionManager
from src.services import EmailServiceType
from src.services.base import BaseEmailService, EmailProviderBackoffState
from src.web.routes import registration as registration_routes
class DummyTaskManager:
    """In-memory task_manager double that records status updates and log lines."""

    def __init__(self):
        self.status_updates = []
        self.logs = {}

    def is_cancelled(self, task_uuid):
        # Tests never cancel, so always report "not cancelled".
        return False

    def update_status(self, task_uuid, status, email=None, error=None, **kwargs):
        """Record the full status-update call for later assertions."""
        self.status_updates.append((task_uuid, status, email, error, kwargs))

    def create_log_callback(self, task_uuid, prefix="", batch_id=""):
        """Return a closure that appends messages under this task's log bucket."""
        def _record(message):
            bucket = self.logs.setdefault(task_uuid, [])
            bucket.append(message)
        return _record
class BackoffAwareEmailService(BaseEmailService):
    """Canned BaseEmailService double: fixed create result, no OTP, always healthy."""

    def __init__(self, service_type, config=None, name=None):
        super().__init__(service_type=service_type, name=name)
        self.config = config or {}

    def create_email(self, config=None):
        """Always 'create' the same address/service pair."""
        return {"email": "tester@example.com", "service_id": "svc-1"}

    def get_verification_code(self, **kwargs):
        """Never yields a verification code."""
        return None

    def list_emails(self, **kwargs):
        return []

    def delete_email(self, email_id: str) -> bool:
        return True

    def check_health(self) -> bool:
        return True
def test_registration_task_fails_over_after_rate_limit(monkeypatch):
    """A 429 from the primary mail provider must trigger failover to the secondary.

    Seeds two duck_mail services (priority 0 and 1), installs a scripted engine
    whose first run fails with EMAIL_PROVIDER_RATE_LIMITED and whose second run
    succeeds, then asserts: both providers were attempted in priority order,
    the task completed on the secondary, and the primary's circuit breaker
    recorded one failure with a 30s window.
    """
    runtime_dir = Path("tests_runtime")
    runtime_dir.mkdir(exist_ok=True)
    db_path = runtime_dir / "registration_failover.db"
    if db_path.exists():
        db_path.unlink()
    manager = DatabaseSessionManager(f"sqlite:///{db_path}")
    Base.metadata.create_all(bind=manager.engine)
    task_uuid = "task-rate-limit-failover"
    with manager.session_scope() as session:
        session.add(RegistrationTask(task_uuid=task_uuid, status="pending"))
        # Two enabled providers; failover should walk them in priority order.
        session.add_all([
            EmailService(
                service_type="duck_mail",
                name="duck-primary",
                config={
                    "base_url": "https://mail-1.example.test",
                    "default_domain": "mail.example.test",
                },
                enabled=True,
                priority=0,
            ),
            EmailService(
                service_type="duck_mail",
                name="duck-secondary",
                config={
                    "base_url": "https://mail-2.example.test",
                    "default_domain": "mail.example.test",
                },
                enabled=True,
                priority=1,
            ),
        ])

    @contextmanager
    def fake_get_db():
        # Hand out real sessions bound to the on-disk test database.
        session = manager.SessionLocal()
        try:
            yield session
        finally:
            session.close()

    class DummySettings:
        pass

    # Records the provider name for each engine run, in order.
    attempts = []

    class FakeRegistrationEngine:
        # Scripted engine: primary -> rate-limited failure, secondary -> success.
        def __init__(self, email_service, proxy_url=None, callback_logger=None, task_uuid=None):
            self.email_service = email_service
            self.phase_history = []

        def run(self):
            attempts.append(self.email_service.name)
            if self.email_service.name == "duck-primary":
                # Retryable rate-limit failure with switch_provider guidance.
                self.phase_history = [
                    PhaseResult(
                        phase="email_prepare",
                        success=False,
                        error_message="创建邮箱失败",
                        error_code="EMAIL_PROVIDER_RATE_LIMITED",
                        retryable=True,
                        next_action="switch_provider",
                        provider_backoff=EmailProviderBackoffState(
                            failures=1,
                            delay_seconds=30,
                            opened_until=1030.0,
                            retry_after=7,
                            last_error="请求失败: 429",
                        ),
                    )
                ]
                return RegistrationResult(
                    success=False,
                    error_message="创建邮箱失败: 请求失败: 429",
                    logs=[],
                )
            # Secondary provider succeeds with a fully-populated result.
            self.phase_history = [
                PhaseResult(
                    phase="email_prepare",
                    success=True,
                    provider_backoff=EmailProviderBackoffState(),
                )
            ]
            return RegistrationResult(
                success=True,
                email="tester@example.com",
                password="Pass12345",
                account_id="acct-1",
                workspace_id="ws-1",
                access_token="access-token",
                refresh_token="refresh-token",
                id_token="id-token",
                logs=[],
            )

        def save_to_database(self, result):
            return True

        def close(self):
            return None

    monkeypatch.setattr(registration_routes, "get_db", fake_get_db)
    monkeypatch.setattr(registration_routes, "get_settings", lambda: DummySettings())
    monkeypatch.setattr(registration_routes, "task_manager", DummyTaskManager())
    monkeypatch.setattr(registration_routes, "RegistrationEngine", FakeRegistrationEngine)
    monkeypatch.setattr(
        registration_routes.EmailServiceFactory,
        "create",
        lambda service_type, config, name=None: SimpleNamespace(
            service_type=service_type,
            name=name or service_type.value,
            config=config,
        ),
    )
    monkeypatch.setattr(registration_routes, "update_proxy_usage", lambda db, proxy_id: None)
    # Start from a clean breaker table so the post-run state is unambiguous.
    registration_routes.email_service_circuit_breakers.clear()
    registration_routes._run_sync_registration_task(
        task_uuid=task_uuid,
        email_service_type=EmailServiceType.DUCK_MAIL.value,
        proxy=None,
        email_service_config=None,
    )
    with manager.session_scope() as session:
        task = session.query(RegistrationTask).filter(RegistrationTask.task_uuid == task_uuid).first()
        services = session.query(EmailService).order_by(EmailService.priority.asc()).all()
        # Capture plain values before the session closes and objects detach.
        task_status = task.status
        task_email_service_id = task.email_service_id
        primary_service_id = services[0].id
        secondary_service_id = services[1].id
    assert attempts == ["duck-primary", "duck-secondary"]
    assert task_status == "completed"
    assert task_email_service_id == secondary_service_id
    assert registration_routes.email_service_circuit_breakers[primary_service_id].failures == 1
    assert registration_routes.email_service_circuit_breakers[primary_service_id].delay_seconds == 30
def test_registration_task_enters_deep_cooldown_after_three_otp_timeouts(monkeypatch):
    """Three consecutive OTP timeouts against the same provider must escalate
    the circuit-breaker delay through 30s -> 60s -> 3600s (deep cooldown).
    Time is driven by a fake clock so each attempt starts after the previous
    cooldown window has elapsed.
    """
    runtime_dir = Path("tests_runtime")
    runtime_dir.mkdir(exist_ok=True)
    db_path = runtime_dir / "registration_otp_timeout_backoff.db"
    if db_path.exists():
        db_path.unlink()
    manager = DatabaseSessionManager(f"sqlite:///{db_path}")
    Base.metadata.create_all(bind=manager.engine)
    task_uuids = [
        "task-otp-timeout-1",
        "task-otp-timeout-2",
        "task-otp-timeout-3",
    ]
    with manager.session_scope() as session:
        session.add_all([RegistrationTask(task_uuid=task_uuid, status="pending") for task_uuid in task_uuids])
        session.add(
            EmailService(
                service_type="duck_mail",
                name="duck-primary",
                config={
                    "base_url": "https://mail-1.example.test",
                    "default_domain": "mail.example.test",
                },
                enabled=True,
                priority=0,
            )
        )
    @contextmanager
    def fake_get_db():
        session = manager.SessionLocal()
        try:
            yield session
        finally:
            session.close()
    class DummySettings:
        pass
    # Mutable fake clock; advanced between attempts to hop past each cooldown.
    current_time = {"value": 1000.0}
    class FakeRegistrationEngine:
        def __init__(self, email_service, proxy_url=None, callback_logger=None, task_uuid=None):
            self.email_service = email_service
            self.phase_history = []
        def run(self):
            # Email preparation succeeds, but the overall run fails with the
            # dedicated secondary-OTP-timeout error code.
            self.phase_history = [
                PhaseResult(
                    phase="email_prepare",
                    success=True,
                    provider_backoff=EmailProviderBackoffState(),
                )
            ]
            return RegistrationResult(
                success=False,
                error_message="等待验证码超时",
                error_code=ERROR_OTP_TIMEOUT_SECONDARY,
                logs=[],
            )
        def save_to_database(self, result):
            return True
        def close(self):
            return None
    monkeypatch.setattr(registration_routes, "get_db", fake_get_db)
    monkeypatch.setattr(registration_routes, "get_settings", lambda: DummySettings())
    monkeypatch.setattr(registration_routes, "task_manager", DummyTaskManager())
    monkeypatch.setattr(registration_routes, "RegistrationEngine", FakeRegistrationEngine)
    monkeypatch.setattr(
        registration_routes.EmailServiceFactory,
        "create",
        lambda service_type, config, name=None: BackoffAwareEmailService(
            service_type=service_type,
            config=config,
            name=name,
        ),
    )
    monkeypatch.setattr(registration_routes, "update_proxy_usage", lambda db, proxy_id: None)
    monkeypatch.setattr(base_module.time, "time", lambda: current_time["value"])
    registration_routes.email_service_circuit_breakers.clear()
    with manager.session_scope() as session:
        service_id = session.query(EmailService.id).filter(EmailService.name == "duck-primary").scalar()
    expected_delays = [30, 60, 3600]
    for attempt_index, task_uuid in enumerate(task_uuids, start=1):
        registration_routes._run_sync_registration_task(
            task_uuid=task_uuid,
            email_service_type=EmailServiceType.DUCK_MAIL.value,
            proxy=None,
            email_service_config=None,
        )
        with manager.session_scope() as session:
            task = session.query(RegistrationTask).filter(RegistrationTask.task_uuid == task_uuid).first()
            assert task.status == "failed"
            assert task.error_message == "等待验证码超时"
        state = registration_routes.email_service_circuit_breakers[service_id]
        assert state.failures == attempt_index
        assert state.delay_seconds == expected_delays[attempt_index - 1]
        assert state.opened_until == current_time["value"] + expected_delays[attempt_index - 1]
        if attempt_index < len(task_uuids):
            # Jump the fake clock just past the cooldown so the next attempt runs.
            current_time["value"] = state.opened_until + 1
    final_state = registration_routes.email_service_circuit_breakers[service_id]
    assert final_state.delay_seconds == 3600
    assert final_state.failures == 3
def test_registration_task_success_clears_email_service_backoff(monkeypatch):
    """A successful registration must remove any pre-existing circuit-breaker
    backoff entry for the email provider that was used.
    """
    runtime_dir = Path("tests_runtime")
    runtime_dir.mkdir(exist_ok=True)
    db_path = runtime_dir / "registration_success_clears_backoff.db"
    if db_path.exists():
        db_path.unlink()
    manager = DatabaseSessionManager(f"sqlite:///{db_path}")
    Base.metadata.create_all(bind=manager.engine)
    task_uuid = "task-success-clears-backoff"
    with manager.session_scope() as session:
        session.add(RegistrationTask(task_uuid=task_uuid, status="pending"))
        session.add(
            EmailService(
                service_type="duck_mail",
                name="duck-primary",
                config={
                    "base_url": "https://mail-1.example.test",
                    "default_domain": "mail.example.test",
                },
                enabled=True,
                priority=0,
            )
        )
    @contextmanager
    def fake_get_db():
        session = manager.SessionLocal()
        try:
            yield session
        finally:
            session.close()
    class DummySettings:
        pass
    class FakeRegistrationEngine:
        def __init__(self, email_service, proxy_url=None, callback_logger=None, task_uuid=None):
            self.email_service = email_service
            # Email prepare already succeeded; run() returns a full success.
            self.phase_history = [
                PhaseResult(
                    phase="email_prepare",
                    success=True,
                    provider_backoff=EmailProviderBackoffState(),
                )
            ]
        def run(self):
            return RegistrationResult(
                success=True,
                email="tester@example.com",
                password="Pass12345",
                account_id="acct-1",
                workspace_id="ws-1",
                access_token="access-token",
                refresh_token="refresh-token",
                id_token="id-token",
                logs=[],
            )
        def save_to_database(self, result):
            return True
        def close(self):
            return None
    monkeypatch.setattr(registration_routes, "get_db", fake_get_db)
    monkeypatch.setattr(registration_routes, "get_settings", lambda: DummySettings())
    monkeypatch.setattr(registration_routes, "task_manager", DummyTaskManager())
    monkeypatch.setattr(registration_routes, "RegistrationEngine", FakeRegistrationEngine)
    monkeypatch.setattr(
        registration_routes.EmailServiceFactory,
        "create",
        lambda service_type, config, name=None: BackoffAwareEmailService(
            service_type=service_type,
            config=config,
            name=name,
        ),
    )
    monkeypatch.setattr(registration_routes, "update_proxy_usage", lambda db, proxy_id: None)
    registration_routes.email_service_circuit_breakers.clear()
    with manager.session_scope() as session:
        service_id = session.query(EmailService.id).filter(EmailService.name == "duck-primary").scalar()
    # Pre-seed a tripped breaker for this provider; success must clear it.
    registration_routes.email_service_circuit_breakers[service_id] = EmailProviderBackoffState(
        failures=2,
        delay_seconds=60,
        opened_until=9999.0,
        last_error="等待验证码超时",
    )
    registration_routes._run_sync_registration_task(
        task_uuid=task_uuid,
        email_service_type=EmailServiceType.DUCK_MAIL.value,
        proxy=None,
        email_service_config=None,
    )
    assert service_id not in registration_routes.email_service_circuit_breakers
def test_registration_task_backoff_failures_do_not_get_lost_under_concurrency(monkeypatch):
    """Two tasks failing concurrently against the same provider must both be
    counted: the final breaker state reflects two failures (delay 60s),
    i.e. neither update overwrote the other.
    """
    runtime_dir = Path("tests_runtime")
    runtime_dir.mkdir(exist_ok=True)
    db_path = runtime_dir / "registration_backoff_concurrency.db"
    if db_path.exists():
        db_path.unlink()
    manager = DatabaseSessionManager(f"sqlite:///{db_path}")
    Base.metadata.create_all(bind=manager.engine)
    task_uuids = ["task-backoff-1", "task-backoff-2"]
    with manager.session_scope() as session:
        for task_uuid in task_uuids:
            session.add(RegistrationTask(task_uuid=task_uuid, status="pending"))
        session.add(
            EmailService(
                service_type="duck_mail",
                name="duck-primary",
                config={
                    "base_url": "https://mail-1.example.test",
                    "default_domain": "mail.example.test",
                },
                enabled=True,
                priority=0,
            )
        )
    @contextmanager
    def fake_get_db():
        session = manager.SessionLocal()
        try:
            yield session
        finally:
            session.close()
    class DummySettings:
        pass
    # Rendezvous primitives so both engine runs overlap in time.
    start_lock = threading.Lock()
    started = {"count": 0}
    peer_started = threading.Event()
    class FakeRegistrationEngine:
        def __init__(self, email_service, proxy_url=None, callback_logger=None, task_uuid=None):
            self.email_service = email_service
            self.phase_history = []
        def run(self):
            with start_lock:
                started["count"] += 1
                if started["count"] == len(task_uuids):
                    peer_started.set()
            # Wait briefly for the peer thread so both runs see a live race.
            peer_started.wait(timeout=0.1)
            # Compute the next failure count from the state visible to this
            # engine; the route must merge these increments, not clobber them.
            current_state = self.email_service.provider_backoff_state
            next_failures = current_state.failures + 1
            delay_seconds = 30 if next_failures == 1 else 60
            self.phase_history = [
                PhaseResult(
                    phase="email_prepare",
                    success=False,
                    error_message="创建邮箱失败",
                    error_code="EMAIL_PROVIDER_RATE_LIMITED",
                    retryable=True,
                    next_action="switch_provider",
                    provider_backoff=EmailProviderBackoffState(
                        failures=next_failures,
                        delay_seconds=delay_seconds,
                        opened_until=1000.0 + delay_seconds,
                        last_error="请求失败: 429",
                    ),
                )
            ]
            return RegistrationResult(
                success=False,
                error_message="创建邮箱失败: 请求失败: 429",
                logs=[],
            )
        def save_to_database(self, result):
            return True
        def close(self):
            return None
    monkeypatch.setattr(registration_routes, "get_db", fake_get_db)
    monkeypatch.setattr(registration_routes, "get_settings", lambda: DummySettings())
    monkeypatch.setattr(registration_routes, "task_manager", DummyTaskManager())
    monkeypatch.setattr(registration_routes, "RegistrationEngine", FakeRegistrationEngine)
    monkeypatch.setattr(
        registration_routes.EmailServiceFactory,
        "create",
        lambda service_type, config, name=None: BackoffAwareEmailService(
            service_type=service_type,
            config=config,
            name=name,
        ),
    )
    registration_routes.email_service_circuit_breakers.clear()
    with manager.session_scope() as session:
        service_id = session.query(EmailService.id).filter(EmailService.name == "duck-primary").scalar()
    threads = [
        threading.Thread(
            target=registration_routes._run_sync_registration_task,
            kwargs={
                "task_uuid": task_uuid,
                "email_service_type": EmailServiceType.DUCK_MAIL.value,
                "proxy": None,
                "email_service_config": None,
            },
        )
        for task_uuid in task_uuids
    ]
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()
    state = registration_routes.email_service_circuit_breakers[service_id]
    assert state.failures == 2
    assert state.delay_seconds == 60

View File

@@ -0,0 +1,174 @@
import src.core.register as register_module
from src.core.register import (
ERROR_OTP_TIMEOUT_SECONDARY,
PhaseContext,
RegistrationEngine,
)
from src.services import EmailServiceType
class DummySettings:
    """Static stand-in for the application settings that RegistrationEngine
    reads; provides only the OpenAI OAuth-related fields.
    """
    openai_client_id = "client-id"
    openai_auth_url = "https://auth.example.test"
    openai_token_url = "https://token.example.test"
    openai_redirect_uri = "https://callback.example.test"
    openai_scope = "openid profile email"
class FakeEmailService:
    """Email-service double: always answers with a fixed verification code
    and records the keyword arguments of every lookup for later assertions.
    """

    def __init__(self, code):
        self.service_type = EmailServiceType.TEMPMAIL
        self.calls = []
        self.code = code

    def get_verification_code(self, **kwargs):
        """Log the request kwargs and hand back the preconfigured code."""
        self.calls.append(kwargs)
        return self.code
class FakeCookies:
    """Dict-backed cookie jar exposing the requests-style ``get`` accessor."""

    def __init__(self, values):
        self.values = values

    def get(self, name):
        """Return the cookie value for *name*, or ``None`` when absent."""
        if name in self.values:
            return self.values[name]
        return None
class FakeSession:
    """Session double whose ``get`` always fails — used to assert that a code
    path performs no network requests. Every attempted call is recorded.
    """

    def __init__(self, cookies=None):
        self.cookies = FakeCookies(cookies or {})
        self.get_calls = []

    def get(self, *args, **kwargs):
        """Record the attempted request, then fail loudly."""
        self.get_calls.append((args, kwargs))
        raise AssertionError("unexpected network call")
class FakeResponse:
    """HTTP-response stub. ``json()`` returns the canned payload, or raises it
    when the payload itself is an :class:`Exception` instance (simulating a
    body that fails to parse).
    """

    def __init__(self, *, url="", text="", json_payload=None):
        self.url = url
        self.text = text
        self._json_payload = json_payload

    def json(self):
        """Return the canned payload, re-raising it if it is an exception."""
        payload = self._json_payload
        if isinstance(payload, Exception):
            raise payload
        return payload
def _build_engine(monkeypatch, email_service):
    """Construct a RegistrationEngine with settings patched to DummySettings."""
    monkeypatch.setattr(register_module, "get_settings", lambda: DummySettings())
    return RegistrationEngine(email_service=email_service)
def test_phase_otp_secondary_uses_remaining_budget_from_start_timestamp(monkeypatch):
    """The secondary OTP phase should pass the remaining time budget
    (relative to *started_at*, with the clock frozen at 120.0) and the
    original otp_sent_at anchor through to the email service.
    """
    email_service = FakeEmailService(code="654321")
    engine = _build_engine(monkeypatch, email_service)
    engine.email = "tester@example.com"
    engine.email_info = {"service_id": "svc-1"}
    monkeypatch.setattr(register_module.time, "time", lambda: 120.0)
    code, phase_result = engine._phase_otp_secondary(
        PhaseContext(otp_sent_at=77.0),
        started_at=100.0,
    )
    assert code == "654321"
    assert phase_result.success is True
    # Remaining budget handed to the provider; anchor preserved unchanged.
    assert email_service.calls[0]["timeout"] == 100
    assert email_service.calls[0]["otp_sent_at"] == 77.0
    assert email_service.calls[0]["email"] == "tester@example.com"
    assert email_service.calls[0]["email_id"] == "svc-1"
def test_phase_otp_secondary_returns_dedicated_timeout_error_code(monkeypatch):
    """When the provider never yields a code, the phase must fail with the
    dedicated ERROR_OTP_TIMEOUT_SECONDARY code (also stored in phase_history).
    """
    email_service = FakeEmailService(code=None)
    engine = _build_engine(monkeypatch, email_service)
    engine.email = "tester@example.com"
    engine.email_info = {"service_id": "svc-1"}
    monkeypatch.setattr(register_module.time, "time", lambda: 120.0)
    code, phase_result = engine._phase_otp_secondary(
        PhaseContext(otp_sent_at=80.0),
        started_at=100.0,
    )
    assert code is None
    assert phase_result.success is False
    assert phase_result.error_code == ERROR_OTP_TIMEOUT_SECONDARY
    assert engine.phase_history[0].error_code == ERROR_OTP_TIMEOUT_SECONDARY
def test_advance_login_authorization_sets_otp_anchor_before_password_submit(monkeypatch):
    """The login flow must refresh the OTP time anchor (_otp_sent_at) to the
    current clock BEFORE submitting the password step, so that both the
    password submit and the later code fetch observe the new anchor.
    """
    email_service = FakeEmailService(code=None)
    engine = _build_engine(monkeypatch, email_service)
    engine.oauth_start = object()
    engine._otp_sent_at = 10.0  # stale anchor that must be replaced
    monkeypatch.setattr(register_module.time, "time", lambda: 456.0)
    monkeypatch.setattr(engine, "_init_session", lambda: True)
    monkeypatch.setattr(engine, "_start_oauth", lambda: True)
    monkeypatch.setattr(engine, "_get_device_id", lambda: True)
    monkeypatch.setattr(engine, "_try_reenter_login_flow", lambda: True)
    seen_anchors = []  # anchor values observed at each patched step
    def fake_submit_login_password_step():
        seen_anchors.append(engine._otp_sent_at)
        return True
    monkeypatch.setattr(engine, "_submit_login_password_step", fake_submit_login_password_step)
    def fake_get_verification_code():
        seen_anchors.append(engine._otp_sent_at)
        return None
    monkeypatch.setattr(engine, "_get_verification_code", fake_get_verification_code)
    workspace_id, callback_url = engine._advance_login_authorization()
    assert workspace_id is None
    assert callback_url is None
    assert engine._otp_sent_at == 456.0
    assert seen_anchors == [456.0, 456.0]
def test_get_device_id_reuses_existing_cookie_without_extra_request(monkeypatch):
    """If the session already carries an ``oai-did`` cookie, _get_device_id
    must reuse it and issue no HTTP request (FakeSession.get would raise).
    """
    email_service = FakeEmailService(code=None)
    engine = _build_engine(monkeypatch, email_service)
    engine.oauth_start = type("OAuthStart", (), {"auth_url": "https://auth.example.test/authorize"})()
    engine.session = FakeSession(cookies={"oai-did": "did-cached"})
    assert engine._get_device_id() == "did-cached"
    assert engine.session.get_calls == []
def test_extract_workspace_id_from_response_payload(monkeypatch):
    """The JSON body's workspace id should win over the one embedded in the
    response URL query string.
    """
    email_service = FakeEmailService(code=None)
    engine = _build_engine(monkeypatch, email_service)
    response = FakeResponse(
        url="https://auth.example.test/consent?workspace_id=ws-url",
        json_payload={
            "page": {
                "workspace": {
                    "id": "ws-json",
                }
            }
        },
    )
    assert engine._extract_workspace_id_from_response(response=response) == "ws-json"
def test_extract_workspace_id_from_response_text_when_hidden_input_missing(monkeypatch):
    """When the body is not JSON, the workspace id should be scraped from the
    inline __NEXT_DATA__ script payload in the response text.
    """
    email_service = FakeEmailService(code=None)
    engine = _build_engine(monkeypatch, email_service)
    response = FakeResponse(
        url="https://auth.example.test/consent",
        text='<script>window.__NEXT_DATA__={"activeWorkspaceId":"ws-script"}</script>',
        json_payload=ValueError("not json"),
    )
    assert engine._extract_workspace_id_from_response(response=response) == "ws-script"

View File

@@ -0,0 +1,109 @@
from types import SimpleNamespace
from src.database import crud
from src.database.session import DatabaseSessionManager
from src.web.routes import registration
from src.core.register import RegistrationResult
def test_run_sync_registration_task_disables_bad_proxy_and_retries(monkeypatch, tmp_path):
    """A TLS-handshake failure on the default proxy must disable that proxy,
    retry the registration through the backup proxy, and record the working
    proxy on the completed task.
    """
    manager = DatabaseSessionManager(f"sqlite:///{tmp_path}/test.db")
    manager.create_tables()
    manager.migrate_tables()
    with manager.session_scope() as session:
        # Default proxy (8001) will fail; backup proxy (8002) will succeed.
        primary_proxy = crud.create_proxy(
            session,
            name="primary",
            type="http",
            host="127.0.0.1",
            port=8001,
        )
        crud.update_proxy(session, primary_proxy.id, is_default=True)
        backup_proxy = crud.create_proxy(
            session,
            name="backup",
            type="http",
            host="127.0.0.1",
            port=8002,
        )
        email_service = crud.create_email_service(
            session,
            service_type="tempmail",
            name="tempmail-db",
            config={"base_url": "https://mail.example/api"},
        )
        crud.create_registration_task(session, task_uuid="task-proxy-failover")
        primary_proxy_id = primary_proxy.id
        backup_proxy_id = backup_proxy.id
        email_service_id = email_service.id
    monkeypatch.setattr(registration, "get_db", manager.session_scope)
    monkeypatch.setattr(
        registration,
        "EmailServiceFactory",
        SimpleNamespace(
            create=lambda service_type, config, name=None: SimpleNamespace(
                service_type=service_type,
                config=config,
                name=name or service_type.value,
            )
        ),
    )
    attempted_proxies = []  # proxy URLs in attempt order
    saved_results = []  # emails persisted via save_to_database
    class FakeRegistrationEngine:
        def __init__(self, email_service, proxy_url=None, callback_logger=None, task_uuid=None):
            self.proxy_url = proxy_url
        def run(self):
            attempted_proxies.append(self.proxy_url)
            if self.proxy_url.endswith(":8001"):
                # Simulate a proxy-level TLS failure on the primary proxy.
                return RegistrationResult(
                    success=False,
                    email="proxy@example.com",
                    error_message="OpenAI 请求失败: curl: (35) TLS handshake failed",
                )
            return RegistrationResult(
                success=True,
                email="proxy@example.com",
                access_token="access-token",
                workspace_id="ws-123",
            )
        def save_to_database(self, result):
            saved_results.append(result.email)
            return True
    monkeypatch.setattr(registration, "RegistrationEngine", FakeRegistrationEngine)
    registration.email_service_circuit_breakers.clear()
    registration._run_sync_registration_task(
        task_uuid="task-proxy-failover",
        email_service_type="tempmail",
        proxy=None,
        email_service_config=None,
        email_service_id=email_service_id,
    )
    assert attempted_proxies == [
        "http://127.0.0.1:8001",
        "http://127.0.0.1:8002",
    ]
    assert saved_results == ["proxy@example.com"]
    with manager.session_scope() as session:
        disabled_primary = crud.get_proxy_by_id(session, primary_proxy_id)
        active_backup = crud.get_proxy_by_id(session, backup_proxy_id)
        task = crud.get_registration_task_by_uuid(session, "task-proxy-failover")
        assert disabled_primary is not None
        assert disabled_primary.enabled is False
        assert active_backup is not None
        assert active_backup.enabled is True
        assert task is not None
        assert task.status == "completed"
        assert task.proxy == "http://127.0.0.1:8002"

View File

@@ -0,0 +1,150 @@
const test = require('node:test');
const assert = require('node:assert/strict');
const fs = require('node:fs');
const vm = require('node:vm');
const path = require('node:path');

// Resolve app.js relative to this test file instead of a developer-specific
// absolute path (/Users/...), so the suite runs on any machine/checkout.
// NOTE(review): assumes this test lives one directory below the repo root
// (e.g. tests/) with static/js/app.js at <repo>/static/js/app.js — confirm.
const APP_JS_PATH = path.join(__dirname, '..', 'static', 'js', 'app.js');
// Build a minimal DOM-element stand-in exposing the properties and methods
// the app code touches; query helpers return fresh stubs / empty results.
function createElementStub() {
  return {
    style: {},
    dataset: {},
    value: '',
    checked: false,
    disabled: false,
    innerHTML: '',
    textContent: '',
    className: '',
    appendChild() {},
    addEventListener() {},
    removeEventListener() {},
    querySelector() {
      // Nested lookups get their own independent stub.
      return createElementStub();
    },
    querySelectorAll() {
      return [];
    },
    closest() {
      return null;
    },
  };
}
// Create an isolated vm context that mimics just enough of the browser
// (document, sessionStorage, window, toast, api) to load app.js, then run
// app.js inside it. Returns the sandbox plus the map of elements handed out
// by document.getElementById so tests can inspect DOM state afterwards.
function createSandbox() {
  const elements = new Map();
  const sandbox = {
    console,
    setTimeout,
    clearTimeout,
    setInterval: () => 1,
    clearInterval: () => {},
    Event: class Event {
      constructor(type) {
        this.type = type;
      }
    },
    document: {
      getElementById(id) {
        // Memoize per id so repeated lookups observe the same stub.
        if (!elements.has(id)) {
          elements.set(id, createElementStub());
        }
        return elements.get(id);
      },
      createElement() {
        return createElementStub();
      },
      addEventListener() {},
      querySelector() {
        return createElementStub();
      },
      querySelectorAll() {
        return [];
      },
    },
    sessionStorage: {
      getItem() {
        return null;
      },
      setItem() {},
      removeItem() {},
    },
    toast: {
      info() {},
      success() {},
      warning() {},
      error() {},
    },
    api: {
      // Tests stub out network access explicitly; any call here is a bug.
      get() {
        throw new Error('api.get should not be called in this test');
      },
      post() {
        throw new Error('api.post should not be called in this test');
      },
    },
    loadRecentAccounts() {},
    getServiceTypeText(type) {
      return {
        tempmail: '临时邮箱',
        outlook: 'Outlook',
      }[type] || type;
    },
    window: null,
    WebSocket: null,
  };
  // window is the sandbox itself, as in a real browser global scope.
  sandbox.window = sandbox;
  sandbox.window.location = { protocol: 'http:', host: '127.0.0.1:8005' };
  vm.createContext(sandbox);
  vm.runInContext(fs.readFileSync(APP_JS_PATH, 'utf8'), sandbox, { filename: 'app.js' });
  return { sandbox, elements };
}
// Drive the in-sandbox websocket handler with a fabricated 'completed'
// status message and assert the UI state (buttons, status row) is updated
// without starting the log-polling fallback.
test('single task websocket completion updates task info and resets buttons', () => {
  const { sandbox, elements } = createSandbox();
  vm.runInContext(
    `
    var __lastWs = null;
    startLogPolling = function() {
      throw new Error('startLogPolling should not be called for completed status');
    };
    loadRecentAccounts = function() {};
    currentTask = { task_uuid: 'task-1' };
    taskCompleted = false;
    taskFinalStatus = null;
    elements.startBtn.disabled = true;
    elements.cancelBtn.disabled = false;
    elements.taskStatusRow.style.display = 'grid';
    WebSocket = function(url) {
      this.url = url;
      this.readyState = 0;
      __lastWs = this;
    };
    WebSocket.OPEN = 1;
    WebSocket.CLOSED = 3;
    WebSocket.prototype.close = function() {
      this.readyState = WebSocket.CLOSED;
    };
    connectWebSocket('task-1');
    __lastWs.onmessage({
      data: JSON.stringify({
        type: 'status',
        status: 'completed',
        email: 'demo@example.com',
        email_service: 'tempmail',
      }),
    });
    `,
    sandbox,
  );
  assert.equal(elements.get('start-btn').disabled, false);
  assert.equal(elements.get('cancel-btn').disabled, true);
  assert.equal(elements.get('task-status').textContent, '已完成');
  assert.equal(elements.get('task-email').textContent, 'demo@example.com');
  assert.equal(elements.get('task-service').textContent, '临时邮箱');
});

View File

@@ -15,6 +15,7 @@ def test_static_asset_version_is_non_empty_string():
def test_email_services_template_uses_versioned_static_assets():
    """Every static asset referenced by email_services.html must carry the
    cache-busting ``?v={{ static_version }}`` suffix.
    """
    template = Path("templates/email_services.html").read_text(encoding="utf-8")
    expected_assets = (
        '/static/favicon.svg?v={{ static_version }}',
        '/static/css/style.css?v={{ static_version }}',
        '/static/js/utils.js?v={{ static_version }}',
        '/static/js/email_services.js?v={{ static_version }}',
    )
    for asset in expected_assets:
        assert asset in template
@@ -23,6 +24,7 @@ def test_email_services_template_uses_versioned_static_assets():
def test_index_template_uses_versioned_static_assets():
    """Every static asset referenced by index.html must carry the
    cache-busting ``?v={{ static_version }}`` suffix.
    """
    template = Path("templates/index.html").read_text(encoding="utf-8")
    expected_assets = (
        '/static/favicon.svg?v={{ static_version }}',
        '/static/css/style.css?v={{ static_version }}',
        '/static/js/utils.js?v={{ static_version }}',
        '/static/js/app.js?v={{ static_version }}',
    )
    for asset in expected_assets:
        assert asset in template

View File

@@ -0,0 +1,72 @@
import asyncio
from src.web.routes.registration import _create_task_status_callback
from src.web.task_manager import task_manager
class FakeWebSocket:
    """Websocket double that captures every payload passed to ``send_json``."""

    def __init__(self):
        self.messages = []

    async def send_json(self, payload):
        """Store *payload* instead of transmitting it over a real socket."""
        self.messages.append(payload)
def test_update_status_broadcasts_to_registered_websocket():
    """task_manager.update_status must push a 'status' message (with email
    and provider fields) to a websocket registered for the task.
    """
    async def run_test():
        task_uuid = "test-status-broadcast"
        websocket = FakeWebSocket()
        task_manager.set_loop(asyncio.get_running_loop())
        task_manager.register_websocket(task_uuid, websocket)
        try:
            task_manager.update_status(
                task_uuid,
                "completed",
                email="demo@example.com",
                email_service="tempmail",
            )
            # Give the broadcast coroutine a tick to run on the loop.
            await asyncio.sleep(0.05)
            assert websocket.messages, "expected a status message to be broadcast"
            assert websocket.messages[-1]["type"] == "status"
            assert websocket.messages[-1]["status"] == "completed"
            assert websocket.messages[-1]["email"] == "demo@example.com"
            assert websocket.messages[-1]["email_service"] == "tempmail"
        finally:
            task_manager.unregister_websocket(task_uuid, websocket)
    asyncio.run(run_test())
def test_task_status_callback_broadcasts_phase_fields():
    """The status callback built by _create_task_status_callback must forward
    phase / phase_detail / step_index fields in the broadcast message, with
    status defaulting to 'running'.
    """
    async def run_test():
        task_uuid = "test-status-phase"
        websocket = FakeWebSocket()
        task_manager.set_loop(asyncio.get_running_loop())
        task_manager.register_websocket(task_uuid, websocket)
        try:
            callback = _create_task_status_callback(task_uuid, "tempmail")
            callback({
                "phase": "redirect_chain",
                "phase_detail": "跟随重定向 1/6",
                "step_index": 14,
            })
            # Give the broadcast coroutine a tick to run on the loop.
            await asyncio.sleep(0.05)
            assert websocket.messages, "expected a status message to be broadcast"
            assert websocket.messages[-1]["type"] == "status"
            assert websocket.messages[-1]["status"] == "running"
            assert websocket.messages[-1]["email_service"] == "tempmail"
            assert websocket.messages[-1]["phase"] == "redirect_chain"
            assert websocket.messages[-1]["phase_detail"] == "跟随重定向 1/6"
            assert websocket.messages[-1]["step_index"] == 14
        finally:
            task_manager.unregister_websocket(task_uuid, websocket)
    asyncio.run(run_test())

143
tests/test_task_recovery.py Normal file
View File

@@ -0,0 +1,143 @@
from contextlib import contextmanager
import asyncio
from fastapi import WebSocketDisconnect
from src.database import crud
from src.database.models import Base, RegistrationTask
from src.database.session import DatabaseSessionManager
from src.web.routes import websocket as websocket_routes
from src.web.task_manager import TaskManager
def test_fail_incomplete_registration_tasks_marks_pending_and_running_failed(tmp_path):
    """Startup recovery must fail all pending/running tasks (setting the error
    message, completion time, and appending a system log line) while leaving
    completed tasks untouched.
    """
    db_path = tmp_path / "recovery.db"
    manager = DatabaseSessionManager(f"sqlite:///{db_path}")
    Base.metadata.create_all(bind=manager.engine)
    with manager.session_scope() as session:
        session.add_all([
            RegistrationTask(task_uuid="task-pending", status="pending"),
            RegistrationTask(task_uuid="task-running", status="running", logs="[01:00:00] still running"),
            RegistrationTask(task_uuid="task-done", status="completed"),
        ])
    with manager.session_scope() as session:
        cleaned = crud.fail_incomplete_registration_tasks(
            session,
            "服务启动时检测到未完成的历史任务,已标记失败,请重新发起。"
        )
        assert cleaned == ["task-pending", "task-running"]
    with manager.session_scope() as session:
        pending_task = crud.get_registration_task_by_uuid(session, "task-pending")
        running_task = crud.get_registration_task_by_uuid(session, "task-running")
        done_task = crud.get_registration_task_by_uuid(session, "task-done")
        assert pending_task.status == "failed"
        assert running_task.status == "failed"
        assert pending_task.error_message == "服务启动时检测到未完成的历史任务,已标记失败,请重新发起。"
        assert running_task.completed_at is not None
        assert "[系统] 服务启动时检测到未完成的历史任务,已标记失败,请重新发起。" in running_task.logs
        assert done_task.status == "completed"
def test_restore_task_snapshot_loads_status_and_logs_from_database(monkeypatch, tmp_path):
    """_restore_task_snapshot should rebuild the websocket status dict (status,
    email from the result payload, error) and split persisted logs by line.
    """
    db_path = tmp_path / "websocket.db"
    manager = DatabaseSessionManager(f"sqlite:///{db_path}")
    Base.metadata.create_all(bind=manager.engine)
    with manager.session_scope() as session:
        session.add(
            RegistrationTask(
                task_uuid="task-websocket",
                status="failed",
                logs="[01:00:00] step 1\n[01:00:01] step 2",
                result={"email": "tester@example.com"},
                error_message="boom"
            )
        )
    @contextmanager
    def fake_get_db():
        # Serve sessions backed by the temporary sqlite database above.
        session = manager.SessionLocal()
        try:
            yield session
        finally:
            session.close()
    monkeypatch.setattr(websocket_routes, "get_db", fake_get_db)
    status, logs = websocket_routes._restore_task_snapshot("task-websocket")
    assert status == {
        "status": "failed",
        "email": "tester@example.com",
        "error": "boom",
    }
    assert logs == ["[01:00:00] step 1", "[01:00:01] step 2"]
def test_sync_task_state_prefers_longer_persisted_log_history():
    """Re-syncing with a shorter log list must not truncate retained history,
    and an omitted status must not clear the stored one.
    """
    tm = TaskManager()
    uuid = "task-sync"
    tm.sync_task_state(uuid, status={"status": "running"}, logs=["a", "b"])
    # A later sync carrying fewer logs (e.g. a stale snapshot) is ignored.
    tm.sync_task_state(uuid, logs=["a"])
    assert tm.get_status(uuid) == {"status": "running"}
    assert tm.get_logs(uuid) == ["a", "b"]
def test_register_websocket_returns_snapshot_and_keeps_live_cursor():
    """Registering a websocket returns the full log history and positions the
    unsent-log cursor at its end, so only logs added afterwards are pending.
    """
    tm = TaskManager()
    uuid = "task-live"
    listener = object()
    tm.sync_task_state(uuid, status={"status": "running"}, logs=["log-1", "log-2"])
    snapshot = tm.register_websocket(uuid, listener)
    assert snapshot == ["log-1", "log-2"]
    # Nothing new yet: the cursor sits just past the replayed snapshot.
    assert tm.get_unsent_logs(uuid, listener) == []
    tm.add_log(uuid, "log-3")
    assert tm.get_unsent_logs(uuid, listener) == ["log-3"]
class _FakeWebSocket:
def __init__(self):
self.messages = []
self.accepted = False
async def accept(self):
self.accepted = True
async def send_json(self, payload):
self.messages.append(payload)
async def receive_json(self):
raise WebSocketDisconnect()
def test_batch_websocket_replays_history_logs_from_registration_snapshot(monkeypatch):
    """A websocket attaching to an existing batch must first receive a status
    message, then a replay of all previously recorded batch logs in order.
    """
    manager = TaskManager()
    batch_id = "batch-history"
    websocket = _FakeWebSocket()
    manager.init_batch(batch_id, total=2)
    manager.add_batch_log(batch_id, "[01:00:00] first")
    manager.add_batch_log(batch_id, "[01:00:01] second")
    monkeypatch.setattr(websocket_routes, "task_manager", manager)
    # The fake socket disconnects on the first receive, ending the handler.
    asyncio.run(websocket_routes.batch_websocket(websocket, batch_id))
    assert websocket.accepted is True
    assert websocket.messages[0]["type"] == "status"
    assert [msg["message"] for msg in websocket.messages[1:]] == [
        "[01:00:00] first",
        "[01:00:01] second",
    ]

View File

@@ -0,0 +1,120 @@
import src.services.tempmail as tempmail_module
from src.services.tempmail import TempmailService
class FakeResponse:
    """Minimal HTTP-response stand-in exposing ``status_code`` and ``json()``."""

    def __init__(self, status_code=200, payload=None):
        self.status_code = status_code
        # Fall back to an empty dict so json() never yields None.
        if payload:
            self._payload = payload
        else:
            self._payload = {}

    def json(self):
        """Return the canned JSON payload."""
        return self._payload
class FakeHTTPClient:
    """Scripted HTTP client: replays queued responses in order and records
    every GET. Raises if a request arrives with no response prepared.
    """

    def __init__(self, responses):
        self.responses = list(responses)
        self.calls = []

    def get(self, url, **kwargs):
        """Log the call, then pop and return the next queued response."""
        self.calls.append({"url": url, "kwargs": kwargs})
        if self.responses:
            return self.responses.pop(0)
        raise AssertionError(f"未准备响应: GET {url}")
def test_get_verification_code_ignores_messages_older_than_tolerance_window(monkeypatch):
    """Mail received before the otp_sent_at anchor (minus the tolerance) must
    be skipped: the 1998 message is dropped, the 2001 message is used.
    Also pins the exact inbox request (URL, token param, Accept header).
    """
    service = TempmailService({
        "base_url": "https://api.tempmail.test/v2",
        "timeout": 1,
        "max_retries": 1,
    })
    # Pre-populate the cache so no mailbox-creation request is needed.
    service._email_cache["tester@example.com"] = {
        "email": "tester@example.com",
        "token": "token-1",
    }
    service.http_client = FakeHTTPClient([
        FakeResponse(
            status_code=200,
            payload={
                "emails": [
                    {
                        "id": "old-mail",
                        "from": "noreply@openai.com",
                        "subject": "Old verification code",
                        "body": "111111",
                        "received_at": 1998,
                    },
                    {
                        "id": "new-mail",
                        "from": "noreply@openai.com",
                        "subject": "New verification code",
                        "body": "654321",
                        "received_at": 2001,
                    },
                ]
            },
        )
    ])
    monkeypatch.setattr(tempmail_module.time, "sleep", lambda _: None)
    code = service.get_verification_code(
        email="tester@example.com",
        timeout=1,
        otp_sent_at=2000,
    )
    assert code == "654321"
    assert service.http_client.calls == [
        {
            "url": "https://api.tempmail.test/v2/inbox",
            "kwargs": {
                "params": {"token": "token-1"},
                "headers": {"Accept": "application/json"},
            },
        }
    ]
def test_get_verification_code_allows_two_second_anchor_tolerance(monkeypatch):
    """A message up to the tolerance window before the anchor is still
    accepted: with otp_sent_at=2000, the 1999 mail is used while the 1998
    mail is rejected — implying a tolerance of at least 1s but under 2s
    as exercised here.
    """
    service = TempmailService({
        "base_url": "https://api.tempmail.test/v2",
        "timeout": 1,
        "max_retries": 1,
    })
    # Pre-populate the cache so no mailbox-creation request is needed.
    service._email_cache["tester@example.com"] = {
        "email": "tester@example.com",
        "token": "token-1",
    }
    service.http_client = FakeHTTPClient([
        FakeResponse(
            status_code=200,
            payload={
                "emails": [
                    {
                        "id": "too-old-mail",
                        "from": "noreply@openai.com",
                        "subject": "Too old verification code",
                        "body": "111111",
                        "received_at": 1998,
                    },
                    {
                        "id": "tolerated-mail",
                        "from": "noreply@openai.com",
                        "subject": "Tolerated verification code",
                        "body": "654321",
                        "received_at": 1999,
                    },
                ]
            },
        )
    ])
    monkeypatch.setattr(tempmail_module.time, "sleep", lambda _: None)
    code = service.get_verification_code(
        email="tester@example.com",
        timeout=1,
        otp_sent_at=2000,
    )
    assert code == "654321"

View File

@@ -0,0 +1,142 @@
from datetime import datetime, timezone
from src.services.tempmail import TempmailService
class FakeResponse:
    """Canned HTTP response: a fixed JSON payload plus a status code."""

    def __init__(self, payload, status_code=200):
        self.status_code = status_code
        self._payload = payload

    def json(self):
        """Return the payload supplied at construction, unchanged."""
        return self._payload
class FakeHTTPClient:
    """Replays a fixed queue of responses for GET requests, recording each
    call; a request beyond the queue fails the test immediately.
    """

    def __init__(self, responses):
        self.responses = list(responses)
        self.calls = []

    def get(self, url, **kwargs):
        """Record the call, then return the next scripted response."""
        self.calls.append({"url": url, "kwargs": kwargs})
        if self.responses:
            return self.responses.pop(0)
        raise AssertionError(f"未准备响应: GET {url}")
def _to_timestamp(value: str) -> float:
return datetime.fromisoformat(value.replace("Z", "+00:00")).astimezone(timezone.utc).timestamp()
def test_get_verification_code_ignores_messages_received_before_otp_sent_at():
    """Mails received before the OTP anchor must be skipped in favor of newer ones."""
    service = TempmailService({"base_url": "https://api.tempmail.test"})
    service._email_cache["tester@example.com"] = {"token": "token-1"}
    stale_mail = {
        "id": "old-mail",
        "received_at": "2026-03-23T10:00:00Z",
        "from": "noreply@openai.com",
        "subject": "Old code",
        "body": "111111",
    }
    fresh_mail = {
        "id": "new-mail",
        "received_at": "2026-03-23T10:00:05Z",
        "from": "noreply@openai.com",
        "subject": "New code",
        "body": "222222",
    }
    service.http_client = FakeHTTPClient([
        FakeResponse({"emails": [stale_mail, fresh_mail]}),
    ])
    # Anchor sits between the two mails; only the later one should qualify.
    anchor = _to_timestamp("2026-03-23T10:00:02Z")

    code = service.get_verification_code(
        email="tester@example.com",
        timeout=1,
        otp_sent_at=anchor,
    )
    assert code == "222222"
def test_get_verification_code_uses_date_field_when_received_at_is_missing():
    """When a mail lacks received_at, its 'date' field should drive the time filter."""
    service = TempmailService({"base_url": "https://api.tempmail.test"})
    service._email_cache["tester@example.com"] = {"token": "token-1"}
    # First mail carries only a legacy 'date'; both are after the anchor.
    date_only_mail = {
        "id": "legacy-mail",
        "date": "2026-03-23T10:00:06Z",
        "from": "noreply@openai.com",
        "subject": "Legacy code",
        "body": "333333",
    }
    received_at_mail = {
        "id": "received-mail",
        "received_at": "2026-03-23T10:00:07Z",
        "from": "noreply@openai.com",
        "subject": "Received code",
        "body": "444444",
    }
    service.http_client = FakeHTTPClient([
        FakeResponse({"emails": [date_only_mail, received_at_mail]}),
    ])

    code = service.get_verification_code(
        email="tester@example.com",
        timeout=1,
        otp_sent_at=_to_timestamp("2026-03-23T10:00:05Z"),
    )
    assert code == "333333"
def test_get_verification_code_accepts_tempmail_date_field_as_timestamp():
    """The 'date' field alone is enough to filter mails against the OTP anchor."""
    service = TempmailService({"base_url": "https://api.tempmail.test"})
    service._email_cache["tester@example.com"] = {"token": "token-1"}
    # Neither mail has received_at; only the one dated after the anchor should win.
    before_anchor = {
        "id": "old-mail",
        "date": "2026-03-23T10:00:02Z",
        "from": "noreply@openai.com",
        "subject": "Old code",
        "body": "111111",
    }
    after_anchor = {
        "id": "new-mail",
        "date": "2026-03-23T10:00:08Z",
        "from": "noreply@openai.com",
        "subject": "New code",
        "body": "222222",
    }
    service.http_client = FakeHTTPClient([
        FakeResponse({"emails": [before_anchor, after_anchor]}),
    ])

    code = service.get_verification_code(
        email="tester@example.com",
        timeout=1,
        otp_sent_at=_to_timestamp("2026-03-23T10:00:05Z"),
    )
    assert code == "222222"
def test_parse_message_time_normalizes_timezone_offset():
    """The same instant expressed in UTC and +08:00 must collapse to one timestamp."""
    service = TempmailService({"base_url": "https://api.tempmail.test"})
    as_utc = service._parse_message_time("2026-03-23T10:00:07Z")
    as_offset = service._parse_message_time("2026-03-23T18:00:07+08:00")
    assert as_utc == as_offset

View File

@@ -0,0 +1,292 @@
{
"mode": "live",
"base_url": "http://127.0.0.1:15555",
"db_path": "/Volumes/Work/code/codex-manager/tests_runtime/e2e_runtime_1774308869.db",
"health": {
"status_code": 200,
"body": {
"total": 4,
"tasks": [
{
"id": 4,
"task_uuid": "9079068e-e3f5-4fa7-8e1c-810ce1c352da",
"status": "completed",
"email_service_id": null,
"proxy": null,
"logs": null,
"result": null,
"error_message": null,
"created_at": "2026-03-23T23:34:58.715238",
"started_at": "2026-03-23T23:34:58.718370",
"completed_at": "2026-03-23T23:34:58.718376"
}
]
}
},
"create": {
"task": {
"id": 5,
"task_uuid": "a8f4da41-354c-4d89-9634-c582a032c70b",
"status": "pending",
"email_service_id": null,
"proxy": null,
"logs": null,
"result": null,
"error_message": null,
"created_at": "2026-03-23T23:35:28.629402",
"started_at": null,
"completed_at": null
},
"batch_id": "2e8cfce4-bf20-4f0b-8839-a94e8e141472",
"checks": {
"seeded_account_email": "mock-seeded-a8f4da41@example.test",
"tokenless_account_email": "mock-tokenless-a8f4da41@example.test",
"partial_account_email": "mock-partial-a8f4da41@example.test",
"outlook_account_email": "mock-outlook-a8f4da41@example.test",
"backoff_service_name": "mock-backoff-a8f4da41"
}
},
"websocket": {
"messages": [
{
"type": "status",
"task_uuid": "a8f4da41-354c-4d89-9634-c582a032c70b",
"status": "pending"
},
{
"type": "status",
"task_uuid": "a8f4da41-354c-4d89-9634-c582a032c70b",
"status": "running",
"timestamp": "2026-03-23T23:35:29.258537",
"email_service": "tempmail"
},
{
"type": "log",
"task_uuid": "a8f4da41-354c-4d89-9634-c582a032c70b",
"message": "[模拟] 任务已启动,开始执行真实链路探针",
"timestamp": "2026-03-23T23:35:29.258717"
},
{
"type": "log",
"task_uuid": "a8f4da41-354c-4d89-9634-c582a032c70b",
"message": "[模拟] Token 同步与 Outlook refresh_token 探针已写入数据库",
"timestamp": "2026-03-23T23:35:29.462037"
},
{
"type": "log",
"task_uuid": "a8f4da41-354c-4d89-9634-c582a032c70b",
"message": "[模拟] OTP 超时退避 #1: failures=1, delay=30",
"timestamp": "2026-03-23T23:35:29.618496"
},
{
"type": "log",
"task_uuid": "a8f4da41-354c-4d89-9634-c582a032c70b",
"message": "[模拟] OTP 超时退避 #2: failures=2, delay=60",
"timestamp": "2026-03-23T23:35:29.772745"
},
{
"type": "log",
"task_uuid": "a8f4da41-354c-4d89-9634-c582a032c70b",
"message": "[模拟] OTP 超时退避 #3: failures=3, delay=3600",
"timestamp": "2026-03-23T23:35:29.926635"
},
{
"type": "log",
"task_uuid": "a8f4da41-354c-4d89-9634-c582a032c70b",
"message": "[模拟] 批量计数探针已完成",
"timestamp": "2026-03-23T23:35:30.102423"
},
{
"type": "status",
"task_uuid": "a8f4da41-354c-4d89-9634-c582a032c70b",
"status": "completed",
"timestamp": "2026-03-23T23:35:30.287066",
"email": "mock-seeded-a8f4da41@example.test",
"email_service": "tempmail"
}
],
"log_count": 6,
"status_count": 3,
"live_log_count": 6,
"final_status": "completed"
},
"task": {
"id": 5,
"task_uuid": "a8f4da41-354c-4d89-9634-c582a032c70b",
"status": "completed",
"email_service_id": null,
"proxy": null,
"logs": "[模拟] 任务已启动,开始执行真实链路探针\n[模拟] Token 同步与 Outlook refresh_token 探针已写入数据库\n[模拟] OTP 超时退避 #1: failures=1, delay=30\n[模拟] OTP 超时退避 #2: failures=2, delay=60\n[模拟] OTP 超时退避 #3: failures=3, delay=3600\n[模拟] 批量计数探针已完成\n[模拟] 任务完成,所有探针已收口",
"result": {
"email": "mock-seeded-a8f4da41@example.test",
"email_service": "tempmail",
"hardening_checks": {
"token_sync": {
"seeded_account_id": 4,
"tokenless_account_id": 5,
"partial_account_id": 6
},
"outlook_refresh": {
"service_id": 3,
"email": "mock-outlook-a8f4da41@example.test"
},
"batch_counter": {
"batch_id": "2e8cfce4-bf20-4f0b-8839-a94e8e141472",
"task_uuids": [
"03c182b4-d5d3-4939-b2a0-eda844c402d9",
"224f2a9f-c0f3-4d97-8e92-4c2e772a675b",
"6c4f0e18-47b1-473a-9cc5-83ef09e33ff8"
],
"snapshot": {
"status": "completed",
"total": 3,
"completed": 3,
"success": 2,
"failed": 1,
"skipped": 0,
"cancelled": false,
"current_index": 0,
"finished": true,
"task_uuids": [
"03c182b4-d5d3-4939-b2a0-eda844c402d9",
"224f2a9f-c0f3-4d97-8e92-4c2e772a675b",
"6c4f0e18-47b1-473a-9cc5-83ef09e33ff8"
]
}
},
"otp_timeout_backoff": {
"service_id": 4,
"states": [
{
"failures": 1,
"delay_seconds": 30,
"opened_until": 1774308959.612146,
"retry_after": null,
"last_error": "模拟 OTP 超时 #1"
},
{
"failures": 2,
"delay_seconds": 60,
"opened_until": 1774308989.7684338,
"retry_after": null,
"last_error": "模拟 OTP 超时 #2"
},
{
"failures": 3,
"delay_seconds": 3600,
"opened_until": 1774312529.923651,
"retry_after": null,
"last_error": "模拟 OTP 超时 #3"
}
]
}
}
},
"error_message": null,
"created_at": "2026-03-23T23:35:28.629402",
"started_at": "2026-03-23T23:35:29.251251",
"completed_at": "2026-03-23T23:35:30.252298"
},
"batch_api": {
"batch_id": "2e8cfce4-bf20-4f0b-8839-a94e8e141472",
"total": 3,
"completed": 3,
"success": 2,
"failed": 1,
"current_index": 0,
"cancelled": false,
"finished": true,
"progress": "3/3"
},
"database": {
"task_uuid": "a8f4da41-354c-4d89-9634-c582a032c70b",
"batch_id": "2e8cfce4-bf20-4f0b-8839-a94e8e141472",
"seeded_account": {
"email": "mock-seeded-a8f4da41@example.test",
"access_token": "mock-access-token-seeded",
"refresh_token": "mock-refresh-token-seeded",
"token_sync_status": "pending"
},
"tokenless_account": {
"email": "mock-tokenless-a8f4da41@example.test",
"access_token": "mock-access-token-updated",
"refresh_token": null,
"token_sync_status": "pending"
},
"partial_account": {
"email": "mock-partial-a8f4da41@example.test",
"access_token": "mock-access-token-partial",
"refresh_token": "",
"token_sync_status": "pending"
},
"task_result": {
"email": "mock-seeded-a8f4da41@example.test",
"email_service": "tempmail",
"hardening_checks": {
"token_sync": {
"seeded_account_id": 4,
"tokenless_account_id": 5,
"partial_account_id": 6
},
"outlook_refresh": {
"service_id": 3,
"email": "mock-outlook-a8f4da41@example.test"
},
"batch_counter": {
"batch_id": "2e8cfce4-bf20-4f0b-8839-a94e8e141472",
"task_uuids": [
"03c182b4-d5d3-4939-b2a0-eda844c402d9",
"224f2a9f-c0f3-4d97-8e92-4c2e772a675b",
"6c4f0e18-47b1-473a-9cc5-83ef09e33ff8"
],
"snapshot": {
"status": "completed",
"total": 3,
"completed": 3,
"success": 2,
"failed": 1,
"skipped": 0,
"cancelled": false,
"current_index": 0,
"finished": true,
"task_uuids": [
"03c182b4-d5d3-4939-b2a0-eda844c402d9",
"224f2a9f-c0f3-4d97-8e92-4c2e772a675b",
"6c4f0e18-47b1-473a-9cc5-83ef09e33ff8"
]
}
},
"otp_timeout_backoff": {
"service_id": 4,
"states": [
{
"failures": 1,
"delay_seconds": 30,
"opened_until": 1774308959.612146,
"retry_after": null,
"last_error": "模拟 OTP 超时 #1"
},
{
"failures": 2,
"delay_seconds": 60,
"opened_until": 1774308989.7684338,
"retry_after": null,
"last_error": "模拟 OTP 超时 #2"
},
{
"failures": 3,
"delay_seconds": 3600,
"opened_until": 1774312529.923651,
"retry_after": null,
"last_error": "模拟 OTP 超时 #3"
}
]
}
}
},
"outlook_second_account": {
"email": "mock-outlook-a8f4da41@example.test",
"refresh_token": "new-second"
}
}
}

View File

@@ -0,0 +1,38 @@
{
"mode": "verify-recovery",
"base_url": "http://127.0.0.1:15555",
"db_path": "/Volumes/Work/code/codex-manager/tests_runtime/e2e_runtime_1774308869.db",
"state": {
"stale_task_uuid": "stale-e738842e-74d8-400d-859e-1b283eab1a95",
"db_path": "/Volumes/Work/code/codex-manager/tests_runtime/e2e_runtime_1774308869.db",
"prepared_at": "2026-03-24 07:35:40"
},
"health": {
"status_code": 200,
"body": {
"total": 9,
"tasks": [
{
"id": 9,
"task_uuid": "stale-e738842e-74d8-400d-859e-1b283eab1a95",
"status": "failed",
"email_service_id": null,
"proxy": null,
"logs": "[00:00:00] stale task\n[系统] 服务启动时检测到未完成的历史任务,已标记失败,请重新发起。",
"result": null,
"error_message": "服务启动时检测到未完成的历史任务,已标记失败,请重新发起。",
"created_at": "2026-03-24T07:35:40",
"started_at": "2026-03-24T07:35:40",
"completed_at": "2026-03-23T23:35:57.292019"
}
]
}
},
"recovery": {
"task_uuid": "stale-e738842e-74d8-400d-859e-1b283eab1a95",
"status": "failed",
"error_message": "服务启动时检测到未完成的历史任务,已标记失败,请重新发起。",
"logs": "[00:00:00] stale task\n[系统] 服务启动时检测到未完成的历史任务,已标记失败,请重新发起。",
"completed_at": "2026-03-23 23:35:57.292019"
}
}

View File

@@ -0,0 +1,5 @@
{
"stale_task_uuid": "stale-e738842e-74d8-400d-859e-1b283eab1a95",
"db_path": "/Volumes/Work/code/codex-manager/tests_runtime/e2e_runtime_1774308869.db",
"prepared_at": "2026-03-24 07:35:40"
}