refactor: remove legacy LLM_DISABLE_THINKING and LLM_REASONING_EFFORT config, unify thinking_level handling

- Eliminate support for LLM_DISABLE_THINKING and LLM_REASONING_EFFORT in config, code, and tests
- Simplify LLM thinking level logic to rely solely on LLM_THINKING_LEVEL
- Refactor LLMHelper and related endpoints to remove legacy parameter handling
- Update system API and test utilities to match new configuration structure
- Minor code cleanup and formatting improvements
This commit is contained in:
jxxghp
2026-04-25 10:42:03 +08:00
parent 4a81417fb7
commit a05ffc07d4
6 changed files with 143 additions and 389 deletions

View File

@@ -72,8 +72,6 @@ sys.modules["app.core.config"].settings.LLM_MODEL = "deepseek-v4-pro"
 sys.modules["app.core.config"].settings.LLM_API_KEY = "sk-test"
 sys.modules["app.core.config"].settings.LLM_BASE_URL = "https://api.deepseek.com"
 sys.modules["app.core.config"].settings.LLM_THINKING_LEVEL = None
-sys.modules["app.core.config"].settings.LLM_DISABLE_THINKING = False
-sys.modules["app.core.config"].settings.LLM_REASONING_EFFORT = None
 sys.modules["app.core.config"].settings.LLM_TEMPERATURE = 0.1
 sys.modules["app.core.config"].settings.LLM_MAX_CONTEXT_TOKENS = 64
 sys.modules["app.core.config"].settings.PROXY_HOST = None

View File

@@ -39,8 +39,6 @@ _stub_module(
 LLM_API_KEY="global-key",
 LLM_BASE_URL="https://global.example.com",
 LLM_THINKING_LEVEL=None,
-LLM_DISABLE_THINKING=False,
-LLM_REASONING_EFFORT=None,
 LLM_TEMPERATURE=0.1,
 LLM_MAX_CONTEXT_TOKENS=64,
 PROXY_HOST=None,