Merge branch 'pr/yangtb2024/13'

This commit is contained in:
snaily
2025-03-18 21:46:34 +08:00
3 changed files with 14 additions and 1 deletion

View File

@@ -1,6 +1,7 @@
API_KEYS=["AIzaSyxxxxxxxxxxxxxxxxxxx","AIzaSyxxxxxxxxxxxxxxxxxxx"]
ALLOWED_TOKENS=["sk-123456"]
# AUTH_TOKEN=sk-123456
FILTERED_MODELS=["gemini-1.0-pro-vision-latest", "gemini-pro-vision", "chat-bison-001", "text-bison-001", "embedding-gecko-001"]
MODEL_SEARCH=["gemini-2.0-flash-exp","gemini-2.0-pro-exp"]
MODEL_IMAGE=["gemini-2.0-flash-exp"]
TOOLS_CODE_EXECUTION_ENABLED=false

View File

@@ -8,6 +8,7 @@ class Settings(BaseSettings):
BASE_URL: str = "https://generativelanguage.googleapis.com/v1beta"
MODEL_SEARCH: List[str] = ["gemini-2.0-flash-exp"]
MODEL_IMAGE: List[str] = ["gemini-2.0-flash-exp"]
FILTERED_MODELS: List[str] = ["gemini-1.0-pro-vision-latest", "gemini-pro-vision", "chat-bison-001", "text-bison-001", "embedding-gecko-001"]
TOOLS_CODE_EXECUTION_ENABLED: bool = False
SHOW_SEARCH_LINK: bool = True
SHOW_THINKING_PROCESS: bool = True

View File

@@ -11,6 +11,7 @@ class ModelService:
self.model_search = model_search
self.model_image = model_image
self.base_url = "https://generativelanguage.googleapis.com/v1beta"
self.filtered_models = settings.FILTERED_MODELS
def get_gemini_models(self, api_key: str) -> Optional[Dict[str, Any]]:
url = f"{self.base_url}/models?key={api_key}"
@@ -19,6 +20,16 @@ class ModelService:
response = requests.get(url)
if response.status_code == 200:
gemini_models = response.json()
filtered_models_list = []
for model in gemini_models.get("models", []):
model_id = model["name"].split("/")[-1]
if model_id not in self.filtered_models:
filtered_models_list.append(model)
else:
logger.info(f"Filtered out model: {model_id}")
gemini_models["models"] = filtered_models_list
return gemini_models
else:
logger.error(f"Error: {response.status_code}")
@@ -81,4 +92,4 @@ class ModelService:
model = model[:-6]
return model in settings.MODEL_IMAGE
return True
return True