refactor: config llm env

This commit is contained in:
lilong.129
2025-04-29 22:33:18 +08:00
parent 429bfe3986
commit 3ffa5d96d2
6 changed files with 68 additions and 151 deletions

View File

@@ -1 +1 @@
v5.0.0-beta-2504292203
v5.0.0-beta-2504292233

View File

@@ -4,9 +4,11 @@ import (
"context"
"os"
"github.com/rs/zerolog/log"
"github.com/cloudwego/eino-ext/components/model/openai"
"github.com/httprunner/httprunner/v5/code"
"github.com/httprunner/httprunner/v5/internal/config"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
)
func NewAIService(opts ...AIServiceOption) *AIServices {
@@ -105,3 +107,51 @@ func (c *combinedLLMService) Call(opts *PlanningOptions) (*PlanningResult, error
// Assert delegates the assertion request to the underlying asserter service
// and returns its response unchanged.
func (c *combinedLLMService) Assert(opts *AssertOptions) (*AssertionResponse, error) {
	return c.asserter.Assert(opts)
}
// LLM model config env variables. All three are required by
// GetOpenAIModelConfig; an empty value is treated the same as unset.
const (
	// EnvOpenAIBaseURL names the env var holding the OpenAI-compatible API base URL.
	EnvOpenAIBaseURL = "OPENAI_BASE_URL"
	// EnvOpenAIAPIKey names the env var holding the API key used for authentication.
	EnvOpenAIAPIKey = "OPENAI_API_KEY"
	// EnvModelName names the env var selecting which LLM model to call.
	EnvModelName = "LLM_MODEL_NAME"
)
// GetOpenAIModelConfig builds an OpenAI chat model configuration from
// environment variables. It first loads the .env file via config.LoadEnv,
// then requires OPENAI_BASE_URL, OPENAI_API_KEY and LLM_MODEL_NAME to be
// set and non-empty; a missing variable yields a code.LLMEnvMissedError.
// The returned config uses the package-level defaultTimeout and a fixed
// low temperature.
func GetOpenAIModelConfig() (*openai.ChatModelConfig, error) {
	if err := config.LoadEnv(); err != nil {
		return nil, errors.Wrap(code.LoadEnvError, err.Error())
	}

	openaiBaseURL, err := requireEnv(EnvOpenAIBaseURL)
	if err != nil {
		return nil, err
	}
	openaiAPIKey, err := requireEnv(EnvOpenAIAPIKey)
	if err != nil {
		return nil, err
	}
	modelName, err := requireEnv(EnvModelName)
	if err != nil {
		return nil, err
	}

	// low temperature keeps model responses close to deterministic
	temperature := float32(0.01)
	modelConfig := &openai.ChatModelConfig{
		BaseURL:     openaiBaseURL,
		APIKey:      openaiAPIKey,
		Model:       modelName,
		Timeout:     defaultTimeout,
		Temperature: &temperature,
	}

	// log config info with the API key masked
	log.Info().Str("model", modelConfig.Model).
		Str("baseURL", modelConfig.BaseURL).
		Str("apiKey", maskAPIKey(modelConfig.APIKey)).
		Str("timeout", defaultTimeout.String()).
		Msg("get model config")
	return modelConfig, nil
}

// requireEnv returns the value of the named environment variable, or a
// wrapped code.LLMEnvMissedError when it is unset or empty.
func requireEnv(name string) (string, error) {
	value := os.Getenv(name)
	if value == "" {
		return "", errors.Wrapf(code.LLMEnvMissedError,
			"env %s missed", name)
	}
	return value, nil
}

View File

@@ -1,55 +0,0 @@
package ai
import (
"os"
"github.com/cloudwego/eino-ext/components/model/ark"
"github.com/httprunner/httprunner/v5/code"
"github.com/httprunner/httprunner/v5/internal/config"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
)
// Volcengine Ark model config env variables.
const (
	// EnvArkBaseURL names the env var for the Ark API base URL.
	// NOTE(review): GetArkModelConfig does not reject an empty value —
	// presumably the SDK supplies a default endpoint; confirm.
	EnvArkBaseURL = "ARK_BASE_URL"
	// EnvArkAPIKey names the env var for the Ark API key (required).
	EnvArkAPIKey = "ARK_API_KEY"
	// EnvArkModelID names the env var selecting the Ark model endpoint ID (required).
	EnvArkModelID = "ARK_MODEL_ID"
)
// GetArkModelConfig assembles a Volcengine Ark chat model configuration
// from environment variables after loading the .env file. ARK_API_KEY and
// ARK_MODEL_ID are mandatory and produce a code.LLMEnvMissedError when
// empty; ARK_BASE_URL is passed through as-is (possibly empty).
func GetArkModelConfig() (*ark.ChatModelConfig, error) {
	if err := config.LoadEnv(); err != nil {
		return nil, errors.Wrap(code.LoadEnvError, err.Error())
	}

	baseURL := os.Getenv(EnvArkBaseURL)
	apiKey := os.Getenv(EnvArkAPIKey)
	if apiKey == "" {
		return nil, errors.Wrapf(code.LLMEnvMissedError,
			"env %s missed", EnvArkAPIKey)
	}
	model := os.Getenv(EnvArkModelID)
	if model == "" {
		return nil, errors.Wrapf(code.LLMEnvMissedError,
			"env %s missed", EnvArkModelID)
	}

	timeout := defaultTimeout
	// https://www.volcengine.com/docs/82379/1494384?redirect=1
	temperature := float32(0.01)
	cfg := &ark.ChatModelConfig{
		BaseURL:     baseURL,
		APIKey:      apiKey,
		Model:       model,
		Timeout:     &timeout,
		Temperature: &temperature,
	}

	// log the effective configuration, masking the API key
	log.Info().Str("model", cfg.Model).
		Str("baseURL", cfg.BaseURL).
		Str("apiKey", maskAPIKey(cfg.APIKey)).
		Str("timeout", defaultTimeout.String()).
		Msg("get model config")
	return cfg, nil
}

View File

@@ -1,58 +0,0 @@
package ai
import (
"os"
"github.com/cloudwego/eino-ext/components/model/openai"
"github.com/httprunner/httprunner/v5/code"
"github.com/httprunner/httprunner/v5/internal/config"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
)
// LLM model config env variables read by GetOpenAIModelConfig; each is
// required and an empty value counts as missing.
const (
	// EnvOpenAIBaseURL names the env var for the OpenAI-compatible API base URL.
	EnvOpenAIBaseURL = "OPENAI_BASE_URL"
	// EnvOpenAIAPIKey names the env var for the API key.
	EnvOpenAIAPIKey = "OPENAI_API_KEY"
	// EnvModelName names the env var selecting the LLM model.
	EnvModelName = "LLM_MODEL_NAME"
)
// GetOpenAIModelConfig reads the OpenAI-compatible model settings from the
// environment (after loading the .env file via config.LoadEnv) and returns
// a ready-to-use chat model configuration. OPENAI_BASE_URL, OPENAI_API_KEY
// and LLM_MODEL_NAME are all mandatory; a missing one yields a wrapped
// code.LLMEnvMissedError.
func GetOpenAIModelConfig() (*openai.ChatModelConfig, error) {
	if err := config.LoadEnv(); err != nil {
		return nil, errors.Wrap(code.LoadEnvError, err.Error())
	}

	baseURL := os.Getenv(EnvOpenAIBaseURL)
	if baseURL == "" {
		return nil, errors.Wrapf(code.LLMEnvMissedError,
			"env %s missed", EnvOpenAIBaseURL)
	}
	apiKey := os.Getenv(EnvOpenAIAPIKey)
	if apiKey == "" {
		return nil, errors.Wrapf(code.LLMEnvMissedError,
			"env %s missed", EnvOpenAIAPIKey)
	}
	model := os.Getenv(EnvModelName)
	if model == "" {
		return nil, errors.Wrapf(code.LLMEnvMissedError,
			"env %s missed", EnvModelName)
	}

	// fixed low temperature for near-deterministic responses
	temperature := float32(0.01)
	cfg := &openai.ChatModelConfig{
		BaseURL:     baseURL,
		APIKey:      apiKey,
		Model:       model,
		Timeout:     defaultTimeout,
		Temperature: &temperature,
	}

	// log the effective configuration, masking the API key
	log.Info().Str("model", cfg.Model).
		Str("baseURL", cfg.BaseURL).
		Str("apiKey", maskAPIKey(cfg.APIKey)).
		Str("timeout", defaultTimeout.String()).
		Msg("get model config")
	return cfg, nil
}

View File

@@ -7,7 +7,6 @@ import (
"strings"
"time"
"github.com/cloudwego/eino-ext/components/model/ark"
"github.com/cloudwego/eino-ext/components/model/openai"
openai2 "github.com/cloudwego/eino-ext/libs/acl/openai"
"github.com/cloudwego/eino/components/model"
@@ -55,24 +54,17 @@ func NewAsserter(ctx context.Context, modelType LLMServiceType) (*Asserter, erro
systemPrompt: defaultAssertionPrompt,
}
config, err := GetOpenAIModelConfig()
if err != nil {
return nil, err
}
switch modelType {
case LLMServiceTypeUITARS:
config, err := GetArkModelConfig()
if err != nil {
return nil, err
}
asserter.systemPrompt += "\n\n" + uiTarsAssertionResponseFormat
asserter.model, err = ark.NewChatModel(ctx, config)
if err != nil {
return nil, err
}
case LLMServiceTypeQwenVL:
asserter.systemPrompt += "\n\n" + defaultAssertionResponseJsonFormat
case LLMServiceTypeGPT4Vision, LLMServiceTypeGPT4o:
config, err := GetOpenAIModelConfig()
if err != nil {
return nil, err
}
// define output format
type OutputFormat struct {
Thought string `json:"thought"`
@@ -94,27 +86,15 @@ func NewAsserter(ctx context.Context, modelType LLMServiceType) (*Asserter, erro
Strict: false,
},
}
asserter.model, err = openai.NewChatModel(ctx, config)
if err != nil {
return nil, err
}
case LLMServiceTypeQwenVL:
config, err := GetOpenAIModelConfig()
if err != nil {
return nil, err
}
asserter.systemPrompt += "\n\n" + defaultAssertionResponseJsonFormat
asserter.model, err = openai.NewChatModel(ctx, config)
if err != nil {
return nil, err
}
default:
return nil, errors.New("not supported model type for asserter")
}
asserter.model, err = openai.NewChatModel(ctx, config)
if err != nil {
return nil, err
}
return asserter, nil
}

View File

@@ -9,7 +9,7 @@ import (
"strings"
"time"
"github.com/cloudwego/eino-ext/components/model/ark"
"github.com/cloudwego/eino-ext/components/model/openai"
"github.com/cloudwego/eino/components/model"
"github.com/cloudwego/eino/schema"
"github.com/httprunner/httprunner/v5/code"
@@ -20,11 +20,11 @@ import (
)
func NewUITarsPlanner(ctx context.Context) (*UITarsPlanner, error) {
config, err := GetArkModelConfig()
config, err := GetOpenAIModelConfig()
if err != nil {
return nil, err
}
chatModel, err := ark.NewChatModel(ctx, config)
chatModel, err := openai.NewChatModel(ctx, config)
if err != nil {
return nil, err
}