refactor: LoadEnv

This commit is contained in:
lilong.129
2025-03-31 14:33:01 +08:00
parent 379f0de4ea
commit 563015c55a
9 changed files with 260 additions and 284 deletions

View File

@@ -53,11 +53,6 @@ const (
)
func WithLLMService(service LLMServiceType) AIServiceOption {
if err := checkEnvLLM(); err != nil {
log.Error().Err(err).Msg("check LLM env failed")
os.Exit(code.GetErrorCode(err))
}
return func(opts *AIServices) {
if service == LLMServiceTypeGPT4o {
var err error

View File

@@ -15,6 +15,7 @@ import (
"github.com/httprunner/httprunner/v5/code"
"github.com/httprunner/httprunner/v5/internal/builtin"
"github.com/httprunner/httprunner/v5/internal/config"
"github.com/httprunner/httprunner/v5/internal/json"
"github.com/httprunner/httprunner/v5/uixt/option"
)
@@ -231,6 +232,9 @@ func (s *vedemCVService) ReadFromBuffer(imageBuf *bytes.Buffer, opts ...option.A
}
func checkEnvCV() error {
if err := config.LoadEnv(); err != nil {
return errors.Wrap(code.LoadEnvError, err.Error())
}
vedemImageURL := os.Getenv("VEDEM_IMAGE_URL")
if vedemImageURL == "" {
return errors.Wrap(code.CVEnvMissedError, "VEDEM_IMAGE_URL missed")

View File

@@ -1,240 +0,0 @@
package ai
import (
"encoding/json"
"fmt"
"net/http"
"os"
"path/filepath"
"strconv"
"sync"
"time"
"github.com/cloudwego/eino-ext/components/model/openai"
"github.com/httprunner/httprunner/v5/code"
"github.com/joho/godotenv"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
)
// defaultTimeout bounds every outbound HTTP request issued by the
// OpenAI chat-model client built in GetModelConfig.
const (
	defaultTimeout = 60 * time.Second
)
// OpenAIInitConfig holds optional client settings decoded from the
// OPENAI_INIT_CONFIG_JSON environment variable.
type OpenAIInitConfig struct {
	// ReportURL is read from the "REPORT_SERVER_URL" JSON field.
	ReportURL string `json:"REPORT_SERVER_URL"`
	// Headers ("defaultHeaders") are attached to every outgoing HTTP request.
	Headers map[string]string `json:"defaultHeaders"`
}
// Names of the environment variables that configure the LLM client.
const (
	EnvOpenAIBaseURL        = "OPENAI_BASE_URL"         // OpenAI-compatible API base URL (required)
	EnvOpenAIAPIKey         = "OPENAI_API_KEY"          // API key (required)
	EnvModelName            = "LLM_MODEL_NAME"          // model identifier (required)
	EnvOpenAIInitConfigJSON = "OPENAI_INIT_CONFIG_JSON" // optional JSON with extra client settings
)
var once sync.Once
// loadEnv loads environment variables from a .env file, searching from the
// current working directory upward to the filesystem root. The first .env
// found is applied with godotenv.Overload, overriding any variables already
// present in the environment. The search runs at most once per process.
func loadEnv() {
	once.Do(func() {
		dir, err := os.Getwd()
		if err != nil {
			panic(err)
		}
		for {
			candidate := filepath.Join(dir, ".env")
			if _, statErr := os.Stat(candidate); statErr == nil {
				// Found a .env file; apply it, overriding existing variables.
				if overloadErr := godotenv.Overload(candidate); overloadErr != nil {
					log.Fatal().Err(overloadErr).
						Str("path", candidate).Msg("overload env file failed")
				}
				log.Info().Str("path", candidate).Msg("overload env success")
				return
			}
			parent := filepath.Dir(dir)
			if parent == dir {
				// Reached the filesystem root without finding a .env file.
				log.Info().Msg("no .env file found from current directory to root")
				return
			}
			dir = parent
		}
	})
}
// checkEnvLLM verifies that all environment variables required by the LLM
// client are present, loading the .env file first (see loadEnv). It returns
// a code.LLMEnvMissedError-wrapped error naming the first missing variable,
// or nil when the environment is complete.
func checkEnvLLM() error {
	loadEnv()
	// Use the shared Env* constants instead of repeating the raw variable
	// names, so they stay consistent with GetModelConfig. The constant
	// values are identical to the previous literals, so log keys and error
	// messages are unchanged.
	openaiBaseURL := os.Getenv(EnvOpenAIBaseURL)
	if openaiBaseURL == "" {
		return errors.Wrap(code.LLMEnvMissedError, EnvOpenAIBaseURL+" missed")
	}
	log.Info().Str(EnvOpenAIBaseURL, openaiBaseURL).Msg("get env")
	openaiAPIKey := os.Getenv(EnvOpenAIAPIKey)
	if openaiAPIKey == "" {
		return errors.Wrap(code.LLMEnvMissedError, EnvOpenAIAPIKey+" missed")
	}
	// Never log the raw API key.
	log.Info().Str(EnvOpenAIAPIKey, maskAPIKey(openaiAPIKey)).Msg("get env")
	modelName := os.Getenv(EnvModelName)
	if modelName == "" {
		return errors.Wrap(code.LLMEnvMissedError, EnvModelName+" missed")
	}
	log.Info().Str(EnvModelName, modelName).Msg("get env")
	return nil
}
// GetEnvConfig returns the value of the named environment variable,
// or "" when it is unset.
func GetEnvConfig(key string) string {
	value := os.Getenv(key)
	return value
}

// GetEnvConfigInJSON decodes the named environment variable as a JSON
// object. It returns (nil, nil) when the variable is unset or empty.
func GetEnvConfigInJSON(key string) (map[string]interface{}, error) {
	raw := GetEnvConfig(key)
	if raw == "" {
		return nil, nil
	}
	var result map[string]interface{}
	err := json.Unmarshal([]byte(raw), &result)
	if err != nil {
		return nil, err
	}
	return result, nil
}

// GetEnvConfigInBool interprets the named environment variable as a boolean.
// Unset, empty, or unparsable values yield false.
func GetEnvConfigInBool(key string) bool {
	raw := GetEnvConfig(key)
	if raw == "" {
		return false
	}
	parsed, _ := strconv.ParseBool(raw)
	return parsed
}

// GetEnvConfigOrDefault returns the named environment variable's value,
// falling back to defaultValue when it is unset or empty.
func GetEnvConfigOrDefault(key, defaultValue string) string {
	if value := GetEnvConfig(key); value != "" {
		return value
	}
	return defaultValue
}

// GetEnvConfigInInt interprets the named environment variable as an int.
// Unset, empty, or unparsable values yield defaultValue.
func GetEnvConfigInInt(key string, defaultValue int) int {
	raw := GetEnvConfig(key)
	if raw == "" {
		return defaultValue
	}
	parsed, err := strconv.Atoi(raw)
	if err != nil {
		return defaultValue
	}
	return parsed
}
// CustomTransport is an http.RoundTripper decorator that stamps a fixed
// set of headers onto every outgoing request before delegating to the
// wrapped Transport.
type CustomTransport struct {
	Transport http.RoundTripper
	Headers   map[string]string
}

// RoundTrip implements http.RoundTripper: it sets each configured header
// on req, then forwards the request to the underlying transport.
func (c *CustomTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	for name, val := range c.Headers {
		req.Header.Set(name, val)
	}
	return c.Transport.RoundTrip(req)
}
// OutputFormat is the structured response expected back from the LLM:
// a reasoning "thought", the resulting "action", and an optional error.
type OutputFormat struct {
	Thought string `json:"thought"`
	Action  string `json:"action"`
	Error   string `json:"error,omitempty"`
}
// GetModelConfig assembles the OpenAI chat-model configuration from
// environment variables, loading .env first (see loadEnv). It returns an
// error naming the first required variable that is missing.
func GetModelConfig() (*openai.ChatModelConfig, error) {
	loadEnv()

	// Optional JSON blob carrying extra client settings (default headers).
	initCfg := &OpenAIInitConfig{Headers: make(map[string]string)}
	if raw := GetEnvConfig(EnvOpenAIInitConfigJSON); raw != "" {
		if err := json.Unmarshal([]byte(raw), initCfg); err != nil {
			return nil, err
		}
	}

	cfg := &openai.ChatModelConfig{
		HTTPClient: &http.Client{
			Timeout: defaultTimeout,
			Transport: &CustomTransport{
				Transport: http.DefaultTransport,
				Headers:   initCfg.Headers,
			},
		},
		// TODO: set structured response format (JSON schema for OutputFormat), see
		// https://github.com/cloudwego/eino-ext/blob/main/components/model/openai/examples/structured/structured.go
	}

	cfg.BaseURL = GetEnvConfig(EnvOpenAIBaseURL)
	if cfg.BaseURL == "" {
		return nil, fmt.Errorf("miss env %s", EnvOpenAIBaseURL)
	}
	cfg.APIKey = GetEnvConfig(EnvOpenAIAPIKey)
	if cfg.APIKey == "" {
		return nil, fmt.Errorf("miss env %s", EnvOpenAIAPIKey)
	}
	cfg.Model = GetEnvConfig(EnvModelName)
	if cfg.Model == "" {
		return nil, fmt.Errorf("miss env %s", EnvModelName)
	}

	// Log the resolved configuration; the API key is masked.
	log.Info().Str("model", cfg.Model).
		Str("baseURL", cfg.BaseURL).
		Str("apiKey", maskAPIKey(cfg.APIKey)).
		Str("timeout", defaultTimeout.String()).
		Msg("get model config")
	return cfg, nil
}
// maskAPIKey redacts an API key for logging. Keys of 8 characters or
// fewer collapse to "******"; longer keys keep only their first and
// last four characters.
func maskAPIKey(key string) string {
	if len(key) > 8 {
		return key[:4] + "******" + key[len(key)-4:]
	}
	return "******"
}

View File

@@ -1,7 +1,19 @@
package ai
import (
"fmt"
"net/http"
"os"
"time"
"github.com/cloudwego/eino-ext/components/model/openai"
"github.com/cloudwego/eino/schema"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
"github.com/httprunner/httprunner/v5/code"
"github.com/httprunner/httprunner/v5/internal/config"
"github.com/httprunner/httprunner/v5/internal/json"
"github.com/httprunner/httprunner/v5/uixt/types"
)
@@ -58,3 +70,142 @@ const (
ActionTypeType ActionType = "type"
ActionTypeScroll ActionType = "scroll"
)
// defaultTimeout bounds every outbound HTTP request issued by the
// OpenAI chat-model client built in GetModelConfig.
const (
	defaultTimeout = 60 * time.Second
)
// OpenAIInitConfig holds optional client settings decoded from the
// OPENAI_INIT_CONFIG_JSON environment variable.
type OpenAIInitConfig struct {
	// ReportURL is read from the "REPORT_SERVER_URL" JSON field.
	ReportURL string `json:"REPORT_SERVER_URL"`
	// Headers ("defaultHeaders") are attached to every outgoing HTTP request.
	Headers map[string]string `json:"defaultHeaders"`
}
// Names of the environment variables that configure the LLM client.
const (
	EnvOpenAIBaseURL        = "OPENAI_BASE_URL"         // OpenAI-compatible API base URL (required)
	EnvOpenAIAPIKey         = "OPENAI_API_KEY"          // API key (required)
	EnvModelName            = "LLM_MODEL_NAME"          // model identifier (required)
	EnvOpenAIInitConfigJSON = "OPENAI_INIT_CONFIG_JSON" // optional JSON with extra client settings
)
// checkEnvLLM loads the environment via config.LoadEnv and verifies that
// all variables required by the LLM client are present. It returns a
// code.LoadEnvError-wrapped error when loading fails, a
// code.LLMEnvMissedError-wrapped error naming the first missing variable,
// or nil when the environment is complete.
func checkEnvLLM() error {
	if err := config.LoadEnv(); err != nil {
		return errors.Wrap(code.LoadEnvError, err.Error())
	}
	// Use the shared Env* constants instead of repeating the raw variable
	// names, so they stay consistent with GetModelConfig. The constant
	// values are identical to the previous literals, so log keys and error
	// messages are unchanged.
	openaiBaseURL := os.Getenv(EnvOpenAIBaseURL)
	if openaiBaseURL == "" {
		return errors.Wrap(code.LLMEnvMissedError, EnvOpenAIBaseURL+" missed")
	}
	log.Info().Str(EnvOpenAIBaseURL, openaiBaseURL).Msg("get env")
	openaiAPIKey := os.Getenv(EnvOpenAIAPIKey)
	if openaiAPIKey == "" {
		return errors.Wrap(code.LLMEnvMissedError, EnvOpenAIAPIKey+" missed")
	}
	// Never log the raw API key.
	log.Info().Str(EnvOpenAIAPIKey, maskAPIKey(openaiAPIKey)).Msg("get env")
	modelName := os.Getenv(EnvModelName)
	if modelName == "" {
		return errors.Wrap(code.LLMEnvMissedError, EnvModelName+" missed")
	}
	log.Info().Str(EnvModelName, modelName).Msg("get env")
	return nil
}
// CustomTransport is an http.RoundTripper decorator that stamps a fixed
// set of headers onto every outgoing request before delegating to the
// wrapped Transport.
type CustomTransport struct {
	Transport http.RoundTripper
	Headers   map[string]string
}

// RoundTrip implements http.RoundTripper: it sets each configured header
// on req, then forwards the request to the underlying transport.
func (c *CustomTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	for name, val := range c.Headers {
		req.Header.Set(name, val)
	}
	return c.Transport.RoundTrip(req)
}
// OutputFormat is the structured response expected back from the LLM:
// a reasoning "thought", the resulting "action", and an optional error.
type OutputFormat struct {
	Thought string `json:"thought"`
	Action  string `json:"action"`
	Error   string `json:"error,omitempty"`
}
// GetModelConfig assembles the OpenAI chat-model configuration from
// environment variables after validating them via checkEnvLLM. It returns
// an error when the environment is incomplete or the optional JSON config
// cannot be decoded.
func GetModelConfig() (*openai.ChatModelConfig, error) {
	if err := checkEnvLLM(); err != nil {
		log.Error().Err(err).Msg("check LLM env failed")
		return nil, err
	}

	// Optional JSON blob carrying extra client settings (default headers).
	initCfg := &OpenAIInitConfig{Headers: make(map[string]string)}
	if raw := config.GetEnvConfig(EnvOpenAIInitConfigJSON); raw != "" {
		if err := json.Unmarshal([]byte(raw), initCfg); err != nil {
			return nil, err
		}
	}

	cfg := &openai.ChatModelConfig{
		HTTPClient: &http.Client{
			Timeout: defaultTimeout,
			Transport: &CustomTransport{
				Transport: http.DefaultTransport,
				Headers:   initCfg.Headers,
			},
		},
		// TODO: set structured response format (JSON schema for OutputFormat), see
		// https://github.com/cloudwego/eino-ext/blob/main/components/model/openai/examples/structured/structured.go
	}

	cfg.BaseURL = config.GetEnvConfig(EnvOpenAIBaseURL)
	if cfg.BaseURL == "" {
		return nil, fmt.Errorf("miss env %s", EnvOpenAIBaseURL)
	}
	cfg.APIKey = config.GetEnvConfig(EnvOpenAIAPIKey)
	if cfg.APIKey == "" {
		return nil, fmt.Errorf("miss env %s", EnvOpenAIAPIKey)
	}
	cfg.Model = config.GetEnvConfig(EnvModelName)
	if cfg.Model == "" {
		return nil, fmt.Errorf("miss env %s", EnvModelName)
	}

	// Log the resolved configuration; the API key is masked.
	log.Info().Str("model", cfg.Model).
		Str("baseURL", cfg.BaseURL).
		Str("apiKey", maskAPIKey(cfg.APIKey)).
		Str("timeout", defaultTimeout.String()).
		Msg("get model config")
	return cfg, nil
}
// maskAPIKey redacts an API key for logging. Keys of 8 characters or
// fewer collapse to "******"; longer keys keep only their first and
// last four characters.
func maskAPIKey(key string) string {
	n := len(key)
	if n <= 8 {
		return "******"
	}
	return key[:4] + "******" + key[n-4:]
}