mirror of
https://github.com/httprunner/httprunner.git
synced 2026-05-07 04:42:42 +08:00
refactor: LoadEnv
This commit is contained in:
@@ -43,7 +43,7 @@ func GetConfig() *Config {
|
||||
var err error
|
||||
cfg.RootDir, err = os.Getwd()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
log.Fatal().Err(err).Msg("get current working directory failed")
|
||||
}
|
||||
|
||||
startTimeStr := cfg.StartTime.Format("20060102150405")
|
||||
|
||||
101
internal/config/env.go
Normal file
101
internal/config/env.go
Normal file
@@ -0,0 +1,101 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
|
||||
"github.com/joho/godotenv"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
// LoadEnv loads environment variables from a .env file into the process
// environment. It walks upward from the current working directory toward the
// filesystem root and stops at the first .env file found; that file's entries
// OVERRIDE any already-set environment variables (godotenv.Overload).
// Not finding any .env file is not an error.
//
// The search-and-load runs at most once per process, guarded by the
// package-level `once` (declared elsewhere in this package).
// NOTE(review): because the named return `err` is only assigned inside the
// once.Do closure, a failure is reported only to the FIRST caller; every
// later call returns nil — confirm callers do not depend on the error after
// the first invocation.
func LoadEnv() (err error) {
	once.Do(func() {
		// get current working directory as the starting point of the search
		var cwd string
		cwd, err = os.Getwd()
		if err != nil {
			log.Error().Err(err).Msg("get current working directory failed")
			return
		}

		// locate .env file from current working directory upward recursively
		envPath := cwd
		for {
			envFile := filepath.Join(envPath, ".env")
			if _, e := os.Stat(envFile); e == nil {
				// found .env file
				// override existing env variables
				err = godotenv.Overload(envFile)
				if err != nil {
					log.Error().Err(err).
						Str("path", envFile).Msg("overload env file failed")
					return
				}
				log.Info().Str("path", envFile).Msg("overload env success")
				return
			}

			// filepath.Dir of a root path returns the path itself, so
			// reaching this fixed point means the whole ancestry was searched
			parent := filepath.Dir(envPath)
			if parent == envPath {
				log.Info().Msg("no .env file found from current directory to root")
				return
			}
			envPath = parent
		}
	})
	return err
}
|
||||
|
||||
// GetEnvConfig returns the value of the environment variable key
// ("" when unset or empty).
func GetEnvConfig(key string) string {
	return os.Getenv(key)
}

// GetEnvConfigInJSON decodes the JSON object stored in the environment
// variable key into a map. It returns (nil, nil) when the variable is unset
// or empty, and an error when the value is not valid JSON.
func GetEnvConfigInJSON(key string) (map[string]interface{}, error) {
	raw := GetEnvConfig(key)
	if raw == "" {
		return nil, nil
	}

	var decoded map[string]interface{}
	if err := json.Unmarshal([]byte(raw), &decoded); err != nil {
		return nil, err
	}
	return decoded, nil
}

// GetEnvConfigInBool interprets the environment variable key as a boolean
// per strconv.ParseBool. Unset, empty, or unparsable values yield false.
func GetEnvConfigInBool(key string) bool {
	if raw := GetEnvConfig(key); raw != "" {
		parsed, _ := strconv.ParseBool(raw)
		return parsed
	}
	return false
}

// GetEnvConfigOrDefault returns the value of the environment variable key,
// falling back to defaultValue when it is unset or empty.
func GetEnvConfigOrDefault(key, defaultValue string) string {
	if v := GetEnvConfig(key); v != "" {
		return v
	}
	return defaultValue
}

// GetEnvConfigInInt interprets the environment variable key as a decimal
// integer, falling back to defaultValue when it is unset, empty, or not a
// valid integer.
func GetEnvConfigInInt(key string, defaultValue int) int {
	raw := GetEnvConfig(key)
	if raw == "" {
		return defaultValue
	}
	if n, err := strconv.Atoi(raw); err == nil {
		return n
	}
	return defaultValue
}
|
||||
@@ -1 +1 @@
|
||||
v5.0.0-beta-2503311103
|
||||
v5.0.0-beta-2503311454
|
||||
|
||||
35
loader.go
35
loader.go
@@ -106,43 +106,8 @@ func LoadFileObject(path string, structObj interface{}) (err error) {
|
||||
if err != nil {
|
||||
err = errors.Wrap(code.LoadYAMLError, err.Error())
|
||||
}
|
||||
case ".env":
|
||||
err = parseEnvContent(file, structObj)
|
||||
if err != nil {
|
||||
err = errors.Wrap(code.LoadEnvError, err.Error())
|
||||
}
|
||||
default:
|
||||
err = code.UnsupportedFileExtension
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func parseEnvContent(file []byte, obj interface{}) error {
|
||||
envMap := obj.(map[string]string)
|
||||
lines := strings.Split(string(file), "\n")
|
||||
for _, line := range lines {
|
||||
line = strings.TrimSpace(line)
|
||||
if line == "" || strings.HasPrefix(line, "#") {
|
||||
// empty line or comment line
|
||||
continue
|
||||
}
|
||||
var kv []string
|
||||
if strings.Contains(line, "=") {
|
||||
kv = strings.SplitN(line, "=", 2)
|
||||
} else if strings.Contains(line, ":") {
|
||||
kv = strings.SplitN(line, ":", 2)
|
||||
}
|
||||
if len(kv) != 2 {
|
||||
return errors.New(".env format error")
|
||||
}
|
||||
|
||||
key := strings.TrimSpace(kv[0])
|
||||
value := strings.TrimSpace(kv[1])
|
||||
envMap[key] = value
|
||||
|
||||
// set env
|
||||
log.Info().Str("key", key).Msg("set env")
|
||||
os.Setenv(key, value)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ import (
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/joho/godotenv"
|
||||
"github.com/mitchellh/mapstructure"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/rs/zerolog/log"
|
||||
@@ -108,8 +109,7 @@ func (tc *TestCaseDef) loadISteps() (*TestCase, error) {
|
||||
// load .env file
|
||||
dotEnvPath := filepath.Join(projectRootDir, ".env")
|
||||
if builtin.IsFilePathExists(dotEnvPath) {
|
||||
envVars := make(map[string]string)
|
||||
err = LoadFileObject(dotEnvPath, envVars)
|
||||
envVars, err := godotenv.Read(dotEnvPath)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "failed to load .env file")
|
||||
}
|
||||
|
||||
@@ -53,11 +53,6 @@ const (
|
||||
)
|
||||
|
||||
func WithLLMService(service LLMServiceType) AIServiceOption {
|
||||
if err := checkEnvLLM(); err != nil {
|
||||
log.Error().Err(err).Msg("check LLM env failed")
|
||||
os.Exit(code.GetErrorCode(err))
|
||||
}
|
||||
|
||||
return func(opts *AIServices) {
|
||||
if service == LLMServiceTypeGPT4o {
|
||||
var err error
|
||||
|
||||
@@ -15,6 +15,7 @@ import (
|
||||
|
||||
"github.com/httprunner/httprunner/v5/code"
|
||||
"github.com/httprunner/httprunner/v5/internal/builtin"
|
||||
"github.com/httprunner/httprunner/v5/internal/config"
|
||||
"github.com/httprunner/httprunner/v5/internal/json"
|
||||
"github.com/httprunner/httprunner/v5/uixt/option"
|
||||
)
|
||||
@@ -231,6 +232,9 @@ func (s *vedemCVService) ReadFromBuffer(imageBuf *bytes.Buffer, opts ...option.A
|
||||
}
|
||||
|
||||
func checkEnvCV() error {
|
||||
if err := config.LoadEnv(); err != nil {
|
||||
return errors.Wrap(code.LoadEnvError, err.Error())
|
||||
}
|
||||
vedemImageURL := os.Getenv("VEDEM_IMAGE_URL")
|
||||
if vedemImageURL == "" {
|
||||
return errors.Wrap(code.CVEnvMissedError, "VEDEM_IMAGE_URL missed")
|
||||
|
||||
240
uixt/ai/env.go
240
uixt/ai/env.go
@@ -1,240 +0,0 @@
|
||||
package ai
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/cloudwego/eino-ext/components/model/openai"
|
||||
"github.com/httprunner/httprunner/v5/code"
|
||||
"github.com/joho/godotenv"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
const (
|
||||
defaultTimeout = 60 * time.Second
|
||||
)
|
||||
|
||||
type OpenAIInitConfig struct {
|
||||
ReportURL string `json:"REPORT_SERVER_URL"`
|
||||
Headers map[string]string `json:"defaultHeaders"`
|
||||
}
|
||||
|
||||
const (
|
||||
EnvOpenAIBaseURL = "OPENAI_BASE_URL"
|
||||
EnvOpenAIAPIKey = "OPENAI_API_KEY"
|
||||
EnvModelName = "LLM_MODEL_NAME"
|
||||
EnvOpenAIInitConfigJSON = "OPENAI_INIT_CONFIG_JSON"
|
||||
)
|
||||
|
||||
var once sync.Once
|
||||
|
||||
// loadEnv loads environment variables from .env file
|
||||
// it will search for .env file from current working directory upward recursively
|
||||
func loadEnv() {
|
||||
once.Do(func() {
|
||||
// get current working directory
|
||||
cwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// locate .env file from current working directory upward recursively
|
||||
envPath := cwd
|
||||
for {
|
||||
envFile := filepath.Join(envPath, ".env")
|
||||
if _, err := os.Stat(envFile); err == nil {
|
||||
// found .env file
|
||||
// override existing env variables
|
||||
err = godotenv.Overload(envFile)
|
||||
if err != nil {
|
||||
log.Fatal().Err(err).
|
||||
Str("path", envFile).Msg("overload env file failed")
|
||||
}
|
||||
log.Info().Str("path", envFile).Msg("overload env success")
|
||||
return
|
||||
}
|
||||
|
||||
// reached root directory
|
||||
parent := filepath.Dir(envPath)
|
||||
if parent == envPath {
|
||||
log.Info().Msg("no .env file found from current directory to root")
|
||||
return
|
||||
}
|
||||
envPath = parent
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func checkEnvLLM() error {
|
||||
loadEnv()
|
||||
openaiBaseURL := os.Getenv("OPENAI_BASE_URL")
|
||||
if openaiBaseURL == "" {
|
||||
return errors.Wrap(code.LLMEnvMissedError, "OPENAI_BASE_URL missed")
|
||||
}
|
||||
log.Info().Str("OPENAI_BASE_URL", openaiBaseURL).Msg("get env")
|
||||
openaiAPIKey := os.Getenv("OPENAI_API_KEY")
|
||||
if openaiAPIKey == "" {
|
||||
return errors.Wrap(code.LLMEnvMissedError, "OPENAI_API_KEY missed")
|
||||
}
|
||||
log.Info().Str("OPENAI_API_KEY", maskAPIKey(openaiAPIKey)).Msg("get env")
|
||||
modelName := os.Getenv("LLM_MODEL_NAME")
|
||||
if modelName == "" {
|
||||
return errors.Wrap(code.LLMEnvMissedError, "LLM_MODEL_NAME missed")
|
||||
}
|
||||
log.Info().Str("LLM_MODEL_NAME", modelName).Msg("get env")
|
||||
return nil
|
||||
}
|
||||
|
||||
func GetEnvConfig(key string) string {
|
||||
return os.Getenv(key)
|
||||
}
|
||||
|
||||
func GetEnvConfigInJSON(key string) (map[string]interface{}, error) {
|
||||
value := GetEnvConfig(key)
|
||||
if value == "" {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
var result map[string]interface{}
|
||||
if err := json.Unmarshal([]byte(value), &result); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func GetEnvConfigInBool(key string) bool {
|
||||
value := GetEnvConfig(key)
|
||||
if value == "" {
|
||||
return false
|
||||
}
|
||||
|
||||
boolValue, _ := strconv.ParseBool(value)
|
||||
return boolValue
|
||||
}
|
||||
|
||||
// GetEnvConfigOrDefault get env config or default value
|
||||
func GetEnvConfigOrDefault(key, defaultValue string) string {
|
||||
value := GetEnvConfig(key)
|
||||
if value == "" {
|
||||
return defaultValue
|
||||
}
|
||||
return value
|
||||
}
|
||||
|
||||
func GetEnvConfigInInt(key string, defaultValue int) int {
|
||||
value := GetEnvConfig(key)
|
||||
if value == "" {
|
||||
return defaultValue
|
||||
}
|
||||
|
||||
intValue, err := strconv.Atoi(value)
|
||||
if err != nil {
|
||||
return defaultValue
|
||||
}
|
||||
return intValue
|
||||
}
|
||||
|
||||
// CustomTransport is a custom RoundTripper that adds headers to every request
|
||||
type CustomTransport struct {
|
||||
Transport http.RoundTripper
|
||||
Headers map[string]string
|
||||
}
|
||||
|
||||
// RoundTrip executes a single HTTP transaction and adds custom headers
|
||||
func (c *CustomTransport) RoundTrip(req *http.Request) (*http.Response, error) {
|
||||
for key, value := range c.Headers {
|
||||
req.Header.Set(key, value)
|
||||
}
|
||||
return c.Transport.RoundTrip(req)
|
||||
}
|
||||
|
||||
type OutputFormat struct {
|
||||
Thought string `json:"thought"`
|
||||
Action string `json:"action"`
|
||||
Error string `json:"error,omitempty"`
|
||||
}
|
||||
|
||||
// GetModelConfig get OpenAI config
|
||||
func GetModelConfig() (*openai.ChatModelConfig, error) {
|
||||
loadEnv()
|
||||
envConfig := &OpenAIInitConfig{
|
||||
Headers: make(map[string]string),
|
||||
}
|
||||
|
||||
// read from JSON config first
|
||||
jsonStr := GetEnvConfig(EnvOpenAIInitConfigJSON)
|
||||
if jsonStr != "" {
|
||||
if err := json.Unmarshal([]byte(jsonStr), envConfig); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// outputFormatSchema, err := openapi3gen.NewSchemaRefForValue(&OutputFormat{}, nil)
|
||||
// if err != nil {
|
||||
// log.Fatal().Err(err).Msg("NewSchemaRefForValue failed")
|
||||
// }
|
||||
|
||||
config := &openai.ChatModelConfig{
|
||||
HTTPClient: &http.Client{
|
||||
Timeout: defaultTimeout,
|
||||
Transport: &CustomTransport{
|
||||
Transport: http.DefaultTransport,
|
||||
Headers: envConfig.Headers,
|
||||
},
|
||||
},
|
||||
// TODO: set structured response format
|
||||
// https://github.com/cloudwego/eino-ext/blob/main/components/model/openai/examples/structured/structured.go
|
||||
// ResponseFormat: &openai2.ChatCompletionResponseFormat{
|
||||
// Type: openai2.ChatCompletionResponseFormatTypeJSONSchema,
|
||||
// JSONSchema: &openai2.ChatCompletionResponseFormatJSONSchema{
|
||||
// Name: "thought_and_action",
|
||||
// Description: "data that describes planning thought and action",
|
||||
// Schema: outputFormatSchema.Value,
|
||||
// Strict: false,
|
||||
// },
|
||||
// },
|
||||
}
|
||||
|
||||
if baseURL := GetEnvConfig(EnvOpenAIBaseURL); baseURL != "" {
|
||||
config.BaseURL = baseURL
|
||||
} else {
|
||||
return nil, fmt.Errorf("miss env %s", EnvOpenAIBaseURL)
|
||||
}
|
||||
|
||||
if apiKey := GetEnvConfig(EnvOpenAIAPIKey); apiKey != "" {
|
||||
config.APIKey = apiKey
|
||||
} else {
|
||||
return nil, fmt.Errorf("miss env %s", EnvOpenAIAPIKey)
|
||||
}
|
||||
|
||||
if modelName := GetEnvConfig(EnvModelName); modelName != "" {
|
||||
config.Model = modelName
|
||||
} else {
|
||||
return nil, fmt.Errorf("miss env %s", EnvModelName)
|
||||
}
|
||||
|
||||
// log config info
|
||||
log.Info().Str("model", config.Model).
|
||||
Str("baseURL", config.BaseURL).
|
||||
Str("apiKey", maskAPIKey(config.APIKey)).
|
||||
Str("timeout", defaultTimeout.String()).
|
||||
Msg("get model config")
|
||||
|
||||
return config, nil
|
||||
}
|
||||
|
||||
// maskAPIKey masks the API key
|
||||
func maskAPIKey(key string) string {
|
||||
if len(key) <= 8 {
|
||||
return "******"
|
||||
}
|
||||
|
||||
return key[:4] + "******" + key[len(key)-4:]
|
||||
}
|
||||
151
uixt/ai/llm.go
151
uixt/ai/llm.go
@@ -1,7 +1,19 @@
|
||||
package ai
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"github.com/cloudwego/eino-ext/components/model/openai"
|
||||
"github.com/cloudwego/eino/schema"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/rs/zerolog/log"
|
||||
|
||||
"github.com/httprunner/httprunner/v5/code"
|
||||
"github.com/httprunner/httprunner/v5/internal/config"
|
||||
"github.com/httprunner/httprunner/v5/internal/json"
|
||||
"github.com/httprunner/httprunner/v5/uixt/types"
|
||||
)
|
||||
|
||||
@@ -58,3 +70,142 @@ const (
|
||||
ActionTypeType ActionType = "type"
|
||||
ActionTypeScroll ActionType = "scroll"
|
||||
)
|
||||
|
||||
const (
	// defaultTimeout bounds every HTTP request issued by the LLM client
	// (applied to the http.Client built in GetModelConfig).
	defaultTimeout = 60 * time.Second
)
|
||||
|
||||
// OpenAIInitConfig mirrors the JSON blob accepted via the
// OPENAI_INIT_CONFIG_JSON environment variable.
type OpenAIInitConfig struct {
	// ReportURL is read from the "REPORT_SERVER_URL" field.
	// NOTE(review): not referenced in this file — confirm it is consumed elsewhere.
	ReportURL string `json:"REPORT_SERVER_URL"`
	// Headers holds default headers attached to every model HTTP request
	// (injected by CustomTransport).
	Headers map[string]string `json:"defaultHeaders"`
}
|
||||
|
||||
// Names of the environment variables that configure the LLM client.
const (
	EnvOpenAIBaseURL        = "OPENAI_BASE_URL"         // OpenAI-compatible API endpoint (required)
	EnvOpenAIAPIKey         = "OPENAI_API_KEY"          // API key (required)
	EnvModelName            = "LLM_MODEL_NAME"          // model identifier (required)
	EnvOpenAIInitConfigJSON = "OPENAI_INIT_CONFIG_JSON" // optional JSON blob with extra init settings
)
|
||||
|
||||
// checkEnvLLM verifies that the environment is ready for the LLM client:
// it loads the nearest .env file via config.LoadEnv, then requires
// OPENAI_BASE_URL, OPENAI_API_KEY and LLM_MODEL_NAME to be non-empty.
// The API key is masked before logging.
// NOTE(review): the variable-name literals duplicate the EnvOpenAIBaseURL /
// EnvOpenAIAPIKey / EnvModelName constants declared above — consider reusing
// the constants so the names cannot drift.
func checkEnvLLM() error {
	if err := config.LoadEnv(); err != nil {
		return errors.Wrap(code.LoadEnvError, err.Error())
	}
	openaiBaseURL := os.Getenv("OPENAI_BASE_URL")
	if openaiBaseURL == "" {
		return errors.Wrap(code.LLMEnvMissedError, "OPENAI_BASE_URL missed")
	}
	log.Info().Str("OPENAI_BASE_URL", openaiBaseURL).Msg("get env")
	openaiAPIKey := os.Getenv("OPENAI_API_KEY")
	if openaiAPIKey == "" {
		return errors.Wrap(code.LLMEnvMissedError, "OPENAI_API_KEY missed")
	}
	// never log the raw key
	log.Info().Str("OPENAI_API_KEY", maskAPIKey(openaiAPIKey)).Msg("get env")
	modelName := os.Getenv("LLM_MODEL_NAME")
	if modelName == "" {
		return errors.Wrap(code.LLMEnvMissedError, "LLM_MODEL_NAME missed")
	}
	log.Info().Str("LLM_MODEL_NAME", modelName).Msg("get env")
	return nil
}
||||
|
||||
// CustomTransport is a custom RoundTripper that adds headers to every request
|
||||
type CustomTransport struct {
|
||||
Transport http.RoundTripper
|
||||
Headers map[string]string
|
||||
}
|
||||
|
||||
// RoundTrip executes a single HTTP transaction and adds custom headers
|
||||
func (c *CustomTransport) RoundTrip(req *http.Request) (*http.Response, error) {
|
||||
for key, value := range c.Headers {
|
||||
req.Header.Set(key, value)
|
||||
}
|
||||
return c.Transport.RoundTrip(req)
|
||||
}
|
||||
|
||||
// OutputFormat is the structured reply expected from the LLM: the model's
// reasoning plus the action it chose, with an optional error description.
// NOTE(review): in this file it is only referenced by the commented-out
// structured response-format config in GetModelConfig — confirm it is still needed.
type OutputFormat struct {
	Thought string `json:"thought"`         // model's reasoning for the chosen action
	Action  string `json:"action"`          // action the model decided to take
	Error   string `json:"error,omitempty"` // optional error description
}
|
||||
|
||||
// GetModelConfig get OpenAI config
|
||||
func GetModelConfig() (*openai.ChatModelConfig, error) {
|
||||
if err := checkEnvLLM(); err != nil {
|
||||
log.Error().Err(err).Msg("check LLM env failed")
|
||||
return nil, err
|
||||
}
|
||||
envConfig := &OpenAIInitConfig{
|
||||
Headers: make(map[string]string),
|
||||
}
|
||||
|
||||
// read from JSON config first
|
||||
jsonStr := config.GetEnvConfig(EnvOpenAIInitConfigJSON)
|
||||
if jsonStr != "" {
|
||||
if err := json.Unmarshal([]byte(jsonStr), envConfig); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// outputFormatSchema, err := openapi3gen.NewSchemaRefForValue(&OutputFormat{}, nil)
|
||||
// if err != nil {
|
||||
// log.Fatal().Err(err).Msg("NewSchemaRefForValue failed")
|
||||
// }
|
||||
|
||||
modelConfig := &openai.ChatModelConfig{
|
||||
HTTPClient: &http.Client{
|
||||
Timeout: defaultTimeout,
|
||||
Transport: &CustomTransport{
|
||||
Transport: http.DefaultTransport,
|
||||
Headers: envConfig.Headers,
|
||||
},
|
||||
},
|
||||
// TODO: set structured response format
|
||||
// https://github.com/cloudwego/eino-ext/blob/main/components/model/openai/examples/structured/structured.go
|
||||
// ResponseFormat: &openai2.ChatCompletionResponseFormat{
|
||||
// Type: openai2.ChatCompletionResponseFormatTypeJSONSchema,
|
||||
// JSONSchema: &openai2.ChatCompletionResponseFormatJSONSchema{
|
||||
// Name: "thought_and_action",
|
||||
// Description: "data that describes planning thought and action",
|
||||
// Schema: outputFormatSchema.Value,
|
||||
// Strict: false,
|
||||
// },
|
||||
// },
|
||||
}
|
||||
|
||||
if baseURL := config.GetEnvConfig(EnvOpenAIBaseURL); baseURL != "" {
|
||||
modelConfig.BaseURL = baseURL
|
||||
} else {
|
||||
return nil, fmt.Errorf("miss env %s", EnvOpenAIBaseURL)
|
||||
}
|
||||
|
||||
if apiKey := config.GetEnvConfig(EnvOpenAIAPIKey); apiKey != "" {
|
||||
modelConfig.APIKey = apiKey
|
||||
} else {
|
||||
return nil, fmt.Errorf("miss env %s", EnvOpenAIAPIKey)
|
||||
}
|
||||
|
||||
if modelName := config.GetEnvConfig(EnvModelName); modelName != "" {
|
||||
modelConfig.Model = modelName
|
||||
} else {
|
||||
return nil, fmt.Errorf("miss env %s", EnvModelName)
|
||||
}
|
||||
|
||||
// log config info
|
||||
log.Info().Str("model", modelConfig.Model).
|
||||
Str("baseURL", modelConfig.BaseURL).
|
||||
Str("apiKey", maskAPIKey(modelConfig.APIKey)).
|
||||
Str("timeout", defaultTimeout.String()).
|
||||
Msg("get model config")
|
||||
|
||||
return modelConfig, nil
|
||||
}
|
||||
|
||||
// maskAPIKey redacts an API key for safe logging: keys longer than
// 8 characters keep their first and last 4 characters around a fixed
// mask; shorter keys are hidden entirely.
func maskAPIKey(key string) string {
	const redacted = "******"
	if n := len(key); n > 8 {
		return key[:4] + redacted + key[n-4:]
	}
	return redacted
}
|
||||
|
||||
Reference in New Issue
Block a user