diff --git a/backend/data/openai_api.go b/backend/data/openai_api.go index d4cf6fc..30560d8 100644 --- a/backend/data/openai_api.go +++ b/backend/data/openai_api.go @@ -26,6 +26,7 @@ type OpenAi struct { MaxTokens int `json:"max_tokens"` Temperature float64 `json:"temperature"` Prompt string `json:"prompt"` + TimeOut int `json:"time_out"` } func NewDeepSeekOpenAi() *OpenAi { @@ -37,6 +38,7 @@ func NewDeepSeekOpenAi() *OpenAi { MaxTokens: config.OpenAiMaxTokens, Temperature: config.OpenAiTemperature, Prompt: config.Prompt, + TimeOut: config.OpenAiApiTimeOut, } } @@ -206,7 +208,7 @@ func (o OpenAi) NewChatStream(stock, stockCode string) <-chan string { client.SetHeader("Authorization", "Bearer "+o.ApiKey) client.SetHeader("Content-Type", "application/json") client.SetRetryCount(3) - client.SetTimeout(1 * time.Minute) + client.SetTimeout(time.Duration(o.TimeOut) * time.Second) resp, err := client.R(). SetDoNotParseResponse(true). SetBody(map[string]interface{}{ diff --git a/backend/data/settings_api.go b/backend/data/settings_api.go index 5e9fe1d..f3e6c8a 100644 --- a/backend/data/settings_api.go +++ b/backend/data/settings_api.go @@ -21,6 +21,7 @@ type Settings struct { OpenAiModelName string `json:"openAiModelName"` OpenAiMaxTokens int `json:"openAiMaxTokens"` OpenAiTemperature float64 `json:"openAiTemperature"` + OpenAiApiTimeOut int `json:"openAiApiTimeOut"` Prompt string `json:"prompt"` CheckUpdate bool `json:"checkUpdate"` } @@ -57,6 +58,8 @@ func (s SettingsApi) UpdateConfig() string { "open_ai_temperature": s.Config.OpenAiTemperature, "tushare_token": s.Config.TushareToken, "prompt": s.Config.Prompt, + "check_update": s.Config.CheckUpdate, + "open_ai_api_time_out": s.Config.OpenAiApiTimeOut, }) } else { logger.SugaredLogger.Infof("未找到配置,创建默认配置:%+v", s.Config) @@ -74,6 +77,8 @@ func (s SettingsApi) UpdateConfig() string { OpenAiTemperature: s.Config.OpenAiTemperature, TushareToken: s.Config.TushareToken, Prompt: s.Config.Prompt, + CheckUpdate: s.Config.CheckUpdate, + OpenAiApiTimeOut: s.Config.OpenAiApiTimeOut, }) } return "保存成功!" diff --git a/frontend/src/components/settings.vue b/frontend/src/components/settings.vue index b691f29..cf5cff8 100644 --- a/frontend/src/components/settings.vue +++ b/frontend/src/components/settings.vue @@ -27,6 +27,7 @@ const formValue = ref({ temperature: 0.1, maxTokens: 1024, prompt:"", + timeout: 5 }, }) @@ -51,6 +52,7 @@ onMounted(()=>{ temperature:res.openAiTemperature, maxTokens:res.openAiMaxTokens, prompt:res.prompt, + timeout:res.openAiApiTimeOut } console.log(res) }) @@ -73,7 +75,8 @@ function saveConfig(){ openAiMaxTokens:formValue.value.openAI.maxTokens, openAiTemperature:formValue.value.openAI.temperature, tushareToken:formValue.value.tushareToken, - prompt:formValue.value.openAI.prompt + prompt:formValue.value.openAI.prompt, + openAiApiTimeOut:formValue.value.openAI.timeout }) //console.log("Settings",config) @@ -150,22 +153,25 @@ function sendTestNotice(){ - + - + + + + - - + + - + - + - + - + diff --git a/frontend/wailsjs/go/models.ts b/frontend/wailsjs/go/models.ts index 328db3b..0881055 100644 --- a/frontend/wailsjs/go/models.ts +++ b/frontend/wailsjs/go/models.ts @@ -73,6 +73,7 @@ export namespace data { openAiModelName: string; openAiMaxTokens: number; openAiTemperature: number; + openAiApiTimeOut: number; prompt: string; checkUpdate: boolean; @@ -98,6 +99,7 @@ export namespace data { this.openAiModelName = source["openAiModelName"]; this.openAiMaxTokens = source["openAiMaxTokens"]; this.openAiTemperature = source["openAiTemperature"]; + this.openAiApiTimeOut = source["openAiApiTimeOut"]; this.prompt = source["prompt"]; this.checkUpdate = source["checkUpdate"]; }