diff --git a/backend/data/crawler_api.go b/backend/data/crawler_api.go
index 7a5c353..d78c8c8 100644
--- a/backend/data/crawler_api.go
+++ b/backend/data/crawler_api.go
@@ -31,9 +31,9 @@ func (c *CrawlerApi) NewCrawler(ctx context.Context, crawlerBaseInfo CrawlerBase
func (c *CrawlerApi) GetHtml(url, waitVisible string, headless bool) (string, bool) {
htmlContent := ""
- path, e := checkBrowserOnWindows()
- logger.SugaredLogger.Infof("GetHtml path:%s", path)
- if e {
+ path := getConfig().BrowserPath
+ logger.SugaredLogger.Infof("Browser path:%s", path)
+ if path != "" {
pctx, pcancel := chromedp.NewExecAllocator(
c.crawlerCtx,
chromedp.ExecPath(path),
@@ -92,14 +92,14 @@ func (c *CrawlerApi) GetHtml(url, waitVisible string, headless bool) (string, bo
func (c *CrawlerApi) GetHtmlWithNoCancel(url, waitVisible string, headless bool) (html string, ok bool, parent context.CancelFunc, child context.CancelFunc) {
htmlContent := ""
- path, e := checkBrowserOnWindows()
- logger.SugaredLogger.Infof("GetHtml path:%s", path)
+ path := getConfig().BrowserPath
+ logger.SugaredLogger.Infof("BrowserPath :%s", path)
var parentCancel context.CancelFunc
var childCancel context.CancelFunc
var pctx context.Context
var cctx context.Context
- if e {
+ if path != "" {
pctx, parentCancel = chromedp.NewExecAllocator(
c.crawlerCtx,
chromedp.ExecPath(path),
@@ -160,9 +160,9 @@ func (c *CrawlerApi) GetHtmlWithActions(actions *[]chromedp.Action, headless boo
htmlContent := ""
*actions = append(*actions, chromedp.InnerHTML("body", &htmlContent))
- path, e := checkBrowserOnWindows()
+ path := getConfig().BrowserPath
logger.SugaredLogger.Infof("GetHtmlWithActions path:%s", path)
- if e {
+ if path != "" {
pctx, pcancel := chromedp.NewExecAllocator(
c.crawlerCtx,
chromedp.ExecPath(path),
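
All three crawler entry points now share the same resolution pattern: use the configured `BrowserPath` when it is non-empty, otherwise fall back to chromedp's own browser discovery. A minimal sketch of that shape, using a hypothetical helper name (`newBrowserCtx` is not in the repo; the real functions inline this logic):

```go
package data

import (
	"context"

	"github.com/chromedp/chromedp"
)

// newBrowserCtx sketches the shared pattern: prefer the user-configured
// browser path, otherwise let chromedp locate a browser on its own.
// Illustrative only — not part of the actual change set.
func newBrowserCtx(parent context.Context, browserPath string, headless bool) (context.Context, context.CancelFunc) {
	opts := append(chromedp.DefaultExecAllocatorOptions[:],
		chromedp.Flag("headless", headless),
	)
	if browserPath != "" {
		// The configured path wins (getConfig().BrowserPath).
		opts = append(opts, chromedp.ExecPath(browserPath))
	}
	allocCtx, allocCancel := chromedp.NewExecAllocator(parent, opts...)
	ctx, ctxCancel := chromedp.NewContext(allocCtx)
	return ctx, func() { ctxCancel(); allocCancel() }
}
```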
diff --git a/backend/data/openai_api.go b/backend/data/openai_api.go
index 92b219e..98ea254 100644
--- a/backend/data/openai_api.go
+++ b/backend/data/openai_api.go
@@ -34,6 +34,7 @@ type OpenAi struct {
QuestionTemplate string `json:"question_template"`
CrawlTimeOut int64 `json:"crawl_time_out"`
KDays int64 `json:"kDays"`
+ BrowserPath string `json:"browser_path"`
}
func NewDeepSeekOpenAi(ctx context.Context) *OpenAi {
@@ -61,6 +62,7 @@ func NewDeepSeekOpenAi(ctx context.Context) *OpenAi {
QuestionTemplate: config.QuestionTemplate,
CrawlTimeOut: config.CrawlTimeOut,
KDays: config.KDays,
+ BrowserPath: config.BrowserPath,
}
}
@@ -500,10 +502,10 @@ func GetFinancialReports(stockCode string, crawlTimeOut int64) *[]string {
defer timeoutCtxCancel()
var ctx context.Context
var cancel context.CancelFunc
- path, e := checkBrowserOnWindows()
+ path := getConfig().BrowserPath
logger.SugaredLogger.Infof("GetFinancialReports path:%s", path)
- if e {
+ if path != "" {
pctx, pcancel := chromedp.NewExecAllocator(
timeoutCtx,
chromedp.ExecPath(path),
diff --git a/backend/data/openai_api_test.go b/backend/data/openai_api_test.go
index 17b7285..fe3cfb0 100644
--- a/backend/data/openai_api_test.go
+++ b/backend/data/openai_api_test.go
@@ -9,7 +9,7 @@ import (
func TestNewDeepSeekOpenAiConfig(t *testing.T) {
db.Init("../../data/stock.db")
ai := NewDeepSeekOpenAi(context.TODO())
- res := ai.NewChatStream("北京文化", "sz000802", "")
+ res := ai.NewChatStream("上海贝岭", "sh600171", "分析以上股票资金流入信息,找出适合买入的股票,给出具体操作建议")
for {
select {
case msg := <-res:
diff --git a/backend/data/settings_api.go b/backend/data/settings_api.go
index ec9ef96..b76c2b4 100644
--- a/backend/data/settings_api.go
+++ b/backend/data/settings_api.go
@@ -29,6 +29,7 @@ type Settings struct {
CrawlTimeOut int64 `json:"crawlTimeOut"`
KDays int64 `json:"kDays"`
EnableDanmu bool `json:"enableDanmu"`
+ BrowserPath string `json:"browserPath"`
}
func (receiver Settings) TableName() string {
@@ -69,6 +70,7 @@ func (s SettingsApi) UpdateConfig() string {
"crawl_time_out": s.Config.CrawlTimeOut,
"k_days": s.Config.KDays,
"enable_danmu": s.Config.EnableDanmu,
+ "browser_path": s.Config.BrowserPath,
})
} else {
logger.SugaredLogger.Infof("未找到配置,创建默认配置:%+v", s.Config)
@@ -92,6 +94,7 @@ func (s SettingsApi) UpdateConfig() string {
CrawlTimeOut: s.Config.CrawlTimeOut,
KDays: s.Config.KDays,
EnableDanmu: s.Config.EnableDanmu,
+ BrowserPath: s.Config.BrowserPath,
})
}
return "保存成功!"
@@ -111,6 +114,10 @@ func (s SettingsApi) GetConfig() *Settings {
settings.KDays = 120
}
}
+ if settings.BrowserPath == "" {
+ settings.BrowserPath, _ = CheckBrowserOnWindows()
+ }
+
return &settings
}
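
The net effect of the GetConfig change is a three-step precedence for the browser path. A rough sketch of that order, using a hypothetical helper (`resolveBrowserPath` does not exist in the repo):

```go
package data

// resolveBrowserPath sketches the precedence GetConfig now establishes:
// 1) an explicitly configured path, 2) Windows auto-detection via
// CheckBrowserOnWindows, 3) empty, which the crawlers treat as
// "let chromedp find a browser". Illustrative helper only.
func resolveBrowserPath(configured string) string {
	if configured != "" {
		return configured
	}
	if detected, ok := CheckBrowserOnWindows(); ok {
		return detected
	}
	return ""
}
```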
diff --git a/backend/data/stock_data_api.go b/backend/data/stock_data_api.go
index 83038de..791c5e6 100644
--- a/backend/data/stock_data_api.go
+++ b/backend/data/stock_data_api.go
@@ -872,9 +872,9 @@ func getSHSZStockPriceInfo(stockCode string, crawlTimeOut int64) *[]string {
defer timeoutCtxCancel()
var ctx context.Context
var cancel context.CancelFunc
- path, e := checkBrowserOnWindows()
- logger.SugaredLogger.Infof("SearchStockPriceInfo path:%s", path)
- if e {
+ path := getConfig().BrowserPath
+ logger.SugaredLogger.Infof("SearchStockPriceInfo BrowserPath:%s", path)
+ if path != "" {
pctx, pcancel := chromedp.NewExecAllocator(
timeoutCtx,
chromedp.ExecPath(path),
@@ -1042,8 +1042,8 @@ func checkChromeOnWindows() (string, bool) {
return path + "\\chrome.exe", true
}
-// checkBrowserOnWindows 在 Windows 系统上检查Edge浏览器是否安装,并返回安装路径
-func checkBrowserOnWindows() (string, bool) {
+// CheckBrowserOnWindows checks on Windows whether Chrome (falling back to Edge) is installed and returns its install path
+func CheckBrowserOnWindows() (string, bool) {
if path, ok := checkChromeOnWindows(); ok {
return path, true
}
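
The exported CheckBrowserOnWindows keeps its Chrome-then-Edge order (checkChromeOnWindows is tried first). For readers without the rest of the file, a rough sketch of that fallback shape — the paths below are common default install locations used for illustration, not necessarily what the real function probes:

```go
package data

import (
	"os"
	"path/filepath"
)

// browserFallbackSketch mirrors the Chrome-then-Edge order of
// CheckBrowserOnWindows using plain filesystem probes of default install
// locations. Illustrative only; the actual detection logic lives in
// stock_data_api.go and may differ.
func browserFallbackSketch() (string, bool) {
	candidates := []string{
		filepath.Join(os.Getenv("ProgramFiles"), `Google\Chrome\Application\chrome.exe`),
		filepath.Join(os.Getenv("ProgramFiles(x86)"), `Google\Chrome\Application\chrome.exe`),
		filepath.Join(os.Getenv("ProgramFiles(x86)"), `Microsoft\Edge\Application\msedge.exe`),
	}
	for _, p := range candidates {
		if _, err := os.Stat(p); err == nil {
			return p, true
		}
	}
	return "", false
}
```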
diff --git a/frontend/src/components/settings.vue b/frontend/src/components/settings.vue
index 62a7a3c..666e640 100644
--- a/frontend/src/components/settings.vue
+++ b/frontend/src/components/settings.vue
@@ -34,6 +34,7 @@ const formValue = ref({
kDays:30,
},
enableDanmu:false,
+ browserPath: '',
})
onMounted(()=>{
@@ -63,6 +64,7 @@ onMounted(()=>{
kDays:res.kDays,
}
formValue.value.enableDanmu = res.enableDanmu
+ formValue.value.browserPath = res.browserPath
console.log(res)
})
//message.info("加载完成")
@@ -89,7 +91,8 @@ function saveConfig(){
questionTemplate:formValue.value.openAI.questionTemplate,
crawlTimeOut:formValue.value.openAI.crawlTimeOut,
kDays:formValue.value.openAI.kDays,
- enableDanmu:formValue.value.enableDanmu
+ enableDanmu:formValue.value.enableDanmu,
+ browserPath:formValue.value.browserPath
})
//console.log("Settings",config)
@@ -161,6 +164,7 @@ function importConfig(){
kDays:config.kDays
}
formValue.value.enableDanmu = config.enableDanmu
+ formValue.value.browserPath = config.browserPath
// formRef.value.resetFields()
};
reader.readAsText(file);
@@ -205,6 +209,9 @@ window.onerror = function (event, source, lineno, colno, error) {
+
+
+
diff --git a/frontend/wailsjs/go/models.ts b/frontend/wailsjs/go/models.ts
index 0c25fec..d2c545d 100644
--- a/frontend/wailsjs/go/models.ts
+++ b/frontend/wailsjs/go/models.ts
@@ -170,6 +170,7 @@ export namespace data {
crawlTimeOut: number;
kDays: number;
enableDanmu: boolean;
+ browserPath: string;
static createFrom(source: any = {}) {
return new Settings(source);
@@ -200,6 +201,7 @@ export namespace data {
this.crawlTimeOut = source["crawlTimeOut"];
this.kDays = source["kDays"];
this.enableDanmu = source["enableDanmu"];
+ this.browserPath = source["browserPath"];
}
convertValues(a: any, classs: any, asMap: boolean = false): any {