wechatbot first commit 🚀
This commit is contained in:
commit
d10adb381c
4
.gitignore
vendored
Normal file
4
.gitignore
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
.idea
|
||||
local/config.yaml
|
||||
token.json
|
||||
local
|
2
README.md
Normal file
2
README.md
Normal file
@ -0,0 +1,2 @@
|
||||
# wechatgpt
|
||||

|
41
bootstrap/bootstrap.go
Normal file
41
bootstrap/bootstrap.go
Normal file
@ -0,0 +1,41 @@
|
||||
package bootstrap
|
||||
|
||||
import (
|
||||
"github.com/eatmoreapple/openwechat"
|
||||
"github.com/wechatgpt/wechatbot/handler"
|
||||
"log"
|
||||
)
|
||||
|
||||
func Run() {
|
||||
bot := openwechat.DefaultBot(openwechat.Desktop)
|
||||
bot.MessageHandler = handler.Handler
|
||||
bot.UUIDCallback = openwechat.PrintlnQrcodeUrl
|
||||
|
||||
reloadStorage := openwechat.NewJsonFileHotReloadStorage("token.json")
|
||||
err := bot.HotLogin(reloadStorage)
|
||||
if err != nil {
|
||||
if err = bot.Login(); err != nil {
|
||||
log.Fatal(err)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// 获取登陆的用户
|
||||
self, err := bot.GetCurrentUser()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
return
|
||||
}
|
||||
|
||||
friends, err := self.Friends()
|
||||
log.Println(friends, err)
|
||||
groups, err := self.Groups()
|
||||
log.Println(groups, err)
|
||||
|
||||
err = bot.Block()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
return
|
||||
}
|
||||
|
||||
}
|
35
config/config.go
Normal file
35
config/config.go
Normal file
@ -0,0 +1,35 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"github.com/spf13/viper"
|
||||
)
|
||||
|
||||
// config is the process-wide configuration, populated by LoadConfig and read
// back through GetConfig. It stays nil until LoadConfig succeeds.
var config *Config

// Config is the root configuration structure decoded from config.yaml.
type Config struct {
	ChatGpt ChatGptConfig `json:"chatgpt"`
}

// ChatGptConfig holds the ChatGPT-related settings.
type ChatGptConfig struct {
	// Keyword is the trigger word that must appear in a group message for
	// the bot to respond.
	Keyword string `json:"keyword,omitempty"`
	// Token is the OpenAI API token sent as a Bearer credential.
	// Bug fix: the tag was written twice (`json:"token,omitempty"
	// json:"token,omitempty"`), which `go vet` rejects as a duplicate
	// struct-tag key; a tag must declare each key exactly once.
	Token string `json:"token,omitempty"`
}
|
||||
|
||||
func LoadConfig() error {
|
||||
viper.SetConfigName("config")
|
||||
viper.SetConfigType("yaml")
|
||||
viper.AddConfigPath("./local")
|
||||
viper.AddConfigPath("./config")
|
||||
|
||||
if err := viper.ReadInConfig(); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := viper.Unmarshal(&config); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetConfig returns the process-wide configuration populated by LoadConfig,
// or nil if LoadConfig has not been called successfully.
func GetConfig() *Config {
	return config
}
|
3
config/config.yaml.example
Normal file
3
config/config.yaml.example
Normal file
@ -0,0 +1,3 @@
|
||||
chatgpt:
|
||||
keyword: @chatgpt
|
||||
token: your token
|
8
go.mod
Normal file
8
go.mod
Normal file
@ -0,0 +1,8 @@
|
||||
module github.com/wechatgpt/wechatbot
|
||||
|
||||
go 1.16
|
||||
|
||||
require (
|
||||
github.com/eatmoreapple/openwechat v1.2.1
|
||||
github.com/spf13/viper v1.14.0
|
||||
)
|
68
handler/group_msg_handler.go
Normal file
68
handler/group_msg_handler.go
Normal file
@ -0,0 +1,68 @@
|
||||
package handler
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/eatmoreapple/openwechat"
|
||||
"github.com/wechatgpt/wechatbot/config"
|
||||
"github.com/wechatgpt/wechatbot/openai"
|
||||
"log"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Compile-time check that *GroupMessageHandler satisfies
// MessageHandlerInterface.
var _ MessageHandlerInterface = (*GroupMessageHandler)(nil)

// GroupMessageHandler handles text messages received from group chats and
// replies via the OpenAI completions API. The zero value is ready to use.
type GroupMessageHandler struct {
}
|
||||
|
||||
func (gmh *GroupMessageHandler) handle(msg *openwechat.Message) error {
|
||||
if !msg.IsText() {
|
||||
return nil
|
||||
}
|
||||
return gmh.ReplyText(msg)
|
||||
}
|
||||
|
||||
func NewGroupMessageHandler() MessageHandlerInterface {
|
||||
return &GroupMessageHandler{}
|
||||
}
|
||||
|
||||
func (gmh *GroupMessageHandler) ReplyText(msg *openwechat.Message) error {
|
||||
|
||||
sender, err := msg.Sender()
|
||||
group := openwechat.Group{User: sender}
|
||||
log.Printf("Received Group %v Text Msg : %v", group.NickName, msg.Content)
|
||||
|
||||
appConfig := config.GetConfig()
|
||||
if appConfig == nil {
|
||||
return errors.New("can not get appConfig file,please check")
|
||||
}
|
||||
|
||||
if !strings.Contains(msg.Content, appConfig.ChatGpt.Keyword) {
|
||||
return nil
|
||||
}
|
||||
splitItems := strings.Split(msg.Content, appConfig.ChatGpt.Keyword)
|
||||
if len(splitItems) < 2 {
|
||||
return nil
|
||||
}
|
||||
requestText := strings.TrimSpace(splitItems[1])
|
||||
reply, err := openai.Completions(requestText, appConfig.ChatGpt.Token)
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
text, err := msg.ReplyText(fmt.Sprintf("bot error: %s", err.Error()))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
fmt.Println(text)
|
||||
return err
|
||||
}
|
||||
|
||||
if reply != nil {
|
||||
_, err = msg.ReplyText(*reply)
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
40
handler/handler.go
Normal file
40
handler/handler.go
Normal file
@ -0,0 +1,40 @@
|
||||
package handler
|
||||
|
||||
import (
|
||||
"github.com/eatmoreapple/openwechat"
|
||||
)
|
||||
|
||||
// MessageHandlerInterface is the message-handling contract: handle is the
// unexported dispatch entry point called by the global Handler, and ReplyText
// produces the reply for a text message.
type MessageHandlerInterface interface {
	handle(*openwechat.Message) error
	ReplyText(*openwechat.Message) error
}
|
||||
|
||||
type HandlerType string
|
||||
|
||||
const (
|
||||
GroupHandler = "group"
|
||||
)
|
||||
|
||||
var handlers map[HandlerType]MessageHandlerInterface
|
||||
|
||||
func init() {
|
||||
handlers = make(map[HandlerType]MessageHandlerInterface)
|
||||
handlers[GroupHandler] = NewGroupMessageHandler()
|
||||
}
|
||||
|
||||
// Handler 全局处理入口
|
||||
func Handler(msg *openwechat.Message) {
|
||||
//if msg.IsSendBySelf() {
|
||||
// return
|
||||
//}
|
||||
//sender, err := msg.Sender()
|
||||
//if err != nil {
|
||||
// log.Println(err)
|
||||
// return
|
||||
//}
|
||||
if msg.IsSendByGroup() {
|
||||
handlers[GroupHandler].handle(msg)
|
||||
return
|
||||
}
|
||||
}
|
14
main.go
Normal file
14
main.go
Normal file
@ -0,0 +1,14 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"github.com/wechatgpt/wechatbot/bootstrap"
|
||||
"github.com/wechatgpt/wechatbot/config"
|
||||
)
|
||||
|
||||
func main() {
|
||||
err := config.LoadConfig()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
bootstrap.Run()
|
||||
}
|
115
openai/chatgpt.go
Normal file
115
openai/chatgpt.go
Normal file
@ -0,0 +1,115 @@
|
||||
package openai
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"net/http"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// ChatGPTResponseBody is the response payload returned by the OpenAI
// completions endpoint. (The original comment said 请求体 "request body" —
// the request/response labels were swapped.)
type ChatGPTResponseBody struct {
	ID      string                   `json:"id"`
	Object  string                   `json:"object"`
	Created int                      `json:"created"`
	Model   string                   `json:"model"`
	Choices []map[string]interface{} `json:"choices"` // each choice carries a "text" field with the completion
	Usage   map[string]interface{}   `json:"usage"`
}
|
||||
|
||||
// ChatGPTRequestBody is the request payload sent to the OpenAI completions
// endpoint. (The original comment said 响应体 "response body" — the
// request/response labels were swapped.)
type ChatGPTRequestBody struct {
	Model            string  `json:"model"`
	Prompt           string  `json:"prompt"`
	MaxTokens        int     `json:"max_tokens"`
	Temperature      float32 `json:"temperature"`
	TopP             int     `json:"top_p"`
	FrequencyPenalty int     `json:"frequency_penalty"`
	PresencePenalty  int     `json:"presence_penalty"`
}
|
||||
|
||||
// Completions https://api.openai.com/v1/completions
|
||||
// nodejs example
|
||||
// const { Configuration, OpenAIApi } = require("openai");
|
||||
//
|
||||
// const configuration = new Configuration({
|
||||
// apiKey: process.env.OPENAI_API_KEY,
|
||||
// });
|
||||
// const openai = new OpenAIApi(configuration);
|
||||
//
|
||||
// const response = await openai.createCompletion({
|
||||
// model: "text-davinci-003",
|
||||
// prompt: "I am a highly intelligent question answering bot. If you ask me a question that is rooted in truth, I will give you the answer. If you ask me a question that is nonsense, trickery, or has no clear answer, I will respond with \"Unknown\".\n\nQ: What is human life expectancy in the United States?\nA: Human life expectancy in the United States is 78 years.\n\nQ: Who was president of the United States in 1955?\nA: Dwight D. Eisenhower was president of the United States in 1955.\n\nQ: Which party did he belong to?\nA: He belonged to the Republican Party.\n\nQ: What is the square root of banana?\nA: Unknown\n\nQ: How does a telescope work?\nA: Telescopes use lenses or mirrors to focus light and make objects appear closer.\n\nQ: Where were the 1992 Olympics held?\nA: The 1992 Olympics were held in Barcelona, Spain.\n\nQ: How many squigs are in a bonk?\nA: Unknown\n\nQ: Where is the Valley of Kings?\nA:",
|
||||
// temperature: 0,
|
||||
// max_tokens: 100,
|
||||
// top_p: 1,
|
||||
// frequency_penalty: 0.0,
|
||||
// presence_penalty: 0.0,
|
||||
// stop: ["\n"],
|
||||
// });
|
||||
//
|
||||
// Completions sendMsg
|
||||
func Completions(msg string, token string) (*string, error) {
|
||||
requestBody := ChatGPTRequestBody{
|
||||
Model: "text-davinci-003",
|
||||
Prompt: msg,
|
||||
MaxTokens: 100,
|
||||
Temperature: 1,
|
||||
TopP: 1,
|
||||
FrequencyPenalty: 0,
|
||||
PresencePenalty: 0,
|
||||
}
|
||||
requestData, err := json.Marshal(requestBody)
|
||||
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
return nil, err
|
||||
}
|
||||
log.Printf("request openai json string : %v", string(requestData))
|
||||
req, err := http.NewRequest("POST", "https://api.openai.com/v1/completions", bytes.NewBuffer(requestData))
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
|
||||
client := &http.Client{}
|
||||
response, err := client.Do(req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer func(Body io.ReadCloser) {
|
||||
err := Body.Close()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
}(response.Body)
|
||||
|
||||
body, err := ioutil.ReadAll(response.Body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
gptResponseBody := &ChatGPTResponseBody{}
|
||||
log.Println(string(body))
|
||||
err = json.Unmarshal(body, gptResponseBody)
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
return nil, err
|
||||
}
|
||||
var reply string
|
||||
if len(gptResponseBody.Choices) > 0 {
|
||||
for _, v := range gptResponseBody.Choices {
|
||||
reply = v["text"].(string)
|
||||
break
|
||||
}
|
||||
}
|
||||
log.Printf("gpt response text: %s \n", reply)
|
||||
result := strings.TrimSpace(reply)
|
||||
return &result, nil
|
||||
}
|
Loading…
x
Reference in New Issue
Block a user