wechatgpt/openai/chatgpt.go

package openai

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"log"
	"net/http"
	"strings"
)

// ChatGPTResponseBody is the response body returned by the completions API.
type ChatGPTResponseBody struct {
	ID      string                   `json:"id"`
	Object  string                   `json:"object"`
	Created int                      `json:"created"`
	Model   string                   `json:"model"`
	Choices []map[string]interface{} `json:"choices"`
	Usage   map[string]interface{}   `json:"usage"`
}
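
// For reference, a successful completion response roughly takes the JSON
// shape below (an illustrative sketch derived from the struct tags above;
// field values are placeholders):
//
//	{
//	  "id": "cmpl-...",
//	  "object": "text_completion",
//	  "created": 1670000000,
//	  "model": "text-davinci-003",
//	  "choices": [{"text": "...", "index": 0, "finish_reason": "stop"}],
//	  "usage": {"prompt_tokens": 5, "completion_tokens": 7, "total_tokens": 12}
//	}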

// ChatGPTRequestBody is the request body sent to the completions API.
type ChatGPTRequestBody struct {
	Model            string  `json:"model"`
	Prompt           string  `json:"prompt"`
	MaxTokens        int     `json:"max_tokens"`
	Temperature      float32 `json:"temperature"`
	TopP             int     `json:"top_p"`
	FrequencyPenalty int     `json:"frequency_penalty"`
	PresencePenalty  int     `json:"presence_penalty"`
}
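
// With the defaults used by Completions below, the marshalled request body
// looks roughly like this (illustrative sketch; the prompt is elided):
//
//	{
//	  "model": "text-davinci-003",
//	  "prompt": "...",
//	  "max_tokens": 4000,
//	  "temperature": 0.7,
//	  "top_p": 1,
//	  "frequency_penalty": 0,
//	  "presence_penalty": 0
//	}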

// Completions sends msg as the prompt to https://api.openai.com/v1/completions
// and returns the generated reply.
//
// Node.js example:
//
//	const { Configuration, OpenAIApi } = require("openai");
//
//	const configuration = new Configuration({
//	  apiKey: process.env.OPENAI_API_KEY,
//	});
//	const openai = new OpenAIApi(configuration);
//
//	const response = await openai.createCompletion({
//	  model: "text-davinci-003",
//	  prompt: "I am a highly intelligent question answering bot. If you ask me a question that is rooted in truth, I will give you the answer. If you ask me a question that is nonsense, trickery, or has no clear answer, I will respond with \"Unknown\".\n\nQ: What is human life expectancy in the United States?\nA: Human life expectancy in the United States is 78 years.\n\nQ: Who was president of the United States in 1955?\nA: Dwight D. Eisenhower was president of the United States in 1955.\n\nQ: Which party did he belong to?\nA: He belonged to the Republican Party.\n\nQ: What is the square root of banana?\nA: Unknown\n\nQ: How does a telescope work?\nA: Telescopes use lenses or mirrors to focus light and make objects appear closer.\n\nQ: Where were the 1992 Olympics held?\nA: The 1992 Olympics were held in Barcelona, Spain.\n\nQ: How many squigs are in a bonk?\nA: Unknown\n\nQ: Where is the Valley of Kings?\nA:",
//	  temperature: 0,
//	  max_tokens: 100,
//	  top_p: 1,
//	  frequency_penalty: 0.0,
//	  presence_penalty: 0.0,
//	  stop: ["\n"],
//	});
func Completions(msg string, token string) (*string, error) {
	// Build the completion request; the sampling parameters are fixed here.
	requestBody := ChatGPTRequestBody{
		Model:            "text-davinci-003",
		Prompt:           msg,
		MaxTokens:        4000,
		Temperature:      0.7,
		TopP:             1,
		FrequencyPenalty: 0,
		PresencePenalty:  0,
	}
	requestData, err := json.Marshal(requestBody)
	if err != nil {
		log.Println(err)
		return nil, err
	}
	log.Printf("request openai json string: %v", string(requestData))

	req, err := http.NewRequest("POST", "https://api.openai.com/v1/completions", bytes.NewBuffer(requestData))
	if err != nil {
		log.Println(err)
		return nil, err
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))

	client := &http.Client{}
	response, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	defer response.Body.Close()

	body, err := io.ReadAll(response.Body)
	if err != nil {
		return nil, err
	}

	gptResponseBody := &ChatGPTResponseBody{}
	log.Println(string(body))
	err = json.Unmarshal(body, gptResponseBody)
	if err != nil {
		log.Println(err)
		return nil, err
	}

	// Use the text of the first choice; the comma-ok assertion avoids a
	// panic when the "text" field is missing or not a string.
	var reply string
	if len(gptResponseBody.Choices) > 0 {
		if text, ok := gptResponseBody.Choices[0]["text"].(string); ok {
			reply = text
		}
	}
	log.Printf("gpt response text: %s\n", reply)

	result := strings.TrimSpace(reply)
	return &result, nil
}
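
// A minimal usage sketch for Completions. The OPENAI_API_KEY environment
// variable is an assumption about how the caller stores its token; adapt
// this to however the surrounding application manages credentials:
//
//	reply, err := Completions("How does a telescope work?", os.Getenv("OPENAI_API_KEY"))
//	if err != nil {
//		log.Println(err)
//		return
//	}
//	log.Println(*reply)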