# Go OpenAI
|
|
[![Go Reference](https://pkg.go.dev/badge/github.com/sashabaranov/go-openai.svg)](https://pkg.go.dev/github.com/sashabaranov/go-openai)
|
|
[![Go Report Card](https://goreportcard.com/badge/github.com/sashabaranov/go-openai)](https://goreportcard.com/report/github.com/sashabaranov/go-openai)
|
|
[![codecov](https://codecov.io/gh/sashabaranov/go-openai/branch/master/graph/badge.svg)](https://codecov.io/gh/sashabaranov/go-openai)
|
|
|
|
This library provides Go clients for [OpenAI API](https://platform.openai.com/). We support:
|
|
|
|
* ChatGPT
|
|
* GPT-3, GPT-4
|
|
* DALL·E 2
|
|
* Whisper
|
|
|
|
Installation:
|
|
```sh
|
|
go get github.com/sashabaranov/go-openai
|
|
```
|
|
|
|
|
|
ChatGPT example usage:
|
|
|
|
```go
|
|
package main
|
|
|
|
import (
|
|
"context"
|
|
"fmt"
|
|
openai "github.com/sashabaranov/go-openai"
|
|
)
|
|
|
|
func main() {
|
|
client := openai.NewClient("your token")
|
|
resp, err := client.CreateChatCompletion(
|
|
context.Background(),
|
|
openai.ChatCompletionRequest{
|
|
Model: openai.GPT3Dot5Turbo,
|
|
Messages: []openai.ChatCompletionMessage{
|
|
{
|
|
Role: openai.ChatMessageRoleUser,
|
|
Content: "Hello!",
|
|
},
|
|
},
|
|
},
|
|
)
|
|
|
|
if err != nil {
|
|
fmt.Printf("ChatCompletion error: %v\n", err)
|
|
return
|
|
}
|
|
|
|
fmt.Println(resp.Choices[0].Message.Content)
|
|
}
|
|
|
|
```
|
|
|
|
|
|
|
|
Other examples:
|
|
|
|
<details>
|
|
<summary>ChatGPT streaming completion</summary>
|
|
|
|
```go
|
|
package main
|
|
|
|
import (
|
|
"context"
|
|
"errors"
|
|
"fmt"
|
|
"io"
|
|
openai "github.com/sashabaranov/go-openai"
|
|
)
|
|
|
|
func main() {
|
|
c := openai.NewClient("your token")
|
|
ctx := context.Background()
|
|
|
|
req := openai.ChatCompletionRequest{
|
|
Model: openai.GPT3Dot5Turbo,
|
|
MaxTokens: 20,
|
|
Messages: []openai.ChatCompletionMessage{
|
|
{
|
|
Role: openai.ChatMessageRoleUser,
|
|
Content: "Lorem ipsum",
|
|
},
|
|
},
|
|
Stream: true,
|
|
}
|
|
stream, err := c.CreateChatCompletionStream(ctx, req)
|
|
if err != nil {
|
|
fmt.Printf("ChatCompletionStream error: %v\n", err)
|
|
return
|
|
}
|
|
defer stream.Close()
|
|
|
|
fmt.Printf("Stream response: ")
|
|
for {
|
|
response, err := stream.Recv()
|
|
if errors.Is(err, io.EOF) {
|
|
fmt.Println("\nStream finished")
|
|
return
|
|
}
|
|
|
|
if err != nil {
|
|
fmt.Printf("\nStream error: %v\n", err)
|
|
return
|
|
}
|
|
|
|
fmt.Print(response.Choices[0].Delta.Content)
|
|
}
|
|
}
|
|
```
|
|
</details>
|
|
|
|
<details>
|
|
<summary>GPT-3 completion</summary>
|
|
|
|
```go
|
|
package main
|
|
|
|
import (
|
|
"context"
|
|
"fmt"
|
|
openai "github.com/sashabaranov/go-openai"
|
|
)
|
|
|
|
func main() {
|
|
c := openai.NewClient("your token")
|
|
ctx := context.Background()
|
|
|
|
req := openai.CompletionRequest{
|
|
Model: openai.GPT3Ada,
|
|
MaxTokens: 5,
|
|
Prompt: "Lorem ipsum",
|
|
}
|
|
resp, err := c.CreateCompletion(ctx, req)
|
|
if err != nil {
|
|
fmt.Printf("Completion error: %v\n", err)
|
|
return
|
|
}
|
|
fmt.Println(resp.Choices[0].Text)
|
|
}
|
|
```
|
|
</details>
|
|
|
|
<details>
|
|
<summary>GPT-3 streaming completion</summary>
|
|
|
|
```go
|
|
package main
|
|
|
|
import (
|
|
"errors"
|
|
"context"
|
|
"fmt"
|
|
"io"
|
|
openai "github.com/sashabaranov/go-openai"
|
|
)
|
|
|
|
func main() {
|
|
c := openai.NewClient("your token")
|
|
ctx := context.Background()
|
|
|
|
req := openai.CompletionRequest{
|
|
Model: openai.GPT3Ada,
|
|
MaxTokens: 5,
|
|
Prompt: "Lorem ipsum",
|
|
Stream: true,
|
|
}
|
|
stream, err := c.CreateCompletionStream(ctx, req)
|
|
if err != nil {
|
|
fmt.Printf("CompletionStream error: %v\n", err)
|
|
return
|
|
}
|
|
defer stream.Close()
|
|
|
|
for {
|
|
response, err := stream.Recv()
|
|
if errors.Is(err, io.EOF) {
|
|
fmt.Println("Stream finished")
|
|
return
|
|
}
|
|
|
|
if err != nil {
|
|
fmt.Printf("Stream error: %v\n", err)
|
|
return
|
|
}
|
|
|
|
|
|
fmt.Printf("Stream response: %v\n", response)
|
|
}
|
|
}
|
|
```
|
|
</details>
|
|
|
|
<details>
|
|
<summary>Audio Speech-To-Text</summary>
|
|
|
|
```go
|
|
package main
|
|
|
|
import (
|
|
"context"
|
|
"fmt"
|
|
|
|
openai "github.com/sashabaranov/go-openai"
|
|
)
|
|
|
|
func main() {
|
|
c := openai.NewClient("your token")
|
|
ctx := context.Background()
|
|
|
|
req := openai.AudioRequest{
|
|
Model: openai.Whisper1,
|
|
FilePath: "recording.mp3",
|
|
}
|
|
resp, err := c.CreateTranscription(ctx, req)
|
|
if err != nil {
|
|
fmt.Printf("Transcription error: %v\n", err)
|
|
return
|
|
}
|
|
fmt.Println(resp.Text)
|
|
}
|
|
```
|
|
</details>
|
|
|
|
<details>
|
|
<summary>DALL-E 2 image generation</summary>
|
|
|
|
```go
|
|
package main
|
|
|
|
import (
|
|
"bytes"
|
|
"context"
|
|
"encoding/base64"
|
|
"fmt"
|
|
openai "github.com/sashabaranov/go-openai"
|
|
"image/png"
|
|
"os"
|
|
)
|
|
|
|
func main() {
|
|
c := openai.NewClient("your token")
|
|
ctx := context.Background()
|
|
|
|
// Sample image by link
|
|
reqUrl := openai.ImageRequest{
|
|
Prompt: "Parrot on a skateboard performs a trick, cartoon style, natural light, high detail",
|
|
Size: openai.CreateImageSize256x256,
|
|
ResponseFormat: openai.CreateImageResponseFormatURL,
|
|
N: 1,
|
|
}
|
|
|
|
respUrl, err := c.CreateImage(ctx, reqUrl)
|
|
if err != nil {
|
|
fmt.Printf("Image creation error: %v\n", err)
|
|
return
|
|
}
|
|
fmt.Println(respUrl.Data[0].URL)
|
|
|
|
// Example image as base64
|
|
reqBase64 := openai.ImageRequest{
|
|
Prompt: "Portrait of a humanoid parrot in a classic costume, high detail, realistic light, unreal engine",
|
|
Size: openai.CreateImageSize256x256,
|
|
ResponseFormat: openai.CreateImageResponseFormatB64JSON,
|
|
N: 1,
|
|
}
|
|
|
|
respBase64, err := c.CreateImage(ctx, reqBase64)
|
|
if err != nil {
|
|
fmt.Printf("Image creation error: %v\n", err)
|
|
return
|
|
}
|
|
|
|
imgBytes, err := base64.StdEncoding.DecodeString(respBase64.Data[0].B64JSON)
|
|
if err != nil {
|
|
fmt.Printf("Base64 decode error: %v\n", err)
|
|
return
|
|
}
|
|
|
|
r := bytes.NewReader(imgBytes)
|
|
imgData, err := png.Decode(r)
|
|
if err != nil {
|
|
fmt.Printf("PNG decode error: %v\n", err)
|
|
return
|
|
}
|
|
|
|
file, err := os.Create("example.png")
|
|
if err != nil {
|
|
fmt.Printf("File creation error: %v\n", err)
|
|
return
|
|
}
|
|
defer file.Close()
|
|
|
|
if err := png.Encode(file, imgData); err != nil {
|
|
fmt.Printf("PNG encode error: %v\n", err)
|
|
return
|
|
}
|
|
|
|
fmt.Println("The image was saved as example.png")
|
|
}
|
|
|
|
```
|
|
</details>
|
|
|
|
<details>
|
|
<summary>Configuring proxy</summary>
|
|
|
|
```go
|
|
config := openai.DefaultConfig("token")
|
|
proxyUrl, err := url.Parse("http://localhost:{port}")
|
|
if err != nil {
|
|
panic(err)
|
|
}
|
|
transport := &http.Transport{
|
|
Proxy: http.ProxyURL(proxyUrl),
|
|
}
|
|
config.HTTPClient = &http.Client{
|
|
Transport: transport,
|
|
}
|
|
|
|
c := openai.NewClientWithConfig(config)
|
|
```
|
|
|
|
See also: <https://pkg.go.dev/github.com/sashabaranov/go-openai#ClientConfig>
|
|
</details>
|
|
|
|
<details>
|
|
<summary>ChatGPT with conversation context (chat history)</summary>
|
|
|
|
```go
|
|
package main
|
|
|
|
import (
|
|
"bufio"
|
|
"context"
|
|
"fmt"
|
|
"os"
|
|
"strings"
|
|
|
|
"github.com/sashabaranov/go-openai"
|
|
)
|
|
|
|
func main() {
|
|
client := openai.NewClient("your token")
|
|
messages := make([]openai.ChatCompletionMessage, 0)
|
|
reader := bufio.NewReader(os.Stdin)
|
|
fmt.Println("Conversation")
|
|
fmt.Println("---------------------")
|
|
|
|
for {
|
|
fmt.Print("-> ")
|
|
text, _ := reader.ReadString('\n')
|
|
// convert CRLF to LF
|
|
text = strings.Replace(text, "\n", "", -1)
|
|
messages = append(messages, openai.ChatCompletionMessage{
|
|
Role: openai.ChatMessageRoleUser,
|
|
Content: text,
|
|
})
|
|
|
|
resp, err := client.CreateChatCompletion(
|
|
context.Background(),
|
|
openai.ChatCompletionRequest{
|
|
Model: openai.GPT3Dot5Turbo,
|
|
Messages: messages,
|
|
},
|
|
)
|
|
|
|
if err != nil {
|
|
fmt.Printf("ChatCompletion error: %v\n", err)
|
|
continue
|
|
}
|
|
|
|
content := resp.Choices[0].Message.Content
|
|
messages = append(messages, openai.ChatCompletionMessage{
|
|
Role: openai.ChatMessageRoleAssistant,
|
|
Content: content,
|
|
})
|
|
fmt.Println(content)
|
|
}
|
|
}
|
|
```
|
|
</details>
|