From d529d13ba15d31928a414f83f9ae9730d0a9223b Mon Sep 17 00:00:00 2001
From: Ryuichi Maeda <63947554+ryuichi-maeda@users.noreply.github.com>
Date: Sun, 19 Mar 2023 23:01:51 +0900
Subject: [PATCH] Add readme example of ChatGPT streaming completion (#177)

---
 README.md | 57 ++++++++++++++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 56 insertions(+), 1 deletion(-)

diff --git a/README.md b/README.md
index e6e352e..e00ee54 100644
--- a/README.md
+++ b/README.md
@@ -58,6 +58,61 @@ func main() {
 
 Other examples:
 
+<details>
+<summary>ChatGPT streaming completion</summary>
+
+```go
+package main
+
+import (
+	"context"
+	"errors"
+	"fmt"
+	"io"
+	openai "github.com/sashabaranov/go-openai"
+)
+
+func main() {
+	c := openai.NewClient("your token")
+	ctx := context.Background()
+
+	req := openai.ChatCompletionRequest{
+		Model:     openai.GPT3Dot5Turbo,
+		MaxTokens: 20,
+		Messages: []openai.ChatCompletionMessage{
+			{
+				Role:    openai.ChatMessageRoleUser,
+				Content: "Lorem ipsum",
+			},
+		},
+		Stream: true,
+	}
+	stream, err := c.CreateChatCompletionStream(ctx, req)
+	if err != nil {
+		fmt.Printf("ChatCompletionStream error: %v\n", err)
+		return
+	}
+	defer stream.Close()
+
+	fmt.Printf("Stream response: ")
+	for {
+		response, err := stream.Recv()
+		if errors.Is(err, io.EOF) {
+			fmt.Println("\nStream finished")
+			return
+		}
+
+		if err != nil {
+			fmt.Printf("\nStream error: %v\n", err)
+			return
+		}
+
+		fmt.Print(response.Choices[0].Delta.Content)
+	}
+}
+```
+</details>
+
 <details>
 <summary>GPT-3 completion</summary>
 
@@ -327,4 +382,4 @@ func main() {
 	}
 }
 ```
-</details>
\ No newline at end of file
+</details>
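
For readers adapting the example added by this patch, here is a minimal sketch of how the streamed chunks could be assembled into one complete reply instead of being printed as they arrive. It assumes only the `CreateChatCompletionStream`/`Recv` API shown in the diff above plus the standard library; the `collectStream` helper is a hypothetical name introduced for illustration and is not part of the go-openai library or of this patch.

```go
package main

import (
	"context"
	"errors"
	"fmt"
	"io"
	"strings"

	openai "github.com/sashabaranov/go-openai"
)

// collectStream drains a chat completion stream and returns the full reply
// by concatenating the Delta.Content of every received chunk.
// (Hypothetical helper for illustration; not part of the library.)
func collectStream(stream *openai.ChatCompletionStream) (string, error) {
	var sb strings.Builder
	for {
		response, err := stream.Recv()
		if errors.Is(err, io.EOF) {
			return sb.String(), nil // stream finished normally
		}
		if err != nil {
			return sb.String(), err // transport or API error mid-stream
		}
		sb.WriteString(response.Choices[0].Delta.Content)
	}
}

func main() {
	c := openai.NewClient("your token")
	stream, err := c.CreateChatCompletionStream(context.Background(), openai.ChatCompletionRequest{
		Model:  openai.GPT3Dot5Turbo,
		Stream: true,
		Messages: []openai.ChatCompletionMessage{
			{Role: openai.ChatMessageRoleUser, Content: "Lorem ipsum"},
		},
	})
	if err != nil {
		fmt.Printf("ChatCompletionStream error: %v\n", err)
		return
	}
	defer stream.Close()

	reply, err := collectStream(stream)
	if err != nil {
		fmt.Printf("Stream error: %v\n", err)
	}
	fmt.Println(reply)
}
```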