Add completion-with-tool example (#598)

As a user of this Go SDK, it was not immediately intuitive to me how to
correctly use the function-calling capability of GPT-4
(https://platform.openai.com/docs/guides/function-calling). While the
aforementioned link provides a helpful example written in Python, I
initially tripped over how to correctly translate the specification of
function arguments when using this Go SDK.

To make this easier for others in the future, this commit adds a
completion-with-tool example showing how to use GPT-4 function calling
with this SDK end-to-end in a CreateChatCompletion() sequence.
Author: mikeb26
Date: 2023-11-26 08:45:28 +00:00
Committed by: GitHub
Parent: 03caea89b7
Commit: a09cb0c528


@@ -0,0 +1,94 @@
package main

import (
	"context"
	"fmt"
	"os"

	"github.com/sashabaranov/go-openai"
	"github.com/sashabaranov/go-openai/jsonschema"
)

func main() {
	ctx := context.Background()
	client := openai.NewClient(os.Getenv("OPENAI_API_KEY"))

	// describe the function & its inputs
	params := jsonschema.Definition{
		Type: jsonschema.Object,
		Properties: map[string]jsonschema.Definition{
			"location": {
				Type:        jsonschema.String,
				Description: "The city and state, e.g. San Francisco, CA",
			},
			"unit": {
				Type: jsonschema.String,
				Enum: []string{"celsius", "fahrenheit"},
			},
		},
		Required: []string{"location"},
	}
	f := openai.FunctionDefinition{
		Name:        "get_current_weather",
		Description: "Get the current weather in a given location",
		Parameters:  params,
	}
	t := openai.Tool{
		Type:     openai.ToolTypeFunction,
		Function: f,
	}

	// simulate user asking a question that requires the function
	dialogue := []openai.ChatCompletionMessage{
		{Role: openai.ChatMessageRoleUser, Content: "What is the weather in Boston today?"},
	}
	fmt.Printf("Asking OpenAI '%v' and providing it a '%v()' function...\n",
		dialogue[0].Content, f.Name)
	resp, err := client.CreateChatCompletion(ctx,
		openai.ChatCompletionRequest{
			Model:    openai.GPT4TurboPreview,
			Messages: dialogue,
			Tools:    []openai.Tool{t},
		},
	)
	if err != nil || len(resp.Choices) != 1 {
		fmt.Printf("Completion error: err:%v len(choices):%v\n", err,
			len(resp.Choices))
		return
	}
	msg := resp.Choices[0].Message
	if len(msg.ToolCalls) != 1 {
		fmt.Printf("Completion error: len(toolcalls): %v\n", len(msg.ToolCalls))
		return
	}

	// simulate calling the function & responding to OpenAI
	dialogue = append(dialogue, msg)
	fmt.Printf("OpenAI called us back wanting to invoke our function '%v' with params '%v'\n",
		msg.ToolCalls[0].Function.Name, msg.ToolCalls[0].Function.Arguments)
	dialogue = append(dialogue, openai.ChatCompletionMessage{
		Role:       openai.ChatMessageRoleTool,
		Content:    "Sunny and 80 degrees.",
		Name:       msg.ToolCalls[0].Function.Name,
		ToolCallID: msg.ToolCalls[0].ID,
	})

	fmt.Printf("Sending OpenAI our '%v()' function's response and requesting the reply to the original question...\n",
		f.Name)
	resp, err = client.CreateChatCompletion(ctx,
		openai.ChatCompletionRequest{
			Model:    openai.GPT4TurboPreview,
			Messages: dialogue,
			Tools:    []openai.Tool{t},
		},
	)
	if err != nil || len(resp.Choices) != 1 {
		fmt.Printf("2nd completion error: err:%v len(choices):%v\n", err,
			len(resp.Choices))
		return
	}

	// display OpenAI's response to the original question utilizing our function
	msg = resp.Choices[0].Message
	fmt.Printf("OpenAI answered the original request with: %v\n",
		msg.Content)
}
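
For readers who, like the commit message describes, trip over translating the
guide's JSON parameter schema into Go: a minimal standalone sketch (not part of
this commit, just an illustration) is to marshal the same jsonschema.Definition
with encoding/json and compare the output against the "parameters" object shown
in the OpenAI function-calling guide.

	package main

	import (
		"encoding/json"
		"fmt"

		"github.com/sashabaranov/go-openai/jsonschema"
	)

	func main() {
		// Same parameter schema as in the example above.
		params := jsonschema.Definition{
			Type: jsonschema.Object,
			Properties: map[string]jsonschema.Definition{
				"location": {
					Type:        jsonschema.String,
					Description: "The city and state, e.g. San Francisco, CA",
				},
				"unit": {
					Type: jsonschema.String,
					Enum: []string{"celsius", "fahrenheit"},
				},
			},
			Required: []string{"location"},
		}
		out, err := json.MarshalIndent(params, "", "  ")
		if err != nil {
			fmt.Println("marshal error:", err)
			return
		}
		// The printed JSON should mirror the guide's "parameters" object:
		// an object with "location" and "unit" properties and
		// "required": ["location"].
		fmt.Println(string(out))
	}

Running this locally (go run with OPENAI_API_KEY unset is fine, since it makes
no API call) is a quick way to confirm the Go struct produces the schema the
API expects before wiring it into a CreateChatCompletion() request.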