From 08c167fecf6953619d1905ab2959ed341bfb063d Mon Sep 17 00:00:00 2001
From: 渡邉祐一 / Yuichi Watanabe
Date: Wed, 8 Nov 2023 18:21:51 +0900
Subject: [PATCH] test: fix compile error in api integration test (#548)

---
 api_integration_test.go | 41 +++++++++++++++++++++--------------------
 1 file changed, 21 insertions(+), 20 deletions(-)

diff --git a/api_integration_test.go b/api_integration_test.go
index 6be188b..736040c 100644
--- a/api_integration_test.go
+++ b/api_integration_test.go
@@ -9,6 +9,7 @@ import (
 	"os"
 	"testing"
 
+	"github.com/sashabaranov/go-openai"
 	"github.com/sashabaranov/go-openai/internal/test/checks"
 	"github.com/sashabaranov/go-openai/jsonschema"
 )
@@ -20,7 +21,7 @@ func TestAPI(t *testing.T) {
 	}
 
 	var err error
-	c := NewClient(apiToken)
+	c := openai.NewClient(apiToken)
 	ctx := context.Background()
 	_, err = c.ListEngines(ctx)
 	checks.NoError(t, err, "ListEngines error")
@@ -36,23 +37,23 @@ func TestAPI(t *testing.T) {
 		checks.NoError(t, err, "GetFile error")
 	} // else skip
 
-	embeddingReq := EmbeddingRequest{
+	embeddingReq := openai.EmbeddingRequest{
 		Input: []string{
 			"The food was delicious and the waiter",
 			"Other examples of embedding request",
 		},
-		Model: AdaSearchQuery,
+		Model: openai.AdaSearchQuery,
 	}
 	_, err = c.CreateEmbeddings(ctx, embeddingReq)
 	checks.NoError(t, err, "Embedding error")
 
 	_, err = c.CreateChatCompletion(
 		ctx,
-		ChatCompletionRequest{
-			Model: GPT3Dot5Turbo,
-			Messages: []ChatCompletionMessage{
+		openai.ChatCompletionRequest{
+			Model: openai.GPT3Dot5Turbo,
+			Messages: []openai.ChatCompletionMessage{
 				{
-					Role: ChatMessageRoleUser,
+					Role: openai.ChatMessageRoleUser,
 					Content: "Hello!",
 				},
 			},
@@ -63,11 +64,11 @@ func TestAPI(t *testing.T) {
 
 	_, err = c.CreateChatCompletion(
 		ctx,
-		ChatCompletionRequest{
-			Model: GPT3Dot5Turbo,
-			Messages: []ChatCompletionMessage{
+		openai.ChatCompletionRequest{
+			Model: openai.GPT3Dot5Turbo,
+			Messages: []openai.ChatCompletionMessage{
 				{
-					Role: ChatMessageRoleUser,
+					Role: openai.ChatMessageRoleUser,
 					Name: "John_Doe",
 					Content: "Hello!",
 				},
@@ -76,9 +77,9 @@ func TestAPI(t *testing.T) {
 	)
 	checks.NoError(t, err, "CreateChatCompletion (with name) returned error")
 
-	stream, err := c.CreateCompletionStream(ctx, CompletionRequest{
+	stream, err := c.CreateCompletionStream(ctx, openai.CompletionRequest{
 		Prompt: "Ex falso quodlibet",
-		Model: GPT3Ada,
+		Model: openai.GPT3Ada,
 		MaxTokens: 5,
 		Stream: true,
 	})
@@ -103,15 +104,15 @@ func TestAPI(t *testing.T) {
 
 	_, err = c.CreateChatCompletion(
 		context.Background(),
-		ChatCompletionRequest{
-			Model: GPT3Dot5Turbo,
-			Messages: []ChatCompletionMessage{
+		openai.ChatCompletionRequest{
+			Model: openai.GPT3Dot5Turbo,
+			Messages: []openai.ChatCompletionMessage{
 				{
-					Role: ChatMessageRoleUser,
+					Role: openai.ChatMessageRoleUser,
 					Content: "What is the weather like in Boston?",
 				},
 			},
-			Functions: []FunctionDefinition{{
+			Functions: []openai.FunctionDefinition{{
 				Name: "get_current_weather",
 				Parameters: jsonschema.Definition{
 					Type: jsonschema.Object,
@@ -140,12 +141,12 @@ func TestAPIError(t *testing.T) {
 	}
 
 	var err error
-	c := NewClient(apiToken + "_invalid")
+	c := openai.NewClient(apiToken + "_invalid")
 	ctx := context.Background()
 	_, err = c.ListEngines(ctx)
 	checks.HasError(t, err, "ListEngines should fail with an invalid key")
 
-	var apiErr *APIError
+	var apiErr *openai.APIError
 	if !errors.As(err, &apiErr) {
 		t.Fatalf("Error is not an APIError: %+v", err)
 	}
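
For context, the compile error being fixed is the standard Go symptom of referring to a library's exported identifiers (NewClient, ChatCompletionRequest, and so on) without a package qualifier from outside the library's own package, and without importing github.com/sashabaranov/go-openai. A minimal sketch of the corrected pattern follows; the openai_test package name, the OPENAI_TOKEN variable, and the test function are illustrative assumptions, not lines taken from this patch.

package openai_test

import (
	"context"
	"os"
	"testing"

	"github.com/sashabaranov/go-openai"
)

// TestQualifiedClientSketch illustrates the pattern the patch restores: every
// exported name from the library is reached through the imported openai
// package rather than used bare.
func TestQualifiedClientSketch(t *testing.T) {
	apiToken := os.Getenv("OPENAI_TOKEN") // assumed variable name
	if apiToken == "" {
		t.Skip("no API token configured; skipping live API call")
	}

	c := openai.NewClient(apiToken)
	_, err := c.CreateChatCompletion(
		context.Background(),
		openai.ChatCompletionRequest{
			Model: openai.GPT3Dot5Turbo,
			Messages: []openai.ChatCompletionMessage{
				{Role: openai.ChatMessageRoleUser, Content: "Hello!"},
			},
		},
	)
	if err != nil {
		t.Fatalf("CreateChatCompletion failed: %v", err)
	}
}

Since the real file is an API integration test, it is presumably compiled only when integration testing is requested (for example behind a build tag) and skipped when no token is configured, as the hunks above suggest.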