Fix integration test (#762)

* added TestCompletionStream test

moved completion stream testing to a separate function
added NoErrorF helper
fixes a nil pointer dereference on the stream object

* update integration test models
Adam Smith
2024-06-13 06:23:07 -07:00
committed by GitHub
parent 8618492b98
commit fd41f7a5f4
4 changed files with 62 additions and 42 deletions
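
Why the nil pointer fix works: in the old TestAPI, a failed CreateCompletionStream call left stream nil, but checks.NoError does not stop the test, so execution went on to register defer stream.Close() and call stream.Recv() on the nil stream and panicked. Splitting the stream check into its own test and failing fast before the defer removes that path. Below is a minimal, illustrative sketch of the pattern, assuming the sashabaranov/go-openai client used in the diffs that follow; the fatal check is written inline here, while the committed test relies on the new checks.NoErrorF helper (last diff below) to do the same thing via t.Fatal.

package openai_test

import (
    "context"
    "errors"
    "io"
    "os"
    "testing"

    "github.com/sashabaranov/go-openai"
)

// TestCompletionStreamSketch is a hypothetical name for this illustration;
// it mirrors the shape of the committed TestCompletionStream.
func TestCompletionStreamSketch(t *testing.T) {
    apiToken := os.Getenv("OPENAI_TOKEN")
    if apiToken == "" {
        t.Skip("set OPENAI_TOKEN to run against the live API")
    }
    c := openai.NewClient(apiToken)

    stream, err := c.CreateCompletionStream(context.Background(), openai.CompletionRequest{
        Prompt:    "Ex falso quodlibet",
        Model:     openai.GPT3Babbage002,
        MaxTokens: 5,
        Stream:    true,
    })
    if err != nil {
        // Fail fast: on error the stream is nil. Stopping here means the defer
        // below is never registered, which is what checks.NoErrorF achieves via t.Fatal.
        t.Fatalf("CreateCompletionStream returned error: %v", err)
    }
    defer stream.Close()

    for {
        _, err := stream.Recv()
        if errors.Is(err, io.EOF) {
            break // stream finished normally
        }
        if err != nil {
            t.Fatalf("stream.Recv returned error: %v", err)
        }
    }
}

The committed TestCompletionStream additionally counts received responses and reports if the stream returned none; the sketch only demonstrates why the fatal error check has to come before defer stream.Close().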

View File

@@ -26,7 +26,7 @@ func TestAPI(t *testing.T) {
     _, err = c.ListEngines(ctx)
     checks.NoError(t, err, "ListEngines error")
-    _, err = c.GetEngine(ctx, "davinci")
+    _, err = c.GetEngine(ctx, openai.GPT3Davinci002)
     checks.NoError(t, err, "GetEngine error")
     fileRes, err := c.ListFiles(ctx)
@@ -42,7 +42,7 @@ func TestAPI(t *testing.T) {
"The food was delicious and the waiter",
"Other examples of embedding request",
},
Model: openai.AdaSearchQuery,
Model: openai.AdaEmbeddingV2,
}
_, err = c.CreateEmbeddings(ctx, embeddingReq)
checks.NoError(t, err, "Embedding error")
@@ -77,31 +77,6 @@ func TestAPI(t *testing.T) {
     )
     checks.NoError(t, err, "CreateChatCompletion (with name) returned error")
-    stream, err := c.CreateCompletionStream(ctx, openai.CompletionRequest{
-        Prompt: "Ex falso quodlibet",
-        Model: openai.GPT3Ada,
-        MaxTokens: 5,
-        Stream: true,
-    })
-    checks.NoError(t, err, "CreateCompletionStream returned error")
-    defer stream.Close()
-    counter := 0
-    for {
-        _, err = stream.Recv()
-        if err != nil {
-            if errors.Is(err, io.EOF) {
-                break
-            }
-            t.Errorf("Stream error: %v", err)
-        } else {
-            counter++
-        }
-    }
-    if counter == 0 {
-        t.Error("Stream did not return any responses")
-    }
     _, err = c.CreateChatCompletion(
         context.Background(),
         openai.ChatCompletionRequest{
@@ -134,6 +109,41 @@ func TestAPI(t *testing.T) {
     checks.NoError(t, err, "CreateChatCompletion (with functions) returned error")
 }
+func TestCompletionStream(t *testing.T) {
+    apiToken := os.Getenv("OPENAI_TOKEN")
+    if apiToken == "" {
+        t.Skip("Skipping testing against production OpenAI API. Set OPENAI_TOKEN environment variable to enable it.")
+    }
+    c := openai.NewClient(apiToken)
+    ctx := context.Background()
+    stream, err := c.CreateCompletionStream(ctx, openai.CompletionRequest{
+        Prompt: "Ex falso quodlibet",
+        Model: openai.GPT3Babbage002,
+        MaxTokens: 5,
+        Stream: true,
+    })
+    checks.NoErrorF(t, err, "CreateCompletionStream returned error")
+    defer stream.Close()
+    counter := 0
+    for {
+        _, err = stream.Recv()
+        if err != nil {
+            if errors.Is(err, io.EOF) {
+                break
+            }
+            t.Errorf("Stream error: %v", err)
+        } else {
+            counter++
+        }
+    }
+    if counter == 0 {
+        t.Error("Stream did not return any responses")
+    }
+}
 func TestAPIError(t *testing.T) {
     apiToken := os.Getenv("OPENAI_TOKEN")
     if apiToken == "" {

View File

@@ -39,28 +39,31 @@ const (
     GPT3Dot5Turbo16K0613 = "gpt-3.5-turbo-16k-0613"
     GPT3Dot5Turbo = "gpt-3.5-turbo"
     GPT3Dot5TurboInstruct = "gpt-3.5-turbo-instruct"
-    // Deprecated: Will be shut down on January 04, 2024. Use gpt-3.5-turbo-instruct instead.
+    // Deprecated: Model is shutdown. Use gpt-3.5-turbo-instruct instead.
     GPT3TextDavinci003 = "text-davinci-003"
-    // Deprecated: Will be shut down on January 04, 2024. Use gpt-3.5-turbo-instruct instead.
+    // Deprecated: Model is shutdown. Use gpt-3.5-turbo-instruct instead.
     GPT3TextDavinci002 = "text-davinci-002"
-    // Deprecated: Will be shut down on January 04, 2024. Use gpt-3.5-turbo-instruct instead.
+    // Deprecated: Model is shutdown. Use gpt-3.5-turbo-instruct instead.
     GPT3TextCurie001 = "text-curie-001"
-    // Deprecated: Will be shut down on January 04, 2024. Use gpt-3.5-turbo-instruct instead.
+    // Deprecated: Model is shutdown. Use gpt-3.5-turbo-instruct instead.
     GPT3TextBabbage001 = "text-babbage-001"
-    // Deprecated: Will be shut down on January 04, 2024. Use gpt-3.5-turbo-instruct instead.
+    // Deprecated: Model is shutdown. Use gpt-3.5-turbo-instruct instead.
     GPT3TextAda001 = "text-ada-001"
-    // Deprecated: Will be shut down on January 04, 2024. Use gpt-3.5-turbo-instruct instead.
+    // Deprecated: Model is shutdown. Use gpt-3.5-turbo-instruct instead.
     GPT3TextDavinci001 = "text-davinci-001"
-    // Deprecated: Will be shut down on January 04, 2024. Use gpt-3.5-turbo-instruct instead.
+    // Deprecated: Model is shutdown. Use gpt-3.5-turbo-instruct instead.
     GPT3DavinciInstructBeta = "davinci-instruct-beta"
+    // Deprecated: Model is shutdown. Use davinci-002 instead.
     GPT3Davinci = "davinci"
     GPT3Davinci002 = "davinci-002"
-    // Deprecated: Will be shut down on January 04, 2024. Use gpt-3.5-turbo-instruct instead.
+    // Deprecated: Model is shutdown. Use gpt-3.5-turbo-instruct instead.
     GPT3CurieInstructBeta = "curie-instruct-beta"
     GPT3Curie = "curie"
     GPT3Curie002 = "curie-002"
+    // Deprecated: Model is shutdown. Use babbage-002 instead.
     GPT3Ada = "ada"
     GPT3Ada002 = "ada-002"
+    // Deprecated: Model is shutdown. Use babbage-002 instead.
     GPT3Babbage = "babbage"
     GPT3Babbage002 = "babbage-002"
 )

View File

@@ -16,7 +16,7 @@ var ErrVectorLengthMismatch = errors.New("vector length mismatch")
 type EmbeddingModel string
 const (
-    // Deprecated: The following block will be shut down on January 04, 2024. Use text-embedding-ada-002 instead.
+    // Deprecated: The following block is shut down. Use text-embedding-ada-002 instead.
     AdaSimilarity EmbeddingModel = "text-similarity-ada-001"
     BabbageSimilarity EmbeddingModel = "text-similarity-babbage-001"
     CurieSimilarity EmbeddingModel = "text-similarity-curie-001"

View File

@@ -12,6 +12,13 @@ func NoError(t *testing.T, err error, message ...string) {
     }
 }
+func NoErrorF(t *testing.T, err error, message ...string) {
+    t.Helper()
+    if err != nil {
+        t.Fatal(err, message)
+    }
+}
 func HasError(t *testing.T, err error, message ...string) {
     t.Helper()
     if err == nil {