Skip to content

Commit 1687616

Browse files
authored
o1 model support stream (#904)
1 parent b3ece4d commit 1687616

File tree

2 files changed

+0
-28
lines changed

2 files changed

+0
-28
lines changed

chat_stream_test.go

Lines changed: 0 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -36,27 +36,6 @@ func TestChatCompletionsStreamWrongModel(t *testing.T) {
3636
}
3737
}
3838

39-
func TestChatCompletionsStreamWithO1BetaLimitations(t *testing.T) {
40-
config := openai.DefaultConfig("whatever")
41-
config.BaseURL = "http://localhost/v1/chat/completions"
42-
client := openai.NewClientWithConfig(config)
43-
ctx := context.Background()
44-
45-
req := openai.ChatCompletionRequest{
46-
Model: openai.O1Preview,
47-
Messages: []openai.ChatCompletionMessage{
48-
{
49-
Role: openai.ChatMessageRoleUser,
50-
Content: "Hello!",
51-
},
52-
},
53-
}
54-
_, err := client.CreateChatCompletionStream(ctx, req)
55-
if !errors.Is(err, openai.ErrO1BetaLimitationsStreaming) {
56-
t.Fatalf("CreateChatCompletion should return ErrO1BetaLimitationsStreaming, but returned: %v", err)
57-
}
58-
}
59-
6039
func TestCreateChatCompletionStream(t *testing.T) {
6140
client, server, teardown := setupOpenAITestServer()
6241
defer teardown()

completion.go

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,6 @@ var (
1515

1616
var (
1717
ErrO1BetaLimitationsMessageTypes = errors.New("this model has beta-limitations, user and assistant messages only, system messages are not supported") //nolint:lll
18-
ErrO1BetaLimitationsStreaming = errors.New("this model has beta-limitations, streaming not supported") //nolint:lll
1918
ErrO1BetaLimitationsTools = errors.New("this model has beta-limitations, tools, function calling, and response format parameters are not supported") //nolint:lll
2019
ErrO1BetaLimitationsLogprobs = errors.New("this model has beta-limitations, logprobs not supported") //nolint:lll
2120
ErrO1BetaLimitationsOther = errors.New("this model has beta-limitations, temperature, top_p and n are fixed at 1, while presence_penalty and frequency_penalty are fixed at 0") //nolint:lll
@@ -199,12 +198,6 @@ func validateRequestForO1Models(request ChatCompletionRequest) error {
199198
return ErrO1MaxTokensDeprecated
200199
}
201200

202-
// Beta Limitations
203-
// refs:https://platform.openai.com/docs/guides/reasoning/beta-limitations
204-
// Streaming: not supported
205-
if request.Stream {
206-
return ErrO1BetaLimitationsStreaming
207-
}
208201
// Logprobs: not supported.
209202
if request.LogProbs {
210203
return ErrO1BetaLimitationsLogprobs

0 commit comments

Comments (0)