Skip to content

Commit 0ee50e9

Browse files
authored
feat(chat): add stream options (#363)
1 parent e133f28 commit 0ee50e9

File tree

4 files changed

+96
-0
lines changed

4 files changed

+96
-0
lines changed

CHANGELOG.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,8 @@
# Unreleased

### Added

- **chat**: add stream options (#363)
16
# 3.8.1
27
> Published 28 Jun 2024
38

openai-client/src/commonTest/kotlin/com/aallam/openai/client/TestChatCompletions.kt

Lines changed: 36 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -203,4 +203,40 @@ class TestChatCompletions : TestOpenAI() {
203203

204204
assertTrue(job.isCancelled, "Job should be cancelled")
205205
}
206+
207+
@Test
fun streamOptions() = test {
    // Build a streamed chat request with usage reporting switched on.
    val request = chatCompletionRequest {
        model = ModelId("gpt-3.5-turbo")
        messages {
            message {
                role = ChatRole.System
                content = "You are a helpful assistant.!"
            }
            message {
                role = ChatRole.User
                content = "Who won the world series in 2020?"
            }
            message {
                role = ChatRole.Assistant
                content = "The Los Angeles Dodgers won the World Series in 2020."
            }
            message {
                role = ChatRole.User
                content = "Where was it played?"
            }
        }
        streamOptions = streamOptions {
            includeUsage = true
        }
    }

    // Collect every streamed chunk before asserting on the final one.
    val chunks = mutableListOf<ChatCompletionChunk>()
    openAI.chatCompletions(request).onEach { chunks += it }.launchIn(this).join()

    // With include_usage set, the last chunk is expected to carry token statistics.
    val usage = chunks.last().usage
    assertNotNull(usage)
    assertNotNull(usage.promptTokens)
    assertNotNull(usage.completionTokens)
    assertNotNull(usage.totalTokens)
}
206242
}

openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatCompletionRequest.kt

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -163,6 +163,11 @@ public class ChatCompletionRequest(
163163
* A unique identifier representing the Multi LORA reserved instance.
164164
*/
165165
@SerialName("instance_id") public val instanceId: String? = null,
166+
167+
/**
168+
* Options for streaming response. Only used when in streaming mode.
169+
*/
170+
@SerialName("stream_options") public val streamOptions: StreamOptions? = null
166171
)
167172

168173
/**
@@ -338,6 +343,11 @@ public class ChatCompletionRequestBuilder {
338343
functions = FunctionsBuilder().apply(block).functions
339344
}
340345

346+
/**
347+
* Options for streaming response. Only used when in streaming mode.
348+
*/
349+
public var streamOptions: StreamOptions? = null
350+
341351
/**
342352
* Builder of [ChatCompletionRequest] instances.
343353
*/
@@ -361,6 +371,7 @@ public class ChatCompletionRequestBuilder {
361371
logprobs = logprobs,
362372
topLogprobs = topLogprobs,
363373
instanceId = instanceId,
374+
streamOptions = streamOptions,
364375
)
365376
}
366377

Lines changed: 44 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,44 @@
1+
package com.aallam.openai.api.chat

import kotlinx.serialization.SerialName
import kotlinx.serialization.Serializable

/**
 * Options for streaming response.
 */
@Serializable
public data class StreamOptions(
    /**
     * If set, an additional chunk will be streamed before the data: `[DONE]` message.
     * The usage field on this chunk shows the token usage statistics for the entire request, and the choices field will
     * always be an empty array. All other chunks will also include a usage field, but with a null value.
     */
    @SerialName("include_usage") public val includeUsage: Boolean? = null,
)

/**
 * Create a new [StreamOptions] instance using the builder DSL.
 */
public fun streamOptions(block: StreamOptionsBuilder.() -> Unit): StreamOptions =
    StreamOptionsBuilder().apply(block).build()

/**
 * Builder for [StreamOptions].
 */
public class StreamOptionsBuilder {

    /**
     * If set, an additional chunk will be streamed before the data: `[DONE]` message.
     * The usage field on this chunk shows the token usage statistics for the entire request, and the choices field will
     * always be an empty array. All other chunks will also include a usage field, but with a null value.
     */
    public var includeUsage: Boolean? = null

    /**
     * Build the configured [StreamOptions] instance.
     */
    public fun build(): StreamOptions = StreamOptions(includeUsage = includeUsage)
}

0 commit comments

Comments (0)