Skip to content

Commit 0e8b8b1

Browse files
authored
Merge pull request #269 from LimChihi/feat_stream_usage
2 parents ea3e55d + 2908c54 commit 0e8b8b1

File tree

3 files changed

+76
-1
lines changed

3 files changed: +76 −1 lines changed

Sources/OpenAI/Public/Models/ChatQuery.swift

+24-1
Original file line numberDiff line numberDiff line change
@@ -74,6 +74,8 @@ public struct ChatQuery: Equatable, Codable, Streamable {
7474
/// If set, partial message deltas will be sent, like in ChatGPT. Tokens will be sent as data-only server-sent events as they become available, with the stream terminated by a data: [DONE] message.
7575
/// https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format
7676
public var stream: Bool
77+
/// Options for streaming response. Only set this when you set stream: true.
78+
public var streamOptions: Self.StreamOptions?
7779

7880
public init(
7981
messages: [Self.ChatCompletionMessageParam],
@@ -95,7 +97,8 @@ public struct ChatQuery: Equatable, Codable, Streamable {
9597
topLogprobs: Int? = nil,
9698
topP: Double? = nil,
9799
user: String? = nil,
98-
stream: Bool = false
100+
stream: Bool = false,
101+
streamOptions: StreamOptions? = nil
99102
) {
100103
self.messages = messages
101104
self.model = model
@@ -117,6 +120,7 @@ public struct ChatQuery: Equatable, Codable, Streamable {
117120
self.topP = topP
118121
self.user = user
119122
self.stream = stream
123+
self.streamOptions = streamOptions
120124
}
121125

122126
public enum ChatCompletionMessageParam: Codable, Equatable {
@@ -1149,6 +1153,24 @@ public struct ChatQuery: Equatable, Codable, Streamable {
11491153
case function
11501154
}
11511155
}
1156+
1157+
public struct StreamOptions: Codable, Equatable {

    /// When `true`, one extra chunk is streamed just before the `data: [DONE]`
    /// message. That final chunk carries token-usage statistics for the whole
    /// request in its `usage` field and an empty `choices` array; every other
    /// chunk also carries a `usage` field, but with a `null` value.
    public let includeUsage: Bool

    public init(includeUsage: Bool) {
        self.includeUsage = includeUsage
    }

    // Maps the Swift property onto the API's snake_case wire key.
    public enum CodingKeys: String, CodingKey {
        case includeUsage = "include_usage"
    }
}
11521174

11531175
public enum CodingKeys: String, CodingKey {
11541176
case messages
@@ -1171,6 +1193,7 @@ public struct ChatQuery: Equatable, Codable, Streamable {
11711193
case topP = "top_p"
11721194
case user
11731195
case stream
1196+
case streamOptions = "stream_options"
11741197
}
11751198
}
11761199

Sources/OpenAI/Public/Models/ChatStreamResult.swift

+3
Original file line numberDiff line numberDiff line change
@@ -130,6 +130,8 @@ public struct ChatStreamResult: Codable, Equatable {
130130
public let choices: [Choice]
131131
/// This fingerprint represents the backend configuration that the model runs with. Can be used in conjunction with the `seed` request parameter to understand when backend changes have been made that might impact determinism.
132132
public let systemFingerprint: String?
133+
/// Usage statistics for the completion request.
134+
public let usage: ChatResult.CompletionUsage?
133135

134136
public enum CodingKeys: String, CodingKey {
135137
case id
@@ -138,5 +140,6 @@ public struct ChatStreamResult: Codable, Equatable {
138140
case model
139141
case choices
140142
case systemFingerprint = "system_fingerprint"
143+
case usage
141144
}
142145
}

Tests/OpenAITests/OpenAITestsDecoder.swift

+49
Original file line numberDiff line numberDiff line change
@@ -156,6 +156,55 @@ class OpenAITestsDecoder: XCTestCase {
156156

157157
XCTAssertEqual(chatQueryAsDict, expectedValueAsDict)
158158
}
159+
160+
func testChatQueryWithStreamOptions() async throws {
    // A streaming query that also opts in to usage reporting.
    let query = ChatQuery(
        messages: [.init(role: .user, content: "Who are you?")!],
        model: .gpt4,
        stream: true,
        streamOptions: .init(includeUsage: true)
    )
    // Expected wire format: `stream_options.include_usage` must be emitted
    // alongside `stream` when stream options are provided.
    let expectedJSON = """
    {
        "model": "gpt-4",
        "messages": [
            {
                "role": "user",
                "content": "Who are you?"
            }
        ],
        "stream": true,
        "stream_options": {
            "include_usage": true
        }
    }
    """

    let encodedQuery = try jsonDataAsNSDictionary(JSONEncoder().encode(query))
    let expectedDictionary = try jsonDataAsNSDictionary(expectedJSON.data(using: .utf8)!)

    XCTAssertEqual(encodedQuery, expectedDictionary)
}
185+
186+
func testChatQueryWithoutStreamOptions() async throws {
    // A streaming query that does NOT set stream options.
    let query = ChatQuery(
        messages: [.init(role: .user, content: "Who are you?")!],
        model: .gpt4,
        stream: true
    )
    // Expected wire format: the optional `stream_options` key must be
    // entirely absent when no StreamOptions value was supplied.
    let expectedJSON = """
    {
        "model": "gpt-4",
        "messages": [
            {
                "role": "user",
                "content": "Who are you?"
            }
        ],
        "stream": true
    }
    """

    let encodedQuery = try jsonDataAsNSDictionary(JSONEncoder().encode(query))
    let expectedDictionary = try jsonDataAsNSDictionary(expectedJSON.data(using: .utf8)!)

    XCTAssertEqual(encodedQuery, expectedDictionary)
}
159208

160209
func testChatQueryWithFunctionCall() async throws {
161210
let chatQuery = ChatQuery(

0 commit comments

Comments (0)