
Commit 035fdc4

Add ParsingOptions and use for ChatStreamResult

1 parent 15f0d6b · commit 035fdc4
11 files changed: +113 -26 lines

Sources/OpenAI/OpenAI+OpenAIAsync.swift (+1)

@@ -217,6 +217,7 @@ extension OpenAI: OpenAIAsync {
             middleware.intercept(response: current.response, request: urlRequest, data: current.data)
         }
         let decoder = JSONDecoder()
+        decoder.userInfo[.parsingOptions] = configuration.parsingOptions
         do {
             return try decoder.decode(ResultType.self, from: interceptedData ?? data)
         } catch {

Sources/OpenAI/OpenAI+OpenAICombine.swift (+2)

@@ -210,10 +210,12 @@ extension OpenAI: OpenAICombine {
             middleware.intercept(request: current)
         }
 
+        let parsingOptions = configuration.parsingOptions
         return session
             .dataTaskPublisher(for: interceptedRequest)
             .tryMap { (data, response) in
                 let decoder = JSONDecoder()
+                decoder.userInfo[.parsingOptions] = parsingOptions
                 let (_, interceptedData) = self.middlewares.reduce((response, data)) { current, middleware in
                     middleware.intercept(response: current.response, request: urlRequest, data: current.data)
                 }

Sources/OpenAI/OpenAI.swift (+12 -6)

@@ -45,7 +45,9 @@ final public class OpenAI: @unchecked Sendable {
     /// Currently SDK sets such fields: Authorization, Content-Type, OpenAI-Organization.
     public let customHeaders: [String: String]
 
-    public init(token: String?, organizationIdentifier: String? = nil, host: String = "api.openai.com", port: Int = 443, scheme: String = "https", basePath: String = "/v1", timeoutInterval: TimeInterval = 60.0, customHeaders: [String: String] = [:]) {
+    public let parsingOptions: ParsingOptions
+
+    public init(token: String?, organizationIdentifier: String? = nil, host: String = "api.openai.com", port: Int = 443, scheme: String = "https", basePath: String = "/v1", timeoutInterval: TimeInterval = 60.0, customHeaders: [String: String] = [:], parsingOptions: ParsingOptions = []) {
         self.token = token
         self.organizationIdentifier = organizationIdentifier
         self.host = host
@@ -54,6 +56,7 @@ final public class OpenAI: @unchecked Sendable {
         self.basePath = basePath
         self.timeoutInterval = timeoutInterval
         self.customHeaders = customHeaders
+        self.parsingOptions = parsingOptions
     }
 }
 
@@ -91,7 +94,11 @@ final public class OpenAI: @unchecked Sendable {
         middlewares: [OpenAIMiddleware] = [],
         sslStreamingDelegate: SSLDelegateProtocol? = nil
     ) {
-        let streamingSessionFactory = ImplicitURLSessionStreamingSessionFactory(sslDelegate: sslStreamingDelegate)
+        let streamingSessionFactory = ImplicitURLSessionStreamingSessionFactory(
+            middlewares: middlewares,
+            parsingOptions: configuration.parsingOptions,
+            sslDelegate: sslStreamingDelegate
+        )
 
         self.init(
             configuration: configuration,
@@ -309,8 +316,7 @@ extension OpenAI {
         }
 
         let session = streamingSessionFactory.makeServerSentEventsStreamingSession(
-            urlRequest: interceptedRequest,
-            middlewares: middlewares
+            urlRequest: interceptedRequest
         ) { _, object in
            onResult(.success(object))
        } onProcessingError: { _, error in
@@ -367,8 +373,7 @@ extension OpenAI {
         }
 
         let session = streamingSessionFactory.makeAudioSpeechStreamingSession(
-            urlRequest: interceptedRequest,
-            middlewares: middlewares
+            urlRequest: interceptedRequest
         ) { _, object in
            onResult(.success(object))
        } onProcessingError: { _, error in
@@ -397,6 +402,7 @@ extension OpenAI {
             return completion(.failure(OpenAIError.emptyData))
         }
         let decoder = JSONDecoder()
+        decoder.userInfo[.parsingOptions] = self.configuration.parsingOptions
         do {
             completion(.success(try decoder.decode(ResultType.self, from: data)))
         } catch {
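
For orientation, here is a minimal opt-in sketch. It is not part of the diff: it assumes the initializer in the first hunk belongs to the SDK's Configuration type (the hunk header only shows the enclosing OpenAI class, but later hunks read configuration.parsingOptions), and the token value is a placeholder.

import OpenAI

// Hypothetical client-side opt-in to lenient parsing (assumed Configuration API).
let configuration = OpenAI.Configuration(
    token: "sk-...",                                   // placeholder token
    parsingOptions: [.fillRequiredFieldIfKeyNotFound]  // option introduced in this commit
)
let client = OpenAI(configuration: configuration)
// Decoders created by the SDK now carry these options in their userInfo,
// so a missing required key can be filled with a default instead of throwing.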
New file (+12)

@@ -0,0 +1,12 @@
+//
+//  ParsingOptions.swift
+//  OpenAI
+//
+//  Created by Oleksii Nezhyborets on 27.03.2025.
+//
+
+import Foundation
+
+extension CodingUserInfoKey {
+    static let parsingOptions = CodingUserInfoKey(rawValue: "parsingOptions")!
+}
New file (+31)

@@ -0,0 +1,31 @@
+//
+//  KeyedDecodingContainer+ParsingOptions.swift
+//  OpenAI
+//
+//  Created by Oleksii Nezhyborets on 27.03.2025.
+//
+
+import Foundation
+
+extension KeyedDecodingContainer {
+    func decodeString(forKey key: KeyedDecodingContainer<K>.Key, parsingOptions: ParsingOptions) throws -> String {
+        try self.decode(String.self, forKey: key, parsingOptions: parsingOptions, defaultValue: "")
+    }
+
+    func decode<T: Decodable>(_ type: T.Type, forKey key: KeyedDecodingContainer<K>.Key, parsingOptions: ParsingOptions, defaultValue: T) throws -> T {
+        do {
+            return try decode(T.self, forKey: key)
+        } catch {
+            switch error {
+            case DecodingError.keyNotFound:
+                if parsingOptions.contains(.fillRequiredFieldIfKeyNotFound) {
+                    return defaultValue
+                } else {
+                    throw error
+                }
+            default:
+                throw error
+            }
+        }
+    }
+}
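
A rough illustration of the fallback behaviour (a hypothetical type, not part of the commit; it would have to live inside the OpenAI module, since the helper and the userInfo key are internal):

// Hypothetical model that opts into the lenient String decode helper.
struct ExampleChunk: Decodable {
    let id: String

    enum CodingKeys: String, CodingKey { case id }

    init(from decoder: Decoder) throws {
        let container = try decoder.container(keyedBy: CodingKeys.self)
        let options = decoder.userInfo[.parsingOptions] as? ParsingOptions ?? []
        // If "id" is absent and .fillRequiredFieldIfKeyNotFound is set, this
        // returns "" instead of rethrowing DecodingError.keyNotFound.
        // Any other decoding error is still thrown.
        self.id = try container.decodeString(forKey: .id, parsingOptions: options)
    }
}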

Sources/OpenAI/Private/Streaming/ServerSentEventsStreamInterpreter.swift (+4 -1)

@@ -16,9 +16,11 @@ final class ServerSentEventsStreamInterpreter <ResultType: Codable & Sendable>:
     private var onEventDispatched: ((ResultType) -> Void)?
     private var onError: ((Error) -> Void)?
     private let executionSerializer: ExecutionSerializer
+    private let parsingOptions: ParsingOptions
 
-    init(executionSerializer: ExecutionSerializer = GCDQueueAsyncExecutionSerializer(queue: .userInitiated)) {
+    init(executionSerializer: ExecutionSerializer = GCDQueueAsyncExecutionSerializer(queue: .userInitiated), parsingOptions: ParsingOptions) {
         self.executionSerializer = executionSerializer
+        self.parsingOptions = parsingOptions
     }
 
     /// Sets closures an instance of type in a thread safe manner
@@ -90,6 +92,7 @@ final class ServerSentEventsStreamInterpreter <ResultType: Codable & Sendable>:
             return
         }
         let decoder = JSONDecoder()
+        decoder.userInfo[.parsingOptions] = parsingOptions
         do {
             let object = try decoder.decode(ResultType.self, from: jsonData)
             onEventDispatched?(object)

Sources/OpenAI/Private/Streaming/ServerSentEventsStreamingSessionFactory.swift (+3 -5)

@@ -14,34 +14,33 @@ import FoundationNetworking
 protocol StreamingSessionFactory {
     func makeServerSentEventsStreamingSession<ResultType: Codable & Sendable>(
         urlRequest: URLRequest,
-        middlewares: [OpenAIMiddleware],
         onReceiveContent: @Sendable @escaping (StreamingSession<ServerSentEventsStreamInterpreter<ResultType>>, ResultType) -> Void,
         onProcessingError: @Sendable @escaping (StreamingSession<ServerSentEventsStreamInterpreter<ResultType>>, Error) -> Void,
         onComplete: @Sendable @escaping (StreamingSession<ServerSentEventsStreamInterpreter<ResultType>>, Error?) -> Void
     ) -> StreamingSession<ServerSentEventsStreamInterpreter<ResultType>>
 
     func makeAudioSpeechStreamingSession(
         urlRequest: URLRequest,
-        middlewares: [OpenAIMiddleware],
         onReceiveContent: @Sendable @escaping (StreamingSession<AudioSpeechStreamInterpreter>, AudioSpeechResult) -> Void,
         onProcessingError: @Sendable @escaping (StreamingSession<AudioSpeechStreamInterpreter>, Error) -> Void,
         onComplete: @Sendable @escaping (StreamingSession<AudioSpeechStreamInterpreter>, Error?) -> Void
     ) -> StreamingSession<AudioSpeechStreamInterpreter>
 }
 
 struct ImplicitURLSessionStreamingSessionFactory: StreamingSessionFactory {
+    let middlewares: [OpenAIMiddleware]
+    let parsingOptions: ParsingOptions
     let sslDelegate: SSLDelegateProtocol?
 
     func makeServerSentEventsStreamingSession<ResultType>(
         urlRequest: URLRequest,
-        middlewares: [OpenAIMiddleware],
         onReceiveContent: @Sendable @escaping (StreamingSession<ServerSentEventsStreamInterpreter<ResultType>>, ResultType) -> Void,
         onProcessingError: @Sendable @escaping (StreamingSession<ServerSentEventsStreamInterpreter<ResultType>>, any Error) -> Void,
         onComplete: @Sendable @escaping (StreamingSession<ServerSentEventsStreamInterpreter<ResultType>>, (any Error)?) -> Void
     ) -> StreamingSession<ServerSentEventsStreamInterpreter<ResultType>> where ResultType : Decodable, ResultType : Encodable, ResultType : Sendable {
         .init(
             urlRequest: urlRequest,
-            interpreter: .init(),
+            interpreter: .init(parsingOptions: parsingOptions),
             sslDelegate: sslDelegate,
             middlewares: middlewares,
             onReceiveContent: onReceiveContent,
@@ -52,7 +51,6 @@ struct ImplicitURLSessionStreamingSessionFactory: StreamingSessionFactory {
 
     func makeAudioSpeechStreamingSession(
         urlRequest: URLRequest,
-        middlewares: [OpenAIMiddleware],
         onReceiveContent: @Sendable @escaping (StreamingSession<AudioSpeechStreamInterpreter>, AudioSpeechResult) -> Void,
         onProcessingError: @Sendable @escaping (StreamingSession<AudioSpeechStreamInterpreter>, any Error) -> Void,
         onComplete: @Sendable @escaping (StreamingSession<AudioSpeechStreamInterpreter>, (any Error)?) -> Void

Sources/OpenAI/Public/Models/ChatStreamResult.swift (+15 -2)

@@ -8,7 +8,6 @@
 import Foundation
 
 public struct ChatStreamResult: Codable, Equatable, Sendable {
-
     public struct Choice: Codable, Equatable, Sendable {
         public typealias FinishReason = ChatResult.Choice.FinishReason
 
@@ -135,7 +134,7 @@ public struct ChatStreamResult: Codable, Equatable, Sendable {
     /// Can be more than one if `n` is greater than 1.
     public let choices: [Choice]
     /// This fingerprint represents the backend configuration that the model runs with. Can be used in conjunction with the `seed` request parameter to understand when backend changes have been made that might impact determinism.
-    public let systemFingerprint: String?
+    public let systemFingerprint: String
     /// Usage statistics for the completion request.
     public let usage: ChatResult.CompletionUsage?
 
@@ -149,4 +148,18 @@
         case systemFingerprint = "system_fingerprint"
         case usage
     }
+
+    public init(from decoder: any Decoder) throws {
+        let container = try decoder.container(keyedBy: CodingKeys.self)
+        let parsingOptions = decoder.userInfo[.parsingOptions] as? ParsingOptions ?? []
+
+        self.id = try container.decodeString(forKey: .id, parsingOptions: parsingOptions)
+        self.object = try container.decode(String.self, forKey: .object)
+        self.created = try container.decode(TimeInterval.self, forKey: .created)
+        self.model = try container.decode(String.self, forKey: .model)
+        self.citations = try container.decodeIfPresent([String].self, forKey: .citations)
+        self.choices = try container.decode([ChatStreamResult.Choice].self, forKey: .choices)
+        self.systemFingerprint = try container.decodeString(forKey: .systemFingerprint, parsingOptions: parsingOptions)
+        self.usage = try container.decodeIfPresent(ChatResult.CompletionUsage.self, forKey: .usage)
+    }
 }
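
The net effect on ChatStreamResult, as a small sketch (the decoder setup mirrors what the SDK now does internally; since the userInfo key is internal, this would live inside the package or its tests, and the JSON payload is made up):

// A chunk that omits "system_fingerprint" entirely.
let json = #"{"id":"chatcmpl-1","object":"chat.completion.chunk","created":1738577084,"model":"gpt-4-0613","choices":[]}"#

let decoder = JSONDecoder()
decoder.userInfo[.parsingOptions] = ParsingOptions.fillRequiredFieldIfKeyNotFound

do {
    // With the option set, decoding succeeds and systemFingerprint is "".
    // Without it, decode(_:from:) throws DecodingError.keyNotFound for system_fingerprint.
    let chunk = try decoder.decode(ChatStreamResult.self, from: Data(json.utf8))
    print(chunk.systemFingerprint.isEmpty) // true
} catch {
    print("Decoding failed:", error)
}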
New file (+18)

@@ -0,0 +1,18 @@
+//
+//  ParsingOptions.swift
+//  OpenAI
+//
+//  Created by Oleksii Nezhyborets on 27.03.2025.
+//
+
+import Foundation
+
+public struct ParsingOptions: OptionSet {
+    public let rawValue: Int
+
+    public init(rawValue: Int) {
+        self.rawValue = rawValue
+    }
+
+    public static let fillRequiredFieldIfKeyNotFound = ParsingOptions(rawValue: 1 << 0)
+}

Tests/OpenAITests/Mocks/MockStreamingSessionFactory.swift (+3 -5)

@@ -17,17 +17,16 @@ class MockStreamingSessionFactory: StreamingSessionFactory {
 
     func makeServerSentEventsStreamingSession<ResultType>(
         urlRequest: URLRequest,
-        middlewares: [OpenAIMiddleware],
         onReceiveContent: @Sendable @escaping (StreamingSession<ServerSentEventsStreamInterpreter<ResultType>>, ResultType) -> Void,
         onProcessingError: @Sendable @escaping (StreamingSession<ServerSentEventsStreamInterpreter<ResultType>>, any Error) -> Void,
         onComplete: @Sendable @escaping (StreamingSession<ServerSentEventsStreamInterpreter<ResultType>>, (any Error)?) -> Void
     ) -> StreamingSession<ServerSentEventsStreamInterpreter<ResultType>> where ResultType : Decodable, ResultType : Encodable, ResultType : Sendable {
         .init(
             urlSessionFactory: urlSessionFactory,
             urlRequest: urlRequest,
-            interpreter: .init(executionSerializer: NoDispatchExecutionSerializer()),
+            interpreter: .init(executionSerializer: NoDispatchExecutionSerializer(), parsingOptions: []),
             sslDelegate: nil,
-            middlewares: middlewares,
+            middlewares: [],
             onReceiveContent: onReceiveContent,
             onProcessingError: onProcessingError,
             onComplete: onComplete
@@ -36,7 +35,6 @@ class MockStreamingSessionFactory: StreamingSessionFactory {
 
     func makeAudioSpeechStreamingSession(
         urlRequest: URLRequest,
-        middlewares: [OpenAIMiddleware],
         onReceiveContent: @Sendable @escaping (StreamingSession<AudioSpeechStreamInterpreter>, AudioSpeechResult) -> Void,
         onProcessingError: @Sendable @escaping (StreamingSession<AudioSpeechStreamInterpreter>, any Error) -> Void,
         onComplete: @Sendable @escaping (StreamingSession<AudioSpeechStreamInterpreter>, (any Error)?) -> Void
@@ -46,7 +44,7 @@ class MockStreamingSessionFactory: StreamingSessionFactory {
             urlRequest: urlRequest,
             interpreter: .init(),
             sslDelegate: nil,
-            middlewares: middlewares,
+            middlewares: [],
             onReceiveContent: onReceiveContent,
             onProcessingError: onProcessingError,
             onComplete: onComplete

Tests/OpenAITests/ServerSentEventsStreamInterpreterTests.swift (+12 -7)

@@ -11,12 +11,15 @@ import Foundation
 
 @MainActor
 struct ServerSentEventsStreamInterpreterTests {
-    private let interpreter = ServerSentEventsStreamInterpreter<ChatStreamResult>(executionSerializer: NoDispatchExecutionSerializer())
+    private let interpreter = ServerSentEventsStreamInterpreter<ChatStreamResult>(
+        executionSerializer: NoDispatchExecutionSerializer(),
+        parsingOptions: []
+    )
 
     @Test func parseShortMessageResponseStream() async throws {
         var chatStreamResults: [ChatStreamResult] = []
 
-        await withCheckedContinuation { continuation in
+        try await withCheckedThrowingContinuation { continuation in
             interpreter.setCallbackClosures { result in
                 Task {
                     await MainActor.run {
@@ -28,7 +31,8 @@ struct ServerSentEventsStreamInterpreterTests {
                     }
                 }
             }
-            } onError: { _ in
+            } onError: { error in
+                continuation.resume(throwing: error)
             }
 
             interpreter.processData(chatCompletionChunk())
@@ -43,15 +47,16 @@
     // - Ignore the line.
     @Test func ignoresLinesStartingWithColon() async throws {
         var chatStreamResults: [ChatStreamResult] = []
-        await withCheckedContinuation { continuation in
+        try await withCheckedThrowingContinuation { continuation in
             interpreter.setCallbackClosures { result in
                 Task {
                     await MainActor.run {
                         chatStreamResults.append(result)
                         continuation.resume()
                     }
                 }
-            } onError: { _ in
+            } onError: { error in
+                continuation.resume(throwing: error)
             }
 
             interpreter.processData(chatCompletionChunkWithComment())
@@ -82,11 +87,11 @@ struct ServerSentEventsStreamInterpreterTests {
 
     // Chunk with 3 objects. I captured it from a real response. It's a very short response that contains just "Hi"
     private func chatCompletionChunk() -> Data {
-        "data: {\"id\":\"chatcmpl-AwnboO5ZnaUyii9xxC5ZVmM5vGark\",\"object\":\"chat.completion.chunk\",\"created\":1738577084,\"model\":\"gpt-4-0613\",\"service_tier\":\"default\",\"system_fingerprint\":null,\"choices\":[{\"index\":0,\"delta\":{\"role\":\"assistant\",\"content\":\"\",\"refusal\":null},\"logprobs\":null,\"finish_reason\":null}]}\n\ndata: {\"id\":\"chatcmpl-AwnboO5ZnaUyii9xxC5ZVmM5vGark\",\"object\":\"chat.completion.chunk\",\"created\":1738577084,\"model\":\"gpt-4-0613\",\"service_tier\":\"default\",\"system_fingerprint\":null,\"choices\":[{\"index\":0,\"delta\":{\"content\":\"Hi\"},\"logprobs\":null,\"finish_reason\":null}]}\n\ndata: {\"id\":\"chatcmpl-AwnboO5ZnaUyii9xxC5ZVmM5vGark\",\"object\":\"chat.completion.chunk\",\"created\":1738577084,\"model\":\"gpt-4-0613\",\"service_tier\":\"default\",\"system_fingerprint\":null,\"choices\":[{\"index\":0,\"delta\":{},\"logprobs\":null,\"finish_reason\":\"stop\"}]}\n\n".data(using: .utf8)!
+        "data: {\"id\":\"chatcmpl-AwnboO5ZnaUyii9xxC5ZVmM5vGark\",\"object\":\"chat.completion.chunk\",\"created\":1738577084,\"model\":\"gpt-4-0613\",\"service_tier\":\"default\",\"system_fingerprint\":\"sysfig\",\"choices\":[{\"index\":0,\"delta\":{\"role\":\"assistant\",\"content\":\"\",\"refusal\":null},\"logprobs\":null,\"finish_reason\":null}]}\n\ndata: {\"id\":\"chatcmpl-AwnboO5ZnaUyii9xxC5ZVmM5vGark\",\"object\":\"chat.completion.chunk\",\"created\":1738577084,\"model\":\"gpt-4-0613\",\"service_tier\":\"default\",\"system_fingerprint\":\"sysfig\",\"choices\":[{\"index\":0,\"delta\":{\"content\":\"Hi\"},\"logprobs\":null,\"finish_reason\":null}]}\n\ndata: {\"id\":\"chatcmpl-AwnboO5ZnaUyii9xxC5ZVmM5vGark\",\"object\":\"chat.completion.chunk\",\"created\":1738577084,\"model\":\"gpt-4-0613\",\"service_tier\":\"default\",\"system_fingerprint\":\"sysfig\",\"choices\":[{\"index\":0,\"delta\":{},\"logprobs\":null,\"finish_reason\":\"stop\"}]}\n\n".data(using: .utf8)!
     }
 
     private func chatCompletionChunkWithComment() -> Data {
-        ": OPENROUTER PROCESSING\n\ndata: {\"id\":\"chatcmpl-AwnboO5ZnaUyii9xxC5ZVmM5vGark\",\"object\":\"chat.completion.chunk\",\"created\":1738577084,\"model\":\"gpt-4-0613\",\"service_tier\":\"default\",\"system_fingerprint\":null,\"choices\":[{\"index\":0,\"delta\":{\"role\":\"assistant\",\"content\":\"\",\"refusal\":null},\"logprobs\":null,\"finish_reason\":null}]}\n\n".data(using: .utf8)!
+        ": OPENROUTER PROCESSING\n\ndata: {\"id\":\"chatcmpl-AwnboO5ZnaUyii9xxC5ZVmM5vGark\",\"object\":\"chat.completion.chunk\",\"created\":1738577084,\"model\":\"gpt-4-0613\",\"service_tier\":\"default\",\"system_fingerprint\":\"sysfig\",\"choices\":[{\"index\":0,\"delta\":{\"role\":\"assistant\",\"content\":\"\",\"refusal\":null},\"logprobs\":null,\"finish_reason\":null}]}\n\n".data(using: .utf8)!
     }
 
     private func chatCompletionChunkTermination() -> Data {
