@@ -11,12 +11,15 @@ import Foundation

@MainActor
struct ServerSentEventsStreamInterpreterTests {
-    private let interpreter = ServerSentEventsStreamInterpreter<ChatStreamResult>(executionSerializer: NoDispatchExecutionSerializer())
+    private let interpreter = ServerSentEventsStreamInterpreter<ChatStreamResult>(
+        executionSerializer: NoDispatchExecutionSerializer(),
+        parsingOptions: []
+    )

    @Test func parseShortMessageResponseStream() async throws {
        var chatStreamResults: [ChatStreamResult] = []

-        await withCheckedContinuation { continuation in
+        try await withCheckedThrowingContinuation { continuation in
            interpreter.setCallbackClosures { result in
                Task {
                    await MainActor.run {
@@ -28,7 +31,8 @@ struct ServerSentEventsStreamInterpreterTests {
                        }
                    }
                }
-            } onError: { _ in
+            } onError: { error in
+                continuation.resume(throwing: error)
            }

            interpreter.processData(chatCompletionChunk())
@@ -43,15 +47,16 @@ struct ServerSentEventsStreamInterpreterTests {
    // - Ignore the line.
    @Test func ignoresLinesStartingWithColon() async throws {
        var chatStreamResults: [ChatStreamResult] = []
-        await withCheckedContinuation { continuation in
+        try await withCheckedThrowingContinuation { continuation in
            interpreter.setCallbackClosures { result in
                Task {
                    await MainActor.run {
                        chatStreamResults.append(result)
                        continuation.resume()
                    }
                }
-            } onError: { _ in
+            } onError: { error in
+                continuation.resume(throwing: error)
            }

            interpreter.processData(chatCompletionChunkWithComment())
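Both tests now use withCheckedThrowingContinuation and resume the continuation with the reported error, so a parsing failure surfaces as a thrown test error instead of leaving the continuation unresumed. Below is a minimal, self-contained sketch of that pattern; FakeInterpreter, StreamError, and firstEvent are illustrative stand-ins and are not part of this change.

// Sketch only: illustrative names, not part of this change.
import Foundation

enum StreamError: Error { case malformedChunk }

final class FakeInterpreter {
    private var onResult: ((String) -> Void)?
    private var onError: ((Error) -> Void)?

    func setCallbackClosures(onResult: @escaping (String) -> Void, onError: @escaping (Error) -> Void) {
        self.onResult = onResult
        self.onError = onError
    }

    func processData(_ data: Data) {
        // Treat anything that is not an SSE "data:" line as an error.
        guard let line = String(data: data, encoding: .utf8), line.hasPrefix("data: ") else {
            onError?(StreamError.malformedChunk)
            return
        }
        onResult?(String(line.dropFirst("data: ".count)))
    }
}

func firstEvent(from interpreter: FakeInterpreter, data: Data) async throws -> String {
    try await withCheckedThrowingContinuation { continuation in
        interpreter.setCallbackClosures(
            onResult: { continuation.resume(returning: $0) },
            // Resuming with the error is what lets the awaiting test fail fast.
            onError: { continuation.resume(throwing: $0) }
        )
        interpreter.processData(data)
    }
}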
@@ -82,11 +87,11 @@ struct ServerSentEventsStreamInterpreterTests {

    // Chunk with 3 objects. I captured it from a real response. It's a very short response that contains just "Hi"
    private func chatCompletionChunk() -> Data {
-        "data: {\"id\":\"chatcmpl-AwnboO5ZnaUyii9xxC5ZVmM5vGark\",\"object\":\"chat.completion.chunk\",\"created\":1738577084,\"model\":\"gpt-4-0613\",\"service_tier\":\"default\",\"system_fingerprint\":null,\"choices\":[{\"index\":0,\"delta\":{\"role\":\"assistant\",\"content\":\"\",\"refusal\":null},\"logprobs\":null,\"finish_reason\":null}]}\n\ndata: {\"id\":\"chatcmpl-AwnboO5ZnaUyii9xxC5ZVmM5vGark\",\"object\":\"chat.completion.chunk\",\"created\":1738577084,\"model\":\"gpt-4-0613\",\"service_tier\":\"default\",\"system_fingerprint\":null,\"choices\":[{\"index\":0,\"delta\":{\"content\":\"Hi\"},\"logprobs\":null,\"finish_reason\":null}]}\n\ndata: {\"id\":\"chatcmpl-AwnboO5ZnaUyii9xxC5ZVmM5vGark\",\"object\":\"chat.completion.chunk\",\"created\":1738577084,\"model\":\"gpt-4-0613\",\"service_tier\":\"default\",\"system_fingerprint\":null,\"choices\":[{\"index\":0,\"delta\":{},\"logprobs\":null,\"finish_reason\":\"stop\"}]}\n\n".data(using: .utf8)!
+        "data: {\"id\":\"chatcmpl-AwnboO5ZnaUyii9xxC5ZVmM5vGark\",\"object\":\"chat.completion.chunk\",\"created\":1738577084,\"model\":\"gpt-4-0613\",\"service_tier\":\"default\",\"system_fingerprint\":\"sysfig\",\"choices\":[{\"index\":0,\"delta\":{\"role\":\"assistant\",\"content\":\"\",\"refusal\":null},\"logprobs\":null,\"finish_reason\":null}]}\n\ndata: {\"id\":\"chatcmpl-AwnboO5ZnaUyii9xxC5ZVmM5vGark\",\"object\":\"chat.completion.chunk\",\"created\":1738577084,\"model\":\"gpt-4-0613\",\"service_tier\":\"default\",\"system_fingerprint\":\"sysfig\",\"choices\":[{\"index\":0,\"delta\":{\"content\":\"Hi\"},\"logprobs\":null,\"finish_reason\":null}]}\n\ndata: {\"id\":\"chatcmpl-AwnboO5ZnaUyii9xxC5ZVmM5vGark\",\"object\":\"chat.completion.chunk\",\"created\":1738577084,\"model\":\"gpt-4-0613\",\"service_tier\":\"default\",\"system_fingerprint\":\"sysfig\",\"choices\":[{\"index\":0,\"delta\":{},\"logprobs\":null,\"finish_reason\":\"stop\"}]}\n\n".data(using: .utf8)!
    }

    private func chatCompletionChunkWithComment() -> Data {
-        ": OPENROUTER PROCESSING\n\ndata: {\"id\":\"chatcmpl-AwnboO5ZnaUyii9xxC5ZVmM5vGark\",\"object\":\"chat.completion.chunk\",\"created\":1738577084,\"model\":\"gpt-4-0613\",\"service_tier\":\"default\",\"system_fingerprint\":null,\"choices\":[{\"index\":0,\"delta\":{\"role\":\"assistant\",\"content\":\"\",\"refusal\":null},\"logprobs\":null,\"finish_reason\":null}]}\n\n".data(using: .utf8)!
+        ": OPENROUTER PROCESSING\n\ndata: {\"id\":\"chatcmpl-AwnboO5ZnaUyii9xxC5ZVmM5vGark\",\"object\":\"chat.completion.chunk\",\"created\":1738577084,\"model\":\"gpt-4-0613\",\"service_tier\":\"default\",\"system_fingerprint\":\"sysfig\",\"choices\":[{\"index\":0,\"delta\":{\"role\":\"assistant\",\"content\":\"\",\"refusal\":null},\"logprobs\":null,\"finish_reason\":null}]}\n\n".data(using: .utf8)!
    }

    private func chatCompletionChunkTermination() -> Data {