@@ -1,11 +1,11 @@
 import {
   ChatCompletionCreateParamsWithModel,
   GenericChatCompletion,
+  GenericClient,
   InstructorConfig,
   LogLevel,
   OpenAILikeClient,
-  ReturnTypeBasedOnParams,
-  SupportedInstructorClient
+  ReturnTypeBasedOnParams
 } from "@/types"
 import OpenAI from "openai"
 import { z } from "zod"
@@ -24,7 +24,7 @@ import { ClientTypeChatCompletionParams, CompletionMeta } from "./types"

 const MAX_RETRIES_DEFAULT = 0

-class Instructor<C extends SupportedInstructorClient> {
+class Instructor<C extends GenericClient | OpenAI> {
   readonly client: OpenAILikeClient<C>
   readonly mode: Mode
   readonly provider: Provider
@@ -41,10 +41,12 @@ class Instructor<C extends SupportedInstructorClient> {
     this.debug = debug

     const provider =
-      this.client?.baseURL.includes(NON_OAI_PROVIDER_URLS.ANYSCALE) ? PROVIDERS.ANYSCALE
-      : this.client?.baseURL.includes(NON_OAI_PROVIDER_URLS.TOGETHER) ? PROVIDERS.TOGETHER
-      : this.client?.baseURL.includes(NON_OAI_PROVIDER_URLS.OAI) ? PROVIDERS.OAI
-      : this.client?.baseURL.includes(NON_OAI_PROVIDER_URLS.ANTHROPIC) ? PROVIDERS.ANTHROPIC
+      typeof this.client?.baseURL === "string" ?
+        this.client?.baseURL.includes(NON_OAI_PROVIDER_URLS.ANYSCALE) ? PROVIDERS.ANYSCALE
+        : this.client?.baseURL.includes(NON_OAI_PROVIDER_URLS.TOGETHER) ? PROVIDERS.TOGETHER
+        : this.client?.baseURL.includes(NON_OAI_PROVIDER_URLS.OAI) ? PROVIDERS.OAI
+        : this.client?.baseURL.includes(NON_OAI_PROVIDER_URLS.ANTHROPIC) ? PROVIDERS.ANTHROPIC
+        : PROVIDERS.OTHER
       : PROVIDERS.OTHER

     this.provider = provider
@@ -114,8 +116,8 @@ class Instructor<C extends SupportedInstructorClient> {
     let completionParams = withResponseModel({
       params: {
         ...params,
-        stream: false
-      },
+        stream: params.stream ?? false
+      } as OpenAI.ChatCompletionCreateParams,
       mode: this.mode,
       response_model
     })
@@ -141,12 +143,18 @@ class Instructor<C extends SupportedInstructorClient> {
       }
     }

-    let completion: GenericChatCompletion | null = null
+    let completion

     try {
-      completion = (await this.client.chat.completions.create(
-        resolvedParams
-      )) as GenericChatCompletion
+      if (this.client.chat?.completions?.create) {
+        const result = await this.client.chat.completions.create({
+          ...resolvedParams,
+          stream: false
+        })
+        completion = result as GenericChatCompletion<typeof result>
+      } else {
+        throw new Error("Unsupported client type")
+      }
       this.log("debug", "raw standard completion response: ", completion)
     } catch (error) {
       this.log(
@@ -245,7 +253,7 @@ class Instructor<C extends SupportedInstructorClient> {
       params: {
         ...params,
         stream: true
-      },
+      } as OpenAI.ChatCompletionCreateParams,
       response_model,
       mode: this.mode
     })
@@ -260,13 +268,19 @@ class Instructor<C extends SupportedInstructorClient> {

     return streamClient.create({
       completionPromise: async () => {
-        const completion = await this.client.chat.completions.create(completionParams)
-        this.log("debug", "raw stream completion response: ", completion)
-
-        return OAIStream({
-          //TODO: we need to move away from strict openai types - need to cast here but should update to be more flexible
-          res: completion as AsyncIterable<OpenAI.ChatCompletionChunk>
-        })
+        if (this.client.chat?.completions?.create) {
+          const completion = await this.client.chat.completions.create({
+            ...completionParams,
+            stream: true
+          })
+          this.log("debug", "raw stream completion response: ", completion)
+
+          return OAIStream({
+            res: completion as unknown as AsyncIterable<OpenAI.ChatCompletionChunk>
+          })
+        } else {
+          throw new Error("Unsupported client type")
+        }
       },
       response_model
     })
@@ -289,7 +303,7 @@ class Instructor<C extends SupportedInstructorClient> {
     create: async <
       T extends z.AnyZodObject,
       P extends T extends z.AnyZodObject ? ChatCompletionCreateParamsWithModel<T>
-      : ClientTypeChatCompletionParams<typeof this.client> & { response_model: never }
+      : ClientTypeChatCompletionParams<OpenAILikeClient<C>> & { response_model: never }
     >(
       params: P
     ): Promise<ReturnTypeBasedOnParams<typeof this.client, P>> => {
@@ -308,20 +322,23 @@ class Instructor<C extends SupportedInstructorClient> {
             >
           }
         } else {
-          const result =
-            this.isStandardStream(params) ?
-              await this.client.chat.completions.create(params)
-            : await this.client.chat.completions.create(params)
+          if (this.client.chat?.completions?.create) {
+            const result =
+              this.isStandardStream(params) ?
+                await this.client.chat.completions.create(params)
+              : await this.client.chat.completions.create(params)

-          return result as ReturnTypeBasedOnParams<typeof this.client, P>
+            return result as unknown as ReturnTypeBasedOnParams<OpenAILikeClient<C>, P>
+          } else {
+            throw new Error("Completion method is undefined")
+          }
         }
       }
     }
   }
 }

-export type InstructorClient<C extends SupportedInstructorClient = OpenAI> = Instructor<C> &
-  OpenAILikeClient<C>
+export type InstructorClient<C extends GenericClient | OpenAI> = Instructor<C> & OpenAILikeClient<C>

 /**
  * Creates an instance of the `Instructor` class.
@@ -344,7 +361,7 @@ export type InstructorClient<C extends SupportedInstructorClient = OpenAI> = Ins
  * @param args
  * @returns
  */
-export default function <C extends SupportedInstructorClient = OpenAI>(args: {
+export default function createInstructor<C extends GenericClient | OpenAI>(args: {
   client: OpenAILikeClient<C>
   mode: Mode
   debug?: boolean
@@ -355,6 +372,7 @@ export default function <C extends SupportedInstructorClient = OpenAI>(args: {
       if (prop in target) {
         return Reflect.get(target, prop, receiver)
       }
+
       return Reflect.get(target.client, prop, receiver)
     }
   })
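
For reference, a minimal usage sketch of the factory after this change follows. It assumes the library's published entry point ("@instructor-ai/instructor"), the "TOOLS" mode, and an illustrative model name and schema; none of these names come from the diff itself. Any OpenAI-compatible client can be passed as the client argument: per the changes above, a client whose baseURL matches no known provider falls back to PROVIDERS.OTHER, and a client without chat.completions.create now throws "Unsupported client type" at call time.

// Usage sketch only; package name, mode, model, and schema are assumptions.
import Instructor from "@instructor-ai/instructor"
import OpenAI from "openai"
import { z } from "zod"

// Any OpenAI-compatible client works; one whose baseURL matches no known
// provider is treated as PROVIDERS.OTHER by the constructor above.
const oai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY })

const client = Instructor({
  client: oai,
  mode: "TOOLS"
})

const UserSchema = z.object({
  name: z.string(),
  age: z.number()
})

async function main() {
  const user = await client.chat.completions.create({
    model: "gpt-4-turbo", // illustrative model name
    messages: [{ role: "user", content: "Jason is 30 years old" }],
    response_model: { schema: UserSchema, name: "User" }
  })

  console.log(user) // validated against UserSchema before being returned
}

void main()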