Commit f93d93b

Add stream output type (#74)
1 parent 2a63e86 commit f93d93b

File tree: 13 files changed, +460 -110 lines

.changeset/twelve-owls-speak.md
+5
@@ -0,0 +1,5 @@
+---
+"@instructor-ai/instructor": patch
+---
+
+adding an optional flag "streamOutputType" to the create call - this will allow you to specify stream vs generator, although we default to generator still
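
For a sense of how the new flag reads at a call site, here is a minimal sketch. It assumes the public package import "@instructor-ai/instructor", a placeholder UserSchema, and the same "TOOLS" mode used in the examples changed below; omitting streamOutputType keeps the default generator behavior.

import Instructor from "@instructor-ai/instructor"
import OpenAI from "openai"
import { z } from "zod"

// Placeholder schema purely for illustration.
const UserSchema = z.object({
  name: z.string(),
  age: z.number()
})

const oai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY ?? undefined })
const client = Instructor({ client: oai, mode: "TOOLS" })

// streamOutputType: "GENERATOR" is the default; "READABLE" returns a web ReadableStream instead.
const stream = await client.chat.completions.create({
  messages: [{ role: "user", content: "Jason is 30 years old" }],
  model: "gpt-4-1106-preview",
  response_model: { schema: UserSchema, name: "User" },
  stream: true,
  streamOutputType: "GENERATOR"
})

// With the generator output type, partial results are consumed with for await.
for await (const partial of stream) {
  console.log(partial)
}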

.vscode/settings.json
+2 -2
@@ -6,7 +6,7 @@
   "editor.formatOnPaste": false,
   "editor.trimAutoWhitespace": true,
   "editor.codeActionsOnSave": {
-    "source.fixAll.eslint": true
+    "source.fixAll.eslint": "always"
   },
   "[html]": {
     "editor.defaultFormatter": "GitHub.copilot"
@@ -42,6 +42,6 @@
     "editor.defaultFormatter": "vscode.json-language-features"
   },
   "[typescript]": {
-    "editor.defaultFormatter": "esbenp.prettier-vscode"
+    "editor.defaultFormatter": "vscode.typescript-language-features"
   }
 }

bun.lockb
0 bytes changed; binary file not shown.

examples/extract_user_stream/index.ts
+3 -4
@@ -39,8 +39,6 @@ const ExtractionValuesSchema = z.object({
   deadline: z.string().min(1)
 })
 
-type Extraction = Partial<z.infer<typeof ExtractionValuesSchema>>
-
 const oai = new OpenAI({
   apiKey: process.env.OPENAI_API_KEY ?? undefined,
   organization: process.env.OPENAI_ORG_ID ?? undefined
@@ -51,20 +49,21 @@ const client = Instructor({
   mode: "TOOLS"
 })
 
+let extraction = {}
+
 const extractionStream = await client.chat.completions.create({
   messages: [{ role: "user", content: textBlock }],
   model: "gpt-4-1106-preview",
   response_model: {
     schema: ExtractionValuesSchema,
     name: "value extraction"
   },
+  streamOutputType: "GENERATOR",
   max_retries: 3,
   stream: true,
   seed: 1
 })
 
-let extraction: Extraction = {}
-
 for await (const result of extractionStream) {
   try {
     extraction = result
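
With streamOutputType set to "GENERATOR" the create call still returns an async iterable of progressively completed partials, so the loop in this hunk keeps overwriting extraction on each yield. A short sketch of how the rest of that loop plausibly continues; the logging and error handling below are assumptions, since the hunk ends at extraction = result.

for await (const result of extractionStream) {
  try {
    extraction = result        // each yield is a progressively completed partial object
    console.table(extraction)  // live view of the fields extracted so far
  } catch (e) {
    console.error(e)
    break
  }
}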
New example file
+89
@@ -0,0 +1,89 @@
+import Instructor from "@/instructor"
+import OpenAI from "openai"
+import { z } from "zod"
+
+const textBlock = `
+In our recent online meeting, participants from various backgrounds joined to discuss the upcoming tech conference. The names and contact details of the participants were as follows:
+
+- Name: John Doe, Email: [email protected], Twitter: @TechGuru44
+- Name: Jane Smith, Email: [email protected], Twitter: @DigitalDiva88
+- Name: Alex Johnson, Email: [email protected], Twitter: @CodeMaster2023
+- Name: Emily Clark, Email: [email protected], Twitter: @InnovateQueen
+- Name: Ron Stewart, Email: [email protected], Twitter: @RoboticsRon5
+- Name: Sarah Lee, Email: [email protected], Twitter: @AI_Aficionado
+- Name: Mike Brown, Email: [email protected], Twitter: @FutureTechLeader
+- Name: Lisa Green, Email: [email protected], Twitter: @CyberSavvy101
+- Name: David Wilson, Email: [email protected], Twitter: @GadgetGeek77
+- Name: Daniel Kim, Email: [email protected], Twitter: @DataDrivenDude
+
+During the meeting, we agreed on several key points. The conference will be held on March 15th, 2024, at the Grand Tech Arena located at 4521 Innovation Drive. Dr. Emily Johnson, a renowned AI researcher, will be our keynote speaker.
+
+The budget for the event is set at $50,000, covering venue costs, speaker fees, and promotional activities. Each participant is expected to contribute an article to the conference blog by February 20th.
+
+A follow-up meeting is scheduled for January 25th at 3 PM GMT to finalize the agenda and confirm the list of speakers.
+`
+
+const ExtractionValuesSchema = z.object({
+  users: z
+    .array(
+      z.object({
+        name: z.string(),
+        handle: z.string(),
+        twitter: z.string()
+      })
+    )
+    .min(5),
+  date: z.string(),
+  location: z.string(),
+  budget: z.number(),
+  deadline: z.string().min(1)
+})
+
+const oai = new OpenAI({
+  apiKey: process.env.OPENAI_API_KEY ?? undefined,
+  organization: process.env.OPENAI_ORG_ID ?? undefined
+})
+
+const client = Instructor({
+  client: oai,
+  mode: "TOOLS"
+})
+
+const extraction = {}
+
+const extractionStream = await client.chat.completions.create({
+  messages: [{ role: "user", content: textBlock }],
+  model: "gpt-4-1106-preview",
+  response_model: {
+    schema: ExtractionValuesSchema,
+    name: "value extraction"
+  },
+  streamOutputType: "READABLE",
+  max_retries: 3,
+  stream: true,
+  seed: 1
+})
+
+const reader = extractionStream.getReader()
+const decoder = new TextDecoder()
+
+let result = client.getSchemaStub({ schema: ExtractionValuesSchema })
+
+let done = false
+while (!done) {
+  const { value, done: doneReading } = await reader.read()
+  done = doneReading
+
+  if (done) {
+    break
+  }
+
+  const chunkValue = decoder.decode(value)
+
+  result = JSON.parse(chunkValue)
+  console.log(result)
+}
+
+console.clear()
+console.log("completed extraction:")
+console.table(extraction)
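
With streamOutputType: "READABLE" the call instead returns a web ReadableStream of encoded text chunks, each of which parses as a JSON snapshot of the partially extracted object, and client.getSchemaStub({ schema }) seeds result with an empty schema-shaped object before the first chunk arrives. The drain loop above could also be factored into a small helper; the sketch below is illustrative only, and drainJsonSnapshots is a hypothetical name, not part of this commit.

// Hypothetical helper: drains a READABLE-mode stream and returns the last JSON snapshot.
async function drainJsonSnapshots<T>(stream: ReadableStream<Uint8Array>, initial: T): Promise<T> {
  const reader = stream.getReader()
  const decoder = new TextDecoder()
  let latest = initial

  while (true) {
    const { value, done } = await reader.read()
    if (done) break
    // Each chunk decodes to a complete JSON snapshot of the object extracted so far.
    latest = JSON.parse(decoder.decode(value)) as T
  }

  return latest
}

// Usage with the example above (sketch):
// const final = await drainJsonSnapshots(extractionStream, result)
// console.table(final)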

package.json
+2 -2
@@ -48,8 +48,7 @@
   },
   "homepage": "https://github.com/instructor-ai/instructor-js#readme",
   "dependencies": {
-    "schema-stream": "1.6.0",
-    "ts-inference-check": "^0.3.0",
+    "schema-stream": "2.0.1",
     "zod-to-json-schema": "^3.22.3",
     "zod-validation-error": "^2.1.0"
   },
@@ -73,6 +72,7 @@
     "husky": "^8.0.3",
     "prettier": "3.0.0",
     "tsup": "^8.0.1",
+    "ts-inference-check": "^0.3.0",
     "typescript": "^5.3.3"
   }
 }

src/constants/modes.ts
+26 -1
@@ -1,3 +1,14 @@
+import {
+  OAIBuildFunctionParams,
+  OAIBuildMessageBasedParams,
+  OAIBuildToolFunctionParams
+} from "@/oai/params"
+import {
+  OAIResponseFnArgsParser,
+  OAIResponseJSONStringParser,
+  OAIResponseToolArgsParser
+} from "@/oai/parser"
+
 export const MODE = {
   FUNCTIONS: "FUNCTIONS",
   TOOLS: "TOOLS",
@@ -6,4 +17,18 @@ export const MODE = {
   JSON_SCHEMA: "JSON_SCHEMA"
 } as const
 
-export type MODE = keyof typeof MODE
+export const MODE_TO_PARSER = {
+  [MODE.FUNCTIONS]: OAIResponseFnArgsParser,
+  [MODE.TOOLS]: OAIResponseToolArgsParser,
+  [MODE.JSON]: OAIResponseJSONStringParser,
+  [MODE.MD_JSON]: OAIResponseJSONStringParser,
+  [MODE.JSON_SCHEMA]: OAIResponseJSONStringParser
+}
+
+export const MODE_TO_PARAMS = {
+  [MODE.FUNCTIONS]: OAIBuildFunctionParams,
+  [MODE.TOOLS]: OAIBuildToolFunctionParams,
+  [MODE.JSON]: OAIBuildMessageBasedParams,
+  [MODE.MD_JSON]: OAIBuildMessageBasedParams,
+  [MODE.JSON_SCHEMA]: OAIBuildMessageBasedParams
+}
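
These two tables turn mode handling into a lookup rather than a switch: given the active mode, the client can pick the matching params builder and response parser directly. A minimal sketch of that dispatch, assuming the "@/constants/modes" import path and leaving the actual call signatures commented out since they are not shown in this commit.

import { MODE, MODE_TO_PARAMS, MODE_TO_PARSER } from "@/constants/modes"

// Illustrative only: table-lookup dispatch keyed by the active mode.
const mode: keyof typeof MODE = MODE.TOOLS

const buildParams = MODE_TO_PARAMS[mode]   // OAIBuildToolFunctionParams for TOOLS
const parseResponse = MODE_TO_PARSER[mode] // OAIResponseToolArgsParser for TOOLS

// const params = buildParams(/* definition, base completion params */)
// const parsed = parseResponse(/* completion returned by OpenAI */)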
