Skip to content

Commit bf240b2

Browse files
authored
updating deps and tests (#191)
1 parent c810a5c commit bf240b2

File tree

7 files changed

+24
-69
lines changed

7 files changed

+24
-69
lines changed

.changeset/silly-bikes-float.md

+7
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
---
2+
"@instructor-ai/instructor": minor
3+
---
4+
5+
Updating the core openai, zod-stream, zod, and anthropic dependencies to their latest versions — also updating tests and supported-mode validation to better handle changes without warning unnecessarily.
6+
7+
Peer dependencies will likely need to be updated to match the latest.

bun.lockb

-2.64 KB
Binary file not shown.

package.json

+7-7
Original file line numberDiff line numberDiff line change
@@ -51,15 +51,15 @@
5151
},
5252
"homepage": "https://github.com/instructor-ai/instructor-js#readme",
5353
"dependencies": {
54-
"zod-stream": "1.0.3",
55-
"zod-validation-error": "^2.1.0"
54+
"zod-stream": "2.0.0",
55+
"zod-validation-error": "^3.4.0"
5656
},
5757
"peerDependencies": {
58-
"openai": ">=4.28.0",
59-
"zod": ">=3.22.4"
58+
"openai": ">=4.58.0",
59+
"zod": ">=3.23.8"
6060
},
6161
"devDependencies": {
62-
"@anthropic-ai/sdk": "0.22.0",
62+
"@anthropic-ai/sdk": "0.29.2",
6363
"@changesets/changelog-github": "^0.5.0",
6464
"@changesets/cli": "^2.27.1",
6565
"@ianvs/prettier-plugin-sort-imports": "4.1.0",
@@ -75,8 +75,8 @@
7575
"eslint-plugin-only-warn": "^1.1.0",
7676
"eslint-plugin-prettier": "^5.1.2",
7777
"husky": "^8.0.3",
78-
"llm-polyglot": "2.0.0",
79-
"openai": "4.50.0",
78+
"llm-polyglot": "2.2.0",
79+
"openai": "4.68.1",
8080
"prettier": "latest",
8181
"ts-inference-check": "^0.3.0",
8282
"tsup": "^8.0.1",

src/constants/providers.ts

+7-18
Original file line numberDiff line numberDiff line change
@@ -107,32 +107,21 @@ export const PROVIDER_SUPPORTED_MODES_BY_MODEL = {
107107
[MODE.MD_JSON]: ["*"]
108108
},
109109
[PROVIDERS.TOGETHER]: {
110-
[MODE.MD_JSON]: ["*"],
111-
[MODE.JSON_SCHEMA]: [
112-
"mistralai/Mixtral-8x7B-Instruct-v0.1",
113-
"mistralai/Mistral-7B-Instruct-v0.1",
114-
"togethercomputer/CodeLlama-34b-Instruct"
115-
],
116-
[MODE.TOOLS]: [
117-
"mistralai/Mixtral-8x7B-Instruct-v0.1",
118-
"mistralai/Mistral-7B-Instruct-v0.1",
119-
"togethercomputer/CodeLlama-34b-Instruct"
120-
]
110+
[MODE.MD_JSON]: ["*"]
111+
// [MODE.JSON_SCHEMA]: ["*"]
112+
// [MODE.TOOLS]: ["*"]
121113
},
122114
[PROVIDERS.ANYSCALE]: {
123-
[MODE.MD_JSON]: ["*"],
124-
[MODE.JSON_SCHEMA]: [
125-
"mistralai/Mistral-7B-Instruct-v0.1",
126-
"mistralai/Mixtral-8x7B-Instruct-v0.1"
127-
],
128-
[MODE.TOOLS]: ["mistralai/Mistral-7B-Instruct-v0.1", "mistralai/Mixtral-8x7B-Instruct-v0.1"]
115+
[MODE.MD_JSON]: ["*"]
116+
// [MODE.JSON_SCHEMA]: ["*"]
117+
// [MODE.TOOLS]: ["*"]
129118
},
130119
[PROVIDERS.ANTHROPIC]: {
131120
[MODE.MD_JSON]: ["*"],
132121
[MODE.TOOLS]: ["*"]
133122
},
134123
[PROVIDERS.GROQ]: {
135-
[MODE.TOOLS]: ["mixtral-8x7b-32768", "gemma-7b-it"],
124+
[MODE.TOOLS]: ["*"],
136125
[MODE.MD_JSON]: ["*"]
137126
}
138127
}

src/instructor.ts

-17
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,6 @@ import {
1919
Provider,
2020
PROVIDER_PARAMS_TRANSFORMERS,
2121
PROVIDER_SUPPORTED_MODES,
22-
PROVIDER_SUPPORTED_MODES_BY_MODEL,
2322
PROVIDERS
2423
} from "./constants/providers"
2524
import { iterableTee } from "./lib"
@@ -89,20 +88,6 @@ class Instructor<C> {
8988
}
9089
}
9190

92-
private validateModelModeSupport<T extends z.AnyZodObject>(
93-
params: ChatCompletionCreateParamsWithModel<T>
94-
) {
95-
if (this.provider !== PROVIDERS.OAI) {
96-
const modelSupport = PROVIDER_SUPPORTED_MODES_BY_MODEL[this.provider][this.mode]
97-
98-
if (!modelSupport.includes("*") && !modelSupport.includes(params.model)) {
99-
throw new Error(
100-
`Model ${params.model} is not supported by provider ${this.provider} in mode ${this.mode}`
101-
)
102-
}
103-
}
104-
}
105-
10691
private log<T extends unknown[]>(level: LogLevel, ...args: T) {
10792
if (this.logger) {
10893
this.logger(level, ...args)
@@ -419,8 +404,6 @@ class Instructor<C> {
419404
params: P,
420405
requestOptions?: ClientTypeChatCompletionRequestOptions<C>
421406
): Promise<ReturnTypeBasedOnParams<typeof this.client, P>> => {
422-
this.validateModelModeSupport(params)
423-
424407
if (this.isChatCompletionCreateParamsWithModel(params)) {
425408
if (params.stream) {
426409
return this.chatCompletionStream(params, requestOptions) as ReturnTypeBasedOnParams<

tests/mode.test.ts

+2-26
Original file line numberDiff line numberDiff line change
@@ -4,22 +4,17 @@ import OpenAI from "openai"
44
import { z } from "zod"
55
import { type Mode } from "zod-stream"
66

7-
import { MODE, Provider, PROVIDER_SUPPORTED_MODES_BY_MODEL, PROVIDERS } from "@/constants/providers"
7+
import { Provider, PROVIDER_SUPPORTED_MODES_BY_MODEL, PROVIDERS } from "@/constants/providers"
88

99
const default_oai_model = "gpt-4o"
10-
const default_anyscale_model = "mistralai/Mixtral-8x7B-Instruct-v0.1"
11-
const default_together_model = "mistralai/Mixtral-8x7B-Instruct-v0.1"
10+
const default_together_model = "meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo"
1211
const default_groq_model = "llama3-70b-8192"
1312

1413
const provider_config = {
1514
[PROVIDERS.OAI]: {
1615
baseURL: "https://api.openai.com/v1",
1716
apiKey: process.env.OPENAI_API_KEY
1817
},
19-
[PROVIDERS.ANYSCALE]: {
20-
baseURL: "https://api.endpoints.anyscale.com/v1",
21-
apiKey: process.env.ANYSCALE_API_KEY
22-
},
2318
[PROVIDERS.TOGETHER]: {
2419
baseURL: "https://api.together.xyz",
2520
apiKey: process.env.TOGETHER_API_KEY
@@ -55,25 +50,6 @@ const createTestCases = (): {
5550

5651
Object.entries(PROVIDER_SUPPORTED_MODES_BY_MODEL).forEach(
5752
([provider, modesByModel]: [Provider, Record<Mode, string[]>]) => {
58-
if (provider === PROVIDERS.ANYSCALE) {
59-
Object.entries(modesByModel).forEach(([mode, models]: [Mode, string[]]) => {
60-
if (mode === MODE.MD_JSON) {
61-
// Skip MD_JSON for Anyscale - its somewhat supported but super flakey
62-
return
63-
}
64-
65-
if (models.includes("*")) {
66-
testCases.push({
67-
model: default_anyscale_model,
68-
mode,
69-
provider
70-
})
71-
} else {
72-
models.forEach(model => testCases.push({ model, mode, provider }))
73-
}
74-
})
75-
}
76-
7753
if (provider === PROVIDERS.TOGETHER) {
7854
Object.entries(modesByModel).forEach(([mode, models]: [Mode, string[]]) => {
7955
if (models.includes("*")) {

tests/zod-type.test.ts

+1-1
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@ async function extractUser({ schema }) {
1515
})
1616

1717
const user = await client.chat.completions.create({
18-
messages: [{ role: "user", content: "do nothing" }],
18+
messages: [{ role: "user", content: "do nothing - return nothing." }],
1919
model: "gpt-4o",
2020
response_model: { schema: schema, name: "User" }
2121
})

0 commit comments

Comments (0)