Skip to content

Commit d0275ff

Browse files
authored
adding basic support for anthropic (#144)
1 parent 76a1de5 commit d0275ff

File tree

14 files changed

+404
-51
lines changed

14 files changed

+404
-51
lines changed

.changeset/itchy-trains-relax.md

+5
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
"@instructor-ai/instructor": major
3+
---
4+
5+
updating all types to better support non openai clients - this changes some of the previously exported types and adds a few new ones

.github/workflows/test-pr.yml

+1
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@ jobs:
2121
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
2222
ANYSCALE_API_KEY: ${{ secrets.ANYSCALE_API_KEY }}
2323
TOGETHER_API_KEY: ${{ secrets.TOGETHER_API_KEY }}
24+
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
2425

2526
steps:
2627
- uses: actions/checkout@v3

.github/workflows/test.yml

+1
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@ jobs:
1212
env:
1313
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
1414
ANYSCALE_API_KEY: ${{ secrets.ANYSCALE_API_KEY }}
15+
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
1516
TOGETHER_API_KEY: ${{ secrets.TOGETHER_API_KEY }}
1617

1718
steps:

.vscode/launch.json

+36
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,36 @@
1+
{
2+
// Use IntelliSense to learn about possible attributes.
3+
// Hover to view descriptions of existing attributes.
4+
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
5+
"version": "0.2.0",
6+
"configurations": [
7+
{
8+
"type": "bun",
9+
"internalConsoleOptions": "neverOpen",
10+
"request": "launch",
11+
"name": "Debug File",
12+
"program": "${file}",
13+
"cwd": "${workspaceFolder}",
14+
"stopOnEntry": false,
15+
"watchMode": false
16+
},
17+
{
18+
"type": "bun",
19+
"internalConsoleOptions": "neverOpen",
20+
"request": "launch",
21+
"name": "Run File",
22+
"program": "${file}",
23+
"cwd": "${workspaceFolder}",
24+
"noDebug": true,
25+
"watchMode": false
26+
},
27+
{
28+
"type": "bun",
29+
"internalConsoleOptions": "neverOpen",
30+
"request": "attach",
31+
"name": "Attach Bun",
32+
"url": "ws://localhost:6499/",
33+
"stopOnEntry": false
34+
}
35+
]
36+
}

bun.lockb

11.5 KB
Binary file not shown.

package.json

+9-8
Original file line numberDiff line numberDiff line change
@@ -51,11 +51,11 @@
5151
},
5252
"homepage": "https://github.com/instructor-ai/instructor-js#readme",
5353
"dependencies": {
54-
"zod-stream": "1.0.0",
54+
"zod-stream": "1.0.1",
5555
"zod-validation-error": "^2.1.0"
5656
},
5757
"peerDependencies": {
58-
"openai": ">=4.24.1",
58+
"openai": ">=4.28.0",
5959
"zod": ">=3.22.4"
6060
},
6161
"devDependencies": {
@@ -64,19 +64,20 @@
6464
"@ianvs/prettier-plugin-sort-imports": "4.1.0",
6565
"@types/bun": "^1.0.0",
6666
"@types/node": "^20.10.6",
67-
"eslint-config-turbo": "^1.10.12",
68-
"eslint-config-prettier": "^9.0.0",
67+
"@typescript-eslint/eslint-plugin": "^6.11.0",
68+
"@typescript-eslint/parser": "^6.11.0",
6969
"eslint-config": "^0.3.0",
70-
"eslint-plugin-prettier": "^5.1.2",
70+
"eslint-config-prettier": "^9.0.0",
71+
"eslint-config-turbo": "^1.10.12",
7172
"eslint-import-resolver-typescript": "^3.5.5",
7273
"eslint-plugin-import": "^2.27.5",
7374
"eslint-plugin-only-warn": "^1.1.0",
74-
"@typescript-eslint/parser": "^6.11.0",
75-
"@typescript-eslint/eslint-plugin": "^6.11.0",
75+
"eslint-plugin-prettier": "^5.1.2",
7676
"husky": "^8.0.3",
77+
"llm-polyglot": "^0.0.3",
7778
"prettier": "latest",
78-
"tsup": "^8.0.1",
7979
"ts-inference-check": "^0.3.0",
80+
"tsup": "^8.0.1",
8081
"typescript": "^5.2.2"
8182
}
8283
}

src/constants/providers.ts

+11-4
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@ export const PROVIDERS = {
77
OAI: "OAI",
88
ANYSCALE: "ANYSCALE",
99
TOGETHER: "TOGETHER",
10+
ANTHROPIC: "ANTHROPIC",
1011
OTHER: "OTHER"
1112
} as const
1213

@@ -15,16 +16,18 @@ export type Provider = keyof typeof PROVIDERS
1516
export const PROVIDER_SUPPORTED_MODES: {
1617
[key in Provider]: Mode[]
1718
} = {
18-
[PROVIDERS.OTHER]: [MODE.FUNCTIONS, MODE.TOOLS, MODE.JSON, MODE.JSON_SCHEMA],
19+
[PROVIDERS.OTHER]: [MODE.FUNCTIONS, MODE.TOOLS, MODE.JSON, MODE.JSON_SCHEMA, MODE.MD_JSON],
1920
[PROVIDERS.OAI]: [MODE.FUNCTIONS, MODE.TOOLS, MODE.JSON, MODE.MD_JSON],
20-
[PROVIDERS.ANYSCALE]: [MODE.TOOLS, MODE.JSON, MODE.JSON_SCHEMA],
21-
[PROVIDERS.TOGETHER]: [MODE.TOOLS, MODE.JSON, MODE.JSON_SCHEMA]
21+
[PROVIDERS.ANYSCALE]: [MODE.TOOLS, MODE.JSON, MODE.JSON_SCHEMA, MODE.MD_JSON],
22+
[PROVIDERS.TOGETHER]: [MODE.TOOLS, MODE.JSON, MODE.JSON_SCHEMA, MODE.MD_JSON],
23+
[PROVIDERS.ANTHROPIC]: [MODE.MD_JSON, MODE.TOOLS]
2224
} as const
2325

2426
export const NON_OAI_PROVIDER_URLS = {
2527
[PROVIDERS.ANYSCALE]: "api.endpoints.anyscale",
2628
[PROVIDERS.TOGETHER]: "api.together.xyz",
27-
[PROVIDERS.OAI]: "api.openai.com"
29+
[PROVIDERS.OAI]: "api.openai.com",
30+
[PROVIDERS.ANTHROPIC]: "api.anthropic.com"
2831
} as const
2932

3033
export const PROVIDER_PARAMS_TRANSFORMERS = {
@@ -110,5 +113,9 @@ export const PROVIDER_SUPPORTED_MODES_BY_MODEL = {
110113
"mistralai/Mixtral-8x7B-Instruct-v0.1"
111114
],
112115
[MODE.TOOLS]: ["mistralai/Mistral-7B-Instruct-v0.1", "mistralai/Mixtral-8x7B-Instruct-v0.1"]
116+
},
117+
[PROVIDERS.ANTHROPIC]: {
118+
[MODE.MD_JSON]: ["*"],
119+
[MODE.TOOLS]: ["*"]
113120
}
114121
}

src/dsl/validator.ts

+7-3
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,11 @@
1-
import { OAIClientExtended } from "@/instructor"
1+
import { InstructorClient } from "@/instructor"
22
import OpenAI from "openai"
33
import { RefinementCtx, z } from "zod"
44

55
type AsyncSuperRefineFunction = (data: string, ctx: RefinementCtx) => Promise<void>
66

77
export const LLMValidator = (
8-
instructor: OAIClientExtended,
8+
instructor: InstructorClient,
99
statement: string,
1010
params: Omit<OpenAI.ChatCompletionCreateParams, "messages">
1111
): AsyncSuperRefineFunction => {
@@ -42,9 +42,13 @@ export const LLMValidator = (
4242
}
4343
}
4444

45-
export const moderationValidator = (client: OAIClientExtended | OpenAI) => {
45+
export const moderationValidator = (client: InstructorClient) => {
4646
return async (value: string, ctx: z.RefinementCtx) => {
4747
try {
48+
if (!(client instanceof OpenAI)) {
49+
throw new Error("ModerationValidator only supports OpenAI clients")
50+
}
51+
4852
const response = await client.moderations.create({ input: value })
4953
const flaggedResults = response.results.filter(result => result.flagged)
5054

src/index.ts

+2-2
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
1-
import Instructor, { OAIClientExtended } from "./instructor"
1+
import Instructor, { InstructorClient } from "./instructor"
22

3-
export { type OAIClientExtended }
3+
export { type InstructorClient }
44
export * from "./types"
55

66
export default Instructor

src/instructor.ts

+37-23
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,11 @@
11
import {
22
ChatCompletionCreateParamsWithModel,
3+
GenericChatCompletion,
34
InstructorConfig,
45
LogLevel,
5-
ReturnTypeBasedOnParams
6+
OpenAILikeClient,
7+
ReturnTypeBasedOnParams,
8+
SupportedInstructorClient
69
} from "@/types"
710
import OpenAI from "openai"
811
import { z } from "zod"
@@ -17,22 +20,22 @@ import {
1720
PROVIDER_SUPPORTED_MODES_BY_MODEL,
1821
PROVIDERS
1922
} from "./constants/providers"
20-
import { CompletionMeta } from "./types"
23+
import { ClientTypeChatCompletionParams, CompletionMeta } from "./types"
2124

2225
const MAX_RETRIES_DEFAULT = 0
2326

24-
class Instructor {
25-
readonly client: OpenAI
27+
class Instructor<C extends SupportedInstructorClient> {
28+
readonly client: OpenAILikeClient<C>
2629
readonly mode: Mode
2730
readonly provider: Provider
2831
readonly debug: boolean = false
2932

3033
/**
3134
* Creates an instance of the `Instructor` class.
32-
* @param {OpenAI} client - The OpenAI client.
35+
* @param {OpenAILikeClient} client - An OpenAI-like client.
3336
* @param {string} mode - The mode of operation.
3437
*/
35-
constructor({ client, mode, debug = false }: InstructorConfig) {
38+
constructor({ client, mode, debug = false }: InstructorConfig<C>) {
3639
this.client = client
3740
this.mode = mode
3841
this.debug = debug
@@ -41,6 +44,7 @@ class Instructor {
4144
this.client?.baseURL.includes(NON_OAI_PROVIDER_URLS.ANYSCALE) ? PROVIDERS.ANYSCALE
4245
: this.client?.baseURL.includes(NON_OAI_PROVIDER_URLS.TOGETHER) ? PROVIDERS.TOGETHER
4346
: this.client?.baseURL.includes(NON_OAI_PROVIDER_URLS.OAI) ? PROVIDERS.OAI
47+
: this.client?.baseURL.includes(NON_OAI_PROVIDER_URLS.ANTHROPIC) ? PROVIDERS.ANTHROPIC
4448
: PROVIDERS.OTHER
4549

4650
this.provider = provider
@@ -137,10 +141,12 @@ class Instructor {
137141
}
138142
}
139143

140-
let completion: OpenAI.Chat.Completions.ChatCompletion | null = null
144+
let completion: GenericChatCompletion | null = null
141145

142146
try {
143-
completion = await this.client.chat.completions.create(resolvedParams)
147+
completion = (await this.client.chat.completions.create(
148+
resolvedParams
149+
)) as GenericChatCompletion
144150
this.log("debug", "raw standard completion response: ", completion)
145151
} catch (error) {
146152
this.log(
@@ -258,7 +264,8 @@ class Instructor {
258264
this.log("debug", "raw stream completion response: ", completion)
259265

260266
return OAIStream({
261-
res: completion
267+
//TODO: we need to move away from strict openai types - need to cast here but should update to be more flexible
268+
res: completion as AsyncIterable<OpenAI.ChatCompletionChunk>
262269
})
263270
},
264271
response_model
@@ -282,41 +289,46 @@ class Instructor {
282289
create: async <
283290
T extends z.AnyZodObject,
284291
P extends T extends z.AnyZodObject ? ChatCompletionCreateParamsWithModel<T>
285-
: OpenAI.ChatCompletionCreateParams & { response_model: never }
292+
: ClientTypeChatCompletionParams<typeof this.client> & { response_model: never }
286293
>(
287294
params: P
288-
): Promise<ReturnTypeBasedOnParams<P>> => {
295+
): Promise<ReturnTypeBasedOnParams<typeof this.client, P>> => {
289296
this.validateModelModeSupport(params)
290297

291298
if (this.isChatCompletionCreateParamsWithModel(params)) {
292299
if (params.stream) {
293300
return this.chatCompletionStream(params) as ReturnTypeBasedOnParams<
301+
typeof this.client,
294302
P & { stream: true }
295303
>
296304
} else {
297-
return this.chatCompletionStandard(params) as ReturnTypeBasedOnParams<P>
305+
return this.chatCompletionStandard(params) as ReturnTypeBasedOnParams<
306+
typeof this.client,
307+
P
308+
>
298309
}
299310
} else {
300-
const result: OpenAI.Chat.Completions.ChatCompletion =
311+
const result =
301312
this.isStandardStream(params) ?
302313
await this.client.chat.completions.create(params)
303314
: await this.client.chat.completions.create(params)
304315

305-
return result as ReturnTypeBasedOnParams<P>
316+
return result as ReturnTypeBasedOnParams<typeof this.client, P>
306317
}
307318
}
308319
}
309320
}
310321
}
311322

312-
export type OAIClientExtended = OpenAI & Instructor
323+
export type InstructorClient<C extends SupportedInstructorClient = OpenAI> = Instructor<C> &
324+
OpenAILikeClient<C>
313325

314326
/**
315327
* Creates an instance of the `Instructor` class.
316-
* @param {OpenAI} client - The OpenAI client.
328+
* @param {OpenAILikeClient} client - The OpenAI client.
317329
* @param {string} mode - The mode of operation.
318330
* @param {boolean} debug - Whether to log debug messages.
319-
* @returns {OAIClientExtended} The extended OpenAI client.
331+
* @returns {InstructorClient} The extended OpenAI client.
320332
*
321333
* @example
322334
* import createInstructor from "@instructor-ai/instructor"
@@ -326,24 +338,26 @@ export type OAIClientExtended = OpenAI & Instructor
326338
*
327339
* const client = createInstructor({
328340
* client: OAI,
329-
* mode: "TOOLS",
341+
* mode: "TOOLS",
330342
* })
331343
*
332344
* @param args
333345
* @returns
334346
*/
335-
export default function (args: { client: OpenAI; mode: Mode; debug?: boolean }): OAIClientExtended {
336-
const instructor = new Instructor(args)
337-
347+
export default function <C extends SupportedInstructorClient = OpenAI>(args: {
348+
client: OpenAILikeClient<C>
349+
mode: Mode
350+
debug?: boolean
351+
}): InstructorClient<C> {
352+
const instructor = new Instructor<C>(args)
338353
const instructorWithProxy = new Proxy(instructor, {
339354
get: (target, prop, receiver) => {
340355
if (prop in target) {
341356
return Reflect.get(target, prop, receiver)
342357
}
343-
344358
return Reflect.get(target.client, prop, receiver)
345359
}
346360
})
347361

348-
return instructorWithProxy as OAIClientExtended
362+
return instructorWithProxy as InstructorClient<C>
349363
}

0 commit comments

Comments
 (0)