@@ -12,6 +12,230 @@ import {
OpenAICoreRequestOptions,
} from "../types.js";
+ /**
+ * Azure OpenAI chat model integration.
+ *
+ * Setup:
+ * Install `@langchain/openai` and set the following environment variables:
+ *
+ * ```bash
+ * npm install @langchain/openai
+ * export AZURE_OPENAI_API_KEY="your-api-key"
+ * export AZURE_OPENAI_API_INSTANCE_NAME="your-instance-name"
+ * export AZURE_OPENAI_API_DEPLOYMENT_NAME="your-deployment-name"
+ * export AZURE_OPENAI_API_VERSION="your-version"
+ * export AZURE_OPENAI_BASE_PATH="your-base-path"
+ * ```
+ *
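+ * If the environment variables above are set, the constructor arguments can be
+ * omitted entirely. A minimal sketch, assuming that setup (in Node.js the client
+ * reads its credentials and deployment details from the `AZURE_OPENAI_*` variables):
+ *
+ * ```typescript
+ * import { AzureChatOpenAI } from '@langchain/openai';
+ *
+ * // All connection details come from the environment.
+ * const llm = new AzureChatOpenAI();
+ * ```
+ *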
+ * ## Key args
+ *
+ * ### [Init args](/classes/langchain_openai.AzureChatOpenAI.html#constructor)
+ *
+ * ### [Runtime args](/interfaces/langchain_openai.ChatOpenAICallOptions.html)
+ *
+ * > See full list of supported init args and their descriptions in the [`constructor`](/classes/langchain_openai.AzureChatOpenAI.html#constructor) section.
+ *
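+ * Runtime args can be passed as the second argument to `invoke` or `stream`, or
+ * bound to the model ahead of time with `.bind`. A minimal sketch, using the `llm`
+ * instance from the Instantiate example below and an illustrative `stop` sequence:
+ *
+ * ```typescript
+ * // Bound call options are merged into every subsequent invocation.
+ * const llmWithStop = llm.bind({ stop: ["\n"] });
+ * const response = await llmWithStop.invoke("Count to five, one number per line.");
+ * ```
+ *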
+ * ## Examples
+ *
+ * <details open>
+ * <summary><strong>Instantiate</strong></summary>
+ *
+ * ```typescript
+ * import { AzureChatOpenAI } from '@langchain/openai';
+ *
+ * const llm = new AzureChatOpenAI({
+ *   azureOpenAIApiKey: process.env.AZURE_OPENAI_API_KEY, // In Node.js defaults to process.env.AZURE_OPENAI_API_KEY
+ *   azureOpenAIApiInstanceName: process.env.AZURE_OPENAI_API_INSTANCE_NAME, // In Node.js defaults to process.env.AZURE_OPENAI_API_INSTANCE_NAME
+ *   azureOpenAIApiDeploymentName: process.env.AZURE_OPENAI_API_DEPLOYMENT_NAME, // In Node.js defaults to process.env.AZURE_OPENAI_API_DEPLOYMENT_NAME
+ *   azureOpenAIApiVersion: process.env.AZURE_OPENAI_API_VERSION, // In Node.js defaults to process.env.AZURE_OPENAI_API_VERSION
+ *   temperature: 0,
+ *   maxTokens: undefined,
+ *   timeout: undefined,
+ *   maxRetries: 2,
+ *   // apiKey: "...",
+ *   // baseUrl: "...",
+ *   // other params...
+ * });
+ * ```
+ * </details>
+ *
+ * <br />
+ *
+ * <details>
+ * <summary><strong>Invoking</strong></summary>
+ *
+ * ```typescript
+ * const messages = [
+ *   {
+ *     type: "system" as const,
+ *     content: "You are a helpful translator. Translate the user sentence to French.",
+ *   },
+ *   {
+ *     type: "human" as const,
+ *     content: "I love programming.",
+ *   },
+ * ];
+ * const result = await llm.invoke(messages);
+ * console.log(result);
+ * ```
+ * </details>
+ *
+ * <br />
+ *
+ * <details>
+ * <summary><strong>Streaming Chunks</strong></summary>
+ *
+ * ```typescript
+ * for await (const chunk of await llm.stream(messages)) {
+ *   console.log(chunk);
+ * }
+ * ```
+ * </details>
+ *
+ * <br />
+ *
+ * <details>
+ * <summary><strong>Aggregate Streamed Chunks</strong></summary>
+ *
+ * ```typescript
+ * import { AIMessageChunk } from '@langchain/core/messages';
+ * import { concat } from '@langchain/core/utils/stream';
+ *
+ * const stream = await llm.stream(messages);
+ * let full: AIMessageChunk | undefined;
+ * for await (const chunk of stream) {
+ *   // concat merges each incoming chunk into the accumulated message.
+ *   full = !full ? chunk : concat(full, chunk);
+ * }
+ * console.log(full);
+ * ```
+ * </details>
+ *
+ * <br />
+ *
+ * <details>
+ * <summary><strong>Bind tools</strong></summary>
+ *
+ * ```typescript
+ * import { z } from 'zod';
+ *
+ * const GetWeather = {
+ *   name: "GetWeather",
+ *   description: "Get the current weather in a given location",
+ *   schema: z.object({
+ *     location: z.string().describe("The city and state, e.g. San Francisco, CA")
+ *   }),
+ * }
+ *
+ * const GetPopulation = {
+ *   name: "GetPopulation",
+ *   description: "Get the current population in a given location",
+ *   schema: z.object({
+ *     location: z.string().describe("The city and state, e.g. San Francisco, CA")
+ *   }),
+ * }
+ *
+ * const llmWithTools = llm.bindTools([GetWeather, GetPopulation]);
+ * const aiMsg = await llmWithTools.invoke(
+ *   "Which city is hotter today and which is bigger: LA or NY?"
+ * );
+ * console.log(aiMsg.tool_calls);
+ * ```
+ * </details>
+ *
+ * <br />
+ *
+ * <details>
+ * <summary><strong>`.withStructuredOutput`</strong></summary>
+ *
+ * ```typescript
+ * import { z } from 'zod';
+ *
+ * const Joke = z.object({
+ *   setup: z.string().describe("The setup of the joke"),
+ *   punchline: z.string().describe("The punchline to the joke"),
+ *   rating: z.number().optional().describe("How funny the joke is, from 1 to 10")
+ * }).describe('Joke to tell user.');
+ *
+ * const structuredLlm = llm.withStructuredOutput(Joke);
+ * const jokeResult = await structuredLlm.invoke("Tell me a joke about cats");
+ * console.log(jokeResult);
+ * ```
+ * </details>
+ *
+ * <br />
+ *
+ * <details>
+ * <summary><strong>JSON Object Response Format</strong></summary>
+ *
+ * ```typescript
+ * const jsonLlm = llm.bind({ response_format: { type: "json_object" } });
+ * const jsonLlmAiMsg = await jsonLlm.invoke(
+ *   "Return a JSON object with key 'randomInts' and a value of 10 random ints in [0-99]"
+ * );
+ * console.log(jsonLlmAiMsg.content);
+ * ```
+ * </details>
+ *
+ * <br />
+ *
+ * <details>
+ * <summary><strong>Multimodal</strong></summary>
+ *
+ * ```typescript
+ * import { HumanMessage } from '@langchain/core/messages';
+ *
+ * const imageUrl = "https://example.com/image.jpg";
+ * const imageData = await fetch(imageUrl).then(res => res.arrayBuffer());
+ * const base64Image = Buffer.from(imageData).toString('base64');
+ *
+ * const message = new HumanMessage({
+ *   content: [
+ *     { type: "text", text: "describe the weather in this image" },
+ *     {
+ *       type: "image_url",
+ *       image_url: { url: `data:image/jpeg;base64,${base64Image}` },
+ *     },
+ *   ]
+ * });
+ *
+ * const imageDescriptionAiMsg = await llm.invoke([message]);
+ * console.log(imageDescriptionAiMsg.content);
+ * ```
+ * </details>
+ *
+ * <br />
+ *
+ * <details>
+ * <summary><strong>Usage Metadata</strong></summary>
+ *
+ * ```typescript
+ * const aiMsgForMetadata = await llm.invoke(messages);
+ * // Token counts: { input_tokens, output_tokens, total_tokens }
+ * console.log(aiMsgForMetadata.usage_metadata);
+ * ```
+ * </details>
+ *
+ * <br />
+ *
+ * <details>
+ * <summary><strong>Logprobs</strong></summary>
+ *
+ * ```typescript
+ * const logprobsLlm = new AzureChatOpenAI({ logprobs: true });
+ * const aiMsgForLogprobs = await logprobsLlm.invoke(messages);
+ * console.log(aiMsgForLogprobs.response_metadata.logprobs);
+ * ```
+ * </details>
+ *
+ * <br />
+ *
+ * <details>
+ * <summary><strong>Response Metadata</strong></summary>
+ *
+ * ```typescript
+ * const aiMsgForResponseMetadata = await llm.invoke(messages);
+ * console.log(aiMsgForResponseMetadata.response_metadata);
+ * ```
+ * </details>
+ */
export class AzureChatOpenAI extends ChatOpenAI {
  _llmType(): string {
    return "azure_openai";