Commit 57b7135

feat(api)!: messages is generally available (#287)
This is a breaking change as we've removed the `beta` namespace from the Messages API. To migrate, you just need to remove all `.beta` references; everything else is the same!
1 parent 19a1451 commit 57b7135
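
To illustrate the migration, here is a minimal sketch; the prompt and model values are placeholders taken from this commit's updated README.

```ts
import Anthropic from '@anthropic-ai/sdk';

const anthropic = new Anthropic(); // reads ANTHROPIC_API_KEY from the environment

async function main() {
  // Before: const message = await anthropic.beta.messages.create({ ... });
  // After:  drop the `.beta` segment; the request params are unchanged.
  const message = await anthropic.messages.create({
    max_tokens: 1024,
    messages: [{ role: 'user', content: 'How does a court case get to the supreme court?' }],
    model: 'claude-2.1',
  });

  console.log(message.content);
}

main();
```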

File tree: 16 files changed, +121 -164 lines

README.md

Lines changed: 48 additions & 62 deletions
@@ -25,15 +25,17 @@ The full API of this library can be found in [api.md](api.md).
 import Anthropic from '@anthropic-ai/sdk';
 
 const anthropic = new Anthropic({
-  apiKey: 'my api key', // defaults to process.env["ANTHROPIC_API_KEY"]
+  apiKey: process.env['ANTHROPIC_API_KEY'], // This is the default and can be omitted
 });
 
 async function main() {
-  const completion = await anthropic.completions.create({
+  const message = await anthropic.messages.create({
+    max_tokens: 1024,
+    messages: [{ role: 'user', content: 'How does a court case get to the supreme court?' }],
     model: 'claude-2.1',
-    max_tokens_to_sample: 300,
-    prompt: `${Anthropic.HUMAN_PROMPT} how does a court case get to the Supreme Court?${Anthropic.AI_PROMPT}`,
   });
+
+  console.log(message.content);
 }
 
 main();
@@ -48,14 +50,14 @@ import Anthropic from '@anthropic-ai/sdk';
 
 const anthropic = new Anthropic();
 
-const stream = await anthropic.completions.create({
-  prompt: `${Anthropic.HUMAN_PROMPT} Your prompt here${Anthropic.AI_PROMPT}`,
+const stream = await anthropic.messages.create({
+  max_tokens: 1024,
+  messages: [{ role: 'user', content: 'your prompt here' }],
   model: 'claude-2.1',
   stream: true,
-  max_tokens_to_sample: 300,
 });
-for await (const completion of stream) {
-  console.log(completion.completion);
+for await (const messageStreamEvent of stream) {
+  console.log(messageStreamEvent.type);
 }
 ```
 
@@ -71,16 +73,16 @@ This library includes TypeScript definitions for all request params and response
 import Anthropic from '@anthropic-ai/sdk';
 
 const anthropic = new Anthropic({
-  apiKey: 'my api key', // defaults to process.env["ANTHROPIC_API_KEY"]
+  apiKey: process.env['ANTHROPIC_API_KEY'], // This is the default and can be omitted
 });
 
 async function main() {
-  const params: Anthropic.CompletionCreateParams = {
-    prompt: `${Anthropic.HUMAN_PROMPT} how does a court case get to the Supreme Court?${Anthropic.AI_PROMPT}`,
-    max_tokens_to_sample: 300,
+  const params: Anthropic.MessageCreateParams = {
+    max_tokens: 1024,
+    messages: [{ role: 'user', content: 'Where can I get a good coffee in my neighbourhood?' }],
     model: 'claude-2.1',
   };
-  const completion: Anthropic.Completion = await anthropic.completions.create(params);
+  const message: Anthropic.Message = await anthropic.messages.create(params);
 }
 
 main();
@@ -104,7 +106,7 @@ import Anthropic from '@anthropic-ai/sdk';
 const anthropic = new Anthropic();
 
 async function main() {
-  const stream = anthropic.beta.messages
+  const stream = anthropic.messages
     .stream({
       model: 'claude-2.1',
       max_tokens: 1024,
@@ -126,9 +128,9 @@ async function main() {
 main();
 ```
 
-Streaming with `client.beta.messages.stream(...)` exposes [various helpers for your convenience](helpers.md) including event handlers and accumulation.
+Streaming with `client.messages.stream(...)` exposes [various helpers for your convenience](helpers.md) including event handlers and accumulation.
 
-Alternatively, you can use `client.beta.messages.create({ ..., stream: true })` which only returns an async iterable of the events in the stream and thus uses less memory (it does not build up a final message object for you).
+Alternatively, you can use `client.messages.create({ ..., stream: true })` which only returns an async iterable of the events in the stream and thus uses less memory (it does not build up a final message object for you).
 
 ## Handling errors
 
@@ -139,10 +141,10 @@ a subclass of `APIError` will be thrown:
 <!-- prettier-ignore -->
 ```ts
 async function main() {
-  const completion = await anthropic.completions
+  const message = await anthropic.messages
     .create({
-      prompt: `${Anthropic.HUMAN_PROMPT} Your prompt here${Anthropic.AI_PROMPT}`,
-      max_tokens_to_sample: 300,
+      max_tokens: 1024,
+      messages: [{ role: 'user', content: 'your prompt here' }],
      model: 'claude-2.1',
     })
     .catch((err) => {
@@ -188,16 +190,9 @@ const anthropic = new Anthropic({
 });
 
 // Or, configure per-request:
-await anthropic.completions.create(
-  {
-    prompt: `${Anthropic.HUMAN_PROMPT} Can you help me effectively ask for a raise at work?${Anthropic.AI_PROMPT}`,
-    max_tokens_to_sample: 300,
-    model: 'claude-2.1',
-  },
-  {
-    maxRetries: 5,
-  },
-);
+await anthropic.messages.create({ max_tokens: 1024, messages: [{ role: 'user', content: 'Can you help me effectively ask for a raise at work?' }], model: 'claude-2.1' }, {
+  maxRetries: 5,
+});
 ```
 
 ### Timeouts
@@ -212,16 +207,9 @@ const anthropic = new Anthropic({
 });
 
 // Override per-request:
-await anthropic.completions.create(
-  {
-    prompt: `${Anthropic.HUMAN_PROMPT} Where can I get a good coffee in my neighbourhood?${Anthropic.AI_PROMPT}`,
-    max_tokens_to_sample: 300,
-    model: 'claude-2.1',
-  },
-  {
-    timeout: 5 * 1000,
-  },
-);
+await anthropic.messages.create({ max_tokens: 1024, messages: [{ role: 'user', content: 'Where can I get a good coffee in my neighbourhood?' }], model: 'claude-2.1' }, {
+  timeout: 5 * 1000,
+});
 ```
 
 On timeout, an `APIConnectionTimeoutError` is thrown.
@@ -241,11 +229,11 @@ import Anthropic from '@anthropic-ai/sdk';
 
 const anthropic = new Anthropic();
 
-const completion = await anthropic.completions.create(
+const message = await anthropic.messages.create(
   {
-    max_tokens_to_sample: 300,
+    max_tokens: 1024,
+    messages: [{ role: 'user', content: 'Where can I get a good coffee in my neighbourhood?' }],
     model: 'claude-2.1',
-    prompt: `${Anthropic.HUMAN_PROMPT} Where can I get a good coffee in my neighbourhood?${Anthropic.AI_PROMPT}`,
   },
   { headers: { 'anthropic-version': 'My-Custom-Value' } },
 );
@@ -263,19 +251,25 @@ You can also use the `.withResponse()` method to get the raw `Response` along wi
 ```ts
 const anthropic = new Anthropic();
 
-const response = await anthropic.completions
+const response = await anthropic.messages
   .create({
-    prompt: `${Anthropic.HUMAN_PROMPT} Can you help me effectively ask for a raise at work?${Anthropic.AI_PROMPT}`,
-    max_tokens_to_sample: 300,
+    max_tokens: 1024,
+    messages: [{ role: 'user', content: 'Where can I get a good coffee in my neighbourhood?' }],
     model: 'claude-2.1',
   })
   .asResponse();
 console.log(response.headers.get('X-My-Header'));
-console.log(response.raw.statusText); // access the underlying Response object
+console.log(response.statusText); // access the underlying Response object
 
-// parses the response body, returning an object if the API responds with JSON
-const completion: Completions.Completion = await response.parse();
-console.log(completion.completion);
+const { data: message, response: raw } = await anthropic.messages
+  .create({
+    max_tokens: 1024,
+    messages: [{ role: 'user', content: 'Where can I get a good coffee in my neighbourhood?' }],
+    model: 'claude-2.1',
+  })
+  .withResponse();
+console.log(raw.headers.get('X-My-Header'));
+console.log(message.content);
 ```
 
 ## Customizing the fetch client
@@ -325,7 +319,6 @@ If you would like to disable or customize this behavior, for example to use the
 <!-- prettier-ignore -->
 ```ts
 import http from 'http';
-import Anthropic from '@anthropic-ai/sdk';
 import HttpsProxyAgent from 'https-proxy-agent';
 
 // Configure the default for all requests:
@@ -334,17 +327,10 @@ const anthropic = new Anthropic({
 });
 
 // Override per-request:
-await anthropic.completions.create(
-  {
-    prompt: `${Anthropic.HUMAN_PROMPT} How does a court case get to the Supreme Court?${Anthropic.AI_PROMPT}`,
-    max_tokens_to_sample: 300,
-    model: 'claude-2.1',
-  },
-  {
-    baseURL: 'http://localhost:8080/test-api',
-    httpAgent: new http.Agent({ keepAlive: false }),
-  },
-);
+await anthropic.messages.create({ max_tokens: 1024, messages: [{ role: 'user', content: 'Where can I get a good coffee in my neighbourhood?' }], model: 'claude-2.1' }, {
+  baseURL: 'http://localhost:8080/test-api',
+  httpAgent: new http.Agent({ keepAlive: false }),
+})
 ```
 
 ## Semantic Versioning

api.md

Lines changed: 17 additions & 19 deletions
@@ -10,28 +10,26 @@ Methods:
 
 - <code title="post /v1/complete">client.completions.<a href="./src/resources/completions.ts">create</a>({ ...params }) -> Completion</code>
 
-# Beta
-
-## Messages
+# Messages
 
 Types:
 
-- <code><a href="./src/resources/beta/messages.ts">ContentBlock</a></code>
-- <code><a href="./src/resources/beta/messages.ts">ContentBlockDeltaEvent</a></code>
-- <code><a href="./src/resources/beta/messages.ts">ContentBlockStartEvent</a></code>
-- <code><a href="./src/resources/beta/messages.ts">ContentBlockStopEvent</a></code>
-- <code><a href="./src/resources/beta/messages.ts">Message</a></code>
-- <code><a href="./src/resources/beta/messages.ts">MessageDeltaEvent</a></code>
-- <code><a href="./src/resources/beta/messages.ts">MessageDeltaUsage</a></code>
-- <code><a href="./src/resources/beta/messages.ts">MessageParam</a></code>
-- <code><a href="./src/resources/beta/messages.ts">MessageStartEvent</a></code>
-- <code><a href="./src/resources/beta/messages.ts">MessageStopEvent</a></code>
-- <code><a href="./src/resources/beta/messages.ts">MessageStreamEvent</a></code>
-- <code><a href="./src/resources/beta/messages.ts">TextBlock</a></code>
-- <code><a href="./src/resources/beta/messages.ts">TextDelta</a></code>
-- <code><a href="./src/resources/beta/messages.ts">Usage</a></code>
+- <code><a href="./src/resources/messages.ts">ContentBlock</a></code>
+- <code><a href="./src/resources/messages.ts">ContentBlockDeltaEvent</a></code>
+- <code><a href="./src/resources/messages.ts">ContentBlockStartEvent</a></code>
+- <code><a href="./src/resources/messages.ts">ContentBlockStopEvent</a></code>
+- <code><a href="./src/resources/messages.ts">Message</a></code>
+- <code><a href="./src/resources/messages.ts">MessageDeltaEvent</a></code>
+- <code><a href="./src/resources/messages.ts">MessageDeltaUsage</a></code>
+- <code><a href="./src/resources/messages.ts">MessageParam</a></code>
+- <code><a href="./src/resources/messages.ts">MessageStartEvent</a></code>
+- <code><a href="./src/resources/messages.ts">MessageStopEvent</a></code>
+- <code><a href="./src/resources/messages.ts">MessageStreamEvent</a></code>
+- <code><a href="./src/resources/messages.ts">TextBlock</a></code>
+- <code><a href="./src/resources/messages.ts">TextDelta</a></code>
+- <code><a href="./src/resources/messages.ts">Usage</a></code>
 
 Methods:
 
-- <code title="post /v1/messages">client.beta.messages.<a href="./src/resources/beta/messages.ts">create</a>({ ...params }) -> Message</code>
-- <code>client.beta.messages.<a href="./src/resources/beta/messages.ts">stream</a>(body, options?) -> MessageStream</code>
+- <code title="post /v1/messages">client.messages.<a href="./src/resources/messages.ts">create</a>({ ...params }) -> Message</code>
+- <code>client.messages.<a href="./src/resources/messages.ts">stream</a>(body, options?) -> MessageStream</code>
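
To make the renamed module concrete, here is a small sketch (not part of this commit) that imports types from the new path; the subpath mirrors the MessageStream.ts change below, and the text-block handling assumes `TextBlock` carries `type: 'text'` and a `text` field.

```ts
import type { Message, MessageParam } from '@anthropic-ai/sdk/resources/messages';

// Collect the text from a response; assumes text blocks expose `type: 'text'` and `text`.
function responseText(message: Message): string {
  const parts: string[] = [];
  for (const block of message.content) {
    if (block.type === 'text') {
      parts.push(block.text);
    }
  }
  return parts.join('');
}

// Append an assistant turn to a running conversation history.
function appendAssistantTurn(history: MessageParam[], message: Message): MessageParam[] {
  return [...history, { role: 'assistant', content: responseText(message) }];
}
```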

examples/streaming.ts

Lines changed: 1 addition & 1 deletion
@@ -5,7 +5,7 @@ import Anthropic from '@anthropic-ai/sdk';
 const client = new Anthropic(); // gets API Key from environment variable ANTHROPIC_API_KEY
 
 async function main() {
-  const stream = client.beta.messages
+  const stream = client.messages
     .stream({
       messages: [
         {

helpers.md

Lines changed: 3 additions & 3 deletions
@@ -3,14 +3,14 @@
 ## Streaming Responses
 
 ```ts
-anthropic.beta.messages.stream({ … }, options?): MessageStream
+anthropic.messages.stream({ … }, options?): MessageStream
 ```
 
-`anthropic.beta.messages.stream()` returns a `MessageStream`, which emits events, has an async
+`anthropic.messages.stream()` returns a `MessageStream`, which emits events, has an async
 iterator, and exposes helper methods to accumulate stream events into a convenient shape and make it easy to reason
 about the conversation.
 
-Alternatively, you can use `anthropic.beta.messages.create({ stream: true, … })` which returns an async
+Alternatively, you can use `anthropic.messages.create({ stream: true, … })` which returns an async
 iterable of the chunks in the stream and uses less memory (most notably, it does not accumulate a message
 object for you).
 
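
For orientation, a short usage sketch of the renamed helper; the `'text'` event and the `finalMessage()` accumulator follow the helpers this file describes, and the prompt/model values are placeholders from the README examples.

```ts
import Anthropic from '@anthropic-ai/sdk';

const anthropic = new Anthropic();

async function main() {
  const stream = anthropic.messages.stream({
    max_tokens: 1024,
    messages: [{ role: 'user', content: 'your prompt here' }],
    model: 'claude-2.1',
  });

  // Print text deltas as they arrive.
  stream.on('text', (textDelta) => {
    process.stdout.write(textDelta);
  });

  // Wait for the accumulated final message.
  const message = await stream.finalMessage();
  console.log(message.content);
}

main();
```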

packages/vertex-sdk/README.md

Lines changed: 1 addition & 1 deletion
@@ -28,7 +28,7 @@ import { AnthropicVertex } from '@anthropic-ai/vertex-sdk';
 const client = new AnthropicVertex();
 
 async function main() {
-  const result = await client.beta.messages.create({
+  const result = await client.messages.create({
     messages: [
       {
         role: 'user',

packages/vertex-sdk/examples/vertex.ts

Lines changed: 1 addition & 1 deletion
@@ -7,7 +7,7 @@ import { AnthropicVertex } from '@anthropic-ai/vertex-sdk';
 const client = new AnthropicVertex();
 
 async function main() {
-  const result = await client.beta.messages.create({
+  const result = await client.messages.create({
     messages: [
       {
         role: 'user',

packages/vertex-sdk/scripts/postprocess-dist-package-json.cjs

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@ const pkgJson = require('../dist/package.json');
 for (const dep in pkgJson.dependencies) {
   // ensure we point to NPM instead of a local directory
   if (dep === '@anthropic-ai/sdk') {
-    pkgJson.dependencies[dep] = '^0';
+    pkgJson.dependencies[dep] = '^0.14';
   }
 }
 

packages/vertex-sdk/src/client.ts

Lines changed: 1 addition & 1 deletion
@@ -70,7 +70,7 @@ export class AnthropicVertex extends Core.APIClient {
     this._authClientPromise = this._auth.getClient();
   }
 
-  beta: Resources.Beta = new Resources.Beta(this);
+  messages: Resources.Messages = new Resources.Messages(this);
 
   protected override defaultQuery(): Core.DefaultQuery | undefined {
     return this._options.defaultQuery;

src/index.ts

Lines changed: 20 additions & 2 deletions
@@ -121,7 +121,7 @@ export class Anthropic extends Core.APIClient {
   }
 
   completions: API.Completions = new API.Completions(this);
-  beta: API.Beta = new API.Beta(this);
+  messages: API.Messages = new API.Messages(this);
 
   protected override defaultQuery(): Core.DefaultQuery | undefined {
     return this._options.defaultQuery;
@@ -236,7 +236,25 @@ export namespace Anthropic {
   export import CompletionCreateParamsNonStreaming = API.CompletionCreateParamsNonStreaming;
   export import CompletionCreateParamsStreaming = API.CompletionCreateParamsStreaming;
 
-  export import Beta = API.Beta;
+  export import Messages = API.Messages;
+  export import ContentBlock = API.ContentBlock;
+  export import ContentBlockDeltaEvent = API.ContentBlockDeltaEvent;
+  export import ContentBlockStartEvent = API.ContentBlockStartEvent;
+  export import ContentBlockStopEvent = API.ContentBlockStopEvent;
+  export import Message = API.Message;
+  export import MessageDeltaEvent = API.MessageDeltaEvent;
+  export import MessageDeltaUsage = API.MessageDeltaUsage;
+  export import MessageParam = API.MessageParam;
+  export import MessageStartEvent = API.MessageStartEvent;
+  export import MessageStopEvent = API.MessageStopEvent;
+  export import MessageStreamEvent = API.MessageStreamEvent;
+  export import TextBlock = API.TextBlock;
+  export import TextDelta = API.TextDelta;
+  export import Usage = API.Usage;
+  export import MessageCreateParams = API.MessageCreateParams;
+  export import MessageCreateParamsNonStreaming = API.MessageCreateParamsNonStreaming;
+  export import MessageCreateParamsStreaming = API.MessageCreateParamsStreaming;
+  export import MessageStreamParams = API.MessageStreamParams;
 }
 
 export default Anthropic;
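
As a small, hedged illustration (not in this commit), the re-exports above let consumers reference the Messages types directly from the default export:

```ts
import Anthropic from '@anthropic-ai/sdk';

// The names below are the re-exports added in this hunk; the event-type strings
// in the comment are examples of the API's stream event types.
function describeEvent(event: Anthropic.MessageStreamEvent): string {
  return event.type; // e.g. 'message_start', 'content_block_delta', 'message_stop'
}

const turn: Anthropic.MessageParam = { role: 'user', content: 'Hello' };
```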

src/lib/MessageStream.ts

Lines changed: 1 addition & 1 deletion
@@ -8,7 +8,7 @@ import {
   MessageParam,
   MessageCreateParams,
   MessageStreamParams,
-} from '@anthropic-ai/sdk/resources/beta/messages';
+} from '@anthropic-ai/sdk/resources/messages';
 import { type ReadableStream } from '@anthropic-ai/sdk/_shims/index';
 import { Stream } from '@anthropic-ai/sdk/streaming';
 
1414
