Commit 4877181

feat(api): api update
1 parent 52260c1 · commit 4877181

File tree: 8 files changed (+221, -250 lines)

.stats.yml

Lines changed: 3 additions & 3 deletions
@@ -1,4 +1,4 @@
 configured_endpoints: 26
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/anthropic%2Fanthropic-a7b84017aa1126ad99443296dcd81ab2b53f1c346014b92096226cf993f30502.yml
-openapi_spec_hash: 58d4e72c7906bd8a680ab17b99de6215
-config_hash: 0d82158a1e3c18a76a63f7424d293ec6
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/anthropic%2Fanthropic-6c14b7aee4574fdd638512009b5cf88a7288800876a396e9ae83248e80e677ac.yml
+openapi_spec_hash: 5673a8c858d7b7a5ab3b8d5965c6ad1b
+config_hash: a91c4b284696ff571e6ccfa6d7a1a097

MIGRATION.md

Lines changed: 12 additions & 12 deletions
@@ -67,20 +67,20 @@ client.example.list(undefined, { headers: { ... } });
 
 This affects the following methods:
 
-- `client.messages.batches.list()`
-- `client.models.retrieve()`
-- `client.models.list()`
-- `client.beta.models.retrieve()`
-- `client.beta.models.list()`
+- `client.beta.files.list()`
+- `client.beta.files.delete()`
+- `client.beta.files.download()`
+- `client.beta.files.retrieveMetadata()`
 - `client.beta.messages.batches.retrieve()`
 - `client.beta.messages.batches.list()`
 - `client.beta.messages.batches.delete()`
 - `client.beta.messages.batches.cancel()`
 - `client.beta.messages.batches.results()`
-- `client.beta.files.list()`
-- `client.beta.files.delete()`
-- `client.beta.files.download()`
-- `client.beta.files.retrieveMetadata()`
+- `client.beta.models.retrieve()`
+- `client.beta.models.list()`
+- `client.messages.batches.list()`
+- `client.models.retrieve()`
+- `client.models.list()`
 
 ### Removed `httpAgent` in favor of `fetchOptions`
 
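The hunk header above (`client.example.list(undefined, { headers: { ... } });`) shows the call shape this list of methods is about: the optional params argument is passed explicitly as `undefined` so the second argument is read as request options. A minimal sketch applying that pattern to one of the listed methods; the header value is purely illustrative:

```typescript
import Anthropic from '@anthropic-ai/sdk';

const client = new Anthropic(); // picks up ANTHROPIC_API_KEY from the environment

// Pass `undefined` for the optional params so the second argument is
// unambiguously treated as request options (pattern from the hunk header).
const models = await client.models.list(undefined, {
  headers: { 'anthropic-version': '2023-06-01' }, // illustrative header value
});
console.log(models);
```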
@@ -144,16 +144,16 @@ If you were relying on anything that was only exported from `@anthropic-ai/sdk/c
 
 #### Resource classes
 
-Previously under certain circumstances it was possible to import resource classes like `Completions` directly from the root of the package. This was never valid at the type level and only worked in CommonJS files.
+Previously under certain circumstances it was possible to import resource classes like `Beta` directly from the root of the package. This was never valid at the type level and only worked in CommonJS files.
 Now you must always either reference them as static class properties or import them directly from the files in which they are defined.
 
 ```typescript
 // Before
-const { Completions } = require('@anthropic-ai/sdk');
+const { Beta } = require('@anthropic-ai/sdk');
 
 // After
 const { Anthropic } = require('@anthropic-ai/sdk');
-Anthropic.Completions; // or import directly from @anthropic-ai/sdk/resources/completions
+Anthropic.Beta; // or import directly from @anthropic-ai/sdk/resources/beta/beta
 ```
 
 #### Cleaned up `uploads` exports
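As a companion to the hunk above, a self-contained sketch of the two access patterns it names; the `resources/beta/beta` path is taken directly from the diff comment:

```typescript
import Anthropic from '@anthropic-ai/sdk';

// Pattern 1: reference the resource class as a static property of the client class.
const BetaResource = Anthropic.Beta;
console.log(typeof BetaResource); // 'function' — it is the class itself, not an instance

// Pattern 2 (per the diff comment): import it from the file that defines it.
// import { Beta } from '@anthropic-ai/sdk/resources/beta/beta';
```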

README.md

Lines changed: 9 additions & 9 deletions
@@ -26,7 +26,7 @@ const client = new Anthropic({
 
 const message = await client.messages.create({
   max_tokens: 1024,
-  messages: [{ role: 'user', content: 'Hello, Claude' }],
+  messages: [{ content: 'Hello, Claude', role: 'user' }],
   model: 'claude-3-5-sonnet-latest',
 });
 
@@ -44,7 +44,7 @@ const client = new Anthropic();
 
 const stream = await client.messages.create({
   max_tokens: 1024,
-  messages: [{ role: 'user', content: 'Hello, Claude' }],
+  messages: [{ content: 'Hello, Claude', role: 'user' }],
   model: 'claude-3-5-sonnet-latest',
   stream: true,
 });
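The hunk above ends at the `create` call. A sketch of how the returned stream could be consumed; the `for await` loop and the event field names are assumptions based on the SDK's async-iterable streaming interface, not part of this diff:

```typescript
import Anthropic from '@anthropic-ai/sdk';

const client = new Anthropic();

const stream = await client.messages.create({
  max_tokens: 1024,
  messages: [{ content: 'Hello, Claude', role: 'user' }],
  model: 'claude-3-5-sonnet-latest',
  stream: true,
});

// With `stream: true` the call resolves to an async iterable of stream events.
for await (const event of stream) {
  console.log(event.type); // e.g. 'content_block_delta' (illustrative event name)
}
```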
@@ -70,7 +70,7 @@ const client = new Anthropic({
 
 const params: Anthropic.MessageCreateParams = {
   max_tokens: 1024,
-  messages: [{ role: 'user', content: 'Hello, Claude' }],
+  messages: [{ content: 'Hello, Claude', role: 'user' }],
   model: 'claude-3-5-sonnet-latest',
 };
 const message: Anthropic.Message = await client.messages.create(params);
@@ -232,7 +232,7 @@ a subclass of `APIError` will be thrown:
 const message = await client.messages
   .create({
     max_tokens: 1024,
-    messages: [{ role: 'user', content: 'Hello, Claude' }],
+    messages: [{ content: 'Hello, Claude', role: 'user' }],
     model: 'claude-3-5-sonnet-latest',
   })
   .catch(async (err) => {
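The hunk stops inside the `.catch` handler. A sketch of a handler body consistent with the surrounding context line ("a subclass of `APIError` will be thrown"); the `Anthropic.APIError` check and the `status`/`name`/`headers` fields are assumptions about the error class, not shown in this diff:

```typescript
import Anthropic from '@anthropic-ai/sdk';

const client = new Anthropic();

const message = await client.messages
  .create({
    max_tokens: 1024,
    messages: [{ content: 'Hello, Claude', role: 'user' }],
    model: 'claude-3-5-sonnet-latest',
  })
  .catch(async (err) => {
    if (err instanceof Anthropic.APIError) {
      // Assumed fields on the APIError subclass: HTTP status, error class name, response headers.
      console.log(err.status);
      console.log(err.name);
      console.log(err.headers);
      return undefined; // swallow API errors for this sketch
    }
    throw err; // re-throw anything that is not an API error
  });
```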
@@ -290,7 +290,7 @@ const client = new Anthropic({
 });
 
 // Or, configure per-request:
-await client.messages.create({ max_tokens: 1024, messages: [{ role: 'user', content: 'Hello, Claude' }], model: 'claude-3-5-sonnet-latest' }, {
+await client.messages.create({ max_tokens: 1024, messages: [{ content: 'Hello, Claude', role: 'user' }], model: 'claude-3-5-sonnet-latest' }, {
   maxRetries: 5,
 });
 ```
@@ -318,7 +318,7 @@ const client = new Anthropic({
 });
 
 // Override per-request:
-await client.messages.create({ max_tokens: 1024, messages: [{ role: 'user', content: 'Hello, Claude' }], model: 'claude-3-5-sonnet-latest' }, {
+await client.messages.create({ max_tokens: 1024, messages: [{ content: 'Hello, Claude', role: 'user' }], model: 'claude-3-5-sonnet-latest' }, {
   timeout: 5 * 1000,
 });
 ```
@@ -393,7 +393,7 @@ const client = new Anthropic();
 const message = await client.messages.create(
   {
     max_tokens: 1024,
-    messages: [{ role: 'user', content: 'Hello, Claude' }],
+    messages: [{ content: 'Hello, Claude', role: 'user' }],
     model: 'claude-3-5-sonnet-latest',
   },
   { headers: { 'anthropic-version': 'My-Custom-Value' } },
@@ -417,7 +417,7 @@ const client = new Anthropic();
 const response = await client.messages
   .create({
     max_tokens: 1024,
-    messages: [{ role: 'user', content: 'Hello, Claude' }],
+    messages: [{ content: 'Hello, Claude', role: 'user' }],
     model: 'claude-3-5-sonnet-latest',
   })
   .asResponse();
@@ -427,7 +427,7 @@ console.log(response.statusText); // access the underlying Response object
 const { data: message, response: raw } = await client.messages
   .create({
     max_tokens: 1024,
-    messages: [{ role: 'user', content: 'Hello, Claude' }],
+    messages: [{ content: 'Hello, Claude', role: 'user' }],
     model: 'claude-3-5-sonnet-latest',
   })
   .withResponse();
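Rounding out the last hunk, a sketch of reading both halves of the `.withResponse()` result, assuming `raw` is a standard fetch `Response`; the specific header name queried is purely illustrative:

```typescript
import Anthropic from '@anthropic-ai/sdk';

const client = new Anthropic();

const { data: message, response: raw } = await client.messages
  .create({
    max_tokens: 1024,
    messages: [{ content: 'Hello, Claude', role: 'user' }],
    model: 'claude-3-5-sonnet-latest',
  })
  .withResponse();

// `raw` is the underlying fetch Response; `message` is the parsed message.
console.log(raw.status);
console.log(raw.headers.get('request-id')); // illustrative header name
console.log(message.content);
```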
