|
| 1 | +--- |
| 2 | +sidebar_label: DeepSeek |
| 3 | +title: ChatDeepSeek |
| 4 | +--- |
| 5 | + |
| 6 | +export const quartoRawHtml = [ |
| 7 | + ` |
| 8 | +<table style="width:100%;"> |
| 9 | +<colgroup> |
| 10 | +<col style="width: 12%" /> |
| 11 | +<col style="width: 12%" /> |
| 12 | +<col style="width: 15%" /> |
| 13 | +<col style="width: 15%" /> |
| 14 | +<col style="width: 15%" /> |
| 15 | +<col style="width: 15%" /> |
| 16 | +<col style="width: 15%" /> |
| 17 | +</colgroup> |
| 18 | +<thead> |
| 19 | +<tr> |
| 20 | +<th style="text-align: left;">Class</th> |
| 21 | +<th style="text-align: left;">Package</th> |
| 22 | +<th style="text-align: center;">Local</th> |
| 23 | +<th style="text-align: center;">Serializable</th> |
| 24 | +<th style="text-align: center;"><a href="https://python.langchain.com/docs/integrations/chat/deepseek">PY support</a></th> |
| 25 | +<th style="text-align: center;">Package downloads</th> |
| 26 | +<th style="text-align: center;">Package latest</th> |
| 27 | +</tr> |
| 28 | +</thead> |
| 29 | +<tbody> |
| 30 | +<tr> |
| 31 | +<td style="text-align: left;"><a href="https://api.js.langchain.com/classes/_langchain_deepseek.ChatDeepSeek.html"><code>ChatDeepSeek</code></a></td> |
| 32 | +<td style="text-align: left;"><a href="https://npmjs.com/@langchain/deepseek"><code>@langchain/deepseek</code></a></td> |
| 33 | +<td style="text-align: center;">❌ (see <a href="../../../docs/integrations/chat/ollama">Ollama</a>)</td> |
| 34 | +<td style="text-align: center;">beta</td> |
| 35 | +<td style="text-align: center;">✅</td> |
| 36 | +<td style="text-align: center;"><img src="https://img.shields.io/npm/dm/@langchain/deepseek?style=flat-square&label=%20&.png" alt="NPM - Downloads" /></td> |
| 37 | +<td style="text-align: center;"><img src="https://img.shields.io/npm/v/@langchain/deepseek?style=flat-square&label=%20&.png" alt="NPM - Version" /></td> |
| 38 | +</tr> |
| 39 | +</tbody> |
| 40 | +</table> |
| 41 | +`, |
| 42 | + ` |
| 43 | +<table style="width:100%;"> |
| 44 | +<colgroup> |
| 45 | +<col style="width: 11%" /> |
| 46 | +<col style="width: 11%" /> |
| 47 | +<col style="width: 11%" /> |
| 48 | +<col style="width: 11%" /> |
| 49 | +<col style="width: 11%" /> |
| 50 | +<col style="width: 11%" /> |
| 51 | +<col style="width: 11%" /> |
| 52 | +<col style="width: 11%" /> |
| 53 | +<col style="width: 11%" /> |
| 54 | +</colgroup> |
| 55 | +<thead> |
| 56 | +<tr> |
| 57 | +<th style="text-align: center;"><a href="../../../docs/how_to/tool_calling">Tool calling</a></th> |
| 58 | +<th style="text-align: center;"><a href="../../../docs/how_to/structured_output/">Structured output</a></th> |
| 59 | +<th style="text-align: center;">JSON mode</th> |
| 60 | +<th style="text-align: center;"><a href="../../../docs/how_to/multimodal_inputs/">Image input</a></th> |
| 61 | +<th style="text-align: center;">Audio input</th> |
| 62 | +<th style="text-align: center;">Video input</th> |
| 63 | +<th style="text-align: center;"><a href="../../../docs/how_to/chat_streaming/">Token-level streaming</a></th> |
| 64 | +<th style="text-align: center;"><a href="../../../docs/how_to/chat_token_usage_tracking/">Token usage</a></th> |
| 65 | +<th style="text-align: center;"><a href="../../../docs/how_to/logprobs/">Logprobs</a></th> |
| 66 | +</tr> |
| 67 | +</thead> |
| 68 | +<tbody> |
| 69 | +<tr> |
| 70 | +<td style="text-align: center;">✅</td> |
| 71 | +<td style="text-align: center;">✅</td> |
| 72 | +<td style="text-align: center;">✅</td> |
| 73 | +<td style="text-align: center;">❌</td> |
| 74 | +<td style="text-align: center;">❌</td> |
| 75 | +<td style="text-align: center;">❌</td> |
| 76 | +<td style="text-align: center;">✅</td> |
| 77 | +<td style="text-align: center;">✅</td> |
| 78 | +<td style="text-align: center;">✅</td> |
| 79 | +</tr> |
| 80 | +</tbody> |
| 81 | +</table> |
| 82 | +`, |
| 83 | + `<!-- ## Invocation -->`, |
| 84 | +]; |
| 85 | + |
| 86 | +This will help you get started with DeepSeek [chat |
| 87 | +models](../../../docs/concepts/#chat-models). For detailed documentation |
| 88 | +of all `ChatDeepSeek` features and configurations head to the [API |
| 89 | +reference](https://api.js.langchain.com/classes/_langchain_deepseek.ChatDeepSeek.html). |
| 90 | + |
| 91 | +## Overview |
| 92 | + |
| 93 | +### Integration details |
| 94 | + |
| 95 | +<div dangerouslySetInnerHTML={{ __html: quartoRawHtml[0] }} /> |
| 96 | + |
| 97 | +### Model features |
| 98 | + |
| 99 | +See the links in the table headers below for guides on how to use |
| 100 | +specific features. |
| 101 | + |
| 102 | +<div dangerouslySetInnerHTML={{ __html: quartoRawHtml[1] }} /> |
| 103 | + |
| 104 | +Note that as of 1/27/25, tool calling and structured output are not |
| 105 | +currently supported for `deepseek-reasoner`. |
| 106 | + |
| 107 | +## Setup |
| 108 | + |
| 109 | +To access DeepSeek models you’ll need to create a DeepSeek account, get |
| 110 | +an API key, and install the `@langchain/deepseek` integration package. |
| 111 | + |
| 112 | +You can also access the DeepSeek API through providers like [Together |
| 113 | +AI](../../../docs/integrations/chat/togetherai) or |
| 114 | +[Ollama](../../../docs/integrations/chat/ollama). |
| 115 | + |
| 116 | +### Credentials |
| 117 | + |
| 118 | +Head to https://deepseek.com/ to sign up for DeepSeek and generate an API |
| 119 | +key. Once you’ve done this set the `DEEPSEEK_API_KEY` environment |
| 120 | +variable: |
| 121 | + |
| 122 | +```bash |
| 123 | +export DEEPSEEK_API_KEY="your-api-key" |
| 124 | +``` |
| 125 | + |
| 126 | +If you want to get automated tracing of your model calls you can also |
| 127 | +set your [LangSmith](https://docs.smith.langchain.com/) API key by |
| 128 | +uncommenting below: |
| 129 | + |
| 130 | +```bash |
| 131 | +# export LANGSMITH_TRACING="true" |
| 132 | +# export LANGSMITH_API_KEY="your-api-key" |
| 133 | +``` |
| 134 | + |
| 135 | +### Installation |
| 136 | + |
| 137 | +The LangChain ChatDeepSeek integration lives in the |
| 138 | +`@langchain/deepseek` package: |
| 139 | + |
| 140 | +```mdx-code-block |
| 141 | +import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; |
| 142 | +import Npm2Yarn from "@theme/Npm2Yarn"; |
| 143 | +
|
| 144 | +<IntegrationInstallTooltip></IntegrationInstallTooltip> |
| 145 | +
|
| 146 | +<Npm2Yarn> |
| 147 | + @langchain/deepseek @langchain/core |
| 148 | +</Npm2Yarn> |
| 149 | +
|
| 150 | +``` |
| 151 | + |
| 152 | +## Instantiation |
| 153 | + |
| 154 | +Now we can instantiate our model object and generate chat completions: |
| 155 | + |
| 156 | +```typescript |
| 157 | +import { ChatDeepSeek } from "@langchain/deepseek"; |
| 158 | + |
| 159 | +const llm = new ChatDeepSeek({ |
| 160 | + model: "deepseek-reasoner", |
| 161 | + temperature: 0, |
| 162 | + // other params... |
| 163 | +}); |
| 164 | +``` |
| 165 | + |
| 166 | +<div dangerouslySetInnerHTML={{ __html: quartoRawHtml[2] }} /> |
| 167 | + |
| 168 | +```typescript |
| 169 | +const aiMsg = await llm.invoke([ |
| 170 | + [ |
| 171 | + "system", |
| 172 | + "You are a helpful assistant that translates English to French. Translate the user sentence.", |
| 173 | + ], |
| 174 | + ["human", "I love programming."], |
| 175 | +]); |
| 176 | +aiMsg; |
| 177 | +``` |
| 178 | + |
| 179 | +```text |
| 180 | +AIMessage { |
| 181 | + "id": "e2874482-68a7-4552-8154-b6a245bab429", |
| 182 | + "content": "J'adore la programmation.", |
| 183 | +  "additional_kwargs": { |
| 184 | + "reasoning_content": "...", |
| 185 | + }, |
| 186 | + "response_metadata": { |
| 187 | + "tokenUsage": { |
| 188 | + "promptTokens": 23, |
| 189 | + "completionTokens": 7, |
| 190 | + "totalTokens": 30 |
| 191 | + }, |
| 192 | + "finish_reason": "stop", |
| 193 | + "model_name": "deepseek-reasoner", |
| 194 | + "usage": { |
| 195 | + "prompt_tokens": 23, |
| 196 | + "completion_tokens": 7, |
| 197 | + "total_tokens": 30, |
| 198 | + "prompt_tokens_details": { |
| 199 | + "cached_tokens": 0 |
| 200 | + }, |
| 201 | + "prompt_cache_hit_tokens": 0, |
| 202 | + "prompt_cache_miss_tokens": 23 |
| 203 | + }, |
| 204 | + "system_fingerprint": "fp_3a5770e1b4" |
| 205 | + }, |
| 206 | + "tool_calls": [], |
| 207 | + "invalid_tool_calls": [], |
| 208 | + "usage_metadata": { |
| 209 | + "output_tokens": 7, |
| 210 | + "input_tokens": 23, |
| 211 | + "total_tokens": 30, |
| 212 | + "input_token_details": { |
| 213 | + "cache_read": 0 |
| 214 | + }, |
| 215 | + "output_token_details": {} |
| 216 | + } |
| 217 | +} |
| 218 | +``` |
| 219 | + |
| 220 | +```typescript |
| 221 | +console.log(aiMsg.content); |
| 222 | +``` |
| 223 | + |
| 224 | +```text |
| 225 | +J'adore la programmation. |
| 226 | +``` |
| 227 | + |
| 228 | +## Chaining |
| 229 | + |
| 230 | +We can [chain](../../../docs/how_to/sequence/) our model with a prompt |
| 231 | +template like so: |
| 232 | + |
| 233 | +```typescript |
| 234 | +import { ChatPromptTemplate } from "@langchain/core/prompts"; |
| 235 | + |
| 236 | +const prompt = ChatPromptTemplate.fromMessages([ |
| 237 | + [ |
| 238 | + "system", |
| 239 | + "You are a helpful assistant that translates {input_language} to {output_language}.", |
| 240 | + ], |
| 241 | + ["human", "{input}"], |
| 242 | +]); |
| 243 | + |
| 244 | +const chain = prompt.pipe(llm); |
| 245 | +await chain.invoke({ |
| 246 | + input_language: "English", |
| 247 | + output_language: "German", |
| 248 | + input: "I love programming.", |
| 249 | +}); |
| 250 | +``` |
| 251 | + |
| 252 | +```text |
| 253 | +AIMessage { |
| 254 | + "id": "6e7f6f8c-8d7a-4dad-be07-425384038fd4", |
| 255 | + "content": "Ich liebe es zu programmieren.", |
| 256 | +  "additional_kwargs": { |
| 257 | + "reasoning_content": "...", |
| 258 | + }, |
| 259 | + "response_metadata": { |
| 260 | + "tokenUsage": { |
| 261 | + "promptTokens": 18, |
| 262 | + "completionTokens": 9, |
| 263 | + "totalTokens": 27 |
| 264 | + }, |
| 265 | + "finish_reason": "stop", |
| 266 | + "model_name": "deepseek-reasoner", |
| 267 | + "usage": { |
| 268 | + "prompt_tokens": 18, |
| 269 | + "completion_tokens": 9, |
| 270 | + "total_tokens": 27, |
| 271 | + "prompt_tokens_details": { |
| 272 | + "cached_tokens": 0 |
| 273 | + }, |
| 274 | + "prompt_cache_hit_tokens": 0, |
| 275 | + "prompt_cache_miss_tokens": 18 |
| 276 | + }, |
| 277 | + "system_fingerprint": "fp_3a5770e1b4" |
| 278 | + }, |
| 279 | + "tool_calls": [], |
| 280 | + "invalid_tool_calls": [], |
| 281 | + "usage_metadata": { |
| 282 | + "output_tokens": 9, |
| 283 | + "input_tokens": 18, |
| 284 | + "total_tokens": 27, |
| 285 | + "input_token_details": { |
| 286 | + "cache_read": 0 |
| 287 | + }, |
| 288 | + "output_token_details": {} |
| 289 | + } |
| 290 | +} |
| 291 | +``` |
| 292 | + |
| 293 | +## API reference |
| 294 | + |
| 295 | +For detailed documentation of all ChatDeepSeek features and |
| 296 | +configurations head to the API reference: |
| 297 | +https://api.js.langchain.com/classes/_langchain_deepseek.ChatDeepSeek.html |
| 298 | + |
| 299 | + |
| 300 | +## Related |
| 301 | + |
| 302 | +- Chat model [conceptual guide](/docs/concepts/#chat-models) |
| 303 | +- Chat model [how-to guides](/docs/how_to/#chat-models) |