Skip to content

Commit 93f3a5d

Browse files
Feat/deepseek chat node (#3732)
* Feat: Add deepseek models to components configuration

* Feat: Implement Deepseek API integration with chat models and add SVG icon

* Refactor: Remove image input options and add missing baseOptions in deepseek chat node
1 parent 0381a99 commit 93f3a5d

File tree

4 files changed

+263
-0
lines changed

4 files changed

+263
-0
lines changed
Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
import { INodeCredential, INodeParams } from '../src/Interface'
2+
3+
class DeepseekApi implements INodeCredential {
4+
label: string
5+
name: string
6+
version: number
7+
inputs: INodeParams[]
8+
9+
constructor() {
10+
this.label = 'DeepseekAI API'
11+
this.name = 'deepseekApi'
12+
this.version = 1.0
13+
this.inputs = [
14+
{
15+
label: 'DeepseekAI API Key',
16+
name: 'deepseekApiKey',
17+
type: 'password'
18+
}
19+
]
20+
}
21+
}
22+
23+
module.exports = { credClass: DeepseekApi }

packages/components/models.json

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -572,6 +572,19 @@
572572
}
573573
]
574574
},
575+
{
576+
"name": "deepseek",
577+
"models": [
578+
{
579+
"label": "deepseek-chat",
580+
"name": "deepseek-chat"
581+
},
582+
{
583+
"label": "deepseek-coder",
584+
"name": "deepseek-coder"
585+
}
586+
]
587+
},
575588
{
576589
"name": "chatOpenAI",
577590
"models": [
Lines changed: 197 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,197 @@
1+
import { BaseCache } from '@langchain/core/caches'
2+
import { BaseChatModelParams } from '@langchain/core/language_models/chat_models'
3+
import { ChatOpenAI, LegacyOpenAIInput, OpenAIChatInput } from '@langchain/openai'
4+
import type { ClientOptions } from 'openai'
5+
import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface'
6+
import { getModels, MODEL_TYPE } from '../../../src/modelLoader'
7+
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
8+
9+
class Deepseek_ChatModels implements INode {
10+
readonly baseURL: string = 'https://api.deepseek.com'
11+
label: string
12+
name: string
13+
version: number
14+
type: string
15+
icon: string
16+
category: string
17+
description: string
18+
baseClasses: string[]
19+
credential: INodeParams
20+
inputs: INodeParams[]
21+
22+
constructor() {
23+
this.label = 'ChatDeepseek'
24+
this.name = 'chatDeepseek'
25+
this.version = 1.0
26+
this.type = 'chatDeepseek'
27+
this.icon = 'deepseek.svg'
28+
this.category = 'Chat Models'
29+
this.description = 'Wrapper around Deepseek large language models that use the Chat endpoint'
30+
this.baseClasses = [this.type, ...getBaseClasses(ChatOpenAI)]
31+
this.credential = {
32+
label: 'Connect Credential',
33+
name: 'credential',
34+
type: 'credential',
35+
credentialNames: ['deepseekApi']
36+
}
37+
this.inputs = [
38+
{
39+
label: 'Cache',
40+
name: 'cache',
41+
type: 'BaseCache',
42+
optional: true
43+
},
44+
{
45+
label: 'Model Name',
46+
name: 'modelName',
47+
type: 'asyncOptions',
48+
loadMethod: 'listModels',
49+
default: 'deepseek-chat'
50+
},
51+
{
52+
label: 'Temperature',
53+
name: 'temperature',
54+
type: 'number',
55+
step: 0.1,
56+
default: 0.7,
57+
optional: true
58+
},
59+
{
60+
label: 'Streaming',
61+
name: 'streaming',
62+
type: 'boolean',
63+
default: true,
64+
optional: true,
65+
additionalParams: true
66+
},
67+
{
68+
label: 'Max Tokens',
69+
name: 'maxTokens',
70+
type: 'number',
71+
step: 1,
72+
optional: true,
73+
additionalParams: true
74+
},
75+
{
76+
label: 'Top Probability',
77+
name: 'topP',
78+
type: 'number',
79+
step: 0.1,
80+
optional: true,
81+
additionalParams: true
82+
},
83+
{
84+
label: 'Frequency Penalty',
85+
name: 'frequencyPenalty',
86+
type: 'number',
87+
step: 0.1,
88+
optional: true,
89+
additionalParams: true
90+
},
91+
{
92+
label: 'Presence Penalty',
93+
name: 'presencePenalty',
94+
type: 'number',
95+
step: 0.1,
96+
optional: true,
97+
additionalParams: true
98+
},
99+
{
100+
label: 'Timeout',
101+
name: 'timeout',
102+
type: 'number',
103+
step: 1,
104+
optional: true,
105+
additionalParams: true
106+
},
107+
{
108+
label: 'Stop Sequence',
109+
name: 'stopSequence',
110+
type: 'string',
111+
rows: 4,
112+
optional: true,
113+
description: 'List of stop words to use when generating. Use comma to separate multiple stop words.',
114+
additionalParams: true
115+
},
116+
{
117+
label: 'Base Options',
118+
name: 'baseOptions',
119+
type: 'json',
120+
optional: true,
121+
additionalParams: true,
122+
description: 'Additional options to pass to the Deepseek client. This should be a JSON object.'
123+
}
124+
]
125+
}
126+
127+
//@ts-ignore
128+
loadMethods = {
129+
async listModels(): Promise<INodeOptionsValue[]> {
130+
return await getModels(MODEL_TYPE.CHAT, 'deepseek')
131+
}
132+
}
133+
134+
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
135+
const temperature = nodeData.inputs?.temperature as string
136+
const modelName = nodeData.inputs?.modelName as string
137+
const maxTokens = nodeData.inputs?.maxTokens as string
138+
const topP = nodeData.inputs?.topP as string
139+
const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string
140+
const presencePenalty = nodeData.inputs?.presencePenalty as string
141+
const timeout = nodeData.inputs?.timeout as string
142+
const stopSequence = nodeData.inputs?.stopSequence as string
143+
const streaming = nodeData.inputs?.streaming as boolean
144+
const baseOptions = nodeData.inputs?.baseOptions
145+
146+
if (nodeData.inputs?.credentialId) {
147+
nodeData.credential = nodeData.inputs?.credentialId
148+
}
149+
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
150+
const openAIApiKey = getCredentialParam('deepseekApiKey', credentialData, nodeData)
151+
152+
const cache = nodeData.inputs?.cache as BaseCache
153+
154+
const obj: Partial<OpenAIChatInput> & BaseChatModelParams & { configuration?: ClientOptions & LegacyOpenAIInput } = {
155+
temperature: parseFloat(temperature),
156+
modelName,
157+
openAIApiKey,
158+
streaming: streaming ?? true
159+
}
160+
161+
if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
162+
if (topP) obj.topP = parseFloat(topP)
163+
if (frequencyPenalty) obj.frequencyPenalty = parseFloat(frequencyPenalty)
164+
if (presencePenalty) obj.presencePenalty = parseFloat(presencePenalty)
165+
if (timeout) obj.timeout = parseInt(timeout, 10)
166+
if (cache) obj.cache = cache
167+
if (stopSequence) {
168+
const stopSequenceArray = stopSequence.split(',').map((item) => item.trim())
169+
obj.stop = stopSequenceArray
170+
}
171+
172+
let parsedBaseOptions: any | undefined = undefined
173+
174+
if (baseOptions) {
175+
try {
176+
parsedBaseOptions = typeof baseOptions === 'object' ? baseOptions : JSON.parse(baseOptions)
177+
if (parsedBaseOptions.baseURL) {
178+
console.warn("The 'baseURL' parameter is not allowed when using the ChatDeepseek node.")
179+
parsedBaseOptions.baseURL = undefined
180+
}
181+
} catch (exception) {
182+
throw new Error('Invalid JSON in the BaseOptions: ' + exception)
183+
}
184+
}
185+
186+
const model = new ChatOpenAI({
187+
...obj,
188+
configuration: {
189+
baseURL: this.baseURL,
190+
...parsedBaseOptions
191+
}
192+
})
193+
return model
194+
}
195+
}
196+
197+
module.exports = { nodeClass: Deepseek_ChatModels }
Lines changed: 30 additions & 0 deletions
Loading

0 commit comments

Comments
 (0)