Skip to content

Feat/deepseek chat node #3732

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 23 additions & 0 deletions packages/components/credentials/DeepseekApi.credential.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
import { INodeCredential, INodeParams } from '../src/Interface'

/**
 * Credential definition for the DeepseekAI API.
 * Exposes a single masked input that stores the user's Deepseek API key,
 * referenced by chat-model nodes via the credential name `deepseekApi`.
 */
class DeepseekApi implements INodeCredential {
    label = 'DeepseekAI API'
    name = 'deepseekApi'
    version = 1.0
    // One field: the API key, rendered as a password input in the UI.
    inputs: INodeParams[] = [
        {
            label: 'DeepseekAI API Key',
            name: 'deepseekApiKey',
            type: 'password'
        }
    ]
}

module.exports = { credClass: DeepseekApi }
13 changes: 13 additions & 0 deletions packages/components/models.json
Original file line number Diff line number Diff line change
Expand Up @@ -572,6 +572,19 @@
}
]
},
{
"name": "deepseek",
"models": [
{
"label": "deepseek-chat",
"name": "deepseek-chat"
},
{
"label": "deepseek-coder",
"name": "deepseek-coder"
}
]
},
{
"name": "chatOpenAI",
"models": [
Expand Down
235 changes: 235 additions & 0 deletions packages/components/nodes/chatmodels/Deepseek/Deepseek.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,235 @@
import { BaseCache } from '@langchain/core/caches'
import { BaseChatModelParams } from '@langchain/core/language_models/chat_models'
import { AzureOpenAIInput, ChatOpenAI, LegacyOpenAIInput, OpenAIChatInput } from '@langchain/openai'
import type { ClientOptions } from 'openai'
import { ICommonObject, IMultiModalOption, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface'
import { getModels, MODEL_TYPE } from '../../../src/modelLoader'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'

class Deepseek_ChatModels implements INode {
readonly baseURL: string = 'https://api.deepseek.com'
label: string
name: string
version: number
type: string
icon: string
category: string
description: string
baseClasses: string[]
credential: INodeParams
inputs: INodeParams[]

constructor() {
this.label = 'ChatDeepseek'
this.name = 'chatDeepseek'
this.version = 1.0
this.type = 'chatDeepseek'
this.icon = 'deepseek.svg'
this.category = 'Chat Models'
this.description = 'Wrapper around Deepseek large language models that use the Chat endpoint'
this.baseClasses = [this.type, ...getBaseClasses(ChatOpenAI)]
this.credential = {
label: 'Connect Credential',
name: 'credential',
type: 'credential',
credentialNames: ['deepseekApi']
}
this.inputs = [
{
label: 'Cache',
name: 'cache',
type: 'BaseCache',
optional: true
},
{
label: 'Model Name',
name: 'modelName',
type: 'asyncOptions',
loadMethod: 'listModels',
default: 'deepseek-chat'
},
{
label: 'Temperature',
name: 'temperature',
type: 'number',
step: 0.1,
default: 0.7,
optional: true
},
{
label: 'Streaming',
name: 'streaming',
type: 'boolean',
default: true,
optional: true,
additionalParams: true
},
{
label: 'Max Tokens',
name: 'maxTokens',
type: 'number',
step: 1,
optional: true,
additionalParams: true
},
{
label: 'Top Probability',
name: 'topP',
type: 'number',
step: 0.1,
optional: true,
additionalParams: true
},
{
label: 'Frequency Penalty',
name: 'frequencyPenalty',
type: 'number',
step: 0.1,
optional: true,
additionalParams: true
},
{
label: 'Presence Penalty',
name: 'presencePenalty',
type: 'number',
step: 0.1,
optional: true,
additionalParams: true
},
{
label: 'Timeout',
name: 'timeout',
type: 'number',
step: 1,
optional: true,
additionalParams: true
},
{
label: 'Stop Sequence',
name: 'stopSequence',
type: 'string',
rows: 4,
optional: true,
description: 'List of stop words to use when generating. Use comma to separate multiple stop words.',
additionalParams: true
},
{
label: 'BaseOptions',
name: 'baseOptions',
type: 'json',
optional: true,
additionalParams: true
},
{
label: 'Allow Image Uploads',
name: 'allowImageUploads',
type: 'boolean',
description:
'Allow image input. Refer to the <a href="https://docs.flowiseai.com/using-flowise/uploads#image" target="_blank">docs</a> for more details.',
default: false,
optional: true
},
{
label: 'Image Resolution',
description: 'This parameter controls the resolution in which the model views the image.',
name: 'imageResolution',
type: 'options',
options: [
{
label: 'Low',
name: 'low'
},
{
label: 'High',
name: 'high'
},
{
label: 'Auto',
name: 'auto'
}
],
default: 'low',
optional: false,
additionalParams: true
}
]
}

//@ts-ignore
loadMethods = {
async listModels(): Promise<INodeOptionsValue[]> {
return await getModels(MODEL_TYPE.CHAT, 'deepseek')
}
}

async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const temperature = nodeData.inputs?.temperature as string
const modelName = nodeData.inputs?.modelName as string
const maxTokens = nodeData.inputs?.maxTokens as string
const topP = nodeData.inputs?.topP as string
const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string
const presencePenalty = nodeData.inputs?.presencePenalty as string
const timeout = nodeData.inputs?.timeout as string
const stopSequence = nodeData.inputs?.stopSequence as string
const streaming = nodeData.inputs?.streaming as boolean
const baseOptions = nodeData.inputs?.baseOptions

const allowImageUploads = nodeData.inputs?.allowImageUploads as boolean
const imageResolution = nodeData.inputs?.imageResolution as string

if (nodeData.inputs?.credentialId) {
nodeData.credential = nodeData.inputs?.credentialId
}
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const openAIApiKey = getCredentialParam('deepseekApiKey', credentialData, nodeData)

const cache = nodeData.inputs?.cache as BaseCache

const obj: Partial<OpenAIChatInput> &
Partial<AzureOpenAIInput> &
BaseChatModelParams & { configuration?: ClientOptions & LegacyOpenAIInput } = {
temperature: parseFloat(temperature),
modelName,
openAIApiKey,
streaming: streaming ?? true
}

if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
if (topP) obj.topP = parseFloat(topP)
if (frequencyPenalty) obj.frequencyPenalty = parseFloat(frequencyPenalty)
if (presencePenalty) obj.presencePenalty = parseFloat(presencePenalty)
if (timeout) obj.timeout = parseInt(timeout, 10)
if (cache) obj.cache = cache
if (stopSequence) {
const stopSequenceArray = stopSequence.split(',').map((item) => item.trim())
obj.stop = stopSequenceArray
}

let parsedBaseOptions: any | undefined = undefined

if (baseOptions) {
try {
parsedBaseOptions = typeof baseOptions === 'object' ? baseOptions : JSON.parse(baseOptions)

Check warning on line 212 in packages/components/nodes/chatmodels/Deepseek/Deepseek.ts

View workflow job for this annotation

GitHub Actions / build (ubuntu-latest, 18.15.0)

'parsedBaseOptions' is assigned a value but never used. Allowed unused vars must match /^_/u
} catch (exception) {
throw new Error("Invalid JSON in the ChatOpenAI's BaseOptions: " + exception)
}
}

const multiModalOption: IMultiModalOption = {

Check warning on line 218 in packages/components/nodes/chatmodels/Deepseek/Deepseek.ts

View workflow job for this annotation

GitHub Actions / build (ubuntu-latest, 18.15.0)

'multiModalOption' is assigned a value but never used. Allowed unused vars must match /^_/u
image: {
allowImageUploads: allowImageUploads ?? false,
imageResolution
}
}

const model = new ChatOpenAI({
...obj,
configuration: {
baseURL: this.baseURL
}
})
return model
}
}

module.exports = { nodeClass: Deepseek_ChatModels }
30 changes: 30 additions & 0 deletions packages/components/nodes/chatmodels/Deepseek/deepseek.svg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading