diff --git a/packages/components/credentials/NvdiaNIMApi.credential.ts b/packages/components/credentials/NvdiaNIMApi.credential.ts
new file mode 100644
index 00000000000..2a56a381a45
--- /dev/null
+++ b/packages/components/credentials/NvdiaNIMApi.credential.ts
@@ -0,0 +1,29 @@
+import { INodeParams, INodeCredential } from '../src/Interface'
+
+/**
+ * Credential definition: API key for an NVIDIA NIM inference endpoint.
+ * NOTE(review): "Nvdia" is a typo for "Nvidia"; the internal id
+ * 'nvdiaNIMApi' is kept unchanged because the ChatNvdiaNIM node
+ * resolves this credential by that exact name.
+ */
+class NvdiaNIMApi implements INodeCredential {
+    label: string
+    name: string
+    version: number
+    description: string
+    inputs: INodeParams[]
+
+    constructor() {
+        this.label = 'Nvdia NIM API Key'
+        this.name = 'nvdiaNIMApi'
+        this.version = 1.0
+        this.inputs = [
+            {
+                label: 'Nvdia NIM API Key',
+                name: 'nvdiaNIMApiKey',
+                type: 'password'
+            }
+        ]
+    }
+}
+
+module.exports = { credClass: NvdiaNIMApi }
diff --git a/packages/components/nodes/chatmodels/ChatNvdiaNIM/ChatNvdiaNIM.ts b/packages/components/nodes/chatmodels/ChatNvdiaNIM/ChatNvdiaNIM.ts
new file mode 100644
index 00000000000..84491c639e9
--- /dev/null
+++ b/packages/components/nodes/chatmodels/ChatNvdiaNIM/ChatNvdiaNIM.ts
@@ -0,0 +1,177 @@
+import { ChatOpenAI, OpenAIChatInput } from '@langchain/openai'
+import { BaseCache } from '@langchain/core/caches'
+import { BaseLLMParams } from '@langchain/core/language_models/llms'
+import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
+import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
+
+/**
+ * Chat-model node for NVIDIA NIM. NIM serves an OpenAI-compatible
+ * /v1 endpoint, so the ChatOpenAI client is reused with a custom
+ * basePath pointing at the deployed NIM instance.
+ */
+class ChatNvdiaNIM_ChatModels implements INode {
+    label: string
+    name: string
+    version: number
+    type: string
+    icon: string
+    category: string
+    description: string
+    baseClasses: string[]
+    credential: INodeParams
+    inputs: INodeParams[]
+
+    constructor() {
+        this.label = 'ChatNvdiaNIM'
+        this.name = 'chatNvdiaNIM'
+        this.version = 1.0
+        this.type = 'ChatNvdiaNIM'
+        this.icon = 'nvdia.svg'
+        this.category = 'Chat Models'
+        this.description = 'Wrapper around Nvdia NIM Inference API'
+        this.baseClasses = [this.type, ...getBaseClasses(ChatOpenAI)]
+        this.credential = {
+            label: 'Connect Credential',
+            name: 'credential',
+            type: 'credential',
+            credentialNames: ['nvdiaNIMApi'],
+            optional: true
+        }
+        this.inputs = [
+            {
+                label: 'Cache',
+                name: 'cache',
+                type: 'BaseCache',
+                optional: true
+            },
+            {
+                label: 'Model Name',
+                name: 'modelName',
+                type: 'string',
+                placeholder: 'microsoft/phi-3-mini-4k-instruct'
+            },
+            {
+                label: 'Temperature',
+                name: 'temperature',
+                type: 'number',
+                step: 0.1,
+                default: 0.9,
+                optional: true
+            },
+            {
+                label: 'Base Path',
+                name: 'basePath',
+                type: 'string',
+                description: 'Specify the URL of the deployed NIM Inference API',
+                placeholder: 'https://integrate.api.nvidia.com/v1'
+            },
+            {
+                label: 'Streaming',
+                name: 'streaming',
+                type: 'boolean',
+                default: true,
+                optional: true,
+                additionalParams: true
+            },
+            {
+                label: 'Max Tokens',
+                name: 'maxTokens',
+                type: 'number',
+                step: 1,
+                optional: true,
+                additionalParams: true
+            },
+            {
+                label: 'Top Probability',
+                name: 'topP',
+                type: 'number',
+                step: 0.1,
+                optional: true,
+                additionalParams: true
+            },
+            {
+                label: 'Frequency Penalty',
+                name: 'frequencyPenalty',
+                type: 'number',
+                step: 0.1,
+                optional: true,
+                additionalParams: true
+            },
+            {
+                label: 'Presence Penalty',
+                name: 'presencePenalty',
+                type: 'number',
+                step: 0.1,
+                optional: true,
+                additionalParams: true
+            },
+            {
+                label: 'Timeout',
+                name: 'timeout',
+                type: 'number',
+                step: 1,
+                optional: true,
+                additionalParams: true
+            },
+            {
+                label: 'Base Options',
+                name: 'baseOptions',
+                type: 'json',
+                optional: true,
+                additionalParams: true
+            }
+        ]
+    }
+
+    /**
+     * Resolves credential + inputs and returns a configured ChatOpenAI client.
+     */
+    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
+        const temperature = nodeData.inputs?.temperature as string
+        const modelName = nodeData.inputs?.modelName as string
+        const maxTokens = nodeData.inputs?.maxTokens as string
+        const topP = nodeData.inputs?.topP as string
+        const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string
+        const presencePenalty = nodeData.inputs?.presencePenalty as string
+        const timeout = nodeData.inputs?.timeout as string
+        const streaming = nodeData.inputs?.streaming as boolean
+        const basePath = nodeData.inputs?.basePath as string
+        const baseOptions = nodeData.inputs?.baseOptions
+        const cache = nodeData.inputs?.cache as BaseCache
+
+        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
+        const nvdiaNIMApiKey = getCredentialParam('nvdiaNIMApiKey', credentialData, nodeData)
+
+        const obj: Partial<OpenAIChatInput> & BaseLLMParams & { nvdiaNIMApiKey?: string } = {
+            temperature: parseFloat(temperature),
+            modelName,
+            openAIApiKey: nvdiaNIMApiKey, // NIM key is sent as the OpenAI-style bearer token
+            streaming: streaming ?? true
+        }
+
+        if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
+        if (topP) obj.topP = parseFloat(topP)
+        if (frequencyPenalty) obj.frequencyPenalty = parseFloat(frequencyPenalty)
+        if (presencePenalty) obj.presencePenalty = parseFloat(presencePenalty)
+        if (timeout) obj.timeout = parseInt(timeout, 10)
+        if (cache) obj.cache = cache
+
+        let parsedBaseOptions: any | undefined = undefined
+
+        if (baseOptions) {
+            try {
+                parsedBaseOptions = typeof baseOptions === 'object' ? baseOptions : JSON.parse(baseOptions)
+            } catch (exception) {
+                throw new Error("Invalid JSON in the ChatNvdiaNIM's BaseOptions: " + exception)
+            }
+        }
+
+        const model = new ChatOpenAI(obj, {
+            basePath,
+            baseOptions: parsedBaseOptions
+        })
+        return model
+    }
+}
+
+module.exports = { nodeClass: ChatNvdiaNIM_ChatModels }
diff --git a/packages/components/nodes/chatmodels/ChatNvdiaNIM/nvdia.svg b/packages/components/nodes/chatmodels/ChatNvdiaNIM/nvdia.svg
new file mode 100644
index 00000000000..76a39e2e635
--- /dev/null
+++ b/packages/components/nodes/chatmodels/ChatNvdiaNIM/nvdia.svg
@@ -0,0 +1,5 @@
+
+
+
+
+
\ No newline at end of file