Feature/add nvdia nim #3749

Merged · 2 commits · Dec 31, 2024
24 changes: 24 additions & 0 deletions packages/components/credentials/NvdiaNIMApi.credential.ts
@@ -0,0 +1,24 @@
import { INodeParams, INodeCredential } from '../src/Interface'

class NvdiaNIMApi implements INodeCredential {
    label: string
    name: string
    version: number
    description: string
    inputs: INodeParams[]

    constructor() {
        this.label = 'Nvdia NIM API Key'
        this.name = 'nvdiaNIMApi'
        this.version = 1.0
        this.inputs = [
            {
                label: 'Nvdia NIM API Key',
                name: 'nvdiaNIMApiKey',
                type: 'password'
            }
        ]
    }
}

module.exports = { credClass: NvdiaNIMApi }
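
The input name registered here, 'nvdiaNIMApiKey', is the key the consuming node looks up at runtime, so the two must stay in sync. A minimal sketch of that lookup, mirroring the pattern used in ChatNvdiaNIM.ts below (getCredentialData and getCredentialParam are the existing helpers from packages/components/src/utils; the wrapper function name is illustrative only):

import { ICommonObject, INodeData } from '../src/Interface'
import { getCredentialData, getCredentialParam } from '../src/utils'

// Illustrative helper (name assumed): resolve the key stored by the
// NvdiaNIMApi credential from inside a node's init(). The string
// 'nvdiaNIMApiKey' must match the input name declared in the credential above.
async function resolveNvdiaNIMApiKey(nodeData: INodeData, options: ICommonObject): Promise<string> {
    const credentialData = await getCredentialData(nodeData.credential ?? '', options)
    return getCredentialParam('nvdiaNIMApiKey', credentialData, nodeData)
}
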
169 changes: 169 additions & 0 deletions packages/components/nodes/chatmodels/ChatNvdiaNIM/ChatNvdiaNIM.ts
@@ -0,0 +1,169 @@
import { ChatOpenAI, OpenAIChatInput } from '@langchain/openai'
import { BaseCache } from '@langchain/core/caches'
import { BaseLLMParams } from '@langchain/core/language_models/llms'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'

class ChatNvdiaNIM_ChatModels implements INode {
    label: string
    name: string
    version: number
    type: string
    icon: string
    category: string
    description: string
    baseClasses: string[]
    credential: INodeParams
    inputs: INodeParams[]

    constructor() {
        this.label = 'ChatNvdiaNIM'
        this.name = 'chatNvdiaNIM'
        this.version = 1.0
        this.type = 'ChatNvdiaNIM'
        this.icon = 'nvdia.svg'
        this.category = 'Chat Models'
        this.description = 'Wrapper around Nvdia NIM Inference API'
        this.baseClasses = [this.type, ...getBaseClasses(ChatOpenAI)]
        this.credential = {
            label: 'Connect Credential',
            name: 'credential',
            type: 'credential',
            credentialNames: ['nvdiaNIMApi'],
            optional: true
        }
        this.inputs = [
            {
                label: 'Cache',
                name: 'cache',
                type: 'BaseCache',
                optional: true
            },
            {
                label: 'Model Name',
                name: 'modelName',
                type: 'string',
                placeholder: 'microsoft/phi-3-mini-4k-instruct'
            },
            {
                label: 'Temperature',
                name: 'temperature',
                type: 'number',
                step: 0.1,
                default: 0.9,
                optional: true
            },
            {
                label: 'Base Path',
                name: 'basePath',
                type: 'string',
                description: 'Specify the URL of the deployed NIM Inference API',
                placeholder: 'https://integrate.api.nvidia.com/v1'
            },
            {
                label: 'Streaming',
                name: 'streaming',
                type: 'boolean',
                default: true,
                optional: true,
                additionalParams: true
            },
            {
                label: 'Max Tokens',
                name: 'maxTokens',
                type: 'number',
                step: 1,
                optional: true,
                additionalParams: true
            },
            {
                label: 'Top Probability',
                name: 'topP',
                type: 'number',
                step: 0.1,
                optional: true,
                additionalParams: true
            },
            {
                label: 'Frequency Penalty',
                name: 'frequencyPenalty',
                type: 'number',
                step: 0.1,
                optional: true,
                additionalParams: true
            },
            {
                label: 'Presence Penalty',
                name: 'presencePenalty',
                type: 'number',
                step: 0.1,
                optional: true,
                additionalParams: true
            },
            {
                label: 'Timeout',
                name: 'timeout',
                type: 'number',
                step: 1,
                optional: true,
                additionalParams: true
            },
            {
                label: 'Base Options',
                name: 'baseOptions',
                type: 'json',
                optional: true,
                additionalParams: true
            }
        ]
    }

    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
        const temperature = nodeData.inputs?.temperature as string
        const modelName = nodeData.inputs?.modelName as string
        const maxTokens = nodeData.inputs?.maxTokens as string
        const topP = nodeData.inputs?.topP as string
        const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string
        const presencePenalty = nodeData.inputs?.presencePenalty as string
        const timeout = nodeData.inputs?.timeout as string
        const streaming = nodeData.inputs?.streaming as boolean
        const basePath = nodeData.inputs?.basePath as string
        const baseOptions = nodeData.inputs?.baseOptions
        const cache = nodeData.inputs?.cache as BaseCache

        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
        const nvdiaNIMApiKey = getCredentialParam('nvdiaNIMApiKey', credentialData, nodeData)

        const obj: Partial<OpenAIChatInput> & BaseLLMParams & { nvdiaNIMApiKey?: string } = {
            temperature: parseFloat(temperature),
            modelName,
            openAIApiKey: nvdiaNIMApiKey,
            streaming: streaming ?? true
        }

        if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
        if (topP) obj.topP = parseFloat(topP)
        if (frequencyPenalty) obj.frequencyPenalty = parseFloat(frequencyPenalty)
        if (presencePenalty) obj.presencePenalty = parseFloat(presencePenalty)
        if (timeout) obj.timeout = parseInt(timeout, 10)
        if (cache) obj.cache = cache

        let parsedBaseOptions: any | undefined = undefined

        if (baseOptions) {
            try {
                parsedBaseOptions = typeof baseOptions === 'object' ? baseOptions : JSON.parse(baseOptions)
            } catch (exception) {
                throw new Error("Invalid JSON in the ChatNvdiaNIM's BaseOptions: " + exception)
            }
        }

        const model = new ChatOpenAI(obj, {
            basePath,
            baseOptions: parsedBaseOptions
        })
        return model
    }
}

module.exports = { nodeClass: ChatNvdiaNIM_ChatModels }
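
Since NIM exposes an OpenAI-compatible API, init() above only swaps the API key and base path on a stock ChatOpenAI client. A minimal standalone sketch of the same wiring, using the model name and hosted endpoint from the node's placeholders (the environment variable name is an assumption; a self-hosted NIM would use its own base path):

import { ChatOpenAI } from '@langchain/openai'

// Point ChatOpenAI at a NIM endpoint. 'https://integrate.api.nvidia.com/v1'
// is the hosted gateway used as the node's Base Path placeholder.
const model = new ChatOpenAI(
    {
        modelName: 'microsoft/phi-3-mini-4k-instruct',
        temperature: 0.9,
        streaming: true,
        // Assumed environment variable; the node reads this value from the
        // nvdiaNIMApi credential instead.
        openAIApiKey: process.env.NVDIA_NIM_API_KEY
    },
    {
        basePath: 'https://integrate.api.nvidia.com/v1'
    }
)

// The returned instance behaves like any other LangChain chat model, e.g.
// await model.invoke('Hello')
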
5 changes: 5 additions & 0 deletions packages/components/nodes/chatmodels/ChatNvdiaNIM/nvdia.svg
(SVG icon for the ChatNvdiaNIM node; not rendered in the diff view)