Skip to content

Commit ff23817

Browse files
Chore/upgrade llamaindex version (FlowiseAI#2440)
* updates to loader to support file upload * adding a todo * upgrade llamaindex * update groq icon * update azure models * update llamaindex version --------- Co-authored-by: Henry <[email protected]>
1 parent e83dcb0 commit ff23817

File tree

22 files changed

+1340
-297
lines changed

22 files changed

+1340
-297
lines changed

packages/components/models.json

Lines changed: 33 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -244,21 +244,29 @@
244244
"label": "gpt-4",
245245
"name": "gpt-4"
246246
},
247+
{
248+
"label": "gpt-4-turbo",
249+
"name": "gpt-4-turbo"
250+
},
247251
{
248252
"label": "gpt-4-32k",
249253
"name": "gpt-4-32k"
250254
},
251255
{
252-
"label": "gpt-35-turbo",
253-
"name": "gpt-35-turbo"
256+
"label": "gpt-3.5-turbo",
257+
"name": "gpt-3.5-turbo"
254258
},
255259
{
256-
"label": "gpt-35-turbo-16k",
257-
"name": "gpt-35-turbo-16k"
260+
"label": "gpt-3.5-turbo-16k",
261+
"name": "gpt-3.5-turbo-16k"
258262
},
259263
{
260264
"label": "gpt-4-vision-preview",
261265
"name": "gpt-4-vision-preview"
266+
},
267+
{
268+
"label": "gpt-4-1106-preview",
269+
"name": "gpt-4-1106-preview"
262270
}
263271
]
264272
},
@@ -504,6 +512,10 @@
504512
{
505513
"name": "chatOpenAI_LlamaIndex",
506514
"models": [
515+
{
516+
"label": "gpt-4o",
517+
"name": "gpt-4o"
518+
},
507519
{
508520
"label": "gpt-4",
509521
"name": "gpt-4"
@@ -622,6 +634,23 @@
622634
"name": "mistral-large-2402"
623635
}
624636
]
637+
},
638+
{
639+
"name": "chatMistral_LlamaIndex",
640+
"models": [
641+
{
642+
"label": "mistral-tiny",
643+
"name": "mistral-tiny"
644+
},
645+
{
646+
"label": "mistral-small",
647+
"name": "mistral-small"
648+
},
649+
{
650+
"label": "mistral-medium",
651+
"name": "mistral-medium"
652+
}
653+
]
625654
}
626655
],
627656
"llm": [
Lines changed: 1 addition & 0 deletions
Loading

packages/components/nodes/agents/OpenAIToolAgent/OpenAIToolAgent_LlamaIndex.ts renamed to packages/components/nodes/agents/LlamaIndexAgents/AnthropicAgent/AnthropicAgent_LlamaIndex.ts

Lines changed: 24 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,9 @@
11
import { flatten } from 'lodash'
2-
import { ChatMessage, OpenAI, OpenAIAgent } from 'llamaindex'
3-
import { getBaseClasses } from '../../../src/utils'
4-
import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams, IUsedTool } from '../../../src/Interface'
2+
import { MessageContentTextDetail, ChatMessage, AnthropicAgent, Anthropic } from 'llamaindex'
3+
import { getBaseClasses } from '../../../../src/utils'
4+
import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams, IUsedTool } from '../../../../src/Interface'
55

6-
class OpenAIFunctionAgent_LlamaIndex_Agents implements INode {
6+
class AnthropicAgent_LlamaIndex_Agents implements INode {
77
label: string
88
name: string
99
version: number
@@ -18,16 +18,15 @@ class OpenAIFunctionAgent_LlamaIndex_Agents implements INode {
1818
badge?: string
1919

2020
constructor(fields?: { sessionId?: string }) {
21-
this.label = 'OpenAI Tool Agent'
22-
this.name = 'openAIToolAgentLlamaIndex'
21+
this.label = 'Anthropic Agent'
22+
this.name = 'anthropicAgentLlamaIndex'
2323
this.version = 1.0
24-
this.type = 'OpenAIToolAgent'
24+
this.type = 'AnthropicAgent'
2525
this.category = 'Agents'
26-
this.icon = 'function.svg'
27-
this.description = `Agent that uses OpenAI Function Calling to pick the tools and args to call using LlamaIndex`
28-
this.baseClasses = [this.type, ...getBaseClasses(OpenAIAgent)]
26+
this.icon = 'Anthropic.svg'
27+
this.description = `Agent that uses Anthropic Claude Function Calling to pick the tools and args to call using LlamaIndex`
28+
this.baseClasses = [this.type, ...getBaseClasses(AnthropicAgent)]
2929
this.tags = ['LlamaIndex']
30-
this.badge = 'NEW'
3130
this.inputs = [
3231
{
3332
label: 'Tools',
@@ -41,7 +40,7 @@ class OpenAIFunctionAgent_LlamaIndex_Agents implements INode {
4140
type: 'BaseChatMemory'
4241
},
4342
{
44-
label: 'OpenAI/Azure Chat Model',
43+
label: 'Anthropic Claude Model',
4544
name: 'model',
4645
type: 'BaseChatModel_LlamaIndex'
4746
},
@@ -63,7 +62,7 @@ class OpenAIFunctionAgent_LlamaIndex_Agents implements INode {
6362

6463
async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string | ICommonObject> {
6564
const memory = nodeData.inputs?.memory as FlowiseMemory
66-
const model = nodeData.inputs?.model as OpenAI
65+
const model = nodeData.inputs?.model as Anthropic
6766
const systemMessage = nodeData.inputs?.systemMessage as string
6867
const prependMessages = options?.prependMessages
6968

@@ -94,31 +93,33 @@ class OpenAIFunctionAgent_LlamaIndex_Agents implements INode {
9493
}
9594
}
9695

97-
const agent = new OpenAIAgent({
96+
const agent = new AnthropicAgent({
9897
tools,
9998
llm: model,
100-
prefixMessages: chatHistory,
99+
chatHistory: chatHistory,
101100
verbose: process.env.DEBUG === 'true' ? true : false
102101
})
103102

104103
let text = ''
105104
const usedTools: IUsedTool[] = []
106105

107-
const response = await agent.chat({
108-
message: input
109-
})
106+
const response = await agent.chat({ message: input, chatHistory, verbose: process.env.DEBUG === 'true' ? true : false })
110107

111108
if (response.sources.length) {
112109
for (const sourceTool of response.sources) {
113110
usedTools.push({
114-
tool: sourceTool.toolName,
115-
toolInput: sourceTool.rawInput,
116-
toolOutput: sourceTool.rawOutput
111+
tool: sourceTool.tool?.metadata.name ?? '',
112+
toolInput: sourceTool.input,
113+
toolOutput: sourceTool.output as any
117114
})
118115
}
119116
}
120117

121-
text = String(response)
118+
if (Array.isArray(response.response.message.content) && response.response.message.content.length > 0) {
119+
text = (response.response.message.content[0] as MessageContentTextDetail).text
120+
} else {
121+
text = response.response.message.content as string
122+
}
122123

123124
await memory.addChatMessages(
124125
[
@@ -138,4 +139,4 @@ class OpenAIFunctionAgent_LlamaIndex_Agents implements INode {
138139
}
139140
}
140141

141-
module.exports = { nodeClass: OpenAIFunctionAgent_LlamaIndex_Agents }
142+
module.exports = { nodeClass: AnthropicAgent_LlamaIndex_Agents }
Lines changed: 167 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,167 @@
1+
import { flatten } from 'lodash'
2+
import { ChatMessage, OpenAI, OpenAIAgent } from 'llamaindex'
3+
import { getBaseClasses } from '../../../../src/utils'
4+
import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams, IUsedTool } from '../../../../src/Interface'
5+
6+
class OpenAIFunctionAgent_LlamaIndex_Agents implements INode {
7+
label: string
8+
name: string
9+
version: number
10+
description: string
11+
type: string
12+
icon: string
13+
category: string
14+
baseClasses: string[]
15+
tags: string[]
16+
inputs: INodeParams[]
17+
sessionId?: string
18+
badge?: string
19+
20+
constructor(fields?: { sessionId?: string }) {
21+
this.label = 'OpenAI Tool Agent'
22+
this.name = 'openAIToolAgentLlamaIndex'
23+
this.version = 2.0
24+
this.type = 'OpenAIToolAgent'
25+
this.category = 'Agents'
26+
this.icon = 'function.svg'
27+
this.description = `Agent that uses OpenAI Function Calling to pick the tools and args to call using LlamaIndex`
28+
this.baseClasses = [this.type, ...getBaseClasses(OpenAIAgent)]
29+
this.tags = ['LlamaIndex']
30+
this.inputs = [
31+
{
32+
label: 'Tools',
33+
name: 'tools',
34+
type: 'Tool_LlamaIndex',
35+
list: true
36+
},
37+
{
38+
label: 'Memory',
39+
name: 'memory',
40+
type: 'BaseChatMemory'
41+
},
42+
{
43+
label: 'OpenAI/Azure Chat Model',
44+
name: 'model',
45+
type: 'BaseChatModel_LlamaIndex'
46+
},
47+
{
48+
label: 'System Message',
49+
name: 'systemMessage',
50+
type: 'string',
51+
rows: 4,
52+
optional: true,
53+
additionalParams: true
54+
}
55+
]
56+
this.sessionId = fields?.sessionId
57+
}
58+
59+
async init(): Promise<any> {
60+
return null
61+
}
62+
63+
async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string | ICommonObject> {
64+
const memory = nodeData.inputs?.memory as FlowiseMemory
65+
const model = nodeData.inputs?.model as OpenAI
66+
const systemMessage = nodeData.inputs?.systemMessage as string
67+
let tools = nodeData.inputs?.tools
68+
tools = flatten(tools)
69+
70+
const isStreamingEnabled = options.socketIO && options.socketIOClientId
71+
72+
const chatHistory = [] as ChatMessage[]
73+
74+
if (systemMessage) {
75+
chatHistory.push({
76+
content: systemMessage,
77+
role: 'system'
78+
})
79+
}
80+
81+
const msgs = (await memory.getChatMessages(this.sessionId, false)) as IMessage[]
82+
for (const message of msgs) {
83+
if (message.type === 'apiMessage') {
84+
chatHistory.push({
85+
content: message.message,
86+
role: 'assistant'
87+
})
88+
} else if (message.type === 'userMessage') {
89+
chatHistory.push({
90+
content: message.message,
91+
role: 'user'
92+
})
93+
}
94+
}
95+
96+
const agent = new OpenAIAgent({
97+
tools,
98+
llm: model,
99+
chatHistory: chatHistory,
100+
verbose: process.env.DEBUG === 'true' ? true : false
101+
})
102+
103+
let text = ''
104+
let isStreamingStarted = false
105+
const usedTools: IUsedTool[] = []
106+
107+
if (isStreamingEnabled) {
108+
const stream = await agent.chat({
109+
message: input,
110+
chatHistory,
111+
stream: true,
112+
verbose: process.env.DEBUG === 'true' ? true : false
113+
})
114+
for await (const chunk of stream) {
115+
//console.log('chunk', chunk)
116+
text += chunk.response.delta
117+
if (!isStreamingStarted) {
118+
isStreamingStarted = true
119+
options.socketIO.to(options.socketIOClientId).emit('start', chunk.response.delta)
120+
if (chunk.sources.length) {
121+
for (const sourceTool of chunk.sources) {
122+
usedTools.push({
123+
tool: sourceTool.tool?.metadata.name ?? '',
124+
toolInput: sourceTool.input,
125+
toolOutput: sourceTool.output as any
126+
})
127+
}
128+
options.socketIO.to(options.socketIOClientId).emit('usedTools', usedTools)
129+
}
130+
}
131+
132+
options.socketIO.to(options.socketIOClientId).emit('token', chunk.response.delta)
133+
}
134+
} else {
135+
const response = await agent.chat({ message: input, chatHistory, verbose: process.env.DEBUG === 'true' ? true : false })
136+
if (response.sources.length) {
137+
for (const sourceTool of response.sources) {
138+
usedTools.push({
139+
tool: sourceTool.tool?.metadata.name ?? '',
140+
toolInput: sourceTool.input,
141+
toolOutput: sourceTool.output as any
142+
})
143+
}
144+
}
145+
146+
text = response.response.message.content as string
147+
}
148+
149+
await memory.addChatMessages(
150+
[
151+
{
152+
text: input,
153+
type: 'userMessage'
154+
},
155+
{
156+
text: text,
157+
type: 'apiMessage'
158+
}
159+
],
160+
this.sessionId
161+
)
162+
163+
return usedTools.length ? { text: text, usedTools } : text
164+
}
165+
}
166+
167+
module.exports = { nodeClass: OpenAIFunctionAgent_LlamaIndex_Agents }
Lines changed: 9 additions & 0 deletions
Loading

packages/components/nodes/chatmodels/AzureChatOpenAI/AzureChatOpenAI_LlamaIndex.ts

Lines changed: 24 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface'
22
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
3-
import { OpenAI, ALL_AVAILABLE_OPENAI_MODELS } from 'llamaindex'
3+
import { OpenAI } from 'llamaindex'
44
import { getModels, MODEL_TYPE } from '../../../src/modelLoader'
55

66
interface AzureOpenAIConfig {
@@ -10,6 +10,28 @@ interface AzureOpenAIConfig {
1010
deploymentName?: string
1111
}
1212

13+
const ALL_AZURE_OPENAI_CHAT_MODELS = {
14+
'gpt-35-turbo': { contextWindow: 4096, openAIModel: 'gpt-3.5-turbo' },
15+
'gpt-35-turbo-16k': {
16+
contextWindow: 16384,
17+
openAIModel: 'gpt-3.5-turbo-16k'
18+
},
19+
'gpt-4': { contextWindow: 8192, openAIModel: 'gpt-4' },
20+
'gpt-4-32k': { contextWindow: 32768, openAIModel: 'gpt-4-32k' },
21+
'gpt-4-turbo': {
22+
contextWindow: 128000,
23+
openAIModel: 'gpt-4-turbo'
24+
},
25+
'gpt-4-vision-preview': {
26+
contextWindow: 128000,
27+
openAIModel: 'gpt-4-vision-preview'
28+
},
29+
'gpt-4-1106-preview': {
30+
contextWindow: 128000,
31+
openAIModel: 'gpt-4-1106-preview'
32+
}
33+
}
34+
1335
class AzureChatOpenAI_LlamaIndex_ChatModels implements INode {
1436
label: string
1537
name: string
@@ -90,7 +112,7 @@ class AzureChatOpenAI_LlamaIndex_ChatModels implements INode {
90112
}
91113

92114
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
93-
const modelName = nodeData.inputs?.modelName as keyof typeof ALL_AVAILABLE_OPENAI_MODELS
115+
const modelName = nodeData.inputs?.modelName as keyof typeof ALL_AZURE_OPENAI_CHAT_MODELS
94116
const temperature = nodeData.inputs?.temperature as string
95117
const maxTokens = nodeData.inputs?.maxTokens as string
96118
const topP = nodeData.inputs?.topP as string

0 commit comments

Comments (0)