forked from FlowiseAI/Flowise
-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy path MRKLAgentChat.ts
129 lines (112 loc) · 4.55 KB
/
MRKLAgentChat.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
import { flatten } from 'lodash'
import { AgentExecutor } from 'langchain/agents'
import { pull } from 'langchain/hub'
import { Tool } from '@langchain/core/tools'
import type { PromptTemplate } from '@langchain/core/prompts'
import { BaseChatModel } from '@langchain/core/language_models/chat_models'
import { additionalCallbacks } from '../../../src/handler'
import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { createReactAgent } from '../../../src/agents'
import { ChatOpenAI } from '../../chatmodels/ChatOpenAI/FlowiseChatOpenAI'
import { HumanMessage } from '@langchain/core/messages'
import { addImagesToMessages } from '../../../src/multiModalUtils'
import { ChatPromptTemplate, HumanMessagePromptTemplate } from 'langchain/prompts'
/**
 * Flowise node: ReAct ("Reason + Act") agent wired for chat models.
 *
 * Pulls the `hwchase17/react-chat` prompt from LangChain Hub, builds a ReAct
 * agent over the connected tools/model, and runs it with the session's chat
 * history flattened into the prompt's `chat_history` slot. When the model is
 * a ChatOpenAI instance and the incoming message carries images, the model is
 * switched to `gpt-4-vision-preview` and the image content is appended to the
 * prompt as an extra human message.
 */
class MRKLAgentChat_Agents implements INode {
    label: string
    name: string
    version: number
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs: INodeParams[]
    // Session identifier used to scope memory reads/writes; injected by the host.
    sessionId?: string

    constructor(fields?: { sessionId?: string }) {
        this.label = 'ReAct Agent for Chat Models'
        this.name = 'mrklAgentChat'
        this.version = 3.0
        this.type = 'AgentExecutor'
        this.category = 'Agents'
        this.icon = 'agent.svg'
        this.description = 'Agent that uses the ReAct logic to decide what action to take, optimized to be used with Chat Models'
        this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)]
        this.inputs = [
            {
                label: 'Allowed Tools',
                name: 'tools',
                type: 'Tool',
                list: true
            },
            {
                label: 'Chat Model',
                name: 'model',
                type: 'BaseChatModel'
            },
            {
                label: 'Memory',
                name: 'memory',
                type: 'BaseChatMemory'
            }
        ]
        this.sessionId = fields?.sessionId
    }

    /**
     * No per-node initialization: the agent is constructed fresh on every run.
     */
    async init(): Promise<any> {
        return null
    }

    /**
     * Executes the ReAct agent for one user turn.
     *
     * @param nodeData - resolved node inputs (memory, model, tools)
     * @param input    - the user's message for this turn
     * @param options  - runtime context (callbacks config, prior chat history)
     * @returns the agent's final answer text
     */
    async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string> {
        const memory = nodeData.inputs?.memory as FlowiseMemory
        const model = nodeData.inputs?.model as BaseChatModel
        let tools = nodeData.inputs?.tools as Tool[]
        // Tool inputs may arrive as nested arrays from list-type connections.
        tools = flatten(tools)

        const prompt = await pull<PromptTemplate>('hwchase17/react-chat')
        let chatPromptTemplate: ChatPromptTemplate | undefined = undefined

        if (model instanceof ChatOpenAI) {
            const messageContent = addImagesToMessages(nodeData, options, model.multiModalOption)
            if (messageContent?.length) {
                // Change model to gpt-4-vision
                model.modelName = 'gpt-4-vision-preview'
                // Change default max token to higher when using gpt-4-vision
                model.maxTokens = 1024
                // Rebuild the pulled text prompt as a chat prompt so the image
                // content can be appended as an additional human message.
                const oldTemplate = prompt.template as string
                chatPromptTemplate = ChatPromptTemplate.fromMessages([HumanMessagePromptTemplate.fromTemplate(oldTemplate)])
                chatPromptTemplate.promptMessages.push(new HumanMessage({ content: messageContent }))
            } else {
                // revert to previous values if image upload is empty
                model.modelName = model.configuredModel
                model.maxTokens = model.configuredMaxToken
            }
        }

        const agent = await createReactAgent({
            llm: model,
            tools,
            prompt: chatPromptTemplate ?? prompt
        })

        const executor = new AgentExecutor({
            agent,
            tools,
            verbose: process.env.DEBUG === 'true'
        })

        const callbacks = await additionalCallbacks(nodeData, options)

        const prevChatHistory = options.chatHistory
        const chatHistory = ((await memory.getChatMessages(this.sessionId, false, prevChatHistory)) as IMessage[]) ?? []
        // FIX: join with an actual newline. The previous '\\n' inserted the
        // two-character sequence backslash+n, so the prompt's chat_history was
        // a single run-on line littered with literal "\n" tokens.
        const chatHistoryString = chatHistory.map((hist) => hist.message).join('\n')

        const result = await executor.invoke({ input, chat_history: chatHistoryString }, { callbacks })

        // Persist the turn so subsequent runs in this session see it.
        await memory.addChatMessages(
            [
                {
                    text: input,
                    type: 'userMessage'
                },
                {
                    text: result?.output,
                    type: 'apiMessage'
                }
            ],
            this.sessionId
        )

        return result?.output
    }
}
// CommonJS export in the shape the Flowise node loader expects: { nodeClass }.
module.exports = { nodeClass: MRKLAgentChat_Agents }