@@ -1,11 +1,9 @@
-import { AzureOpenAIInput, ChatOpenAI as LangchainChatOpenAI, OpenAIChatInput, ClientOptions, LegacyOpenAIInput } from '@langchain/openai'
+import { AzureOpenAIInput, AzureChatOpenAI as LangchainAzureChatOpenAI, ChatOpenAIFields, OpenAIClient } from '@langchain/openai'
 import { BaseCache } from '@langchain/core/caches'
-import { BaseLLMParams } from '@langchain/core/language_models/llms'
 import { ICommonObject, IMultiModalOption, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface'
 import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
-import { ChatOpenAI } from '../ChatOpenAI/FlowiseChatOpenAI'
 import { getModels, MODEL_TYPE } from '../../../src/modelLoader'
-import { BaseChatModelParams } from '@langchain/core/language_models/chat_models'
+import { AzureChatOpenAI } from './FlowiseAzureChatOpenAI'
 
 const serverCredentialsExists =
     !!process.env.AZURE_OPENAI_API_KEY &&
@@ -33,7 +31,7 @@ class AzureChatOpenAI_ChatModels implements INode {
         this.icon = 'Azure.svg'
         this.category = 'Chat Models'
         this.description = 'Wrapper around Azure OpenAI large language models that use the Chat endpoint'
-        this.baseClasses = [this.type, ...getBaseClasses(LangchainChatOpenAI)]
+        this.baseClasses = [this.type, ...getBaseClasses(LangchainAzureChatOpenAI)]
         this.credential = {
             label: 'Connect Credential',
             name: 'credential',
@@ -155,6 +153,29 @@ class AzureChatOpenAI_ChatModels implements INode {
                 default: 'low',
                 optional: false,
                 additionalParams: true
+            },
+            {
+                label: 'Reasoning Effort',
+                description: 'Constrains effort on reasoning for reasoning models. Only applicable for o1 models',
+                name: 'reasoningEffort',
+                type: 'options',
+                options: [
+                    {
+                        label: 'Low',
+                        name: 'low'
+                    },
+                    {
+                        label: 'Medium',
+                        name: 'medium'
+                    },
+                    {
+                        label: 'High',
+                        name: 'high'
+                    }
+                ],
+                default: 'low',
+                optional: false,
+                additionalParams: true
             }
         ]
     }
@@ -178,6 +199,7 @@ class AzureChatOpenAI_ChatModels implements INode {
         const topP = nodeData.inputs?.topP as string
         const basePath = nodeData.inputs?.basepath as string
         const baseOptions = nodeData.inputs?.baseOptions
+        const reasoningEffort = nodeData.inputs?.reasoningEffort as OpenAIClient.Chat.ChatCompletionReasoningEffort
 
         const credentialData = await getCredentialData(nodeData.credential ?? '', options)
         const azureOpenAIApiKey = getCredentialParam('azureOpenAIApiKey', credentialData, nodeData)
@@ -188,10 +210,7 @@ class AzureChatOpenAI_ChatModels implements INode {
         const allowImageUploads = nodeData.inputs?.allowImageUploads as boolean
         const imageResolution = nodeData.inputs?.imageResolution as string
 
-        const obj: Partial<AzureOpenAIInput> &
-            BaseLLMParams &
-            Partial<OpenAIChatInput> &
-            BaseChatModelParams & { configuration?: ClientOptions & LegacyOpenAIInput } = {
+        const obj: ChatOpenAIFields & Partial<AzureOpenAIInput> = {
             temperature: parseFloat(temperature),
             modelName,
             azureOpenAIApiKey,
@@ -218,6 +237,12 @@ class AzureChatOpenAI_ChatModels implements INode {
                 console.error('Error parsing base options', exception)
             }
         }
+        if (modelName === 'o3-mini') {
+            delete obj.temperature
+        }
+        if ((modelName.includes('o1') || modelName.includes('o3')) && reasoningEffort) {
+            obj.reasoningEffort = reasoningEffort
+        }
 
         const multiModalOption: IMultiModalOption = {
             image: {
@@ -226,7 +251,7 @@ class AzureChatOpenAI_ChatModels implements INode {
             }
         }
 
-        const model = new ChatOpenAI(nodeData.id, obj)
+        const model = new AzureChatOpenAI(nodeData.id, obj)
         model.setMultiModalOption(multiModalOption)
         return model
     }
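
The two new guards in the later hunks key off the model name: `o3-mini` has its `temperature` removed from the config object, and any `o1`/`o3` model gets the selected `reasoningEffort` forwarded. Below is a minimal standalone sketch of that gating, using a hypothetical `buildAzureConfig` helper and a simplified config type standing in for `ChatOpenAIFields & Partial<AzureOpenAIInput>`; it is an illustration of the diff's logic, not code from the repository.

// Illustration only: mirrors the model-name gating added in this diff.
// ReasoningEffort stands in for OpenAIClient.Chat.ChatCompletionReasoningEffort.
type ReasoningEffort = 'low' | 'medium' | 'high'

interface AzureChatConfig {
    modelName: string
    temperature?: number
    reasoningEffort?: ReasoningEffort
}

function buildAzureConfig(modelName: string, temperature: number, reasoningEffort?: ReasoningEffort): AzureChatConfig {
    const obj: AzureChatConfig = { modelName, temperature }

    // o3-mini: drop temperature from the config entirely
    if (modelName === 'o3-mini') {
        delete obj.temperature
    }

    // o1/o3 models: forward the reasoning effort chosen in the node input
    if ((modelName.includes('o1') || modelName.includes('o3')) && reasoningEffort) {
        obj.reasoningEffort = reasoningEffort
    }

    return obj
}

// buildAzureConfig('o3-mini', 0.9, 'high') -> { modelName: 'o3-mini', reasoningEffort: 'high' }
// buildAzureConfig('gpt-4o', 0.9)          -> { modelName: 'gpt-4o', temperature: 0.9 }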