Skip to content

Commit fc6eea7

Browse files
authored
Support custom base URL for ChatGoogleGenerativeAI (#4347)
feat: Support custom base URL for ChatGoogleGenerativeAI
1 parent ac04505 commit fc6eea7

File tree

2 files changed

+29
-12
lines changed

2 files changed

+29
-12
lines changed

packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/ChatGoogleGenerativeAI.ts

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -163,6 +163,14 @@ class GoogleGenerativeAI_ChatModels implements INode {
163163
optional: true,
164164
additionalParams: true
165165
},
166+
{
167+
label: 'Base URL',
168+
name: 'baseUrl',
169+
type: 'string',
170+
description: 'Base URL for the API. Leave empty to use the default.',
171+
optional: true,
172+
additionalParams: true
173+
},
166174
{
167175
label: 'Allow Image Uploads',
168176
name: 'allowImageUploads',
@@ -197,6 +205,7 @@ class GoogleGenerativeAI_ChatModels implements INode {
197205
const cache = nodeData.inputs?.cache as BaseCache
198206
const contextCache = nodeData.inputs?.contextCache as FlowiseGoogleAICacheManager
199207
const streaming = nodeData.inputs?.streaming as boolean
208+
const baseUrl = nodeData.inputs?.baseUrl as string | undefined
200209

201210
const allowImageUploads = nodeData.inputs?.allowImageUploads as boolean
202211

@@ -211,6 +220,7 @@ class GoogleGenerativeAI_ChatModels implements INode {
211220
if (topK) obj.topK = parseFloat(topK)
212221
if (cache) obj.cache = cache
213222
if (temperature) obj.temperature = parseFloat(temperature)
223+
if (baseUrl) obj.baseUrl = baseUrl
214224

215225
// Safety Settings
216226
let harmCategories: string[] = convertMultiOptionsToStringArray(harmCategory)

packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/FlowiseChatGoogleGenerativeAI.ts

Lines changed: 19 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -81,6 +81,8 @@ class LangchainChatGoogleGenerativeAI
8181

8282
apiKey?: string
8383

84+
baseUrl?: string
85+
8486
streaming = false
8587

8688
streamUsage = true
@@ -151,19 +153,24 @@ class LangchainChatGoogleGenerativeAI
151153
}
152154

153155
async getClient(prompt?: Content[], tools?: Tool[]) {
154-
this.client = new GenerativeAI(this.apiKey ?? '').getGenerativeModel({
155-
model: this.modelName,
156-
tools,
157-
safetySettings: this.safetySettings as SafetySetting[],
158-
generationConfig: {
159-
candidateCount: 1,
160-
stopSequences: this.stopSequences,
161-
maxOutputTokens: this.maxOutputTokens,
162-
temperature: this.temperature,
163-
topP: this.topP,
164-
topK: this.topK
156+
this.client = new GenerativeAI(this.apiKey ?? '').getGenerativeModel(
157+
{
158+
model: this.modelName,
159+
tools,
160+
safetySettings: this.safetySettings as SafetySetting[],
161+
generationConfig: {
162+
candidateCount: 1,
163+
stopSequences: this.stopSequences,
164+
maxOutputTokens: this.maxOutputTokens,
165+
temperature: this.temperature,
166+
topP: this.topP,
167+
topK: this.topK
168+
}
169+
},
170+
{
171+
baseUrl: this.baseUrl
165172
}
166-
})
173+
)
167174
if (this.contextCache) {
168175
const cachedContent = await this.contextCache.lookup({
169176
contents: prompt ? [{ ...prompt[0], parts: prompt[0].parts.slice(0, 1) }] : [],

0 commit comments

Comments (0)