Commit c98d91e

Authored by jiangbo721 (刘江波) and 刘江波
fix: o1 model error, use max_completion_tokens instead of max_tokens. (#12037)
Co-authored-by: 刘江波 <[email protected]>
1 parent: 3ea54e9 · commit: c98d91e

File tree

1 file changed: +6 −3 lines changed

  • api/core/model_runtime/model_providers/azure_openai/llm

api/core/model_runtime/model_providers/azure_openai/llm/llm.py

+6 −3
@@ -113,7 +113,7 @@ def validate_credentials(self, model: str, credentials: dict) -> None:
         try:
             client = AzureOpenAI(**self._to_credential_kwargs(credentials))
 
-            if "o1" in model:
+            if model.startswith("o1"):
                 client.chat.completions.create(
                     messages=[{"role": "user", "content": "ping"}],
                     model=model,
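Alongside the parameter fix, the guard changes from a substring test to a prefix test. A short illustration of the difference (the model names below are hypothetical):

    # Hypothetical deployment names, for illustration only.
    models = ["o1-mini", "o1-preview", "gpt-4o1-custom"]

    print([m for m in models if "o1" in m])           # all three match
    print([m for m in models if m.startswith("o1")])  # only the two o1-* models

The prefix check avoids misclassifying a deployment whose name merely contains "o1" somewhere in the middle.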
@@ -311,7 +311,10 @@ def _chat_generate(
         prompt_messages = self._clear_illegal_prompt_messages(model, prompt_messages)
 
         block_as_stream = False
-        if "o1" in model:
+        if model.startswith("o1"):
+            if "max_tokens" in model_parameters:
+                model_parameters["max_completion_tokens"] = model_parameters["max_tokens"]
+                del model_parameters["max_tokens"]
             if stream:
                 block_as_stream = True
                 stream = False
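This hunk is the substance of the fix: o1 models reject the legacy max_tokens parameter and expect max_completion_tokens instead, so a caller-supplied max_tokens is renamed before the request goes out. A minimal standalone sketch of the same remapping (the helper name and sample values are illustrative, not from the codebase):

    def remap_o1_params(model: str, model_parameters: dict) -> dict:
        # Illustrative helper mirroring the logic added to _chat_generate:
        # rename max_tokens to max_completion_tokens for o1-family models.
        if model.startswith("o1") and "max_tokens" in model_parameters:
            model_parameters["max_completion_tokens"] = model_parameters.pop("max_tokens")
        return model_parameters

    # Example with hypothetical values:
    print(remap_o1_params("o1-mini", {"max_tokens": 512, "temperature": 1}))
    # -> {'temperature': 1, 'max_completion_tokens': 512}

Mutating the dict in place matches the diff, which assigns the new key and then deletes the old one.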
@@ -404,7 +407,7 @@ def _clear_illegal_prompt_messages(self, model: str, prompt_messages: list[Promp
             ]
         )
 
-        if "o1" in model:
+        if model.startswith("o1"):
             system_message_count = len([m for m in prompt_messages if isinstance(m, SystemPromptMessage)])
             if system_message_count > 0:
                 new_prompt_messages = []
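The hunk cuts off before showing what happens to new_prompt_messages. Since o1 models at the time did not accept the "system" role, a plausible continuation (assumed here, not visible in this diff) downgrades system prompts to user messages rather than dropping them:

    # Assumed continuation, for illustration only; the real loop body
    # lies outside this hunk.
    new_prompt_messages = []
    for message in prompt_messages:
        if isinstance(message, SystemPromptMessage):
            # o1 rejects the "system" role, so re-issue the content
            # as a user message instead of discarding it.
            new_prompt_messages.append(UserPromptMessage(content=message.content))
        else:
            new_prompt_messages.append(message)
    prompt_messages = new_prompt_messages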
