
Commit 39e79ec (1 parent: 42c25e1)

[vertexai]: ignore empty AIMessage from previous blocked responses

2 files changed: +38 lines, −0 lines

libs/vertexai/langchain_google_vertexai/chat_models.py

Lines changed: 6 additions & 0 deletions
@@ -357,6 +357,12 @@ def _convert_to_parts(message: BaseMessage) -> List[Part]:
             prev_ai_message = message
             role = "model"
 
+            # Previous blocked messages will have empty content which should be ignored
+            if not message.content and message.response_metadata.get(
+                "is_blocked", False
+            ):
+                continue
+
             parts = []
             if message.content:
                 parts = _convert_to_parts(message)
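
For readers who want the behavior in isolation: the hunk above skips any AI turn whose content is empty and whose `response_metadata` marks it as blocked, so an empty model turn is never sent back to the Vertex AI API. Below is a minimal sketch of the same filtering applied to a message list outside the library; the helper name `drop_blocked_ai_messages` is illustrative and not part of langchain-google-vertexai.

```python
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage


def drop_blocked_ai_messages(history: list[BaseMessage]) -> list[BaseMessage]:
    """Illustrative helper: skip AI turns that were blocked and carry no content."""
    kept: list[BaseMessage] = []
    for message in history:
        if (
            isinstance(message, AIMessage)
            and not message.content
            and message.response_metadata.get("is_blocked", False)
        ):
            # Mirrors the `continue` added in the hunk above: an empty, blocked
            # model turn contributes nothing to the converted history.
            continue
        kept.append(message)
    return kept


history = [
    HumanMessage(content="Tell me something disallowed."),
    AIMessage(content="", response_metadata={"is_blocked": True}),
    HumanMessage(content="How much is 3+3?"),
]
assert len(drop_blocked_ai_messages(history)) == 2
```

Dropping the turn, rather than substituting placeholder text, matches the committed behavior: the blocked turn is simply not forwarded to the model.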

libs/vertexai/tests/integration_tests/test_chat_models.py

Lines changed: 32 additions & 0 deletions
@@ -468,6 +468,38 @@ def test_vertexai_single_call_with_no_system_messages() -> None:
     assert isinstance(response.content, str)
 
 
+@pytest.mark.release
+def test_vertexai_single_call_previous_blocked_response() -> None:
+    """If a previous call was blocked, the AIMessage will have empty content which
+    should be ignored."""
+
+    model = ChatVertexAI(model_name=_DEFAULT_MODEL_NAME, rate_limiter=rate_limiter)
+    text_question2 = "How much is 3+3?"
+    # Previous blocked response included in history. This can happen with a LangGraph
+    # ReAct agent.
+    message1 = AIMessage(
+        content="",
+        response_metadata={
+            "is_blocked": True,
+            "safety_ratings": [
+                {
+                    "category": "HARM_CATEGORY_HARASSMENT",
+                    "probability_label": "MEDIUM",
+                    "probability_score": 0.33039191365242004,
+                    "blocked": True,
+                    "severity": "HARM_SEVERITY_MEDIUM",
+                    "severity_score": 0.2782268822193146,
+                },
+            ],
+            "finish_reason": "SAFETY",
+        },
+    )
+    message2 = HumanMessage(content=text_question2)
+    response = model([message1, message2])
+    assert isinstance(response, AIMessage)
+    assert isinstance(response.content, str)
+
+
 @pytest.mark.release
 @pytest.mark.parametrize("model_name", model_names_to_test)
 def test_get_num_tokens_from_messages(model_name: str) -> None:
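
The integration test above replays a blocked turn that is already in the history. As a usage note, a caller can also detect a freshly blocked response the same way the fix does, by inspecting `response_metadata`. A small sketch, assuming the metadata keys shown in the test fixture (`is_blocked`, `finish_reason`, `safety_ratings`) are populated on blocked responses; the model name and prompt are placeholders:

```python
from langchain_core.messages import HumanMessage
from langchain_google_vertexai import ChatVertexAI

# Placeholder model name; use whichever Gemini model your project has access to.
model = ChatVertexAI(model_name="gemini-1.5-flash")

response = model.invoke([HumanMessage(content="How much is 3+3?")])

# A safety-blocked answer arrives as an AIMessage with empty content plus
# metadata shaped like the fixture in the test above.
if not response.content and response.response_metadata.get("is_blocked", False):
    print("Blocked:", response.response_metadata.get("finish_reason"))
    print("Ratings:", response.response_metadata.get("safety_ratings"))
else:
    print(response.content)
```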
