Skip to content

Commit ee543c1

Browse files
authored
chore: llama.cpp - gently handle the removal of ChatMessage.from_function (#1298)
1 parent 203182b commit ee543c1

File tree

3 files changed: +22 −25 lines changed

integrations/llama_cpp/pyproject.toml

+1-1
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@ classifiers = [
     "Programming Language :: Python :: Implementation :: CPython",
     "Programming Language :: Python :: Implementation :: PyPy",
 ]
-dependencies = ["haystack-ai", "llama-cpp-python>=0.2.87"]
+dependencies = ["haystack-ai>=2.9.0", "llama-cpp-python>=0.2.87"]

 [project.urls]
 Documentation = "https://github.com/deepset-ai/haystack-core-integrations/tree/main/integrations/llama_cpp#readme"

integrations/llama_cpp/src/haystack_integrations/components/generators/llama_cpp/chat/chat_generator.py

+1-5
Original file line numberDiff line numberDiff line change
@@ -138,11 +138,7 @@ def run(self, messages: List[ChatMessage], generation_kwargs: Optional[Dict[str,
             name = tool_calls[0]["function"]["name"]

             reply = ChatMessage.from_assistant(choice["message"]["content"], meta=meta)
-            if name:
-                if hasattr(reply, "_name"):
-                    reply._name = name  # new ChatMessage
-                elif hasattr(reply, "name"):
-                    reply.name = name  # legacy ChatMessage
+            reply._name = name or None
             replies.append(reply)

         return {"replies": replies}

integrations/llama_cpp/tests/test_chat_generator.py

+20-19
Original file line numberDiff line numberDiff line change
@@ -40,12 +40,12 @@ def test_convert_message_to_llamacpp_format():
     message = ChatMessage.from_user("I have a question")
     assert _convert_message_to_llamacpp_format(message) == {"role": "user", "content": "I have a question"}

-    message = ChatMessage.from_function("Function call", "function_name")
-    converted_message = _convert_message_to_llamacpp_format(message)
-
-    assert converted_message["role"] in ("function", "tool")
-    assert converted_message["name"] == "function_name"
-    assert converted_message["content"] == "Function call"
+    if hasattr(ChatMessage, "from_function"):
+        message = ChatMessage.from_function("Function call", "function_name")
+        converted_message = _convert_message_to_llamacpp_format(message)
+        assert converted_message["role"] in ("function", "tool")
+        assert converted_message["name"] == "function_name"
+        assert converted_message["content"] == "Function call"


 class TestLlamaCppChatGenerator:
@@ -420,19 +420,20 @@ def test_function_call_and_execute(self, generator):
         assert "tool_calls" in first_reply.meta
         tool_calls = first_reply.meta["tool_calls"]

-        for tool_call in tool_calls:
-            function_name = tool_call["function"]["name"]
-            function_args = json.loads(tool_call["function"]["arguments"])
-            assert function_name in available_functions
-            function_response = available_functions[function_name](**function_args)
-            function_message = ChatMessage.from_function(function_response, function_name)
-            messages.append(function_message)
-
-        second_response = generator.run(messages=messages)
-        assert "replies" in second_response
-        assert len(second_response["replies"]) > 0
-        assert any("San Francisco" in reply.text for reply in second_response["replies"])
-        assert any("72" in reply.text for reply in second_response["replies"])
+        if hasattr(ChatMessage, "from_function"):
+            for tool_call in tool_calls:
+                function_name = tool_call["function"]["name"]
+                function_args = json.loads(tool_call["function"]["arguments"])
+                assert function_name in available_functions
+                function_response = available_functions[function_name](**function_args)
+                function_message = ChatMessage.from_function(function_response, function_name)
+                messages.append(function_message)
+
+            second_response = generator.run(messages=messages)
+            assert "replies" in second_response
+            assert len(second_response["replies"]) > 0
+            assert any("San Francisco" in reply.text for reply in second_response["replies"])
+            assert any("72" in reply.text for reply in second_response["replies"])


 class TestLlamaCppChatGeneratorChatML:

Comments (0)