
Commit 6445941

fix: ollama client doesn't accept non-string tool response (#16)
This commit fixes #15 by stringifying non-string tool responses, the same workaround the OpenAI client applies.
Parent: 622b7c9 · Commit: 6445941
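
For context, here is a minimal sketch (not code from this commit) of the kind of tool response that triggers the issue: LangChain tool messages may carry an array of content blocks instead of a plain string, and the Ollama client does not accept that array form, so it has to be serialized before being sent back to the model.

import { ToolMessage } from "@langchain/core/messages";

// Hypothetical MCP tool result whose content is an array of blocks, not a string.
const result = new ToolMessage({
  tool_call_id: "call_1", // illustrative id
  content: [{ type: "text", text: "Navigated to https://github.com/autifyhq/aethr" }],
});

// The workaround applied in this commit: keep strings as-is, JSON-stringify anything else.
const safeContent =
  typeof result.content === "string"
    ? result.content
    : JSON.stringify(result.content);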

3 files changed: 16 additions, 6 deletions

examples/visit.test.md

Lines changed: 2 additions & 2 deletions
@@ -1,8 +1,8 @@
 # Test visit feature
 
 Follow the steps below.
-Just call browser_navigate and finish.
 
 ## Steps
 
-1. Visit https://the-internet.herokuapp.com/login
+1. Visit https://github.com/autifyhq/aethr
+2. Assert 'Aethr /ˈiːθər/' exists

src/cli/run-command.ts

Lines changed: 1 addition & 1 deletion
@@ -44,7 +44,7 @@ export async function runCommand(
   const fileContent = await loadTestFile(filePath);
   const input = { messages: [{ role: "user", content: fileContent }] };
 
-  const mcpTools = await createMcpTools(profile.mcpServers, {
+  const mcpTools = await createMcpTools(profile.mcpServers, model, {
     thinkTool,
     reasoning,
     tempDir,
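
The call-site change only threads a value that is already in scope here down to the MCP layer; the fix itself relies on Model exposing the provider name, which the patched tool invocation checks. A rough sketch of the assumed shape (the real type lives in src/llm/model.ts and is not part of this diff):

// Hypothetical shape for illustration only; see src/llm/model.ts for the actual definition.
type Model = {
  provider: string; // e.g. "ollama", "openai"
  // ...plus the provider-specific chat model instance and settings
};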

src/mcp/tools.ts

Lines changed: 13 additions & 3 deletions
@@ -5,6 +5,7 @@ import { Client } from "@modelcontextprotocol/sdk/client/index.js";
 import type { JsonSchema7ObjectType } from "zod-to-json-schema";
 
 import { thinkTool } from "../agent/agent.js";
+import { Model } from "../llm/model.js";
 import {
   logMcpEnd,
   logMcpEndError,
@@ -16,6 +17,7 @@ import { McpServersConfig } from "./config.js";
 
 export const createMcpTools = async (
   config: McpServersConfig,
+  model: Model,
   options: { thinkTool: boolean; reasoning: boolean; tempDir: string },
 ): Promise<{
   tools: StructuredToolInterface[];
@@ -27,7 +29,7 @@ export const createMcpTools = async (
       Object.entries(config).map(async ([name, serverConfig]) => {
         const client = await createMcpClient(name, serverConfig, options);
         clients.push(client);
-        return await createTools(name, client, options);
+        return await createTools(name, client, model, options);
       }),
     )
   ).flat();
@@ -46,6 +48,7 @@ export const createMcpTools = async (
 const createTools = async (
   name: string,
   client: Client,
+  model: Model,
   options: { reasoning?: boolean } = {},
 ): Promise<StructuredToolInterface[]> => {
   const mcpTools = await loadMcpTools(name, client, {
@@ -54,20 +57,27 @@ const createTools = async (
     additionalToolNamePrefix: "",
   });
   return mcpTools.map((tool) => {
-    patchToolInvoke(tool);
+    patchToolInvoke(tool, model);
     if (options.reasoning) patchToolSchemaReasoning(tool);
     return tool;
   });
 };
 
 // Patch tool.invoke for better logging.
-const patchToolInvoke = (tool: StructuredToolInterface) => {
+const patchToolInvoke = (tool: StructuredToolInterface, model: Model) => {
   const originalInvoke = tool.invoke.bind(tool);
   tool.invoke = async (...args) => {
     const start = logMcpStart(tool, args);
     try {
       const result = (await originalInvoke(...args)) as ToolMessage;
       logMcpEnd(tool, start, result);
+      // Workaround for the models that reject non-string content.
+      // The same workaround is used by OpenAI client.
+      if (model.provider === "ollama")
+        result.content =
+          typeof result.content === "string"
+            ? result.content
+            : JSON.stringify(result.content);
       return result;
     } catch (error) {
       logMcpEndError(tool, start, error);
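
To show the effect of the patched invoke, a small self-contained sketch of the same branching (simplified names, not code from the repository):

// Simplified version of the check added to patchToolInvoke.
const normalizeToolContent = (provider: string, content: unknown): unknown =>
  provider === "ollama" && typeof content !== "string"
    ? JSON.stringify(content)
    : content;

normalizeToolContent("ollama", [{ type: "text", text: "Aethr /ˈiːθər/" }]);
// -> '[{"type":"text","text":"Aethr /ˈiːθər/"}]'

normalizeToolContent("openai", [{ type: "text", text: "Aethr /ˈiːθər/" }]);
// -> array returned unchanged; the OpenAI client already stringifies internally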
