Skip to content

Commit 2e6f27b

Browse files
authored
Adding SSE and Streamable HTTP transports (#2)
* adding SSE and Streamable HTTP transports and upgrading mcp to version 1.9.0 * Bump version to 0.4.0
1 parent 1ca446b commit 2e6f27b

File tree

7 files changed

+144
-24
lines changed

7 files changed

+144
-24
lines changed

README.md

Lines changed: 27 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -20,12 +20,13 @@ This implementation was adapted from the [Model Context Protocol quickstart guid
2020
## Features
2121

2222
- 🌐 **Multi-Server Support**: Connect to multiple MCP servers simultaneously
23+
- 🚀 **Multiple Transport Types**: Supports STDIO, SSE, and Streamable HTTP server connections
2324
- 🎨 **Rich Terminal Interface**: Interactive console UI
2425
- 🛠️ **Tool Management**: Enable/disable specific tools or entire servers during chat sessions
2526
- 🧠 **Context Management**: Control conversation memory with configurable retention settings
2627
- 🔄 **Cross-Language Support**: Seamlessly work with both Python and JavaScript MCP servers
2728
- 🔍 **Auto-Discovery**: Automatically find and use Claude's existing MCP server configurations
28-
- 🚀 **Dynamic Model Switching**: Switch between any installed Ollama model without restarting
29+
- 🎛️ **Dynamic Model Switching**: Switch between any installed Ollama model without restarting
2930
- 💾 **Configuration Persistence**: Save and load tool preferences between sessions
3031
- 📊 **Usage Analytics**: Track token consumption and conversation history metrics
3132
- 🔌 **Plug-and-Play**: Works immediately with standard MCP-compliant tool servers
@@ -75,6 +76,9 @@ If you don't provide any options, the client will use auto-discovery mode to fin
7576
- `--servers-json`: Path to a JSON file with server configurations.
7677
- `--auto-discovery`: Auto-discover servers from Claude's default config file (default behavior if no other options provided).
7778

79+
> Note: Claude's configuration file is typically located at:
80+
`~/Library/Application Support/Claude/claude_desktop_config.json`
81+
7882
#### Model Options:
7983
- `--model`: Ollama model to use (default: "qwen2.5:7b")
8084

@@ -157,35 +161,51 @@ The configuration saves:
157161

158162
## Server Configuration Format
159163

160-
The JSON configuration file should follow this format:
164+
The JSON configuration file supports STDIO, SSE, and Streamable HTTP server types:
165+
161166

162167
```json
163168
{
164169
"mcpServers": {
165-
"server-name": {
170+
"stdio-server": {
166171
"command": "command-to-run",
167172
"args": ["arg1", "arg2", "..."],
168173
"env": {
169174
"ENV_VAR1": "value1",
170175
"ENV_VAR2": "value2"
171176
},
172177
"disabled": false
178+
},
179+
"sse-server": {
180+
"type": "sse",
181+
"url": "http://localhost:8000/sse",
182+
"headers": {
183+
"Authorization": "Bearer your-token-here"
184+
},
185+
"disabled": false
186+
},
187+
"http-server": {
188+
"type": "streamable_http",
189+
"url": "http://localhost:8000/mcp",
190+
"headers": {
191+
"X-API-Key": "your-api-key-here"
192+
},
193+
"disabled": false
173194
}
174195
}
175196
}
176197
```
177198

178-
Claude's configuration file is typically located at:
179-
`~/Library/Application Support/Claude/claude_desktop_config.json`
199+
> Note: If you specify a URL without a type, the client will default to using Streamable HTTP transport.
180200
181201
## Compatible Models
182202

183203
The following Ollama models work well with tool use:
184204

185205
- qwen2.5
186-
- llama3.3
187-
- llama3.2
206+
- qwen3
188207
- llama3.1
208+
- llama3.2
189209
- mistral
190210

191211
For a complete list of Ollama models with tool use capabilities, visit the [official Ollama models page](https://ollama.com/search?c=tools).

cli-package/pyproject.toml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[project]
22
name = "ollmcp"
3-
version = "0.3.2"
3+
version = "0.4.0"
44
description = "CLI for MCP Client for Ollama - An easy-to-use command for interacting with Ollama through MCP"
55
readme = "README.md"
66
requires-python = ">=3.10"
@@ -9,7 +9,7 @@ authors = [
99
{name = "Jonathan Löwenstern"}
1010
]
1111
dependencies = [
12-
"mcp-client-for-ollama==0.3.2"
12+
"mcp-client-for-ollama==0.4.0"
1313
]
1414

1515
[project.scripts]

mcp_client_for_ollama/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
11
"""MCP Client for Ollama package."""
22

3-
__version__ = "0.3.2"
3+
__version__ = "0.4.0"

mcp_client_for_ollama/server/connector.py

Lines changed: 88 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -4,14 +4,17 @@
44
initialization, and communication.
55
"""
66

7+
import os
8+
import shutil
79
import os
810
import shutil
911
from contextlib import AsyncExitStack
1012
from typing import Dict, List, Any, Optional, Tuple
1113
from rich.console import Console
1214
from rich.panel import Panel
13-
from mcp import ClientSession, StdioServerParameters, Tool
14-
from mcp.client.stdio import stdio_client
15+
from mcp import ClientSession, Tool
16+
from mcp.client.stdio import stdio_client, StdioServerParameters
17+
from mcp.client.sse import sse_client
1518

1619
from .discovery import process_server_paths, parse_server_configs, auto_discover_servers
1720

@@ -107,20 +110,61 @@ async def _connect_to_server(self, server: Dict[str, Any]) -> bool:
107110
self.console.print(f"[cyan]Connecting to server: {server_name}[/cyan]")
108111

109112
try:
110-
# Create server parameters based on server type
111-
if server["type"] == "script":
113+
server_type = server.get("type", "script")
114+
session = None
115+
116+
# Connect based on server type
117+
if server_type == "sse":
118+
# Connect to SSE server
119+
url = self._get_url_from_server(server)
120+
if not url:
121+
self.console.print(f"[red]Error: SSE server {server_name} missing URL[/red]")
122+
return False
123+
124+
headers = self._get_headers_from_server(server)
125+
126+
# Connect using SSE transport
127+
sse_transport = await self.exit_stack.enter_async_context(sse_client(url, headers=headers))
128+
read_stream, write_stream = sse_transport
129+
session = await self.exit_stack.enter_async_context(ClientSession(read_stream, write_stream))
130+
131+
elif server_type == "streamable_http":
132+
# Connect to Streamable HTTP server
133+
url = self._get_url_from_server(server)
134+
if not url:
135+
self.console.print(f"[red]Error: HTTP server {server_name} missing URL[/red]")
136+
return False
137+
138+
headers = self._get_headers_from_server(server)
139+
140+
# In MCP 1.9.0, use SSE client for HTTP connections as well
141+
# since the dedicated HTTP client is no longer available
142+
self.console.print(f"[yellow]Note: Using SSE client for HTTP connection to {server_name}[/yellow]")
143+
transport = await self.exit_stack.enter_async_context(sse_client(url, headers=headers))
144+
read_stream, write_stream = transport
145+
session = await self.exit_stack.enter_async_context(ClientSession(read_stream, write_stream))
146+
147+
elif server_type == "script":
148+
# Connect to script-based server using STDIO
112149
server_params = self._create_script_params(server)
113150
if server_params is None:
114151
return False
152+
153+
stdio_transport = await self.exit_stack.enter_async_context(stdio_client(server_params))
154+
read_stream, write_stream = stdio_transport
155+
session = await self.exit_stack.enter_async_context(ClientSession(read_stream, write_stream))
156+
115157
else:
158+
# Connect to config-based server using STDIO
116159
server_params = self._create_config_params(server)
117160
if server_params is None:
118161
return False
162+
163+
stdio_transport = await self.exit_stack.enter_async_context(stdio_client(server_params))
164+
read_stream, write_stream = stdio_transport
165+
session = await self.exit_stack.enter_async_context(ClientSession(read_stream, write_stream))
119166

120-
# Connect to this server
121-
stdio_transport = await self.exit_stack.enter_async_context(stdio_client(server_params))
122-
stdio, write = stdio_transport
123-
session = await self.exit_stack.enter_async_context(ClientSession(stdio, write))
167+
# Initialize the session
124168
await session.initialize()
125169

126170
# Store the session
@@ -304,3 +348,39 @@ def disable_all_tools(self):
304348
"""Disable all available tools"""
305349
for tool_name in self.enabled_tools:
306350
self.enabled_tools[tool_name] = False
351+
352+
def _get_url_from_server(self, server: Dict[str, Any]) -> Optional[str]:
353+
"""Extract URL from server configuration.
354+
355+
Args:
356+
server: Server configuration dictionary
357+
358+
Returns:
359+
URL string or None if not found
360+
"""
361+
# Try to get URL directly from server dict
362+
url = server.get("url")
363+
364+
# If not there, try the config subdict
365+
if not url and "config" in server:
366+
url = server["config"].get("url")
367+
368+
return url
369+
370+
def _get_headers_from_server(self, server: Dict[str, Any]) -> Dict[str, str]:
371+
"""Extract headers from server configuration.
372+
373+
Args:
374+
server: Server configuration dictionary
375+
376+
Returns:
377+
Dictionary of headers
378+
"""
379+
# Try to get headers directly from server dict
380+
headers = server.get("headers", {})
381+
382+
# If not there, try the config subdict
383+
if not headers and "config" in server:
384+
headers = server["config"].get("headers", {})
385+
386+
return headers

mcp_client_for_ollama/server/discovery.py

Lines changed: 24 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -66,12 +66,32 @@ def parse_server_configs(config_path: str) -> List[Dict[str, Any]]:
6666
# Skip disabled servers
6767
if config.get('disabled', False):
6868
continue
69-
70-
all_servers.append({
71-
"type": "config",
69+
70+
# Determine server type
71+
server_type = "config" # Default type for STDIO servers
72+
73+
# Check for URL-based server types (sse or streamable_http)
74+
if "type" in config:
75+
# Type is explicitly specified in config
76+
server_type = config["type"]
77+
elif "url" in config:
78+
# URL exists but no type, default to streamable_http
79+
server_type = "streamable_http"
80+
81+
# Create server config object
82+
server = {
83+
"type": server_type,
7284
"name": name,
7385
"config": config
74-
})
86+
}
87+
88+
# For URL-based servers, add direct access to URL and headers
89+
if server_type in ["sse", "streamable_http"]:
90+
server["url"] = config.get("url")
91+
if "headers" in config:
92+
server["headers"] = config.get("headers")
93+
94+
all_servers.append(server)
7595

7696
return all_servers
7797

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[project]
22
name = "mcp-client-for-ollama"
3-
version = "0.3.2"
3+
version = "0.4.0"
44
description = "MCP Client for Ollama - A client for connecting to Model Context Protocol servers using Ollama"
55
readme = "README.md"
66
requires-python = ">=3.10"

uv.lock

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments
 (0)