.Net: MCP prompt sample #11342

Merged 14 commits on Apr 3, 2025
Changes from 8 commits
MCPClient/PromptResultExtensions.cs
@@ -0,0 +1,45 @@
// Copyright (c) Microsoft. All rights reserved.

using System;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using ModelContextProtocol.Protocol.Types;

namespace MCPClient;

/// <summary>
/// Extension methods for <see cref="GetPromptResult"/>.
/// </summary>
internal static class PromptResultExtensions
{
/// <summary>
/// Converts a <see cref="GetPromptResult"/> to a <see cref="ChatHistory"/>.
/// </summary>
/// <param name="result">The prompt result to convert.</param>
/// <returns>The corresponding <see cref="ChatHistory"/>.</returns>
public static ChatHistory ToChatHistory(this GetPromptResult result)
{
ChatHistory chatHistory = [];

foreach (PromptMessage message in result.Messages)
{
ChatMessageContentItemCollection items = [];

switch (message.Content.Type)
{
case "text":
items.Add(new TextContent(message.Content.Text));
break;
case "image":
items.Add(new ImageContent(Convert.FromBase64String(message.Content.Data!), message.Content.MimeType));
break;
default:
throw new InvalidOperationException($"Unexpected message content type '{message.Content.Type}'");
}

chatHistory.Add(new ChatMessageContent(message.Role.ToAuthorRole(), items));
}

return chatHistory;
}
}
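
A minimal usage sketch of the ToChatHistory extension, with a GetPromptResult built by hand purely for illustration (in the sample the result comes from IMcpClient.GetPromptAsync):

// Illustration only: in the sample the GetPromptResult is returned by IMcpClient.GetPromptAsync.
GetPromptResult promptResult = new()
{
    Messages =
    [
        new PromptMessage
        {
            Role = Role.User,
            Content = new Content { Type = "text", Text = "What is the likely color of the sky in Boston today?" }
        }
    ]
};

// Each MCP prompt message becomes a ChatMessageContent with the mapped author role.
ChatHistory chatHistory = promptResult.ToChatHistory();
Console.WriteLine(chatHistory[0].Role);    // user
Console.WriteLine(chatHistory[0].Content); // What is the likely color of the sky in Boston today?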
MCPClient/RoleExtensions.cs
@@ -0,0 +1,28 @@
// Copyright (c) Microsoft. All rights reserved.

using System;
using Microsoft.SemanticKernel.ChatCompletion;
using ModelContextProtocol.Protocol.Types;

namespace MCPClient;

/// <summary>
/// Extension methods for the <see cref="Role"/> enum.
/// </summary>
internal static class RoleExtensions
{
/// <summary>
/// Converts a <see cref="Role"/> to an <see cref="AuthorRole"/>.
/// </summary>
/// <param name="role">The MCP role to convert.</param>
/// <returns>The corresponding <see cref="AuthorRole"/>.</returns>
public static AuthorRole ToAuthorRole(this Role role)
{
return role switch
{
Role.User => AuthorRole.User,
Role.Assistant => AuthorRole.Assistant,
_ => throw new InvalidOperationException($"Unexpected role '{role}'")
};
}
}
MCPClient/Program.cs
@@ -7,17 +7,41 @@
using System.Threading.Tasks;
using Microsoft.Extensions.Configuration;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using ModelContextProtocol;
using ModelContextProtocol.Client;
using ModelContextProtocol.Protocol.Transport;
using ModelContextProtocol.Protocol.Types;

namespace MCPClient;

internal sealed class Program
{
public static async Task Main(string[] args)
{
// Use the MCP tools with the Semantic Kernel
await UseMCPToolsWithSKAsync();

// Use the MCP tools and MCP prompt with the Semantic Kernel
await UseMCPToolsAndPromptWithSKAsync();
}

/// <summary>
/// Demonstrates how to use the MCP tools with the Semantic Kernel.
/// The code in this method:
/// 1. Creates an MCP client.
/// 2. Retrieves the list of tools provided by the MCP server.
/// 3. Creates a kernel and registers the MCP tools as Kernel functions.
/// 4. Sends the prompt to the AI model together with the MCP tools represented as Kernel functions.
/// 5. The AI model calls the DateTimeUtils-GetCurrentDateTimeInUtc function to get the current date and time in UTC, required as an argument for the next function.
/// 6. The AI model calls the WeatherUtils-GetWeatherForCity function with the current date and time and the `Boston` argument extracted from the prompt to get the weather information.
/// 7. Having received the weather information from the function call, the AI model returns the answer to the prompt.
/// </summary>
private static async Task UseMCPToolsWithSKAsync()
{
Console.WriteLine($"Running the {nameof(UseMCPToolsWithSKAsync)} sample.");

// Create an MCP client
await using IMcpClient mcpClient = await CreateMcpClientAsync();

@@ -43,10 +67,67 @@ public static async Task Main(string[] args)
FunctionResult result = await kernel.InvokePromptAsync(prompt, new(executionSettings));

Console.WriteLine(result);
Console.WriteLine();

// The expected output is: The likely color of the sky in Boston today is gray, as it is currently rainy.
}

/// <summary>
/// Demonstrates how to use the MCP tools and MCP prompt with the Semantic Kernel.
/// The code in this method:
/// 1. Creates an MCP client.
/// 2. Retrieves the list of tools provided by the MCP server.
/// 3. Retrieves the list of prompts provided by the MCP server.
/// 4. Creates a kernel and registers the MCP tools as Kernel functions.
/// 5. Requests the `GetCurrentWeatherForCity` prompt from the MCP server.
/// 6. The MCP server renders the prompt, using `Boston` as the value for the `city` parameter and appending the result of the server-side `DateTimeUtils-GetCurrentDateTimeInUtc` invocation as part of prompt rendering.
/// 7. Converts the MCP server prompt (a list of messages, each represented by content and a role) into a chat history.
/// 8. Sends the chat history to the AI model together with the MCP tools represented as Kernel functions.
/// 9. The AI model calls the WeatherUtils-GetWeatherForCity function with the current date and time and the `Boston` argument extracted from the prompt to get the weather information.
/// 10. Having received the weather information from the function call, the AI model returns the answer to the prompt.
/// </summary>
private static async Task UseMCPToolsAndPromptWithSKAsync()
{
Console.WriteLine($"Running the {nameof(UseMCPToolsAndPromptWithSKAsync)} sample.");

// Create an MCP client
await using IMcpClient mcpClient = await CreateMcpClientAsync();

// Retrieve and display the list of tools provided by the MCP server
IList<McpClientTool> tools = await mcpClient.ListToolsAsync();
DisplayTools(tools);

// Retrieve and display the list of prompts provided by the MCP server
IList<McpClientPrompt> prompts = await mcpClient.ListPromptsAsync();
DisplayPrompts(prompts);

// Create a kernel and register the MCP tools
Kernel kernel = CreateKernelWithChatCompletionService();
kernel.Plugins.AddFromFunctions("Tools", tools.Select(aiFunction => aiFunction.AsKernelFunction()));

// Enable automatic function calling
OpenAIPromptExecutionSettings executionSettings = new()
{
Temperature = 0,
FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(options: new() { RetainArgumentTypes = true })
};

// Retrieve the `GetCurrentWeatherForCity` prompt from the MCP server and convert it to a chat history
GetPromptResult promptResult = await mcpClient.GetPromptAsync("GetCurrentWeatherForCity", new Dictionary<string, object?>() { ["city"] = "Boston" });

ChatHistory chatHistory = promptResult.ToChatHistory();

// Execute the prompt, represented as a chat history, using the MCP tools
IChatCompletionService chatCompletion = kernel.GetRequiredService<IChatCompletionService>();

ChatMessageContent result = await chatCompletion.GetChatMessageContentAsync(chatHistory, executionSettings, kernel);

Console.WriteLine(result);
Console.WriteLine();

// The expected output is: The weather in Boston as of 2025-04-02 16:39:40 is 61°F and rainy.
}

/// <summary>
/// Creates an instance of <see cref="Kernel"/> with the OpenAI chat completion service registered.
/// </summary>
@@ -129,5 +210,20 @@ private static void DisplayTools(IList<McpClientTool> tools)
{
Console.WriteLine($"- {tool.Name}: {tool.Description}");
}
Console.WriteLine();
}

/// <summary>
/// Displays the list of available MCP prompts.
/// </summary>
/// <param name="prompts">The list of the prompts to display.</param>
private static void DisplayPrompts(IList<McpClientPrompt> prompts)
{
Console.WriteLine("Available MCP prompts:");
foreach (var prompt in prompts)
{
Console.WriteLine($"- {prompt.Name}: {prompt.Description}");
}
Console.WriteLine();
}
}
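
The CreateKernelWithChatCompletionService and CreateMcpClientAsync helpers are collapsed in this diff. A rough sketch of what the kernel helper could look like, assuming the model id and API key come from configuration (the configuration keys below are illustrative, not necessarily the sample's actual settings):

// Hypothetical sketch of the collapsed helper; configuration keys are assumptions.
private static Kernel CreateKernelWithChatCompletionService()
{
    IConfigurationRoot config = new ConfigurationBuilder()
        .AddEnvironmentVariables()
        .Build();

    string modelId = config["OpenAI:ChatModelId"] ?? "gpt-4o-mini";
    string apiKey = config["OpenAI:ApiKey"]
        ?? throw new InvalidOperationException("The OpenAI:ApiKey setting is required.");

    // Register the OpenAI chat completion service with the kernel.
    IKernelBuilder kernelBuilder = Kernel.CreateBuilder();
    kernelBuilder.AddOpenAIChatCompletion(modelId: modelId, apiKey: apiKey);

    return kernelBuilder.Build();
}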
MCPServer.csproj
@@ -7,12 +7,21 @@
<NoWarn>$(NoWarn);VSTHRD111;CA2007;SKEXP0001</NoWarn>
</PropertyGroup>

<ItemGroup>
<Content Remove="Prompts\getCurrentWeatherForCity.json" />
</ItemGroup>

<ItemGroup>
<EmbeddedResource Include="Prompts\getCurrentWeatherForCity.json" />
</ItemGroup>

<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Hosting" />
<PackageReference Include="ModelContextProtocol" />
</ItemGroup>

<ItemGroup>
<ProjectReference Include="..\..\..\..\src\Extensions\PromptTemplates.Handlebars\PromptTemplates.Handlebars.csproj" />
<ProjectReference Include="..\..\..\..\src\SemanticKernel.Abstractions\SemanticKernel.Abstractions.csproj" />
<ProjectReference Include="..\..\..\..\src\SemanticKernel.Core\SemanticKernel.Core.csproj" />
</ItemGroup>
MCPServer/Program.cs
@@ -1,6 +1,7 @@
// Copyright (c) Microsoft. All rights reserved.

using MCPServer;
using MCPServer.Prompts;
using MCPServer.Tools;
using Microsoft.SemanticKernel;

@@ -12,10 +13,16 @@
// Build the kernel
Kernel kernel = kernelBuilder.Build();

// Register prompts
PromptRegistry.RegisterPrompt(PromptDefinition.Create(EmbeddedResource.ReadAsString("getCurrentWeatherForCity.json"), kernel));

var builder = Host.CreateEmptyApplicationBuilder(settings: null);
builder.Services
.AddMcpServer()
.WithStdioServerTransport()
// Add all functions from the kernel plugins to the MCP server as tools
.WithTools(kernel.Plugins);
.WithTools(kernel.Plugins)
// Register prompt handlers
.WithListPromptsHandler(PromptRegistry.GetHandlerForListPromptRequestsAsync)
.WithGetPromptHandler(PromptRegistry.GetHandlerForGetPromptRequestsAsync);
await builder.Build().RunAsync();
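
The PromptRegistry referenced above is not part of this hunk. A plausible sketch of it, keyed by prompt name and dispatching the MCP list/get prompt requests to the registered PromptDefinition handlers (names and shapes here are assumptions, not necessarily the file added by this PR):

// Hypothetical sketch of the PromptRegistry used in Program.cs; the actual file may differ.
using ModelContextProtocol.Protocol.Types;
using ModelContextProtocol.Server;

namespace MCPServer.Prompts;

internal static class PromptRegistry
{
    private static readonly Dictionary<string, PromptDefinition> s_definitions = new();

    /// <summary>Registers a prompt definition so it can be listed and served.</summary>
    public static void RegisterPrompt(PromptDefinition definition)
    {
        s_definitions.Add(definition.Prompt.Name, definition);
    }

    /// <summary>Handles ListPrompts requests by returning all registered prompts.</summary>
    public static Task<ListPromptsResult> GetHandlerForListPromptRequestsAsync(
        RequestContext<ListPromptsRequestParams> context, CancellationToken cancellationToken)
    {
        return Task.FromResult(new ListPromptsResult
        {
            Prompts = s_definitions.Values.Select(definition => definition.Prompt).ToList()
        });
    }

    /// <summary>Handles GetPrompt requests by delegating to the registered prompt's handler.</summary>
    public static Task<GetPromptResult> GetHandlerForGetPromptRequestsAsync(
        RequestContext<GetPromptRequestParams> context, CancellationToken cancellationToken)
    {
        if (context.Params?.Name is { } name && s_definitions.TryGetValue(name, out PromptDefinition? definition))
        {
            return definition.Handler(context, cancellationToken);
        }

        throw new ArgumentException($"No handler registered for the prompt '{context.Params?.Name}'.");
    }
}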
MCPServer/Prompts/EmbeddedResource.cs
@@ -0,0 +1,32 @@
// Copyright (c) Microsoft. All rights reserved.

using System.Reflection;

namespace MCPServer.Prompts;

/// <summary>
/// Reads embedded resources.
/// </summary>
public static class EmbeddedResource
{
private static readonly string? s_namespace = typeof(EmbeddedResource).Namespace;

internal static string ReadAsString(string fileName)
{
// Get the current assembly. Note: this class is in the same assembly where the embedded resources are stored.
Assembly assembly =
typeof(EmbeddedResource).GetTypeInfo().Assembly ??
throw new InvalidOperationException($"[{s_namespace}] {fileName} assembly not found");

// Resources are mapped like types, using the namespace and appending "." (dot) and the file name
string resourceName = $"{s_namespace}." + fileName;

Stream stream =
assembly.GetManifestResourceStream(resourceName) ??
throw new InvalidOperationException($"{resourceName} resource not found");

// Return the resource content, in text format.
using StreamReader reader = new(stream);
return reader.ReadToEnd();
}
}
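
For reference, the call site in the server's Program.cs loads the prompt template like this; the resolved manifest name below assumes the default root namespace MCPServer:

// From Program.cs: load the embedded prompt template as a string.
string jsonPrompt = EmbeddedResource.ReadAsString("getCurrentWeatherForCity.json");
// With root namespace "MCPServer" (an assumption), this resolves the manifest resource
// "MCPServer.Prompts.getCurrentWeatherForCity.json" embedded via the csproj above.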
MCPServer/Prompts/PromptDefinition.cs
@@ -0,0 +1,111 @@
// Copyright (c) Microsoft. All rights reserved.

using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.PromptTemplates.Handlebars;
using ModelContextProtocol.Protocol.Types;
using ModelContextProtocol.Server;

namespace MCPServer.Prompts;

/// <summary>
/// Represents a prompt definition.
/// </summary>
internal sealed class PromptDefinition
{
/// <summary>
/// Gets or sets the prompt.
/// </summary>
public required Prompt Prompt { get; init; }

/// <summary>
/// Gets or sets the handler for the prompt.
/// </summary>
public required Func<RequestContext<GetPromptRequestParams>, CancellationToken, Task<GetPromptResult>> Handler { get; init; }

/// <summary>
/// Creates a prompt definition from the given JSON prompt template.
/// </summary>
/// <param name="jsonPrompt">The JSON prompt template.</param>
/// <param name="kernel">An instance of the kernel to render the prompt.</param>
/// <returns>The prompt definition.</returns>
public static PromptDefinition Create(string jsonPrompt, Kernel kernel)
{
PromptTemplateConfig promptTemplateConfig = PromptTemplateConfig.FromJson(jsonPrompt);

return new PromptDefinition()
{
Prompt = GetPrompt(promptTemplateConfig),
Handler = (context, cancellationToken) =>
{
IPromptTemplate promptTemplate = new HandlebarsPromptTemplateFactory().Create(promptTemplateConfig);

return GetPromptHandlerAsync(context, promptTemplateConfig, promptTemplate, kernel, cancellationToken);
}
};
}

/// <summary>
/// Creates an MCP prompt from SK prompt template.
/// </summary>
/// <param name="promptTemplateConfig">The prompt template configuration.</param>
/// <returns>The MCP prompt.</returns>
private static Prompt GetPrompt(PromptTemplateConfig promptTemplateConfig)
{
// Create the MCP prompt arguments
List<PromptArgument>? arguments = null;

foreach (var inputVariable in promptTemplateConfig.InputVariables)
{
(arguments ??= []).Add(new()
{
Name = inputVariable.Name,
Description = inputVariable.Description,
Required = inputVariable.IsRequired
});
}

// Create the MCP prompt
return new Prompt
{
Name = promptTemplateConfig.Name!,
Description = promptTemplateConfig.Description,
Arguments = arguments
};
}

/// <summary>
/// Handles the prompt request by rendering the prompt.
/// </summary>
/// <param name="context">The prompt request context.</param>
/// <param name="promptTemplateConfig">The prompt template configuration.</param>
/// <param name="promptTemplate">The prompt template.</param>
/// <param name="kernel">The kernel to render the prompt.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>The prompt.</returns>
private static async Task<GetPromptResult> GetPromptHandlerAsync(RequestContext<GetPromptRequestParams> context, PromptTemplateConfig promptTemplateConfig, IPromptTemplate promptTemplate, Kernel kernel, CancellationToken cancellationToken)
{
// Render the prompt
string renderedPrompt = await promptTemplate.RenderAsync(
kernel: kernel,
arguments: context.Params?.Arguments is { } args ? new KernelArguments(args!) : null,
cancellationToken: cancellationToken);

// Create prompt result
return new GetPromptResult()
{
Description = promptTemplateConfig.Description,
Messages =
[
new PromptMessage()
{
Content = new Content()
{
Type = "text",
Text = renderedPrompt
},
Role = Role.User
}
]
};
}
}
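
The embedded Prompts/getCurrentWeatherForCity.json template itself is not shown in this hunk. An illustrative guess at its general shape, passed inline through PromptDefinition.Create (all field values below are assumptions):

// Illustration only: an inline JSON template in the shape PromptTemplateConfig.FromJson expects.
const string jsonPrompt = """
    {
      "name": "GetCurrentWeatherForCity",
      "description": "Provides the current weather for the specified city.",
      "template": "What is the weather in {{city}} as of {{DateTimeUtils-GetCurrentDateTimeInUtc}}?",
      "template_format": "handlebars",
      "input_variables": [
        { "name": "city", "description": "The city to get the weather for.", "is_required": true }
      ]
    }
    """;

// Registering the definition makes the prompt available through the list/get handlers wired up in Program.cs.
PromptRegistry.RegisterPrompt(PromptDefinition.Create(jsonPrompt, kernel));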