diff --git a/Directory.Packages.props b/Directory.Packages.props
index 70eb82f3..4df4ea73 100644
--- a/Directory.Packages.props
+++ b/Directory.Packages.props
[Hunks at file lines 3, 13, and 53: a shared package-version property moves from 9.6.0 to 9.7.0, and two <PackageVersion /> entries are updated to match; the XML element markup did not survive, leaving only the values "true", "9.0.5", "10.0.0-preview.4.25258.110", and "all" from the surrounding context lines.]
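Given the ChatOptions.Instructions API adopted below, the bumped dependency is Microsoft.Extensions.AI, whose 9.7.0 release introduced that property. The file uses NuGet Central Package Management (the surviving "true" is almost certainly ManagePackageVersionsCentrally), so a bump like this is one shared property plus the <PackageVersion /> items that carry literal versions. A minimal sketch of the shape, with illustrative property names rather than the repo's actual ones:

```xml
<Project>
  <PropertyGroup>
    <ManagePackageVersionsCentrally>true</ManagePackageVersionsCentrally>
    <!-- Illustrative property name; the repo's actual name was lost above. -->
    <MicrosoftExtensionsAIVersion>9.7.0</MicrosoftExtensionsAIVersion>
  </PropertyGroup>
  <ItemGroup>
    <!-- Entries referencing the property pick up the bump automatically;
         entries with literal versions must be edited individually. -->
    <PackageVersion Include="Microsoft.Extensions.AI.Abstractions" Version="$(MicrosoftExtensionsAIVersion)" />
    <PackageVersion Include="Microsoft.Extensions.AI" Version="$(MicrosoftExtensionsAIVersion)" />
  </ItemGroup>
</Project>
```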
diff --git a/samples/AspNetCoreSseServer/Tools/SampleLlmTool.cs b/samples/AspNetCoreSseServer/Tools/SampleLlmTool.cs
index 4fbca594..247619db 100644
--- a/samples/AspNetCoreSseServer/Tools/SampleLlmTool.cs
+++ b/samples/AspNetCoreSseServer/Tools/SampleLlmTool.cs
@@ -17,19 +17,14 @@ public static async Task<string> SampleLLM(
[Description("Maximum number of tokens to generate")] int maxTokens,
CancellationToken cancellationToken)
{
- ChatMessage[] messages =
- [
- new(ChatRole.System, "You are a helpful test server."),
- new(ChatRole.User, prompt),
- ];
-
ChatOptions options = new()
{
+ Instructions = "You are a helpful test server.",
MaxOutputTokens = maxTokens,
Temperature = 0.7f,
};
- var samplingResponse = await thisServer.AsSamplingChatClient().GetResponseAsync(messages, options, cancellationToken);
+ var samplingResponse = await thisServer.AsSamplingChatClient().GetResponseAsync(prompt, options, cancellationToken);
return $"LLM sampling result: {samplingResponse}";
}
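The tool no longer builds an explicit message array: ChatOptions.Instructions (new in Microsoft.Extensions.AI 9.7.0) carries the system text, and the string overload of GetResponseAsync wraps the prompt in a single user message internally. A sketch of the before/after equivalence against any IChatClient, such as the one returned by thisServer.AsSamplingChatClient(); the method name and locals here are illustrative:

```csharp
using Microsoft.Extensions.AI;

static async Task<string> SampleBothWays(
    IChatClient client, string prompt, int maxTokens, CancellationToken ct)
{
    // Before: instructions travel as an explicit ChatRole.System message.
    ChatMessage[] messages =
    [
        new(ChatRole.System, "You are a helpful test server."),
        new(ChatRole.User, prompt),
    ];
    ChatResponse before = await client.GetResponseAsync(
        messages, new ChatOptions { MaxOutputTokens = maxTokens }, ct);

    // After: instructions ride on the options, and the string overload
    // turns the prompt into a single user message.
    ChatResponse after = await client.GetResponseAsync(
        prompt,
        new ChatOptions
        {
            Instructions = "You are a helpful test server.",
            MaxOutputTokens = maxTokens,
        },
        ct);

    return $"{before}\n{after}";
}
```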
diff --git a/src/ModelContextProtocol.Core/Server/McpServerExtensions.cs b/src/ModelContextProtocol.Core/Server/McpServerExtensions.cs
index d00c41a6..277ed737 100644
--- a/src/ModelContextProtocol.Core/Server/McpServerExtensions.cs
+++ b/src/ModelContextProtocol.Core/Server/McpServerExtensions.cs
@@ -64,6 +64,11 @@ public static async Task<ChatResponse> SampleAsync(
StringBuilder? systemPrompt = null;
+ if (options?.Instructions is { } instructions)
+ {
+ (systemPrompt ??= new()).Append(instructions);
+ }
+
List<SamplingMessage> samplingMessages = [];
foreach (var message in messages)
{
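MCP sampling requests carry the system prompt as a single string (CreateMessageRequestParams.SystemPrompt), so SampleAsync folds ChatOptions.Instructions into the same lazily created StringBuilder that collects ChatRole.System messages: instructions first, system messages appended after. A condensed sketch of that aggregation, assuming the loop above handles system messages this way (the conversion of user and assistant content into SamplingMessage instances is elided):

```csharp
using System.Text;
using Microsoft.Extensions.AI;

static string? BuildSystemPrompt(ChatOptions? options, IEnumerable<ChatMessage> messages)
{
    StringBuilder? systemPrompt = null;

    // `is { }` matches any non-null value; `??=` creates the builder
    // only when there is actually something to append.
    if (options?.Instructions is { } instructions)
    {
        (systemPrompt ??= new()).Append(instructions);
    }

    foreach (var message in messages)
    {
        if (message.Role == ChatRole.System)
        {
            (systemPrompt ??= new()).Append(message.Text);
        }
    }

    // Null when neither instructions nor system messages were supplied,
    // so the sampling request can omit its system prompt entirely.
    return systemPrompt?.ToString();
}
```

The upshot for callers: setting ChatOptions.Instructions and sending an explicit system message now land in the same place on the wire, so the sample above could drop its hand-built message array without changing behavior.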