2 changes: 1 addition & 1 deletion src/ModelContextProtocol.Core/Server/McpServer.Methods.cs
@@ -153,7 +153,7 @@ public async Task<ChatResponse> SampleAsync(
var result = await SampleAsync(new()
{
Messages = samplingMessages,
-MaxTokens = options?.MaxOutputTokens ?? int.MaxValue,
+MaxTokens = options?.MaxOutputTokens ?? ServerOptions.DefaultSamplingMaxTokens,
StopSequences = options?.StopSequences?.ToArray(),
SystemPrompt = systemPrompt?.ToString(),
Temperature = options?.Temperature,
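The fallback change above means a sampling call that leaves ChatOptions.MaxOutputTokens unset now forwards the configurable server default instead of int.MaxValue. A minimal sketch of that path, assuming an IMcpServer instance named `server` is in scope and that SampleAsync keeps the messages/options overload shown in the hunk:

```csharp
using Microsoft.Extensions.AI;

// Inside an async method with an IMcpServer `server` available (assumed for
// illustration). MaxOutputTokens is intentionally left unset, so the outgoing
// sampling request carries ServerOptions.DefaultSamplingMaxTokens (1000 unless
// overridden) rather than int.MaxValue.
var messages = new[]
{
    new ChatMessage(ChatRole.User, "Summarize the last build failure in two sentences.")
};

ChatResponse response = await server.SampleAsync(
    messages,
    new ChatOptions { Temperature = 0.2f },
    CancellationToken.None);
```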
17 changes: 17 additions & 0 deletions src/ModelContextProtocol.Core/Server/McpServerOptions.cs
@@ -152,4 +152,21 @@ public McpServerHandlers Handlers
/// </para>
/// </remarks>
public McpServerPrimitiveCollection<McpServerPrompt>? PromptCollection { get; set; }

+/// <summary>
+/// Gets or sets the default maximum number of tokens to use for sampling requests when not explicitly specified.
+/// </summary>
+/// <remarks>
+/// <para>
+/// This value is used as the <c>maxTokens</c> parameter in sampling requests to the client when the
+/// <see cref="Microsoft.Extensions.AI.ChatOptions.MaxOutputTokens"/> property is not set in the request options.
+/// The MCP protocol requires a <c>maxTokens</c> value for all sampling requests.
+/// </para>
+/// <para>
+/// The default value is 1000 tokens, which provides a reasonable balance between allowing meaningful
+/// responses and preventing excessive token usage. This value should be set based on your application's
+/// requirements and the capabilities of the LLM being used by the client.
+/// </para>
+/// </remarks>
+public int DefaultSamplingMaxTokens { get; set; } = 1000;
}
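A server author who needs a different cap can set the new option where the server is configured. The hosting calls below (Host.CreateApplicationBuilder, AddMcpServer, WithStdioServerTransport) reflect the SDK's usual dependency-injection setup but are assumptions here rather than part of this diff:

```csharp
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;

// Sketch: raise the sampling fallback for an application that expects longer
// completions. Only DefaultSamplingMaxTokens is introduced by this PR; the
// surrounding builder calls are the SDK's typical hosting pattern.
var builder = Host.CreateApplicationBuilder(args);

builder.Services
    .AddMcpServer(options =>
    {
        // Applied whenever ChatOptions.MaxOutputTokens is not supplied on a sampling call.
        options.DefaultSamplingMaxTokens = 4000;
    })
    .WithStdioServerTransport();

await builder.Build().RunAsync();
```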
@@ -129,6 +129,10 @@ public async Task SampleAsync_Messages_Forwards_To_McpServer_SendRequestAsync()
.Setup(s => s.ClientCapabilities)
.Returns(new ClientCapabilities() { Sampling = new() });

+mockServer
+    .Setup(s => s.ServerOptions)
+    .Returns(new McpServerOptions());

mockServer
.Setup(s => s.SendRequestAsync(It.IsAny<JsonRpcRequest>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(new JsonRpcResponse
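Because SampleAsync now reads ServerOptions, a mock that never sets it up would fail on the new code path, which is what the added setup handles. A test that instead wants to assert a non-default cap in the outgoing request could return a configured options instance (a sketch reusing this test's mockServer):

```csharp
// Sketch: report a custom default from the mocked server so the test can
// assert the value that ends up in the outgoing sampling request's maxTokens.
mockServer
    .Setup(s => s.ServerOptions)
    .Returns(new McpServerOptions { DefaultSamplingMaxTokens = 250 });
```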