
Commit 79a29c0

.Net: Add JsonConverters to handle bool values when converting from PromptExecutionSettings (#11336)
### Motivation and Context

Closes #11304

### Contribution Checklist

- [ ] The code builds clean without any errors or warnings
- [ ] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations
- [ ] All unit tests pass, and I have added new tests where possible
- [ ] I didn't break anyone 😄
Parent: c6ff272

16 files changed: +441 additions, −4 deletions

dotnet/SK-dotnet.sln

Lines changed: 2 additions & 0 deletions
```diff
@@ -224,8 +224,10 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Functions.OpenApi.Extension
 EndProject
 Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Text", "Text", "{EB2C141A-AE5F-4080-8790-13EB16323CEF}"
 	ProjectSection(SolutionItems) = preProject
+		src\InternalUtilities\src\Text\BoolJsonConverter.cs = src\InternalUtilities\src\Text\BoolJsonConverter.cs
 		src\InternalUtilities\src\Text\ExceptionJsonConverter.cs = src\InternalUtilities\src\Text\ExceptionJsonConverter.cs
 		src\InternalUtilities\src\Text\JsonOptionsCache.cs = src\InternalUtilities\src\Text\JsonOptionsCache.cs
+		src\InternalUtilities\src\Text\OptionalBoolJsonConverter.cs = src\InternalUtilities\src\Text\OptionalBoolJsonConverter.cs
 		src\InternalUtilities\src\Text\SseData.cs = src\InternalUtilities\src\Text\SseData.cs
 		src\InternalUtilities\src\Text\SseJsonParser.cs = src\InternalUtilities\src\Text\SseJsonParser.cs
 		src\InternalUtilities\src\Text\SseLine.cs = src\InternalUtilities\src\Text\SseLine.cs
```
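The two converter sources registered above (`BoolJsonConverter.cs`, `OptionalBoolJsonConverter.cs`) are not rendered in this commit view. A minimal sketch of what they plausibly contain, inferred from the attribute usages and the unit tests further down (JSON booleans pass through, the strings `"true"`/`"false"` are coerced, numbers and other strings are rejected), might be:

```csharp
using System;
using System.Text.Json;
using System.Text.Json.Serialization;

// Sketch only — not the committed implementation. Exception type and
// messages are assumptions; a converter can only fail via JsonException.
internal sealed class BoolJsonConverter : JsonConverter<bool>
{
    public override bool Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
    {
        // Native JSON booleans pass through unchanged.
        if (reader.TokenType is JsonTokenType.True or JsonTokenType.False)
        {
            return reader.GetBoolean();
        }

        // The strings "true"/"false" (case-insensitive) are coerced to bool.
        if (reader.TokenType == JsonTokenType.String && bool.TryParse(reader.GetString(), out bool value))
        {
            return value;
        }

        // Numbers, empty strings, and arbitrary text are rejected.
        throw new JsonException($"Cannot convert {reader.TokenType} token to bool.");
    }

    public override void Write(Utf8JsonWriter writer, bool value, JsonSerializerOptions options)
        => writer.WriteBooleanValue(value);
}

// Nullable variant used on bool? properties; identical to the above except
// that JSON null maps to a null value.
internal sealed class OptionalBoolJsonConverter : JsonConverter<bool?>
{
    public override bool? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
    {
        if (reader.TokenType == JsonTokenType.Null)
        {
            return null;
        }

        if (reader.TokenType is JsonTokenType.True or JsonTokenType.False)
        {
            return reader.GetBoolean();
        }

        if (reader.TokenType == JsonTokenType.String && bool.TryParse(reader.GetString(), out bool value))
        {
            return value;
        }

        throw new JsonException($"Cannot convert {reader.TokenType} token to bool?.");
    }

    public override void Write(Utf8JsonWriter writer, bool? value, JsonSerializerOptions options)
    {
        if (value.HasValue)
        {
            writer.WriteBooleanValue(value.Value);
        }
        else
        {
            writer.WriteNullValue();
        }
    }
}
```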

dotnet/src/Connectors/Connectors.Amazon/Bedrock/Settings/AmazonCommandExecutionSettings.cs

Lines changed: 1 addition & 0 deletions
```diff
@@ -112,6 +112,7 @@ public string? ReturnLikelihoods
     /// (Required to support streaming) Specify true to return the response piece-by-piece in real-time and false to return the complete response after the process finishes.
     /// </summary>
     [JsonPropertyName("stream")]
+    [JsonConverter(typeof(OptionalBoolJsonConverter))]
     public bool? Stream
     {
         get => this._stream;
```
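A hedged illustration of what the attribute buys at deserialization time (the `Microsoft.SemanticKernel.Connectors.Amazon` namespace is assumed from the file path, and the string coercion behavior is assumed from the tests further down):

```csharp
using System;
using System.Text.Json;
using Microsoft.SemanticKernel.Connectors.Amazon; // namespace assumed from the file path

// With OptionalBoolJsonConverter applied, a string-typed "stream" value now
// binds to the bool? property instead of failing with a JsonException.
AmazonCommandExecutionSettings? settings =
    JsonSerializer.Deserialize<AmazonCommandExecutionSettings>("""{ "stream": "true" }""");
Console.WriteLine(settings?.Stream); // True
```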

dotnet/src/Connectors/Connectors.Amazon/Bedrock/Settings/AmazonCommandRExecutionSettings.cs

Lines changed: 3 additions & 0 deletions
```diff
@@ -63,6 +63,7 @@ public List<CohereCommandRTools.Document>? Documents
     /// Defaults to false. When true, the response will only contain a list of generated search queries, but no search will take place, and no reply from the model to the user's message will be generated.
     /// </summary>
     [JsonPropertyName("search_queries_only")]
+    [JsonConverter(typeof(OptionalBoolJsonConverter))]
     public bool? SearchQueriesOnly
     {
         get => this._searchQueriesOnly;
@@ -203,6 +204,7 @@ public int? Seed
     /// Specify true to return the full prompt that was sent to the model. The default value is false. In the response, the prompt in the prompt field.
     /// </summary>
     [JsonPropertyName("return_prompt")]
+    [JsonConverter(typeof(OptionalBoolJsonConverter))]
     public bool? ReturnPrompt
     {
         get => this._returnPrompt;
@@ -259,6 +261,7 @@ public List<string>? StopSequences
     /// Specify true, to send the user's message to the model without any preprocessing, otherwise false.
     /// </summary>
    [JsonPropertyName("raw_prompting")]
+    [JsonConverter(typeof(OptionalBoolJsonConverter))]
     public bool? RawPrompting
     {
         get => this._rawPrompting;
```

dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs

Lines changed: 115 additions & 0 deletions
```diff
@@ -2,6 +2,7 @@
 
 using System;
 using System.Collections.Generic;
+using System.Text.Json;
 using Azure.AI.OpenAI.Chat;
 using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
@@ -64,6 +65,119 @@ public void ItRestoresOriginalFunctionChoiceBehavior()
         Assert.Equal(functionChoiceBehavior, result.FunctionChoiceBehavior);
     }
 
+    [Fact]
+    public void ItCanCreateAzureOpenAIPromptExecutionSettingsFromPromptExecutionSettings()
+    {
+        // Arrange
+        PromptExecutionSettings originalSettings = new()
+        {
+            ExtensionData = new Dictionary<string, object>()
+            {
+                { "temperature", 0.7 },
+                { "top_p", 0.7 },
+                { "frequency_penalty", 0.7 },
+                { "presence_penalty", 0.7 },
+                { "stop_sequences", new string[] { "foo", "bar" } },
+                { "chat_system_prompt", "chat system prompt" },
+                { "token_selection_biases", new Dictionary<int, int>() { { 1, 2 }, { 3, 4 } } },
+                { "max_tokens", 128 },
+                { "logprobs", true },
+                { "seed", 123456 },
+                { "top_logprobs", 5 },
+            }
+        };
+
+        // Act
+        AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings);
+
+        // Assert
+        AssertExecutionSettings(executionSettings);
+    }
+
+    [Fact]
+    public void ItCanCreateAzureOpenAIPromptExecutionSettingsFromJson()
+    {
+        // Arrange
+        var json =
+            """
+            {
+                "temperature": 0.7,
+                "top_p": 0.7,
+                "frequency_penalty": 0.7,
+                "presence_penalty": 0.7,
+                "stop_sequences": [ "foo", "bar" ],
+                "chat_system_prompt": "chat system prompt",
+                "token_selection_biases":
+                {
+                    "1": "2",
+                    "3": "4"
+                },
+                "max_tokens": 128,
+                "logprobs": true,
+                "seed": 123456,
+                "top_logprobs": 5
+            }
+            """;
+
+        // Act
+        var originalSettings = JsonSerializer.Deserialize<PromptExecutionSettings>(json);
+        OpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings);
+
+        // Assert
+        AssertExecutionSettings(executionSettings);
+    }
+
+    [Fact]
+    public void ItCanCreateAzureOpenAIPromptExecutionSettingsFromPromptExecutionSettingsWithIncorrectTypes()
+    {
+        // Arrange
+        PromptExecutionSettings originalSettings = new()
+        {
+            ExtensionData = new Dictionary<string, object>()
+            {
+                { "temperature", "0.7" },
+                { "top_p", "0.7" },
+                { "frequency_penalty", "0.7" },
+                { "presence_penalty", "0.7" },
+                { "stop_sequences", new List<object> { "foo", "bar" } },
+                { "chat_system_prompt", "chat system prompt" },
+                { "token_selection_biases", new Dictionary<string, object>() { { "1", "2" }, { "3", "4" } } },
+                { "max_tokens", "128" },
+                { "logprobs", "true" },
+                { "seed", "123456" },
+                { "top_logprobs", "5" },
+            }
+        };
+
+        // Act
+        AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings);
+
+        // Assert
+        AssertExecutionSettings(executionSettings);
+    }
+
+    [Theory]
+    [InlineData("")]
+    [InlineData("123")]
+    [InlineData("Foo")]
+    [InlineData(1)]
+    [InlineData(1.0)]
+    public void ItCannotCreateAzureOpenAIPromptExecutionSettingsWithInvalidBoolValues(object value)
+    {
+        // Arrange
+        PromptExecutionSettings originalSettings = new()
+        {
+            ExtensionData = new Dictionary<string, object>()
+            {
+                { "logprobs", value }
+            }
+        };
+
+        // Act & Assert
+        Assert.Throws<ArgumentException>(() => AzureOpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings));
+    }
+
+    #region private
     private static void AssertExecutionSettings(OpenAIPromptExecutionSettings executionSettings)
     {
         Assert.NotNull(executionSettings);
@@ -79,4 +193,5 @@ private static void AssertExecutionSettings(OpenAIPromptExecutionSettings execut
         Assert.Equal(true, executionSettings.Logprobs);
         Assert.Equal(5, executionSettings.TopLogprobs);
     }
+    #endregion
 }
```
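One detail worth noting about the invalid-value Theory: it asserts `ArgumentException`, while a `JsonConverter` itself can only surface failures as `JsonException`. Presumably `FromExecutionSettings` catches deserialization failures and rethrows them, though that wrapping code is not part of this diff.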

dotnet/src/Connectors/Connectors.Google/GeminiPromptExecutionSettings.cs

Lines changed: 1 addition & 0 deletions
```diff
@@ -189,6 +189,7 @@ public GeminiToolCallBehavior? ToolCallBehavior
     /// </summary>
     [JsonPropertyName("audio_timestamp")]
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+    [JsonConverter(typeof(OptionalBoolJsonConverter))]
     public bool? AudioTimestamp
     {
         get => this._audioTimestamp;
```

dotnet/src/Connectors/Connectors.HuggingFace/HuggingFacePromptExecutionSettings.cs

Lines changed: 6 additions & 0 deletions
```diff
@@ -183,6 +183,7 @@ public bool UseCache
     /// This may not be supported by all models/inference API.
     /// </remarks>
     [JsonPropertyName("wait_for_model")]
+    [JsonConverter(typeof(BoolJsonConverter))]
     public bool WaitForModel
     {
         get => this._waitForModel;
@@ -233,6 +234,7 @@ public float? PresencePenalty
     /// output token returned in the content of message.
     /// </summary>
     [JsonPropertyName("logprobs")]
+    [JsonConverter(typeof(OptionalBoolJsonConverter))]
     public bool? LogProbs
     {
         get => this._logProbs;
@@ -294,6 +296,7 @@ public int? TopLogProbs
     /// (Default: True). Bool. If set to False, the return results will not contain the original query making it easier for prompting.
     /// </summary>
     [JsonPropertyName("return_full_text")]
+    [JsonConverter(typeof(OptionalBoolJsonConverter))]
     public bool? ReturnFullText
     {
         get => this._returnFullText;
@@ -309,6 +312,7 @@ public bool? ReturnFullText
     /// (Optional: True). Bool. Whether or not to use sampling, use greedy decoding otherwise.
     /// </summary>
     [JsonPropertyName("do_sample")]
+    [JsonConverter(typeof(OptionalBoolJsonConverter))]
     public bool? DoSample
     {
         get => this._doSample;
@@ -323,6 +327,8 @@ public bool? DoSample
     /// <summary>
     /// Show details of the generation. Including usage.
     /// </summary>
+    [JsonPropertyName("details")]
+    [JsonConverter(typeof(OptionalBoolJsonConverter))]
     public bool? Details
    {
         get => this._details;
```
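Beyond the converters, the last hunk also gives `Details` a `[JsonPropertyName("details")]` attribute; it previously had no explicit JSON name. This file is also the only one in the commit that uses both variants: the non-nullable `BoolJsonConverter` on `WaitForModel` (a plain `bool`) and `OptionalBoolJsonConverter` on the `bool?` properties.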

dotnet/src/Connectors/Connectors.MistralAI/MistralAIPromptExecutionSettings.cs

Lines changed: 1 addition & 0 deletions
```diff
@@ -77,6 +77,7 @@ public int? MaxTokens
     /// Whether to inject a safety prompt before all conversations.
     /// </summary>
     [JsonPropertyName("safe_prompt")]
+    [JsonConverter(typeof(BoolJsonConverter))]
     public bool SafePrompt
     {
         get => this._safePrompt;
```

dotnet/src/Connectors/Connectors.Onnx/Connectors.Onnx.csproj

Lines changed: 1 addition & 0 deletions
```diff
@@ -23,6 +23,7 @@
     <Compile Include="$(RepoRoot)/dotnet/src/InternalUtilities/src/Diagnostics/RequiresDynamicCodeAttribute.cs" Link="%(RecursiveDir)%(Filename)%(Extension)" />
     <Compile Include="$(RepoRoot)/dotnet/src/InternalUtilities/src/Diagnostics/UnconditionalSuppressMessageAttribute.cs" Link="%(RecursiveDir)%(Filename)%(Extension)" />
     <Compile Include="$(RepoRoot)/dotnet/src/InternalUtilities/src/Text/JsonOptionsCache.cs" Link="%(RecursiveDir)%(Filename)%(Extension)" />
+    <Compile Include="$(RepoRoot)/dotnet/src/InternalUtilities/src/Text/OptionalBoolJsonConverter.cs" Link="%(RecursiveDir)%(Filename)%(Extension)" />
     <Compile Include="$(RepoRoot)/dotnet/src/InternalUtilities/src/Text/ExceptionJsonConverter.cs" Link="%(RecursiveDir)%(Filename)%(Extension)" />
     <Compile Include="$(RepoRoot)/dotnet/src/InternalUtilities/src/System/AppContextSwitchHelper.cs" Link="%(RecursiveDir)%(Filename)%(Extension)" />
   </ItemGroup>
```

dotnet/src/Connectors/Connectors.Onnx/OnnxRuntimeGenAIPromptExecutionSettings.cs

Lines changed: 3 additions & 0 deletions
```diff
@@ -91,6 +91,7 @@ public static OnnxRuntimeGenAIPromptExecutionSettings FromExecutionSettings(Prom
     /// The past/present kv tensors are shared and allocated once to max_length (cuda only)
     /// </summary>
     [JsonPropertyName("past_present_share_buffer")]
+    [JsonConverter(typeof(OptionalBoolJsonConverter))]
     public bool? PastPresentShareBuffer { get; set; }
 
     /// <summary>
@@ -139,11 +140,13 @@ public static OnnxRuntimeGenAIPromptExecutionSettings FromExecutionSettings(Prom
     /// Allows the generation to stop early if all beam candidates reach the end token
     /// </summary>
     [JsonPropertyName("early_stopping")]
+    [JsonConverter(typeof(OptionalBoolJsonConverter))]
     public bool? EarlyStopping { get; set; }
 
     /// <summary>
     /// Do random sampling
     /// </summary>
     [JsonPropertyName("do_sample")]
+    [JsonConverter(typeof(OptionalBoolJsonConverter))]
     public bool? DoSample { get; set; }
 }
```
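To close the loop, a hedged sketch of the end-to-end path these attributes enable. `FromExecutionSettings` appears in the hunk headers above; the `Microsoft.SemanticKernel.Connectors.Onnx` namespace is assumed from the file path:

```csharp
using System;
using System.Collections.Generic;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.Onnx; // namespace assumed from the file path

// Extension data often arrives stringly typed (e.g. from YAML prompt configs).
// With the converter applied, "do_sample" binds to the bool? property.
var generic = new PromptExecutionSettings
{
    ExtensionData = new Dictionary<string, object> { ["do_sample"] = "true" }
};

OnnxRuntimeGenAIPromptExecutionSettings onnx =
    OnnxRuntimeGenAIPromptExecutionSettings.FromExecutionSettings(generic);
Console.WriteLine(onnx.DoSample); // True
```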
