Skip to content

Commit fabffc2

Browse files
Adding additional unit tests
1 parent 44d2d75 commit fabffc2

File tree

12 files changed

+251
-32
lines changed

12 files changed

+251
-32
lines changed

dotnet/SK-dotnet.sln

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -227,6 +227,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Text", "Text", "{EB2C141A-A
227227
src\Connectors\Connectors.OpenAI\Text\BoolJsonConverter.cs = src\Connectors\Connectors.OpenAI\Text\BoolJsonConverter.cs
228228
src\InternalUtilities\src\Text\ExceptionJsonConverter.cs = src\InternalUtilities\src\Text\ExceptionJsonConverter.cs
229229
src\InternalUtilities\src\Text\JsonOptionsCache.cs = src\InternalUtilities\src\Text\JsonOptionsCache.cs
230+
OptionalBoolJsonConverter.cs = OptionalBoolJsonConverter.cs
230231
src\InternalUtilities\src\Text\SseData.cs = src\InternalUtilities\src\Text\SseData.cs
231232
src\InternalUtilities\src\Text\SseJsonParser.cs = src\InternalUtilities\src\Text\SseJsonParser.cs
232233
src\InternalUtilities\src\Text\SseLine.cs = src\InternalUtilities\src\Text\SseLine.cs

dotnet/src/Connectors/Connectors.Amazon/Bedrock/Settings/AmazonCommandExecutionSettings.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -112,7 +112,7 @@ public string? ReturnLikelihoods
112112
/// (Required to support streaming) Specify true to return the response piece-by-piece in real-time and false to return the complete response after the process finishes.
113113
/// </summary>
114114
[JsonPropertyName("stream")]
115-
[JsonConverter(typeof(BoolJsonConverter))]
115+
[JsonConverter(typeof(OptionalBoolJsonConverter))]
116116
public bool? Stream
117117
{
118118
get => this._stream;

dotnet/src/Connectors/Connectors.Amazon/Bedrock/Settings/AmazonCommandRExecutionSettings.cs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -63,7 +63,7 @@ public List<CohereCommandRTools.Document>? Documents
6363
/// Defaults to false. When true, the response will only contain a list of generated search queries, but no search will take place, and no reply from the model to the user's message will be generated.
6464
/// </summary>
6565
[JsonPropertyName("search_queries_only")]
66-
[JsonConverter(typeof(BoolJsonConverter))]
66+
[JsonConverter(typeof(OptionalBoolJsonConverter))]
6767
public bool? SearchQueriesOnly
6868
{
6969
get => this._searchQueriesOnly;
@@ -204,7 +204,7 @@ public int? Seed
204204
/// Specify true to return the full prompt that was sent to the model. The default value is false. In the response, the prompt in the prompt field.
205205
/// </summary>
206206
[JsonPropertyName("return_prompt")]
207-
[JsonConverter(typeof(BoolJsonConverter))]
207+
[JsonConverter(typeof(OptionalBoolJsonConverter))]
208208
public bool? ReturnPrompt
209209
{
210210
get => this._returnPrompt;
@@ -261,7 +261,7 @@ public List<string>? StopSequences
261261
/// Specify true, to send the user's message to the model without any preprocessing, otherwise false.
262262
/// </summary>
263263
[JsonPropertyName("raw_prompting")]
264-
[JsonConverter(typeof(BoolJsonConverter))]
264+
[JsonConverter(typeof(OptionalBoolJsonConverter))]
265265
public bool? RawPrompting
266266
{
267267
get => this._rawPrompting;

dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs

Lines changed: 40 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22

33
using System;
44
using System.Collections.Generic;
5+
using System.Text.Json;
56
using Azure.AI.OpenAI.Chat;
67
using Microsoft.SemanticKernel;
78
using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
@@ -65,7 +66,7 @@ public void ItRestoresOriginalFunctionChoiceBehavior()
6566
}
6667

6768
[Fact]
68-
public void ItCanCreateOpenAIPromptExecutionSettingsFromPromptExecutionSettings()
69+
public void ItCanCreateAzureOpenAIPromptExecutionSettingsFromPromptExecutionSettings()
6970
{
7071
// Arrange
7172
PromptExecutionSettings originalSettings = new()
@@ -87,14 +88,47 @@ public void ItCanCreateOpenAIPromptExecutionSettingsFromPromptExecutionSettings(
8788
};
8889

8990
// Act
90-
OpenAIPromptExecutionSettings executionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings);
91+
AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings);
92+
93+
// Assert
94+
AssertExecutionSettings(executionSettings);
95+
}
96+
97+
[Fact]
98+
public void ItCanCreateAzureOpenAIPromptExecutionSettingsFromJson()
99+
{
100+
// Arrange
101+
var json =
102+
"""
103+
{
104+
"temperature": 0.7,
105+
"top_p": 0.7,
106+
"frequency_penalty": 0.7,
107+
"presence_penalty": 0.7,
108+
"stop_sequences": [ "foo", "bar" ],
109+
"chat_system_prompt": "chat system prompt",
110+
"token_selection_biases":
111+
{
112+
"1": "2",
113+
"3": "4"
114+
},
115+
"max_tokens": 128,
116+
"logprobs": true,
117+
"seed": 123456,
118+
"top_logprobs": 5
119+
}
120+
""";
121+
122+
// Act
123+
var originalSettings = JsonSerializer.Deserialize<PromptExecutionSettings>(json);
124+
OpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings);
91125

92126
// Assert
93127
AssertExecutionSettings(executionSettings);
94128
}
95129

96130
[Fact]
97-
public void ItCanCreateOpenAIPromptExecutionSettingsFromPromptExecutionSettingsWithIncorrectTypes()
131+
public void ItCanCreateAzureOpenAIPromptExecutionSettingsFromPromptExecutionSettingsWithIncorrectTypes()
98132
{
99133
// Arrange
100134
PromptExecutionSettings originalSettings = new()
@@ -116,7 +150,7 @@ public void ItCanCreateOpenAIPromptExecutionSettingsFromPromptExecutionSettingsW
116150
};
117151

118152
// Act
119-
OpenAIPromptExecutionSettings executionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings);
153+
AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings);
120154

121155
// Assert
122156
AssertExecutionSettings(executionSettings);
@@ -128,7 +162,7 @@ public void ItCanCreateOpenAIPromptExecutionSettingsFromPromptExecutionSettingsW
128162
[InlineData("Foo")]
129163
[InlineData(1)]
130164
[InlineData(1.0)]
131-
public void ItCannotCreateOpenAIPromptExecutionSettingsWithInvalidBoolValues(object value)
165+
public void ItCannotCreateAzureOpenAIPromptExecutionSettingsWithInvalidBoolValues(object value)
132166
{
133167
// Arrange
134168
PromptExecutionSettings originalSettings = new()
@@ -140,7 +174,7 @@ public void ItCannotCreateOpenAIPromptExecutionSettingsWithInvalidBoolValues(obj
140174
};
141175

142176
// Act & Assert
143-
Assert.Throws<ArgumentException>(() => OpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings));
177+
Assert.Throws<ArgumentException>(() => AzureOpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings));
144178
}
145179

146180
#region private

dotnet/src/Connectors/Connectors.Google/GeminiPromptExecutionSettings.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -189,7 +189,7 @@ public GeminiToolCallBehavior? ToolCallBehavior
189189
/// </summary>
190190
[JsonPropertyName("audio_timestamp")]
191191
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
192-
[JsonConverter(typeof(BoolJsonConverter))]
192+
[JsonConverter(typeof(OptionalBoolJsonConverter))]
193193
public bool? AudioTimestamp
194194
{
195195
get => this._audioTimestamp;

dotnet/src/Connectors/Connectors.HuggingFace/HuggingFacePromptExecutionSettings.cs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -234,7 +234,7 @@ public float? PresencePenalty
234234
/// output token returned in the content of message.
235235
/// </summary>
236236
[JsonPropertyName("logprobs")]
237-
[JsonConverter(typeof(BoolJsonConverter))]
237+
[JsonConverter(typeof(OptionalBoolJsonConverter))]
238238
public bool? LogProbs
239239
{
240240
get => this._logProbs;
@@ -296,7 +296,7 @@ public int? TopLogProbs
296296
/// (Default: True). Bool. If set to False, the return results will not contain the original query making it easier for prompting.
297297
/// </summary>
298298
[JsonPropertyName("return_full_text")]
299-
[JsonConverter(typeof(BoolJsonConverter))]
299+
[JsonConverter(typeof(OptionalBoolJsonConverter))]
300300
public bool? ReturnFullText
301301
{
302302
get => this._returnFullText;
@@ -312,7 +312,7 @@ public bool? ReturnFullText
312312
/// (Optional: True). Bool. Whether or not to use sampling, use greedy decoding otherwise.
313313
/// </summary>
314314
[JsonPropertyName("do_sample")]
315-
[JsonConverter(typeof(BoolJsonConverter))]
315+
[JsonConverter(typeof(OptionalBoolJsonConverter))]
316316
public bool? DoSample
317317
{
318318
get => this._doSample;
@@ -328,7 +328,7 @@ public bool? DoSample
328328
/// Show details of the generation. Including usage.
329329
/// </summary>
330330
[JsonPropertyName("details")]
331-
[JsonConverter(typeof(BoolJsonConverter))]
331+
[JsonConverter(typeof(OptionalBoolJsonConverter))]
332332
public bool? Details
333333
{
334334
get => this._details;

dotnet/src/Connectors/Connectors.Onnx/Connectors.Onnx.csproj

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@
2323
<Compile Include="$(RepoRoot)/dotnet/src/InternalUtilities/src/Diagnostics/RequiresDynamicCodeAttribute.cs" Link="%(RecursiveDir)%(Filename)%(Extension)" />
2424
<Compile Include="$(RepoRoot)/dotnet/src/InternalUtilities/src/Diagnostics/UnconditionalSuppressMessageAttribute.cs" Link="%(RecursiveDir)%(Filename)%(Extension)" />
2525
<Compile Include="$(RepoRoot)/dotnet/src/InternalUtilities/src/Text/JsonOptionsCache.cs" Link="%(RecursiveDir)%(Filename)%(Extension)" />
26-
<Compile Include="$(RepoRoot)/dotnet/src/InternalUtilities/src/Text/BoolJsonConverter.cs" Link="%(RecursiveDir)%(Filename)%(Extension)" />
26+
<Compile Include="$(RepoRoot)/dotnet/src/InternalUtilities/src/Text/OptionalBoolJsonConverter.cs" Link="%(RecursiveDir)%(Filename)%(Extension)" />
2727
<Compile Include="$(RepoRoot)/dotnet/src/InternalUtilities/src/Text/ExceptionJsonConverter.cs" Link="%(RecursiveDir)%(Filename)%(Extension)" />
2828
<Compile Include="$(RepoRoot)/dotnet/src/InternalUtilities/src/System/AppContextSwitchHelper.cs" Link="%(RecursiveDir)%(Filename)%(Extension)" />
2929
</ItemGroup>

dotnet/src/Connectors/Connectors.Onnx/OnnxRuntimeGenAIPromptExecutionSettings.cs

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -91,7 +91,7 @@ public static OnnxRuntimeGenAIPromptExecutionSettings FromExecutionSettings(Prom
9191
/// The past/present kv tensors are shared and allocated once to max_length (cuda only)
9292
/// </summary>
9393
[JsonPropertyName("past_present_share_buffer")]
94-
[JsonConverter(typeof(BoolJsonConverter))]
94+
[JsonConverter(typeof(OptionalBoolJsonConverter))]
9595
public bool? PastPresentShareBuffer { get; set; }
9696

9797
/// <summary>
@@ -140,12 +140,13 @@ public static OnnxRuntimeGenAIPromptExecutionSettings FromExecutionSettings(Prom
140140
/// Allows the generation to stop early if all beam candidates reach the end token
141141
/// </summary>
142142
[JsonPropertyName("early_stopping")]
143+
[JsonConverter(typeof(OptionalBoolJsonConverter))]
143144
public bool? EarlyStopping { get; set; }
144145

145146
/// <summary>
146147
/// Do random sampling
147148
/// </summary>
148149
[JsonPropertyName("do_sample")]
149-
[JsonConverter(typeof(BoolJsonConverter))]
150+
[JsonConverter(typeof(OptionalBoolJsonConverter))]
150151
public bool? DoSample { get; set; }
151152
}

dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs

Lines changed: 127 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -317,6 +317,133 @@ public void ItRestoresOriginalFunctionChoiceBehavior()
317317
Assert.Equal(functionChoiceBehavior, result.FunctionChoiceBehavior);
318318
}
319319

320+
[Fact]
321+
public void ItCanCreateOpenAIPromptExecutionSettingsFromPromptExecutionSettings()
322+
{
323+
// Arrange
324+
PromptExecutionSettings originalSettings = new()
325+
{
326+
ExtensionData = new Dictionary<string, object>()
327+
{
328+
{ "temperature", 0.7 },
329+
{ "top_p", 0.7 },
330+
{ "frequency_penalty", 0.7 },
331+
{ "presence_penalty", 0.7 },
332+
{ "stop_sequences", new string[] { "foo", "bar" } },
333+
{ "chat_system_prompt", "chat system prompt" },
334+
{ "chat_developer_prompt", "chat developer prompt" },
335+
{ "reasoning_effort", "high" },
336+
{ "token_selection_biases", new Dictionary<int, int>() { { 1, 2 }, { 3, 4 } } },
337+
{ "max_tokens", 128 },
338+
{ "logprobs", true },
339+
{ "seed", 123456 },
340+
{ "store", true },
341+
{ "top_logprobs", 5 },
342+
{ "metadata", new Dictionary<string, string>() { { "foo", "bar" } } }
343+
}
344+
};
345+
346+
// Act
347+
OpenAIPromptExecutionSettings executionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings);
348+
349+
// Assert
350+
AssertExecutionSettings(executionSettings);
351+
}
352+
353+
[Fact]
354+
public void ItCanCreateOpenAIPromptExecutionSettingsFromJson()
355+
{
356+
// Arrange
357+
var json =
358+
"""
359+
{
360+
"temperature": 0.7,
361+
"top_p": 0.7,
362+
"frequency_penalty": 0.7,
363+
"presence_penalty": 0.7,
364+
"stop_sequences": [ "foo", "bar" ],
365+
"chat_system_prompt": "chat system prompt",
366+
"chat_developer_prompt": "chat developer prompt",
367+
"reasoning_effort": "high",
368+
"token_selection_biases":
369+
{
370+
"1": "2",
371+
"3": "4"
372+
},
373+
"max_tokens": 128,
374+
"logprobs": true,
375+
"seed": 123456,
376+
"store": true,
377+
"top_logprobs": 5,
378+
"metadata":
379+
{
380+
"foo": "bar"
381+
}
382+
}
383+
""";
384+
385+
// Act
386+
var originalSettings = JsonSerializer.Deserialize<PromptExecutionSettings>(json);
387+
OpenAIPromptExecutionSettings executionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings);
388+
389+
// Assert
390+
AssertExecutionSettings(executionSettings);
391+
}
392+
393+
[Fact]
394+
public void ItCanCreateOpenAIPromptExecutionSettingsFromPromptExecutionSettingsWithIncorrectTypes()
395+
{
396+
// Arrange
397+
PromptExecutionSettings originalSettings = new()
398+
{
399+
ExtensionData = new Dictionary<string, object>()
400+
{
401+
{ "temperature", "0.7" },
402+
{ "top_p", "0.7" },
403+
{ "frequency_penalty", "0.7" },
404+
{ "presence_penalty", "0.7" },
405+
{ "stop_sequences", new List<object> { "foo", "bar" } },
406+
{ "chat_system_prompt", "chat system prompt" },
407+
{ "chat_developer_prompt", "chat developer prompt" },
408+
{ "reasoning_effort", "high" },
409+
{ "token_selection_biases", new Dictionary<string, object>() { { "1", "2" }, { "3", "4" } } },
410+
{ "max_tokens", "128" },
411+
{ "logprobs", "true" },
412+
{ "seed", "123456" },
413+
{ "store", true },
414+
{ "top_logprobs", "5" },
415+
{ "metadata", new Dictionary<string, string>() { { "foo", "bar" } } }
416+
}
417+
};
418+
419+
// Act
420+
OpenAIPromptExecutionSettings executionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings);
421+
422+
// Assert
423+
AssertExecutionSettings(executionSettings);
424+
}
425+
426+
[Theory]
427+
[InlineData("")]
428+
[InlineData("123")]
429+
[InlineData("Foo")]
430+
[InlineData(1)]
431+
[InlineData(1.0)]
432+
public void ItCannotCreateOpenAIPromptExecutionSettingsWithInvalidBoolValues(object value)
433+
{
434+
// Arrange
435+
PromptExecutionSettings originalSettings = new()
436+
{
437+
ExtensionData = new Dictionary<string, object>()
438+
{
439+
{ "logprobs", value }
440+
}
441+
};
442+
443+
// Act & Assert
444+
Assert.Throws<ArgumentException>(() => OpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings));
445+
}
446+
320447
private static void AssertExecutionSettings(OpenAIPromptExecutionSettings executionSettings)
321448
{
322449
Assert.NotNull(executionSettings);

dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -304,7 +304,7 @@ public string? User
304304
[Experimental("SKEXP0010")]
305305
[JsonPropertyName("logprobs")]
306306
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
307-
[JsonConverter(typeof(BoolJsonConverter))]
307+
[JsonConverter(typeof(OptionalBoolJsonConverter))]
308308
public bool? Logprobs
309309
{
310310
get => this._logprobs;
@@ -356,7 +356,7 @@ public IDictionary<string, string>? Metadata
356356
[Experimental("SKEXP0010")]
357357
[JsonPropertyName("store")]
358358
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
359-
[JsonConverter(typeof(BoolJsonConverter))]
359+
[JsonConverter(typeof(OptionalBoolJsonConverter))]
360360
public bool? Store
361361
{
362362
get => this._store;

0 commit comments

Comments (0)