Skip to content

Commit 8af76ac

Browse files
authored
Update Microsoft.Extensions.AI to 9.3.0-preview.1.25114.11 (#7388)
1 parent e3219a9 commit 8af76ac

File tree

6 files changed

+20
-19
lines changed

6 files changed

+20
-19
lines changed

docs/samples/Microsoft.ML.GenAI.Samples/MEAI/Llama3_1.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@ public static async Task RunAsync(string weightFolder, string checkPointName = "
4646
""";
4747
var chatMessage = new ChatMessage(ChatRole.User, task);
4848

49-
await foreach (var response in client.CompleteStreamingAsync([chatMessage]))
49+
await foreach (var response in client.GetStreamingResponseAsync([chatMessage]))
5050
{
5151
Console.Write(response.Text);
5252
}

docs/samples/Microsoft.ML.GenAI.Samples/MEAI/Phi3.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ public static async Task RunAsync(string weightFolder)
3636
""";
3737
var chatMessage = new ChatMessage(ChatRole.User, task);
3838

39-
await foreach (var response in client.CompleteStreamingAsync([chatMessage]))
39+
await foreach (var response in client.GetStreamingResponseAsync([chatMessage]))
4040
{
4141
Console.Write(response.Text);
4242
}

eng/Versions.props

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@
4646
<MicrosoftDotNetInteractiveVersion>1.0.0-beta.24375.2</MicrosoftDotNetInteractiveVersion>
4747
<MicrosoftMLOnnxRuntimeVersion>1.18.1</MicrosoftMLOnnxRuntimeVersion>
4848
<MlNetMklDepsVersion>0.0.0.12</MlNetMklDepsVersion>
49-
<MicrosoftExtensionsAIVersion>9.0.1-preview.1.24570.5</MicrosoftExtensionsAIVersion>
49+
<MicrosoftExtensionsAIVersion>9.3.0-preview.1.25114.11</MicrosoftExtensionsAIVersion>
5050
<!-- runtime.native.System.Data.SqlClient.sni is not updated by dependency flow as it is not produced live anymore. -->
5151
<RuntimeNativeSystemDataSqlClientSniVersion>4.4.0</RuntimeNativeSystemDataSqlClientSniVersion>
5252
<!--

src/Microsoft.ML.GenAI.Core/CausalLMPipelineChatClient.cs

Lines changed: 9 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -20,21 +20,20 @@ public abstract class CausalLMPipelineChatClient<TTokenizer, TCausalLMModel> : I
2020
{
2121
private readonly ICausalLMPipeline<TTokenizer, TCausalLMModel> _pipeline;
2222
private readonly IMEAIChatTemplateBuilder _chatTemplateBuilder;
23+
private readonly ChatClientMetadata _metadata;
2324

2425
public CausalLMPipelineChatClient(
2526
ICausalLMPipeline<TTokenizer, TCausalLMModel> pipeline,
2627
IMEAIChatTemplateBuilder chatTemplateBuilder,
2728
ChatClientMetadata? metadata = null)
2829
{
2930
var classNameWithType = $"{nameof(CausalLMPipelineChatClient<TTokenizer, TCausalLMModel>)}<{typeof(TTokenizer).Name}, {typeof(TCausalLMModel).Name}>";
30-
Metadata ??= new ChatClientMetadata(providerName: classNameWithType, modelId: typeof(TCausalLMModel).Name);
31+
_metadata = new ChatClientMetadata(providerName: classNameWithType, modelId: typeof(TCausalLMModel).Name);
3132
_chatTemplateBuilder = chatTemplateBuilder;
3233
_pipeline = pipeline;
3334
}
3435

35-
public ChatClientMetadata Metadata { get; }
36-
37-
public virtual Task<ChatCompletion> CompleteAsync(IList<ChatMessage> chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default)
36+
public virtual Task<ChatResponse> GetResponseAsync(IList<ChatMessage> chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default)
3837
{
3938
var prompt = _chatTemplateBuilder.BuildPrompt(chatMessages, options);
4039
var stopSequences = options?.StopSequences ?? Array.Empty<string>();
@@ -46,15 +45,15 @@ public virtual Task<ChatCompletion> CompleteAsync(IList<ChatMessage> chatMessage
4645
stopSequences: stopSequences.ToArray()) ?? throw new InvalidOperationException("Failed to generate a reply.");
4746

4847
var chatMessage = new ChatMessage(ChatRole.Assistant, output);
49-
return Task.FromResult(new ChatCompletion([chatMessage])
48+
return Task.FromResult(new ChatResponse([chatMessage])
5049
{
5150
CreatedAt = DateTime.UtcNow,
5251
FinishReason = ChatFinishReason.Stop,
5352
});
5453
}
5554

5655
#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously
57-
public virtual async IAsyncEnumerable<StreamingChatCompletionUpdate> CompleteStreamingAsync(
56+
public virtual async IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(
5857
#pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
5958
IList<ChatMessage> chatMessages,
6059
ChatOptions? options = null,
@@ -69,7 +68,7 @@ public virtual async IAsyncEnumerable<StreamingChatCompletionUpdate> CompleteStr
6968
temperature: options?.Temperature ?? 0.7f,
7069
stopSequences: stopSequences.ToArray()))
7170
{
72-
yield return new StreamingChatCompletionUpdate
71+
yield return new ChatResponseUpdate
7372
{
7473
Role = ChatRole.Assistant,
7574
Text = output,
@@ -83,6 +82,8 @@ public virtual void Dispose()
8382
}
8483

8584
public virtual object? GetService(Type serviceType, object? serviceKey = null) =>
86-
serviceKey is null && serviceType is not null && serviceType.IsAssignableFrom(GetType()) ? this :
85+
serviceKey is not null ? null :
86+
serviceType == typeof(ChatClientMetadata) ? _metadata :
87+
serviceType.IsAssignableFrom(GetType()) ? this :
8788
null;
8889
}

src/Microsoft.ML.GenAI.LLaMA/Llama3CausalLMChatClient.cs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ public Llama3CausalLMChatClient(
2424
{
2525
}
2626

27-
public override Task<ChatCompletion> CompleteAsync(
27+
public override Task<ChatResponse> GetResponseAsync(
2828
IList<ChatMessage> chatMessages,
2929
ChatOptions? options = null,
3030
CancellationToken cancellationToken = default)
@@ -40,10 +40,10 @@ public override Task<ChatCompletion> CompleteAsync(
4040
options.StopSequences = new List<string> { _eotToken };
4141
}
4242

43-
return base.CompleteAsync(chatMessages, options, cancellationToken);
43+
return base.GetResponseAsync(chatMessages, options, cancellationToken);
4444
}
4545

46-
public override IAsyncEnumerable<StreamingChatCompletionUpdate> CompleteStreamingAsync(
46+
public override IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(
4747
IList<ChatMessage> chatMessages,
4848
ChatOptions? options = null,
4949
CancellationToken cancellationToken = default)
@@ -52,6 +52,6 @@ public override IAsyncEnumerable<StreamingChatCompletionUpdate> CompleteStreamin
5252
options.StopSequences ??= [];
5353
options.StopSequences.Add(_eotToken);
5454

55-
return base.CompleteStreamingAsync(chatMessages, options, cancellationToken);
55+
return base.GetStreamingResponseAsync(chatMessages, options, cancellationToken);
5656
}
5757
}

src/Microsoft.ML.GenAI.Phi/Phi3/Phi3CausalLMChatClient.cs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ public Phi3CausalLMChatClient(
2929
{
3030
}
3131

32-
public override Task<ChatCompletion> CompleteAsync(
32+
public override Task<ChatResponse> GetResponseAsync(
3333
IList<ChatMessage> chatMessages,
3434
ChatOptions? options = null,
3535
CancellationToken cancellationToken = default)
@@ -45,10 +45,10 @@ public override Task<ChatCompletion> CompleteAsync(
4545
options.StopSequences = [_eotToken];
4646
}
4747

48-
return base.CompleteAsync(chatMessages, options, cancellationToken);
48+
return base.GetResponseAsync(chatMessages, options, cancellationToken);
4949
}
5050

51-
public override IAsyncEnumerable<StreamingChatCompletionUpdate> CompleteStreamingAsync(
51+
public override IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(
5252
IList<ChatMessage> chatMessages,
5353
ChatOptions? options = null,
5454
CancellationToken cancellationToken = default)
@@ -57,6 +57,6 @@ public override IAsyncEnumerable<StreamingChatCompletionUpdate> CompleteStreamin
5757
options.StopSequences ??= [];
5858
options.StopSequences.Add(_eotToken);
5959

60-
return base.CompleteStreamingAsync(chatMessages, options, cancellationToken);
60+
return base.GetStreamingResponseAsync(chatMessages, options, cancellationToken);
6161
}
6262
}

0 commit comments

Comments (0)