From 6833654600796d87bdc223e8428d1e79735741af Mon Sep 17 00:00:00 2001 From: Genevieve Warren <24882762+gewarren@users.noreply.github.com> Date: Wed, 19 Feb 2025 14:15:04 -0800 Subject: [PATCH 1/6] rename chat client APIs --- docs/ai/ai-extensions.md | 4 ++-- .../snippets/content-filtering/Program.cs | 6 +++--- .../snippets/hosted-app-auth/Program.cs | 2 +- .../build-chat-app/azure-openai/Program.cs | 2 +- .../snippets/build-chat-app/openai/Program.cs | 2 +- .../function-calling/azure-openai/Program.cs | 2 +- .../function-calling/openai/Program.cs | 2 +- .../quickstarts/snippets/local-ai/Program.cs | 6 +++--- .../prompt-completion/azure-openai/Program.cs | 2 +- .../prompt-completion/openai/Program.cs | 2 +- .../extensions/artificial-intelligence.md | 20 +++++++++---------- .../ai/AI.Shared/RateLimitingChatClient.cs | 8 ++++---- .../snippets/ai/AI.Shared/SampleChatClient.cs | 4 ++-- .../ai/ConsoleAI.CacheResponses/Program.cs | 2 +- .../Program.cs | 2 +- .../ConsoleAI.CustomClientMiddle/Program.cs | 2 +- .../ConsoleAI.DependencyInjection/Program.cs | 2 +- .../Program.cs | 2 +- .../ConsoleAI.GetResponseAsyncArgs.csproj} | 0 .../Program.cs | 2 +- ...onsoleAI.GetStreamingResponseAsync.csproj} | 0 .../Program.cs | 2 +- .../ai/ConsoleAI.ProvideOptions/Program.cs | 4 ++-- .../ai/ConsoleAI.ToolCalling/Program.cs | 2 +- .../ai/ConsoleAI.UseTelemetry/Program.cs | 2 +- .../snippets/ai/ConsoleAI/Program.cs | 2 +- 26 files changed, 43 insertions(+), 43 deletions(-) rename docs/core/extensions/snippets/ai/{ConsoleAI.CompleteAsyncArgs/ConsoleAI.CompleteAsyncArgs.csproj => ConsoleAI.GetResponseAsyncArgs/ConsoleAI.GetResponseAsyncArgs.csproj} (100%) rename docs/core/extensions/snippets/ai/{ConsoleAI.CompleteAsyncArgs => ConsoleAI.GetResponseAsyncArgs}/Program.cs (83%) rename docs/core/extensions/snippets/ai/{ConsoleAI.CompleteStreamingAsync/ConsoleAI.CompleteStreamingAsync.csproj => ConsoleAI.GetStreamingResponseAsync/ConsoleAI.GetStreamingResponseAsync.csproj} (100%) rename 
docs/core/extensions/snippets/ai/{ConsoleAI.CompleteStreamingAsync => ConsoleAI.GetStreamingResponseAsync}/Program.cs (67%) diff --git a/docs/ai/ai-extensions.md b/docs/ai/ai-extensions.md index 61f7622aa8c95..45a0e8da778a1 100644 --- a/docs/ai/ai-extensions.md +++ b/docs/ai/ai-extensions.md @@ -45,10 +45,10 @@ IChatClient client =     new AzureAIInferenceChatClient(...); ``` -Then, regardless of the provider you're using, you can send requests by calling , as follows: +Then, regardless of the provider you're using, you can send requests by calling , as follows: ```csharp -var response = await chatClient.CompleteAsync( +var response = await chatClient.GetResponseAsync(       "Translate the following text into Pig Latin: I love .NET and AI"); Console.WriteLine(response.Message); diff --git a/docs/ai/how-to/snippets/content-filtering/Program.cs b/docs/ai/how-to/snippets/content-filtering/Program.cs index f6537d51e3969..ab7ccd23d7b62 100644 --- a/docs/ai/how-to/snippets/content-filtering/Program.cs +++ b/docs/ai/how-to/snippets/content-filtering/Program.cs @@ -9,11 +9,11 @@ try { - ChatCompletion completion = await client.CompleteAsync("YOUR_PROMPT"); + ChatCompletion completion = await client.GetResponseAsync("YOUR_PROMPT"); Console.WriteLine(completion.Message); -} -catch (Exception e) +} +catch (Exception e) { Console.WriteLine(e.Message); } diff --git a/docs/ai/how-to/snippets/hosted-app-auth/Program.cs b/docs/ai/how-to/snippets/hosted-app-auth/Program.cs index deb2850199b8e..3d14a3f6aa051 100644 --- a/docs/ai/how-to/snippets/hosted-app-auth/Program.cs +++ b/docs/ai/how-to/snippets/hosted-app-auth/Program.cs @@ -40,7 +40,7 @@ app.MapGet("/test-prompt", async (IChatClient chatClient) => { - return await chatClient.CompleteAsync("Test prompt", new ChatOptions()); + return await chatClient.GetResponseAsync("Test prompt", new ChatOptions()); }) .WithName("Test prompt"); diff --git a/docs/ai/quickstarts/snippets/build-chat-app/azure-openai/Program.cs 
b/docs/ai/quickstarts/snippets/build-chat-app/azure-openai/Program.cs index c6031d83718f5..880ebfced7741 100644 --- a/docs/ai/quickstarts/snippets/build-chat-app/azure-openai/Program.cs +++ b/docs/ai/quickstarts/snippets/build-chat-app/azure-openai/Program.cs @@ -41,7 +41,7 @@ the local nature on the hikes when making a recommendation. At the end of your Console.WriteLine("AI Response:"); var response = ""; await foreach (var item in - chatClient.CompleteStreamingAsync(chatHistory)) + chatClient.GetStreamingResponseAsync(chatHistory)) { Console.Write(item.Text); response += item.Text; diff --git a/docs/ai/quickstarts/snippets/build-chat-app/openai/Program.cs b/docs/ai/quickstarts/snippets/build-chat-app/openai/Program.cs index 12e694e1ab8c8..522af1ec7be10 100644 --- a/docs/ai/quickstarts/snippets/build-chat-app/openai/Program.cs +++ b/docs/ai/quickstarts/snippets/build-chat-app/openai/Program.cs @@ -40,7 +40,7 @@ the local nature on the hikes when making a recommendation. At the end of your Console.WriteLine("AI Response:"); var response = ""; await foreach (var item in - chatClient.CompleteStreamingAsync(chatHistory)) + chatClient.GetStreamingResponseAsync(chatHistory)) { Console.Write(item.Text); response += item.Text; diff --git a/docs/ai/quickstarts/snippets/function-calling/azure-openai/Program.cs b/docs/ai/quickstarts/snippets/function-calling/azure-openai/Program.cs index 5d9dee4294635..26800ef70e4c8 100644 --- a/docs/ai/quickstarts/snippets/function-calling/azure-openai/Program.cs +++ b/docs/ai/quickstarts/snippets/function-calling/azure-openai/Program.cs @@ -37,6 +37,6 @@ You are a hiking enthusiast who helps people discover fun hikes in their area. Y "I live in Montreal and I'm looking for a moderate intensity hike. What's the current weather like? 
")); Console.WriteLine($"{chatHistory.Last().Role} >>> {chatHistory.Last()}"); -var response = await client.CompleteAsync(chatHistory, chatOptions); +var response = await client.GetResponseAsync(chatHistory, chatOptions); chatHistory.Add(new ChatMessage(ChatRole.Assistant, response.Message.Contents)); Console.WriteLine($"{chatHistory.Last().Role} >>> {chatHistory.Last()}"); diff --git a/docs/ai/quickstarts/snippets/function-calling/openai/Program.cs b/docs/ai/quickstarts/snippets/function-calling/openai/Program.cs index c57da3c4c3275..b276180ffb3ad 100644 --- a/docs/ai/quickstarts/snippets/function-calling/openai/Program.cs +++ b/docs/ai/quickstarts/snippets/function-calling/openai/Program.cs @@ -35,6 +35,6 @@ You are a hiking enthusiast who helps people discover fun hikes in their area. Y "I live in Montreal and I'm looking for a moderate intensity hike. What's the current weather like? ")); Console.WriteLine($"{chatHistory.Last().Role} >>> {chatHistory.Last()}"); -var response = await client.CompleteAsync(chatHistory, chatOptions); +var response = await client.GetResponseAsync(chatHistory, chatOptions); chatHistory.Add(new ChatMessage(ChatRole.Assistant, response.Message.Contents)); Console.WriteLine($"{chatHistory.Last().Role} >>> {chatHistory.Last()}"); diff --git a/docs/ai/quickstarts/snippets/local-ai/Program.cs b/docs/ai/quickstarts/snippets/local-ai/Program.cs index ec8b1468712cc..5e0a731fca0e3 100644 --- a/docs/ai/quickstarts/snippets/local-ai/Program.cs +++ b/docs/ai/quickstarts/snippets/local-ai/Program.cs @@ -1,6 +1,6 @@ using Microsoft.Extensions.AI; -IChatClient chatClient = +IChatClient chatClient = new OllamaChatClient(new Uri("http://localhost:11434/"), "phi3:mini"); // Start the conversation with context for the AI model @@ -17,11 +17,11 @@ Console.WriteLine("AI Response:"); var response = ""; await foreach (var item in - chatClient.CompleteStreamingAsync(chatHistory)) + chatClient.GetStreamingResponseAsync(chatHistory)) { 
Console.Write(item.Text); response += item.Text; } chatHistory.Add(new ChatMessage(ChatRole.Assistant, response)); Console.WriteLine(); -} \ No newline at end of file +} diff --git a/docs/ai/quickstarts/snippets/prompt-completion/azure-openai/Program.cs b/docs/ai/quickstarts/snippets/prompt-completion/azure-openai/Program.cs index 43e83dd1bf561..9d3050cfc333d 100644 --- a/docs/ai/quickstarts/snippets/prompt-completion/azure-openai/Program.cs +++ b/docs/ai/quickstarts/snippets/prompt-completion/azure-openai/Program.cs @@ -20,5 +20,5 @@ Console.WriteLine($"user >>> {prompt}"); // Submit the prompt and print out the response -ChatCompletion response = await client.CompleteAsync(prompt, new ChatOptions { MaxOutputTokens = 400 }); +ChatCompletion response = await client.GetResponseAsync(prompt, new ChatOptions { MaxOutputTokens = 400 }); Console.WriteLine($"assistant >>> {response}"); diff --git a/docs/ai/quickstarts/snippets/prompt-completion/openai/Program.cs b/docs/ai/quickstarts/snippets/prompt-completion/openai/Program.cs index e4f234697acb3..88b789059e485 100644 --- a/docs/ai/quickstarts/snippets/prompt-completion/openai/Program.cs +++ b/docs/ai/quickstarts/snippets/prompt-completion/openai/Program.cs @@ -19,5 +19,5 @@ Console.WriteLine($"user >>> {prompt}"); // Submit the prompt and print out the response -ChatCompletion response = await client.CompleteAsync(prompt, new ChatOptions { MaxOutputTokens = 400 }); +ChatCompletion response = await client.GetResponseAsync(prompt, new ChatOptions { MaxOutputTokens = 400 }); Console.WriteLine($"assistant >>> {response}"); diff --git a/docs/core/extensions/artificial-intelligence.md b/docs/core/extensions/artificial-intelligence.md index 87189676658d4..b2df9aac9e38a 100644 --- a/docs/core/extensions/artificial-intelligence.md +++ b/docs/core/extensions/artificial-intelligence.md @@ -67,13 +67,13 @@ The following subsections show specific `IChatClient` usage examples: ### Request chat completion -To request a completion, 
call the method. The request is composed of one or more messages, each of which is composed of one or more pieces of content. Accelerator methods exist to simplify common cases, such as constructing a request for a single piece of text content. +To request a completion, call the method. The request is composed of one or more messages, each of which is composed of one or more pieces of content. Accelerator methods exist to simplify common cases, such as constructing a request for a single piece of text content. :::code language="csharp" source="snippets/ai/ConsoleAI/Program.cs"::: -The core `IChatClient.CompleteAsync` method accepts a list of messages. This list represents the history of all messages that are part of the conversation. +The core `IChatClient.GetResponseAsync` method accepts a list of messages. This list represents the history of all messages that are part of the conversation. -:::code language="csharp" source="snippets/ai/ConsoleAI.CompleteAsyncArgs/Program.cs"::: +:::code language="csharp" source="snippets/ai/ConsoleAI.GetResponseAsyncArgs/Program.cs"::: Each message in the history is represented by a object. The `ChatMessage` class provides a property that indicates the role of the message. By default, the is used. The following roles are available: @@ -94,9 +94,9 @@ Each chat message is instantiated, assigning to its are identical to those of `CompleteAsync`. However, rather than returning the complete response as part of a object, the method returns an where `T` is , providing a stream of updates that collectively form the single response. +The inputs to are identical to those of `GetResponseAsync`. However, rather than returning the complete response as part of a object, the method returns an where `T` is , providing a stream of updates that collectively form the single response. 
-:::code language="csharp" source="snippets/ai/ConsoleAI.CompleteStreamingAsync/Program.cs"::: +:::code language="csharp" source="snippets/ai/ConsoleAI.GetStreamingResponseAsync/Program.cs"::: > [!TIP] > Streaming APIs are nearly synonymous with AI user experiences. C# enables compelling scenarios with its `IAsyncEnumerable` support, allowing for a natural and efficient way to stream data. @@ -120,7 +120,7 @@ The preceding code: - Defines a function named `GetCurrentWeather` that returns a random weather forecast. - This function is decorated with a , which is used to provide a description of the function to the AI service. - Instantiates a with an and configures it to use function invocation. -- Calls `CompleteStreamingAsync` on the client, passing a prompt and a list of tools that includes a function created with . +- Calls `GetStreamingResponseAsync` on the client, passing a prompt and a list of tools that includes a function created with . - Iterates over the response, printing each update to the console. ### Cache responses @@ -141,7 +141,7 @@ The preceding example depends on the [📦 OpenTelemetry.Exporter.Console](https ### Provide options -Every call to or can optionally supply a instance containing additional parameters for the operation. The most common parameters among AI models and services show up as strongly typed properties on the type, such as . Other parameters can be supplied by name in a weakly typed manner via the dictionary. +Every call to or can optionally supply a instance containing additional parameters for the operation. The most common parameters among AI models and services show up as strongly typed properties on the type, such as . Other parameters can be supplied by name in a weakly typed manner via the dictionary. You can also specify options when building an `IChatClient` with the fluent API and chaining a call to the `ConfigureOptions` extension method. 
This delegating client wraps another client and invokes the supplied delegate to populate a `ChatOptions` instance for every call. For example, to ensure that the property defaults to a particular model name, you can use code like the following: @@ -165,7 +165,7 @@ The preceding example depends on the following NuGet packages: To add additional functionality, you can implement `IChatClient` directly or use the class. This class serves as a base for creating chat clients that delegate operations to another `IChatClient` instance. It simplifies chaining multiple clients, allowing calls to pass through to an underlying client. -The `DelegatingChatClient` class provides default implementations for methods like `CompleteAsync`, `CompleteStreamingAsync`, and `Dispose`, which forward calls to the inner client. You can derive from this class and override only the methods you need to enhance behavior, while delegating other calls to the base implementation. This approach helps create flexible and modular chat clients that are easy to extend and compose. +The `DelegatingChatClient` class provides default implementations for methods like `GetResponseAsync`, `GetStreamingResponseAsync`, and `Dispose`, which forward calls to the inner client. You can derive from this class and override only the methods you need to enhance behavior, while delegating other calls to the base implementation. This approach helps create flexible and modular chat clients that are easy to extend and compose. The following is an example class derived from `DelegatingChatClient` to provide rate limiting functionality, utilizing the : @@ -194,7 +194,7 @@ This example demonstrates [hosted scenario](generic-host.md), where the consumer - - -For example, in the earlier `RateLimitingChatClient` example, the overrides of `CompleteAsync` and `CompleteStreamingAsync` only need to do work before and after delegating to the next client in the pipeline. 
To achieve the same thing without writing a custom class, you can use an overload of `Use` that accepts a delegate that's used for both `CompleteAsync` and `CompleteStreamingAsync`, reducing the boilerplate required: +For example, in the earlier `RateLimitingChatClient` example, the overrides of `GetResponseAsync` and `GetStreamingResponseAsync` only need to do work before and after delegating to the next client in the pipeline. To achieve the same thing without writing a custom class, you can use an overload of `Use` that accepts a delegate that's used for both `GetResponseAsync` and `GetStreamingResponseAsync`, reducing the boilerplate required: :::code language="csharp" source="snippets/ai/ConsoleAI.UseExample/Program.cs"::: @@ -202,7 +202,7 @@ The preceding overload internally uses an `AnonymousDelegatingChatClient`, which :::code language="csharp" source="snippets/ai/ConsoleAI.UseExampleAlt/Program.cs"::: -For scenarios where the developer would like to specify delegating implementations of `CompleteAsync` and `CompleteStreamingAsync` inline, and where it's important to be able to write a different implementation for each in order to handle their unique return types specially, another overload of `Use` exists that accepts a delegate for each. +For scenarios where the developer would like to specify delegating implementations of `GetResponseAsync` and `GetStreamingResponseAsync` inline, and where it's important to be able to write a different implementation for each in order to handle their unique return types specially, another overload of `Use` exists that accepts a delegate for each. 
### Dependency injection diff --git a/docs/core/extensions/snippets/ai/AI.Shared/RateLimitingChatClient.cs b/docs/core/extensions/snippets/ai/AI.Shared/RateLimitingChatClient.cs index e5d3ada7f1f60..bd33cb289cf90 100644 --- a/docs/core/extensions/snippets/ai/AI.Shared/RateLimitingChatClient.cs +++ b/docs/core/extensions/snippets/ai/AI.Shared/RateLimitingChatClient.cs @@ -6,7 +6,7 @@ public sealed class RateLimitingChatClient( IChatClient innerClient, RateLimiter rateLimiter) : DelegatingChatClient(innerClient) { - public override async Task CompleteAsync( + public override async Task GetResponseAsync( IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) @@ -19,11 +19,11 @@ public override async Task CompleteAsync( throw new InvalidOperationException("Unable to acquire lease."); } - return await base.CompleteAsync(chatMessages, options, cancellationToken) + return await base.GetResponseAsync(chatMessages, options, cancellationToken) .ConfigureAwait(false); } - public override async IAsyncEnumerable CompleteStreamingAsync( + public override async IAsyncEnumerable GetStreamingResponseAsync( IList chatMessages, ChatOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -36,7 +36,7 @@ public override async IAsyncEnumerable CompleteSt throw new InvalidOperationException("Unable to acquire lease."); } - await foreach (var update in base.CompleteStreamingAsync(chatMessages, options, cancellationToken) + await foreach (var update in base.GetStreamingResponseAsync(chatMessages, options, cancellationToken) .ConfigureAwait(false)) { yield return update; diff --git a/docs/core/extensions/snippets/ai/AI.Shared/SampleChatClient.cs b/docs/core/extensions/snippets/ai/AI.Shared/SampleChatClient.cs index 99e0fb033df9a..413a4735e7d0d 100644 --- a/docs/core/extensions/snippets/ai/AI.Shared/SampleChatClient.cs +++ b/docs/core/extensions/snippets/ai/AI.Shared/SampleChatClient.cs @@ -5,7 +5,7 @@ public sealed class SampleChatClient(Uri endpoint, string modelId) : IChatClient { public ChatClientMetadata Metadata { get; } = new(nameof(SampleChatClient), endpoint, modelId); - public async Task CompleteAsync( + public async Task GetResponseAsync( IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) @@ -28,7 +28,7 @@ public async Task CompleteAsync( }]); } - public async IAsyncEnumerable CompleteStreamingAsync( + public async IAsyncEnumerable GetStreamingResponseAsync( IList chatMessages, ChatOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.CacheResponses/Program.cs b/docs/core/extensions/snippets/ai/ConsoleAI.CacheResponses/Program.cs index 51096d1df9a95..eac78def1b9e0 100644 --- a/docs/core/extensions/snippets/ai/ConsoleAI.CacheResponses/Program.cs +++ b/docs/core/extensions/snippets/ai/ConsoleAI.CacheResponses/Program.cs @@ -15,7 +15,7 @@ foreach (var prompt in prompts) { - await foreach (var update in client.CompleteStreamingAsync(prompt)) + await foreach (var update in client.GetStreamingResponseAsync(prompt)) { Console.Write(update); } diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.ConsumeClientMiddleware/Program.cs b/docs/core/extensions/snippets/ai/ConsoleAI.ConsumeClientMiddleware/Program.cs index f95efffe26568..082ee7821cdc2 100644 --- a/docs/core/extensions/snippets/ai/ConsoleAI.ConsumeClientMiddleware/Program.cs +++ b/docs/core/extensions/snippets/ai/ConsoleAI.ConsumeClientMiddleware/Program.cs @@ -20,7 +20,7 @@ // Elsewhere in the app var chatClient = app.Services.GetRequiredService(); -Console.WriteLine(await chatClient.CompleteAsync("What is AI?")); +Console.WriteLine(await chatClient.GetResponseAsync("What is AI?")); app.Run(); // diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.CustomClientMiddle/Program.cs b/docs/core/extensions/snippets/ai/ConsoleAI.CustomClientMiddle/Program.cs index dd69572c6c7a2..31b73e10c8f53 100644 --- a/docs/core/extensions/snippets/ai/ConsoleAI.CustomClientMiddle/Program.cs +++ b/docs/core/extensions/snippets/ai/ConsoleAI.CustomClientMiddle/Program.cs @@ -9,4 +9,4 @@ QueueLimit = int.MaxValue })); -await client.CompleteAsync("What color is the sky?"); +await client.GetResponseAsync("What color is the sky?"); diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.DependencyInjection/Program.cs b/docs/core/extensions/snippets/ai/ConsoleAI.DependencyInjection/Program.cs index 930b0b036c74e..aa1ad1cfc4358 
100644 --- a/docs/core/extensions/snippets/ai/ConsoleAI.DependencyInjection/Program.cs +++ b/docs/core/extensions/snippets/ai/ConsoleAI.DependencyInjection/Program.cs @@ -15,6 +15,6 @@ // Elsewhere in the app var chatClient = app.Services.GetRequiredService(); -Console.WriteLine(await chatClient.CompleteAsync("What is AI?")); +Console.WriteLine(await chatClient.GetResponseAsync("What is AI?")); app.Run(); diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.FunctionalityPipelines/Program.cs b/docs/core/extensions/snippets/ai/ConsoleAI.FunctionalityPipelines/Program.cs index 16f563b3689a3..392c16c4d6e60 100644 --- a/docs/core/extensions/snippets/ai/ConsoleAI.FunctionalityPipelines/Program.cs +++ b/docs/core/extensions/snippets/ai/ConsoleAI.FunctionalityPipelines/Program.cs @@ -42,5 +42,5 @@ new ChatMessage(ChatRole.User, "Do I need an umbrella?") ]; - Console.WriteLine(await client.CompleteAsync(history, options)); + Console.WriteLine(await client.GetResponseAsync(history, options)); } diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.CompleteAsyncArgs/ConsoleAI.CompleteAsyncArgs.csproj b/docs/core/extensions/snippets/ai/ConsoleAI.GetResponseAsyncArgs/ConsoleAI.GetResponseAsyncArgs.csproj similarity index 100% rename from docs/core/extensions/snippets/ai/ConsoleAI.CompleteAsyncArgs/ConsoleAI.CompleteAsyncArgs.csproj rename to docs/core/extensions/snippets/ai/ConsoleAI.GetResponseAsyncArgs/ConsoleAI.GetResponseAsyncArgs.csproj diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.CompleteAsyncArgs/Program.cs b/docs/core/extensions/snippets/ai/ConsoleAI.GetResponseAsyncArgs/Program.cs similarity index 83% rename from docs/core/extensions/snippets/ai/ConsoleAI.CompleteAsyncArgs/Program.cs rename to docs/core/extensions/snippets/ai/ConsoleAI.GetResponseAsyncArgs/Program.cs index eda37fef75fbf..b33fe5f1a3d80 100644 --- a/docs/core/extensions/snippets/ai/ConsoleAI.CompleteAsyncArgs/Program.cs +++ 
b/docs/core/extensions/snippets/ai/ConsoleAI.GetResponseAsyncArgs/Program.cs @@ -3,7 +3,7 @@ IChatClient client = new SampleChatClient( new Uri("http://coolsite.ai"), "target-ai-model"); -Console.WriteLine(await client.CompleteAsync( +Console.WriteLine(await client.GetResponseAsync( [ new(ChatRole.System, "You are a helpful AI assistant"), new(ChatRole.User, "What is AI?"), diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.CompleteStreamingAsync/ConsoleAI.CompleteStreamingAsync.csproj b/docs/core/extensions/snippets/ai/ConsoleAI.GetStreamingResponseAsync/ConsoleAI.GetStreamingResponseAsync.csproj similarity index 100% rename from docs/core/extensions/snippets/ai/ConsoleAI.CompleteStreamingAsync/ConsoleAI.CompleteStreamingAsync.csproj rename to docs/core/extensions/snippets/ai/ConsoleAI.GetStreamingResponseAsync/ConsoleAI.GetStreamingResponseAsync.csproj diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.CompleteStreamingAsync/Program.cs b/docs/core/extensions/snippets/ai/ConsoleAI.GetStreamingResponseAsync/Program.cs similarity index 67% rename from docs/core/extensions/snippets/ai/ConsoleAI.CompleteStreamingAsync/Program.cs rename to docs/core/extensions/snippets/ai/ConsoleAI.GetStreamingResponseAsync/Program.cs index a5e32ce3438a0..56a5fb5678a4d 100644 --- a/docs/core/extensions/snippets/ai/ConsoleAI.CompleteStreamingAsync/Program.cs +++ b/docs/core/extensions/snippets/ai/ConsoleAI.GetStreamingResponseAsync/Program.cs @@ -3,7 +3,7 @@ IChatClient client = new SampleChatClient( new Uri("http://coolsite.ai"), "target-ai-model"); -await foreach (var update in client.CompleteStreamingAsync("What is AI?")) +await foreach (var update in client.GetStreamingResponseAsync("What is AI?")) { Console.Write(update); } diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.ProvideOptions/Program.cs b/docs/core/extensions/snippets/ai/ConsoleAI.ProvideOptions/Program.cs index c6ce0bfb7010e..77098afb4b8ba 100644 --- 
a/docs/core/extensions/snippets/ai/ConsoleAI.ProvideOptions/Program.cs +++ b/docs/core/extensions/snippets/ai/ConsoleAI.ProvideOptions/Program.cs @@ -6,8 +6,8 @@ .Build(); // will request "phi3" -Console.WriteLine(await client.CompleteAsync("What is AI?")); +Console.WriteLine(await client.GetResponseAsync("What is AI?")); // will request "llama3.1" -Console.WriteLine(await client.CompleteAsync( +Console.WriteLine(await client.GetResponseAsync( "What is AI?", new() { ModelId = "llama3.1" })); diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.ToolCalling/Program.cs b/docs/core/extensions/snippets/ai/ConsoleAI.ToolCalling/Program.cs index ff8ef0c2ba7c8..a3d9c6247da96 100644 --- a/docs/core/extensions/snippets/ai/ConsoleAI.ToolCalling/Program.cs +++ b/docs/core/extensions/snippets/ai/ConsoleAI.ToolCalling/Program.cs @@ -11,7 +11,7 @@ string GetCurrentWeather() => Random.Shared.NextDouble() > 0.5 .UseFunctionInvocation() .Build(); -var response = client.CompleteStreamingAsync( +var response = client.GetStreamingResponseAsync( "Should I wear a rain coat?", new() { Tools = [AIFunctionFactory.Create(GetCurrentWeather)] }); diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.UseTelemetry/Program.cs b/docs/core/extensions/snippets/ai/ConsoleAI.UseTelemetry/Program.cs index d4c5e2c28e723..beee01a4777a1 100644 --- a/docs/core/extensions/snippets/ai/ConsoleAI.UseTelemetry/Program.cs +++ b/docs/core/extensions/snippets/ai/ConsoleAI.UseTelemetry/Program.cs @@ -17,4 +17,4 @@ configure: static c => c.EnableSensitiveData = true) .Build(); -Console.WriteLine((await client.CompleteAsync("What is AI?")).Message); +Console.WriteLine((await client.GetResponseAsync("What is AI?")).Message); diff --git a/docs/core/extensions/snippets/ai/ConsoleAI/Program.cs b/docs/core/extensions/snippets/ai/ConsoleAI/Program.cs index 258a6d41ac681..42a1ef7b164e5 100644 --- a/docs/core/extensions/snippets/ai/ConsoleAI/Program.cs +++ b/docs/core/extensions/snippets/ai/ConsoleAI/Program.cs @@ -3,6 
+3,6 @@ IChatClient client = new SampleChatClient( new Uri("http://coolsite.ai"), "target-ai-model"); -var response = await client.CompleteAsync("What is AI?"); +var response = await client.GetResponseAsync("What is AI?"); Console.WriteLine(response.Message); From 695d7aee08f3b8481c553c6be49aed5361ec5853 Mon Sep 17 00:00:00 2001 From: Genevieve Warren <24882762+gewarren@users.noreply.github.com> Date: Fri, 21 Feb 2025 10:30:00 -0800 Subject: [PATCH 2/6] update NuGet packages --- docs/ai/ai-extensions.md | 2 +- docs/core/extensions/snippets/ai/AI.Shared/AI.Shared.csproj | 2 +- .../snippets/ai/AI.Shared/RateLimitingChatClient.cs | 4 ++-- .../extensions/snippets/ai/AI.Shared/SampleChatClient.cs | 6 +++--- .../ConsoleAI.CustomClientMiddle.csproj | 2 +- .../ConsoleAI.FunctionalityPipelines.csproj | 2 +- .../ConsoleAI.ProvideOptions.csproj | 2 +- .../ai/ConsoleAI.ToolCalling/ConsoleAI.ToolCalling.csproj | 2 +- docs/core/extensions/snippets/ai/ConsoleAI/ConsoleAI.csproj | 2 +- 9 files changed, 12 insertions(+), 12 deletions(-) diff --git a/docs/ai/ai-extensions.md b/docs/ai/ai-extensions.md index 45a0e8da778a1..d3cc47c5373fc 100644 --- a/docs/ai/ai-extensions.md +++ b/docs/ai/ai-extensions.md @@ -45,7 +45,7 @@ IChatClient client =     new AzureAIInferenceChatClient(...); ``` -Then, regardless of the provider you're using, you can send requests by calling , as follows: +Then, regardless of the provider you're using, you can send requests by calling , as follows: ```csharp var response = await chatClient.GetResponseAsync( diff --git a/docs/core/extensions/snippets/ai/AI.Shared/AI.Shared.csproj b/docs/core/extensions/snippets/ai/AI.Shared/AI.Shared.csproj index 0322057468166..3b7a06b2c109f 100644 --- a/docs/core/extensions/snippets/ai/AI.Shared/AI.Shared.csproj +++ b/docs/core/extensions/snippets/ai/AI.Shared/AI.Shared.csproj @@ -7,7 +7,7 @@ - + diff --git a/docs/core/extensions/snippets/ai/AI.Shared/RateLimitingChatClient.cs 
b/docs/core/extensions/snippets/ai/AI.Shared/RateLimitingChatClient.cs index bd33cb289cf90..2755e8d4cd84f 100644 --- a/docs/core/extensions/snippets/ai/AI.Shared/RateLimitingChatClient.cs +++ b/docs/core/extensions/snippets/ai/AI.Shared/RateLimitingChatClient.cs @@ -6,7 +6,7 @@ public sealed class RateLimitingChatClient( IChatClient innerClient, RateLimiter rateLimiter) : DelegatingChatClient(innerClient) { - public override async Task GetResponseAsync( + public override async Task GetResponseAsync( IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) @@ -23,7 +23,7 @@ public override async Task GetResponseAsync( .ConfigureAwait(false); } - public override async IAsyncEnumerable GetStreamingResponseAsync( + public override async IAsyncEnumerable GetStreamingResponseAsync( IList chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) diff --git a/docs/core/extensions/snippets/ai/AI.Shared/SampleChatClient.cs b/docs/core/extensions/snippets/ai/AI.Shared/SampleChatClient.cs index 413a4735e7d0d..6eb12af104363 100644 --- a/docs/core/extensions/snippets/ai/AI.Shared/SampleChatClient.cs +++ b/docs/core/extensions/snippets/ai/AI.Shared/SampleChatClient.cs @@ -5,7 +5,7 @@ public sealed class SampleChatClient(Uri endpoint, string modelId) : IChatClient { public ChatClientMetadata Metadata { get; } = new(nameof(SampleChatClient), endpoint, modelId); - public async Task GetResponseAsync( + public async Task GetResponseAsync( IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) @@ -28,7 +28,7 @@ public async Task GetResponseAsync( }]); } - public async IAsyncEnumerable GetStreamingResponseAsync( + public async IAsyncEnumerable GetStreamingResponseAsync( IList chatMessages, ChatOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -41,7 +41,7 @@ public async IAsyncEnumerable GetStreamingRespons await Task.Delay(100, cancellationToken); // Yield the next message in the response. - yield return new StreamingChatCompletionUpdate + yield return new ChatResponseUpdate { Role = ChatRole.Assistant, Text = word, diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.CustomClientMiddle/ConsoleAI.CustomClientMiddle.csproj b/docs/core/extensions/snippets/ai/ConsoleAI.CustomClientMiddle/ConsoleAI.CustomClientMiddle.csproj index f4296fe4b5cda..2f67400fed6fe 100644 --- a/docs/core/extensions/snippets/ai/ConsoleAI.CustomClientMiddle/ConsoleAI.CustomClientMiddle.csproj +++ b/docs/core/extensions/snippets/ai/ConsoleAI.CustomClientMiddle/ConsoleAI.CustomClientMiddle.csproj @@ -8,7 +8,7 @@ - + diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.FunctionalityPipelines/ConsoleAI.FunctionalityPipelines.csproj b/docs/core/extensions/snippets/ai/ConsoleAI.FunctionalityPipelines/ConsoleAI.FunctionalityPipelines.csproj index a48697c3b26e9..a2d29c0295a8d 100644 --- a/docs/core/extensions/snippets/ai/ConsoleAI.FunctionalityPipelines/ConsoleAI.FunctionalityPipelines.csproj +++ b/docs/core/extensions/snippets/ai/ConsoleAI.FunctionalityPipelines/ConsoleAI.FunctionalityPipelines.csproj @@ -9,7 +9,7 @@ - + diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.ProvideOptions/ConsoleAI.ProvideOptions.csproj b/docs/core/extensions/snippets/ai/ConsoleAI.ProvideOptions/ConsoleAI.ProvideOptions.csproj index 52b8ab4531c7f..dd7878962bbb6 100644 --- a/docs/core/extensions/snippets/ai/ConsoleAI.ProvideOptions/ConsoleAI.ProvideOptions.csproj +++ b/docs/core/extensions/snippets/ai/ConsoleAI.ProvideOptions/ConsoleAI.ProvideOptions.csproj @@ -8,7 +8,7 @@ - + diff --git a/docs/core/extensions/snippets/ai/ConsoleAI.ToolCalling/ConsoleAI.ToolCalling.csproj b/docs/core/extensions/snippets/ai/ConsoleAI.ToolCalling/ConsoleAI.ToolCalling.csproj 
index 52b8ab4531c7f..dd7878962bbb6 100644 --- a/docs/core/extensions/snippets/ai/ConsoleAI.ToolCalling/ConsoleAI.ToolCalling.csproj +++ b/docs/core/extensions/snippets/ai/ConsoleAI.ToolCalling/ConsoleAI.ToolCalling.csproj @@ -8,7 +8,7 @@ - + diff --git a/docs/core/extensions/snippets/ai/ConsoleAI/ConsoleAI.csproj b/docs/core/extensions/snippets/ai/ConsoleAI/ConsoleAI.csproj index bcec98d0ad009..05947b9ec5925 100644 --- a/docs/core/extensions/snippets/ai/ConsoleAI/ConsoleAI.csproj +++ b/docs/core/extensions/snippets/ai/ConsoleAI/ConsoleAI.csproj @@ -8,7 +8,7 @@ - + From b1e65f7788d30ac3b3145e879adc02f9da55c324 Mon Sep 17 00:00:00 2001 From: Genevieve Warren <24882762+gewarren@users.noreply.github.com> Date: Fri, 21 Feb 2025 10:58:56 -0800 Subject: [PATCH 3/6] fix snippets 5000 warnings --- .../snippets/content-filtering/AIContentFiltering.csproj | 4 ++-- docs/ai/how-to/snippets/content-filtering/Program.cs | 2 +- .../how-to/snippets/hosted-app-auth/hosted-app-auth.csproj | 6 +++--- .../snippets/build-chat-app/azure-openai/ChatAppAI.csproj | 2 +- .../snippets/function-calling/azure-openai/Program.cs | 2 +- docs/ai/quickstarts/snippets/local-ai/ollama.csproj | 2 +- .../azure-openai/ExtensionsAzureOpenAI.csproj | 2 +- .../snippets/prompt-completion/azure-openai/Program.cs | 2 +- 8 files changed, 11 insertions(+), 11 deletions(-) diff --git a/docs/ai/how-to/snippets/content-filtering/AIContentFiltering.csproj b/docs/ai/how-to/snippets/content-filtering/AIContentFiltering.csproj index bd79a4df023fa..7491520c71161 100644 --- a/docs/ai/how-to/snippets/content-filtering/AIContentFiltering.csproj +++ b/docs/ai/how-to/snippets/content-filtering/AIContentFiltering.csproj @@ -10,8 +10,8 @@ - - + + diff --git a/docs/ai/how-to/snippets/content-filtering/Program.cs b/docs/ai/how-to/snippets/content-filtering/Program.cs index ab7ccd23d7b62..12542d600e70b 100644 --- a/docs/ai/how-to/snippets/content-filtering/Program.cs +++ b/docs/ai/how-to/snippets/content-filtering/Program.cs @@ 
-9,7 +9,7 @@ try { - ChatCompletion completion = await client.GetResponseAsync("YOUR_PROMPT"); + ChatResponse completion = await client.GetResponseAsync("YOUR_PROMPT"); Console.WriteLine(completion.Message); } diff --git a/docs/ai/how-to/snippets/hosted-app-auth/hosted-app-auth.csproj b/docs/ai/how-to/snippets/hosted-app-auth/hosted-app-auth.csproj index 47419885dd408..ce616f605ade8 100644 --- a/docs/ai/how-to/snippets/hosted-app-auth/hosted-app-auth.csproj +++ b/docs/ai/how-to/snippets/hosted-app-auth/hosted-app-auth.csproj @@ -11,9 +11,9 @@ - - - + + + diff --git a/docs/ai/quickstarts/snippets/build-chat-app/azure-openai/ChatAppAI.csproj b/docs/ai/quickstarts/snippets/build-chat-app/azure-openai/ChatAppAI.csproj index a955030574851..5638d8c175ccd 100644 --- a/docs/ai/quickstarts/snippets/build-chat-app/azure-openai/ChatAppAI.csproj +++ b/docs/ai/quickstarts/snippets/build-chat-app/azure-openai/ChatAppAI.csproj @@ -11,7 +11,7 @@ - + diff --git a/docs/ai/quickstarts/snippets/function-calling/azure-openai/Program.cs b/docs/ai/quickstarts/snippets/function-calling/azure-openai/Program.cs index 26800ef70e4c8..5d9dee4294635 100644 --- a/docs/ai/quickstarts/snippets/function-calling/azure-openai/Program.cs +++ b/docs/ai/quickstarts/snippets/function-calling/azure-openai/Program.cs @@ -37,6 +37,6 @@ You are a hiking enthusiast who helps people discover fun hikes in their area. Y "I live in Montreal and I'm looking for a moderate intensity hike. What's the current weather like? 
")); Console.WriteLine($"{chatHistory.Last().Role} >>> {chatHistory.Last()}"); -var response = await client.GetResponseAsync(chatHistory, chatOptions); +ChatResponse response = await client.GetResponseAsync(chatHistory, chatOptions); chatHistory.Add(new ChatMessage(ChatRole.Assistant, response.Message.Contents)); Console.WriteLine($"{chatHistory.Last().Role} >>> {chatHistory.Last()}"); diff --git a/docs/ai/quickstarts/snippets/local-ai/ollama.csproj b/docs/ai/quickstarts/snippets/local-ai/ollama.csproj index 347a4259f003d..27429efffe7d8 100644 --- a/docs/ai/quickstarts/snippets/local-ai/ollama.csproj +++ b/docs/ai/quickstarts/snippets/local-ai/ollama.csproj @@ -8,7 +8,7 @@ - + diff --git a/docs/ai/quickstarts/snippets/prompt-completion/azure-openai/ExtensionsAzureOpenAI.csproj b/docs/ai/quickstarts/snippets/prompt-completion/azure-openai/ExtensionsAzureOpenAI.csproj index 4b2e15935d47a..c17559bfd06dd 100644 --- a/docs/ai/quickstarts/snippets/prompt-completion/azure-openai/ExtensionsAzureOpenAI.csproj +++ b/docs/ai/quickstarts/snippets/prompt-completion/azure-openai/ExtensionsAzureOpenAI.csproj @@ -10,7 +10,7 @@ - + diff --git a/docs/ai/quickstarts/snippets/prompt-completion/azure-openai/Program.cs b/docs/ai/quickstarts/snippets/prompt-completion/azure-openai/Program.cs index 9d3050cfc333d..907980bc69520 100644 --- a/docs/ai/quickstarts/snippets/prompt-completion/azure-openai/Program.cs +++ b/docs/ai/quickstarts/snippets/prompt-completion/azure-openai/Program.cs @@ -20,5 +20,5 @@ Console.WriteLine($"user >>> {prompt}"); // Submit the prompt and print out the response -ChatCompletion response = await client.GetResponseAsync(prompt, new ChatOptions { MaxOutputTokens = 400 }); +ChatResponse response = await client.GetResponseAsync(prompt, new ChatOptions { MaxOutputTokens = 400 }); Console.WriteLine($"assistant >>> {response}"); From 6edd2fb881ca10d3a05fd3cafdc91f71104aa394 Mon Sep 17 00:00:00 2001 From: Genevieve Warren <24882762+gewarren@users.noreply.github.com> Date: Fri, 21
Feb 2025 11:05:46 -0800 Subject: [PATCH 4/6] fix xrefs --- docs/ai/ai-extensions.md | 2 +- docs/core/extensions/artificial-intelligence.md | 8 +++----- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/docs/ai/ai-extensions.md b/docs/ai/ai-extensions.md index d3cc47c5373fc..45a0e8da778a1 100644 --- a/docs/ai/ai-extensions.md +++ b/docs/ai/ai-extensions.md @@ -45,7 +45,7 @@ IChatClient client =     new AzureAIInferenceChatClient(...); ``` -Then, regardless of the provider you're using, you can send requests by calling , as follows: +Then, regardless of the provider you're using, you can send requests by calling , as follows: ```csharp var response = await chatClient.GetResponseAsync( diff --git a/docs/core/extensions/artificial-intelligence.md b/docs/core/extensions/artificial-intelligence.md index b2df9aac9e38a..0638a0aaeb69d 100644 --- a/docs/core/extensions/artificial-intelligence.md +++ b/docs/core/extensions/artificial-intelligence.md @@ -84,17 +84,15 @@ Each message in the history is represented by a property a new . There are various [types of content](xref:Microsoft.Extensions.AI.AIContent) that can be represented, such as a simple string or a more complex object that represents a multi-modal message with text, images, and audio: -- - - - -- - - ### Request chat completion with streaming -The inputs to are identical to those of `GetResponseAsync`. However, rather than returning the complete response as part of a object, the method returns an where `T` is , providing a stream of updates that collectively form the single response. +The inputs to are identical to those of `GetResponseAsync`. However, rather than returning the complete response as part of a object, the method returns an where `T` is , providing a stream of updates that collectively form the single response. 
:::code language="csharp" source="snippets/ai/ConsoleAI.GetStreamingResponseAsync/Program.cs"::: @@ -189,10 +187,10 @@ The consumer can then easily use this in their pipeline, for example: This example demonstrates [hosted scenario](generic-host.md), where the consumer relies on [dependency injection](dependency-injection.md) to provide the `RateLimiter` instance. The preceding extension methods demonstrate using a `Use` method on . The `ChatClientBuilder` also provides overloads that make it easier to write such delegating handlers. -- +- - - -- +- For example, in the earlier `RateLimitingChatClient` example, the overrides of `GetResponseAsync` and `GetStreamingResponseAsync` only need to do work before and after delegating to the next client in the pipeline. To achieve the same thing without writing a custom class, you can use an overload of `Use` that accepts a delegate that's used for both `GetResponseAsync` and `GetStreamingResponseAsync`, reducing the boilerplate required: From 4fe61bb456c754ee7ab7c85e8f13a7af77857f15 Mon Sep 17 00:00:00 2001 From: Genevieve Warren <24882762+gewarren@users.noreply.github.com> Date: Fri, 21 Feb 2025 11:22:50 -0800 Subject: [PATCH 5/6] fix more errors --- .../openai/ExtensionsOpenAI.csproj | 2 +- .../openai/FunctionCallingAI.csproj | 4 ++-- .../function-calling/openai/Program.cs | 23 +++++++++---------- .../extensions/artificial-intelligence.md | 4 ++-- 4 files changed, 16 insertions(+), 17 deletions(-) diff --git a/docs/ai/quickstarts/snippets/build-chat-app/openai/ExtensionsOpenAI.csproj b/docs/ai/quickstarts/snippets/build-chat-app/openai/ExtensionsOpenAI.csproj index d8716dcf01257..ce77307cc2764 100644 --- a/docs/ai/quickstarts/snippets/build-chat-app/openai/ExtensionsOpenAI.csproj +++ b/docs/ai/quickstarts/snippets/build-chat-app/openai/ExtensionsOpenAI.csproj @@ -8,7 +8,7 @@ - + diff --git a/docs/ai/quickstarts/snippets/function-calling/openai/FunctionCallingAI.csproj 
b/docs/ai/quickstarts/snippets/function-calling/openai/FunctionCallingAI.csproj index 25478e6270ca6..001dab80d6808 100644 --- a/docs/ai/quickstarts/snippets/function-calling/openai/FunctionCallingAI.csproj +++ b/docs/ai/quickstarts/snippets/function-calling/openai/FunctionCallingAI.csproj @@ -8,8 +8,8 @@ - - + + diff --git a/docs/ai/quickstarts/snippets/function-calling/openai/Program.cs b/docs/ai/quickstarts/snippets/function-calling/openai/Program.cs index b276180ffb3ad..dfee28be36956 100644 --- a/docs/ai/quickstarts/snippets/function-calling/openai/Program.cs +++ b/docs/ai/quickstarts/snippets/function-calling/openai/Program.cs @@ -2,18 +2,17 @@ using Microsoft.Extensions.Configuration; using OpenAI; -var config = new ConfigurationBuilder().AddUserSecrets().Build(); -string model = config["ModelName"]; -string key = config["OpenAIKey"]; +IConfigurationRoot config = new ConfigurationBuilder().AddUserSecrets().Build(); +string? model = config["ModelName"]; +string? key = config["OpenAIKey"]; IChatClient client = - new ChatClientBuilder() - .UseFunctionInvocation() - .Use( - new OpenAIClient(key) - .AsChatClient(model)); + new ChatClientBuilder(new OpenAIClient(key).AsChatClient(model ?? "gpt-4o")) + .UseFunctionInvocation() + .Build(); -// Add a new plugin with a local .NET function that should be available to the AI model +// Add a new plugin with a local .NET function +// that should be available to the AI model. var chatOptions = new ChatOptions { Tools = [AIFunctionFactory.Create((string location, string unit) => @@ -25,16 +24,16 @@ "Get the current weather in a given location")] }; -// System prompt to provide context +// System prompt to provide context. List chatHistory = [new(ChatRole.System, """ You are a hiking enthusiast who helps people discover fun hikes in their area. You are upbeat and friendly. """)]; -// Weather conversation relevant to the registered function +// Weather conversation relevant to the registered function. 
chatHistory.Add(new ChatMessage(ChatRole.User, "I live in Montreal and I'm looking for a moderate intensity hike. What's the current weather like? ")); Console.WriteLine($"{chatHistory.Last().Role} >>> {chatHistory.Last()}"); -var response = await client.GetResponseAsync(chatHistory, chatOptions); +ChatResponse response = await client.GetResponseAsync(chatHistory, chatOptions); chatHistory.Add(new ChatMessage(ChatRole.Assistant, response.Message.Contents)); Console.WriteLine($"{chatHistory.Last().Role} >>> {chatHistory.Last()}"); diff --git a/docs/core/extensions/artificial-intelligence.md b/docs/core/extensions/artificial-intelligence.md index 0638a0aaeb69d..b550e7ef7422e 100644 --- a/docs/core/extensions/artificial-intelligence.md +++ b/docs/core/extensions/artificial-intelligence.md @@ -92,7 +92,7 @@ Each chat message is instantiated, assigning to its are identical to those of `GetResponseAsync`. However, rather than returning the complete response as part of a object, the method returns an where `T` is , providing a stream of updates that collectively form the single response. +The inputs to are identical to those of `GetResponseAsync`. However, rather than returning the complete response as part of a object, the method returns an where `T` is , providing a stream of updates that collectively form the single response. :::code language="csharp" source="snippets/ai/ConsoleAI.GetStreamingResponseAsync/Program.cs"::: @@ -190,7 +190,7 @@ This example demonstrates [hosted scenario](generic-host.md), where the consumer - - - -- +- For example, in the earlier `RateLimitingChatClient` example, the overrides of `GetResponseAsync` and `GetStreamingResponseAsync` only need to do work before and after delegating to the next client in the pipeline. 
To achieve the same thing without writing a custom class, you can use an overload of `Use` that accepts a delegate that's used for both `GetResponseAsync` and `GetStreamingResponseAsync`, reducing the boilerplate required: From f9f5b6567053142cd6c44234df61a43296fa944b Mon Sep 17 00:00:00 2001 From: Genevieve Warren <24882762+gewarren@users.noreply.github.com> Date: Fri, 21 Feb 2025 11:32:22 -0800 Subject: [PATCH 6/6] fix more errors --- .../prompt-completion/openai/ExtensionsOpenAI.csproj | 2 +- .../snippets/prompt-completion/openai/Program.cs | 8 ++++---- docs/core/extensions/artificial-intelligence.md | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/ai/quickstarts/snippets/prompt-completion/openai/ExtensionsOpenAI.csproj b/docs/ai/quickstarts/snippets/prompt-completion/openai/ExtensionsOpenAI.csproj index d8716dcf01257..ce77307cc2764 100644 --- a/docs/ai/quickstarts/snippets/prompt-completion/openai/ExtensionsOpenAI.csproj +++ b/docs/ai/quickstarts/snippets/prompt-completion/openai/ExtensionsOpenAI.csproj @@ -8,7 +8,7 @@ - + diff --git a/docs/ai/quickstarts/snippets/prompt-completion/openai/Program.cs b/docs/ai/quickstarts/snippets/prompt-completion/openai/Program.cs index 88b789059e485..ebc4a3a660c83 100644 --- a/docs/ai/quickstarts/snippets/prompt-completion/openai/Program.cs +++ b/docs/ai/quickstarts/snippets/prompt-completion/openai/Program.cs @@ -2,9 +2,9 @@ using Microsoft.Extensions.Configuration; using OpenAI; -var config = new ConfigurationBuilder().AddUserSecrets().Build(); -string model = config["ModelName"]; -string key = config["OpenAIKey"]; +IConfigurationRoot config = new ConfigurationBuilder().AddUserSecrets().Build(); +string? model = config["ModelName"]; +string? 
key = config["OpenAIKey"]; // Create the IChatClient IChatClient client = @@ -19,5 +19,5 @@ Console.WriteLine($"user >>> {prompt}"); // Submit the prompt and print out the response -ChatCompletion response = await client.GetResponseAsync(prompt, new ChatOptions { MaxOutputTokens = 400 }); +ChatResponse response = await client.GetResponseAsync(prompt, new ChatOptions { MaxOutputTokens = 400 }); Console.WriteLine($"assistant >>> {response}"); diff --git a/docs/core/extensions/artificial-intelligence.md b/docs/core/extensions/artificial-intelligence.md index b550e7ef7422e..8ad669af2a732 100644 --- a/docs/core/extensions/artificial-intelligence.md +++ b/docs/core/extensions/artificial-intelligence.md @@ -92,7 +92,7 @@ Each chat message is instantiated, assigning to its are identical to those of `GetResponseAsync`. However, rather than returning the complete response as part of a object, the method returns an where `T` is , providing a stream of updates that collectively form the single response. +The inputs to are identical to those of `GetResponseAsync`. However, rather than returning the complete response as part of a object, the method returns an where `T` is , providing a stream of updates that collectively form the single response. :::code language="csharp" source="snippets/ai/ConsoleAI.GetStreamingResponseAsync/Program.cs":::