From 7a8bc8bebc831cf95707220e38a56b14297adf33 Mon Sep 17 00:00:00 2001
From: Jose Arriaga Maldonado <45773732+joseharriaga@users.noreply.github.com>
Date: Sat, 21 Sep 2024 23:39:33 -0700
Subject: [PATCH] Add serialization/deserialization example with chat completions (#124)

Added an example illustrating how to serialize and deserialize a list of
`ChatMessage`. We are also working to make this simpler in the future.
This change also refactors a few examples and tests.
---
 .github/ISSUE_TEMPLATE/bug_report.yaml        |  34 +--
 .github/ISSUE_TEMPLATE/feature_request.yaml   |  16 +-
 README.md                                     |  30 +--
 examples/Chat/Example01_SimpleChat.cs         |   2 +-
 examples/Chat/Example01_SimpleChatAsync.cs    |   2 +-
 .../Chat/Example02_SimpleChatStreaming.cs     |   2 +-
 .../Example02_SimpleChatStreamingAsync.cs     |   2 +-
 examples/Chat/Example03_FunctionCalling.cs    |   4 +-
 .../Chat/Example03_FunctionCallingAsync.cs    |   4 +-
 .../Example04_FunctionCallingStreaming.cs     |   4 +-
 ...Example04_FunctionCallingStreamingAsync.cs |   4 +-
 ..._ChatWithVision.cs => Example05_Vision.cs} |   2 +-
 ...isionAsync.cs => Example05_VisionAsync.cs} |   2 +-
 ...puts.cs => Example06_StructuredOutputs.cs} |  11 +-
 ...cs => Example06_StructuredOutputsAsync.cs} |  11 +-
 ...tProtocol.cs => Example07_ChatProtocol.cs} |   2 +-
 ...sync.cs => Example07_ChatProtocolAsync.cs} |   2 +-
 examples/Chat/Example08_ChatSerialization.cs  | 100 +++++++++
 .../Chat/Example08_ChatSerializationAsync.cs  |  66 ++++++
 ...ocol.cs => Example04_EmbeddingProtocol.cs} |   2 +-
 ...cs => Example04_EmbeddingProtocolAsync.cs} |   2 +-
 tests/Chat/ChatTests.cs                       | 209 +++++++++++++-----
 22 files changed, 384 insertions(+), 129 deletions(-)
 rename examples/Chat/{Example05_ChatWithVision.cs => Example05_Vision.cs} (95%)
 rename examples/Chat/{Example05_ChatWithVisionAsync.cs => Example05_VisionAsync.cs} (94%)
 rename examples/Chat/{Example07_StructuredOutputs.cs => Example06_StructuredOutputs.cs} (88%)
 rename examples/Chat/{Example07_StructuredOutputsAsync.cs => Example06_StructuredOutputsAsync.cs} (90%)
 rename examples/Chat/{Example06_SimpleChatProtocol.cs => Example07_ChatProtocol.cs} (96%)
 rename examples/Chat/{Example06_SimpleChatProtocolAsync.cs => Example07_ChatProtocolAsync.cs} (95%)
 create mode 100644 examples/Chat/Example08_ChatSerialization.cs
 create mode 100644 examples/Chat/Example08_ChatSerializationAsync.cs
 rename examples/Embeddings/{Example04_SimpleEmbeddingProtocol.cs => Example04_EmbeddingProtocol.cs} (96%)
 rename examples/Embeddings/{Example04_SimpleEmbeddingProtocolAsync.cs => Example04_EmbeddingProtocolAsync.cs} (96%)

diff --git a/.github/ISSUE_TEMPLATE/bug_report.yaml b/.github/ISSUE_TEMPLATE/bug_report.yaml
index 6cc70520..b8989f62 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.yaml
+++ b/.github/ISSUE_TEMPLATE/bug_report.yaml
@@ -6,30 +6,16 @@ body:
     attributes:
       value: |
         Thanks for taking the time to fill out this bug report!
-  - type: checkboxes
-    id: non_python
+  - type: dropdown
+    id: service-kind
     attributes:
-      label: Confirm this is not an issue with the OpenAI Python Library
-      description: Issues with the OpenAI Python Library should be reported in our [OpenAI Python SDK repo](https://github.com/openai/openai-python/issues)
+      label: Service
+      description: Select whether you are using OpenAI or Azure OpenAI
       options:
-        - label: This is not an issue with the OpenAI Python Library
-          required: true
-  - type: checkboxes
-    id: non_api
-    attributes:
-      label: Confirm this is not an issue with the underlying OpenAI API
-      description: Issues with the underlying OpenAI API should be reported in our [Developer Community](https://community.openai.com/c/api/7)
-      options:
-        - label: This is not an issue with the OpenAI API
-          required: true
-  - type: checkboxes
-    id: non_azure
-    attributes:
-      label: Confirm this is not an issue with Azure OpenAI
-      description: Issues related to Azure OpenAI should be reported in the [Azure SDK repo](https://github.com/Azure/azure-sdk-for-net/issues)
-      options:
-        - label: This is not an issue with Azure OpenAI
-          required: true
+        - OpenAI
+        - Azure OpenAI
+    validations:
+      required: true
   - type: textarea
     id: what-happened
     attributes:
@@ -41,7 +27,7 @@ body:
   - type: textarea
     id: repro-steps
     attributes:
-      label: To Reproduce
+      label: Steps to reproduce
       description: Steps to reproduce the behavior.
       placeholder: |
         1. Fetch a '...'
@@ -68,13 +54,11 @@ body:
     id: language-version
     attributes:
       label: .NET version
-      placeholder:
     validations:
       required: true
   - type: input
     id: lib-version
     attributes:
       label: Library version
-      placeholder:
     validations:
       required: true
diff --git a/.github/ISSUE_TEMPLATE/feature_request.yaml b/.github/ISSUE_TEMPLATE/feature_request.yaml
index cdb9812e..9e10b261 100644
--- a/.github/ISSUE_TEMPLATE/feature_request.yaml
+++ b/.github/ISSUE_TEMPLATE/feature_request.yaml
@@ -9,23 +9,15 @@ body:
   - type: checkboxes
     id: non_api
     attributes:
-      label: Confirm this is not a feature request for the underlying OpenAI API.
-      description: Feature requests for the underlying OpenAI API should be reported in our [Developer Community](https://community.openai.com/c/api/7)
+      label: Confirm this is a feature request for the .NET library and not the underlying OpenAI API
+      description: Feature requests for the underlying OpenAI API should be reported on our [Developer Community](https://community.openai.com/c/api/7)
       options:
-        - label: This is not a feature request for the underlying OpenAI API
-          required: true
-  - type: checkboxes
-    id: non_azure
-    attributes:
-      label: Confirm this is not a feature request for Azure OpenAI.
-      description: Feature requests for Azure OpenAI should be reported reported in the [Azure SDK repo](https://github.com/Azure/azure-sdk-for-net/issues)
-      options:
-        - label: This is not a feature request for Azure OpenAI
+        - label: This is a feature request for the .NET library
           required: true
   - type: textarea
     id: feature
     attributes:
-      label: Describe the feature or improvement you're requesting
+      label: Describe the feature or improvement you are requesting
       description: A clear and concise description of what you want to happen.
     validations:
       required: true
diff --git a/README.md b/README.md
index 7185e346..d639fe37 100644
--- a/README.md
+++ b/README.md
@@ -17,7 +17,7 @@ It is generated from our [OpenAPI specification](https://github.com/openai/opena
 - [Using the `OpenAIClient` class](#using-the-openaiclient-class)
 - [How to use chat completions with streaming](#how-to-use-chat-completions-with-streaming)
 - [How to use chat completions with tools and function calling](#how-to-use-chat-completions-with-tools-and-function-calling)
-- [How to use structured outputs](#how-to-use-structured-outputs)
+- [How to use chat completions with structured outputs](#how-to-use-chat-completions-with-structured-outputs)
 - [How to generate text embeddings](#how-to-generate-text-embeddings)
 - [How to generate images](#how-to-generate-images)
 - [How to transcribe audio](#how-to-transcribe-audio)
@@ -65,19 +65,19 @@ While you can pass your API key directly as a string, it is highly recommended t
 The library is organized into several namespaces corresponding to OpenAI feature areas. Each namespace contains a corresponding client class.
 
-| Namespace                     | Client class                 | Notes               |
-| ------------------------------|------------------------------|---------------------|
-| `OpenAI.Assistants`           | `AssistantClient`            | \[Experimental\]    |
-| `OpenAI.Audio`                | `AudioClient`                |                     |
-| `OpenAI.Batch`                | `BatchClient`                |                     |
-| `OpenAI.Chat`                 | `ChatClient`                 |                     |
-| `OpenAI.Embeddings`           | `EmbeddingClient`            |                     |
-| `OpenAI.FineTuning`           | `FineTuningClient`           |                     |
-| `OpenAI.Files`                | `FileClient`                 |                     |
-| `OpenAI.Images`               | `ImageClient`                |                     |
-| `OpenAI.Models`               | `ModelClient`                |                     |
-| `OpenAI.Moderations`          | `ModerationClient`           |                     |
-| `OpenAI.VectorStores`         | `VectorStoreClient`          | \[Experimental\]    |
+| Namespace                     | Client class                 | Notes                                                             |
+| ------------------------------|------------------------------|-------------------------------------------------------------------|
+| `OpenAI.Assistants`           | `AssistantClient`            | ![Experimental](https://img.shields.io/badge/experimental-purple) |
+| `OpenAI.Audio`                | `AudioClient`                |                                                                   |
+| `OpenAI.Batch`                | `BatchClient`                | ![Experimental](https://img.shields.io/badge/experimental-purple) |
+| `OpenAI.Chat`                 | `ChatClient`                 |                                                                   |
+| `OpenAI.Embeddings`           | `EmbeddingClient`            |                                                                   |
+| `OpenAI.FineTuning`           | `FineTuningClient`           | ![Experimental](https://img.shields.io/badge/experimental-purple) |
+| `OpenAI.Files`                | `FileClient`                 |                                                                   |
+| `OpenAI.Images`               | `ImageClient`                |                                                                   |
+| `OpenAI.Models`               | `ModelClient`                |                                                                   |
+| `OpenAI.Moderations`          | `ModerationClient`           |                                                                   |
+| `OpenAI.VectorStores`         | `VectorStoreClient`          | ![Experimental](https://img.shields.io/badge/experimental-purple) |
 
 ### Using the async API
 
@@ -297,7 +297,7 @@ do
 }
 while (requiresAction);
 ```
 
-## How to use structured outputs
+## How to use chat completions with structured outputs
 
 Beginning with the `gpt-4o-mini`, `gpt-4o-mini-2024-07-18`, and `gpt-4o-2024-08-06` model snapshots, structured outputs are available for both top-level response content and tool calls in the chat completion and assistants APIs.
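> Note: the renamed README section above documents the structured outputs pattern that the `Example06_StructuredOutputs` refactor later in this patch relies on. A minimal sketch of that call shape follows; the schema name (`final_answer`) and schema body here are illustrative placeholders, not the example's actual schema (which is elided from the hunk below).

```csharp
using System;
using OpenAI.Chat;

ChatClient client = new("gpt-4o-mini", Environment.GetEnvironmentVariable("OPENAI_API_KEY"));

ChatCompletionOptions options = new()
{
    // With jsonSchemaIsStrict: true, the model's output must conform to the schema.
    ResponseFormat = ChatResponseFormat.CreateJsonSchemaFormat(
        jsonSchemaFormatName: "final_answer",
        jsonSchema: BinaryData.FromBytes("""
            {
                "type": "object",
                "properties": {
                    "answer": { "type": "string" }
                },
                "required": ["answer"],
                "additionalProperties": false
            }
            """u8.ToArray()),
        jsonSchemaIsStrict: true)
};

// The response content is a JSON document matching the schema above.
ChatCompletion completion = client.CompleteChat(
    [new UserChatMessage("How can I solve 8x + 7 = -23?")],
    options);

Console.WriteLine(completion.Content[0].Text);
```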
diff --git a/examples/Chat/Example01_SimpleChat.cs b/examples/Chat/Example01_SimpleChat.cs
index 3715532d..b93dc0d9 100644
--- a/examples/Chat/Example01_SimpleChat.cs
+++ b/examples/Chat/Example01_SimpleChat.cs
@@ -9,7 +9,7 @@ public partial class ChatExamples
     [Test]
     public void Example01_SimpleChat()
     {
-        ChatClient client = new(model: "gpt-4o", Environment.GetEnvironmentVariable("OPENAI_API_KEY"));
+        ChatClient client = new(model: "gpt-4o", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY"));
 
         ChatCompletion completion = client.CompleteChat("Say 'this is a test.'");
 
diff --git a/examples/Chat/Example01_SimpleChatAsync.cs b/examples/Chat/Example01_SimpleChatAsync.cs
index 14b61d1b..4325eae1 100644
--- a/examples/Chat/Example01_SimpleChatAsync.cs
+++ b/examples/Chat/Example01_SimpleChatAsync.cs
@@ -10,7 +10,7 @@ public partial class ChatExamples
     [Test]
     public async Task Example01_SimpleChatAsync()
     {
-        ChatClient client = new(model: "gpt-4o", Environment.GetEnvironmentVariable("OPENAI_API_KEY"));
+        ChatClient client = new(model: "gpt-4o", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY"));
 
         ChatCompletion completion = await client.CompleteChatAsync("Say 'this is a test.'");
 
diff --git a/examples/Chat/Example02_SimpleChatStreaming.cs b/examples/Chat/Example02_SimpleChatStreaming.cs
index 50b8938f..17930436 100644
--- a/examples/Chat/Example02_SimpleChatStreaming.cs
+++ b/examples/Chat/Example02_SimpleChatStreaming.cs
@@ -10,7 +10,7 @@ public partial class ChatExamples
     [Test]
     public void Example02_SimpleChatStreaming()
     {
-        ChatClient client = new(model: "gpt-4o", Environment.GetEnvironmentVariable("OPENAI_API_KEY"));
+        ChatClient client = new(model: "gpt-4o", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY"));
 
         CollectionResult<StreamingChatCompletionUpdate> updates = client.CompleteChatStreaming("Say 'this is a test.'");
 
diff --git a/examples/Chat/Example02_SimpleChatStreamingAsync.cs b/examples/Chat/Example02_SimpleChatStreamingAsync.cs
index c22bb4d8..45cb5303 100644
--- a/examples/Chat/Example02_SimpleChatStreamingAsync.cs
+++ b/examples/Chat/Example02_SimpleChatStreamingAsync.cs
@@ -11,7 +11,7 @@ public partial class ChatExamples
     [Test]
     public async Task Example02_SimpleChatStreamingAsync()
     {
-        ChatClient client = new(model: "gpt-4o", Environment.GetEnvironmentVariable("OPENAI_API_KEY"));
+        ChatClient client = new(model: "gpt-4o", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY"));
 
         AsyncCollectionResult<StreamingChatCompletionUpdate> updates = client.CompleteChatStreamingAsync("Say 'this is a test.'");
 
diff --git a/examples/Chat/Example03_FunctionCalling.cs b/examples/Chat/Example03_FunctionCalling.cs
index 9f725a41..cd32b666 100644
--- a/examples/Chat/Example03_FunctionCalling.cs
+++ b/examples/Chat/Example03_FunctionCalling.cs
@@ -151,9 +151,9 @@ public void Example03_FunctionCalling()
         #endregion
 
         #region
-        foreach (ChatMessage requestMessage in messages)
+        foreach (ChatMessage message in messages)
         {
-            switch (requestMessage)
+            switch (message)
             {
                 case SystemChatMessage systemMessage:
                     Console.WriteLine($"[SYSTEM]:");
diff --git a/examples/Chat/Example03_FunctionCallingAsync.cs b/examples/Chat/Example03_FunctionCallingAsync.cs
index e66464be..3be52f9b 100644
--- a/examples/Chat/Example03_FunctionCallingAsync.cs
+++ b/examples/Chat/Example03_FunctionCallingAsync.cs
@@ -111,9 +111,9 @@ public async Task Example03_FunctionCallingAsync()
         #endregion
 
         #region
-        foreach (ChatMessage requestMessage in messages)
+        foreach (ChatMessage message in messages)
         {
-            switch (requestMessage)
+            switch (message)
             {
                 case SystemChatMessage systemMessage:
                    Console.WriteLine($"[SYSTEM]:");
diff --git a/examples/Chat/Example04_FunctionCallingStreaming.cs b/examples/Chat/Example04_FunctionCallingStreaming.cs
index 52500d91..a7fd83f3 100644
--- a/examples/Chat/Example04_FunctionCallingStreaming.cs
+++ b/examples/Chat/Example04_FunctionCallingStreaming.cs
@@ -172,9 +172,9 @@ StringBuilder argumentsBuilder
         #endregion
 
         #region
-        foreach (ChatMessage requestMessage in messages)
+        foreach (ChatMessage message in messages)
         {
-            switch (requestMessage)
+            switch (message)
             {
                 case SystemChatMessage systemMessage:
                     Console.WriteLine($"[SYSTEM]:");
diff --git a/examples/Chat/Example04_FunctionCallingStreamingAsync.cs b/examples/Chat/Example04_FunctionCallingStreamingAsync.cs
index 32f69e51..e05e7054 100644
--- a/examples/Chat/Example04_FunctionCallingStreamingAsync.cs
+++ b/examples/Chat/Example04_FunctionCallingStreamingAsync.cs
@@ -173,9 +173,9 @@ StringBuilder argumentsBuilder
         #endregion
 
         #region
-        foreach (ChatMessage requestMessage in messages)
+        foreach (ChatMessage message in messages)
         {
-            switch (requestMessage)
+            switch (message)
             {
                 case SystemChatMessage systemMessage:
                     Console.WriteLine($"[SYSTEM]:");
diff --git a/examples/Chat/Example05_ChatWithVision.cs b/examples/Chat/Example05_Vision.cs
similarity index 95%
rename from examples/Chat/Example05_ChatWithVision.cs
rename to examples/Chat/Example05_Vision.cs
index dd572454..b0da61de 100644
--- a/examples/Chat/Example05_ChatWithVision.cs
+++ b/examples/Chat/Example05_Vision.cs
@@ -9,7 +9,7 @@ namespace OpenAI.Examples;
 public partial class ChatExamples
 {
     [Test]
-    public void Example05_ChatWithVision()
+    public void Example05_Vision()
     {
         ChatClient client = new("gpt-4o", Environment.GetEnvironmentVariable("OPENAI_API_KEY"));
 
diff --git a/examples/Chat/Example05_ChatWithVisionAsync.cs b/examples/Chat/Example05_VisionAsync.cs
similarity index 94%
rename from examples/Chat/Example05_ChatWithVisionAsync.cs
rename to examples/Chat/Example05_VisionAsync.cs
index d18cc7b7..7ae0131a 100644
--- a/examples/Chat/Example05_ChatWithVisionAsync.cs
+++ b/examples/Chat/Example05_VisionAsync.cs
@@ -10,7 +10,7 @@ namespace OpenAI.Examples;
 public partial class ChatExamples
 {
     [Test]
-    public async Task Example05_ChatWithVisionAsync()
+    public async Task Example05_VisionAsync()
     {
         ChatClient client = new("gpt-4o", Environment.GetEnvironmentVariable("OPENAI_API_KEY"));
 
diff --git a/examples/Chat/Example07_StructuredOutputs.cs b/examples/Chat/Example06_StructuredOutputs.cs
similarity index 88%
rename from examples/Chat/Example07_StructuredOutputs.cs
rename to examples/Chat/Example06_StructuredOutputs.cs
index a64757c9..bb1c76df 100644
--- a/examples/Chat/Example07_StructuredOutputs.cs
+++ b/examples/Chat/Example06_StructuredOutputs.cs
@@ -1,6 +1,7 @@
 using NUnit.Framework;
 using OpenAI.Chat;
 using System;
+using System.Collections.Generic;
 using System.Text.Json;
 
 namespace OpenAI.Examples;
@@ -8,10 +9,14 @@ namespace OpenAI.Examples;
 public partial class ChatExamples
 {
     [Test]
-    public void Example07_StructuredOutputs()
+    public void Example06_StructuredOutputs()
     {
         ChatClient client = new("gpt-4o-mini", Environment.GetEnvironmentVariable("OPENAI_API_KEY"));
 
+        List<ChatMessage> messages = [
+            new UserChatMessage("How can I solve 8x + 7 = -23?"),
+        ];
+
         ChatCompletionOptions options = new()
         {
             ResponseFormat = ChatResponseFormat.CreateJsonSchemaFormat(
@@ -41,9 +46,7 @@ public void Example07_StructuredOutputs()
             jsonSchemaIsStrict: true)
         };
 
-        ChatCompletion chatCompletion = client.CompleteChat(
-            [ new UserChatMessage("How can I solve 8x + 7 = -23?") ],
-            options);
+        ChatCompletion chatCompletion = client.CompleteChat(messages, options);
 
         using JsonDocument structuredJson = JsonDocument.Parse(chatCompletion.ToString());
 
diff --git a/examples/Chat/Example07_StructuredOutputsAsync.cs b/examples/Chat/Example06_StructuredOutputsAsync.cs
similarity index 90%
rename from examples/Chat/Example07_StructuredOutputsAsync.cs
rename to examples/Chat/Example06_StructuredOutputsAsync.cs
index 81df15f2..dcc4fffe 100644
--- a/examples/Chat/Example07_StructuredOutputsAsync.cs
+++ b/examples/Chat/Example06_StructuredOutputsAsync.cs
@@ -1,6 +1,7 @@
 using NUnit.Framework;
 using OpenAI.Chat;
 using System;
+using System.Collections.Generic;
 using System.Text.Json;
 using System.Threading.Tasks;
 
@@ -9,10 +10,14 @@ namespace OpenAI.Examples;
 public partial class ChatExamples
 {
     [Test]
-    public async Task Example07_StructuredOutputsAsync()
+    public async Task Example06_StructuredOutputsAsync()
     {
         ChatClient client = new("gpt-4o-mini", Environment.GetEnvironmentVariable("OPENAI_API_KEY"));
 
+        List<ChatMessage> messages = [
+            new UserChatMessage("How can I solve 8x + 7 = -23?"),
+        ];
+
         ChatCompletionOptions options = new()
         {
             ResponseFormat = ChatResponseFormat.CreateJsonSchemaFormat(
@@ -42,9 +47,7 @@ public async Task Example07_StructuredOutputsAsync()
             jsonSchemaIsStrict: true)
         };
 
-        ChatCompletion chatCompletion = await client.CompleteChatAsync(
-            [ new UserChatMessage("How can I solve 8x + 7 = -23?") ],
-            options);
+        ChatCompletion chatCompletion = await client.CompleteChatAsync(messages, options);
 
         using JsonDocument structuredJson = JsonDocument.Parse(chatCompletion.ToString());
 
diff --git a/examples/Chat/Example06_SimpleChatProtocol.cs b/examples/Chat/Example07_ChatProtocol.cs
similarity index 96%
rename from examples/Chat/Example06_SimpleChatProtocol.cs
rename to examples/Chat/Example07_ChatProtocol.cs
index e8682bfd..090448ce 100644
--- a/examples/Chat/Example06_SimpleChatProtocol.cs
+++ b/examples/Chat/Example07_ChatProtocol.cs
@@ -9,7 +9,7 @@ namespace OpenAI.Examples;
 public partial class ChatExamples
 {
     [Test]
-    public void Example06_SimpleChatProtocol()
+    public void Example07_ChatProtocol()
     {
         ChatClient client = new("gpt-4o", Environment.GetEnvironmentVariable("OPENAI_API_KEY"));
 
diff --git a/examples/Chat/Example06_SimpleChatProtocolAsync.cs b/examples/Chat/Example07_ChatProtocolAsync.cs
similarity index 95%
rename from examples/Chat/Example06_SimpleChatProtocolAsync.cs
rename to examples/Chat/Example07_ChatProtocolAsync.cs
index 8e363e96..331836a7 100644
--- a/examples/Chat/Example06_SimpleChatProtocolAsync.cs
+++ b/examples/Chat/Example07_ChatProtocolAsync.cs
@@ -10,7 +10,7 @@ namespace OpenAI.Examples;
 public partial class ChatExamples
 {
     [Test]
-    public async Task Example06_SimpleChatProtocolAsync()
+    public async Task Example07_ChatProtocolAsync()
     {
         ChatClient client = new("gpt-4o", Environment.GetEnvironmentVariable("OPENAI_API_KEY"));
 
diff --git a/examples/Chat/Example08_ChatSerialization.cs b/examples/Chat/Example08_ChatSerialization.cs
new file mode 100644
index 00000000..03c11ff4
--- /dev/null
+++ b/examples/Chat/Example08_ChatSerialization.cs
@@ -0,0 +1,100 @@
+using NUnit.Framework;
+using OpenAI.Chat;
+using System;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Text.Json;
+
+namespace OpenAI.Examples;
+
+public partial class ChatExamples
+{
+    #region
+    public static IEnumerable<ChatMessage> DeserializeMessages(BinaryData data)
+    {
+        using JsonDocument messagesAsJson = JsonDocument.Parse(data.ToMemory());
+
+        foreach (JsonElement jsonElement in messagesAsJson.RootElement.EnumerateArray())
+        {
+            yield return ModelReaderWriter.Read<ChatMessage>(BinaryData.FromObjectAsJson(jsonElement), ModelReaderWriterOptions.Json);
+        }
+    }
+    #endregion
+
+    #region
+    public static BinaryData SerializeMessages(IEnumerable<ChatMessage> messages)
+    {
+        using MemoryStream stream = new();
+        using Utf8JsonWriter writer = new(stream);
+
+        writer.WriteStartArray();
+
+        foreach (IJsonModel<ChatMessage> message in messages)
+        {
+            message.Write(writer, ModelReaderWriterOptions.Json);
+        }
+
+        writer.WriteEndArray();
+        writer.Flush();
+
+        return BinaryData.FromBytes(stream.ToArray());
+    }
+    #endregion
+
+    [Test]
+    public void Example08_ChatSerialization()
+    {
+        ChatClient client = new("gpt-4o", Environment.GetEnvironmentVariable("OPENAI_API_KEY"));
+
+        BinaryData serializedData = BinaryData.FromBytes("""
+            [
+                {
+                    "role": "user",
+                    "content": "Who won the world series in 2020?"
+                },
+                {
+                    "role": "assistant",
+                    "content": "The Los Angeles Dodgers won the World Series in 2020."
+                },
+                {
+                    "role": "user",
+                    "content": "Where was it played?"
+                }
+            ]
+            """u8.ToArray());
+
+        List<ChatMessage> messages = DeserializeMessages(serializedData).ToList();
+
+        ChatCompletion completion = client.CompleteChat(messages);
+
+        messages.Add(new AssistantChatMessage(completion));
+
+        foreach (ChatMessage message in messages)
+        {
+            switch (message)
+            {
+                case UserChatMessage userMessage:
+                    Console.WriteLine($"[USER]:");
+                    break;
+
+                case AssistantChatMessage assistantMessage when assistantMessage.Content.Count > 0:
+                    Console.WriteLine($"[ASSISTANT]:");
+                    break;
+
+                default:
+                    break;
+            }
+
+            Console.WriteLine($"{message.Content[0].Text}");
+            Console.WriteLine();
+        }
+
+        serializedData = SerializeMessages(messages);
+
+        Console.WriteLine("****************************************************");
+        Console.WriteLine();
+        Console.WriteLine(serializedData.ToString());
+    }
+}
diff --git a/examples/Chat/Example08_ChatSerializationAsync.cs b/examples/Chat/Example08_ChatSerializationAsync.cs
new file mode 100644
index 00000000..de0ec288
--- /dev/null
+++ b/examples/Chat/Example08_ChatSerializationAsync.cs
@@ -0,0 +1,66 @@
+using NUnit.Framework;
+using OpenAI.Chat;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+
+namespace OpenAI.Examples;
+
+public partial class ChatExamples
+{
+    [Test]
+    public async Task Example08_ChatSerializationAsync()
+    {
+        ChatClient client = new("gpt-4o", Environment.GetEnvironmentVariable("OPENAI_API_KEY"));
+
+        BinaryData serializedData = BinaryData.FromBytes("""
+            [
+                {
+                    "role": "user",
+                    "content": "Who won the world series in 2020?"
+                },
+                {
+                    "role": "assistant",
+                    "content": "The Los Angeles Dodgers won the World Series in 2020."
+                },
+                {
+                    "role": "user",
+                    "content": "Where was it played?"
+                }
+            ]
+            """u8.ToArray());
+
+        List<ChatMessage> messages = DeserializeMessages(serializedData).ToList();
+
+        ChatCompletion completion = await client.CompleteChatAsync(messages);
+
+        messages.Add(new AssistantChatMessage(completion));
+
+        foreach (ChatMessage message in messages)
+        {
+            switch (message)
+            {
+                case UserChatMessage userMessage:
+                    Console.WriteLine($"[USER]:");
+                    break;
+
+                case AssistantChatMessage assistantMessage when assistantMessage.Content.Count > 0:
+                    Console.WriteLine($"[ASSISTANT]:");
+                    break;
+
+                default:
+                    break;
+            }
+
+            Console.WriteLine($"{message.Content[0].Text}");
+            Console.WriteLine();
+        }
+
+        serializedData = SerializeMessages(messages);
+
+        Console.WriteLine("****************************************************");
+        Console.WriteLine();
+        Console.WriteLine(serializedData.ToString());
+    }
+}
diff --git a/examples/Embeddings/Example04_SimpleEmbeddingProtocol.cs b/examples/Embeddings/Example04_EmbeddingProtocol.cs
similarity index 96%
rename from examples/Embeddings/Example04_SimpleEmbeddingProtocol.cs
rename to examples/Embeddings/Example04_EmbeddingProtocol.cs
index d23112e8..065b6c3a 100644
--- a/examples/Embeddings/Example04_SimpleEmbeddingProtocol.cs
+++ b/examples/Embeddings/Example04_EmbeddingProtocol.cs
@@ -9,7 +9,7 @@ namespace OpenAI.Examples;
 public partial class EmbeddingExamples
 {
     [Test]
-    public void Example04_SimpleEmbeddingProtocol()
+    public void Example04_EmbeddingProtocol()
     {
         EmbeddingClient client = new("text-embedding-3-small", Environment.GetEnvironmentVariable("OPENAI_API_KEY"));
 
diff --git a/examples/Embeddings/Example04_SimpleEmbeddingProtocolAsync.cs b/examples/Embeddings/Example04_EmbeddingProtocolAsync.cs
similarity index 96%
rename from examples/Embeddings/Example04_SimpleEmbeddingProtocolAsync.cs
rename to examples/Embeddings/Example04_EmbeddingProtocolAsync.cs
index e3be7b52..4edf1b0d 100644
--- a/examples/Embeddings/Example04_SimpleEmbeddingProtocolAsync.cs
+++ b/examples/Embeddings/Example04_EmbeddingProtocolAsync.cs
@@ -10,7 +10,7 @@ namespace OpenAI.Examples;
 public partial class EmbeddingExamples
 {
     [Test]
-    public async Task Example04_SimpleEmbeddingProtocolAsync()
+    public async Task Example04_EmbeddingProtocolAsync()
     {
         EmbeddingClient client = new("text-embedding-3-small", Environment.GetEnvironmentVariable("OPENAI_API_KEY"));
 
diff --git a/tests/Chat/ChatTests.cs b/tests/Chat/ChatTests.cs
index 044e7952..614e028f 100644
--- a/tests/Chat/ChatTests.cs
+++ b/tests/Chat/ChatTests.cs
@@ -5,13 +5,16 @@
 using OpenAI.Tests.Utility;
 using System;
 using System.ClientModel;
+using System.ClientModel.Primitives;
 using System.Collections.Generic;
 using System.Diagnostics;
 using System.IO;
 using System.Linq;
 using System.Net;
+using System.Net.Http;
 using System.Text;
 using System.Text.Json;
+using System.Threading;
 using System.Threading.Tasks;
 using static OpenAI.Tests.Telemetry.TestMeterListener;
 using static OpenAI.Tests.TestHelpers;
@@ -70,34 +73,80 @@ public async Task MultiMessageChat()
     [Test]
     public void StreamingChat()
     {
+        AssertSyncOnly();
+
         ChatClient client = GetTestClient(TestScenario.Chat);
-        IEnumerable<ChatMessage> messages = [
-            new UserChatMessage("What are the best pizza toppings? Give me a breakdown on the reasons.")
-        ];
+        IEnumerable<ChatMessage> messages = [ new UserChatMessage("What are the best pizza toppings? Give me a breakdown on the reasons.") ];
 
+        int updateCount = 0;
+        ChatTokenUsage usage = null;
         TimeSpan? firstTokenReceiptTime = null;
         TimeSpan? latestTokenReceiptTime = null;
         Stopwatch stopwatch = Stopwatch.StartNew();
-
         CollectionResult<StreamingChatCompletionUpdate> streamingResult = client.CompleteChatStreaming(messages);
+
+        Assert.That(streamingResult, Is.InstanceOf<CollectionResult<StreamingChatCompletionUpdate>>());
 
-        int updateCount = 0;
         foreach (StreamingChatCompletionUpdate chatUpdate in streamingResult)
         {
             firstTokenReceiptTime ??= stopwatch.Elapsed;
             latestTokenReceiptTime = stopwatch.Elapsed;
+            usage ??= chatUpdate.Usage;
+            updateCount++;
+            Console.WriteLine(stopwatch.Elapsed.TotalMilliseconds);
+        }
+
+        stopwatch.Stop();
+
+        Assert.That(updateCount, Is.GreaterThan(1));
+        Assert.That(latestTokenReceiptTime - firstTokenReceiptTime > TimeSpan.FromMilliseconds(500));
+        Assert.That(usage, Is.Not.Null);
+        Assert.That(usage?.InputTokenCount, Is.GreaterThan(0));
+        Assert.That(usage?.OutputTokenCount, Is.GreaterThan(0));
+        Assert.That(usage?.OutputTokenDetails?.ReasoningTokenCount, Is.Null.Or.EqualTo(0));
+    }
+
+    [Test]
+    public async Task StreamingChatAsync()
+    {
+        AssertAsyncOnly();
+
+        ChatClient client = GetTestClient(TestScenario.Chat);
+        IEnumerable<ChatMessage> messages = [ new UserChatMessage("What are the best pizza toppings? Give me a breakdown on the reasons.") ];
+
+        int updateCount = 0;
+        ChatTokenUsage usage = null;
+        TimeSpan? firstTokenReceiptTime = null;
+        TimeSpan? latestTokenReceiptTime = null;
+        Stopwatch stopwatch = Stopwatch.StartNew();
+        AsyncCollectionResult<StreamingChatCompletionUpdate> streamingResult = client.CompleteChatStreamingAsync(messages);
+
+        Assert.That(streamingResult, Is.InstanceOf<AsyncCollectionResult<StreamingChatCompletionUpdate>>());
+
+        await foreach (StreamingChatCompletionUpdate chatUpdate in streamingResult)
+        {
+            firstTokenReceiptTime ??= stopwatch.Elapsed;
+            latestTokenReceiptTime = stopwatch.Elapsed;
+            usage ??= chatUpdate.Usage;
             updateCount++;
         }
 
+        stopwatch.Stop();
+
         Assert.That(updateCount, Is.GreaterThan(1));
         Assert.That(latestTokenReceiptTime - firstTokenReceiptTime > TimeSpan.FromMilliseconds(500));
+        Assert.That(usage, Is.Not.Null);
+        Assert.That(usage?.InputTokenCount, Is.GreaterThan(0));
+        Assert.That(usage?.OutputTokenCount, Is.GreaterThan(0));
+        Assert.That(usage?.OutputTokenDetails?.ReasoningTokenCount, Is.Null.Or.EqualTo(0));
     }
 
     [Test]
-    public void CompleteChatStreamingClosesNetworkStream()
+    public void StreamingChatCanBeCancelled()
     {
+        AssertSyncOnly();
+
         MockPipelineResponse response = new(200);
         response.SetContent("""
             data: {"id":"chatcmpl-A7mKGugwaczn3YyrJLlZY6CM0Wlkr","object":"chat.completion.chunk","created":1726417424,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_483d39d857","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}],"usage":null}
@@ -112,67 +161,129 @@ public void CompleteChatStreamingClosesNetworkStream()
             Transport = new MockPipelineTransport(response)
         };
 
+        CancellationTokenSource cancellationTokenSource = new();
+        cancellationTokenSource.CancelAfter(1000);
+
         ChatClient client = GetTestClient(TestScenario.Chat, options: options);
-        IEnumerable<ChatMessage> messages = [
-            new UserChatMessage("What are the best pizza toppings? Give me a breakdown on the reasons.")
-        ];
+        IEnumerable<ChatMessage> messages = [new UserChatMessage("What are the best pizza toppings? Give me a breakdown on the reasons.")];
 
-        TimeSpan? firstTokenReceiptTime = null;
-        TimeSpan? latestTokenReceiptTime = null;
-        Stopwatch stopwatch = Stopwatch.StartNew();
+        CollectionResult<StreamingChatCompletionUpdate> streamingResult = client.CompleteChatStreaming(messages, cancellationToken: cancellationTokenSource.Token);
+        IEnumerator<StreamingChatCompletionUpdate> enumerator = streamingResult.GetEnumerator();
 
-        CollectionResult<StreamingChatCompletionUpdate> streamingResult = client.CompleteChatStreaming(messages);
-        Assert.That(streamingResult, Is.InstanceOf<CollectionResult<StreamingChatCompletionUpdate>>());
-        int updateCount = 0;
+        enumerator.MoveNext();
+        StreamingChatCompletionUpdate firstUpdate = enumerator.Current;
 
-        Assert.IsFalse(response.IsDisposed);
+        Assert.That(firstUpdate, Is.Not.Null);
+        Assert.That(cancellationTokenSource.IsCancellationRequested, Is.False);
 
-        foreach (StreamingChatCompletionUpdate chatUpdate in streamingResult)
+        Thread.Sleep(1000);
+
+        Assert.Throws<OperationCanceledException>(() =>
         {
-            firstTokenReceiptTime ??= stopwatch.Elapsed;
-            latestTokenReceiptTime = stopwatch.Elapsed;
-            Console.WriteLine(stopwatch.Elapsed.TotalMilliseconds);
-            updateCount++;
-        }
+            // Should throw for the second update.
+            Assert.True(cancellationTokenSource.IsCancellationRequested);
+            Assert.True(cancellationTokenSource.Token.IsCancellationRequested);
+            enumerator.MoveNext();
+            enumerator.MoveNext();
+        });
     }
 
-        Assert.IsTrue(response.IsDisposed);
+    [Test]
+    public async Task StreamingChatCanBeCancelledAsync()
+    {
+        AssertAsyncOnly();
+
+        MockPipelineResponse response = new(200);
+        response.SetContent("""
+            data: {"id":"chatcmpl-A7mKGugwaczn3YyrJLlZY6CM0Wlkr","object":"chat.completion.chunk","created":1726417424,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_483d39d857","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}],"usage":null}
+
+            data: {"id":"chatcmpl-A7mKGugwaczn3YyrJLlZY6CM0Wlkr","object":"chat.completion.chunk","created":1726417424,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_483d39d857","choices":[{"index":0,"delta":{"content":"The"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+            data: [DONE]
+            """);
+
+        OpenAIClientOptions options = new OpenAIClientOptions()
+        {
+            Transport = new MockPipelineTransport(response)
+        };
+
+        CancellationTokenSource cancellationTokenSource = new();
+        cancellationTokenSource.CancelAfter(1000);
+
+        ChatClient client = GetTestClient(TestScenario.Chat, options: options);
+        IEnumerable<ChatMessage> messages = [new UserChatMessage("What are the best pizza toppings? Give me a breakdown on the reasons.")];
+
+        AsyncCollectionResult<StreamingChatCompletionUpdate> streamingResult = client.CompleteChatStreamingAsync(messages, cancellationToken: cancellationTokenSource.Token);
+        IAsyncEnumerator<StreamingChatCompletionUpdate> enumerator = streamingResult.GetAsyncEnumerator();
+
+        await enumerator.MoveNextAsync();
+        StreamingChatCompletionUpdate firstUpdate = enumerator.Current;
+
+        Assert.That(firstUpdate, Is.Not.Null);
+        Assert.That(cancellationTokenSource.IsCancellationRequested, Is.False);
+
+        Thread.Sleep(1000);
+
+        Assert.ThrowsAsync<OperationCanceledException>(async () =>
+        {
+            // Should throw for the second update.
+            Assert.True(cancellationTokenSource.IsCancellationRequested);
+            Assert.True(cancellationTokenSource.Token.IsCancellationRequested);
+            await enumerator.MoveNextAsync();
+            await enumerator.MoveNextAsync();
+        });
     }
 
     [Test]
-    public async Task StreamingChatAsync()
+    public void CompleteChatStreamingClosesNetworkStream()
     {
-        ChatClient client = GetTestClient(TestScenario.Chat);
-        IEnumerable<ChatMessage> messages = [
-            new UserChatMessage("What are the best pizza toppings? Give me a breakdown on the reasons.")
-        ];
+        AssertSyncOnly();
+
+        MockPipelineResponse response = new(200);
+        response.SetContent("""
+            data: {"id":"chatcmpl-A7mKGugwaczn3YyrJLlZY6CM0Wlkr","object":"chat.completion.chunk","created":1726417424,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_483d39d857","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}],"usage":null}
+
+            data: {"id":"chatcmpl-A7mKGugwaczn3YyrJLlZY6CM0Wlkr","object":"chat.completion.chunk","created":1726417424,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_483d39d857","choices":[{"index":0,"delta":{"content":"The"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+            data: [DONE]
+            """);
+
+        OpenAIClientOptions options = new OpenAIClientOptions()
+        {
+            Transport = new MockPipelineTransport(response)
+        };
+
+        ChatClient client = GetTestClient(TestScenario.Chat, options: options);
+        IEnumerable<ChatMessage> messages = [ new UserChatMessage("What are the best pizza toppings? Give me a breakdown on the reasons.") ];
+
+        int updateCount = 0;
         TimeSpan? firstTokenReceiptTime = null;
         TimeSpan? latestTokenReceiptTime = null;
         Stopwatch stopwatch = Stopwatch.StartNew();
+        CollectionResult<StreamingChatCompletionUpdate> streamingResult = client.CompleteChatStreaming(messages);
 
-        AsyncCollectionResult<StreamingChatCompletionUpdate> streamingResult = client.CompleteChatStreamingAsync(messages);
-        Assert.That(streamingResult, Is.InstanceOf<AsyncCollectionResult<StreamingChatCompletionUpdate>>());
-        int updateCount = 0;
-        ChatTokenUsage usage = null;
+        Assert.That(streamingResult, Is.InstanceOf<CollectionResult<StreamingChatCompletionUpdate>>());
+        Assert.IsFalse(response.IsDisposed);
 
-        await foreach (StreamingChatCompletionUpdate chatUpdate in streamingResult)
+        foreach (StreamingChatCompletionUpdate chatUpdate in streamingResult)
         {
             firstTokenReceiptTime ??= stopwatch.Elapsed;
             latestTokenReceiptTime = stopwatch.Elapsed;
-            usage ??= chatUpdate.Usage;
             updateCount++;
+
+            Console.WriteLine(stopwatch.Elapsed.TotalMilliseconds);
         }
 
-        Assert.That(updateCount, Is.GreaterThan(1));
-        Assert.That(latestTokenReceiptTime - firstTokenReceiptTime > TimeSpan.FromMilliseconds(500));
-        Assert.That(usage, Is.Not.Null);
-        Assert.That(usage?.InputTokenCount, Is.GreaterThan(0));
-        Assert.That(usage?.OutputTokenCount, Is.GreaterThan(0));
-        Assert.That(usage?.OutputTokenDetails?.ReasoningTokenCount, Is.Null.Or.EqualTo(0));
+
+        stopwatch.Stop();
+
+        Assert.IsTrue(response.IsDisposed);
     }
 
     [Test]
     public async Task CompleteChatStreamingClosesNetworkStreamAsync()
     {
+        AssertAsyncOnly();
+
         MockPipelineResponse response = new(200);
         response.SetContent("""
             data: {"id":"chatcmpl-A7mKGugwaczn3YyrJLlZY6CM0Wlkr","object":"chat.completion.chunk","created":1726417424,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_483d39d857","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}],"usage":null}
@@ -188,28 +299,28 @@ public async Task CompleteChatStreamingClosesNetworkStreamAsync()
         };
 
         ChatClient client = GetTestClient(TestScenario.Chat, options: options);
-        IEnumerable<ChatMessage> messages = [
-            new UserChatMessage("What are the best pizza toppings? Give me a breakdown on the reasons.")
-        ];
+        IEnumerable<ChatMessage> messages = [ new UserChatMessage("What are the best pizza toppings? Give me a breakdown on the reasons.") ];
 
+        int updateCount = 0;
         TimeSpan? firstTokenReceiptTime = null;
         TimeSpan? latestTokenReceiptTime = null;
         Stopwatch stopwatch = Stopwatch.StartNew();
+        AsyncCollectionResult<StreamingChatCompletionUpdate> streamingResult = client.CompleteChatStreamingAsync(messages);
 
-        AsyncCollectionResult<StreamingChatCompletionUpdate> streamingResult = client.CompleteChatStreamingAsync(messages);
-        Assert.That(streamingResult, Is.InstanceOf<AsyncCollectionResult<StreamingChatCompletionUpdate>>());
-        int updateCount = 0;
+        Assert.That(streamingResult, Is.InstanceOf<AsyncCollectionResult<StreamingChatCompletionUpdate>>());
         Assert.IsFalse(response.IsDisposed);
 
         await foreach (StreamingChatCompletionUpdate chatUpdate in streamingResult)
         {
             firstTokenReceiptTime ??= stopwatch.Elapsed;
             latestTokenReceiptTime = stopwatch.Elapsed;
-            Console.WriteLine(stopwatch.Elapsed.TotalMilliseconds);
             updateCount++;
+
+            Console.WriteLine(stopwatch.Elapsed.TotalMilliseconds);
         }
 
+        stopwatch.Stop();
+
         Assert.IsTrue(response.IsDisposed);
     }
 
@@ -316,14 +427,12 @@ public async Task TokenLogProbabilities(bool includeLogProbabilities)
         foreach (ChatTokenLogProbabilityDetails tokenLogProbs in chatTokenLogProbabilities)
         {
             Assert.That(tokenLogProbs.Token, Is.Not.Null.Or.Empty);
-            Assert.That(tokenLogProbs.Utf8Bytes, Is.Not.Null);
             Assert.That(tokenLogProbs.TopLogProbabilities, Is.Not.Null.Or.Empty);
             Assert.That(tokenLogProbs.TopLogProbabilities, Has.Count.EqualTo(topLogProbabilityCount));
 
             foreach (ChatTokenTopLogProbabilityDetails tokenTopLogProbs in tokenLogProbs.TopLogProbabilities)
             {
                 Assert.That(tokenTopLogProbs.Token, Is.Not.Null.Or.Empty);
-                Assert.That(tokenTopLogProbs.Utf8Bytes, Is.Not.Null);
             }
         }
     }
@@ -373,14 +482,12 @@ public async Task TokenLogProbabilitiesStreaming(bool includeLogProbabilities)
         foreach (ChatTokenLogProbabilityDetails tokenLogProbs in chatCompletionUpdate.ContentTokenLogProbabilities)
         {
             Assert.That(tokenLogProbs.Token, Is.Not.Null.Or.Empty);
-            Assert.That(tokenLogProbs.Utf8Bytes, Is.Not.Null);
             Assert.That(tokenLogProbs.TopLogProbabilities, Is.Not.Null.Or.Empty);
             Assert.That(tokenLogProbs.TopLogProbabilities, Has.Count.EqualTo(topLogProbabilityCount));
 
             foreach (ChatTokenTopLogProbabilityDetails tokenTopLogProbs in tokenLogProbs.TopLogProbabilities)
             {
                 Assert.That(tokenTopLogProbs.Token, Is.Not.Null.Or.Empty);
-                Assert.That(tokenTopLogProbs.Utf8Bytes, Is.Not.Null);
             }
         }
     }
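> Note: as a usage illustration of the `SerializeMessages`/`DeserializeMessages` helpers added in `Example08_ChatSerialization.cs`, here is a minimal round-trip sketch. It assumes the `ChatExamples` class is referenced from your own project; the stored JSON is illustrative.

```csharp
using System;
using System.Collections.Generic;
using OpenAI.Chat;
using OpenAI.Examples;

// Round trip: stored JSON -> List<ChatMessage> -> append a turn -> JSON again.
BinaryData stored = BinaryData.FromString("""
    [
        { "role": "user", "content": "Who won the world series in 2020?" },
        { "role": "assistant", "content": "The Los Angeles Dodgers won the World Series in 2020." }
    ]
    """);

List<ChatMessage> history = new(ChatExamples.DeserializeMessages(stored));
history.Add(new UserChatMessage("Where was it played?"));

// Persist the updated conversation, e.g., to a file or database.
BinaryData updated = ChatExamples.SerializeMessages(history);
Console.WriteLine(updated.ToString());
```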