From 4445b98e877cf4820fac99d04ffabd42d339a429 Mon Sep 17 00:00:00 2001
From: claudiogodoy99
Date: Sat, 28 Jun 2025 20:04:04 -0300
Subject: [PATCH] chore: add ASP.NET Core example and update README

---
 README.md                                     |  40 ++++++
 examples/ASP.NET Core/ASP.NET Core.sln        |  24 ++++
 examples/ASP.NET Core/Program.cs              |  41 ++++++
 examples/ASP.NET Core/README.md               | 118 ++++++++++++++++++
 .../ASP.NET Core/appsettings.Development.json |   8 ++
 examples/ASP.NET Core/appsettings.json        |  14 +++
 examples/ASP.NET Core/client-di.csproj        |  15 +++
 7 files changed, 260 insertions(+)
 create mode 100644 examples/ASP.NET Core/ASP.NET Core.sln
 create mode 100644 examples/ASP.NET Core/Program.cs
 create mode 100644 examples/ASP.NET Core/README.md
 create mode 100644 examples/ASP.NET Core/appsettings.Development.json
 create mode 100644 examples/ASP.NET Core/appsettings.json
 create mode 100644 examples/ASP.NET Core/client-di.csproj

diff --git a/README.md b/README.md
index 026fb575..d691c607 100644
--- a/README.md
+++ b/README.md
@@ -138,6 +138,46 @@ AudioClient ttsClient = client.GetAudioClient("tts-1");
 AudioClient whisperClient = client.GetAudioClient("whisper-1");
 ```
 
+## How to use dependency injection
+
+The OpenAI clients are **thread-safe** and can be safely registered as **singletons** in ASP.NET Core's dependency injection container. A single shared instance reuses HTTP connections and avoids allocating a new client per request.
+
+Register the `OpenAIClient` as a singleton in your `Program.cs`:
+
+```csharp
+builder.Services.AddSingleton(serviceProvider =>
+{
+    var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY");
+
+    return new OpenAIClient(apiKey);
+});
+```
+
+Then inject and use the client in your controllers or services:
+
+```csharp
+[ApiController]
+[Route("api/[controller]")]
+public class ChatController : ControllerBase
+{
+    private readonly OpenAIClient _openAIClient;
+
+    public ChatController(OpenAIClient openAIClient)
+    {
+        _openAIClient = openAIClient;
+    }
+
+    [HttpPost("complete")]
+    public async Task<IActionResult> CompleteChat([FromBody] string message)
+    {
+        ChatClient chatClient = _openAIClient.GetChatClient("gpt-4o");
+        ChatCompletion completion = await chatClient.CompleteChatAsync(message);
+
+        return Ok(new { response = completion.Content[0].Text });
+    }
+}
+```
+
 ## How to use chat completions with streaming
 
 When you request a chat completion, the default behavior is for the server to generate it in its entirety before sending it back in a single response. Consequently, long chat completions can require waiting for several seconds before hearing back from the server. To mitigate this, the OpenAI REST API supports the ability to stream partial results back as they are being generated, allowing you to start processing the beginning of the completion before it is finished.
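As a minimal sketch of the streaming pattern described above (assuming an `OpenAIClient` named `client` as in the earlier snippets, and the SDK's `CompleteChatStreamingAsync` and `StreamingChatCompletionUpdate` API surface), partial results can be consumed as they arrive:

```csharp
ChatClient chatClient = client.GetChatClient("gpt-4o");

// Print each chunk of the assistant's reply as soon as it is generated,
// instead of waiting for the full completion.
await foreach (StreamingChatCompletionUpdate update in chatClient.CompleteChatStreamingAsync("Say 'this is a test.'"))
{
    if (update.ContentUpdate.Count > 0)
    {
        Console.Write(update.ContentUpdate[0].Text);
    }
}
```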
diff --git a/examples/ASP.NET Core/ASP.NET Core.sln b/examples/ASP.NET Core/ASP.NET Core.sln
new file mode 100644
index 00000000..7dcb33a5
--- /dev/null
+++ b/examples/ASP.NET Core/ASP.NET Core.sln
@@ -0,0 +1,24 @@
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio Version 17
+VisualStudioVersion = 17.5.2.0
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "client-di", "client-di.csproj", "{F3F2E48A-807D-4AC2-064F-2417457154CA}"
+EndProject
+Global
+	GlobalSection(SolutionConfigurationPlatforms) = preSolution
+		Debug|Any CPU = Debug|Any CPU
+		Release|Any CPU = Release|Any CPU
+	EndGlobalSection
+	GlobalSection(ProjectConfigurationPlatforms) = postSolution
+		{F3F2E48A-807D-4AC2-064F-2417457154CA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{F3F2E48A-807D-4AC2-064F-2417457154CA}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{F3F2E48A-807D-4AC2-064F-2417457154CA}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{F3F2E48A-807D-4AC2-064F-2417457154CA}.Release|Any CPU.Build.0 = Release|Any CPU
+	EndGlobalSection
+	GlobalSection(SolutionProperties) = preSolution
+		HideSolutionNode = FALSE
+	EndGlobalSection
+	GlobalSection(ExtensibilityGlobals) = postSolution
+		SolutionGuid = {33398145-928F-4A73-A7AE-4B3ED3CE96C2}
+	EndGlobalSection
+EndGlobal
diff --git a/examples/ASP.NET Core/Program.cs b/examples/ASP.NET Core/Program.cs
new file mode 100644
index 00000000..2f358770
--- /dev/null
+++ b/examples/ASP.NET Core/Program.cs
@@ -0,0 +1,41 @@
+using System.ClientModel;
+using OpenAI.Chat;
+
+var builder = WebApplication.CreateBuilder(args);
+
+// Add services to the container.
+builder.Services.AddEndpointsApiExplorer();
+builder.Services.AddSwaggerGen();
+
+builder.Services.AddSingleton(serviceProvider => new ChatClient(builder.Configuration["OpenAI:Model"],
+    new ApiKeyCredential(builder.Configuration["OpenAI:ApiKey"]
+        ?? Environment.GetEnvironmentVariable("OPENAI_API_KEY")
+        ?? throw new InvalidOperationException("OpenAI API key not found")))
+);
+
+
+var app = builder.Build();
+
+// Configure the HTTP request pipeline.
+if (app.Environment.IsDevelopment())
+{
+    app.UseSwagger();
+    app.UseSwaggerUI();
+}
+
+app.UseHttpsRedirection();
+
+// Chat completion endpoint using the injected ChatClient
+app.MapPost("/chat/complete", async (ChatRequest request, ChatClient client) =>
+{
+    var completion = await client.CompleteChatAsync(request.Message);
+
+    return new ChatResponse(completion.Value.Content[0].Text);
+});
+
+app.Run();
+
+record ChatRequest(string Message);
+record ChatResponse(string Response);
+record EmbeddingRequest(string Text);
+record EmbeddingResponse(float[] Vector);
diff --git a/examples/ASP.NET Core/README.md b/examples/ASP.NET Core/README.md
new file mode 100644
index 00000000..16be5fdb
--- /dev/null
+++ b/examples/ASP.NET Core/README.md
@@ -0,0 +1,118 @@
+# OpenAI ASP.NET Core Example
+
+This example demonstrates how to use the OpenAI .NET client library with ASP.NET Core's dependency injection container, registering a `ChatClient` as a singleton so a single, thread-safe client instance is shared across all requests.
+
+## Features
+
+- **Singleton Registration**: `ChatClient` registered as a singleton in the DI container
+- **Thread-Safe**: A single client instance safely serves concurrent chat completion requests
+- **Configurable Model**: Model selection via configuration (appsettings.json)
+- **Modern ASP.NET Core**: Uses minimal APIs with async/await patterns
+
+## Prerequisites
+
+- .NET 8.0 or later
+- OpenAI API key
+
+## Setup
+
+1. **Set your OpenAI API key** using one of these methods:
+
+   **Environment Variable (Recommended):**
+
+   ```bash
+   export OPENAI_API_KEY="your-api-key-here"
+   ```
+
+   **Configuration (appsettings.json):**
+
+   ```json
+   {
+     "OpenAI": {
+       "Model": "gpt-4.1-mini",
+       "ApiKey": "your-api-key-here"
+     }
+   }
+   ```
+
+2. **Install dependencies:**
+
+   ```bash
+   dotnet restore
+   ```
+
+3. **Run the application:**
+
+   ```bash
+   dotnet run
+   ```
+
+## API Endpoints
+
+### Chat Completion
+
+- **POST** `/chat/complete`
+- **Request Body:**
+
+  ```json
+  {
+    "message": "Hello, how are you?"
+  }
+  ```
+
+- **Response:**
+
+  ```json
+  {
+    "response": "I'm doing well, thank you for asking! How can I help you today?"
+  }
+  ```
+
+## Testing with cURL
+
+**Chat Completion:**
+
+```bash
+curl -X POST "https://localhost:7071/chat/complete" \
+  -H "Content-Type: application/json" \
+  -d '{"message": "What is the capital of France?"}'
+```
+
+## Key Implementation Details
+
+### Singleton Registration
+
+```csharp
+builder.Services.AddSingleton(serviceProvider => new ChatClient(
+    builder.Configuration["OpenAI:Model"],
+    new ApiKeyCredential(builder.Configuration["OpenAI:ApiKey"]
+        ?? Environment.GetEnvironmentVariable("OPENAI_API_KEY")
+        ?? throw new InvalidOperationException("OpenAI API key not found")))
+);
+```
+
+### Dependency Injection Usage
+
+```csharp
+app.MapPost("/chat/complete", async (ChatRequest request, ChatClient client) =>
+{
+    var completion = await client.CompleteChatAsync(request.Message);
+
+    return new ChatResponse(completion.Value.Content[0].Text);
+});
+```
+
+## Why Singleton?
+
+- **Thread-Safe**: `ChatClient` is thread-safe and can handle concurrent requests
+- **Resource Efficient**: Reuses HTTP connections and avoids creating multiple instances
+- **Performance**: Reduces object allocation overhead
+- **Stateless**: Clients don't maintain per-request state
+
+## Swagger UI
+
+When running in development mode, you can access the Swagger UI at:
+
+- `https://localhost:7071/swagger`
+
+This provides an interactive interface to test the API endpoints.
diff --git a/examples/ASP.NET Core/appsettings.Development.json b/examples/ASP.NET Core/appsettings.Development.json
new file mode 100644
index 00000000..0c208ae9
--- /dev/null
+++ b/examples/ASP.NET Core/appsettings.Development.json
@@ -0,0 +1,8 @@
+{
+  "Logging": {
+    "LogLevel": {
+      "Default": "Information",
+      "Microsoft.AspNetCore": "Warning"
+    }
+  }
+}
diff --git a/examples/ASP.NET Core/appsettings.json b/examples/ASP.NET Core/appsettings.json
new file mode 100644
index 00000000..8636efc3
--- /dev/null
+++ b/examples/ASP.NET Core/appsettings.json
@@ -0,0 +1,14 @@
+{
+  "Logging": {
+    "LogLevel": {
+      "Default": "Information",
+      "Microsoft.AspNetCore": "Warning"
+    }
+  },
+  "AllowedHosts": "*",
+  "OpenAI":
+  {
+    "Model": "gpt-4.1-mini",
+    "ApiKey": "YOUR_API_KEY"
+  }
+}
diff --git a/examples/ASP.NET Core/client-di.csproj b/examples/ASP.NET Core/client-di.csproj
new file mode 100644
index 00000000..04f0cfbd
--- /dev/null
+++ b/examples/ASP.NET Core/client-di.csproj
@@ -0,0 +1,15 @@
+<Project Sdk="Microsoft.NET.Sdk.Web">
+
+  <PropertyGroup>
+    <TargetFramework>net8.0</TargetFramework>
+    <Nullable>enable</Nullable>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <RootNamespace>ASP.NET_Core</RootNamespace>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <!-- Assumed references: the example needs the OpenAI client library and Swashbuckle.AspNetCore for Swagger. Pin versions as appropriate. -->
+    <PackageReference Include="OpenAI" />
+    <PackageReference Include="Swashbuckle.AspNetCore" />
+  </ItemGroup>
+</Project>