-
Notifications
You must be signed in to change notification settings - Fork 282
Telemetry: update enablement (experimental source instead of app context switch) and docs improvements #187
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,49 @@ | ||
using NUnit.Framework; | ||
using OpenAI.Chat; | ||
using OpenTelemetry.Metrics; | ||
using OpenTelemetry.Resources; | ||
using OpenTelemetry.Trace; | ||
using System; | ||
using System.Threading.Tasks; | ||
|
||
namespace OpenAI.Examples; | ||
|
||
// Example showing how to enable and export OpenAI client telemetry (traces + metrics)
// via OpenTelemetry. NOTE(review): this is a live example, not a hermetic test — it
// performs a real network call to the OpenAI service and requires the OPENAI_API_KEY
// environment variable to be set.
public partial class ChatExamples | ||
{ | ||
[Test] | ||
public async Task OpenTelemetryExamples() | ||
{ | ||
// Let's configure OpenTelemetry to collect OpenAI and HTTP client traces and metrics | ||
// and export them to console and also to the local OTLP endpoint. | ||
// | ||
// If you have some local OTLP listener (e.g. Aspire dashboard) running, | ||
// you can explore traces and metrics produced by the test there. | ||
// | ||
// Check out https://opentelemetry.io/docs/languages/net/getting-started/ for more details and | ||
// examples on how to set up OpenTelemetry with ASP.NET Core. | ||
|
||
ResourceBuilder resourceBuilder = ResourceBuilder.CreateDefault().AddService("test"); | ||
// Subscribe to both the experimental and stable activity source names so telemetry
// is captured regardless of which name the library emits under.
// NOTE(review): presumably "Experimental.OpenAI.*" is used while the telemetry
// feature is experimental — confirm against the library's telemetry docs.
using TracerProvider tracerProvider = OpenTelemetry.Sdk.CreateTracerProviderBuilder() | ||
.SetResourceBuilder(resourceBuilder) | ||
.AddSource("Experimental.OpenAI.*", "OpenAI.*") | ||
.AddHttpClientInstrumentation() | ||
.AddConsoleExporter() | ||
.AddOtlpExporter() | ||
.Build(); | ||
|
||
// Custom explicit histogram buckets for the gen_ai.client.operation.duration metric
// (exponentially spaced, in seconds). NOTE(review): looks like these follow the
// GenAI semantic-conventions bucket advice — TODO confirm against current semconv.
using MeterProvider meterProvider = OpenTelemetry.Sdk.CreateMeterProviderBuilder() | ||
.SetResourceBuilder(resourceBuilder) | ||
.AddView("gen_ai.client.operation.duration", new ExplicitBucketHistogramConfiguration { Boundaries = [0.01, 0.02, 0.04, 0.08, 0.16, 0.32, 0.64, 1.28, 2.56, 5.12, 10.24, 20.48, 40.96, 81.92] }) | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. https://github.com/open-telemetry/opentelemetry-dotnet/blob/main/src/OpenTelemetry/CHANGELOG.md#1100-beta1 added support for Hints, so Views maybe replaced with the hint API itself. |
||
.AddMeter("Experimental.OpenAI.*", "OpenAI.*") | ||
.AddHttpClientInstrumentation() | ||
.AddConsoleExporter() | ||
.AddOtlpExporter() | ||
.Build(); | ||
|
||
// API key is read from the OPENAI_API_KEY environment variable.
ChatClient client = new("gpt-4o-mini", Environment.GetEnvironmentVariable("OPENAI_API_KEY")); | ||
|
||
ChatCompletion completion = await client.CompleteChatAsync("Say 'this is a test.'"); | ||
|
||
Console.WriteLine($"{completion}"); | ||
} | ||
} |
This file was deleted.
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -41,24 +41,12 @@ public void AllTelemetryOff() | |
Assert.IsNull(Activity.Current); | ||
} | ||
|
||
[Test] | ||
public void SwitchOffAllTelemetryOn() | ||
{ | ||
using var activityListener = new TestActivityListener("OpenAI.ChatClient"); | ||
using var meterListener = new TestMeterListener("OpenAI.ChatClient"); | ||
var telemetry = new OpenTelemetrySource(RequestModel, new Uri(Endpoint)); | ||
Assert.IsNull(telemetry.StartChatScope(new ChatCompletionOptions())); | ||
Assert.IsNull(Activity.Current); | ||
} | ||
|
||
[Test] | ||
public void MetricsOnTracingOff() | ||
{ | ||
using var _ = TestAppContextSwitchHelper.EnableOpenTelemetry(); | ||
|
||
var telemetry = new OpenTelemetrySource(RequestModel, new Uri(Endpoint)); | ||
|
||
using var meterListener = new TestMeterListener("OpenAI.ChatClient"); | ||
using var meterListener = new TestMeterListener("Experimental.OpenAI.ChatClient"); | ||
|
||
var elapsedMax = Stopwatch.StartNew(); | ||
using var scope = telemetry.StartChatScope(new ChatCompletionOptions()); | ||
|
@@ -83,10 +71,8 @@ public void MetricsOnTracingOff() | |
[Test] | ||
public void MetricsOnTracingOffException() | ||
{ | ||
using var _ = TestAppContextSwitchHelper.EnableOpenTelemetry(); | ||
|
||
var telemetry = new OpenTelemetrySource(RequestModel, new Uri(Endpoint)); | ||
using var meterListener = new TestMeterListener("OpenAI.ChatClient"); | ||
using var meterListener = new TestMeterListener("Experimental.OpenAI.ChatClient"); | ||
|
||
using (var scope = telemetry.StartChatScope(new ChatCompletionOptions())) | ||
{ | ||
|
@@ -100,10 +86,8 @@ public void MetricsOnTracingOffException() | |
[Test] | ||
public void TracingOnMetricsOff() | ||
{ | ||
using var _ = TestAppContextSwitchHelper.EnableOpenTelemetry(); | ||
|
||
var telemetry = new OpenTelemetrySource(RequestModel, new Uri(Endpoint)); | ||
using var listener = new TestActivityListener("OpenAI.ChatClient"); | ||
using var listener = new TestActivityListener("Experimental.OpenAI.ChatClient"); | ||
|
||
var chatCompletion = CreateChatCompletion(); | ||
|
||
|
@@ -129,9 +113,8 @@ public void TracingOnMetricsOff() | |
[Test] | ||
public void ChatTracingAllAttributes() | ||
{ | ||
using var _ = TestAppContextSwitchHelper.EnableOpenTelemetry(); | ||
var telemetry = new OpenTelemetrySource(RequestModel, new Uri(Endpoint)); | ||
using var listener = new TestActivityListener("OpenAI.ChatClient"); | ||
using var listener = new TestActivityListener("Experimental.OpenAI.ChatClient"); | ||
var options = new ChatCompletionOptions() | ||
{ | ||
Temperature = 0.42f, | ||
|
@@ -157,10 +140,8 @@ public void ChatTracingAllAttributes() | |
[Test] | ||
public void ChatTracingException() | ||
{ | ||
using var _ = TestAppContextSwitchHelper.EnableOpenTelemetry(); | ||
|
||
var telemetry = new OpenTelemetrySource(RequestModel, new Uri(Endpoint)); | ||
using var listener = new TestActivityListener("OpenAI.ChatClient"); | ||
using var listener = new TestActivityListener("Experimental.OpenAI.ChatClient"); | ||
|
||
var error = new SocketException(42, "test error"); | ||
using (var scope = telemetry.StartChatScope(new ChatCompletionOptions())) | ||
|
@@ -176,11 +157,10 @@ public void ChatTracingException() | |
[Test] | ||
public async Task ChatTracingAndMetricsMultiple() | ||
{ | ||
using var _ = TestAppContextSwitchHelper.EnableOpenTelemetry(); | ||
var source = new OpenTelemetrySource(RequestModel, new Uri(Endpoint)); | ||
|
||
using var activityListener = new TestActivityListener("OpenAI.ChatClient"); | ||
using var meterListener = new TestMeterListener("OpenAI.ChatClient"); | ||
using var activityListener = new TestActivityListener("Experimental.OpenAI.ChatClient"); | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I wish everyone used OTel's InMemoryExporter rather than writing their own listeners, but it is up to the owners of this project! There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Can you explain why? There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Listeners are not extensively documented, given they are generally only used by a small set of people, such as those authoring the OpenTelemetry SDK. Just a suggestion. |
||
using var meterListener = new TestMeterListener("Experimental.OpenAI.ChatClient"); | ||
|
||
var options = new ChatCompletionOptions(); | ||
|
||
|
This file was deleted.
Uh oh!
There was an error while loading. Please reload this page.