Update openai instrumentation to use the Logs API instead of the deprecated Events API #3628

Open · wants to merge 6 commits into base: main

7 changes: 7 additions & 0 deletions CHANGELOG.md
@@ -11,6 +11,13 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## Unreleased

### Added

### Fixed

- `opentelemetry-instrumentation-openai-v2`: migrate from the deprecated Events API to the Logs API
  ([#3628](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/3628))

## Version 1.35.0/0.56b0 (2025-07-11)

### Added
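
For context on the user-facing side of this change: callers that previously handed `OpenAIInstrumentor().instrument()` an `event_logger_provider` now pass a `logger_provider`, as the updated test fixtures further down do. A minimal sketch, assuming an SDK `LoggerProvider` wired to an in-memory exporter (the setup here is illustrative, not part of the diff):

```python
from opentelemetry.instrumentation.openai_v2 import OpenAIInstrumentor
from opentelemetry.sdk._logs import LoggerProvider
from opentelemetry.sdk._logs.export import (
    InMemoryLogExporter,
    SimpleLogRecordProcessor,
)

# Illustrative setup: route emitted log records to an in-memory exporter.
log_exporter = InMemoryLogExporter()
logger_provider = LoggerProvider()
logger_provider.add_log_record_processor(SimpleLogRecordProcessor(log_exporter))

# The instrumentor is now wired to a LoggerProvider
# (previously: event_logger_provider=EventLoggerProvider(...)).
OpenAIInstrumentor().instrument(logger_provider=logger_provider)
```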

@@ -25,9 +25,9 @@ classifiers = [
"Programming Language :: Python :: 3.13",
]
dependencies = [
"opentelemetry-api ~= 1.30",
"opentelemetry-instrumentation ~= 0.51b0",
"opentelemetry-semantic-conventions ~= 0.51b0"
"opentelemetry-api >= 1.35.0",
"opentelemetry-instrumentation ~= 0.56b0",
"opentelemetry-semantic-conventions ~= 0.56b0"
]

[project.optional-dependencies]

@@ -44,7 +44,7 @@

from wrapt import wrap_function_wrapper

from opentelemetry._events import get_event_logger
from opentelemetry._logs import get_logger
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
from opentelemetry.instrumentation.openai_v2.package import _instruments
from opentelemetry.instrumentation.openai_v2.utils import is_content_enabled
@@ -73,12 +73,12 @@ def _instrument(self, **kwargs):
tracer_provider,
schema_url=Schemas.V1_28_0.value,
)
event_logger_provider = kwargs.get("event_logger_provider")
event_logger = get_event_logger(
logger_provider = kwargs.get("logger_provider")
logger = get_logger(
__name__,
"",
schema_url=Schemas.V1_28_0.value,
event_logger_provider=event_logger_provider,
logger_provider=logger_provider,
)
meter_provider = kwargs.get("meter_provider")
self._meter = get_meter(
@@ -94,15 +94,15 @@ def _instrument(self, **kwargs):
module="openai.resources.chat.completions",
name="Completions.create",
wrapper=chat_completions_create(
tracer, event_logger, instruments, is_content_enabled()
tracer, logger, instruments, is_content_enabled()
),
)

wrap_function_wrapper(
module="openai.resources.chat.completions",
name="AsyncCompletions.create",
wrapper=async_chat_completions_create(
tracer, event_logger, instruments, is_content_enabled()
tracer, logger, instruments, is_content_enabled()
),
)


@@ -18,7 +18,7 @@

from openai import Stream

from opentelemetry._events import Event, EventLogger
from opentelemetry._logs import Logger, LogRecord
from opentelemetry.semconv._incubating.attributes import (
gen_ai_attributes as GenAIAttributes,
)
@@ -40,7 +40,7 @@

def chat_completions_create(
tracer: Tracer,
event_logger: EventLogger,
logger: Logger,
instruments: Instruments,
capture_content: bool,
):
@@ -57,24 +57,22 @@ def traced_method(wrapped, instance, args, kwargs):
end_on_exit=False,
) as span:
for message in kwargs.get("messages", []):
event_logger.emit(message_to_event(message, capture_content))
logger.emit(message_to_event(message, capture_content))

start = default_timer()
result = None
error_type = None
try:
result = wrapped(*args, **kwargs)
if is_streaming(kwargs):
return StreamWrapper(
result, span, event_logger, capture_content
)
return StreamWrapper(result, span, logger, capture_content)

if span.is_recording():
_set_response_attributes(
span, result, event_logger, capture_content
span, result, logger, capture_content
)
for choice in getattr(result, "choices", []):
event_logger.emit(choice_to_event(choice, capture_content))
logger.emit(choice_to_event(choice, capture_content))

span.end()
return result
@@ -98,7 +96,7 @@ def traced_method(wrapped, instance, args, kwargs):

def async_chat_completions_create(
tracer: Tracer,
event_logger: EventLogger,
logger: Logger,
instruments: Instruments,
capture_content: bool,
):
@@ -115,24 +113,22 @@ async def traced_method(wrapped, instance, args, kwargs):
end_on_exit=False,
) as span:
for message in kwargs.get("messages", []):
event_logger.emit(message_to_event(message, capture_content))
logger.emit(message_to_event(message, capture_content))

start = default_timer()
result = None
error_type = None
try:
result = await wrapped(*args, **kwargs)
if is_streaming(kwargs):
return StreamWrapper(
result, span, event_logger, capture_content
)
return StreamWrapper(result, span, logger, capture_content)

if span.is_recording():
_set_response_attributes(
span, result, event_logger, capture_content
span, result, logger, capture_content
)
for choice in getattr(result, "choices", []):
event_logger.emit(choice_to_event(choice, capture_content))
logger.emit(choice_to_event(choice, capture_content))

span.end()
return result
@@ -221,7 +217,7 @@ def _record_metrics(


def _set_response_attributes(
span, result, event_logger: EventLogger, capture_content: bool
span, result, logger: Logger, capture_content: bool
):
set_span_attribute(
span, GenAIAttributes.GEN_AI_RESPONSE_MODEL, result.model
@@ -311,7 +307,7 @@ def __init__(
self,
stream: Stream,
span: Span,
event_logger: EventLogger,
logger: Logger,
capture_content: bool,
):
self.stream = stream
Expand All @@ -320,7 +316,7 @@ def __init__(
self._span_started = False
self.capture_content = capture_content

self.event_logger = event_logger
self.logger = logger
self.setup()

def setup(self):
@@ -399,9 +395,9 @@ def cleanup(self):

# this span is not current, so we need to manually set the context on event
span_ctx = self.span.get_span_context()
self.event_logger.emit(
Event(
name="gen_ai.choice",
self.logger.emit(
LogRecord(
event_name="gen_ai.choice",
attributes=event_attributes,
body=body,
trace_id=span_ctx.trace_id,

@@ -19,7 +19,7 @@
from httpx import URL
from openai import NOT_GIVEN

from opentelemetry._events import Event
from opentelemetry._logs import LogRecord
from opentelemetry.semconv._incubating.attributes import (
gen_ai_attributes as GenAIAttributes,
)
@@ -123,8 +123,8 @@ def message_to_event(message, capture_content):
if tool_call_id:
body["id"] = tool_call_id

return Event(
name=f"gen_ai.{role}.message",
return LogRecord(
event_name=f"gen_ai.{role}.message",
attributes=attributes,
body=body if body else None,
)
@@ -156,8 +156,8 @@ def choice_to_event(choice, capture_content):
message["content"] = content
body["message"] = message

return Event(
name="gen_ai.choice",
return LogRecord(
event_name="gen_ai.choice",
attributes=attributes,
body=body,
)
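
Since `message_to_event` and `choice_to_event` keep their names but now build `LogRecord`s, a quick sanity check of the new return type (the message dict is an arbitrary example; the body layout is an assumption based on the surrounding code, only the event names are taken from the diff):

```python
from opentelemetry.instrumentation.openai_v2.utils import message_to_event

record = message_to_event({"role": "user", "content": "hi"}, capture_content=True)
assert record.event_name == "gen_ai.user.message"  # previously Event(name="gen_ai.user.message")
print(record.body)  # assumption: includes the content only when capture is enabled
```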

@@ -11,7 +11,6 @@
from opentelemetry.instrumentation.openai_v2.utils import (
OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT,
)
from opentelemetry.sdk._events import EventLoggerProvider
from opentelemetry.sdk._logs import LoggerProvider
from opentelemetry.sdk._logs.export import (
InMemoryLogExporter,
@@ -56,13 +55,11 @@ def fixture_tracer_provider(span_exporter):
return provider


@pytest.fixture(scope="function", name="event_logger_provider")
def fixture_event_logger_provider(log_exporter):
@pytest.fixture(scope="function", name="logger_provider")
def fixture_logger_provider(log_exporter):
provider = LoggerProvider()
provider.add_log_record_processor(SimpleLogRecordProcessor(log_exporter))
event_logger_provider = EventLoggerProvider(provider)

return event_logger_provider
return provider


@pytest.fixture(scope="function", name="meter_provider")
@@ -105,17 +102,15 @@ def vcr_config():


@pytest.fixture(scope="function")
def instrument_no_content(
tracer_provider, event_logger_provider, meter_provider
):
def instrument_no_content(tracer_provider, logger_provider, meter_provider):
os.environ.update(
{OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT: "False"}
)

instrumentor = OpenAIInstrumentor()
instrumentor.instrument(
tracer_provider=tracer_provider,
event_logger_provider=event_logger_provider,
logger_provider=logger_provider,
meter_provider=meter_provider,
)

@@ -125,16 +120,14 @@ def instrument_no_content(


@pytest.fixture(scope="function")
def instrument_with_content(
tracer_provider, event_logger_provider, meter_provider
):
def instrument_with_content(tracer_provider, logger_provider, meter_provider):
os.environ.update(
{OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT: "True"}
)
instrumentor = OpenAIInstrumentor()
instrumentor.instrument(
tracer_provider=tracer_provider,
event_logger_provider=event_logger_provider,
logger_provider=logger_provider,
meter_provider=meter_provider,
)

Expand All @@ -145,7 +138,7 @@ def instrument_with_content(

@pytest.fixture(scope="function")
def instrument_with_content_unsampled(
span_exporter, event_logger_provider, meter_provider
span_exporter, logger_provider, meter_provider
):
os.environ.update(
{OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT: "True"}
@@ -157,7 +150,7 @@ def instrument_with_content_unsampled(
instrumentor = OpenAIInstrumentor()
instrumentor.instrument(
tracer_provider=tracer_provider,
event_logger_provider=event_logger_provider,
logger_provider=logger_provider,
meter_provider=meter_provider,
)


@@ -26,8 +26,8 @@ pytest-vcr==1.0.2
pytest-asyncio==0.21.0
wrapt==1.16.0
opentelemetry-exporter-otlp-proto-http~=1.30
opentelemetry-api==1.30 # when updating, also update in pyproject.toml
opentelemetry-sdk==1.30 # when updating, also update in pyproject.toml
opentelemetry-semantic-conventions==0.51b0 # when updating, also update in pyproject.toml
opentelemetry-api==1.35 # when updating, also update in pyproject.toml
opentelemetry-sdk==1.35 # when updating, also update in pyproject.toml
opentelemetry-semantic-conventions==0.56b0 # when updating, also update in pyproject.toml

-e instrumentation-genai/opentelemetry-instrumentation-openai-v2
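
The test updates below follow directly from this migration: assertions move from the `event.name` attribute to `log_record.event_name`. A short sketch of reading the exported records through the in-memory exporter configured in the fixtures (variable names follow the conftest fixtures):

```python
# log_exporter is the InMemoryLogExporter created in conftest.py above.
for log_data in log_exporter.get_finished_logs():
    record = log_data.log_record
    print(record.event_name, record.attributes.get("gen_ai.system"))
```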

@@ -23,9 +23,6 @@
from opentelemetry.semconv._incubating.attributes import (
error_attributes as ErrorAttributes,
)
from opentelemetry.semconv._incubating.attributes import (
event_attributes as EventAttributes,
)
from opentelemetry.semconv._incubating.attributes import (
gen_ai_attributes as GenAIAttributes,
)
@@ -797,7 +794,7 @@ async def async_chat_completion_multiple_tools_streaming(


def assert_message_in_logs(log, event_name, expected_content, parent_span):
assert log.log_record.attributes[EventAttributes.EVENT_NAME] == event_name
assert log.log_record.event_name == event_name
assert (
log.log_record.attributes[GenAIAttributes.GEN_AI_SYSTEM]
== GenAIAttributes.GenAiSystemValues.OPENAI.value

@@ -23,9 +23,6 @@
from opentelemetry.semconv._incubating.attributes import (
error_attributes as ErrorAttributes,
)
from opentelemetry.semconv._incubating.attributes import (
event_attributes as EventAttributes,
)
from opentelemetry.semconv._incubating.attributes import (
gen_ai_attributes as GenAIAttributes,
)
@@ -812,7 +809,7 @@ def chat_completion_multiple_tools_streaming(


def assert_message_in_logs(log, event_name, expected_content, parent_span):
assert log.log_record.attributes[EventAttributes.EVENT_NAME] == event_name
assert log.log_record.event_name == event_name
assert (
log.log_record.attributes[GenAIAttributes.GEN_AI_SYSTEM]
== GenAIAttributes.GenAiSystemValues.OPENAI.value