Skip to content
Open
Show file tree
Hide file tree
Changes from 13 commits
Commits
Show all changes
24 commits
Select commit Hold shift + click to select a range
1c54afd
fix(openai): add streaming support for Responses API
rtyer Sep 30, 2025
fce6e2d
refactor: align responses streaming with chat API pattern
rtyer Sep 30, 2025
91768bd
fix: remove duplicate SpanAttributes import
rtyer Sep 30, 2025
104d842
clean up comments
rtyer Sep 30, 2025
0ddba72
Update packages/opentelemetry-instrumentation-openai/tests/traces/tes…
rtyer Sep 30, 2025
dd36039
test clean up
rtyer Sep 30, 2025
ef7a46d
fix: preserve exception details in streaming spans
rtyer Sep 30, 2025
a28e427
docstrings
rtyer Sep 30, 2025
ecf8040
fix: add missing __aiter__ method to ResponseStream for async iteration
rtyer Oct 1, 2025
230b854
fix: ensure ResponseStream is recognized as async-iterable by Python
rtyer Oct 1, 2025
1eb44d2
Merge branch 'main' into fix-responses-api-streaming-support
rtyer Oct 1, 2025
935e51c
fix: properly accumulate output text from streaming response chunks
rtyer Oct 1, 2025
274da69
feat: add comprehensive debug logging for streaming responses
rtyer Oct 1, 2025
dd741da
fix: correct time scale mismatch between seconds and nanoseconds
rtyer Oct 1, 2025
ea9693d
feat: handle streaming event objects directly in responses API
rtyer Oct 1, 2025
330c7c0
fix: correct event attribute names and add missing event handlers
rtyer Oct 1, 2025
dc2022c
chore: change set_data_attributes logs from debug to info
rtyer Oct 1, 2025
ca3126e
chore: add info logs for stream initialization input_data
rtyer Oct 1, 2025
c7527c3
feat: add support for function_call_output input type
rtyer Oct 1, 2025
a686f1a
fix: store complete output items from ResponseOutputItemDoneEvent
rtyer Oct 1, 2025
751d1cb
chore: remove debug logging statements
rtyer Oct 2, 2025
af3a2a4
fix: address code review issues in responses streaming
rtyer Oct 2, 2025
d1997a5
fix: reuse existing span for non-streaming completed responses
rtyer Oct 2, 2025
5dbcd70
added support for parse
rtyer Oct 9, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -113,6 +113,36 @@ def wrapper(wrapped, instance, args, kwargs):
return _with_chat_telemetry


def _with_responses_telemetry_wrapper(func):
def _with_responses_telemetry(
tracer,
token_counter,
choice_counter,
duration_histogram,
exception_counter,
streaming_time_to_first_token,
streaming_time_to_generate,
):
def wrapper(wrapped, instance, args, kwargs):
return func(
tracer,
token_counter,
choice_counter,
duration_histogram,
exception_counter,
streaming_time_to_first_token,
streaming_time_to_generate,
wrapped,
instance,
args,
kwargs,
)

return wrapper

return _with_responses_telemetry


def _with_tracer_wrapper(func):
def _with_tracer(tracer):
def wrapper(wrapped, instance, args, kwargs):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -302,12 +302,28 @@ def _instrument(self, **kwargs):
self._try_wrap(
"openai.resources.responses",
"Responses.create",
responses_get_or_create_wrapper(tracer),
responses_get_or_create_wrapper(
tracer,
tokens_histogram,
chat_choice_counter,
duration_histogram,
chat_exception_counter,
streaming_time_to_first_token,
streaming_time_to_generate,
),
)
self._try_wrap(
"openai.resources.responses",
"Responses.retrieve",
responses_get_or_create_wrapper(tracer),
responses_get_or_create_wrapper(
tracer,
tokens_histogram,
chat_choice_counter,
duration_histogram,
chat_exception_counter,
streaming_time_to_first_token,
streaming_time_to_generate,
),
)
self._try_wrap(
"openai.resources.responses",
Expand All @@ -317,12 +333,28 @@ def _instrument(self, **kwargs):
self._try_wrap(
"openai.resources.responses",
"AsyncResponses.create",
async_responses_get_or_create_wrapper(tracer),
async_responses_get_or_create_wrapper(
tracer,
tokens_histogram,
chat_choice_counter,
duration_histogram,
chat_exception_counter,
streaming_time_to_first_token,
streaming_time_to_generate,
),
)
self._try_wrap(
"openai.resources.responses",
"AsyncResponses.retrieve",
async_responses_get_or_create_wrapper(tracer),
async_responses_get_or_create_wrapper(
tracer,
tokens_histogram,
chat_choice_counter,
duration_histogram,
chat_exception_counter,
streaming_time_to_first_token,
streaming_time_to_generate,
),
)
self._try_wrap(
"openai.resources.responses",
Expand Down
Loading