Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -82,14 +82,22 @@ def _generate_node_id() -> str:
return token_hex(8) # Generates 16 hex characters (8 bytes)


def _run_arguments(arguments: Mapping[str, Any]) -> Iterator[Tuple[str, AttributeValue]]:
user_id = arguments.get("user_id")
def _extract_session_id(arguments: Mapping[str, Any]) -> Optional[str]:
"""Extract session_id with proper fallback logic for Agno v2 compatibility."""
session_id = arguments.get("session_id")

# For agno v2: session_id might be in the session object for internal _run method
session = arguments.get("session")
if session and hasattr(session, "session_id"):
session_id = session.session_id
if not session_id:
session = arguments.get("session")
if session and hasattr(session, "session_id"):
session_id = session.session_id

return str(session_id) if session_id is not None else None


def _run_arguments(arguments: Mapping[str, Any]) -> Iterator[Tuple[str, AttributeValue]]:
user_id = arguments.get("user_id")
session_id = _extract_session_id(arguments)

if session_id:
yield SESSION_ID, session_id
Expand Down Expand Up @@ -271,11 +279,7 @@ def run_stream(
try:
yield from wrapped(*args, **kwargs)
# Use get_last_run_output instead of removed agent.run_response
session_id = None
try:
session_id = arguments.get("session_id")
except Exception:
session_id = None
session_id = _extract_session_id(arguments)

run_response = None
if hasattr(agent, "get_last_run_output"):
Expand Down Expand Up @@ -401,11 +405,7 @@ async def arun_stream(
yield response

# Use get_last_run_output instead of removed agent.run_response
session_id = None
try:
session_id = arguments.get("session_id")
except Exception:
session_id = None
session_id = _extract_session_id(arguments)

run_response = None
if hasattr(agent, "get_last_run_output"):
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,106 @@
interactions:
- request:
body: '{"messages":[{"role":"user","content":"What is 2+2?"}],"model":"gpt-4o-mini","stream":true}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate, br
connection:
- keep-alive
content-length:
- '91'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.108.0
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.108.0
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.13.5
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-test123","object":"chat.completion.chunk","created":1699999999,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-test123","object":"chat.completion.chunk","created":1699999999,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"content":"2"},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-test123","object":"chat.completion.chunk","created":1699999999,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"content":"+"},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-test123","object":"chat.completion.chunk","created":1699999999,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"content":"2"},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-test123","object":"chat.completion.chunk","created":1699999999,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"content":" equals"},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-test123","object":"chat.completion.chunk","created":1699999999,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"content":" 4"},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-test123","object":"chat.completion.chunk","created":1699999999,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}


data: {"id":"chatcmpl-test123","object":"chat.completion.chunk","created":1699999999,"model":"gpt-4o-mini","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}


data: [DONE]

'
headers:
cache-control:
- no-cache, must-revalidate
cf-cache-status:
- DYNAMIC
cf-ray:
- 8c123456789abcdef-SJC
content-type:
- text/plain; charset=utf-8
date:
- Fri, 10 Oct 2025 10:00:00 GMT
openai-model:
- gpt-4o-mini-2024-07-18
openai-organization:
- org-test123
openai-processing-ms:
- '500'
openai-version:
- '2020-10-01'
server:
- cloudflare
strict-transport-security:
- max-age=15724800; includeSubDomains
transfer-encoding:
- chunked
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '200000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '199950'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 15ms
x-request-id:
- req_test123456789abcdef
status:
code: 200
message: OK
version: 1
Original file line number Diff line number Diff line change
Expand Up @@ -143,9 +143,7 @@ def test_agno_team_coordinate_instrumentation(
name="Finance Agent",
role="Get financial data",
model=OpenAIChat(id="gpt-4o-mini"),
tools=[
YFinanceTools() # type: ignore
],
tools=[YFinanceTools()],
instructions="Use tables to display data",
)

Expand Down Expand Up @@ -231,3 +229,46 @@ def is_valid_node_id(node_id: str) -> bool:
assert web_agent_span is not None or finance_agent_span is not None, (
"At least one agent span should be found"
)


def test_session_id_streaming_regression(
    tracer_provider: TracerProvider,
    in_memory_span_exporter: InMemorySpanExporter,
    setup_agno_instrumentation: Any,
) -> None:
    """Regression test: ensure session_id is properly extracted in streaming methods."""
    with test_vcr.use_cassette(
        "agent_run_stream.yaml", filter_headers=["authorization", "X-API-KEY"]
    ):
        import os

        os.environ["OPENAI_API_KEY"] = "fake_key"

        streaming_agent = Agent(
            name="Stream Agent",
            model=OpenAIChat(id="gpt-4o-mini"),
        )

        session_id = "test_session_stream"
        # Drive the public run() API with stream=True (rather than the
        # private _run_stream) so the instrumented streaming path runs.
        result = streaming_agent.run("What is 2+2?", session_id=session_id, stream=True)

        # Drain the iterator to trigger the instrumentation end-of-stream logic.
        if hasattr(result, "__iter__"):
            list(result)

        finished = in_memory_span_exporter.get_finished_spans()
        assert len(finished) >= 1

        # Locate the streaming agent span and verify its session attribute.
        stream_span = None
        for exported in finished:
            if exported.name == "Stream_Agent.run":
                stream_span = dict(exported.attributes or {})
                break

        assert stream_span is not None, "Stream agent span should be found"
        assert stream_span.get("session.id") == session_id, (
            "Session ID should be properly extracted in streaming methods"
        )
Loading