Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
28 changes: 24 additions & 4 deletions python/packages/core/agent_framework/openai/_responses_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -1433,11 +1433,20 @@ def _parse_response_from_openai(
)
case "reasoning": # ResponseOutputReasoning
added_reasoning = False
# Extract encrypted_content once so it is propagated through
# whichever branch fires and can round-trip via
# _prepare_content_for_openai. Previously only the fallback
# (no-content, no-summary) branch captured it, so responses
# containing both summaries and encrypted_content silently
# dropped the encrypted payload. See #4644.
encrypted_content = getattr(item, "encrypted_content", None)
if item_content := getattr(item, "content", None):
for index, reasoning_content in enumerate(item_content):
additional_properties: dict[str, Any] = {}
if hasattr(item, "summary") and item.summary and index < len(item.summary):
additional_properties["summary"] = item.summary[index]
if encrypted_content:
additional_properties["encrypted_content"] = encrypted_content
contents.append(
Content.from_text_reasoning(
id=item.id,
Expand All @@ -1449,20 +1458,24 @@ def _parse_response_from_openai(
added_reasoning = True
if item_summary := getattr(item, "summary", None):
for summary in item_summary:
summary_additional: dict[str, Any] = {}
if encrypted_content:
summary_additional["encrypted_content"] = encrypted_content
contents.append(
Content.from_text_reasoning(
id=item.id,
text=summary.text,
raw_representation=summary, # type: ignore[arg-type]
additional_properties=summary_additional or None,
)
)
added_reasoning = True
if not added_reasoning:
# Reasoning item with no visible text (e.g. encrypted reasoning).
# Always emit an empty marker so co-occurrence detection can be done
additional_properties_empty: dict[str, Any] = {}
if encrypted := getattr(item, "encrypted_content", None):
additional_properties_empty["encrypted_content"] = encrypted
if encrypted_content:
additional_properties_empty["encrypted_content"] = encrypted_content
contents.append(
Content.from_text_reasoning(
id=item.id,
Expand Down Expand Up @@ -2079,6 +2092,11 @@ def _parse_chunk_from_openai(
case "reasoning": # ResponseOutputReasoning
reasoning_id = getattr(event_item, "id", None)
added_reasoning = False
# Extract encrypted_content once so it is propagated
# through whichever branch fires – mirrors the
# non-streaming fix in _parse_response_from_openai.
# See #4644.
encrypted_content = getattr(event_item, "encrypted_content", None)
if hasattr(event_item, "content") and event_item.content:
for index, reasoning_content in enumerate(event_item.content):
additional_properties: dict[str, Any] = {}
Expand All @@ -2088,6 +2106,8 @@ def _parse_chunk_from_openai(
and index < len(event_item.summary)
):
additional_properties["summary"] = event_item.summary[index]
if encrypted_content:
additional_properties["encrypted_content"] = encrypted_content
contents.append(
Content.from_text_reasoning(
id=reasoning_id or None,
Expand All @@ -2101,8 +2121,8 @@ def _parse_chunk_from_openai(
# Reasoning item with no visible text (e.g. encrypted reasoning).
# Always emit an empty marker so co-occurrence detection can occur.
additional_properties_empty: dict[str, Any] = {}
if encrypted := getattr(event_item, "encrypted_content", None):
additional_properties_empty["encrypted_content"] = encrypted
if encrypted_content:
additional_properties_empty["encrypted_content"] = encrypted_content
contents.append(
Content.from_text_reasoning(
id=reasoning_id or None,
Expand Down
118 changes: 118 additions & 0 deletions python/packages/core/tests/openai/test_openai_responses_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -518,6 +518,7 @@ def test_response_content_creation_with_reasoning() -> None:
mock_reasoning_item.type = "reasoning"
mock_reasoning_item.content = [mock_reasoning_content]
mock_reasoning_item.summary = [Summary(text="Summary", type="summary_text")]
mock_reasoning_item.encrypted_content = None

mock_response.output = [mock_reasoning_item]

Expand All @@ -528,6 +529,122 @@ def test_response_content_creation_with_reasoning() -> None:
assert response.messages[0].contents[0].text == "Reasoning step"


def test_response_reasoning_preserves_encrypted_content_with_summary() -> None:
    """Verify encrypted_content survives when content and summary are both present.

    Regression test for #4644: _parse_response_from_openai dropped encrypted_content
    when reasoning summaries were also present because only the fallback (no-content,
    no-summary) branch captured it.
    """
    client = OpenAIResponsesClient(model_id="test-model", api_key="test-key")

    fake_response = MagicMock()
    fake_response.output_parsed = None
    fake_response.metadata = {}
    fake_response.usage = None
    fake_response.id = "test-id"
    fake_response.model = "test-model"
    fake_response.created_at = 1000000000

    # A reasoning item carrying clear-text content, a summary, AND an encrypted payload.
    reasoning_step = MagicMock()
    reasoning_step.text = "Reasoning step"

    reasoning_item = MagicMock()
    reasoning_item.type = "reasoning"
    reasoning_item.id = "rs_enc"
    reasoning_item.content = [reasoning_step]
    reasoning_item.summary = [Summary(text="Summary text", type="summary_text")]
    reasoning_item.encrypted_content = "gAAAA_encrypted_payload"

    fake_response.output = [reasoning_item]

    parsed = client._parse_response_from_openai(fake_response, options={})  # type: ignore

    reasoning = [c for c in parsed.messages[0].contents if c.type == "text_reasoning"]

    # The content branch must carry encrypted_content in additional_properties.
    assert len(reasoning) >= 1
    first = reasoning[0]
    assert first.text == "Reasoning step"
    assert first.additional_properties is not None
    assert first.additional_properties.get("encrypted_content") == "gAAAA_encrypted_payload"
    assert first.additional_properties.get("summary") is not None

    # The summary branch's Content must carry encrypted_content as well.
    assert len(reasoning) >= 2
    second = reasoning[1]
    assert second.additional_properties is not None
    assert second.additional_properties.get("encrypted_content") == "gAAAA_encrypted_payload"


def test_response_reasoning_preserves_encrypted_content_summary_only() -> None:
    """Verify encrypted_content survives when the item has a summary but no content.

    Covers the case where the API returns summary + encrypted_content but no
    clear-text reasoning content.
    """
    client = OpenAIResponsesClient(model_id="test-model", api_key="test-key")

    fake_response = MagicMock()
    fake_response.output_parsed = None
    fake_response.metadata = {}
    fake_response.usage = None
    fake_response.id = "test-id"
    fake_response.model = "test-model"
    fake_response.created_at = 1000000000

    # Summary-only reasoning item: no clear-text content, but an encrypted payload.
    reasoning_item = MagicMock()
    reasoning_item.type = "reasoning"
    reasoning_item.id = "rs_enc2"
    reasoning_item.content = None  # No clear-text content
    reasoning_item.summary = [Summary(text="Summary only", type="summary_text")]
    reasoning_item.encrypted_content = "gAAAA_encrypted_payload_2"

    fake_response.output = [reasoning_item]

    parsed = client._parse_response_from_openai(fake_response, options={})  # type: ignore

    reasoning = [c for c in parsed.messages[0].contents if c.type == "text_reasoning"]
    assert len(reasoning) >= 1

    # The summary-derived Content must carry the encrypted payload.
    summary_entry = reasoning[0]
    assert summary_entry.text == "Summary only"
    assert summary_entry.additional_properties is not None
    assert summary_entry.additional_properties.get("encrypted_content") == "gAAAA_encrypted_payload_2"


def test_response_reasoning_no_encrypted_content() -> None:
    """Verify additional_properties omits encrypted_content when the field is None/absent."""
    client = OpenAIResponsesClient(model_id="test-model", api_key="test-key")

    fake_response = MagicMock()
    fake_response.output_parsed = None
    fake_response.metadata = {}
    fake_response.usage = None
    fake_response.id = "test-id"
    fake_response.model = "test-model"
    fake_response.created_at = 1000000000

    # Reasoning item with content and summary but no encrypted payload.
    reasoning_step = MagicMock()
    reasoning_step.text = "Reasoning step"

    reasoning_item = MagicMock()
    reasoning_item.type = "reasoning"
    reasoning_item.id = "rs_no_enc"
    reasoning_item.content = [reasoning_step]
    reasoning_item.summary = [Summary(text="Summary text", type="summary_text")]
    reasoning_item.encrypted_content = None

    fake_response.output = [reasoning_item]

    parsed = client._parse_response_from_openai(fake_response, options={})  # type: ignore

    reasoning = [c for c in parsed.messages[0].contents if c.type == "text_reasoning"]
    assert len(reasoning) >= 1
    for entry in reasoning:
        # additional_properties should either be None or not contain encrypted_content
        if entry.additional_properties is not None:
            assert "encrypted_content" not in entry.additional_properties


def test_response_content_keeps_reasoning_and_function_calls_in_one_message() -> None:
"""Reasoning + function calls should parse into one assistant message."""
client = OpenAIResponsesClient(model_id="test-model", api_key="test-key")
Expand All @@ -548,6 +665,7 @@ def test_response_content_keeps_reasoning_and_function_calls_in_one_message() ->
mock_reasoning_item.id = "rs_123"
mock_reasoning_item.content = [mock_reasoning_content]
mock_reasoning_item.summary = []
mock_reasoning_item.encrypted_content = None

mock_function_call_item_1 = MagicMock()
mock_function_call_item_1.type = "function_call"
Expand Down