|
2 | 2 |
|
3 | 3 | import contextlib |
4 | 4 | import inspect |
| 5 | +import json |
5 | 6 | from collections.abc import AsyncIterable, MutableSequence |
6 | 7 | from typing import Any |
7 | | -from unittest.mock import AsyncMock, MagicMock |
| 8 | +from unittest.mock import AsyncMock, MagicMock, patch |
8 | 9 | from uuid import uuid4 |
9 | 10 |
|
10 | 11 | import pytest |
@@ -1943,6 +1944,128 @@ async def test_stores_by_default_with_store_false_in_default_options_injects_inm |
1943 | 1944 | assert any(isinstance(p, InMemoryHistoryProvider) for p in agent.context_providers) |
1944 | 1945 |
|
1945 | 1946 |
|
async def test_shared_local_storage_cross_provider_responses_history_does_not_leak_fc_id() -> None:
    """Responses-specific replay metadata should stay local to Responses when session storage is shared."""
    from openai.types.chat.chat_completion import ChatCompletion, Choice
    from openai.types.chat.chat_completion_message import ChatCompletionMessage

    from agent_framework._sessions import InMemoryHistoryProvider
    from agent_framework.openai import OpenAIChatClient, OpenAIResponsesClient

    @tool(approval_mode="never_require")
    def search_hotels(city: str) -> str:
        return f"Found 3 hotels in {city}"

    def _build_response(response_id: str, created_at: int, finish_reason: str, output: list[Any]) -> MagicMock:
        # Minimal stand-in for an OpenAI Responses API payload; only the
        # attributes the client actually reads are populated.
        payload = MagicMock()
        payload.output_parsed = None
        payload.metadata = {}
        payload.usage = None
        payload.id = response_id
        payload.model = "test-model"
        payload.created_at = created_at
        payload.status = "completed"
        payload.finish_reason = finish_reason
        payload.incomplete = None
        payload.output = output
        return payload

    # A Responses-style function call whose provider item id ("fc_...") must
    # never leak into requests made through a different client.
    fc_item = MagicMock()
    fc_item.type = "function_call"
    fc_item.id = "fc_provider123"
    fc_item.call_id = "call_1"
    fc_item.name = "search_hotels"
    fc_item.arguments = '{"city": "Paris"}'
    fc_item.status = "completed"

    text_part = MagicMock()
    text_part.type = "output_text"
    text_part.text = "Hotel Lutetia is the cheapest option."
    text_item = MagicMock()
    text_item.type = "message"
    text_item.content = [text_part]

    first_turn = _build_response("resp_1", 1000000000, "tool_calls", [fc_item])
    second_turn = _build_response("resp_2", 1000000001, "stop", [text_item])

    responses_client = OpenAIResponsesClient(model_id="test-model", api_key="test-key")
    responses_agent = Agent(
        client=responses_client,
        tools=[search_hotels],
        default_options={"store": False},
    )
    session = responses_agent.create_session()

    with patch.object(
        responses_client.client.responses,
        "create",
        side_effect=[first_turn, second_turn],
    ) as mock_responses_create:
        responses_result = await responses_agent.run("Find me a hotel in Paris", session=session)

    assert responses_result.text == "Hotel Lutetia is the cheapest option."
    assert any(isinstance(provider, InMemoryHistoryProvider) for provider in responses_agent.context_providers)

    # The shared history keeps the provider id only as metadata on the stored
    # function-call content...
    shared_messages = session.state[InMemoryHistoryProvider.DEFAULT_SOURCE_ID]["messages"]
    stored_call = next(
        part
        for message in shared_messages
        for part in message.contents
        if part.type == "function_call"
    )
    assert stored_call.additional_properties is not None
    assert stored_call.additional_properties.get("fc_id") == "fc_provider123"

    # ...and the Responses client replays that id verbatim on its second turn.
    replay_input = mock_responses_create.call_args_list[1].kwargs["input"]
    replayed_call = next(entry for entry in replay_input if entry.get("type") == "function_call")
    assert replayed_call["id"] == "fc_provider123"

    # Reuse the very same session through a Chat Completions client.
    chat_client = OpenAIChatClient(model_id="test-model", api_key="test-key")
    chat_agent = Agent(client=chat_client)

    chat_completion = ChatCompletion(
        id="chatcmpl-test",
        object="chat.completion",
        created=1234567890,
        model="gpt-4o-mini",
        choices=[
            Choice(
                index=0,
                message=ChatCompletionMessage(role="assistant", content="The cheapest option is still Hotel Lutetia."),
                finish_reason="stop",
            )
        ],
    )

    with patch.object(
        chat_client.client.chat.completions,
        "create",
        new=AsyncMock(return_value=chat_completion),
    ) as mock_chat_create:
        chat_result = await chat_agent.run("Which option is cheapest?", session=session)

    assert chat_result.text == "The cheapest option is still Hotel Lutetia."

    # The chat request must carry the portable tool-call id ("call_1") and the
    # tool result, but no trace of the Responses-only "fc_..." identifier.
    request_messages = mock_chat_create.call_args.kwargs["messages"]
    assistant_with_tools = next(
        msg for msg in request_messages if msg.get("role") == "assistant" and msg.get("tool_calls")
    )
    first_tool_call = assistant_with_tools["tool_calls"][0]
    assert first_tool_call["id"] == "call_1"
    assert first_tool_call["function"]["name"] == "search_hotels"

    tool_reply = next(
        msg
        for msg in request_messages
        if msg.get("role") == "tool" and msg.get("tool_call_id") == "call_1"
    )
    assert tool_reply["content"] == "Found 3 hotels in Paris"
    assert "fc_provider123" not in json.dumps(request_messages)
| 2068 | + |
1946 | 2069 | # region as_tool user_input_request propagation |
1947 | 2070 |
|
1948 | 2071 |
|
|
0 commit comments