Skip to content

Commit 0cd40f8

Browse files
eavanvalkenburg, Copilot, Copilot
authored
Python: [BREAKING] Refactor middleware layering and split Anthropic raw client (#4746)
* [BREAKING] Refactor middleware layering and raw clients Reorder chat client layers so function invocation wraps chat middleware, and chat middleware stays outside telemetry while still running for each inner model call. Add middleware pipeline caching, refresh docs and samples, and split Anthropic into raw and public clients to match the standard layering model. Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> * Tighten typing ignores in ancillary modules Add targeted typing ignores in workflow visualization and lab modules so pyright stays clean alongside the middleware refactor work. Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> * Fix categorize_middleware to unpack tuple/Sequence and use relative MRO assertions - Broaden isinstance check in categorize_middleware from list to Sequence so tuples and other Sequence types are properly unpacked instead of being appended as a single item. - Replace fragile hardcoded MRO index assertions in anthropic test with relative ordering via mro.index(). - Add regression tests for categorize_middleware with tuple, list, and None inputs. 
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> * Fix middleware string decomposition, add middleware param to FunctionInvocationLayer, and add tests (#4710) - Guard categorize_middleware Sequence check against str/bytes to prevent character-by-character decomposition of accidentally passed strings - Add explicit middleware parameter to FunctionInvocationLayer.get_response and merge it into client_kwargs before categorization, fixing the inconsistency where only OpenAIChatClient supported this parameter - Add assertions that RawAnthropicClient does not inherit convenience layers - Add chat middleware cache test with non-empty base middleware - Add tests for single unwrapped middleware item and string input Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> * Apply pre-commit auto-fixes * Apply pre-commit auto-fixes * Address review feedback for #4710: review comment fixes --------- Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> Co-authored-by: Copilot <copilot@github.com>
1 parent cefda44 commit 0cd40f8

41 files changed

Lines changed: 937 additions & 156 deletions

File tree

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

python/packages/ag-ui/agent_framework_ag_ui/_client.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -111,8 +111,8 @@ def _map_update(update: ChatResponseUpdate) -> ChatResponseUpdate:
111111

112112
@_apply_server_function_call_unwrap
113113
class AGUIChatClient(
114-
ChatMiddlewareLayer[AGUIChatOptionsT],
115114
FunctionInvocationLayer[AGUIChatOptionsT],
115+
ChatMiddlewareLayer[AGUIChatOptionsT],
116116
ChatTelemetryLayer[AGUIChatOptionsT],
117117
BaseChatClient[AGUIChatOptionsT],
118118
Generic[AGUIChatOptionsT],

python/packages/ag-ui/tests/ag_ui/conftest.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -45,16 +45,16 @@ def pytest_configure() -> None:
4545

4646

4747
class StreamingChatClientStub(
48-
ChatMiddlewareLayer[OptionsCoT],
4948
FunctionInvocationLayer[OptionsCoT],
49+
ChatMiddlewareLayer[OptionsCoT],
5050
ChatTelemetryLayer[OptionsCoT],
5151
BaseChatClient[OptionsCoT],
5252
Generic[OptionsCoT],
5353
):
5454
"""Typed streaming stub that satisfies SupportsChatGetResponse."""
5555

5656
def __init__(self, stream_fn: StreamFn, response_fn: ResponseFn | None = None) -> None:
57-
super().__init__(function_middleware=[])
57+
super().__init__(middleware=[])
5858
self._stream_fn = stream_fn
5959
self._response_fn = response_fn
6060
self.last_session: AgentSession | None = None

python/packages/anthropic/agent_framework_anthropic/__init__.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
import importlib.metadata
44

5-
from ._chat_client import AnthropicChatOptions, AnthropicClient
5+
from ._chat_client import AnthropicChatOptions, AnthropicClient, RawAnthropicClient
66

77
try:
88
__version__ = importlib.metadata.version(__name__)
@@ -12,5 +12,6 @@
1212
__all__ = [
1313
"AnthropicChatOptions",
1414
"AnthropicClient",
15+
"RawAnthropicClient",
1516
"__version__",
1617
]

python/packages/anthropic/agent_framework_anthropic/_chat_client.py

Lines changed: 114 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -68,6 +68,7 @@
6868
__all__ = [
6969
"AnthropicChatOptions",
7070
"AnthropicClient",
71+
"RawAnthropicClient",
7172
"ThinkingConfig",
7273
]
7374

@@ -210,14 +211,24 @@ class AnthropicSettings(TypedDict, total=False):
210211
chat_model_id: str | None
211212

212213

213-
class AnthropicClient(
214-
ChatMiddlewareLayer[AnthropicOptionsT],
215-
FunctionInvocationLayer[AnthropicOptionsT],
216-
ChatTelemetryLayer[AnthropicOptionsT],
214+
class RawAnthropicClient(
217215
BaseChatClient[AnthropicOptionsT],
218216
Generic[AnthropicOptionsT],
219217
):
220-
"""Anthropic Chat client with middleware, telemetry, and function invocation support."""
218+
"""Raw Anthropic chat client without middleware, telemetry, or function invocation support.
219+
220+
Warning:
221+
**This class should not normally be used directly.** It does not include middleware,
222+
telemetry, or function invocation support that you most likely need. If you do use it,
223+
you should consider which additional layers to apply. There is a defined ordering that
224+
you should follow:
225+
226+
1. **FunctionInvocationLayer** - Owns the tool/function calling loop and routes function middleware
227+
2. **ChatMiddlewareLayer** - Applies chat middleware per model call and stays outside telemetry
228+
3. **ChatTelemetryLayer** - Must stay inside chat middleware for correct per-call telemetry
229+
230+
Use ``AnthropicClient`` instead for a fully-featured client with all layers applied.
231+
"""
221232

222233
OTEL_PROVIDER_NAME: ClassVar[str] = "anthropic" # type: ignore[reportIncompatibleVariableOverride, misc]
223234

@@ -229,12 +240,10 @@ def __init__(
229240
anthropic_client: AsyncAnthropic | None = None,
230241
additional_beta_flags: list[str] | None = None,
231242
additional_properties: dict[str, Any] | None = None,
232-
middleware: Sequence[ChatAndFunctionMiddlewareTypes] | None = None,
233-
function_invocation_configuration: FunctionInvocationConfiguration | None = None,
234243
env_file_path: str | None = None,
235244
env_file_encoding: str | None = None,
236245
) -> None:
237-
"""Initialize an Anthropic Agent client.
246+
"""Initialize a raw Anthropic client.
238247
239248
Keyword Args:
240249
api_key: The Anthropic API key to use for authentication.
@@ -245,37 +254,35 @@ def __init__(
245254
additional_beta_flags: Additional beta flags to enable on the client.
246255
Default flags are: "mcp-client-2025-04-04", "code-execution-2025-08-25".
247256
additional_properties: Additional properties stored on the client instance.
248-
middleware: Optional middleware to apply to the client.
249-
function_invocation_configuration: Optional function invocation configuration override.
250257
env_file_path: Path to environment file for loading settings.
251258
env_file_encoding: Encoding of the environment file.
252259
253260
Examples:
254261
.. code-block:: python
255262
256-
from agent_framework.anthropic import AnthropicClient
263+
from agent_framework.anthropic import RawAnthropicClient
257264
from azure.identity.aio import DefaultAzureCredential
258265
259266
# Using environment variables
260267
# Set ANTHROPIC_API_KEY=your_anthropic_api_key
261268
# ANTHROPIC_CHAT_MODEL_ID=claude-sonnet-4-5-20250929
262269
263270
# Or passing parameters directly
264-
client = AnthropicClient(
271+
client = RawAnthropicClient(
265272
model_id="claude-sonnet-4-5-20250929",
266273
api_key="your_anthropic_api_key",
267274
)
268275
269276
# Or loading from a .env file
270-
client = AnthropicClient(env_file_path="path/to/.env")
277+
client = RawAnthropicClient(env_file_path="path/to/.env")
271278
272279
# Or passing in an existing client
273280
from anthropic import AsyncAnthropic
274281
275282
anthropic_client = AsyncAnthropic(
276283
api_key="your_anthropic_api_key", base_url="https://custom-anthropic-endpoint.com"
277284
)
278-
client = AnthropicClient(
285+
client = RawAnthropicClient(
279286
model_id="claude-sonnet-4-5-20250929",
280287
anthropic_client=anthropic_client,
281288
)
@@ -289,7 +296,7 @@ class MyOptions(AnthropicChatOptions, total=False):
289296
my_custom_option: str
290297
291298
292-
client: AnthropicClient[MyOptions] = AnthropicClient(model_id="claude-sonnet-4-5-20250929")
299+
client: RawAnthropicClient[MyOptions] = RawAnthropicClient(model_id="claude-sonnet-4-5-20250929")
293300
response = await client.get_response("Hello", options={"my_custom_option": "value"})
294301
295302
"""
@@ -320,8 +327,6 @@ class MyOptions(AnthropicChatOptions, total=False):
320327
# Initialize parent
321328
super().__init__(
322329
additional_properties=additional_properties,
323-
middleware=middleware,
324-
function_invocation_configuration=function_invocation_configuration,
325330
)
326331

327332
# Initialize instance variables
@@ -1376,3 +1381,95 @@ def service_url(self) -> str:
13761381
The service URL for the chat client, or None if not set.
13771382
"""
13781383
return str(self.anthropic_client.base_url)
1384+
1385+
1386+
class AnthropicClient(
1387+
FunctionInvocationLayer[AnthropicOptionsT],
1388+
ChatMiddlewareLayer[AnthropicOptionsT],
1389+
ChatTelemetryLayer[AnthropicOptionsT],
1390+
RawAnthropicClient[AnthropicOptionsT],
1391+
Generic[AnthropicOptionsT],
1392+
):
1393+
"""Anthropic chat client with middleware, telemetry, and function invocation support."""
1394+
1395+
def __init__(
1396+
self,
1397+
*,
1398+
api_key: str | None = None,
1399+
model_id: str | None = None,
1400+
anthropic_client: AsyncAnthropic | None = None,
1401+
additional_beta_flags: list[str] | None = None,
1402+
additional_properties: dict[str, Any] | None = None,
1403+
middleware: Sequence[ChatAndFunctionMiddlewareTypes] | None = None,
1404+
function_invocation_configuration: FunctionInvocationConfiguration | None = None,
1405+
env_file_path: str | None = None,
1406+
env_file_encoding: str | None = None,
1407+
) -> None:
1408+
"""Initialize an Anthropic client.
1409+
1410+
Keyword Args:
1411+
api_key: The Anthropic API key to use for authentication.
1412+
model_id: The ID of the model to use.
1413+
anthropic_client: An existing Anthropic client to use. If not provided, one will be created.
1414+
This can be used to further configure the client before passing it in.
1415+
For instance if you need to set a different base_url for testing or private deployments.
1416+
additional_beta_flags: Additional beta flags to enable on the client.
1417+
Default flags are: "mcp-client-2025-04-04", "code-execution-2025-08-25".
1418+
additional_properties: Additional properties stored on the client instance.
1419+
middleware: Optional middleware to apply to the client.
1420+
function_invocation_configuration: Optional function invocation configuration override.
1421+
env_file_path: Path to environment file for loading settings.
1422+
env_file_encoding: Encoding of the environment file.
1423+
1424+
Examples:
1425+
.. code-block:: python
1426+
1427+
from agent_framework.anthropic import AnthropicClient
1428+
1429+
# Using environment variables
1430+
# Set ANTHROPIC_API_KEY=your_anthropic_api_key
1431+
# ANTHROPIC_CHAT_MODEL_ID=claude-sonnet-4-5-20250929
1432+
1433+
# Or passing parameters directly
1434+
client = AnthropicClient(
1435+
model_id="claude-sonnet-4-5-20250929",
1436+
api_key="your_anthropic_api_key",
1437+
)
1438+
1439+
# Or loading from a .env file
1440+
client = AnthropicClient(env_file_path="path/to/.env")
1441+
1442+
# Or passing in an existing client
1443+
from anthropic import AsyncAnthropic
1444+
1445+
anthropic_client = AsyncAnthropic(
1446+
api_key="your_anthropic_api_key", base_url="https://custom-anthropic-endpoint.com"
1447+
)
1448+
client = AnthropicClient(
1449+
model_id="claude-sonnet-4-5-20250929",
1450+
anthropic_client=anthropic_client,
1451+
)
1452+
1453+
# Using custom ChatOptions with type safety:
1454+
from typing import TypedDict
1455+
from agent_framework.anthropic import AnthropicChatOptions
1456+
1457+
1458+
class MyOptions(AnthropicChatOptions, total=False):
1459+
my_custom_option: str
1460+
1461+
1462+
client: AnthropicClient[MyOptions] = AnthropicClient(model_id="claude-sonnet-4-5-20250929")
1463+
response = await client.get_response("Hello", options={"my_custom_option": "value"})
1464+
"""
1465+
super().__init__(
1466+
api_key=api_key,
1467+
model_id=model_id,
1468+
anthropic_client=anthropic_client,
1469+
additional_beta_flags=additional_beta_flags,
1470+
additional_properties=additional_properties,
1471+
middleware=middleware,
1472+
function_invocation_configuration=function_invocation_configuration,
1473+
env_file_path=env_file_path,
1474+
env_file_encoding=env_file_encoding,
1475+
)

python/packages/anthropic/tests/test_anthropic_client.py

Lines changed: 19 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,15 +6,18 @@
66

77
import pytest
88
from agent_framework import (
9+
ChatMiddlewareLayer,
910
ChatOptions,
1011
ChatResponseUpdate,
1112
Content,
13+
FunctionInvocationLayer,
1214
Message,
1315
SupportsChatGetResponse,
1416
tool,
1517
)
1618
from agent_framework._settings import load_settings
1719
from agent_framework._tools import SHELL_TOOL_KIND_VALUE
20+
from agent_framework.observability import ChatTelemetryLayer
1821
from anthropic.types.beta import (
1922
BetaMessage,
2023
BetaTextBlock,
@@ -23,7 +26,7 @@
2326
)
2427
from pydantic import BaseModel, Field
2528

26-
from agent_framework_anthropic import AnthropicClient
29+
from agent_framework_anthropic import AnthropicClient, RawAnthropicClient
2730
from agent_framework_anthropic._chat_client import AnthropicSettings
2831

2932
# Test constants
@@ -64,6 +67,8 @@ def create_test_anthropic_client(
6467
client.additional_beta_flags = []
6568
client.chat_middleware = []
6669
client.function_middleware = []
70+
client._cached_chat_middleware_pipeline = None
71+
client._cached_function_middleware_pipeline = None
6772
client.function_invocation_configuration = normalize_function_invocation_configuration(None)
6873

6974
return client
@@ -117,6 +122,19 @@ def test_anthropic_client_init_with_client(mock_anthropic_client: MagicMock) ->
117122
assert isinstance(client, SupportsChatGetResponse)
118123

119124

125+
def test_anthropic_client_wraps_raw_client_with_standard_layer_order() -> None:
126+
"""Test AnthropicClient composes the standard public layer stack around the raw client."""
127+
assert issubclass(AnthropicClient, RawAnthropicClient)
128+
mro = AnthropicClient.__mro__
129+
assert mro.index(FunctionInvocationLayer) < mro.index(ChatMiddlewareLayer)
130+
assert mro.index(ChatMiddlewareLayer) < mro.index(ChatTelemetryLayer)
131+
assert mro.index(ChatTelemetryLayer) < mro.index(RawAnthropicClient)
132+
# RawAnthropicClient must not include the convenience layers
133+
assert not issubclass(RawAnthropicClient, FunctionInvocationLayer)
134+
assert not issubclass(RawAnthropicClient, ChatMiddlewareLayer)
135+
assert not issubclass(RawAnthropicClient, ChatTelemetryLayer)
136+
137+
120138
def test_anthropic_client_init_auto_create_client(
121139
anthropic_unit_test_env: dict[str, str],
122140
) -> None:

python/packages/azure-ai/agent_framework_azure_ai/_chat_client.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -206,8 +206,8 @@ class AzureAIAgentOptions(ChatOptions, total=False):
206206

207207

208208
class AzureAIAgentClient(
209-
ChatMiddlewareLayer[AzureAIAgentOptionsT],
210209
FunctionInvocationLayer[AzureAIAgentOptionsT],
210+
ChatMiddlewareLayer[AzureAIAgentOptionsT],
211211
ChatTelemetryLayer[AzureAIAgentOptionsT],
212212
BaseChatClient[AzureAIAgentOptionsT],
213213
Generic[AzureAIAgentOptionsT],

python/packages/azure-ai/agent_framework_azure_ai/_client.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -97,9 +97,9 @@ class RawAzureAIClient(RawOpenAIResponsesClient[AzureAIClientOptionsT], Generic[
9797
you should consider which additional layers to apply. There is a defined ordering that
9898
you should follow:
9999
100-
1. **ChatMiddlewareLayer** - Should be applied first as it also prepares function middleware
101-
2. **FunctionInvocationLayer** - Handles tool/function calling loop
102-
3. **ChatTelemetryLayer** - Must be inside the function calling loop for correct per-call telemetry
100+
1. **FunctionInvocationLayer** - Owns the tool/function calling loop and routes function middleware
101+
2. **ChatMiddlewareLayer** - Applies chat middleware per model call and stays outside telemetry
102+
3. **ChatTelemetryLayer** - Must stay inside chat middleware for correct per-call telemetry
103103
104104
Use ``AzureAIClient`` instead for a fully-featured client with all layers applied.
105105
"""
@@ -1214,8 +1214,8 @@ def as_agent(
12141214

12151215

12161216
class AzureAIClient(
1217-
ChatMiddlewareLayer[AzureAIClientOptionsT],
12181217
FunctionInvocationLayer[AzureAIClientOptionsT],
1218+
ChatMiddlewareLayer[AzureAIClientOptionsT],
12191219
ChatTelemetryLayer[AzureAIClientOptionsT],
12201220
RawAzureAIClient[AzureAIClientOptionsT],
12211221
Generic[AzureAIClientOptionsT],

python/packages/azure-ai/tests/test_azure_ai_agent_client.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -87,6 +87,8 @@ def create_test_azure_ai_chat_client(
8787
client.middleware = None
8888
client.chat_middleware = []
8989
client.function_middleware = []
90+
client._cached_chat_middleware_pipeline = None
91+
client._cached_function_middleware_pipeline = None
9092
client.otel_provider_name = "azure.ai"
9193
client.function_invocation_configuration = {
9294
"enabled": True,
@@ -151,6 +153,10 @@ def test_azure_ai_chat_client_init_auto_create_client(
151153
chat_client.agent_name = None
152154
chat_client.additional_properties = {}
153155
chat_client.middleware = None
156+
chat_client.chat_middleware = []
157+
chat_client.function_middleware = []
158+
chat_client._cached_chat_middleware_pipeline = None
159+
chat_client._cached_function_middleware_pipeline = None
154160

155161
assert chat_client.agents_client is mock_agents_client
156162
assert chat_client.agent_id is None

python/packages/bedrock/agent_framework_bedrock/_chat_client.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -216,8 +216,8 @@ class BedrockSettings(TypedDict, total=False):
216216

217217

218218
class BedrockChatClient(
219-
ChatMiddlewareLayer[BedrockChatOptionsT],
220219
FunctionInvocationLayer[BedrockChatOptionsT],
220+
ChatMiddlewareLayer[BedrockChatOptionsT],
221221
ChatTelemetryLayer[BedrockChatOptionsT],
222222
BaseChatClient[BedrockChatOptionsT],
223223
Generic[BedrockChatOptionsT],

python/packages/core/agent_framework/_clients.py

Lines changed: 1 addition & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -966,16 +966,7 @@ def _apply_get_response_docstrings() -> None:
966966
from .observability import ChatTelemetryLayer
967967

968968
apply_layered_docstring(ChatTelemetryLayer.get_response, BaseChatClient.get_response)
969-
apply_layered_docstring(
970-
FunctionInvocationLayer.get_response,
971-
ChatTelemetryLayer.get_response,
972-
extra_keyword_args={
973-
"function_middleware": """
974-
Optional per-call function middleware.
975-
When omitted, middleware configured on the client or forwarded from higher layers is used.
976-
""",
977-
},
978-
)
969+
apply_layered_docstring(FunctionInvocationLayer.get_response, ChatTelemetryLayer.get_response)
979970
apply_layered_docstring(
980971
ChatMiddlewareLayer.get_response,
981972
FunctionInvocationLayer.get_response,

0 commit comments

Comments (0)