Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
29 commits
Select commit Hold shift + click to select a range
d3652ff
Consolidating StreamingResponse definitions and slight fixes
rodrigobr-msft Feb 19, 2026
e8aa9b2
Improving StreamingResponse tests and formatting
rodrigobr-msft Feb 25, 2026
a374ced
Merge branch 'main' into users/robrandao/streaming-response-consolida…
rodrigobr-msft Feb 25, 2026
c730a89
Adding tests for StreamingResponse
rodrigobr-msft Feb 26, 2026
ead6403
Adding basic StreamingResponse tests
rodrigobr-msft Feb 26, 2026
03ed8f7
Adding integration tests for streaming
rodrigobr-msft Feb 26, 2026
634b608
Finalized basic end-to-end streaming tests
rodrigobr-msft Feb 27, 2026
13dda2a
Addressing Copilot PR review
rodrigobr-msft Feb 27, 2026
c121834
Validating channel_id before accessing parent channel
rodrigobr-msft Feb 27, 2026
a230b9a
Removing duplicate sample
rodrigobr-msft Mar 2, 2026
ee5016c
Another commit
rodrigobr-msft Mar 2, 2026
86686ae
Adding '@type' aliases for AIEntity-related classes
rodrigobr-msft Mar 3, 2026
a6a58f8
Fixing further linting issues
rodrigobr-msft Mar 3, 2026
df23e5c
Removing exclude_unset=True usage in reply_to_activity to enable prop…
rodrigobr-msft Mar 3, 2026
904557f
Adding improved serialization for AIEntity and related classes
rodrigobr-msft Mar 17, 2026
bb2980c
Readded exclude_unset=True usage in ConversationsOperations
rodrigobr-msft Mar 18, 2026
b976eb9
Formatting
rodrigobr-msft Mar 18, 2026
e634a91
Removing unused imports
rodrigobr-msft Mar 18, 2026
eedfd6c
Small fixes
rodrigobr-msft Mar 18, 2026
8d1b300
Removing unnecessary dummy constructors
rodrigobr-msft Mar 18, 2026
f8639c3
Fixing tests
rodrigobr-msft Mar 19, 2026
478651d
Potential fix for pull request finding
rodrigobr-msft Mar 19, 2026
c9b25e9
Adding formatting and comment
rodrigobr-msft Mar 19, 2026
d77f479
Potential fix for pull request finding
rodrigobr-msft Mar 19, 2026
e07bed3
Merge branch 'main' into users/robrandao/streaming-response-consolida…
rodrigobr-msft Mar 19, 2026
ea765fd
Reformatting
rodrigobr-msft Mar 19, 2026
fe605f3
Merge branch 'users/robrandao/streaming-response-consolidation' of ht…
rodrigobr-msft Mar 19, 2026
7757e09
Potential fix for pull request finding
rodrigobr-msft Mar 19, 2026
20a903f
Potential fix for pull request finding
rodrigobr-msft Mar 19, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -90,4 +90,4 @@ def generate_token_from_config(sdk_config: dict, connection_name: str = "SERVICE

if not client_id or not client_secret or not tenant_id:
raise ValueError("Incorrect configuration provided for token generation.")
return generate_token(client_id, client_secret, tenant_id)
return generate_token(client_id, client_secret, tenant_id)
Original file line number Diff line number Diff line change
Expand Up @@ -102,4 +102,4 @@ def resolve_scenario(scenario_or_str: Scenario | str ) -> Scenario:
else:
return scenario_registry.get(scenario_or_str)
else:
raise TypeError("Input must be a Scenario instance or a string key.")
raise TypeError("Input must be a Scenario instance or a string key.")
122 changes: 122 additions & 0 deletions dev/tests/sdk/test_streaming_response.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
import pytest
import asyncio

from microsoft_agents.activity import (
Activity,
ActivityTypes,
Channels,
Entity
)

from microsoft_agents.hosting.core import (
TurnContext,
TurnState,
)

from microsoft_agents.testing import (
AgentClient,
AgentEnvironment,
AiohttpScenario,
)

# Sentence streamed by the test agent; CHUNKS are its whitespace-split words.
FULL_TEXT = "This is a streaming response."
CHUNKS = FULL_TEXT.split()

def get_streaminfo(activity: Activity) -> Entity:
    """Return the 'streaminfo' entity attached to *activity*.

    Entities may arrive either as raw dicts or as parsed ``Entity``
    instances depending on how the activity was deserialized, so both
    representations are accepted.

    :param activity: Activity whose entities are searched.
    :return: The streaminfo entity, validated into an ``Entity`` model.
    :raises ValueError: If the activity carries no streaminfo entity.
    """
    # activity.entities may be None; without the guard, iteration would
    # raise TypeError instead of the intended ValueError below.
    for entity in activity.entities or []:
        if isinstance(entity, dict) and entity.get("type") == "streaminfo":
            return Entity.model_validate(entity)
        if isinstance(entity, Entity) and entity.type == "streaminfo":
            return entity
    raise ValueError("No streaminfo entity found")

async def init_agent(env: AgentEnvironment):
    """Register a '/stream' handler that streams FULL_TEXT word by word."""

    app = env.agent_application

    @app.message("/stream")
    async def stream_handler(context: TurnContext, state: TurnState):
        # The hosting layer must have attached a streaming response helper.
        assert context.streaming_response is not None

        context.streaming_response.queue_informative_update("Starting stream...")
        await asyncio.sleep(1.0)  # Simulate delay before starting stream

        # Queue every chunk; pause between chunks but not after the last one.
        last_index = len(CHUNKS) - 1
        for index, piece in enumerate(CHUNKS):
            context.streaming_response.queue_text_chunk(piece)
            if index < last_index:
                await asyncio.sleep(1.0)  # Simulate delay between chunks

        await context.streaming_response.end_stream()

# Shared scenario: aiohttp-hosted agent with JWT middleware disabled for local testing.
_SCENARIO = AiohttpScenario(init_agent=init_agent, use_jwt_middleware=False)

@pytest.mark.asyncio
@pytest.mark.agent_test(_SCENARIO)
async def test_basic_streaming_response_non_streaming_channel(agent_client: AgentClient):
    """A non-streaming channel (emulator) must receive the streamed content
    collapsed into a single final activity."""

    # Reuse CHUNKS instead of re-splitting FULL_TEXT (keeps the two in sync).
    expected_len = len(CHUNKS)

    agent_client.template = agent_client.template.with_updates(channel_id=Channels.emulator)

    # give enough time for all the activities to send (one 1s delay per chunk)
    await agent_client.send("/stream", wait=expected_len * 2.0)

    # Entities may be dicts or Entity models; handle both when filtering,
    # mirroring get_streaminfo (e["type"] would raise on a model instance).
    stream_activities = agent_client.select().where(
        entities=lambda ents: any(
            (e.get("type") if isinstance(e, dict) else getattr(e, "type", None))
            == "streaminfo"
            for e in ents
        )
    ).get()

    # Non-streaming channels get exactly one (final) stream activity.
    assert len(stream_activities) == 1

    final_streaminfo = get_streaminfo(stream_activities[0])

    assert final_streaminfo.stream_sequence == 1
    assert final_streaminfo.stream_type == "final"
    # Chunks are concatenated without separators, so spaces are lost.
    assert stream_activities[0].text == FULL_TEXT.replace(" ", "")



@pytest.mark.asyncio
@pytest.mark.agent_test(_SCENARIO)
async def test_basic_streaming_response_streaming_channel(agent_client: AgentClient):
    """A streaming channel (webchat) must receive one informative update, one
    typing activity per intermediate chunk, and a final message activity."""

    # Reuse CHUNKS instead of re-splitting FULL_TEXT (keeps the two in sync).
    expected_len = len(CHUNKS)

    agent_client.template = agent_client.template.with_updates(channel_id=Channels.webchat)

    # give enough time for all the activities to send (one 1s delay per chunk)
    await agent_client.send("/stream", wait=expected_len * 2.0)

    # Entities may be dicts or Entity models; handle both when filtering,
    # mirroring get_streaminfo (e["type"] would raise on a model instance).
    stream_activities = agent_client.select().where(
        entities=lambda ents: any(
            (e.get("type") if isinstance(e, dict) else getattr(e, "type", None))
            == "streaminfo"
            for e in ents
        )
    ).get()

    # informative update + (len(CHUNKS) - 1) streaming chunks + 1 final
    assert len(stream_activities) == len(CHUNKS) + 1

    informative = stream_activities[0]
    informative_streaminfo = get_streaminfo(informative)

    assert informative_streaminfo.stream_type == "informative"
    assert informative_streaminfo.stream_sequence == 1
    assert informative.text == "Starting stream..."
    assert informative.type == ActivityTypes.typing

    # Each intermediate activity carries the cumulative text streamed so far.
    accumulated = ""
    for position, chunk in enumerate(CHUNKS[:-1], start=1):
        accumulated += chunk
        activity = stream_activities[position]
        streaminfo = get_streaminfo(activity)

        assert activity.text == accumulated
        assert activity.type == ActivityTypes.typing
        assert streaminfo.stream_type == "streaming"
        # Sequence numbers start at 1 with the informative update.
        assert streaminfo.stream_sequence == position + 1

    final_streaminfo = get_streaminfo(stream_activities[-1])

    assert final_streaminfo.stream_sequence == len(stream_activities)
    assert final_streaminfo.stream_type == "final"
    # Chunks are concatenated without separators, so spaces are lost.
    assert stream_activities[-1].text == FULL_TEXT.replace(" ", "")

Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@
Place,
ProductInfo,
Thing,
StreamInfo,
)
from .error import Error
from .error_response import ErrorResponse
Expand Down Expand Up @@ -133,6 +134,7 @@
"ExpectedReplies",
"Entity",
"AIEntity",
"EntityTypes",
"ClientCitation",
"ClientCitationAppearance",
"ClientCitationImage",
Expand All @@ -154,6 +156,7 @@
"OAuthCard",
"PagedMembersResult",
"Place",
"StreamInfo",
"ProductInfo",
"ReceiptCard",
"ReceiptItem",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -754,9 +754,6 @@ def add_ai_metadata(
"""
if citations:
ai_entity = AIEntity(
type="https://schema.org/Message",
schema_type="Message",
context="https://schema.org",
id="",
additional_type=["AIGeneratedContent"],
citation=citations,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,9 @@


class AgentsModel(BaseModel):
model_config = ConfigDict(alias_generator=to_camel, populate_by_name=True)
model_config = ConfigDict(
alias_generator=to_camel, validate_by_name=True, validate_by_alias=True
)

"""
@model_serializer
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
from .geo_coordinates import GeoCoordinates
from .place import Place
from .product_info import ProductInfo
from .stream_info import StreamInfo
from .thing import Thing

__all__ = [
Expand All @@ -29,6 +30,7 @@
"Mention",
"SensitivityUsageInfo",
"SensitivityPattern",
"StreamInfo",
"GeoCoordinates",
"Place",
"ProductInfo",
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

from typing import Any
from pydantic import model_serializer, SerializerFunctionWrapHandler


class _SchemaMixin:
    """Mixin that mirrors schema.org attributes into JSON-LD keys on serialization.

    Models mixing this in declare ``at_type`` (and optionally ``at_context``);
    the wrap-mode serializer copies them into ``@type`` / ``@context`` keys of
    the serialized output.
    """

    # Concrete models narrow this to a Literal carrying their schema.org type.
    at_type: Any

    @model_serializer(mode="wrap")
    def serialize_model(
        self, handler: SerializerFunctionWrapHandler
    ) -> dict[str, object]:
        """Run the default serialization, then add the JSON-LD keys."""
        data = handler(self)
        data["@type"] = self.at_type
        try:
            # '@context' is only emitted by models that declare at_context.
            data["@context"] = self.at_context
        except AttributeError:
            pass
        return data
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,11 @@
# Licensed under the MIT License.

from enum import Enum
from typing import List, Optional, Union, Literal
from dataclasses import dataclass
from typing import List, Optional, Literal

from pydantic import Field, model_serializer
from ..agents_model import AgentsModel
from ._schema_mixin import _SchemaMixin
from .entity import Entity


Expand Down Expand Up @@ -42,33 +43,36 @@ class ClientCitationImage(AgentsModel):
name: str = ""


class SensitivityPattern(AgentsModel):
class SensitivityPattern(AgentsModel, _SchemaMixin):
"""Pattern information for sensitivity usage info."""

type: str = "DefinedTerm"
at_type: Literal["DefinedTerm"] = "DefinedTerm"

in_defined_term_set: str = ""
name: str = ""
term_code: str = ""


class SensitivityUsageInfo(AgentsModel):
class SensitivityUsageInfo(AgentsModel, _SchemaMixin):
"""
Sensitivity usage info for content sent to the user.
This is used to provide information about the content to the user.
"""

type: str = "https://schema.org/Message"
schema_type: str = "CreativeWork"
at_type: Literal["CreativeWork"] = "CreativeWork"

description: Optional[str] = None
name: str = ""
position: Optional[int] = None
pattern: Optional[SensitivityPattern] = None


class ClientCitationAppearance(AgentsModel):
class ClientCitationAppearance(AgentsModel, _SchemaMixin):
"""Appearance information for a client citation."""

type: str = "DigitalDocument"
at_type: Literal["DigitalDocument"] = "DigitalDocument"

name: str = ""
text: Optional[str] = None
url: Optional[str] = None
Expand All @@ -79,33 +83,54 @@ class ClientCitationAppearance(AgentsModel):
usage_info: Optional[SensitivityUsageInfo] = None


class ClientCitation(AgentsModel):
class ClientCitation(AgentsModel, _SchemaMixin):
"""
Represents a Teams client citation to be included in a message.
See Bot messages with AI-generated content for more details.
https://learn.microsoft.com/en-us/microsoftteams/platform/bots/how-to/bot-messages-ai-generated-content?tabs=before%2Cbotmessage
"""

type: str = "Claim"
position: int = 0
appearance: Optional[ClientCitationAppearance] = None
at_type: Literal["Claim"] = "Claim"

def __post_init__(self):
if self.appearance is None:
self.appearance = ClientCitationAppearance()
position: int = 0
appearance: Optional[ClientCitationAppearance] = Field(default_factory=ClientCitationAppearance)


class AIEntity(Entity):
# in the future, we need a better way to resolve the different serializers.
class AIEntity(_SchemaMixin, Entity):
"""Entity indicating AI-generated content."""

at_type: Literal["Message"] = "Message"
at_context: Literal["https://schema.org"] = "https://schema.org"

type: str = "https://schema.org/Message"
schema_type: str = "Message"
context: str = "https://schema.org"
id: str = ""
additional_type: Optional[List[str]] = None

additional_type: List[str] = Field(default_factory=lambda: ["AIGeneratedContent"])
citation: Optional[List[ClientCitation]] = None
usage_info: Optional[SensitivityUsageInfo] = None

def __post_init__(self):
if self.additional_type is None:
self.additional_type = ["AIGeneratedContent"]
@model_serializer(mode="wrap")
def _serialize_ai_entity(self, handler):
"""
Unified serializer to ensure both Bot Framework `type` and
schema.org `@type` / `@context` are present in the wire output.
"""
data = handler(self)

# Always include the Bot Framework 'type' field, even when it has its
# default value and `exclude_unset=True` is used.
data["type"] = getattr(self, "type", data.get("type"))

# Map internal schema fields to their JSON-LD counterparts.
at_type_value = getattr(self, "at_type", None)
if "at_type" in data or at_type_value is not None:
data["@type"] = at_type_value if at_type_value is not None else data.get("at_type")
data.pop("at_type", None)

at_context_value = getattr(self, "at_context", None)
if "at_context" in data or at_context_value is not None:
data["@context"] = at_context_value if at_context_value is not None else data.get("at_context")
data.pop("at_context", None)

return data
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,11 @@

from typing import Any

from pydantic import model_serializer, model_validator
from pydantic.alias_generators import to_camel, to_snake
from pydantic.alias_generators import to_camel
from pydantic import model_serializer, SerializerFunctionWrapHandler

from ..agents_model import AgentsModel, ConfigDict
from ._schema_mixin import _SchemaMixin


class Entity(AgentsModel):
Expand All @@ -16,7 +17,12 @@ class Entity(AgentsModel):
:type type: str
"""

model_config = ConfigDict(extra="allow")
model_config = ConfigDict(
extra="allow",
alias_generator=to_camel,
validate_by_name=True,
validate_by_alias=True,
)

type: str

Expand All @@ -25,17 +31,11 @@ def additional_properties(self) -> dict[str, Any]:
"""Returns the set of properties that are not None."""
return self.model_extra

@model_validator(mode="before")
@classmethod
def to_snake_for_all(cls, data):
ret = {to_snake(k): v for k, v in data.items()}
return ret

@model_serializer(mode="plain")
def to_camel_for_all(self, config):
if config.by_alias:
new_data = {}
for k, v in self:
new_data[to_camel(k)] = v
return new_data
return {k: v for k, v in self}
# ensures type is included when serializing, even when exclude_unset=True
@model_serializer(mode="wrap")
def serialize_with_type(
self, handler: SerializerFunctionWrapHandler
) -> dict[str, object]:
serialized = handler(self)
serialized["type"] = self.type
return serialized
Loading
Loading