feat: accept chunks as arguments to chat.{start,append,stop}Stream methods#1806
Conversation
Codecov Report❌ Patch coverage is
Additional details and impacted files@@ Coverage Diff @@
## feat-ai-apps-thinking-steps #1806 +/- ##
===============================================================
- Coverage 83.91% 83.90% -0.01%
===============================================================
Files 115 116 +1
Lines 13080 13168 +88
===============================================================
+ Hits 10976 11049 +73
- Misses 2104 2119 +15 ☔ View full report in Codecov by Sentry. |
zimeg
left a comment
There was a problem hiding this comment.
👾 Leaving a few updates from the future! These are refactors made in a "stacked" branch. Please do let me know if rebasing is preferred though-
| class URLSource(JsonObject): | ||
| type = "url" | ||
|
|
||
| @property | ||
| def attributes(self) -> Set[str]: | ||
| return super().attributes.union( | ||
| { | ||
| "url", | ||
| "text", | ||
| "icon_url", | ||
| } | ||
| ) | ||
|
|
||
| def __init__( | ||
| self, | ||
| *, | ||
| url: str, | ||
| text: str, | ||
| icon_url: Optional[str] = None, | ||
| **others: Dict, | ||
| ): | ||
| show_unknown_key_warning(self, others) | ||
| self._url = url | ||
| self._text = text | ||
| self._icon_url = icon_url | ||
|
|
||
| def to_dict(self) -> Dict[str, Any]: | ||
| self.validate_json() | ||
| json: Dict[str, Union[str, Dict]] = { | ||
| "type": self.type, | ||
| "url": self._url, | ||
| "text": self._text, | ||
| } | ||
| if self._icon_url: | ||
| json["icon_url"] = self._icon_url | ||
| return json |
There was a problem hiding this comment.
🧮 note: It's also changed from "URL" to "Url" to match similar elements!
| if sources is not None: | ||
| self.sources = [] | ||
| for src in sources: | ||
| if isinstance(src, Dict): | ||
| self.sources.append(src) | ||
| elif isinstance(src, URLSource): | ||
| self.sources.append(src.to_dict()) | ||
| else: | ||
| raise SlackObjectFormationError(f"Unsupported type for source in task update chunk: {type(src)}") |
There was a problem hiding this comment.
🪓 note: This is simplified alongside the changes of #1819 as well!
mwbrooks
left a comment
There was a problem hiding this comment.
✅ The code looks good and it works well for me!
🧪 Testing locally works well using your sample app. In case others want to check, I'll include my listeners/assistant/message.py from the sample app.
bolt-python-assistant-template/listeners/assistant/message.py:
from logging import Logger
from typing import Dict, List
from slack_bolt import BoltContext, Say, SetStatus
from slack_sdk import WebClient
from ai.llm_caller import call_llm
from ..views.feedback_block import create_feedback_block
import time
from slack_sdk.models.messages.chunk import MarkdownTextChunk, TaskUpdateChunk, URLSource
def message(
    client: WebClient,
    context: BoltContext,
    logger: Logger,
    payload: dict,
    say: Say,
    set_status: SetStatus,
):
    """
    Handles when users send messages or select a prompt in an assistant thread and generate AI responses:
    Args:
        client: Slack WebClient for making API calls
        context: Bolt context containing channel and thread information
        logger: Logger instance for error tracking
        payload: Event payload with message details (channel, user, text, etc.)
        say: Function to send messages to the thread
        set_status: Function to update the assistant's status
    """
    try:
        channel_id = payload["channel"]
        team_id = context.team_id
        thread_ts = payload["thread_ts"]
        user_id = context.user_id
        # Show a "thinking" indicator while the response is being prepared.
        set_status(
            status="thinking...",
            loading_messages=[
                "Teaching the hamsters to type faster…",
                "Untangling the internet cables…",
                "Consulting the office goldfish…",
                "Polishing up the response just for you…",
                "Convincing the AI to stop overthinking…",
            ],
        )
        # Fetch recent thread history to give the LLM conversational context.
        replies = client.conversations_replies(
            channel=context.channel_id,
            ts=context.thread_ts,
            oldest=context.thread_ts,
            limit=10,
        )
        # Build the LLM message history. The loop variable is named "reply"
        # to avoid shadowing this handler function's own name ("message").
        messages_in_thread: List[Dict[str, str]] = []
        for reply in replies["messages"]:
            # Messages without a bot_id came from a human user.
            role = "user" if reply.get("bot_id") is None else "assistant"
            messages_in_thread.append({"role": role, "content": reply["text"]})
        # NOTE(review): this demo streams hard-coded chunks below rather than
        # the LLM output, so the call's return value was unused; the call is
        # kept (for parity with the original flow) but the binding is dropped.
        call_llm(messages_in_thread)
        # Start a streamed message with an initial markdown chunk and a task
        # update marked in_progress.
        streamer = client.chat_startStream(
            channel=channel_id,
            recipient_team_id=team_id,
            recipient_user_id=user_id,
            thread_ts=thread_ts,
            chunks=[
                MarkdownTextChunk(text="**onwards processing**"),
                TaskUpdateChunk(
                    id="12",
                    title="counting bytes...",
                    status="in_progress",
                ),
            ],
        )
        time.sleep(4)
        # Update the same task (matched by id) with fresh details mid-stream.
        client.chat_appendStream(
            channel=channel_id,
            ts=streamer.get("ts"),
            markdown_text="",
            chunks=[
                TaskUpdateChunk(
                    id="12",
                    title="adding numbers...",
                    status="in_progress",
                    details="sums have increased",
                )
            ],
        )
        time.sleep(4)
        # Finish the stream: mark the task complete with a source link and a
        # closing markdown chunk.
        client.chat_stopStream(
            channel=channel_id,
            ts=streamer.get("ts"),
            chunks=[
                TaskUpdateChunk(
                    id="12",
                    title="solved equation!",
                    status="complete",
                    sources=[
                        URLSource(
                            url="https://oeis.org",
                            text="The On-Line Encyclopedia of Integer Sequences (OEIS)",
                        ),
                    ],
                ),
                MarkdownTextChunk(text="that computes."),
            ],
        )
    except Exception as e:
        # Surface the failure to the user rather than failing silently.
        logger.exception(f"Failed to handle a user message event: {e}")
        say(f":warning: Something went wrong! ({e})")
WilliamBergamin
left a comment
There was a problem hiding this comment.
Nice work ✅
Left a few comments; I don't think any one of them is blocking 🚀
| *, | ||
| id: str, | ||
| title: str, | ||
| status: str, # "pending", "in_progress", "complete", "error" |
There was a problem hiding this comment.
NIT: if you find this valuable, I think you could use some sort of Enum instead of raw strings for the status, but I'm not sure how this will play out in a JsonObject 🤔
There was a problem hiding this comment.
@WilliamBergamin Ohh nice I forgot this feature is supported! I'll save this for a follow up PR as well for ongoing testing 🤖
| channel: str, | ||
| ts: str, | ||
| markdown_text: str, | ||
| chunks: Optional[Sequence[Union[Dict, Chunk]]] = None, |
There was a problem hiding this comment.
Should the above markdown_text be made optional as well here?
There was a problem hiding this comment.
@WilliamBergamin Nice catch - yes! In #1809 it's updated and we'll squash these together next before a prerelease 📠
|
@srtaalej @mwbrooks @WilliamBergamin Thanks all for taking a look and sharing amazing feedback! 💌 I've taken note to prefer optionals and enums for certain arguments but am saving this for a follow up PR at this time to avoid rebasing branches! |
Summary
This PR introduces the `chunks` argument to the following methods.

Testing

The following code snippet might be interesting to experiment with:
Category
`/docs` (Documents) · `tests`/`integration_tests` (Automated tests for this library)

Notes
Requirements
Run `python3 -m venv .venv && source .venv/bin/activate && ./scripts/run_validation.sh` after making the changes.