6 changes: 3 additions & 3 deletions pyproject.toml
@@ -1,12 +1,12 @@
[project]
name = "uipath-langchain"
version = "0.3.5"
version = "0.4.0"
description = "Python SDK that enables developers to build and deploy LangGraph agents to the UiPath Cloud Platform"
readme = { file = "README.md", content-type = "text/markdown" }
requires-python = ">=3.11"
dependencies = [
"uipath>=2.4.14, <2.5.0",
"uipath-runtime>=0.4.1, <0.5.0",
"uipath>=2.5.0, <2.6.0",
"uipath-runtime>=0.5.0,<0.6.0",
"langgraph>=1.0.0, <2.0.0",
"langchain-core>=1.2.5, <2.0.0",
"aiosqlite==0.21.0",
31 changes: 25 additions & 6 deletions src/uipath_langchain/agent/react/agent.py
@@ -25,6 +25,7 @@
from .router import (
create_route_agent,
)
from .router_conversational import create_route_agent_conversational
from .terminate_node import (
create_terminate_node,
)
@@ -74,16 +75,18 @@ def create_agent(
os.environ["LANGCHAIN_RECURSION_LIMIT"] = str(config.recursion_limit)

agent_tools = list(tools)
flow_control_tools: list[BaseTool] = create_flow_control_tools(output_schema)
flow_control_tools: list[BaseTool] = (
[] if config.is_conversational else create_flow_control_tools(output_schema)
)
llm_tools: list[BaseTool] = [*agent_tools, *flow_control_tools]

init_node = create_init_node(messages, input_schema)
init_node = create_init_node(messages, input_schema, config.is_conversational)

tool_nodes = create_tool_node(agent_tools)
tool_nodes_with_guardrails = create_tools_guardrails_subgraph(
tool_nodes, guardrails
)
terminate_node = create_terminate_node(output_schema)
terminate_node = create_terminate_node(output_schema, config.is_conversational)

CompleteAgentGraphState = create_state_with_input(
input_schema if input_schema is not None else BaseModel
@@ -109,19 +112,35 @@ def create_agent(

builder.add_edge(START, AgentGraphNode.INIT)

llm_node = create_llm_node(model, llm_tools, config.thinking_messages_limit)
llm_node = create_llm_node(
model, llm_tools, config.thinking_messages_limit, config.is_conversational
)
llm_with_guardrails_subgraph = create_llm_guardrails_subgraph(
(AgentGraphNode.LLM, llm_node), guardrails
)
builder.add_node(AgentGraphNode.AGENT, llm_with_guardrails_subgraph)
builder.add_edge(AgentGraphNode.INIT, AgentGraphNode.AGENT)

tool_node_names = list(tool_nodes_with_guardrails.keys())
route_agent = create_route_agent(config.thinking_messages_limit)

if config.is_conversational:
route_agent = create_route_agent_conversational()
target_node_names = [
*tool_node_names,
AgentGraphNode.TERMINATE,
]
else:
route_agent = create_route_agent(config.thinking_messages_limit)
target_node_names = [
AgentGraphNode.AGENT,
*tool_node_names,
AgentGraphNode.TERMINATE,
]

builder.add_conditional_edges(
AgentGraphNode.AGENT,
route_agent,
[AgentGraphNode.AGENT, *tool_node_names, AgentGraphNode.TERMINATE],
target_node_names,
)

for tool_name in tool_node_names:
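
For readers skimming the hunk above, the conditional-edge setup reduces to the following sketch. It reuses the names imported in this file (create_route_agent, create_route_agent_conversational, AgentGraphNode); the standalone helper itself is only an illustration, not code from the module:

    def wire_agent_routing(builder, config, tool_node_names):
        # Conversational graphs drop the AGENT self-loop target and the
        # thinking-message budget; the model is expected to answer the user
        # directly instead of looping on thinking messages.
        if config.is_conversational:
            router = create_route_agent_conversational()
            targets = [*tool_node_names, AgentGraphNode.TERMINATE]
        else:
            router = create_route_agent(config.thinking_messages_limit)
            targets = [AgentGraphNode.AGENT, *tool_node_names, AgentGraphNode.TERMINATE]
        builder.add_conditional_edges(AgentGraphNode.AGENT, router, targets)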
21 changes: 18 additions & 3 deletions src/uipath_langchain/agent/react/init_node.py
@@ -3,6 +3,7 @@
from typing import Any, Callable, Sequence

from langchain_core.messages import HumanMessage, SystemMessage
from langgraph.types import Overwrite
from pydantic import BaseModel

from .job_attachments import (
@@ -14,12 +15,26 @@ def create_init_node(
messages: Sequence[SystemMessage | HumanMessage]
| Callable[[Any], Sequence[SystemMessage | HumanMessage]],
input_schema: type[BaseModel] | None,
is_conversational: bool = False,
):
def graph_state_init(state: Any) -> Any:
resolved_messages: Sequence[SystemMessage | HumanMessage] | Overwrite
if callable(messages):
resolved_messages = messages(state)
resolved_messages = list(messages(state))
else:
resolved_messages = messages
resolved_messages = list(messages)
if is_conversational:
# For conversational agents the messages must be ordered so that the system message comes first,
# followed by the initial user message. When the conversation resumes, the state already contains the
# entire message history, including the old system message; that one is replaced with the newly
# generated system message, which carries the current date/time and reflects any changes to user
# settings. The messages property uses the add reducer, so returned messages are appended by default;
# wrapping the list in Overwrite makes LangGraph replace the entire array instead.
if len(state.messages) > 0 and isinstance(state.messages[0], SystemMessage):
preserved_messages = state.messages[1:]
else:
preserved_messages = state.messages
resolved_messages = Overwrite([*resolved_messages, *preserved_messages])

schema = input_schema if input_schema is not None else BaseModel
job_attachments = get_job_attachments(schema, state)
@@ -28,7 +43,7 @@ def graph_state_init(state: Any) -> Any:
}

return {
"messages": list(resolved_messages),
"messages": resolved_messages,
"inner_state": {
"job_attachments": job_attachments_dict,
},
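
A small worked example of the reducer behavior the comment above relies on. The append-style reducer below is a simplified stand-in for the state's default add behavior, and the rebuilt list is what the node wraps in Overwrite; only the message classes are real langchain_core imports.

    from langchain_core.messages import HumanMessage, SystemMessage

    def add_reducer(existing, update):
        # Simplified default behavior of the messages channel: append updates.
        return [*existing, *update]

    # State restored on resume: stale system prompt plus the conversation so far.
    state_messages = [SystemMessage("old prompt"), HumanMessage("hello")]
    # Freshly generated prefix with the current date/time and user settings.
    fresh_prefix = [SystemMessage("new prompt, generated now")]

    # Returning the prefix bare would merely append it after the stale prompt:
    add_reducer(state_messages, fresh_prefix)   # [old prompt, hello, new prompt]

    # The init node instead rebuilds the whole list and wraps it in Overwrite,
    # so LangGraph replaces the channel value rather than extending it:
    [*fresh_prefix, *state_messages[1:]]        # [new prompt, hello]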
7 changes: 6 additions & 1 deletion src/uipath_langchain/agent/react/llm_node.py
@@ -37,6 +37,7 @@ def create_llm_node(
model: BaseChatModel,
tools: Sequence[BaseTool] | None = None,
thinking_messages_limit: int = MAX_CONSECUTIVE_THINKING_MESSAGES,
is_conversational: bool = False,
):
"""Create LLM node with dynamic tool_choice enforcement.

@@ -58,7 +59,11 @@ async def llm_node(state: AgentGraphState):

consecutive_thinking_messages = count_consecutive_thinking_messages(messages)

if bindable_tools and consecutive_thinking_messages >= thinking_messages_limit:
if (
not is_conversational
and bindable_tools
and consecutive_thinking_messages >= thinking_messages_limit
):
llm = base_llm.bind(tool_choice=tool_choice_required_value)
else:
llm = base_llm
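
The added is_conversational guard is the only behavioral change in this node: conversational agents are never forced into a tool call, since a plain-text reply is a valid way to end the turn. A hedged sketch of the decision, where the helper and the "required" default are illustrative and only the variable names and the bind() call come from the diff:

    def choose_llm(base_llm, bindable_tools, consecutive_thinking_messages,
                   thinking_messages_limit, is_conversational,
                   tool_choice_required_value="required"):  # default value is an assumption
        # Classic ReAct agents have tool usage enforced once the consecutive
        # thinking-message budget is spent; conversational agents never do.
        force_tools = (
            not is_conversational
            and bool(bindable_tools)
            and consecutive_thinking_messages >= thinking_messages_limit
        )
        return base_llm.bind(tool_choice=tool_choice_required_value) if force_tools else base_llm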
25 changes: 3 additions & 22 deletions src/uipath_langchain/agent/react/router.py
@@ -2,10 +2,11 @@

from typing import Literal

from langchain_core.messages import AIMessage, AnyMessage, ToolCall
from langchain_core.messages import ToolCall
from uipath.agent.react import END_EXECUTION_TOOL, RAISE_ERROR_TOOL

from ..exceptions import AgentNodeRoutingException
from .router_utils import validate_last_message_is_AI
from .types import AgentGraphNode, AgentGraphState
from .utils import count_consecutive_thinking_messages

@@ -27,26 +28,6 @@ def __has_control_flow_tool(tool_calls: list[ToolCall]) -> bool:
return any(tc.get("name") in FLOW_CONTROL_TOOLS for tc in tool_calls)


def __validate_last_message_is_AI(messages: list[AnyMessage]) -> AIMessage:
"""Validate and return last message from state.

Raises:
AgentNodeRoutingException: If messages are empty or last message is not AIMessage
"""
if not messages:
raise AgentNodeRoutingException(
"No messages in state - cannot route after agent"
)

last_message = messages[-1]
if not isinstance(last_message, AIMessage):
raise AgentNodeRoutingException(
f"Last message is not AIMessage (type: {type(last_message).__name__}) - cannot route after agent"
)

return last_message


def create_route_agent(thinking_messages_limit: int = 0):
"""Create a routing function configured with thinking_messages_limit.

@@ -77,7 +58,7 @@ def route_agent(
AgentNodeRoutingException: When encountering unexpected state (empty messages, non-AIMessage, or excessive completions)
"""
messages = state.messages
last_message = __validate_last_message_is_AI(messages)
last_message = validate_last_message_is_AI(messages)

tool_calls = list(last_message.tool_calls) if last_message.tool_calls else []
tool_calls = __filter_control_flow_tool_calls(tool_calls)
43 changes: 43 additions & 0 deletions src/uipath_langchain/agent/react/router_conversational.py
@@ -0,0 +1,43 @@
"""Routing functions for conditional edges in the agent graph."""

import logging
from typing import Literal

from uipath_langchain.agent.react.router_utils import validate_last_message_is_AI

from .types import AgentGraphNode, AgentGraphState

logger = logging.getLogger(__name__)


def create_route_agent_conversational():
"""Create a routing function for conversational agents. It routes between agent and tool calls until
the agent response has no tool calls, then it routes to the USER_MESSAGE_WAIT node which does an interrupt.

Returns:
Routing function for LangGraph conditional edges
"""

def route_agent_conversational(
state: AgentGraphState,
) -> list[str] | Literal[AgentGraphNode.TERMINATE]:
"""Route after agent

Routing logic:
3. If tool calls, route to specific tool nodes (return list of tool names)
4. If no tool calls, route to user message wait node

Returns:
- list[str]: Tool node names for parallel execution
- AgentGraphNode.USER_MESSAGE_WAIT: When there are no tool calls

Raises:
AgentNodeRoutingException: When encountering unexpected state (empty messages, non-AIMessage, or excessive completions)
"""
last_message = validate_last_message_is_AI(state.messages)
if last_message.tool_calls:
return [tc["name"] for tc in last_message.tool_calls]
else:
return AgentGraphNode.TERMINATE

return route_agent_conversational
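
A quick illustration of the router's two outcomes. The fake state class below is a minimal stand-in for AgentGraphState (only its messages attribute is used), AgentGraphNode comes from the module's own import, and the AIMessage construction is real langchain_core API:

    from langchain_core.messages import AIMessage

    route = create_route_agent_conversational()

    class _FakeState:
        def __init__(self, messages):
            self.messages = messages

    # The model requested a tool: route to that tool node (a list enables
    # parallel execution when several tool calls are present).
    calling = _FakeState([AIMessage(content="", tool_calls=[
        {"name": "search_kb", "args": {"query": "reset password"}, "id": "call-1"},
    ])])
    assert route(calling) == ["search_kb"]

    # The model answered in plain text: the turn is over, route to TERMINATE.
    answering = _FakeState([AIMessage(content="You can reset it from Settings.")])
    assert route(answering) == AgentGraphNode.TERMINATE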
25 changes: 25 additions & 0 deletions src/uipath_langchain/agent/react/router_utils.py
@@ -0,0 +1,25 @@
"""Routing functions for conditional edges in the agent graph."""

from langchain_core.messages import AIMessage, AnyMessage

from ..exceptions import AgentNodeRoutingException


def validate_last_message_is_AI(messages: list[AnyMessage]) -> AIMessage:
"""Validate and return last message from state.

Raises:
AgentNodeRoutingException: If messages are empty or last message is not AIMessage
"""
if not messages:
raise AgentNodeRoutingException(
"No messages in state - cannot route after agent"
)

last_message = messages[-1]
if not isinstance(last_message, AIMessage):
raise AgentNodeRoutingException(
f"Last message is not AIMessage (type: {type(last_message).__name__}) - cannot route after agent"
)

return last_message
31 changes: 16 additions & 15 deletions src/uipath_langchain/agent/react/terminate_node.py
@@ -46,7 +46,7 @@ def _handle_agent_termination(termination: AgentTermination) -> NoReturn:


def create_terminate_node(
response_schema: type[BaseModel] | None = None,
response_schema: type[BaseModel] | None = None, is_conversational: bool = False
):
"""Handles Agent Graph termination for multiple sources and output or error propagation to Orchestrator.

Expand All @@ -60,23 +60,24 @@ def terminate_node(state: AgentGraphState):
if state.inner_state.termination:
_handle_agent_termination(state.inner_state.termination)

last_message = state.messages[-1]
if not isinstance(last_message, AIMessage):
raise AgentNodeRoutingException(
f"Expected last message to be AIMessage, got {type(last_message).__name__}"
)
if not is_conversational:
last_message = state.messages[-1]
if not isinstance(last_message, AIMessage):
raise AgentNodeRoutingException(
f"Expected last message to be AIMessage, got {type(last_message).__name__}"
)

for tool_call in last_message.tool_calls:
tool_name = tool_call["name"]
for tool_call in last_message.tool_calls:
tool_name = tool_call["name"]

if tool_name == END_EXECUTION_TOOL.name:
return _handle_end_execution(tool_call["args"], response_schema)
if tool_name == END_EXECUTION_TOOL.name:
return _handle_end_execution(tool_call["args"], response_schema)

if tool_name == RAISE_ERROR_TOOL.name:
_handle_raise_error(tool_call["args"])
if tool_name == RAISE_ERROR_TOOL.name:
_handle_raise_error(tool_call["args"])

raise AgentNodeRoutingException(
"No control flow tool call found in terminate node. Unexpected state."
)
raise AgentNodeRoutingException(
"No control flow tool call found in terminate node. Unexpected state."
)

return terminate_node
3 changes: 3 additions & 0 deletions src/uipath_langchain/agent/react/types.py
@@ -61,3 +61,6 @@ class AgentGraphConfig(BaseModel):
ge=0,
description="Max consecutive thinking messages before enforcing tool usage. 0 = force tools every time.",
)
is_conversational: bool = Field(
default=False, description="If set, creates a graph for conversational agents"
)
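
For reference, a hypothetical opt-in; the other config fields keep their defaults, whose exact values are not visible in this diff:

    config = AgentGraphConfig(is_conversational=True)
    # Downstream effects introduced in this PR: create_agent() skips the
    # flow-control tools and uses the conversational router, the LLM node
    # never forces tool_choice, and the terminate node skips the
    # end-execution / raise-error handling.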
17 changes: 2 additions & 15 deletions src/uipath_langchain/chat/mapper.py
@@ -307,21 +307,8 @@ def map_event(
)
]

# --- Fallback for other BaseMessage types ---
text_content = self._extract_text(message.content)
return [
UiPathConversationMessageEvent(
message_id=message.id,
start=UiPathConversationMessageStartEvent(
role="assistant", timestamp=timestamp
),
content_part=UiPathConversationContentPartEvent(
content_part_id=f"cp-{message.id}",
chunk=UiPathConversationContentPartChunkEvent(data=text_content),
),
end=UiPathConversationMessageEndEvent(),
)
]
# Don't send events for system or user messages. Agent messages are handled above.
return []


__all__ = ["UiPathChatMessagesMapper"]
9 changes: 8 additions & 1 deletion src/uipath_langchain/runtime/factory.py
@@ -62,9 +62,16 @@ def _setup_instrumentation(self, trace_manager: UiPathTraceManager | None) -> None:

def _get_connection_string(self) -> str:
"""Get the database connection string."""
if self.context.state_file_path is not None:
return self.context.state_file_path

if self.context.runtime_dir and self.context.state_file:
path = os.path.join(self.context.runtime_dir, self.context.state_file)
if not self.context.resume and self.context.job_id is None:
if (
not self.context.resume
and self.context.job_id is None
and not self.context.keep_state_file
):
# If not resuming, no job id, and keep_state_file is not set, delete the previous state file
if os.path.exists(path):
os.remove(path)
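
Spelled out as a standalone sketch, the lookup order after this change is: explicit state_file_path first, then the runtime_dir/state_file pair with conditional cleanup. The attribute names come from the diff; everything beyond the visible hunk is assumed unchanged and left out:

    import os

    def resolve_connection_string(context):
        # 1. An explicit state_file_path overrides everything else.
        if context.state_file_path is not None:
            return context.state_file_path
        # 2. Otherwise derive the path from runtime_dir + state_file. A stale
        #    file is deleted only for a fresh, job-less run that did not ask
        #    to keep its state (keep_state_file).
        if context.runtime_dir and context.state_file:
            path = os.path.join(context.runtime_dir, context.state_file)
            fresh_run = not context.resume and context.job_id is None
            if fresh_run and not context.keep_state_file:
                if os.path.exists(path):
                    os.remove(path)
            # The rest of the original method (returning this path and any
            # final fallback) is hidden by the diff truncation.
            ...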