Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,11 @@

All notable changes to `uipath_llm_client` (core package) will be documented in this file.

## [1.5.2] - 2026-03-18

### Fix
- Fix the chat-model factory to select the client based on the model's `modelFamily` metadata instead of substring-matching the model name

## [1.5.1] - 2026-03-17

### Fix
Expand Down
5 changes: 5 additions & 0 deletions packages/uipath_langchain_client/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,11 @@

All notable changes to `uipath_langchain_client` will be documented in this file.

## [1.5.2] - 2026-03-18

### Fix
- Fix `get_chat_model` to choose between the Anthropic and Google chat clients using the model's `modelFamily` metadata rather than substring-matching the model name

## [1.5.1] - 2026-03-17

### Fixes
Expand Down
2 changes: 1 addition & 1 deletion packages/uipath_langchain_client/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ readme = "README.md"
requires-python = ">=3.11"
dependencies = [
"langchain>=1.2.12",
"uipath-llm-client>=1.5.1",
"uipath-llm-client>=1.5.2",
]

[project.optional-dependencies]
Expand Down
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
__title__ = "UiPath LangChain Client"
__description__ = "A Python client for interacting with UiPath's LLM services via LangChain."
__version__ = "1.5.1"
__version__ = "1.5.2"
Original file line number Diff line number Diff line change
Expand Up @@ -121,6 +121,7 @@ def get_chat_model(
vendor_type=vendor_type,
)
is_uipath_owned = model_info.get("modelSubscriptionType") == "UiPathOwned"
model_family = model_info.get("modelFamily", "").lower()
if not is_uipath_owned:
client_settings.validate_byo_model(model_info)

Expand Down Expand Up @@ -165,47 +166,31 @@ def get_chat_model(
**model_kwargs,
)
case VendorType.VERTEXAI:
if is_uipath_owned:
if "claude" in model_name:
from uipath_langchain_client.clients.anthropic.chat_models import (
UiPathChatAnthropic,
)

return UiPathChatAnthropic(
model=model_name,
settings=client_settings,
vendor_type=discovered_vendor,
byo_connection_id=byo_connection_id,
**model_kwargs,
)
elif "gemini" in model_name:
from uipath_langchain_client.clients.google.chat_models import (
UiPathChatGoogleGenerativeAI,
)

return UiPathChatGoogleGenerativeAI(
model=model_name,
settings=client_settings,
byo_connection_id=byo_connection_id,
**model_kwargs,
)
else:
raise ValueError(
f"We don't have a client that currently supports this model: {model_name} on vendor: {discovered_vendor}"
)
else:
from uipath_langchain_client.clients.google.chat_models import (
UiPathChatGoogleGenerativeAI,
if model_family == "anthropicclaude":
from uipath_langchain_client.clients.anthropic.chat_models import (
UiPathChatAnthropic,
)

return UiPathChatGoogleGenerativeAI(
return UiPathChatAnthropic(
model=model_name,
settings=client_settings,
vendor_type=discovered_vendor,
byo_connection_id=byo_connection_id,
**model_kwargs,
)

from uipath_langchain_client.clients.google.chat_models import (
UiPathChatGoogleGenerativeAI,
)

return UiPathChatGoogleGenerativeAI(
model=model_name,
settings=client_settings,
byo_connection_id=byo_connection_id,
**model_kwargs,
)
case VendorType.AWSBEDROCK:
if "claude" in model_name:
if model_family == "anthropicclaude" and api_flavor is None:
from uipath_langchain_client.clients.bedrock.chat_models import (
UiPathChatAnthropicBedrock,
)
Expand Down
2 changes: 1 addition & 1 deletion src/uipath/llm_client/__version__.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
__title__ = "UiPath LLM Client"
__description__ = "A Python client for interacting with UiPath's LLM services."
__version__ = "1.5.1"
__version__ = "1.5.2"