Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 15 additions & 4 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[project]
name = "uipath-langchain"
version = "0.7.15"
version = "1.0.0"
description = "Python SDK that enables developers to build and deploy LangGraph agents to the UiPath Cloud Platform"
readme = { file = "README.md", content-type = "text/markdown" }
requires-python = ">=3.11"
Expand All @@ -12,7 +12,6 @@ dependencies = [
"langgraph>=1.0.0, <2.0.0",
"langchain-core>=1.2.11, <2.0.0",
"langgraph-checkpoint-sqlite>=3.0.3, <4.0.0",
"langchain-openai>=1.0.0, <2.0.0",
"langchain>=1.0.0, <2.0.0",
"pydantic-settings>=2.6.0",
"python-dotenv>=1.0.1",
Expand All @@ -22,6 +21,7 @@ dependencies = [
"jsonpath-ng>=1.7.0",
"mcp==1.26.0",
"langchain-mcp-adapters==0.2.1",
"uipath-langchain-client[openai]>=1.2.7",
]

classifiers = [
Expand All @@ -37,8 +37,19 @@ maintainers = [
]

[project.optional-dependencies]
vertex = ["langchain-google-genai>=2.0.0", "google-generativeai>=0.8.0"]
bedrock = ["langchain-aws>=0.2.35", "boto3-stubs>=1.41.4"]
anthropic = [
"uipath-langchain-client[anthropic]>=1.2.7",
]
vertex = [
"uipath-langchain-client[google]>=1.2.7",
"uipath-langchain-client[vertexai]>=1.2.7",
]
bedrock = [
"uipath-langchain-client[aws]>=1.2.7",
]
all = [
"uipath-langchain-client[all]>=1.2.7",
]

[project.entry-points."uipath.middlewares"]
register = "uipath_langchain.middlewares:register_middleware"
Expand Down
2 changes: 1 addition & 1 deletion samples/chat-hitl-agent/graph.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from langchain_anthropic import ChatAnthropic
from langchain_tavily import TavilySearch
from langchain.agents import create_agent
from uipath_langchain.chat import requires_approval
from uipath_langchain.agent.tools.hitl import requires_approval

tavily_tool = TavilySearch(max_results=5)

Expand Down
2 changes: 1 addition & 1 deletion samples/oauth-external-apps-agent/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
from langchain.agents import create_agent
from langchain.messages import SystemMessage, HumanMessage

from uipath_langchain.chat.models import UiPathChat
from uipath_langchain.chat import UiPathChat
from langchain_mcp_adapters.tools import load_mcp_tools
from mcp import ClientSession
from mcp.client.streamable_http import streamablehttp_client
Expand Down
2 changes: 1 addition & 1 deletion samples/retrieval-chain/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnablePassthrough
from langchain_core.vectorstores import VectorStore
from uipath_langchain.chat.models import UiPathAzureChatOpenAI
from uipath_langchain.chat import UiPathAzureChatOpenAI
from uipath_langchain.vectorstores.context_grounding_vectorstore import (
ContextGroundingVectorStore,
)
Expand Down
62 changes: 41 additions & 21 deletions src/uipath_langchain/chat/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,43 +11,63 @@
Instead, all exports are loaded on-demand when first accessed.
"""

from .chat_model_factory import get_chat_model

def __getattr__(name):
    """Lazily resolve this module's public chat-model exports (PEP 562).

    Importing the provider-specific chat model classes eagerly would pull in
    heavy optional dependencies (OpenAI, Google, Bedrock, Anthropic clients),
    so each class is imported only on first attribute access.

    Args:
        name: Attribute name being looked up on this module.

    Returns:
        The requested chat model class from ``uipath_langchain_client``.

    Raises:
        AttributeError: If ``name`` is not one of the lazily exported classes.
    """
    # Map each exported class name to the client module that defines it.
    lazy_exports = {
        "UiPathChat": "uipath_langchain_client.clients.normalized.chat_models",
        "UiPathAzureChatOpenAI": "uipath_langchain_client.clients.openai.chat_models",
        "UiPathChatOpenAI": "uipath_langchain_client.clients.openai.chat_models",
        "UiPathChatGoogleGenerativeAI": "uipath_langchain_client.clients.google.chat_models",
        "UiPathChatBedrock": "uipath_langchain_client.clients.bedrock.chat_models",
        "UiPathChatBedrockConverse": "uipath_langchain_client.clients.bedrock.chat_models",
        "UiPathChatAnthropic": "uipath_langchain_client.clients.anthropic.chat_models",
    }
    module_path = lazy_exports.get(name)
    if module_path is None:
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
    # Import at access time so optional provider extras stay optional.
    from importlib import import_module

    return getattr(import_module(module_path), name)


# Public API: the eagerly imported factory helper plus the chat model
# classes resolved lazily by this module's __getattr__.
__all__ = [
    "get_chat_model",
    "UiPathChat",
    "UiPathAzureChatOpenAI",
    "UiPathChatOpenAI",
    "UiPathChatGoogleGenerativeAI",
    "UiPathChatBedrock",
    "UiPathChatBedrockConverse",
    "UiPathChatAnthropic",
]
Loading
Loading