Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 9 additions & 20 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
[project]
name = "uipath-langchain"
version = "0.1.43"
version = "0.1.44"
description = "Python SDK that enables developers to build and deploy LangGraph agents to the UiPath Cloud Platform"
readme = { file = "README.md", content-type = "text/markdown" }
requires-python = ">=3.11"
dependencies = [
"uipath>=2.2.41, <2.3.0",
"uipath>=2.2.44, <2.3.0",
"langgraph>=1.0.0, <2.0.0",
"langchain-core>=1.0.0, <2.0.0",
"aiosqlite==0.21.0",
Expand All @@ -16,7 +16,7 @@ dependencies = [
"python-dotenv>=1.0.1",
"httpx>=0.27.0",
"openinference-instrumentation-langchain>=0.1.56",
"jsonschema-pydantic-converter>=0.1.5",
"jsonschema-pydantic-converter>=0.1.6",
"jsonpath-ng>=1.7.0",
"mcp==1.24.0",
"langchain-mcp-adapters==0.2.1",
Expand All @@ -31,18 +31,12 @@ classifiers = [
]
maintainers = [
{ name = "Marius Cosareanu", email = "marius.cosareanu@uipath.com" },
{ name = "Cristian Pufu", email = "cristian.pufu@uipath.com" }
{ name = "Cristian Pufu", email = "cristian.pufu@uipath.com" },
]

[project.optional-dependencies]
vertex = [
"langchain-google-genai>=2.0.0",
"google-generativeai>=0.8.0",
]
bedrock = [
"langchain-aws>=0.2.35",
"boto3-stubs>=1.41.4",
]
vertex = ["langchain-google-genai>=2.0.0", "google-generativeai>=0.8.0"]
bedrock = ["langchain-aws>=0.2.35", "boto3-stubs>=1.41.4"]

[project.entry-points."uipath.middlewares"]
register = "uipath_langchain.middlewares:register_middleware"
Expand All @@ -69,7 +63,7 @@ dev = [
"pytest-asyncio>=1.0.0",
"pre-commit>=4.1.0",
"numpy>=1.24.0",
"pytest_httpx>=0.35.0"
"pytest_httpx>=0.35.0",
]

[tool.hatch.build.targets.wheel]
Expand All @@ -95,13 +89,8 @@ skip-magic-trailing-comma = false
line-ending = "auto"

[tool.mypy]
plugins = [
"pydantic.mypy"
]
exclude = [
"samples/.*",
"testcases/.*"
]
plugins = ["pydantic.mypy"]
exclude = ["samples/.*", "testcases/.*"]

follow_imports = "silent"
warn_redundant_casts = true
Expand Down
2 changes: 1 addition & 1 deletion src/uipath_langchain/agent/react/agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ def create_agent(
flow_control_tools: list[BaseTool] = create_flow_control_tools(output_schema)
llm_tools: list[BaseTool] = [*agent_tools, *flow_control_tools]

init_node = create_init_node(messages)
init_node = create_init_node(messages, input_schema)
tool_nodes = create_tool_node(agent_tools)
tool_nodes_with_guardrails = create_tools_guardrails_subgraph(
tool_nodes, guardrails
Expand Down
17 changes: 16 additions & 1 deletion src/uipath_langchain/agent/react/init_node.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,18 +3,33 @@
from typing import Any, Callable, Sequence

from langchain_core.messages import HumanMessage, SystemMessage
from pydantic import BaseModel

from .job_attachments import (
get_job_attachments,
)


def create_init_node(
    messages: Sequence[SystemMessage | HumanMessage]
    | Callable[[Any], Sequence[SystemMessage | HumanMessage]],
    input_schema: type[BaseModel] | None,
):
    """Build the graph-entry node that seeds the initial messages and job attachments.

    Args:
        messages: Either a fixed sequence of system/human messages, or a
            callable that derives such a sequence from the incoming state.
        input_schema: The Pydantic model describing the agent input; when
            ``None`` the bare ``BaseModel`` is used (no attachment fields).

    Returns:
        A node callable that produces the initial graph-state update.
    """

    def graph_state_init(state: Any):
        # Resolve lazily-supplied messages against the incoming state.
        resolved = messages(state) if callable(messages) else messages

        # Fall back to the bare BaseModel so attachment extraction is a no-op
        # when the caller supplied no input schema.
        effective_schema = BaseModel if input_schema is None else input_schema
        attachments = get_job_attachments(effective_schema, state)
        attachments_by_id = {
            str(item.id): item for item in attachments if item.id is not None
        }

        return {
            "messages": list(resolved),
            "job_attachments": attachments_by_id,
        }

    return graph_state_init
125 changes: 125 additions & 0 deletions src/uipath_langchain/agent/react/job_attachments.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,125 @@
"""Job attachment utilities for ReAct Agent."""

import copy
import uuid
from typing import Any

from jsonpath_ng import parse # type: ignore[import-untyped]
from pydantic import BaseModel
from uipath.platform.attachments import Attachment

from .json_utils import extract_values_by_paths, get_json_paths_by_type


def get_job_attachments(
    schema: type[BaseModel],
    data: dict[str, Any] | BaseModel,
) -> list[Attachment]:
    """Extract job attachments from data based on schema and convert to Attachment objects.

    Args:
        schema: The Pydantic model class defining the data structure
        data: The data object (dict or Pydantic model) to extract attachments from

    Returns:
        List of ``Attachment`` objects validated from the extracted values
    """
    job_attachment_paths = get_job_attachment_paths(schema)
    job_attachments = extract_values_by_paths(data, job_attachment_paths)

    # from_attributes=True lets validation accept both dict-shaped values and
    # attribute-bearing objects extracted from Pydantic model instances.
    return [
        Attachment.model_validate(attachment, from_attributes=True)
        for attachment in job_attachments
    ]


def get_job_attachment_paths(model: type[BaseModel]) -> list[str]:
    """Return JSONPath expressions for every job attachment field on *model*.

    Args:
        model: The Pydantic model class to analyze

    Returns:
        JSONPath strings pointing at fields typed as job attachments
    """
    attachment_type_name = "Job_attachment"
    return get_json_paths_by_type(model, attachment_type_name)


def replace_job_attachment_ids(
    json_paths: list[str],
    tool_args: dict[str, Any],
    state: dict[str, Attachment],
    errors: list[str],
) -> dict[str, Any]:
    """Replace job attachment IDs in tool_args with full attachment objects from state.

    Each JSONPath is evaluated against a deep copy of ``tool_args``; every
    matched dict carrying an ``'ID'`` key is swapped for the serialized
    ``Attachment`` looked up by that ID in ``state``. IDs that are not valid
    UUIDs, or that have no entry in ``state``, leave the value untouched and
    append a message to ``errors`` instead.

    Args:
        json_paths: JSONPath expressions to evaluate (e.g. ``"$.attachment"``,
            ``"$.attachments[*]"``)
        tool_args: Tool arguments to transform (never mutated — a copy is returned)
        state: Mapping of attachment UUID strings to ``Attachment`` objects
        errors: Mutable list that collects error messages for bad/missing IDs

    Returns:
        A modified deep copy of ``tool_args`` with matched attachment IDs
        replaced by full attachment dicts.
    """
    updated_args = copy.deepcopy(tool_args)

    for path in json_paths:
        for found in parse(path).find(updated_args):
            candidate = found.value

            # Only dicts that explicitly carry an 'ID' key are candidates
            # for replacement; everything else is left as-is.
            if not isinstance(candidate, dict) or "ID" not in candidate:
                continue

            raw_id = str(candidate["ID"])

            try:
                uuid.UUID(raw_id)
            except (ValueError, AttributeError):
                errors.append(_create_job_attachment_error_message(raw_id))
                continue

            if raw_id not in state:
                errors.append(_create_job_attachment_error_message(raw_id))
                continue

            # Write the serialized attachment back at the matched location.
            serialized = state[raw_id].model_dump(by_alias=True, mode="json")
            found.full_path.update(updated_args, serialized)

    return updated_args


def _create_job_attachment_error_message(attachment_id_str: str) -> str:
return (
f"Could not find JobAttachment with ID='{attachment_id_str}'. "
f"Try invoking the tool again and please make sure that you pass "
f"valid JobAttachment IDs associated with existing JobAttachments in the current context."
)
Loading