8 changes: 4 additions & 4 deletions packages/uipath-llamaindex/docs/llms_and_embeddings.md
@@ -49,10 +49,10 @@ print(response)
from uipath_llamaindex.llms import UiPathOpenAI, OpenAIModel

# Use a specific model
-llm = UiPathOpenAI(model=OpenAIModel.GPT_4O_2024_11_20)
+llm = UiPathOpenAI(model=OpenAIModel.GPT_4_1_2025_04_14)

# Or use a model string directly
llm = UiPathOpenAI(model="gpt-4o-2024-11-20")
llm = UiPathOpenAI(model="gpt-4.1-2025-04-14")
```

## UiPathOpenAIEmbedding
@@ -211,7 +211,7 @@ def add(a: int, b: int) -> int:
# Create agent with UiPath LLM
agent = ReActAgent(
    tools=[multiply, add],
-    llm=UiPathOpenAI(model=OpenAIModel.GPT_4O_2024_11_20))
+    llm=UiPathOpenAI(model=OpenAIModel.GPT_4_1_2025_04_14))

async def main():
    handler = agent.run("What is 2+(2*4)?")
@@ -241,7 +241,7 @@ index = VectorStoreIndex.from_documents(

# Create query engine with UiPath LLM
query_engine = index.as_query_engine(
-    llm=UiPathOpenAI(model=OpenAIModel.GPT_4O_2024_11_20)
+    llm=UiPathOpenAI(model=OpenAIModel.GPT_4_1_2025_04_14)
)

response = query_engine.query("What is machine learning?")
2 changes: 1 addition & 1 deletion packages/uipath-llamaindex/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "uipath-llamaindex"
version = "0.4.0"
version = "0.4.1"
description = "UiPath LlamaIndex SDK"
readme = { file = "README.md", content-type = "text/markdown" }
requires-python = ">=3.11"
@@ -9,7 +9,7 @@

from uipath_llamaindex.models import CreateTaskEvent

-llm = OpenAI(model="gpt-4o-mini")
+llm = OpenAI()


async def may_research_company(ctx: Context, company_name: str) -> bool:
@@ -11,7 +11,7 @@
from llama_index.llms.openai import OpenAI

load_dotenv()
-llm = OpenAI(model="gpt-4o-mini")
+llm = OpenAI()


async def may_research_company(ctx: Context, company_name: str) -> bool:
2 changes: 1 addition & 1 deletion packages/uipath-llamaindex/samples/multi-agent/main.py
@@ -9,7 +9,7 @@

from uipath_llamaindex.models import InvokeProcessEvent

-llm = OpenAI(model="gpt-4o-mini")
+llm = OpenAI()


async def may_research_company(ctx: Context, company_name: str) -> str:
@@ -6,7 +6,7 @@
)
from llama_index.llms.openai import OpenAI

-llm = OpenAI(model="gpt-4o-mini")
+llm = OpenAI()


async def may_research_company(ctx: Context, company_name: str) -> bool:
@@ -64,7 +64,7 @@ async def process_query(self, ev: UserQueryEvent) -> AgentResponseEvent:
self.agent = FunctionAgent(
name="UiPath MCP Agent",
description="An agent that can interact with MCP tools",
-llm=OpenAI(model="gpt-4o"),
+llm=OpenAI(),
tools=tools,
system_prompt="""You are a helpful assistant.
You have access to various tools through MCP (Model Context Protocol).
@@ -24,7 +24,7 @@
company_policy_files_directory = "sample_data/company_policies"
personal_preferences_files_directory = "sample_data/personal_preferences"

llm = UiPathOpenAI(model="gpt-4o-2024-11-20")
llm = UiPathOpenAI()


class CustomStartEvent(StartEvent):
@@ -44,7 +44,7 @@ async def handle_async_request(self, request: httpx.Request) -> httpx.Response:
class UiPathOpenAI(AzureOpenAI):
    def __init__(
        self,
-        model: str | OpenAIModel = OpenAIModel.GPT_4O_MINI_2024_07_18,
+        model: str | OpenAIModel = OpenAIModel.GPT_4_1_2025_04_14,
        api_version: str = "2024-10-21",
        **kwargs: Any,
    ):
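The signature change above is the core of this PR: with no `model` argument, `UiPathOpenAI` now defaults to `OpenAIModel.GPT_4_1_2025_04_14` instead of `OpenAIModel.GPT_4O_MINI_2024_07_18`, which is why the `UiPathOpenAI(...)` calls in the samples can drop their explicit model names. A minimal usage sketch, not part of the diff itself, assuming only the constructor and `OpenAIModel` enum shown in these hunks:

```python
from uipath_llamaindex.llms import OpenAIModel, UiPathOpenAI

# No model argument: picks up the new default, OpenAIModel.GPT_4_1_2025_04_14.
llm_default = UiPathOpenAI()

# An explicit enum member or model string still overrides the default.
llm_enum = UiPathOpenAI(model=OpenAIModel.GPT_4_1_2025_04_14)
llm_string = UiPathOpenAI(model="gpt-4.1-2025-04-14")
```

Note that the plain `OpenAI()` samples rely on llama_index's own default model, which this diff does not change.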
@@ -23,7 +23,7 @@ class CritiqueEvent(StopEvent):


class JokeFlow(Workflow):
llm = UiPathOpenAI(model="gpt-4o-mini-2024-07-18")
llm = UiPathOpenAI()

@step
async def generate_joke(self, ev: TopicEvent) -> JokeEvent:
@@ -7,7 +7,7 @@

from uipath_llamaindex.llms import UiPathOpenAI

llm = UiPathOpenAI(model="gpt-4o-mini-2024-07-18")
llm = UiPathOpenAI()


async def may_research_company(ctx: Context, company_name: str) -> bool:
3 changes: 2 additions & 1 deletion packages/uipath-llamaindex/uv.lock

Some generated files are not rendered by default.