@@ -3,14 +3,16 @@
 import argparse
 import os
 import re
-import yaml
-from typing import Any, Optional, List
+from typing import Any

-from langchain.agents import AgentExecutor, create_tool_calling_agent
-from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
+from langchain.agents import AgentExecutor
+from langchain.agents import create_tool_calling_agent
+from langchain.prompts import ChatPromptTemplate
+from langchain.prompts import MessagesPlaceholder
 from langchain.tools import BaseTool
 from langchain_community.chat_message_histories import ChatMessageHistory
 from langchain_core.runnables.history import RunnableWithMessageHistory
+import yaml

 # LLM names declared as Any so mypy accepts fallback to None if imports fail
 GoogleGenerativeAI: Any
@@ -24,12 +26,14 @@

 try:
     from langchain_openai import ChatOpenAI  # type: ignore
+
     OpenAI = ChatOpenAI
 except Exception:
     OpenAI = None

 try:
     from langchain_ollama import ChatOllama  # type: ignore
+
     Ollama = ChatOllama
 except Exception:
     Ollama = None
@@ -53,12 +57,13 @@ class FlexibleAgent:
     Flexible agent supporting multiple LLM providers (Gemini, OpenAI,
     Ollama, etc.) for unstructuredDataHandler.
     """
+
     def __init__(
         self,
-        provider: Optional[str] = None,
-        api_key: Optional[str] = None,
-        model: Optional[str] = None,
-        tools: Optional[List[Any]] = None,
+        provider: str | None = None,
+        api_key: str | None = None,
+        model: str | None = None,
+        tools: list[Any] | None = None,
         dry_run: bool = False,
         config_path: str = "config/model_config.yaml",
         **kwargs,
@@ -72,15 +77,14 @@ def __init__(
             config_path: Path to the model configuration file.
             kwargs: Additional LLM-specific arguments
         """
-        with open(config_path, 'r', encoding='utf-8') as f:
+        with open(config_path, encoding="utf-8") as f:
             config = yaml.safe_load(f)

         # Normalize provider and validate
-        provider = (provider or config.get('default_provider') or "").lower()
+        provider = (provider or config.get("default_provider") or "").lower()
         if not provider:
             raise ValueError(
-                "Provider not specified and no default_provider found in "
-                "config."
+                "Provider not specified and no default_provider found in " "config."
             )

         self.dry_run = bool(dry_run)
@@ -94,28 +98,24 @@ def __init__(
             return

         # Configure agent from YAML
-        agent_config = config.get('agent', {})
-        verbose = agent_config.get('verbose', True)
+        agent_config = config.get("agent", {})
+        verbose = agent_config.get("verbose", True)

         # Configure provider
-        provider_config = config.get('providers', {}).get(provider, {})
-        model = model or provider_config.get('default_model')
+        provider_config = config.get("providers", {}).get(provider, {})
+        model = model or provider_config.get("default_model")

         try:
-            if provider == 'gemini':
+            if provider == "gemini":
                 self.llm = GoogleGenerativeAI(
                     google_api_key=api_key, model=model, **kwargs
                 )
-            elif provider == 'openai':
-                self.llm = OpenAI(
-                    openai_api_key=api_key, model=model, **kwargs
-                )
+            elif provider == "openai":
+                self.llm = OpenAI(openai_api_key=api_key, model=model, **kwargs)
             elif provider == "ollama" and Ollama is not None:
                 self.llm = Ollama(model=model, **kwargs)
             else:
-                raise ValueError(
-                    f"Unsupported or unavailable provider: {provider}"
-                )
+                raise ValueError(f"Unsupported or unavailable provider: {provider}")
         except Exception as e:
             raise RuntimeError(
                 f"Failed to initialize LLM provider '{provider}': {e}"
@@ -133,9 +133,7 @@ def __init__(
         )

         agent = create_tool_calling_agent(self.llm, self.tools, prompt)
-        agent_executor = AgentExecutor(
-            agent=agent, tools=self.tools, verbose=verbose
-        )
+        agent_executor = AgentExecutor(agent=agent, tools=self.tools, verbose=verbose)

         def get_session_history(session_id: str) -> ChatMessageHistory:
             if session_id not in self.store:
@@ -154,15 +152,15 @@ def run(self, input_data: str, session_id: str = "default"):
         Run the agent on the provided input data (prompt).
         """
         result = self.agent.invoke(
-            {"input": input_data},
-            config={"configurable": {"session_id": session_id}}
+            {"input": input_data}, config={"configurable": {"session_id": session_id}}
         )
         return result["output"]


 class MockAgent:
     """A mock agent for dry-run and CI that can echo or use tools."""
-    def __init__(self, tools: Optional[List[BaseTool]] = None):
+
+    def __init__(self, tools: list[BaseTool] | None = None):
         self.last_input = None
         self.tools = tools or []

@@ -188,21 +186,21 @@ def main():
     try:
         from dotenv import load_dotenv  # type: ignore
     except Exception:  # pragma: no cover - fallback if dotenv is unavailable
+
         def load_dotenv(*_args, **_kwargs):  # type: ignore
             return False
+
     parser = argparse.ArgumentParser()
     parser.add_argument(
         "--dry-run", action="store_true", help="Run agent in dry-run mode"
     )
     parser.add_argument("--provider", help="LLM provider to use")
     parser.add_argument("--model", help="Model name to use")
     parser.add_argument(
-        "--prompt", default="What is the capital of France?",
-        help="The prompt to run"
+        "--prompt", default="What is the capital of France?", help="The prompt to run"
     )
     parser.add_argument(
-        "--session-id", default="default",
-        help="The session ID for the conversation"
+        "--session-id", default="default", help="The session ID for the conversation"
     )
     args = parser.parse_args()

@@ -212,8 +210,7 @@ def load_dotenv(*_args, **_kwargs): # type: ignore
     api_key = None
     if not args.dry_run:
         if args.provider and (
-            args.provider.lower() == "gemini" or
-            args.provider.lower() == "google"
+            args.provider.lower() == "gemini" or args.provider.lower() == "google"
         ):
             api_key = os.getenv("GOOGLE_GEMINI_API_KEY")
         elif args.provider and args.provider.lower() == "openai":
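
Note: the last hunk above is cut off before the end of main(). For orientation only, here is a minimal sketch of how the FlexibleAgent class touched by this commit might be driven from Python; the module name, the model name, and the contents of config/model_config.yaml are assumptions for illustration, not part of the diff.

# Illustrative sketch -- module name, model name and YAML contents are assumed.
import os

from flexible_agent import FlexibleAgent  # hypothetical module name for the file above

# Roughly what `python flexible_agent.py --provider gemini --prompt "..."` would do:
agent = FlexibleAgent(
    provider="gemini",                           # must appear under providers: in the YAML
    api_key=os.getenv("GOOGLE_GEMINI_API_KEY"),  # same env var main() reads for Gemini
    model="gemini-1.5-flash",                    # assumed; if omitted, the provider's default_model is used
)
print(agent.run("What is the capital of France?", session_id="demo"))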