Abbasid committed on
Commit
1994090
·
verified ·
1 Parent(s): cc3df33

Update agent.py

Browse files
Files changed (1) hide show
  1. agent.py +2 -5
agent.py CHANGED
@@ -282,13 +282,10 @@ def create_agent_executor(provider: str = "groq"):
282
  print(f"Initializing agent with provider: {provider}")
283
 
284
  # Step 1: Build LLMs - Use Google for vision capabilities
285
- if provider == "google":
286
- main_llm = ChatGoogleGenerativeAI(model="gemini-1.5-pro-latest", temperature=0)
287
- vision_llm = ChatGoogleGenerativeAI(model="gemini-1.5-pro-latest", temperature=0)
288
- elif provider == "groq":
289
  main_llm = ChatGroq(model_name="meta-llama/llama-4-maverick-17b-128e-instruct", temperature=0)
290
  # Use Google for vision since Groq's vision support may be limited
291
- main_llm = ChatGroq(model_name="meta-llama/llama-4-maverick-17b-128e-instruct", temperature=0)
292
  elif provider == "huggingface":
293
  main_llm = ChatHuggingFace(llm=HuggingFaceEndpoint(repo_id="mistralai/Mixtral-8x7B-Instruct-v0.1", temperature=0.1))
294
  vision_llm = ChatGoogleGenerativeAI(model="gemini-1.5-pro-latest", temperature=0)
 
282
  print(f"Initializing agent with provider: {provider}")
283
 
284
  # Step 1: Build LLMs - Use Google for vision capabilities
285
+ if provider == "groq":
 
 
 
286
  main_llm = ChatGroq(model_name="meta-llama/llama-4-maverick-17b-128e-instruct", temperature=0)
287
  # Use Google for vision since Groq's vision support may be limited
288
+ vision_llm = ChatGroq(model_name="meta-llama/llama-4-maverick-17b-128e-instruct", temperature=0)
289
  elif provider == "huggingface":
290
  main_llm = ChatHuggingFace(llm=HuggingFaceEndpoint(repo_id="mistralai/Mixtral-8x7B-Instruct-v0.1", temperature=0.1))
291
  vision_llm = ChatGoogleGenerativeAI(model="gemini-1.5-pro-latest", temperature=0)