Coool2 committed on
Commit
c4be662
·
verified ·
1 Parent(s): 610ce11

Update agent.py

Browse files
Files changed (1) hide show
  1. agent.py +10 -16
agent.py CHANGED
@@ -21,7 +21,6 @@ import wandb
21
  from llama_index.callbacks.wandb import WandbCallbackHandler
22
  from llama_index.core.callbacks.base import CallbackManager
23
  from llama_index.core.callbacks.llama_debug import LlamaDebugHandler
24
- from llama_index.core import ServiceContext
25
 
26
  proj_llm = OpenRouter(
27
  model="mistralai/mistral-small-3.1-24b-instruct:free",
@@ -33,11 +32,11 @@ wandb_callback = WandbCallbackHandler(run_args={"project": "gaia-llamaindex-agen
33
  llama_debug = LlamaDebugHandler(print_trace_on_end=True)
34
  callback_manager = CallbackManager([wandb_callback, llama_debug])
35
 
36
- service_context = ServiceContext.from_defaults(
37
- llm=proj_llm,
38
- embed_model=HuggingFaceEmbedding("BAAI/bge-small-en-v1.5"),
39
- callback_manager=callback_manager
40
- )
41
 
42
 
43
 
@@ -113,8 +112,7 @@ class EnhancedRAGQueryEngine:
113
 
114
  index = VectorStoreIndex(
115
  nodes,
116
- embed_model=self.embed_model,
117
- service_context=service_context
118
  )
119
 
120
  return index
@@ -129,8 +127,7 @@ class EnhancedRAGQueryEngine:
129
  query_engine = RetrieverQueryEngine(
130
  retriever=retriever,
131
  node_postprocessors=[self.reranker],
132
- llm=proj_llm,
133
- service_context=service_context
134
  )
135
 
136
  return query_engine
@@ -234,8 +231,7 @@ analysis_agent = FunctionAgent(
234
  """,
235
  llm=proj_llm,
236
  tools=[enhanced_rag_tool, cross_document_tool],
237
- max_steps=5,
238
- service_context=service_context
239
  )
240
 
241
 
@@ -380,8 +376,7 @@ code_agent = ReActAgent(
380
  """,
381
  llm=proj_llm,
382
  tools=[code_execution_tool],
383
- max_steps = 5,
384
- service_context=service_context
385
  )
386
 
387
  # Créer des outils à partir des agents
@@ -440,8 +435,7 @@ class EnhancedGAIAAgent:
440
  """,
441
  llm=proj_llm,
442
  tools=[analysis_tool, research_tool, code_tool],
443
- max_steps = 10,
444
- service_context=service_context
445
  )
446
 
447
  async def solve_gaia_question(self, question_data: Dict[str, Any]) -> str:
 
21
  from llama_index.callbacks.wandb import WandbCallbackHandler
22
  from llama_index.core.callbacks.base import CallbackManager
23
  from llama_index.core.callbacks.llama_debug import LlamaDebugHandler
 
24
 
25
  proj_llm = OpenRouter(
26
  model="mistralai/mistral-small-3.1-24b-instruct:free",
 
32
  llama_debug = LlamaDebugHandler(print_trace_on_end=True)
33
  callback_manager = CallbackManager([wandb_callback, llama_debug])
34
 
35
+ from llama_index.core import Settings
36
+
37
+ Settings.llm = proj_llm
38
+ Settings.embed_model = HuggingFaceEmbedding("BAAI/bge-small-en-v1.5")
39
+ Settings.callback_manager = callback_manager
40
 
41
 
42
 
 
112
 
113
  index = VectorStoreIndex(
114
  nodes,
115
+ embed_model=self.embed_model
 
116
  )
117
 
118
  return index
 
127
  query_engine = RetrieverQueryEngine(
128
  retriever=retriever,
129
  node_postprocessors=[self.reranker],
130
+ llm=proj_llm
 
131
  )
132
 
133
  return query_engine
 
231
  """,
232
  llm=proj_llm,
233
  tools=[enhanced_rag_tool, cross_document_tool],
234
+ max_steps=5
 
235
  )
236
 
237
 
 
376
  """,
377
  llm=proj_llm,
378
  tools=[code_execution_tool],
379
+ max_steps = 5
 
380
  )
381
 
382
  # Créer des outils à partir des agents
 
435
  """,
436
  llm=proj_llm,
437
  tools=[analysis_tool, research_tool, code_tool],
438
+ max_steps = 10
 
439
  )
440
 
441
  async def solve_gaia_question(self, question_data: Dict[str, Any]) -> str: