Coool2 committed
Commit 3f6e874 · verified · 1 Parent(s): a9b5090

Update agent.py

Files changed (1):
  agent.py  +0 -5
agent.py CHANGED
@@ -53,11 +53,6 @@ import sys
 
 import weave
 weave.init("gaia-llamaindex-agents")
-from llama_index.core import set_global_handler
-
-# Set Weave as the global callback handler
-set_global_handler("weave")
-
 def get_max_memory_config(max_memory_per_gpu):
     """Generate max_memory config for available GPUs"""
     if torch.cuda.is_available():
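For context, a minimal sketch of the Weave setup that agent.py keeps after this commit, assuming the weave package is installed and the project name shown in the diff; the commented lines reproduce the llama_index global-handler registration that this commit removes:

    import weave

    # Initialize Weave tracing for the project (line kept by this commit).
    weave.init("gaia-llamaindex-agents")

    # Removed in this commit: registering Weave as llama_index's global
    # callback handler.
    # from llama_index.core import set_global_handler
    # set_global_handler("weave")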