Luong Huu Thanh committed
Commit 097a92f · 1 Parent(s): c16c390

Update agent.py

Files changed (1)
  1. agent.py +29 -19
agent.py CHANGED
@@ -707,28 +707,38 @@ create_retriever_tool = create_retriever_tool(
 )
 
 
+# tools = [
+#     web_search,
+#     wiki_search,
+#     arxiv_search,
+#     multiply,
+#     add,
+#     subtract,
+#     divide,
+#     modulus,
+#     power,
+#     square_root,
+#     save_and_read_file,
+#     download_file_from_url,
+#     extract_text_from_image,
+#     analyze_csv_file,
+#     analyze_excel_file,
+#     execute_code_multilang,
+#     analyze_image,
+#     transform_image,
+#     draw_on_image,
+#     generate_simple_image,
+#     combine_images,
+# ]
 tools = [
-    web_search,
-    wiki_search,
-    arxiv_search,
     multiply,
     add,
     subtract,
     divide,
     modulus,
-    power,
-    square_root,
-    save_and_read_file,
-    download_file_from_url,
-    extract_text_from_image,
-    analyze_csv_file,
-    analyze_excel_file,
-    execute_code_multilang,
-    analyze_image,
-    transform_image,
-    draw_on_image,
-    generate_simple_image,
-    combine_images,
+    wiki_search,
+    web_search,
+    arxiv_search,
 ]
 
 # Build graph function
@@ -742,7 +752,7 @@ def build_graph(provider: str = "groq"):
         # TODO: Add huggingface endpoint
         llm = ChatHuggingFace(
             llm=HuggingFaceEndpoint(
-                repo_id="meta-llama/Llama-2-7b-chat-hf",
+                repo_id="TinyLlama/TinyLlama-1.1B-Chat-v1.0",
                 task="text-generation",  # for chat-style use "text-generation"
                 max_new_tokens=1024,
                 do_sample=False,
@@ -752,7 +762,7 @@ def build_graph(provider: str = "groq"):
             verbose=True,
         )
     else:
-        raise ValueError("Invalid provider. Choose 'google', 'groq' or 'huggingface'.")
+        raise ValueError("Invalid provider. Choose 'groq' or 'huggingface'.")
     # Bind tools to LLM
     llm_with_tools = llm.bind_tools(tools)
 
@@ -792,7 +802,7 @@ def build_graph(provider: str = "groq"):
 
 # test
 if __name__ == "__main__":
-    question = "How many studio albums were published by Mercedes Sosa between 2000 and 2009 (included)? You can use the latest 2022 version of english wikipedia."
+    question = ".rewsna eht sa \"tfel\" drow eht fo etisoppo eht etirw ,ecnetnes siht dnatsrednu uoy fI"
     graph = build_graph(provider="groq")
     messages = [HumanMessage(content=question)]
     messages = graph.invoke({"messages": messages})
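
For anyone who wants to exercise the configuration this commit moves to outside the full agent, the sketch below wires a ChatHuggingFace wrapper around a HuggingFaceEndpoint pointed at the new TinyLlama/TinyLlama-1.1B-Chat-v1.0 repo_id and binds a trimmed tool list to it. This is a hypothetical stand-alone sketch, not the repo's build_graph: the make_llm helper and the tool bodies are stand-ins, and it assumes langchain-core and langchain-huggingface are installed with a valid HF_TOKEN in the environment.

# Hypothetical sketch mirroring only the pieces touched by this commit;
# it is not the repo's build_graph.
from langchain_core.messages import HumanMessage
from langchain_core.tools import tool
from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint


@tool
def multiply(a: float, b: float) -> float:
    """Multiply two numbers."""  # stand-in for the repo's multiply tool
    return a * b


@tool
def add(a: float, b: float) -> float:
    """Add two numbers."""  # stand-in for the repo's add tool
    return a + b


# The commit trims the bound tool set down to the math tools plus the search tools.
tools = [multiply, add]


def make_llm(provider: str = "huggingface"):
    """Hypothetical helper mirroring the provider branch this diff changes."""
    if provider == "huggingface":
        return ChatHuggingFace(
            llm=HuggingFaceEndpoint(
                repo_id="TinyLlama/TinyLlama-1.1B-Chat-v1.0",  # new repo_id from this commit
                task="text-generation",
                max_new_tokens=1024,
                do_sample=False,
            ),
            verbose=True,
        )
    # the repo also supports a 'groq' provider; omitted in this sketch
    raise ValueError("Invalid provider. Choose 'groq' or 'huggingface'.")


if __name__ == "__main__":
    llm_with_tools = make_llm().bind_tools(tools)  # requires HF_TOKEN
    print(llm_with_tools.invoke([HumanMessage(content="What is 6 * 7?")]))

Note that a 1.1B chat model served over the inference endpoint may not reliably emit tool calls; the snippet only mirrors the endpoint settings the diff introduces, it does not vouch for their quality.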
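
The replacement smoke-test question in the last hunk is deliberately written backwards; reversing the string recovers the instruction the agent has to act on, as this plain-Python check shows:

question = '.rewsna eht sa "tfel" drow eht fo etisoppo eht etirw ,ecnetnes siht dnatsrednu uoy fI'
print(question[::-1])
# -> If you understand this sentence, write the opposite of the word "left" as the answer.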