mohammedelfeky-ai commited on
Commit
6fc9a0b
·
verified ·
1 Parent(s): c3a01ba

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +166 -4
app.py CHANGED
@@ -1,16 +1,178 @@
1
- from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel,load_tool,tool
2
  import datetime
3
  import requests
4
  import pytz
5
  import yaml
 
 
6
  from tools.final_answer import FinalAnswerTool
7
  from tools.visit_webpage import VisitWebpageTool
8
- #from tools.web_shearch import DuckDuckGoSearchTool
9
  from smolagents import GradioUI
10
- import tempfile
11
  import gradio as gr
 
12
  import os
13
- from smolagents.models import GeminiModel
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
14
 
15
  '''
16
  # Below is an example of a tool that does nothing. Amaze us with your creativity !
 
1
+ from smolagents import CodeAgent, tool
2
  import datetime
3
  import requests
4
  import pytz
5
  import yaml
6
+ import os
7
+ import tempfile
8
  from tools.final_answer import FinalAnswerTool
9
  from tools.visit_webpage import VisitWebpageTool
 
10
  from smolagents import GradioUI
 
11
  import gradio as gr
12
+ import json
13
  import os
14
+ from typing import Dict, List, Optional, Union, Any
15
+
16
+ # Create a custom model adapter for Gemini since it's not natively supported in smolagents 1.13.0
17
+ from smolagents.models import LLMAdapter
18
+ import google.generativeai as genai
19
+
20
class CustomGeminiAdapter(LLMAdapter):
    """
    Custom adapter for Google's Gemini model.

    Bridges smolagents (no native Gemini support in 1.13.0) to the
    ``google.generativeai`` SDK: converts smolagents' OpenAI-style message
    list into Gemini chat history, and simulates function calling by
    describing the functions in the prompt and parsing a JSON
    ``{"function_call": ...}`` object back out of the model's reply.
    """

    def __init__(
        self,
        model: str = "gemini-1.5-pro",
        temperature: float = 0.7,
        max_tokens: int = 2048,
        api_key: Optional[str] = None,
    ):
        """Initialize the Gemini adapter.

        Args:
            model: Gemini model name to call.
            temperature: Sampling temperature.
            max_tokens: Maximum output tokens per response.
            api_key: Google API key; if omitted, falls back to the
                GOOGLE_API_KEY environment variable.

        Raises:
            ValueError: If no API key can be resolved.
        """
        self.model = model
        self.temperature = temperature
        self.max_tokens = max_tokens

        # Resolve the API key: explicit argument wins over the environment.
        resolved_key = api_key or os.environ.get("GOOGLE_API_KEY")
        if not resolved_key:
            raise ValueError("Google API key must be provided either through api_key parameter or GOOGLE_API_KEY environment variable")
        genai.configure(api_key=resolved_key)

        # Generation parameters passed to every request.
        self.generation_config = {
            "temperature": temperature,
            "max_output_tokens": max_tokens,
            "top_p": 0.95,
            "top_k": 0,
        }

    def call(
        self,
        system_message: str,
        messages: List[Dict[str, str]],
        functions: Optional[List[Dict]] = None,
        function_call: Optional[str] = None,
        **kwargs,
    ) -> Dict[str, Any]:
        """
        Call the Gemini model with messages and return the response.

        Args:
            system_message: System message to set context.
            messages: List of messages in the conversation.
            functions: Function definitions (for simulated function calling).
            function_call: Function to call (accepted for interface
                compatibility; not used by this adapter).

        Returns:
            Dict with a "content" key, plus a "function_call" key
            ({"name": ..., "arguments": ...}) when the model emitted one.
        """
        try:
            gemini_messages = self._build_history(system_message, messages)

            # For function calling (tools): append the function catalogue as
            # the final user turn so the model sees it last.
            if functions:
                gemini_messages.append({
                    "role": "user",
                    "parts": [{"text": self._function_context(functions)}]
                })

            model = genai.GenerativeModel(
                model_name=self.model,
                generation_config=self.generation_config
            )

            # Replay everything but the last turn as history, then send the
            # last turn to obtain a fresh response.
            chat = model.start_chat(history=gemini_messages[:-1])
            last_message = gemini_messages[-1]["parts"][0]["text"]
            response = chat.send_message(last_message)
            content = response.text

            function_call_data = None
            if functions:
                # FIX: the previous regex-based extraction could not match
                # balanced nested braces, so any function call with a
                # non-empty "arguments" dict was truncated, failed to parse,
                # and was silently dropped. raw_decode handles nesting.
                function_call_data, content = self._extract_function_call(content)

            # Response format matching what smolagents expects.
            result = {"content": content}
            if function_call_data:
                result["function_call"] = {
                    "name": function_call_data.get("function_call", {}).get("name", ""),
                    "arguments": function_call_data.get("function_call", {}).get("arguments", {})
                }
            return result

        except Exception as e:
            # Boundary handler: surface any SDK/parsing failure as plain
            # content instead of crashing the agent loop.
            return {"content": f"Error calling Gemini model: {str(e)}"}

    @staticmethod
    def _build_history(system_message: str, messages: List[Dict[str, str]]) -> List[Dict[str, Any]]:
        """Convert smolagents messages into Gemini chat-history dicts.

        Gemini has no "system" role, so the system message (and any
        system-role entries) are injected as user turns.
        """
        gemini_messages: List[Dict[str, Any]] = []

        if system_message:
            gemini_messages.append({
                "role": "user",
                "parts": [{"text": f"System: {system_message}"}]
            })
            # Fabricated acknowledgement keeps user/model turns alternating.
            gemini_messages.append({
                "role": "model",
                "parts": [{"text": "I understand and will follow these instructions."}]
            })

        for message in messages:
            if message["role"] == "system":
                gemini_messages.append({
                    "role": "user",
                    "parts": [{"text": f"System instruction: {message['content']}"}]
                })
            else:
                # Anything that is not "user" maps to Gemini's "model" role.
                role = "user" if message["role"] == "user" else "model"
                gemini_messages.append({
                    "role": role,
                    "parts": [{"text": message["content"]}]
                })
        return gemini_messages

    @staticmethod
    def _function_context(functions: List[Dict]) -> str:
        """Render the available functions as a prompt section instructing the
        model to answer with a JSON ``function_call`` object."""
        function_descriptions = []
        for func in functions:
            function_descriptions.append(f"""
Function Name: {func.get('name')}
Description: {func.get('description')}
Parameters: {json.dumps(func.get('parameters', {}))}
""")

        return """
You have access to the following functions. When you decide to use a function, respond with a JSON object with 'function_call' key containing 'name' and 'arguments' keys.
Example: {"function_call": {"name": "function_name", "arguments": {"arg1": "value1"}}}

Functions:
""" + "\n\n".join(function_descriptions)

    @staticmethod
    def _extract_function_call(content: str) -> tuple:
        """Find and remove an embedded ``{"function_call": ...}`` JSON object.

        Returns:
            (parsed_dict_or_None, content_with_the_object_removed). Uses
            json.JSONDecoder.raw_decode so nested braces inside "arguments"
            are parsed correctly (a regex cannot balance braces).
        """
        decoder = json.JSONDecoder()
        search_from = 0
        while True:
            start = content.find("{", search_from)
            if start == -1:
                return None, content
            try:
                candidate, end = decoder.raw_decode(content, start)
            except json.JSONDecodeError:
                search_from = start + 1
                continue
            if isinstance(candidate, dict) and "function_call" in candidate:
                cleaned = (content[:start] + content[end:]).strip()
                return candidate, cleaned
            # Valid JSON but not a function call (e.g. a literal in prose);
            # keep scanning past this opening brace.
            search_from = start + 1
176
 
177
  '''
178
  # Below is an example of a tool that does nothing. Amaze us with your creativity !