Kunal Pai committed on
Commit e0964c2 · 1 Parent(s): c3c1740

Add GroqAgent and GroqModelManager implementations for Groq model integration

src/manager/agent_manager.py CHANGED
@@ -8,6 +8,7 @@ from src.manager.utils.streamlit_interface import output_assistant_response
 from google import genai
 from google.genai import types
 from google.genai.types import *
+from groq import Groq
 import os
 from dotenv import load_dotenv
 from src.manager.budget_manager import BudgetManager
@@ -119,6 +120,44 @@ class GeminiAgent(Agent):
 
     def delete_agent(self):
         self.messages = []
+
+class GroqAgent(Agent):
+    def __init__(
+        self,
+        agent_name: str,
+        base_model: str = "llama-3.3-70b-versatile",
+        system_prompt: str = "system.prompt",
+    ):
+        self.agent_name = agent_name
+        self.base_model = base_model
+        # load API key from environment
+        api_key = os.getenv("GROQ_API_KEY")
+        self.client = Groq(api_key=api_key)
+        # read system prompt content
+        with open(system_prompt, 'r') as f:
+            self.system_instruction = f.read()
+
+    def create_model(self) -> None:
+        # Groq models are available by name; no creation step
+        pass
+
+    def ask_agent(self, prompt: str) -> str:
+        messages = [
+            {"role": "system", "content": self.system_instruction},
+            {"role": "user", "content": prompt},
+        ]
+        response = self.client.chat.completions.create(
+            messages=messages,
+            model=self.base_model,
+        )
+        result = response.choices[0].message.content
+        print(result)
+        return result
+
+    def delete_agent(self) -> None:
+        # No delete support for Groq
+        pass
+
 @singleton
 class AgentManager():
     budget_manager: BudgetManager = BudgetManager()
@@ -126,7 +165,8 @@ class AgentManager():
         self._agents: Dict[str, Agent] = {}
         self._agent_types ={
             "ollama": OllamaAgent,
-            "gemini": GeminiAgent
+            "gemini": GeminiAgent,
+            "groq": GroqAgent,
         }
 
         self._load_agents()
@@ -330,6 +370,8 @@ class AgentManager():
            return "ollama"
        elif "gemini" in base_model:
            return "gemini"
+       elif "groq" in base_model:
+           return "groq"
        else:
            return "unknown"
 
 
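Below is a minimal usage sketch for the new GroqAgent. Assumptions not stated in the diff: GROQ_API_KEY is exported in the environment, a system.prompt file exists in the working directory, and the import path src.manager.agent_manager is inferred from the file name above; in the repository the AgentManager registry ("groq": GroqAgent) would normally construct agents rather than a direct caller.

    from src.manager.agent_manager import GroqAgent

    # Hypothetical direct caller. GroqAgent reads GROQ_API_KEY itself in __init__
    # and loads the system prompt from the given file path.
    agent = GroqAgent(
        agent_name="groq-helper",                # illustrative name
        base_model="llama-3.3-70b-versatile",    # default model from the diff
        system_prompt="system.prompt",           # default prompt file from the diff
    )
    agent.create_model()                         # no-op: Groq models are referenced by name
    reply = agent.ask_agent("Say hello in one sentence.")  # prints and returns the completion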
src/manager/llm_models.py CHANGED
@@ -5,6 +5,7 @@ from pathlib import Path
 from google import genai
 from google.genai import types
 from mistralai import Mistral
+from groq import Groq
 from src.manager.utils.streamlit_interface import output_assistant_response
 
 
@@ -136,3 +137,42 @@ class MistralModelManager(AbstractModelManager):
     def delete(self):
         # Implement model deletion logic (if applicable)
         self.model = None
+
+class GroqModelManager(AbstractModelManager):
+    def __init__(self, api_key, model_name="llama-3.3-70b-versatile", system_prompt_file="system.prompt"):
+        super().__init__(model_name, system_prompt_file)
+        self.client = Groq(api_key=api_key)
+
+    def is_model_loaded(self, model):
+        # Groq models are referenced by name; assume always available if name matches
+        return model == self.model_name
+
+    def create_model(self, base_model=None, context_window=4096, temperature=0):
+        # Groq does not require explicit creation; no-op
+        if not self.is_model_loaded(self.model_name):
+            output_assistant_response(f"Model {self.model_name} is not available on Groq.")
+
+    def request(self, prompt, temperature=0, context_window=4096):
+        # Read system instruction
+        with open(self.system_prompt_file, 'r') as f:
+            system_instruction = f.read()
+
+        # Build messages
+        messages = [
+            {"role": "system", "content": system_instruction},
+            {"role": "user", "content": prompt}
+        ]
+
+        # Send request
+        response = self.client.chat.completions.create(
+            messages=messages,
+            model=self.model_name,
+            temperature=temperature
+        )
+
+        # Extract and return content
+        return response.choices[0].message.content
+
+    def delete(self):
+        # No deletion support for Groq-managed models
+        output_assistant_response(f"Deletion not supported for Groq model {self.model_name}.")
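
A similar sketch for GroqModelManager, under the same assumptions (GROQ_API_KEY exported, system.prompt on disk, import path src.manager.llm_models inferred from the file name); the example prompt and variable names are illustrative:

    import os
    from src.manager.llm_models import GroqModelManager

    # Hypothetical caller; unlike GroqAgent, the API key is passed in explicitly.
    manager = GroqModelManager(
        api_key=os.getenv("GROQ_API_KEY"),
        model_name="llama-3.3-70b-versatile",
        system_prompt_file="system.prompt",
    )
    manager.create_model()                       # no-op beyond the model-name check
    print(manager.request("Summarize this commit in one line.", temperature=0))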