avsolatorio committed on
Commit
0fe5347
·
1 Parent(s): 43c89ca

Fix tools access by LLM.

Browse files

Signed-off-by: Aivin V. Solatorio <avsolatorio@gmail.com>

Files changed (4) hide show
  1. mcp_client.py +38 -1
  2. pyproject.toml +1 -0
  3. services.py +1 -1
  4. uv.lock +11 -0
mcp_client.py CHANGED
@@ -69,6 +69,8 @@ Stay strictly within these boundaries while maintaining a helpful and respectful
69
 
70
 
71
  LLM_MODEL = "claude-3-5-haiku-20241022"
 
 
72
 
73
 
74
  class MCPClientWrapper:
@@ -180,13 +182,46 @@ class MCPClientWrapper:
180
  print(response.content)
181
  contents = response.content
182
 
183
- while len(contents) > 0:
 
 
 
184
  content = contents.pop(0)
185
 
186
  if content.type == "text":
187
  result_messages.append({"role": "assistant", "content": content.text})
188
  claude_messages.append({"role": "assistant", "content": content.text})
189
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
190
  elif content.type == "tool_use":
191
  tool_id = content.id
192
  tool_name = content.name
@@ -295,7 +330,9 @@ class MCPClientWrapper:
295
  system=SYSTEM_PROMPT,
296
  max_tokens=1000,
297
  messages=claude_messages,
 
298
  )
 
299
  except OverloadedError:
300
  return [
301
  {
 
69
 
70
 
71
  LLM_MODEL = "claude-3-5-haiku-20241022"
72
+ # What is the military spending of bangladesh in 2014?
73
+ # When a tool is needed for any step, ensure to add the token `TOOL_USE`.
74
 
75
 
76
  class MCPClientWrapper:
 
182
  print(response.content)
183
  contents = response.content
184
 
185
+ MAX_CALLS = 10
186
+ auto_calls = 0
187
+
188
+ while len(contents) > 0 and auto_calls < MAX_CALLS:
189
  content = contents.pop(0)
190
 
191
  if content.type == "text":
192
  result_messages.append({"role": "assistant", "content": content.text})
193
  claude_messages.append({"role": "assistant", "content": content.text})
194
 
195
+ # if (
196
+ # auto_calls < MAX_CALLS
197
+ # and "TOOL_USE" in content.text
198
+ # and len(contents) == 0
199
+ # ):
200
+ # # Call the LLM automatically.
201
+ # claude_messages.append({"role": "user", "content": "ok"})
202
+ # try:
203
+ # next_response = self.anthropic.messages.create(
204
+ # model=LLM_MODEL,
205
+ # system=SYSTEM_PROMPT,
206
+ # max_tokens=1000,
207
+ # messages=claude_messages,
208
+ # tools=self.tools,
209
+ # )
210
+ # except OverloadedError:
211
+ # return [
212
+ # {
213
+ # "role": "assistant",
214
+ # "content": "The LLM API is overloaded now, try again later...",
215
+ # }
216
+ # ]
217
+
218
+ # print("next_response", next_response.content)
219
+
220
+ # contents.extend(next_response.content)
221
+ # auto_calls += 1
222
+
223
+ # # continue
224
+
225
  elif content.type == "tool_use":
226
  tool_id = content.id
227
  tool_name = content.name
 
330
  system=SYSTEM_PROMPT,
331
  max_tokens=1000,
332
  messages=claude_messages,
333
+ tools=self.tools,
334
  )
335
+ auto_calls += 1
336
  except OverloadedError:
337
  return [
338
  {
pyproject.toml CHANGED
@@ -10,6 +10,7 @@ dependencies = [
10
  "gradio[mcp]>=5.29.1",
11
  "httpx>=0.28.1",
12
  "numpy>=2.2.6",
 
13
  "scikit-learn>=1.6.1",
14
  "sentence-transformers>=4.1.0",
15
  ]
 
10
  "gradio[mcp]>=5.29.1",
11
  "httpx>=0.28.1",
12
  "numpy>=2.2.6",
13
+ "python-ulid>=3.0.0",
14
  "scikit-learn>=1.6.1",
15
  "sentence-transformers>=4.1.0",
16
  ]
services.py CHANGED
@@ -71,7 +71,7 @@ class DetailedOutput(SearchOutput):
71
  def search_relevant_indicators(
72
  query: str, top_k: int = 1
73
  ) -> dict[str, list[SearchOutput] | str]:
74
- """Search for a shortlist of relevant indicators from the World Development Indicators (WDI) given the query. The search ranking may not be optimal, so the LLM may use this as shortlist and pick the most relevant from the list (if any).
75
 
76
  Args:
77
  query: The search query by the user or one formulated by an LLM based on the user's prompt.
 
71
  def search_relevant_indicators(
72
  query: str, top_k: int = 1
73
  ) -> dict[str, list[SearchOutput] | str]:
74
+ """Search for a shortlist of relevant indicators from the World Development Indicators (WDI) given the query. The search ranking may not be optimal, so the LLM may use this as shortlist and pick the most relevant from the list (if any). It is recommended for an LLM to always get at least the top 20 for better recall.
75
 
76
  Args:
77
  query: The search query by the user or one formulated by an LLM based on the user's prompt.
uv.lock CHANGED
@@ -1711,6 +1711,15 @@ wheels = [
1711
  { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546 },
1712
  ]
1713
 
 
 
 
 
 
 
 
 
 
1714
  [[package]]
1715
  name = "pytz"
1716
  version = "2025.2"
@@ -2140,6 +2149,7 @@ dependencies = [
2140
  { name = "gradio", extra = ["mcp"] },
2141
  { name = "httpx" },
2142
  { name = "numpy" },
 
2143
  { name = "scikit-learn" },
2144
  { name = "sentence-transformers" },
2145
  ]
@@ -2156,6 +2166,7 @@ requires-dist = [
2156
  { name = "gradio", extras = ["mcp"], specifier = ">=5.29.1" },
2157
  { name = "httpx", specifier = ">=0.28.1" },
2158
  { name = "numpy", specifier = ">=2.2.6" },
 
2159
  { name = "scikit-learn", specifier = ">=1.6.1" },
2160
  { name = "sentence-transformers", specifier = ">=4.1.0" },
2161
  ]
 
1711
  { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546 },
1712
  ]
1713
 
1714
+ [[package]]
1715
+ name = "python-ulid"
1716
+ version = "3.0.0"
1717
+ source = { registry = "https://pypi.org/simple" }
1718
+ sdist = { url = "https://files.pythonhosted.org/packages/9a/db/e5e67aeca9c2420cb91f94007f30693cc3628ae9783a565fd33ffb3fbfdd/python_ulid-3.0.0.tar.gz", hash = "sha256:e50296a47dc8209d28629a22fc81ca26c00982c78934bd7766377ba37ea49a9f", size = 28822 }
1719
+ wheels = [
1720
+ { url = "https://files.pythonhosted.org/packages/63/4e/cc2ba2c0df2589f35a4db8473b8c2ba9bbfc4acdec4a94f1c78934d2350f/python_ulid-3.0.0-py3-none-any.whl", hash = "sha256:e4c4942ff50dbd79167ad01ac725ec58f924b4018025ce22c858bfcff99a5e31", size = 11194 },
1721
+ ]
1722
+
1723
  [[package]]
1724
  name = "pytz"
1725
  version = "2025.2"
 
2149
  { name = "gradio", extra = ["mcp"] },
2150
  { name = "httpx" },
2151
  { name = "numpy" },
2152
+ { name = "python-ulid" },
2153
  { name = "scikit-learn" },
2154
  { name = "sentence-transformers" },
2155
  ]
 
2166
  { name = "gradio", extras = ["mcp"], specifier = ">=5.29.1" },
2167
  { name = "httpx", specifier = ">=0.28.1" },
2168
  { name = "numpy", specifier = ">=2.2.6" },
2169
+ { name = "python-ulid", specifier = ">=3.0.0" },
2170
  { name = "scikit-learn", specifier = ">=1.6.1" },
2171
  { name = "sentence-transformers", specifier = ">=4.1.0" },
2172
  ]