Update app.py
Browse files — try to add DuckDuckGo API
app.py
CHANGED
@@ -1,10 +1,35 @@
|
|
1 |
import gradio as gr
|
2 |
from huggingface_hub import InferenceClient
|
|
|
3 |
|
4 |
client = InferenceClient("mistralai/Mistral-Nemo-Instruct-2407")
|
5 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
6 |
def respond(message, history: list[tuple[str, str]]):
|
7 |
-
system_message = "
|
8 |
max_tokens = 4096
|
9 |
temperature = 0.6
|
10 |
top_p = 0.95
|
@@ -15,6 +40,20 @@ def respond(message, history: list[tuple[str, str]]):
|
|
15 |
messages.append({"role": "user", "content": user_text})
|
16 |
if assistant_text:
|
17 |
messages.append({"role": "assistant", "content": assistant_text})
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
18 |
messages.append({"role": "user", "content": message})
|
19 |
|
20 |
response = ""
|
@@ -30,13 +69,10 @@ def respond(message, history: list[tuple[str, str]]):
|
|
30 |
if not token:
|
31 |
break
|
32 |
response += token
|
33 |
-
# Only yield if new content was added
|
34 |
if response != previous_response:
|
35 |
yield response
|
36 |
previous_response = response
|
37 |
-
|
38 |
-
# Optional: break out if the response is too long to avoid infinite loops
|
39 |
-
if len(response) > 3000: # adjust threshold as needed
|
40 |
break
|
41 |
|
42 |
demo = gr.ChatInterface(respond)
|
|
|
1 |
import gradio as gr
|
2 |
from huggingface_hub import InferenceClient
|
3 |
+
import requests
|
4 |
|
5 |
client = InferenceClient("mistralai/Mistral-Nemo-Instruct-2407")
|
6 |
|
7 |
+
def get_internet_data(query: str) -> str:
    """Fetch a short summary for *query* from DuckDuckGo's Instant Answer API.

    The returned text is appended to the chatbot's conversation context.
    On success, the instant-answer abstract text is returned; when DuckDuckGo
    has no abstract, or the request fails, a fixed Bisaya fallback message is
    returned instead (the surrounding chatbot answers in Bisaya only).

    Parameters
    ----------
    query : str
        The search terms to look up.

    Returns
    -------
    str
        The abstract text, or a Bisaya "no information" / "error" message.
    """
    url = "https://api.duckduckgo.com"
    params = {
        "q": query,
        "format": "json",
        "no_redirect": 1,    # don't follow !bang redirects
        "skip_disambig": 1,  # skip disambiguation pages
    }
    try:
        response = requests.get(url, params=params, timeout=5)
        response.raise_for_status()
        data = response.json()
        # "AbstractText" carries the instant-answer summary; it is often empty.
        result = data.get("AbstractText", "")
        if not result:
            result = "Wala koy nakuha nga impormasyon gikan sa internet."
    except (requests.RequestException, ValueError):
        # RequestException: network/timeout/HTTP error from .get() or
        # raise_for_status(); ValueError: non-JSON response body from .json()
        # (requests' JSONDecodeError subclasses ValueError). Fall back to a
        # fixed Bisaya error message instead of crashing the chat stream.
        result = "Naay problema sa pagkuha sa impormasyon gikan sa internet."
    return result
|
30 |
+
|
31 |
def respond(message, history: list[tuple[str, str]]):
|
32 |
+
system_message = "Ikaw usa ka buotan nga Chatbot. Tubaga lang sa binisaya, ug ayaw gamita ang english nga pinulungan."
|
33 |
max_tokens = 4096
|
34 |
temperature = 0.6
|
35 |
top_p = 0.95
|
|
|
40 |
messages.append({"role": "user", "content": user_text})
|
41 |
if assistant_text:
|
42 |
messages.append({"role": "assistant", "content": assistant_text})
|
43 |
+
|
44 |
+
# Check if the user wants to do an internet search.
|
45 |
+
# Trigger the search if the message starts with "search:"
|
46 |
+
if message.lower().startswith("search:"):
|
47 |
+
query = message[7:].strip() # Remove the "search:" prefix
|
48 |
+
search_result = get_internet_data(query)
|
49 |
+
# Add the search result into the conversation context in Bisaya.
|
50 |
+
messages.append({
|
51 |
+
"role": "assistant",
|
52 |
+
"content": f"Mga resulta gikan sa internet para sa '{query}': {search_result}"
|
53 |
+
})
|
54 |
+
# Optionally, you can clear the original message if it's only a search command:
|
55 |
+
message = ""
|
56 |
+
|
57 |
messages.append({"role": "user", "content": message})
|
58 |
|
59 |
response = ""
|
|
|
69 |
if not token:
|
70 |
break
|
71 |
response += token
|
|
|
72 |
if response != previous_response:
|
73 |
yield response
|
74 |
previous_response = response
|
75 |
+
if len(response) > 3000:
|
|
|
|
|
76 |
break
|
77 |
|
78 |
demo = gr.ChatInterface(respond)
|