# Cebuano chatbot: Gradio chat UI over Mistral-Nemo (Hugging Face Inference),
# with each user message enriched by DuckDuckGo Instant Answer search results.
# Third-party dependencies: Gradio for the chat UI, huggingface_hub for model
# inference, requests for the DuckDuckGo search call.
import gradio as gr
from huggingface_hub import InferenceClient
import requests

# Module-level inference client bound to the Mistral-Nemo instruct model.
client = InferenceClient("mistralai/Mistral-Nemo-Instruct-2407")
def get_internet_data(query: str) -> str:
    """Fetch a short summary for *query* from DuckDuckGo's Instant Answer API.

    Returns the API's ``AbstractText`` field when present; otherwise a Cebuano
    fallback message ("found no information" / "problem fetching information").
    Never raises: expected network/JSON failures map to the fallback string.
    """
    url = "https://api.duckduckgo.com"
    params = {
        "q": query,
        "format": "json",
        "no_redirect": 1,     # follow no redirects for !bang queries
        "skip_disambig": 1,   # prefer a direct answer over a disambiguation page
    }
    try:
        response = requests.get(url, params=params, timeout=5)
        response.raise_for_status()
        data = response.json()
    # Narrowed from bare `except Exception`: only network errors and malformed
    # JSON are expected here; anything else should surface as a real bug.
    except (requests.RequestException, ValueError):
        return "Naay problema sa pagkuha sa impormasyon gikan sa internet."
    # The Instant Answer API puts its short summary in "AbstractText".
    result = data.get("AbstractText", "")
    if not result:
        result = "Wala koy nakuha nga impormasyon gikan sa internet."
    return result
def respond(message, history: list[tuple[str, str]]):
    """Stream a chat reply for *message* given the (user, assistant) history.

    The latest user message is enriched with DuckDuckGo search results from
    get_internet_data() before being sent to the model; the accumulated reply
    text is yielded incrementally as tokens stream in (Gradio streaming style).
    """
    # System prompt: answer only in Cebuano, use fresh internet info if needed.
    system_message = "Ikaw usa ka buotan nga Chatbot. Tubaga lang sa binisaya, ug ayaw gamita ang english nga pinulungan. Gamita ang pinakabag-ong impormasyon gikan sa internet kung kinahanglan."
    max_tokens = 4096
    temperature = 0.6
    top_p = 0.95

    messages = [{"role": "system", "content": system_message}]
    for user_text, assistant_text in history:
        # Ensure alternating roles: user then assistant.
        if user_text:
            messages.append({"role": "user", "content": user_text})
        if assistant_text:
            messages.append({"role": "assistant", "content": assistant_text})

    # Enrich the user message with search results for their query.
    search_result = get_internet_data(message)
    enriched_message = f"{message}\n\nMga resulta gikan sa internet: {search_result}"
    messages.append({"role": "user", "content": enriched_message})

    response = ""
    previous_response = ""
    for token_message in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = token_message.choices[0].delta.get("content", "")
        # BUG FIX: an empty delta (e.g. a role-only first chunk or keep-alive)
        # previously hit `break` and truncated the reply; skip it instead.
        if not token:
            continue
        response += token
        # Yield only when new content was added.
        if response != previous_response:
            yield response
            previous_response = response
        # Safety valve: stop runaway generations past ~3000 characters.
        if len(response) > 3000:
            break
# Wire the streaming respond() generator into a Gradio chat interface.
demo = gr.ChatInterface(respond)

# Launch the web UI only when run as a script (not when imported).
if __name__ == "__main__":
    demo.launch()