"""Gradio chatbot that answers only in Bisaya (Cebuano).

Each user query is enriched with a DuckDuckGo Instant Answer search
result, translated to Cebuano via googletrans, then streamed through a
Mistral-Nemo instruct model.
"""

import gradio as gr
import requests
from googletrans import Translator
from huggingface_hub import InferenceClient

client = InferenceClient("mistralai/Mistral-Nemo-Instruct-2407")
translator = Translator()


def get_internet_data(query: str) -> str:
    """Fetch a short summary for *query* from DuckDuckGo's Instant Answer API.

    Returns the ``AbstractText`` field of the JSON response, or a Bisaya
    fallback message when nothing was found or the request failed.
    """
    url = "https://api.duckduckgo.com"
    params = {
        "q": query,
        "format": "json",
        "no_redirect": 1,
        "skip_disambig": 1,
    }
    try:
        response = requests.get(url, params=params, timeout=5)
        response.raise_for_status()
        data = response.json()
        result = data.get("AbstractText", "")
        if not result:
            result = "Wala koy nakuha nga impormasyon gikan sa internet."
    # Narrowed from bare `except Exception`: only network/HTTP failures
    # and malformed JSON are expected here.
    except (requests.RequestException, ValueError):
        result = "Naay problema sa pagkuha sa impormasyon gikan sa internet."
    return result


def translate_to_bisaya(text: str) -> str:
    """Translate *text* to Bisaya (Cebuano) using googletrans.

    Falls back to the untranslated text on any failure — googletrans is
    an unofficial API and raises a variety of exception types, so the
    broad catch here is a deliberate best-effort.
    """
    try:
        # 'ceb' is the ISO 639-3 code for Cebuano.
        translation = translator.translate(text, dest='ceb')
        return translation.text
    except Exception:
        return text


def respond(message, history: list[tuple[str, str]]):
    """Stream a Bisaya chat reply for *message*, given prior *history*.

    Yields the accumulated response text after each received token so
    Gradio can render it incrementally.
    """
    system_message = (
        "Ikaw usa ka buotan nga Chatbot. Tubaga lang sa binisaya, "
        "ug ayaw gamita ang english nga pinulungan. Gamita ang pinakabag-ong "
        "impormasyon gikan sa internet ug isalin kini sa binisaya."
    )
    max_tokens = 4096
    temperature = 0.6
    top_p = 0.95

    messages = [{"role": "system", "content": system_message}]
    for user_text, assistant_text in history:
        if user_text:
            messages.append({"role": "user", "content": user_text})
        if assistant_text:
            messages.append({"role": "assistant", "content": assistant_text})

    # Automatically perform an internet search for the query and
    # translate the result to Bisaya before handing it to the model.
    search_result = get_internet_data(message)
    translated_result = translate_to_bisaya(search_result)

    # Enrich the user query with the translated internet search result.
    enriched_message = (
        f"{message}\n\nMga resulta gikan sa internet (isinalin sa binisaya): "
        f"{translated_result}"
    )
    messages.append({"role": "user", "content": enriched_message})

    response = ""
    for token_message in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        delta = token_message.choices[0].delta
        # BUGFIX: newer huggingface_hub versions return `delta` as an
        # object, not a dict, so `delta.get(...)` raised AttributeError.
        # Handle both shapes.
        if isinstance(delta, dict):
            token = delta.get("content")
        else:
            token = getattr(delta, "content", None)
        # BUGFIX: the original `break` on an empty token truncated the
        # reply — streams legitimately emit empty deltas (e.g. the
        # role-only first chunk). Skip them instead.
        if not token:
            continue
        response += token
        yield response
        # Safety cap to avoid unbounded streaming output.
        if len(response) > 3000:
            break


demo = gr.ChatInterface(respond)

if __name__ == "__main__":
    demo.launch()