# HASHIRU AI — Gradio chat front-end wired to a Gemini-backed model manager.
from google.genai import types
from src.manager import GeminiManager
from src.tool_loader import ToolLoader
import gradio as gr
import time
if __name__ == "__main__":
    # Load the tools using the ToolLoader class and hand them to the
    # manager that orchestrates Gemini calls with tool support.
    tool_loader = ToolLoader()
    model_manager = GeminiManager(
        toolsLoader=tool_loader,
        gemini_model="gemini-2.5-pro-preview-03-25",
    )

    def user_message(msg: str, history: list) -> tuple[str, list]:
        """Adds user message to chat history and clears the input box."""
        history.append(gr.ChatMessage(role="user", content=msg))
        return "", history

    def handle_undo(history, undo_data: gr.UndoData):
        """Truncate history at the undone message and return its text so the
        user can edit and resend it from the input box."""
        return history[:undo_data.index], history[undo_data.index]['content']

    def handle_retry(history, retry_data: gr.RetryData):
        """Re-run the model on the history truncated after the retried message."""
        new_history = history[:retry_data.index + 1]
        yield from model_manager.run(new_history)

    def handle_edit(history, edit_data: gr.EditData):
        """Replace the edited message's content, drop everything after it, and
        re-run the model from that point."""
        new_history = history[:edit_data.index + 1]
        new_history[-1]['content'] = edit_data.value
        yield from model_manager.run(new_history)

    with gr.Blocks(fill_width=True, fill_height=True) as demo:
        gr.Markdown("# Hashiru AI")
        chatbot = gr.Chatbot(
            avatar_images=("HASHIRU_2.png", "HASHIRU.png"),
            type="messages",
            show_copy_button=True,
            editable="user",
            scale=1,
        )
        input_box = gr.Textbox(
            label="Chat Message", scale=0, interactive=True, submit_btn=True
        )

        # Wire the chatbot's built-in message actions to their handlers.
        chatbot.undo(handle_undo, chatbot, [chatbot, input_box])
        chatbot.retry(handle_retry, chatbot, [chatbot, input_box])
        chatbot.edit(handle_edit, chatbot, [chatbot, input_box])

        input_box.submit(
            user_message,  # Add user message to chat
            inputs=[input_box, chatbot],
            outputs=[input_box, chatbot],
            queue=False,
        ).then(
            model_manager.run,  # Generate and stream response
            inputs=chatbot,
            outputs=[chatbot, input_box],
            queue=True,
            show_progress="full",
            trigger_mode="always_last",
        )

    demo.launch(share=True)