Spaces:
Running
Running
File size: 4,313 Bytes
2ea8556 7c06e97 6900003 7c06e97 b27e104 434b328 8157183 6900003 7c06e97 8157183 7c06e97 b27e104 8157183 b27e104 6900003 4af653a b27e104 4af653a |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 |
from google.genai import types
from src.manager import GeminiManager
from src.tool_manager import ToolManager
import gradio as gr
import time
import base64
# Embed the HASHIRU logo as a base64 data URI so the header renders without
# serving a separate static asset. Use a context manager so the file handle
# is closed deterministically (the original leaked an open handle).
with open("HASHIRU_LOGO.png", "rb") as _logo_file:
    _logo_b64 = base64.b64encode(_logo_file.read()).decode()

# Static HTML banner shown at the top of the Gradio app: logo above the title.
_header_html = f"""
<div style="
    display: flex;
    flex-direction: column;
    align-items: center;
    padding-right: 24px;
">
    <img src="data:image/png;base64,{_logo_b64}" width="20" height="20" />
    <span style="margin-top: 8px; font-size: 20px; font-weight: bold; color: white;">
        HASHIRU AI
    </span>
</div>
"""
if __name__ == "__main__":
    model_manager = GeminiManager(gemini_model="gemini-2.0-flash")

    def user_message(msg: str, history: list) -> tuple[str, list]:
        """Append the user's message to the chat history and clear the input box."""
        history.append(gr.ChatMessage(role="user", content=msg))
        return "", history

    def handle_undo(history: list, undo_data: gr.UndoData):
        """Remove the undone message and restore its text to the input box."""
        # NOTE(review): indexes the entry with ['content'], which assumes
        # dict-like history items; entries appended via gr.ChatMessage may not
        # support subscripting — confirm against the installed Gradio version.
        return history[:undo_data.index], history[undo_data.index]['content']

    def handle_retry(history: list, retry_data: gr.RetryData):
        """Re-run the model on history truncated just after the retried message."""
        truncated = history[:retry_data.index + 1]
        # NOTE(review): retry/edit go through model_manager.run() while submit
        # uses model_manager.ask_llm() — verify this asymmetry is intentional.
        yield from model_manager.run(truncated)

    def handle_edit(history: list, edit_data: gr.EditData):
        """Apply the edited message text, then re-run the model from that point."""
        truncated = history[:edit_data.index + 1]
        truncated[-1]['content'] = edit_data.value
        yield from model_manager.run(truncated)

    def update_model(model_name: str) -> None:
        """Dropdown callback; currently only logs the selected model variant."""
        print(f"Model changed to: {model_name}")

    css = """
    #title-row { background: #2c2c2c; border-radius: 8px; padding: 8px; }
    """

    with gr.Blocks(css=css, fill_width=True, fill_height=True) as demo:
        local_storage = gr.BrowserState(["", ""])
        with gr.Column(scale=1):
            with gr.Row(scale=0):
                gr.HTML(_header_html)
                model_dropdown = gr.Dropdown(
                    choices=[
                        "HASHIRU",
                        "Static-HASHIRU",
                        "Cloud-Only HASHIRU",
                        "Local-Only HASHIRU",
                        "No-Economy HASHIRU",
                    ],
                    value="HASHIRU",
                    interactive=True,
                )
                model_dropdown.change(fn=update_model, inputs=model_dropdown, outputs=[])
            with gr.Row(scale=1):
                with gr.Sidebar(position="left"):
                    buttons = []
                    for i in range(1, 6):
                        button = gr.Button(f"Button {i}", elem_id=f"button-{i}")
                        # Bind i as a default argument to avoid late-binding
                        # closure capture (all lambdas sharing the last i).
                        button.click(fn=lambda x=i: print(f"Button {x} clicked"), inputs=[], outputs=[])
                        buttons.append(button)
                with gr.Column(scale=1):
                    chatbot = gr.Chatbot(
                        avatar_images=("HASHIRU_2.png", "HASHIRU.png"),
                        type="messages",
                        show_copy_button=True,
                        editable="user",
                        scale=1,
                        render_markdown=True,
                    )
                    input_box = gr.Textbox(label="Chat Message", scale=0, interactive=True, submit_btn=True)
                    chatbot.undo(handle_undo, chatbot, [chatbot, input_box])
                    chatbot.retry(handle_retry, chatbot, [chatbot, input_box])
                    chatbot.edit(handle_edit, chatbot, [chatbot, input_box])
                    input_box.submit(
                        user_message,  # Add user message to chat
                        inputs=[input_box, chatbot],
                        outputs=[input_box, chatbot],
                        queue=False,
                    ).then(
                        model_manager.ask_llm,  # Generate and stream response
                        inputs=chatbot,
                        outputs=[chatbot, input_box],
                        queue=True,
                        show_progress="full",
                        trigger_mode="always_last"
                    )

    demo.launch()
|