import gradio as gr
from transformers import pipeline, set_seed

# Load Hugging Face model (adjust as needed)
generator = pipeline("text-generation", model="gpt2")
set_seed(42)
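# Optional sketch: any text-generation checkpoint from the Hugging Face Hub can
# be swapped in above (e.g. the smaller "distilgpt2"), assuming it fits the
# Space's hardware:
# generator = pipeline("text-generation", model="distilgpt2")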
chat_history = []

# Text generation function
def codette_terminal(user_input):
    global chat_history
    if user_input.lower() in ["exit", "quit"]:
        chat_history = []
        return "🧠 Codette signing off. Type again to restart."
    output = generator(user_input, max_length=100, num_return_sequences=1)
    response = output[0]['generated_text'].strip()
    # Update terminal-style chat log
    chat_history.append(f"🎙️ You > {user_input}")
    chat_history.append(f"🧠 Codette > {response}")
    return "\n".join(chat_history[-10:])  # Keep last 10 entries for brevity
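# Note: the "text-generation" pipeline returns the prompt followed by the
# continuation, so Codette's reply above echoes the user input. An optional
# sketch (not required) that keeps only the newly generated text would be:
#   response = output[0]['generated_text'][len(user_input):].strip()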
# Gradio Interface
with gr.Blocks(title="Codette Terminal") as demo:
    gr.Markdown("## 🧬 Codette Terminal Interface (Hugging Face Edition)")
    gr.Markdown("Type your message below. Type `exit` to reset the conversation.\n")
    with gr.Row():
        input_box = gr.Textbox(label="Your input", placeholder="Ask me anything...", lines=1)
    output_box = gr.Textbox(label="Codette Output", lines=15, interactive=False)
    input_box.submit(fn=codette_terminal, inputs=input_box, outputs=output_box)
# Launch in HF Space
if __name__ == "__main__":
    demo.launch()
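    # Optional sketch: launch() also accepts standard Gradio kwargs when the
    # defaults need overriding, e.g. binding to all interfaces on a fixed port:
    # demo.launch(server_name="0.0.0.0", server_port=7860)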