|
import gradio as gr |
|
import random |
|
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM |
|
|
|
|
|
# Load the BlenderBot tokenizer/model pair once at import time; both objects
# are module-level singletons reused by every call to chat() below.
# NOTE(review): first run downloads weights from the Hugging Face hub.
tokenizer = AutoTokenizer.from_pretrained("facebook/blenderbot-400M-distill")

model = AutoModelForSeq2SeqLM.from_pretrained("facebook/blenderbot-400M-distill")
|
|
|
|
|
# Display name of the bot; mutable via the "call you ..." command below.
AI_NAME = "Lan π"

# In-process store of facts the user has shared ('name', 'age', 'city').
# NOTE(review): shared across all users/sessions of the app — per-session
# memory would need gr.State instead of a module global.
user_memory = {}


def _extract_after(user_input, user_input_lower, phrase):
    """Return the original-case text following *phrase*, or '' if absent.

    *user_input_lower* must be ``user_input.lower()``; matching happens on
    the lowercased text so the extracted value keeps the user's casing.
    Trailing punctuation (space, ?, ., !) is stripped.
    """
    idx = user_input_lower.find(phrase)
    if idx == -1:
        return ""
    return user_input[idx + len(phrase):].strip(" ?.!")


def chat(user_input, history=None):
    """Answer one user message: rule-based shortcuts, then a model fallback.

    Recognized phrases let the user rename the bot, tell/ask their own name,
    age and city. Anything else is answered by the BlenderBot model.

    Args:
        user_input: raw text typed by the user.
        history: unused; kept for interface compatibility. (Was a mutable
            default ``[]``, a Python anti-pattern — now defaults to None.)

    Returns:
        The bot's reply as a string.
    """
    global AI_NAME  # user_memory is only mutated in place; no global needed

    user_input_lower = user_input.lower()

    # Rename the bot. BUGFIX: the original split the original-case input on a
    # lowercase phrase, so "Call you Bob" never matched, and its `or`-chain
    # could never reach the "your name is" fallback (split() on a missing
    # separator returns the whole, truthy string).
    if "call you" in user_input_lower or "your name is" in user_input_lower:
        new_name = (_extract_after(user_input, user_input_lower, "call you")
                    or _extract_after(user_input, user_input_lower, "your name is"))
        if new_name:  # guard: don't rename to " π" on e.g. "call you"
            AI_NAME = new_name.capitalize() + " π"
            return f"Yay! You can now call me {AI_NAME}! π"

    if "what is your name" in user_input_lower or "tell your name" in user_input_lower:
        return f"My name is {AI_NAME}! π"

    # Remember the user's name. BUGFIX: match case-insensitively while keeping
    # the name's original casing ("My name is Bob" previously failed to split).
    if "my name is" in user_input_lower:
        user_name = _extract_after(user_input, user_input_lower, "my name is")
        user_memory['name'] = user_name
        return f"Nice to meet you, {user_name}! π"

    if "i am" in user_input_lower and "years old" in user_input_lower:
        # Keep only the digits: "I am 12 years old" -> "12".
        age = ''.join(filter(str.isdigit, user_input))
        user_memory['age'] = age
        return f"Wow, {age} is a great age! π₯³"

    # Remember the user's city. BUGFIX: same case-sensitivity issue as above.
    if "i live in" in user_input_lower:
        city = _extract_after(user_input, user_input_lower, "i live in")
        user_memory['city'] = city
        return f"{city} sounds like a beautiful place! π"

    if "what is my name" in user_input_lower:
        return f"Your name is {user_memory.get('name', 'Hmm, I donβt think you told me yet! π€')}"

    if "where do i live" in user_input_lower:
        return f"You live in {user_memory.get('city', 'a mystery place! π€')}"

    if "how old am i" in user_input_lower:
        return f"You are {user_memory.get('age', 'hmm... I donβt think you told me yet! π€')} years old."

    # No rule matched: fall back to the seq2seq model for a free-form reply.
    inputs = tokenizer(user_input, return_tensors="pt")
    reply_ids = model.generate(**inputs, max_length=100)
    response = tokenizer.decode(reply_ids[0], skip_special_tokens=True)

    return response
|
|
|
|
|
def respond(message, history):
    """Run one chat turn for the UI.

    Gets the bot's reply for *message*, appends the (user, bot) pair to
    *history* in place, and returns the updated history plus an empty
    string to clear the input textbox.
    """
    bot_reply = chat(message)
    history.append((message, bot_reply))
    return history, ""
|
|
|
# --- Gradio UI wiring ------------------------------------------------------
# Builds the chat page: a title banner, the conversation display, and an
# input row. Both the Send button and pressing Enter in the textbox route
# through respond(), which returns (updated history, "") — the empty string
# clears the textbox after each turn.
with gr.Blocks(theme=gr.themes.Soft()) as iface:

    gr.Markdown("<h1 style='text-align: center; color: pink;'>π AI Friend Chatbot - Talk with Lan! π</h1>")

    # Conversation display; holds (user_message, bot_reply) tuples.
    chatbot = gr.Chatbot(label="Lan π - Your Virtual Friend", bubble_full_width=False)



    with gr.Row():

        user_input = gr.Textbox(placeholder="Say something... π", label="You π¬", show_label=False)

        send_btn = gr.Button("π Send")


    # Same handler for click and Enter so both paths behave identically.
    send_btn.click(respond, inputs=[user_input, chatbot], outputs=[chatbot, user_input])

    user_input.submit(respond, inputs=[user_input, chatbot], outputs=[chatbot, user_input])



iface.launch()
|
|