# i_am_lan / app.py — Hugging Face Space by mukilan-k (commit aed1145, "Update app.py")
import gradio as gr
import random
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
# Load a lightweight model for faster responses.
# NOTE(review): both artifacts are fetched from the Hugging Face Hub at import
# time, so the first start of the Space blocks on the download — confirm this
# is acceptable for the deployment target.
tokenizer = AutoTokenizer.from_pretrained("facebook/blenderbot-400M-distill")
model = AutoModelForSeq2SeqLM.from_pretrained("facebook/blenderbot-400M-distill")
# AI identity and remembered user details. Module-level, so they are shared by
# every visitor of the Space for the lifetime of the process.
AI_NAME = "Lan 💖"
user_memory = {}


def _text_after(text, text_lower, phrase):
    """Return the original-case text that follows *phrase*, trimmed of ' ?.!'.

    *text_lower* must be ``text.lower()`` and *phrase* must occur in it.
    Matching on the lowercased copy makes extraction case-insensitive while
    preserving the user's original capitalization in the returned value.
    """
    start = text_lower.index(phrase) + len(phrase)
    return text[start:].strip(" ?.!")


def chat(user_input, history=None):
    """Reply to *user_input* via keyword rules, falling back to the model.

    Rule branches (checked in order): rename the bot, answer its name, store
    the user's name/age/city, recall those details. If no rule matches, the
    module-level BlenderBot model generates a free-form reply.

    Args:
        user_input: raw text typed by the user.
        history: unused; kept for backward compatibility with existing
            callers (was a mutable default list — a Python anti-pattern).

    Returns:
        The bot's reply as a string.
    """
    global AI_NAME  # rebound when the user renames the bot
    user_input_lower = user_input.lower()

    # --- Rename the bot: "call you X" / "your name is X" -----------------
    if "call you" in user_input_lower or "your name is" in user_input_lower:
        # Extract after whichever phrase actually occurred. The original code
        # always split on "call you" first, so "your name is X" made the
        # whole sentence the new name; it also split the original-case text
        # on a lowercase phrase, which failed for e.g. "Call you Luna".
        phrase = "call you" if "call you" in user_input_lower else "your name is"
        new_name = _text_after(user_input, user_input_lower, phrase)
        if not new_name:
            # Nothing after the phrase — ask instead of adopting an empty name.
            return "Ooh, what would you like to call me? 😊"
        AI_NAME = new_name.capitalize() + " 💕"
        return f"Yay! You can now call me {AI_NAME}! 😍"

    if "what is your name" in user_input_lower or "tell your name" in user_input_lower:
        return f"My name is {AI_NAME}! 😊"

    # --- Store user details ----------------------------------------------
    if "my name is" in user_input_lower:
        user_name = _text_after(user_input, user_input_lower, "my name is")
        user_memory['name'] = user_name
        return f"Nice to meet you, {user_name}! 😊"

    if "i am" in user_input_lower and "years old" in user_input_lower:
        # Collect every digit in the message; assumes the only digits present
        # are the age (e.g. "I am 25 years old").
        age = ''.join(filter(str.isdigit, user_input))
        user_memory['age'] = age
        return f"Wow, {age} is a great age! 🥳"

    if "i live in" in user_input_lower:
        city = _text_after(user_input, user_input_lower, "i live in")
        user_memory['city'] = city
        return f"{city} sounds like a beautiful place! 🌍"

    # --- Recall stored details -------------------------------------------
    if "what is my name" in user_input_lower:
        return f"Your name is {user_memory.get('name', 'Hmm, I don’t think you told me yet! 🤔')}"
    if "where do i live" in user_input_lower:
        return f"You live in {user_memory.get('city', 'a mystery place! 🤭')}"
    if "how old am i" in user_input_lower:
        return f"You are {user_memory.get('age', 'hmm... I don’t think you told me yet! 🤔')} years old."

    # --- No rule matched: generate a reply with the seq2seq model ---------
    inputs = tokenizer(user_input, return_tensors="pt")
    reply_ids = model.generate(**inputs, max_length=100)
    return tokenizer.decode(reply_ids[0], skip_special_tokens=True)
# Gradio UI
def respond(message, history):
    """Submit callback: append the (user, bot) turn and clear the textbox.

    Returns the updated chat history (mutated in place) plus an empty string
    that Gradio writes back into the input box.
    """
    bot_reply = chat(message)
    history.append((message, bot_reply))
    return history, ""
# Assemble the page: banner, chat window, and an input row with a send button.
with gr.Blocks(theme=gr.themes.Soft()) as iface:
    gr.Markdown(
        "<h1 style='text-align: center; color: pink;'>💕 AI Friend Chatbot - Talk with Lan! 💕</h1>"
    )
    chatbot = gr.Chatbot(label="Lan 💖 - Your Virtual Friend", bubble_full_width=False)
    with gr.Row():
        message_box = gr.Textbox(
            placeholder="Say something... 💌", label="You 💬", show_label=False
        )
        send_button = gr.Button("💖 Send")

    # Clicking Send and pressing Enter both submit the message.
    send_button.click(respond, inputs=[message_box, chatbot], outputs=[chatbot, message_box])
    message_box.submit(respond, inputs=[message_box, chatbot], outputs=[chatbot, message_box])

iface.launch()