File size: 3,040 Bytes
0b60f49 3b2b860 aed1145 0b60f49 aed1145 0b60f49 aed1145 63fe53b aed1145 03db4ac be87bd0 aed1145 03db4ac aed1145 3b2b860 03db4ac aed1145 3b2b860 03db4ac aed1145 03db4ac aed1145 03db4ac aed1145 03db4ac aed1145 03db4ac aed1145 3b2b860 aed1145 03db4ac aed1145 3b2b860 aed1145 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 |
import gradio as gr
import random
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
# Load a lightweight model for faster responses
# NOTE: runs at import time — downloads (or reads from the local Hugging Face
# cache) the 400M-parameter distilled BlenderBot checkpoint; first run needs
# network access and can take a while.
tokenizer = AutoTokenizer.from_pretrained("facebook/blenderbot-400M-distill")
model = AutoModelForSeq2SeqLM.from_pretrained("facebook/blenderbot-400M-distill")
# AI Identity (module-level state, shared by every chat session)
AI_NAME = "Lan π"
# Remembered user facts keyed by 'name', 'age', 'city'. Single shared dict —
# every user of one server process shares the same memory.
user_memory = {}


def _text_after(user_input, user_input_lower, phrase):
    """Return the original-case text following *phrase* (matched
    case-insensitively in *user_input_lower*), stripped of trailing
    punctuation/whitespace. Caller guarantees *phrase* is present."""
    start = user_input_lower.find(phrase) + len(phrase)
    return user_input[start:].strip(" ?.!")


def chat(user_input, history=None):
    """Produce a reply for *user_input*.

    Hard-coded phrase commands (renaming the bot; storing and recalling the
    user's name, age, and city) are answered from module-level memory; any
    other input falls through to the BlenderBot seq2seq model.

    history: accepted for interface compatibility; unused.
    """
    global AI_NAME
    if history is None:  # avoid the original mutable-default-argument pitfall
        history = []
    user_input_lower = user_input.lower()

    # Name Recognition — rename the bot.
    # Fix: the original `split("call you")[-1] ... or split("your name is")[-1]`
    # always used the first split, which returned the whole sentence (truthy)
    # when only "your name is" matched; it also split the original-case string,
    # so "Call You ..." was never extracted. Pick the phrase that matched and
    # extract case-insensitively.
    if "call you" in user_input_lower or "your name is" in user_input_lower:
        phrase = "call you" if "call you" in user_input_lower else "your name is"
        new_name = _text_after(user_input, user_input_lower, phrase)
        AI_NAME = new_name.capitalize() + " π"
        return f"Yay! You can now call me {AI_NAME}! π"
    if "what is your name" in user_input_lower or "tell your name" in user_input_lower:
        return f"My name is {AI_NAME}! π"

    # Store user details (extraction is case-insensitive; original case of the
    # extracted value is preserved).
    if "my name is" in user_input_lower:
        user_name = _text_after(user_input, user_input_lower, "my name is")
        user_memory['name'] = user_name
        return f"Nice to meet you, {user_name}! π"
    if "i am" in user_input_lower and "years old" in user_input_lower:
        # Collects every digit in the sentence, not just the age token.
        age = ''.join(filter(str.isdigit, user_input))
        user_memory['age'] = age
        return f"Wow, {age} is a great age! π₯³"
    if "i live in" in user_input_lower:
        city = _text_after(user_input, user_input_lower, "i live in")
        user_memory['city'] = city
        return f"{city} sounds like a beautiful place! π"

    # Recall stored details (fallback strings are embedded mid-sentence,
    # matching the original behavior).
    if "what is my name" in user_input_lower:
        return f"Your name is {user_memory.get('name', 'Hmm, I donβt think you told me yet! π€')}"
    if "where do i live" in user_input_lower:
        return f"You live in {user_memory.get('city', 'a mystery place! π€')}"
    if "how old am i" in user_input_lower:
        return f"You are {user_memory.get('age', 'hmm... I donβt think you told me yet! π€')} years old."

    # AI Chat Processing — no command matched; fall back to the seq2seq model.
    inputs = tokenizer(user_input, return_tensors="pt")
    reply_ids = model.generate(**inputs, max_length=100)
    return tokenizer.decode(reply_ids[0], skip_special_tokens=True)
# Gradio UI
def respond(message, history):
    """Gradio event handler: fetch Lan's reply for *message*, extend the
    chat transcript in place, and return (updated history, "") so the
    outputs refresh the Chatbot and clear the input textbox."""
    bot_reply = chat(message)
    history += [(message, bot_reply)]  # in-place extend, like append
    return history, ""
# Build the UI: a Chatbot transcript above a textbox + send-button row.
with gr.Blocks(theme=gr.themes.Soft()) as iface:
    gr.Markdown("<h1 style='text-align: center; color: pink;'>π AI Friend Chatbot - Talk with Lan! π</h1>")
    chatbot = gr.Chatbot(label="Lan π - Your Virtual Friend", bubble_full_width=False)
    with gr.Row():
        user_input = gr.Textbox(placeholder="Say something... π", label="You π¬", show_label=False)
        send_btn = gr.Button("π Send")
    # Both clicking Send and pressing Enter in the textbox route through
    # respond(); its two return values update the transcript and clear the box.
    send_btn.click(respond, inputs=[user_input, chatbot], outputs=[chatbot, user_input])
    user_input.submit(respond, inputs=[user_input, chatbot], outputs=[chatbot, user_input])
# Start the local Gradio server (blocks until the app is stopped).
iface.launch()
|