File size: 3,483 Bytes
0b60f49
3b2b860
03db4ac
 
0b60f49
03db4ac
 
 
 
0b60f49
03db4ac
 
63fe53b
03db4ac
3b2b860
03db4ac
be87bd0
03db4ac
 
 
 
3b2b860
03db4ac
 
 
 
 
3b2b860
03db4ac
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3b2b860
03db4ac
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3b2b860
03db4ac
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
import gradio as gr
import random
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the faster, lightweight model (DialoGPT-small for speed)
# NOTE(review): from_pretrained downloads weights on first run — needs
# network access or a populated Hugging Face cache; this happens at import time.
model_name = "microsoft/DialoGPT-small"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

# Memory for user details (name, age, city, likes, favorites)
# Module-level mutable state: shared by ALL sessions of the app, so one
# user's details are visible to every other concurrent user.
user_memory = {"name": "Friend", "age": "unknown", "city": "unknown", "likes": "unknown", "favorite": "unknown"}

def _extract_after(original, lowered, phrase):
    """Return the text following *phrase*, preserving the user's casing.

    *lowered* must be ``original.lower()``.  The phrase is located in the
    lowered copy (case-insensitive match) and the corresponding slice of
    *original* is returned with surrounding punctuation/whitespace stripped.
    """
    start = lowered.index(phrase) + len(phrase)
    return original[start:].strip(" ?.!\n")


def chat(user_input, chat_history=None):
    """Reply to *user_input*, remembering simple personal details.

    Hard-coded trigger phrases ("my name is", "i live in", ...) store
    details into the module-level ``user_memory`` dict or recall them;
    anything else falls through to the DialoGPT model for a generated reply.

    ``chat_history`` is accepted for interface compatibility but unused
    (default changed from a shared mutable ``[]`` to ``None``).
    Returns the bot's reply as a string.
    """
    global user_memory
    user_input_lower = user_input.lower()

    # --- name --------------------------------------------------------------
    # Fix: the original split the *original-cased* string on the lowercase
    # phrase, so "My name is John" matched the `if` but captured nothing
    # useful.  Every capture branch below locates the phrase in the lowered
    # copy instead, keeping detection and extraction consistent.
    if "my name is" in user_input_lower:
        user_memory["name"] = _extract_after(user_input, user_input_lower, "my name is")
        return f"Nice to meet you, {user_memory['name']}! 😊"

    # --- age ---------------------------------------------------------------
    if "how old am i" in user_input_lower:
        return f"You told me you are {user_memory['age']} years old! 🎂"
    if "i am" in user_input_lower and "years old" in user_input_lower:
        # Mirror the original split semantics: text after the LAST "i am",
        # up to the following "years old".
        start = user_input_lower.rindex("i am") + len("i am")
        end = user_input_lower.find("years old", start)
        if end == -1:  # "years old" appeared before the last "i am"
            end = len(user_input)
        user_memory["age"] = user_input[start:end].strip()
        return f"Got it! You are {user_memory['age']} years old. 🎉"

    # --- city --------------------------------------------------------------
    if "where do i live" in user_input_lower:
        return f"You told me you live in {user_memory['city']}! 🏡"
    if "i live in" in user_input_lower:
        user_memory["city"] = _extract_after(user_input, user_input_lower, "i live in")
        return f"Awesome! {user_memory['city']} sounds like a great place! 🌍"

    # --- likes ("what do i like" contains "i like", so recall comes first) --
    if "what do i like" in user_input_lower:
        return f"You said you like {user_memory['likes']}! 😃"
    if "i like" in user_input_lower:
        user_memory["likes"] = _extract_after(user_input, user_input_lower, "i like")
        return f"Nice! {user_memory['likes']} sounds amazing! 😍"

    # --- favorite ----------------------------------------------------------
    if "what is my favorite" in user_input_lower:
        return f"Your favorite is {user_memory['favorite']}! 🎉"
    if "my favorite is" in user_input_lower:
        user_memory["favorite"] = _extract_after(user_input, user_input_lower, "my favorite is")
        return f"Cool! {user_memory['favorite']} is a great choice! 🏆"

    # No trigger matched: generate a reply with the language model.
    # NOTE(review): max_length=50 counts prompt + reply tokens, so long
    # prompts leave little room for the answer — consider max_new_tokens.
    input_ids = tokenizer.encode(user_input + tokenizer.eos_token, return_tensors="pt")
    chat_history_ids = model.generate(input_ids, max_length=50, pad_token_id=tokenizer.eos_token_id)
    bot_response = tokenizer.decode(chat_history_ids[:, input_ids.shape[-1]:][0], skip_special_tokens=True)

    return bot_response

# Gradio UI with attractive design
def chat_ui():
    """Assemble the Gradio interface and return the Blocks object."""
    with gr.Blocks(theme=gr.themes.Soft()) as demo:
        # Header banner.
        gr.Markdown("""
        <h1 style='text-align: center; color: blue;'>🤖 AI Friend Chatbot</h1>
        <p style='text-align: center;'>Your friendly AI companion! Let's chat. 😊</p>
        """)

        # Conversation widgets.
        chatbot = gr.Chatbot(label="Your AI Friend", bubble_full_width=False)
        user_input = gr.Textbox(placeholder="Type a message...", label="You", show_label=False)
        send_btn = gr.Button("Send 💬")

        # Append the (user, bot) exchange to the transcript and clear the box.
        def respond(message, history):
            history.append((message, chat(message)))
            return history, ""

        # Both clicking the button and pressing Enter submit the message.
        for register in (send_btn.click, user_input.submit):
            register(respond, inputs=[user_input, chatbot], outputs=[chatbot, user_input])

    return demo

# Launch the chatbot
# Guarded so importing this module (e.g. for testing) doesn't start a server.
if __name__ == "__main__":
    chat_ui().launch()