import os
import streamlit as st
from huggingface_hub import InferenceClient
from dotenv import load_dotenv

# Load .env (if you’re using one)
load_dotenv()

# Instantiate the client once; requires HUGGINGFACEHUB_API_TOKEN in the environment (or .env)
client = InferenceClient(
    provider="auto",
    api_key=os.environ["HUGGINGFACEHUB_API_TOKEN"],
)

# Page config
st.set_page_config(page_title="Educational Chatbot", layout="wide")
st.title("🎓 Educational Chatbot")

# Initialize history
if "history" not in st.session_state:
    st.session_state.history = []

# Render existing messages
for sender, message in st.session_state.history:
    if sender == "You":
        st.chat_message("user").write(message)
    else:
        st.chat_message("assistant").write(message)

# Input box
user_input = st.chat_input("Ask me anything…")

if user_input:
    # Display user message immediately
    st.chat_message("user").write(user_input)
    st.session_state.history.append(("You", user_input))

    # Assistant placeholder: use st.empty() inside the chat message so the
    # "Thinking..." text can be replaced in place instead of stacking above the reply
    with st.chat_message("assistant"):
        placeholder = st.empty()
        placeholder.write("⏳ Thinking...")

    try:
        # Call HF Inference API
        completion = client.chat.completions.create(
            model="deepseek-ai/DeepSeek-R1",
            messages=[{"role": "user", "content": user_input}],
        )
        reply = completion.choices[0].message.content
    except Exception as e:
        reply = f"❌ API Error: {e}"

    # Overwrite the placeholder with the real response
    placeholder.write(reply)
    st.session_state.history.append(("Bot", reply))