import os
import streamlit as st
from huggingface_hub import InferenceClient
from dotenv import load_dotenv
# 1. Load your HF token from .env
load_dotenv()

# 2. Instantiate the Hugging Face Inference client once.
#    Fail fast with an actionable message instead of the bare KeyError that
#    os.environ["..."] would raise when the token is missing.
_token = os.environ.get("HUGGINGFACEHUB_API_TOKEN")
if not _token:
    raise RuntimeError(
        "HUGGINGFACEHUB_API_TOKEN is not set — add it to your .env file "
        "or export it in the environment before starting the app."
    )
client = InferenceClient(
    provider="auto",
    api_key=_token,
)

# 3. Streamlit page config (must be the first st.* call in the script)
st.set_page_config(page_title="Educational Chatbot", layout="wide")
st.title("🎓 Educational Chatbot")

# 4. Initialize chat history in session state.
#    history is a list of (sender, message) tuples, where sender is the
#    display label "You" or "Bot".
if "history" not in st.session_state:
    st.session_state.history = []
def build_messages():
    """Translate the stored chat history into Hugging Face chat-API messages.

    Returns:
        list[dict]: one ``{"role", "content"}`` dict per stored turn; the
        sender label "You" maps to role "user", anything else to "assistant".
    """
    return [
        {"role": "user" if who == "You" else "assistant", "content": msg}
        for who, msg in st.session_state.history
    ]
# 5. Render the existing chat as chat bubbles, mapping the stored sender
#    label onto the Streamlit chat role in one expression.
for who, msg in st.session_state.history:
    bubble_role = "user" if who == "You" else "assistant"
    st.chat_message(bubble_role).write(msg)
# 6. Get new user input
user_input = st.chat_input("Ask me anything…")
if user_input:
    # 7. Immediately record & display the user message
    st.session_state.history.append(("You", user_input))
    st.chat_message("user").write(user_input)

    # 8. Reserve a single mutable slot inside the assistant bubble.
    #    BUG FIX: the original wrote to the st.chat_message container twice,
    #    which STACKS two elements — "⏳ Thinking..." stayed on screen above
    #    the reply. st.empty() is a placeholder whose content is REPLACED on
    #    each write, which is the intended behavior here.
    with st.chat_message("assistant"):
        placeholder = st.empty()
        placeholder.write("⏳ Thinking...")

    # 9. Build the full message history to send (includes the turn just added)
    full_messages = build_messages()

    # 10. Call the HF Inference API with the full conversation
    try:
        completion = client.chat.completions.create(
            model="deepseek-ai/DeepSeek-R1",
            messages=full_messages,
        )
        reply = completion.choices[0].message["content"]
    except Exception as e:
        # Best-effort: surface the failure in the chat instead of crashing
        # the Streamlit script run.
        reply = f"❌ API Error: {e}"

    # 11. Replace the "Thinking..." placeholder and persist the turn
    placeholder.write(reply)
    st.session_state.history.append(("Bot", reply))