# Source: Hugging Face Space "deepseek R1" by Imsachinsingh00 (rev ebd56c6)
import os
import streamlit as st
from huggingface_hub import InferenceClient
from dotenv import load_dotenv
# 1. Load environment variables (HF token) from .env
load_dotenv()

# 2. Streamlit page config — must be the first Streamlit call in the
# script, so it runs before any st.error we might emit below.
st.set_page_config(page_title="Educational Chatbot", layout="wide")
st.title("🎓 Educational Chatbot")

# 3. Instantiate the Hugging Face Inference client once.
# Fail fast with a readable in-app message instead of an unhandled
# KeyError when the token is missing from the environment/.env file.
_hf_token = os.environ.get("HUGGINGFACEHUB_API_TOKEN")
if not _hf_token:
    st.error("Missing HUGGINGFACEHUB_API_TOKEN — add it to your .env file.")
    st.stop()

client = InferenceClient(
    provider="auto",
    api_key=_hf_token,
)

# 4. Initialize chat history in session state
if "history" not in st.session_state:
    # history is a list of (sender, message) tuples
    st.session_state.history = []
def build_messages():
    """Translate the stored chat history into API message dicts.

    Each (sender, text) tuple in ``st.session_state.history`` becomes a
    ``{"role": ..., "content": ...}`` dict as expected by the Hugging
    Face chat-completions API. The sender label "You" maps to the
    "user" role; anything else maps to "assistant".
    """
    return [
        {
            "role": "user" if sender == "You" else "assistant",
            "content": text,
        }
        for sender, text in st.session_state.history
    ]
# 5. Render the existing chat as chat bubbles
for sender, text in st.session_state.history:
    role = "user" if sender == "You" else "assistant"
    st.chat_message(role).write(text)

# 6. Get new user input
user_input = st.chat_input("Ask me anything…")
if user_input:
    # 7. Immediately record & display the user message
    st.session_state.history.append(("You", user_input))
    st.chat_message("user").write(user_input)

    # 8. Show a placeholder for the assistant response.
    # BUG FIX: calling .write() twice on the st.chat_message container
    # appends TWO elements to the bubble, so "⏳ Thinking..." stayed on
    # screen next to the final reply. An st.empty() slot inside the
    # bubble can be overwritten in place.
    with st.chat_message("assistant"):
        placeholder = st.empty()
        placeholder.write("⏳ Thinking...")

    # 9. Build the full message history to send
    full_messages = build_messages()

    # 10. Call the HF Inference API with the full conversation
    try:
        completion = client.chat.completions.create(
            model="deepseek-ai/DeepSeek-R1",
            messages=full_messages,
        )
        reply = completion.choices[0].message["content"]
    except Exception as e:
        # Surface API failures as a chat reply instead of crashing the app
        reply = f"❌ API Error: {e}"

    # 11. Update the placeholder and session history
    placeholder.write(reply)
    st.session_state.history.append(("Bot", reply))