File size: 2,432 Bytes
78cbc5c
c9939ff
 
 
 
 
78cbc5c
c9939ff
 
78cbc5c
 
 
 
 
 
 
 
 
 
c9939ff
 
 
78cbc5c
c9939ff
 
78cbc5c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c9939ff
78cbc5c
 
c9939ff
78cbc5c
c9939ff
78cbc5c
 
c9939ff
78cbc5c
 
 
 
 
 
c9939ff
 
 
78cbc5c
c9939ff
78cbc5c
 
 
c9939ff
78cbc5c
c9939ff
78cbc5c
c9939ff
78cbc5c
c9939ff
78cbc5c
c9939ff
78cbc5c
 
 
 
 
c9939ff
78cbc5c
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
# streamlit_app.py
# Interview-prep chatbot: Streamlit front end over the Hugging Face
# Inference API (chat-completion endpoint).
import os
import streamlit as st
from huggingface_hub import InferenceClient
from dotenv import load_dotenv

# Load environment variables for local development
load_dotenv()

# Initialize the Hugging Face Inference client.
# NOTE(review): if HUGGINGFACEHUB_API_TOKEN is unset, hf_token is None and
# the client is created without credentials — requests will likely be
# rejected or rate-limited; confirm the token is configured in deployment.
hf_token = os.getenv("HUGGINGFACEHUB_API_TOKEN")
client = InferenceClient(provider="auto", api_key=hf_token)

# Page chrome: tab title, icon, and a centered single-column layout.
st.set_page_config(
    page_title="Interview Prep Bot",
    page_icon="🧠",
    layout="centered",
)

st.title("🎓 Interview Preparation Chatbot")

# Seed session state on the first run only; st.session_state is a
# MutableMapping, so setdefault leaves existing values untouched across
# Streamlit reruns.
st.session_state.setdefault("history", [])
st.session_state.setdefault("topic", "Machine Learning")

# Sidebar: Topic selection.
# Single source of truth for the offered topics — previously this exact list
# was written out twice (once as the selectbox options, once inline in the
# index lookup), so the two copies could silently drift apart.
TOPICS = [
    "Machine Learning",
    "Data Structures",
    "Python",
    "Generative AI",
    "Computer Vision",
    "Deep Learning",
]

st.sidebar.header("Practice Topic")
st.session_state.topic = st.sidebar.selectbox(
    "Select a topic:",
    TOPICS,
    # Re-select the previously chosen topic so it survives Streamlit reruns.
    index=TOPICS.index(st.session_state.topic),
)

# Replay the conversation so far. History entries are ("You"|"Bot", text)
# pairs; map the sender label onto Streamlit's chat roles.
for speaker, text in st.session_state.history:
    with st.chat_message("user" if speaker == "You" else "assistant"):
        st.write(text)

# Prompt for the next question, mentioning the active topic.
user_input = st.chat_input(f"Ask me anything about {st.session_state.topic}…")

if user_input:
    # Record and echo the user's message.
    st.session_state.history.append(("You", user_input))
    st.chat_message("user").write(user_input)

    # Reserve a replaceable slot inside the assistant bubble. Writing twice
    # into a chat_message container APPENDS, so the original code left
    # "⏳ Thinking..." on screen above the reply; st.empty() lets the second
    # write replace the first.
    with st.chat_message("assistant"):
        slot = st.empty()
        slot.write("⏳ Thinking...")

        # Rebuild the full transcript in the OpenAI-style message format.
        # (Was: a list comprehension re-looped with manual append — one dict
        # comprehension does the same job.)
        messages = [
            {"role": "user" if sender == "You" else "assistant", "content": text}
            for sender, text in st.session_state.history
        ]

        # Call the Inference API; surface failures in the chat instead of
        # crashing the app.
        try:
            response = client.chat.completions.create(
                model="mistralai/Mistral-7B-Instruct-v0.1",
                messages=messages,
            )
            # `.content` is the documented attribute accessor on the returned
            # message object (dict subscripting is not guaranteed); guard
            # against a None content before stripping.
            bot_reply = (response.choices[0].message.content or "").strip()
        except Exception as e:
            bot_reply = f"❌ API Error: {e}"

        # Replace the placeholder with the final reply.
        slot.write(bot_reply)

    # Persist the bot turn so it is replayed on the next rerun.
    st.session_state.history.append(("Bot", bot_reply))
