File size: 1,616 Bytes
c9939ff
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
import os
import streamlit as st
from huggingface_hub import InferenceClient
from dotenv import load_dotenv

# Load environment variables from a local .env file (no-op in production,
# where HUGGINGFACEHUB_API_TOKEN is expected to already be set).
load_dotenv()

# Create the Hugging Face inference client; provider="auto" lets the hub
# route the request to an available inference provider.
client = InferenceClient(
    provider="auto",
    api_key=os.environ.get("HUGGINGFACEHUB_API_TOKEN"),
)

st.set_page_config(page_title="Interview Prep Bot", layout="wide")
st.title("🎓 Interview Preparation Chatbot")

# Conversation history is kept in session state as (sender, message) pairs,
# where sender is "You" or "Bot"; create it on first run only.
st.session_state.setdefault("history", [])

# Replay the stored transcript so the conversation survives Streamlit reruns.
for sender, message in st.session_state.history:
    with st.chat_message("user" if sender == "You" else "assistant"):
        st.write(message)

# Prompt input
text = st.chat_input("Ask me anything about interview prep…")
if text:
    # Record and echo the user's message.
    st.session_state.history.append(("You", text))
    st.chat_message("user").write(text)

    # Use st.empty() INSIDE the assistant bubble so the interim notice is
    # replaced by the final reply. (Calling .write() twice on the same
    # chat_message container appends a second element, leaving
    # "⏳ Thinking..." permanently visible above the answer.)
    with st.chat_message("assistant"):
        placeholder = st.empty()
        placeholder.write("⏳ Thinking...")

    # Rebuild the whole transcript in the OpenAI-style message format the
    # chat-completion API expects.
    messages = [
        {"role": "user" if sender == "You" else "assistant", "content": msg}
        for sender, msg in st.session_state.history
    ]

    # Call the HF chat-completion endpoint; surface failures in-line rather
    # than crashing the app.
    try:
        completion = client.chat.completions.create(
            model="mistralai/Mistral-7B-Instruct-v0.1",
            messages=messages,
        )
        # Attribute access is the documented API for huggingface_hub's
        # dataclass response types; guard against a None content field so
        # .strip() cannot raise.
        reply = (completion.choices[0].message.content or "").strip()
    except Exception as e:  # boundary: report the error to the user
        reply = f"❌ API Error: {e}"

    # Swap the placeholder for the final reply and persist it.
    placeholder.write(reply)
    st.session_state.history.append(("Bot", reply))