# NOTE(review): this script was recovered from a Hugging Face Spaces page that
# showed "Runtime error" at startup — likely the Windows binary ./llama-run.exe
# failing to run on the Linux Space host (see the inference section below).
import os
import subprocess

import gdown
import streamlit as st

# Page configuration — must be the first Streamlit call in the script.
st.set_page_config(page_title="Offline AI Chatbot", layout="wide")

# Model setup
MODEL_FOLDER = "models"
MODEL_FILE = "mistral-3b-q4.gguf"
MODEL_PATH = os.path.join(MODEL_FOLDER, MODEL_FILE)

# Direct download link for the 3B model (Q4_K_M quantized GGUF, ~2 GB).
MODEL_URL = (
    "https://huggingface.co/Tech-Meld/HX-Mistral-3B_v0.1-Q4_K_M-GGUF"
    "/resolve/main/hx-mistral-3b_v0.1.Q4_K_M.gguf"
)

# Ensure models folder exists
os.makedirs(MODEL_FOLDER, exist_ok=True)

# Download the model once. Download into a temporary ".part" file and rename
# atomically on success: an interrupted download would otherwise leave a
# truncated file at MODEL_PATH, which the exists() check below would treat as
# a complete model on every later run.
if not os.path.exists(MODEL_PATH):
    part_path = MODEL_PATH + ".part"
    try:
        with st.spinner("Downloading Mistral 3B model (~2GB), please wait..."):
            # gdown returns the output path on success and None on failure
            # (it does not always raise), so check the return value too.
            downloaded = gdown.download(MODEL_URL, part_path, quiet=False)
        if downloaded is None:
            raise RuntimeError("gdown.download returned None (download failed)")
        os.replace(part_path, MODEL_PATH)  # atomic: file appears only when complete
        st.success("Model downloaded!")
    except Exception as exc:
        # Surface the failure in the UI instead of crashing the whole app,
        # and drop the partial file so the next run retries cleanly.
        if os.path.exists(part_path):
            os.remove(part_path)
        st.error(f"Model download failed: {exc}")
        st.stop()
# Streamlit UI
st.title("🟢 Offline AI Chatbot")

user_input = st.text_input("You:", "")

if st.button("Send") and user_input.strip() != "":
    with st.spinner("Generating response..."):
        try:
            # Run the llama.cpp CLI as a subprocess. Argument-list form
            # (shell=False) means the user's prompt is never shell-interpreted.
            # NOTE(review): "./llama-run.exe" is a Windows binary path; on a
            # Linux host (e.g. a Hugging Face Space) this raises
            # FileNotFoundError — confirm the correct binary for the platform.
            result = subprocess.run(
                ["./llama-run.exe", MODEL_PATH, user_input],
                capture_output=True,
                text=True,
                timeout=600,  # don't hang the UI forever if generation stalls
            )
        except FileNotFoundError:
            st.error(
                "llama-run.exe not found next to the app. "
                "Place the llama.cpp runner binary in the working directory."
            )
            st.stop()
        except subprocess.TimeoutExpired:
            st.error("Generation timed out after 600 seconds.")
            st.stop()

    if result.returncode != 0:
        # Show stderr instead of silently rendering an empty reply — a failed
        # run otherwise looks like the model answered with nothing.
        st.error(
            f"llama-run failed (exit {result.returncode}):\n"
            f"{result.stderr.strip()}"
        )
    else:
        reply = result.stdout.strip()
        st.text_area("AI:", value=reply, height=200)