Update app final mistral
Browse files
app.py
CHANGED
@@ -7,11 +7,24 @@ st.title("🧚♀️ Magic Story Buddy 📚")
|
|
7 |
st.markdown("Let's create a magical story just for you!")
|
8 |
|
9 |
# Initialize the model
|
10 |
-
@st.
|
11 |
def load_model():
|
12 |
-
|
13 |
-
|
14 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
15 |
|
16 |
model, tokenizer = load_model()
|
17 |
|
|
|
7 |
st.markdown("Let's create a magical story just for you!")
|
8 |
|
9 |
# Initialize the model
|
10 |
@st.cache(allow_output_mutation=True)
def load_model():
    """Load the children's-storyteller LLM and its tokenizer, cached across reruns.

    Returns:
        tuple: ``(model, tokenizer)`` on success, or ``(None, None)`` on failure
        so the module-level unpack ``model, tokenizer = load_model()`` does not
        raise ``TypeError`` when loading fails (the original implicitly
        returned ``None`` from the except branch, which crashed the unpack and
        masked the real error shown via ``st.error``).
    """
    # NOTE(review): st.cache is deprecated in recent Streamlit releases in
    # favor of st.cache_resource — kept as-is to match the file's Streamlit
    # version; confirm before upgrading.
    model_name = "blockblockblock/Young-Children-Storyteller-Mistral-7B-bpw6"
    try:
        # Load model and tokenizer from the Hugging Face hub.
        tokenizer = AutoTokenizer.from_pretrained(model_name)
        model = AutoModelForCausalLM.from_pretrained(model_name)

        # Fetch the config only to report the model type (debug aid).
        config = AutoConfig.from_pretrained(model_name)
        model_type = config.model_type
        print(f"Loaded model type: {model_type}")

        return model, tokenizer
    except Exception as e:
        # Surface the failure in the Streamlit UI and return a pair so
        # callers that unpack the result do not crash on None.
        st.error(f"Error loading model: {e}")
        return None, None
28 |
|
29 |
model, tokenizer = load_model()
|
30 |
|