Update app.py
app.py
CHANGED
@@ -2,7 +2,7 @@ import gradio as gr
 from llama_cpp import Llama
 
 # Path to your GGUF model inside the space
-MODEL_PATH = "
+MODEL_PATH = "Futuresony/gemma2-2b-gguf-q4_k_m"
 
 # Load model
 llm = Llama(model_path=MODEL_PATH, n_ctx=2048, n_threads=4, verbose=True)
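
Note that llama-cpp-python's Llama(model_path=...) expects a local .gguf file path, while the new value above looks like a Hub repo ID. A minimal sketch of how the file could be resolved first, assuming it still needs to be downloaded from the Hub (the filename below is a hypothetical guess at the GGUF file name in that repo):

from huggingface_hub import hf_hub_download
from llama_cpp import Llama

# Download the GGUF file from the Hub into the local cache.
# The filename is an assumption; adjust it to the actual file in the repo.
gguf_path = hf_hub_download(
    repo_id="Futuresony/gemma2-2b-gguf-q4_k_m",
    filename="gemma2-2b-q4_k_m.gguf",
)

# Load the downloaded local GGUF file with the llama.cpp bindings.
llm = Llama(model_path=gguf_path, n_ctx=2048, n_threads=4, verbose=True)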