Update app.py
app.py CHANGED
@@ -8,7 +8,7 @@ import time
 import spaces
 
 # --- Settings ---
-MODEL_ID = "
+MODEL_ID = "HyperCLOVAX-SEED-Vision-Instruct-3B"
 MAX_NEW_TOKENS = 512
 CPU_THREAD_COUNT = 4  # adjust if needed
 
@@ -85,7 +85,6 @@ except Exception as e:
 def get_system_prompt():
     current_date = datetime.datetime.now().strftime("%Y-%m-%d (%A)")
     return (
-        f"- The AI language model's name is \"MiMo\" and it was created by XiaomiMiMo.\n"
         f"- Today is {current_date}.\n"
         f"- Answer the user's questions kindly, in detail, and in Korean."
     )
@@ -139,7 +138,7 @@ def warmup_model():
 @spaces.GPU()
 def predict(message, history):
     """
-
+    Generates a response using the HyperCLOVAX-SEED-Vision-Instruct-3B model.
     'history' is assumed to be in Gradio 'messages' format: List[Dict].
     """
     if model is None or tokenizer is None:
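For context, here is a minimal sketch of how the pieces touched by this commit (MODEL_ID, get_system_prompt(), and the @spaces.GPU()-decorated predict()) typically fit together in a ZeroGPU Space. Everything beyond what the diff shows, including model/tokenizer loading, chat templating, and generation settings, is an assumption rather than the Space's actual app.py.

# Minimal sketch, not the actual app.py. Only MODEL_ID, MAX_NEW_TOKENS,
# get_system_prompt(), and the predict(message, history) signature appear
# in the diff above; the loading and generation details are assumptions.
import datetime
import spaces
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_ID = "HyperCLOVAX-SEED-Vision-Instruct-3B"  # the real repo id may include an org prefix
MAX_NEW_TOKENS = 512

tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForCausalLM.from_pretrained(MODEL_ID, torch_dtype=torch.bfloat16)

def get_system_prompt():
    current_date = datetime.datetime.now().strftime("%Y-%m-%d (%A)")
    return (
        f"- Today is {current_date}.\n"
        f"- Answer the user's questions kindly, in detail, and in Korean."
    )

@spaces.GPU()  # ZeroGPU: a GPU is attached only while this function runs
def predict(message, history):
    # 'history' arrives in Gradio 'messages' format, a list of
    # {"role": ..., "content": ...} dicts, so it can be passed straight to
    # the chat template after prepending the system prompt.
    messages = [{"role": "system", "content": get_system_prompt()}]
    messages += history
    messages.append({"role": "user", "content": message})

    input_ids = tokenizer.apply_chat_template(
        messages, add_generation_prompt=True, return_tensors="pt"
    ).to(model.device)
    output = model.generate(input_ids, max_new_tokens=MAX_NEW_TOKENS, do_sample=False)
    # Drop the prompt tokens and return only the newly generated text.
    return tokenizer.decode(output[0][input_ids.shape[-1]:], skip_special_tokens=True)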