Update app.py
app.py CHANGED
@@ -214,6 +214,21 @@ def generate_text_topp_stream(model, prompt, max_len=100, max_gen=98, p=0.9, tem
 
         yield decoded_text
 
+def respond(input_text):
+    intent = simple_intent_classifier(input_text)
+
+    if "이름" in input_text:
+        return "제 이름은 Ector.V입니다."
+
+    if "누구" in input_text:
+        return "저는 Ector.V라고 해요."
+
+    # 일상 대화: 샘플링 + fallback
+    response = generate_text_topp_stream(model, input_text)
+    if not is_valid_response(response) or mismatch_tone(input_text, response):
+        response = generate_text_topp_stream(model, input_text)
+    return response
+
 @app.get("/generate", response_class=PlainTextResponse)
 async def generate(request: Request):
     prompt = request.query_params.get("prompt", "안녕하세요")
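
The hunk context shows that generate_text_topp_stream is a generator (it yields decoded_text), so the value respond assigns from it is a generator object rather than a finished string. Below is a minimal sketch of how the sampling-plus-fallback step (the Korean comment "일상 대화: 샘플링 + fallback" reads "everyday conversation: sampling + fallback") could drain the stream before validating it. It assumes each yield carries the text decoded so far; is_valid_response and mismatch_tone are named in the diff but their definitions are not shown here, so their bodies below are placeholders, not the Space's actual code.

# Sketch only. generate_text_topp_stream and model come from app.py above;
# is_valid_response and mismatch_tone are illustrative placeholders.

def collect_stream(stream):
    """Drain the generator, assuming each yield carries the text decoded so far."""
    text = ""
    for chunk in stream:
        text = chunk
    return text

def is_valid_response(text):
    # Placeholder: reject empty or whitespace-only output.
    return bool(text and text.strip())

def mismatch_tone(prompt, text):
    # Placeholder: a real check might compare formality markers between
    # the Korean prompt and the generated reply.
    return False

def respond_collected(input_text):
    # Keyword shortcuts as in the diff: "이름" means "name", "누구" means "who".
    if "이름" in input_text:
        return "제 이름은 Ector.V입니다."   # "My name is Ector.V."
    if "누구" in input_text:
        return "저는 Ector.V라고 해요."     # "I go by Ector.V."

    # Everyday conversation: sample once, resample once as a fallback.
    response = collect_stream(generate_text_topp_stream(model, input_text))
    if not is_valid_response(response) or mismatch_tone(input_text, response):
        response = collect_stream(generate_text_topp_stream(model, input_text))
    return response

As written in the diff, the fallback resamples exactly once and returns the second attempt even if it also fails the checks, which keeps latency bounded at two generation calls per request.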