Update api.py
api.py
CHANGED
@@ -150,7 +150,7 @@ print("모델 가중치 로드 완료!")
 
 def generate_text_sample(model, prompt, max_len=100, max_gen=98,
                          temperature=0.8, top_k=55, top_p=0.95, min_len=12):
-    model_input = text_to_ids(f"<start> {prompt}
+    model_input = text_to_ids(f"<start> {prompt}")
     model_input = model_input[:max_len]
     generated = list(model_input)
 
@@ -234,13 +234,13 @@ def respond(input_text):
     return response
 
     # Strip the memory-related logic entirely and build a plain prompt
-    full_prompt = f"현재 질문:\n{input_text}\n\n
+    full_prompt = f"현재 질문:\n{input_text}\n\n<sep>"
 
     for _ in range(3):  # retry up to 3 times
         full_response = generate_text_sample(model, full_prompt)
 
         if "응답:" in full_response:
-            response = full_response.split("
+            response = full_response.split("<sep>")[-1].strip()
         else:
             response = full_response.strip()
 
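For context, generate_text_sample exposes temperature, top_k, and top_p knobs (defaults 0.8, 55, 0.95 in the signature above). Below is a minimal sketch of how those three parameters are typically applied to a vector of next-token logits; it illustrates the standard technique only, not the actual sampling code in api.py, and the sample_next_token name plus the NumPy usage are assumptions.

import numpy as np

def sample_next_token(logits, temperature=0.8, top_k=55, top_p=0.95):
    # Temperature scaling: values below 1.0 sharpen the distribution.
    logits = np.asarray(logits, dtype=np.float64) / temperature

    # Top-k filtering: keep only the k highest-scoring tokens.
    if top_k is not None and top_k < logits.size:
        kth_value = np.sort(logits)[-top_k]
        logits = np.where(logits < kth_value, -np.inf, logits)

    # Softmax over the surviving logits.
    probs = np.exp(logits - np.max(logits))
    probs /= probs.sum()

    # Top-p (nucleus) filtering: keep the smallest prefix of tokens,
    # in descending probability order, whose cumulative mass reaches top_p.
    order = np.argsort(probs)[::-1]
    cumulative = np.cumsum(probs[order])
    cutoff = int(np.searchsorted(cumulative, top_p)) + 1
    kept = np.zeros_like(probs)
    kept[order[:cutoff]] = probs[order[:cutoff]]
    probs = kept / kept.sum()

    # Draw one token id from the filtered, renormalized distribution.
    return int(np.random.choice(len(probs), p=probs))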
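The second hunk builds the prompt as f"현재 질문:\n{input_text}\n\n<sep>" and then keeps everything after the last "<sep>" in the generated text as the answer. A small usage note on that split, with a made-up sample string:

full_response = "현재 질문:\n안녕하세요\n\n<sep> 안녕하세요! 무엇을 도와드릴까요?"
print(full_response.split("<sep>")[-1].strip())
# prints: 안녕하세요! 무엇을 도와드릴까요?

Using index [-1] rather than [1] means that even if the model echoes the prompt and emits "<sep>" more than once, only the text after the final separator is returned.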