Update app.py
app.py CHANGED
@@ -4,9 +4,9 @@ from transformers import pipeline
 
 # Model setup
 models = {
-    "ChatGPT-like (FRED-T5)": pipeline("text2text-generation", model="ai-forever/FRED-T5-1.7B", tokenizer="ai-forever/FRED-T5-1.7B",
-    "DeepSeek-like (Qwen7B)": pipeline("text-generation", model="lightblue/DeepSeek-R1-Distill-Qwen-7B-Multilingual", tokenizer="lightblue/DeepSeek-R1-Distill-Qwen-7B-Multilingual",
-    "GigaChat-like (GigaChat-20B)": pipeline("text-generation", model="ai-sage/GigaChat-20B-A3B-instruct", tokenizer="ai-sage/GigaChat-20B-A3B-instruct",
+    "ChatGPT-like (FRED-T5)": pipeline("text2text-generation", model="ai-forever/FRED-T5-1.7B", tokenizer="ai-forever/FRED-T5-1.7B", device=-1),
+    "DeepSeek-like (Qwen7B)": pipeline("text-generation", model="lightblue/DeepSeek-R1-Distill-Qwen-7B-Multilingual", tokenizer="lightblue/DeepSeek-R1-Distill-Qwen-7B-Multilingual", device=-1),
+    "GigaChat-like (GigaChat-20B)": pipeline("text-generation", model="ai-sage/GigaChat-20B-A3B-instruct", tokenizer="ai-sage/GigaChat-20B-A3B-instruct", device=-1)
 }
 
 # Prompts
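For reference, a minimal sketch of what the added device=-1 argument does: it pins a transformers pipeline to CPU (a non-negative integer would instead select a CUDA device index), which is what CPU-only Spaces hardware needs. The model name below is the FRED-T5 checkpoint already listed in the diff; the prompt and generation settings are purely illustrative.

# Minimal sketch, assuming a CPU-only environment; loading the 1.7B model
# is still a sizable download, so this is for illustration rather than speed.
from transformers import pipeline

chat = pipeline(
    "text2text-generation",
    model="ai-forever/FRED-T5-1.7B",
    tokenizer="ai-forever/FRED-T5-1.7B",
    device=-1,  # -1 = CPU; 0, 1, ... would select a GPU index
)

# Example call (prompt chosen only to demonstrate the API)
print(chat("Hello! How are you?", max_new_tokens=32)[0]["generated_text"])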