SimrusDenuvo committed
Commit 10035b7 · verified · 1 Parent(s): c43dd36

Update app.py

Files changed (1)
  1. app.py +17 -22
app.py CHANGED
@@ -25,10 +25,10 @@ simple_instruction = (
     " (например: доступ, платежи, безопасность и т.д.)."
 )
 
-# Модели
+# Лёгкие и реальные модели, подходящие под требования
 models = {
-    "ChatGPT-like (saiga_mistral)": pipeline("text-generation", model="IlyaGusev/saiga_mistral_7b_merged", tokenizer="IlyaGusev/saiga_mistral_7b_merged", device=-1),
-    "DeepSeek-like (ruGPT3-medium)": pipeline("text-generation", model="ai-forever/rugpt3medium_based_on_gpt2", tokenizer="ai-forever/rugpt3medium_based_on_gpt2", device=-1),
+    "ChatGPT-like (ruGPT3-medium)": pipeline("text-generation", model="cointegrated/rugpt3-medium", tokenizer="cointegrated/rugpt3-medium", device=-1),
+    "DeepSeek-like (ruGPT3-small)": pipeline("text-generation", model="ai-forever/rugpt3small_based_on_gpt2", tokenizer="ai-forever/rugpt3small_based_on_gpt2", device=-1),
     "GigaChat-like (rubert-tiny2)": pipeline("text-generation", model="cointegrated/rubert-tiny2", tokenizer="cointegrated/rubert-tiny2", device=-1),
 }
 
@@ -57,13 +57,13 @@ def generate_dual_answers(user_input):
     for name, pipe in models.items():
         # CoT
         start_cot = time.time()
-        out_cot = pipe(prompt_cot, max_new_tokens=300, do_sample=True, top_p=0.9, temperature=0.7)[0]["generated_text"]
+        out_cot = pipe(prompt_cot, max_new_tokens=200, do_sample=True, top_p=0.9, temperature=0.7)[0]["generated_text"]
         end_cot = round(time.time() - start_cot, 2)
         answer_cot = out_cot.strip().split('\n')[-1]
 
         # Simple
         start_simple = time.time()
-        out_simple = pipe(prompt_simple, max_new_tokens=300, do_sample=True, top_p=0.9, temperature=0.7)[0]["generated_text"]
+        out_simple = pipe(prompt_simple, max_new_tokens=150, do_sample=True, top_p=0.9, temperature=0.7)[0]["generated_text"]
         end_simple = round(time.time() - start_simple, 2)
         answer_simple = out_simple.strip().split('\n')[-1]
 
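Both edited lines in the hunk above use the standard transformers text-generation pipeline pattern: the call returns a list of dicts with a "generated_text" field, and app.py keeps only the last line of the completion. A minimal standalone sketch of that pattern with one of the checkpoints added in this commit; the prompt string below is illustrative only, since the real prompt_cot / prompt_simple values are built elsewhere in app.py and are not part of this hunk:

from transformers import pipeline

# One of the checkpoints added in this commit; device=-1 keeps inference on CPU.
pipe = pipeline(
    "text-generation",
    model="ai-forever/rugpt3small_based_on_gpt2",
    tokenizer="ai-forever/rugpt3small_based_on_gpt2",
    device=-1,
)

# Illustrative prompt; app.py builds prompt_cot / prompt_simple from its instruction templates.
prompt = "Клиент: Не могу войти в приложение.\nКатегория обращения:"

out = pipe(prompt, max_new_tokens=150, do_sample=True, top_p=0.9, temperature=0.7)[0]["generated_text"]
answer = out.strip().split("\n")[-1]  # keep only the last generated line, as generate_dual_answers does
print(answer)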
@@ -74,34 +74,29 @@ def generate_dual_answers(user_input):
             "simple_time": end_simple
         }
 
-    return tuple(
-        [
-            results[m]["cot_answer"], f"{results[m]['cot_time']} сек",
-            results[m]["simple_answer"], f"{results[m]['simple_time']} сек"
-        ]
-        for m in models
-    )[0] + tuple(
-        [
-            results[m]["cot_answer"], f"{results[m]['cot_time']} сек",
-            results[m]["simple_answer"], f"{results[m]['simple_time']} сек"
-        ]
-        for m in list(models)[1:]
+    return (
+        results["ChatGPT-like (ruGPT3-medium)"]["cot_answer"], f"{results['ChatGPT-like (ruGPT3-medium)']['cot_time']} сек",
+        results["ChatGPT-like (ruGPT3-medium)"]["simple_answer"], f"{results['ChatGPT-like (ruGPT3-medium)']['simple_time']} сек",
+        results["DeepSeek-like (ruGPT3-small)"]["cot_answer"], f"{results['DeepSeek-like (ruGPT3-small)']['cot_time']} сек",
+        results["DeepSeek-like (ruGPT3-small)"]["simple_answer"], f"{results['DeepSeek-like (ruGPT3-small)']['simple_time']} сек",
+        results["GigaChat-like (rubert-tiny2)"]["cot_answer"], f"{results['GigaChat-like (rubert-tiny2)']['cot_time']} сек",
+        results["GigaChat-like (rubert-tiny2)"]["simple_answer"], f"{results['GigaChat-like (rubert-tiny2)']['simple_time']} сек"
     )
 
 # Интерфейс Gradio
 with gr.Blocks() as demo:
-    gr.Markdown("## 🏦 Сравнение моделей (Классификация клиентских обращений): ChatGPT-like, DeepSeek-like, GigaChat-like")
+    gr.Markdown("## 🏦 Сравнение моделей: Классификация клиентских обращений (CoT и обычный)")
 
-    inp = gr.Textbox(label="Вопрос клиента", placeholder="Например: Я не могу попасть в личный кабинет", lines=2)
-    btn = gr.Button("Сгенерировать")
+    inp = gr.Textbox(label="Обращение клиента", placeholder="Например: Не могу войти в приложение", lines=2)
+    btn = gr.Button("Получить ответы")
 
-    gr.Markdown("### ChatGPT-like (saiga_mistral)")
+    gr.Markdown("### ChatGPT-like (ruGPT3-medium)")
     cot1 = gr.Textbox(label="CoT ответ")
     cot1_time = gr.Textbox(label="Время CoT")
     simple1 = gr.Textbox(label="Обычный ответ")
     simple1_time = gr.Textbox(label="Время обычного")
 
-    gr.Markdown("### DeepSeek-like (ruGPT3-medium)")
+    gr.Markdown("### DeepSeek-like (ruGPT3-small)")
     cot2 = gr.Textbox(label="CoT ответ")
     cot2_time = gr.Textbox(label="Время CoT")
     simple2 = gr.Textbox(label="Обычный ответ")