app.py CHANGED
@@ -3,9 +3,31 @@ from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
 
 # Supported summarization models
 model_choices = {
+    # 🥇 High Accuracy Models
     "Pegasus (google/pegasus-xsum)": "google/pegasus-xsum",
-    "
-    "
+    "BigBird-Pegasus (google/bigbird-pegasus-large-arxiv)": "google/bigbird-pegasus-large-arxiv",
+    "LongT5 Large (google/long-t5-tglobal-large)": "google/long-t5-tglobal-large",
+    "BART Large CNN (facebook/bart-large-cnn)": "facebook/bart-large-cnn",
+    "ProphetNet (microsoft/prophetnet-large-uncased-cnndm)": "microsoft/prophetnet-large-uncased-cnndm",
+    "LED (allenai/led-base-16384)": "allenai/led-base-16384",
+    "T5 Large (t5-large)": "t5-large",
+    "Flan-T5 Large (google/flan-t5-large)": "google/flan-t5-large",
+
+    # ⚖️ Balanced (Speed vs Accuracy)
+    "DistilBART CNN (sshleifer/distilbart-cnn-12-6)": "sshleifer/distilbart-cnn-12-6",
+    "DistilBART XSum (mrm8488/distilbart-xsum-12-6)": "mrm8488/distilbart-xsum-12-6",
+    "T5 Base (t5-base)": "t5-base",
+    "Flan-T5 Base (google/flan-t5-base)": "google/flan-t5-base",
+    "BART CNN SamSum (philschmid/bart-large-cnn-samsum)": "philschmid/bart-large-cnn-samsum",
+    "Pegasus SamSum (knkarthick/pegasus-samsum)": "knkarthick/pegasus-samsum",
+    "LongT5 Base (google/long-t5-tglobal-base)": "google/long-t5-tglobal-base",
+
+    # ⚡ Lighter / Faster Models
+    "T5 Small (t5-small)": "t5-small",
+    "MBART (facebook/mbart-large-cc25)": "facebook/mbart-large-cc25",
+    "MarianMT (Helsinki-NLP/opus-mt-en-ro)": "Helsinki-NLP/opus-mt-en-ro",  # translation model, not trained for summarization; placeholder only
+    "Falcon Instruct (tiiuae/falcon-7b-instruct)": "tiiuae/falcon-7b-instruct",  # general-purpose causal LM, not seq2seq; will not load with AutoModelForSeq2SeqLM
+    "BART ELI5 (yjernite/bart_eli5)": "yjernite/bart_eli5"  # trained for "explain like I'm five" answers
 }
 
 # Cache for loaded models/tokenizers
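
For context, the hunk header shows that app.py already imports AutoTokenizer and AutoModelForSeq2SeqLM, and the changed section ends at a comment announcing a cache for loaded models/tokenizers. The sketch below shows one plausible way such a cache could tie into model_choices; the names model_cache, load_model, and summarize, as well as the generation parameters, are illustrative assumptions rather than code taken from the file.

# A minimal caching sketch, assuming each model is loaded once on first
# use and reused afterwards. model_cache, load_model, and summarize are
# hypothetical names, not taken from the actual app.py.
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

model_choices = {
    "BART Large CNN (facebook/bart-large-cnn)": "facebook/bart-large-cnn",
    # ... remaining entries as in the diff above
}

model_cache = {}  # maps a checkpoint name to its (tokenizer, model) pair

def load_model(checkpoint: str):
    """Load a tokenizer-model pair once, then serve it from the cache."""
    if checkpoint not in model_cache:
        tokenizer = AutoTokenizer.from_pretrained(checkpoint)
        model = AutoModelForSeq2SeqLM.from_pretrained(checkpoint)
        model_cache[checkpoint] = (tokenizer, model)
    return model_cache[checkpoint]

def summarize(text: str, choice: str) -> str:
    """Summarize text with the checkpoint selected from model_choices."""
    tokenizer, model = load_model(model_choices[choice])
    inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=1024)
    summary_ids = model.generate(**inputs, num_beams=4, max_new_tokens=128)
    return tokenizer.decode(summary_ids[0], skip_special_tokens=True)

print(summarize("Long article text ...", "BART Large CNN (facebook/bart-large-cnn)"))

Caching matters on a Space because checkpoints of this size can take tens of seconds to load; keeping them in a dict means only the first request for each model pays that cost.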