alakxender committed
Commit 748c56b · 1 Parent(s): 111a749
Files changed (2)
  1. app.py +6 -12
  2. title_gen.py +19 -4
app.py CHANGED
@@ -75,10 +75,11 @@ with gr.Blocks(theme=gr.themes.Default(), css=css) as demo:
 
     import sys
     sys.path.append('.')
-    from title_gen import generate_title
-
+    from title_gen import generate_title,MODEL_OPTIONS
     with gr.Row():
         article_content = gr.Textbox(lines=10, label="Article Content", rtl=True, elem_classes="textbox1")
+    with gr.Row():
+        model_choice = gr.Dropdown(choices=list(MODEL_OPTIONS.keys()), value="V6 Model", label="Model")
     with gr.Row():
         seed = gr.Slider(0, 10000, value=42, step=1, label="Random Seed")
         use_sampling = gr.Checkbox(label="Use Sampling (Creative/Random)", value=False)
@@ -87,7 +88,7 @@ with gr.Blocks(theme=gr.themes.Default(), css=css) as demo:
     generate_btn = gr.Button("Generate Title")
     generate_btn.click(
         fn=generate_title,
-        inputs=[article_content, seed, use_sampling],
+        inputs=[article_content, seed, use_sampling, model_choice],
         outputs=generated_title
     )
 
@@ -152,16 +153,9 @@ This is an experimental model trained on a very small dataset of Dhivehi news ar
     - Due to limited training data, the model may not handle all topics or writing styles equally well.
     - Experimental nature means outputs should be reviewed carefully before use.
 
-    ### Tips for Best Results
-
-    - Use well-structured, news-style content similar to the training data
-    - Try multiple seeds to get different title options
-    - Keep content focused on a single main topic
-    - Review generated titles for accuracy and appropriateness
-
     """)
 
 # Launch the app
 if __name__ == "__main__":
-    #demo.launch(server_name="0.0.0.0", server_port=7811)
-    demo.launch()
+    demo.launch(server_name="0.0.0.0", server_port=7811)
+    #demo.launch()
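
For context, a minimal sketch of the new UI wiring this commit introduces, assembled from the hunks above: the Dropdown's selected key string is forwarded to generate_title as a fourth input. The generated_title output component is not visible in these hunks, so it is assumed here to be a plain Textbox, and the theme/CSS arguments are dropped for brevity.

import gradio as gr
from title_gen import generate_title, MODEL_OPTIONS  # MODEL_OPTIONS maps a label to a model repo id

with gr.Blocks() as demo:
    article_content = gr.Textbox(lines=10, label="Article Content")
    # The dropdown exposes the keys of MODEL_OPTIONS; its current value
    # (e.g. "V6 Model") is passed to generate_title as a string.
    model_choice = gr.Dropdown(choices=list(MODEL_OPTIONS.keys()), value="V6 Model", label="Model")
    seed = gr.Slider(0, 10000, value=42, step=1, label="Random Seed")
    use_sampling = gr.Checkbox(label="Use Sampling (Creative/Random)", value=False)
    generated_title = gr.Textbox(label="Generated Title")  # assumed output component
    generate_btn = gr.Button("Generate Title")
    generate_btn.click(
        fn=generate_title,
        inputs=[article_content, seed, use_sampling, model_choice],
        outputs=generated_title,
    )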
title_gen.py CHANGED
@@ -3,22 +3,37 @@ import numpy as np
 import torch
 from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
 
-MODEL_DIR = "alakxender/t5-dhivehi-title-generation-xs"
-tokenizer = AutoTokenizer.from_pretrained(MODEL_DIR)
-model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_DIR)
+# Available models
+MODEL_OPTIONS = {
+    "V6 Model": "alakxender/t5-divehi-title-generation-v6",
+    "XS Model": "alakxender/t5-dhivehi-title-generation-xs"
+}
+
+# Cache for loaded models/tokenizers
+MODEL_CACHE = {}
+
+def get_model_and_tokenizer(model_dir):
+    if model_dir not in MODEL_CACHE:
+        tokenizer = AutoTokenizer.from_pretrained(model_dir)
+        model = AutoModelForSeq2SeqLM.from_pretrained(model_dir)
+        MODEL_CACHE[model_dir] = (tokenizer, model)
+    return MODEL_CACHE[model_dir]
 
 prefix = "2title: "
 
 max_input_length = 512
 max_target_length = 32
 
-def generate_title(content, seed, use_sampling):
+def generate_title(content, seed, use_sampling, model_choice):
     random.seed(seed)
     np.random.seed(seed)
     torch.manual_seed(seed)
     if torch.cuda.is_available():
         torch.cuda.manual_seed_all(seed)
 
+    model_dir = MODEL_OPTIONS[model_choice]
+    tokenizer, model = get_model_and_tokenizer(model_dir)
+
     input_text = prefix + content.strip()
     inputs = tokenizer(
         input_text,
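
A short sketch of how the new per-model cache behaves, assuming the title_gen.py definitions above and that generate_title (whose generation body is not shown in this hunk) returns a title string; the article text is a placeholder:

from title_gen import generate_title, get_model_and_tokenizer, MODEL_OPTIONS

# First call loads the V6 checkpoint and stores (tokenizer, model) in MODEL_CACHE;
# the second call with the same key reuses the cached pair instead of reloading.
title_a = generate_title("...article text...", seed=42, use_sampling=False, model_choice="V6 Model")
title_b = generate_title("...article text...", seed=123, use_sampling=True, model_choice="V6 Model")

# Switching models triggers one additional load, after which both stay cached.
title_c = generate_title("...article text...", seed=42, use_sampling=False, model_choice="XS Model")

# Repeated lookups return the same tokenizer/model objects.
tok1, mdl1 = get_model_and_tokenizer(MODEL_OPTIONS["V6 Model"])
tok2, mdl2 = get_model_and_tokenizer(MODEL_OPTIONS["V6 Model"])
assert tok1 is tok2 and mdl1 is mdl2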