Rishi Desai committed
Commit a28e4db · 1 Parent(s): 49415e1

some more clean up

Files changed (3)
  1. caption.py +2 -2
  2. demo.py +2 -2
  3. prompt.py +2 -1
caption.py CHANGED
@@ -3,7 +3,7 @@ import io
 import os
 from together import Together
 
-MODEL = "meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8"
+MODEL = "deepseek-ai/DeepSeek-V3"
 TRIGGER_WORD = "tr1gger"
 
 def get_system_prompt():
@@ -106,7 +106,7 @@ def caption_single_image(client, img_str):
 
     # Request caption for the image using Llama 4 Maverick
     response = client.chat.completions.create(
-        model="meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8",
+        model=MODEL,
         messages=messages
     )
 
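The caption.py change is a small refactor: the model string is defined once at module level and the call site reads MODEL, so switching from Llama 4 Maverick to DeepSeek-V3 touches a single line. Below is a minimal, self-contained sketch of that pattern with the Together SDK; the prompt content and helper name are illustrative, not the repo's actual captioning prompt, and TOGETHER_API_KEY is assumed to be set in the environment.

import os
from together import Together

MODEL = "deepseek-ai/DeepSeek-V3"  # single switch point for every call site

def ask(client: Together, prompt: str) -> str:
    # Same OpenAI-style chat interface used by caption_single_image.
    response = client.chat.completions.create(
        model=MODEL,
        messages=[{"role": "user", "content": prompt}],
    )
    return response.choices[0].message.content

if __name__ == "__main__":
    client = Together(api_key=os.environ["TOGETHER_API_KEY"])
    print(ask(client, "Describe a sunset in one sentence."))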
 
demo.py CHANGED
@@ -504,8 +504,8 @@ def create_config_area():
     gr.Markdown("""
     **Note about categorization:**
     - Images are grouped by the part of the filename before the last underscore
-    - For example: "character_pose_01.png" and "character_pose_02.png" share the category "character_pose"
-    - When using "Batch by category", similar images are captioned together for more consistent results
+    - For example: "character_pose_1.png" and "character_pose_2.png" share the category "character_pose"
+    - When using "Batch process by category", similar images are captioned together for more consistent results
     """, elem_classes=["category-info"])
 
     caption_btn = gr.Button("Caption Images", variant="primary", interactive=False)
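The categorization rule in that note is simple to sketch: group by the part of the filename (without extension) before the last underscore. The helper below is illustrative only; demo.py's actual grouping code is not part of this diff.

from collections import defaultdict
from pathlib import Path

def group_by_category(filenames):
    groups = defaultdict(list)
    for name in filenames:
        stem = Path(name).stem             # "character_pose_1.png" -> "character_pose_1"
        category = stem.rsplit("_", 1)[0]  # part before the last underscore
        groups[category].append(name)
    return dict(groups)

print(group_by_category(["character_pose_1.png", "character_pose_2.png", "scene_wide_1.png"]))
# {'character_pose': ['character_pose_1.png', 'character_pose_2.png'], 'scene_wide': ['scene_wide_1.png']}
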
prompt.py CHANGED
@@ -3,6 +3,7 @@ import argparse
 from pathlib import Path
 from caption import get_system_prompt, get_together_client, extract_captions
 
+MODEL = "deepseek-ai/DeepSeek-V3"
 
 def optimize_prompt(user_prompt, captions_dir=None, captions_list=None):
     """Optimize a user prompt to follow the same format as training captions.
@@ -43,7 +44,7 @@ def optimize_prompt(user_prompt, captions_dir=None, captions_list=None):
     ]
 
     response = client.chat.completions.create(
-        model="meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8",
+        model=MODEL,
         messages=messages
     )
 
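Based only on the signature and docstring visible in this hunk, a hedged usage sketch of the function being touched; the captions directory path and the assumption that the rewritten prompt comes back as a string are illustrative, not confirmed by the diff.

from prompt import optimize_prompt

# Rewrite a raw prompt so it matches the style of the existing training captions.
optimized = optimize_prompt(
    "tr1gger standing on a cliff at sunset",
    captions_dir="./captions",  # assumed: a folder of caption text files
)
print(optimized)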