# colnomic-embed-multimodal-7b / training_config.yml
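#
# Fine-tunes Qwen/Qwen2.5-VL-7B-Instruct as a ColQwen2.5 late-interaction retriever
# with LoRA (r=32), 3 epochs, global batch size 512, lr 1e-4, on the ColPali + VDR
# training mix. The file uses colpali_engine's configue-style loader conventions:
# the `()` key names the class to instantiate, `!ext` references a Python symbol,
# `!path` resolves a relative path, and `!import` inlines another YAML file.
# Launch sketch (assumption; adjust the script path to your colpali_engine checkout):
#   accelerate launch scripts/train/train_colbert.py path/to/training_config.yml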
config:
(): colpali_engine.trainer.colmodel_training.ColModelTrainingConfig
output_dir: !path ../../../models/colqwen2_5_train_single_source_3ep_r32_512bs_vdr_7b_1e4
processor:
(): colpali_engine.utils.transformers_wrappers.AllPurposeWrapper
class_to_instanciate: !ext colpali_engine.models.ColQwen2_5_Processor
pretrained_model_name_or_path: "Qwen/Qwen2.5-VL-7B-Instruct" # "./models/paligemma-3b-mix-448"
max_num_visual_tokens: 1280
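    # Caps the visual-token sequence per image; Qwen2.5-VL's dynamic-resolution
    # tiling otherwise scales token count with image size.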
# num_image_tokens: 2048
# max_length: 50
model:
(): colpali_engine.utils.transformers_wrappers.AllPurposeWrapper
class_to_instanciate: !ext colpali_engine.models.ColQwen2_5
pretrained_model_name_or_path: "Qwen/Qwen2.5-VL-7B-Instruct"
torch_dtype: !ext torch.bfloat16
use_cache: false
attn_implementation: "flash_attention_2"
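    # flash_attention_2 requires the flash-attn package and a half-precision
    # dtype (bfloat16 is set above).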
# device_map: "auto"
# quantization_config:
# (): transformers.BitsAndBytesConfig
# load_in_4bit: true
# bnb_4bit_quant_type: "nf4"
# bnb_4bit_compute_dtype: "bfloat16"
# bnb_4bit_use_double_quant: true
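    # The commented block above is an optional QLoRA-style path: load the base
    # model in 4-bit NF4 via bitsandbytes and train only the LoRA adapters on top.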
dataset_loading_func: !ext colpali_engine.utils.dataset_transformation.load_train_set_colpali_vdr
eval_dataset_loader: !import ../data/test_data.yaml
# max_length: 50
run_eval: true
loss_func:
(): colpali_engine.loss.late_interaction_losses.ColbertPairwiseCELoss
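    # Late-interaction objective: queries and pages are scored with ColBERT-style
    # MaxSim over token embeddings, and each query's positive page is contrasted
    # against its hardest in-batch negative (pairwise softplus loss, as
    # implemented in colpali_engine).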
tr_args:
(): transformers.training_args.TrainingArguments
output_dir: null
overwrite_output_dir: true
num_train_epochs: 3
per_device_train_batch_size: 512
ddp_find_unused_parameters: false
gradient_checkpointing: true
gradient_checkpointing_kwargs: { "use_reentrant": false }
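    # use_reentrant: false selects PyTorch's recommended non-reentrant
    # checkpointing, which composes cleanly with DDP.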
    # hardware note: 6 nodes x 8 GPUs = 48 GPUs
# gradient_accumulation_steps: 4
per_device_eval_batch_size: 16
eval_strategy: "no"
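    # HF Trainer's own eval loop is off; run_eval above drives the engine's
    # retrieval evaluation instead (assumption based on ColModelTrainingConfig).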
dataloader_num_workers: 0
# bf16: true
accelerator_config:
split_batches: true
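      # With split_batches, Accelerate treats per_device_train_batch_size as the
      # total batch to split across processes, so the global batch stays 512
      # regardless of GPU count.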
save_steps: 500
logging_steps: 10
eval_steps: 100
warmup_ratio: 0.01
learning_rate: 1e-4
save_total_limit: 1
# resume_from_checkpoint: true
optim: "paged_adamw_8bit"
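    # bitsandbytes paged 8-bit AdamW: quantized optimizer state, paged to
    # avoid OOM spikes during memory peaks.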
# wandb logging
# wandb_project: "mllm"
run_name: "colqwen2_5_train_single_source_3ep_r32_512bs_vdr_7b_1e4"
report_to: "wandb"
peft_config:
(): peft.LoraConfig
r: 32
lora_alpha: 32
lora_dropout: 0.1
init_lora_weights: "gaussian"
bias: "none"
task_type: "FEATURE_EXTRACTION"
target_modules: '(.*(model).*(down_proj|gate_proj|up_proj|k_proj|q_proj|v_proj|o_proj).*$|.*(custom_text_proj).*$)'
# target_modules: '(.*(language_model).*(down_proj|gate_proj|up_proj|k_proj|q_proj|v_proj|o_proj).*$|.*(custom_text_proj).*$)'
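  # The regex applies LoRA to every attention and MLP projection under `model`
  # plus the custom_text_proj embedding head; the commented variant scopes the
  # match to the `language_model` submodule, leaving the vision tower untouched.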