Ravindu9904 committed
Commit 659af11 · verified
1 Parent(s): 1b8348c

Upload config.json

Files changed (1)
  1. config.json +15 -250
config.json CHANGED
@@ -1,254 +1,19 @@
 {
-  "_name_or_path": "/data_out/models/checkpoints/vn_balanced_4node_8b_2e_oct_22_2024",
+  "_name_or_path": "/data_out/models/checkpoints/vn_balanced_4node_8b_2e_oct_22_2024/vision_tower",
   "architectures": [
-    "LlavaLlamaModel"
+    "SiglipVisionModel"
   ],
-  "drop_path_rate": 0.0,
-  "fps": 0.0,
-  "hidden_size": 4096,
-  "image_aspect_ratio": "resize",
-  "interpolate_mode": "linear",
-  "llm_cfg": {
-    "_name_or_path": "/data_out/models/checkpoints/vn_balanced_4node_8b_2e_oct_22_2024/llm",
-    "add_cross_attention": false,
-    "architectures": [
-      "LlamaForCausalLM"
-    ],
-    "attention_bias": false,
-    "attention_dropout": 0.0,
-    "bad_words_ids": null,
-    "begin_suppress_tokens": null,
-    "bos_token_id": 128000,
-    "chunk_size_feed_forward": 0,
-    "cross_attention_hidden_size": null,
-    "decoder_start_token_id": null,
-    "diversity_penalty": 0.0,
-    "do_sample": false,
-    "early_stopping": false,
-    "encoder_no_repeat_ngram_size": 0,
-    "eos_token_id": 128001,
-    "exponential_decay_length_penalty": null,
-    "finetuning_task": null,
-    "forced_bos_token_id": null,
-    "forced_eos_token_id": null,
-    "hidden_act": "silu",
-    "hidden_size": 4096,
-    "id2label": {
-      "0": "LABEL_0",
-      "1": "LABEL_1"
-    },
-    "initializer_range": 0.02,
-    "intermediate_size": 14336,
-    "is_decoder": false,
-    "is_encoder_decoder": false,
-    "label2id": {
-      "LABEL_0": 0,
-      "LABEL_1": 1
-    },
-    "length_penalty": 1.0,
-    "max_length": 20,
-    "max_position_embeddings": 8192,
-    "min_length": 0,
-    "model_max_length": 4096,
-    "model_type": "llama",
-    "no_repeat_ngram_size": 0,
-    "num_attention_heads": 32,
-    "num_beam_groups": 1,
-    "num_beams": 1,
-    "num_hidden_layers": 32,
-    "num_key_value_heads": 8,
-    "num_return_sequences": 1,
-    "output_attentions": false,
-    "output_hidden_states": false,
-    "output_scores": false,
-    "pad_token_id": null,
-    "prefix": null,
-    "pretraining_tp": 1,
-    "problem_type": null,
-    "pruned_heads": {},
-    "remove_invalid_values": false,
-    "repetition_penalty": 1.0,
-    "return_dict": true,
-    "return_dict_in_generate": false,
-    "rms_norm_eps": 1e-05,
-    "rope_scaling": null,
-    "rope_theta": 500000.0,
-    "sep_token_id": null,
-    "suppress_tokens": null,
-    "task_specific_params": null,
-    "temperature": 1.0,
-    "tf_legacy_loss": false,
-    "tie_encoder_decoder": false,
-    "tie_word_embeddings": false,
-    "tokenizer_class": null,
-    "tokenizer_model_max_length": 4096,
-    "tokenizer_padding_side": "right",
-    "top_k": 50,
-    "top_p": 1.0,
-    "torch_dtype": "bfloat16",
-    "torchscript": false,
-    "typical_p": 1.0,
-    "use_bfloat16": false,
-    "use_cache": true,
-    "vocab_size": 128257
-  },
-  "mm_hidden_size": 1152,
-  "mm_projector_cfg": {
-    "_name_or_path": "/data_out/models/checkpoints/vn_balanced_4node_8b_2e_oct_22_2024/mm_projector",
-    "add_cross_attention": false,
-    "architectures": [
-      "MultimodalProjector"
-    ],
-    "bad_words_ids": null,
-    "begin_suppress_tokens": null,
-    "bos_token_id": null,
-    "chunk_size_feed_forward": 0,
-    "cross_attention_hidden_size": null,
-    "decoder_start_token_id": null,
-    "diversity_penalty": 0.0,
-    "do_sample": false,
-    "early_stopping": false,
-    "encoder_no_repeat_ngram_size": 0,
-    "eos_token_id": null,
-    "exponential_decay_length_penalty": null,
-    "finetuning_task": null,
-    "forced_bos_token_id": null,
-    "forced_eos_token_id": null,
-    "id2label": {
-      "0": "LABEL_0",
-      "1": "LABEL_1"
-    },
-    "is_decoder": false,
-    "is_encoder_decoder": false,
-    "label2id": {
-      "LABEL_0": 0,
-      "LABEL_1": 1
-    },
-    "length_penalty": 1.0,
-    "max_length": 20,
-    "min_length": 0,
-    "mm_projector_type": "mlp_downsample",
-    "model_type": "v2l_projector",
-    "no_repeat_ngram_size": 0,
-    "num_beam_groups": 1,
-    "num_beams": 1,
-    "num_return_sequences": 1,
-    "output_attentions": false,
-    "output_hidden_states": false,
-    "output_scores": false,
-    "pad_token_id": null,
-    "prefix": null,
-    "problem_type": null,
-    "pruned_heads": {},
-    "remove_invalid_values": false,
-    "repetition_penalty": 1.0,
-    "return_dict": true,
-    "return_dict_in_generate": false,
-    "sep_token_id": null,
-    "suppress_tokens": null,
-    "task_specific_params": null,
-    "temperature": 1.0,
-    "tf_legacy_loss": false,
-    "tie_encoder_decoder": false,
-    "tie_word_embeddings": true,
-    "tokenizer_class": null,
-    "top_k": 50,
-    "top_p": 1.0,
-    "torch_dtype": "bfloat16",
-    "torchscript": false,
-    "typical_p": 1.0,
-    "use_bfloat16": false
-  },
-  "mm_projector_lr": null,
-  "mm_use_im_patch_token": false,
-  "mm_use_im_start_end": false,
-  "mm_vision_select_feature": "cls_patch",
-  "mm_vision_select_layer": -2,
-  "model_dtype": "torch.bfloat16",
-  "model_type": "llava_llama",
-  "num_video_frames": 8,
-  "resume_path": "/data_out/models/checkpoints/vn_balanced_4node_8b_2e_oct_22_2024",
-  "s2": false,
-  "s2_max_split_size": 336,
-  "s2_scales": "336,672,1008",
-  "transformers_version": "4.37.2",
-  "tune_language_model": true,
-  "tune_mm_projector": true,
-  "tune_vision_tower": true,
-  "vision_resolution": -1,
-  "vision_tower_cfg": {
-    "_name_or_path": "/data_out/models/checkpoints/vn_balanced_4node_8b_2e_oct_22_2024/vision_tower",
-    "add_cross_attention": false,
-    "architectures": [
-      "SiglipVisionModel"
-    ],
-    "attention_dropout": 0.0,
-    "bad_words_ids": null,
-    "begin_suppress_tokens": null,
-    "bos_token_id": null,
-    "chunk_size_feed_forward": 0,
-    "cross_attention_hidden_size": null,
-    "decoder_start_token_id": null,
-    "diversity_penalty": 0.0,
-    "do_sample": false,
-    "early_stopping": false,
-    "encoder_no_repeat_ngram_size": 0,
-    "eos_token_id": null,
-    "exponential_decay_length_penalty": null,
-    "finetuning_task": null,
-    "forced_bos_token_id": null,
-    "forced_eos_token_id": null,
-    "hidden_act": "gelu_pytorch_tanh",
-    "hidden_size": 1152,
-    "id2label": {
-      "0": "LABEL_0",
-      "1": "LABEL_1"
-    },
-    "image_size": 384,
-    "intermediate_size": 4304,
-    "is_decoder": false,
-    "is_encoder_decoder": false,
-    "label2id": {
-      "LABEL_0": 0,
-      "LABEL_1": 1
-    },
-    "layer_norm_eps": 1e-06,
-    "length_penalty": 1.0,
-    "max_length": 20,
-    "min_length": 0,
-    "model_type": "siglip_vision_model",
-    "no_repeat_ngram_size": 0,
-    "num_attention_heads": 16,
-    "num_beam_groups": 1,
-    "num_beams": 1,
-    "num_channels": 3,
-    "num_hidden_layers": 27,
-    "num_return_sequences": 1,
-    "output_attentions": false,
-    "output_hidden_states": false,
-    "output_scores": false,
-    "pad_token_id": null,
-    "patch_size": 14,
-    "prefix": null,
-    "problem_type": null,
-    "pruned_heads": {},
-    "remove_invalid_values": false,
-    "repetition_penalty": 1.0,
-    "return_dict": true,
-    "return_dict_in_generate": false,
-    "sep_token_id": null,
-    "suppress_tokens": null,
-    "task_specific_params": null,
-    "temperature": 1.0,
-    "tf_legacy_loss": false,
-    "tie_encoder_decoder": false,
-    "tie_word_embeddings": true,
-    "tokenizer_class": null,
-    "top_k": 50,
-    "top_p": 1.0,
-    "torch_dtype": "bfloat16",
-    "torchscript": false,
-    "typical_p": 1.0,
-    "use_bfloat16": false
-  }
+  "attention_dropout": 0.0,
+  "hidden_act": "gelu_pytorch_tanh",
+  "hidden_size": 1152,
+  "image_size": 384,
+  "intermediate_size": 4304,
+  "layer_norm_eps": 1e-06,
+  "model_type": "siglip_vision_model",
+  "num_attention_heads": 16,
+  "num_channels": 3,
+  "num_hidden_layers": 27,
+  "patch_size": 14,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.37.2"
 }
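
A minimal usage sketch (not part of this commit): the new config.json describes a SigLIP vision encoder, so it can be read with the SiglipVisionConfig/SiglipVisionModel classes available in transformers 4.37+. The repository path below is a placeholder, and building the model from the config alone gives randomly initialized weights.

from transformers import SiglipVisionConfig, SiglipVisionModel

# Placeholder path: replace with the actual repo id or a local directory containing config.json.
config = SiglipVisionConfig.from_pretrained("path/to/this/repo")

# Instantiate the vision tower described by config.json (random init; use
# SiglipVisionModel.from_pretrained(...) instead if the repo also ships weights).
model = SiglipVisionModel(config)
print(config.hidden_size, config.num_hidden_layers, config.patch_size)  # 1152, 27, 14 per this config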