D-eon committed
Commit 07ba71f · verified · 1 parent: 0a79e29

Upload folder using huggingface_hub

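The commit message above indicates the folder was pushed with huggingface_hub's upload_folder. As a rough sketch of pulling the same files back down with snapshot_download (the repo id below is a placeholder, since the actual repository id is not shown in this commit):

# Sketch only: download the uploaded folder with huggingface_hub.
# "D-eon/MolmoE-1B-0924" is a placeholder repo id, not confirmed by this commit.
from huggingface_hub import snapshot_download

local_dir = snapshot_download(
    repo_id="D-eon/MolmoE-1B-0924",             # placeholder
    allow_patterns=["model/MolmoE-1B-0924/*"],  # only the files added in this commit
)
print(local_dir)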
model/MolmoE-1B-0924/config.yaml ADDED
@@ -0,0 +1,805 @@
+ run_name: multitask_train
+ seed: 6198
+ epoch: null
+ dry_run: false
+ model:
+ d_model: 2048
+ n_heads: 16
+ n_kv_heads: null
+ qkv_bias: false
+ clip_qkv: null
+ n_layers: 16
+ mlp_ratio: 1
+ mlp_hidden_size: null
+ activation_type: swiglu
+ block_type: moe
+ block_group_size: 1
+ alibi: false
+ alibi_bias_max: 8.0
+ rope: true
+ rope_full_precision: true
+ rope_theta: 10000.0
+ rope_impl: llama
+ vision_backbone:
+ image_model_type: openai
+ image_default_input_size:
+ - 336
+ - 336
+ image_patch_size: 14
+ image_pos_patch_size: 14
+ image_emb_dim: 1024
+ image_num_heads: 16
+ image_num_key_value_heads: 16
+ image_num_layers: 23
+ image_head_dim: 64
+ image_mlp_dim: 4096
+ image_mlp_activations: quick_gelu
+ image_dropout_rate: 0.0
+ image_num_pos: 577
+ image_norm_eps: 1.0e-05
+ attention_dropout: 0.0
+ residual_dropout: 0.0
+ initializer_range: 0.02
+ fsdp_wrap: false
+ resize_mode: default
+ vit_load_path: /mnt/data/zhangkaidong/lizihang/weka/oe-training-default/mm-olmo/pretrained_image_encoders/vit-l-14-336.pt
+ llm_load_path: /mnt/data/zhangkaidong/lizihang/weka/oe-training-default/mm-olmo/olmoe/model.safetensors
+ low_cpu_fsdp: true
+ attention_type: sdpa
+ float32_attention: true
+ attention_dropout: 0.0
+ response_attention_dropout: 0.0
+ multi_query_attention: null
+ attention_layer_norm: true
+ residual_dropout: 0.1
+ response_residual_dropout: 0.0
+ embedding_dropout: 0.0
+ layer_norm_type: rms
+ layer_norm_with_affine: true
+ layer_norm_eps: 1.0e-05
+ attention_layer_norm_with_affine: true
+ max_sequence_length: 4096
+ max_position_embeddings: 32768
+ include_bias: false
+ bias_for_layer_norm: false
+ scale_logits: false
+ vocab_size: 50280
+ embedding_size: 50304
+ additional_vocab_size: 128
+ new_embedding_init_range: 0.02
+ weight_tying: false
+ pad_token_id: 1
+ init_device: meta
+ init_fn: normal
+ init_std: 0.02
+ init_cutoff_factor: 3.0
+ norm_after: false
+ precision: amp_bf16
+ moe_num_experts: 64
+ moe_top_k: 8
+ moe_mlp_impl: sparse
+ moe_log_expert_assignment: false
+ moe_shared_expert: false
+ moe_lbl_in_fp32: false
+ moe_interleave: false
+ moe_loss_weight: 0.0
+ moe_zloss_weight: 0.0
+ moe_dropless: true
+ moe_capacity_factor: 1.25
+ max_crops: 12
+ crop_mode: overlap-and-resize-c2
+ do_random_scale: false
+ use_col_tokens: true
+ prompt_type: uber_model
+ system_prompt_kind: demo_or_style
+ message_formatting: role
+ always_start_with_space: true
+ prompt_override: null
+ default_inference_len: 65
+ overlap_margins:
+ - 4
+ - 4
+ image_padding_embed: pad_and_partial_pad
+ vit_layers:
+ - -2
+ - -9
+ image_pooling_h: 2
+ image_pooling_w: 2
+ image_pooling_2d: attention_meanq
+ image_projector: mlp
+ image_feature_dropout: 0.0
+ use_cls_feature: false
+ fix_image_input_idx: 2
+ unconditioned: false
+ pad_to: null
+ initializer_range: 0.02
+ pad_tokenizer: false
+ normalize_input_embeds: false
+ use_position_ids: true
+ query_pre_attn_scalar: 224
+ attn_logit_softcapping: null
+ final_logit_softcapping: null
+ head_dim: null
+ tokenizer:
+ identifier: mm:hf-allenai/OLMoE-1B-7B-0924
+ truncate_direction: right
+ tokenizer_adds_space: false
+ tokenizer_dir: /mnt/data/zhangkaidong/lizihang/weka/oe-training-default/mm-olmo/tokenizer
+ olmo_bos_token_id: 50279
+ olmo_eos_token_id: 50279
+ loss_token_weighting: root_subsegments
+ gin_bindings: null
+ ft_llm: true
+ ft_vit: true
+ ft_connector: true
+ ft_embedding: lm_head
+ optimizer:
+ name: adamw
+ learning_rate: 0.0001
+ weight_decay: 0.01
+ betas:
+ - 0.9
+ - 0.95
+ eps: 1.0e-05
+ connector_learning_rate: 5.0e-06
+ vit_learning_rate: 5.0e-06
+ llm_learning_rate: 2.0e-05
+ connector_weight_decay: 0.0
+ vit_weight_decay: 0.0
+ llm_weight_decay: 0.0
+ connector_betas:
+ - 0.9
+ - 0.95
+ vit_betas:
+ - 0.9
+ - 0.95
+ llm_betas:
+ - 0.9
+ - 0.95
+ connector_eps: 1.0e-06
+ vit_eps: 1.0e-06
+ llm_eps: 1.0e-06
+ no_decay_norm_and_bias: null
+ decay_norm_and_bias: false
+ decay_embeddings: false
+ metrics_log_interval: 20
+ scheduler:
+ name: multimodal
+ units: steps
+ t_warmup: 100
+ t_max: null
+ alpha_f: 0.1
+ connector_t_warmup: 200
+ vit_t_warmup: 200
+ llm_t_warmup: 200
+ grad_clip_warmup_steps: null
+ grad_clip_warmup_factor: null
+ warmup_min_lr: 0.0
+ data:
+ multi_modal: true
+ mixture_or_task_name: null
+ paths: null
+ datasets: null
+ label_mask_paths: null
+ pad_direction: right
+ generate_attention_mask: false
+ num_workers: 0
+ drop_last: true
+ pin_memory: false
+ prefetch_factor: null
+ persistent_workers: false
+ timeout: 0
+ seed: null
+ instance_filter: null
+ mixture:
+ user_qa: 3.772204620047811
+ cockatoo_712k_sept6: 3.161736404536387
+ synthetic_qa_v3_as_user_qa: 5.70612401246295
+ point_qa: 2.359934962952852
+ coco_2014_vqa_multi: 3.067124765028162
+ text_vqa: 1.982948627824974
+ okvqa: 1.011810438733119
+ chart_qa_weighted: 1.793272988041348
+ doc_qa: 2.1176584276768065
+ info_qa: 1.6495950748744952
+ ai2_diagram_v2_mix_transparent: 1.307415575130731
+ a_okvqa_mc: 1.3921930323623322
+ a_okvqa_da: 1.3921930323623322
+ android_control: 2.91381420538637
+ science_qa_img: 0.8405938747465043
+ tabwmp_da: 1.618754903035765
+ st_qa: 1.6871928539865735
+ tally_qa: 3.8873680368227395
+ clocks: 5.330044888030191
+ scifi_charts_qa: 5.152496802868815
+ scifi_table_qa: 3.251519607143135
+ scifi_document_qa: 4.024928615904329
+ scifi_diagram_qa: 1.9394919410872649
+ dv_qa: 1.0660089776060382
+ figure_qa: 1.0660089776060382
+ plot_qa: 1.507564353741936
+ pointing: 8.963869696866821
+ pointing_high_freq: 5.438911590336815
+ fast_flickr_count_qa_pointing: 3.0972187127963617
+ point_count: 8.963869696866821
+ point_count_high_freq: 5.438911590336815
+ fast_flickr_count_qa_point_count: 3.0972187127963617
+ sequence_length: 2304
+ shuffle: true
+ for_inference: false
+ split: train
+ use_memory_cache: false
+ num_epochs: null
+ shuffle_buffer_size: 200
+ per_node_data_loader: null
+ restore_dataloader: true
+ fast_forward_batches: null
+ evaluators: []
+ eval_interval: 2000
+ inf_eval_interval: 2000
+ inf_evaluators:
+ - label: chart_qa
+ type: multi_modal
+ data:
+ multi_modal: true
+ mixture_or_task_name: chart_qa
+ paths: null
+ datasets: null
+ label_mask_paths: null
+ pad_direction: right
+ generate_attention_mask: false
+ num_workers: 0
+ drop_last: false
+ pin_memory: false
+ prefetch_factor: null
+ persistent_workers: false
+ timeout: 0
+ seed: null
+ instance_filter: null
+ mixture: null
+ sequence_length: 1792
+ shuffle: false
+ for_inference: true
+ split: validation
+ use_memory_cache: false
+ num_epochs: 1
+ shuffle_buffer_size: 1000
+ per_node_data_loader: null
+ device_eval_batch_size: null
+ subset_num_batches: -1
+ max_new_tokens: 12
+ mm_evaluator:
+ cider: ''
+ num_wandb_examples: 32
+ ptb_tokenizer: false
+ save_html: 0
+ save_predictions: null
+ named_entity_eval: false
+ save_tokens: false
+ vqa_eval: scifi_relaxed_correctness,relaxed_correctness,em
+ n_to_log: 0
+ mme_eval: false
+ mmbench_eval: false
+ sugar_crepe_eval: false
+ pointing_eval: false
+ count_eval: false
+ point_count_eval: false
+ refexp_eval: false
+ pointing: false
+ android_eval: false
+ clock_eval: false
+ gpt_eval: null
+ save_dir: null
+ save_to_checkpoint_dir: false
+ eval_name: null
+ skip_if_metrics_cached: true
+ - label: info_qa
+ type: multi_modal
+ data:
+ multi_modal: true
+ mixture_or_task_name: info_qa
+ paths: null
+ datasets: null
+ label_mask_paths: null
+ pad_direction: right
+ generate_attention_mask: false
+ num_workers: 0
+ drop_last: true
+ pin_memory: false
+ prefetch_factor: null
+ persistent_workers: false
+ timeout: 0
+ seed: null
+ instance_filter: null
+ mixture: null
+ sequence_length: 1792
+ shuffle: false
+ for_inference: true
+ split: validation
+ use_memory_cache: false
+ num_epochs: null
+ shuffle_buffer_size: 200
+ per_node_data_loader: null
+ device_eval_batch_size: null
+ subset_num_batches: 8
+ max_new_tokens: 12
+ mm_evaluator:
+ cider: ''
+ num_wandb_examples: 32
+ ptb_tokenizer: false
+ save_html: 0
+ save_predictions: null
+ named_entity_eval: false
+ save_tokens: false
+ vqa_eval: ansl,em
+ n_to_log: 0
+ mme_eval: false
+ mmbench_eval: false
+ sugar_crepe_eval: false
+ pointing_eval: false
+ count_eval: false
+ point_count_eval: false
+ refexp_eval: false
+ pointing: false
+ android_eval: false
+ clock_eval: false
+ gpt_eval: null
+ save_dir: null
+ save_to_checkpoint_dir: false
+ eval_name: null
+ skip_if_metrics_cached: true
+ - label: doc_qa
+ type: multi_modal
+ data:
+ multi_modal: true
+ mixture_or_task_name: doc_qa
+ paths: null
+ datasets: null
+ label_mask_paths: null
+ pad_direction: right
+ generate_attention_mask: false
+ num_workers: 0
+ drop_last: true
+ pin_memory: false
+ prefetch_factor: null
+ persistent_workers: false
+ timeout: 0
+ seed: null
+ instance_filter: null
+ mixture: null
+ sequence_length: 1792
+ shuffle: false
+ for_inference: true
+ split: validation
+ use_memory_cache: false
+ num_epochs: null
+ shuffle_buffer_size: 200
+ per_node_data_loader: null
+ device_eval_batch_size: null
+ subset_num_batches: 8
+ max_new_tokens: 12
+ mm_evaluator:
+ cider: ''
+ num_wandb_examples: 32
+ ptb_tokenizer: false
+ save_html: 0
+ save_predictions: null
+ named_entity_eval: false
+ save_tokens: false
+ vqa_eval: ansl,em
+ n_to_log: 0
+ mme_eval: false
+ mmbench_eval: false
+ sugar_crepe_eval: false
+ pointing_eval: false
+ count_eval: false
+ point_count_eval: false
+ refexp_eval: false
+ pointing: false
+ android_eval: false
+ clock_eval: false
+ gpt_eval: null
+ save_dir: null
+ save_to_checkpoint_dir: false
+ eval_name: null
+ skip_if_metrics_cached: true
+ - label: a_okvqa_da
+ type: multi_modal
+ data:
+ multi_modal: true
+ mixture_or_task_name: a_okvqa_da
+ paths: null
+ datasets: null
+ label_mask_paths: null
+ pad_direction: right
+ generate_attention_mask: false
+ num_workers: 0
+ drop_last: false
+ pin_memory: false
+ prefetch_factor: null
+ persistent_workers: false
+ timeout: 0
+ seed: null
+ instance_filter: null
+ mixture: null
+ sequence_length: 1792
+ shuffle: false
+ for_inference: true
+ split: validation
+ use_memory_cache: false
+ num_epochs: 1
+ shuffle_buffer_size: 1000
+ per_node_data_loader: null
+ device_eval_batch_size: null
+ subset_num_batches: -1
+ max_new_tokens: 12
+ mm_evaluator:
+ cider: ''
+ num_wandb_examples: 32
+ ptb_tokenizer: false
+ save_html: 0
+ save_predictions: null
+ named_entity_eval: false
+ save_tokens: false
+ vqa_eval: a_okvqa_score
+ n_to_log: 0
+ mme_eval: false
+ mmbench_eval: false
+ sugar_crepe_eval: false
+ pointing_eval: false
+ count_eval: false
+ point_count_eval: false
+ refexp_eval: false
+ pointing: false
+ android_eval: false
+ clock_eval: false
+ gpt_eval: null
+ save_dir: null
+ save_to_checkpoint_dir: false
+ eval_name: null
+ skip_if_metrics_cached: true
+ - label: ai2_diagram
+ type: multi_modal
+ data:
+ multi_modal: true
+ mixture_or_task_name: ai2_diagram_v2_mix_transparent
+ paths: null
+ datasets: null
+ label_mask_paths: null
+ pad_direction: right
+ generate_attention_mask: false
+ num_workers: 0
+ drop_last: false
+ pin_memory: false
+ prefetch_factor: null
+ persistent_workers: false
+ timeout: 0
+ seed: null
+ instance_filter: null
+ mixture: null
+ sequence_length: 1792
+ shuffle: false
+ for_inference: true
+ split: validation
+ use_memory_cache: false
+ num_epochs: 1
+ shuffle_buffer_size: 1000
+ per_node_data_loader: null
+ device_eval_batch_size: null
+ subset_num_batches: -1
+ max_new_tokens: 12
+ mm_evaluator:
+ cider: ''
+ num_wandb_examples: 32
+ ptb_tokenizer: false
+ save_html: 0
+ save_predictions: null
+ named_entity_eval: false
+ save_tokens: false
+ vqa_eval: mc_ai2d_opaque,mc_ai2d_transparent
+ n_to_log: 0
+ mme_eval: false
+ mmbench_eval: false
+ sugar_crepe_eval: false
+ pointing_eval: false
+ count_eval: false
+ point_count_eval: false
+ refexp_eval: false
+ pointing: false
+ android_eval: false
+ clock_eval: false
+ gpt_eval: null
+ save_dir: null
+ save_to_checkpoint_dir: false
+ eval_name: null
+ skip_if_metrics_cached: true
+ - label: clocks
+ type: multi_modal
+ data:
+ multi_modal: true
+ mixture_or_task_name: clocks
+ paths: null
+ datasets: null
+ label_mask_paths: null
+ pad_direction: right
+ generate_attention_mask: false
+ num_workers: 0
+ drop_last: true
+ pin_memory: false
+ prefetch_factor: null
+ persistent_workers: false
+ timeout: 0
+ seed: null
+ instance_filter: null
+ mixture: null
+ sequence_length: 1792
+ shuffle: false
+ for_inference: true
+ split: validation
+ use_memory_cache: false
+ num_epochs: null
+ shuffle_buffer_size: 200
+ per_node_data_loader: null
+ device_eval_batch_size: null
+ subset_num_batches: 8
+ max_new_tokens: 12
+ mm_evaluator:
+ cider: ''
+ num_wandb_examples: 32
+ ptb_tokenizer: false
+ save_html: 0
+ save_predictions: null
+ named_entity_eval: false
+ save_tokens: false
+ vqa_eval: ''
+ n_to_log: 0
+ mme_eval: false
+ mmbench_eval: false
+ sugar_crepe_eval: false
+ pointing_eval: false
+ count_eval: false
+ point_count_eval: false
+ refexp_eval: false
+ pointing: false
+ android_eval: false
+ clock_eval: true
+ gpt_eval: null
+ save_dir: null
+ save_to_checkpoint_dir: false
+ eval_name: null
+ skip_if_metrics_cached: true
+ - label: android_control_ll
+ type: multi_modal
+ data:
+ multi_modal: true
+ mixture_or_task_name: android_control_ll
+ paths: null
+ datasets: null
+ label_mask_paths: null
+ pad_direction: right
+ generate_attention_mask: false
+ num_workers: 0
+ drop_last: false
+ pin_memory: false
+ prefetch_factor: null
+ persistent_workers: false
+ timeout: 0
+ seed: null
+ instance_filter: null
+ mixture: null
+ sequence_length: 1792
+ shuffle: false
+ for_inference: true
+ split: validation
+ use_memory_cache: false
+ num_epochs: 1
+ shuffle_buffer_size: 1000
+ per_node_data_loader: null
+ device_eval_batch_size: null
+ subset_num_batches: -1
+ max_new_tokens: 12
+ mm_evaluator:
+ cider: ''
+ num_wandb_examples: 32
+ ptb_tokenizer: false
+ save_html: 0
+ save_predictions: null
+ named_entity_eval: false
+ save_tokens: false
+ vqa_eval: ''
+ n_to_log: 0
+ mme_eval: false
+ mmbench_eval: false
+ sugar_crepe_eval: false
+ pointing_eval: false
+ count_eval: false
+ point_count_eval: false
+ refexp_eval: false
+ pointing: false
+ android_eval: true
+ clock_eval: false
+ gpt_eval: null
+ save_dir: null
+ save_to_checkpoint_dir: false
+ eval_name: null
+ skip_if_metrics_cached: true
+ - label: pointing_test
+ type: multi_modal
+ data:
+ multi_modal: true
+ mixture_or_task_name: pointing_test
+ paths: null
+ datasets: null
+ label_mask_paths: null
+ pad_direction: right
+ generate_attention_mask: false
+ num_workers: 0
+ drop_last: false
+ pin_memory: false
+ prefetch_factor: null
+ persistent_workers: false
+ timeout: 0
+ seed: null
+ instance_filter: null
+ mixture: null
+ sequence_length: 1792
+ shuffle: false
+ for_inference: true
+ split: test
+ use_memory_cache: false
+ num_epochs: 1
+ shuffle_buffer_size: 1000
+ per_node_data_loader: null
+ device_eval_batch_size: null
+ subset_num_batches: -1
+ max_new_tokens: 384
+ mm_evaluator:
+ cider: ''
+ num_wandb_examples: 32
+ ptb_tokenizer: false
+ save_html: 0
+ save_predictions: null
+ named_entity_eval: false
+ save_tokens: false
+ vqa_eval: ''
+ n_to_log: 0
+ mme_eval: false
+ mmbench_eval: false
+ sugar_crepe_eval: false
+ pointing_eval: false
+ count_eval: false
+ point_count_eval: false
+ refexp_eval: false
+ pointing: true
+ android_eval: false
+ clock_eval: false
+ gpt_eval: null
+ save_dir: null
+ save_to_checkpoint_dir: false
+ eval_name: null
+ skip_if_metrics_cached: true
+ - label: countbench_qa
+ type: multi_modal
+ data:
+ multi_modal: true
+ mixture_or_task_name: countbench_qa
+ paths: null
+ datasets: null
+ label_mask_paths: null
+ pad_direction: right
+ generate_attention_mask: false
+ num_workers: 0
+ drop_last: false
+ pin_memory: false
+ prefetch_factor: null
+ persistent_workers: false
+ timeout: 0
+ seed: null
+ instance_filter: null
+ mixture: null
+ sequence_length: 1792
+ shuffle: false
+ for_inference: true
+ split: huggingface
+ use_memory_cache: false
+ num_epochs: 1
+ shuffle_buffer_size: 1000
+ per_node_data_loader: null
+ device_eval_batch_size: null
+ subset_num_batches: -1
+ max_new_tokens: 384
+ mm_evaluator:
+ cider: ''
+ num_wandb_examples: 32
+ ptb_tokenizer: false
+ save_html: 0
+ save_predictions: null
+ named_entity_eval: false
+ save_tokens: false
+ vqa_eval: ''
+ n_to_log: 0
+ mme_eval: false
+ mmbench_eval: false
+ sugar_crepe_eval: false
+ pointing_eval: false
+ count_eval: false
+ point_count_eval: true
+ refexp_eval: false
+ pointing: false
+ android_eval: false
+ clock_eval: false
+ gpt_eval: null
+ save_dir: null
+ save_to_checkpoint_dir: false
+ eval_name: null
+ skip_if_metrics_cached: true
+ save_folder: /mnt/data/zhangkaidong/lizihang/weka/oe-training-default/chrisc/cockatoo/models/uber-model-v11/olmoe-5520-3.2-synthetic
+ remote_save_folder: null
+ canceled_check_interval: 50
+ save_interval: 4000
+ save_interval_unsharded: 30000
+ save_interval_ephemeral: null
+ save_num_checkpoints_to_keep: 1
+ save_num_unsharded_checkpoints_to_keep: -1
+ save_overwrite: true
+ force_save_unsharded: false
+ no_pre_train_checkpoint: true
+ initial_model_checkpoint: /mnt/data/zhangkaidong/lizihang/weka/oe-training-default/mm-olmoe/runs/mm-olmoe-22k-8gpus/step22300-unsharded
+ load_model_config: null
+ load_path: null
+ load_path_sharded_checkpointer: null
+ reset_optimizer_state: false
+ reset_trainer_state: false
+ save_dataloader_state: false
+ reset_dataloader_state: false
+ sharded_checkpointer: torch_legacy
+ new_style_checkpoints: null
+ max_duration: 30000
+ global_train_batch_size: 256
+ device_train_batch_size: 4
+ device_train_microbatch_size: 2
+ device_eval_batch_size: 4
+ eval_subset_num_batches: 8
+ eval_on_load: false
+ device_inf_eval_batch_size: 4
+ inf_eval_subset_num_batches: -1
+ device_train_grad_accum: 2
+ max_grad_norm: 1.0
+ batch_divisor: global_batch
+ max_grad_norm_ratio: null
+ precision: amp_bf16
+ wandb:
+ project: cockatoo
+ entity: prior-ai2
+ group: uber-model-v11
+ name: olmoe-5520-3.2-synthetic
+ tags:
+ - watching
+ log_artifacts: false
+ rank_zero_only: true
+ log_interval: 20
+ speed_monitor:
+ window_size: 20
+ gpu_flops_available: null
+ console_log_interval: 20
+ gen1_gc_interval: 1
+ compile: null
+ fsdp:
+ use_orig_params: true
+ sharding_strategy: FULL_SHARD
+ wrapping_strategy: by_block_and_size
+ precision: float
+ hybrid_sharding_num_model_replicas: null
+ softmax_auxiliary_loss: true
+ softmax_auxiliary_loss_scale: 0.0001
+ time_limit: null
+ extra_steps_after_cancel: 10
+ early_stopping_factor: null
+ save_data_indices: true
+ python_profiling: false
+ torch_profiling: false
+ stop_at: 30000
+ stop_after: null
+ activation_checkpointing: one_in_two
+ fused_loss: null
+ tfds_dir: /mnt/data/zhangkaidong/lizihang/weka/oe-training-default/mm-olmo/tensorflow_datasets
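Note that the diff rendering above flattens the YAML indentation; the file itself stores nested sections (model, optimizer, scheduler, data, inf_evaluators). A minimal sketch of reading a few of the headline values with PyYAML, assuming the file has been downloaded locally and keeps that original nesting:

# Sketch only: inspect the uploaded config.yaml with PyYAML.
# The local path is an assumption; the nested keys assume the file preserves
# the structure that the diff viewer flattens above.
import yaml

with open("model/MolmoE-1B-0924/config.yaml") as f:
    cfg = yaml.safe_load(f)

print(cfg["run_name"])                  # multitask_train
print(cfg["model"]["d_model"])          # 2048
print(cfg["model"]["moe_num_experts"])  # 64 experts, top-8 routing
print(cfg["data"]["sequence_length"])   # 2304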
model/MolmoE-1B-0924/model.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:78da38d2d48d54ae6e35a3a7816d434c0467a5ae5bfa45e64b214686b837ad78
+ size 28887783270
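model.pt is stored as a Git LFS pointer: the three lines above record the spec version, the SHA-256 of the real payload, and its size (28,887,783,270 bytes, roughly 28.9 GB). A small sketch for checking a downloaded copy against that pointer, where the local path is an assumption:

# Sketch only: verify a downloaded model.pt against the LFS pointer fields above.
import hashlib
import os

path = "model/MolmoE-1B-0924/model.pt"  # assumed local location
expected_oid = "78da38d2d48d54ae6e35a3a7816d434c0467a5ae5bfa45e64b214686b837ad78"
expected_size = 28887783270

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha.update(chunk)

assert os.path.getsize(path) == expected_size, "size does not match LFS pointer"
assert sha.hexdigest() == expected_oid, "sha256 does not match LFS pointer"
print("model.pt matches the Git LFS pointer")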