|=>> 01/16 [16:49:45] - mistral - INFO :: Starting Run: shuffle_local5_ar_AR_randinit_seed53...
|=>> 01/16 [16:49:46] - mistral - INFO :: Setting Random Seed to 53!
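The seed line most likely corresponds to the standard Hugging Face helper; a minimal sketch, assuming that helper is what Mistral calls under the hood (the log does not show the internals):

```python
from transformers import set_seed

set_seed(53)  # seeds Python's `random`, NumPy, and PyTorch (CPU and CUDA) in one call
```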
|=>> 01/16 [16:49:46] - mistral - INFO :: Building Tokenizer and Initializing `gpt2-small` via AutoModel/AutoConfig...
|=>> 01/16 [16:49:46] - mistral - INFO :: Using Configs For Model From: /scratch/ykyao/projects/multilingual-LM/mistral/conf/models/gpt2-small-AR.json ...
|=>> 01/16 [16:49:46] - mistral.models.auto - INFO :: Building Hugging Face GPT2Config from provided configs: {'activation_function': 'gelu_new', 'architectures': ['GPT2LMHeadModel'], 'attn_pdrop': 0.1, 'bos_token_id': 0, 'embd_pdrop': 0.1, 'eos_token_id': 0, 'initializer_range': 0.02, 'layer_norm_epsilon': 1e-05, 'model_type': 'gpt2', 'n_ctx': 1024, 'n_embd': 768, 'n_head': 12, 'n_inner': None, 'n_layer': 12, 'n_positions': 1024, 'reorder_and_upcast_attn': True, 'resid_pdrop': 0.1, 'scale_attn_by_inverse_layer_idx': True, 'scale_attn_weights': True, 'summary_activation': None, 'summary_first_dropout': 0.2, 'summary_proj_to_labels': True, 'summary_type': 'cls_index', 'summary_use_proj': True, 'task_specific_params': {'text-generation': {'do_sample': True, 'max_length': 1024}}, 'torch_dtype': 'float32', 'transformers_version': '4.35.2', 'use_cache': False, 'vocab_size': 64000} ...
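A sketch of the step `mistral.models.auto` logs here: the dict from the log line is passed straight into the Hugging Face constructor. Only a few representative keys are repeated; the full set is in the log line above.

```python
from transformers import GPT2Config

overrides = {
    "vocab_size": 64000,                          # custom multilingual vocabulary
    "n_layer": 12, "n_head": 12, "n_embd": 768,   # GPT-2 small geometry
    "n_positions": 1024,
    "bos_token_id": 0, "eos_token_id": 0,
    "scale_attn_by_inverse_layer_idx": True,
    "reorder_and_upcast_attn": True,              # numerically safer attention under fp16
    "use_cache": False,
}
config = GPT2Config(**overrides)
```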
|=>> 01/16 [16:49:46] - mistral.models.auto - INFO :: Fetching Hugging Face [Fast] AutoTokenizer for Model: `gpt2`...
|=>> 01/16 [16:49:46] - mistral.models.auto - INFO :: Using a Pretokenized Dataset
|=>> 01/16 [16:49:46] - mistral.models.auto - INFO :: Initializing Custom GPT-2 Model from Configuration: `gpt2`...
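The run name ends in `randinit`, and the model is initialized "from Configuration", i.e. with fresh random weights rather than a pretrained checkpoint. A minimal sketch of that distinction:

```python
from transformers import GPT2LMHeadModel

model = GPT2LMHeadModel(config)  # random initialization from the config above
# model = GPT2LMHeadModel.from_pretrained("gpt2")  # what this run deliberately avoids
```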
|=>> 01/16 [16:49:49] - mistral - INFO :: Setting Training Arguments from Quinfig...
|=>> 01/16 [16:49:49] - mistral.args.training - INFO :: Setting Gradient Accumulation Steps = `64` [BSZ: 512 World Size: 1 Device BSZ: 8]
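The bracketed numbers pin down the arithmetic: a global batch of 512 sequences, on 1 device holding 8 sequences per forward pass, requires accumulating 64 micro-batches per optimizer step.

```python
global_bsz, world_size, device_bsz = 512, 1, 8
grad_accum_steps = global_bsz // (world_size * device_bsz)
assert grad_accum_steps == 64
```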
|=>> 01/16 [16:49:50] - mistral - INFO :: Downloading and Preprocessing Dataset `/scratch/ykyao/projects/multilingual-LM/training/multilingual_dataset.py`...
|=>> 01/16 [16:49:51] - datasets_modules.datasets.multilingual_dataset.622766c60cbb8f561ebaa3b973324d0a680a5fd9ae7223c01dd7f116023232f5.multilingual_dataset - INFO :: Generating examples from = /scratch/ykyao/projects//multilingual-LM/data/multilingual/multilingual_data_perturbed/shuffle_local5_ar/train
|=>> 01/16 [16:49:54] - datasets_modules.datasets.multilingual_dataset.622766c60cbb8f561ebaa3b973324d0a680a5fd9ae7223c01dd7f116023232f5.multilingual_dataset - INFO :: Total sentences: 1020103
|=>> 01/16 [16:49:54] - datasets_modules.datasets.multilingual_dataset.622766c60cbb8f561ebaa3b973324d0a680a5fd9ae7223c01dd7f116023232f5.multilingual_dataset - INFO :: Loading pre-tokenized data
|=>> 01/16 [16:49:58] - datasets_modules.datasets.multilingual_dataset.622766c60cbb8f561ebaa3b973324d0a680a5fd9ae7223c01dd7f116023232f5.multilingual_dataset - INFO :: Concatenating tokenized data using EOS token
|=>> 01/16 [16:49:58] - datasets_modules.datasets.multilingual_dataset.622766c60cbb8f561ebaa3b973324d0a680a5fd9ae7223c01dd7f116023232f5.multilingual_dataset - INFO :: Chunking tokens into sublists of 1024
|=>> 01/16 [16:49:59] - datasets_modules.datasets.multilingual_dataset.622766c60cbb8f561ebaa3b973324d0a680a5fd9ae7223c01dd7f116023232f5.multilingual_dataset - INFO :: Writing dataset as space-separated sequences of tokens
|=>> 01/16 [16:50:10] - datasets_modules.datasets.multilingual_dataset.622766c60cbb8f561ebaa3b973324d0a680a5fd9ae7223c01dd7f116023232f5.multilingual_dataset - INFO :: Generating examples from = /scratch/ykyao/projects//multilingual-LM/data/multilingual/multilingual_data_perturbed/shuffle_local5_ar/dev
|=>> 01/16 [16:50:10] - datasets_modules.datasets.multilingual_dataset.622766c60cbb8f561ebaa3b973324d0a680a5fd9ae7223c01dd7f116023232f5.multilingual_dataset - INFO :: Total sentences: 5082
|=>> 01/16 [16:50:10] - datasets_modules.datasets.multilingual_dataset.622766c60cbb8f561ebaa3b973324d0a680a5fd9ae7223c01dd7f116023232f5.multilingual_dataset - INFO :: Loading pre-tokenized data
|=>> 01/16 [16:50:10] - datasets_modules.datasets.multilingual_dataset.622766c60cbb8f561ebaa3b973324d0a680a5fd9ae7223c01dd7f116023232f5.multilingual_dataset - INFO :: Concatenating tokenized data using EOS token
|=>> 01/16 [16:50:10] - datasets_modules.datasets.multilingual_dataset.622766c60cbb8f561ebaa3b973324d0a680a5fd9ae7223c01dd7f116023232f5.multilingual_dataset - INFO :: Chunking tokens into sublists of 1024
|=>> 01/16 [16:50:10] - datasets_modules.datasets.multilingual_dataset.622766c60cbb8f561ebaa3b973324d0a680a5fd9ae7223c01dd7f116023232f5.multilingual_dataset - INFO :: Writing dataset as space-separated sequences of tokens
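The dataset script logs the same three preprocessing steps for train and dev: concatenate the pre-tokenized sentences with the EOS token, chunk the stream into 1024-token blocks, and write each block as space-separated token ids. A sketch of that pipeline, assuming `sentences` is a list of token-id lists and `eos_id` matches the config's `eos_token_id` of 0 (the real implementation lives in `multilingual_dataset.py`, which is not shown here):

```python
def chunk_corpus(sentences, eos_id=0, block_size=1024):
    # 1) Concatenate all tokenized sentences, joining them with the EOS token.
    stream = []
    for sent in sentences:
        stream.extend(sent)
        stream.append(eos_id)
    # 2) Chunk the token stream into sublists of `block_size` (1024 here).
    return [stream[i:i + block_size] for i in range(0, len(stream), block_size)]

def write_space_separated(chunks, path):
    # 3) Write the dataset as space-separated sequences of token ids, one per line.
    with open(path, "w") as f:
        for chunk in chunks:
            f.write(" ".join(map(str, chunk)) + "\n")
```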
|=>> 01/16 [16:50:10] - mistral.corpora.auto - INFO :: Building Tokenized Indexed Dataset for {dataset_id}/{dataset_name}...
|=>> 01/16 [16:50:10] - mistral.corpora.auto - INFO :: Building Indexed Dataset for train
|=>> 01/16 [16:50:36] - mistral.corpora.auto - INFO :: Building Indexed Dataset for validation
|=>> 01/16 [16:50:37] - mistral - INFO :: Initializing Model Trainer...
|=>> 01/16 [16:50:37] - mistral - INFO :: Training Arguments: TrainingArguments(
_n_gpu=1,
adafactor=False,
adam_beta1=0.9,
adam_beta2=0.999,
adam_epsilon=1e-08,
bf16=False,
bf16_full_eval=False,
data_seed=53,
dataloader_drop_last=False,
dataloader_num_workers=0,
dataloader_pin_memory=True,
ddp_bucket_cap_mb=None,
ddp_find_unused_parameters=None,
debug=[],
deepspeed=None,
disable_tqdm=False,
do_eval=True,
do_predict=False,
do_train=True,
eval_accumulation_steps=None,
eval_delay=0,
eval_steps=1000,
evaluation_strategy=IntervalStrategy.STEPS,
fp16=True,
fp16_backend=auto,
fp16_full_eval=False,
fp16_opt_level=O1,
gradient_accumulation_steps=64,
gradient_checkpointing=False,
greater_is_better=None,
group_by_length=False,
half_precision_backend=auto,
hub_model_id=None,
hub_strategy=HubStrategy.EVERY_SAVE,
hub_token=<HUB_TOKEN>,
ignore_data_skip=False,
label_names=None,
label_smoothing_factor=0.0,
learning_rate=0.0006,
length_column_name=length,
load_best_model_at_end=False,
local_rank=-1,
log_level=-1,
log_level_replica=-1,
log_on_each_node=True,
logging_dir=logs,
logging_first_step=True,
logging_nan_inf_filter=True,
logging_steps=50,
logging_strategy=IntervalStrategy.STEPS,
lr_scheduler_type=SchedulerType.LINEAR,
max_grad_norm=1.0,
max_steps=1200,
metric_for_best_model=None,
mp_parameters=,
no_cuda=False,
num_train_epochs=3.0,
optim=OptimizerNames.ADAMW_HF,
output_dir=//scratch/ykyao/projects/multilingual_models/shuffle_local5_ar_AR_randinit/babylm_shuffle_local5_ar_AR_randinit_seed53/runs/shuffle_local5_ar_AR_randinit_seed53,
overwrite_output_dir=False,
past_index=-1,
per_device_eval_batch_size=16,
per_device_train_batch_size=8,
prediction_loss_only=True,
push_to_hub=False,
push_to_hub_model_id=None,
push_to_hub_organization=None,
push_to_hub_token=<PUSH_TO_HUB_TOKEN>,
remove_unused_columns=True,
report_to=[],
resume_from_checkpoint=None,
run_name=shuffle_local5_ar_AR_randinit_seed53,
save_on_each_node=False,
save_steps=1000,
save_strategy=IntervalStrategy.STEPS,
save_total_limit=None,
seed=53,
sharded_ddp=[],
skip_memory_metrics=True,
tf32=None,
tpu_metrics_debug=False,
tpu_num_cores=None,
use_legacy_prediction_loop=False,
warmup_ratio=0.0,
warmup_steps=120,
weight_decay=0.1,
xpu_backend=None,
)
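The dump above can be reproduced (in abridged form) with the standard Hugging Face constructor; only the fields that differ from defaults and matter for this run are repeated, and the long `output_dir` is shortened for readability:

```python
from transformers import TrainingArguments

args = TrainingArguments(
    output_dir=".../runs/shuffle_local5_ar_AR_randinit_seed53",  # abridged path
    do_train=True,
    do_eval=True,
    evaluation_strategy="steps",
    eval_steps=1000,
    save_steps=1000,
    logging_steps=50,
    per_device_train_batch_size=8,
    per_device_eval_batch_size=16,
    gradient_accumulation_steps=64,   # 8 * 64 = 512 sequences per optimizer step
    learning_rate=6e-4,
    weight_decay=0.1,
    lr_scheduler_type="linear",
    warmup_steps=120,                 # 10% of max_steps
    max_steps=1200,                   # overrides num_train_epochs
    fp16=True,
    seed=53,
    run_name="shuffle_local5_ar_AR_randinit_seed53",
)
```

At 1200 optimizer steps of 512 sequences of 1024 tokens each, the run sees roughly 629M training tokens (assuming every sequence is a full block); the linear schedule ramps the learning rate up to 6e-4 over the first 120 steps and decays it to zero at step 1200.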
|=>> 01/16 [16:50:47] - mistral.core.callbacks - INFO :: Setting W&B Project: ykyao
|=>> 01/16 [16:51:38] - mistral - INFO :: Training...
|=>> 01/16 [16:51:38] - mistral.core.callbacks - INFO :: Automatic Weights & Biases logging enabled, to disable set os.environ["WANDB_DISABLED"] = "true"
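As the callback notes, W&B logging is on by default; to turn it off for a run, set the environment variable before the Trainer is constructed:

```python
import os

os.environ["WANDB_DISABLED"] = "true"  # must be set before Trainer initialization
```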
|=>> 01/16 [21:37:19] - mistral - INFO :: ...and that's all folks!
|=>> 01/16 [21:37:19] - mistral - INFO :: Running final evaluation...