pere committed
Commit 31fae67 · 1 Parent(s): e268c1b

Saving train state of step 1000

{checkpoint-1000 → checkpoint-2000}/added_tokens.json RENAMED
File without changes
{checkpoint-1000 → checkpoint-2000}/config.json RENAMED
File without changes
{checkpoint-1000 → checkpoint-2000}/flax_model.msgpack RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a04f4f83603f30a2ef2cda5b555cf138fde78de8b57c0d754bf8de44e7a2e76c
+oid sha256:d2c82c4afd0d59fcd3b10af4269ed282aad4d6b1a59ebd6cf9f8178d4838ffa4
 size 1512831199
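
These entries are Git LFS pointer files: the repository tracks only the blob's sha256 and byte size, while the weights themselves live in LFS storage. A minimal sketch of verifying a downloaded checkpoint against the new pointer, assuming a local checkpoint-2000/ directory and GNU coreutils (both assumptions, not part of this commit):

# Hypothetical integrity check: recompute the file's sha256 and compare
# it with the oid recorded in the LFS pointer above. Paths are illustrative.
EXPECTED="d2c82c4afd0d59fcd3b10af4269ed282aad4d6b1a59ebd6cf9f8178d4838ffa4"
ACTUAL=$(sha256sum checkpoint-2000/flax_model.msgpack | cut -d ' ' -f 1)
[ "$ACTUAL" = "$EXPECTED" ] && echo "OK: pointer matches" || echo "MISMATCH"
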
{checkpoint-1000 → checkpoint-2000}/generation_config.json RENAMED
File without changes
{checkpoint-1000 → checkpoint-2000}/merges.txt RENAMED
File without changes
{checkpoint-1000 → checkpoint-2000}/preprocessor_config.json RENAMED
File without changes
{checkpoint-1000 → checkpoint-2000}/special_tokens_map.json RENAMED
File without changes
{checkpoint-1000 → checkpoint-2000}/tokenizer_config.json RENAMED
File without changes
{checkpoint-1000 → checkpoint-2000}/train_state.msgpack RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7c7ad83a7e9cb5ca3e7b8aba9b6aaebf06c17d0d0ccea63ca51127348a443bff
-size 7564063736
+oid sha256:4432f109f2f5c00cd4efb39749e2e876e584b9473b53018baa10b23491631ac0
+size 1216344064
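
Note that the saved train state shrinks from about 7.6 GB to about 1.2 GB. A plausible explanation, given the --freeze_encoder flag in the script added below, is that optimizer state is now kept only for the trainable decoder parameters; that is an inference from this diff, not something the commit states. A quick coreutils comparison of the two pointer sizes:

# Convert the byte counts from the pointer files above to IEC units.
numfmt --to=iec 7564063736 1216344064
# 7.1G
# 1.2G
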
{checkpoint-1000 → checkpoint-2000}/vocab.json RENAMED
File without changes
run_large_training_lr9e4.sh ADDED
@@ -0,0 +1,41 @@
+#!/usr/bin/env bash
+TOKENIZERS_PARALLELISM=false python3 run_distillation_nodes.py \
+--model_name_or_path "./nb-distil-large-init" \
+--teacher_model_name_or_path "NbAiLab/nb-whisper-large" \
+--train_dataset_name "NbAiLab/annotated_distil_raw_ncc_speech_v7_large" \
+--train_dataset_config_name "" \
+--train_split_name "train" \
+--eval_dataset_name "NbAiLab/annotated_distil_raw_ncc_speech_v7_large" \
+--eval_dataset_config_name "" \
+--eval_split_name "validation" \
+--eval_steps 100 \
+--save_steps 1000 \
+--warmup_steps 1000 \
+--learning_rate 0.0009 \
+--lr_scheduler_type "linear" \
+--logging_steps 100 \
+--save_total_limit 1 \
+--max_steps 50000 \
+--wer_threshold 10 \
+--per_device_train_batch_size 16 \
+--per_device_eval_batch_size 16 \
+--dataloader_num_workers 16 \
+--dtype "bfloat16" \
+--output_dir "./" \
+--do_train \
+--do_eval \
+--use_scan \
+--gradient_checkpointing \
+--overwrite_output_dir \
+--predict_with_generate \
+--freeze_encoder \
+--streaming \
+--use_auth_token \
+--report_to "wandb" \
+--wandb_project "nb-distil-whisper-large-flax2" \
+--wandb_name "flax lr1e9 wer10" \
+--save_code_to_wandb \
+--save_train_state \
+--hub_model_id "NbAiLab/nb-distil-whisper-large-flax4" \
+--push_to_hub
+
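
A sketch of how a long run like this might be launched so it survives an SSH disconnect; nohup and the log path are illustrative choices, not part of this commit:

# Hypothetical launch from the repository root.
chmod +x run_large_training_lr9e4.sh
nohup ./run_large_training_lr9e4.sh > train_lr9e4.log 2>&1 &
tail -f train_lr9e4.log

With --save_steps 1000 and --save_train_state set, checkpoints like the checkpoint-2000 directory renamed above are written every 1000 optimizer steps, and --save_total_limit 1 keeps only the most recent one.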