YufeiWeng committed on
Commit a12f688 · verified · 1 Parent(s): 93d46de

Training in progress, epoch 1

Files changed (4):
  1. command.txt +5 -5
  2. config.json +11 -5
  3. model.safetensors +2 -2
  4. training_args.bin +2 -2
command.txt CHANGED
@@ -1,7 +1,7 @@
  python run_image_classification.py \
  --train_dir /trunk/shared/eebo_data/images_cropped/ \
  --validation_dir /trunk/shared/eebo_data/images_cropped/ \
- --output_dir ./default_model/ \
+ --output_dir ./microsoft_dit/ \
  --remove_unused_columns False \
  --label_column_name labels \
  --do_train \
@@ -9,9 +9,9 @@ python run_image_classification.py \
  --push_to_hub \
  --push_to_hub_model_id donut-base-beans \
  --learning_rate 3e-5 \
- --num_train_epochs 10 \
- --per_device_train_batch_size 64 \
- --per_device_eval_batch_size 64 \
+ --num_train_epochs 5 \
+ --per_device_train_batch_size 32 \
+ --per_device_eval_batch_size 32 \
  --logging_strategy steps \
  --logging_steps 10 \
  --eval_strategy epoch \
@@ -22,4 +22,4 @@ python run_image_classification.py \
  --ignore_mismatched_sizes \
  --image_column_name image_url \
  --label_column_name label \
- --resume_from_checkpoint /trunk2/yufei/summer24/transformers/examples/pytorch/image-classification/default_model/checkpoint-67330
+ --model_name_or_path microsoft/dit-base-finetuned-rvlcdip
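
The new command fine-tunes microsoft/dit-base-finetuned-rvlcdip for 5 epochs at per-device batch size 32 and writes checkpoints to ./microsoft_dit/. A minimal inference sketch against that output directory (assumptions: the image processor was saved alongside the model, and page.jpg is a placeholder sample image, not part of this commit):

# Sketch: classify one image with the checkpoint saved to ./microsoft_dit/.
from PIL import Image
from transformers import AutoImageProcessor, AutoModelForImageClassification

processor = AutoImageProcessor.from_pretrained("./microsoft_dit/")
model = AutoModelForImageClassification.from_pretrained("./microsoft_dit/")

image = Image.open("page.jpg").convert("RGB")  # hypothetical sample image
inputs = processor(images=image, return_tensors="pt")
logits = model(**inputs).logits
print(model.config.id2label[logits.argmax(-1).item()])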
config.json CHANGED
@@ -1,5 +1,5 @@
  {
- "_name_or_path": "microsoft/beit-base-patch16-224-pt22k-ft22k",
+ "_name_or_path": "microsoft/dit-base-finetuned-rvlcdip",
  "add_fpn": false,
  "architectures": [
  "BeitForImageClassification"
@@ -30,10 +30,16 @@
  "num_channels": 3,
  "num_hidden_layers": 12,
  "out_features": [
- "stage12"
+ "stage3",
+ "stage5",
+ "stage7",
+ "stage11"
  ],
  "out_indices": [
- 12
+ 3,
+ 5,
+ 7,
+ 11
  ],
  "patch_size": 16,
  "pool_scales": [
@@ -62,11 +68,11 @@
  ],
  "torch_dtype": "float32",
  "transformers_version": "4.43.0.dev0",
- "use_absolute_position_embeddings": false,
+ "use_absolute_position_embeddings": true,
  "use_auxiliary_head": true,
  "use_mask_token": false,
  "use_mean_pooling": true,
- "use_relative_position_bias": true,
+ "use_relative_position_bias": false,
  "use_shared_relative_position_bias": false,
  "vocab_size": 8192
  }
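
The config.json diff tracks the switch of --model_name_or_path: the fields that changed (the position-embedding flags and the backbone out_features/out_indices) presumably just reflect loading the DiT checkpoint through transformers rather than hand edits. A quick way to check this (a sketch, assuming Hub access) is to compare the two base configs on exactly those fields:

# Sketch: compare the old and new base checkpoints' configs on the fields
# that differ in this diff.
from transformers import AutoConfig

old = AutoConfig.from_pretrained("microsoft/beit-base-patch16-224-pt22k-ft22k")
new = AutoConfig.from_pretrained("microsoft/dit-base-finetuned-rvlcdip")

for key in ("use_absolute_position_embeddings",
            "use_relative_position_bias",
            "out_features",
            "out_indices"):
    print(key, getattr(old, key, None), "->", getattr(new, key, None))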
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:87a3e53f87c26251c9485be8d2e6b4896fb3ca5fbc1fe742353ffe4c57e091e8
- size 343077252
+ oid sha256:b5ab9b8b1e14e6152ce977b120fdb25da84fd41b3cc0f9e81bfe849771b48f31
+ size 343258940
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:a0a2d72729478c9c56a6673fbc1a93a59085bf2829c446ff056e0e39458bcde9
- size 5304
+ oid sha256:a8ccf78e753d42e33d8326a19b03c04ad80293fadf790fa06fff55f386aebcd4
+ size 5240