Training in progress, epoch 1

Files changed:
- command.txt (+5 -5)
- config.json (+11 -5)
- model.safetensors (+2 -2)
- training_args.bin (+2 -2)
command.txt
CHANGED
@@ -1,7 +1,7 @@
 python run_image_classification.py \
   --train_dir /trunk/shared/eebo_data/images_cropped/ \
   --validation_dir /trunk/shared/eebo_data/images_cropped/ \
-  --output_dir ./ \
+  --output_dir ./microsoft_dit/ \
   --remove_unused_columns False \
   --label_column_name labels \
   --do_train \
@@ -9,9 +9,9 @@ python run_image_classification.py \
   --push_to_hub \
   --push_to_hub_model_id donut-base-beans \
   --learning_rate 3e-5 \
-  --num_train_epochs
-  --per_device_train_batch_size
-  --per_device_eval_batch_size
+  --num_train_epochs 5 \
+  --per_device_train_batch_size 32 \
+  --per_device_eval_batch_size 32 \
   --logging_strategy steps \
   --logging_steps 10 \
   --eval_strategy epoch \
@@ -22,4 +22,4 @@ python run_image_classification.py \
   --ignore_mismatched_sizes \
   --image_column_name image_url \
   --label_column_name label \
-  --
+  --model_name_or_path microsoft/dit-base-finetuned-rvlcdip
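The new flags point the run at the DiT checkpoint and fill in the training schedule (5 epochs, batch size 32 per device). Because microsoft/dit-base-finetuned-rvlcdip ships a 16-class RVL-CDIP classification head, --ignore_mismatched_sizes is what lets the script replace that head with a freshly initialized one for the new label set. A minimal sketch of what the flag does when loading the checkpoint directly; num_labels=2 is a placeholder, not the actual EEBO label count:

# Sketch: effect of --ignore_mismatched_sizes when the label count changes.
# The RVL-CDIP checkpoint carries a 16-way head; asking for a different
# num_labels would otherwise raise a size-mismatch error at load time.
from transformers import AutoModelForImageClassification

model = AutoModelForImageClassification.from_pretrained(
    "microsoft/dit-base-finetuned-rvlcdip",
    num_labels=2,                  # placeholder, not the real EEBO label count
    ignore_mismatched_sizes=True,  # reinitialize the mismatched head instead of erroring
)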
config.json
CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "microsoft/
+  "_name_or_path": "microsoft/dit-base-finetuned-rvlcdip",
   "add_fpn": false,
   "architectures": [
     "BeitForImageClassification"
@@ -30,10 +30,16 @@
   "num_channels": 3,
   "num_hidden_layers": 12,
   "out_features": [
-    "
+    "stage3",
+    "stage5",
+    "stage7",
+    "stage11"
   ],
   "out_indices": [
-
+    3,
+    5,
+    7,
+    11
   ],
   "patch_size": 16,
   "pool_scales": [
@@ -62,11 +68,11 @@
   ],
   "torch_dtype": "float32",
   "transformers_version": "4.43.0.dev0",
-  "use_absolute_position_embeddings":
+  "use_absolute_position_embeddings": true,
   "use_auxiliary_head": true,
   "use_mask_token": false,
   "use_mean_pooling": true,
-  "use_relative_position_bias":
+  "use_relative_position_bias": false,
   "use_shared_relative_position_bias": false,
   "vocab_size": 8192
 }
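out_features and out_indices name the same transformer stages two ways, by stage label and by layer index, and transformers keeps them in sync; they only come into play when the model is used as a backbone. A quick way to inspect these fields, assuming a transformers version with Beit backbone support (the config above records 4.43.0.dev0) and that the Hub checkpoint's config matches the one committed here:

# Sketch: inspecting the stage-selection fields recorded in config.json.
from transformers import BeitConfig

config = BeitConfig.from_pretrained("microsoft/dit-base-finetuned-rvlcdip")
print(config.out_features)   # expected: ['stage3', 'stage5', 'stage7', 'stage11']
print(config.out_indices)    # expected: [3, 5, 7, 11]
print(config.use_absolute_position_embeddings)  # True for DiT (no relative bias)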
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:b5ab9b8b1e14e6152ce977b120fdb25da84fd41b3cc0f9e81bfe849771b48f31
+size 343258940
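The replaced lines are a Git LFS pointer, not the weights themselves: the oid is the SHA-256 of the ~343 MB safetensors file that LFS stores out of band. A minimal stdlib sketch for verifying a pulled file against the recorded oid:

# Sketch: check a git-lfs-pulled file against the oid in its pointer.
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)  # stream in 1 MiB chunks to keep memory flat
    return h.hexdigest()

expected = "b5ab9b8b1e14e6152ce977b120fdb25da84fd41b3cc0f9e81bfe849771b48f31"
assert sha256_of("model.safetensors") == expected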
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:a8ccf78e753d42e33d8326a19b03c04ad80293fadf790fa06fff55f386aebcd4
+size 5240
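training_args.bin is the pickled TrainingArguments object the Trainer saves alongside each checkpoint, which is why it is only 5 KB. A minimal sketch for deserializing it; weights_only=False is needed on recent torch because this is a pickled Python object, not a tensor file:

# Sketch: recover the TrainingArguments saved with this checkpoint.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate)                # 3e-05, matching the command above
print(args.per_device_train_batch_size)  # 32
print(args.num_train_epochs)             # 5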