jfargus committed
Commit 161c350 · verified · 1 Parent(s): 947129e

Upload folder using huggingface_hub

added_tokens.json ADDED
@@ -0,0 +1,24 @@
+ {
+ "</s_adr1>": 57537,
+ "</s_adr2>": 57535,
+ "</s_bnm>": 57533,
+ "</s_bval>": 57541,
+ "</s_cbx>": 57539,
+ "</s_enm>": 57531,
+ "</s_ent>": 57525,
+ "</s_sdt>": 57529,
+ "</s_ssn>": 57527,
+ "<s_adr1>": 57538,
+ "<s_adr2>": 57536,
+ "<s_bnm>": 57534,
+ "<s_bval>": 57542,
+ "<s_cbx>": 57540,
+ "<s_enm>": 57532,
+ "<s_ent>": 57526,
+ "<s_iitcdip>": 57523,
+ "<s_sdt>": 57530,
+ "<s_ssn>": 57528,
+ "<s_synthdog>": 57524,
+ "<s_w9_cord>": 57543,
+ "<sep/>": 57522
+ }
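added_tokens.json registers the opening/closing field tags (s_ent, s_enm, s_adr1, ...) plus the <s_w9_cord> task token on top of the donut-base vocabulary. A minimal sketch of verifying those ids, assuming the slow XLM-RoBERTa tokenizer named in tokenizer_config.json and a placeholder path to a local copy of this checkpoint (the slow tokenizer reads added_tokens.json from that folder automatically):

```python
# Sketch: confirm the task-specific tokens from added_tokens.json are registered.
# "path/to/this/checkpoint" is a placeholder for the downloaded repo folder.
from transformers import XLMRobertaTokenizer

tokenizer = XLMRobertaTokenizer.from_pretrained("path/to/this/checkpoint")

for tok in ["<s_w9_cord>", "<s_ent>", "</s_ent>", "<sep/>"]:
    print(tok, tokenizer.convert_tokens_to_ids(tok))
# Expected ids per added_tokens.json:
# <s_w9_cord> -> 57543, <s_ent> -> 57526, </s_ent> -> 57525, <sep/> -> 57522
```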
artifacts.ckpt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aa393432df37edbc874d61352aca8bc593cb6a42902d48604d8d019b67fe00c4
+ size 1603760464
config.json ADDED
@@ -0,0 +1,24 @@
+ {
+ "_name_or_path": "naver-clova-ix/donut-base",
+ "align_long_axis": false,
+ "architectures": [
+ "DonutModel"
+ ],
+ "decoder_layer": 4,
+ "encoder_layer": [
+ 2,
+ 2,
+ 14,
+ 2
+ ],
+ "input_size": [
+ 1280,
+ 960
+ ],
+ "max_length": 128,
+ "max_position_embeddings": 128,
+ "model_type": "donut",
+ "torch_dtype": "float32",
+ "transformers_version": "4.24.0",
+ "window_size": 10
+ }
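With "architectures": ["DonutModel"] and keys such as encoder_layer/window_size, this config appears to correspond to the original donut-python codebase rather than a transformers VisionEncoderDecoderModel, with a 1280x960 input resolution and a 128-token decoder. A minimal loading sketch under that assumption (donut-python installed, placeholder checkpoint path):

```python
# Sketch: load the checkpoint with the original donut package (pip install donut-python).
# "path/to/this/checkpoint" is a placeholder for the local repo folder.
from donut import DonutModel

model = DonutModel.from_pretrained("path/to/this/checkpoint")
print(model.config.input_size)  # expected [1280, 960], matching config.json
print(model.config.max_length)  # expected 128
```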
config.yaml ADDED
@@ -0,0 +1,34 @@
+ resume_from_checkpoint_path: None
+ result_path: './result'
+ pretrained_model_name_or_path: 'naver-clova-ix/donut-base'
+ dataset_name_or_paths:
+ - 'jfargus/w9_cord_ltd_fields_phila_domain_v5'
+ sort_json_key: True
+ train_batch_sizes:
+ - 4
+ val_batch_sizes:
+ - 1
+ input_size:
+ - 1280
+ - 960
+ max_length: 128
+ align_long_axis: False
+ num_nodes: 1
+ seed: 2022
+ lr: 3e-05
+ num_training_samples_per_epoch: 2500
+ warmup_steps: 300
+ max_epochs: 4
+ max_steps: -1
+ num_workers: 4
+ val_check_interval: 1.0
+ check_val_every_n_epoch: 1
+ gradient_clip_val: 1.0
+ verbose: True
+ limit_val_batches: 0.75
+ task_name: 'w9_cord'
+ augment: True
+ augment_prob: 0.75
+ pexels_key: 'GpOfCma7X0faYw2FldfWGymfzdCyPJ9AePbfQbLDp757QPU4IUPzGJc2'
+ exp_name: 'train_w9_cord_stage_1'
+ exp_version: '20250617_132202'
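config.yaml records the fine-tuning hyperparameters (the jfargus/w9_cord_ltd_fields_phila_domain_v5 dataset, batch size 4, 4 epochs, lr 3e-05, 1280x960 inputs). A minimal sketch of reading it back with PyYAML; under PyYAML's default YAML 1.1 resolver, the bare literals None and 3e-05 (no decimal point) come back as strings, so downstream code may need to convert them:

```python
# Sketch: read the training config back with PyYAML (pip install pyyaml).
import yaml

with open("config.yaml") as f:
    cfg = yaml.safe_load(f)

# Caveat: `None` is parsed as the string "None" (not a null) and `3e-05`
# as the string "3e-05" (not a float) by PyYAML's default resolver.
print(repr(cfg["resume_from_checkpoint_path"]))  # 'None'
print(repr(cfg["lr"]))                           # '3e-05'
print(cfg["input_size"], cfg["max_epochs"])      # [1280, 960] 4
```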
events.out.tfevents.1750166642.218ebeccb76c.2610.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b2944b8ab36cac7ac29037244abc1a4e57369833972e120e6cc882902bdf6d29
+ size 10848
hparams.yaml ADDED
@@ -0,0 +1 @@
+ {}
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d5bf830a5c26af44d11799d90ecaafb9fa175cb907dcc949afd9a1e580ab2458
+ size 855782142
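pytorch_model.bin, like artifacts.ckpt and sentencepiece.bpe.model, is tracked with Git LFS, so the committed file is only a pointer (spec version, SHA-256 oid, size; about 856 MB of weights here). A hedged sketch for fetching the actual payload through huggingface_hub; the repo id below is a placeholder for wherever this commit lives:

```python
# Sketch: resolve the Git LFS pointer to the real weights file via the Hub API.
# "jfargus/<repo-name>" is a placeholder; substitute the actual repo id.
from huggingface_hub import hf_hub_download

weights_path = hf_hub_download(
    repo_id="jfargus/<repo-name>",
    filename="pytorch_model.bin",
)
print(weights_path)  # local cache path of the downloaded 855782142-byte file
```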
sentencepiece.bpe.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cb9e3dce4c326195d08fc3dd0f7e2eee1da8595c847bf4c1a9c78b7a82d47e2d
+ size 1296245
special_tokens_map.json ADDED
@@ -0,0 +1,18 @@
+ {
+ "additional_special_tokens": [
+ "<s_w9_cord>"
+ ],
+ "bos_token": "<s>",
+ "cls_token": "<s>",
+ "eos_token": "</s>",
+ "mask_token": {
+ "content": "<mask>",
+ "lstrip": true,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": "<pad>",
+ "sep_token": "</s>",
+ "unk_token": "<unk>"
+ }
tokenizer_config.json ADDED
@@ -0,0 +1,22 @@
+ {
+ "bos_token": "<s>",
+ "cls_token": "<s>",
+ "eos_token": "</s>",
+ "from_slow": true,
+ "mask_token": {
+ "__type": "AddedToken",
+ "content": "<mask>",
+ "lstrip": true,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "name_or_path": "naver-clova-ix/donut-base",
+ "pad_token": "<pad>",
+ "processor_class": "DonutProcessor",
+ "sep_token": "</s>",
+ "sp_model_kwargs": {},
+ "special_tokens_map_file": null,
+ "tokenizer_class": "XLMRobertaTokenizer",
+ "unk_token": "<unk>"
+ }
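special_tokens_map.json and tokenizer_config.json point the checkpoint at the XLMRobertaTokenizer and register <s_w9_cord> as an additional special token; it matches task_name: 'w9_cord' in config.yaml and serves as the task start prompt at inference time. A minimal end-to-end sketch, again assuming the donut-python package and placeholder paths:

```python
# Sketch: run document parsing with the fine-tuned checkpoint and its task prompt.
# Paths are placeholders; the prompt is the additional special token registered above.
from donut import DonutModel
from PIL import Image

model = DonutModel.from_pretrained("path/to/this/checkpoint")
model.eval()

image = Image.open("w9_form.png").convert("RGB")
output = model.inference(image=image, prompt="<s_w9_cord>")
print(output["predictions"][0])  # parsed field dictionary (keys follow the s_* tags above)
```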