SonalH committed on
Commit
0554a5b
1 Parent(s): 6da2522

Training in progress, step 500

Browse files
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "bert-base-uncased",
3
  "architectures": [
4
  "BertForQuestionAnswering"
5
  ],
 
1
  {
2
+ "_name_or_path": "E:\\SonalH\\my_trained_bert",
3
  "architectures": [
4
  "BertForQuestionAnswering"
5
  ],
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:ab6c73f9a4d01177dfc7d531d9eb678d701b67dc2be205640d3d32e811e7a479
3
- size 435637673
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:25e986fe88d14bfd4484b6c340f24d42423c133962d9f0baef6b9e3115520c09
3
+ size 435640489
tokenizer_config.json CHANGED
@@ -3,11 +3,18 @@
3
  "cls_token": "[CLS]",
4
  "do_lower_case": true,
5
  "mask_token": "[MASK]",
 
6
  "model_max_length": 512,
 
7
  "pad_token": "[PAD]",
 
 
8
  "sep_token": "[SEP]",
 
9
  "strip_accents": null,
10
  "tokenize_chinese_chars": true,
11
  "tokenizer_class": "BertTokenizer",
 
 
12
  "unk_token": "[UNK]"
13
  }
 
3
  "cls_token": "[CLS]",
4
  "do_lower_case": true,
5
  "mask_token": "[MASK]",
6
+ "max_length": 384,
7
  "model_max_length": 512,
8
+ "pad_to_multiple_of": null,
9
  "pad_token": "[PAD]",
10
+ "pad_token_type_id": 0,
11
+ "padding_side": "right",
12
  "sep_token": "[SEP]",
13
+ "stride": 128,
14
  "strip_accents": null,
15
  "tokenize_chinese_chars": true,
16
  "tokenizer_class": "BertTokenizer",
17
+ "truncation_side": "right",
18
+ "truncation_strategy": "only_second",
19
  "unk_token": "[UNK]"
20
  }
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:1c7ff5ce02cf8d8ac0e2340e2dc245d6e371a420a468c707b80fd3ba53521693
3
- size 4091
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bcef42e4cfedddeabf9fabe66c5cc5c81bd99d9569a42012cb96172c10bfa289
3
+ size 4027