versae committed
Commit 2f92e69 · verified · 1 parent: e668b77

Training in progress, step 30000

Files changed (4)
  1. config.json +1 -1
  2. model.safetensors +2 -2
  3. tokenizer_config.json +1 -1
  4. vocab.txt +0 -0
config.json CHANGED
@@ -18,7 +18,7 @@
     "LABEL_0": 0
   },
   "layer_norm_eps": 1e-05,
- "max_position_embeddings": 514,
+ "max_position_embeddings": 8194,
  "model_type": "xlm-roberta",
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:737b40310823000d200182b18aa0baffeceda06a4beeb403a4e1c85eed186203
- size 2239614572
+ oid sha256:cb6c933724e6625f1ac5c5cb61e8e283e90a353950bed3cf133786d97ebfb0d1
+ size 2271071852
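Only the Git LFS pointer changes here: a new sha256 oid and a file that grows from 2,239,614,572 to 2,271,071,852 bytes. The 31,457,280-byte difference is exactly what an fp32 position-embedding table enlarged from 514 to 8194 rows would add at a hidden size of 1024 ((8194 − 514) × 1024 × 4 bytes); the hidden size is not shown in this diff, so that reading is an inference. A small sketch, using only the standard library, of verifying a downloaded copy against the pointer:

```python
import hashlib
import os

# Hypothetical local path to the downloaded weights; adjust as needed.
path = "model.safetensors"

# Expected values taken from the updated LFS pointer in this commit.
EXPECTED_SHA256 = "cb6c933724e6625f1ac5c5cb61e8e283e90a353950bed3cf133786d97ebfb0d1"
EXPECTED_SIZE = 2271071852  # bytes

sha = hashlib.sha256()
with open(path, "rb") as f:
    # Hash in 1 MiB chunks to avoid loading ~2.3 GB into memory at once.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"
assert sha.hexdigest() == EXPECTED_SHA256, "sha256 mismatch"
print("model.safetensors matches the LFS pointer")
```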
tokenizer_config.json CHANGED
@@ -41,7 +41,6 @@
       "special": true
     }
   },
- "additional_special_tokens": [],
  "bos_token": "<s>",
  "clean_up_tokenization_spaces": true,
  "cls_token": "<s>",
@@ -51,6 +50,7 @@
  "model_max_length": 512,
  "pad_token": "<pad>",
  "sep_token": "</s>",
+ "sp_model_kwargs": {},
  "tokenizer_class": "XLMRobertaTokenizer",
  "unk_token": "<unk>"
 }
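Two tokenizer settings change: the empty additional_special_tokens list is dropped, and sp_model_kwargs is now recorded explicitly (an empty dict, i.e. SentencePiece defaults). Note that model_max_length stays at 512 in this commit even though config.json now allows 8194 positions. A minimal loading sketch, again with a placeholder path and requiring the sentencepiece package:

```python
from transformers import XLMRobertaTokenizer

# "path/to/checkpoint" is a hypothetical placeholder for this repo id or a local clone.
tok = XLMRobertaTokenizer.from_pretrained("path/to/checkpoint")

print(tok.sp_model_kwargs)   # {} -> SentencePiece defaults, now stored in tokenizer_config.json
print(tok.model_max_length)  # 512, unchanged by this commit

# Round-trip a short string to confirm the special tokens from the config are applied.
ids = tok("hello world")["input_ids"]
print(tok.convert_ids_to_tokens(ids))  # ['<s>', ..., '</s>']
```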
vocab.txt CHANGED
The diff for this file is too large to render. See raw diff