hf-transformers-bot committed
Commit e1c6f4c · verified · 1 Parent(s): 539c42e

Upload FSMTForConditionalGeneration

Files changed (5):
  1. .gitattributes +1 -0
  2. README.md +1 -1
  3. config.json +31 -22
  4. generation_config.json +1 -1
  5. model.safetensors +3 -0
.gitattributes CHANGED
@@ -6,3 +6,4 @@
 *.tar.gz filter=lfs diff=lfs merge=lfs -text
 *.ot filter=lfs diff=lfs merge=lfs -text
 *.onnx filter=lfs diff=lfs merge=lfs -text
+model.safetensors filter=lfs diff=lfs merge=lfs -text
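The added line routes `model.safetensors` through the Git LFS filter, so clones carry a small pointer file instead of the full binary. A minimal sketch of confirming this, assuming a local clone of the repo (`git check-attr` reports which filter applies to a path):

```python
# Minimal sketch, assuming a local clone of this repo: confirm that the new
# .gitattributes line sends model.safetensors through the LFS filter.
import subprocess

result = subprocess.run(
    ["git", "check-attr", "filter", "--", "model.safetensors"],
    capture_output=True, text=True, check=True,
)
print(result.stdout.strip())  # expected: "model.safetensors: filter: lfs"
```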
README.md CHANGED
@@ -1,5 +1,5 @@
 ---
-language:
+language:
 - en
 - de
 tags:
config.json CHANGED
@@ -1,37 +1,46 @@
 {
+  "activation_dropout": 0.0,
+  "activation_function": "relu",
   "architectures": [
     "FSMTForConditionalGeneration"
   ],
-  "model_type": "fsmt",
-  "activation_dropout": 0.0,
-  "activation_function": "relu",
   "attention_dropout": 0.1,
+  "bos_token_id": 0,
   "d_model": 1024,
+  "decoder": {
+    "bos_token_id": 2,
+    "model_type": "fsmt_decoder",
+    "vocab_size": 42024
+  },
+  "decoder_attention_heads": 16,
+  "decoder_ffn_dim": 4096,
+  "decoder_layerdrop": 0,
+  "decoder_layers": 6,
+  "decoder_start_token_id": 2,
   "dropout": 0.2,
-  "init_std": 0.02,
-  "max_position_embeddings": 1024,
-  "num_hidden_layers": 6,
-  "src_vocab_size": 42024,
-  "tgt_vocab_size": 42024,
-  "langs": [
-    "en",
-    "de"
-  ],
   "encoder_attention_heads": 16,
   "encoder_ffn_dim": 8192,
   "encoder_layerdrop": 0,
   "encoder_layers": 6,
-  "decoder_attention_heads": 16,
-  "decoder_ffn_dim": 4096,
-  "decoder_layerdrop": 0,
-  "decoder_layers": 6,
-  "bos_token_id": 0,
-  "pad_token_id": 1,
   "eos_token_id": 2,
+  "forced_eos_token_id": 2,
+  "init_std": 0.02,
   "is_encoder_decoder": true,
+  "langs": [
+    "en",
+    "de"
+  ],
+  "max_length": 200,
+  "max_position_embeddings": 1024,
+  "model_type": "fsmt",
+  "num_beams": 5,
+  "num_hidden_layers": 6,
+  "pad_token_id": 1,
   "scale_embedding": true,
+  "src_vocab_size": 42024,
+  "tgt_vocab_size": 42024,
   "tie_word_embeddings": true,
-  "num_beams": 5,
-  "early_stopping": false,
-  "length_penalty": 1.0
-}
+  "torch_dtype": "float32",
+  "transformers_version": "4.53.0.dev0",
+  "use_cache": true
+}
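For context, a minimal usage sketch for the checkpoint this config describes: it loads through the class named in `architectures`, and the `langs` field marks it as an en→de translation model. The repo id below is a hypothetical placeholder, since the commit view does not show the repository name:

```python
# Minimal sketch, not the model card's official example. The repo id is
# hypothetical -- substitute the repository this commit actually belongs to.
from transformers import FSMTForConditionalGeneration, FSMTTokenizer

repo_id = "some-org/fsmt-wmt19-en-de"  # hypothetical placeholder

tokenizer = FSMTTokenizer.from_pretrained(repo_id)
model = FSMTForConditionalGeneration.from_pretrained(repo_id)

# "langs": ["en", "de"] in config.json marks this as English -> German.
inputs = tokenizer("Machine learning is great.", return_tensors="pt")
outputs = model.generate(**inputs)  # defaults: num_beams=5, max_length=200
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```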
generation_config.json CHANGED
@@ -7,5 +7,5 @@
   "max_length": 200,
   "num_beams": 5,
   "pad_token_id": 1,
-  "transformers_version": "4.27.0.dev0"
+  "transformers_version": "4.53.0.dev0"
 }
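Only the `transformers_version` stamp changes here; the decoding defaults (beam search with 5 beams, max length 200, pad token 1) are untouched. A minimal sketch of reading those defaults back, assuming the repo has been downloaded to a local directory (hypothetical path):

```python
# Minimal sketch, assuming a local copy of the repo at ./checkpoint
# (hypothetical path). generate() picks these defaults up automatically,
# so this is only needed for inspection or overrides.
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("./checkpoint")
print(gen_config.max_length)    # 200
print(gen_config.num_beams)     # 5
print(gen_config.pad_token_id)  # 1
```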
model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:12e5a6c31b372d402c92c5bc39d060ec3e7a52ec93f2d28fbec7135bb56905f0
+size 1087419504
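What the diff adds is a Git LFS pointer, not the weights themselves: the real payload is content-addressed by the sha256 oid and weighs about 1.09 GB. A minimal sketch of verifying a downloaded copy against the pointer:

```python
# Minimal sketch, assuming model.safetensors has been fully downloaded to the
# current directory: recompute the digest and size and compare them with the
# oid/size fields recorded in the LFS pointer above.
import hashlib

EXPECTED_OID = "12e5a6c31b372d402c92c5bc39d060ec3e7a52ec93f2d28fbec7135bb56905f0"
EXPECTED_SIZE = 1_087_419_504  # bytes, from the pointer's "size" field

digest = hashlib.sha256()
size = 0
with open("model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # 1 MiB chunks
        digest.update(chunk)
        size += len(chunk)

assert size == EXPECTED_SIZE, f"size mismatch: got {size}"
assert digest.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("model.safetensors matches its LFS pointer")
```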