itazap (HF Staff) committed
Commit fda11f7 (verified) · Parent: a7d2998

copy snapshot bb8c23be2 from checkpoint itazap/blt-1b

Files changed (4):
  1. .gitattributes +35 -0
  2. config.json +99 -0
  3. model.safetensors +3 -0
  4. tokenizer_config.json +11 -0
.gitattributes ADDED
@@ -0,0 +1,35 @@
+ *.7z filter=lfs diff=lfs merge=lfs -text
+ *.arrow filter=lfs diff=lfs merge=lfs -text
+ *.bin filter=lfs diff=lfs merge=lfs -text
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
+ *.ftz filter=lfs diff=lfs merge=lfs -text
+ *.gz filter=lfs diff=lfs merge=lfs -text
+ *.h5 filter=lfs diff=lfs merge=lfs -text
+ *.joblib filter=lfs diff=lfs merge=lfs -text
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
+ *.model filter=lfs diff=lfs merge=lfs -text
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
+ *.npy filter=lfs diff=lfs merge=lfs -text
+ *.npz filter=lfs diff=lfs merge=lfs -text
+ *.onnx filter=lfs diff=lfs merge=lfs -text
+ *.ot filter=lfs diff=lfs merge=lfs -text
+ *.parquet filter=lfs diff=lfs merge=lfs -text
+ *.pb filter=lfs diff=lfs merge=lfs -text
+ *.pickle filter=lfs diff=lfs merge=lfs -text
+ *.pkl filter=lfs diff=lfs merge=lfs -text
+ *.pt filter=lfs diff=lfs merge=lfs -text
+ *.pth filter=lfs diff=lfs merge=lfs -text
+ *.rar filter=lfs diff=lfs merge=lfs -text
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
+ *.tar filter=lfs diff=lfs merge=lfs -text
+ *.tflite filter=lfs diff=lfs merge=lfs -text
+ *.tgz filter=lfs diff=lfs merge=lfs -text
+ *.wasm filter=lfs diff=lfs merge=lfs -text
+ *.xz filter=lfs diff=lfs merge=lfs -text
+ *.zip filter=lfs diff=lfs merge=lfs -text
+ *.zst filter=lfs diff=lfs merge=lfs -text
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
config.json ADDED
@@ -0,0 +1,99 @@
+ {
+   "model_type": "blt",
+   "vocab_size": 260,
+   "max_position_embeddings": 4096,
+   "patch_in_forward": true,
+   "realtime_patching": true,
+   "patching_mode": "entropy",
+   "patch_size": 4,
+   "patching_threshold": 1.335442066192627,
+   "patching_threshold_add": null,
+   "max_patch_length": null,
+   "patching_batch_size": 1,
+   "patching_device": "cuda",
+   "monotonicity": false,
+   "cross_attn_k": 2,
+   "encoder_hash_byte_group_size": [
+     3,
+     4,
+     5,
+     6,
+     7,
+     8
+   ],
+   "encoder_hash_byte_group_vocab": 500002,
+   "encoder_hash_byte_group_nb_functions": 1,
+   "pm_size": 0,
+   "patcher_config": {
+     "vocab_size": 260,
+     "hidden_size": 768,
+     "num_hidden_layers": 14,
+     "num_attention_heads": 12,
+     "num_key_value_heads": null,
+     "max_position_embeddings": 8192,
+     "norm_eps": 1e-05,
+     "dropout": 0.0,
+     "rope_theta": 10000.0,
+     "attn_impl": "xformers",
+     "attn_bias_type": "local_block_causal",
+     "intermediate_size": 2048
+   },
+   "encoder_config": {
+     "vocab_size": 260,
+     "cross_attn_all_layers": false,
+     "cross_attn_k": 2,
+     "hidden_size_global": 2048,
+     "pm_size": 0,
+     "hidden_size": 1024,
+     "num_attention_heads": 16,
+     "num_key_value_heads": null,
+     "num_hidden_layers": 1,
+     "norm_eps": 1e-05,
+     "dropout": 0.0,
+     "max_position_embeddings": 24576,
+     "rope_theta": 500000.0,
+     "rope_scaling": {
+       "rope_type": "default"
+     },
+     "hidden_act": "silu",
+     "_attn_implementation": "sdpa",
+     "intermediate_size": 2816
+   },
+   "decoder_config": {
+     "vocab_size": 260,
+     "cross_attn_all_layers": true,
+     "cross_attn_k": 2,
+     "hidden_size_global": 2048,
+     "hidden_size": 1024,
+     "num_attention_heads": 16,
+     "num_key_value_heads": null,
+     "num_hidden_layers": 9,
+     "norm_eps": 1e-05,
+     "dropout": 0.0,
+     "max_position_embeddings": 24576,
+     "rope_theta": 500000.0,
+     "rope_scaling": {
+       "rope_type": "default"
+     },
+     "hidden_act": "silu",
+     "_attn_implementation": "sdpa",
+     "intermediate_size": 2816
+   },
+   "global_config": {
+     "hidden_size": 2048,
+     "num_attention_heads": 16,
+     "num_key_value_heads": null,
+     "num_hidden_layers": 25,
+     "norm_eps": 1e-05,
+     "dropout": 0.0,
+     "max_position_embeddings": 4096,
+     "rope_theta": 500000.0,
+     "rope_scaling": {
+       "rope_type": "default"
+     },
+     "hidden_act": "silu",
+     "_attn_implementation": "sdpa",
+     "intermediate_size": 5632
+   },
+   "tie_word_embeddings": false
+ }
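
For context, this config wires up the four sub-modules of a Byte Latent Transformer: a small entropy model (patcher_config) that scores each next byte and, with "patching_mode": "entropy", opens a new patch when the entropy crosses patching_threshold; a 1-layer local encoder and 9-layer local decoder that operate on raw bytes; and a 25-layer global transformer that runs over patch representations. A minimal sketch of inspecting it, assuming a transformers version that ships BLT support (the repo id itazap/blt-1b is taken from the commit message):

from transformers import AutoConfig

# Fetch the config.json added in this commit (assumes BLT is registered
# in the installed transformers version).
config = AutoConfig.from_pretrained("itazap/blt-1b")

print(config.model_type)       # "blt"
print(config.patcher_config)   # 14-layer, 768-dim entropy patcher
print(config.encoder_config)   # 1-layer, 1024-dim byte-level local encoder
print(config.decoder_config)   # 9-layer, 1024-dim byte-level local decoder
print(config.global_config)    # 25-layer, 2048-dim global transformer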
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b42ece52607eacbb4e538c695b137a53d38ea68dcc4a03dd825a9656f476162d
+ size 9266850624
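
The model.safetensors entry is a Git LFS pointer rather than the weights themselves: it records the LFS spec version, the SHA-256 of the real file, and its size (9,266,850,624 bytes, about 8.6 GiB). A minimal sketch of checking a downloaded copy against the pointer, assuming the weights already sit at a local model.safetensors path (hypothetical):

import hashlib
import os

# Values copied from the LFS pointer above.
EXPECTED_OID = "b42ece52607eacbb4e538c695b137a53d38ea68dcc4a03dd825a9656f476162d"
EXPECTED_SIZE = 9_266_850_624

path = "model.safetensors"  # hypothetical local download path

# Stream the file in 1 MiB chunks so the ~9 GB file never sits in memory.
sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"
assert sha.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("model.safetensors matches its LFS pointer")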
tokenizer_config.json ADDED
@@ -0,0 +1,11 @@
+ {
+   "tokenizer_class": "BLTTokenizer",
+   "vocab_size": 260,
+   "model_max_length": 1024,
+   "add_bos_token": true,
+   "add_eos_token": false,
+   "bos_token": "<s>",
+   "eos_token": "</s>",
+   "pad_token": "<pad>",
+   "unk_token": "<unk>"
+ }
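
Since BLT operates directly on bytes, the tokenizer maps each UTF-8 byte to an id; the vocab_size of 260 presumably covers the 256 byte values plus the four special tokens declared above. A minimal sketch of loading it, again assuming the installed transformers registers BLTTokenizer with the Auto classes:

from transformers import AutoTokenizer

# Assumes a transformers release that knows about BLTTokenizer.
tok = AutoTokenizer.from_pretrained("itazap/blt-1b")

ids = tok("hello")["input_ids"]
# With add_bos_token=true and add_eos_token=false, expect a leading <s> id
# followed by one id per UTF-8 byte of "hello", and no trailing </s>.
print(ids)
print(tok.decode(ids, skip_special_tokens=True))  # "hello"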