akoumpa committed · Commit a75880f · verified · 1 parent: c2829f2

Upload MixtralForCausalLM

Files changed (2):
  1. config.json (+3 -3)
  2. model.safetensors (+2 -2)
config.json CHANGED
@@ -6,11 +6,11 @@
   "attention_dropout": 0.0,
   "bos_token_id": 1,
   "eos_token_id": 2,
-  "head_dim": 32,
+  "head_dim": 16,
   "hidden_act": "silu",
-  "hidden_size": 1024,
+  "hidden_size": 512,
   "initializer_range": 0.02,
-  "intermediate_size": 3584,
+  "intermediate_size": 448,
   "max_position_embeddings": 32768,
   "model_type": "mixtral",
   "num_attention_heads": 32,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:12025417f48d555bb4de75adeff3de5af83c05110e69c17040ec4c7b705b098a
- size 987852608
+ oid sha256:f68ef52176f9fe663847d2ea5eeaab7cc0bce11ed49114672097a473da8975f5
+ size 180405960