Historya committed
Commit 28b70b9 · verified · 1 Parent(s): 70fffc7

rl-swarm: round 32182, agent territorial_mangy_ox

Files changed (3):
  1. config.json +3 -29
  2. generation_config.json +1 -1
  3. model.safetensors +1 -1
config.json CHANGED
@@ -4,38 +4,11 @@
   ],
   "attention_dropout": 0.0,
   "bos_token_id": 151643,
- "dtype": "float32",
   "eos_token_id": 151645,
   "hidden_act": "silu",
   "hidden_size": 896,
   "initializer_range": 0.02,
   "intermediate_size": 4864,
- "layer_types": [
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention"
- ],
   "max_position_embeddings": 32768,
   "max_window_layers": 21,
   "model_type": "qwen2",
@@ -45,9 +18,10 @@
   "rms_norm_eps": 1e-06,
   "rope_scaling": null,
   "rope_theta": 1000000.0,
- "sliding_window": null,
+ "sliding_window": 32768,
   "tie_word_embeddings": true,
- "transformers_version": "4.56.2",
+ "torch_dtype": "float32",
+ "transformers_version": "4.51.3",
   "use_cache": true,
   "use_sliding_window": false,
   "vocab_size": 151936
generation_config.json CHANGED
@@ -10,5 +10,5 @@
   "temperature": 0.7,
   "top_k": 20,
   "top_p": 0.8,
- "transformers_version": "4.56.2"
+ "transformers_version": "4.51.3"
   }
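
Only the recorded transformers version changes here; the sampling defaults (temperature 0.7, top_k 20, top_p 0.8) are untouched and are picked up automatically by generate() when sampling is enabled. A minimal sketch, again with a placeholder repo id:

    # Minimal sketch: generation_config.json defaults (temperature=0.7,
    # top_k=20, top_p=0.8) apply when do_sample=True.
    from transformers import AutoModelForCausalLM, AutoTokenizer

    repo = "Historya/rl-swarm-model"  # hypothetical repo id
    tok = AutoTokenizer.from_pretrained(repo)
    model = AutoModelForCausalLM.from_pretrained(repo)
    inputs = tok("Hello", return_tensors="pt")
    out = model.generate(**inputs, do_sample=True, max_new_tokens=32)
    print(tok.decode(out[0], skip_special_tokens=True))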
model.safetensors CHANGED
@@ -1,3 +1,3 @@
   version https://git-lfs.github.com/spec/v1
- oid sha256:40197c47152446b50ab03599dab26ea4aaba2966736d8a82a27fc3b7f0eef72a
+ oid sha256:ff208332ee07c8e0538cc753e9b4a0ee6c42b191f878aa21bae2e667fa9c725b
   size 1976163472
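
Only the Git LFS pointer's content hash changes; the byte size is identical, consistent with updated weights of the same shape. A minimal sketch for verifying a local download against the new pointer; the local path is an assumption:

    # Minimal sketch: check a downloaded model.safetensors against the
    # sha256 oid and size recorded in this commit's LFS pointer.
    import hashlib
    import os

    path = "model.safetensors"  # assumed local download path
    expected = "ff208332ee07c8e0538cc753e9b4a0ee6c42b191f878aa21bae2e667fa9c725b"

    assert os.path.getsize(path) == 1976163472, "size mismatch"
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # 1 MiB chunks
            h.update(chunk)
    assert h.hexdigest() == expected, "hash mismatch"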