hazentr committed
Commit e1bed8a · verified · 1 Parent(s): 5129157

rl-swarm: round 22190, agent quick_timid_frog

Files changed (3):
  1. config.json +2 -28
  2. generation_config.json +1 -1
  3. model.safetensors +1 -1
config.json CHANGED
@@ -9,32 +9,6 @@
  "hidden_size": 896,
  "initializer_range": 0.02,
  "intermediate_size": 4864,
- "layer_types": [
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention",
- "full_attention"
- ],
  "max_position_embeddings": 32768,
  "max_window_layers": 21,
  "model_type": "qwen2",
@@ -44,10 +18,10 @@
  "rms_norm_eps": 1e-06,
  "rope_scaling": null,
  "rope_theta": 1000000.0,
- "sliding_window": null,
+ "sliding_window": 32768,
  "tie_word_embeddings": true,
  "torch_dtype": "float32",
- "transformers_version": "4.55.0",
+ "transformers_version": "4.51.3",
  "use_cache": true,
  "use_sliding_window": false,
  "vocab_size": 151936
generation_config.json CHANGED
@@ -10,5 +10,5 @@
  "temperature": 0.7,
  "top_k": 20,
  "top_p": 0.8,
- "transformers_version": "4.55.0"
+ "transformers_version": "4.51.3"
  }
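
The generation_config.json change only swaps the "transformers_version" pin; the sampling defaults (temperature 0.7, top_k 20, top_p 0.8) are unchanged. As an illustrative sketch, these are the values that generate() picks up automatically from generation_config.json when sampling from a local checkout (the "." path and the prompt below are assumptions):

# Minimal sketch, assuming a local checkout of this repo in the current directory.
# generation_config.json supplies the sampling defaults shown in the diff above.
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(".")
model = AutoModelForCausalLM.from_pretrained(".")

inputs = tokenizer("Hello", return_tensors="pt")
outputs = model.generate(**inputs, do_sample=True, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))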
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:3f9a724c21e3e612415e61c20f5cf633883b27b239c6d893f3e5967ceaf379ba
+ oid sha256:ff208332ee07c8e0538cc753e9b4a0ee6c42b191f878aa21bae2e667fa9c725b
  size 1976163472
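
model.safetensors is tracked with Git LFS, so the diff only updates the pointer file: per the Git LFS spec, the "oid" line is the SHA-256 of the full weights blob, and the size stays at 1,976,163,472 bytes. A small sketch for checking a downloaded file against the new pointer, assuming the weights have already been fetched locally:

# Minimal sketch: verify a downloaded model.safetensors against the LFS pointer.
# The "oid sha256:..." value is the SHA-256 digest of the entire file.
import hashlib

expected = "ff208332ee07c8e0538cc753e9b4a0ee6c42b191f878aa21bae2e667fa9c725b"

h = hashlib.sha256()
with open("model.safetensors", "rb") as f:        # path assumes a local download
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

print(h.hexdigest() == expected)                  # True if the file matches this commit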