Update config.json

#1 by win10 - opened
Files changed (1)
  1. config.json +5 -0
config.json CHANGED
@@ -3,6 +3,11 @@
   "architectures": [
     "Qwen2ForCausalLM"
   ],
+  "dual_chunk_attention_config": {
+    "chunk_size": 262144,
+    "local_size": 8192,
+    "original_max_position_embeddings": 262144
+  },
   "attention_dropout": 0.0,
   "bos_token_id": 151643,
   "eos_token_id": 151643,