cicdatopea committed (verified)
Commit a8f85fe · 1 Parent(s): 9620120

Update config.json

Files changed (1):
  config.json (+3, -4)
config.json CHANGED
@@ -25,7 +25,7 @@
   "pretraining_tp": 1,
   "quantization_config": {
     "amp": true,
-    "autoround_version": "0.4.1",
+    "autoround_version": "0.5.1",
     "batch_size": 8,
     "bits": 4,
     "data_type": "int",
@@ -43,15 +43,14 @@
     "quant_method": "auto-round",
     "scale_dtype": "torch.float16",
     "seqlen": 2048,
-    "sym": true,
-    "to_quant_block_names": "model.layers"
+    "sym": true
   },
   "rms_norm_eps": 1e-05,
   "rope_scaling": null,
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.46.1",
+  "transformers_version": "4.52.2",
   "use_cache": true,
   "vocab_size": 128256
 }
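The commit bumps the recorded auto-round version, drops the "to_quant_block_names" entry, keeps symmetric quantization ("sym": true), and updates the recorded transformers version. A minimal sketch of how one might check the updated fields is below; it only reads config.json from the Hub, and the repo ID is a hypothetical placeholder since this commit page does not name the repository.

import json
from huggingface_hub import hf_hub_download

# Hypothetical repo ID for illustration; substitute the actual model repository.
repo_id = "cicdatopea/your-autoround-int4-model"

# Download just config.json and inspect the fields touched by this commit.
config_path = hf_hub_download(repo_id=repo_id, filename="config.json")
with open(config_path) as f:
    config = json.load(f)

qcfg = config["quantization_config"]
assert qcfg["autoround_version"] == "0.5.1"
assert qcfg["sym"] is True
assert "to_quant_block_names" not in qcfg          # removed in this commit
assert config["transformers_version"] == "4.52.2"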