Upload Qwen3ForCausalLM
- config.json +8 -4
- pytorch_model-00001-of-00002.bin +1 -1
- pytorch_model-00002-of-00002.bin +1 -1
config.json CHANGED

@@ -65,6 +65,14 @@
     "base_config": {
       "_data": {
         "group_size": 128,
+        "int4_choose_qparams_algorithm": {
+          "_data": "TINYGEMM",
+          "_type": "Int4ChooseQParamsAlgorithm"
+        },
+        "int4_packing_format": {
+          "_data": "PLAIN",
+          "_type": "Int4PackingFormat"
+        },
         "layout": {
           "_data": {
             "inner_k_tiles": 8
@@ -72,10 +80,6 @@
           "_type": "TensorCoreTiledLayout",
           "_version": 1
         },
-        "packing_format": {
-          "_data": "PLAIN",
-          "_type": "PackingFormat"
-        },
         "preserve_zero": null,
         "set_inductor_config": true,
         "use_hqq": false,
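The base_config block above is the serialized form of a torchao int4 weight-only quantization config (group size 128, TensorCoreTiledLayout with inner_k_tiles=8). The commit renames the old packing_format field to the int4-specific int4_packing_format and writes out an explicit int4_choose_qparams_algorithm of TINYGEMM. As a rough sketch only, assuming recent transformers/torchao releases (the model id and exact argument names below are illustrative, not taken from this commit), a checkpoint carrying this kind of config could be produced like so:

```python
# Illustrative sketch only: produces a config.json with a torchao int4
# weight-only "base_config" like the one in the diff above (group_size=128).
# Class and argument names here are assumptions about recent
# transformers/torchao releases, not something stated in this commit.
import torch
from transformers import AutoModelForCausalLM, TorchAoConfig
from torchao.quantization import Int4WeightOnlyConfig

# group_size=128 matches the "group_size" field in the diff; newer torchao
# versions also serialize int4_packing_format and
# int4_choose_qparams_algorithm, which is what the added lines correspond to.
quant_config = TorchAoConfig(quant_type=Int4WeightOnlyConfig(group_size=128))

model = AutoModelForCausalLM.from_pretrained(
    "Qwen/Qwen3-1.7B",            # placeholder model id, not from this commit
    torch_dtype=torch.bfloat16,
    device_map="auto",
    quantization_config=quant_config,
)

# Saving with safe_serialization=False writes sharded pytorch_model-*.bin
# files plus a config.json embedding the serialized quantization config.
model.save_pretrained("qwen3-int4", safe_serialization=False)
```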
pytorch_model-00001-of-00002.bin CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:20da9306eb208967f598cf115a672b36d68ca5853d8ee0c2fc6904b566d1732e
 size 4938272073
pytorch_model-00002-of-00002.bin CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:760a161b40a3e47adedecad1feeac1c8b6b24b1f3f1b23a58e5b785b44df736e
 size 330623983
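Both .bin entries are Git LFS pointer files, so only the oid line changes here while the recorded byte sizes stay the same. A generic sketch (paths are placeholders, not from this commit) for checking a downloaded shard against the sha256 oid in its updated pointer:

```python
# Generic verification sketch: compare a downloaded shard's sha256 against
# the oid recorded in the updated Git LFS pointer. Paths are placeholders.
import hashlib

def sha256_of_file(path: str, chunk_size: int = 1 << 20) -> str:
    """Hash the file in 1 MiB chunks so multi-GB shards need not fit in RAM."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# oid from the updated pointer for pytorch_model-00001-of-00002.bin
expected = "20da9306eb208967f598cf115a672b36d68ca5853d8ee0c2fc6904b566d1732e"
actual = sha256_of_file("pytorch_model-00001-of-00002.bin")
print("match" if actual == expected else f"mismatch: got {actual}")
```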