sharpenb committed (verified)
Commit c1c2f58 · 1 Parent(s): 63da174

Upload folder using huggingface_hub (#4)


- 0c8b412fc928c3375e76834c1690c6045e21653d8718da8249132155cef45942 (a0fe6a4ad12a09b0a4dde002594b6d8e8623332c)

Files changed (2)
  1. config.json +1 -1
  2. smash_config.json +1 -1
config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "_attn_implementation_autoset": true,
-  "_name_or_path": "/tmp/models/tmp917ab4f6/tmpkwfxbcpn",
+  "_name_or_path": "/tmp/models/tmprochl0dh/tmpwqjwvf3f",
   "architectures": [
     "Qwen2ForCausalLM"
   ],
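The only change here is the recorded export path. As a minimal, hypothetical sketch (not part of this commit; the repo id is a placeholder), the raw config.json can be fetched and inspected with huggingface_hub:

import json
from huggingface_hub import hf_hub_download

# Placeholder repo id; substitute the actual model repository.
config_path = hf_hub_download(repo_id="<org>/<model>", filename="config.json")
with open(config_path) as f:
    config = json.load(f)

print(config["architectures"])   # ["Qwen2ForCausalLM"]
print(config["_name_or_path"])   # temporary export path updated by this commit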
smash_config.json CHANGED
@@ -11,7 +11,7 @@
   "quant_hqq_weight_bits": 8,
   "max_batch_size": 1,
   "device": "cuda",
-  "cache_dir": "/tmp/models/tmp917ab4f6",
+  "cache_dir": "/tmp/models/tmprochl0dh",
   "task": "",
   "save_load_fn": "hqq",
   "save_load_fn_args": {},