Update patch_comfyui_nunchaku_lora.py
Enhanced functionality: replaced manual path input with a dialogue box.
You can now select the LoRA file via a dialogue box and choose the output folder the same way.
No changes were made to the rest of the functionality.
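For reference, the dialogue-based selection reduces to the standard tkinter filedialog pattern shown below (a minimal sketch condensed from the full script in the diff; the full version also exits cleanly when a dialog is cancelled and builds the "-Patched" output filename):

import tkinter as tk
from tkinter import filedialog

root = tk.Tk()
root.withdraw()  # hide the empty root window so only the dialogs appear

# Pick the input LoRA file and the folder to write the patched copy into
input_path = filedialog.askopenfilename(
    title="Select LoRA .safetensors file",
    filetypes=[("Safetensors files", "*.safetensors")],
)
output_dir = filedialog.askdirectory(title="Select folder to save patched file")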
- patch_comfyui_nunchaku_lora.py +128 -116
@@ -1,116 +1,128 @@
[Previous 116-line version of the script removed in full; it began with the same safetensors/torch imports. The rewritten script follows.]
import safetensors.torch
from safetensors import safe_open
import torch
import os
import tkinter as tk
from tkinter import filedialog


def patch_final_layer_adaLN(state_dict, prefix="lora_unet_final_layer", verbose=True):
    final_layer_linear_down = None
    final_layer_linear_up = None

    adaLN_down_key = f"{prefix}_adaLN_modulation_1.lora_down.weight"
    adaLN_up_key = f"{prefix}_adaLN_modulation_1.lora_up.weight"
    linear_down_key = f"{prefix}_linear.lora_down.weight"
    linear_up_key = f"{prefix}_linear.lora_up.weight"

    if verbose:
        print(f"\n🔍 Checking for final_layer keys with prefix: '{prefix}'")
        print(f"  Linear down: {linear_down_key}")
        print(f"  Linear up: {linear_up_key}")

    if linear_down_key in state_dict:
        final_layer_linear_down = state_dict[linear_down_key]
    if linear_up_key in state_dict:
        final_layer_linear_up = state_dict[linear_up_key]

    has_adaLN = adaLN_down_key in state_dict and adaLN_up_key in state_dict
    has_linear = final_layer_linear_down is not None and final_layer_linear_up is not None

    if verbose:
        print(f"  ✅ Has final_layer.linear: {has_linear}")
        print(f"  ✅ Has final_layer.adaLN_modulation_1: {has_adaLN}")

    if has_linear and not has_adaLN:
        dummy_down = torch.zeros_like(final_layer_linear_down)
        dummy_up = torch.zeros_like(final_layer_linear_up)
        state_dict[adaLN_down_key] = dummy_down
        state_dict[adaLN_up_key] = dummy_up

        if verbose:
            print("✅ Added dummy adaLN weights:")
            print(f"  {adaLN_down_key} (shape: {dummy_down.shape})")
            print(f"  {adaLN_up_key} (shape: {dummy_up.shape})")
    else:
        if verbose:
            print("✅ No patch needed - adaLN weights already present or no final_layer.linear found.")

    return state_dict


def main():
    print("🔧 Universal final_layer.adaLN LoRA patcher (.safetensors)")

    # GUI for file/folder selection
    root = tk.Tk()
    root.withdraw()

    input_path = filedialog.askopenfilename(
        title="Select LoRA .safetensors file",
        filetypes=[("Safetensors files", "*.safetensors")]
    )
    if not input_path:
        print("❌ No file selected. Exiting.")
        return

    output_dir = filedialog.askdirectory(
        title="Select folder to save patched file"
    )
    if not output_dir:
        print("❌ No folder selected. Exiting.")
        return

    # Generate output filename
    base_name = os.path.basename(input_path)
    name, ext = os.path.splitext(base_name)
    output_filename = f"{name}-Patched{ext}"
    output_path = os.path.join(output_dir, output_filename)

    # Load
    state_dict = {}
    with safe_open(input_path, framework="pt", device="cpu") as f:
        for k in f.keys():
            state_dict[k] = f.get_tensor(k)

    print(f"\n✅ Loaded {len(state_dict)} tensors from: {input_path}")

    final_keys = [k for k in state_dict if "final_layer" in k]
    if final_keys:
        print("\n🔍 Found these final_layer-related keys:")
        for k in final_keys:
            print(f"  {k}")
    else:
        print("\n⚠️ No keys with 'final_layer' found - will try patch anyway.")

    prefixes = [
        "lora_unet_final_layer",
        "final_layer",
        "base_model.model.final_layer"
    ]
    patched = False

    for prefix in prefixes:
        before = len(state_dict)
        state_dict = patch_final_layer_adaLN(state_dict, prefix=prefix)
        after = len(state_dict)
        if after > before:
            patched = True
            break

    if not patched:
        print("\nℹ️ No patch applied - either adaLN already exists or no final_layer.linear found.")

    # Save
    safetensors.torch.save_file(state_dict, output_path)
    print(f"\n✅ Patched file saved to: {output_path}")
    print(f"  Total tensors now: {len(state_dict)}")

    # Verify
    print("\n🔍 Verifying patched keys:")
    with safe_open(output_path, framework="pt", device="cpu") as f:
        keys = list(f.keys())
        for k in keys:
            if "final_layer" in k:
                print(f"  {k}")
    has_adaLN_after = any("adaLN_modulation_1" in k for k in keys)
    print(f"✅ Contains adaLN after patch: {has_adaLN_after}")


if __name__ == "__main__":
    main()