svjack commited on
Commit
dac99bd
·
verified ·
1 Parent(s): bf6edf2

Upload qwen_image_instantx_inpainting_controlnet.json

Browse files
qwen_image_instantx_inpainting_controlnet.json ADDED
@@ -0,0 +1,1215 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "id": "91f6bbe2-ed41-4fd6-bac7-71d5b5864ecb",
3
+ "revision": 0,
4
+ "last_node_id": 118,
5
+ "last_link_id": 202,
6
+ "nodes": [
7
+ {
8
+ "id": 38,
9
+ "type": "CLIPLoader",
10
+ "pos": [
11
+ -120,
12
+ 130
13
+ ],
14
+ "size": [
15
+ 380,
16
+ 106
17
+ ],
18
+ "flags": {},
19
+ "order": 0,
20
+ "mode": 0,
21
+ "inputs": [],
22
+ "outputs": [
23
+ {
24
+ "name": "CLIP",
25
+ "type": "CLIP",
26
+ "slot_index": 0,
27
+ "links": [
28
+ 74,
29
+ 75
30
+ ]
31
+ }
32
+ ],
33
+ "properties": {
34
+ "cnr_id": "comfy-core",
35
+ "ver": "0.3.51",
36
+ "Node name for S&R": "CLIPLoader",
37
+ "models": [
38
+ {
39
+ "name": "qwen_2.5_vl_7b_fp8_scaled.safetensors",
40
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors",
41
+ "directory": "text_encoders"
42
+ }
43
+ ]
44
+ },
45
+ "widgets_values": [
46
+ "qwen_2.5_vl_7b_fp8_scaled.safetensors",
47
+ "qwen_image",
48
+ "default"
49
+ ]
50
+ },
51
+ {
52
+ "id": 37,
53
+ "type": "UNETLoader",
54
+ "pos": [
55
+ -120,
56
+ 0
57
+ ],
58
+ "size": [
59
+ 380,
60
+ 82
61
+ ],
62
+ "flags": {},
63
+ "order": 1,
64
+ "mode": 0,
65
+ "inputs": [],
66
+ "outputs": [
67
+ {
68
+ "name": "MODEL",
69
+ "type": "MODEL",
70
+ "slot_index": 0,
71
+ "links": [
72
+ 145
73
+ ]
74
+ }
75
+ ],
76
+ "properties": {
77
+ "cnr_id": "comfy-core",
78
+ "ver": "0.3.51",
79
+ "Node name for S&R": "UNETLoader",
80
+ "models": [
81
+ {
82
+ "name": "qwen_image_fp8_e4m3fn.safetensors",
83
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors",
84
+ "directory": "diffusion_models"
85
+ }
86
+ ]
87
+ },
88
+ "widgets_values": [
89
+ "qwen_image_fp8_e4m3fn.safetensors",
90
+ "default"
91
+ ]
92
+ },
93
+ {
94
+ "id": 8,
95
+ "type": "VAEDecode",
96
+ "pos": [
97
+ 847.4144287109375,
98
+ 560.3872680664062
99
+ ],
100
+ "size": [
101
+ 310,
102
+ 46
103
+ ],
104
+ "flags": {},
105
+ "order": 22,
106
+ "mode": 0,
107
+ "inputs": [
108
+ {
109
+ "name": "samples",
110
+ "type": "LATENT",
111
+ "link": 128
112
+ },
113
+ {
114
+ "name": "vae",
115
+ "type": "VAE",
116
+ "link": 76
117
+ }
118
+ ],
119
+ "outputs": [
120
+ {
121
+ "name": "IMAGE",
122
+ "type": "IMAGE",
123
+ "slot_index": 0,
124
+ "links": [
125
+ 110
126
+ ]
127
+ }
128
+ ],
129
+ "properties": {
130
+ "cnr_id": "comfy-core",
131
+ "ver": "0.3.51",
132
+ "Node name for S&R": "VAEDecode"
133
+ },
134
+ "widgets_values": []
135
+ },
136
+ {
137
+ "id": 86,
138
+ "type": "Note",
139
+ "pos": [
140
+ 847.4144287109375,
141
+ 660.3873901367188
142
+ ],
143
+ "size": [
144
+ 307.4002380371094,
145
+ 127.38092803955078
146
+ ],
147
+ "flags": {},
148
+ "order": 2,
149
+ "mode": 0,
150
+ "inputs": [],
151
+ "outputs": [],
152
+ "properties": {},
153
+ "widgets_values": [
154
+ "Set cfg to 1.0 for a speed boost at the cost of consistency. Samplers like res_multistep work pretty well at cfg 1.0\n\nThe official number of steps is 50 but I think that's too much. Even just 10 steps seems to work."
155
+ ],
156
+ "color": "#432",
157
+ "bgcolor": "#653"
158
+ },
159
+ {
160
+ "id": 84,
161
+ "type": "ControlNetLoader",
162
+ "pos": [
163
+ -120,
164
+ 400
165
+ ],
166
+ "size": [
167
+ 380,
168
+ 58
169
+ ],
170
+ "flags": {},
171
+ "order": 3,
172
+ "mode": 0,
173
+ "inputs": [],
174
+ "outputs": [
175
+ {
176
+ "name": "CONTROL_NET",
177
+ "type": "CONTROL_NET",
178
+ "links": [
179
+ 192
180
+ ]
181
+ }
182
+ ],
183
+ "properties": {
184
+ "cnr_id": "comfy-core",
185
+ "ver": "0.3.51",
186
+ "Node name for S&R": "ControlNetLoader",
187
+ "models": [
188
+ {
189
+ "name": "Qwen-Image-InstantX-ControlNet-Inpainting.safetensors",
190
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image-InstantX-ControlNets/resolve/main/split_files/controlnet/Qwen-Image-InstantX-ControlNet-Inpainting.safetensors",
191
+ "directory": "controlnet"
192
+ }
193
+ ]
194
+ },
195
+ "widgets_values": [
196
+ "Qwen-Image-InstantX-ControlNet-Inpainting.safetensors"
197
+ ]
198
+ },
199
+ {
200
+ "id": 39,
201
+ "type": "VAELoader",
202
+ "pos": [
203
+ -120,
204
+ 290
205
+ ],
206
+ "size": [
207
+ 380,
208
+ 58
209
+ ],
210
+ "flags": {},
211
+ "order": 4,
212
+ "mode": 0,
213
+ "inputs": [],
214
+ "outputs": [
215
+ {
216
+ "name": "VAE",
217
+ "type": "VAE",
218
+ "slot_index": 0,
219
+ "links": [
220
+ 76,
221
+ 144,
222
+ 193
223
+ ]
224
+ }
225
+ ],
226
+ "properties": {
227
+ "cnr_id": "comfy-core",
228
+ "ver": "0.3.51",
229
+ "Node name for S&R": "VAELoader",
230
+ "models": [
231
+ {
232
+ "name": "qwen_image_vae.safetensors",
233
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors",
234
+ "directory": "vae"
235
+ }
236
+ ]
237
+ },
238
+ "widgets_values": [
239
+ "qwen_image_vae.safetensors"
240
+ ]
241
+ },
242
+ {
243
+ "id": 110,
244
+ "type": "VAEEncode",
245
+ "pos": [
246
+ 324.080078125,
247
+ 455
248
+ ],
249
+ "size": [
250
+ 140,
251
+ 46
252
+ ],
253
+ "flags": {},
254
+ "order": 14,
255
+ "mode": 0,
256
+ "inputs": [
257
+ {
258
+ "name": "pixels",
259
+ "type": "IMAGE",
260
+ "link": 197
261
+ },
262
+ {
263
+ "name": "vae",
264
+ "type": "VAE",
265
+ "link": null
266
+ }
267
+ ],
268
+ "outputs": [
269
+ {
270
+ "name": "LATENT",
271
+ "type": "LATENT",
272
+ "links": []
273
+ }
274
+ ],
275
+ "properties": {
276
+ "cnr_id": "comfy-core",
277
+ "ver": "0.3.59",
278
+ "Node name for S&R": "VAEEncode"
279
+ },
280
+ "widgets_values": []
281
+ },
282
+ {
283
+ "id": 66,
284
+ "type": "ModelSamplingAuraFlow",
285
+ "pos": [
286
+ 822.5421752929688,
287
+ -38.42329025268555
288
+ ],
289
+ "size": [
290
+ 310,
291
+ 58
292
+ ],
293
+ "flags": {},
294
+ "order": 16,
295
+ "mode": 0,
296
+ "inputs": [
297
+ {
298
+ "name": "model",
299
+ "type": "MODEL",
300
+ "link": 149
301
+ }
302
+ ],
303
+ "outputs": [
304
+ {
305
+ "name": "MODEL",
306
+ "type": "MODEL",
307
+ "links": [
308
+ 156
309
+ ]
310
+ }
311
+ ],
312
+ "properties": {
313
+ "cnr_id": "comfy-core",
314
+ "ver": "0.3.51",
315
+ "Node name for S&R": "ModelSamplingAuraFlow"
316
+ },
317
+ "widgets_values": [
318
+ 3.1000000000000005
319
+ ]
320
+ },
321
+ {
322
+ "id": 108,
323
+ "type": "ControlNetInpaintingAliMamaApply",
324
+ "pos": [
325
+ 391.0537109375,
326
+ 626.009521484375
327
+ ],
328
+ "size": [
329
+ 317.0093688964844,
330
+ 206
331
+ ],
332
+ "flags": {},
333
+ "order": 20,
334
+ "mode": 0,
335
+ "inputs": [
336
+ {
337
+ "name": "positive",
338
+ "type": "CONDITIONING",
339
+ "link": 190
340
+ },
341
+ {
342
+ "name": "negative",
343
+ "type": "CONDITIONING",
344
+ "link": 191
345
+ },
346
+ {
347
+ "name": "control_net",
348
+ "type": "CONTROL_NET",
349
+ "link": 192
350
+ },
351
+ {
352
+ "name": "vae",
353
+ "type": "VAE",
354
+ "link": 193
355
+ },
356
+ {
357
+ "name": "image",
358
+ "type": "IMAGE",
359
+ "link": 194
360
+ },
361
+ {
362
+ "name": "mask",
363
+ "type": "MASK",
364
+ "link": 195
365
+ }
366
+ ],
367
+ "outputs": [
368
+ {
369
+ "name": "positive",
370
+ "type": "CONDITIONING",
371
+ "links": [
372
+ 188
373
+ ]
374
+ },
375
+ {
376
+ "name": "negative",
377
+ "type": "CONDITIONING",
378
+ "links": [
379
+ 189
380
+ ]
381
+ }
382
+ ],
383
+ "properties": {
384
+ "cnr_id": "comfy-core",
385
+ "ver": "0.3.59",
386
+ "Node name for S&R": "ControlNetInpaintingAliMamaApply"
387
+ },
388
+ "widgets_values": [
389
+ 1,
390
+ 0,
391
+ 1
392
+ ]
393
+ },
394
+ {
395
+ "id": 76,
396
+ "type": "VAEEncode",
397
+ "pos": [
398
+ 557.8602294921875,
399
+ 905.5271606445312
400
+ ],
401
+ "size": [
402
+ 140,
403
+ 46
404
+ ],
405
+ "flags": {
406
+ "collapsed": false
407
+ },
408
+ "order": 19,
409
+ "mode": 0,
410
+ "inputs": [
411
+ {
412
+ "name": "pixels",
413
+ "type": "IMAGE",
414
+ "link": 143
415
+ },
416
+ {
417
+ "name": "vae",
418
+ "type": "VAE",
419
+ "link": 144
420
+ }
421
+ ],
422
+ "outputs": [
423
+ {
424
+ "name": "LATENT",
425
+ "type": "LATENT",
426
+ "links": [
427
+ 142
428
+ ]
429
+ }
430
+ ],
431
+ "properties": {
432
+ "cnr_id": "comfy-core",
433
+ "ver": "0.3.51",
434
+ "Node name for S&R": "VAEEncode"
435
+ },
436
+ "widgets_values": []
437
+ },
438
+ {
439
+ "id": 78,
440
+ "type": "MarkdownNote",
441
+ "pos": [
442
+ -690,
443
+ -50
444
+ ],
445
+ "size": [
446
+ 541.36865234375,
447
+ 579.70263671875
448
+ ],
449
+ "flags": {},
450
+ "order": 5,
451
+ "mode": 0,
452
+ "inputs": [],
453
+ "outputs": [],
454
+ "title": "Model links",
455
+ "properties": {
456
+ "widget_ue_connectable": {}
457
+ },
458
+ "widgets_values": [
459
+ "[Tutorial](https://docs.comfy.org/tutorials/image/qwen/qwen-image) | [教程](https://docs.comfy.org/zh-CN/tutorials/image/qwen/qwen-image)\n\n\n## Model links\n\nYou can find all the models on [Huggingface](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/tree/main) or [Modelscope](https://modelscope.cn/models/Comfy-Org/Qwen-Image_ComfyUI/files)\n\n**Diffusion model**\n\n- [qwen_image_fp8_e4m3fn.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors)\n\n**ControlNet**\n\n- [Qwen-Image-InstantX-ControlNet-Inpainting.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image-InstantX-ControlNets/resolve/main/split_files/controlnet/Qwen-Image-InstantX-ControlNet-Inpainting.safetensors)\n\n\n**LoRA**\n\n- [Qwen-Image-Lightning-4steps-V1.0.safetensors](https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0.safetensors)\n\n**Text encoder**\n\n- [qwen_2.5_vl_7b_fp8_scaled.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors)\n\n**VAE**\n\n- [qwen_image_vae.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors)\n\n\nModel Storage Location\n\n```\n📂 ComfyUI/\n├── 📂 models/\n│ ├── 📂 diffusion_models/\n│ │ ├── qwen_image_fp8_e4m3fn.safetensors\n│ │ └── qwen_image_distill_full_fp8_e4m3fn.safetensors\n│ ├── 📂 loras/\n│ │ └── Qwen-Image-Lightning-4steps-V1.0.safetensors\n│ ├── 📂 controlnet/ \n│ │ └── Qwen-Image-InstantX-ControlNet-Inpainting.safetensors\n│ ├── 📂 vae/\n│ │ └── qwen_image_vae.safetensors\n│ └── 📂 text_encoders/\n│ └── qwen_2.5_vl_7b_fp8_scaled.safetensors\n```\n"
460
+ ],
461
+ "color": "#432",
462
+ "bgcolor": "#653"
463
+ },
464
+ {
465
+ "id": 68,
466
+ "type": "Note",
467
+ "pos": [
468
+ 826.6304931640625,
469
+ -197.34292602539062
470
+ ],
471
+ "size": [
472
+ 310,
473
+ 90
474
+ ],
475
+ "flags": {},
476
+ "order": 6,
477
+ "mode": 0,
478
+ "inputs": [],
479
+ "outputs": [],
480
+ "properties": {},
481
+ "widgets_values": [
482
+ "Increase the shift if you get too many blurry/dark/bad images. Decrease if you want to try increasing detail."
483
+ ],
484
+ "color": "#432",
485
+ "bgcolor": "#653"
486
+ },
487
+ {
488
+ "id": 79,
489
+ "type": "MarkdownNote",
490
+ "pos": [
491
+ 853.4926147460938,
492
+ 840.671875
493
+ ],
494
+ "size": [
495
+ 310,
496
+ 140
497
+ ],
498
+ "flags": {},
499
+ "order": 7,
500
+ "mode": 0,
501
+ "inputs": [],
502
+ "outputs": [],
503
+ "title": "KSampler settings",
504
+ "properties": {},
505
+ "widgets_values": [
506
+ "You can test and find the best setting by yourself. The following table is for reference.\n\n| model | steps | cfg |\n|---------------------|---------------|---------------|\n| fp8_e4m3fn | 20 | 2.5 |\n| fp8_e4m3fn + 4 Steps lightning LoRA | 4 | 1.0 |\n"
507
+ ],
508
+ "color": "#432",
509
+ "bgcolor": "#653"
510
+ },
511
+ {
512
+ "id": 75,
513
+ "type": "ImageScaleToTotalPixels",
514
+ "pos": [
515
+ 390,
516
+ 1050
517
+ ],
518
+ "size": [
519
+ 270,
520
+ 82
521
+ ],
522
+ "flags": {},
523
+ "order": 17,
524
+ "mode": 0,
525
+ "inputs": [
526
+ {
527
+ "name": "image",
528
+ "type": "IMAGE",
529
+ "link": 201
530
+ }
531
+ ],
532
+ "outputs": [
533
+ {
534
+ "name": "IMAGE",
535
+ "type": "IMAGE",
536
+ "links": [
537
+ 143,
538
+ 194
539
+ ]
540
+ }
541
+ ],
542
+ "properties": {
543
+ "cnr_id": "comfy-core",
544
+ "ver": "0.3.51",
545
+ "Node name for S&R": "ImageScaleToTotalPixels"
546
+ },
547
+ "widgets_values": [
548
+ "area",
549
+ 1.68
550
+ ]
551
+ },
552
+ {
553
+ "id": 115,
554
+ "type": "MarkdownNote",
555
+ "pos": [
556
+ -462.4257507324219,
557
+ 588.766845703125
558
+ ],
559
+ "size": [
560
+ 307.56927490234375,
561
+ 169.79689025878906
562
+ ],
563
+ "flags": {},
564
+ "order": 8,
565
+ "mode": 0,
566
+ "inputs": [],
567
+ "outputs": [],
568
+ "title": "About how to create mask",
569
+ "properties": {},
570
+ "widgets_values": [
571
+ "Right-click on the Load Image node, then click \"Open in MaskEditor\" to open it and paint the area you want to inpaint.\n\nYou can learn more about MaskEditor in the [MaskEditor Document](https://docs.comfy.org/interface/maskeditor)"
572
+ ],
573
+ "color": "#432",
574
+ "bgcolor": "#653"
575
+ },
576
+ {
577
+ "id": 117,
578
+ "type": "MarkdownNote",
579
+ "pos": [
580
+ -460,
581
+ 1040
582
+ ],
583
+ "size": [
584
+ 307.56927490234375,
585
+ 169.79689025878906
586
+ ],
587
+ "flags": {},
588
+ "order": 9,
589
+ "mode": 0,
590
+ "inputs": [],
591
+ "outputs": [],
592
+ "title": "About outpainting",
593
+ "properties": {},
594
+ "widgets_values": [
595
+ "For outpainting, you should use the mask from the **Pad Image for Outpainting** node."
596
+ ],
597
+ "color": "#432",
598
+ "bgcolor": "#653"
599
+ },
600
+ {
601
+ "id": 116,
602
+ "type": "ImagePadForOutpaint",
603
+ "pos": [
604
+ -50,
605
+ 1100
606
+ ],
607
+ "size": [
608
+ 270,
609
+ 174
610
+ ],
611
+ "flags": {},
612
+ "order": 15,
613
+ "mode": 4,
614
+ "inputs": [
615
+ {
616
+ "name": "image",
617
+ "type": "IMAGE",
618
+ "link": 200
619
+ }
620
+ ],
621
+ "outputs": [
622
+ {
623
+ "name": "IMAGE",
624
+ "type": "IMAGE",
625
+ "links": [
626
+ 201,
627
+ 202
628
+ ]
629
+ },
630
+ {
631
+ "name": "MASK",
632
+ "type": "MASK",
633
+ "links": null
634
+ }
635
+ ],
636
+ "properties": {
637
+ "cnr_id": "comfy-core",
638
+ "ver": "0.3.59",
639
+ "Node name for S&R": "ImagePadForOutpaint"
640
+ },
641
+ "widgets_values": [
642
+ 104,
643
+ 104,
644
+ 104,
645
+ 0,
646
+ 40
647
+ ]
648
+ },
649
+ {
650
+ "id": 118,
651
+ "type": "PreviewImage",
652
+ "pos": [
653
+ 400,
654
+ 1200
655
+ ],
656
+ "size": [
657
+ 140,
658
+ 26
659
+ ],
660
+ "flags": {},
661
+ "order": 18,
662
+ "mode": 4,
663
+ "inputs": [
664
+ {
665
+ "name": "images",
666
+ "type": "IMAGE",
667
+ "link": 202
668
+ }
669
+ ],
670
+ "outputs": [],
671
+ "properties": {
672
+ "cnr_id": "comfy-core",
673
+ "ver": "0.3.59",
674
+ "Node name for S&R": "PreviewImage"
675
+ },
676
+ "widgets_values": []
677
+ },
678
+ {
679
+ "id": 71,
680
+ "type": "LoadImage",
681
+ "pos": [
682
+ -73.20216369628906,
683
+ 657.9524536132812
684
+ ],
685
+ "size": [
686
+ 274.080078125,
687
+ 314.00006103515625
688
+ ],
689
+ "flags": {},
690
+ "order": 10,
691
+ "mode": 0,
692
+ "inputs": [],
693
+ "outputs": [
694
+ {
695
+ "name": "IMAGE",
696
+ "type": "IMAGE",
697
+ "links": [
698
+ 197,
699
+ 200
700
+ ]
701
+ },
702
+ {
703
+ "name": "MASK",
704
+ "type": "MASK",
705
+ "links": [
706
+ 195
707
+ ]
708
+ }
709
+ ],
710
+ "properties": {
711
+ "cnr_id": "comfy-core",
712
+ "ver": "0.3.51",
713
+ "Node name for S&R": "LoadImage"
714
+ },
715
+ "widgets_values": [
716
+ "clipspace/clipspace-painted-masked-2629565.png [input]",
717
+ "image"
718
+ ]
719
+ },
720
+ {
721
+ "id": 7,
722
+ "type": "CLIPTextEncode",
723
+ "pos": [
724
+ 300,
725
+ 380
726
+ ],
727
+ "size": [
728
+ 460,
729
+ 140
730
+ ],
731
+ "flags": {},
732
+ "order": 12,
733
+ "mode": 0,
734
+ "inputs": [
735
+ {
736
+ "name": "clip",
737
+ "type": "CLIP",
738
+ "link": 75
739
+ }
740
+ ],
741
+ "outputs": [
742
+ {
743
+ "name": "CONDITIONING",
744
+ "type": "CONDITIONING",
745
+ "slot_index": 0,
746
+ "links": [
747
+ 191
748
+ ]
749
+ }
750
+ ],
751
+ "title": "CLIP Text Encode (Negative Prompt)",
752
+ "properties": {
753
+ "cnr_id": "comfy-core",
754
+ "ver": "0.3.51",
755
+ "Node name for S&R": "CLIPTextEncode"
756
+ },
757
+ "widgets_values": [
758
+ " "
759
+ ],
760
+ "color": "#223",
761
+ "bgcolor": "#335"
762
+ },
763
+ {
764
+ "id": 80,
765
+ "type": "LoraLoaderModelOnly",
766
+ "pos": [
767
+ 320,
768
+ -10
769
+ ],
770
+ "size": [
771
+ 430,
772
+ 82
773
+ ],
774
+ "flags": {},
775
+ "order": 13,
776
+ "mode": 0,
777
+ "inputs": [
778
+ {
779
+ "name": "model",
780
+ "type": "MODEL",
781
+ "link": 145
782
+ }
783
+ ],
784
+ "outputs": [
785
+ {
786
+ "name": "MODEL",
787
+ "type": "MODEL",
788
+ "links": [
789
+ 149
790
+ ]
791
+ }
792
+ ],
793
+ "properties": {
794
+ "cnr_id": "comfy-core",
795
+ "ver": "0.3.51",
796
+ "Node name for S&R": "LoraLoaderModelOnly",
797
+ "models": [
798
+ {
799
+ "name": "Qwen-Image-Lightning-4steps-V1.0.safetensors",
800
+ "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0.safetensors",
801
+ "directory": "loras"
802
+ }
803
+ ]
804
+ },
805
+ "widgets_values": [
806
+ "Qwen-Image-Lightning-4steps-V1.0.safetensors",
807
+ 1
808
+ ]
809
+ },
810
+ {
811
+ "id": 6,
812
+ "type": "CLIPTextEncode",
813
+ "pos": [
814
+ 300,
815
+ 170
816
+ ],
817
+ "size": [
818
+ 460,
819
+ 164.31304931640625
820
+ ],
821
+ "flags": {},
822
+ "order": 11,
823
+ "mode": 0,
824
+ "inputs": [
825
+ {
826
+ "name": "clip",
827
+ "type": "CLIP",
828
+ "link": 74
829
+ }
830
+ ],
831
+ "outputs": [
832
+ {
833
+ "name": "CONDITIONING",
834
+ "type": "CONDITIONING",
835
+ "slot_index": 0,
836
+ "links": [
837
+ 190
838
+ ]
839
+ }
840
+ ],
841
+ "title": "CLIP Text Encode (Positive Prompt)",
842
+ "properties": {
843
+ "cnr_id": "comfy-core",
844
+ "ver": "0.3.51",
845
+ "Node name for S&R": "CLIPTextEncode"
846
+ },
847
+ "widgets_values": [
848
+ "男孩戴了一只大白兔鸭舌帽"
849
+ ],
850
+ "color": "#232",
851
+ "bgcolor": "#353"
852
+ },
853
+ {
854
+ "id": 3,
855
+ "type": "KSampler",
856
+ "pos": [
857
+ 847.4144287109375,
858
+ 80.38726043701172
859
+ ],
860
+ "size": [
861
+ 310,
862
+ 430
863
+ ],
864
+ "flags": {},
865
+ "order": 21,
866
+ "mode": 0,
867
+ "inputs": [
868
+ {
869
+ "name": "model",
870
+ "type": "MODEL",
871
+ "link": 156
872
+ },
873
+ {
874
+ "name": "positive",
875
+ "type": "CONDITIONING",
876
+ "link": 188
877
+ },
878
+ {
879
+ "name": "negative",
880
+ "type": "CONDITIONING",
881
+ "link": 189
882
+ },
883
+ {
884
+ "name": "latent_image",
885
+ "type": "LATENT",
886
+ "link": 142
887
+ }
888
+ ],
889
+ "outputs": [
890
+ {
891
+ "name": "LATENT",
892
+ "type": "LATENT",
893
+ "slot_index": 0,
894
+ "links": [
895
+ 128
896
+ ]
897
+ }
898
+ ],
899
+ "properties": {
900
+ "cnr_id": "comfy-core",
901
+ "ver": "0.3.51",
902
+ "Node name for S&R": "KSampler"
903
+ },
904
+ "widgets_values": [
905
+ 1043693800229646,
906
+ "randomize",
907
+ 4,
908
+ 1,
909
+ "euler",
910
+ "simple",
911
+ 1
912
+ ]
913
+ },
914
+ {
915
+ "id": 60,
916
+ "type": "SaveImage",
917
+ "pos": [
918
+ 1200.7646484375,
919
+ 81.55579376220703
920
+ ],
921
+ "size": [
922
+ 970,
923
+ 1030
924
+ ],
925
+ "flags": {},
926
+ "order": 23,
927
+ "mode": 0,
928
+ "inputs": [
929
+ {
930
+ "name": "images",
931
+ "type": "IMAGE",
932
+ "link": 110
933
+ }
934
+ ],
935
+ "outputs": [],
936
+ "properties": {
937
+ "cnr_id": "comfy-core",
938
+ "ver": "0.3.51"
939
+ },
940
+ "widgets_values": [
941
+ "ComfyUI"
942
+ ]
943
+ }
944
+ ],
945
+ "links": [
946
+ [
947
+ 74,
948
+ 38,
949
+ 0,
950
+ 6,
951
+ 0,
952
+ "CLIP"
953
+ ],
954
+ [
955
+ 75,
956
+ 38,
957
+ 0,
958
+ 7,
959
+ 0,
960
+ "CLIP"
961
+ ],
962
+ [
963
+ 76,
964
+ 39,
965
+ 0,
966
+ 8,
967
+ 1,
968
+ "VAE"
969
+ ],
970
+ [
971
+ 110,
972
+ 8,
973
+ 0,
974
+ 60,
975
+ 0,
976
+ "IMAGE"
977
+ ],
978
+ [
979
+ 128,
980
+ 3,
981
+ 0,
982
+ 8,
983
+ 0,
984
+ "LATENT"
985
+ ],
986
+ [
987
+ 142,
988
+ 76,
989
+ 0,
990
+ 3,
991
+ 3,
992
+ "LATENT"
993
+ ],
994
+ [
995
+ 143,
996
+ 75,
997
+ 0,
998
+ 76,
999
+ 0,
1000
+ "IMAGE"
1001
+ ],
1002
+ [
1003
+ 144,
1004
+ 39,
1005
+ 0,
1006
+ 76,
1007
+ 1,
1008
+ "VAE"
1009
+ ],
1010
+ [
1011
+ 145,
1012
+ 37,
1013
+ 0,
1014
+ 80,
1015
+ 0,
1016
+ "MODEL"
1017
+ ],
1018
+ [
1019
+ 149,
1020
+ 80,
1021
+ 0,
1022
+ 66,
1023
+ 0,
1024
+ "MODEL"
1025
+ ],
1026
+ [
1027
+ 156,
1028
+ 66,
1029
+ 0,
1030
+ 3,
1031
+ 0,
1032
+ "MODEL"
1033
+ ],
1034
+ [
1035
+ 188,
1036
+ 108,
1037
+ 0,
1038
+ 3,
1039
+ 1,
1040
+ "CONDITIONING"
1041
+ ],
1042
+ [
1043
+ 189,
1044
+ 108,
1045
+ 1,
1046
+ 3,
1047
+ 2,
1048
+ "CONDITIONING"
1049
+ ],
1050
+ [
1051
+ 190,
1052
+ 6,
1053
+ 0,
1054
+ 108,
1055
+ 0,
1056
+ "CONDITIONING"
1057
+ ],
1058
+ [
1059
+ 191,
1060
+ 7,
1061
+ 0,
1062
+ 108,
1063
+ 1,
1064
+ "CONDITIONING"
1065
+ ],
1066
+ [
1067
+ 192,
1068
+ 84,
1069
+ 0,
1070
+ 108,
1071
+ 2,
1072
+ "CONTROL_NET"
1073
+ ],
1074
+ [
1075
+ 193,
1076
+ 39,
1077
+ 0,
1078
+ 108,
1079
+ 3,
1080
+ "VAE"
1081
+ ],
1082
+ [
1083
+ 194,
1084
+ 75,
1085
+ 0,
1086
+ 108,
1087
+ 4,
1088
+ "IMAGE"
1089
+ ],
1090
+ [
1091
+ 195,
1092
+ 71,
1093
+ 1,
1094
+ 108,
1095
+ 5,
1096
+ "MASK"
1097
+ ],
1098
+ [
1099
+ 197,
1100
+ 71,
1101
+ 0,
1102
+ 110,
1103
+ 0,
1104
+ "IMAGE"
1105
+ ],
1106
+ [
1107
+ 200,
1108
+ 71,
1109
+ 0,
1110
+ 116,
1111
+ 0,
1112
+ "IMAGE"
1113
+ ],
1114
+ [
1115
+ 201,
1116
+ 116,
1117
+ 0,
1118
+ 75,
1119
+ 0,
1120
+ "IMAGE"
1121
+ ],
1122
+ [
1123
+ 202,
1124
+ 116,
1125
+ 0,
1126
+ 118,
1127
+ 0,
1128
+ "IMAGE"
1129
+ ]
1130
+ ],
1131
+ "groups": [
1132
+ {
1133
+ "id": 1,
1134
+ "title": "Step 1 - Upload models",
1135
+ "bounding": [
1136
+ -130,
1137
+ -80,
1138
+ 400,
1139
+ 610
1140
+ ],
1141
+ "color": "#3f789e",
1142
+ "font_size": 24,
1143
+ "flags": {}
1144
+ },
1145
+ {
1146
+ "id": 2,
1147
+ "title": "Step 2 - Upload image and edit mask",
1148
+ "bounding": [
1149
+ -130,
1150
+ 550,
1151
+ 420,
1152
+ 440
1153
+ ],
1154
+ "color": "#3f789e",
1155
+ "font_size": 24,
1156
+ "flags": {}
1157
+ },
1158
+ {
1159
+ "id": 4,
1160
+ "title": "Step 3 - Prompt",
1161
+ "bounding": [
1162
+ 290,
1163
+ 100,
1164
+ 490,
1165
+ 430
1166
+ ],
1167
+ "color": "#3f789e",
1168
+ "font_size": 24,
1169
+ "flags": {}
1170
+ },
1171
+ {
1172
+ "id": 5,
1173
+ "title": "4 steps lightning LoRA",
1174
+ "bounding": [
1175
+ 290,
1176
+ -80,
1177
+ 490,
1178
+ 160
1179
+ ],
1180
+ "color": "#3f789e",
1181
+ "font_size": 24,
1182
+ "flags": {}
1183
+ },
1184
+ {
1185
+ "id": 6,
1186
+ "title": "Press Ctrl-B to enable it for outpainting",
1187
+ "bounding": [
1188
+ -130,
1189
+ 1010,
1190
+ 420,
1191
+ 290
1192
+ ],
1193
+ "color": "#3f789e",
1194
+ "font_size": 24,
1195
+ "flags": {}
1196
+ }
1197
+ ],
1198
+ "config": {},
1199
+ "extra": {
1200
+ "ds": {
1201
+ "scale": 0.7045843932024487,
1202
+ "offset": [
1203
+ 550.381121997097,
1204
+ 270.5055785483746
1205
+ ]
1206
+ },
1207
+ "frontendVersion": "1.26.11",
1208
+ "groupNodes": {},
1209
+ "VHS_latentpreview": false,
1210
+ "VHS_latentpreviewrate": 0,
1211
+ "VHS_MetadataImage": true,
1212
+ "VHS_KeepIntermediate": true
1213
+ },
1214
+ "version": 0.4
1215
+ }