svjack committed on
Commit 8fb9d49 · verified · 1 Parent(s): 7f1c7a2

Upload qwen_image_instantx_outpainting_controlnet.json

qwen_image_instantx_outpainting_controlnet.json ADDED
@@ -0,0 +1,1215 @@
1
+ {
2
+ "id": "91f6bbe2-ed41-4fd6-bac7-71d5b5864ecb",
3
+ "revision": 0,
4
+ "last_node_id": 118,
5
+ "last_link_id": 204,
6
+ "nodes": [
7
+ {
8
+ "id": 38,
9
+ "type": "CLIPLoader",
10
+ "pos": [
11
+ -120,
12
+ 130
13
+ ],
14
+ "size": [
15
+ 380,
16
+ 106
17
+ ],
18
+ "flags": {},
19
+ "order": 0,
20
+ "mode": 0,
21
+ "inputs": [],
22
+ "outputs": [
23
+ {
24
+ "name": "CLIP",
25
+ "type": "CLIP",
26
+ "slot_index": 0,
27
+ "links": [
28
+ 74,
29
+ 75
30
+ ]
31
+ }
32
+ ],
33
+ "properties": {
34
+ "cnr_id": "comfy-core",
35
+ "ver": "0.3.51",
36
+ "Node name for S&R": "CLIPLoader",
37
+ "models": [
38
+ {
39
+ "name": "qwen_2.5_vl_7b_fp8_scaled.safetensors",
40
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors",
41
+ "directory": "text_encoders"
42
+ }
43
+ ]
44
+ },
45
+ "widgets_values": [
46
+ "qwen_2.5_vl_7b_fp8_scaled.safetensors",
47
+ "qwen_image",
48
+ "default"
49
+ ]
50
+ },
51
+ {
52
+ "id": 37,
53
+ "type": "UNETLoader",
54
+ "pos": [
55
+ -120,
56
+ 0
57
+ ],
58
+ "size": [
59
+ 380,
60
+ 82
61
+ ],
62
+ "flags": {},
63
+ "order": 1,
64
+ "mode": 0,
65
+ "inputs": [],
66
+ "outputs": [
67
+ {
68
+ "name": "MODEL",
69
+ "type": "MODEL",
70
+ "slot_index": 0,
71
+ "links": [
72
+ 145
73
+ ]
74
+ }
75
+ ],
76
+ "properties": {
77
+ "cnr_id": "comfy-core",
78
+ "ver": "0.3.51",
79
+ "Node name for S&R": "UNETLoader",
80
+ "models": [
81
+ {
82
+ "name": "qwen_image_fp8_e4m3fn.safetensors",
83
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors",
84
+ "directory": "diffusion_models"
85
+ }
86
+ ]
87
+ },
88
+ "widgets_values": [
89
+ "qwen_image_fp8_e4m3fn.safetensors",
90
+ "default"
91
+ ]
92
+ },
93
+ {
94
+ "id": 8,
95
+ "type": "VAEDecode",
96
+ "pos": [
97
+ 847.4144287109375,
98
+ 560.3872680664062
99
+ ],
100
+ "size": [
101
+ 310,
102
+ 46
103
+ ],
104
+ "flags": {},
105
+ "order": 22,
106
+ "mode": 0,
107
+ "inputs": [
108
+ {
109
+ "name": "samples",
110
+ "type": "LATENT",
111
+ "link": 128
112
+ },
113
+ {
114
+ "name": "vae",
115
+ "type": "VAE",
116
+ "link": 76
117
+ }
118
+ ],
119
+ "outputs": [
120
+ {
121
+ "name": "IMAGE",
122
+ "type": "IMAGE",
123
+ "slot_index": 0,
124
+ "links": [
125
+ 110
126
+ ]
127
+ }
128
+ ],
129
+ "properties": {
130
+ "cnr_id": "comfy-core",
131
+ "ver": "0.3.51",
132
+ "Node name for S&R": "VAEDecode"
133
+ },
134
+ "widgets_values": []
135
+ },
136
+ {
137
+ "id": 86,
138
+ "type": "Note",
139
+ "pos": [
140
+ 847.4144287109375,
141
+ 660.3873901367188
142
+ ],
143
+ "size": [
144
+ 307.4002380371094,
145
+ 127.38092803955078
146
+ ],
147
+ "flags": {},
148
+ "order": 2,
149
+ "mode": 0,
150
+ "inputs": [],
151
+ "outputs": [],
152
+ "properties": {},
153
+ "widgets_values": [
154
+ "Set cfg to 1.0 for a speed boost at the cost of consistency. Samplers like res_multistep work pretty well at cfg 1.0\n\nThe official number of steps is 50 but I think that's too much. Even just 10 steps seems to work."
155
+ ],
156
+ "color": "#432",
157
+ "bgcolor": "#653"
158
+ },
159
+ {
160
+ "id": 84,
161
+ "type": "ControlNetLoader",
162
+ "pos": [
163
+ -120,
164
+ 400
165
+ ],
166
+ "size": [
167
+ 380,
168
+ 58
169
+ ],
170
+ "flags": {},
171
+ "order": 3,
172
+ "mode": 0,
173
+ "inputs": [],
174
+ "outputs": [
175
+ {
176
+ "name": "CONTROL_NET",
177
+ "type": "CONTROL_NET",
178
+ "links": [
179
+ 192
180
+ ]
181
+ }
182
+ ],
183
+ "properties": {
184
+ "cnr_id": "comfy-core",
185
+ "ver": "0.3.51",
186
+ "Node name for S&R": "ControlNetLoader",
187
+ "models": [
188
+ {
189
+ "name": "Qwen-Image-InstantX-ControlNet-Inpainting.safetensors",
190
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image-InstantX-ControlNets/resolve/main/split_files/controlnet/Qwen-Image-InstantX-ControlNet-Inpainting.safetensors",
191
+ "directory": "controlnet"
192
+ }
193
+ ]
194
+ },
195
+ "widgets_values": [
196
+ "Qwen-Image-InstantX-ControlNet-Inpainting.safetensors"
197
+ ]
198
+ },
199
+ {
200
+ "id": 39,
201
+ "type": "VAELoader",
202
+ "pos": [
203
+ -120,
204
+ 290
205
+ ],
206
+ "size": [
207
+ 380,
208
+ 58
209
+ ],
210
+ "flags": {},
211
+ "order": 4,
212
+ "mode": 0,
213
+ "inputs": [],
214
+ "outputs": [
215
+ {
216
+ "name": "VAE",
217
+ "type": "VAE",
218
+ "slot_index": 0,
219
+ "links": [
220
+ 76,
221
+ 144,
222
+ 193
223
+ ]
224
+ }
225
+ ],
226
+ "properties": {
227
+ "cnr_id": "comfy-core",
228
+ "ver": "0.3.51",
229
+ "Node name for S&R": "VAELoader",
230
+ "models": [
231
+ {
232
+ "name": "qwen_image_vae.safetensors",
233
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors",
234
+ "directory": "vae"
235
+ }
236
+ ]
237
+ },
238
+ "widgets_values": [
239
+ "qwen_image_vae.safetensors"
240
+ ]
241
+ },
242
+ {
243
+ "id": 110,
244
+ "type": "VAEEncode",
245
+ "pos": [
246
+ 324.080078125,
247
+ 455
248
+ ],
249
+ "size": [
250
+ 140,
251
+ 46
252
+ ],
253
+ "flags": {},
254
+ "order": 20,
255
+ "mode": 0,
256
+ "inputs": [
257
+ {
258
+ "name": "pixels",
259
+ "type": "IMAGE",
260
+ "link": 204
261
+ },
262
+ {
263
+ "name": "vae",
264
+ "type": "VAE",
265
+ "link": null
266
+ }
267
+ ],
268
+ "outputs": [
269
+ {
270
+ "name": "LATENT",
271
+ "type": "LATENT",
272
+ "links": []
273
+ }
274
+ ],
275
+ "properties": {
276
+ "cnr_id": "comfy-core",
277
+ "ver": "0.3.59",
278
+ "Node name for S&R": "VAEEncode"
279
+ },
280
+ "widgets_values": []
281
+ },
282
+ {
283
+ "id": 66,
284
+ "type": "ModelSamplingAuraFlow",
285
+ "pos": [
286
+ 822.5421752929688,
287
+ -38.42329025268555
288
+ ],
289
+ "size": [
290
+ 310,
291
+ 58
292
+ ],
293
+ "flags": {},
294
+ "order": 15,
295
+ "mode": 0,
296
+ "inputs": [
297
+ {
298
+ "name": "model",
299
+ "type": "MODEL",
300
+ "link": 149
301
+ }
302
+ ],
303
+ "outputs": [
304
+ {
305
+ "name": "MODEL",
306
+ "type": "MODEL",
307
+ "links": [
308
+ 156
309
+ ]
310
+ }
311
+ ],
312
+ "properties": {
313
+ "cnr_id": "comfy-core",
314
+ "ver": "0.3.51",
315
+ "Node name for S&R": "ModelSamplingAuraFlow"
316
+ },
317
+ "widgets_values": [
318
+ 3.1000000000000005
319
+ ]
320
+ },
321
+ {
322
+ "id": 108,
323
+ "type": "ControlNetInpaintingAliMamaApply",
324
+ "pos": [
325
+ 391.0537109375,
326
+ 626.009521484375
327
+ ],
328
+ "size": [
329
+ 317.0093688964844,
330
+ 206
331
+ ],
332
+ "flags": {},
333
+ "order": 19,
334
+ "mode": 0,
335
+ "inputs": [
336
+ {
337
+ "name": "positive",
338
+ "type": "CONDITIONING",
339
+ "link": 190
340
+ },
341
+ {
342
+ "name": "negative",
343
+ "type": "CONDITIONING",
344
+ "link": 191
345
+ },
346
+ {
347
+ "name": "control_net",
348
+ "type": "CONTROL_NET",
349
+ "link": 192
350
+ },
351
+ {
352
+ "name": "vae",
353
+ "type": "VAE",
354
+ "link": 193
355
+ },
356
+ {
357
+ "name": "image",
358
+ "type": "IMAGE",
359
+ "link": 194
360
+ },
361
+ {
362
+ "name": "mask",
363
+ "type": "MASK",
364
+ "link": 203
365
+ }
366
+ ],
367
+ "outputs": [
368
+ {
369
+ "name": "positive",
370
+ "type": "CONDITIONING",
371
+ "links": [
372
+ 188
373
+ ]
374
+ },
375
+ {
376
+ "name": "negative",
377
+ "type": "CONDITIONING",
378
+ "links": [
379
+ 189
380
+ ]
381
+ }
382
+ ],
383
+ "properties": {
384
+ "cnr_id": "comfy-core",
385
+ "ver": "0.3.59",
386
+ "Node name for S&R": "ControlNetInpaintingAliMamaApply"
387
+ },
388
+ "widgets_values": [
389
+ 1,
390
+ 0,
391
+ 1
392
+ ]
393
+ },
394
+ {
395
+ "id": 76,
396
+ "type": "VAEEncode",
397
+ "pos": [
398
+ 557.8602294921875,
399
+ 905.5271606445312
400
+ ],
401
+ "size": [
402
+ 140,
403
+ 46
404
+ ],
405
+ "flags": {
406
+ "collapsed": false
407
+ },
408
+ "order": 18,
409
+ "mode": 0,
410
+ "inputs": [
411
+ {
412
+ "name": "pixels",
413
+ "type": "IMAGE",
414
+ "link": 143
415
+ },
416
+ {
417
+ "name": "vae",
418
+ "type": "VAE",
419
+ "link": 144
420
+ }
421
+ ],
422
+ "outputs": [
423
+ {
424
+ "name": "LATENT",
425
+ "type": "LATENT",
426
+ "links": [
427
+ 142
428
+ ]
429
+ }
430
+ ],
431
+ "properties": {
432
+ "cnr_id": "comfy-core",
433
+ "ver": "0.3.51",
434
+ "Node name for S&R": "VAEEncode"
435
+ },
436
+ "widgets_values": []
437
+ },
438
+ {
439
+ "id": 78,
440
+ "type": "MarkdownNote",
441
+ "pos": [
442
+ -690,
443
+ -50
444
+ ],
445
+ "size": [
446
+ 541.36865234375,
447
+ 579.70263671875
448
+ ],
449
+ "flags": {},
450
+ "order": 5,
451
+ "mode": 0,
452
+ "inputs": [],
453
+ "outputs": [],
454
+ "title": "Model links",
455
+ "properties": {
456
+ "widget_ue_connectable": {}
457
+ },
458
+ "widgets_values": [
459
+ "[Tutorial](https://docs.comfy.org/tutorials/image/qwen/qwen-image) | [教程](https://docs.comfy.org/zh-CN/tutorials/image/qwen/qwen-image)\n\n\n## Model links\n\nYou can find all the models on [Huggingface](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/tree/main) or [Modelscope](https://modelscope.cn/models/Comfy-Org/Qwen-Image_ComfyUI/files)\n\n**Diffusion model**\n\n- [qwen_image_fp8_e4m3fn.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors)\n\n**ControlNet**\n\n- [Qwen-Image-InstantX-ControlNet-Inpainting.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image-InstantX-ControlNets/resolve/main/split_files/controlnet/Qwen-Image-InstantX-ControlNet-Inpainting.safetensors)\n\n\n**LoRA**\n\n- [Qwen-Image-Lightning-4steps-V1.0.safetensors](https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0.safetensors)\n\n**Text encoder**\n\n- [qwen_2.5_vl_7b_fp8_scaled.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors)\n\n**VAE**\n\n- [qwen_image_vae.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors)\n\n\nModel Storage Location\n\n```\n📂 ComfyUI/\n├── 📂 models/\n│ ├── 📂 diffusion_models/\n│ │ ├── qwen_image_fp8_e4m3fn.safetensors\n│ │ └── qwen_image_distill_full_fp8_e4m3fn.safetensors\n│ ├── 📂 loras/\n│ │ └── Qwen-Image-Lightning-8steps-V1.0.safetensors\n│ ├── 📂 controlnet/ \n│ │ └── Qwen-Image-InstantX-ControlNet-Inpainting.safetensors\n│ ├── 📂 vae/\n│ │ └── qwen_image_vae.safetensors\n│ └── 📂 text_encoders/\n│ └── qwen_2.5_vl_7b_fp8_scaled.safetensors\n```\n"
460
+ ],
461
+ "color": "#432",
462
+ "bgcolor": "#653"
463
+ },
464
+ {
465
+ "id": 68,
466
+ "type": "Note",
467
+ "pos": [
468
+ 826.6304931640625,
469
+ -197.34292602539062
470
+ ],
471
+ "size": [
472
+ 310,
473
+ 90
474
+ ],
475
+ "flags": {},
476
+ "order": 6,
477
+ "mode": 0,
478
+ "inputs": [],
479
+ "outputs": [],
480
+ "properties": {},
481
+ "widgets_values": [
482
+ "Increase the shift if you get too many blury/dark/bad images. Decrease if you want to try increasing detail."
483
+ ],
484
+ "color": "#432",
485
+ "bgcolor": "#653"
486
+ },
487
+ {
488
+ "id": 79,
489
+ "type": "MarkdownNote",
490
+ "pos": [
491
+ 853.4926147460938,
492
+ 840.671875
493
+ ],
494
+ "size": [
495
+ 310,
496
+ 140
497
+ ],
498
+ "flags": {},
499
+ "order": 7,
500
+ "mode": 0,
501
+ "inputs": [],
502
+ "outputs": [],
503
+ "title": "KSampler settings",
504
+ "properties": {},
505
+ "widgets_values": [
506
+ "You can test and find the best setting by yourself. The following table is for reference.\n\n| model | steps | cfg |\n|---------------------|---------------|---------------|\n| fp8_e4m3fn | 20 | 2.5 |\n| fp8_e4m3fn + 4 Steps lightning LoRA | 4 | 1.0 |\n"
507
+ ],
508
+ "color": "#432",
509
+ "bgcolor": "#653"
510
+ },
511
+ {
512
+ "id": 115,
513
+ "type": "MarkdownNote",
514
+ "pos": [
515
+ -462.4257507324219,
516
+ 588.766845703125
517
+ ],
518
+ "size": [
519
+ 307.56927490234375,
520
+ 169.79689025878906
521
+ ],
522
+ "flags": {},
523
+ "order": 8,
524
+ "mode": 0,
525
+ "inputs": [],
526
+ "outputs": [],
527
+ "title": "About how to create mask",
528
+ "properties": {},
529
+ "widgets_values": [
530
+ "Right-click on the Load Image node, then click \"Open in MaskEditor\" to open it and paint the area you want to inpaint.\n\nYou can learn more about MaskEditor in the [MaskEditor Document](https://docs.comfy.org/interface/maskeditor)"
531
+ ],
532
+ "color": "#432",
533
+ "bgcolor": "#653"
534
+ },
535
+ {
536
+ "id": 117,
537
+ "type": "MarkdownNote",
538
+ "pos": [
539
+ -460,
540
+ 1040
541
+ ],
542
+ "size": [
543
+ 307.56927490234375,
544
+ 169.79689025878906
545
+ ],
546
+ "flags": {},
547
+ "order": 9,
548
+ "mode": 0,
549
+ "inputs": [],
550
+ "outputs": [],
551
+ "title": "About outpainting",
552
+ "properties": {},
553
+ "widgets_values": [
554
+ "For outpainting, you should use the mask from **Pad Image for Outpaintin*g** node "
555
+ ],
556
+ "color": "#432",
557
+ "bgcolor": "#653"
558
+ },
559
+ {
560
+ "id": 80,
561
+ "type": "LoraLoaderModelOnly",
562
+ "pos": [
563
+ 320,
564
+ -10
565
+ ],
566
+ "size": [
567
+ 430,
568
+ 82
569
+ ],
570
+ "flags": {},
571
+ "order": 13,
572
+ "mode": 0,
573
+ "inputs": [
574
+ {
575
+ "name": "model",
576
+ "type": "MODEL",
577
+ "link": 145
578
+ }
579
+ ],
580
+ "outputs": [
581
+ {
582
+ "name": "MODEL",
583
+ "type": "MODEL",
584
+ "links": [
585
+ 149
586
+ ]
587
+ }
588
+ ],
589
+ "properties": {
590
+ "cnr_id": "comfy-core",
591
+ "ver": "0.3.51",
592
+ "Node name for S&R": "LoraLoaderModelOnly",
593
+ "models": [
594
+ {
595
+ "name": "Qwen-Image-Lightning-4steps-V1.0.safetensors",
596
+ "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0.safetensors",
597
+ "directory": "loras"
598
+ }
599
+ ]
600
+ },
601
+ "widgets_values": [
602
+ "Qwen-Image-Lightning-4steps-V1.0.safetensors",
603
+ 1
604
+ ]
605
+ },
606
+ {
607
+ "id": 3,
608
+ "type": "KSampler",
609
+ "pos": [
610
+ 847.4144287109375,
611
+ 80.38726043701172
612
+ ],
613
+ "size": [
614
+ 310,
615
+ 430
616
+ ],
617
+ "flags": {},
618
+ "order": 21,
619
+ "mode": 0,
620
+ "inputs": [
621
+ {
622
+ "name": "model",
623
+ "type": "MODEL",
624
+ "link": 156
625
+ },
626
+ {
627
+ "name": "positive",
628
+ "type": "CONDITIONING",
629
+ "link": 188
630
+ },
631
+ {
632
+ "name": "negative",
633
+ "type": "CONDITIONING",
634
+ "link": 189
635
+ },
636
+ {
637
+ "name": "latent_image",
638
+ "type": "LATENT",
639
+ "link": 142
640
+ }
641
+ ],
642
+ "outputs": [
643
+ {
644
+ "name": "LATENT",
645
+ "type": "LATENT",
646
+ "slot_index": 0,
647
+ "links": [
648
+ 128
649
+ ]
650
+ }
651
+ ],
652
+ "properties": {
653
+ "cnr_id": "comfy-core",
654
+ "ver": "0.3.51",
655
+ "Node name for S&R": "KSampler"
656
+ },
657
+ "widgets_values": [
658
+ 674751509502536,
659
+ "randomize",
660
+ 4,
661
+ 1,
662
+ "euler",
663
+ "simple",
664
+ 1
665
+ ]
666
+ },
667
+ {
668
+ "id": 118,
669
+ "type": "PreviewImage",
670
+ "pos": [
671
+ 400,
672
+ 1200
673
+ ],
674
+ "size": [
675
+ 140,
676
+ 246
677
+ ],
678
+ "flags": {},
679
+ "order": 17,
680
+ "mode": 0,
681
+ "inputs": [
682
+ {
683
+ "name": "images",
684
+ "type": "IMAGE",
685
+ "link": 202
686
+ }
687
+ ],
688
+ "outputs": [],
689
+ "properties": {
690
+ "cnr_id": "comfy-core",
691
+ "ver": "0.3.59",
692
+ "Node name for S&R": "PreviewImage"
693
+ },
694
+ "widgets_values": []
695
+ },
696
+ {
697
+ "id": 60,
698
+ "type": "SaveImage",
699
+ "pos": [
700
+ 1277.1220703125,
701
+ 158.8800811767578
702
+ ],
703
+ "size": [
704
+ 970,
705
+ 1030
706
+ ],
707
+ "flags": {},
708
+ "order": 23,
709
+ "mode": 0,
710
+ "inputs": [
711
+ {
712
+ "name": "images",
713
+ "type": "IMAGE",
714
+ "link": 110
715
+ }
716
+ ],
717
+ "outputs": [],
718
+ "properties": {
719
+ "cnr_id": "comfy-core",
720
+ "ver": "0.3.51"
721
+ },
722
+ "widgets_values": [
723
+ "ComfyUI"
724
+ ]
725
+ },
726
+ {
727
+ "id": 116,
728
+ "type": "ImagePadForOutpaint",
729
+ "pos": [
730
+ -63.68890380859375,
731
+ 1097.43408203125
732
+ ],
733
+ "size": [
734
+ 270,
735
+ 174
736
+ ],
737
+ "flags": {},
738
+ "order": 14,
739
+ "mode": 0,
740
+ "inputs": [
741
+ {
742
+ "name": "image",
743
+ "type": "IMAGE",
744
+ "link": 200
745
+ }
746
+ ],
747
+ "outputs": [
748
+ {
749
+ "name": "IMAGE",
750
+ "type": "IMAGE",
751
+ "links": [
752
+ 201,
753
+ 202
754
+ ]
755
+ },
756
+ {
757
+ "name": "MASK",
758
+ "type": "MASK",
759
+ "links": [
760
+ 203
761
+ ]
762
+ }
763
+ ],
764
+ "properties": {
765
+ "cnr_id": "comfy-core",
766
+ "ver": "0.3.59",
767
+ "Node name for S&R": "ImagePadForOutpaint"
768
+ },
769
+ "widgets_values": [
770
+ 0,
771
+ 0,
772
+ 0,
773
+ 512,
774
+ 40
775
+ ]
776
+ },
777
+ {
778
+ "id": 75,
779
+ "type": "ImageScaleToTotalPixels",
780
+ "pos": [
781
+ 365.94482421875,
782
+ 1070.4468994140625
783
+ ],
784
+ "size": [
785
+ 270,
786
+ 82
787
+ ],
788
+ "flags": {},
789
+ "order": 16,
790
+ "mode": 0,
791
+ "inputs": [
792
+ {
793
+ "name": "image",
794
+ "type": "IMAGE",
795
+ "link": 201
796
+ }
797
+ ],
798
+ "outputs": [
799
+ {
800
+ "name": "IMAGE",
801
+ "type": "IMAGE",
802
+ "links": [
803
+ 143,
804
+ 194,
805
+ 204
806
+ ]
807
+ }
808
+ ],
809
+ "properties": {
810
+ "cnr_id": "comfy-core",
811
+ "ver": "0.3.51",
812
+ "Node name for S&R": "ImageScaleToTotalPixels"
813
+ },
814
+ "widgets_values": [
815
+ "area",
816
+ 1.68
817
+ ]
818
+ },
819
+ {
820
+ "id": 7,
821
+ "type": "CLIPTextEncode",
822
+ "pos": [
823
+ 323.9553527832031,
824
+ 429.6221923828125
825
+ ],
826
+ "size": [
827
+ 460,
828
+ 140
829
+ ],
830
+ "flags": {},
831
+ "order": 12,
832
+ "mode": 0,
833
+ "inputs": [
834
+ {
835
+ "name": "clip",
836
+ "type": "CLIP",
837
+ "link": 75
838
+ }
839
+ ],
840
+ "outputs": [
841
+ {
842
+ "name": "CONDITIONING",
843
+ "type": "CONDITIONING",
844
+ "slot_index": 0,
845
+ "links": [
846
+ 191
847
+ ]
848
+ }
849
+ ],
850
+ "title": "CLIP Text Encode (Negative Prompt)",
851
+ "properties": {
852
+ "cnr_id": "comfy-core",
853
+ "ver": "0.3.51",
854
+ "Node name for S&R": "CLIPTextEncode"
855
+ },
856
+ "widgets_values": [
857
+ ""
858
+ ],
859
+ "color": "#223",
860
+ "bgcolor": "#335"
861
+ },
862
+ {
863
+ "id": 6,
864
+ "type": "CLIPTextEncode",
865
+ "pos": [
866
+ 305.13323974609375,
867
+ 165.7223358154297
868
+ ],
869
+ "size": [
870
+ 460,
871
+ 164.31304931640625
872
+ ],
873
+ "flags": {},
874
+ "order": 11,
875
+ "mode": 0,
876
+ "inputs": [
877
+ {
878
+ "name": "clip",
879
+ "type": "CLIP",
880
+ "link": 74
881
+ }
882
+ ],
883
+ "outputs": [
884
+ {
885
+ "name": "CONDITIONING",
886
+ "type": "CONDITIONING",
887
+ "slot_index": 0,
888
+ "links": [
889
+ 190
890
+ ]
891
+ }
892
+ ],
893
+ "title": "CLIP Text Encode (Positive Prompt)",
894
+ "properties": {
895
+ "cnr_id": "comfy-core",
896
+ "ver": "0.3.51",
897
+ "Node name for S&R": "CLIPTextEncode"
898
+ },
899
+ "widgets_values": [
900
+ ""
901
+ ],
902
+ "color": "#232",
903
+ "bgcolor": "#353"
904
+ },
905
+ {
906
+ "id": 71,
907
+ "type": "LoadImage",
908
+ "pos": [
909
+ -66.35772705078125,
910
+ 633.9967651367188
911
+ ],
912
+ "size": [
913
+ 274.080078125,
914
+ 314.00006103515625
915
+ ],
916
+ "flags": {},
917
+ "order": 10,
918
+ "mode": 0,
919
+ "inputs": [],
920
+ "outputs": [
921
+ {
922
+ "name": "IMAGE",
923
+ "type": "IMAGE",
924
+ "links": [
925
+ 200
926
+ ]
927
+ },
928
+ {
929
+ "name": "MASK",
930
+ "type": "MASK",
931
+ "links": []
932
+ }
933
+ ],
934
+ "properties": {
935
+ "cnr_id": "comfy-core",
936
+ "ver": "0.3.51",
937
+ "Node name for S&R": "LoadImage"
938
+ },
939
+ "widgets_values": [
940
+ "IMG_0131.jpeg",
941
+ "image"
942
+ ]
943
+ }
944
+ ],
945
+ "links": [
946
+ [
947
+ 74,
948
+ 38,
949
+ 0,
950
+ 6,
951
+ 0,
952
+ "CLIP"
953
+ ],
954
+ [
955
+ 75,
956
+ 38,
957
+ 0,
958
+ 7,
959
+ 0,
960
+ "CLIP"
961
+ ],
962
+ [
963
+ 76,
964
+ 39,
965
+ 0,
966
+ 8,
967
+ 1,
968
+ "VAE"
969
+ ],
970
+ [
971
+ 110,
972
+ 8,
973
+ 0,
974
+ 60,
975
+ 0,
976
+ "IMAGE"
977
+ ],
978
+ [
979
+ 128,
980
+ 3,
981
+ 0,
982
+ 8,
983
+ 0,
984
+ "LATENT"
985
+ ],
986
+ [
987
+ 142,
988
+ 76,
989
+ 0,
990
+ 3,
991
+ 3,
992
+ "LATENT"
993
+ ],
994
+ [
995
+ 143,
996
+ 75,
997
+ 0,
998
+ 76,
999
+ 0,
1000
+ "IMAGE"
1001
+ ],
1002
+ [
1003
+ 144,
1004
+ 39,
1005
+ 0,
1006
+ 76,
1007
+ 1,
1008
+ "VAE"
1009
+ ],
1010
+ [
1011
+ 145,
1012
+ 37,
1013
+ 0,
1014
+ 80,
1015
+ 0,
1016
+ "MODEL"
1017
+ ],
1018
+ [
1019
+ 149,
1020
+ 80,
1021
+ 0,
1022
+ 66,
1023
+ 0,
1024
+ "MODEL"
1025
+ ],
1026
+ [
1027
+ 156,
1028
+ 66,
1029
+ 0,
1030
+ 3,
1031
+ 0,
1032
+ "MODEL"
1033
+ ],
1034
+ [
1035
+ 188,
1036
+ 108,
1037
+ 0,
1038
+ 3,
1039
+ 1,
1040
+ "CONDITIONING"
1041
+ ],
1042
+ [
1043
+ 189,
1044
+ 108,
1045
+ 1,
1046
+ 3,
1047
+ 2,
1048
+ "CONDITIONING"
1049
+ ],
1050
+ [
1051
+ 190,
1052
+ 6,
1053
+ 0,
1054
+ 108,
1055
+ 0,
1056
+ "CONDITIONING"
1057
+ ],
1058
+ [
1059
+ 191,
1060
+ 7,
1061
+ 0,
1062
+ 108,
1063
+ 1,
1064
+ "CONDITIONING"
1065
+ ],
1066
+ [
1067
+ 192,
1068
+ 84,
1069
+ 0,
1070
+ 108,
1071
+ 2,
1072
+ "CONTROL_NET"
1073
+ ],
1074
+ [
1075
+ 193,
1076
+ 39,
1077
+ 0,
1078
+ 108,
1079
+ 3,
1080
+ "VAE"
1081
+ ],
1082
+ [
1083
+ 194,
1084
+ 75,
1085
+ 0,
1086
+ 108,
1087
+ 4,
1088
+ "IMAGE"
1089
+ ],
1090
+ [
1091
+ 200,
1092
+ 71,
1093
+ 0,
1094
+ 116,
1095
+ 0,
1096
+ "IMAGE"
1097
+ ],
1098
+ [
1099
+ 201,
1100
+ 116,
1101
+ 0,
1102
+ 75,
1103
+ 0,
1104
+ "IMAGE"
1105
+ ],
1106
+ [
1107
+ 202,
1108
+ 116,
1109
+ 0,
1110
+ 118,
1111
+ 0,
1112
+ "IMAGE"
1113
+ ],
1114
+ [
1115
+ 203,
1116
+ 116,
1117
+ 1,
1118
+ 108,
1119
+ 5,
1120
+ "MASK"
1121
+ ],
1122
+ [
1123
+ 204,
1124
+ 75,
1125
+ 0,
1126
+ 110,
1127
+ 0,
1128
+ "IMAGE"
1129
+ ]
1130
+ ],
1131
+ "groups": [
1132
+ {
1133
+ "id": 1,
1134
+ "title": "Step 1 - Upload models",
1135
+ "bounding": [
1136
+ -130,
1137
+ -80,
1138
+ 400,
1139
+ 610
1140
+ ],
1141
+ "color": "#3f789e",
1142
+ "font_size": 24,
1143
+ "flags": {}
1144
+ },
1145
+ {
1146
+ "id": 2,
1147
+ "title": "Step 2 - Upload image and edit mask",
1148
+ "bounding": [
1149
+ -130,
1150
+ 550,
1151
+ 420,
1152
+ 440
1153
+ ],
1154
+ "color": "#3f789e",
1155
+ "font_size": 24,
1156
+ "flags": {}
1157
+ },
1158
+ {
1159
+ "id": 4,
1160
+ "title": "Step 3 - Prompt",
1161
+ "bounding": [
1162
+ 290,
1163
+ 100,
1164
+ 490,
1165
+ 430
1166
+ ],
1167
+ "color": "#3f789e",
1168
+ "font_size": 24,
1169
+ "flags": {}
1170
+ },
1171
+ {
1172
+ "id": 5,
1173
+ "title": "4 steps lightning LoRA",
1174
+ "bounding": [
1175
+ 290,
1176
+ -80,
1177
+ 490,
1178
+ 160
1179
+ ],
1180
+ "color": "#3f789e",
1181
+ "font_size": 24,
1182
+ "flags": {}
1183
+ },
1184
+ {
1185
+ "id": 6,
1186
+ "title": "Press Ctrl-B to enable it for outpainting",
1187
+ "bounding": [
1188
+ -130,
1189
+ 1010,
1190
+ 420,
1191
+ 290
1192
+ ],
1193
+ "color": "#3f789e",
1194
+ "font_size": 24,
1195
+ "flags": {}
1196
+ }
1197
+ ],
1198
+ "config": {},
1199
+ "extra": {
1200
+ "ds": {
1201
+ "scale": 0.795368528420226,
1202
+ "offset": [
1203
+ -771.9655189452302,
1204
+ -192.82709879507493
1205
+ ]
1206
+ },
1207
+ "frontendVersion": "1.26.11",
1208
+ "groupNodes": {},
1209
+ "VHS_latentpreview": false,
1210
+ "VHS_latentpreviewrate": 0,
1211
+ "VHS_MetadataImage": true,
1212
+ "VHS_KeepIntermediate": true
1213
+ },
1214
+ "version": 0.4
1215
+ }
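
Each loader node in the workflow above embeds a `models` entry (name, url, directory) describing the checkpoint it expects. As a quick sanity check after downloading, a small script can walk those entries and confirm the files are in the matching `ComfyUI/models/` subfolders. This is a minimal sketch, not part of the uploaded workflow; the local filename and the `COMFYUI_ROOT` path are assumptions to adjust for your setup.

```python
import json
from pathlib import Path

COMFYUI_ROOT = Path("ComfyUI")  # assumption: point this at your ComfyUI install
WORKFLOW_FILE = Path("qwen_image_instantx_outpainting_controlnet.json")  # assumption: workflow saved locally

workflow = json.loads(WORKFLOW_FILE.read_text())

for node in workflow["nodes"]:
    # Loader nodes in this graph carry a "models" list inside "properties"
    for model in node.get("properties", {}).get("models", []):
        target = COMFYUI_ROOT / "models" / model["directory"] / model["name"]
        status = "found" if target.exists() else "missing"
        print(f'{node["type"]}: models/{model["directory"]}/{model["name"]} ({status})')
```

Against a complete install this should list qwen_image_fp8_e4m3fn.safetensors, the InstantX inpainting ControlNet, the Lightning 4-steps LoRA, the Qwen 2.5 VL text encoder, and qwen_image_vae.safetensors, mirroring the storage layout shown in the Model links note.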