svjack committed
Commit 0e2a4f5 · verified · 1 Parent(s): 370b745

Upload qwen_image_4_Grid_Display_lora.json

Files changed (1)
  1. qwen_image_4_Grid_Display_lora.json +651 -0
qwen_image_4_Grid_Display_lora.json ADDED
@@ -0,0 +1,651 @@
{
  "id": "91f6bbe2-ed41-4fd6-bac7-71d5b5864ecb",
  "revision": 0,
  "last_node_id": 72,
  "last_link_id": 154,
  "nodes": [
    {
      "id": 8,
      "type": "VAEDecode",
      "pos": [1210, 190],
      "size": [210, 46],
      "flags": {},
      "order": 10,
      "mode": 0,
      "inputs": [
        {"name": "samples", "type": "LATENT", "link": 128},
        {"name": "vae", "type": "VAE", "link": 76}
      ],
      "outputs": [
        {"name": "IMAGE", "type": "IMAGE", "slot_index": 0, "links": [110]}
      ],
      "properties": {"cnr_id": "comfy-core", "ver": "0.3.48", "Node name for S&R": "VAEDecode"},
      "widgets_values": []
    },
    {
      "id": 7,
      "type": "CLIPTextEncode",
      "pos": [413, 389],
      "size": [425.27801513671875, 180.6060791015625],
      "flags": {},
      "order": 4,
      "mode": 0,
      "inputs": [
        {"name": "clip", "type": "CLIP", "link": 75}
      ],
      "outputs": [
        {"name": "CONDITIONING", "type": "CONDITIONING", "slot_index": 0, "links": [52]}
      ],
      "title": "CLIP Text Encode (Negative Prompt)",
      "properties": {"cnr_id": "comfy-core", "ver": "0.3.48", "Node name for S&R": "CLIPTextEncode"},
      "widgets_values": [" "],
      "color": "#322",
      "bgcolor": "#533"
    },
    {
      "id": 66,
      "type": "ModelSamplingAuraFlow",
      "pos": [469.15350341796875, 39.34361267089844],
      "size": [270, 58],
      "flags": {},
      "order": 7,
      "mode": 0,
      "inputs": [
        {"name": "model", "type": "MODEL", "link": 153}
      ],
      "outputs": [
        {"name": "MODEL", "type": "MODEL", "links": [125]}
      ],
      "properties": {"cnr_id": "comfy-core", "ver": "0.3.48", "Node name for S&R": "ModelSamplingAuraFlow"},
      "widgets_values": [3.500000000000001]
    },
    {
      "id": 3,
      "type": "KSampler",
      "pos": [863, 187],
      "size": [315, 262],
      "flags": {},
      "order": 9,
      "mode": 0,
      "inputs": [
        {"name": "model", "type": "MODEL", "link": 125},
        {"name": "positive", "type": "CONDITIONING", "link": 46},
        {"name": "negative", "type": "CONDITIONING", "link": 52},
        {"name": "latent_image", "type": "LATENT", "link": 107}
      ],
      "outputs": [
        {"name": "LATENT", "type": "LATENT", "slot_index": 0, "links": [128]}
      ],
      "properties": {"cnr_id": "comfy-core", "ver": "0.3.48", "Node name for S&R": "KSampler"},
      "widgets_values": [190320870383225, "randomize", 25, 4, "euler", "simple", 1]
    },
    {
      "id": 37,
      "type": "UNETLoader",
      "pos": [20, 60],
      "size": [346.7470703125, 82],
      "flags": {},
      "order": 0,
      "mode": 0,
      "inputs": [],
      "outputs": [
        {"name": "MODEL", "type": "MODEL", "slot_index": 0, "links": [129]}
      ],
      "properties": {"cnr_id": "comfy-core", "ver": "0.3.48", "Node name for S&R": "UNETLoader"},
      "widgets_values": ["qwen_image_fp8_e4m3fn.safetensors", "default"],
      "color": "#223",
      "bgcolor": "#335"
    },
    {
      "id": 38,
      "type": "CLIPLoader",
      "pos": [20, 190],
      "size": [380, 106],
      "flags": {},
      "order": 1,
      "mode": 0,
      "inputs": [],
      "outputs": [
        {"name": "CLIP", "type": "CLIP", "slot_index": 0, "links": [75, 131]}
      ],
      "properties": {"cnr_id": "comfy-core", "ver": "0.3.48", "Node name for S&R": "CLIPLoader"},
      "widgets_values": ["qwen_2.5_vl_7b_fp8_scaled.safetensors", "qwen_image", "default"],
      "color": "#223",
      "bgcolor": "#335"
    },
    {
      "id": 39,
      "type": "VAELoader",
      "pos": [20, 340],
      "size": [330, 60],
      "flags": {},
      "order": 2,
      "mode": 0,
      "inputs": [],
      "outputs": [
        {"name": "VAE", "type": "VAE", "slot_index": 0, "links": [76]}
      ],
      "properties": {"cnr_id": "comfy-core", "ver": "0.3.48", "Node name for S&R": "VAELoader"},
      "widgets_values": ["qwen_image_vae.safetensors"],
      "color": "#223",
      "bgcolor": "#335"
    },
    {
      "id": 58,
      "type": "EmptySD3LatentImage",
      "pos": [410.6574401855469, 620.5468139648438],
      "size": [270, 106],
      "flags": {},
      "order": 3,
      "mode": 0,
      "inputs": [],
      "outputs": [
        {"name": "LATENT", "type": "LATENT", "links": [107]}
      ],
      "properties": {"cnr_id": "comfy-core", "ver": "0.3.48", "Node name for S&R": "EmptySD3LatentImage"},
      "widgets_values": [768, 768, 1]
    },
    {
      "id": 6,
      "type": "CLIPTextEncode",
      "pos": [415, 186],
      "size": [422.84503173828125, 164.31304931640625],
      "flags": {},
      "order": 8,
      "mode": 0,
      "inputs": [
        {"name": "clip", "type": "CLIP", "link": 154}
      ],
      "outputs": [
        {"name": "CONDITIONING", "type": "CONDITIONING", "slot_index": 0, "links": [46]}
      ],
      "title": "CLIP Text Encode (Positive Prompt)",
      "properties": {"cnr_id": "comfy-core", "ver": "0.3.48", "Node name for S&R": "CLIPTextEncode"},
      "widgets_values": [
        "\"In the style of GPT-4o-Design-Images, Generate 4 image samples for the current design concept and piece them into 4 square blocks.\"\n\n1、日式黑白漫画:一个机器人在月下躺在沙漠里。周围是仙人掌和月色。\n2、日式黑白漫画:一个戴眼镜的男孩接近沙漠中躺着的机器人,在机器人身上进行修理。\n3、日式黑白漫画:机器人醒来,头边有一个白色气泡,里面有文字“谢谢!”\n4、日式黑白漫画:男孩和机器人手拉手走向远处的绿洲。"
      ],
      "color": "#232",
      "bgcolor": "#353"
    },
    {
      "id": 70,
      "type": "LoraLoader",
      "pos": [480.5247497558594, -205.89479064941406],
      "size": [270, 126],
      "flags": {},
      "order": 6,
      "mode": 4,
      "inputs": [
        {"name": "model", "type": "MODEL", "link": 141},
        {"name": "clip", "type": "CLIP", "link": 139}
      ],
      "outputs": [
        {"name": "MODEL", "type": "MODEL", "links": [153]},
        {"name": "CLIP", "type": "CLIP", "links": [154]}
      ],
      "properties": {"cnr_id": "comfy-core", "ver": "0.3.49", "Node name for S&R": "LoraLoader"},
      "widgets_values": ["qwen_image_black_white_naoki_urasawa_v1.safetensors", 1, 1]
    },
    {
      "id": 60,
      "type": "SaveImage",
      "pos": [1239.4698486328125, 337.7687072753906],
      "size": [821.0496215820312, 871.7067260742188],
      "flags": {},
      "order": 11,
      "mode": 0,
      "inputs": [
        {"name": "images", "type": "IMAGE", "link": 110}
      ],
      "outputs": [],
      "properties": {"cnr_id": "comfy-core", "ver": "0.3.48"},
      "widgets_values": ["ComfyUI"]
    },
    {
      "id": 68,
      "type": "LoraLoader",
      "pos": [66.83535766601562, -205.47305297851562],
      "size": [270, 126],
      "flags": {},
      "order": 5,
      "mode": 0,
      "inputs": [
        {"name": "model", "type": "MODEL", "link": 129},
        {"name": "clip", "type": "CLIP", "link": 131}
      ],
      "outputs": [
        {"name": "MODEL", "type": "MODEL", "links": [141]},
        {"name": "CLIP", "type": "CLIP", "links": [139]}
      ],
      "properties": {"cnr_id": "comfy-core", "ver": "0.3.49", "Node name for S&R": "LoraLoader"},
      "widgets_values": ["Four_qwen_image_lora-step00002920.safetensors", 1, 1]
    }
  ],
  "links": [
    [46, 6, 0, 3, 1, "CONDITIONING"],
    [52, 7, 0, 3, 2, "CONDITIONING"],
    [75, 38, 0, 7, 0, "CLIP"],
    [76, 39, 0, 8, 1, "VAE"],
    [107, 58, 0, 3, 3, "LATENT"],
    [110, 8, 0, 60, 0, "IMAGE"],
    [125, 66, 0, 3, 0, "MODEL"],
    [128, 3, 0, 8, 0, "LATENT"],
    [129, 37, 0, 68, 0, "MODEL"],
    [131, 38, 0, 68, 1, "CLIP"],
    [139, 68, 1, 70, 1, "CLIP"],
    [141, 68, 0, 70, 0, "MODEL"],
    [153, 70, 0, 66, 0, "MODEL"],
    [154, 70, 1, 6, 0, "CLIP"]
  ],
  "groups": [],
  "config": {},
  "extra": {
    "ds": {
      "scale": 0.6934334949441455,
      "offset": [476.4825791094596, 217.36649263363267]
    },
    "frontendVersion": "1.25.9",
    "VHS_latentpreview": false,
    "VHS_latentpreviewrate": 0,
    "VHS_MetadataImage": true,
    "VHS_KeepIntermediate": true
  },
  "version": 0.4
}
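Note on using this file: the JSON above is ComfyUI's graph save format (nodes, links, widget values), so the intended use is to load it in the ComfyUI web UI via Load or drag-and-drop; it is not the API prompt format expected by the /prompt endpoint. Below is a minimal Python sketch for inspecting the workflow offline with only the standard library; the local filename and the fields printed are assumptions for illustration, and the link-array field order is read off the "links" entries shown above.

import json

# Assumption: the uploaded file sits next to this script under its original name.
with open("qwen_image_4_Grid_Display_lora.json", "r", encoding="utf-8") as f:
    workflow = json.load(f)

# Print each node's id, type, and widget values (model/LoRA filenames,
# sampler settings, and the prompt text live in widgets_values).
for node in workflow["nodes"]:
    print(node["id"], node["type"], node.get("widgets_values", []))

# Links are stored as [link_id, src_node, src_slot, dst_node, dst_slot, type];
# tally inbound edges per node as a quick structural sanity check.
inbound = {}
for _link_id, _src, _src_slot, dst, _dst_slot, _type in workflow["links"]:
    inbound[dst] = inbound.get(dst, 0) + 1
print("inbound links per node id:", inbound)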