Upload proteus_v06.json with huggingface_hub
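For context, a minimal sketch of how a file like this is typically pushed with huggingface_hub's HfApi.upload_file; the repo_id, repo_type, and token handling below are placeholders and assumptions, not values recorded in this commit.

# Hypothetical upload sketch (assumes a cached `huggingface-cli login` token).
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="proteus_v06.json",    # local Civitai metadata snapshot
    path_in_repo="proteus_v06.json",       # destination path inside the repo
    repo_id="<namespace>/<repo>",          # placeholder, not from this commit
    repo_type="dataset",                   # assumption; "model" also works
    commit_message="Upload proteus_v06.json with huggingface_hub",
)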
proteus_v06.json ADDED (+257 -0)
@@ -0,0 +1,257 @@
{
"id": 969886,
"modelId": 267242,
"name": "v0.6",
"createdAt": "2024-10-18T16:34:00.255Z",
"updatedAt": "2024-10-18T17:03:27.417Z",
"status": "Published",
"publishedAt": "2024-10-18T17:03:27.409Z",
"trainedWords": [],
"trainingStatus": null,
"trainingDetails": null,
"baseModel": "SDXL 1.0",
"baseModelType": "Standard",
"earlyAccessEndsAt": null,
"earlyAccessConfig": null,
"description": "<p>ProteusV0.5 is the latest full release of my AI image generation model, built as a sophisticated enhancement over OpenDalleV1.1. This version brings significant improvements in photorealism, prompt comprehension, and stylistic capabilities across various domains. About Proteus Proteus leverages and enhances the core functionalities of OpenDalleV1.1 to deliver superior outcomes. Key areas of advancement include heightened responsiveness to prompts and augmented creative capacities. The model has been fine-tuned using a carefully curated dataset of copyright-free stock images and high-quality AI-generated image pairs.</p>",
"uploadType": "Created",
"usageControl": "Download",
"air": "urn:air:sdxl:checkpoint:civitai:267242@969886",
"stats": {
"downloadCount": 992,
"ratingCount": 0,
"rating": 0,
"thumbsUpCount": 59
},
"model": {
"name": "Proteus",
"type": "Checkpoint",
"nsfw": false,
"poi": false
},
"files": [
{
"id": 876365,
"sizeKB": 6775438.611328125,
"name": "proteus_v06.safetensors",
"type": "Model",
"pickleScanResult": "Success",
"pickleScanMessage": "No Pickle imports",
"virusScanResult": "Success",
"virusScanMessage": null,
"scannedAt": "2024-10-18T16:49:08.998Z",
"metadata": {
"format": "SafeTensor",
"size": "full",
"fp": "fp32"
},
"hashes": {
"AutoV1": "1E6A098D",
"AutoV2": "8C4B5D3BE0",
"SHA256": "8C4B5D3BE05B1136B69BD61AB6C06F43BC3279614D6325A4622E665EF4DCFD88",
"CRC32": "4DBECDC3",
"BLAKE3": "EC172FE895427102E46DC3526F6A230B5F2E8F0311973A849CC357AEEBC54B13",
"AutoV3": "AB727C350A2D"
},
"primary": true,
"downloadUrl": "https://civitai.com/api/download/models/969886"
},
{
"id": 233666,
"sizeKB": 326798.009765625,
"name": "sdxl_vae.safetensors",
"type": "VAE",
"pickleScanResult": "Success",
"pickleScanMessage": "No Pickle imports",
"virusScanResult": "Success",
"virusScanMessage": null,
"scannedAt": "2024-01-12T23:21:01.892Z",
"metadata": {
"format": "SafeTensor",
"size": null,
"fp": null
},
"hashes": {
"AutoV1": "584F63AB",
"AutoV2": "235745AF8D",
"SHA256": "235745AF8D86BF4A4C1B5B4F529868B37019A10F7C0B2E79AD0ABCA3A22BC6E1",
"CRC32": "5BB75FB4",
"BLAKE3": "D46EB6988A26BCAF9E9CFA5E5C6264C4EE1A70F2018F33B8BC2DD7CA0681B490",
"AutoV3": "55F20A1016E7"
},
"primary": false,
"downloadUrl": "https://civitai.com/api/download/models/969886?type=VAE&format=SafeTensor"
}
],
"images": [
{
"url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/14e53a93-cecf-47b8-8ed5-6bb4016418ff/width=1024/35275864.jpeg",
"nsfwLevel": 1,
"width": 1024,
"height": 1024,
"hash": "UGIXjb~VPW0L^*nht,I:9FWBIUxu?bkCt7f6",
"type": "image",
"metadata": {
"hash": "UGIXjb~VPW0L^*nht,I:9FWBIUxu?bkCt7f6",
"size": 1642818,
"width": 1024,
"height": 1024
},
"meta": {
"seed": 1065596547516194,
"vaes": [],
"Model": "SDXL\\Proteus-v0.6.8",
"comfy": "{\"prompt\": {\"3\": {\"inputs\": {\"seed\": 1065596547516194, \"steps\": 50, \"cfg\": 7.0, \"sampler_name\": \"dpmpp_3m_sde_gpu\", \"scheduler\": \"karras\", \"denoise\": 1.0, \"model\": [\"50\", 0], \"positive\": [\"6\", 0], \"negative\": [\"7\", 0], \"latent_image\": [\"5\", 0]}, \"class_type\": \"KSampler\"}, \"5\": {\"inputs\": {\"width\": 1024, \"height\": 1024, \"batch_size\": 4}, \"class_type\": \"EmptyLatentImage\"}, \"6\": {\"inputs\": {\"text\": \"The image features an older man, a long white beard and mustache, He has a stern expression, giving the impression of a wise and experienced individual. The mans beard and mustache are prominent, adding to his distinguished appearance. The close-up shot of the mans face emphasizes his facial features and the intensity of his gaze.\", \"clip\": [\"51\", 0]}, \"class_type\": \"CLIPTextEncode\"}, \"7\": {\"inputs\": {\"text\": \"bad\", \"clip\": [\"51\", 0]}, \"class_type\": \"CLIPTextEncode\"}, \"8\": {\"inputs\": {\"samples\": [\"3\", 0], \"vae\": [\"42\", 2]}, \"class_type\": \"VAEDecode\"}, \"9\": {\"inputs\": {\"filename_prefix\": \"ComfyUI\", \"images\": [\"8\", 0]}, \"class_type\": \"SaveImage\"}, \"42\": {\"inputs\": {\"ckpt_name\": \"SDXL\\\\Proteus-v0.6.8.safetensors\"}, \"class_type\": \"CheckpointLoaderSimple\"}, \"49\": {\"inputs\": {\"switch_1\": \"On\", \"lora_name_1\": \"proteus\\\\20-prod.safetensors\", \"model_weight_1\": 0.1, \"clip_weight_1\": 0.1, \"switch_2\": \"On\", \"lora_name_2\": \"proteus\\\\proteus-v0.6.8-lora.safetensors\", \"model_weight_2\": 0.05, \"clip_weight_2\": 0.01, \"switch_3\": \"On\", \"lora_name_3\": \"proteus\\\\proteus-v068-lora-kohya.safetensors\", \"model_weight_3\": 0.15, \"clip_weight_3\": 0.0}, \"class_type\": \"CR LoRA Stack\"}, \"50\": {\"inputs\": {\"model\": [\"42\", 0], \"clip\": [\"42\", 1], \"lora_stack\": [\"49\", 0]}, \"class_type\": \"CR Apply LoRA Stack\"}, \"51\": {\"inputs\": {\"stop_at_clip_layer\": -1, \"clip\": [\"50\", 1]}, \"class_type\": \"CLIPSetLastLayer\"}}, \"workflow\": {\"last_node_id\": 51, \"last_link_id\": 162, \"nodes\": [{\"id\": 8, \"type\": \"VAEDecode\", \"pos\": {\"0\": 1209, \"1\": 188}, \"size\": {\"0\": 210, \"1\": 46}, \"flags\": {}, \"order\": 8, \"mode\": 0, \"inputs\": [{\"name\": \"samples\", \"type\": \"LATENT\", \"link\": 7}, {\"name\": \"vae\", \"type\": \"VAE\", \"link\": 143}], \"outputs\": [{\"name\": \"IMAGE\", \"type\": \"IMAGE\", \"links\": [9], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"VAEDecode\"}}, {\"id\": 5, \"type\": \"EmptyLatentImage\", \"pos\": {\"0\": 473, \"1\": 609}, \"size\": {\"0\": 315, \"1\": 106}, \"flags\": {}, \"order\": 0, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [2], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"EmptyLatentImage\"}, \"widgets_values\": [1024, 1024, 4]}, {\"id\": 3, \"type\": \"KSampler\", \"pos\": {\"0\": 863, \"1\": 186}, \"size\": {\"0\": 315, \"1\": 474}, \"flags\": {}, \"order\": 7, \"mode\": 0, \"inputs\": [{\"name\": \"model\", \"type\": \"MODEL\", \"link\": 159}, {\"name\": \"positive\", \"type\": \"CONDITIONING\", \"link\": 4}, {\"name\": \"negative\", \"type\": \"CONDITIONING\", \"link\": 6}, {\"name\": \"latent_image\", \"type\": \"LATENT\", \"link\": 2}], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [7], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"KSampler\"}, \"widgets_values\": [1065596547516194, \"randomize\", 50, 7, \"dpmpp_3m_sde_gpu\", 
\"karras\", 1]}, {\"id\": 7, \"type\": \"CLIPTextEncode\", \"pos\": {\"0\": 413, \"1\": 389}, \"size\": {\"0\": 425.27801513671875, \"1\": 180.6060791015625}, \"flags\": {}, \"order\": 5, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 161}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [6], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, \"widgets_values\": [\"bad\"]}, {\"id\": 50, \"type\": \"CR Apply LoRA Stack\", \"pos\": {\"0\": -311, \"1\": 314}, \"size\": {\"0\": 277.20001220703125, \"1\": 66}, \"flags\": {}, \"order\": 3, \"mode\": 0, \"inputs\": [{\"name\": \"model\", \"type\": \"MODEL\", \"link\": 156}, {\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 155}, {\"name\": \"lora_stack\", \"type\": \"LORA_STACK\", \"link\": 154}], \"outputs\": [{\"name\": \"MODEL\", \"type\": \"MODEL\", \"links\": [159], \"shape\": 3, \"slot_index\": 0}, {\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [160], \"shape\": 3, \"slot_index\": 1}, {\"name\": \"show_help\", \"type\": \"STRING\", \"links\": null, \"shape\": 3}], \"properties\": {\"Node name for S&R\": \"CR Apply LoRA Stack\"}}, {\"id\": 51, \"type\": \"CLIPSetLastLayer\", \"pos\": {\"0\": 14, \"1\": 333}, \"size\": {\"0\": 315, \"1\": 58}, \"flags\": {}, \"order\": 4, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 160}], \"outputs\": [{\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [161, 162], \"shape\": 3, \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"CLIPSetLastLayer\"}, \"widgets_values\": [-1]}, {\"id\": 42, \"type\": \"CheckpointLoaderSimple\", \"pos\": {\"0\": -671, \"1\": 300}, \"size\": {\"0\": 315, \"1\": 98}, \"flags\": {}, \"order\": 1, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"MODEL\", \"type\": \"MODEL\", \"links\": [156], \"slot_index\": 0}, {\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [155], \"slot_index\": 1}, {\"name\": \"VAE\", \"type\": \"VAE\", \"links\": [143], \"slot_index\": 2}], \"properties\": {\"Node name for S&R\": \"CheckpointLoaderSimple\"}, \"widgets_values\": [\"SDXL\\\\Proteus-v0.6.8.safetensors\"]}, {\"id\": 6, \"type\": \"CLIPTextEncode\", \"pos\": {\"0\": 415, \"1\": 186}, \"size\": {\"0\": 422.84503173828125, \"1\": 164.31304931640625}, \"flags\": {}, \"order\": 6, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 162}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [4], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, \"widgets_values\": [\"The image features an older man, a long white beard and mustache, He has a stern expression, giving the impression of a wise and experienced individual. The mans beard and mustache are prominent, adding to his distinguished appearance. 
The close-up shot of the mans face emphasizes his facial features and the intensity of his gaze.\"]}, {\"id\": 9, \"type\": \"SaveImage\", \"pos\": {\"0\": 1508, \"1\": 201}, \"size\": [667.9075218394742, 760.1454424943933], \"flags\": {}, \"order\": 9, \"mode\": 0, \"inputs\": [{\"name\": \"images\", \"type\": \"IMAGE\", \"link\": 9}], \"outputs\": [], \"properties\": {}, \"widgets_values\": [\"ComfyUI\"]}, {\"id\": 49, \"type\": \"CR LoRA Stack\", \"pos\": {\"0\": -672, \"1\": -104}, \"size\": [419.49158146472223, 345.60073301642353], \"flags\": {}, \"order\": 2, \"mode\": 0, \"inputs\": [{\"name\": \"lora_stack\", \"type\": \"LORA_STACK\", \"link\": null}], \"outputs\": [{\"name\": \"LORA_STACK\", \"type\": \"LORA_STACK\", \"links\": [154], \"shape\": 3, \"slot_index\": 0}, {\"name\": \"show_help\", \"type\": \"STRING\", \"links\": null, \"shape\": 3}], \"properties\": {\"Node name for S&R\": \"CR LoRA Stack\"}, \"widgets_values\": [\"On\", \"proteus\\\\20-prod.safetensors\", 0.1, 0.1, \"On\", \"proteus\\\\proteus-v0.6.8-lora.safetensors\", 0.05, 0.01, \"On\", \"proteus\\\\proteus-v068-lora-kohya.safetensors\", 0.15, 0]}], \"links\": [[2, 5, 0, 3, 3, \"LATENT\"], [4, 6, 0, 3, 1, \"CONDITIONING\"], [6, 7, 0, 3, 2, \"CONDITIONING\"], [7, 3, 0, 8, 0, \"LATENT\"], [9, 8, 0, 9, 0, \"IMAGE\"], [143, 42, 2, 8, 1, \"VAE\"], [154, 49, 0, 50, 2, \"LORA_STACK\"], [155, 42, 1, 50, 1, \"CLIP\"], [156, 42, 0, 50, 0, \"MODEL\"], [159, 50, 0, 3, 0, \"MODEL\"], [160, 50, 1, 51, 0, \"CLIP\"], [161, 51, 0, 7, 0, \"CLIP\"], [162, 51, 0, 6, 0, \"CLIP\"]], \"groups\": [], \"config\": {}, \"extra\": {\"ds\": {\"scale\": 0.6830134553650705, \"offset\": [681.7379785471077, 180.91636604867674]}}, \"version\": 0.4, \"widget_idx_map\": {\"3\": {\"seed\": 0, \"sampler_name\": 4, \"scheduler\": 5}}}}",
"steps": 50,
"width": 1024,
"height": 1024,
"models": [
"SDXL\\Proteus-v0.6.8.safetensors"
],
"prompt": "The image features an older man, a long white beard and mustache, He has a stern expression, giving the impression of a wise and experienced individual. The mans beard and mustache are prominent, adding to his distinguished appearance. The close-up shot of the mans face emphasizes his facial features and the intensity of his gaze.",
"denoise": 1,
"sampler": "dpmpp_3m_sde_gpu",
"cfgScale": 7,
"modelIds": [],
"scheduler": "karras",
"upscalers": [],
"versionIds": [],
"controlNets": [],
"negativePrompt": "bad",
"additionalResources": []
},
"availability": "Public",
"hasMeta": true,
"hasPositivePrompt": true,
"onSite": false,
"remixOfId": null
},
{
"url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/9ca78e0e-55c2-47da-b602-8c5ead41e4d7/width=1024/35275866.jpeg",
"nsfwLevel": 1,
"width": 1024,
"height": 1024,
"hash": "UOF=~$xYIoxa_Nj[xZofx[xtRjaet7?GRjRj",
"type": "image",
"metadata": {
"hash": "UOF=~$xYIoxa_Nj[xZofx[xtRjaet7?GRjRj",
"size": 1301451,
"width": 1024,
"height": 1024
},
"meta": {
"seed": 548825752111351,
"vaes": [],
"Model": "SDXL\\Proteus-v0.6.8",
"comfy": "{\"prompt\": {\"3\": {\"inputs\": {\"seed\": 548825752111351, \"steps\": 50, \"cfg\": 7.1000000000000005, \"sampler_name\": \"dpmpp_3m_sde_gpu\", \"scheduler\": \"karras\", \"denoise\": 1.0, \"model\": [\"50\", 0], \"positive\": [\"6\", 0], \"negative\": [\"7\", 0], \"latent_image\": [\"5\", 0]}, \"class_type\": \"KSampler\"}, \"5\": {\"inputs\": {\"width\": 1024, \"height\": 1024, \"batch_size\": 4}, \"class_type\": \"EmptyLatentImage\"}, \"6\": {\"inputs\": {\"text\": \"cinematic film still of Kodak Motion Picture Film: (Sharp Detailed Image) An Oscar winning movie for Best Cinematography a woman in a kimono standing on a subway train in Japan Kodak Motion Picture Film Style, shallow depth of field, vignette, highly detailed, high budget, bokeh, cinemascope, moody, epic, gorgeous, film grain, grainy\", \"clip\": [\"51\", 0]}, \"class_type\": \"CLIPTextEncode\"}, \"7\": {\"inputs\": {\"text\": \"worst quality, low quality, deformed, malformed, distorted, blurry, unfocused, \", \"clip\": [\"51\", 0]}, \"class_type\": \"CLIPTextEncode\"}, \"8\": {\"inputs\": {\"samples\": [\"3\", 0], \"vae\": [\"42\", 2]}, \"class_type\": \"VAEDecode\"}, \"9\": {\"inputs\": {\"filename_prefix\": \"ComfyUI\", \"images\": [\"8\", 0]}, \"class_type\": \"SaveImage\"}, \"42\": {\"inputs\": {\"ckpt_name\": \"SDXL\\\\Proteus-v0.6.8.safetensors\"}, \"class_type\": \"CheckpointLoaderSimple\"}, \"49\": {\"inputs\": {\"switch_1\": \"On\", \"lora_name_1\": \"proteus\\\\20-prod.safetensors\", \"model_weight_1\": 0.1, \"clip_weight_1\": 0.1, \"switch_2\": \"On\", \"lora_name_2\": \"proteus\\\\proteus-v0.6.8-lora.safetensors\", \"model_weight_2\": 0.05, \"clip_weight_2\": 0.01, \"switch_3\": \"On\", \"lora_name_3\": \"proteus\\\\proteus-v068-lora-kohya.safetensors\", \"model_weight_3\": 0.15, \"clip_weight_3\": 0.0}, \"class_type\": \"CR LoRA Stack\"}, \"50\": {\"inputs\": {\"model\": [\"42\", 0], \"clip\": [\"42\", 1], \"lora_stack\": [\"49\", 0]}, \"class_type\": \"CR Apply LoRA Stack\"}, \"51\": {\"inputs\": {\"stop_at_clip_layer\": -1, \"clip\": [\"50\", 1]}, \"class_type\": \"CLIPSetLastLayer\"}}, \"workflow\": {\"last_node_id\": 53, \"last_link_id\": 164, \"nodes\": [{\"id\": 8, \"type\": \"VAEDecode\", \"pos\": {\"0\": 1209, \"1\": 188}, \"size\": {\"0\": 210, \"1\": 46}, \"flags\": {}, \"order\": 8, \"mode\": 0, \"inputs\": [{\"name\": \"samples\", \"type\": \"LATENT\", \"link\": 7}, {\"name\": \"vae\", \"type\": \"VAE\", \"link\": 143}], \"outputs\": [{\"name\": \"IMAGE\", \"type\": \"IMAGE\", \"links\": [9], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"VAEDecode\"}}, {\"id\": 5, \"type\": \"EmptyLatentImage\", \"pos\": {\"0\": 473, \"1\": 609}, \"size\": {\"0\": 315, \"1\": 106}, \"flags\": {}, \"order\": 0, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [2], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"EmptyLatentImage\"}, \"widgets_values\": [1024, 1024, 4]}, {\"id\": 50, \"type\": \"CR Apply LoRA Stack\", \"pos\": {\"0\": -311, \"1\": 314}, \"size\": {\"0\": 277.20001220703125, \"1\": 66}, \"flags\": {}, \"order\": 3, \"mode\": 0, \"inputs\": [{\"name\": \"model\", \"type\": \"MODEL\", \"link\": 156}, {\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 155}, {\"name\": \"lora_stack\", \"type\": \"LORA_STACK\", \"link\": 154}], \"outputs\": [{\"name\": \"MODEL\", \"type\": \"MODEL\", \"links\": [159], \"shape\": 3, \"slot_index\": 0}, {\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [160], \"shape\": 3, 
\"slot_index\": 1}, {\"name\": \"show_help\", \"type\": \"STRING\", \"links\": null, \"shape\": 3}], \"properties\": {\"Node name for S&R\": \"CR Apply LoRA Stack\"}}, {\"id\": 51, \"type\": \"CLIPSetLastLayer\", \"pos\": {\"0\": 14, \"1\": 333}, \"size\": {\"0\": 315, \"1\": 58}, \"flags\": {}, \"order\": 4, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 160}], \"outputs\": [{\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [161, 162], \"shape\": 3, \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"CLIPSetLastLayer\"}, \"widgets_values\": [-1]}, {\"id\": 42, \"type\": \"CheckpointLoaderSimple\", \"pos\": {\"0\": -671, \"1\": 300}, \"size\": {\"0\": 315, \"1\": 98}, \"flags\": {}, \"order\": 1, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"MODEL\", \"type\": \"MODEL\", \"links\": [156], \"slot_index\": 0}, {\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [155], \"slot_index\": 1}, {\"name\": \"VAE\", \"type\": \"VAE\", \"links\": [143], \"slot_index\": 2}], \"properties\": {\"Node name for S&R\": \"CheckpointLoaderSimple\"}, \"widgets_values\": [\"SDXL\\\\Proteus-v0.6.8.safetensors\"]}, {\"id\": 49, \"type\": \"CR LoRA Stack\", \"pos\": {\"0\": -721, \"1\": -102}, \"size\": [419.49158146472223, 345.60073301642353], \"flags\": {}, \"order\": 2, \"mode\": 0, \"inputs\": [{\"name\": \"lora_stack\", \"type\": \"LORA_STACK\", \"link\": null}], \"outputs\": [{\"name\": \"LORA_STACK\", \"type\": \"LORA_STACK\", \"links\": [154], \"shape\": 3, \"slot_index\": 0}, {\"name\": \"show_help\", \"type\": \"STRING\", \"links\": null, \"shape\": 3}], \"properties\": {\"Node name for S&R\": \"CR LoRA Stack\"}, \"widgets_values\": [\"On\", \"proteus\\\\20-prod.safetensors\", 0.1, 0.1, \"On\", \"proteus\\\\proteus-v0.6.8-lora.safetensors\", 0.05, 0.01, \"On\", \"proteus\\\\proteus-v068-lora-kohya.safetensors\", 0.15, 0]}, {\"id\": 7, \"type\": \"CLIPTextEncode\", \"pos\": {\"0\": 413, \"1\": 389}, \"size\": {\"0\": 425.27801513671875, \"1\": 180.6060791015625}, \"flags\": {}, \"order\": 5, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 161}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [6], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, \"widgets_values\": [\"worst quality, low quality, deformed, malformed, distorted, blurry, unfocused, \"]}, {\"id\": 6, \"type\": \"CLIPTextEncode\", \"pos\": {\"0\": 415, \"1\": 186}, \"size\": {\"0\": 422.84503173828125, \"1\": 164.31304931640625}, \"flags\": {}, \"order\": 6, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 162}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [4], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, \"widgets_values\": [\"cinematic film still of Kodak Motion Picture Film: (Sharp Detailed Image) An Oscar winning movie for Best Cinematography a woman in a kimono standing on a subway train in Japan Kodak Motion Picture Film Style, shallow depth of field, vignette, highly detailed, high budget, bokeh, cinemascope, moody, epic, gorgeous, film grain, grainy\"]}, {\"id\": 9, \"type\": \"SaveImage\", \"pos\": {\"0\": 1529, \"1\": 200}, \"size\": [659.0197498937328, 752.0825694327606], \"flags\": {}, \"order\": 9, \"mode\": 0, \"inputs\": [{\"name\": \"images\", \"type\": \"IMAGE\", \"link\": 9}], \"outputs\": [], \"properties\": {}, \"widgets_values\": [\"ComfyUI\"]}, {\"id\": 3, \"type\": 
\"KSampler\", \"pos\": {\"0\": 863, \"1\": 186}, \"size\": {\"0\": 315, \"1\": 474}, \"flags\": {}, \"order\": 7, \"mode\": 0, \"inputs\": [{\"name\": \"model\", \"type\": \"MODEL\", \"link\": 159}, {\"name\": \"positive\", \"type\": \"CONDITIONING\", \"link\": 4}, {\"name\": \"negative\", \"type\": \"CONDITIONING\", \"link\": 6}, {\"name\": \"latent_image\", \"type\": \"LATENT\", \"link\": 2}], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [7], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"KSampler\"}, \"widgets_values\": [548825752111351, \"randomize\", 50, 7.1000000000000005, \"dpmpp_3m_sde_gpu\", \"karras\", 1]}], \"links\": [[2, 5, 0, 3, 3, \"LATENT\"], [4, 6, 0, 3, 1, \"CONDITIONING\"], [6, 7, 0, 3, 2, \"CONDITIONING\"], [7, 3, 0, 8, 0, \"LATENT\"], [9, 8, 0, 9, 0, \"IMAGE\"], [143, 42, 2, 8, 1, \"VAE\"], [154, 49, 0, 50, 2, \"LORA_STACK\"], [155, 42, 1, 50, 1, \"CLIP\"], [156, 42, 0, 50, 0, \"MODEL\"], [159, 50, 0, 3, 0, \"MODEL\"], [160, 50, 1, 51, 0, \"CLIP\"], [161, 51, 0, 7, 0, \"CLIP\"], [162, 51, 0, 6, 0, \"CLIP\"]], \"groups\": [], \"config\": {}, \"extra\": {\"ds\": {\"scale\": 1, \"offset\": [230.59613365275737, -3.0202237894033033]}}, \"version\": 0.4, \"widget_idx_map\": {\"3\": {\"seed\": 0, \"sampler_name\": 4, \"scheduler\": 5}}}}",
"steps": 50,
"width": 1024,
"height": 1024,
"models": [
"SDXL\\Proteus-v0.6.8.safetensors"
],
"prompt": "cinematic film still of Kodak Motion Picture Film: (Sharp Detailed Image) An Oscar winning movie for Best Cinematography a woman in a kimono standing on a subway train in Japan Kodak Motion Picture Film Style, shallow depth of field, vignette, highly detailed, high budget, bokeh, cinemascope, moody, epic, gorgeous, film grain, grainy",
"denoise": 1,
"sampler": "dpmpp_3m_sde_gpu",
"cfgScale": 7.100000000000001,
"modelIds": [],
"scheduler": "karras",
"upscalers": [],
"versionIds": [],
"controlNets": [],
"negativePrompt": "worst quality, low quality, deformed, malformed, distorted, blurry, unfocused, ",
"additionalResources": []
},
"availability": "Public",
"hasMeta": true,
"hasPositivePrompt": true,
"onSite": false,
"remixOfId": null
},
{
"url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/b0bce465-eee9-49d4-99b3-8eeca6586722/width=1024/35275867.jpeg",
"nsfwLevel": 1,
"width": 1024,
"height": 1024,
"hash": "UYJ8kW.8_NRjxDM{%gogIUM{f5tRxuofWBM{",
"type": "image",
"metadata": {
"hash": "UYJ8kW.8_NRjxDM{%gogIUM{f5tRxuofWBM{",
"size": 1682548,
"width": 1024,
"height": 1024
},
"meta": {
"seed": 624460581444277,
"vaes": [],
"Model": "SDXL\\Proteus-v0.6.8",
"comfy": "{\"prompt\": {\"3\": {\"inputs\": {\"seed\": 624460581444277, \"steps\": 50, \"cfg\": 7.1000000000000005, \"sampler_name\": \"dpmpp_3m_sde_gpu\", \"scheduler\": \"karras\", \"denoise\": 1.0, \"model\": [\"50\", 0], \"positive\": [\"6\", 0], \"negative\": [\"7\", 0], \"latent_image\": [\"5\", 0]}, \"class_type\": \"KSampler\"}, \"5\": {\"inputs\": {\"width\": 1024, \"height\": 1024, \"batch_size\": 4}, \"class_type\": \"EmptyLatentImage\"}, \"6\": {\"inputs\": {\"text\": \"Super Closeup Portrait, action shot, Profoundly dark whitish meadow, glass flowers, Stains, space grunge style, Jeanne d'Arc wearing White Olive green used styled Cotton frock, Wielding thin silver sword, Sci-fi vibe, dirty, noisy, Vintage monk style, very detailed, best\", \"clip\": [\"51\", 0]}, \"class_type\": \"CLIPTextEncode\"}, \"7\": {\"inputs\": {\"text\": \"worst quality, low quality, deformed, malformed, distorted, blurry, unfocused, \", \"clip\": [\"51\", 0]}, \"class_type\": \"CLIPTextEncode\"}, \"8\": {\"inputs\": {\"samples\": [\"3\", 0], \"vae\": [\"42\", 2]}, \"class_type\": \"VAEDecode\"}, \"9\": {\"inputs\": {\"filename_prefix\": \"ComfyUI\", \"images\": [\"8\", 0]}, \"class_type\": \"SaveImage\"}, \"42\": {\"inputs\": {\"ckpt_name\": \"SDXL\\\\Proteus-v0.6.8.safetensors\"}, \"class_type\": \"CheckpointLoaderSimple\"}, \"49\": {\"inputs\": {\"switch_1\": \"On\", \"lora_name_1\": \"proteus\\\\20-prod.safetensors\", \"model_weight_1\": 0.1, \"clip_weight_1\": 0.1, \"switch_2\": \"On\", \"lora_name_2\": \"proteus\\\\proteus-v0.6.8-lora.safetensors\", \"model_weight_2\": 0.05, \"clip_weight_2\": 0.01, \"switch_3\": \"On\", \"lora_name_3\": \"proteus\\\\proteus-v068-lora-kohya.safetensors\", \"model_weight_3\": 0.15, \"clip_weight_3\": 0.0}, \"class_type\": \"CR LoRA Stack\"}, \"50\": {\"inputs\": {\"model\": [\"42\", 0], \"clip\": [\"42\", 1], \"lora_stack\": [\"49\", 0]}, \"class_type\": \"CR Apply LoRA Stack\"}, \"51\": {\"inputs\": {\"stop_at_clip_layer\": -1, \"clip\": [\"50\", 1]}, \"class_type\": \"CLIPSetLastLayer\"}}, \"workflow\": {\"last_node_id\": 53, \"last_link_id\": 164, \"nodes\": [{\"id\": 8, \"type\": \"VAEDecode\", \"pos\": {\"0\": 1209, \"1\": 188}, \"size\": {\"0\": 210, \"1\": 46}, \"flags\": {}, \"order\": 8, \"mode\": 0, \"inputs\": [{\"name\": \"samples\", \"type\": \"LATENT\", \"link\": 7}, {\"name\": \"vae\", \"type\": \"VAE\", \"link\": 143}], \"outputs\": [{\"name\": \"IMAGE\", \"type\": \"IMAGE\", \"links\": [9], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"VAEDecode\"}}, {\"id\": 5, \"type\": \"EmptyLatentImage\", \"pos\": {\"0\": 473, \"1\": 609}, \"size\": {\"0\": 315, \"1\": 106}, \"flags\": {}, \"order\": 0, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [2], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"EmptyLatentImage\"}, \"widgets_values\": [1024, 1024, 4]}, {\"id\": 50, \"type\": \"CR Apply LoRA Stack\", \"pos\": {\"0\": -311, \"1\": 314}, \"size\": {\"0\": 277.20001220703125, \"1\": 66}, \"flags\": {}, \"order\": 3, \"mode\": 0, \"inputs\": [{\"name\": \"model\", \"type\": \"MODEL\", \"link\": 156}, {\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 155}, {\"name\": \"lora_stack\", \"type\": \"LORA_STACK\", \"link\": 154}], \"outputs\": [{\"name\": \"MODEL\", \"type\": \"MODEL\", \"links\": [159], \"shape\": 3, \"slot_index\": 0}, {\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [160], \"shape\": 3, \"slot_index\": 1}, {\"name\": \"show_help\", \"type\": \"STRING\", 
\"links\": null, \"shape\": 3}], \"properties\": {\"Node name for S&R\": \"CR Apply LoRA Stack\"}}, {\"id\": 51, \"type\": \"CLIPSetLastLayer\", \"pos\": {\"0\": 14, \"1\": 333}, \"size\": {\"0\": 315, \"1\": 58}, \"flags\": {}, \"order\": 4, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 160}], \"outputs\": [{\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [161, 162], \"shape\": 3, \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"CLIPSetLastLayer\"}, \"widgets_values\": [-1]}, {\"id\": 42, \"type\": \"CheckpointLoaderSimple\", \"pos\": {\"0\": -671, \"1\": 300}, \"size\": {\"0\": 315, \"1\": 98}, \"flags\": {}, \"order\": 1, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"MODEL\", \"type\": \"MODEL\", \"links\": [156], \"slot_index\": 0}, {\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [155], \"slot_index\": 1}, {\"name\": \"VAE\", \"type\": \"VAE\", \"links\": [143], \"slot_index\": 2}], \"properties\": {\"Node name for S&R\": \"CheckpointLoaderSimple\"}, \"widgets_values\": [\"SDXL\\\\Proteus-v0.6.8.safetensors\"]}, {\"id\": 49, \"type\": \"CR LoRA Stack\", \"pos\": {\"0\": -721, \"1\": -102}, \"size\": [419.49158146472223, 345.60073301642353], \"flags\": {}, \"order\": 2, \"mode\": 0, \"inputs\": [{\"name\": \"lora_stack\", \"type\": \"LORA_STACK\", \"link\": null}], \"outputs\": [{\"name\": \"LORA_STACK\", \"type\": \"LORA_STACK\", \"links\": [154], \"shape\": 3, \"slot_index\": 0}, {\"name\": \"show_help\", \"type\": \"STRING\", \"links\": null, \"shape\": 3}], \"properties\": {\"Node name for S&R\": \"CR LoRA Stack\"}, \"widgets_values\": [\"On\", \"proteus\\\\20-prod.safetensors\", 0.1, 0.1, \"On\", \"proteus\\\\proteus-v0.6.8-lora.safetensors\", 0.05, 0.01, \"On\", \"proteus\\\\proteus-v068-lora-kohya.safetensors\", 0.15, 0]}, {\"id\": 6, \"type\": \"CLIPTextEncode\", \"pos\": {\"0\": 415, \"1\": 186}, \"size\": {\"0\": 422.84503173828125, \"1\": 164.31304931640625}, \"flags\": {}, \"order\": 6, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 162}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [4], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, \"widgets_values\": [\"Super Closeup Portrait, action shot, Profoundly dark whitish meadow, glass flowers, Stains, space grunge style, Jeanne d'Arc wearing White Olive green used styled Cotton frock, Wielding thin silver sword, Sci-fi vibe, dirty, noisy, Vintage monk style, very detailed, best\"]}, {\"id\": 7, \"type\": \"CLIPTextEncode\", \"pos\": {\"0\": 413, \"1\": 389}, \"size\": {\"0\": 425.27801513671875, \"1\": 180.6060791015625}, \"flags\": {}, \"order\": 5, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 161}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [6], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, \"widgets_values\": [\"worst quality, low quality, deformed, malformed, distorted, blurry, unfocused, \"]}, {\"id\": 9, \"type\": \"SaveImage\", \"pos\": {\"0\": 1524, \"1\": 200}, \"size\": [659.0197498937328, 752.0825694327606], \"flags\": {}, \"order\": 9, \"mode\": 0, \"inputs\": [{\"name\": \"images\", \"type\": \"IMAGE\", \"link\": 9}], \"outputs\": [], \"properties\": {}, \"widgets_values\": [\"ComfyUI\"]}, {\"id\": 3, \"type\": \"KSampler\", \"pos\": {\"0\": 863, \"1\": 186}, \"size\": {\"0\": 315, \"1\": 474}, \"flags\": {}, \"order\": 7, \"mode\": 0, \"inputs\": 
[{\"name\": \"model\", \"type\": \"MODEL\", \"link\": 159}, {\"name\": \"positive\", \"type\": \"CONDITIONING\", \"link\": 4}, {\"name\": \"negative\", \"type\": \"CONDITIONING\", \"link\": 6}, {\"name\": \"latent_image\", \"type\": \"LATENT\", \"link\": 2}], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [7], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"KSampler\"}, \"widgets_values\": [624460581444277, \"randomize\", 50, 7.1000000000000005, \"dpmpp_3m_sde_gpu\", \"karras\", 1]}], \"links\": [[2, 5, 0, 3, 3, \"LATENT\"], [4, 6, 0, 3, 1, \"CONDITIONING\"], [6, 7, 0, 3, 2, \"CONDITIONING\"], [7, 3, 0, 8, 0, \"LATENT\"], [9, 8, 0, 9, 0, \"IMAGE\"], [143, 42, 2, 8, 1, \"VAE\"], [154, 49, 0, 50, 2, \"LORA_STACK\"], [155, 42, 1, 50, 1, \"CLIP\"], [156, 42, 0, 50, 0, \"MODEL\"], [159, 50, 0, 3, 0, \"MODEL\"], [160, 50, 1, 51, 0, \"CLIP\"], [161, 51, 0, 7, 0, \"CLIP\"], [162, 51, 0, 6, 0, \"CLIP\"]], \"groups\": [], \"config\": {}, \"extra\": {\"ds\": {\"scale\": 1.2100000000000002, \"offset\": [-567.5801365564367, -196.38485034302553]}}, \"version\": 0.4, \"widget_idx_map\": {\"3\": {\"seed\": 0, \"sampler_name\": 4, \"scheduler\": 5}}}}",
"steps": 50,
"width": 1024,
"height": 1024,
"models": [
"SDXL\\Proteus-v0.6.8.safetensors"
],
"prompt": "Super Closeup Portrait, action shot, Profoundly dark whitish meadow, glass flowers, Stains, space grunge style, Jeanne d'Arc wearing White Olive green used styled Cotton frock, Wielding thin silver sword, Sci-fi vibe, dirty, noisy, Vintage monk style, very detailed, best",
"denoise": 1,
"sampler": "dpmpp_3m_sde_gpu",
"cfgScale": 7.100000000000001,
"modelIds": [],
"scheduler": "karras",
"upscalers": [],
"versionIds": [],
"controlNets": [],
"negativePrompt": "worst quality, low quality, deformed, malformed, distorted, blurry, unfocused, ",
"additionalResources": []
},
"availability": "Public",
"hasMeta": true,
"hasPositivePrompt": true,
"onSite": false,
"remixOfId": null
},
{
"url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/0f320a97-8031-4651-8c42-009d6d9ec4fc/width=1024/35275871.jpeg",
"nsfwLevel": 1,
"width": 1024,
"height": 1024,
"hash": "UlI#W1ofD%j[~WoeM{ayjEj[ozayj[fQj[j[",
"type": "image",
"metadata": {
"hash": "UlI#W1ofD%j[~WoeM{ayjEj[ozayj[fQj[j[",
"size": 1244616,
"width": 1024,
"height": 1024
},
"meta": {
"seed": 949825792965028,
"vaes": [],
"Model": "SDXL\\Proteus-v0.6",
"comfy": "{\"prompt\": {\"3\": {\"inputs\": {\"seed\": 949825792965028, \"steps\": 50, \"cfg\": 7.46, \"sampler_name\": \"dpmpp_3m_sde_gpu\", \"scheduler\": \"karras\", \"denoise\": 1.0, \"model\": [\"42\", 0], \"positive\": [\"6\", 0], \"negative\": [\"7\", 0], \"latent_image\": [\"5\", 0]}, \"class_type\": \"KSampler\"}, \"5\": {\"inputs\": {\"width\": 1024, \"height\": 1024, \"batch_size\": 4}, \"class_type\": \"EmptyLatentImage\"}, \"6\": {\"inputs\": {\"text\": \"(\\\"Proteus\\\" text logo) powerful aura, swirling power, cinematic, masterpiece, award-winning \", \"clip\": [\"42\", 1]}, \"class_type\": \"CLIPTextEncode\"}, \"7\": {\"inputs\": {\"text\": \"worst quality, low quality, deformed, malformed, distorted, blurry, unfocused, \", \"clip\": [\"42\", 1]}, \"class_type\": \"CLIPTextEncode\"}, \"8\": {\"inputs\": {\"samples\": [\"3\", 0], \"vae\": [\"42\", 2]}, \"class_type\": \"VAEDecode\"}, \"9\": {\"inputs\": {\"filename_prefix\": \"ComfyUI\", \"images\": [\"8\", 0]}, \"class_type\": \"SaveImage\"}, \"42\": {\"inputs\": {\"ckpt_name\": \"SDXL\\\\Proteus-v0.6.safetensors\"}, \"class_type\": \"CheckpointLoaderSimple\"}}, \"workflow\": {\"last_node_id\": 57, \"last_link_id\": 178, \"nodes\": [{\"id\": 8, \"type\": \"VAEDecode\", \"pos\": {\"0\": 1209, \"1\": 188}, \"size\": {\"0\": 210, \"1\": 46}, \"flags\": {}, \"order\": 6, \"mode\": 0, \"inputs\": [{\"name\": \"samples\", \"type\": \"LATENT\", \"link\": 7}, {\"name\": \"vae\", \"type\": \"VAE\", \"link\": 143}], \"outputs\": [{\"name\": \"IMAGE\", \"type\": \"IMAGE\", \"links\": [9], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"VAEDecode\"}}, {\"id\": 5, \"type\": \"EmptyLatentImage\", \"pos\": {\"0\": 473, \"1\": 609}, \"size\": {\"0\": 315, \"1\": 106}, \"flags\": {}, \"order\": 0, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [2], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"EmptyLatentImage\"}, \"widgets_values\": [1024, 1024, 4]}, {\"id\": 42, \"type\": \"CheckpointLoaderSimple\", \"pos\": {\"0\": -90, \"1\": 287}, \"size\": {\"0\": 315, \"1\": 98}, \"flags\": {}, \"order\": 1, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"MODEL\", \"type\": \"MODEL\", \"links\": [176], \"slot_index\": 0}, {\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [177, 178], \"slot_index\": 1}, {\"name\": \"VAE\", \"type\": \"VAE\", \"links\": [143], \"slot_index\": 2}], \"properties\": {\"Node name for S&R\": \"CheckpointLoaderSimple\"}, \"widgets_values\": [\"SDXL\\\\Proteus-v0.6.safetensors\"]}, {\"id\": 56, \"type\": \"Note\", \"pos\": {\"0\": -21, \"1\": 531}, \"size\": {\"0\": 312.8000183105469, \"1\": 60}, \"flags\": {}, \"order\": 2, \"mode\": 0, \"inputs\": [], \"outputs\": [], \"properties\": {}, \"widgets_values\": [\"worst quality, low quality, deformed, malformed, distorted, blurry, unfocused, \"], \"color\": \"#432\", \"bgcolor\": \"#653\"}, {\"id\": 7, \"type\": \"CLIPTextEncode\", \"pos\": {\"0\": 413, \"1\": 389}, \"size\": {\"0\": 425.27801513671875, \"1\": 180.6060791015625}, \"flags\": {}, \"order\": 3, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 177}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [6], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, \"widgets_values\": [\"worst quality, low quality, deformed, malformed, distorted, blurry, unfocused, \"]}, {\"id\": 3, \"type\": \"KSampler\", \"pos\": {\"0\": 863, \"1\": 187}, 
\"size\": {\"0\": 315, \"1\": 474}, \"flags\": {}, \"order\": 5, \"mode\": 0, \"inputs\": [{\"name\": \"model\", \"type\": \"MODEL\", \"link\": 176}, {\"name\": \"positive\", \"type\": \"CONDITIONING\", \"link\": 4}, {\"name\": \"negative\", \"type\": \"CONDITIONING\", \"link\": 6}, {\"name\": \"latent_image\", \"type\": \"LATENT\", \"link\": 2}], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [7], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"KSampler\"}, \"widgets_values\": [949825792965028, \"randomize\", 50, 7.46, \"dpmpp_3m_sde_gpu\", \"karras\", 1]}, {\"id\": 6, \"type\": \"CLIPTextEncode\", \"pos\": {\"0\": 415, \"1\": 186}, \"size\": {\"0\": 422.84503173828125, \"1\": 164.31304931640625}, \"flags\": {}, \"order\": 4, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 178}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [4], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, \"widgets_values\": [\"(\\\"Proteus\\\" text logo) powerful aura, swirling power, cinematic, masterpiece, award-winning \"]}, {\"id\": 9, \"type\": \"SaveImage\", \"pos\": {\"0\": 1535, \"1\": 200}, \"size\": {\"0\": 642.628662109375, \"1\": 727.1895751953125}, \"flags\": {}, \"order\": 7, \"mode\": 0, \"inputs\": [{\"name\": \"images\", \"type\": \"IMAGE\", \"link\": 9}], \"outputs\": [], \"properties\": {}, \"widgets_values\": [\"ComfyUI\"]}], \"links\": [[2, 5, 0, 3, 3, \"LATENT\"], [4, 6, 0, 3, 1, \"CONDITIONING\"], [6, 7, 0, 3, 2, \"CONDITIONING\"], [7, 3, 0, 8, 0, \"LATENT\"], [9, 8, 0, 9, 0, \"IMAGE\"], [143, 42, 2, 8, 1, \"VAE\"], [176, 42, 0, 3, 0, \"MODEL\"], [177, 42, 1, 7, 0, \"CLIP\"], [178, 42, 1, 6, 0, \"CLIP\"]], \"groups\": [], \"config\": {}, \"extra\": {\"ds\": {\"scale\": 0.7513148009015777, \"offset\": [-150.9499622122977, -16.929856111594276]}}, \"version\": 0.4, \"widget_idx_map\": {\"3\": {\"seed\": 0, \"sampler_name\": 4, \"scheduler\": 5}}}}",
"steps": 50,
"width": 1024,
"height": 1024,
"models": [
"SDXL\\Proteus-v0.6.safetensors"
],
"prompt": "(\"Proteus\" text logo) powerful aura, swirling power, cinematic, masterpiece, award-winning ",
"denoise": 1,
"sampler": "dpmpp_3m_sde_gpu",
"cfgScale": 7.46,
"modelIds": [],
"scheduler": "karras",
"upscalers": [],
"versionIds": [],
"controlNets": [],
"negativePrompt": "worst quality, low quality, deformed, malformed, distorted, blurry, unfocused, ",
"additionalResources": []
},
"availability": "Public",
"hasMeta": true,
"hasPositivePrompt": true,
"onSite": false,
"remixOfId": null
}
],
"downloadUrl": "https://civitai.com/api/download/models/969886"
}
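The record above follows the shape returned by Civitai's public model-version API, so a snapshot like proteus_v06.json can be refreshed or cross-checked against the live service. Below is a minimal sketch under that assumption, using the "id" from this file and the SHA256 recorded under "hashes" to verify a downloaded checkpoint; the endpoint path and field access are inferred from this JSON, not stated anywhere in the commit.

# Illustrative only: re-fetch the metadata and verify a local download's SHA256.
import hashlib
import requests

VERSION_ID = 969886  # "id" field of this snapshot

meta = requests.get(
    f"https://civitai.com/api/v1/model-versions/{VERSION_ID}", timeout=30
).json()

primary = next(f for f in meta["files"] if f.get("primary"))
print(primary["name"], primary["downloadUrl"])

def sha256_of(path: str, chunk: int = 1 << 20) -> str:
    """Stream a file and return its uppercase SHA256 hex digest."""
    h = hashlib.sha256()
    with open(path, "rb") as fh:
        while block := fh.read(chunk):
            h.update(block)
    return h.hexdigest().upper()

# e.g. after downloading the primary file locally:
# assert sha256_of("proteus_v06.safetensors") == primary["hashes"]["SHA256"]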