Juggernaut-X-Hyper-onnx / model_template.json
{
  "Id": "3C23BD80-E694-4AF5-B098-D1682F325102",
  "FileVersion": "1",
  "Created": "2024-06-16T00:00:00",
  "Name": "Juggernaut XL Hyper",
  "ImageIcon": "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/Icon.png",
  "Author": "KandooAI",
  "Description": "Juggernaut Hyper is an optimized version of the Juggernaut model for faster performance on lower-end hardware. It features a rebuilt framework with GPT-4 Vision and a new structure for easier prompting. While some areas such as text, male genitalia, and hand poses are still in development, the model aims to deliver high-quality results efficiently. Users are encouraged to refer to the detailed guide for best practices. Regular updates are planned.",
  "Rank": 310,
  "Group": "Online",
  "Template": "SDXL",
  "Category": "StableDiffusion",
  "StableDiffusionTemplate": {
    "PipelineType": "StableDiffusionXL",
    "ModelType": "Base",
    "SampleSize": 1024,
    "TokenizerLength": 768,
    "DiffuserTypes": [
      "TextToImage",
      "ImageToImage",
      "ImageInpaintLegacy"
    ],
    "SchedulerDefaults": {
      "SchedulerType": "EulerAncestral",
      "Steps": 8,
      "StepsMin": 2,
      "StepsMax": 50,
      "Guidance": 1.4,
      "GuidanceMin": 0,
      "GuidanceMax": 10,
      "TimestepSpacing": "Linspace",
      "BetaSchedule": "ScaledLinear",
      "BetaStart": 0.00085,
      "BetaEnd": 0.012
    }
  },
  "Precision": "F16",
  "MemoryMin": 6,
  "MemoryMax": 14,
  "DownloadSize": 11.2,
  "Website": "https://civitai.com/models/133005?modelVersionId=471120",
  "Repository": "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx",
  "RepositoryFiles": [
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/controlnet/model.onnx",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/controlnet/model.onnx.data",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/controlnet/config.json",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/scheduler/scheduler_config.json",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/text_encoder/model.onnx",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/text_encoder/config.json",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/text_encoder_2/model.onnx",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/text_encoder_2/config.json",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/tokenizer/merges.txt",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/tokenizer/model.onnx",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/tokenizer/special_tokens_map.json",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/tokenizer/tokenizer_config.json",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/tokenizer/vocab.json",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/tokenizer_2/merges.txt",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/tokenizer_2/model.onnx",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/tokenizer_2/special_tokens_map.json",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/tokenizer_2/tokenizer_config.json",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/tokenizer_2/vocab.json",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/unet/model.onnx",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/unet/model.onnx.data",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/unet/config.json",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/vae_decoder/model.onnx",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/vae_decoder/config.json",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/vae_encoder/model.onnx",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/vae_encoder/config.json",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/model_index.json",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/model_template.json",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/README.md"
  ],
  "PreviewImages": [
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/Sample.png",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/Sample2.png",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/Sample3.png",
    "https://huggingface.co/TensorStack/Juggernaut-X-Hyper-onnx/resolve/main/Sample4.png"
  ],
  "Tags": [
    "GPU",
    "F16"
  ]
}
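
Every entry in "RepositoryFiles" resolves against the TensorStack/Juggernaut-X-Hyper-onnx repository, so the full ONNX pipeline (roughly the 11.2 GB listed in "DownloadSize") can be mirrored file by file. The snippet below is a minimal, hypothetical Python sketch of that idea using the huggingface_hub library; it is not part of this template and not TensorStack tooling, and the local "model_template.json" path is assumed for the example.

```python
import json
from pathlib import Path
from urllib.parse import urlparse

from huggingface_hub import hf_hub_download  # pip install huggingface_hub

REPO_ID = "TensorStack/Juggernaut-X-Hyper-onnx"

# Load the template shown above (local path is an assumption for this example).
template = json.loads(Path("model_template.json").read_text(encoding="utf-8"))

for url in template["RepositoryFiles"]:
    # Each URL has the form .../<repo>/resolve/main/<relative path>;
    # the part after "resolve/main/" is the file's path inside the repo.
    relative_path = urlparse(url).path.split("/resolve/main/", 1)[1]
    local_path = hf_hub_download(repo_id=REPO_ID, filename=relative_path)
    print(f"{relative_path} -> {local_path}")
```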