Duskfallcrew committed
Commit 8d4fd1a · 1 Parent(s): 7b349a6

Upload app.py with huggingface_hub

Files changed (1): app.py +3 -7
app.py CHANGED
@@ -4,7 +4,7 @@ import torch
 from PIL import Image
 
 model_id = 'Duskfallcrew/Animated_Dreams'
-prefix = 'meitu'
+prefix = ''
 
 scheduler = DPMSolverMultistepScheduler.from_pretrained(model_id, subfolder="scheduler")
 
@@ -79,11 +79,7 @@ with gr.Blocks(css=css) as demo:
         <h1>Animated Dreams</h1>
       </div>
       <p>
-Demo for <a href="https://huggingface.co/Duskfallcrew/Animated_Dreams">Animated Dreams</a> Stable Diffusion model. </br> All samples and info are here:
-https://civitai.com/user/duskfallcrew
-</br>
-If you want to donate towards costs and don't want to subscribe:
-https://ko-fi.com/DUSKFALLcrew<br>
+Demo for <a href="https://huggingface.co/Duskfallcrew/Animated_Dreams">Animated Dreams</a> Stable Diffusion model. Running on Free CPU, if there's a queue make sure you duplicate the space to your own and if you got the funds upgrade to GPU. No prefix tokens. If you like what you see consider donating here: <a href="https://ko-fi.com/DUSKFALLcrew">Ko-Fi Duskfallcrew </a><br>
       {"Add the following tokens to your prompts for the model to work properly: <b>prefix</b>" if prefix else ""}
       </p>
       Running on {"<b>GPU 🔥</b>" if torch.cuda.is_available() else f"<b>CPU 🥶</b>. For faster inference it is recommended to <b>upgrade to GPU in <a href='https://huggingface.co/spaces/Duskfallcrew/Animated_Dreams/settings'>Settings</a></b>"} after duplicating the space<br><br>
@@ -106,7 +102,7 @@ https://ko-fi.com/DUSKFALLcrew<br>
     with gr.Tab("Options"):
         with gr.Group():
             neg_prompt = gr.Textbox(label="Negative prompt", placeholder="What to exclude from the image")
-            auto_prefix = gr.Checkbox(label="Prefix styling tokens automatically (meitu)", value=prefix, visible=prefix)
+            auto_prefix = gr.Checkbox(label="Prefix styling tokens automatically ()", value=prefix, visible=prefix)
 
         with gr.Row():
            guidance = gr.Slider(label="Guidance scale", value=7.5, maximum=15)
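
Effect of the cleared prefix on the Options tab: with prefix == '', both value=prefix and visible=prefix are falsy, so the auto-prefix checkbox starts unchecked and is hidden, and the "Add the following tokens" note in the <p> block renders as an empty string. The standalone sketch below illustrates that behaviour only; it is not part of this commit, the explicit bool(prefix) and the f-string label are assumptions (the diff passes prefix directly and hardcodes the label text), and only Gradio calls that already appear in the diff are used.

import gradio as gr

# After this commit the model needs no styling prefix; before it was 'meitu'.
prefix = ''

with gr.Blocks() as demo:
    neg_prompt = gr.Textbox(label="Negative prompt", placeholder="What to exclude from the image")
    # With prefix == '' both bool(prefix) values are False, so the checkbox
    # starts unchecked and is hidden from the Options tab entirely.
    auto_prefix = gr.Checkbox(
        label=f"Prefix styling tokens automatically ({prefix})",
        value=bool(prefix),
        visible=bool(prefix),
    )

if __name__ == "__main__":
    demo.launch()

Setting prefix back to a non-empty token such as 'meitu' would restore both the checkbox and the prompt note without any other code change.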