Hugging Face Spaces — status: Running
Commit: "update app to stable cascade" (Browse files)
Changed file: app.py
OLD VERSION (lines removed or changed are marked with `-`; blank `-` rows are
placeholders where the new version added lines; the values after
`"negative_prompt":`, `"width":` and `"height":` on removed lines 38/44/45 were
lost in extraction and are shown as scraped):

@@ -18,13 +18,13 @@ enable_space_ci()
 18  client = Client("multimodalart/stable-cascade")
 19
 20
 21 -def generate(prompt: str, profile: gr.OAuthProfile | None) -> tuple[str, list[str]]:
 22      generated_img_path = client.predict(
 23          prompt,  # str in 'Prompt' Textbox component
 24 -
 25          0,  # float (numeric value between 0 and 2147483647) in 'Seed' Slider component
 26 -
 27 -
 28          20,  # float (numeric value between 10 and 30) in 'Prior Inference Steps' Slider component
 29          4,  # float (numeric value between 0 and 20) in 'Prior Guidance Scale' Slider component
 30          10,  # float (numeric value between 4 and 12) in 'Decoder Inference Steps' Slider component

@@ -35,14 +35,14 @@ def generate(prompt: str, profile: gr.OAuthProfile | None) -> tuple[str, list[st
 35
 36      metadata = {
 37          "prompt": prompt,
 38 -        "negative_prompt":
 39          "prior_inference_steps": 20,
 40          "prior_guidance_scale": 4,
 41          "decoder_inference_steps": 10,
 42          "decoder_guidance_scale": 0,
 43          "seed": 0,
 44 -        "width":
 45 -        "height":
 46      }
 47      with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as metadata_file:
 48          json.dump(metadata, metadata_file)

@@ -56,6 +56,7 @@ def generate(prompt: str, profile: gr.OAuthProfile | None) -> tuple[str, list[st
 56  with gr.Blocks(css="style.css") as demo:
 57      with gr.Group():
 58          prompt = gr.Text(show_label=False, placeholder="Prompt")
 59      gallery = gr.Gallery(
 60          show_label=False,
 61          columns=2,
 63          height="600px",
 64          object_fit="scale-down",
 65      )
 66 -    prompt.submit(fn=generate, inputs=prompt, outputs=gallery)
 67
 68  with gr.Blocks() as demo_with_history:
 69      with gr.Tab("README"):
|
|
|
NEW VERSION (added or changed lines are marked with `+`; indentation
reconstructed from Python structure, which the extraction discarded):

 18  client = Client("multimodalart/stable-cascade")
 19
 20
 21 +def generate(prompt: str, negprompt: str, profile: gr.OAuthProfile | None) -> tuple[str, list[str]]:
 22      generated_img_path = client.predict(
 23          prompt,  # str in 'Prompt' Textbox component
 24 +        negprompt,  # str in 'Negative prompt' Textbox component
 25          0,  # float (numeric value between 0 and 2147483647) in 'Seed' Slider component
 26 +        1536,  # float (numeric value between 1024 and 1536) in 'Width' Slider component
 27 +        1536,  # float (numeric value between 1024 and 1536) in 'Height' Slider component
 28          20,  # float (numeric value between 10 and 30) in 'Prior Inference Steps' Slider component
 29          4,  # float (numeric value between 0 and 20) in 'Prior Guidance Scale' Slider component
 30          10,  # float (numeric value between 4 and 12) in 'Decoder Inference Steps' Slider component

 35
 36      metadata = {
 37          "prompt": prompt,
 38 +        "negative_prompt": negprompt,
 39          "prior_inference_steps": 20,
 40          "prior_guidance_scale": 4,
 41          "decoder_inference_steps": 10,
 42          "decoder_guidance_scale": 0,
 43          "seed": 0,
 44 +        "width": 1536,
 45 +        "height": 1536,
 46      }
 47      with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as metadata_file:
 48          json.dump(metadata, metadata_file)

 56  with gr.Blocks(css="style.css") as demo:
 57      with gr.Group():
 58          prompt = gr.Text(show_label=False, placeholder="Prompt")
 59 +        negprompt = gr.Text(show_label=False, placeholder="Negative Prompt")
 60      gallery = gr.Gallery(
 61          show_label=False,
 62          columns=2,
 64          height="600px",
 65          object_fit="scale-down",
 66      )
 67 +    prompt.submit(fn=generate, inputs=[prompt,negprompt], outputs=gallery)
 68
 69  with gr.Blocks() as demo_with_history:
 70      with gr.Tab("README"):
|