Surn committed
Commit c16e905 · 1 Parent(s): a1a2ac2

Update client and gradio version

Files changed (2):
  1. README.md +1 -1
  2. app.py +42 -24
README.md CHANGED
@@ -1,7 +1,7 @@
 ---
 title: Gradio User History
 sdk: gradio
-sdk_version: 5.25.0
+sdk_version: 5.33.0
 app_file: app.py
 emoji: 🖼️
 colorFrom: gray
app.py CHANGED
@@ -15,48 +15,63 @@ from gradio_client import Client
 #enable_space_ci()
 
 
-client = Client("multimodalart/stable-cascade")
-
-
-def generate(prompt: str, negprompt: str, profile: gr.OAuthProfile | None) -> tuple[str, list[str]]:
-    generated_img_path = client.predict(
-        prompt,      # str in 'Prompt' Textbox component
-        negprompt,   # str in 'Negative prompt' Textbox component
-        0,           # float (numeric value between 0 and 2147483647) in 'Seed' Slider component
-        1536,        # float (numeric value between 1024 and 1536) in 'Width' Slider component
-        1536,        # float (numeric value between 1024 and 1536) in 'Height' Slider component
-        20,          # float (numeric value between 10 and 30) in 'Prior Inference Steps' Slider component
-        4,           # float (numeric value between 0 and 20) in 'Prior Guidance Scale' Slider component
-        10,          # float (numeric value between 4 and 12) in 'Decoder Inference Steps' Slider component
-        0,           # float (numeric value between 0 and 0) in 'Decoder Guidance Scale' Slider component
-        1,           # float (numeric value between 1 and 2) in 'Number of Images' Slider component
-        api_name="/run"
+client = Client("multimodalart/stable-diffusion-3.5-large-turboX")
+
+
+def generate(prompt: str, negprompt: str, seed: int, randomize_seed: bool, profile: gr.OAuthProfile | None) -> list[str | None]:
+    # API call to the new endpoint.
+    # The result is a tuple: the first element is the generated image path,
+    # the second element is the seed that was actually used.
+    actual_seed = seed
+    if randomize_seed:
+        # When randomize_seed is True the API overrides the provided seed value
+        # and returns the seed it actually used, so no random seed is generated here.
+        pass
+
+    result = client.predict(
+        prompt=prompt,                   # str in 'Prompt' Textbox component
+        negative_prompt=negprompt,       # str in 'Negative prompt' Textbox component
+        seed=actual_seed,                # float (numeric value between 0 and 2147483647) in 'Seed' Slider component
+        randomize_seed=randomize_seed,   # bool in 'Randomize seed' Checkbox component
+        width=1024,                      # float (numeric value between 1024 and 1536) in 'Width' Slider component
+        height=1024,                     # float (numeric value between 1024 and 1536) in 'Height' Slider component
+        guidance_scale=1.5,              # float (numeric value between 0 and 20) in 'Guidance scale' Slider component
+        num_inference_steps=8,           # float (numeric value between 4 and 12) in 'Number of inference steps' Slider component
+        api_name="/infer"
     )
 
+    generated_img_path: str | None = result[0]  # Image path returned by the API
+    returned_seed = result[1]                   # Seed the API actually used
+
     metadata = {
         "prompt": prompt,
         "negative_prompt": negprompt,
-        "prior_inference_steps": 20,
-        "prior_guidance_scale": 4,
-        "decoder_inference_steps": 10,
-        "decoder_guidance_scale": 0,
-        "seed": 0,
+        "seed": returned_seed,  # Seed returned by the API
+        "randomize_seed": randomize_seed,
         "width": 1024,
         "height": 1024,
+        "guidance_scale": 1.5,
+        "num_inference_steps": 8,
     }
     with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as metadata_file:
         json.dump(metadata, metadata_file)
 
     # Saving user history
-    gr_user_history.save_image(label=prompt, image=generated_img_path, profile=profile, metadata=metadata)
+    # Only save when the API returned a valid image path
+    if generated_img_path:
+        gr_user_history.save_image(label=prompt, image=generated_img_path, profile=profile, metadata=metadata)
 
-    return [generated_img_path]  # type: ignore
+    return [generated_img_path]
 
 
 with gr.Blocks(css="style.css") as demo:
     with gr.Group():
         prompt = gr.Text(show_label=False, placeholder="Prompt")
         negprompt = gr.Text(show_label=False, placeholder="Negative Prompt")
+        # Seed slider and randomize-seed checkbox
+        with gr.Row():
+            seed_slider = gr.Slider(minimum=0, maximum=2147483647, step=1, label="Seed", value=0, scale=4)
+            randomize_checkbox = gr.Checkbox(label="Randomize seed", value=True, scale=1)
         gallery = gr.Gallery(
             show_label=False,
             columns=2,
@@ -64,7 +79,10 @@ with gr.Blocks(css="style.css") as demo:
             height="600px",
             object_fit="scale-down",
         )
-    prompt.submit(fn=generate, inputs=[prompt,negprompt], outputs=gallery)
+    submit_button = gr.Button("Generate")
+
+    submit_button.click(fn=generate, inputs=[prompt, negprompt, seed_slider, randomize_checkbox], outputs=gallery)
+    prompt.submit(fn=generate, inputs=[prompt, negprompt, seed_slider, randomize_checkbox], outputs=gallery)
 
 with gr.Blocks(theme='Surn/beeuty@==0.5.25') as demo_with_history:
     with gr.Tab("README"):
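
For context, here is a minimal standalone sketch of the API call that app.py now makes. The endpoint name and keyword arguments are taken from the diff above; the assumption that /infer returns an (image_path, seed) pair mirrors how the new code unpacks the result, and the prompt values are purely illustrative.

    from gradio_client import Client

    # Connect to the Space that app.py now targets
    client = Client("multimodalart/stable-diffusion-3.5-large-turboX")

    # Same keyword arguments the updated generate() passes
    image_path, used_seed = client.predict(
        prompt="a watercolor fox in a snowy forest",  # illustrative prompt
        negative_prompt="blurry, low quality",        # illustrative negative prompt
        seed=0,
        randomize_seed=True,   # let the Space pick a seed; it returns the one it used
        width=1024,
        height=1024,
        guidance_scale=1.5,
        num_inference_steps=8,
        api_name="/infer",
    )
    print(image_path, used_seed)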