Wilbur1240 committed
Commit fc58cf5 · 1 Parent(s): 4cf4d70

update requirements.txt

Files changed (1)
  1. app.py +129 -33
app.py CHANGED
@@ -4,15 +4,25 @@ import random
 from huggingface_hub import InferenceClient
 import os

-MAX_SEED = np.iinfo(np.int32).max
-MAX_IMAGE_SIZE = 1024
-
+# For security in deployment: use secrets instead of hardcoding your API key
 client = InferenceClient(
     provider="hf-inference",
-    api_key=os.environ["HF_API_TOKEN"]
+    api_key=os.environ["HF_API_TOKEN"] # Make sure this is set as a secret
 )

-def infer(prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps):
+MAX_SEED = np.iinfo(np.int32).max
+MAX_IMAGE_SIZE = 1024
+
+def infer(
+    prompt,
+    negative_prompt,
+    seed,
+    randomize_seed,
+    width,
+    height,
+    guidance_scale,
+    num_inference_steps,
+):
     if randomize_seed:
         seed = random.randint(0, MAX_SEED)

@@ -28,34 +38,120 @@ def infer(prompt, negative_prompt, seed, randomize_seed, width, height, guidance
     )
     return image, seed

-# Step 1: Build the interface (as before)
-interface = gr.Interface(
-    fn=infer,
-    inputs=[
-        gr.Textbox(label="Prompt"),
-        gr.Textbox(label="Negative Prompt", value=""),
-        gr.Slider(0, MAX_SEED, label="Seed", value=0),
-        gr.Checkbox(label="Randomize Seed", value=True),
-        gr.Slider(256, MAX_IMAGE_SIZE, label="Width", step=32, value=768),
-        gr.Slider(256, MAX_IMAGE_SIZE, label="Height", step=32, value=768),
-        gr.Slider(0, 10, label="Guidance Scale", step=0.1, value=0.0),
-        gr.Slider(1, 50, label="Steps", step=1, value=2)
-    ],
-    outputs=[
-        gr.Image(label="Generated Image"),
-        gr.Number(label="Seed Used")
-    ],
-    examples=[
-        ["A futuristic cityscape at sunset", "", 0, True, 768, 768, 0.0, 2],
-        ["A cat in a space suit", "", 0, True, 768, 768, 0.0, 2],
-    ],
-    allow_flagging="never"
-)
+examples = [
+    "Astronaut in a jungle, cold color palette, muted colors, detailed, 8k",
+    "An astronaut riding a green horse",
+    "A delicious ceviche cheesecake slice",
+]
+
+css = """
+#col-container {
+    margin: 0 auto;
+    max-width: 640px;
+}
+"""
+
+with gr.Blocks(css=css) as demo:
+    with gr.Column(elem_id="col-container"):
+        gr.Markdown(" # Text-to-Image Gradio Template")
+
+        with gr.Row():
+            prompt = gr.Text(
+                label="Prompt",
+                show_label=False,
+                max_lines=1,
+                placeholder="Enter your prompt",
+                container=False,
+            )
+
+            run_button = gr.Button("Run", scale=0, variant="primary")
+
+        result = gr.Image(label="Result", show_label=False)
+
+        with gr.Accordion("Advanced Settings", open=False):
+            negative_prompt = gr.Text(
+                label="Negative prompt",
+                max_lines=1,
+                placeholder="Enter a negative prompt",
+                visible=True,
+                value=""
+            )
+
+            seed = gr.Slider(
+                label="Seed",
+                minimum=0,
+                maximum=MAX_SEED,
+                step=1,
+                value=0,
+            )
+
+            randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
+
+            with gr.Row():
+                width = gr.Slider(
+                    label="Width",
+                    minimum=256,
+                    maximum=MAX_IMAGE_SIZE,
+                    step=32,
+                    value=1024,
+                )
+
+                height = gr.Slider(
+                    label="Height",
+                    minimum=256,
+                    maximum=MAX_IMAGE_SIZE,
+                    step=32,
+                    value=1024,
+                )
+
+            with gr.Row():
+                guidance_scale = gr.Slider(
+                    label="Guidance scale",
+                    minimum=0.0,
+                    maximum=10.0,
+                    step=0.1,
+                    value=0.0,
+                )
+
+                num_inference_steps = gr.Slider(
+                    label="Number of inference steps",
+                    minimum=1,
+                    maximum=50,
+                    step=1,
+                    value=2,
+                )
+
+        gr.Examples(examples=examples, inputs=[prompt])
+
+    run_button.click(
+        fn=infer,
+        inputs=[
+            prompt,
+            negative_prompt,
+            seed,
+            randomize_seed,
+            width,
+            height,
+            guidance_scale,
+            num_inference_steps,
+        ],
+        outputs=[result, seed],
+    )

-# Step 2: Render the interface inside a Blocks container
-with gr.Blocks() as demo:
-    interface.render()
+    prompt.submit(
+        fn=infer,
+        inputs=[
+            prompt,
+            negative_prompt,
+            seed,
+            randomize_seed,
+            width,
+            height,
+            guidance_scale,
+            num_inference_steps,
+        ],
+        outputs=[result, seed],
+    )

-# Step 3: Launch cleanly
 if __name__ == "__main__":
-    demo.launch(show_api=False)
+    demo.launch(show_api=False, prevent_thread_lock=True)
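Both hunks elide the middle of infer() (old lines 19-27, new lines 29-37), which is where the image itself is generated before the `)` and `return image, seed` lines shown in the second hunk. A minimal sketch of what that elided section plausibly looks like, assuming the InferenceClient.text_to_image helper from huggingface_hub and mapping the UI controls onto its keyword arguments; the model id and the exact parameter wiring are assumptions, not taken from this diff:

    # Hypothetical reconstruction of the elided infer() body, for illustration only.
    # The real model id and argument mapping are not visible in this commit.
    image = client.text_to_image(
        prompt,
        negative_prompt=negative_prompt,
        width=width,
        height=height,
        guidance_scale=guidance_scale,
        num_inference_steps=num_inference_steps,
        seed=seed,  # seed support depends on the huggingface_hub version
        model="black-forest-labs/FLUX.1-schnell",  # placeholder model id
    )
    return image, seed

The new "# For security in deployment" comment points at storing HF_API_TOKEN as a Space secret rather than hardcoding it. The committed code still indexes os.environ directly, so a missing secret raises KeyError at import time; a slightly more defensive lookup, shown only as an illustration:

    import os

    # Fail early with a readable message if the secret was not configured in the Space settings.
    hf_token = os.environ.get("HF_API_TOKEN")
    if not hf_token:
        raise RuntimeError("HF_API_TOKEN is not set; add it as a secret to the Space.")

The launch line also gains prevent_thread_lock=True, which makes demo.launch() return immediately instead of blocking the main thread; whether a non-blocking launch is appropriate depends on how the script is kept alive after it returns.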