charliebaby2023 committed on
Commit
15b1858
·
verified ·
1 Parent(s): bbc7a6d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +163 -52
app.py CHANGED
@@ -2,9 +2,19 @@ import gradio as gr
2
  from random import randint
3
  from all_models import models
4
  from datetime import datetime
 
 
 
5
 
6
  now2 = 0
7
- kii=" needs async iterative function for all models selected to go to appropriate output boxes, because gradio update lost multiselect? OR not. where am i, how did i find myself here, what is this strange language";
 
 
 
 
 
 
 
8
 
9
 
10
  def get_current_time():
@@ -14,6 +24,7 @@ def get_current_time():
14
  ki = f'{kii} {current_time}'
15
  return ki
16
 
 
17
  def load_fn(models):
18
  global models_load
19
  models_load = {}
@@ -21,95 +32,195 @@ def load_fn(models):
21
  if model not in models_load.keys():
22
  try:
23
  m = gr.load(f'models/{model}')
 
24
  except Exception as error:
25
- m = gr.Interface(lambda txt: None, ['text'], ['image'])
 
26
  models_load.update({model: m})
27
 
 
28
  load_fn(models)
29
 
30
  num_models = len(models)
31
  default_models = models[:num_models]
32
 
 
33
  def extend_choices(choices):
34
  return choices + (num_models - len(choices)) * ['NA']
35
 
 
36
  def update_imgbox(choices):
37
  choices_plus = extend_choices(choices)
38
  return [gr.Image(None, label=m, visible=(m != 'NA')) for m in choices_plus]
39
 
 
 
 
 
40
  def gen_fn(model_str, prompt):
41
  if model_str == 'NA':
42
  return None
43
- noise = str(randint(0, 9999))
44
- return models_load[model_str](f'{prompt} {noise}')
45
-
46
- def process_choices(selected_choices):
47
- results = []
48
- for choice in selected_choices:
49
- results.append(f"Processed data for {choice}") # Example output for each model
50
- return results # Return a separate output for each choice
51
 
 
 
 
52
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
53
 
54
  def make_me():
55
  with gr.Row():
56
- txt_input = gr.Textbox(lines=2, value=kii)
57
- #txt_input = gr.Textbox(label='Your prompt:', lines=2, value=kii)
58
-
59
- gen_button = gr.Button('Generate images')
60
- stop_button = gr.Button('Stop', variant='secondary', interactive=False)
61
-
62
- gen_button.click(lambda s: gr.update(interactive=True), None, stop_button)
63
- gr.HTML("""
64
- <div style="text-align: center; max-width: 100%; margin: 0 auto;">
65
- <body>
66
- </body>
67
- </div>
68
- """)
69
- with gr.Row():
70
- output = [gr.Image(label=m) for m in default_models]
71
- current_models = [gr.Textbox(m, visible=False) for m in default_models]
72
- for m, o in zip(current_models, output):
73
- gen_event = gen_button.click(gen_fn, [m, txt_input], o)
74
- stop_button.click(lambda s: gr.update(interactive=False), None, stop_button, cancels=[gen_event])
75
- with gr.Accordion('Model selection'):
76
- # model_choice = gr.CheckboxGroup(models, label=f' {num_models} different models selected', value=default_models, multiselect=True, interactive=True, filterable=False)
77
- model_choice = gr.CheckboxGroup(models, label=f' {num_models} different models selected', value=default_models, interactive=True )
78
- model_choice.change(update_imgbox, model_choice, output)
79
- model_choice.change(extend_choices, model_choice, current_models)
80
-
81
-
82
  with gr.Row():
83
- gr.HTML("""
84
- <div class="footer">
85
- <p> Based on the <a href="https://huggingface.co/spaces/derwahnsinn/TestGen">TestGen</a> Space by derwahnsinn, the <a href="https://huggingface.co/spaces/RdnUser77/SpacIO_v1">SpacIO</a> Space by RdnUser77 and Omnibus's Maximum Multiplier!
86
- </p>
87
- """)
88
 
 
 
 
 
89
 
90
 
91
  js_code = """
92
- console.log('testing js_code');
93
-
 
 
 
 
 
 
 
 
94
  """
95
 
96
 
97
-
 
98
  with gr.Blocks(css="""
99
  label.float.svelte-i3tvor { top:auto!important; bottom: 0; position: absolute; background: rgba(0,0,0,0.0); left: var(--block-label-margin); color: rgba(200,200,200,.7);}
100
  .genbut { max-width: 50px; max-height: 30px; width:150px; height:30px}
101
  .stopbut { max-width: 50px; max-height: 30px; width:150px; height:30px}
102
  .float.svelte-1mwvhlq { position: absolute; top: var(--block-label-margin); left: var(--block-label-margin); background: none; border: none;}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
103
  """) as demo:
104
  gr.Markdown("<script>" + js_code + "</script>")
105
  make_me()
106
 
107
  demo.queue()
108
- demo.launch(max_threads=200 )
109
-
110
-
111
-
112
-
113
- # output = [gr.Image(label=m, height=170) for m in default_models]
114
- #demo.queue(concurrency_count=200)
115
- #demo.launch()
 
2
  from random import randint
3
  from all_models import models
4
  from datetime import datetime
5
+ from concurrent.futures import ThreadPoolExecutor
6
+ import time
7
+ import requests
8
 
9
  now2 = 0
10
+ kii=" this is your prompt input window still a wip"
11
+ combined_prompt = ""
12
+
13
+
14
+
15
+
16
+
17
+
18
 
19
 
20
  def get_current_time():
 
24
  ki = f'{kii} {current_time}'
25
  return ki
26
 
27
+
28
  def load_fn(models):
29
  global models_load
30
  models_load = {}
 
32
  if model not in models_load.keys():
33
  try:
34
  m = gr.load(f'models/{model}')
35
+ print(f"{m}");
36
  except Exception as error:
37
+ print(f"Error loading model {model}: {error}")
38
+ m = gr.Interface(lambda _: None, inputs=gr.Textbox(), outputs=gr.Image(), enable_queue=False)
39
  models_load.update({model: m})
40
 
41
+
42
  load_fn(models)
43
 
44
  num_models = len(models)
45
  default_models = models[:num_models]
46
 
47
+
48
def extend_choices(choices):
    """Pad the selected-model list with 'NA' placeholders up to num_models entries."""
    padding = ['NA'] * (num_models - len(choices))
    return choices + padding
50
 
51
+
52
def update_imgbox(choices):
    """Return one gr.Image per slot; 'NA' placeholder slots are hidden."""
    boxes = []
    for model_name in extend_choices(choices):
        boxes.append(gr.Image(None, label=model_name, visible=model_name != 'NA'))
    return boxes
55
 
56
+
57
# NOTE(review): this executor is never referenced anywhere in the visible
# code — it looks like leftover scaffolding for an async/parallel generation
# experiment. Candidate for removal; confirm no other module imports it.
executor = ThreadPoolExecutor(max_workers=num_models)
58
+
59
+
60
def gen_fn(model_str, prompt):
    """Run one loaded model on the prompt.

    Returns a gr.Image (or None for the 'NA' placeholder, an unexpected
    response type, or any model failure). Errors are printed and swallowed so
    one failing model does not break the other output boxes.
    """
    if model_str == 'NA':
        return None

    # Random suffix defeats upstream result caching so every click regenerates.
    noise = str(randint(0, 9999))
    # Fixed: the previous revision built `combined_prompt = f'{prompt}'`, a
    # redundant f-string wrap that also shadowed the module-level
    # `combined_prompt`; print the prompt directly instead.
    print(f"Generating with prompt: {prompt}")

    try:
        image_response = models_load[model_str](f'{prompt} {noise}')
        if isinstance(image_response, gr.Image):
            return image_response
        if isinstance(image_response, str):
            # Model wrappers commonly return a file path or URL string.
            return gr.Image(image_response)
        print(f"Unexpected response type: {type(image_response)}")
        return None
    except Exception as e:
        # Broad on purpose: remote model backends raise many exception types.
        print(f"Error occurred: {e}")
        return None
83
+
84
+
85
 
86
def make_me():
    # Build the UI inside the enclosing gr.Blocks: a prompt box with a
    # generate button, one image slot per model, and a hidden accordion that
    # controls which models are active.
    with gr.Row():
        txt_input = gr.Textbox(lines=2, value=kii, label=None)
        gen_button = gr.Button('Generate images')
        # stop_button = gr.Button('Stop', variant='secondary', interactive=False)

        #gen_button.click(lambda _: gr.update(interactive=True), None, stop_button)
        # NOTE(review): this click handler has no output component, so the
        # gr.update(interactive=True) it returns goes nowhere — confirm it is
        # a leftover from the removed stop button.
        gen_button.click(lambda _: gr.update(interactive=True), None)

    gr.HTML("""
    <div style="text-align: center; max-width: 100%; margin: 0 auto;">
          <body>
          </body>
    </div>
    """)

    with gr.Row():
        # One visible image box and one hidden textbox (carrying the model
        # name) per default model; they are zipped so each click event routes
        # one model's result to its own box.
        output = [gr.Image(label=m) for m in default_models]
        current_models = [gr.Textbox(m, visible=False) for m in default_models]
        for m, o in zip(current_models, output):
            # queue=False: fire every model's generation immediately instead
            # of serializing through the request queue.
            gen_event = gen_button.click(gen_fn, [m, txt_input], o, queue=False)

    # Hidden for now (visible=False); re-enable to let users pick models.
    with gr.Accordion('Model selection', visible=False):
        model_choice = gr.CheckboxGroup(models, label=f' {num_models} different models selected', value=default_models, interactive=True)
        model_choice.change(update_imgbox, model_choice, output)
        model_choice.change(extend_choices, model_choice, current_models)
113
 
114
 
115
# JS snippet injected below via gr.Markdown("<script>" + js_code + "</script>").
# Intent: while a Gradio toast is showing, temporarily no-op window.scrollTo
# so toasts don't yank the page, then restore it.
# NOTE(review): Gradio sanitizes Markdown HTML, so this script very likely
# never executes — verify in the browser console.
# NOTE(review): the timeout is 1000 ms although the inline JS comment says
# "after 3 seconds" — confirm which delay is intended.
js_code = """
<script>
const originalScroll = window.scrollTo;
const originalShowToast = gradio.Toast.show;
gradio.Toast.show = function() {
    originalShowToast.apply(this, arguments);
    window.scrollTo = function() {};};
setTimeout(() => {
    window.scrollTo = originalScroll;
}, 1000); // Restore scroll function after 3 seconds
</script>
"""
127
 
128
 
129
+
130
+
131
# App shell + launch. The CSS mostly overrides Gradio's generated (svelte-*)
# class names, so it is tightly coupled to the installed Gradio version.
# NOTE(review): svelte-* hashes change between Gradio releases — re-verify
# these selectors after any dependency upgrade.
with gr.Blocks(css="""
    label.float.svelte-i3tvor { top:auto!important; bottom: 0; position: absolute; background: rgba(0,0,0,0.0); left: var(--block-label-margin); color: rgba(200,200,200,.7);}
    .genbut { max-width: 50px; max-height: 30px; width:150px; height:30px}
    .stopbut { max-width: 50px; max-height: 30px; width:150px; height:30px}
    .float.svelte-1mwvhlq { position: absolute; top: var(--block-label-margin); left: var(--block-label-margin); background: none; border: none;}
    textarea:hover { background:#55555555;}
    textarea { overflow-y: scroll; top:0px; width: 100%; height:100%!important;
        font-size: 1.5em;
        letter-spacing: 3px;
        color: limegreen;
        border: none!important;
        background: none;
        outline: none !important; }
    .svelte-5y6bt2 {max-height:161px;min-height:160px;}
    .hide-container { max-height: 2px; position: fixed; min-height: 1px;}
    .svelte-1gfkn6j {display:none;}
    .gradio-container .gri-textbox .gri-input { border: none; padding: 0; background: transparent; box-shadow: none; }
    .padded.svelte-5y6bt2:hover { border:1px solid cyan;}
    .padded.svelte-5y6bt2 {
        border: none;
        background: none!important; padding: 0px!important; min-width:100%!important; max-width:101%!important;position:relative;right:0px;max-height:100%; }
    .secondary.svelte-1137axg {
        width: 200px;
        flex: none!important;
        position: relative;
        min-width: 160px;
        border: var(--button-border-width) solid var(--button-secondary-border-color);
        background: var(--button-secondary-background-fill);
        color: var(--button-secondary-text-color);
        box-shadow: var(--button-secondary-shadow);
        left: 0px;
        float: left;
    }
    div.svelte-633qhp {
        /* display: flex; */
        /* flex-direction: inherit; */
        /* flex-wrap: wrap; */
        gap: var(--form-gap-width);
        box-shadow: var(--block-shadow);
        height: 20px;
        width: 250px;
        position:fixed;
        left:calc(50% - 100px);
        flex: none!important;
        /* border: var(--block-border-width) solid var(--block-border-color); */
        /* border-radius: var(--block-radius); */
        /* background: var(--border-color-primary); */
        /* overflow-y: hidden; */
    }
    .form.svelte-633qhp{
        height:50px;
        width:auto!important;
        z-index: 4000;
        position: fixed;
        flex: auto!important;
        border: none!important;
        background: none!important;
        min-width: 30%!important;
        min-height: 45px !important;
        resize:both;
        left: 50%;
        transform: translate(-50%,0);
    }
    .input-container.svelte-11mx0st.svelte-11mx0st {
        /* display: flex; */
        top: 0px;
        position: absolute;
        align-items: flex-end;
        bottom: 0px;
        background: none;
        border: none;
        left: 0px;
        right: 0px;
    }
""") as demo:
    # NOTE(review): Gradio sanitizes Markdown HTML, so this <script>
    # injection probably never runs — confirm before relying on it.
    gr.Markdown("<script>" + js_code + "</script>")
    make_me()

# Enable request queuing, then launch.
# Fixed defects from the previous revision:
#  * `demo.queue = False` clobbered the bound queue() method with a bool,
#    breaking any later demo.queue() call; removed.
#  * `demo.config["queue"] = False` mutated the internal launch-config dict,
#    which is unsupported and contradicted the demo.queue() call above; removed.
#  * a stray extra `}` at the end of the CSS string was dropped.
demo.queue()
demo.launch(max_threads=200)