charliebaby2023 committed on
Commit
ac1a404
·
verified ·
1 Parent(s): c98a9f1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +83 -20
app.py CHANGED
@@ -2,19 +2,13 @@ import gradio as gr
2
  from random import randint
3
  from all_models import models
4
  from datetime import datetime
5
- from concurrent.futures import ThreadPoolExecutor
 
 
 
6
  import numpy as np
7
  import time
8
  import requests
9
- #attempting to remove logged user input out of respect for users' privacy on Hugging Face
10
- #thats right you heard me. logs happen here. and ive seen your input,
11
- #AND, i can see it live (i think it may just be intrinsic in this Hugging Face network)
12
- #BUT, im really not interested one bit in your input, at all, and im not here to judge others (ive always suspected this was happening, and im just now becoming fully aware and i havent found many solutions yet)
13
- #SO, im trying my best to help protect our privacy
14
- #PLEASE bear with me and if you have questions or suggestions or help
15
- #OR EVEN if you just want to leave a comment about it
16
- #please let me know (keep in mind, im a 0.32_ on the scale of (1 - 10) for python literacy )
17
- #BUT KEEP IN MIND ---- this data helps me select the VERY BEST, and most stable models for US ALL to use. and thats my overall goal. (and the good news is, i have no idea who you are or where you are. the only thing i know is whats currently being prompted and which models succeeded, and that some of you, smh, just aint right lol. just try n play nice )
18
  now2 = 0
19
  index_tracker = 0 # Index tracker for the current model
20
  model_scores = {model: 0 for model in models} # Dictionary to track scores for each model
@@ -53,22 +47,37 @@ def update_imgbox(choices):
53
  executor = ThreadPoolExecutor(max_workers=num_models)
54
 
55
 
 
 
 
 
 
56
  def gen_fn(model_str, prompt):
57
- global index_tracker, model_scores, processed_models_count # Declare all global variables
 
 
 
 
58
  if model_str == 'NA':
59
  return None
60
- try:
 
 
 
 
61
  index_tracker = (index_tracker + 1) % len(models)
62
  current_model_index = index_tracker
63
  current_model_name = models[current_model_index]
64
  combined_prompt = f"{prompt} {randint(0, 9999)}"
65
- response = models_load[model_str](f"{combined_prompt}")
66
-
67
- if isinstance(response, gr.Image):
68
- return response
69
- elif isinstance(response, tuple):
70
- return None
71
- elif isinstance(response, str): # If the response is a path or URL, pass it as a string
 
 
72
  if processed_models_count == 0:
73
  print(f"**************")
74
  print(f"{prompt}")
@@ -84,7 +93,25 @@ def gen_fn(model_str, prompt):
84
  processed_models_count = 0
85
  return response
86
 
87
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
88
  except Exception as e:
89
  if processed_models_count == 0:
90
  print(f"**************")
@@ -99,6 +126,14 @@ def gen_fn(model_str, prompt):
99
  print(model_scores)
100
  processed_models_count = 0
101
  return None
 
 
 
 
 
 
 
 
102
 
103
  def make_me():
104
  with gr.Row():
@@ -150,3 +185,31 @@ demo.queue()
150
  demo.queue = False
151
  demo.config["queue"] = False
152
  demo.launch(max_threads=400)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2
  from random import randint
3
  from all_models import models
4
  from datetime import datetime
5
+ from concurrent.futures import ThreadPoolExecutor, TimeoutError
6
+
7
+
8
+
9
  import numpy as np
10
  import time
11
  import requests
 
 
 
 
 
 
 
 
 
12
  now2 = 0
13
  index_tracker = 0 # Index tracker for the current model
14
  model_scores = {model: 0 for model in models} # Dictionary to track scores for each model
 
47
  executor = ThreadPoolExecutor(max_workers=num_models)
48
 
49
 
50
+
51
+
52
+
53
+
54
+
55
  def gen_fn(model_str, prompt):
56
+ global index_tracker, model_scores, processed_models_count # Declare global variables
57
+
58
+
59
+
60
+
61
  if model_str == 'NA':
62
  return None
63
+
64
+
65
+
66
+ def generate_image():
67
+ """Inner function to generate the image."""
68
  index_tracker = (index_tracker + 1) % len(models)
69
  current_model_index = index_tracker
70
  current_model_name = models[current_model_index]
71
  combined_prompt = f"{prompt} {randint(0, 9999)}"
72
+ response = models_load[model_str](f'{combined_prompt}')
73
+ return current_model_name, response
74
+
75
+ try:
76
+ # Run the image generation task with a timeout
77
+ future = executor.submit(generate_image)
78
+ current_model_name, response = future.result(timeout=8) # Set timeout in seconds
79
+
80
+ if isinstance(response, str): # If the response is valid
81
  if processed_models_count == 0:
82
  print(f"**************")
83
  print(f"{prompt}")
 
93
  processed_models_count = 0
94
  return response
95
 
96
+ else:
97
+ processed_models_count += 1
98
+ return None
99
+
100
+ except TimeoutError:
101
+ if processed_models_count == 0:
102
+ print(f"**************")
103
+ print(f"{prompt}")
104
+ print(f"{prompt}")
105
+ print(f"{prompt}")
106
+ print(f"**************")
107
+ print(f"TTT n:{processed_models_count} x:{current_model_index} r[{model_scores[current_model_name]}] {model_str}")
108
+ processed_models_count += 1
109
+ if processed_models_count == len(models):
110
+ print("\nCycle Complete! Updated Scores:")
111
+ print(model_scores)
112
+ processed_models_count = 0
113
+ return None
114
+
115
  except Exception as e:
116
  if processed_models_count == 0:
117
  print(f"**************")
 
126
  print(model_scores)
127
  processed_models_count = 0
128
  return None
129
+
130
+
131
+
132
+
133
+
134
+
135
+
136
+
137
 
138
  def make_me():
139
  with gr.Row():
 
185
  demo.queue = False
186
  demo.config["queue"] = False
187
  demo.launch(max_threads=400)
188
+
189
+
190
+
191
+
192
+
193
+
194
+
195
+
196
+
197
+
198
+
199
+
200
+
201
+
202
+
203
+
204
+
205
+
206
+
207
+
208
+
209
+
210
+
211
+
212
+
213
+
214
+
215
+