import gradio as gr
from random import randint
from all_models import models
from datetime import datetime
#from concurrent.futures import TimeoutError, ThreadPoolExecutor
import time
import logging
import traceback  # For better error reporting

#os.environ["CUDA_VISIBLE_DEVICES"] = "-1"

logging.basicConfig(level=logging.DEBUG)

# --- Module-level state shared across gen_fn calls ---
now2 = 0
index_tracker = 0                                # round-robin index over `models` (currently unused)
model_scores = {model: 0 for model in models}    # per-model count of successful responses
processed_models_count = 0                       # models answered in the current generation cycle

kii = " blonde mohawk femboy playing game with self at computer with programmer socks on, still a wip"
combined_prompt = "cat"


def get_current_time():
    """Return the default prompt `kii` suffixed with the current timestamp.

    Returns:
        str: "<kii> YYYY-MM-DD HH:MM:SS".
    """
    current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    return f'{kii} {current_time}'


# Sanitize file names and truncate them
def sanitize_file_name(file_name, max_length=100):
    """Shortens and removes unsafe characters from file name.

    Truncates to `max_length` characters, then replaces spaces and
    forward slashes with underscores.
    """
    file_name = file_name[:max_length]
    return file_name.replace(" ", "_").replace("/", "_")


def load_fn(models):
    """Populate the global `models_load` dict mapping model name -> loaded model.

    Each model is loaded at most once via `gr.load`. A model that fails to
    load is stored as None (instead of being skipped) so later lookups by
    name never raise KeyError; one bad model must not break the whole grid.
    """
    global models_load
    models_load = {}
    for model in models:
        if model in models_load:
            continue  # already loaded (defensive; `models` may contain duplicates)
        try:
            m = gr.load(f'models/{model}')
            # Single store — the original redundantly wrote the same entry twice.
            models_load[model] = m
            print(f"loaded model {model}: {m}")
        except Exception as error:
            # Log and continue; record the failure so the key still exists.
            print(f"********** Error loading model {model}: {error}")
            traceback.print_exc()  # Full stack trace for debugging
            models_load[model] = None
None # m = gr.Interface(lambda txt: None, ['text'], ['image']) # models_load.update({model: m}) #return models_load # Return dictionary instead of using global load_fn(models) num_models = len(models) default_models = models[:num_models] def extend_choices(choices): return choices + (num_models - len(choices)) * ['NA'] def update_imgbox(choices): choices_plus = extend_choices(choices) return [gr.Image(None, label=m, visible=(m != 'NA')) for m in choices_plus] #def gen_fn(model_str, prompt): # if model_str == 'NA': # return None # noise = str(randint(0, 9999)) # return models_load[model_str](f'{prompt} {noise}') #executor = ThreadPoolExecutor(max_workers=num_models) def gen_fn(model_str, prompt): global index_tracker, model_scores, processed_models_count if model_str == 'NA': return None try: #index_tracker = (index_tracker + 1) % len(models) #current_model_index = index_tracker #current_model_name = models[current_model_index] max_prompt_length = 100 #truncated_prompt = sanitize_file_name(prompt[:max_prompt_length]) #combined_prompt = f"{truncated_prompt}_{randint(0, 9999)}" # Execute the model's processing with a timeout #future = executor.submit(models_load[model_str], f"{prompt}") #response = future.result(timeout=150) # Wait for result with timeout response = models_load.get(model_str, lambda txt: None)(f'{prompt}') if isinstance(response, gr.Image): return response elif isinstance(response, tuple): return None elif isinstance(response, str): if processed_models_count == 0: print(f"***a***********") # print(f"{prompt}") ##print(f"{prompt}") # print(f"{prompt}") ##print(f"***b***********") model_scores[current_model_name] += 1 ##print(f"OOO n:{processed_models_count} x:{current_model_index} r[{model_scores[current_model_name]}] {model_str}") processed_models_count += 1 if processed_models_count == len(models): ## print("\nCycle Complete! 
Updated Scores:") ## print(model_scores) processed_models_count = 0 return response except Exception as e: if processed_models_count == 0: print(f"******c*******") # print(f"{prompt}") # print(f"{prompt}") # print(f"{prompt}") ## print(f"******d*******") ##print(f"--- n:{processed_models_count} x:{current_model_index} r[{model_scores[current_model_name]}] {model_str}") processed_models_count += 1 if processed_models_count == len(models): ## print("\nCycle Complete! Updated Scores:") ## print(model_scores) processed_models_count = 0 return None def make_me(): with gr.Row(): txt_input = gr.Textbox(lines=2, value=kii, label=None) gen_button = gr.Button('Generate images') stop_button = gr.Button('Stop', variant='secondary', interactive=False) #gen_button.click(lambda _: gr.update(interactive=True), None, stop_button) #gen_button.click(lambda s: gr.update(interactive=True), None) gen_button.click(lambda _: gr.update(interactive=True), None, stop_button) gr.HTML("""