import os
import random

import gradio as gr
import numpy as np
import torch
import spaces  # required for ZeroGPU Spaces
from diffusers import DiffusionPipeline, DPMSolverMultistepScheduler
from huggingface_hub import login

# Authenticate with the Hugging Face Hub using the Space secret
hf_token = os.getenv("key_for_man_asshole")
login(token=hf_token)
use_karras_sigmas = True  # currently unused; see the optional scheduler swap sketch below

device = "cuda" if torch.cuda.is_available() else "cpu"
model_repo_id = "black-forest-labs/FLUX.1-dev"  # Replace with the model you would like to use

torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32

# Load the base pipeline, merge the LoRA weights into it, and move it to the target device
pipe = DiffusionPipeline.from_pretrained(model_repo_id, torch_dtype=torch_dtype)
pipe.load_lora_weights("artificiallover0/man_asshole")
pipe.fuse_lora()
pipe = pipe.to(device)
print(pipe.scheduler.compatibles)  # debug: list schedulers compatible with this pipeline
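
# Optional scheduler swap (a sketch, not enabled here): the DPMSolverMultistepScheduler
# import and the use_karras_sigmas flag above suggest this was the intent. It should only
# be applied if DPMSolverMultistepScheduler appears in the compatibles list printed above:
#
#     if use_karras_sigmas:
#         pipe.scheduler = DPMSolverMultistepScheduler.from_config(
#             pipe.scheduler.config, use_karras_sigmas=True
#         )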
MAX_SEED = np.iinfo(np.int32).max
MAX_IMAGE_SIZE = 1024
@spaces.GPU  # request a GPU for this call on ZeroGPU Spaces
def infer(
    prompt,
    negative_prompt,
    seed,
    randomize_seed,
    width,
    height,
    guidance_scale,
    num_inference_steps,
    progress=gr.Progress(track_tqdm=True),
):
    # Pick a fresh seed when requested, then seed the generator for reproducible output
    if randomize_seed:
        seed = random.randint(0, MAX_SEED)
    generator = torch.Generator().manual_seed(seed)

    image = pipe(
        prompt=prompt,
        negative_prompt=negative_prompt,
        guidance_scale=guidance_scale,
        num_inference_steps=num_inference_steps,
        width=width,
        height=height,
        generator=generator,
    ).images[0]

    return image, seed
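
# Minimal local smoke test (a hypothetical sketch, kept commented out so it never runs on
# the Space; the LOCAL_TEST guard, prompt text, and output filename are placeholders):
#
#     if os.getenv("LOCAL_TEST"):
#         img, used_seed = infer(
#             prompt="test prompt",
#             negative_prompt="",
#             seed=0,
#             randomize_seed=True,
#             width=512,
#             height=512,
#             guidance_scale=7.0,
#             num_inference_steps=20,
#         )
#         img.save("local_test.png")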
examples = [
"""a naked hairy man kneeling on all fours with his dirty feet facing the viewer
dirty feet, big hands holding his butt
he is crowd surfing
the crowd has their hands on him passing him forward
he legs rest on their shoulders
people below him are cheering
big erect penis""",
"""Photograph of a plus-sized ginger man kneeling with his butt facing the viewer. he has a large belly
he has a hairy butt, big low hanging testicles and dirty unwashed bare feet
high quality, fashion photography
he is eating corn from a metal trough
in a barn
he is eating from the metal bin like a pig""",
"""Photograph of a huge muscle man kneeling with his butt facing the viewer.
he has a hairy butt, big low hanging testicles and dirty unwashed bare feet
high quality, fashion photography
he is laying face down on a red leather fainting couch""",
"""
8k resolution, ultra detailed, 1 chico , Alone, huge muscle man
, man in mechanic naked, ultra detailed piernas gorditas y peludas, futurist,
with his ass in a sexual pose, asshole , military hat, looking over shoulder,
bending down, back towards viewer, big body, fit body, not fat, gigantic buttocks,
looking over shoulder, bending down,
back towards viewer whole body, sexy round hairy ass butt, thigh show, Super detailed""",
"""a chunky naked plumber kneeling under a sink holding a wrench and fixing a metal pipe under the sink.
he looking over shoulder, bending down, back towards viewer, big body,
His exposed anus and big testicles are the focus of the image, rear view,
facing away from viewer, ass in viewers face, greasy, (huge:1.9) muscle man ,
(huge:1.9) hairy back, leather boots, plumbers crack, ((dirty sweatpants pulled down:1.9)) view from below""",
]
css = """
#col-container {
margin: 0 auto;
max-width: 640px;
}
"""
with gr.Blocks(css=css) as demo:
    with gr.Column(elem_id="col-container"):
        gr.Markdown(" # Text-to-Image Gradio Template")

        with gr.Row():
            prompt = gr.Text(
                label="Prompt",
                show_label=False,
                max_lines=5,
                placeholder="Enter your prompt",
                container=False,
                lines=5,
            )

            run_button = gr.Button("Run", scale=0, variant="primary")

        result = gr.Image(label="Result", show_label=False)
        with gr.Accordion("Advanced Settings", open=False):
            negative_prompt = gr.Text(
                label="Negative prompt",
                max_lines=1,
                placeholder="Enter a negative prompt",
                visible=False,
            )

            seed = gr.Slider(
                label="Seed",
                minimum=0,
                maximum=MAX_SEED,
                step=1,
                value=0,
            )

            randomize_seed = gr.Checkbox(label="Randomize seed", value=True)

            with gr.Row():
                width = gr.Slider(
                    label="Width",
                    minimum=256,
                    maximum=MAX_IMAGE_SIZE,
                    step=32,
                    value=1024,  # Replace with defaults that work for your model
                )

                height = gr.Slider(
                    label="Height",
                    minimum=256,
                    maximum=MAX_IMAGE_SIZE,
                    step=32,
                    value=1024,  # Replace with defaults that work for your model
                )

            with gr.Row():
                guidance_scale = gr.Slider(
                    label="Guidance scale",
                    minimum=0.0,
                    maximum=10.0,
                    step=0.1,
                    value=7.0,  # Replace with defaults that work for your model
                )

                num_inference_steps = gr.Slider(
                    label="Number of inference steps",
                    minimum=1,
                    maximum=50,
                    step=1,
                    value=30,  # Replace with defaults that work for your model
                )
        gr.Examples(examples=examples, inputs=[prompt])

    # Run inference when the button is clicked or the prompt is submitted
    gr.on(
        triggers=[run_button.click, prompt.submit],
        fn=infer,
        inputs=[
            prompt,
            negative_prompt,
            seed,
            randomize_seed,
            width,
            height,
            guidance_scale,
            num_inference_steps,
        ],
        outputs=[result, seed],
    )
if __name__ == "__main__":
    demo.launch()