Spaces: Running on Zero
Upload app.py
app.py CHANGED
@@ -33,7 +33,7 @@ pipe.unet.to(torch.float16)
 MAX_SEED = np.iinfo(np.int32).max
 MAX_IMAGE_SIZE = 1216
 
-@spaces.GPU(
+@spaces.GPU()
 def infer(prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps):
     # Check and truncate prompt if too long (CLIP can only handle 77 tokens)
     if len(prompt.split()) > 60:  # Rough estimate to avoid exceeding token limit
@@ -66,7 +66,7 @@ def infer(prompt, negative_prompt, seed, randomize_seed, width, height, guidance
 css = """
 #col-container {
     margin: 0 auto;
-    max-width:
+    max-width: 1024px;
 }
 """
 
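The first hunk matters because `@spaces.GPU(` is an unclosed call: the module fails with a SyntaxError before Gradio can start, whereas `@spaces.GPU()` (or the bare `@spaces.GPU`) is the decorator form the ZeroGPU `spaces` package expects. Below is a minimal sketch of that pattern; the checkpoint name, seed handling, and simplified infer body are illustrative assumptions, not copied from this app.py.

# Minimal ZeroGPU sketch (assumed details: checkpoint name and simplified infer body).
import numpy as np
import spaces                      # Hugging Face ZeroGPU helper package
import torch
from diffusers import StableDiffusionXLPipeline

pipe = StableDiffusionXLPipeline.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0",   # illustrative checkpoint, not the Space's
    torch_dtype=torch.float16,
).to("cuda")

MAX_SEED = np.iinfo(np.int32).max
MAX_IMAGE_SIZE = 1216

@spaces.GPU()   # a GPU is attached only for the duration of each infer() call
def infer(prompt, negative_prompt, seed, randomize_seed, width, height,
          guidance_scale, num_inference_steps):
    # Rough guard against CLIP's 77-token context: keep the prompt short.
    if len(prompt.split()) > 60:
        prompt = " ".join(prompt.split()[:60])
    if randomize_seed:
        seed = int(np.random.randint(0, MAX_SEED))
    generator = torch.Generator(device="cuda").manual_seed(seed)
    image = pipe(
        prompt=prompt,
        negative_prompt=negative_prompt,
        width=min(width, MAX_IMAGE_SIZE),
        height=min(height, MAX_IMAGE_SIZE),
        guidance_scale=guidance_scale,
        num_inference_steps=num_inference_steps,
        generator=generator,
    ).images[0]
    return image, seed

The second hunk simply completes the CSS rule: `max-width:` with no value is invalid and ignored by the browser, so `max-width: 1024px;` is what actually constrains the `#col-container` column.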