dhead committed
Commit ba6b905 · verified · 1 Parent(s): e64b265

Upload app.py

Files changed (1)
  app.py +2 -2
app.py CHANGED
@@ -33,7 +33,7 @@ pipe.unet.to(torch.float16)
 MAX_SEED = np.iinfo(np.int32).max
 MAX_IMAGE_SIZE = 1216
 
-@spaces.GPU(duration=10)
+@spaces.GPU()
 def infer(prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps):
     # Check and truncate prompt if too long (CLIP can only handle 77 tokens)
     if len(prompt.split()) > 60:  # Rough estimate to avoid exceeding token limit
@@ -66,7 +66,7 @@ def infer(prompt, negative_prompt, seed, randomize_seed, width, height, guidance
 css = """
 #col-container {
     margin: 0 auto;
-    max-width: 520px;
+    max-width: 1024px;
 }
 """
 
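For context, spaces.GPU is the ZeroGPU decorator from the Hugging Face spaces package: a GPU is attached only while the decorated function runs. Dropping duration=10 removes the 10-second cap on each call and falls back to the Space's default time budget, which gives the diffusion pipeline more room for its sampling steps. A minimal sketch of the pattern, assuming an SDXL-style diffusers pipeline (the model id, prompt handling, and step count below are illustrative, not taken from app.py):

    import spaces
    import torch
    from diffusers import StableDiffusionXLPipeline

    # Illustrative setup; the actual model loaded in app.py is not visible in this diff.
    pipe = StableDiffusionXLPipeline.from_pretrained(
        "stabilityai/stable-diffusion-xl-base-1.0",
        torch_dtype=torch.float16,
    )
    pipe.to("cuda")

    @spaces.GPU()  # default ZeroGPU time budget; @spaces.GPU(duration=10) would cap each call at ~10 s
    def infer(prompt):
        # The GPU is held only while this call executes.
        return pipe(prompt=prompt, num_inference_steps=28).images[0]

The second hunk is a pure UI tweak: it widens the #col-container element (presumably a Gradio Blocks column) from 520px to 1024px so generated images can render larger on the page.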