multimodalart (HF Staff) committed · verified
Commit 0abb371 · 1 parent: 944abe8

Update app.py

Files changed (1):
  1. app.py (+6 -10)

app.py CHANGED
@@ -4,6 +4,7 @@ import random
 from diffusers import AuraFlowPipeline
 import torch
 import spaces
+from diffusers import PixArtSigmaPipeline
 
 device = "cuda" if torch.cuda.is_available() else "cpu"
 
@@ -14,9 +15,9 @@ device = "cuda" if torch.cuda.is_available() else "cpu"
 #torch._inductor.config.epilogue_fusion = False
 #torch._inductor.config.coordinate_descent_check_all_directions = True
 
-pipe = AuraFlowPipeline.from_pretrained(
-    "fal/AuraFlow",
-    torch_dtype=torch.float16
+pipe = PixArtSigmaPipeline.from_pretrained(
+    "dataautogpt3/PixArt-Sigma-900M",
+    torch_dtype=torch.float16,
 ).to("cuda")
 
 #pipe.transformer.to(memory_format=torch.channels_last)
@@ -62,17 +63,12 @@ css="""
 }
 """
 
-if torch.cuda.is_available():
-    power_device = "GPU"
-else:
-    power_device = "CPU"
-
 with gr.Blocks(css=css) as demo:
 
     with gr.Column(elem_id="col-container"):
         gr.Markdown(f"""
-        # AuraFlow 0.1
-        Demo of the [AuraFlow 0.1](https://huggingface.co/fal/AuraFlow) 6.8B parameters open source diffusion transformer model
+        # PixArt Sigma 900M
+        Demo of the [PixArt Sigma 900M](https://huggingface.co/dataautogpt3/PixArt-Sigma-900M) 900M parameter open source diffusion transformer model
         [[blog](https://blog.fal.ai/auraflow/)] [[model](https://huggingface.co/fal/AuraFlow)] [[fal](https://fal.ai/models/fal-ai/aura-flow)]
         """)
 
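For reference, a minimal sketch of how the swapped-in pipeline is typically wired up in a ZeroGPU Space: `PixArtSigmaPipeline` comes from diffusers, and `spaces.GPU` is the decorator that the existing `import spaces` line exists for. The `infer` function name, the default seed/steps/guidance values, and the bare-bones Gradio layout are illustrative assumptions rather than this Space's actual code, and the sketch assumes the `dataautogpt3/PixArt-Sigma-900M` repo loads directly through `from_pretrained`.

```python
import torch
import spaces
import gradio as gr
from diffusers import PixArtSigmaPipeline

device = "cuda" if torch.cuda.is_available() else "cpu"

# Half-precision load of the 900M checkpoint, mirroring the commit.
pipe = PixArtSigmaPipeline.from_pretrained(
    "dataautogpt3/PixArt-Sigma-900M",
    torch_dtype=torch.float16,
).to(device)


@spaces.GPU  # borrow a ZeroGPU device for the duration of each call
def infer(prompt: str, seed: int = 42, steps: int = 28, guidance: float = 4.5):
    # Seeded generator so the same prompt/seed pair reproduces the same image.
    generator = torch.Generator(device).manual_seed(seed)
    image = pipe(
        prompt=prompt,
        num_inference_steps=steps,
        guidance_scale=guidance,
        generator=generator,
    ).images[0]
    return image


with gr.Blocks() as demo:
    prompt = gr.Text(label="Prompt")
    result = gr.Image(label="Result")
    gr.Button("Generate").click(fn=infer, inputs=[prompt], outputs=[result])

if __name__ == "__main__":
    demo.launch()
```

The commit itself only touches the pipeline constructor and the header text; the rest of the app's Gradio plumbing is left as it was.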