svjack committed
Commit f54cdb7 · verified · 1 Parent(s): f52ebfb

Update app.py

Files changed (1)
  1. app.py +4 -4
app.py CHANGED
@@ -1,6 +1,6 @@
- import spaces
+ #import spaces
  import gradio as gr
- from huggingface_hub import InferenceClient
+ #from huggingface_hub import InferenceClient
  from torch import nn
  from transformers import AutoModel, AutoProcessor, AutoTokenizer, PreTrainedTokenizer, PreTrainedTokenizerFast, AutoModelForCausalLM
  from pathlib import Path
@@ -156,7 +156,7 @@ image_adapter.eval()
  image_adapter.to("cuda")
 
 
- @spaces.GPU()
+ #@spaces.GPU()
  @torch.no_grad()
  def stream_chat(input_image: Image.Image, caption_type: str, caption_length: str | int, extra_options: list[str], name_input: str, custom_prompt: str) -> tuple[str, str]:
      torch.cuda.empty_cache()
@@ -333,4 +333,4 @@ with gr.Blocks() as demo:
 
 
  if __name__ == "__main__":
-     demo.launch()
+     demo.launch(share = True)
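
The commit comments out the two Hugging Face Spaces-specific pieces (the spaces import and the @spaces.GPU() ZeroGPU decorator) and switches demo.launch() to demo.launch(share = True), so Gradio also exposes the demo through a temporary public share link instead of relying on the Spaces runtime. A minimal sketch of an alternative, assuming the goal is one app.py that runs both on Spaces and locally, would guard the import rather than comment it out (hypothetical, not part of this commit):

# Hypothetical sketch (not part of this commit): fall back to a no-op
# decorator when the `spaces` package is unavailable, so the same app.py
# works on a Spaces ZeroGPU runtime and on a local machine.
import torch

try:
    import spaces                     # present on Hugging Face Spaces
    gpu_decorator = spaces.GPU()      # requests a ZeroGPU slot per call
except ImportError:
    def gpu_decorator(fn):            # local run: plain passthrough
        return fn


@gpu_decorator
@torch.no_grad()
def stream_chat(input_image, caption_type, caption_length,
                extra_options, name_input, custom_prompt):
    torch.cuda.empty_cache()          # no-op if CUDA is not initialized
    # ... captioning logic from app.py goes here ...
    return "prompt used", "generated caption"

With share = True, Gradio tunnels the locally served app through a temporary public *.gradio.live URL, which is the other half of what this commit changes.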