google/flan-ul2
app.py
CHANGED
@@ -11,7 +11,7 @@ examples = [['COVID-19 is'],['A 65-year-old female patient with a past medical h
 print(f"Is CUDA available: {torch.cuda.is_available()}")
 print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")
 
-pipe_biogpt = pipeline("text-generation", model="microsoft/
+pipe_biogpt = pipeline("text-generation", model="microsoft/biogpt", device="cuda:0", model_kwargs={"torch_dtype":torch.bfloat16})
 pipe_flan_ul2 = pipeline("text-generation", model="google/flan-ul2", device="cuda:0", model_kwargs={"torch_dtype":torch.bfloat16})
 
 title = "LLM vs LLM!"
@@ -29,8 +29,8 @@ io = gr.Interface(
     inference,
     gr.Textbox(lines=3),
     outputs=[
-        gr.Textbox(lines=3, label="
-        gr.Textbox(lines=3, label="
+        gr.Textbox(lines=3, label="microsoft/biogpt"),
+        gr.Textbox(lines=3, label="google/flan-ul2"),
    ],
    title=title,
    description=description,
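For context, a minimal sketch of how the pieces touched by this diff could fit together in app.py after the change. The imports, the inference function body, the description text, the generation length, and the final launch() call are assumptions based only on the lines visible above, not the Space's actual code. The sketch also uses the "text2text-generation" pipeline task for google/flan-ul2, since it is an encoder-decoder (T5-style) model, whereas the diff itself keeps "text-generation".

import torch
import gradio as gr
from transformers import pipeline

print(f"Is CUDA available: {torch.cuda.is_available()}")
print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")

# Both pipelines are placed on the first GPU in bfloat16, as in the updated lines above.
pipe_biogpt = pipeline(
    "text-generation",  # BioGPT is a causal language model
    model="microsoft/biogpt",
    device="cuda:0",
    model_kwargs={"torch_dtype": torch.bfloat16},
)
pipe_flan_ul2 = pipeline(
    "text2text-generation",  # assumption: flan-ul2 is encoder-decoder; the diff uses "text-generation"
    model="google/flan-ul2",
    device="cuda:0",
    model_kwargs={"torch_dtype": torch.bfloat16},
)

title = "LLM vs LLM!"
description = "Compare generations from microsoft/biogpt and google/flan-ul2 on the same prompt."  # assumed wording

def inference(text):
    # Assumed implementation: run the same prompt through both models and
    # return one string per output textbox, in the order of `outputs` below.
    biogpt_out = pipe_biogpt(text, max_new_tokens=100)[0]["generated_text"]
    flan_ul2_out = pipe_flan_ul2(text, max_new_tokens=100)[0]["generated_text"]
    return biogpt_out, flan_ul2_out

io = gr.Interface(
    inference,
    gr.Textbox(lines=3),
    outputs=[
        gr.Textbox(lines=3, label="microsoft/biogpt"),
        gr.Textbox(lines=3, label="google/flan-ul2"),
    ],
    title=title,
    description=description,
)

io.launch()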