Update app.py
app.py
CHANGED
@@ -20,27 +20,28 @@ if not os.path.exists(UPLOAD_FOLDER):
     os.makedirs(UPLOAD_FOLDER)
 
 # Load PaliGemma model and processor (load once)
+import torch
+from transformers import PaliGemmaForConditionalGeneration, AutoProcessor
+
 def load_paligemma_model():
     try:
-        print("Loading PaliGemma model from
+        print("Loading PaliGemma model from Hugging Face...")
         device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
         print(f"Using device: {device}")
 
-        #
-
-
-        # Load model and processor from the specified local path
+        # Load model and processor from Hugging Face Hub
+        model_name = "ahmed-masry/chartgemma"  # Update with the correct model name if needed
         model = PaliGemmaForConditionalGeneration.from_pretrained(
-
+            model_name,
             torch_dtype=torch.float16
         )
-        processor = AutoProcessor.from_pretrained(
+        processor = AutoProcessor.from_pretrained(model_name)
         model = model.to(device)
+
         print("Model loaded successfully")
         return model, processor, device
     except Exception as e:
         print(f"Error loading model: {str(e)}")
-        traceback.print_exc()
         raise
 
 
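For reference, below is a minimal sketch of how the updated loader could be exercised once this commit is deployed. It assumes app.py exposes load_paligemma_model() exactly as shown in the diff; the image path, prompt, and max_new_tokens value are placeholders rather than part of the commit, and the processor/generate calls follow the standard transformers API for PaliGemma-style checkpoints such as ahmed-masry/chartgemma.

import torch
from PIL import Image

from app import load_paligemma_model  # assumed import; app.py is the file changed in this commit

# Load the ChartGemma checkpoint once at startup (downloads from the Hub on first run).
model, processor, device = load_paligemma_model()

# Hypothetical inputs: any chart image plus a question about it.
image = Image.open("example_chart.png").convert("RGB")
prompt = "What is the highest value in the chart?"

# Build the multimodal inputs and cast floating-point tensors to match the float16 weights.
inputs = processor(text=prompt, images=image, return_tensors="pt")
inputs = inputs.to(device=device, dtype=torch.float16)

# Short greedy generation as a quick smoke test.
with torch.no_grad():
    output_ids = model.generate(**inputs, max_new_tokens=128)

# PaliGemma-style generate() echoes the prompt tokens; strip them before decoding.
generated = output_ids[:, inputs["input_ids"].shape[-1]:]
print(processor.batch_decode(generated, skip_special_tokens=True)[0])

Calling load_paligemma_model() a single time at startup, as the "(load once)" comment in app.py suggests, keeps the Hub download and float16 weight load out of the per-request path.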