Update main.py
main.py CHANGED
@@ -849,7 +849,8 @@ async def _call_groq(text: str, summary_type: str) -> Tuple[Optional[str], Optional[str]]:
 
 async def _call_gemini(text: str, summary_type: str, model_name: str) -> Tuple[Optional[str], Optional[str]]:
     """Internal function to call Gemini API. Returns (summary, error_message)."""
-
+    # Make sure globals are accessible if needed (import should suffice)
+    global _gemini_api_enabled, HarmCategory, HarmBlockThreshold, genai, google
     if not _gemini_api_enabled:
         logger.error(f"[Gemini {model_name}] Called but API is disabled.");
         return None, f"Error: AI service (Gemini API) not configured/available."
@@ -859,11 +860,6 @@ async def _call_gemini(text: str, summary_type: str, model_name: str) -> Tuple[Optional[str], Optional[str]]:
         logger.error(f"[Gemini {model_name}] SDK or safety types (HarmCategory/HarmBlockThreshold) are None/unavailable.")
         return None, f"Sorry, an internal configuration error occurred with the AI service ({model_name}). SDK components missing."
 
-    # Also check if the google exceptions module loaded - needed for specific error handling
-    if google is None or not hasattr(google, 'api_core') or not hasattr(google.api_core, 'exceptions'):
-        logger.warning(f"[Gemini {model_name}] google.api_core.exceptions not available for specific error handling. Will use general exceptions.")
-        # We can proceed but error handling might be less specific
-
     logger.info(f"[Gemini {model_name}] Generating {summary_type} summary using {model_name}. Input length: {len(text)}")
 
     prompt = PROMPT_PARAGRAPH if summary_type == "paragraph" else PROMPT_POINTS
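The hunk above removes the inline check that `google.api_core.exceptions` imported successfully. Both that removed check and the reworked catch-all at the end of the function assume a guarded import at module level; that import block is outside this diff, so the sketch below is only an illustration of the pattern, with assumed names and an assumed flag assignment.

```python
# Hypothetical module-level guard (main.py's real import block is not part of
# this diff; names and the flag assignment are assumptions).
try:
    import google                          # namespace package for the Google client libraries
    import google.api_core.exceptions      # exception types used by the specific except clauses
    import google.generativeai as genai    # Gemini SDK
    from google.generativeai.types import HarmCategory, HarmBlockThreshold
    _gemini_api_enabled = True             # the real flag presumably also depends on API-key configuration
except ImportError:
    google = None
    genai = None
    HarmCategory = None
    HarmBlockThreshold = None
    _gemini_api_enabled = False
```

If that import fails, the `except google.api_core.exceptions.*` clauses later in the function cannot resolve their exception types, which is the condition the new `google_exceptions_available` check in the final hunk is probing for.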
@@ -892,7 +888,7 @@ async def _call_gemini(text: str, summary_type: str, model_name: str) -> Tuple[Optional[str], Optional[str]]:
         logger.error(f"[Gemini {model_name}] Unexpected error defining safety settings ({type(e).__name__}): {e}.", exc_info=True)
         return None, f"Sorry, an internal error occurred configuring the AI service ({model_name}). Safety settings definition failed."
 
-    if not safety_settings:
+    if not safety_settings:
         logger.error(f"[Gemini {model_name}] Failed to define any safety settings.")
         return None, f"Sorry, an internal error occurred configuring the AI service ({model_name}). No safety settings defined."
 
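The `-`/`+` pair on the `if not safety_settings:` line appears to be a whitespace-only change. The `safety_settings` object it tests is built earlier in the function, outside the hunks shown here; purely to illustrate the `HarmCategory`/`HarmBlockThreshold` types the function checks for, a definition along these lines is typical (the categories and thresholds actually used by main.py are not visible in this diff):

```python
from google.generativeai.types import HarmCategory, HarmBlockThreshold

# Illustrative only: main.py's actual category/threshold choices are not shown in the diff.
safety_settings = {
    HarmCategory.HARM_CATEGORY_HARASSMENT: HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE,
    HarmCategory.HARM_CATEGORY_HATE_SPEECH: HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE,
    HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE,
    HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE,
}
```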
@@ -945,47 +941,53 @@ async def _call_gemini(text: str, summary_type: str, model_name: str) -> Tuple[Optional[str], Optional[str]]:
         if finish_reason_str == 'SAFETY': return None, f"Sorry, the AI model ({model_name}) response was blocked by safety filters."
         return None, f"Sorry, the AI model ({model_name}) did not provide a summary (Finish Reason: {finish_reason_str})."
 
-    # --- Exception Handling (
-    except AttributeError as ae:
+    # --- Exception Handling (Corrected Syntax) ---
+    except AttributeError as ae:
         logger.error(f"[Gemini {model_name}] AttributeError during Gemini response processing: {ae}. SDK/response structure issue.", exc_info=True);
         return None, f"Sorry, there was an issue processing the response from the AI service ({model_name}). Attribute error."
 
-    #
-
-
+    # --- Specific Google API Error Handling (Only if google.api_core.exceptions was imported) ---
+    # Attempt to catch specific errors if the 'google' object and submodules are available
+    except google.api_core.exceptions.NotFound as nfe:
         logger.error(f"[Gemini {model_name}] Model Not Found error from Gemini API: {nfe}", exc_info=False)
         user_message = f"Sorry, the AI model '{model_name}' was not found by the API service. It might be unavailable or spelled incorrectly."
         return None, user_message
-    except google.api_core.exceptions.InvalidArgument as iae
+    except google.api_core.exceptions.InvalidArgument as iae:
         logger.error(f"[Gemini {model_name}] Invalid Argument error from Gemini API: {iae}", exc_info=False)
         error_detail = str(iae)
         user_message = f"Sorry, the AI service ({model_name}) reported an invalid argument."
         if "API key not valid" in error_detail: user_message = f"Error: The API key for the AI service ({model_name}) is invalid."
-        # Add other specific InvalidArgument checks if needed
         return None, user_message
-    except google.api_core.exceptions.PermissionDenied as pde
+    except google.api_core.exceptions.PermissionDenied as pde:
         logger.error(f"[Gemini {model_name}] Permission Denied error from Gemini API: {pde}", exc_info=False)
         user_message = f"Error: Access denied for the AI service ({model_name}). Check API key permissions."
         return None, user_message
-    except google.api_core.exceptions.ResourceExhausted as ree
+    except google.api_core.exceptions.ResourceExhausted as ree:
         logger.error(f"[Gemini {model_name}] Resource Exhausted (Quota/Rate Limit) error from Gemini API: {ree}", exc_info=False)
         user_message = f"Sorry, the AI model ({model_name}) is busy or quota exceeded. Please try again later."
         return None, user_message
-    except google.api_core.exceptions.GoogleAPIError as gae
-        # Catch other Google API errors
+    except google.api_core.exceptions.GoogleAPIError as gae:
         logger.error(f"[Gemini {model_name}] Google API error during Gemini call: {gae}", exc_info=False)
         status_code = getattr(gae, 'code', 'Unknown')
         user_message = f"Sorry, the AI service ({model_name}) encountered an API error (Code: {status_code})."
         if status_code == 500: user_message = f"Sorry, the AI service ({model_name}) had an internal server error."
-        # Add other status codes if needed
         return None, user_message
+
+    # --- General Exception Catch-all ---
     except Exception as e:
-        #
-
-
-        #
-        if isinstance(e, NameError) and 'google' in str(e):
-
+        # Check if 'google' was defined during import, if not, specific catches above would have failed with NameError
+        google_exceptions_available = 'google' in globals() and google and hasattr(google, 'api_core') and hasattr(google.api_core, 'exceptions')
+
+        # Log the error appropriately
+        if isinstance(e, NameError) and 'google' in str(e) and not google_exceptions_available:
+            # This specifically means the import failed and we tried to use a google.api_core exception type
+            logger.error(f"[Gemini {model_name}] Failed to import google.api_core.exceptions. Cannot catch specific API errors. Fallback error: {e}", exc_info=True);
+            error_msg = f"Sorry, an internal configuration error occurred with the AI service ({model_name}). Cannot handle specific API errors."
+        else:
+            # General unexpected error
+            logger.error(f"[Gemini {model_name}] Unexpected error during Gemini API call: {e}", exc_info=True);
+            error_msg = f"Sorry, an unexpected error occurred while using the AI service ({model_name})."
+
         return None, error_msg
 
 
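The main substance of the last hunk is syntactic: the previous `except google.api_core.exceptions.... as iae/pde/ree/gae` lines appear to be missing their trailing colons, which is a SyntaxError and would keep main.py from loading at all. The catch-all at the end is also rewritten to report differently when the failure stems from the `google` import being unavailable. A stripped-down, self-contained sketch of the corrected ladder (simplified relative to `_call_gemini`: illustrative messages, no logging, and an unconditional import rather than main.py's guarded one) is:

```python
from typing import Optional, Tuple

import google.api_core.exceptions
import google.generativeai as genai


async def generate_summary_sketch(model: genai.GenerativeModel, prompt: str,
                                  model_name: str) -> Tuple[Optional[str], Optional[str]]:
    """Minimal error-handling ladder mirroring the corrected syntax in the diff."""
    try:
        # The real function also applies safety_settings and inspects finish_reason.
        response = await model.generate_content_async(prompt)
        return response.text, None
    except AttributeError as ae:
        return None, f"Problem reading the response from {model_name}: {ae}"
    except google.api_core.exceptions.NotFound:
        return None, f"Model '{model_name}' was not found by the API service."
    except google.api_core.exceptions.InvalidArgument as iae:
        msg = f"The AI service ({model_name}) reported an invalid argument."
        if "API key not valid" in str(iae):
            msg = "The API key for the AI service is invalid."
        return None, msg
    except google.api_core.exceptions.PermissionDenied:
        return None, f"Access denied for {model_name}; check API key permissions."
    except google.api_core.exceptions.ResourceExhausted:
        return None, f"{model_name} is busy or over quota; try again later."
    except google.api_core.exceptions.GoogleAPIError as gae:
        return None, f"API error from {model_name} (code: {getattr(gae, 'code', 'Unknown')})."
    except Exception as e:  # last-resort catch, as in the original
        return None, f"Unexpected error while calling {model_name}: {e}"
```

Note that the original's trick of detecting a missing `google` import inside the same try/except (via NameError) is left out of this sketch; with an unconditional top-level import, a missing dependency surfaces as an ImportError at module load instead.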