fmab777 commited on
Commit
71bccd9
·
verified ·
1 Parent(s): d29aaf9

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +43 -12
main.py CHANGED
@@ -1,4 +1,4 @@
1
- # main.py (Full Code - Specific April 2025 Models: Llama 4 Scout & DeepSeek V3 Free)
2
  import os
3
  import re
4
  import logging
@@ -536,7 +536,8 @@ async def _call_groq(text: str, summary_type: str) -> Tuple[Optional[str], Optio
536
  try:
537
  groq_client = Groq( api_key=GROQ_API_KEY, timeout=httpx.Timeout(120.0, connect=10.0) )
538
  logger.info(f"[Groq Primary] Sending request to Groq ({GROQ_LLAMA4_MODEL})...")
539
- chat_completion = await groq_client.chat.completions.create(
 
540
  messages=[ { "role": "user", "content": full_prompt } ],
541
  model=GROQ_LLAMA4_MODEL,
542
  temperature=0.7, # Adjust from Groq default of 1 if needed
@@ -567,11 +568,18 @@ async def _call_groq(text: str, summary_type: str) -> Tuple[Optional[str], Optio
567
  return None, f"Sorry, couldn't connect to the primary AI service ({GROQ_LLAMA4_MODEL})."
568
  except Exception as e:
569
  logger.error(f"[Groq Primary] Unexpected error during Groq API call: {e}", exc_info=True);
 
 
 
 
 
 
570
  return None, f"Sorry, an unexpected error occurred while using the primary AI service ({GROQ_LLAMA4_MODEL})."
571
 
 
572
  async def _call_gemini(text: str, summary_type: str, model_name: str) -> Tuple[Optional[str], Optional[str]]:
573
  """Internal function to call Gemini API. Returns (summary, error_message)."""
574
- global _gemini_api_enabled
575
  if not _gemini_api_enabled:
576
  logger.error(f"[Gemini {model_name}] Called but API is disabled.");
577
  return None, f"Error: AI service (Gemini API) not configured/available."
@@ -583,11 +591,23 @@ async def _call_gemini(text: str, summary_type: str, model_name: str) -> Tuple[O
583
  logger.warning(f"[Gemini {model_name}] Input length ({len(text)}) exceeds limit ({MAX_INPUT_LENGTH_GEMINI}). Truncating.");
584
  text = text[:MAX_INPUT_LENGTH_GEMINI] + "... (Content truncated)"
585
  full_prompt = f"{prompt}\n\n{text}"
586
- safety_settings = { HarmCategory.HARM_CATEGORY_HARASSMENT: HarmBlockThreshold.BLOCK_NONE, HarmCategory.HARM_CATEGORY_HATE_SPEECH: HarmBlockThreshold.BLOCK_NONE, HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: HarmBlockThreshold.BLOCK_NONE, HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_NONE, }
587
- for category_name in dir(HarmCategory):
588
- if category_name.startswith('HARM_CATEGORY_') and getattr(HarmCategory, category_name) not in safety_settings:
589
- safety_settings[getattr(HarmCategory, category_name)] = HarmBlockThreshold.BLOCK_NONE
590
- logger.debug(f"[Gemini {model_name}] Using safety settings: { {k.name: v.name for k, v in safety_settings.items()} }")
 
 
 
 
 
 
 
 
 
 
 
 
591
 
592
  try:
593
  logger.debug(f"[Gemini {model_name}] Initializing model {model_name}")
@@ -629,9 +649,14 @@ async def _call_gemini(text: str, summary_type: str, model_name: str) -> Tuple[O
629
  return None, f"Sorry, there was an issue processing the response from the AI service ({model_name})."
630
  except Exception as e:
631
  logger.error(f"[Gemini {model_name}] Unexpected error during Gemini API call: {e}", exc_info=True);
 
 
 
 
632
  error_msg = f"Sorry, an unexpected error occurred while using the AI service ({model_name})."
633
  return None, error_msg
634
 
 
635
  async def _call_openrouter(text: str, summary_type: str) -> Tuple[Optional[str], Optional[str]]:
636
  """Internal function to call OpenRouter API (Final Fallback - DeepSeek V3 Free). Returns (summary, error_message)."""
637
  global OPENROUTER_API_KEY, OPENROUTER_DEEPSEEK_MODEL, _openrouter_fallback_enabled
@@ -741,7 +766,7 @@ async def generate_summary(text: str, summary_type: str) -> str:
741
  errors["GeminiFlash"] = "Service disabled/unavailable."
742
 
743
  # --- Attempt 3: Gemini 2.0 Flash ---
744
- if _gemini_api_enabled and errors["GeminiFlash"] is None:
745
  logger.info(f"[Summary Generation] Attempting 3: Gemini ({GEMINI_FLASH_MODEL})")
746
  summary, errors["GeminiFlash"] = await _call_gemini(text, summary_type, GEMINI_FLASH_MODEL)
747
  if summary:
@@ -749,7 +774,7 @@ async def generate_summary(text: str, summary_type: str) -> str:
749
  return summary
750
  else:
751
  logger.warning(f"[Summary Generation] Gemini 2.0 Flash failed. Error: {errors['GeminiFlash']}. Proceeding to OpenRouter DeepSeek V3.")
752
- elif errors["GeminiFlash"] is None:
753
  logger.warning("[Summary Generation] Skipping Gemini 2.0 Flash (API was disabled).")
754
  errors["GeminiFlash"] = "Service disabled/unavailable."
755
 
@@ -1106,7 +1131,7 @@ async def health_check(request: Request) -> PlainTextResponse:
1106
 
1107
  async def telegram_webhook(request: Request) -> Response:
1108
  """Handles incoming updates from Telegram."""
1109
- global WEBHOOK_SECRET
1110
  if not ptb_app: logger.error("Webhook received but PTB application not initialized."); return PlainTextResponse('Bot not initialized', status_code=503)
1111
  if not ptb_app.running: logger.warning("Webhook received but PTB application not running."); return PlainTextResponse('Bot not running, cannot process update', status_code=503)
1112
  if WEBHOOK_SECRET:
@@ -1117,7 +1142,7 @@ async def telegram_webhook(request: Request) -> Response:
1117
  logger.debug(f"Processing update_id: {update.update_id} via webhook"); await ptb_app.process_update(update)
1118
  return Response(status_code=200)
1119
  except json.JSONDecodeError: logger.error("Webhook received invalid JSON."); return PlainTextResponse('Bad Request: Invalid JSON', status_code=400)
1120
- except Exception as e: logger.error(f"Error processing webhook update: {e}", exc_info=True); return Response(status_code=200)
1121
 
1122
  # --- Starlette App Definition ---
1123
  app = Starlette(
@@ -1133,6 +1158,12 @@ logger.info("Starlette ASGI application created with health check and webhook ro
1133
  # --- Development Server (if run directly) ---
1134
  if __name__ == '__main__':
1135
  import uvicorn
 
 
 
 
 
 
1136
  logger.warning("Running in development mode using Uvicorn directly - NOT for production!")
1137
  log_level = os.environ.get("LOGGING_LEVEL", "info").lower()
1138
  local_port = int(os.environ.get('PORT', 8080))
 
1
+ # main.py (Full Code - Specific April 2025 Models: Llama 4 Scout & DeepSeek V3 Free - FIXED)
2
  import os
3
  import re
4
  import logging
 
536
  try:
537
  groq_client = Groq( api_key=GROQ_API_KEY, timeout=httpx.Timeout(120.0, connect=10.0) )
538
  logger.info(f"[Groq Primary] Sending request to Groq ({GROQ_LLAMA4_MODEL})...")
539
+ # FIX: Removed await from the synchronous call
540
+ chat_completion = groq_client.chat.completions.create(
541
  messages=[ { "role": "user", "content": full_prompt } ],
542
  model=GROQ_LLAMA4_MODEL,
543
  temperature=0.7, # Adjust from Groq default of 1 if needed
 
568
  return None, f"Sorry, couldn't connect to the primary AI service ({GROQ_LLAMA4_MODEL})."
569
  except Exception as e:
570
  logger.error(f"[Groq Primary] Unexpected error during Groq API call: {e}", exc_info=True);
571
+ # Don't expose internal TypeError potentially, give a generic message
572
+ # Check if the specific error is the TypeError we saw earlier.
573
+ if isinstance(e, TypeError) and "can't be used in 'await' expression" in str(e):
574
+ # This case *shouldn't* happen now that await is removed, but it is good to handle defensively.
575
+ logger.error("[Groq Primary] Encountered unexpected await TypeError again.")
576
+ return None, f"Sorry, an internal configuration error occurred with the primary AI service ({GROQ_LLAMA4_MODEL})."
577
  return None, f"Sorry, an unexpected error occurred while using the primary AI service ({GROQ_LLAMA4_MODEL})."
578
 
579
+
580
  async def _call_gemini(text: str, summary_type: str, model_name: str) -> Tuple[Optional[str], Optional[str]]:
581
  """Internal function to call Gemini API. Returns (summary, error_message)."""
582
+ global _gemini_api_enabled, HarmCategory, HarmBlockThreshold # Ensure HarmCategory/Threshold are available if SDK is loaded
583
  if not _gemini_api_enabled:
584
  logger.error(f"[Gemini {model_name}] Called but API is disabled.");
585
  return None, f"Error: AI service (Gemini API) not configured/available."
 
591
  logger.warning(f"[Gemini {model_name}] Input length ({len(text)}) exceeds limit ({MAX_INPUT_LENGTH_GEMINI}). Truncating.");
592
  text = text[:MAX_INPUT_LENGTH_GEMINI] + "... (Content truncated)"
593
  full_prompt = f"{prompt}\n\n{text}"
594
+
595
+ # FIX: Define safety_settings explicitly with the 5 categories from the error log
596
+ # Removed the problematic loop that dynamically added potentially unsupported categories.
597
+ try:
598
+ safety_settings = {
599
+ HarmCategory.HARM_CATEGORY_HARASSMENT: HarmBlockThreshold.BLOCK_NONE,
600
+ HarmCategory.HARM_CATEGORY_HATE_SPEECH: HarmBlockThreshold.BLOCK_NONE,
601
+ HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: HarmBlockThreshold.BLOCK_NONE,
602
+ HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_NONE,
603
+ HarmCategory.HARM_CATEGORY_CIVIC_INTEGRITY: HarmBlockThreshold.BLOCK_NONE, # Added based on error msg
604
+ }
605
+ logger.debug(f"[Gemini {model_name}] Using safety settings: { {k.name: v.name for k, v in safety_settings.items()} }")
606
+ except AttributeError:
607
+ # Handle case where HarmCategory or HarmBlockThreshold might not be loaded correctly
608
+ # This shouldn't happen if _gemini_sdk_available is True, but defensive coding
609
+ logger.error(f"[Gemini {model_name}] Failed to define safety settings. HarmCategory/HarmBlockThreshold missing?")
610
+ return None, f"Sorry, an internal error occurred configuring the AI service ({model_name})."
611
 
612
  try:
613
  logger.debug(f"[Gemini {model_name}] Initializing model {model_name}")
 
649
  return None, f"Sorry, there was an issue processing the response from the AI service ({model_name})."
650
  except Exception as e:
651
  logger.error(f"[Gemini {model_name}] Unexpected error during Gemini API call: {e}", exc_info=True);
652
+ # Check if it's the specific InvalidArgument error we were seeing
653
+ if isinstance(e, google.api_core.exceptions.InvalidArgument) and "safety_settings" in str(e):
654
+ logger.error(f"[Gemini {model_name}] Safety settings InvalidArgument error persisted: {e}")
655
+ return None, f"Sorry, the AI service ({model_name}) reported an invalid configuration (Safety Settings). Please report this issue."
656
  error_msg = f"Sorry, an unexpected error occurred while using the AI service ({model_name})."
657
  return None, error_msg
658
 
659
+
660
  async def _call_openrouter(text: str, summary_type: str) -> Tuple[Optional[str], Optional[str]]:
661
  """Internal function to call OpenRouter API (Final Fallback - DeepSeek V3 Free). Returns (summary, error_message)."""
662
  global OPENROUTER_API_KEY, OPENROUTER_DEEPSEEK_MODEL, _openrouter_fallback_enabled
 
766
  errors["GeminiFlash"] = "Service disabled/unavailable."
767
 
768
  # --- Attempt 3: Gemini 2.0 Flash ---
769
+ if _gemini_api_enabled and errors["GeminiFlash"] is None: # Check if Gemini API is enabled AND wasn't already marked as failed/skipped
770
  logger.info(f"[Summary Generation] Attempting 3: Gemini ({GEMINI_FLASH_MODEL})")
771
  summary, errors["GeminiFlash"] = await _call_gemini(text, summary_type, GEMINI_FLASH_MODEL)
772
  if summary:
 
774
  return summary
775
  else:
776
  logger.warning(f"[Summary Generation] Gemini 2.0 Flash failed. Error: {errors['GeminiFlash']}. Proceeding to OpenRouter DeepSeek V3.")
777
+ elif errors["GeminiFlash"] is None: # Only log skip message if it wasn't already marked as disabled
778
  logger.warning("[Summary Generation] Skipping Gemini 2.0 Flash (API was disabled).")
779
  errors["GeminiFlash"] = "Service disabled/unavailable."
780
 
 
1131
 
1132
  async def telegram_webhook(request: Request) -> Response:
1133
  """Handles incoming updates from Telegram."""
1134
+ global WEBHOOK_SECRET, ptb_app # Ensure ptb_app is accessible
1135
  if not ptb_app: logger.error("Webhook received but PTB application not initialized."); return PlainTextResponse('Bot not initialized', status_code=503)
1136
  if not ptb_app.running: logger.warning("Webhook received but PTB application not running."); return PlainTextResponse('Bot not running, cannot process update', status_code=503)
1137
  if WEBHOOK_SECRET:
 
1142
  logger.debug(f"Processing update_id: {update.update_id} via webhook"); await ptb_app.process_update(update)
1143
  return Response(status_code=200)
1144
  except json.JSONDecodeError: logger.error("Webhook received invalid JSON."); return PlainTextResponse('Bad Request: Invalid JSON', status_code=400)
1145
+ except Exception as e: logger.error(f"Error processing webhook update: {e}", exc_info=True); return Response(status_code=200) # Return 200 to TG even if processing fails, to avoid retries
1146
 
1147
  # --- Starlette App Definition ---
1148
  app = Starlette(
 
1158
  # --- Development Server (if run directly) ---
1159
  if __name__ == '__main__':
1160
  import uvicorn
1161
+ # Need to import google.api_core.exceptions for the explicit check in _call_gemini error handling
1162
+ try:
1163
+ import google.api_core.exceptions
1164
+ except ImportError:
1165
+ pass # It's already handled by _gemini_sdk_available flag
1166
+
1167
  logger.warning("Running in development mode using Uvicorn directly - NOT for production!")
1168
  log_level = os.environ.get("LOGGING_LEVEL", "info").lower()
1169
  local_port = int(os.environ.get('PORT', 8080))