from flask import Flask, render_template, request
import pandas as pd
import folium
from folium.plugins import HeatMap, MarkerCluster, Fullscreen
from folium.raster_layers import TileLayer
from datetime import datetime, timedelta, timezone
import google.generativeai as genai
import os
import requests
import toml  # Fallback for reading .streamlit/secrets.toml
# --- Configuration ---
DEFAULT_MIN_LAT_INDIA = 6.0
DEFAULT_MAX_LAT_INDIA = 38.0
DEFAULT_MIN_LON_INDIA = 68.0
DEFAULT_MAX_LON_INDIA = 98.0
DEFAULT_REGION_NAME_INDIA = "India & Surrounding"
TSUNAMI_MAG_THRESHOLD = 6.8
TSUNAMI_DEPTH_THRESHOLD = 70
OFFSHORE_KEYWORDS = ["sea", "ocean", "off the coast", "ridge", "trench", "gulf", "bay", "islands region", "strait"]
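# Note: the tsunami-risk flag assigned later in index() is a deliberately simple
# heuristic: an event is marked as a potential tsunami risk only when its magnitude
# is at least TSUNAMI_MAG_THRESHOLD, its depth is at most TSUNAMI_DEPTH_THRESHOLD
# (in km), and its place description contains one of the OFFSHORE_KEYWORDS.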
TECTONIC_PLATES_URL = "https://raw.githubusercontent.com/fraxen/tectonicplates/master/GeoJSON/PB2002_boundaries.json"
USGS_API_URL = "https://earthquake.usgs.gov/fdsnws/event/1/query"
# --- Load API Key ---
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
if not GEMINI_API_KEY:
    try:
        secrets_path = os.path.join(os.path.dirname(__file__), '.streamlit', 'secrets.toml')
        if os.path.exists(secrets_path):
            secrets = toml.load(secrets_path)
            GEMINI_API_KEY = secrets.get("GEMINI_API_KEY")
    except Exception as e:
        print(f"Could not load secrets.toml: {e}")
        GEMINI_API_KEY = None
if not GEMINI_API_KEY:
    print("WARNING: GEMINI_API_KEY not found. AI features will be disabled.")
# --- API Functions ---
def fetch_earthquake_data(starttime, endtime, min_mag, min_lat, max_lat, min_lon, max_lon, source="USGS"):
    print(f"Fetching data from {source} from {starttime.strftime('%Y-%m-%d %H:%M')} "
          f"to {endtime.strftime('%Y-%m-%d %H:%M')} for Mag >= {min_mag}...")
    params = {
        'format': 'geojson',
        'starttime': starttime.strftime('%Y-%m-%dT%H:%M:%S'),
        'endtime': endtime.strftime('%Y-%m-%dT%H:%M:%S'),
        'minlatitude': min_lat,
        'maxlatitude': max_lat,
        'minlongitude': min_lon,
        'maxlongitude': max_lon,
        'minmagnitude': min_mag,
        'orderby': 'time'
    }
    try:
        response = requests.get(USGS_API_URL, params=params, timeout=30)
        response.raise_for_status()
        data = response.json()
        features = data.get('features', [])
        earthquakes = []
        for feature in features:
            properties = feature.get('properties', {})
            geometry = feature.get('geometry', {})
            coordinates = geometry.get('coordinates', [None, None, None])
            earthquakes.append({
                'id': feature.get('id'),
                'magnitude': properties.get('mag'),
                'place': properties.get('place'),
                'time': pd.to_datetime(properties.get('time'), unit='ms', utc=True),
                'url': properties.get('url'),
                'longitude': coordinates[0],
                'latitude': coordinates[1],
                'depth': coordinates[2]
            })
        df = pd.DataFrame(earthquakes)
        if not df.empty:
            df = df.sort_values(by='time', ascending=False)
        print(f"Fetched {len(df)} earthquakes from {source}.")
        return df
    except requests.exceptions.RequestException as e:
        print(f"Error fetching data from {source}: {e}")
        return pd.DataFrame()
    except Exception as e:
        print(f"An unexpected error occurred during data fetching: {e}")
        return pd.DataFrame()
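# Example call (a sketch; the date range and bounding box are illustrative only):
#   df = fetch_earthquake_data(datetime(2024, 1, 1, tzinfo=timezone.utc),
#                              datetime(2024, 1, 8, tzinfo=timezone.utc),
#                              4.5, 6.0, 38.0, 68.0, 98.0)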
def fetch_tectonic_plates_data(url):
    print("Fetching tectonic plate boundaries data...")
    try:
        response = requests.get(url, timeout=30)
        response.raise_for_status()
        print("Tectonic plate data fetched.")
        return response.json()
    except requests.exceptions.RequestException as e:
        print(f"Error fetching tectonic plate data: {e}")
        return None
# --- Helper Functions ---
def is_offshore(place_description, depth_km):
    if place_description is None or depth_km is None:
        return False
    place_lower = str(place_description).lower()
    for keyword in OFFSHORE_KEYWORDS:
        if keyword in place_lower:
            return True
    return False

def get_marker_color_by_magnitude(magnitude):
    if magnitude is None:
        return 'gray'
    if magnitude < 4.0:
        return 'green'
    elif magnitude < 5.0:
        return 'blue'
    elif magnitude < 6.0:
        return 'orange'
    elif magnitude < 7.0:
        return 'red'
    else:
        return 'darkred'
# --- Gemini LLM Function ---
def get_gemini_interpretation(api_key, data_summary_prompt):
    if not api_key:
        return "API Key not configured for Gemini."
    try:
        genai.configure(api_key=api_key)
        model = genai.GenerativeModel('gemini-1.5-flash-latest')
        response = model.generate_content(data_summary_prompt)
        return response.text
    except Exception as e:
        print(f"Error communicating with Gemini API: {e}")
        return "Could not retrieve interpretation from AI model."
app = Flask(__name__)

predefined_regions_dict = {
    "India & Surrounding": (DEFAULT_MIN_LAT_INDIA, DEFAULT_MAX_LAT_INDIA, DEFAULT_MIN_LON_INDIA, DEFAULT_MAX_LON_INDIA),
    "Indian Ocean Region (Tsunami Focus)": (-20, 35, 40, 120),
    "Northern India (Himalayan Belt)": (25, 38, 70, 98),
    "Andaman & Nicobar Region": (5, 15, 90, 95),
    "Global (Significant Quakes)": (-60, 60, -180, 180)
}
def get_default_configs_for_region(region_name):
    if region_name == "Global (Significant Quakes)":
        return {"days_historical": 730, "min_magnitude_historical": 4.5, "days_recent": 7,
                "min_magnitude_recent": 4.0, "alert_threshold_magnitude": 5.0,
                "show_tectonic_plates": True, "enable_ai_interpretation": bool(GEMINI_API_KEY)}
    elif region_name == "Indian Ocean Region (Tsunami Focus)":
        return {"days_historical": 730, "min_magnitude_historical": 4.0, "days_recent": 7,
                "min_magnitude_recent": 3.5, "alert_threshold_magnitude": 4.5,
                "show_tectonic_plates": True, "enable_ai_interpretation": bool(GEMINI_API_KEY)}
    else:
        return {"days_historical": 730, "min_magnitude_historical": 3.0, "days_recent": 7,
                "min_magnitude_recent": 2.5, "alert_threshold_magnitude": 3.5,
                "show_tectonic_plates": True, "enable_ai_interpretation": bool(GEMINI_API_KEY)}
@app.route('/', methods=['GET', 'POST'])
def index():
    map_html = None
    historical_df = pd.DataFrame()
    recent_df = pd.DataFrame()
    significant_quakes = []
    tsunami_potential_events = []
    ai_interpretation = None
    initial_load = True
    current_config = {}

    if request.method == 'POST':
        initial_load = False
        current_config['selected_region_name'] = request.form.get('selected_region_name', DEFAULT_REGION_NAME_INDIA)
        # Load defaults for the selected region first, then override with form values
        defaults = get_default_configs_for_region(current_config['selected_region_name'])
        current_config.update(defaults)
        current_config['days_historical'] = int(request.form.get('days_historical', defaults['days_historical']))
        current_config['min_magnitude_historical'] = float(request.form.get('min_magnitude_historical', defaults['min_magnitude_historical']))
        current_config['days_recent'] = int(request.form.get('days_recent', defaults['days_recent']))
        current_config['min_magnitude_recent'] = float(request.form.get('min_magnitude_recent', defaults['min_magnitude_recent']))
        current_config['alert_threshold_magnitude'] = float(request.form.get('alert_threshold_magnitude', defaults['alert_threshold_magnitude']))
        current_config['show_tectonic_plates'] = request.form.get('show_tectonic_plates') == 'True'
        current_config['enable_ai_interpretation'] = request.form.get('enable_ai_interpretation') == 'True'
    else:  # GET
        initial_load = True
        current_config['selected_region_name'] = DEFAULT_REGION_NAME_INDIA
        defaults = get_default_configs_for_region(DEFAULT_REGION_NAME_INDIA)
        current_config.update(defaults)
    if request.method == 'POST':  # Process data only on POST
        min_lat, max_lat, min_lon, max_lon = predefined_regions_dict[current_config['selected_region_name']]
        end_time_global = datetime.now(timezone.utc)
        start_time_historical = end_time_global - timedelta(days=current_config['days_historical'])
        start_time_recent = end_time_global - timedelta(days=current_config['days_recent'])
        historical_df = fetch_earthquake_data(start_time_historical, end_time_global, current_config['min_magnitude_historical'], min_lat, max_lat, min_lon, max_lon)
        recent_df = fetch_earthquake_data(start_time_recent, end_time_global, current_config['min_magnitude_recent'], min_lat, max_lat, min_lon, max_lon)

        map_center_lat = (min_lat + max_lat) / 2
        map_center_lon = (min_lon + max_lon) / 2
        if current_config['selected_region_name'] == "Global (Significant Quakes)":
            initial_zoom = 2
        elif abs(max_lat - min_lat) > 30 or abs(max_lon - min_lon) > 30:
            initial_zoom = 3
        else:
            initial_zoom = 4 if abs(max_lat - min_lat) > 15 or abs(max_lon - min_lon) > 15 else 5

        m = folium.Map(location=[map_center_lat, map_center_lon], zoom_start=initial_zoom, tiles=None)
        TileLayer(tiles='https://{s}.tile.opentopomap.org/{z}/{x}/{y}.png', attr='OpenTopoMap', name='OpenTopoMap (Terrain)', show=True).add_to(m)
        TileLayer("OpenStreetMap", name="OpenStreetMap", show=False).add_to(m)

        if current_config['show_tectonic_plates']:
            plate_data = fetch_tectonic_plates_data(TECTONIC_PLATES_URL)
            if plate_data:
                folium.GeoJson(
                    plate_data,
                    name="Tectonic Plates",
                    style_function=lambda x: {'color': '#E04A00', 'weight': 3, 'opacity': 0.9},
                    tooltip=folium.GeoJsonTooltip(fields=['Name'], aliases=['Plate Name:'], localize=True)
                ).add_to(m)

        if not historical_df.empty:
            heat_data_list = [
                [row_h['latitude'], row_h['longitude'], row_h['magnitude']]
                if pd.notnull(row_h['magnitude'])
                else [row_h['latitude'], row_h['longitude']]
                for _, row_h in historical_df.iterrows()
                if pd.notnull(row_h['latitude']) and pd.notnull(row_h['longitude'])
            ]
            if heat_data_list:
                HeatMap(heat_data_list, name="Historical Heatmap").add_to(m)

        if not recent_df.empty:
            mc = MarkerCluster(name="Recent Earthquakes").add_to(m)
            for _, row_r in recent_df.iterrows():
                if pd.notnull(row_r['latitude']) and pd.notnull(row_r['longitude']) and pd.notnull(row_r['magnitude']):
                    mag = f"{row_r['magnitude']:.1f}"
                    depth = f"{row_r['depth']:.1f} km" if pd.notnull(row_r['depth']) else "N/A"
                    color = get_marker_color_by_magnitude(row_r['magnitude'])
                    popup = (f"<b>RECENT</b><br>M{mag} at {row_r['place']}<br>"
                             f"{row_r['time'].strftime('%Y-%m-%d %H:%M')}<br>Depth: {depth}<br>"
                             f"<a href='{row_r['url']}' target='_blank'>USGS</a>")
                    folium.CircleMarker(
                        location=[row_r['latitude'], row_r['longitude']],
                        radius=max(3, row_r['magnitude'] * 1.8),
                        popup=folium.Popup(popup, max_width=300),
                        color=color, fill=True, fill_color=color, fill_opacity=0.7,
                        tooltip=f"M{mag}"
                    ).add_to(mc)

        folium.LayerControl().add_to(m)
        Fullscreen().add_to(m)
        map_html = m._repr_html_()
        if not recent_df.empty:
            recent_alerts = recent_df.copy()
            recent_alerts['magnitude'] = pd.to_numeric(recent_alerts['magnitude'], errors='coerce')
            recent_alerts = recent_alerts.dropna(subset=['magnitude'])
            temp_sig_q = recent_alerts[recent_alerts['magnitude'] >= current_config['alert_threshold_magnitude']]
            for _, sq_row in temp_sig_q.iterrows():
                q_info = sq_row.to_dict()
                q_info['tsunami_risk_info'] = ""
                if pd.notnull(sq_row.get('magnitude')) and pd.notnull(sq_row.get('depth')) and pd.notnull(sq_row.get('place')):
                    if (sq_row['magnitude'] >= TSUNAMI_MAG_THRESHOLD
                            and sq_row['depth'] <= TSUNAMI_DEPTH_THRESHOLD
                            and is_offshore(sq_row['place'], sq_row['depth'])):
                        q_info['tsunami_risk_info'] = "POTENTIAL TSUNAMI RISK"
                        tsunami_potential_events.append(q_info)
                significant_quakes.append(q_info)
        if current_config['enable_ai_interpretation'] and GEMINI_API_KEY:
            summary_ai = (f"Region: {current_config['selected_region_name']}\n"
                          f"Hist: {current_config['days_historical']}d, M>={current_config['min_magnitude_historical']}, Tot:{len(historical_df)}\n")
            if not historical_df.empty and 'magnitude' in historical_df and not historical_df['magnitude'].dropna().empty:
                summary_ai += f"Lgst hist.M: {historical_df['magnitude'].dropna().max():.1f}\n"
            summary_ai += f"Recent: {current_config['days_recent']}d, M>={current_config['min_magnitude_recent']}, Tot:{len(recent_df)}\n"
            if not recent_df.empty and 'magnitude' in recent_df and not recent_df['magnitude'].dropna().empty:
                summary_ai += f"Lgst rec.M: {recent_df['magnitude'].dropna().max():.1f}, Avg rec.M: {recent_df['magnitude'].dropna().mean():.1f}\n"
            summary_ai += f"Alerts (M>={current_config['alert_threshold_magnitude']}): {len(significant_quakes)}\n"
            if significant_quakes:
                summary_ai += "Top sig. quakes:\n"
                for r_s in significant_quakes[:2]:
                    summary_ai += f" - M {r_s.get('magnitude', 0.0):.1f} at {r_s.get('place', 'N/A')} on {r_s.get('time').strftime('%Y-%m-%d')}\n"
            summary_ai += f"Tsunami risk events: {len(tsunami_potential_events)}\n"
            full_prompt_ai = f"""You are a seismic data analyst. Based on this summary for '{current_config['selected_region_name']}', give a concise interpretation (max 3-4 paragraphs):
1. Overall seismic activity (recent vs. hist.).
2. Notable patterns/clusters in recent data.
3. Areas more active recently.
4. General outlook (NO PREDICTIONS).
IMPORTANT: Start with a disclaimer.
Data: {summary_ai}Interpretation:"""
            ai_interpretation = get_gemini_interpretation(GEMINI_API_KEY, full_prompt_ai)
    current_time_for_render = datetime.now(timezone.utc)
    return render_template('index.html',
                           map_html=map_html,
                           historical_df=historical_df,  # For potential future use in the template
                           recent_df=recent_df,  # For the Top 5 table
                           significant_quakes=significant_quakes,
                           tsunami_potential_events=tsunami_potential_events,
                           ai_interpretation=ai_interpretation,
                           predefined_regions_list=predefined_regions_dict.keys(),
                           now=current_time_for_render,
                           current_config=current_config,
                           initial_load=initial_load)
if __name__ == '__main__':
    app.run(debug=True)
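# Example of running the app locally (a minimal sketch; assumes this file is saved
# as app.py and that a templates/index.html exists alongside it):
#
#   export GEMINI_API_KEY="your-key-here"   # optional; AI features are disabled without it
#   python app.py
#
# Flask's development server then serves the dashboard at http://127.0.0.1:5000/.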