import os
import json
from datetime import datetime
from functools import reduce
from io import StringIO, BytesIO

import branca.colormap as cm
import ee
import folium
import geemap
import geemap.foliumap as gee_folium
import geopandas as gpd
import kml2geojson
import leafmap.foliumap as leaf_folium
import numpy as np
import pandas as pd
import plotly.express as px
import pyproj
import requests
import streamlit as st
from shapely.ops import transform


def force_stop():
    """Show the credits footer, then halt the Streamlit script."""
    show_credits()
    st.stop()


def one_time_setup():
    # Earth Engine reads credentials from this well-known path. If it is
    # missing (e.g. on a fresh deployment), fall back to the "EE" environment
    # variable and materialize it as the credentials file.
    credentials_path = os.path.expanduser("~/.config/earthengine/credentials")
    if not os.path.exists(credentials_path):
        if "EE" in os.environ:
            ee_credentials = os.environ.get("EE")
            os.makedirs(os.path.dirname(credentials_path), exist_ok=True)
            with open(credentials_path, "w") as f:
                f.write(ee_credentials)
        else:
            raise ValueError(
                f"Earth Engine credentials not found at {credentials_path} or in the environment variable 'EE'"
            )

    ee.Initialize()
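# Deployment sketch (an assumption, not part of the app's documented setup):
# the "EE" variable can be populated from a machine where
# `earthengine authenticate` has already been run, e.g.
#   export EE="$(cat ~/.config/earthengine/credentials)"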


def show_credits():
    # Footer crediting the developers and the supporting department.
    st.write(
        """
        <div style="display: flex; justify-content: center; align-items: center; margin-top: 20px;">
            <p style="text-align: center;">Developed by <a href="https://sustainability-lab.github.io/">Sustainability Lab</a>, <a href="https://www.iitgn.ac.in/">IIT Gandhinagar</a></p>
        </div>
        <div style="display: flex; justify-content: center; align-items: center;">
            <p style="text-align: center;">Supported by <a href="https://forests.gujarat.gov.in/">Gujarat Forest Department</a></p>
        </div>
        """,
        unsafe_allow_html=True,
    )


def get_gdf_from_file_url(file_url):
    # `file_url` is either a URL string or an uploaded file-like object
    # (e.g. a Streamlit UploadedFile).
    if isinstance(file_url, str):
        # Normalize Google Drive share links into direct-download links.
        if file_url.startswith("https://drive.google.com/file/d/"):
            ID = file_url.replace("https://drive.google.com/file/d/", "").split("/")[0]
            file_url = f"https://drive.google.com/uc?id={ID}"
        elif file_url.startswith("https://drive.google.com/open?id="):
            ID = file_url.replace("https://drive.google.com/open?id=", "")
            file_url = f"https://drive.google.com/uc?id={ID}"

        response = requests.get(file_url)
        response.raise_for_status()
        bytes_data = BytesIO(response.content)
        string_data = response.text
    else:
        bytes_data = BytesIO(file_url.getvalue())
        string_data = file_url.getvalue().decode("utf-8")

    # KML files start with an XML declaration; anything else is handed to
    # geopandas, which detects GeoJSON and other vector formats itself.
    if string_data.startswith("<?xml"):
        geojson = kml2geojson.convert(bytes_data)
        features = geojson[0]["features"]
        input_gdf = gpd.GeoDataFrame.from_features(features, crs="EPSG:4326")
    else:
        input_gdf = gpd.read_file(bytes_data)

    return input_gdf
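# Usage sketch (the Drive ID is a placeholder, not a real file):
#   gdf = get_gdf_from_file_url("https://drive.google.com/file/d/<FILE_ID>/view")
#   gdf = get_gdf_from_file_url(st.file_uploader("Upload KML/GeoJSON"))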


def find_best_epsg(geometry):
    # Match the polygon's centroid (assumed to be in EPSG:4326 lon/lat)
    # against the area of use of each candidate CRS.
    if geometry.geom_type == "Polygon":
        centroid = geometry.centroid
    else:
        st.error("Geometry is not a Polygon!")
        st.stop()

    # Projected CRS candidates for India (EPSG:7755-7787); the first CRS
    # whose area of use contains the centroid is returned. If none matches,
    # the function falls through and returns None.
    common_epsg_codes = [
        7756, 7757, 7758, 7759, 7760, 7761, 7762, 7763, 7764, 7765, 7766,
        7767, 7768, 7769, 7770, 7772, 7773, 7774, 7775, 7776, 7777, 7778,
        7779, 7780, 7781, 7782, 7783, 7784, 7785, 7786, 7787, 7771, 7755,
    ]

    for epsg in common_epsg_codes:
        crs = pyproj.CRS.from_epsg(epsg)
        area_of_use = crs.area_of_use.bounds  # (west, south, east, north)
        if (area_of_use[0] <= centroid.x <= area_of_use[2]) and (area_of_use[1] <= centroid.y <= area_of_use[3]):
            return epsg


def daterange_str_to_dates(daterange_str):
    # Dates inside the string use "/" separators (see daterange_dates_to_str),
    # so splitting on "-" is unambiguous.
    start_date, end_date = daterange_str.split("-")
    start_date = pd.to_datetime(start_date)
    end_date = pd.to_datetime(end_date)
    return start_date, end_date


def daterange_dates_to_str(start_date, end_date):
    return f"{start_date.strftime('%Y/%m/%d')}-{end_date.strftime('%Y/%m/%d')}"


def daterange_str_to_year(daterange_str):
    start_date, _ = daterange_str.split("-")
    year = pd.to_datetime(start_date).year
    return year
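# Round-trip sketch of the daterange helpers:
#   s = daterange_dates_to_str(pd.Timestamp("2023-01-01"), pd.Timestamp("2023-12-31"))
#   s                          # "2023/01/01-2023/12/31"
#   daterange_str_to_dates(s)  # (Timestamp("2023-01-01"), Timestamp("2023-12-31"))
#   daterange_str_to_year(s)   # 2023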


def shape_3d_to_2d(shape):
    # Drop the Z coordinate from 3D geometries (common in KML exports).
    if shape.has_z:
        return transform(lambda x, y, z: (x, y), shape)
    else:
        return shape


def preprocess_gdf(gdf):
    # Flatten 3D geometries, then repair invalid ones; buffer(0) is the
    # conventional fix for self-intersecting polygons.
    gdf["geometry"] = gdf["geometry"].apply(shape_3d_to_2d)
    gdf["geometry"] = gdf.buffer(0)
    return gdf


def to_best_crs(gdf):
    # Reproject to a metric CRS covering the (first) geometry, so downstream
    # areas and buffers are computed in metres.
    best_epsg_code = find_best_epsg(gdf["geometry"].iloc[0])
    gdf = gdf.to_crs(epsg=best_epsg_code)
    return gdf


def is_valid_polygon(geometry_gdf):
    geometry = geometry_gdf.geometry.item()
    return (geometry.geom_type == "Polygon") and (not geometry.is_empty)
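# Typical preprocessing chain (sketch; `uploaded_file` is hypothetical):
#   gdf = preprocess_gdf(get_gdf_from_file_url(uploaded_file))
#   if is_valid_polygon(gdf):
#       gdf = to_best_crs(gdf)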


def add_geometry_to_maps(map_list, geometry_gdf, buffer_geometry_gdf, opacity=0.0):
    # Draw the buffer first so the main geometry renders on top of it.
    for m in map_list:
        m.add_gdf(
            buffer_geometry_gdf,
            layer_name="Geometry Buffer",
            style_function=lambda x: {"color": "red", "fillOpacity": opacity, "fillColor": "red"},
        )
        m.add_gdf(
            geometry_gdf,
            layer_name="Geometry",
            style_function=lambda x: {"color": "blue", "fillOpacity": opacity, "fillColor": "blue"},
        )


def get_dem_slope_maps(ee_geometry, wayback_url, wayback_title):
    # Elevation map: SRTM DEM resampled to 10 m, rendered as 1 m contours.
    dem_map = gee_folium.Map(controls={"scale": "bottomleft"})
    dem_map.add_tile_layer(wayback_url, name=wayback_title, attribution="Esri")

    dem_layer = ee.Image("USGS/SRTMGL1_003")

    # SRTM is ~30 m; bilinear resampling to 10 m smooths the contours.
    target_resolution = 10
    dem_layer = dem_layer.resample("bilinear").reproject(crs="EPSG:4326", scale=target_resolution).clip(ee_geometry)

    # Snap elevations down to the nearest contour interval.
    terrain = ee.Algorithms.Terrain(dem_layer)
    contour_interval = 1
    contours = (
        terrain.select("elevation").subtract(terrain.select("elevation").mod(contour_interval)).rename("contours")
    )

    stats = contours.reduceRegion(reducer=ee.Reducer.minMax(), scale=target_resolution, maxPixels=1e13)
    max_value = stats.get("contours_max").getInfo()
    min_value = stats.get("contours_min").getInfo()
    vis_params = {"min": min_value, "max": max_value, "palette": ["blue", "green", "yellow", "red"]}
    dem_map.addLayer(contours, vis_params, "Contours")

    # Legend with four equal elevation bands matching the palette above.
    tick_size = int((max_value - min_value) / 4)
    dem_map.add_legend(
        title="Elevation (m)",
        legend_dict={
            "{}-{} m".format(min_value, min_value + tick_size): "#0000FF",
            "{}-{} m".format(min_value + tick_size, min_value + 2 * tick_size): "#00FF00",
            "{}-{} m".format(min_value + 2 * tick_size, min_value + 3 * tick_size): "#FFFF00",
            "{}-{} m".format(min_value + 3 * tick_size, max_value): "#FF0000",
        },
        position="bottomright",
        draggable=False,
    )

    # Slope map: per-pixel slope in degrees, derived from the same DEM.
    slope_map = gee_folium.Map(controls={"scale": "bottomleft"})
    slope_map.add_tile_layer(wayback_url, name=wayback_title, attribution="Esri")

    slope_layer = (
        ee.Terrain.slope(
            ee.Image("USGS/SRTMGL1_003").resample("bilinear").reproject(crs="EPSG:4326", scale=target_resolution)
        )
        .clip(ee_geometry)
        .rename("slope")
    )

    stats = slope_layer.reduceRegion(reducer=ee.Reducer.minMax(), scale=target_resolution, maxPixels=1e13)
    max_value = int(stats.get("slope_max").getInfo())
    min_value = int(stats.get("slope_min").getInfo())
    vis_params = {"min": min_value, "max": max_value, "palette": ["blue", "green", "yellow", "red"]}
    slope_map.addLayer(slope_layer, vis_params, "Slope Layer")

    tick_size = int((max_value - min_value) / 4)
    slope_map.add_legend(
        title="Slope (degrees)",
        legend_dict={
            "{}-{} deg".format(min_value, min_value + tick_size): "#0000FF",
            "{}-{} deg".format(min_value + tick_size, min_value + 2 * tick_size): "#00FF00",
            "{}-{} deg".format(min_value + 2 * tick_size, min_value + 3 * tick_size): "#FFFF00",
            "{}-{} deg".format(min_value + 3 * tick_size, max_value): "#FF0000",
        },
        position="bottomright",
        draggable=False,
    )
    return dem_map, slope_map
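# Usage sketch (assumes the caller already has an ee.Geometry and an Esri
# Wayback tile URL/title pair):
#   dem_map, slope_map = get_dem_slope_maps(ee_geometry, wayback_url, wayback_title)
#   dem_map.to_streamlit()
#   slope_map.to_streamlit()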


def add_indices(image, nir_band, red_band, blue_band, evi_vars):
    # Negated cloud probability, so qualityMosaic() can prefer the
    # least-cloudy pixels.
    neg_cloud = image.select("MSK_CLDPRB").multiply(-1).rename("Neg_MSK_CLDPRB")

    # Scale Sentinel-2 surface reflectance (0-10000) to 0-1.
    nir = image.select(nir_band).divide(10000)
    red = image.select(red_band).divide(10000)
    blue = image.select(blue_band).divide(10000)

    # NDVI = (NIR - RED) / (NIR + RED)
    numerator = nir.subtract(red)
    ndvi = numerator.divide(nir.add(red)).rename("NDVI").clamp(-1, 1)

    # EVI = G * (NIR - RED) / (NIR + C1*RED - C2*BLUE + L)
    denominator = nir.add(red.multiply(evi_vars["C1"])).subtract(blue.multiply(evi_vars["C2"])).add(evi_vars["L"])
    evi = numerator.divide(denominator).multiply(evi_vars["G"]).rename("EVI").clamp(-1, 1)

    # EVI2 = G * (NIR - RED) / (NIR + C*RED + L), the two-band EVI variant.
    evi2 = (
        numerator.divide(nir.add(evi_vars["L"]).add(red.multiply(evi_vars["C"])))
        .multiply(evi_vars["G"])
        .rename("EVI2")
        .clamp(-1, 1)
    )
    return image.addBands([neg_cloud, ndvi, evi, evi2])
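# Coefficient sketch for `add_indices` (an assumption for illustration; the
# app's actual values are defined by the caller). These are the standard
# MODIS-heritage EVI/EVI2 constants:
#   evi_vars = {"G": 2.5, "C1": 6.0, "C2": 7.5, "C": 2.4, "L": 1.0}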


def get_histogram(image, geometry, bins):
    # Pull every NDVI pixel value inside the geometry to the client...
    values = image.reduceRegion(reducer=ee.Reducer.toList(), geometry=geometry, scale=10, maxPixels=1e13).get("NDVI")
    values_array = np.array(values.getInfo())

    # ...and bin the values locally with numpy.
    hist, bin_edges = np.histogram(values_array, bins=bins)
    return hist, bin_edges
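# Sketch: NDVI histogram for a plot, rendered in Streamlit (assumes `mosaic`
# carries an "NDVI" band, e.g. from add_indices):
#   hist, bin_edges = get_histogram(mosaic, ee_geometry, bins=20)
#   st.bar_chart(pd.DataFrame({"count": hist}, index=bin_edges[:-1]))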


def process_date(
    daterange,
    satellite,
    veg_indices,
    satellites,
    buffer_ee_geometry,
    ee_feature_collection,
    buffer_ee_feature_collection,
    result_df,
):
    start_date, end_date = daterange
    daterange_str = daterange_dates_to_str(start_date, end_date)
    prefix = f"Processing {satellite} - {daterange_str}"
    try:
        attrs = satellites[satellite]
        collection = attrs["collection"]
        collection = collection.filterBounds(buffer_ee_geometry)
        collection = collection.filterDate(start_date, end_date)

        bucket = {}
        for veg_index in veg_indices:
            # Per-pixel mosaic keeping the observation with the highest index value.
            mosaic_veg_index = collection.qualityMosaic(veg_index)
            fc = geemap.zonal_stats(
                mosaic_veg_index, ee_feature_collection, scale=attrs["scale"], return_fc=True
            ).getInfo()
            mean_veg_index = fc["features"][0]["properties"][veg_index]
            bucket[veg_index] = mean_veg_index
            fc = geemap.zonal_stats(
                mosaic_veg_index, buffer_ee_feature_collection, scale=attrs["scale"], return_fc=True
            ).getInfo()
            buffer_mean_veg_index = fc["features"][0]["properties"][veg_index]
            bucket[f"{veg_index}_buffer"] = buffer_mean_veg_index
            bucket[f"{veg_index}_ratio"] = mean_veg_index / buffer_mean_veg_index
            bucket[f"mosaic_{veg_index}"] = mosaic_veg_index

        # Extra mosaics kept for visualization.
        bucket["mosaic_visual_max_ndvi"] = collection.qualityMosaic("NDVI")
        bucket["mosaic_visual_median"] = collection.median()
        bucket["image_visual_least_cloud"] = collection.sort("CLOUDY_PIXEL_PERCENTAGE").first()

        if satellite == "COPERNICUS/S2_SR_HARMONIZED":
            # Note: `fc` here is the buffer zonal stats from the last
            # vegetation index in the loop above.
            cloud_mask_probability = fc["features"][0]["properties"]["MSK_CLDPRB"] / 100
        else:
            cloud_mask_probability = None
        bucket["Cloud (0 to 1)"] = cloud_mask_probability
        result_df.loc[daterange_str, list(bucket.keys())] = list(bucket.values())
        count = collection.size().getInfo()
        suffix = f" - Processed {count} images"
        write_info(f"{prefix}{suffix}")
    except Exception as e:
        print(e)
        suffix = " - Imagery not available"
        write_info(f"{prefix}{suffix}")
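# Expected shape of the `satellites` mapping (sketch; the real bands, scales,
# and preprocessing are defined by the calling app):
#   satellites = {
#       "COPERNICUS/S2_SR_HARMONIZED": {
#           "collection": ee.ImageCollection("COPERNICUS/S2_SR_HARMONIZED"),
#           "scale": 10,
#       },
#   }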


def write_info(info, center_align=False):
    if center_align:
        st.write(f"<div style='text-align: center; color:#006400;'>{info}</div>", unsafe_allow_html=True)
    else:
        st.write(f"<span style='color:#006400;'>{info}</span>", unsafe_allow_html=True)