Paras Shah committed
Commit 5c11e48 · Parent(s): bf58072

Beautification and add info

Files changed:
- .streamlit/config.toml +1 -1
- README.md +2 -2
- app.py +113 -21
- static/DBH_info.JPG +0 -0
- static/canopy_info.JPG +0 -0
- static/sidebar.png +0 -0
.streamlit/config.toml
CHANGED
@@ -162,7 +162,7 @@ enableWebsocketCompression = false
 # Enable serving files from a `static` directory in the running app's
 # directory.
 # Default: false
-enableStaticServing = false
+enableStaticServing = true
 
 # Server certificate file for connecting via HTTPS.
 # Must be set at the same time as "server.sslKeyFile".
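With server.enableStaticServing turned on, Streamlit serves files placed in the app's `static` directory over HTTP; per the Streamlit docs they are exposed under the relative URL prefix `app/static/`. Below is a minimal sketch (not the repo's code) of how the `sidebar.png` added in this commit could be referenced from custom CSS, assuming that documented prefix; verify the prefix against the Streamlit version in use.

import streamlit as st

# Hedged sketch: with server.enableStaticServing = true, a file checked in at
# static/sidebar.png is served at the relative URL "app/static/sidebar.png"
# (assumption based on Streamlit's static-serving docs, not this repo's code).
st.markdown(
    """
    <style>
    [data-testid="stSidebar"] {
        background-image: url("app/static/sidebar.png");
        background-size: cover;
    }
    </style>
    """,
    unsafe_allow_html=True,
)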
README.md
CHANGED
@@ -1,6 +1,6 @@
 ---
 title: Tree Species Classification
-emoji:
+emoji: 🌲
 colorFrom: pink
 colorTo: green
 sdk: streamlit
@@ -10,4 +10,4 @@ pinned: false
 short_description: Individual tree species identification
 ---
 
-
+This project predicts a tree's species from its point cloud data and also calculates tree inventory metrics such as tree height, canopy volume, and DBH.
app.py
CHANGED
@@ -18,32 +18,106 @@ with st.spinner("Loading PointNet++ model..."):
     classifier = pn2.get_model(num_class=4, normal_channel=False)
     classifier.load_state_dict(checkpoint['model_state_dict'])
     classifier.eval()
+
+st.sidebar.markdown(
+    body=
+    "<div style='text-align: justify;'>The species <strong>Pinus sylvestris "
+    "(Scots Pine), Fagus sylvatica (European Beech), Picea abies (Norway Spruce), "
+    "and Betula pendula (Silver Birch)</strong> are native to Europe and parts "
+    "of Asia but are also found in India (parts of Himachal Pradesh, "
+    "Uttarakhand, Jammu and Kashmir, Sikkim and Arunachal Pradesh). "
+    "These temperate species, typically thriving in boreal and montane ecosystems, "
+    "are occasionally introduced in cooler Indian regions like the Himalayan "
+    "foothills for afforestation or experimental forestry, where climatic "
+    "conditions are favourable. However, their growth and ecological interactions "
+    "in India may vary significantly due to the region's unique biodiversity "
+    "and environmental factors.<br><br>"
+    "This AI-powered application employs the PointNet++ deep learning "
+    "architecture, optimized for processing 3D point cloud data from "
+    "individual <code>.laz</code> files (fused aerial and terrestrial LiDAR), "
+    "to classify tree species into four classes (<strong>Pinus sylvestris, "
+    "Fagus sylvatica, Picea abies, and Betula pendula</strong>) "
+    "with associated confidence scores. Additionally, it calculates critical "
+    "metrics such as Diameter at Breast Height (DBH), actual height and "
+    "customizable canopy volume, enabling precise refinement of predictions "
+    "and analyses. By integrating species-specific and volumetric insights, "
+    "the tool enhances ecological research workflows, facilitating data-driven "
+    "decision-making.</div>",
+    unsafe_allow_html=True,
+)
+st.markdown(
+    """
+    <style>
+    [data-testid="stSidebar"] {
+        background-image: url("static/sidebar.png");
+        background-size: cover;
+        background-position: center;
+    }
+    </style>
+    """,
+    unsafe_allow_html=True
+)
 
-st.
+st.header("ArborSphere")
+st.subheader("Tree Identity and Biometrics")
 
 uploaded_file = st.file_uploader(
     label="Upload Point Cloud Data",
     type=['laz', 'las', 'pcd'],
     help="Please upload trees with ground points removed"
 )
-Z_THRESHOLD = st.slider(
-    label="Z-Threshold(%)",
-    min_value=10,
-    max_value=90,
-    value=50,
-    step=1,
-    help="Please select a Z-Threshold for canopy volume calculation"
-)
-DBH_HEIGHT = st.slider(
-    label="DBH Height(m)",
-    min_value=1.3,
-    max_value=1.4,
-    value=1.4,
-    step=0.01,
-    help="Enter height used for DBH calculation"
-)
-proceed = None
 
+col1, col2 = st.columns([2, 2])
+with col1:
+    st.image("static/canopy_info.jpg")
+with col2:
+    CANOPY_VOLUME = st.slider(
+        label="Canopy Volume in % (Z)",
+        min_value=10,
+        max_value=90,
+        value=70,
+        step=1,
+        help=
+        "Adjust the Z-threshold value to calculate the canopy volume "
+        "within the specified limits; it uses the Quickhull and DBSCAN algorithms. "
+        "The Quickhull algorithm computes the convex hull of a set of points "
+        "by identifying extreme points to form an initial boundary and recursively "
+        "refining it by adding the farthest points until all points lie within the "
+        "convex boundary. It uses a divide-and-conquer approach, similar to QuickSort. "
+        "DBSCAN (Density-Based Spatial Clustering of Applications with Noise) is a "
+        "density-based clustering algorithm that groups densely packed points within "
+        "a specified distance 'eps' and a minimum number of points 'minpoints', while "
+        "treating sparse points as noise. It effectively identifies arbitrarily shaped "
+        "clusters and handles outliers, making it suitable for spatial data and anomaly detection."
+    )
+
+col1, col2 = st.columns([2, 2])
+with col1:
+    st.image("static/DBH_info.jpg")
+with col2:
+    DBH_HEIGHT = st.slider(
+        label="DBH (Diameter at Breast Height, in metres) (H)",
+        min_value=1.3,
+        max_value=1.4,
+        value=1.4,
+        step=0.01,
+        help=
+        "Adjust to calculate the DBH value within the specified limits; "
+        "it uses the least-squares circle fitting method with Levenberg-Marquardt "
+        "optimization. "
+        "The least-squares circle fitting method finds the best-fitting circle "
+        "for a set of 2D points by minimizing the sum of squared distances "
+        "between each point and the circle's circumference. "
+        "Levenberg-Marquardt optimization is used to fit models (like circles) "
+        "to point cloud data by minimizing the error between the model and the "
+        "actual points."
+    )
+
+proceed = None
 if uploaded_file:
     try:
         with st.spinner("Reading point cloud file..."):
@@ -72,7 +146,7 @@ if proceed:
     z_max = np.max(points[:, 2])
     height = z_max - z_min
 
-    canopy_volume, canopy_points = calc_canopy_volume(points,
+    canopy_volume, canopy_points = calc_canopy_volume(points, CANOPY_VOLUME, height, z_min)
 
     with st.spinner("Visualizing point cloud..."):
         fig = go.Figure()
@@ -120,9 +194,27 @@ if proceed:
             yaxis_title="Y",
            zaxis_title="Z",
            aspectmode='data'
-        )
+        ),
+        showlegend=False
     )
-    st.
+    col1, col2, col3 = st.columns([1, 3, 1])
+    with col2:
+        st.markdown("""
+        <style>
+        .centered-plot {
+            text-align: center;
+        }
+        </style>
+        """, unsafe_allow_html=True)
+        st.plotly_chart(fig, use_container_width=True)
+    hide_st_style = """
+        <style>
+        #MainMenu {visibility: hidden;}
+        footer {visibility: hidden;}
+        header {visibility: hidden;}
+        </style>
+        """
+    st.markdown(hide_st_style, unsafe_allow_html=True)
 
 
     with st.spinner("Running inference..."):
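The canopy-volume slider help above describes a Quickhull convex hull computed over DBSCAN-filtered canopy points. The following is a minimal sketch of that idea, matching the call signature calc_canopy_volume(points, CANOPY_VOLUME, height, z_min) seen in this diff; it is not the repo's actual implementation, and the function name, eps, and min_samples values are illustrative assumptions.

import numpy as np
from scipy.spatial import ConvexHull
from sklearn.cluster import DBSCAN

def canopy_volume_sketch(points, z_threshold_pct, height, z_min,
                         eps=0.5, min_samples=10):
    # Keep only points above the chosen Z-threshold (percentage of tree height).
    z_cut = z_min + height * z_threshold_pct / 100.0
    canopy = points[points[:, 2] >= z_cut]

    # DBSCAN drops sparse outliers (label -1) before the hull is fitted.
    labels = DBSCAN(eps=eps, min_samples=min_samples).fit_predict(canopy)
    canopy = canopy[labels != -1]

    # Quickhull (via Qhull, used by scipy.spatial.ConvexHull) gives the convex
    # hull of the remaining canopy points; its volume approximates canopy volume.
    hull = ConvexHull(canopy)
    return hull.volume, canopy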
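Likewise, the DBH slider help describes least-squares circle fitting solved with Levenberg-Marquardt. A hedged sketch of that approach using SciPy's least_squares with method='lm' follows; the trunk-slice half-thickness, initial guess, and function name are assumptions for illustration, not the repo's code.

import numpy as np
from scipy.optimize import least_squares

def dbh_sketch(points, z_min, dbh_height=1.4, slice_halfwidth=0.05):
    # Take a thin horizontal slice of trunk points around breast height.
    z_lo = z_min + dbh_height - slice_halfwidth
    z_hi = z_min + dbh_height + slice_halfwidth
    slice_xy = points[(points[:, 2] >= z_lo) & (points[:, 2] <= z_hi)][:, :2]

    def residuals(params, xy):
        cx, cy, r = params
        # Signed distance of each point from the candidate circle's circumference.
        return np.hypot(xy[:, 0] - cx, xy[:, 1] - cy) - r

    # Initialise at the slice centroid with the mean radial distance,
    # then refine with Levenberg-Marquardt.
    cx0, cy0 = slice_xy.mean(axis=0)
    r0 = np.hypot(slice_xy[:, 0] - cx0, slice_xy[:, 1] - cy0).mean()
    fit = least_squares(residuals, x0=[cx0, cy0, r0], args=(slice_xy,), method='lm')

    # DBH is the fitted circle's diameter.
    return 2.0 * fit.x[2]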
static/DBH_info.JPG
ADDED
static/canopy_info.JPG
ADDED
static/sidebar.png
ADDED