cathv committed
Commit 9c9d650 · verified · 1 Parent(s): 0ea816d

Upload test_neurips_2025.py

Files changed (1)
  test_neurips_2025.py +220 -0
test_neurips_2025.py ADDED
@@ -0,0 +1,220 @@
import ast

import datasets
import pandas as pd


class NeuripsConfig(datasets.BuilderConfig):
    """BuilderConfig for the SITTELLE benchmark configurations."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)


class NeuripsDataset(datasets.GeneratorBasedBuilder):
    """Dataset builder for the SITTELLE benchmark."""

    BUILDER_CONFIGS = [
        NeuripsConfig(
            name="Kenya",
            version=datasets.Version("1.0.0"),
            description="Hotspots in Kenya, with per-hotspot species encounter-rate targets",
        ),
        NeuripsConfig(
            name="South_Africa",
            version=datasets.Version("1.0.0"),
            description="Hotspots in South Africa, with per-hotspot species encounter-rate targets",
        ),
        NeuripsConfig(
            name="USA_Summer",
            version=datasets.Version("1.0.0"),
            description="Hotspots in the USA, with summer-season species encounter-rate targets",
        ),
        NeuripsConfig(
            name="USA_Winter",
            version=datasets.Version("1.0.0"),
            description="Hotspots in the USA, with winter-season species encounter-rate targets",
        ),
        NeuripsConfig(
            name="Species_ID",
            version=datasets.Version("1.0.0"),
            description="Tables linking each index of the target encounter-rate vector to a species, for each regional subset",
        ),
    ]

    DEFAULT_CONFIG_NAME = "Kenya"

    def _info(self):
        # All regional configs share the same per-hotspot schema; the CSV columns
        # include state, state_code, split, num_complete_checklists, target and geometry.
        if self.config.name in ("Kenya", "South_Africa", "USA_Summer", "USA_Winter"):
            features = datasets.Features({
                "hotspot_id": datasets.Value("string"),
                "hotspot_name": datasets.Value("string"),
                "lon": datasets.Value("float32"),
                "lat": datasets.Value("float32"),
                "county": datasets.Value("string"),
                "county_code": datasets.Value("string"),
                "state": datasets.Value("string"),
                "state_code": datasets.Value("string"),
                "sat_imagery_path": datasets.Value("string"),
                "environmental_path": datasets.Value("string"),
                "split": datasets.Value("string"),
                "num_complete_checklists": datasets.Value("int32"),
                "target": datasets.Sequence(feature=datasets.Value("float32"), length=1054),
                "geometry": datasets.Value("string"),
            })
        elif self.config.name == "Species_ID":
            features = datasets.Features({
                "scientific_name": datasets.Value("string"),
                "ebird_code": datasets.Value("string"),
                "inat_preview": datasets.Value("string"),
                "target_value_index": datasets.Value("int32"),
            })
        else:
            raise ValueError(f"Unsupported config: {self.config.name}")

        return datasets.DatasetInfo(
            description="The SITTELLE Benchmark Dataset",
            features=features,
            supervised_keys=None,
            homepage="https://huggingface.co/datasets/imageomics/invasive_plants_hawaii",
            license="MIT",
        )

    def _split_generators(self, dl_manager):
        if self.config.name in ("Kenya", "South_Africa", "USA_Summer", "USA_Winter"):
            # Assumption: each regional config keeps its split CSVs in a directory
            # named after the config (e.g. "South_Africa/train_split.csv").
            train_csv = f"{self.config.name}/train_split.csv"
            val_csv = f"{self.config.name}/valid_split.csv"
            test_csv = f"{self.config.name}/test_split.csv"
            return [
                datasets.SplitGenerator(name="train", gen_kwargs={"filepath": train_csv}),
                datasets.SplitGenerator(name="val", gen_kwargs={"filepath": val_csv}),
                datasets.SplitGenerator(name="test", gen_kwargs={"filepath": test_csv}),
            ]
        elif self.config.name == "Species_ID":
            # Assumption: one species table per region, following the naming pattern
            # of the Kenya file ("Kenya/species_id_kenya.csv").
            return [
                datasets.SplitGenerator(
                    name=region,
                    gen_kwargs={"filepath": f"{region}/species_id_{region.lower()}.csv"},
                )
                for region in ("Kenya", "South_Africa", "USA_Summer", "USA_Winter")
            ]
        else:
            raise ValueError(f"Unknown config: {self.config.name}")

    def _generate_examples(self, filepath):
        df_metadata = pd.read_csv(filepath)

        if self.config.name in ("Kenya", "South_Africa", "USA_Summer", "USA_Winter"):
            # The "target" column is stored as a stringified list; parse it into floats.
            df_metadata["target"] = (
                df_metadata["target"]
                .apply(ast.literal_eval)
                .apply(lambda x: list(map(float, x)))
            )
            for idx, row in df_metadata.iterrows():
                yield idx, {
                    "hotspot_id": row["hotspot_id"],
                    "hotspot_name": row["hotspot_name"],
                    "lon": row["lon"],
                    "lat": row["lat"],
                    "county": row["county"],
                    "county_code": row["county_code"],
                    "state": row["state"],
                    "state_code": row["state_code"],
                    "sat_imagery_path": row["sat_imagery_path"],
                    "environmental_path": row["environmental_path"],
                    "split": row["split"],
                    "num_complete_checklists": row["num_complete_checklists"],
                    "target": row["target"],
                    "geometry": row["geometry"],
                }
        elif self.config.name == "Species_ID":
            for idx, row in df_metadata.iterrows():
                yield idx, {
                    "scientific_name": row["scientific_name"],
                    "ebird_code": row["ebird_code"],
                    "inat_preview": row["inat_preview"],
                    "target_value_index": row["target_value_index"],
                }
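
For context, a minimal usage sketch of the builder defined above. It assumes the per-region CSVs sit next to the script and that the dataset is loaded directly from the local file test_neurips_2025.py (a Hub repo id would work the same way); loading from a script may require trust_remote_code with recent versions of the datasets library. The config names and the "train"/"val"/"test" split names are taken from the code in this commit; everything else is an assumption.

import datasets

# Load the "Kenya" config; path and trust_remote_code flag are assumptions.
ds = datasets.load_dataset("test_neurips_2025.py", name="Kenya", trust_remote_code=True)
train = ds["train"]
print(train[0]["hotspot_id"], len(train[0]["target"]))  # target is a 1054-entry vector

# The Species_ID config exposes one table per region (as named splits) mapping each
# position of the target vector to a species via target_value_index.
species = datasets.load_dataset(
    "test_neurips_2025.py", name="Species_ID", trust_remote_code=True
)["Kenya"]
index_to_species = {row["target_value_index"]: row["scientific_name"] for row in species}
print(index_to_species[0])

The Species_ID lookup is how the 1054-entry target vector is interpreted: target_value_index gives the position of each species in that vector for the corresponding region.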