klamike commited on
Commit
98e9e1e
·
verified ·
1 Parent(s): 0634c7b

Add files using upload-large-folder tool

Browse files
PGLearn-Large-6470_rte-nminus1.py ADDED
@@ -0,0 +1,427 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+ from dataclasses import dataclass
3
+ from pathlib import Path
4
+ import json
5
+ import shutil
6
+
7
+ import datasets as hfd
8
+ import h5py
9
+ import pgzip as gzip
10
+ import pyarrow as pa
11
+
12
+ # ┌──────────────┐
13
+ # │ Metadata │
14
+ # └──────────────┘
15
+
16
@dataclass
class CaseSizes:
    """Dimensions of the 6470_rte power grid case.

    Used to fix the (constant) lengths of per-bus / per-load / per-gen /
    per-branch feature vectors in the dataset schema.
    """
    n_bus: int     # number of buses (nodes)
    n_load: int    # number of loads
    n_gen: int     # number of generators
    n_branch: int  # number of branches (lines/transformers)
22
+
23
# Case identity and split sizes for this repository.
CASENAME = "6470_rte-nminus1"
SIZES = CaseSizes(n_bus=6470, n_load=3670, n_gen=761, n_branch=9005)
NUM_TRAIN = 73351
NUM_TEST = 18338
NUM_INFEASIBLE = 8311
# Optional mapping from a logical filename to a list of sharded pieces;
# empty here, i.e. no file in this repo is split into shards.
SPLITFILES: dict = {}

URL = "https://huggingface.co/datasets/PGLearn/PGLearn-Large-6470_rte-nminus1"
DESCRIPTION = """\
The 6470_rte-nminus1 PGLearn optimal power flow dataset, part of the PGLearn-Large collection. \
"""
VERSION = hfd.Version("1.0.0")
DEFAULT_CONFIG_DESCRIPTION="""\
This configuration contains feasible input, primal solution, and dual solution data \
for the ACOPF and DCOPF formulations on the {case} system. For case data, \
download the case.json.gz file from the `script` branch of the repository. \
https://huggingface.co/datasets/PGLearn/PGLearn-Large-6470_rte-nminus1/blob/script/case.json.gz
"""
# Emitted once per load (see _split_generators) to steer users toward faster loaders.
USE_ML4OPF_WARNING = """
================================================================================================
Loading PGLearn-Large-6470_rte-nminus1 through the `datasets.load_dataset` function may be slow.

Consider using ML4OPF to directly convert to `torch.Tensor`; for more info see:
https://github.com/AI4OPT/ML4OPF?tab=readme-ov-file#manually-loading-data

Or, use `huggingface_hub.snapshot_download` and an HDF5 reader; for more info see:
https://huggingface.co/datasets/PGLearn/PGLearn-Large-6470_rte-nminus1#downloading-individual-files
================================================================================================
"""
CITATION = """\
@article{klamkinpglearn,
    title={{PGLearn - An Open-Source Learning Toolkit for Optimal Power Flow}},
    author={Klamkin, Michael and Tanneau, Mathieu and Van Hentenryck, Pascal},
    year={2025},
}\
"""

# Data files in the repo are gzip-compressed; controls the ".gz" suffix on URLs.
IS_COMPRESSED = True
61
+
62
+ # ┌──────────────────┐
63
+ # │ Formulations │
64
+ # └──────────────────┘
65
+
66
def acopf_features(sizes: CaseSizes, primal: bool, dual: bool, meta: bool):
    """Assemble the ACOPF feature schema for the requested data groups.

    Args:
        sizes: Dimensions of the case, fixing all sequence lengths.
        primal: Include primal-solution features.
        dual: Include dual-solution features.
        meta: Include solver metadata features (prefixed ``ACOPF/``).

    Returns:
        dict mapping feature name to its `datasets` feature type.
    """
    selected = {}
    if primal:
        selected.update(acopf_primal_features(sizes))
    if dual:
        selected.update(acopf_dual_features(sizes))
    if meta:
        for key, feat in META_FEATURES.items():
            selected[f"ACOPF/{key}"] = feat
    return selected
72
+
73
def dcopf_features(sizes: CaseSizes, primal: bool, dual: bool, meta: bool):
    """Assemble the DCOPF feature schema for the requested data groups.

    Args:
        sizes: Dimensions of the case, fixing all sequence lengths.
        primal: Include primal-solution features.
        dual: Include dual-solution features.
        meta: Include solver metadata features (prefixed ``DCOPF/``).

    Returns:
        dict mapping feature name to its `datasets` feature type.
    """
    selected = {}
    if primal:
        selected.update(dcopf_primal_features(sizes))
    if dual:
        selected.update(dcopf_dual_features(sizes))
    if meta:
        for key, feat in META_FEATURES.items():
            selected[f"DCOPF/{key}"] = feat
    return selected
79
+
80
def socopf_features(sizes: CaseSizes, primal: bool, dual: bool, meta: bool):
    """Assemble the SOCOPF feature schema for the requested data groups.

    Args:
        sizes: Dimensions of the case, fixing all sequence lengths.
        primal: Include primal-solution features.
        dual: Include dual-solution features.
        meta: Include solver metadata features (prefixed ``SOCOPF/``).

    Returns:
        dict mapping feature name to its `datasets` feature type.
    """
    selected = {}
    if primal:
        selected.update(socopf_primal_features(sizes))
    if dual:
        selected.update(socopf_dual_features(sizes))
    if meta:
        for key, feat in META_FEATURES.items():
            selected[f"SOCOPF/{key}"] = feat
    return selected
86
+
87
# Dispatch table: formulation name -> schema-builder function.
# Keys are the valid entries for the config's `formulations` list.
FORMULATIONS_TO_FEATURES = {
    "ACOPF": acopf_features,
    "DCOPF": dcopf_features,
    "SOCOPF": socopf_features,
}
92
+
93
+ # ┌───────────────────┐
94
+ # │ BuilderConfig │
95
+ # └───────────────────┘
96
+
97
class PGLearnLarge6470_rteNminus1Config(hfd.BuilderConfig):
    """BuilderConfig for PGLearn-Large-6470_rte-nminus1.
    By default, primal solution data, metadata, input, casejson, are included for the train and test splits.

    To modify the default configuration, pass attributes of this class to `datasets.load_dataset`:

    Attributes:
        formulations (list[str]): The formulation(s) to include, e.g. ["ACOPF", "DCOPF"]
        primal (bool, optional): Include primal solution data. Defaults to True.
        dual (bool, optional): Include dual solution data. Defaults to False.
        meta (bool, optional): Include metadata. Defaults to True.
        input (bool, optional): Include input data. Defaults to True.
        casejson (bool, optional): Include case.json data. Defaults to True.
        train (bool, optional): Include training samples. Defaults to True.
        test (bool, optional): Include testing samples. Defaults to True.
        infeasible (bool, optional): Include infeasible samples. Defaults to False.
    """
    def __init__(self,
        formulations: list[str],
        primal: bool=True, dual: bool=False, meta: bool=True, input: bool = True, casejson: bool=True,
        train: bool=True, test: bool=True, infeasible: bool=False,
        compressed: bool=IS_COMPRESSED, **kwargs
    ):
        super(PGLearnLarge6470_rteNminus1Config, self).__init__(version=VERSION, **kwargs)

        self.case = CASENAME
        self.formulations = formulations

        # Which data groups to expose as columns.
        self.primal = primal
        self.dual = dual
        self.meta = meta
        self.input = input
        self.casejson = casejson

        # Which splits to download/generate.
        self.train = train
        self.test = test
        self.infeasible = infeasible

        # Suffix appended to every data-file URL when the repo stores gzip files.
        self.gz_ext = ".gz" if compressed else ""

    @property
    def size(self) -> CaseSizes:
        """Dimensions of the grid case (module-level constant)."""
        return SIZES

    @property
    def features(self) -> "hfd.Features":
        """Full feature schema implied by the selected groups and formulations."""
        features = {}
        if self.casejson: features.update(case_features())
        if self.input: features.update(input_features(SIZES))
        for formulation in self.formulations:
            features.update(FORMULATIONS_TO_FEATURES[formulation](SIZES, self.primal, self.dual, self.meta))
        return hfd.Features(features)

    @property
    def splits(self):
        """Split metadata (name and example count) for each enabled split."""
        splits: dict[hfd.Split, dict[str, str | int]] = {}
        if self.train:
            splits[hfd.Split.TRAIN] = {
                "name": "train",
                "num_examples": NUM_TRAIN
            }
        if self.test:
            splits[hfd.Split.TEST] = {
                "name": "test",
                "num_examples": NUM_TEST
            }
        if self.infeasible:
            splits[hfd.Split("infeasible")] = {
                "name": "infeasible",
                "num_examples": NUM_INFEASIBLE
            }
        return splits

    @property
    def urls(self):
        """Relative repo paths to download, keyed by "case" plus split name.

        Entries present in SPLITFILES are replaced by their list of shard
        pieces (none for this repo; SPLITFILES is empty).
        """
        urls: dict[str, None | str | list] = {
            "case": None, "train": [], "test": [], "infeasible": [],
        }

        if self.casejson:
            urls["case"] = f"case.json" + self.gz_ext
        else:
            urls.pop("case")

        split_names = []
        if self.train: split_names.append("train")
        if self.test: split_names.append("test")
        if self.infeasible: split_names.append("infeasible")

        for split in split_names:
            if self.input: urls[split].append(f"{split}/input.h5" + self.gz_ext)
            for formulation in self.formulations:
                if self.primal:
                    filename = f"{split}/{formulation}/primal.h5" + self.gz_ext
                    if filename in SPLITFILES: urls[split].append(SPLITFILES[filename])
                    else: urls[split].append(filename)
                if self.dual:
                    filename = f"{split}/{formulation}/dual.h5" + self.gz_ext
                    if filename in SPLITFILES: urls[split].append(SPLITFILES[filename])
                    else: urls[split].append(filename)
                if self.meta:
                    filename = f"{split}/{formulation}/meta.h5" + self.gz_ext
                    if filename in SPLITFILES: urls[split].append(SPLITFILES[filename])
                    else: urls[split].append(filename)
        return urls
202
+
203
+ # ┌────────────────────┐
204
+ # │ DatasetBuilder │
205
+ # └────────────────────┘
206
+
207
class PGLearnLarge6470_rteNminus1(hfd.ArrowBasedBuilder):
    """DatasetBuilder for PGLearn-Large-6470_rte-nminus1.
    The main interface is `datasets.load_dataset` with `trust_remote_code=True`, e.g.

    ```python
    from datasets import load_dataset
    ds = load_dataset("PGLearn/PGLearn-Large-6470_rte-nminus1", trust_remote_code=True,
        # modify the default configuration by passing kwargs
        formulations=["DCOPF"],
        dual=False,
        meta=False,
    )
    ```
    """

    # Rows per Arrow batch yielded by _generate_tables.
    DEFAULT_WRITER_BATCH_SIZE = 10000
    BUILDER_CONFIG_CLASS = PGLearnLarge6470_rteNminus1Config
    DEFAULT_CONFIG_NAME=CASENAME
    # Single named config: all three formulations, primal+dual+meta+input,
    # no embedded case.json, train+test only.
    BUILDER_CONFIGS = [
        PGLearnLarge6470_rteNminus1Config(
            name=CASENAME, description=DEFAULT_CONFIG_DESCRIPTION.format(case=CASENAME),
            formulations=list(FORMULATIONS_TO_FEATURES.keys()),
            primal=True, dual=True, meta=True, input=True, casejson=False,
            train=True, test=True, infeasible=False,
        )
    ]

    def _info(self):
        """Dataset metadata derived from the active config."""
        return hfd.DatasetInfo(
            features=self.config.features, splits=self.config.splits,
            description=DESCRIPTION + self.config.description,
            homepage=URL, citation=CITATION,
        )

    def _split_generators(self, dl_manager: hfd.DownloadManager):
        """Download enabled files and create one SplitGenerator per enabled split."""
        hfd.logging.get_logger().warning(USE_ML4OPF_WARNING)

        filepaths = dl_manager.download_and_extract(self.config.urls)

        splits: list[hfd.SplitGenerator] = []
        if self.config.train:
            splits.append(hfd.SplitGenerator(
                name=hfd.Split.TRAIN,
                gen_kwargs=dict(case_file=filepaths.get("case", None), data_files=tuple(filepaths["train"]), n_samples=NUM_TRAIN),
            ))
        if self.config.test:
            splits.append(hfd.SplitGenerator(
                name=hfd.Split.TEST,
                gen_kwargs=dict(case_file=filepaths.get("case", None), data_files=tuple(filepaths["test"]), n_samples=NUM_TEST),
            ))
        if self.config.infeasible:
            splits.append(hfd.SplitGenerator(
                name=hfd.Split("infeasible"),
                gen_kwargs=dict(case_file=filepaths.get("case", None), data_files=tuple(filepaths["infeasible"]), n_samples=NUM_INFEASIBLE),
            ))
        return splits

    def _generate_tables(self, case_file: str | None, data_files: tuple[hfd.utils.track.tracked_str | list[hfd.utils.track.tracked_str]], n_samples: int):
        """Yield (key, pyarrow.Table) batches built from the downloaded HDF5 files.

        Each HDF5 dataset is sliced along its first axis (the sample axis) in
        writer-batch-size chunks; only columns present in the configured
        feature schema are emitted.
        """
        # Embed the case JSON (re-serialized to a canonical string) in every row, if requested.
        case_data: str | None = json.dumps(json.load(open_maybe_gzip_cat(case_file))) if case_file is not None else None
        # Map "<FORMULATION>/<kind>" (or "input") -> open HDF5 file handle.
        data: dict[str, h5py.File] = {}
        for file in data_files:
            v = h5py.File(open_maybe_gzip_cat(file), "r")
            if isinstance(file, list):
                # Sharded download: key from the shard directory's path components.
                k = "/".join(Path(file[0].get_origin()).parts[-3:-1]).split(".")[0]
            else:
                # Key from the last two path components, e.g. "ACOPF/primal.h5" -> "ACOPF/primal".
                k = "/".join(Path(file.get_origin()).parts[-2:]).split(".")[0]
            data[k] = v
        # Normalize the input file's key from "<split>/input" to plain "input".
        for k in list(data.keys()):
            if "/input" in k: data[k.split("/", 1)[1]] = data.pop(k)

        batch_size = self._writer_batch_size or self.DEFAULT_WRITER_BATCH_SIZE
        for i in range(0, n_samples, batch_size):
            effective_batch_size = min(batch_size, n_samples - i)

            sample_data = {
                f"{dk}/{k}":
                    hfd.features.features.numpy_to_pyarrow_listarray(v[i:i + effective_batch_size, ...])
                for dk, d in data.items() for k, v in d.items() if f"{dk}/{k}" in self.config.features
            }

            if case_data is not None:
                # Same JSON string repeated for each row of the batch.
                sample_data["case/json"] = pa.array([case_data] * effective_batch_size)

            yield i, pa.Table.from_pydict(sample_data)

        for f in data.values():
            f.close()
294
+
295
+ # ┌──────────────┐
296
+ # │ Features │
297
+ # └──────────────┘
298
+
299
# Arrow/`datasets` dtype names shared by every feature declaration below.
FLOAT_TYPE = "float32"
INT_TYPE = "int64"
BOOL_TYPE = "bool"
STRING_TYPE = "string"
303
+
304
def case_features():
    """Feature schema for the raw case description.

    The entire case is stored as one JSON string per sample rather than as
    typed columns.
    """
    # FIXME: better way to share schema of case data -- need to treat jagged arrays
    schema = {}
    schema["case/json"] = hfd.Value(STRING_TYPE)
    return schema
309
+
310
# Per-sample solver metadata, shared by all formulations; each formulation
# prefixes these keys with its own name (e.g. "ACOPF/meta/seed").
META_FEATURES = {
    "meta/seed": hfd.Value(dtype=INT_TYPE),
    "meta/formulation": hfd.Value(dtype=STRING_TYPE),
    "meta/primal_objective_value": hfd.Value(dtype=FLOAT_TYPE),
    "meta/dual_objective_value": hfd.Value(dtype=FLOAT_TYPE),
    "meta/primal_status": hfd.Value(dtype=STRING_TYPE),
    "meta/dual_status": hfd.Value(dtype=STRING_TYPE),
    "meta/termination_status": hfd.Value(dtype=STRING_TYPE),
    "meta/build_time": hfd.Value(dtype=FLOAT_TYPE),
    "meta/extract_time": hfd.Value(dtype=FLOAT_TYPE),
    "meta/solve_time": hfd.Value(dtype=FLOAT_TYPE),
}
322
+
323
def input_features(sizes: CaseSizes):
    """Schema for the sampled OPF inputs: loads, unit/branch statuses, RNG seed."""
    def seq(length: int, dtype: str = FLOAT_TYPE):
        # Fixed-length vector feature of the given element dtype.
        return hfd.Sequence(length=length, feature=hfd.Value(dtype=dtype))

    return {
        "input/pd": seq(sizes.n_load),
        "input/qd": seq(sizes.n_load),
        "input/gen_status": seq(sizes.n_gen, BOOL_TYPE),
        "input/branch_status": seq(sizes.n_branch, BOOL_TYPE),
        "input/seed": hfd.Value(dtype=INT_TYPE),
    }
331
+
332
def acopf_primal_features(sizes: CaseSizes):
    """Primal solution schema for ACOPF: bus voltages, dispatch, branch flows."""
    def vec(n: int):
        return hfd.Sequence(length=n, feature=hfd.Value(dtype=FLOAT_TYPE))

    schema = {}
    for name in ("vm", "va"):
        schema[f"ACOPF/primal/{name}"] = vec(sizes.n_bus)
    for name in ("pg", "qg"):
        schema[f"ACOPF/primal/{name}"] = vec(sizes.n_gen)
    for name in ("pf", "pt", "qf", "qt"):
        schema[f"ACOPF/primal/{name}"] = vec(sizes.n_branch)
    return schema
343
def acopf_dual_features(sizes: CaseSizes):
    """Dual solution schema for ACOPF: one multiplier vector per constraint group."""
    def vec(n: int):
        return hfd.Sequence(length=n, feature=hfd.Value(dtype=FLOAT_TYPE))

    schema = {f"ACOPF/dual/{name}": vec(sizes.n_bus) for name in ("kcl_p", "kcl_q", "vm")}
    schema.update({f"ACOPF/dual/{name}": vec(sizes.n_gen) for name in ("pg", "qg")})
    schema.update({f"ACOPF/dual/{name}": vec(sizes.n_branch) for name in (
        "ohm_pf", "ohm_pt", "ohm_qf", "ohm_qt",
        "pf", "pt", "qf", "qt",
        "va_diff", "sm_fr", "sm_to",
    )})
    # Single scalar multiplier for the slack-bus reference constraint.
    schema["ACOPF/dual/slack_bus"] = hfd.Value(dtype=FLOAT_TYPE)
    return schema
363
def dcopf_primal_features(sizes: CaseSizes):
    """Primal solution schema for DCOPF: bus angles, active dispatch and flows."""
    def vec(n: int):
        return hfd.Sequence(length=n, feature=hfd.Value(dtype=FLOAT_TYPE))

    return {
        "DCOPF/primal/va": vec(sizes.n_bus),
        "DCOPF/primal/pg": vec(sizes.n_gen),
        "DCOPF/primal/pf": vec(sizes.n_branch),
    }
369
def dcopf_dual_features(sizes: CaseSizes):
    """Dual solution schema for DCOPF."""
    def vec(n: int):
        return hfd.Sequence(length=n, feature=hfd.Value(dtype=FLOAT_TYPE))

    schema = {"DCOPF/dual/kcl_p": vec(sizes.n_bus)}
    schema["DCOPF/dual/pg"] = vec(sizes.n_gen)
    schema.update({f"DCOPF/dual/{name}": vec(sizes.n_branch) for name in ("ohm_pf", "pf", "va_diff")})
    # Single scalar multiplier for the slack-bus reference constraint.
    schema["DCOPF/dual/slack_bus"] = hfd.Value(dtype=FLOAT_TYPE)
    return schema
378
def socopf_primal_features(sizes: CaseSizes):
    """Primal solution schema for SOCOPF (second-order-cone relaxation)."""
    def vec(n: int):
        return hfd.Sequence(length=n, feature=hfd.Value(dtype=FLOAT_TYPE))

    schema = {"SOCOPF/primal/w": vec(sizes.n_bus)}
    for name in ("pg", "qg"):
        schema[f"SOCOPF/primal/{name}"] = vec(sizes.n_gen)
    for name in ("pf", "pt", "qf", "qt", "wr", "wi"):
        schema[f"SOCOPF/primal/{name}"] = vec(sizes.n_branch)
    return schema
390
def socopf_dual_features(sizes: CaseSizes):
    """Dual solution schema for SOCOPF, including 2-D duals of the conic constraints."""
    def vec(n: int):
        return hfd.Sequence(length=n, feature=hfd.Value(dtype=FLOAT_TYPE))

    schema = {f"SOCOPF/dual/{name}": vec(sizes.n_bus) for name in ("kcl_p", "kcl_q", "w")}
    schema.update({f"SOCOPF/dual/{name}": vec(sizes.n_gen) for name in ("pg", "qg")})
    schema.update({f"SOCOPF/dual/{name}": vec(sizes.n_branch) for name in (
        "ohm_pf", "ohm_pt", "ohm_qf", "ohm_qt",
    )})
    # Cone-constraint duals are matrices: one row per branch, one column per cone dimension.
    schema["SOCOPF/dual/jabr"] = hfd.Array2D(shape=(sizes.n_branch, 4), dtype=FLOAT_TYPE)
    schema["SOCOPF/dual/sm_fr"] = hfd.Array2D(shape=(sizes.n_branch, 3), dtype=FLOAT_TYPE)
    schema["SOCOPF/dual/sm_to"] = hfd.Array2D(shape=(sizes.n_branch, 3), dtype=FLOAT_TYPE)
    schema.update({f"SOCOPF/dual/{name}": vec(sizes.n_branch) for name in (
        "va_diff", "wr", "wi", "pf", "pt", "qf", "qt",
    )})
    return schema
412
+
413
+ # ┌───────────────┐
414
+ # │ Utilities │
415
+ # └───────────────┘
416
+
417
def open_maybe_gzip_cat(path: str | list):
    """Open a data file for binary reading, transparently handling gzip.

    If `path` is a list (a file downloaded as multiple shard pieces), the
    pieces are first concatenated into a single ".h5" file named after the
    pieces' parent directory; the concatenation is skipped when that file
    already exists from a previous call.
    """
    if isinstance(path, list):
        # e.g. pieces ".../primal.h5.shards/xx" -> reassembled ".../primal.h5"
        dest = Path(path[0]).parent.with_suffix(".h5")
        if not dest.exists():
            with open(dest, "wb") as dest_f:
                for piece in path:
                    with open(piece, "rb") as piece_f:
                        shutil.copyfileobj(piece_f, dest_f)
            # NOTE(review): relies on the loop variable `piece` remaining bound
            # after the for-loop, and assumes all pieces share one parent
            # directory, which is deleted to reclaim space -- TODO confirm.
            shutil.rmtree(Path(piece).parent)
        path = dest.as_posix()
    # `pgzip` (imported as gzip) decompresses .gz files in parallel.
    return gzip.open(path, "rb") if path.endswith(".gz") else open(path, "rb")
README.md ADDED
@@ -0,0 +1,293 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ license: cc-by-sa-4.0
3
+ tags:
4
+ - energy
5
+ - optimization
6
+ - optimal_power_flow
7
+ - power_grid
8
+ pretty_name: PGLearn Optimal Power Flow (6470_rte, N-1)
9
+ task_categories:
10
+ - tabular-regression
11
+ dataset_info:
12
+ config_name: 6470_rte-nminus1
13
+ features:
14
+ - name: input/pd
15
+ sequence: float32
16
+ length: 3670
17
+ - name: input/qd
18
+ sequence: float32
19
+ length: 3670
20
+ - name: input/gen_status
21
+ sequence: bool
22
+ length: 761
23
+ - name: input/branch_status
24
+ sequence: bool
25
+ length: 9005
26
+ - name: input/seed
27
+ dtype: int64
28
+ - name: ACOPF/primal/vm
29
+ sequence: float32
30
+ length: 6470
31
+ - name: ACOPF/primal/va
32
+ sequence: float32
33
+ length: 6470
34
+ - name: ACOPF/primal/pg
35
+ sequence: float32
36
+ length: 761
37
+ - name: ACOPF/primal/qg
38
+ sequence: float32
39
+ length: 761
40
+ - name: ACOPF/primal/pf
41
+ sequence: float32
42
+ length: 9005
43
+ - name: ACOPF/primal/pt
44
+ sequence: float32
45
+ length: 9005
46
+ - name: ACOPF/primal/qf
47
+ sequence: float32
48
+ length: 9005
49
+ - name: ACOPF/primal/qt
50
+ sequence: float32
51
+ length: 9005
52
+ - name: ACOPF/dual/kcl_p
53
+ sequence: float32
54
+ length: 6470
55
+ - name: ACOPF/dual/kcl_q
56
+ sequence: float32
57
+ length: 6470
58
+ - name: ACOPF/dual/vm
59
+ sequence: float32
60
+ length: 6470
61
+ - name: ACOPF/dual/pg
62
+ sequence: float32
63
+ length: 761
64
+ - name: ACOPF/dual/qg
65
+ sequence: float32
66
+ length: 761
67
+ - name: ACOPF/dual/ohm_pf
68
+ sequence: float32
69
+ length: 9005
70
+ - name: ACOPF/dual/ohm_pt
71
+ sequence: float32
72
+ length: 9005
73
+ - name: ACOPF/dual/ohm_qf
74
+ sequence: float32
75
+ length: 9005
76
+ - name: ACOPF/dual/ohm_qt
77
+ sequence: float32
78
+ length: 9005
79
+ - name: ACOPF/dual/pf
80
+ sequence: float32
81
+ length: 9005
82
+ - name: ACOPF/dual/pt
83
+ sequence: float32
84
+ length: 9005
85
+ - name: ACOPF/dual/qf
86
+ sequence: float32
87
+ length: 9005
88
+ - name: ACOPF/dual/qt
89
+ sequence: float32
90
+ length: 9005
91
+ - name: ACOPF/dual/va_diff
92
+ sequence: float32
93
+ length: 9005
94
+ - name: ACOPF/dual/sm_fr
95
+ sequence: float32
96
+ length: 9005
97
+ - name: ACOPF/dual/sm_to
98
+ sequence: float32
99
+ length: 9005
100
+ - name: ACOPF/dual/slack_bus
101
+ dtype: float32
102
+ - name: ACOPF/meta/seed
103
+ dtype: int64
104
+ - name: ACOPF/meta/formulation
105
+ dtype: string
106
+ - name: ACOPF/meta/primal_objective_value
107
+ dtype: float32
108
+ - name: ACOPF/meta/dual_objective_value
109
+ dtype: float32
110
+ - name: ACOPF/meta/primal_status
111
+ dtype: string
112
+ - name: ACOPF/meta/dual_status
113
+ dtype: string
114
+ - name: ACOPF/meta/termination_status
115
+ dtype: string
116
+ - name: ACOPF/meta/build_time
117
+ dtype: float32
118
+ - name: ACOPF/meta/extract_time
119
+ dtype: float32
120
+ - name: ACOPF/meta/solve_time
121
+ dtype: float32
122
+ - name: DCOPF/primal/va
123
+ sequence: float32
124
+ length: 6470
125
+ - name: DCOPF/primal/pg
126
+ sequence: float32
127
+ length: 761
128
+ - name: DCOPF/primal/pf
129
+ sequence: float32
130
+ length: 9005
131
+ - name: DCOPF/dual/kcl_p
132
+ sequence: float32
133
+ length: 6470
134
+ - name: DCOPF/dual/pg
135
+ sequence: float32
136
+ length: 761
137
+ - name: DCOPF/dual/ohm_pf
138
+ sequence: float32
139
+ length: 9005
140
+ - name: DCOPF/dual/pf
141
+ sequence: float32
142
+ length: 9005
143
+ - name: DCOPF/dual/va_diff
144
+ sequence: float32
145
+ length: 9005
146
+ - name: DCOPF/dual/slack_bus
147
+ dtype: float32
148
+ - name: DCOPF/meta/seed
149
+ dtype: int64
150
+ - name: DCOPF/meta/formulation
151
+ dtype: string
152
+ - name: DCOPF/meta/primal_objective_value
153
+ dtype: float32
154
+ - name: DCOPF/meta/dual_objective_value
155
+ dtype: float32
156
+ - name: DCOPF/meta/primal_status
157
+ dtype: string
158
+ - name: DCOPF/meta/dual_status
159
+ dtype: string
160
+ - name: DCOPF/meta/termination_status
161
+ dtype: string
162
+ - name: DCOPF/meta/build_time
163
+ dtype: float32
164
+ - name: DCOPF/meta/extract_time
165
+ dtype: float32
166
+ - name: DCOPF/meta/solve_time
167
+ dtype: float32
168
+ - name: SOCOPF/primal/w
169
+ sequence: float32
170
+ length: 6470
171
+ - name: SOCOPF/primal/pg
172
+ sequence: float32
173
+ length: 761
174
+ - name: SOCOPF/primal/qg
175
+ sequence: float32
176
+ length: 761
177
+ - name: SOCOPF/primal/pf
178
+ sequence: float32
179
+ length: 9005
180
+ - name: SOCOPF/primal/pt
181
+ sequence: float32
182
+ length: 9005
183
+ - name: SOCOPF/primal/qf
184
+ sequence: float32
185
+ length: 9005
186
+ - name: SOCOPF/primal/qt
187
+ sequence: float32
188
+ length: 9005
189
+ - name: SOCOPF/primal/wr
190
+ sequence: float32
191
+ length: 9005
192
+ - name: SOCOPF/primal/wi
193
+ sequence: float32
194
+ length: 9005
195
+ - name: SOCOPF/dual/kcl_p
196
+ sequence: float32
197
+ length: 6470
198
+ - name: SOCOPF/dual/kcl_q
199
+ sequence: float32
200
+ length: 6470
201
+ - name: SOCOPF/dual/w
202
+ sequence: float32
203
+ length: 6470
204
+ - name: SOCOPF/dual/pg
205
+ sequence: float32
206
+ length: 761
207
+ - name: SOCOPF/dual/qg
208
+ sequence: float32
209
+ length: 761
210
+ - name: SOCOPF/dual/ohm_pf
211
+ sequence: float32
212
+ length: 9005
213
+ - name: SOCOPF/dual/ohm_pt
214
+ sequence: float32
215
+ length: 9005
216
+ - name: SOCOPF/dual/ohm_qf
217
+ sequence: float32
218
+ length: 9005
219
+ - name: SOCOPF/dual/ohm_qt
220
+ sequence: float32
221
+ length: 9005
222
+ - name: SOCOPF/dual/jabr
223
+ dtype:
224
+ array2_d:
225
+ shape:
226
+ - 9005
227
+ - 4
228
+ dtype: float32
229
+ - name: SOCOPF/dual/sm_fr
230
+ dtype:
231
+ array2_d:
232
+ shape:
233
+ - 9005
234
+ - 3
235
+ dtype: float32
236
+ - name: SOCOPF/dual/sm_to
237
+ dtype:
238
+ array2_d:
239
+ shape:
240
+ - 9005
241
+ - 3
242
+ dtype: float32
243
+ - name: SOCOPF/dual/va_diff
244
+ sequence: float32
245
+ length: 9005
246
+ - name: SOCOPF/dual/wr
247
+ sequence: float32
248
+ length: 9005
249
+ - name: SOCOPF/dual/wi
250
+ sequence: float32
251
+ length: 9005
252
+ - name: SOCOPF/dual/pf
253
+ sequence: float32
254
+ length: 9005
255
+ - name: SOCOPF/dual/pt
256
+ sequence: float32
257
+ length: 9005
258
+ - name: SOCOPF/dual/qf
259
+ sequence: float32
260
+ length: 9005
261
+ - name: SOCOPF/dual/qt
262
+ sequence: float32
263
+ length: 9005
264
+ - name: SOCOPF/meta/seed
265
+ dtype: int64
266
+ - name: SOCOPF/meta/formulation
267
+ dtype: string
268
+ - name: SOCOPF/meta/primal_objective_value
269
+ dtype: float32
270
+ - name: SOCOPF/meta/dual_objective_value
271
+ dtype: float32
272
+ - name: SOCOPF/meta/primal_status
273
+ dtype: string
274
+ - name: SOCOPF/meta/dual_status
275
+ dtype: string
276
+ - name: SOCOPF/meta/termination_status
277
+ dtype: string
278
+ - name: SOCOPF/meta/build_time
279
+ dtype: float32
280
+ - name: SOCOPF/meta/extract_time
281
+ dtype: float32
282
+ - name: SOCOPF/meta/solve_time
283
+ dtype: float32
284
+ splits:
285
+ - name: train
286
+ num_bytes: 146920054186
287
+ num_examples: 73351
288
+ - name: test
289
+ num_bytes: 36730514291
290
+ num_examples: 18338
291
+ download_size: 151027913425
292
+ dataset_size: 183650568477
293
+ ---
case.json.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f43d0d0af92a30858a123d9ddbbaa2e2fb5060fb044183a637933b4e1bd00f9b
3
+ size 5183573
config.toml ADDED
@@ -0,0 +1,56 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ export_dir = "/storage/home/hcoda1/3/mklamkin3/qq/proj/OPFGenerator/data/pglearn/6470_rte-n1"
2
+ # Name of the reference PGLib case. Must be a valid PGLib case name.
3
+ pglib_case = "pglib_opf_case6470_rte"
4
+ floating_point_type = "Float32"
5
+
6
+ [sampler]
7
+ # data sampler options
8
+ [sampler.load]
9
+ noise_type = "ScaledUniform"
10
+ l = 0.6 # Lower bound of base load factor
11
+ u = 1.0 # Upper bound of base load factor
12
+ sigma = 0.20 # Relative (multiplicative) noise level.
13
+
14
+ [sampler.status]
15
+ type = "Nminus1"
16
+
17
+ [OPF]
18
+
19
+ [OPF.ACOPF]
20
+ type = "ACOPF"
21
+ solver.name = "Ipopt"
22
+ solver.attributes.tol = 1e-6
23
+ solver.attributes.linear_solver = "ma27"
24
+
25
+ [OPF.DCOPF]
26
+ # Formulation/solver options
27
+ type = "DCOPF"
28
+ solver.name = "HiGHS"
29
+
30
+ [OPF.SOCOPF]
31
+ type = "SOCOPF"
32
+ solver.name = "Clarabel"
33
+ # Tight tolerances
34
+ solver.attributes.tol_gap_abs = 1e-6
35
+ solver.attributes.tol_gap_rel = 1e-6
36
+ solver.attributes.tol_feas = 1e-6
37
+ solver.attributes.tol_infeas_rel = 1e-6
38
+ solver.attributes.tol_ktratio = 1e-6
39
+ # Reduced accuracy settings
40
+ solver.attributes.reduced_tol_gap_abs = 1e-6
41
+ solver.attributes.reduced_tol_gap_rel = 1e-6
42
+ solver.attributes.reduced_tol_feas = 1e-6
43
+ solver.attributes.reduced_tol_infeas_abs = 1e-6
44
+ solver.attributes.reduced_tol_infeas_rel = 1e-6
45
+ solver.attributes.reduced_tol_ktratio = 1e-6
46
+
47
+ [slurm]
48
+ n_samples = 100000
49
+ n_jobs = 44
50
+ minibatch_size = 16
51
+ cpus_per_task = 8
52
+ queue = "embers"
53
+ charge_account = "gts-phentenryck3-ai4opt"
54
+ sysimage_memory = "128G"
55
+ sampler_memory = "8G"
56
+ extract_memory = "500G"
infeasible/ACOPF/dual.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:14c28a2f1b0920c5d39d4028c5cd6796cd492d8040fd5ae47ae67d9a218a0592
3
+ size 3577970109
infeasible/ACOPF/meta.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f47e96394f3bac4bf7d5ad8a80b3277d0f5adaeb83add364717890bed7f85765
3
+ size 311884
infeasible/ACOPF/primal.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:243035d6edac499e89870ee9ce48e077a76be6b9e95a2d4733f1fa8235563b28
3
+ size 1510053893
infeasible/DCOPF/dual.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e25adc84583d72c66b8ba71fb104ba760dab3966f1a6343266a492415fd22e4f
3
+ size 187749566
infeasible/DCOPF/meta.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a303a2b404adcddeb2368574d7b7c134ae89e38c5362f9e47fe606ef55dcf5d8
3
+ size 298019
infeasible/DCOPF/primal.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:43a269e6415f42922ab48409af7f0c942f7ef5e215bb0b8e0a95fb76d8fad8f6
3
+ size 242296626
infeasible/SOCOPF/dual.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:122b51c5fb4b6b38f3688ec82d84231fb088e25c2a32a4e8370fa6d62923c93a
3
+ size 6202612499
infeasible/SOCOPF/meta.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:13828cf512bb047714cba3eed7160a92468b21fdf4603fc8e8b38539f8f3df9e
3
+ size 296459
infeasible/SOCOPF/primal.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:62a23019cd2bf71da40c238450c6b5be5b63ff7c9a4d9c4ebcb50b1f03cbccae
3
+ size 1619455524
infeasible/input.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:80232395dd3c22e27f9c687421f153564f2eec3bce00e345c0c29c7a1362d228
3
+ size 220241221
test/ACOPF/dual.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b2e53bec3448c9e9abf50f5272fbee45a7511217694d37ff870d7a8f347e8798
3
+ size 7250063538
test/ACOPF/meta.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:187094a9308e9b5f7f5b8792cf289f406eabd8590d667422f253ec43ccd7cb3a
3
+ size 629588
test/ACOPF/primal.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:eb5e28f9c9f7e980b341d0ea05838bb9ebb5fce6ed8a8ea595482a0959999958
3
+ size 3293762695
test/DCOPF/dual.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a489755f9cc4190c3e687c5f1aa874a7cabc3c8e8c96ffe7c05aa6b459dd3837
3
+ size 753235996
test/DCOPF/meta.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:458be2d504c69c922a18826c5ba02f5f0fec71c7ab33c1c866b0cdf1de8c9a30
3
+ size 627183
test/DCOPF/primal.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a67cf9374913f2a919fd1133b752615e908e027603e751806d805bd32708dd0e
3
+ size 978151537
test/SOCOPF/dual.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d9df8b81f808e75838942531519bd506743cc6e7dd295a3ae67c279ceab90005
3
+ size 13390542132
test/SOCOPF/meta.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e9aa200fe270a5303e2251a928c97771ffc72043e6858252e7040f03d8121fb4
3
+ size 628600
test/SOCOPF/primal.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3eab4022af399689398dd7735d90295e7a3fe9dc6681b05c3b19434125dd0b39
3
+ size 4051988628
test/input.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:60e11feb15311cd80a5a39251aab5f2a32ea0db302279a3bd3e36048b92a983f
3
+ size 485991014
train/ACOPF/dual.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ea9074ed41e71a0906fe1a1e1e52e20c16979f335b758810041cc67e0f55c0a5
3
+ size 29000071052
train/ACOPF/meta.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d84152b04c3103155b370cb6f7956981a56c40a1e9ea63461c87980658bd306e
3
+ size 2488022
train/ACOPF/primal.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1d6c5c7a8d060477ed0c07f8309a04b8088c894e512d91b7c978956d9082cc97
3
+ size 13174657776
train/DCOPF/dual.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fcdbba6d5e0119d8ca377d42a2fb3a2bc30c791fac2b0812acf408aa51499cbb
3
+ size 3013205637
train/DCOPF/meta.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:baf3539717b05044d48d1c21c3178244c91e2f06f83b19d67c70be7083602e16
3
+ size 2475003
train/DCOPF/primal.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7fa0a3ec4c46c5123e4829c417517c6903a9b3f5c47eb3af802a63c3a0630f7c
3
+ size 3912598725
train/SOCOPF/dual.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a90ba8c5252c9fbc94b6a07e6ed3477109f99aa7a21ad3549efeade122fce07e
3
+ size 53562588218
train/SOCOPF/meta.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4a2f32e751e997a3859733f5fe4b8328f0c1de9cab9460e5206ac70a9ae885d2
3
+ size 2480553
train/SOCOPF/primal.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f5f9645fc52bf7c942cbdeb3b1992a6eadd978d9c59a29c827d9ee59039f7379
3
+ size 16207809724
train/input.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:27bcc671977c901c14f8f2f9b0f175b37eafd47ed3b35278dcdacabf87c6b1c7
3
+ size 1943917804