klamike commited on
Commit
943f0d1
·
verified ·
1 Parent(s): 0ccc324

Add files using upload-large-folder tool

Browse files
PGLearn-Small-30_ieee-nminus1.py ADDED
@@ -0,0 +1,427 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+ from dataclasses import dataclass
3
+ from pathlib import Path
4
+ import json
5
+ import shutil
6
+
7
+ import datasets as hfd
8
+ import h5py
9
+ import pgzip as gzip
10
+ import pyarrow as pa
11
+
12
+ # ┌──────────────┐
13
+ # │ Metadata │
14
+ # └──────────────┘
15
+
16
@dataclass
class CaseSizes:
    """Dimensions of the power grid case, used to size per-sample feature arrays."""
    n_bus: int     # number of buses
    n_load: int    # number of loads
    n_gen: int     # number of generators
    n_branch: int  # number of branches
22
+
23
# PGLib case name plus the status-sampler suffix (N-1 contingencies).
CASENAME = "30_ieee-nminus1"
# Grid dimensions for the 30_ieee case; fixed for the whole dataset.
SIZES = CaseSizes(n_bus=30, n_load=21, n_gen=6, n_branch=41)
# Number of samples in each split (feasible train/test, plus infeasible).
NUM_TRAIN = 321126
NUM_TEST = 80282
NUM_INFEASIBLE = 98592
# Maps a data filename to its list of shard files; empty means no file is sharded.
SPLITFILES = {}

URL = "https://huggingface.co/datasets/PGLearn/PGLearn-Small-30_ieee-nminus1"
DESCRIPTION = """\
The 30_ieee-nminus1 PGLearn optimal power flow dataset, part of the PGLearn-Small collection. \
"""
VERSION = hfd.Version("1.0.0")
# `{case}` is filled in via str.format when the default BuilderConfig is created.
DEFAULT_CONFIG_DESCRIPTION="""\
This configuration contains feasible input, primal solution, and dual solution data \
for the ACOPF and DCOPF formulations on the {case} system. For case data, \
download the case.json.gz file from the `script` branch of the repository. \
https://huggingface.co/datasets/PGLearn/PGLearn-Small-30_ieee-nminus1/blob/script/case.json.gz
"""
# Logged once per load to point users at faster access paths than `load_dataset`.
USE_ML4OPF_WARNING = """
================================================================================================
Loading PGLearn-Small-30_ieee-nminus1 through the `datasets.load_dataset` function may be slow.

Consider using ML4OPF to directly convert to `torch.Tensor`; for more info see:
https://github.com/AI4OPT/ML4OPF?tab=readme-ov-file#manually-loading-data

Or, use `huggingface_hub.snapshot_download` and an HDF5 reader; for more info see:
https://huggingface.co/datasets/PGLearn/PGLearn-Small-30_ieee-nminus1#downloading-individual-files
================================================================================================
"""
CITATION = """\
@article{klamkinpglearn,
  title={{PGLearn - An Open-Source Learning Toolkit for Optimal Power Flow}},
  author={Klamkin, Michael and Tanneau, Mathieu and Van Hentenryck, Pascal},
  year={2025},
}\
"""

# Whether the files hosted on the Hub carry a ".gz" extension (see gz_ext below).
IS_COMPRESSED = True
61
+
62
+ # ┌──────────────────┐
63
+ # │ Formulations │
64
+ # └──────────────────┘
65
+
66
def acopf_features(sizes: CaseSizes, primal: bool, dual: bool, meta: bool):
    """Assemble the ACOPF feature schema for the requested components."""
    features: dict = {}
    if primal:
        features.update(acopf_primal_features(sizes))
    if dual:
        features.update(acopf_dual_features(sizes))
    if meta:
        # metadata keys are shared across formulations; prefix them here
        for key, value in META_FEATURES.items():
            features[f"ACOPF/{key}"] = value
    return features
72
+
73
def dcopf_features(sizes: CaseSizes, primal: bool, dual: bool, meta: bool):
    """Assemble the DCOPF feature schema for the requested components."""
    features: dict = {}
    if primal:
        features.update(dcopf_primal_features(sizes))
    if dual:
        features.update(dcopf_dual_features(sizes))
    if meta:
        # metadata keys are shared across formulations; prefix them here
        for key, value in META_FEATURES.items():
            features[f"DCOPF/{key}"] = value
    return features
79
+
80
def socopf_features(sizes: CaseSizes, primal: bool, dual: bool, meta: bool):
    """Assemble the SOCOPF feature schema for the requested components."""
    features: dict = {}
    if primal:
        features.update(socopf_primal_features(sizes))
    if dual:
        features.update(socopf_dual_features(sizes))
    if meta:
        # metadata keys are shared across formulations; prefix them here
        for key, value in META_FEATURES.items():
            features[f"SOCOPF/{key}"] = value
    return features
86
+
87
# Dispatch table: formulation name -> function building its feature schema.
FORMULATIONS_TO_FEATURES = {
    "ACOPF": acopf_features,
    "DCOPF": dcopf_features,
    "SOCOPF": socopf_features,
}
92
+
93
+ # ┌───────────────────┐
94
+ # │ BuilderConfig │
95
+ # └───────────────────┘
96
+
97
class PGLearnSmall30_ieeeNminus1Config(hfd.BuilderConfig):
    """BuilderConfig for PGLearn-Small-30_ieee-nminus1.
    By default, primal solution data, metadata, input, casejson, are included for the train and test splits.

    To modify the default configuration, pass attributes of this class to `datasets.load_dataset`:

    Attributes:
        formulations (list[str]): The formulation(s) to include, e.g. ["ACOPF", "DCOPF"]
        primal (bool, optional): Include primal solution data. Defaults to True.
        dual (bool, optional): Include dual solution data. Defaults to False.
        meta (bool, optional): Include metadata. Defaults to True.
        input (bool, optional): Include input data. Defaults to True.
        casejson (bool, optional): Include case.json data. Defaults to True.
        train (bool, optional): Include training samples. Defaults to True.
        test (bool, optional): Include testing samples. Defaults to True.
        infeasible (bool, optional): Include infeasible samples. Defaults to False.
    """
    def __init__(self,
        formulations: list[str],
        primal: bool=True, dual: bool=False, meta: bool=True, input: bool = True, casejson: bool=True,
        train: bool=True, test: bool=True, infeasible: bool=False,
        compressed: bool=IS_COMPRESSED, **kwargs
    ):
        super(PGLearnSmall30_ieeeNminus1Config, self).__init__(version=VERSION, **kwargs)

        self.case = CASENAME
        self.formulations = formulations

        # data-component toggles
        self.primal = primal
        self.dual = dual
        self.meta = meta
        self.input = input
        self.casejson = casejson

        # split toggles
        self.train = train
        self.test = test
        self.infeasible = infeasible

        # suffix appended to every remote filename when the files are gzipped
        self.gz_ext = ".gz" if compressed else ""

    @property
    def size(self):
        """Grid dimensions of this case (module-level constant)."""
        return SIZES

    @property
    def features(self):
        """`datasets.Features` schema assembled from the enabled components."""
        features = {}
        if self.casejson: features.update(case_features())
        if self.input: features.update(input_features(SIZES))
        for formulation in self.formulations:
            features.update(FORMULATIONS_TO_FEATURES[formulation](SIZES, self.primal, self.dual, self.meta))
        return hfd.Features(features)

    @property
    def splits(self):
        """Split name / example-count metadata for each enabled split."""
        splits: dict[hfd.Split, dict[str, str | int]] = {}
        if self.train:
            splits[hfd.Split.TRAIN] = {
                "name": "train",
                "num_examples": NUM_TRAIN
            }
        if self.test:
            splits[hfd.Split.TEST] = {
                "name": "test",
                "num_examples": NUM_TEST
            }
        if self.infeasible:
            splits[hfd.Split("infeasible")] = {
                "name": "infeasible",
                "num_examples": NUM_INFEASIBLE
            }
        return splits

    @property
    def urls(self):
        """Repo-relative paths of every file to download, keyed by split
        (plus an optional "case" entry for case.json)."""
        urls: dict[str, None | str | list] = {
            "case": None, "train": [], "test": [], "infeasible": [],
        }

        if self.casejson:
            urls["case"] = f"case.json" + self.gz_ext
        else:
            # drop the placeholder so the download manager never sees it
            urls.pop("case")

        split_names = []
        if self.train: split_names.append("train")
        if self.test: split_names.append("test")
        if self.infeasible: split_names.append("infeasible")

        for split in split_names:
            if self.input: urls[split].append(f"{split}/input.h5" + self.gz_ext)
            for formulation in self.formulations:
                if self.primal:
                    filename = f"{split}/{formulation}/primal.h5" + self.gz_ext
                    # files present in SPLITFILES map to a list of shards
                    # (concatenated later by open_maybe_gzip_cat)
                    if filename in SPLITFILES: urls[split].append(SPLITFILES[filename])
                    else: urls[split].append(filename)
                if self.dual:
                    filename = f"{split}/{formulation}/dual.h5" + self.gz_ext
                    if filename in SPLITFILES: urls[split].append(SPLITFILES[filename])
                    else: urls[split].append(filename)
                if self.meta:
                    filename = f"{split}/{formulation}/meta.h5" + self.gz_ext
                    if filename in SPLITFILES: urls[split].append(SPLITFILES[filename])
                    else: urls[split].append(filename)
        return urls
202
+
203
+ # ┌────────────────────┐
204
+ # │ DatasetBuilder │
205
+ # └────────────────────┘
206
+
207
class PGLearnSmall30_ieeeNminus1(hfd.ArrowBasedBuilder):
    """DatasetBuilder for PGLearn-Small-30_ieee-nminus1.
    The main interface is `datasets.load_dataset` with `trust_remote_code=True`, e.g.

    ```python
    from datasets import load_dataset
    ds = load_dataset("PGLearn/PGLearn-Small-30_ieee-nminus1", trust_remote_code=True,
        # modify the default configuration by passing kwargs
        formulations=["DCOPF"],
        dual=False,
        meta=False,
    )
    ```
    """

    # rows per Arrow table yielded by _generate_tables
    DEFAULT_WRITER_BATCH_SIZE = 10000
    BUILDER_CONFIG_CLASS = PGLearnSmall30_ieeeNminus1Config
    DEFAULT_CONFIG_NAME=CASENAME
    BUILDER_CONFIGS = [
        PGLearnSmall30_ieeeNminus1Config(
            name=CASENAME, description=DEFAULT_CONFIG_DESCRIPTION.format(case=CASENAME),
            formulations=list(FORMULATIONS_TO_FEATURES.keys()),
            primal=True, dual=True, meta=True, input=True, casejson=False,
            train=True, test=True, infeasible=False,
        )
    ]

    def _info(self):
        """Dataset metadata (schema, splits, description) for the active config."""
        return hfd.DatasetInfo(
            features=self.config.features, splits=self.config.splits,
            description=DESCRIPTION + self.config.description,
            homepage=URL, citation=CITATION,
        )

    def _split_generators(self, dl_manager: hfd.DownloadManager):
        """Download the configured files and create one generator per split."""
        hfd.logging.get_logger().warning(USE_ML4OPF_WARNING)

        # download (and decompress/extract) everything listed in config.urls
        filepaths = dl_manager.download_and_extract(self.config.urls)

        splits: list[hfd.SplitGenerator] = []
        if self.config.train:
            splits.append(hfd.SplitGenerator(
                name=hfd.Split.TRAIN,
                gen_kwargs=dict(case_file=filepaths.get("case", None), data_files=tuple(filepaths["train"]), n_samples=NUM_TRAIN),
            ))
        if self.config.test:
            splits.append(hfd.SplitGenerator(
                name=hfd.Split.TEST,
                gen_kwargs=dict(case_file=filepaths.get("case", None), data_files=tuple(filepaths["test"]), n_samples=NUM_TEST),
            ))
        if self.config.infeasible:
            splits.append(hfd.SplitGenerator(
                name=hfd.Split("infeasible"),
                gen_kwargs=dict(case_file=filepaths.get("case", None), data_files=tuple(filepaths["infeasible"]), n_samples=NUM_INFEASIBLE),
            ))
        return splits

    def _generate_tables(self, case_file: str | None, data_files: tuple[hfd.utils.track.tracked_str | list[hfd.utils.track.tracked_str]], n_samples: int):
        """Yield (key, pyarrow.Table) batches read from the split's HDF5 files.

        Args:
            case_file: local path to case.json(.gz), or None when casejson is off.
            data_files: downloaded file paths; a list element is a sharded file.
            n_samples: total number of rows in this split.
        """
        # case JSON re-serialized once, repeated verbatim on every row if present
        case_data: str | None = json.dumps(json.load(open_maybe_gzip_cat(case_file))) if case_file is not None else None
        # Open each HDF5 file, keyed by the trailing path components of its
        # *origin* URL, e.g. "ACOPF/primal" (sharded files use the first shard).
        data: dict[str, h5py.File] = {}
        for file in data_files:
            v = h5py.File(open_maybe_gzip_cat(file), "r")
            if isinstance(file, list):
                k = "/".join(Path(file[0].get_origin()).parts[-3:-1]).split(".")[0]
            else:
                k = "/".join(Path(file.get_origin()).parts[-2:]).split(".")[0]
            data[k] = v
        # Input files key as "<split>/input"; strip the split prefix so the key
        # matches the "input/..." feature names in the schema.
        for k in list(data.keys()):
            if "/input" in k: data[k.split("/", 1)[1]] = data.pop(k)

        batch_size = self._writer_batch_size or self.DEFAULT_WRITER_BATCH_SIZE
        for i in range(0, n_samples, batch_size):
            # final batch may be short
            effective_batch_size = min(batch_size, n_samples - i)

            # one column per HDF5 dataset that also appears in the config schema
            sample_data = {
                f"{dk}/{k}":
                    hfd.features.features.numpy_to_pyarrow_listarray(v[i:i + effective_batch_size, ...])
                for dk, d in data.items() for k, v in d.items() if f"{dk}/{k}" in self.config.features
            }

            if case_data is not None:
                sample_data["case/json"] = pa.array([case_data] * effective_batch_size)

            yield i, pa.Table.from_pydict(sample_data)

        for f in data.values():
            f.close()
294
+
295
+ # ┌──────────────┐
296
+ # │ Features │
297
+ # └──────────────┘
298
+
299
# dtype names used throughout the feature schema below
FLOAT_TYPE = "float32"
INT_TYPE = "int64"
BOOL_TYPE = "bool"
STRING_TYPE = "string"
303
+
304
def case_features():
    """Feature schema for the static case data: the whole case serialized as one JSON string."""
    # FIXME: better way to share schema of case data -- need to treat jagged arrays
    return {"case/json": hfd.Value(STRING_TYPE)}
309
+
310
# Per-sample solver metadata shared by all formulations; keys get prefixed with
# the formulation name when assembled (e.g. "ACOPF/meta/seed").
META_FEATURES = {
    "meta/seed": hfd.Value(dtype=INT_TYPE),
    "meta/formulation": hfd.Value(dtype=STRING_TYPE),
    "meta/primal_objective_value": hfd.Value(dtype=FLOAT_TYPE),
    "meta/dual_objective_value": hfd.Value(dtype=FLOAT_TYPE),
    "meta/primal_status": hfd.Value(dtype=STRING_TYPE),
    "meta/dual_status": hfd.Value(dtype=STRING_TYPE),
    "meta/termination_status": hfd.Value(dtype=STRING_TYPE),
    "meta/build_time": hfd.Value(dtype=FLOAT_TYPE),
    "meta/extract_time": hfd.Value(dtype=FLOAT_TYPE),
    "meta/solve_time": hfd.Value(dtype=FLOAT_TYPE),
}
322
+
323
def input_features(sizes: CaseSizes):
    """Feature schema for sampler inputs: load vectors, component statuses, RNG seed."""
    features = {}
    # active/reactive load vectors, one entry per load
    for name in ("pd", "qd"):
        features[f"input/{name}"] = hfd.Sequence(length=sizes.n_load, feature=hfd.Value(dtype=FLOAT_TYPE))
    # on/off status flags for generators and branches
    features["input/gen_status"] = hfd.Sequence(length=sizes.n_gen, feature=hfd.Value(dtype=BOOL_TYPE))
    features["input/branch_status"] = hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=BOOL_TYPE))
    features["input/seed"] = hfd.Value(dtype=INT_TYPE)
    return features
331
+
332
def acopf_primal_features(sizes: CaseSizes):
    """Feature schema for ACOPF primal solution variables."""
    lengths = {
        "vm": sizes.n_bus, "va": sizes.n_bus,
        "pg": sizes.n_gen, "qg": sizes.n_gen,
        "pf": sizes.n_branch, "pt": sizes.n_branch,
        "qf": sizes.n_branch, "qt": sizes.n_branch,
    }
    return {
        f"ACOPF/primal/{name}": hfd.Sequence(length=n, feature=hfd.Value(dtype=FLOAT_TYPE))
        for name, n in lengths.items()
    }
343
def acopf_dual_features(sizes: CaseSizes):
    """Feature schema for ACOPF dual (constraint multiplier) variables."""
    lengths = {
        "kcl_p": sizes.n_bus, "kcl_q": sizes.n_bus, "vm": sizes.n_bus,
        "pg": sizes.n_gen, "qg": sizes.n_gen,
        "ohm_pf": sizes.n_branch, "ohm_pt": sizes.n_branch,
        "ohm_qf": sizes.n_branch, "ohm_qt": sizes.n_branch,
        "pf": sizes.n_branch, "pt": sizes.n_branch,
        "qf": sizes.n_branch, "qt": sizes.n_branch,
        "va_diff": sizes.n_branch, "sm_fr": sizes.n_branch, "sm_to": sizes.n_branch,
    }
    features = {
        f"ACOPF/dual/{name}": hfd.Sequence(length=n, feature=hfd.Value(dtype=FLOAT_TYPE))
        for name, n in lengths.items()
    }
    # the slack-bus multiplier is a scalar, not a per-component vector
    features["ACOPF/dual/slack_bus"] = hfd.Value(dtype=FLOAT_TYPE)
    return features
363
def dcopf_primal_features(sizes: CaseSizes):
    """Feature schema for DCOPF primal solution variables."""
    lengths = {"va": sizes.n_bus, "pg": sizes.n_gen, "pf": sizes.n_branch}
    return {
        f"DCOPF/primal/{name}": hfd.Sequence(length=n, feature=hfd.Value(dtype=FLOAT_TYPE))
        for name, n in lengths.items()
    }
369
def dcopf_dual_features(sizes: CaseSizes):
    """Feature schema for DCOPF dual (constraint multiplier) variables."""
    lengths = {
        "kcl_p": sizes.n_bus,
        "pg": sizes.n_gen,
        "ohm_pf": sizes.n_branch,
        "pf": sizes.n_branch,
        "va_diff": sizes.n_branch,
    }
    features = {
        f"DCOPF/dual/{name}": hfd.Sequence(length=n, feature=hfd.Value(dtype=FLOAT_TYPE))
        for name, n in lengths.items()
    }
    # the slack-bus multiplier is a scalar, not a per-component vector
    features["DCOPF/dual/slack_bus"] = hfd.Value(dtype=FLOAT_TYPE)
    return features
378
def socopf_primal_features(sizes: CaseSizes):
    """Feature schema for SOCOPF primal solution variables."""
    lengths = {
        "w": sizes.n_bus,
        "pg": sizes.n_gen, "qg": sizes.n_gen,
        "pf": sizes.n_branch, "pt": sizes.n_branch,
        "qf": sizes.n_branch, "qt": sizes.n_branch,
        "wr": sizes.n_branch, "wi": sizes.n_branch,
    }
    return {
        f"SOCOPF/primal/{name}": hfd.Sequence(length=n, feature=hfd.Value(dtype=FLOAT_TYPE))
        for name, n in lengths.items()
    }
390
def socopf_dual_features(sizes: CaseSizes):
    """Feature schema for SOCOPF dual (constraint multiplier) variables."""
    def seq(n):
        # fixed-length float vector
        return hfd.Sequence(length=n, feature=hfd.Value(dtype=FLOAT_TYPE))

    def mat(cols):
        # per-branch matrix-valued multiplier (conic constraints)
        return hfd.Array2D(shape=(sizes.n_branch, cols), dtype=FLOAT_TYPE)

    spec = {
        "kcl_p": seq(sizes.n_bus), "kcl_q": seq(sizes.n_bus), "w": seq(sizes.n_bus),
        "pg": seq(sizes.n_gen), "qg": seq(sizes.n_gen),
        "ohm_pf": seq(sizes.n_branch), "ohm_pt": seq(sizes.n_branch),
        "ohm_qf": seq(sizes.n_branch), "ohm_qt": seq(sizes.n_branch),
        "jabr": mat(4), "sm_fr": mat(3), "sm_to": mat(3),
        "va_diff": seq(sizes.n_branch),
        "wr": seq(sizes.n_branch), "wi": seq(sizes.n_branch),
        "pf": seq(sizes.n_branch), "pt": seq(sizes.n_branch),
        "qf": seq(sizes.n_branch), "qt": seq(sizes.n_branch),
    }
    return {f"SOCOPF/dual/{name}": feature for name, feature in spec.items()}
412
+
413
+ # ┌───────────────┐
414
+ # │ Utilities │
415
+ # └───────────────┘
416
+
417
+ def open_maybe_gzip_cat(path: str | list):
418
+ if isinstance(path, list):
419
+ dest = Path(path[0]).parent.with_suffix(".h5")
420
+ if not dest.exists():
421
+ with open(dest, "wb") as dest_f:
422
+ for piece in path:
423
+ with open(piece, "rb") as piece_f:
424
+ shutil.copyfileobj(piece_f, dest_f)
425
+ shutil.rmtree(Path(piece).parent)
426
+ path = dest.as_posix()
427
+ return gzip.open(path, "rb") if path.endswith(".gz") else open(path, "rb")
README.md ADDED
@@ -0,0 +1,293 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ license: cc-by-sa-4.0
3
+ tags:
4
+ - energy
5
+ - optimization
6
+ - optimal_power_flow
7
+ - power_grid
8
+ pretty_name: PGLearn Optimal Power Flow (30_ieee, N-1)
9
+ task_categories:
10
+ - tabular-regression
11
+ dataset_info:
12
+ config_name: 30_ieee-nminus1
13
+ features:
14
+ - name: input/pd
15
+ sequence: float32
16
+ length: 21
17
+ - name: input/qd
18
+ sequence: float32
19
+ length: 21
20
+ - name: input/gen_status
21
+ sequence: bool
22
+ length: 6
23
+ - name: input/branch_status
24
+ sequence: bool
25
+ length: 41
26
+ - name: input/seed
27
+ dtype: int64
28
+ - name: ACOPF/primal/vm
29
+ sequence: float32
30
+ length: 30
31
+ - name: ACOPF/primal/va
32
+ sequence: float32
33
+ length: 30
34
+ - name: ACOPF/primal/pg
35
+ sequence: float32
36
+ length: 6
37
+ - name: ACOPF/primal/qg
38
+ sequence: float32
39
+ length: 6
40
+ - name: ACOPF/primal/pf
41
+ sequence: float32
42
+ length: 41
43
+ - name: ACOPF/primal/pt
44
+ sequence: float32
45
+ length: 41
46
+ - name: ACOPF/primal/qf
47
+ sequence: float32
48
+ length: 41
49
+ - name: ACOPF/primal/qt
50
+ sequence: float32
51
+ length: 41
52
+ - name: ACOPF/dual/kcl_p
53
+ sequence: float32
54
+ length: 30
55
+ - name: ACOPF/dual/kcl_q
56
+ sequence: float32
57
+ length: 30
58
+ - name: ACOPF/dual/vm
59
+ sequence: float32
60
+ length: 30
61
+ - name: ACOPF/dual/pg
62
+ sequence: float32
63
+ length: 6
64
+ - name: ACOPF/dual/qg
65
+ sequence: float32
66
+ length: 6
67
+ - name: ACOPF/dual/ohm_pf
68
+ sequence: float32
69
+ length: 41
70
+ - name: ACOPF/dual/ohm_pt
71
+ sequence: float32
72
+ length: 41
73
+ - name: ACOPF/dual/ohm_qf
74
+ sequence: float32
75
+ length: 41
76
+ - name: ACOPF/dual/ohm_qt
77
+ sequence: float32
78
+ length: 41
79
+ - name: ACOPF/dual/pf
80
+ sequence: float32
81
+ length: 41
82
+ - name: ACOPF/dual/pt
83
+ sequence: float32
84
+ length: 41
85
+ - name: ACOPF/dual/qf
86
+ sequence: float32
87
+ length: 41
88
+ - name: ACOPF/dual/qt
89
+ sequence: float32
90
+ length: 41
91
+ - name: ACOPF/dual/va_diff
92
+ sequence: float32
93
+ length: 41
94
+ - name: ACOPF/dual/sm_fr
95
+ sequence: float32
96
+ length: 41
97
+ - name: ACOPF/dual/sm_to
98
+ sequence: float32
99
+ length: 41
100
+ - name: ACOPF/dual/slack_bus
101
+ dtype: float32
102
+ - name: ACOPF/meta/seed
103
+ dtype: int64
104
+ - name: ACOPF/meta/formulation
105
+ dtype: string
106
+ - name: ACOPF/meta/primal_objective_value
107
+ dtype: float32
108
+ - name: ACOPF/meta/dual_objective_value
109
+ dtype: float32
110
+ - name: ACOPF/meta/primal_status
111
+ dtype: string
112
+ - name: ACOPF/meta/dual_status
113
+ dtype: string
114
+ - name: ACOPF/meta/termination_status
115
+ dtype: string
116
+ - name: ACOPF/meta/build_time
117
+ dtype: float32
118
+ - name: ACOPF/meta/extract_time
119
+ dtype: float32
120
+ - name: ACOPF/meta/solve_time
121
+ dtype: float32
122
+ - name: DCOPF/primal/va
123
+ sequence: float32
124
+ length: 30
125
+ - name: DCOPF/primal/pg
126
+ sequence: float32
127
+ length: 6
128
+ - name: DCOPF/primal/pf
129
+ sequence: float32
130
+ length: 41
131
+ - name: DCOPF/dual/kcl_p
132
+ sequence: float32
133
+ length: 30
134
+ - name: DCOPF/dual/pg
135
+ sequence: float32
136
+ length: 6
137
+ - name: DCOPF/dual/ohm_pf
138
+ sequence: float32
139
+ length: 41
140
+ - name: DCOPF/dual/pf
141
+ sequence: float32
142
+ length: 41
143
+ - name: DCOPF/dual/va_diff
144
+ sequence: float32
145
+ length: 41
146
+ - name: DCOPF/dual/slack_bus
147
+ dtype: float32
148
+ - name: DCOPF/meta/seed
149
+ dtype: int64
150
+ - name: DCOPF/meta/formulation
151
+ dtype: string
152
+ - name: DCOPF/meta/primal_objective_value
153
+ dtype: float32
154
+ - name: DCOPF/meta/dual_objective_value
155
+ dtype: float32
156
+ - name: DCOPF/meta/primal_status
157
+ dtype: string
158
+ - name: DCOPF/meta/dual_status
159
+ dtype: string
160
+ - name: DCOPF/meta/termination_status
161
+ dtype: string
162
+ - name: DCOPF/meta/build_time
163
+ dtype: float32
164
+ - name: DCOPF/meta/extract_time
165
+ dtype: float32
166
+ - name: DCOPF/meta/solve_time
167
+ dtype: float32
168
+ - name: SOCOPF/primal/w
169
+ sequence: float32
170
+ length: 30
171
+ - name: SOCOPF/primal/pg
172
+ sequence: float32
173
+ length: 6
174
+ - name: SOCOPF/primal/qg
175
+ sequence: float32
176
+ length: 6
177
+ - name: SOCOPF/primal/pf
178
+ sequence: float32
179
+ length: 41
180
+ - name: SOCOPF/primal/pt
181
+ sequence: float32
182
+ length: 41
183
+ - name: SOCOPF/primal/qf
184
+ sequence: float32
185
+ length: 41
186
+ - name: SOCOPF/primal/qt
187
+ sequence: float32
188
+ length: 41
189
+ - name: SOCOPF/primal/wr
190
+ sequence: float32
191
+ length: 41
192
+ - name: SOCOPF/primal/wi
193
+ sequence: float32
194
+ length: 41
195
+ - name: SOCOPF/dual/kcl_p
196
+ sequence: float32
197
+ length: 30
198
+ - name: SOCOPF/dual/kcl_q
199
+ sequence: float32
200
+ length: 30
201
+ - name: SOCOPF/dual/w
202
+ sequence: float32
203
+ length: 30
204
+ - name: SOCOPF/dual/pg
205
+ sequence: float32
206
+ length: 6
207
+ - name: SOCOPF/dual/qg
208
+ sequence: float32
209
+ length: 6
210
+ - name: SOCOPF/dual/ohm_pf
211
+ sequence: float32
212
+ length: 41
213
+ - name: SOCOPF/dual/ohm_pt
214
+ sequence: float32
215
+ length: 41
216
+ - name: SOCOPF/dual/ohm_qf
217
+ sequence: float32
218
+ length: 41
219
+ - name: SOCOPF/dual/ohm_qt
220
+ sequence: float32
221
+ length: 41
222
+ - name: SOCOPF/dual/jabr
223
+ dtype:
224
+ array2_d:
225
+ shape:
226
+ - 41
227
+ - 4
228
+ dtype: float32
229
+ - name: SOCOPF/dual/sm_fr
230
+ dtype:
231
+ array2_d:
232
+ shape:
233
+ - 41
234
+ - 3
235
+ dtype: float32
236
+ - name: SOCOPF/dual/sm_to
237
+ dtype:
238
+ array2_d:
239
+ shape:
240
+ - 41
241
+ - 3
242
+ dtype: float32
243
+ - name: SOCOPF/dual/va_diff
244
+ sequence: float32
245
+ length: 41
246
+ - name: SOCOPF/dual/wr
247
+ sequence: float32
248
+ length: 41
249
+ - name: SOCOPF/dual/wi
250
+ sequence: float32
251
+ length: 41
252
+ - name: SOCOPF/dual/pf
253
+ sequence: float32
254
+ length: 41
255
+ - name: SOCOPF/dual/pt
256
+ sequence: float32
257
+ length: 41
258
+ - name: SOCOPF/dual/qf
259
+ sequence: float32
260
+ length: 41
261
+ - name: SOCOPF/dual/qt
262
+ sequence: float32
263
+ length: 41
264
+ - name: SOCOPF/meta/seed
265
+ dtype: int64
266
+ - name: SOCOPF/meta/formulation
267
+ dtype: string
268
+ - name: SOCOPF/meta/primal_objective_value
269
+ dtype: float32
270
+ - name: SOCOPF/meta/dual_objective_value
271
+ dtype: float32
272
+ - name: SOCOPF/meta/primal_status
273
+ dtype: string
274
+ - name: SOCOPF/meta/dual_status
275
+ dtype: string
276
+ - name: SOCOPF/meta/termination_status
277
+ dtype: string
278
+ - name: SOCOPF/meta/build_time
279
+ dtype: float32
280
+ - name: SOCOPF/meta/extract_time
281
+ dtype: float32
282
+ - name: SOCOPF/meta/solve_time
283
+ dtype: float32
284
+ splits:
285
+ - name: train
286
+ num_bytes: 3092403240
287
+ num_examples: 321126
288
+ - name: test
289
+ num_bytes: 773105626
290
+ num_examples: 80282
291
+ download_size: 3049813999
292
+ dataset_size: 3865508866
293
+ ---
case.json.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9b477fa50364291d368c7ea81456d5253ae2b574c2d00f9f66a9152fca5d51ad
3
+ size 24637
config.toml ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Name of the reference PGLib case. Must be a valid PGLib case name.
2
+ pglib_case = "pglib_opf_case30_ieee"
3
+ floating_point_type = "Float32"
4
+
5
+ [sampler]
6
+ # data sampler options
7
+ [sampler.load]
8
+ noise_type = "ScaledUniform"
9
+ l = 0.6 # Lower bound of base load factor
10
+ u = 1.0 # Upper bound of base load factor
11
+ sigma = 0.20 # Relative (multiplicative) noise level.
12
+
13
+ [sampler.status]
14
+ type = "Nminus1"
15
+
16
+ [OPF]
17
+
18
+ [OPF.ACOPF]
19
+ type = "ACOPF"
20
+ solver.name = "Ipopt"
21
+ solver.attributes.tol = 1e-6
22
+ solver.attributes.linear_solver = "ma27"
23
+
24
+ [OPF.DCOPF]
25
+ # Formulation/solver options
26
+ type = "DCOPF"
27
+ solver.name = "HiGHS"
28
+
29
+ [OPF.SOCOPF]
30
+ type = "SOCOPF"
31
+ solver.name = "Clarabel"
32
+ # Tight tolerances
33
+ solver.attributes.tol_gap_abs = 1e-6
34
+ solver.attributes.tol_gap_rel = 1e-6
35
+ solver.attributes.tol_feas = 1e-6
36
+ solver.attributes.tol_infeas_rel = 1e-6
37
+ solver.attributes.tol_ktratio = 1e-6
38
+ # Reduced accuracy settings
39
+ solver.attributes.reduced_tol_gap_abs = 1e-6
40
+ solver.attributes.reduced_tol_gap_rel = 1e-6
41
+ solver.attributes.reduced_tol_feas = 1e-6
42
+ solver.attributes.reduced_tol_infeas_abs = 1e-6
43
+ solver.attributes.reduced_tol_infeas_rel = 1e-6
44
+ solver.attributes.reduced_tol_ktratio = 1e-6
infeasible/ACOPF/dual.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:69aa3706c23233228bfdddcd2131a6c4ca9b83a15b72afe63c0869fe3eaac1bd
3
+ size 181723839
infeasible/ACOPF/meta.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:86958e36326eeb3f02dc292b86879782297718c9af671be6ded75e88618818cf
3
+ size 3198291
infeasible/ACOPF/primal.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1b654bad522f64ba1c2fca6cd19171b616ca60beffbbdcbcc662e12c18491b3d
3
+ size 80864740
infeasible/DCOPF/dual.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ef4de4bab5d53ca923ac4d9d98283afd0500aadd7ba514fa2d79e863373acfbf
3
+ size 2153438
infeasible/DCOPF/meta.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9c71eb4d601b35f1b23e036c143b2bb70ab1079afc85d50b45e7877b713e4a8d
3
+ size 2829257
infeasible/DCOPF/primal.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d6eea86cc700967a8ea9f70d381504ee7661d331ca665062973301d0a353a806
3
+ size 4555971
infeasible/SOCOPF/dual.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4ce235badb3f80562b6005fb1ddbf422ad2502e8314c8a09213fd2eb8ddc2aed
3
+ size 355426063
infeasible/SOCOPF/meta.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7d6bb8cf185f9f899f5687ed6497c963301983b62d7fb3bd087ec62652fa075e
3
+ size 3281082
infeasible/SOCOPF/primal.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c07e2c624e8d91c8339a527130ea3498d444d71acbd16d24423fbf4ac51ed8b4
3
+ size 30146073
infeasible/input.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:be8bbf3dffdf42c19b57afbea552f63e325ccbfc18f4701f922000a19c8dd05e
3
+ size 15213225
test/ACOPF/dual.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d5871cec22a34592b69903ffaa67320763ffb79b2cc652e2f190c124a994030f
3
+ size 147071564
test/ACOPF/meta.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:de4aa9e4458d4c0e1460fa86e26247053f1e80aec5fd8d0cd013d88bfbddc855
3
+ size 2641217
test/ACOPF/primal.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c5659566a1048b76053d8e42a32b080d30aaaac462deef49aa7a0c0d43105538
3
+ size 65318220
test/DCOPF/dual.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:25cbe862d3ed9f3981d676babdada23235a30232a8773edb9c79ed9dc23228fc
3
+ size 1028583
test/DCOPF/meta.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5ac9269b3e2cc07f683257cafd438e9b018636a6de1a96416ca15218beb50580
3
+ size 2576137
test/DCOPF/primal.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:395b03d17d1d1445616450c5182b8951203da5357539441e780d5be9eb74cf60
3
+ size 19584866
test/SOCOPF/dual.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d21d1cd1a25698a9fed4652e804bfda4ac0636a9514f5b1e93111a2fdc1dda48
3
+ size 274785747
test/SOCOPF/meta.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:226d971f49088e448e048361785e497fbc90d0d231b8027da386d3403cda554e
3
+ size 2699340
test/SOCOPF/primal.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:58f855ee729dd82f39e3887144a3fb5e0327c0b632ee80f0b42ce88aa88f037c
3
+ size 81879368
test/input.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1ea4816764b2b7ebf399833aea7180e218c5f461302ab92be36a7c499d1a5b98
3
+ size 12433870
train/ACOPF/dual.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9deb69ebdd4732abcebe92f3c65751d5ccd231528ed8801f95696cb577eb40d4
3
+ size 588277428
train/ACOPF/meta.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4744cb29637051c03043b58e8628e873113e899c8cbae3e07b3621af11639b87
3
+ size 10513433
train/ACOPF/primal.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6db604e046fc59a6d6d529bfa92d66764ef994390b0f2d2253c6366f0caf9014
3
+ size 261256922
train/DCOPF/dual.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8b7b748801524b465e7737b96ac815f65322748c2183db15f638fba9f5da31e8
3
+ size 4112004
train/DCOPF/meta.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:851cd1129afa1ce7764104f2ca730b8bd9d7b76d823b6a48762269a46c048b55
3
+ size 10252898
train/DCOPF/primal.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:68ddbf6e2e0d5ce14586509061b08aa86708889d2c037546bfaab4e77e4c8ef7
3
+ size 78336518
train/SOCOPF/dual.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:76aec12d3a442f2a3c80e8196ab7b730bcbaece4022bc7a95bace6394dedd279
3
+ size 1099073557
train/SOCOPF/meta.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c2c390a9d9beed8caf8346601e6420fa3167eb7d0605691dd0fff5569df3f2a1
3
+ size 10747628
train/SOCOPF/primal.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fc95d83b4832b798820dde458766ebe65534c3936d5ae556c699c8ee7f5c7757
3
+ size 327510427
train/input.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fb0039d80b0715c01fb5736f2c29c1cfbde0833cdd42015f832e3e89fcad6101
3
+ size 49714272