def __init__(
    self,
    case_name="case30",
    data_dir="./data",
    batch_size=32,
    ratio_train=0.95,
    num_workers=min(cpu_count(), 8),
    adj_scale=None,
    adj_threshold=0.01,
    pin_memory=False,
    **kwargs,
):
    """Store loader configuration and wrap the selected power-grid case.

    Parameters
    ----------
    case_name : str
        Name of the grid case to load (e.g. ``"case30"``).
    data_dir : str
        Directory holding the dataset files.
    batch_size : int
        Mini-batch size handed to the data loaders.
    ratio_train : float
        Fraction of the samples used for training.
    num_workers : int
        Worker-process count for the loaders. NOTE(review): the default
        ``min(cpu_count(), 8)`` is evaluated once at definition time.
    adj_scale, adj_threshold :
        Scaling factor / sparsification threshold for the adjacency matrix.
    pin_memory : bool
        Forwarded to the torch data loaders.
    **kwargs :
        Accepted but silently ignored.
    """
    super().__init__()
    # Record every configuration knob verbatim, in the original order.
    for attr, value in (
        ("case_name", case_name),
        ("data_dir", data_dir),
        ("batch_size", batch_size),
        ("ratio_train", ratio_train),
        ("num_workers", num_workers),
        ("pin_memory", pin_memory),
        ("adj_scale", adj_scale),
        ("adj_threshold", adj_threshold),
    ):
        setattr(self, attr, value)
    # Wrap the loaded case so bus-level metadata is available up front.
    self.net_wrapper = NetWrapper(load_case(case_name))
    # (channels, n_buses, features) — presumably the per-sample tensor
    # shape advertised to the framework; TODO confirm against consumers.
    self.dims = (1, self.net_wrapper.n_buses, 2)
    # The actual splits are materialized later (e.g. in a setup() hook).
    self.train_data = None
    self.val_data = None
    self.test_data = None
def __init__(self, data_dir, case_name, ratio_train, ratio_valid,
             A_scaling=0.001, A_threshold=0.01,
             dataType=np.float64, device='cpu'):
    """Load the saved grid samples for *case_name* and split them randomly
    into train / validation / test partitions.

    Parameters
    ----------
    data_dir : str
        Root directory containing ``<case_name>/data.npz``.
    case_name : str
        Grid case whose samples and network description are loaded.
    ratio_train, ratio_valid : float
        Fractions of the dataset assigned to training / validation; the
        remainder becomes the test set (must be non-empty).
    A_scaling, A_threshold : float
        Scaling factor and sparsification threshold handed to
        ``adjacency_from_net`` when building the adjacency matrix.
    dataType :
        Numeric dtype the stored arrays are converted to.
    device : str
        Device the stored arrays are moved to.
    """
    # NOTE(review): ``self.samples`` is written below but never created
    # here — it is assumed to be initialized by the base-class
    # ``__init__``; confirm against the parent class.
    super().__init__()
    self.dataType = dataType
    self.device = device
    # Dataset partition
    self.ratio_train = ratio_train
    self.ratio_valid = ratio_valid
    self.data_dir = data_dir
    # Stored arrays: 'bus' comes as (samples, features, buses) and is
    # transposed to (samples, buses, features); 'gen' holds the labels.
    data = np.load(os.path.join(data_dir, case_name, "data.npz"))
    self.bus = np.transpose(data['bus'], [0, 2, 1])
    self.gen = data['gen']
    self.net = load_case(case_name, data_dir)
    self.manager = NetworkManager(self.net)
    nDataPoints = self.bus.shape[0]
    self.nTrain = round(ratio_train * nDataPoints)  # Total train set
    self.nValid = round(ratio_valid * nDataPoints)  # Validation set
    self.nTest = nDataPoints - self.nTrain - self.nValid
    # NOTE(review): ``assert`` is stripped under ``python -O``; raising
    # ValueError would make this validation unconditional.
    assert self.nTest > 0
    # Random split: uses NumPy's *global* RNG, so the partition is only
    # reproducible if the caller seeds np.random beforehand.
    randPerm = np.random.permutation(nDataPoints)
    # And choose the indices that will correspond to each dataset
    indexTrainPoints = randPerm[0:self.nTrain]
    indexValidPoints = randPerm[self.nTrain:self.nTrain + self.nValid]
    indexTestPoints = randPerm[self.nTrain + self.nValid:nDataPoints]
    # Finally get the corresponding samples and store them
    self.samples['train']['signals'] = self.bus[indexTrainPoints, :, :]
    self.samples['train']['labels'] = self.gen[indexTrainPoints, :]
    self.samples['valid']['signals'] = self.bus[indexValidPoints, :, :]
    self.samples['valid']['labels'] = self.gen[indexValidPoints, :]
    self.samples['test']['signals'] = self.bus[indexTestPoints, :, :]
    self.samples['test']['labels'] = self.gen[indexTestPoints, :]
    # And update the index of the data points.
    self.indexDataPoints = {
        'train': indexTrainPoints,
        'valid': indexValidPoints,
        'test': indexTestPoints
    }
    # Graph structure for the GNN; [0] keeps only the adjacency matrix
    # from whatever tuple adjacency_from_net returns.
    self.adjacencyMatrix = adjacency_from_net(self.net, A_scaling, A_threshold)[0]
    # Change data to specified type and device
    # (``astype`` / ``to`` are presumably base-class helpers — confirm.)
    self.astype(self.dataType)
    self.to(self.device)
    # Tail of the worker function whose ``def`` lies above this chunk: a
    # failed sample is reported as a (None, None, None) triple, presumably
    # filtered out by the caller below — confirm against the full function.
    return None, None, None


if __name__ == '__main__':
    # Script entry point: generate randomized load profiles for the case
    # and evaluate them in parallel via the worker ``f`` defined above.
    os.chdir("..")
    # Parameters
    case_name = "case118"
    state = "AK"  # state to use data from
    load_scale = 1.0  # scale the load by a factor
    portion_commercial = 0.5  # how much power should be commercial
    data_dir = "data/"
    case_dir = os.path.join(data_dir, case_name)
    profile_dir = data_dir + "load_profiles/"
    # NOTE(review): ``state``, ``portion_commercial``, ``case_dir`` and
    # ``profile_dir`` are unused in the visible portion of this script.
    net = load_case(case_name, data_dir, reindex=True)
    manager = NetworkManager(net)
    # Base load (active + reactive), scaled, then perturbed into 20000
    # random samples per component.
    load = manager.get_load(reactive=True) * load_scale
    p, q = np.split(load, 2, axis=1)
    p = LoadGenerator.generate_load_from_random(p, 20000, delta=0.1)
    q = LoadGenerator.generate_load_from_random(q, 20000, delta=0.1)
    load = np.stack((p, q), axis=2)
    results = None
    # NOTE(review): ``p`` is reused here as the pool handle, shadowing the
    # active-power array above. Behavior is unaffected (the array is not
    # read again), but renaming the pool variable would be clearer.
    with Pool() as p:
        g = partial(f, manager, load.shape[0])
        results = list(tqdm.tqdm((p.imap(g, enumerate(load))), total=load.shape[0]))
    load, bus, gen = zip(*results)
    # Predicate for filtering out failed (None) worker results;
    # its use presumably continues past this chunk.
    isNotNone = lambda x: x is not None