def make_dataset(path, input_fmt, res_type):
    """Build, normalize, and persist a GHData dataset for one result type.

    Loads inputs from the ``net`` subdirectory of *path*, loads targets for
    *res_type*, normalizes, and saves the prepared data under
    ``<path>/<res_type>``.

    Args:
        path: Root data directory; must contain a ``net`` subdirectory.
        input_fmt: Input-layer format descriptor passed through to GHData.
        res_type: Result type to load as targets (e.g. 'cct'); also names
            the output subdirectory.

    Returns:
        The populated GHData instance (also saved to disk).
    """
    net_path = os.path.join(path, 'net')
    res_path = os.path.join(path, res_type)
    # makedirs with exist_ok avoids the exists()/mkdir() race and also
    # creates missing parent directories (os.mkdir would raise).
    os.makedirs(res_path, exist_ok=True)
    data_set = GHData(path, net_path, input_fmt)
    # x_ratio_thr=-1.0 disables the input-ratio filter; dt_idx=False skips
    # datetime indexing — presumably raw sample indices are kept (TODO confirm
    # against GHData.load_x).
    data_set.load_x(x_ratio_thr=-1.0, dt_idx=False)
    # na_value=-1.0: sentinel substituted for missing targets.
    data_set.load_y(res_type, na_value=-1.0)
    data_set.normalize()
    data_set.save_data(res_path)
    return data_set
def make_dataset(path, out_path, input_fmt, res_type):
    """Build, normalize, and save a GHData dataset to an explicit out_path.

    NOTE(review): this redefines ``make_dataset`` with a different signature
    than the earlier variant in this file; if both are live, one should be
    renamed.

    Args:
        path: Root data directory; must contain a ``net`` subdirectory.
        out_path: Directory where the prepared dataset is written.
        input_fmt: Input-layer format descriptor passed through to GHData.
        res_type: Result type to load as targets (e.g. 'cct').

    Returns:
        The populated GHData instance (previously returned None; the return
        value is additive and backward-compatible).
    """
    # Use os.path.join for portability instead of '/'-concatenation,
    # matching the convention used elsewhere in this file.
    data_set = GHData(path, os.path.join(path, "net"), input_fmt)
    data_set.load_x()
    data_set.load_y(res_type)
    data_set.normalize()
    data_set.save_data(out_path)
    return data_set
all_types = ['cct', 'sst', 'vs', 'v_curve'] res_type = 'cct' res_name = res_type res_path = path + "/" + res_name input_dic = {'generator': ['p', 'v'], 'station': ['pl', 'ql']} dr_percs = [0.5, 0.5, 0.5, 0.5, 0.5, 0.5] net_path = path + "/net" net = GHNet("inf", input_dic, dr_percs=dr_percs) net.load_net(net_path) net1 = GHNet("inf", input_dic, dr_percs=dr_percs) net1.load_net(net_path) data_set = GHData(path, net_path, net.input_layer) data_set.load_x(x_ratio_thr=-1.0) data_set.load_y(res_type) data_set.normalize() drops = np.array(range(len(data_set.column_valid)))[~data_set.column_valid] net.drop_inputs(drops) net1.drop_inputs(drops) n_batch = 16 n_epochs = 10 n_al_epochs = 10 only_real = True y_columns = list(range(data_set.y.shape[1])) y_columns = [2, 11, 23] net.build_multi_reg_k(len(y_columns), activation=tf.keras.layers.LeakyReLU()) net1.build_multi_reg_k(len(y_columns),
net_path = os.path.join(path, 'net') res_type = 'cct' res_path = os.path.join(path, res_type) input_dic = {'generator': ['p'], 'load': ['p', 'q']} fmt = 'off' power = Power(fmt=fmt) power.load_power(net_path, fmt, lf=True, lp=False, st=False, station=True) input_layer = [] for etype in input_dic: for dtype in input_dic[etype]: t = '_'.join((etype, dtype)) input_layer.extend([(t, n) for n in power.data[etype]['name']]) data_set = GHData(path, net_path, input_layer) data_set.load_x(x_ratio_thr=-1.0, dt_idx=False) data_set.load_y(res_type) data_set.normalize() data_set.column_valid = np.ones((data_set.input_data.shape[1], ), dtype=np.bool) """ y_columns = list(range(data_set.y.shape[1])) column_names = data_set.y.columns[y_columns] print("targets:", column_names) prelu = tf.keras.layers.ReLU(negative_slope=0.1) x = Input(shape=(len(input_layer),), dtype='float32', name='x') fault = Input(shape=(len(y_columns),), dtype='float32', name='fault') y_ = Input(shape=(len(y_columns),), dtype='float32', name='y_') pre_model, all_layers = build_mlp(x, [64, 32, len(y_columns)], 'wepri36', activation=prelu, last_activation=prelu) feature_model = Model(x, all_layers[-2])