Example #1
File: mlp.py Project: sdy99/PowerAI
def make_dataset(path, input_fmt, res_type):
    net_path = os.path.join(path, 'net')
    res_path = os.path.join(path, res_type)
    if not os.path.exists(res_path):
        os.mkdir(res_path)
    data_set = GHData(path, net_path, input_fmt)
    data_set.load_x(x_ratio_thr=-1.0, dt_idx=False)
    data_set.load_y(res_type, na_value=-1.0)
    # data_set.drop_times(['00000027'])
    data_set.normalize()
    # data_set.column_valid = np.ones((data_set.input_data.shape[1],), dtype=bool)
    data_set.save_data(res_path)
    return data_set
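
A minimal usage sketch for make_dataset above, assuming GHData and load_input_fmt are already imported from the project's modules and that path contains a net/ directory with the raw network files; the input.txt location is borrowed from the HighDG constructor below and is an assumption here:

import os

# Hypothetical driver; only make_dataset itself comes from the example above.
input_fmt = load_input_fmt(os.path.join(path, 'cct', 'predict', 'input.txt'),
                           input_mode=True)
data_set = make_dataset(path, input_fmt, res_type='cct')
print(data_set.input_data.shape, data_set.y.shape)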
Example #2
def __init__(self, work_path, fmt, res_type, nn_size):
    super().__init__(work_path, fmt)
    self.mode = 'one'
    self.generated_num = 0
    self.nn_size = nn_size
    self.base_path = os.path.join(work_path, 'net')
    self.base_power = Power(fmt=fmt)
    self.base_power.load_power(self.base_path, fmt=fmt)
    self.input_fmt = load_input_fmt(os.path.join(work_path, 'cct', 'predict', 'input.txt'),
                                    input_mode=True)
    self.feature_model = load_model(os.path.join(work_path, res_type, 'feature'),
                                    '', suffix='tf')
    self.data_set = GHData(work_path, '', '')
    self.data_set.load_data(work_path)
    self.features = pd.DataFrame(
        self.feature_model.predict(self.data_set.input_data.values),
        index=self.data_set.input_data.index)
    self.init_assess = self.distribution_assess(nn_size)
Example #3
class HighDG(ADG):
    def __init__(self, work_path, fmt, res_type, nn_size):
        super().__init__(work_path, fmt)
        self.mode = 'one'
        self.generated_num = 0
        self.nn_size = nn_size
        self.base_path = os.path.join(work_path, 'net')
        self.base_power = Power(fmt=fmt)
        self.base_power.load_power(self.base_path, fmt=fmt)
        self.input_fmt = load_input_fmt(os.path.join(work_path, 'cct', 'predict', 'input.txt'),
                                        input_mode=True)
        self.feature_model = load_model(os.path.join(work_path, res_type, 'feature'),
                                        '', suffix='tf')
        self.data_set = GHData(work_path, '', '')
        self.data_set.load_data(work_path)
        self.features = pd.DataFrame(
            self.feature_model.predict(self.data_set.input_data.values),
            index=self.data_set.input_data.index)
        self.init_assess = self.distribution_assess(nn_size)

    def distribution_assess(self, nn_size=-1):
        dists = pairwise_distances(self.features)
        if nn_size < 1:
            # Global spread: mean pairwise distance normalized by the diameter.
            return np.average(dists) / np.max(dists)
        # Mean distance to each sample's nn_size nearest neighbours; the first
        # partitioned column is dropped, intended to discard the zero
        # self-distance.
        return np.average(np.partition(dists, nn_size + 1)[:, 1:nn_size + 1])

    def choose_samples(self, size=1):
        idx = np.arange(self.features.shape[0])
        np.random.shuffle(idx)
        return self.features.index[idx[:size]]

    def generate_one(self, power, idx, out_path):
        print(idx, out_path)
        if power is None:
            self.input_fmt['value'] = self.data_set.ori_data.loc[idx].values
            power = restore_power_input(self.input_fmt, self.base_power, normalize=False)
            print('old=', self.input_fmt['value'])
        # TODO
        self.input_fmt['value'] *= np.random.normal(loc=1.0, scale=0.1,
                                                    size=self.input_fmt.shape[0])
        power = restore_power_input(self.input_fmt, power, normalize=False)
        print('new=', self.input_fmt['value'])
        shutil.rmtree(out_path, ignore_errors=True)
        power.save_power(out_path, fmt=self.fmt, lp=False)
        shutil.copy(os.path.join(self.base_path, 'LF.L0'), out_path)
        shutil.copy(os.path.join(self.base_path, 'ST.S0'), out_path)
        call_wmlf(out_path)
        ret = check_lfcal(out_path)
        if ret:
            idx = os.path.split(out_path)[-1]
            new_data = load_power_input(self.input_fmt, power)
            new_data = self.data_set.normalize_it(new_data)
            new_feature = self.feature_model.predict(new_data[np.newaxis, :])
            self.features.loc[idx] = new_feature.reshape(-1)
            self.generated_num += 1
        return ret

    def remove_samples(self):
        pass

    def done(self):
        assess = self.distribution_assess(self.nn_size)
        print('num=%d, init=%.6f, now=%.6f' % (self.generated_num, self.init_assess, assess))
        return self.generated_num > 0
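
A sketch of a generate-until-done loop around HighDG, assuming the ADG base class is driven this way; the round budget, batch size, and gen/ output layout are illustrative assumptions, not the project's actual driver:

import os

dg = HighDG(work_path, fmt, res_type='cct', nn_size=10)
for _ in range(10):  # hypothetical budget of generation rounds
    for idx in dg.choose_samples(size=4):
        # power=None makes generate_one rebuild the sample from ori_data.
        out_path = os.path.join(work_path, 'gen', '%06d' % dg.generated_num)
        dg.generate_one(None, idx, out_path)
    if dg.done():
        break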
Example #4
def make_dataset(path, out_path, input_fmt, res_type):
    data_set = GHData(path, path + "/net", input_fmt)
    data_set.load_x()
    data_set.load_y(res_type)
    data_set.normalize()
    data_set.save_data(out_path)
Example #5
        path = "d:/python/db/2018_11"

    all_types = ['cct', 'sst', 'vs', 'v_curve']
    res_type = 'cct'
    res_name = res_type
    res_path = path + "/" + res_name
    input_dic = {'generator': ['p', 'v'], 'station': ['pl', 'ql']}

    dr_percs = [0.5, 0.5, 0.5, 0.5, 0.5, 0.5]
    net_path = path + "/net"
    net = GHNet("inf", input_dic, dr_percs=dr_percs)
    net.load_net(net_path)
    net1 = GHNet("inf", input_dic, dr_percs=dr_percs)
    net1.load_net(net_path)

    data_set = GHData(path, net_path, net.input_layer)
    data_set.load_x(x_ratio_thr=-1.0)
    data_set.load_y(res_type)
    data_set.normalize()
    drops = np.array(range(len(data_set.column_valid)))[~data_set.column_valid]
    net.drop_inputs(drops)
    net1.drop_inputs(drops)

    n_batch = 16
    n_epochs = 10
    n_al_epochs = 10
    only_real = True
    y_columns = list(range(data_set.y.shape[1]))  # default: every target column
    y_columns = [2, 11, 23]  # overridden here with a hand-picked subset
    net.build_multi_reg_k(len(y_columns),
                          activation=tf.keras.layers.LeakyReLU())
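
The snippet stops right after the regression head is built. A hypothetical continuation, assuming build_multi_reg_k leaves a tf.keras model on the net object (the net.model attribute name and the training call are assumptions, not the project's actual API):

import tensorflow as tf

x = data_set.input_data.values.astype('float32')
y = data_set.y.values[:, y_columns].astype('float32')
net.model.compile(optimizer=tf.keras.optimizers.Adam(1e-3), loss='mse')
net.model.fit(x, y, batch_size=n_batch, epochs=n_epochs, validation_split=0.1)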
Example #6
    input_dic = {'generator': ['p', 'v'],
                 'station': ['pl', 'ql']}
    '''
    input_dic = {'generator':['p','v'],
                 'station':['pg', 'pl','ql'],
                 'dcline':['p','q','acu'],
                 'ed':['ed']}
    '''

    decay_ratios = [0.5] * 8
    net = GHNet("inf", input_dic, decay_ratios=decay_ratios)
    net.load_net(net_path)
    # with open(net_path+"/ghnet_out.txt", "w") as f:
    # 	net.print_node(net.nodes[-1][0], f)

    data_set = GHData(path, net_path, net.input_layer)
    data_set.load_x(x_ratio_thr=-1.0)
    data_set.load_y(args.res_type)
    data_set.normalize()
    drops = np.array(range(len(data_set.column_valid)))[~data_set.column_valid]
    net.drop_inputs(drops)
    y_columns = list(range(data_set.y.shape[1]))
    if args.res_type == 'cct':
        targets = ['东北.青北一线', '东北.燕董一线', '东北.丰徐二线']
        y_columns = data_set.get_y_indices(targets)
    elif args.res_type == 'sst':
        y_columns = [0]
    column_names = data_set.y.columns[y_columns]
    print("targets:", column_names)
    net.build_multi_reg(len(y_columns), activation=tf.keras.layers.LeakyReLU())
Example #7
    path = os.path.join(os.path.expanduser('~'), 'data', 'wepri36', 'gen')
    net_path = os.path.join(path, 'net')
    res_type = 'cct'
    res_path = os.path.join(path, res_type)
    input_dic = {'generator': ['p'],
                 'load': ['p', 'q']}
    fmt = 'off'
    power = Power(fmt=fmt)
    power.load_power(net_path, fmt, lf=True, lp=False, st=False, station=True)
    input_layer = []
    for etype in input_dic:
        for dtype in input_dic[etype]:
            t = '_'.join((etype, dtype))
            input_layer.extend([(t, n) for n in power.data[etype]['name']])

    data_set = GHData(path, net_path, input_layer)
    data_set.load_x(x_ratio_thr=-1.0, dt_idx=False)
    data_set.load_y(res_type)
    data_set.normalize()
    data_set.column_valid = np.ones((data_set.input_data.shape[1], ), dtype=bool)
    """
    y_columns = list(range(data_set.y.shape[1]))
    column_names = data_set.y.columns[y_columns]
    print("targets:", column_names)

    prelu = tf.keras.layers.ReLU(negative_slope=0.1)
    x = Input(shape=(len(input_layer),), dtype='float32', name='x')
    fault = Input(shape=(len(y_columns),), dtype='float32', name='fault')
    y_ = Input(shape=(len(y_columns),), dtype='float32', name='y_')
    pre_model, all_layers = build_mlp(x, [64, 32, len(y_columns)], 'wepri36',
                                      activation=prelu, last_activation=prelu)
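
A side note on the prelu name above: tf.keras.layers.ReLU(negative_slope=0.1) is a leaky ReLU rather than a parametric ReLU, equivalent to tf.keras.layers.LeakyReLU(alpha=0.1); a quick standalone check:

import tensorflow as tf

z = tf.constant([-2.0, 0.0, 3.0])
relu_ns = tf.keras.layers.ReLU(negative_slope=0.1)
leaky = tf.keras.layers.LeakyReLU(alpha=0.1)
print(relu_ns(z).numpy())  # [-0.2  0.   3. ]
print(leaky(z).numpy())    # [-0.2  0.   3. ]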
Example #8
File: mlp.py Project: sdy99/PowerAI
def restore_dataset(path, input_fmt, res_type):
    data_set = GHData(path, path + "/net", input_fmt)
    data_set.load_data(path + '/' + res_type)
    return data_set
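
restore_dataset reloads what make_dataset in Example #1 saved under the res_type subdirectory; a round-trip sketch, assuming the same path and input_fmt as before:

data_set = make_dataset(path, input_fmt, 'cct')     # writes under path + '/cct'
restored = restore_dataset(path, input_fmt, 'cct')  # reads it back
assert restored.input_data.shape == data_set.input_data.shape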