Ejemplo n.º 1
0
    def get_column_minmax(self):
        """
        Initialize column_valid, column_min and column_max prior to
        normalization.

        Bounds start from the observed per-column data range, are then
        overridden per input type (generator limits, station limit sums,
        a fixed per-unit voltage band, or an 1/8-widened observed range),
        and columns whose observed spread is below a per-type threshold
        are flagged invalid.
        """
        # Minimum observed spread (max - min) a column must show, per input
        # type, to remain valid.
        spread_threshold = {'generator_p': 0.1,
                            'generator_v': 0.001,
                            'load_p': 0.1,
                            'load_q': 0.1,
                            'station_pg': 0.1,
                            'station_pl': 0.1,
                            'station_ql': 0.1,
                            'dcline_p': 0.1,
                            'dcline_q': 0.1,
                            'dcline_acu': 0.001,
                            'ed': 0.00001}

        self.column_min = np.nanmin(self.input_data, axis=0)
        self.column_max = np.nanmax(self.input_data, axis=0)
        spread = self.column_max - self.column_min

        elem_info = load_elem_info(self.model_path + "/elem_info.dat")
        generators = elem_info[elem_info['type'] == 5]
        for idx, (kind, elem) in enumerate(self.input_layer):
            if kind == 'generator_p':
                # Use the generator's declared limits rather than the
                # observed data range.
                self.column_min[idx] = generators['limit1'][elem]
                self.column_max[idx] = generators['limit2'][elem]
            elif kind == 'station_pg':
                # A station's bounds are the sums of its generators' limits.
                station_gens = generators[generators.station == elem]
                self.column_min[idx] = np.sum(station_gens['limit1'])
                self.column_max[idx] = np.sum(station_gens['limit2'])
            elif kind in ('generator_v', 'dcline_acu'):
                # Voltage-like columns get a fixed per-unit band.
                self.column_min[idx] = 0.9
                self.column_max[idx] = 1.15
            else:
                # Widen the observed range by one eighth on each side.
                margin = (self.column_max[idx] - self.column_min[idx]) / 8
                self.column_min[idx] -= margin
                self.column_max[idx] += margin
            self.column_valid[idx] = (self.column_valid[idx]
                                      & (not np.isnan(spread[idx]))
                                      & (spread[idx] > spread_threshold[kind]))
Ejemplo n.º 2
0
    def load_net(self,
                 path,
                 out_node=None,
                 include_areas=None,
                 exclude_names=None):
        """
        Load ghnet.dat and build the layered node graph.

        Reads ghnet.dat, st_info.dat, elem_info.dat and ed_info.dat from
        *path*, trims the network, then walks the layers bottom-up building
        Node objects: the two lowest layers contribute input columns, every
        node links to the lower-layer nodes named in its 'lower' field, and
        nodes with neither inputs nor subnets are dropped.

        :param path: str. Contains ghnet.dat, st_info.dat, elem_info.dat.
        :param out_node: str or None. Output node name.
        :param include_areas: [int] or None. Include areas, None or [] for all.
        :param exclude_names: [str] or None. Exclude station names.
        :return:
        """
        # Avoid mutable default arguments: a shared list default would be
        # reused across calls (and trim_net may mutate its arguments).
        if include_areas is None:
            include_areas = []
        if exclude_names is None:
            exclude_names = []

        self.ghnet = pd.read_table(path + '/ghnet.dat',
                                   encoding='gbk',
                                   sep=' ')
        # Sort so layers are visited bottom-up: lower nodes must exist
        # before the nodes that reference them.
        self.ghnet.sort_values(by='layer', inplace=True)
        self.st_info = load_station_info(path + '/st_info.dat')
        self.elem_info = load_elem_info(path + '/elem_info.dat')
        # type == 5 rows are generators.
        self.gens = self.elem_info[self.elem_info['type'] == 5]
        self.ed_info = pd.read_table(path + '/ed_info.dat',
                                     encoding='gbk',
                                     sep=' ')
        self.trim_net(out_node, include_areas, exclude_names, inplace=True)

        # input stations
        self.nodes = []
        used_input_names = set()
        upper_names = []
        start = 0  # running offset of the next input column
        input_layer_no = self.ghnet['layer'].min()
        for layer in range(input_layer_no, self.ghnet['layer'].max() + 1):
            # Names produced by the previous layer become candidates for
            # this layer's subnets.
            lower_names = upper_names.copy()
            upper_names = []
            used_subnet_names = []
            nodes = []
            for _, sub in self.ghnet[self.ghnet['layer'] == layer].iterrows():
                ss = sub['lower'].split('+')
                node = Node(layer, sub['upper'])
                if layer == input_layer_no or layer == input_layer_no + 1:  # 220kV and 500kV
                    # The two lowest layers contribute raw input columns.
                    for name in ss:
                        ret = self.get_station_input(name)
                        self.input_layer.extend(ret)
                        if name in used_input_names:
                            print("station(%s) has been used as input" % name)
                        else:
                            used_input_names.add(name)
                    node.in_begin = start
                    node.in_end = len(self.input_layer)
                    start = node.in_end
                for name in ss:
                    # Wire this node to matching nodes from the layer below.
                    if name in lower_names:
                        lower_node_name = node.make_name(layer - 1, name)
                        lower_node = self.get_node(lower_node_name)
                        if lower_node is not None:
                            node.subnets.append(lower_node_name)
                            used_subnet_names.append(name)
                if node.in_begin == node.in_end and len(node.subnets) == 0:
                    # A node with neither inputs nor subnets contributes
                    # nothing to the graph.
                    print("node(%s) has no input, drop it" % node.name)
                    continue
                nodes.append(node)
                upper_names.append(sub['upper'])
            # Report lower-layer nodes that no upper node connected to.
            not_used_lower = set(lower_names) - set(used_subnet_names)
            if not_used_lower:
                print("not used lower node: " + '+'.join(not_used_lower))
            self.nodes.append(nodes)
        print("load ghnet successfully")
Ejemplo n.º 3
0
    # Load every model under `path`: rebuild the network, restore weights,
    # and collect the input/output column formats plus the lambda indices
    # the online run needs.
    input_fmts = []
    output_fmts = []
    lmd_indices = []
    for mn in model_names:
        model_path = os.path.join(path, mn)
        input_fmt = load_input_fmt(os.path.join(model_path, 'input.txt'))
        output_fmt = load_output_fmt(os.path.join(model_path, 'output.txt'))
        # Structure is restored from the files under model_path;
        # input_types=None defers the input definition to reload_net.
        net = GHNet("inf", input_types=None)
        net.reload_net(model_path)
        net.input_fmt = input_fmt
        # One regression output per column of output_fmt, LeakyReLU activation.
        net.build_multi_reg(len(output_fmt), activation=tf.keras.layers.LeakyReLU())
        net.pre_model.load_weights(os.path.join(model_path, 'cct.h5'))
        nets.append(net)
        input_fmts.append(input_fmt)
        output_fmts.append(output_fmt)
        elem_info = load_elem_info(os.path.join(model_path, 'elem_info.dat'))
        # st_info = load_station_info(os.path.join(model_path, 'st_info.dat'))
        lmd_indices.append(get_lmd_indices(input_fmt, elem_info, None))

    # online mode: run once against the data dir in argv[2] and log timing.
    if sys.argv[1] == '-online':
        start_time = datetime.datetime.now()
        results = run_online(sys.argv[2], nets, input_fmts, lmd_indices)
        end_time = datetime.datetime.now()
        # NOTE(review): '%d' does not zero-pad microseconds, so e.g. 5000 us
        # renders as '.5000' and is indistinguishable from 500000 us rendered
        # the same way truncated — '%06d' would be unambiguous; confirm
        # nothing parses this log before changing the format.
        log_txt = 'online run: start_time = %s, end_time = %s\n' % (
            datetime.datetime.strftime(start_time, '%Y-%m-%d %H:%M:%S')+'.%d'%start_time.microsecond,
            datetime.datetime.strftime(end_time, '%Y-%m-%d %H:%M:%S')+'.%d'%end_time.microsecond
        )
        # Append so successive runs accumulate in one shared log file.
        with open(os.path.join(path, 'online.log'), 'a') as fp:
            fp.write(log_txt)
        # trend