Example #1
    def get_preview_list(self, nn_id):
        """
        return preview file locations
        :param nn_id:
        :return:
        """
        net_info = netconf.get_network_config(nn_id)
        dataframe = net_info['dir']
        table = net_info['table']
        if (len(str(net_info['datasets'])) == 0):
            label_set = []
        else:
            label_set = json.loads(net_info['datasets'])
        preview_file_list = {}
        preview_table = "{0}/{1}/{2}/{3}".format(settings.PREVIEW_IMG_PATH,
                                                 "preview", dataframe, table)
        url_path = "/{0}/{1}/{2}/{3}".format("dist", "preview", dataframe,
                                             table)

        for label in label_set:
            preview_file_list[label] = []
            if not os.path.exists("{0}/{1}/".format(preview_table, label)):
                os.makedirs("{0}/{1}/".format(preview_table, label))
            for filename in os.listdir("{0}/{1}/".format(preview_table,
                                                         label)):
                preview_file_list[label].append("{0}/{1}/{2}".format(
                    url_path, label, filename))
        return preview_file_list
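A minimal usage sketch for the method above; hosting it on data.ImageManager (as seen in Example #3) and the network id are assumptions for illustration.

# Hedged usage sketch: putting this method on data.ImageManager is an assumption
# based on Example #3, and "nn00001" is a placeholder network id.
preview = data.ImageManager().get_preview_list("nn00001")
# expected shape: {"<label>": ["/dist/preview/<dir>/<table>/<label>/<file>", ...], ...}
for label, urls in preview.items():
    print("{0}: {1} preview files".format(label, len(urls)))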
Example #2
 def get_all_info_json_by_nnid(self,nnid):
     """get network configuration info json
     :param nnid
     :return: json string """
     result = netconf.get_network_config(nnid)
     utils.tfmsa_logger("((Wdnn_common_model_get_json_by_nnid)) ## END##")
     return result
Example #3
    def get(self, request, nnid, type):
        """
        - desc : return network data format information
        """
        #get_network_config
        try:
            result_temp = netconf.get_network_config(nnid)

            datadesc = netconf.load_ori_format(nnid)
            result_datadesc_source = json.loads(datadesc)
            result = dict()

            if type == "cell_feature":
                result = result_datadesc_source["cell_feature"]
            elif type == "label":
                result = result_datadesc_source["label"]
            elif type == "all":
                result = result_datadesc_source["cell_feature"]
                result.update(result_datadesc_source["label"])
            elif type == "labels":
                result = data.ImageManager().get_label_list(nnid)
            return_data = {"status": "200", "result": result}
            return Response(json.dumps(return_data))
        except Exception as e:
            return_data = {"status": "400", "result": str(e)}
            return Response(json.dumps(return_data))
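For reference, the payloads serialized by json.dumps in this view look like the sketch below; the field values are invented for illustration.

# Illustrative payloads returned by the view above (values are invented):
success_payload = {"status": "200", "result": {"col_a": "int", "col_b": "cate"}}
error_payload = {"status": "400", "result": "label key not found"}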
Example #4
    def get_predict_data(self, nn_id, predict_data):
        """
        (1) get net column descritions
        (2) modify predict data for 'categorical data'
        (3) caculate size of arrays need for neural networks
        :param nn_id:neural network id want to train
        :return: Train Data Sets
        """
        try:
            tfmsa_logger("modifying predict start!")
            # (1) get data configuration info
            net_conf = netconf.get_network_config(nn_id)
            datadesc = JsonDataConverter().load_obj_json(net_conf['datadesc'])
            datasets = JsonDataConverter().load_obj_json(net_conf['datasets'])

            # (2) modify train data for 'categorical data'
            self.m_train[:] = []
            self.m_tag[:] = []
            self.m_train, self.m_tag = self.reform_train_data(
                predict_data, datasets, datadesc)

            # (3) calculate size of arrays needed for the neural network
            self.train_len = len(next(iter(self.m_train), None))

            tfmsa_logger("modified predict data : {0} ".format(self.m_train))
            return self

        except Exception as e:
            print("Exception {0} , {1}".format(self.__class__, e))
            raise Exception(e)
Example #5
def train_conv_network(nn_id, epoch=50, testset=100):
    try:
        # check network is ready to train
        utils.tfmsa_logger("[1]check pre steps ready")
        utils.check_requested_nn(nn_id)

        # get network base info
        utils.tfmsa_logger("[2]get network base info")
        net_info = netconf.get_network_config(nn_id)

        # get network format info
        utils.tfmsa_logger("[3]get network format info")
        conf_info = netconf.load_conf(nn_id)

        # load train data
        utils.tfmsa_logger("[4]load train data")
        train_data_set = []
        train_label_set = []

        if (const.TYPE_IMAGE == net_info['preprocess']):
            train_data_set, train_label_set = ConvCommonManager(
                conf_info).prepare_image_data(nn_id, net_info)
        elif (const.TYPE_DATA_FRAME == net_info['preprocess']):
            raise Exception("function not ready")
        elif (const.TYPE_TEXT == net_info['preprocess']):
            raise Exception("function not ready")
        else:
            raise Exception("unknown data type")

        # data size info change
        utils.tfmsa_logger("[5]modify data stuctor info")
        # ConvCommonManager(conf_info).save_changed_data_info(nn_id, train_data_set)

        learnrate = conf_info.data.learnrate
        conf_info.n_class = len(json.loads(net_info['datasets']))

        # change to numpy array
        train_x = np.array(np.array(train_data_set).astype(float), np.float32)
        train_y = np.array(train_label_set, np.int32)

        # define classifier
        utils.tfmsa_logger("[6]define classifier")
        classifier = learn.Estimator(
            model_fn=ConvCommonManager(conf_info).struct_cnn_layer,
            model_dir=netconf.nn_model_manager.get_model_save_path(nn_id))

        # start train
        utils.tfmsa_logger("[7]fit CNN")
        customsMonitor = Monitors.MonitorCommon(p_nn_id=nn_id,
                                                p_max_steps=10000,
                                                p_every_n_steps=100)
        classifier.fit(train_x, train_y, steps=1000, monitors=[customsMonitor])

        return len(train_y)

    except Exception as e:
        print("Error Message : {0}".format(e))
        raise Exception(e)
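A hedged usage sketch for the training entry point above; the network id and keyword values are placeholders, and the network setup steps shown in the other examples are assumed to have run already.

# Hedged usage sketch: "nn00001" is a placeholder network id; epoch/testset values
# are illustrative. The function returns the number of labels it trained on.
trained = train_conv_network("nn00001", epoch=50, testset=100)
print("trained on {0} samples".format(trained))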
Example #6
    def dataframe_pre_process(self, nnid):
        """
        run dataframe preprocessing: build distinct-value sets for the categorical columns
        :param nnid:
        :return:
        """
        # run dataframe data preprocess
        nn_info = netconf.get_network_config(nnid)

        json_obj = json.loads(str(nn_info['datadesc']).replace("'", "\""))
        cate_column_list = []
        for column in json_obj.keys():
            if (json_obj[column] == 'cate' or json_obj[column] == 'tag' or json_obj[column] == 'rank'):
                cate_column_list.append(column)

        dist_col_list = data.DataMaster().get_distinct_dataframe(nn_info['dir'], nn_info['table'], cate_column_list)
        netconf.set_train_datasets(nnid, str(json.dumps(dist_col_list)))
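The loop above assumes 'datadesc' maps each column name to a type string, where 'cate', 'tag', and 'rank' mark categorical columns; a small illustrative value is sketched below (the column names and the 'int' type are invented).

# Illustrative 'datadesc' value; column names and the non-categorical type are invented.
datadesc_example = {
    "age": "int",
    "occupation": "cate",
    "education": "tag",
    "income_band": "rank"
}
cate_columns = [col for col, kind in datadesc_example.items()
                if kind in ("cate", "tag", "rank")]
# -> ["occupation", "education", "income_band"]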
Example #7
 def get_label_list(self, nn_id):
     """
     get image label list
     :param net_info:
     :param label:
     :return:
     """
     net_info = netconf.get_network_config(nn_id)
     if (len(str(net_info['datasets'])) == 0):
         label_list = []
     else:
         label_list = json.loads(net_info['datasets'].replace("'", "\""))
     return label_list
Example #8
def check_requested_nn(nn_id):
    """
    validation checks
    TO-DO : NN name on the list check
    TO-DO : NN model conf on the db check
    TO-DO : NN model trained data on the db check
    """
    conf = netconf.get_network_config(nn_id)

    if(check_nn_exist(conf, nn_id) == False):
        raise SyntaxError("network info not exist")

    if(check_nn_conf_exist(conf, nn_id) == False):
        raise SyntaxError("network configuration not exist")
Example #9
 def get(self, request, baseid, tb, nnid):
     """
     return all table
     :param request: Not used
     :param baseid: schemaId
     :return: list of table
     """
     try:
         result = netconf.get_network_config(nnid)
         return_data = {"status": "200", "result": result['datasets']}
         return Response(json.dumps(return_data))
     except Exception as e:
         return_data = {"status": "400", "result": str(e)}
         return Response(json.dumps(return_data))
Example #10
def predict_conv_network(nn_id, predict_data):
    try:
        # check network is ready to train
        utils.tfmsa_logger("[1]check pre steps ready")
        utils.check_requested_nn(nn_id)

        # get network base info
        utils.tfmsa_logger("[2]get network base info")
        net_info = netconf.get_network_config(nn_id)

        # get network format info
        utils.tfmsa_logger("[3]get network format info")
        conf_info = netconf.load_conf(nn_id)

        learnrate = conf_info.data.learnrate
        label_set = json.loads(net_info['datasets'])
        conf_info.n_class = len(label_set)

        # define classifier
        utils.tfmsa_logger("[4]define classifier")
        classifier = learn.Estimator(
            model_fn=ConvCommonManager(conf_info).struct_cnn_layer,
            model_dir=netconf.nn_model_manager.get_model_save_path(nn_id),
            config=learn.RunConfig(save_checkpoints_secs=1))

        # start train
        #TODO : need to find way to predict without fit
        utils.tfmsa_logger("[5]fit dummy")
        train_x = np.array([
            ConvCommonManager(conf_info).create_dummy_matrix(
                len(predict_data[0]))
        ], np.float32)
        train_y = np.array(
            netcommon.convert_to_index(json.loads(net_info['datasets'])),
            np.int32)
        classifier.fit(train_x, train_y, steps=int(1))

        # predict result
        utils.tfmsa_logger("[6]predict result")
        y_predicted = [
            label_set[int(p['class'])]
            for p in classifier.predict(x=np.array(predict_data, np.float32),
                                        batch_size=1,
                                        as_iterable=True)
        ]
        return y_predicted
    except Exception as e:
        print("Error Message : {0}".format(e))
        raise Exception(e)
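An illustrative call to the prediction entry point above; the network id and the flattened input row (including its length) are assumptions, and the function returns one label string per input row.

# Hedged usage sketch: "nn00001" and the 32x32 flattened row are placeholders;
# the real row length must match the format the network was trained with.
sample_rows = [[0.0] * (32 * 32)]
predicted_labels = predict_conv_network("nn00001", sample_rows)
print(predicted_labels)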
Example #11
    def test_insert_train_loss(self):
        tfmsa_logger("================TEST START================")
        init_data = {
            "nn_id": "test00001",
            "category": "MES",
            "subcate": "csv",
            "name": "CENSUS_INCOME",
            "desc": "INCOME PREDICT"
        }
        netconf.create_new_network(init_data)

        # just consider it works fine if no exception occurs
        netconf.set_on_net_conf("test00001")
        netconf.set_on_net_vaild("test00001")
        netconf.set_on_train("test00001")
        netconf.set_on_data_conf("test00001")
        netconf.set_on_data("test00001")

        result = netconf.get_network_config("test00001")
        idx_set = ["datavaild", "config", "train", "confvaild"]

        for idx in idx_set:
            self.assertEqual(result[idx], 'Y')
        netconf.set_off_net_conf("test00001")
        netconf.set_off_net_vaild("test00001")
        netconf.set_off_train("test00001")
        netconf.set_off_data_conf("test00001")
        netconf.set_off_data("test00001")

        result = netconf.get_network_config("test00001")
        idx_set = ["datavaild", "config", "train", "confvaild"]

        for idx in idx_set:
            self.assertEqual(result[idx], 'N')

        tfmsa_logger("================TEST END================")
Example #12
    def get_init_info_wdnn(self, nnid):
        """ Get infomation of Wdnn initial
            :param nnid
            :param model_dir : directory of chkpoint of wdnn model
        """
        json_string = netconf.load_ori_format(nnid)
        json_object = json.loads(json_string)
        utils.tfmsa_logger("[4]load net conf form db")

        conf = netconf.load_conf(nnid)
        hidden_layers_value = conf.layer
        result_temp = netconf.get_network_config(nnid)
        label_cnt = json.loads(json.dumps(result_temp))
        label_object  = label_cnt["datasets"]
        utils.tfmsa_logger("((1.Make WDN Network Build)) set up Hidden Layers (" + str(hidden_layers_value) + ")")
        return conf, hidden_layers_value, json_object, label_object
Example #13
 def init_job_info(self, nn_id):
     """
     get table info
     :param nn_id:
     :return:
     """
     try:
         net_info = netconf.get_network_config(nn_id)
         table_info = self.get_table_info(net_info['dir'],
                                          net_info['table'])
         data_set = models.JobManagement.objects.get(nn_id=str(nn_id))
         data_set.endpointer = str(table_info.row_len)
         data_set.datapointer = '0'
         data_set.save()
         return data_set
     except Exception as e:
         tfmsa_logger(e)
         return False
Example #14
    def delete_label_list(self, nn_id, label):
        """
        delete image label list
        :param net_info:
        :param label:
        :return:
        """
        net_info = netconf.get_network_config(nn_id)
        if (len(str(net_info['datasets'])) == 0):
            label_list = []
        else:
            label_list = json.loads(net_info['datasets'])

        if label in label_list:
            label_list.remove(label)
            jd = jc.load_obj_json("{}")
            jd.nn_id = net_info['nn_id']
            jd.datasets = json.dumps(label_list)
            result = netconf.update_network(jd)
        return self.get_label_list(nn_id)
Example #15
    def delete_preview_list(self, nn_id):
        """
        return preview file locations
        :param nn_id:
        :return:
        """
        net_info = netconf.get_network_config(nn_id)
        dataframe = net_info['dir']
        table = net_info['table']
        if (len(str(net_info['datasets'])) == 0):
            label_set = []
        else:
            label_set = json.loads(net_info['datasets'])
        preview_file_list = {}
        preview_table = "{0}/{1}/{2}/{3}".format(settings.PREVIEW_IMG_PATH,
                                                 "preview", dataframe, table)

        for label in label_set:
            preview_file_list[label] = []
            shutil.rmtree(("{0}/{1}/".format(preview_table, label)))
        return preview_file_list
Example #16
    def get_train_data(self, nn_id):
        """
        (1) get net column descritions
        (2) get user selected data , exclude user check None
        (3) modify train data for 'categorical data'
        (4) caculate size of arrays need for neural networks
        (5) change neural network configurioatns automtically
        :param nn_id:neural network id want to train
        :return: Train Data Sets
        """
        try:
            tfmsa_logger("modifying Train start!")
            # (1) get data configuration info
            net_conf = netconf.get_network_config(nn_id)
            datadesc = JsonDataConverter().load_obj_json(net_conf['datadesc'])
            datasets = JsonDataConverter().load_obj_json(net_conf['datasets'])

            # (2) get user selected data from spark
            sql_stmt = self.get_sql_state(datadesc, net_conf['table'])
            origin_data = HbaseManager().query_data(net_conf['dir'],
                                                    net_conf['table'],
                                                    sql_stmt)

            # (3) modify train data for 'categorical data'
            self.m_train[:] = []
            self.m_tag[:] = []
            self.m_train, self.m_tag = self.reform_train_data(
                origin_data, datasets, datadesc)

            # (4) calculate size of arrays needed for the neural network
            self.train_len = len(next(iter(self.m_train), None))
            self.tag_len = len(next(iter(self.m_tag), None))

            tfmsa_logger("modifying Train End!")
            return self

        except IOError as e:
            return e
Example #17
def eval_conv_network(nn_id, samplenum=0.1, samplemethod=1):
    try:
        # check network is ready to train
        utils.tfmsa_logger("[1]check pre steps ready")
        utils.check_requested_nn(nn_id)

        # get network base info
        utils.tfmsa_logger("[2]get network base info")
        net_info = netconf.get_network_config(nn_id)

        # get network format info
        utils.tfmsa_logger("[3]get network format info")
        conf_info = netconf.load_conf(nn_id)

        # load train data
        utils.tfmsa_logger("[4]load train data")
        train_data_set = []
        train_label_set = []

        # TODO : need to change data loader to get sample data (not all data)
        if (const.TYPE_IMAGE == net_info['preprocess']):
            train_data_set, train_label_set = ConvCommonManager(
                conf_info).prepare_test_image_data(nn_id, net_info)
        elif (const.TYPE_DATA_FRAME == net_info['preprocess']):
            raise Exception("function not ready")
        elif (const.TYPE_TEXT == net_info['preprocess']):
            raise Exception("function not ready")
        else:
            raise Exception("unknown data type")

        # data size info change
        utils.tfmsa_logger("[5]modify data stuctor info")
        #ConvCommonManager(conf_info).save_changed_data_info(nn_id, train_data_set)

        learnrate = conf_info.data.learnrate
        label_set = json.loads(net_info['datasets'])
        conf_info.n_class = len(label_set)

        # change to numpy array
        train_x = np.array(train_data_set, np.float32)
        train_y = np.array(train_label_set, np.int32)

        # define classifier
        utils.tfmsa_logger("[6]define classifier")
        classifier = learn.Estimator(
            model_fn=ConvCommonManager(conf_info).struct_cnn_layer,
            model_dir=netconf.nn_model_manager.get_model_save_path(nn_id))

        # start train
        #TODO : need to find way to predict without fit
        utils.tfmsa_logger("[5]fit dummy")
        dummy_x = np.array([
            ConvCommonManager(conf_info).create_dummy_matrix(len(train_x[0]))
        ], np.float32)
        dummy_y = np.array(
            netcommon.convert_to_index(json.loads(net_info['datasets'])),
            np.int32)
        classifier.fit(dummy_x, dummy_y, steps=int(1))

        # evaluate predictions
        utils.tfmsa_logger("[8]evaluate prediction result")
        counter = 0
        acc_result_obj = AccStaticResult()
        for p in classifier.predict(x=np.array(train_x, np.float32),
                                    batch_size=1,
                                    as_iterable=True):
            acc_result_obj = AccEvalCommon(nn_id).set_result(
                acc_result_obj, label_set[train_y[counter]],
                label_set[int(p['class'])])
            counter = counter + 1
        return len(train_y)

    except Exception as e:
        print("Error Message : {0}".format(e))
        raise Exception(e)
Example #18
    def put_data(self, data_frame, table_name, label, file_set, nnid):
        """
        delete label folder under table
        :param db_name:
        :param table_name:
        :return:
        """
        try:
            tfmsa_logger("[1]Start upload images...")
            self.make_inital_path(nnid)

            # get network base info
            tfmsa_logger("[2]get network base info")
            net_info = netconf.get_network_config(nnid)

            # get data format info
            tfmsa_logger("[3]get network format info")
            format_info = json.loads(netconf.load_ori_format(nnid))

            # get hbase transaction table
            tfmsa_logger("[4]get hbase transaction table")
            conn, train_table, test_table = self.get_divided_target_table(
                data_frame, table_name)
            train_buffer = train_table.batch(transaction=True)
            test_buffer = test_table.batch(transaction=True)

            # update label list
            tfmsa_logger("[5]Update Label List")
            self.label_info_update(net_info, label)

            # upload images to hbase
            tfmsa_logger("[6]upload image on Hbase - start ")
            file_list = []
            train_key_set, test_key_set = self.divide_train_sample(
                file_set.keys())

            for key in file_set.keys():
                file = file_set[key]
                row_value = dict()
                row_key = table_name + ":" + self.make_hbasekey()
                byte_buffer, width, height = self.image_preprocess(
                    file, net_info, format_info, label)
                row_value[':'.join(
                    ('data', 'filebyte'))] = str(list(byte_buffer))
                row_value[':'.join(('data', 'label'))] = str(label)
                row_value[':'.join(
                    ('data', 'decoder'))] = str(key).split(".")[1]
                row_value[':'.join(('data', 'width'))] = str(width)
                row_value[':'.join(('data', 'height'))] = str(height)
                file_list.append(file._name)
                if (key in train_key_set):
                    train_buffer.put(row_key, row_value)
                if (key in test_key_set):
                    test_buffer.put(row_key, row_value)
            train_buffer.send()
            test_buffer.send()
            tfmsa_logger("[7]upload image on Hbase - finish")
            return file_list
        except Exception as e:
            tfmsa_logger("Error : {0}".format(e))
            raise Exception(e)
        finally:
            conn.close()
            tfmsa_logger("Finish upload image...")