def post(self, request, baseid, table, nnid):
    """
    - desc : create a format data
    - desc : update data format information \n
    <textfield>
        <font size = 1>
            {
                "x_size": 100,
                "y_size": 100
            }
        </font>
    </textfield>
    ---
    parameters:
    - name: body
      paramType: body
      pytype: json
    """
    try:
        jd = jc.load_obj_json("{}")
        jd.dir = baseid
        jd.table = table
        jd.nn_id = nnid
        jd.datadesc = 'Y'
        jd.preprocess = '2'
        netconf.save_format(nnid, str(request.body, 'utf-8'))
        result = netconf.update_network(jd)
        return_data = {"status": "200", "result": result}
        return Response(json.dumps(return_data))
    except Exception as e:
        return_data = {"status": "400", "result": str(e)}
        return Response(json.dumps(return_data))
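# A minimal client sketch for the POST view above; it is not part of the original
# source. The full endpoint URL (which carries baseid, table and nnid) is not shown
# in this file, so it is passed in by the caller here.
import json
import requests

def example_create_format(url):
    # body mirrors the docstring example: x/y size of the format data
    body = {"x_size": 100, "y_size": 100}
    resp = requests.post(url, data=json.dumps(body))
    return resp.json()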
def put(self, request):
    """
    - Request json data example \n
    <textfield>
        <font size = 1>
            {
                "nn_id": "nn0000012",
                "category": "MES",
                "subcate": "M60",
                "name": "evaluation",
                "desc": "wdnn_protoType"
            }
        </font>
    </textfield>
    ---
    parameters:
    - name: body
      paramType: body
      pytype: json
    """
    try:
        jd = jc.load_obj_json(request.body)
        result = netconf.update_network(jd)
        return_data = {"status": "200", "result": result}
        return Response(json.dumps(return_data))
    except Exception as e:
        return_data = {"status": "400", "result": str(e)}
        return Response(json.dumps(return_data))
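# A hedged client sketch for the network-info PUT above; not part of the original
# source. The body copies the docstring example, and the endpoint URL is supplied
# by the caller because the route is not defined in this file.
import json
import requests

def example_update_network(url):
    body = {
        "nn_id": "nn0000012",
        "category": "MES",
        "subcate": "M60",
        "name": "evaluation",
        "desc": "wdnn_protoType"
    }
    resp = requests.put(url, data=json.dumps(body))
    return resp.json()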
def post(self, request, baseid, tb, args):
    """
    - desc : insert data into table
    - Request json data example \n
    <textfield>
        <font size = 1>
            <form action="/api/v1/type/dataframe/base/scm/table/tb_test_incomedata_wdnn3/data/CSV/"
                  method="post" enctype="multipart/form-data">
        </font>
    </textfield>
    ---
    parameters:
    - name: body
      paramType: body
      pytype: json
    """
    try:
        results_data = ""
        if (args == "JSON"):
            jd = jc.load_obj_json(str(request.body, 'utf-8'))
            conf_data = json.dumps(jd.data, cls=CusJsonEncoder)
            data.HbaseManager().post_josn_data(baseid, tb, conf_data)
        elif (args == "CSV"):
            logger.tfmsa_logger("start uploading csv on file system")
            if len(request.FILES.keys()) > 0:
                # loop over every uploaded file
                for key, request_single_file in request.FILES.items():
                    file = request_single_file
                    filename = file._name
                    # save the file on the local file system
                    directory = "{0}/{1}/{2}".format(settings.FILE_ROOT, baseid, tb)
                    if not os.path.exists(directory):
                        os.makedirs(directory)
                    fp = open("{0}/{1}/{2}/{3}".format(settings.FILE_ROOT, baseid, tb, filename), 'wb')
                    for chunk in file.chunks():
                        fp.write(chunk)
                    fp.close()
                    logger.tfmsa_logger("Before calling save_csv_to_df")
                    results_data = data.HbaseManager().save_csv_to_df(baseid, tb, filename)
        else:
            raise Exception("not supported type")

        return_data = {"status": "200", "result": results_data}
        return Response(json.dumps(return_data))
    except Exception as e:
        return_data = {"status": "404", "result": str(e)}
        return Response(json.dumps(return_data))
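# A hedged client sketch for the CSV branch above; not part of the original source.
# The path comes from the form action in the docstring, while the host below is a
# hypothetical placeholder.
import requests

def example_upload_csv(csv_path, host="http://localhost:8000"):
    url = host + "/api/v1/type/dataframe/base/scm/table/tb_test_incomedata_wdnn3/data/CSV/"
    # multipart/form-data upload; the view iterates request.FILES and saves each file
    with open(csv_path, "rb") as f:
        resp = requests.post(url, files={"file": f})
    return resp.json()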
def put(self, request, baseid, tb, args=None):
    """
    - desc : append data on the spark table
    - Request json data example \n
    <textfield>
        <font size = 1>
            <form action="/api/v1/type/dataframe/base/scm/table/tb_test_incomedata_wdnn3/data/CSV/"
                  method="post" enctype="multipart/form-data">
        </font>
    </textfield>
    ---
    parameters:
    - name: body
      paramType: body
    """
    try:
        if (args == "JSON"):
            jd = jc.load_obj_json(str(request.body, 'utf-8'))
            conf_data = json.dumps(jd.data, cls=CusJsonEncoder)
            data.HbaseManager().put_josn_data(baseid, tb, conf_data)
        elif (args == "CSV"):
            logger.tfmsa_logger("start uploading csv on file system")
            if 'file' in request.FILES:
                file = request.FILES['file']
                filename = file._name
                # save the uploaded file on the local file system
                directory = "{0}/{1}/{2}".format(settings.FILE_ROOT, baseid, tb)
                if not os.path.exists(directory):
                    os.makedirs(directory)
                fp = open("{0}/{1}/{2}/{3}".format(settings.FILE_ROOT, baseid, tb, filename), 'wb')
                for chunk in file.chunks():
                    fp.write(chunk)
                fp.close()
                # upload the data to hdfs
                cols = data.HbaseManager().save_csv_to_df(baseid, tb, filename)
                return HttpResponse('File Uploaded')
        else:
            raise Exception("not supported type")

        return_data = {"status": "200", "result": tb}
        return Response(json.dumps(return_data))
    except Exception as e:
        return_data = {"status": "404", "result": str(e)}
        return Response(json.dumps(return_data))
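# A hedged sketch of the JSON append branch above; not part of the original source.
# The view reads jd.data from the request body, so a top-level "data" field is
# assumed; the endpoint URL and the row content are hypothetical.
import json
import requests

def example_append_json(url, rows):
    # rows is a list of row objects, e.g. [{"age": 39, "education": "Bachelors"}]
    body = {"data": rows}
    resp = requests.put(url, data=json.dumps(body))
    return resp.json()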
def post(self, request, nnid):
    """
    - desc : insert cnn configuration data
    """
    try:
        jd = jc.load_obj_json("{}")
        jd.config = "Y"
        jd.nn_id = nnid
        netconf.update_network(jd)
        netconf.save_conf(nnid, request.body)
        netconf.set_on_net_conf(nnid)
        return_data = {"status": "200", "result": nnid}
        return Response(json.dumps(return_data))
    except Exception as e:
        return_data = {"status": "404", "result": str(e)}
        return Response(json.dumps(return_data))
def delete(self, request, baseid, tb, nnid):
    """
    delete distinct data
    :param request: request data
    :return: renamed table name
    """
    try:
        jd = jc.load_obj_json("{}")
        jd.nn_id = nnid
        jd.datasets = ""
        result = netconf.update_network(jd)
        return_data = {"status": "200", "result": result}
        return Response(json.dumps(return_data))
    except Exception as e:
        return_data = {"status": "400", "result": str(e)}
        return Response(json.dumps(return_data))
def network_update(self, nnid, model_dir):
    """
    Wide Deep Network update model directory
    :param nnid:
    :param model_dir: directory of the checkpoint of the wdnn model
    """
    try:
        jd = jc.load_obj_json("{}")
        jd.query = model_dir
        jd.nn_id = nnid
        netconf.update_network(jd)
        return_data = {"status": "200", "result": nnid}
    except Exception as e:
        return_data = {"status": "404", "result": str(e)}
        print("Error Message : {0}".format(e))
        raise Exception(e)
    finally:
        # note: returning inside finally suppresses the raise above,
        # so callers always receive return_data
        return return_data
def label_info_update(self, net_info, label):
    """
    append a label to the network's image label list (datasets)
    :param net_info:
    :param label:
    :return:
    """
    if (len(str(net_info['datasets'])) == 0):
        label_list = []
    else:
        label_list = json.loads(net_info['datasets'])

    if label not in label_list:
        label_list.append(label)

    jd = jc.load_obj_json("{}")
    jd.nn_id = net_info['nn_id']
    jd.datasets = json.dumps(label_list)
    result = netconf.update_network(jd)
def delete(self, request, baseid, table, label, nnid):
    """
    - desc : delete data format information
    """
    try:
        jd = jc.load_obj_json("{}")
        jd.dir = ""
        jd.table = ""
        jd.nn_id = nnid
        jd.datadesc = ""
        jd.preprocess = ""
        netconf.remove_format(nnid)
        result = netconf.update_network(jd)
        return_data = {"status": "200", "result": result}
        return Response(json.dumps(return_data))
    except Exception as e:
        return_data = {"status": "400", "result": str(e)}
        return Response(json.dumps(return_data))
def put(self, request, baseid, tb, nnid):
    """
    - desc : update data format information
    """
    try:
        jd = jc.load_obj_json("{}")
        jd.dir = baseid
        jd.table = tb
        jd.nn_id = nnid
        jd.datadesc = 'Y'
        netconf.remove_format(nnid)
        netconf.save_format(nnid, request.body)
        result = netconf.update_network(jd)
        return_data = {"status": "200", "result": result}
        return Response(json.dumps(return_data))
    except Exception as e:
        return_data = {"status": "400", "result": str(e)}
        return Response(json.dumps(return_data))
def delete_label_list(self, nn_id, label):
    """
    delete a label from the image label list
    :param nn_id:
    :param label:
    :return:
    """
    net_info = netconf.get_network_config(nn_id)
    if (len(str(net_info['datasets'])) == 0):
        label_list = []
    else:
        label_list = json.loads(net_info['datasets'])

    if label in label_list:
        label_list.remove(label)

    jd = jc.load_obj_json("{}")
    jd.nn_id = net_info['nn_id']
    jd.datasets = json.dumps(label_list)
    result = netconf.update_network(jd)
    return self.get_label_list(nn_id)
def load_format(net_id):
    """
    load json from path and return it as python object form
    :param net_id: neural network id
    :return:
    """
    directory = settings.HDFS_FORMAT_ROOT + "/" + net_id + "/"
    file_name = net_id + "_format.json"
    if not os.path.exists(directory):
        os.makedirs(directory)

    try:
        # the with-block guarantees the file handle is closed even on errors
        with open(directory + file_name, 'r') as model_conf:
            json_data = JsonDataConverter().load_obj_json(model_conf)
        return json_data
    except Exception as e:
        print(e)
        raise Exception(e)
def load_conf(net_id):
    """
    load json from path and return it as python object form
    :param net_id: neural network id
    :return:
    --------------------------------------------------------------
    16.10.22 jh100 bugfix : add "/" to make a folder per nnid
    """
    directory = settings.HDFS_CONF_ROOT + "/" + net_id + "/"  # bug fix by jh100 16.10.22
    file_name = net_id + "_conf.json"
    if not os.path.exists(directory):
        os.makedirs(directory)

    try:
        # the with-block guarantees the file handle is closed even on errors
        with open(directory + file_name, 'r') as model_conf:
            json_data = JsonDataConverter().load_obj_json(model_conf)
        return json_data
    except Exception as e:
        print(e)
        raise Exception(e)
def save_result(self, result_obj, train_result):
    """
    save result on db
    :param train_result:
    :return:
    """
    netconf.delete_train_acc(self.nn_id)
    for prd_id in train_result.keys():
        save_set = {}
        save_set['nn_id'] = self.nn_id
        save_set['label'] = prd_id
        for acc_result in train_result[prd_id].keys():
            save_set['guess'] = acc_result
            save_set['ratio'] = train_result[prd_id][acc_result]
            netconf.post_train_acc(save_set)

    jd = jc.load_obj_json("{}")
    jd.nn_id = self.nn_id
    jd.testpass = result_obj.prd_success
    jd.testfail = result_obj.prd_fail
    jd.acc = result_obj.prd_success / (result_obj.prd_success + result_obj.prd_fail)
    netconf.update_network(jd)
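# A small illustrative sketch (not from the original source) of the structure the
# loop above expects: train_result maps each true label to a dict of guessed labels
# and their ratios; the labels and numbers here are made up.
example_train_result = {
    "label_a": {"label_a": 0.9, "label_b": 0.1},
    "label_b": {"label_b": 0.8, "label_a": 0.2},
}
# each (nn_id, label, guess, ratio) record is stored via netconf.post_train_acc(save_set)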
def post(self, request, baseid, tb, nnid):
    """
    - desc : create a format data
    - desc : update data format information \n
    <textfield>
        <font size = 1>
            {
                "cross_cell": {
                    "col12": {"column2_0": "native_country", "column2_1": "occupation"},
                    "col1": {"column_1": "occupation", "column_0": "education"}
                },
                "cell_feature": {
                    "hours_per_week": "CONTINUOUS_COLUMNS",
                    "native_country": "CATEGORICAL",
                    "relationship": "CATEGORICAL",
                    "gender": "CATEGORICAL",
                    "age": "CONTINUOUS_COLUMNS",
                    "marital_status": "CATEGORICAL",
                    "race": "CATEGORICAL",
                    "capital_gain": "CONTINUOUS_COLUMNS",
                    "workclass": "CATEGORICAL",
                    "capital_loss": "CONTINUOUS_COLUMNS",
                    "education": "CATEGORICAL",
                    "education_num": "CONTINUOUS_COLUMNS",
                    "occupation": "CATEGORICAL"
                },
                "label": {
                    "income_bracket": "LABEL"
                }
            }
        </font>
    </textfield>
    ---
    parameters:
    - name: body
      paramType: body
      pytype: json
    """
    try:
        jd = jc.load_obj_json("{}")
        jd.dir = baseid
        jd.table = tb
        jd.nn_id = nnid
        jd.preprocess = '1'
        jd.datadesc = 'Y'

        coll_format_json = dict()
        cell_format = str(request.body, 'utf-8')

        # if the request body is empty ("{}"), fall back to the stored original format
        if (len(cell_format) == 2):
            json_string = netconf.load_ori_format(nnid)
            coll_format_json = json.loads(json_string)
            cell_format = json_string
        else:
            coll_format_json = json.loads(cell_format)

        netconf.save_format(nnid, cell_format)

        t_label = coll_format_json['label']
        label_column = list(t_label.keys())[0]

        # label column check : hbase query for the distinct label values
        label_list = data.DataMaster().get_distinct_label(baseid, tb, label_column)
        label_sorted_list = sorted(list(label_list))
        jd.datasets = label_sorted_list

        result = netconf.update_network(jd)
        netconf.set_on_data(nnid)
        return_data = {"status": "200", "result": result}
        return Response(json.dumps(return_data))
    except Exception as e:
        netconf.set_off_data(nnid)
        netconf.set_off_data_conf(nnid)
        return_data = {"status": "400", "result": str(e)}
        return Response(json.dumps(return_data))
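# A hedged client sketch for the wide & deep format endpoint above; not part of the
# original source. The body mirrors the docstring example (trimmed to a few columns),
# and the endpoint URL is supplied by the caller because the route is not shown here.
import json
import requests

def example_post_wdnn_format(url):
    body = {
        "cross_cell": {
            "col1": {"column_0": "education", "column_1": "occupation"}
        },
        "cell_feature": {
            "age": "CONTINUOUS_COLUMNS",
            "education": "CATEGORICAL",
            "occupation": "CATEGORICAL"
        },
        "label": {"income_bracket": "LABEL"}
    }
    resp = requests.post(url, data=json.dumps(body))
    return resp.json()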