def create_model():
    """Train a model from request query-string parameters and upload it.

    Query-string inputs: dataName, className, modelname, label (int column
    index), feature (comma-separated column indices), algorithm,
    algorithmId, params (JSON dict; "{}" means "use defaults").

    Returns:
        The response of ``API.upload_model_file`` on success, otherwise a
        dict with a single "error" key.
    """
    try:
        # --- read and parse the request parameters ---------------------
        data_name = request.args.get("dataName")
        class_name = request.args.get("className")
        model_name = request.args.get("modelname")
        col_label = int(request.args.get("label"))
        col_feature = [int(c) for c in request.args.get("feature").split(",")]
        athm = request.args.get("algorithm")
        athm_id = request.args.get("algorithmId")
        params = json.loads(request.args.get("params"))

        if params == {}:
            # Empty params means "use algorithm defaults".
            params = None
            test_size = 0.3
        else:
            test_size = float(params["testSize"])
            if test_size >= 1.0 or test_size <= 0.0:
                return {"error": "0.0 < test size < 1.0"}

        # --- fetch the dataset rows from the backend -------------------
        r = API.get_class(class_name)
        r_json = json.loads(str(r.data, "utf-8"))
        dataFrame = pd.DataFrame(r_json["results"])

        # Both known dataset classes carry 3 leading metadata columns.
        if class_name in ("DatasetSurveyBalance", "DatasetObesity"):
            dataFrame = dataFrame.iloc[:, 3:]
        if "yearOfBirth" in list(dataFrame.columns):
            del dataFrame["yearOfBirth"]

        # Human-readable names for the selected columns.
        col_feature_name = [str(c) for c in dataFrame.iloc[:, col_feature].columns]
        col_feature_name_str = ",".join(col_feature_name)
        col_label_name = str(dataFrame.columns[col_label])

        # --- split, train, evaluate ------------------------------------
        X_train, X_test, y_train, y_test = DATA.get_data_train(
            dataFrame, col_feature, col_label, test_size)
        model, evalution, error, params = get_athm(
            athm, X_train, X_test, y_train, y_test, params)
        if error != "":
            return {"error": error}

        # --- persist the model and upload it ---------------------------
        # Random 8-char prefix keeps concurrent uploads from colliding.
        # NOTE(review): the original computed "./upload_model/<name>" but
        # always dumped to the bare file name in the CWD; kept as-is since
        # the upload helper receives the bare name — confirm intent.
        randomId = str(uuid.uuid4())[:8]
        file_name_model = randomId + "_" + str(athm) + "_" + str(class_name) + ".pkl"
        joblib.dump(model, file_name_model)

        # Originally the description omitted the first feature name (a
        # side effect of the manual join); now it lists all of them.
        description = (
            "Model " + " use " + str(athm) + " algorithm " + ". "
            + "Dataset for model is " + str(data_name)
            + ", columns label is " + str(col_label_name)
            + " and columns feature is " + str(np.array(col_feature_name)))

        return API.upload_model_file(
            file_name_model,
            model_name,
            data_name,
            athm_id,
            params,
            col_label,
            col_label_name,
            col_feature,
            col_feature_name_str,
            description,
            evalution,
        )
    except Exception as exc:
        # Route-level error boundary: report the cause instead of
        # silently swallowing it (the original used a bare `except:`).
        print("[error] (createModel function app.py):", exc)
        return {"error": "can't create model"}
def create_model_system_mx():
    """Train and upload the built-in "MODEL SYSTEM" SVM for DatasetMX.

    Same pipeline as ``create_model`` but every input (dataset, columns,
    algorithm and hyper-parameters) is hard-coded instead of being read
    from the request.

    Returns:
        The response of ``API.upload_model_file_system_mx`` on success,
        otherwise a dict with a single "error" key.
    """
    try:
        # --- fixed configuration for the system model ------------------
        data_name = "DatasetMX"
        class_name = "DatasetObesity"
        model_name = "MODEL SYSTEM"
        col_label = 7
        # Every column except the label column (index 7).
        col_feature = [0, 1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 12, 13, 14, 15, 16]
        athm = "SupportVectorMachine"
        athm_id = "ccn7ofeacm"
        params = {
            "C": "100000",
            "degree": "3",
            "gamma": "0.3",
            "kernel": "linear",
            "testSize": "0.3",
        }
        # params is a non-empty constant, so the request-path fallback
        # (`params == {}`) from create_model() is dead code here.
        test_size = float(params["testSize"])
        if test_size >= 1.0 or test_size <= 0.0:
            return {"error": "0.0 < test size < 1.0"}

        # --- fetch the dataset rows from the backend -------------------
        r = API.get_class(class_name)
        r_json = json.loads(str(r.data, "utf-8"))
        dataFrame = pd.DataFrame(r_json["results"])

        # Both known dataset classes carry 3 leading metadata columns.
        if class_name in ("DatasetSurveyBalance", "DatasetObesity"):
            dataFrame = dataFrame.iloc[:, 3:]
        if "yearOfBirth" in list(dataFrame.columns):
            del dataFrame["yearOfBirth"]

        # Human-readable names for the selected columns.
        col_feature_name = [str(c) for c in dataFrame.iloc[:, col_feature].columns]
        col_feature_name_str = ",".join(col_feature_name)
        col_label_name = str(dataFrame.columns[col_label])

        # --- split, train, evaluate ------------------------------------
        X_train, X_test, y_train, y_test = DATA.get_data_train(
            dataFrame, col_feature, col_label, test_size)
        model, evalution, error, params = get_athm(
            athm, X_train, X_test, y_train, y_test, params)
        if error != "":
            return {"error": error}

        # --- persist the model and upload it ---------------------------
        # Random 8-char prefix keeps concurrent uploads from colliding.
        # NOTE(review): as in create_model(), the model is dumped to the
        # bare file name in the CWD, not ./upload_model — confirm intent.
        randomId = str(uuid.uuid4())[:8]
        file_name_model = randomId + "_" + str(athm) + "_" + str(class_name) + ".pkl"
        joblib.dump(model, file_name_model)

        description = (
            "Model " + " use " + str(athm) + " algorithm " + ". "
            + "Dataset for model is " + str(data_name)
            + ", columns label is " + str(col_label_name)
            + " and columns feature is " + str(np.array(col_feature_name)))

        return API.upload_model_file_system_mx(
            file_name_model,
            model_name,
            data_name,
            athm_id,
            params,
            col_label,
            col_label_name,
            col_feature,
            col_feature_name_str,
            description,
            evalution,
        )
    except Exception as exc:
        # Fixed the copy-pasted log message that blamed createModel,
        # and report the actual cause instead of swallowing it.
        print("[error] (create_model_system_mx function app.py):", exc)
        return {"error": "can't create model"}