# --- Experiment configuration --------------------------------------------
# NOTE(review): training_file originally pointed at the *_test.dat split —
# byte-identical to testing_file — which is almost certainly a copy-paste
# slip in a train/test/val triple. Switched to the *_train.dat split;
# confirm the file name against the dataset folder.
training_file = "C:\\Hyperbox-based-ML\\Dataset\\train_test\\training_testing_data\\balance_scale_dps_train.dat"
testing_file = "C:\\Hyperbox-based-ML\\Dataset\\train_test\\training_testing_data\\balance_scale_dps_test.dat"
validation_file = "C:\\Hyperbox-based-ML\\Dataset\\train_test\\training_testing_data\\balance_scale_dps_val.dat"

gamma = 1                  # membership-function steepness parameter
theta = 0.5                # maximum hyperbox size
isNorm = False             # normalise the data before training?
norm_range = [0, 1]        # normalisation range (only used when isNorm is True)
n_estimators = 5           # number of base learners in the ensemble
bootstrap_sample = False   # draw a bootstrap sample per base learner?
class_sample_rate = 1      # fraction of samples retained per class
n_jobs = 1                 # number of parallel workers
random_state = None        # seed for reproducibility (None = nondeterministic)
selected_alg = 'iol-gfmm'  # identifier of the base-learner algorithm
# (removed a duplicate `gamma = 1` that redundantly re-assigned the value above)

# Read training file (ratio 1 -> whole file returned as the training part).
Xtr, _, patClassIdTr, _ = loadDataset(training_file, 1, False)
# Read testing file (ratio 0 -> whole file returned as the testing part).
_, Xtest, _, patClassIdTest = loadDataset(testing_file, 0, False)
# Read validation file.
Xval, _, patClassIdVal, _ = loadDataset(validation_file, 1, False)

# Build and train the decision-level ensemble. `random_state` is now
# forwarded instead of a hard-coded None, so the seed configured above
# actually takes effect.
classifier = DecisionLevelCombination(n_estimators=n_estimators,
                                      theta=theta,
                                      bootstrap_sample=bootstrap_sample,
                                      class_sample_rate=class_sample_rate,
                                      n_jobs=n_jobs,
                                      random_state=random_state,
                                      gamma=gamma)
# Point data: lower and upper hyperbox bounds are both Xtr.
classifier.fit(Xtr, Xtr, patClassIdTr, selected_alg)
'heart_dps', 'page_blocks_dps', 'landsat_satellite_dps', 'waveform_dps', 'yeast_dps' ] fold_index = np.array([1, 2, 3, 4]) for dt in range(len(dataset_names)): #try: print('Current dataset: ', dataset_names[dt]) fold1File = dataset_path + dataset_names[dt] + '_1.dat' fold2File = dataset_path + dataset_names[dt] + '_2.dat' fold3File = dataset_path + dataset_names[dt] + '_3.dat' fold4File = dataset_path + dataset_names[dt] + '_4.dat' # Read data file fold1Data, _, fold1Label, _ = loadDataset(fold1File, 1, False) fold2Data, _, fold2Label, _ = loadDataset(fold2File, 1, False) fold3Data, _, fold3Label, _ = loadDataset(fold3File, 1, False) fold4Data, _, fold4Label, _ = loadDataset(fold4File, 1, False) numhyperbox_online_gfmm_save = np.array([]) training_time_online_gfmm_save = np.array([]) testing_error_online_gfmm_save = np.array([]) optimization_value_online_gfmm_save = np.array([], dtype=np.str) optimization_time_online_gfmm_save = np.array([]) numhyperbox_online_gfmm_manhattan_save = np.array([]) training_time_online_gfmm_manhattan_save = np.array([]) testing_error_online_gfmm_manhattan_save = np.array([]) optimization_value_online_gfmm_manhattan_save = np.array([], dtype=np.str)
# Continuation of a CLI-argument parse: the matching `if` for this `else`
# (checking whether sys.argv[9] was supplied) is above this chunk.
else:
    # Optional 9th argument: normalisation flag, parsed from its string form.
    isNorm = string_to_boolean(sys.argv[9])
if len(sys.argv) < 11:
    # Optional 10th argument absent: fall back to the default range.
    norm_range = [0, 1]
else:
    # Safely evaluate a literal like "[0, 1]" (no arbitrary code execution).
    norm_range = ast.literal_eval(sys.argv[10])
# print('isDraw = ', isDraw, ' teta = ', teta, ' teta_min = ', teta_min, ' gamma = ', gamma, ' oper = ', oper, ' isNorm = ', isNorm, ' norm_range = ', norm_range)

# High-resolution timer start, used elsewhere to measure training time.
start_t = time.perf_counter()

# Mode '1': separate training and testing files are given as argv[2]/argv[3].
if sys.argv[1] == '1':
    training_file = sys.argv[2]
    testing_file = sys.argv[3]
    # Read training file (ratio 1 -> whole file returned as training part).
    Xtr, X_tmp, patClassIdTr, pat_tmp = loadDataset(
        training_file, 1, False)
    # Read testing file (ratio 0 -> whole file returned as testing part).
    X_tmp, Xtest, pat_tmp, patClassIdTest = loadDataset(
        testing_file, 0, False)
else:
    # Otherwise: one file plus a train fraction; loadDataset does the split.
    dataset_file = sys.argv[2]
    percent_Training = float(sys.argv[3])
    Xtr, Xtest, patClassIdTr, patClassIdTest = loadDataset(
        dataset_file, percent_Training, False)

# gamma/teta/teta_min/isDraw/oper are parsed from argv above this chunk.
classifier = OnlineGFMM(gamma, teta, teta_min,
                        isDraw, oper, isNorm, norm_range)
# Point test data: identical lower and upper hyperbox bounds.
# Copies keep Xtest itself untouched by any later in-place normalisation.
Xtest_lo = Xtest.copy()
Xtest_up = Xtest.copy()
# --- Ensemble (forest-of-hyperbox) experiment configuration ---------------
n_estimators = 100         # number of trees / base learners
bootstrap_sample = True    # draw a bootstrap sample per base learner
bootstrap_feature = False  # do not subsample features via bootstrap
class_sample_rate = 0.5    # fraction of samples retained per class
n_jobs = 1                 # number of parallel workers
random_state = None        # seed for reproducibility (None = nondeterministic)
K_threshold = 5  # K-nearest neighbor
max_depth = 10             # maximum tree depth

for dt in range(len(dataset_names)):
    #try:
        print('Current dataset: ', dataset_names[dt])
        dataFile = dataset_path + dataset_names[dt] + '.dat'

        # Read data file (ratio 1 -> whole file returned as the training part).
        foldData, _, foldLabel, _ = loadDataset(dataFile, 1, False)

        # Heuristic feature-subsample size: 2 * sqrt(#features), floored.
        max_features = int(2 * math.sqrt(foldData.shape[1]))

        # Per-dataset F1-score accumulators (filled later, outside this
        # chunk) for the EFMNN, KNEFMNN and RFMNN model variants.
        f1_weighted_efmnn_save = []
        f1_macro_efmnn_save = []
        f1_micro_efmnn_save = []
        f1_weighted_knefmnn_save = []
        f1_macro_knefmnn_save = []
        f1_micro_knefmnn_save = []
        f1_weighted_rfmnn_save = []
        f1_macro_rfmnn_save = []
        f1_micro_rfmnn_save = []