def run(self, c=5, param=100, n_fold_cv=10, kernel='RBF'):
    '''
    Run tests with given parameters on currently loaded data

    @param c: SVM classifier parameter (soft-margin penalty C)
    @param param: kernel parameter (passed to the kernel constructor)
    @param n_fold_cv: n-fold cross-validation parameter
    @param kernel: used kernel - possibilities in src/kernels.py
    @return: classification results dict with averaged 'true_positive',
             'true_negative', 'false_positive', 'false_negative' counts
    '''
    data = Data(dbfile=None, n_fold_cv=n_fold_cv)
    k = str2kernel[kernel](param=param)
    self.svm = SVM(kernel=k, C=c)

    res = {
        'true_positive': 0.0,
        'true_negative': 0.0,
        'false_positive': 0.0,
        'false_negative': 0.0,
    }

    # 'range' instead of 'xrange': identical iteration semantics, but also
    # works under Python 3 (xrange does not exist there).
    for i in range(n_fold_cv):
        X1, Y1, X2, Y2 = data.get(i)
        self._logger.info('Training SVM... (i={0})'.format(str(i)))
        self.svm.train(X1, Y1)
        if self.svm.model_exists:
            # predict on the held-out fold
            Y_predict = self.svm.predict(X2)
            self._logger.info('using {0} of {1} support vectors'.format(
                self.svm.lm_count, self.svm.all_lm_count))

            # Calculate tp, fp, tn, fn.
            # Floor division ('//') is required: '/' yields a float under
            # Python 3 and floats cannot be used as slice indices.
            # NOTE(review): this assumes the first half of the test fold is
            # the positive class and the second half the negative class --
            # a layout guarantee of Data.get(); confirm against that code.
            test_len = len(Y_predict)
            half = test_len // 2
            Y_predict_P = Y_predict[:half]
            Y_predict_N = Y_predict[half:]
            Y2_P = Y2[:half]
            Y2_N = Y2[half:]
            tp = np.sum(Y_predict_P == Y2_P)
            fn = np.sum(Y_predict_P != Y2_P)
            tn = np.sum(Y_predict_N == Y2_N)
            fp = np.sum(Y_predict_N != Y2_N)

            # accumulate the per-fold average directly
            res['true_positive'] += tp / float(n_fold_cv)
            res['false_positive'] += fp / float(n_fold_cv)
            res['true_negative'] += tn / float(n_fold_cv)
            res['false_negative'] += fn / float(n_fold_cv)

            # this iteration result
            self._logger.info('tp: {0}, fp: {1}, tn: {2}, fn :{3}'.format(
                str(tp), str(fp), str(tn), str(fn)))

    # print and return results
    self._calculate_results(res)
    return res
test_model_path = r'E:\PycharmProjects\crowdcount_8\best_models\model_20190318_DPLNet\trancos\pool_16_3.83\saved_models_trancos\trancos_18.h5' # test_model_path = './saved_models_trancos/trancos_9.h5' original_dataset_name = 'trancos' test_data_config = dict() test_data_config['tran1Resize1_test'] = test_flag.copy() elif is_ucf_qnrf: test_model_path = r'E:\PycharmProjects\crowdcount_8\best_models\model_20190318_DPLNet\ucf_qnrf\pool_4_112.33_205.77\saved_models_ucf_qnrf\ucf_qnrf_26_21617.h5' original_dataset_name = 'ucf_qnrf' test_data_config = dict() test_data_config['ucfQnrf1Resize1024_test'] = test_flag.copy() # load data all_data = Data(test_data_config) all_data = all_data.get() net = CrowdCount() network.load_net(test_model_path, net) net.cuda() net.eval() # log_info = [] save_path = './test_output' make_path(save_path) make_path(os.path.join(save_path, 'ground_truth_map')) make_path(os.path.join(save_path, 'estimate_map')) make_path(os.path.join(save_path, 'estimate_raw_map'))
# NOTE(review): this fragment is cut mid-statement -- the left-hand side of the
# first assignment (presumably `blob[`) lies outside the visible chunk.
'roi'] = '/media/dell/OS/data/mall/formatted_trainval_15_4/mall_patches_1_resize_05_rgb/val_roi'
test_path.append(blob)
data_path['test'] = test_path

# Seed every RNG in play (numpy, torch CPU, all torch GPUs) so data
# shuffling and any stochastic ops are reproducible.
if random_seed is not None:
    np.random.seed(random_seed)
    torch.manual_seed(random_seed)
    torch.cuda.manual_seed_all(random_seed)

# load data
# is_label=False: density-map labels are not loaded -- this script only
# inspects the images/ground-truth statistics, not training targets.
data = Data(data_path, shuffle=True, random_seed=random_seed, pre_load=is_pre_load_data, is_label=False)
data = data.get()

# Ensure the output directory for the workbook exists.
save_path = './info'
if not os.path.exists(save_path):
    os.mkdir(save_path)

# Create an Excel workbook with one header row; subsequent rows
# (tracked via row_count) will hold per-image density statistics.
# NOTE(review): `excel` is presumably the openpyxl module imported under an
# alias elsewhere in the file -- confirm.
excel_book = excel.Workbook()
excel_sheet = excel_book.active
excel_sheet.title = 'Image'
excel_sheet['A1'] = 'filename'
excel_sheet['B1'] = 'gt max density'
excel_sheet['C1'] = 'gt mean density'
row_count = 1

train_data = data['train']