# Scatter-plot the 2-D embedding, one colour per class label.
# NOTE(review): `y` is assumed to be a numpy array so `y == t` yields a
# boolean mask usable for fancy indexing — confirm against the caller.
for t in set(y):  # the enumerate index was unused in the original
    idx = y == t
    plt.scatter(x_embedded[idx, 0], x_embedded[idx, 1], label=t)
plt.title(title, fontsize=16)
plt.legend()
if save is not None:
    plt.savefig(save)

# Read the number of identity classes from the YAML config.
with open(CONFIG_FILE, 'r') as stream:
    # safe_load: yaml.load without an explicit Loader is deprecated and can
    # execute arbitrary code on untrusted input.
    config = yaml.safe_load(stream)
nclasses = config['nclasses']

# Build the PCB re-ID model, restore trained weights, switch to inference.
model = PCB(nclasses)
model.load_state_dict(torch.load(WEIGHT_FILE))
model = model.eval().cuda()

features = []
labels = []

# Walk per-class sample folders; skip the aggregate 'ALL' folder and any
# non-directory entries. (Loop body continues beyond this chunk.)
fol_list = os.listdir(SAMPLE_FOLDER)
for folder in fol_list:
    if folder == 'ALL':
        continue
    fol = os.path.join(SAMPLE_FOLDER, folder)
    if not os.path.isdir(fol):
        continue
    print('Folder : ' + folder)
    cls_name = folder
dataset_filename = []
# Parse (camera id, person label, file name) for every image path of each
# split listed in dataset_list. Indexing is kept because dataset_list and
# dataset_path are parallel sequences of possibly different lengths.
for i in range(len(dataset_list)):
    cam, label, filename = get_id(dataset_path[i])
    dataset_cam.append(cam)
    dataset_label.append(label)
    dataset_filename.append(filename)

######################################################################
# Load the trained model for the collected data.
print('-------test-----------')
# Number of identities = number of per-person folders in train_all.
class_num = len(os.listdir(os.path.join(data_dir, 'train_all')))
model = PCB(class_num)
if 'st' in opt.which_epoch:
    model = load_whole_network(model, name, opt.which_epoch + '_' + str(opt.net_loss_model))
else:
    model = load_whole_network(model, name, opt.which_epoch)
model = model.eval()
if use_gpu:
    model = model.cuda()

# Extract one feature matrix per split; no_grad avoids building the
# autograd graph during inference.
with torch.no_grad():
    dataset_feature = [extract_feature(model, dataloaders[split])
                       for split in dataset_list]

# Pack gallery (split 0) and query (split 1) results for the MATLAB-style
# evaluation script.
result = {'gallery_f': dataset_feature[0].numpy(),
          'gallery_label': dataset_label[0],
          'gallery_name': dataset_filename[0],
          'gallery_cam': dataset_cam[0],
          'query_f': dataset_feature[1].numpy(),
          'query_label': dataset_label[1],
          'query_name': dataset_filename[1],
          'query_cam': dataset_cam[1]}
scipy.io.savemat('pytorch_result.mat', result)
alarm_cooldown = 15  # seconds between repeated alarms

# Object detector.
print('Load Detection Model.')
print('Use YOLO model for Detection.')
yolo = YOLO()

# Human feature extractor (PCB re-ID backbone).
print('Load Feature Model.')
print('Use PCB model for features extractor.')
with open(CONFIG_FILE, 'r') as stream:
    # safe_load: yaml.load without an explicit Loader is deprecated and can
    # execute arbitrary code on untrusted input.
    config = yaml.safe_load(stream)
nclasses = config['nclasses']
encoder = PCB(nclasses)
encoder.load_state_dict(torch.load(WEIGHT_FILE))
encoder = encoder.eval().cuda()

print('Load Recognition Model.')
if args.SVM:
    print('Use SVM for matching.')
    # SVM matcher. Use context managers so the pickle files are closed
    # (the original pickle.load(open(...)) leaked the handles).
    with open(RECOG_MODEL, 'rb') as f:
        recog = pickle.load(f)  # SVM match.
    with open(RECOG_SAMPLE, 'rb') as f:
        _, _, cls_names = pickle.load(f)
else:
    print('Use Cosine for matching.')
    with open(RECOG_SAMPLE, 'rb') as f:
        ft_set, lb_set, cls_names = pickle.load(f)
    # ft_set, lb_set = reduceFeaturesSample(np.array(ft_set), lb_set, 20)

# Parse the target frame size from a "WxH" string (e.g. "640x480").
spl = args.resize.split('x')
w = int(spl[0])
h = int(spl[1])