Example #1
    def _load_images_salmaps(self, datapath=None, imgext="bmp"):
        """ load preliminary data (images, segmentations, and salience maps) """

        if datapath is None:
            if sys.platform == "darwin":
                homedir = "/Users/rzhao/"
            else:
                homedir = "/home/rzhao/"

            datapath = homedir + "Dropbox/ongoing/reid_jrnl/salgt/data_viper/"

        filepath = datapath + "query/"
        imgfiles = sorted(glob(filepath + "*." + imgext))
        imgs = [imread(im) for im in imgfiles]

        # labels.pkl stores the segmentation masks and the salience masks together
        salfilepath = datapath + "labels.pkl"
        data = loadfile(salfilepath)
        segmsks, salmsks = data

        # resize each image to the resolution of the segmentation masks
        imgs = [imresize(im, size=segmsks[0].shape, interp="bicubic") for im in imgs]

        # imgs_norm = [imnormalize(im) for im in imgs]
        # return imgs, segmsks, salmsks
        self.imgs = imgs
        self.segmsks = segmsks
        self.salmsks = salmsks
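The imresize used here could be the old scipy.misc.imresize (since removed from SciPy) or a project helper with the same interface; its implementation is not shown in these examples. A minimal PIL-based stand-in, assuming uint8 image arrays and a (height, width) size tuple, might look like the sketch below; it is not the project's own helper.

import numpy as np
from PIL import Image

def imresize(im, size, interp="bicubic"):
    # hypothetical replacement for the resize helper assumed by the snippets above
    resample = {"nearest": Image.NEAREST,
                "bilinear": Image.BILINEAR,
                "bicubic": Image.BICUBIC}[interp]
    # PIL expects (width, height), while the callers pass (height, width)
    return np.asarray(Image.fromarray(im).resize(tuple(size)[::-1], resample))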
Example #2
import csv
import gzip
import cPickle

from scipy.io import loadmat
# imread is assumed to come from scipy.misc or skimage.io in this project
from scipy.misc import imread


def loadfile(file=None):
	''' Load data from a file, choosing the loader by file extension '''

	if file is None:
		raise NameError('File not specified!')

	print 'Loading file at {}'.format(file)

	if file.endswith('.gz'):
		# gzipped pickle; the with-block ensures the handle is closed
		with gzip.open(file, 'rb') as f:
			data = cPickle.load(f)

	elif file.endswith('.pkl'):
		with open(file, 'rb') as f:
			data = cPickle.load(f)

	elif file.endswith('.csv'):
		with open(file, 'rb') as f:
			reader = csv.reader(f)
			data = [row for row in reader]

	elif file.endswith('.mat'):
		data = loadmat(file)

	elif file.endswith(('.jpg', '.png', '.bmp')):
		data = imread(file)

	else:
		raise NameError('File format not recognized')

	return data
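Since loadfile dispatches purely on the file extension, callers only pass a path. A short usage sketch with hypothetical file names:

# hypothetical paths; the loader is picked from the extension
segmsks, salmsks = loadfile('data_viper/labels.pkl')  # pickled (segmentation, salience) pair
rows = loadfile('scores.csv')                         # list of CSV rows
feats = loadfile('features.mat')                      # dict returned by scipy.io.loadmat
img = loadfile('query/0001.bmp')                      # image array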
Example #3
    def _load_images_salmaps(self, datapath=None, imgext="bmp"):
        """ load preliminary data (images, segmentations, and salience maps) """

        if datapath is None:
            if sys.platform == "darwin":
                homedir = "/Users/rzhao/"
            else:
                homedir = "/home/rzhao/"

            datapath = homedir + "Dropbox/ongoing/reid_jrnl/salgt/data_viper/"

        filepath = datapath + "query/"
        imgfiles = sorted(glob(filepath + "*." + imgext))
        self.nPerson = len(imgfiles)
        imgs = [imread(im) for im in imgfiles]

        salfilepath = datapath + "labels.pkl"
        data = loadfile(salfilepath)
        segmsks, salmsks = data

        imgs = [imresize(im, size=(self.imH, self.imW)) for im in imgs]
        segmsks = [imresize(im, size=(self.imH, self.imW)) for im in segmsks]
        # salience masks are stored as 8-bit images, so rescale them to [0, 1]
        salmsks = [imresize(im, size=(self.imH, self.imW)) / 255.0 for im in salmsks]

        self.imgs = np.asarray(imgs)
        self.segmsks = np.asarray(segmsks)
        self.salmsks = np.asarray(salmsks)

        # load dense colorsift features
        labeled_imidx_path = "../data_viper/labeled_imidx.mat"
        tmp = loadfile(labeled_imidx_path)
        labeled_imidx = tmp["labeled_imidx"].flatten()
        feat_path = homedir + "Dropbox/ongoing/reid_jrnl/salgt/data_viper/features.mat"
        tmp = loadfile(feat_path)
        self.feats = tmp["features"].astype(np.float)[labeled_imidx]
Example #4
    def convert2pkl(self, pklfile):

        if not os.path.isfile(pklfile):
            dataset_dir = "/home/rzhao/Projects/deep-saliency/data/"
            thus10000 = dataset_dir + "THUS10000_Imgs_GT/Imgs"
            msra5000 = dataset_dir + "MSRA5000/Imgs"
            msra5000_test = dataset_dir + "MSRA5000/MSRA-B-test"
            img_ext = ".jpg"
            msk_ext = ".png"
            augX = 10

            trn_img = []
            trn_msk = []
            for single_image in sorted(glob(thus10000 + "/*" + img_ext)):
                rsb = glob(msra5000_test + "/*_" + ntpath.basename(single_image)[:-4] + "_smap" + msk_ext)
                if len(rsb) == 0:
                    trn_img.append(single_image)
                    trn_msk.append(single_image[:-4] + msk_ext)

            tst_img = []
            tst_msk = []
            for single_image in sorted(glob(msra5000_test + "/*" + msk_ext)):
                tst_img.append(msra5000 + "/" + ntpath.basename(single_image)[: -len("_smap.png")] + img_ext)
                tst_msk.append(msra5000 + "/" + ntpath.basename(single_image)[: -len("_smap.png")] + msk_ext)

            # read images
            print "reading ..."
            train_img = [imread(fname) for fname in trn_img]
            train_msk = [imread(fname) for fname in trn_msk]
            test_img = [imread(fname) for fname in tst_img]
            test_msk = [imread(fname) for fname in tst_msk]

            # preprocessing
            print "preprocessing ..."
            train_x, train_y = self.preprocessing(train_img, train_msk, augx=augX)
            test_x, test_y = self.preprocessing(test_img, test_msk, augx=0)

            # shuffle training data (reseeding before each shuffle keeps images and
            # masks aligned; see the sketch after this example for an alternative)
            print "shuffle data ..."
            np.random.seed(123)
            np.random.shuffle(train_x)
            np.random.seed(123)
            np.random.shuffle(train_y)

            # flatten and dtype conversion
            print "flatten data ..."
            train_x = np.asarray(train_x, dtype=np.float32)
            train_y = np.asarray(train_y, dtype=np.float32)
            test_x = np.asarray(test_x, dtype=np.float32)
            test_y = np.asarray(test_y, dtype=np.float32)
            train_x = imflatten(train_x)
            train_y = imflatten(train_y)
            test_x = imflatten(test_x)
            test_y = imflatten(test_y)

            # normalize data to have zero mean and unit std
            train_x = normalize01(train_x)
            test_x = normalize01(test_x)

            # split into train and valid
            nValid = np.int(len(train_img) * 0.1) * augX
            train = [train_x[:-nValid], train_y[:-nValid]]
            valid = [train_x[-nValid:], train_y[-nValid:]]
            # train = [train_x[0:7000], train_y[0:7000]]
            # valid = [train_x[7000:], train_y[7000:]]
            test = [test_x, test_y]
            data = [train, valid, test]
            self.save(data, pklfile)

        else:
            print "History pickle file exists!"
Example #5
def print_labeling(data_path=None):

	# if data_path is None:
	# 	newDialog = QDialog()
	# 	fpath = QFileDialog.getExistingDirectory(newDialog, "Select data directory", '../')
				
	# 	if len(fpath) == 0:
	# 		QMessageBox.warning(None, 'Warning!', 'Nothing loaded.')
	# 		return

	# 	data_path = str(fpath) + '/' # loaded path

	src_file = data_path + 'parts.pkl'
	usr_file = sorted(glob(data_path + '#*.pkl'))
	
	src = DataMan(src_file)
	srcdata = src.load()
	
	usrhits = []
	for f in usr_file:
		tmp = DataMan(f)
		tmpdata = tmp.load()
		usrhits.append(tmpdata['scores'])

	save_path = data_path + 'result/'
	qfiles = sorted(glob(data_path + 'query/*'))
	im = imread(qfiles[0])
	imsz = im.shape[0:2]
	msk0 = np.zeros(srcdata['labels'][0].shape)

	segmsks = []
	salmsks = []
	for i in range(len(qfiles)):
		im = imread(qfiles[i])
		seg = msk0.copy()
		sal = msk0.copy()
		for k in usrhits[0][i].keys():
			idx = srcdata['labels'][i] == k
			nhits = np.asarray([nhit[i][k] for nhit in usrhits])
			sal[idx] = hit2score(nhits)
			seg[idx] = k
		salmsks.append(sal)
		segmsks.append(seg)

	# normalize all msk 
	# scaler = MinMaxScaler()
	# salscores = scaler.fit_transform(np.asarray(salmsks))

	# save label and salience score map
	savefile([segmsks, salmsks], data_path + 'labels.pkl')

	for i in range(len(qfiles)):
		im = imread(qfiles[i])
		msk = salmsks[i]*255.
		im_rs = imresize(im, msk0.shape, interp='bicubic')
		pl.figure(1)
		pl.clf()
		pl.subplot(1, 2, 1)
		pl.imshow(im_rs)
		pl.subplot(1, 2, 2)
		pl.imshow(color.rgb2grey(im_rs), cmap='gray', alpha=0.6)
		pl.imshow(msk, cmap='hot', vmin=0, vmax=255, alpha=0.6)
		pl.savefig(save_path+'{0:03d}.jpg'.format(i))
		print save_path +'{0:03d}.jpg'.format(i) + ' saved!'

	visualize_imfolder(save_path)