def showG(A, B, scalerA, scalerB):
    deb.prints(A.shape)
    assert A.shape == B.shape

    def G(fn_generate, X):
        r = np.array([fn_generate([X[i:i + 1]]) for i in range(X.shape[0])])
        return r.swapaxes(0, 1)[:, :, 0]

    rA = G(cycleA_generate, A)  # cycleA_generate / cycleB_generate are the generator functions defined elsewhere
    rB = G(cycleB_generate, B)
    arr = np.concatenate([A, B, rA[0], rB[0], rA[1], rB[1]])
    print(arr.shape)
    #stats_print(A)
    #stats_print(B)
    #stats_print(rA[0])

    result_folder = "results/"
    cv2.imwrite(result_folder + "A.png",
                unnormalize(A[0], scalerA)[:, :, 0:3] / 4)
    cv2.imwrite(result_folder + "B.png",
                unnormalize(B[0], scalerB)[:, :, 0:3] / 4)
    cv2.imwrite(result_folder + "rA0.png",
                unnormalize(rA[0][0], scalerB)[:, :, 0:3] / 4)
    cv2.imwrite(result_folder + "rB0.png",
                unnormalize(rB[0][0], scalerA)[:, :, 0:3] / 4)
    cv2.imwrite(result_folder + "rA.png",
                unnormalize(rA[1][0], scalerA)[:, :, 0:3] / 4)
    cv2.imwrite(result_folder + "rB.png",
                unnormalize(rB[1][0], scalerB)[:, :, 0:3] / 4)

    np.save("show.npy", arr)
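
Neither deb.prints nor unnormalize is defined in these snippets. Two minimal, hypothetical sketches of what they might look like follow; the project's real helpers may differ.

# Hypothetical sketch of the deb.prints debug helper: print a value tagged with the calling function.
import inspect

def prints(value, fname=None):
    caller = fname if fname is not None else inspect.stack()[1].function
    print("[{}]".format(caller), value)

# Hypothetical sketch of unnormalize, assuming scalerA/scalerB are sklearn scalers
# (e.g. MinMaxScaler) fitted on data reshaped to (n_pixels, n_bands).
import numpy as np

def unnormalize(im, scaler):
    h, w, c = im.shape
    flat = im.reshape(-1, c)                   # sklearn scalers expect 2-D input
    restored = scaler.inverse_transform(flat)  # undo the per-band normalization
    return restored.reshape(h, w, c)
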
Example #2
    def placeholder_init(self, timesteps, shape, channels, n_classes):
        data = tf.placeholder(tf.float32,
                              [None] + [timesteps] + shape + [channels],
                              name='data')
        target = tf.placeholder(tf.float32, [None] + shape[0::], name='target')
        if self.debug: deb.prints(target.get_shape())
        return data, target
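
A minimal TF 1.x-style sketch of feeding these placeholders; net is a hypothetical instance of the class this method belongs to, and the shapes are illustrative.

import numpy as np
import tensorflow as tf  # TF 1.x, matching the placeholder API above

data_ph, target_ph = net.placeholder_init(timesteps=7, shape=[32, 32], channels=2, n_classes=6)
batch_ims = np.zeros((4, 7, 32, 32, 2), dtype=np.float32)  # (batch, t, h, w, bands)
batch_target = np.zeros((4, 32, 32), dtype=np.float32)     # matches [None] + shape
with tf.Session() as sess:
    print(sess.run(tf.shape(data_ph), {data_ph: batch_ims}))        # -> [ 4  7 32 32  2]
    print(sess.run(tf.shape(target_ph), {target_ph: batch_target}))  # -> [ 4 32 32]
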
    def addDataSource(self, dataSource):
        deb.prints(dataSource.name)
        self.dataSource = dataSource
        if self.dataSource.name == 'SARSource':

            # 'var' would select the full 23-date list below; 'fixed' selects the 12-date subset
            # mode = 'var'
            mode = 'fixed'
            if mode == 'var':
                self.im_list = [
                    '20181110_S1', '20181216_S1', '20190121_S1', '20190214_S1',
                    '20190322_S1', '20190415_S1', '20190521_S1', '20190614_S1',
                    '20190720_S1', '20190813_S1', '20190918_S1', '20191012_S1',
                    '20191117_S1', '20191223_S1', '20200116_S1', '20200221_S1',
                    '20200316_S1', '20200421_S1', '20200515_S1', '20200620_S1',
                    '20200714_S1', '20200819_S1', '20200912_S1'
                ]
            else:
                # dec
                self.im_list = [
                    '20190121_S1', '20190214_S1', '20190322_S1', '20190415_S1',
                    '20190521_S1', '20190614_S1', '20190720_S1', '20190813_S1',
                    '20190918_S1', '20191012_S1', '20191117_S1', '20191223_S1'
                ]

            self.label_list = self.im_list.copy()

        self.t_len = len(self.im_list)

        deb.prints(self.t_len)
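
The im_list names encode Sentinel-1 acquisition dates (YYYYMMDD). A small hypothetical helper, not part of the original code, that converts them to day-of-year values, which the cyclic date encoding used further below would need:

from datetime import datetime

def im_list_to_doy(im_list):
    # '20190121_S1' -> datetime(2019, 1, 21) -> day of year 21
    return [datetime.strptime(name.split('_')[0], '%Y%m%d').timetuple().tm_yday
            for name in im_list]
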
Example #4
    def __init__(self):
        self.path = Path('../data/cv/sar')
        deb.prints(list(self.path.glob('*')))
        self.ims_list = [
            '20151029', '20151110', '20151122', '20151204', '20151216',
            '20160121'
        ]
Example #5
    def model_test_on_samples(self, dataset, sample_range=range(15, 20)):

        print("train results")

        print(
            np.around(self.sess.run(
                self.prediction, {
                    self.data: dataset["train"]["ims"][sample_range],
                    self.keep_prob: 1.0,
                    self.training: False
                }),
                      decimals=4))
        deb.prints(dataset["train"]["labels"][sample_range])

        print("test results")

        print(
            np.around(self.sess.run(
                self.prediction, {
                    self.data: dataset["test"]["ims"][sample_range],
                    self.keep_prob: 1.0,
                    self.training: False
                }),
                      decimals=4))
        deb.prints(dataset["test"]["labels"][sample_range])
Example #6
def mask_label_load(path, im, flatten=False, all_train=False):

    deb.prints(path['train_test_mask'])
    mask = cv2.imread(path['train_test_mask'], 0).astype(np.uint8)
    unique_count_print(mask)
    label = cv2.imread(path['label'], -1).astype(np.uint8)
    label[label == 2] = 1  # Only use 2 classes
    label = label + 1  # 0 is for background

    bounding_box = cv2.imread(path['bounding_box'], -1).astype(np.uint8)
    channel_n = 6
    for chan in range(channel_n):
        bounding_box[im[:, :, chan] == 32767] = 0  # 32767 marks no-data pixels in the image stack

    #mask[mask==255]=1
    #mask=mask+1
    print("Mask")
    stats_print(mask)
    if all_train:
        #mask.fill(1)
        mask[mask != 3] = 1
    stats_print(mask)
    mask[bounding_box == 0] = 0  # Background. No data
    label[bounding_box == 0] = 0  # Background. No data
    if flatten:
        mask = mask.reshape(-1)
        label = label.reshape(-1)
    # Not quite necessary to do this but more informative

    return mask, label, bounding_box
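
stats_print and unique_count_print are not shown in these snippets; plausible minimal versions (assumed, the originals may report more):

import numpy as np

def unique_count_print(arr):
    values, counts = np.unique(arr, return_counts=True)
    print(dict(zip(values.tolist(), counts.tolist())))

def stats_print(arr):
    print("shape", arr.shape, "dtype", arr.dtype, "min", arr.min(), "max", arr.max())
    unique_count_print(arr)
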
def main(_):

    # Make checkpoint directory
    if not os.path.exists(args.checkpoint_dir):
        os.makedirs(args.checkpoint_dir)

    # Create a dataset object
    if label_type=='one_hot':
        data=utils.DataOneHot(debug=args.debug, patch_overlap=args.patch_overlap, im_size=args.im_size, \
                                band_n=args.band_n, t_len=args.t_len, path=args.path, class_n=args.class_n, pc_mode=args.pc_mode, \
                                test_n_limit=args.test_n_limit,memory_mode=args.memory_mode, \
                                balance_samples_per_class=args.balance_samples_per_class, test_get_stride=args.test_get_stride, \
                                n_apriori=args.n_apriori,patch_length=args.patch_len,squeeze_classes=args.squeeze_classes,im_h=args.im_h,im_w=args.im_w, \
                                id_first=args.id_first, train_test_mask_name=args.train_test_mask_name, \
                                test_overlap_full=args.test_overlap_full,ram_store=args.ram_store,patches_save=args.patches_save)
    elif label_type=='semantic':
        data=utils.DataSemantic(debug=args.debug, patch_overlap=args.patch_overlap, im_size=args.im_size, \
                                band_n=args.band_n, t_len=args.t_len, path=args.path, class_n=args.class_n, pc_mode=args.pc_mode, \
                                test_n_limit=args.test_n_limit,memory_mode=args.memory_mode, \
                                balance_samples_per_class=args.balance_samples_per_class, test_get_stride=args.test_get_stride, \
                                n_apriori=args.n_apriori,patch_length=args.patch_len,squeeze_classes=args.squeeze_classes,im_h=args.im_h,im_w=args.im_w, \
                                id_first=args.id_first, train_test_mask_name=args.train_test_mask_name, \
                                test_overlap_full=args.test_overlap_full,ram_store=args.ram_store,patches_save=args.patches_save,label_folder=args.label_folder)


    # Load images and create dataset (Extract patches)
    if args.memory_mode=="ram":
        data.create()
        deb.prints(data.ram_data["train"]["ims"].shape)
Example #8
    def hdd_data_sub_data_get(self, data, n, sub_data):

        deb.prints(len(data["im_paths"]))
        sub_data["im_paths"] = [data["im_paths"][i] for i in sub_data["index"]]
        sub_data["labels_onehot"] = data["labels_onehot"][sub_data["index"]]
        sub_data["ims"] = self.ims_get(sub_data["im_paths"])
        return sub_data
Example #9
    def hdd_data_load(self, conf):

        data = {}
        data["train"] = {}
        data["test"] = {}
        data["train"]["im_paths"] = glob.glob(
            conf["train"]["balanced_path_ims"] + '/*.npy')
        data["train"]["im_paths"] = sorted(
            data["train"]["im_paths"], key=lambda x: int(x.split('_')[1][:-4]))
        data["train"]["n"] = len(data["train"]["im_paths"])
        #print(data["train"]["im_paths"])
        data["test"]["im_paths"] = glob.glob(
            conf["test"]["balanced_path_ims"] + '/*.npy')
        data["test"]["im_paths"] = sorted(
            data["test"]["im_paths"], key=lambda x: int(x.split('_')[1][:-4]))

        deb.prints(len(data["train"]["im_paths"]))

        data["train"]["labels"] = np.load(
            conf["train"]["balanced_path_label"] + "labels.npy")

        data["test"]["labels"] = np.load(conf["test"]["balanced_path_label"] +
                                         "labels.npy")

        # Change to a subset of test
        data["test"]["ims"] = [
            np.load(im_path) for im_path in data["test"]["im_paths"]
        ]
        data["test"]["n"] = len(data["test"]["im_paths"])
        return data
    def dotyReplicateSamples(self, sample_n=16):  # sample_n is the batch size, e.g. batch['label'].shape[0]
        self.dotys_sin_cos = np.expand_dims(self.dotys_sin_cos,
                                            axis=0)  # add batch dimension
        self.dotys_sin_cos = np.repeat(self.dotys_sin_cos, sample_n, axis=0)
        deb.prints(self.dotys_sin_cos.shape)
        return self.dotys_sin_cos
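
dotys_sin_cos is assumed to be a cyclic (sin, cos) encoding of the acquisition day of year; a minimal sketch of how it could be built (hypothetical helper, pairs with im_list_to_doy above):

import numpy as np

def doty_sin_cos_encode(doys, period=365.25):
    # doys: day-of-year values, shape (t_len,) -> output shape (t_len, 2)
    angle = 2 * np.pi * np.asarray(doys, dtype=np.float32) / period
    return np.stack([np.sin(angle), np.cos(angle)], axis=-1)
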
Example #11
	def in_label_ram_store(self,data,patch,label_patch,data_idx,label_type):
		data["ims"][data_idx]=patch
		if label_type=="one_hot":
			data["labels"][data_idx]=int(label_patch[self.conf["t_len"]-1,self.conf["patch"]["center_pixel"],self.conf["patch"]["center_pixel"]])
			if data["labels"][data_idx]==0:
				deb.prints("here")
		return data
Example #12
	def im_patches_labelsonehot_load2(self,conf_set,data,data_whole,debug=0): #data["n"], self.conf["patch"]["ims_path"], self.conf["patch"]["labels_path"]
		print("[@im_patches_labelsonehot_load2]")
		fname=sys._getframe().f_code.co_name

		data["ims"]=np.zeros((conf_set["n"],self.conf["t_len"],self.conf["patch"]["size"],self.conf["patch"]["size"],self.conf["band_n"]))
		data["labels"]=np.zeros((conf_set["n"])).astype(np.int)
			
		count=0
		if self.debug>=1: deb.prints(conf_set["ims_path"],fname)
		for i in range(1,conf_set["n"]):
			if self.debug>=3: print("i",i)
			im_name=glob.glob(conf_set["ims_path"]+'patch_'+str(i)+'_*')[0]
			data["ims"][count,:,:,:,:]=np.load(im_name)
			
			label_name=glob.glob(conf_set["labels_path"]+'patch_'+str(i)+'_*')[0]
			data["labels"][count]=int(np.load(label_name)[self.conf["t_len"]-1,self.conf["patch"]["center_pixel"],self.conf["patch"]["center_pixel"]])
			if self.debug>=2: print("train_labels[count]",data["labels"][count])
			
			count=count+1
			if i % 1000==0:
				print("file ID",i)
		data["labels_onehot"]=np.zeros((conf_set["n"],self.conf["class_n"]))
		data["labels_onehot"][np.arange(conf_set["n"]),data["labels"]]=1
		#del data["labels"]
		return data
Example #13
    def __init__(self, sess=tf.Session(), batch_size=50, epoch=200, train_size=1e8,
                 timesteps=7, patch_len=32,
                 kernel=[3, 3], channels=7, filters=32, n_classes=6,
                 checkpoint_dir='./checkpoint', log_dir="../data/summaries/", data=None, conf=None, debug=1,
                 patience=10, squeeze_classes=True, n_repetitions=10, fine_early_stop=False, fine_early_stop_steps=400):
        self.squeeze_classes = squeeze_classes
        self.ram_data = data
        self.sess = sess
        self.batch_size = batch_size
        self.epoch = epoch
        self.train_size = train_size
        self.timesteps = timesteps
        self.patch_len = patch_len
        self.shape = [self.patch_len, self.patch_len]
        self.kernel = kernel
        self.kernel_size = kernel[0]
        self.channels = channels
        deb.prints(self.channels)
        self.filters = filters
        self.n_classes = n_classes
        self.checkpoint_dir = checkpoint_dir
        self.conf = conf
        self.debug = debug
        self.log_dir = log_dir
        self.test_batch_size = 1000
        self.early_stop = {}
        self.early_stop["patience"] = patience
        self.repeat = {"n": n_repetitions, "filename": 'repeat_results.pickle'}
        if self.debug >= 1: print("Initializing NeuralNet instance")
        print(self.log_dir)
        self.remove_sparse_loss = False
        self.fine_early_stop_steps = fine_early_stop_steps
        self.fine_early_stop = fine_early_stop
Example #14
	def view_as_windows_multichannel(self, arr_in, window_shape, step=1):
		out = np.squeeze(view_as_windows(arr_in, window_shape, step=step))
		partitioned_shape=out.shape

		deb.prints(out.shape)
		out = np.reshape(out, (out.shape[0] * out.shape[1],) + out.shape[2::])
		return out,partitioned_shape
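
For reference, a standalone usage sketch of skimage's view_as_windows with the same squeeze-and-reshape pattern (dummy array, non-overlapping 32x32 windows):

import numpy as np
from skimage.util import view_as_windows

im = np.zeros((128, 128, 2), dtype=np.float32)               # (H, W, bands)
out = np.squeeze(view_as_windows(im, (32, 32, 2), step=32))  # (4, 4, 32, 32, 2)
patches = np.reshape(out, (out.shape[0] * out.shape[1],) + out.shape[2:])
print(patches.shape)                                          # (16, 32, 32, 2)
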
Example #15
def label_apply_mask(im, mask, validating=None):
    im = im.astype(np.uint8)
    im_train = im.copy()
    im_test = im.copy()

    mask_train = mask.copy()
    mask_train[mask != 1] = 0
    mask_test = mask.copy()
    mask_test[mask != 2] = 0
    mask_test[mask == 2] = 1

    deb.prints(im.shape)
    deb.prints(mask_train.shape)

    deb.prints(im.dtype)
    deb.prints(mask_train.dtype)

    im_train = cv2.bitwise_and(im, im, mask=mask_train)
    im_test = cv2.bitwise_and(im, im, mask=mask_test)

    if validating:
        mask_val = mask.copy()
        mask_val[mask != 3] = 0
        mask_val[mask == 3] = 1
        im_val = im.copy()
        im_val = cv2.bitwise_and(im, im, mask=mask_val)

    #im_train[t_step,:,:,band][mask!=1]=-1
    #im_test[t_step,:,:,band][mask!=2]=-1
    deb.prints(im_train.shape)
    if validating:
        return im_train, im_test, im_val
    else:
        return im_train, im_test, None
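
A tiny self-contained demo of what the cv2.bitwise_and calls above do: pixels outside the mask are zeroed, pixels inside are kept.

import numpy as np
import cv2

im = np.arange(16, dtype=np.uint8).reshape(4, 4)
mask = np.zeros((4, 4), dtype=np.uint8)
mask[:2, :] = 1                               # pretend the top half is the "train" region
kept = cv2.bitwise_and(im, im, mask=mask)     # top half preserved, bottom half set to 0
print(kept)
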
Example #16
def main(_):
    if not os.path.exists(args.checkpoint_dir):
        os.makedirs(args.checkpoint_dir)
    #if args.memory_mode=="ram":
    data=utils.DataOneHot(debug=args.debug, patch_overlap=args.patch_overlap, im_size=args.im_size, \
                            band_n=args.band_n, t_len=args.t_len, path=args.path, class_n=args.class_n, pc_mode=args.pc_mode, \
                            test_n_limit=args.test_n_limit,memory_mode=args.memory_mode,balance_samples_per_class=args.balance_samples_per_class, \
                            test_get_stride=args.test_get_stride)
    if args.memory_mode=="ram":
        data.onehot_create()
        deb.prints(data.ram_data["train"]["ims"].shape)
    with tf.Session() as sess:
        if args.model=='convlstm':
            model = conv_lstm(sess, batch_size=args.batch_size, epoch=args.epoch, train_size=args.train_size,
                            timesteps=args.timesteps, shape=args.shape,
                            kernel=args.kernel, channels=args.channels, filters=args.filters, n_classes=args.n_classes,
                            checkpoint_dir=args.checkpoint_dir,log_dir=args.log_dir,data=data.ram_data,conf=data.conf, debug=args.debug)
        elif args.model=='conv3d':
            model = Conv3DMultitemp(sess, batch_size=args.batch_size, epoch=args.epoch, train_size=args.train_size,
                            timesteps=args.timesteps, shape=args.shape,
                            kernel=args.kernel, channels=args.channels, filters=args.filters, n_classes=args.n_classes,
                            checkpoint_dir=args.checkpoint_dir,log_dir=args.log_dir,data=data.ram_data, debug=args.debug)
        if args.phase == 'train':
            model.train(args)
        
        elif args.phase == 'test':
            model.test(args)
        """
Example #17
def view_as_windows_flat(im,window_shape,step=1):
	info={}
	patch=view_as_windows(im,window_shape,step)
	windows_shape=patch.shape
	patch=np.squeeze(patch)
	patch=np.reshape(patch,(patch.shape[0]*patch.shape[1],)+patch.shape[2:])
	deb.prints(patch.shape)
	return patch,windows_shape
Example #18
    def data_len_get(self, data, memory_mode):
        if memory_mode == "hdd":
            data_len = len(data["im_paths"])
        elif memory_mode == "ram":
            deb.prints(data["ims"].shape)
            data_len = data["ims"].shape[0]
        deb.prints(data_len)
        return data_len
Example #19
	def folder_load(self,folder_path):
		paths=glob.glob(folder_path+'*.npy')
		files=[]
		deb.prints(len(paths))
		for path in paths:
			#print(path)
			files.append(np.load(path))
		return np.asarray(files),paths
Example #20
	def im_gray_idx_to_rgb(self,im):
		out=np.zeros((im.shape+(3,)))
		for chan in range(0,3):
			for clss in range(0,self.class_n):
				out[:,:,chan][im==clss]=np.array(self.im_gray_idx_to_rgb_table[clss][1][chan])
		deb.prints(out.shape)
		out=cv2.cvtColor(out.astype(np.uint8),cv2.COLOR_RGB2BGR)
		return out
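
im_gray_idx_to_rgb_table is not defined in these snippets; a plausible structure (hypothetical class names and colors), with one (name, (R, G, B)) entry per gray-level class index, consistent with the [clss][1][chan] indexing above:

im_gray_idx_to_rgb_table = [
    ('background', (0, 0, 0)),
    ('class_1',    (0, 255, 0)),
    ('class_2',    (255, 0, 0)),
    ('class_3',    (0, 0, 255)),
]
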
Example #21
    def imsLoad(self):
        ims = []
        for im_name in self.ims_list:
            deb.prints(self.path / (im_name + '.npy'))
            im = np.load(self.path / (im_name + '.npy'))
            ims.append(im)
        self.ims = np.asarray(ims)
        deb.prints(self.ims.shape)
        self.t_len, self.row, self.col, self.bands = self.ims.shape
Example #22
    def reconstruct_init(self):
        reconstruct = {}
        reconstruct["im"] = np.zeros(self.conf["im_size"])
        deb.prints(reconstruct["im"].shape)
        reconstruct["idx"] = 0

        return reconstruct

        reconstruct["ids"] = self.reconstruct_ids_get(self.reconstruct["im"])
    def valLabelSelect(self, data, label_id=-1):

        data.patches['val']['label'] = data.patches['val']['label'][:,
                                                                    label_id]
        data.patches['test']['label'] = data.patches['test']['label'][:,
                                                                      label_id]
        deb.prints(data.patches['val']['label'].shape)

        deb.prints(data.patches['test']['label'].shape)
        return data
Example #24
    def test(self, args):

        self.sess = tf.Session()
        self.saver.restore(self.sess, tf.train.latest_checkpoint('./'))

        print("Model restored.")
        data = self.data_load(self.conf, memory_mode=self.conf["memory_mode"])
        deb.prints(args.im_reconstruct)
        test_stats = self.data_stats_get(data["test"],
                                         im_reconstruct=args.im_reconstruct)
Example #25
    def per_class_label_count_get(self, data_labels):
        per_class_label_count = np.zeros(self.n_classes)
        classes_unique, classes_count = np.unique(data_labels,
                                                  return_counts=True)

        for clss, clss_count in zip(np.nditer(classes_unique),
                                    np.nditer(classes_count)):
            per_class_label_count[int(clss)] = clss_count
        deb.prints(per_class_label_count)
        return per_class_label_count
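
An equivalent vectorized sketch using np.bincount, valid for non-negative integer labels below n_classes:

import numpy as np

def per_class_label_count_get_bincount(data_labels, n_classes):
    labels = np.asarray(data_labels).astype(int).ravel()
    return np.bincount(labels, minlength=n_classes).astype(np.float64)
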
Example #26
def im_apply_mask(im, mask, channel_n):
    im_train = im.copy()
    im_test = im.copy()
    im_val = im.copy()
    for band in range(0, channel_n):
        im_train[:, :, band][mask != 1] = -2
        im_test[:, :, band][mask != 2] = -2
        im_val[:, :, band][mask != 2] = -2  # note: reuses the test mask value (2); the validation split elsewhere uses 3

    deb.prints(im_train.shape)
    return im_train, im_test, im_val
	def folder_load(self,folder_path): #move to patches_handler
		paths=glob.glob(folder_path+'*.npy')
		#deb.prints(paths)
		# sort in human order
		paths=natural_sort(paths)
		#deb.prints(paths)
		files=[]
		deb.prints(len(paths))
		for path in paths:
			#print(path)
			files.append(np.load(path))
		return np.asarray(files),paths
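
natural_sort is not defined in these snippets; a common minimal implementation (assumed) that orders paths by their embedded numbers:

import re

def natural_sort(paths):
    # 'patch_2.npy' sorts before 'patch_10.npy' (numeric-aware ordering)
    def key(path):
        return [int(tok) if tok.isdigit() else tok.lower()
                for tok in re.split(r'(\d+)', path)]
    return sorted(paths, key=key)
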
Example #28
	def metrics_per_class_from_im_get(self,name='im_reconstructed_rgb_test_predictionplen64_3.png',folder='../results/reconstructed/',average=None):
		data={}
		metrics={}
		deb.prints(folder+name)
		data['prediction']=cv2.imread(folder+name,0)[0:-30,0:-2]
		data['label']=cv2.imread(folder+'im_reconstructed_rgb_test_labelplen64_3.png',0)[0:-30,0:-2]

		data['prediction']=np.reshape(data['prediction'],-1)
		data['label']=np.reshape(data['label'],-1)
		
		metrics['f1_score_per_class']=f1_score(data['prediction'],data['label'],average=average)
		print(metrics)
Example #29
    def patches_extract(self, image, patch_step):

        patches = {}
        patches['in'], _ = self.view_as_windows_multichannel(
            image['in'], (self.patch_len, self.patch_len, self.channel_n),
            step=patch_step)
        patches['label'], patches[
            'label_partitioned_shape'] = self.view_as_windows_multichannel(
                image['label'], (self.patch_len, self.patch_len, 1),
                step=patch_step)

        # ===================== Switch labels to one-hot ===============#

        if self.debug >= 2:
            deb.prints(patches['label'].shape)

        if flag['label_one_hot']:

            # Get the vectorized integer label
            patches['label_h'] = np.reshape(
                patches['label'],
                (patches['label'].shape[0],
                 patches['label'].shape[1] * patches['label'].shape[2]))
            deb.prints(patches['label_h'].shape)

            # Init the one-hot vectorized label
            patches['label_h2'] = np.zeros(
                (patches['label_h'].shape[0], patches['label_h'].shape[1],
                 self.class_n))

            # Get the one-hot vectorized label
            for sample_idx in range(0, patches['label_h'].shape[0]):
                for loc_idx in range(0, patches['label_h'].shape[1]):
                    patches['label_h2'][
                        sample_idx, loc_idx,
                        patches['label_h'][sample_idx][loc_idx]] = 1

            # Get the image one-hot labels
            patches['label'] = np.reshape(
                patches['label_h2'],
                (patches['label_h2'].shape[0], patches['label'].shape[1],
                 patches['label'].shape[2], self.class_n))

            if self.debug >= 2:
                deb.prints(patches['label_h2'].shape)

        # ============== End switch labels to one-hot =============#
        if self.debug:
            deb.prints(patches['label'].shape)
            deb.prints(patches['in'].shape)

        patches['n'] = patches['in'].shape[0]
        return patches
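
The nested one-hot loop above can be replaced by indexing an identity matrix; a vectorized sketch (hypothetical helper) producing the same (n, h, w, class_n) output:

import numpy as np

def label_patches_to_onehot(label_patches, class_n):
    # label_patches: (n, h, w) or (n, h, w, 1) integer labels -> (n, h, w, class_n) one-hot
    n, h, w = label_patches.shape[:3]
    flat = label_patches.astype(int).reshape(n, -1)     # (n, h*w)
    onehot = np.eye(class_n, dtype=np.float32)[flat]    # (n, h*w, class_n)
    return onehot.reshape(n, h, w, class_n)
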
Example #30
    def data_load(self, conf, memory_mode):
        if memory_mode == "hdd":
            data = self.hdd_data_load(conf)
        elif memory_mode == "ram":
            data = self.ram_data
            deb.prints(self.ram_data["train"]["ims"].shape)
            deb.prints(data["train"]["ims"].shape)

        data["train"]["index"] = range(data["test"]["n"])
        data["test"]["index"] = range(data["test"]["n"])

        return data