Esempio n. 1
0
# --- TF1-style graph inputs and hyper-parameters -------------------------
# NOTE(review): `imSize`, `nbFilter`, `tf`, `np`, and `hilbertCurve` are
# defined elsewhere in the file (not visible in this chunk) -- confirm.
input_layer = tf.placeholder("float", [None, imSize, imSize, 3])  # RGB image batch (NHWC)
# NOTE(review): leading dim 2 presumably matches n_classes below -- confirm.
y = tf.placeholder("float", [2, None, imSize, imSize])
freqFeat = tf.placeholder("float", [None, 64, 240])  # frequency-domain features per image
ratio = 15.0  #tf.placeholder("float",[1])
#out_rnn=tf.placeholder("float", [None, 128,128,3])

############################################################################
#total_layers = 25 #Specify how deep we want our network
units_between_stride = 2
upsample_factor = 16
n_classes = 2
beta = .01  # regularization weight, presumably -- TODO confirm usage
outSize = 16
############################################################################
# Build the permutation that orders 64 patch indices along a 3rd-order
# Hilbert curve, plus its inverse (`actual_ind`) to undo that ordering.
seq = np.linspace(0, 63, 64).astype(int)
order3 = hilbertCurve(3)
order3 = np.reshape(order3, (64))
hilbert_ind = np.lexsort((seq, order3))
actual_ind = np.lexsort((seq, hilbert_ind))  # inverse permutation of hilbert_ind

# Trainable output projection; shapes depend on `nbFilter` defined elsewhere.
weights = {'out': tf.Variable(tf.random_normal([64, 64, nbFilter]))}
biases = {'out': tf.Variable(tf.random_normal([nbFilter]))}

with tf.device('/gpu:1'):

    def conv_mask_gt(z):
        # Get ones for each class instead of a number -- we need that
        # for cross-entropy loss later on. Sometimes the groundtruth
        # masks have values other than 1 and 0.
        # NOTE(review): this definition appears truncated at the end of
        # this chunk (no return statement visible) -- the remainder of
        # the body is elsewhere; do not assume its full behavior.
        class_labels_tensor = (z == 1)       # boolean mask: foreground pixels
        background_labels_tensor = (z == 0)  # boolean mask: background pixels
Esempio n. 2
0
def main():
    parser = argparse.ArgumentParser(
        description='Preprocessing images for darpa challenge')
    parser.add_argument('input_dir',
                        metavar='Input Directory',
                        type=str,
                        help='Directory containing input images')
    parser.add_argument('output_file',
                        metavar='Output File',
                        type=str,
                        help='Full output file path')
    parser.add_argument('mode',
                        choices=['training', 'eval'],
                        type=str,
                        help='Mode that the data will be used for')
    parser.add_argument('--frac',
                        type=float,
                        default=0.2,
                        required=False,
                        help='Percent of images to use for testing')
    parser.add_argument('--name',
                        type=bool,
                        default=False,
                        required=False,
                        help='If true, image file names are stored as well')
    args = parser.parse_args()

    if args.mode == 'training':
        folders = sorted(os.listdir(args.input_dir))
        num_images = 0
        for i in xrange(len(folders)):
            num_images += len(os.listdir(args.input_dir + '/' + folders[i]))

        hdf5 = tb.open_file(args.output_file,'w',\
                            'Medifor hilbert finetuning, 128x128 patch size')
        #Create EArrays that will hold the features and labels
        test_feat = hdf5.create_earray(
            hdf5.root,
            'validation_features',
            tb.Float32Atom(),
            shape=(0, 64, 10,
                   92),  # 10 is # of angles, 92 # of values for 128x128 patch
            expectedrows=num_images * args.frac)  #64 patches per image
        test_label = hdf5.create_earray(
            hdf5.root,
            'validation_labels',
            tb.Float32Atom(),
            shape=(0, len(folders)),  # one hot encoding with two classes
            expectedrows=num_images * args.frac)
        train_feat = hdf5.create_earray(
            hdf5.root,
            'training_features',
            tb.Float32Atom(),
            shape=(0, 64, 10,
                   92),  # 10 is # of angles, 92 # of values for 128x128 patch
            expectedrows=num_images * (1 - args.frac))  #64 patches per image
        train_label = hdf5.create_earray(
            hdf5.root,
            'training_labels',
            tb.Float32Atom(),
            shape=(0, len(folders)),  # one hot encoding with two classes
            expectedrows=num_images * (1 - args.frac))
        if args.name:
            test_names = hdf5.create_earray(hdf5.root,
                                            'validation_names',
                                            tb.StringAtom(32),
                                            shape(0, 1),
                                            expectedrows=num_images *
                                            args.frac)
            train_names = hdf5.create_earray(hdf5.root,
                                             'training_names',
                                             tb.StringAtom(32),
                                             shape(0, 1),
                                             expectedrows=num_images *
                                             (1 - args.frac))

        seq = np.linspace(0, 63, 64).astype(int)
        order3 = hilbertCurve(3)
        order3 = np.reshape(order3, (64))
        ind = np.lexsort((seq, order3))

        folders = sorted(os.listdir(args.input_dir))
        for i in xrange(len(folders)):
            images = sorted(os.listdir(args.input_dir + '/' + folders[i]))
            for j in xrange(len(images)):
                print './' + args.input_dir + '/' + folders[i] + '/' + images[j]
                rgb = openImage(args.input_dir + '/' + folders[i] + '/' +
                                images[j])
                rgb = resize(rgb, [1024, 1024, 3])

                rgb_patches = view_as_windows(rgb, (128, 128, 3), 128)
                rgb_patches = np.squeeze(rgb_patches)
                rgb_patches = np.reshape(rgb_patches, (64, 128, 128, 3))
                radon_features = extractRadonFeat(rgb_patches)
                radon_features = radon_features[ind]
                radon_features = np.reshape(radon_features, (1, 64, 10, 92))

                patch_labels = np.zeros((1, len(folders)))
                patch_labels[0, i] = 1

                if j < len(images) * args.frac:
                    test_feat.append(radon_features)
                    test_label.append(patch_labels)
                    if args.name:
                        test_names.append(np.reshape([images[j][0:32]],
                                                     (1, 1)))
                else:
                    train_feat.append(radon_features)
                    train_label.append(patch_labels)
                    if args.name:
                        train_names.append(
                            np.reshape([images[j][0:32]], (1, 1)))
        hdf5.close()
    else:
        num_images = len(os.listdir(args.input_dir))
        hdf5 = tb.open_file(args.output_file,'w',\
                            'Medifor hilbert finetuning, 128x128 patch size')
        #Create EArrays that will hold the features and labels
        feature = hdf5.create_earray(
            hdf5.root,
            'features',
            tb.Float32Atom(),
            shape=(0, 64, 10,
                   92),  # 10 is # of angles, 92 # of values for 128x128 patch
            expectedrows=num_images)  #64 patches per image
        if args.name:
            names = hdf5.create_earray(hdf5.root,
                                       'names',
                                       tb.StringAtom(32),
                                       shape=(0, 1),
                                       expectedrows=num_images)
        seq = np.linspace(0, 63, 64).astype(int)
        order3 = hilbertCurve(3)
        order3 = np.reshape(order3, (64))
        ind = np.lexsort((seq, order3))
        #	print ind
        images = sorted(os.listdir(args.input_dir))
        for i in xrange(len(images)):
            print '/' + args.input_dir + '/' + images[i]
            rgb = openImage(args.input_dir + '/' + images[i])
            rgb = resize(rgb, [1024, 1024, 3])

            rgb_patches = view_as_windows(rgb, (128, 128, 3), 128)
            rgb_patches = np.squeeze(rgb_patches)
            rgb_patches = np.reshape(rgb_patches, (64, 128, 128, 3))
            radon_features = extractRadonFeat(rgb_patches)
            radon_features = radon_features[ind]
            radon_features = np.reshape(radon_features, (1, 64, 10, 92))

            feature.append(radon_features)
            if args.name:
                names.append(np.reshape([images[i][0:32]], (1, 1)))

        hdf5.close()