Example #1
def save_npy(sett):
    # Read '<FLAGS.meta_dir>/<sett>.txt', one "<image name> <label>" pair
    # per line; image names may themselves contain a single space, hence
    # the split logic below.
    f = open(os.path.join(FLAGS.meta_dir, sett) + '.txt', 'r')
    image_names = []
    labels = []
    line = f.readline()
    while line:
        l = line.split(' ')
        if len(l) > 2:
            image_name = l[0] + ' ' + l[1]
            label = l[2]
        else:
            image_name = l[0]
            label = l[1]
        label = label[0:-1]  # strip the trailing newline
        image_names.append(image_name)
        labels.append(int(label))
        line = f.readline()
    f.close()
    # Crop patches from every listed image and save each one as a small
    # .npy file under '<temp_name>/<sett>/'. FLAGS, ff, temp_name, meta
    # and get_patches are module-level names in the original source.
    for i, img_name in enumerate(image_names):
        full_path = os.path.join(FLAGS.data_dir, img_name)
        print('processing ' + full_path, file=ff)
        img = plt.imread(full_path)
        n = 0
        for patch in get_patches(img, FLAGS.max_patches,
                                 FLAGS.patch_size):
            n = n + 1
            img_name0 = img_name.split('/')[-1]
            np.save(
                os.path.join(
                    temp_name, sett, meta[labels[i]] + '_' +
                    img_name0 + '_' + str(n)) + '.npy', {
                        'label': labels[i],
                        'patch': patch
                    })
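All of these examples depend on a get_patches helper that does not appear on this page. A minimal sketch of what it presumably does, yielding random square crops one at a time, is below; the function name matches the calls above, but the signature details and sampling strategy are assumptions, not the original implementation.

import numpy as np

def get_patches(img, max_patches, patch_size):
    # Presumed behavior: yield up to max_patches random square crops of
    # side patch_size from img (H x W x C).
    h, w = img.shape[0], img.shape[1]
    for _ in range(max_patches):
        top = np.random.randint(0, h - patch_size + 1)
        left = np.random.randint(0, w - patch_size + 1)
        yield img[top:top + patch_size, left:left + patch_size]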
Example #2
def save_npy(sett):
    # Like Example 1, but image names never contain spaces, and each file
    # name is rebuilt from FLAGS.extra and FLAGS.format before loading.
    f = open(os.path.join(FLAGS.meta_dir, sett) + '.txt', 'r')
    image_names = []
    labels = []
    line = f.readline()
    while line:
        image_name, label = line.split(' ')
        label = label[0:-1]  # strip the trailing newline
        image_names.append(image_name + '-' + FLAGS.extra + '.' +
                           FLAGS.format)
        labels.append(int(label))
        line = f.readline()
    f.close()
    for i, img_name in enumerate(image_names):
        full_path = os.path.join(FLAGS.data_dir, meta[labels[i]],
                                 img_name)
        print('processing ' + full_path, file=ff)
        ff.flush()
        img = plt.imread(full_path)
        n = 0
        for patch in get_patches(img, FLAGS.max_patches,
                                 FLAGS.patch_size):
            n = n + 1
            np.save(
                os.path.join(
                    temp_name, sett, meta[labels[i]] + '_' + img_name +
                    '_' + str(n)) + '.npy', {
                        'label': labels[i],
                        'patch': patch
                    })
Example #3
def save_crop(sett):
    # Save a single 512x512 crop of every listed image as a PNG in
    # FLAGS.out_dir. Note that extra, format and data_dir are bare
    # module-level names here (format also shadows the built-in).
    f = open(os.path.join(FLAGS.meta_dir, sett) + '.txt', 'r')
    image_names = []
    labels = []
    line = f.readline()
    while line:
        image_name, label = line.split(' ')
        label = label[0:-1]  # strip the trailing newline
        image_names.append(image_name + '-' + extra + '.' + format)
        labels.append(int(label))
        line = f.readline()
    f.close()
    for i, img_name in enumerate(image_names):
        full_path = os.path.join(data_dir, meta[labels[i]], img_name)
        print('processing ' + full_path, file=ff)
        ff.flush()
        img = plt.imread(full_path)
        for patch in get_patches(img, 1, 512):
            plt.imsave(
                os.path.join(
                    FLAGS.out_dir, meta[labels[i]] + '_' +
                    img_name.split('.')[0]) + '.png', patch)
Example #4
def main(_):
    tf.gfile.MakeDirs(os.path.join(FLAGS.out_dir, 'train'))
    tf.gfile.MakeDirs(os.path.join(FLAGS.out_dir, 'valid'))
    # open() raises IOError on failure, so no explicit success check is
    # needed; ff is the shared log file used throughout this script.
    ff = open(FLAGS.out_file, 'w')
    temp_name = '%.06f' % time.time()

    if not tf.gfile.Exists(temp_name):
        tf.gfile.MakeDirs(os.path.join(temp_name, 'train'))
        tf.gfile.MakeDirs(os.path.join(temp_name, 'valid'))

    train_name = os.path.join(FLAGS.out_dir, 'train', 'spc_train.tfrecords')
    valid_name = os.path.join(FLAGS.out_dir, 'valid', 'spc_valid.tfrecords')

    train_writer = tf.python_io.TFRecordWriter(train_name)
    valid_writer = tf.python_io.TFRecordWriter(valid_name)

    # First run: build the metadata files and randomly split scenes into
    # train/valid.
    if not tf.gfile.Exists(FLAGS.meta_dir):
        tf.gfile.MakeDirs(FLAGS.meta_dir)
        classes = os.listdir(FLAGS.data_dir)
        spc_classes = open(os.path.join(FLAGS.meta_dir, 'spc_classes.txt'),
                           'w')
        train_list = open(os.path.join(FLAGS.meta_dir, 'train.txt'), 'w')
        valid_list = open(os.path.join(FLAGS.meta_dir, 'valid.txt'), 'w')
        for label, class_name in enumerate(classes):
            spc_classes.write(('%d ' % label) + class_name + '\n')
            scenes = os.listdir(os.path.join(FLAGS.data_dir, class_name))
            # Hold out one randomly chosen scene (index 0-4) per class for
            # validation; the rest go to training.
            a = np.random.randint(0, 5)
            for j, scene in enumerate(scenes):
                sett = 'valid' if j == a else 'train'
                img_names = os.listdir(
                    os.path.join(FLAGS.data_dir, class_name, scene))
                for img_name in img_names:
                    full_path = os.path.join(FLAGS.data_dir, class_name, scene,
                                             img_name)
                    if sett == 'train':
                        train_list.write(
                            os.path.join(class_name, scene, img_name) +
                            (' %d\n' % label))
                    else:
                        valid_list.write(
                            os.path.join(class_name, scene, img_name) +
                            (' %d\n' % label))
                    print('processing ' + full_path, file=ff)
                    img = plt.imread(full_path)
                    n = 0
                    for patch in get_patches(img, FLAGS.max_patches,
                                             FLAGS.patch_size):
                        n = n + 1
                        np.save(
                            os.path.join(
                                temp_name, sett, class_name + '_' + img_name +
                                '_' + str(n)) + '.npy', {
                                    'label': label,
                                    'patch': patch
                                })
        spc_classes.close()
        train_list.close()
        valid_list.close()
    # Later runs: reuse the existing metadata files.
    else:
        # Load the label -> class-name mapping written by a previous run.
        f = open(os.path.join(FLAGS.meta_dir, 'spc_classes.txt'), 'r')
        meta = {}
        line = f.readline()
        while line:
            label, class_name = line.split(' ')
            class_name = class_name[0:-1]
            meta[int(label)] = class_name
            line = f.readline()
        f.close()

        def save_npy(sett):
            f = open(os.path.join(FLAGS.meta_dir, sett) + '.txt', 'r')
            image_names = []
            labels = []
            line = f.readline()
            while line:
                l = line.split(' ')
                if len(l) > 2:
                    # Image names may contain a single space.
                    image_name = l[0] + ' ' + l[1]
                    label = l[2]
                else:
                    image_name = l[0]
                    label = l[1]
                label = label[0:-1]  # strip the trailing newline
                image_names.append(image_name)
                labels.append(int(label))
                line = f.readline()
            f.close()
            for i, img_name in enumerate(image_names):
                full_path = os.path.join(FLAGS.data_dir, img_name)
                print('processing ' + full_path, file=ff)
                img = plt.imread(full_path)
                n = 0
                for patch in get_patches(img, FLAGS.max_patches,
                                         FLAGS.patch_size):
                    n = n + 1
                    img_name0 = img_name.split('/')[-1]
                    np.save(
                        os.path.join(
                            temp_name, sett, meta[labels[i]] + '_' +
                            img_name0 + '_' + str(n)) + '.npy', {
                                'label': labels[i],
                                'patch': patch
                            })

        save_npy('train')
        save_npy('valid')

    train = os.listdir(os.path.join(temp_name, 'train'))
    valid = os.listdir(os.path.join(temp_name, 'valid'))
    print(len(train))
    print(len(valid))
    idx = list(range(len(train)))
    shuffle(idx)
    for i in idx:
        # Each .npy file holds a dict {'label': ..., 'patch': ...};
        # allow_pickle=True is required to load it on newer NumPy versions.
        dic = np.load(os.path.join(temp_name, 'train', train[i]),
                      allow_pickle=True).item()
        patch = dic['patch']
        label = dic['label']

        patch_raw = patch.tostring()
        label_raw = np.array([label]).astype(np.int32).tostring()
        example = tf.train.Example(features=tf.train.Features(
            feature={
                'patch_raw': _bytes_feature(patch_raw),
                'label': _bytes_feature(label_raw)
            }))
        train_writer.write(example.SerializeToString())

    idx = list(range(len(valid)))
    shuffle(idx)
    for i in idx:
        dic = np.load(os.path.join(temp_name, 'valid', valid[i]),
                      allow_pickle=True).item()
        patch = dic['patch']
        label = dic['label']
        patch_raw = patch.tostring()
        label_raw = np.array([label]).astype(np.int32).tostring()
        example = tf.train.Example(features=tf.train.Features(
            feature={
                'patch_raw': _bytes_feature(patch_raw),
                'label': _bytes_feature(label_raw)
            }))
        valid_writer.write(example.SerializeToString())

    train_writer.close()
    valid_writer.close()

    ff.close()
    tf.gfile.DeleteRecursively(temp_name)
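Example 4 serializes each patch through a _bytes_feature helper that is not shown. The usual TF 1.x definition, together with a matching parse function for reading the records back, would look roughly like the sketch below; the decoded patch dtype (uint8 here) depends on what plt.imread returned at write time, so treat it as an assumption.

import tensorflow as tf

def _bytes_feature(value):
    # Standard TF 1.x idiom: wrap raw bytes in a Feature proto.
    return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))

def parse_example(serialized, patch_size):
    # Rough inverse of the writer loop above; dtypes are assumptions.
    features = tf.parse_single_example(
        serialized,
        features={
            'patch_raw': tf.FixedLenFeature([], tf.string),
            'label': tf.FixedLenFeature([], tf.string),
        })
    patch = tf.decode_raw(features['patch_raw'], tf.uint8)
    patch = tf.reshape(patch, [patch_size, patch_size, 3])
    label = tf.decode_raw(features['label'], tf.int32)[0]
    return patch, label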
Example #5
def main(_):
    # open() raises IOError on failure, so no explicit success check is
    # needed; ff is the results/log file for this evaluation script.
    ff = open(FLAGS.out_file, 'w')

    os.environ["CUDA_VISIBLE_DEVICES"] = FLAGS.gpu
    config = tf.ConfigProto()
    config.gpu_options.per_process_gpu_memory_fraction = 0.7
    config.gpu_options.allow_growth = True
    if not tf.gfile.Exists(FLAGS.data_dir):
        raise RuntimeError('data directory does not exist!')

    with tf.name_scope('input'):
        x = tf.placeholder(tf.float32,
                           [None, FLAGS.patch_size, FLAGS.patch_size, 3], 'x')

    y = build.net(x, False, FLAGS)

    # update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
    # with tf.control_dependencies(update_ops):
    pred = tf.nn.softmax(y, 1)

    with tf.name_scope("saver"):
        saver = tf.train.Saver(name="saver")

    f = open(os.path.join(FLAGS.meta_dir, FLAGS.set) + '.txt', 'r')
    image_names = []
    labels = []
    line = f.readline()
    while line:
        l = line.split(' ')
        if len(l) == 2:
            image_name = l[0]
            label = l[1]
        else:
            # Image names may contain a single space.
            image_name = l[0] + ' ' + l[1]
            label = l[2]
        label = label[0:-1]  # strip the trailing newline
        image_names.append(image_name.split('.')[0] + '-' + FLAGS.extra +
                           '.' + FLAGS.format)
        labels.append(int(label))
        line = f.readline()
    f.close()

    f = open(os.path.join(FLAGS.meta_dir, 'spc_classes.txt'), 'r')
    meta = {}
    line = f.readline()
    while line:
        label, class_name = line.split(' ')
        class_name = class_name[0:-1]
        meta[int(label)] = class_name
        line = f.readline()
    f.close()
    # Hard-coded 10x10 confusion matrices: one counted at patch level and
    # one at image level.
    confusion = np.zeros(shape=(10, 10), dtype=np.uint32)
    confusion_i = np.zeros(shape=(10, 10), dtype=np.uint32)
    total = 0.
    correct = 0.
    total_p = 0.
    correct_p = 0.
    with tf.Session(config=config) as sess:
        if tf.gfile.Exists(os.path.join(FLAGS.ckpt_dir, 'checkpoint')):
            saver.restore(
                sess,
                tf.train.latest_checkpoint(FLAGS.ckpt_dir)
                if FLAGS.model_name is None else os.path.join(
                    FLAGS.ckpt_dir, FLAGS.model_name))
        else:
            raise RuntimeError("Checkpoint files don't exist!")

        for i in range(len(labels)):
            label = labels[i]
            class_name = meta[label]
            image_name = image_names[i]
            full_path = os.path.join(FLAGS.data_dir, class_name, image_name)
            img = plt.imread(full_path)
            # Take a single 512x512 crop, then sample FLAGS.patches
            # sub-patches from it; each patch is standardized individually.
            for img in get_patches(img, 1, 512):
                data = np.ndarray(shape=(FLAGS.patches, FLAGS.patch_size,
                                         FLAGS.patch_size, 3),
                                  dtype=np.float32)
                for n, patch in enumerate(
                        get_patches(img, FLAGS.patches, FLAGS.patch_size)):
                    patch = standardization(patch)
                    data[n, :] = patch
                prediction = sess.run(pred, feed_dict={x: data})
                prediction0 = np.argmax(prediction, 1)
                for n in prediction0:
                    if n == label:
                        correct_p = correct_p + 1
                    confusion[label, n] = confusion[label, n] + 1
                total_p = total_p + FLAGS.patches
                # Image-level prediction: sum per-patch softmax scores
                # (soft voting), then take the argmax.
                prediction = np.sum(prediction, 0)
                prediction = np.argmax(prediction)
                confusion_i[label, prediction] = confusion_i[label, prediction] + 1
                print("predict %d while true label is %d." % (prediction, label), file=ff)
                ff.flush()
                total = total + 1
                if prediction == label:
                    correct = correct + 1
    print('accuracy(patch level) = %f' % (correct_p / total_p), file=ff)
    print('accuracy(image level) = %f' % (correct / total), file=ff)
    print('confusion matrix--patch level:', file=ff)
    print(confusion, file=ff)
    print('confusion matrix--image level:', file=ff)
    print(confusion_i, file=ff)
    print('/|\\', file=ff)
    print(' |', file=ff)
    print('actual', file=ff)
    print(' |', file=ff)
    print(' ---prediction--->', file=ff)
    ff.close()
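Examples 5-7 also call a standardization helper that is not listed here. Judging by the name, it is presumably a per-patch zero-mean/unit-variance normalization in the spirit of tf.image.per_image_standardization; this NumPy sketch is an assumption, not the original code.

import numpy as np

def standardization(patch):
    # Presumed per-patch normalization: subtract the mean, divide by the
    # standard deviation, with a small floor to avoid dividing by zero.
    patch = patch.astype(np.float32)
    return (patch - patch.mean()) / max(float(patch.std()), 1e-6)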
Example #6
def main(_):
    # open() raises IOError on failure, so no explicit success check is
    # needed; ff receives the CSV output written below.
    ff = open(FLAGS.out_file, 'w')
    print('fname,camera', file=ff)
    os.environ["CUDA_VISIBLE_DEVICES"] = FLAGS.gpu
    config = tf.ConfigProto()
    config.gpu_options.per_process_gpu_memory_fraction = 0.7
    config.gpu_options.allow_growth = True

    with tf.name_scope('input'):
        x = tf.placeholder(tf.float32,
                           [None, FLAGS.patch_size, FLAGS.patch_size, 3], 'x')

    y = build.net(x, False, FLAGS)

    # update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
    # with tf.control_dependencies(update_ops):
    pred = tf.nn.softmax(y, 1)

    if FLAGS.ema:
        variable_averages = tf.train.ExponentialMovingAverage(0.9999)
        variables_to_restore = variable_averages.variables_to_restore()
        saver = tf.train.Saver(variables_to_restore, name='saver')
    else:
        saver = tf.train.Saver(name="saver")

    # Unlike Example 5, there is no labeled metadata file for the test set;
    # the images are listed straight from the data directory.
    image_names = os.listdir(FLAGS.data_dir)

    f = open(os.path.join(FLAGS.meta_dir, 'spc_classes.txt'), 'r')
    meta = {}
    line = f.readline()
    while line:
        label, class_name = line.split(' ')
        class_name = class_name[0:-1]
        meta[int(label)] = class_name
        line = f.readline()
    f.close()
    with tf.Session(config=config) as sess:
        if tf.gfile.Exists(os.path.join(FLAGS.ckpt_dir, 'checkpoint')):
            saver.restore(
                sess,
                tf.train.latest_checkpoint(FLAGS.ckpt_dir)
                if FLAGS.model_name is None else os.path.join(
                    FLAGS.ckpt_dir, FLAGS.model_name))
        else:
            raise RuntimeError("Check point files don't exist!")

        for i in range(len(image_names)):
            image_name = image_names[i]
            full_path = os.path.join(FLAGS.data_dir, image_name)
            img = plt.imread(full_path)
            if img.shape[2] == 4:
                img = img[:, :, 0:3]  # drop the alpha channel of RGBA images
            data = np.ndarray(shape=(FLAGS.patches, FLAGS.patch_size,
                                     FLAGS.patch_size, 3),
                              dtype=np.float32)
            for n, patch in enumerate(
                    get_patches(img, FLAGS.patches, FLAGS.patch_size)):
                patch = standardization(patch)
                data[n, :] = patch
            # data = standardization(data)
            prediction = sess.run(pred, feed_dict={x: data})
            prediction = np.argmax(prediction, 1)
            # Image-level prediction: majority vote over the per-patch
            # argmax predictions (hard voting).
            count = np.bincount(prediction)
            prediction = np.argmax(count)

            print("%s,%s" % (image_name, meta[prediction]), file=ff)
            ff.flush()

    ff.close()
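Examples 5 and 6 turn per-patch predictions into a single image-level prediction in two different ways: Example 5 sums the per-patch softmax scores before taking the argmax (soft voting), while Example 6 takes the most common per-patch argmax (hard voting). Side by side, with prediction being the (num_patches, num_classes) softmax output from sess.run:

import numpy as np

def soft_vote(prediction):
    # Example 5's strategy: sum softmax scores over patches, then argmax.
    return int(np.argmax(np.sum(prediction, 0)))

def hard_vote(prediction):
    # Example 6's strategy: argmax per patch, then majority vote.
    return int(np.argmax(np.bincount(np.argmax(prediction, 1))))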
Example #7
def main(_):
    os.environ["CUDA_VISIBLE_DEVICES"] = FLAGS.gpu
    config = tf.ConfigProto()
    config.gpu_options.per_process_gpu_memory_fraction = 0.7
    config.gpu_options.allow_growth = True

    with tf.name_scope('input'):
        x = tf.placeholder(tf.float32,
                           [None, FLAGS.patch_size, FLAGS.patch_size, 3], 'x')

    # build_net returns two outputs; only the first (w, presumably an
    # intermediate feature map to visualize) is used here.
    w, _ = build_net(x, False, FLAGS)

    # update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
    # with tf.control_dependencies(update_ops):
    # pred = tf.nn.softmax(y, 1)

    with tf.name_scope("saver"):
        saver = tf.train.Saver(name="saver")

    # The test images are listed straight from the hard-coded test
    # directory; no metadata file is used in this visualization script.
    image_names = os.listdir('/data/spcup_test')

    with tf.Session(config=config) as sess:
        if tf.gfile.Exists(os.path.join(FLAGS.ckpt_dir, 'checkpoint')):
            saver.restore(
                sess,
                tf.train.latest_checkpoint(FLAGS.ckpt_dir)
                if FLAGS.model_name is None else os.path.join(
                    FLAGS.ckpt_dir, FLAGS.model_name))
        else:
            raise RuntimeError("Checkpoint files don't exist!")

        for i in range(len(image_names)):
            image_name = image_names[i]
            print(image_name)
            full_path = os.path.join('/data/spcup_test', image_name)
            img = plt.imread(full_path)

            data = np.ndarray(shape=(FLAGS.patches, FLAGS.patch_size,
                                     FLAGS.patch_size, 3),
                              dtype=np.float32)
            for n, patch in enumerate(
                    get_patches(img, FLAGS.patches, FLAGS.patch_size)):
                patch = standardization(patch)
                data[n, :] = patch
            # Run the network on the patch batch and display the output
            # for the first patch.
            ww = sess.run(w, feed_dict={x: data})[0, :]
            plt.imshow(ww)
            plt.show()