def main(argv=None):
    np.random.seed(3796)
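    # build_session (a helper defined elsewhere in this repo) is assumed to place
    # the FCN graph on the GPU named by argv[1] and return the tensors, ops,
    # summaries, saver, and writers unpacked below.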
    image, logits, is_training, keep_probability, sess, annotation, train_op, loss, acc, loss_summary, acc_summary, saver, pred_annotation, train_writer = build_session(argv[1])

    print("Setting up image reader...")
    train_records, valid_records = reader.read_dataset_potsdam_submission(FLAGS.data_dir)
    print("Train records: %d" % len(train_records))
    print("Validation records: %d" % len(valid_records))

    print("Setting up dataset reader")
    image_options = {'resize': False, 'resize_size': IMAGE_SIZE}
    if FLAGS.mode == 'train':
        train_dataset_reader = dataset.Batch_manager(train_records, image_options)
    validation_dataset_reader = dataset.Batch_manager(valid_records, image_options)

    if FLAGS.mode == "train":
        for itr in range(MAX_ITERATION):
            train_images, train_annotations = train_dataset_reader.next_batch_potsdam(saver, FLAGS.batch_size, image, logits, keep_probability, sess, is_training, FLAGS.logs_dir)
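            # Train with dropout active (keep_probability 0.5); the evaluation
            # pass below re-feeds the same batch with dropout disabled (1.0).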
            feed_dict = {image: train_images, annotation: train_annotations, keep_probability: 0.5, is_training: True}
            tf.set_random_seed(3796 + itr)  # get deterministically random dropouts; note that in TF 1.x this graph-level seed only affects ops created after the call, so it should also be set before the graph is built
            sess.run(train_op, feed_dict=feed_dict)

            if itr % 50 == 0:
                feed_dict = {image: train_images, annotation: train_annotations, keep_probability: 1.0, is_training: False}
                train_loss, train_acc, summary_loss, summary_acc = sess.run([loss, acc, loss_summary, acc_summary], feed_dict=feed_dict)
                print("Step: %d, Train_loss: %g, Train_acc: %g" % (itr, train_loss, train_acc))
                with open(join(FLAGS.logs_dir, 'iter_train_loss.csv'), 'a') as f:
                    f.write(str(itr) + ',' + str(train_loss) + '\n')
                with open(join(FLAGS.logs_dir, 'iter_train_acc.csv'), 'a') as f:
                    f.write(str(itr) + ',' + str(train_acc) + '\n')
                train_writer.add_summary(summary_loss, itr)
                train_writer.add_summary(summary_acc, itr)

    elif FLAGS.mode == "visualize":
        valid_images, valid_annotations = validation_dataset_reader.get_random_batch(FLAGS.batch_size)
        pred = sess.run(pred_annotation, feed_dict={image: valid_images, annotation: valid_annotations,
                                                    keep_probability: 1.0, is_training: False})
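        # Drop the singleton channel axis so annotations and predictions are
        # (batch, height, width) arrays for saving.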
        valid_annotations = np.squeeze(valid_annotations, axis=3)
        pred = np.squeeze(pred, axis=3)

        for itr in range(FLAGS.batch_size):
            print(valid_images[itr].astype(np.uint8).shape)
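            # Keep only the first three channels so the input saves as an RGB
            # image (the loaded images may carry additional channels).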
            utils.save_image(valid_images[itr, :, :, :3].astype(np.uint8), FLAGS.logs_dir, name="inp_" + str(itr))
            print(valid_annotations[itr].astype(np.uint8).shape)
            utils.save_image(valid_annotations[itr].astype(np.uint8), FLAGS.logs_dir, name="gt_" + str(itr))
            print(pred[itr].astype(np.uint8).shape)
            utils.save_image(pred[itr].astype(np.uint8), FLAGS.logs_dir, name="pred_" + str(itr))
            print("Saved image: %d" % itr)
Example 2
def main(argv=None):
    np.random.seed(3796)
    image, logits, is_training, keep_probability, sess, annotation, train_op, loss, acc, loss_summary, acc_summary, saver, pred_annotation, train_writer, validation_writer = build_session(argv[1])

    print("Setting up image reader...")
    train_records, valid_records = reader.read_dataset_test(FLAGS.data_dir)
    print("Train records: %d" % len(train_records))
    print("Validation records: %d" % len(valid_records))

    print("Setting up dataset reader")
    image_options = {'resize': False, 'resize_size': IMAGE_SIZE}
    if FLAGS.mode == 'train':
        train_dataset_reader = dataset.Batch_manager(train_records, image_options)
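    # The validation reader is built in every mode: training uses it for the
    # periodic evaluation below, and visualize mode draws random batches from it.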
    validation_dataset_reader = dataset.Batch_manager(valid_records, image_options)

    """ os.environ["CUDA_VISIBLE_DEVICES"] = argv[1]
    keep_probability = tf.placeholder(tf.float32, name="keep_probability")
    image = tf.placeholder(tf.float32, shape=[None, IMAGE_SIZE, IMAGE_SIZE, 3], name="input_image")
    annotation = tf.placeholder(tf.int32, shape=[None, IMAGE_SIZE, IMAGE_SIZE, 1], name="annotation")
    pred_annotation, logits = inference(image, keep_probability)
    annotation_64 = tf.cast(annotation, dtype=tf.int64)
    # calculate accuracy for batch.
    cal_acc = tf.equal(pred_annotation, annotation_64)
    cal_acc = tf.cast(cal_acc, dtype=tf.int8)
    acc = tf.count_nonzero(cal_acc) / (FLAGS.batch_size * IMAGE_SIZE * IMAGE_SIZE)
    tf.summary.image("input_image", image, max_outputs=2)
    tf.summary.image("ground_truth", tf.cast(annotation, tf.uint8), max_outputs=2)
    tf.summary.image("pred_annotation", tf.cast(pred_annotation, tf.uint8), max_outputs=2)
    loss = tf.reduce_mean((tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits,
                                                                          labels=tf.squeeze(annotation,
                                                                                            axis=[3]),
                                                                          name="entropy")))
    loss_summary = tf.summary.scalar("entropy", loss)
    # log accuracy to TensorBoard
    acc_summary = tf.summary.scalar("accuracy", acc)
    trainable_var = tf.trainable_variables()
    if FLAGS.debug:
        for var in trainable_var:
            utils.add_to_regularization_and_summary(var)
    train_op = train(loss, trainable_var)
    print("Setting up summary op...")
    summary_op = tf.summary.merge_all()

    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    sess = tf.Session(config=config)

    print("Setting up Saver...")
    saver = tf.train.Saver()
    # train_writer = tf.summary.FileWriter(FLAGS.logs_dir + '/train', sess.graph)
    # validation_writer = tf.summary.FileWriter(FLAGS.logs_dir + '/validation')
    sess.run(tf.global_variables_initializer())
    ckpt = tf.train.get_checkpoint_state(FLAGS.logs_dir)
    if ckpt and ckpt.model_checkpoint_path:
        saver.restore(sess, ckpt.model_checkpoint_path)
        print("Model restored...") """

    if FLAGS.mode == "train":
        for itr in range(MAX_ITERATION):
            train_images, train_annotations = train_dataset_reader.next_batch(saver, FLAGS.batch_size, image, logits, keep_probability, sess, is_training, FLAGS.logs_dir)
            feed_dict = {image: train_images, annotation: train_annotations, keep_probability: 1.0, is_training: True}  # note: keep_probability is 1.0 even during training here; dropout is presumably gated by is_training instead
            tf.set_random_seed(3796 + itr) # get deterministically random dropouts
            sess.run(train_op, feed_dict=feed_dict)

            if itr % 50 == 0:
                feed_dict = {image: train_images, annotation: train_annotations, keep_probability: 1.0, is_training: False}
                train_loss, train_acc, summary_loss, summary_acc = sess.run([loss, acc, loss_summary, acc_summary], feed_dict=feed_dict)
                print("Step: %d, Train_loss: %g, Train_acc: %g" % (itr, train_loss, train_acc))
                with open(join(FLAGS.logs_dir, 'iter_train_loss.csv'), 'a') as f:
                    f.write(str(itr) + ',' + str(train_loss) + '\n')
                with open(join(FLAGS.logs_dir, 'iter_train_acc.csv'), 'a') as f:
                    f.write(str(itr) + ',' + str(train_acc) + '\n')
                train_writer.add_summary(summary_loss, itr)
                train_writer.add_summary(summary_acc, itr)
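            # Every 600 iterations: evaluate one validation batch, log to
            # TensorBoard and CSV, and write a checkpoint.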
            if itr % 600 == 0:
                valid_images, valid_annotations = validation_dataset_reader.next_batch(saver, FLAGS.batch_size, image, logits, keep_probability, sess, is_training, FLAGS.logs_dir, is_validation=True)
                valid_loss, valid_acc, summary_loss, summary_acc = sess.run([loss, acc, loss_summary, acc_summary],
                                                feed_dict={image: valid_images, annotation: valid_annotations,
                                                            keep_probability: 1.0, is_training: False})
                validation_writer.add_summary(summary_loss, itr)
                validation_writer.add_summary(summary_acc, itr)
                print("%s ---> Validation_loss: %g , Validation Accuracy: %g" % (datetime.datetime.now(), valid_loss, valid_acc))
                with open(join(FLAGS.logs_dir, 'iter_val_loss.csv'), 'a') as f:
                    f.write(str(itr) + ',' + str(valid_loss) + '\n')
                with open(join(FLAGS.logs_dir, 'iter_val_acc.csv'), 'a') as f:
                    f.write(str(itr) + ',' + str(valid_acc) + '\n')
                saver.save(sess, join(FLAGS.logs_dir, "model.ckpt"), global_step=itr)

    elif FLAGS.mode == "visualize":
        valid_images, valid_annotations = validation_dataset_reader.get_random_batch(FLAGS.batch_size)
        pred = sess.run(pred_annotation, feed_dict={image: valid_images, annotation: valid_annotations,
                                                    keep_probability: 1.0, is_training: False})
        valid_annotations = np.squeeze(valid_annotations, axis=3)
        pred = np.squeeze(pred, axis=3)

        for itr in range(FLAGS.batch_size):
            print(valid_images[itr].astype(np.uint8).shape)
            utils.save_image(valid_images[itr, :, :, :3].astype(np.uint8), FLAGS.logs_dir, name="inp_" + str(itr))
            print(valid_annotations[itr].astype(np.uint8).shape)
            utils.save_image(valid_annotations[itr].astype(np.uint8), FLAGS.logs_dir, name="gt_" + str(itr))
            print(pred[itr].astype(np.uint8).shape)
            utils.save_image(pred[itr].astype(np.uint8), FLAGS.logs_dir, name="pred_" + str(itr))
            print("Saved image: %d" % itr)
Example 3
def main(argv=None):
    np.random.seed(3796)
    net, image, logits, is_training, keep_probability, sess, annotation, train_op, loss, acc, loss_summary, acc_summary, saver, pred_annotation, train_writer, validation_writer = build_session(argv[1])

    print("Setting up image reader...")
    """ train_records, valid_records = reader.read_dataset_OCR(FLAGS.data_dir)
    print(len(train_records))
    print(len(valid_records)) """

    # Split the annotation filenames 80/20 into training and validation sets.
    files = listdir(join(FLAGS.data_dir, 'annotations'))
    training_image = files[:int(len(files) * 0.8)]
    validation_image = files[int(len(files) * 0.8):]

    print("Setting up dataset reader")
    image_options = {'resize': False, 'resize_size': IMAGE_SIZE}
    if FLAGS.mode == 'train':
        train_dataset_reader = dataset.Batch_manager(training_image, FLAGS.data_dir, image_options)
    validation_dataset_reader = dataset.Batch_manager(validation_image, FLAGS.data_dir, image_options)

    if FLAGS.mode == "train":
        for itr in range(MAX_ITERATION):
            train_images, train_annotations = train_dataset_reader.next_batch(saver, FLAGS.batch_size, image, logits, keep_probability, sess, is_training, FLAGS.logs_dir)
            feed_dict = {image: train_images, annotation: train_annotations, keep_probability: 1.0, is_training: True}
            tf.set_random_seed(3796 + itr) # get deterministically random dropouts
            """ print('input_tensor:', sess.run(tf.shape(image), feed_dict=feed_dict))
            print('pool1:', sess.run(tf.shape(net['pool1']), feed_dict=feed_dict))
            print('pool2:', sess.run(tf.shape(net['pool2']), feed_dict=feed_dict))
            print('pool3:', sess.run(tf.shape(net['pool3']), feed_dict=feed_dict))
            print('pool4:', sess.run(tf.shape(net['pool4']), feed_dict=feed_dict))
            print("\nTRAINING:", itr, '\n') """
            sess.run(train_op, feed_dict=feed_dict)

            if itr % 50 == 0:
                feed_dict = {image: train_images, annotation: train_annotations, keep_probability: 1.0, is_training: False}
                train_loss, train_acc, summary_loss, summary_acc = sess.run([loss, acc, loss_summary, acc_summary], feed_dict=feed_dict)
                print("Step: %d, Train_loss: %g, Train_acc: %g" % (itr, train_loss, train_acc))
                with open(join(FLAGS.logs_dir, 'iter_train_loss.csv'), 'a') as f:
                    f.write(str(itr) + ',' + str(train_loss) + '\n')
                with open(join(FLAGS.logs_dir, 'iter_train_acc.csv'), 'a') as f:
                    f.write(str(itr) + ',' + str(train_acc) + '\n')
                train_writer.add_summary(summary_loss, itr)
                train_writer.add_summary(summary_acc, itr)
            if itr % 500 == 0:
                valid_images, valid_annotations = validation_dataset_reader.next_batch(saver, FLAGS.batch_size, image, logits, keep_probability, sess, is_training, FLAGS.logs_dir, is_validation=True)
                valid_loss, valid_acc, summary_loss, summary_acc = sess.run([loss, acc, loss_summary, acc_summary],
                                                feed_dict={image: valid_images, annotation: valid_annotations,
                                                            keep_probability: 1.0, is_training: False})
                validation_writer.add_summary(summary_loss, itr)
                validation_writer.add_summary(summary_acc, itr)
                print("%s ---> Validation_loss: %g , Validation Accuracy: %g" % (datetime.datetime.now(), valid_loss, valid_acc))
                with open(join(FLAGS.logs_dir, 'iter_val_loss.csv'), 'a') as f:
                    f.write(str(itr) + ',' + str(valid_loss) + '\n')
                with open(join(FLAGS.logs_dir, 'iter_val_acc.csv'), 'a') as f:
                    f.write(str(itr) + ',' + str(valid_acc) + '\n')
                # saver.save(sess, FLAGS.logs_dir + "model.ckpt", itr)

    elif FLAGS.mode == "visualize":
        valid_images, valid_annotations = validation_dataset_reader.get_random_batch(FLAGS.batch_size)
        pred = sess.run(pred_annotation, feed_dict={image: valid_images, annotation: valid_annotations,
                                                    keep_probability: 1.0, is_training: False})
        valid_annotations = np.squeeze(valid_annotations, axis=3)
        pred = np.squeeze(pred, axis=3)

        for itr in range(FLAGS.batch_size):
            print(valid_images[itr].astype(np.uint8).shape)
            utils.save_image(valid_images[itr, :, :, :3].astype(np.uint8), FLAGS.logs_dir, name="inp_" + str(itr))
            print(valid_annotations[itr].astype(np.uint8).shape)
            utils.save_image(valid_annotations[itr].astype(np.uint8), FLAGS.logs_dir, name="gt_" + str(itr))
            print(pred[itr].astype(np.uint8).shape)
            utils.save_image(pred[itr].astype(np.uint8), FLAGS.logs_dir, name="pred_" + str(itr))
            print("Saved image: %d" % itr)