Example #1
def main(_):
    data_dir = FLAGS.data_dir
    label_map_dict = dataset_utils.parse_label_map_to_dict(
        FLAGS.label_map_path)

    logging.info('Reading from Pet dataset.')

    image_dir = os.path.join(data_dir, 'images')
    annotations_dir = os.path.join(data_dir, 'annotations')
    examples_path = os.path.join(annotations_dir, 'trainval.txt')
    examples_list = dataset_utils.read_examples_list(examples_path)

    # Test images are not included in the downloaded data set,
    # so we shall perform our own split.
    random.seed(42)
    random.shuffle(examples_list)

    num_examples = len(examples_list)
    num_train = int(0.7 * num_examples)
    train_examples = examples_list[:num_train]
    val_examples = examples_list[num_train:]

    logging.info('%d training and %d validation examples.',
                 len(train_examples), len(val_examples))

    train_output_path = os.path.join(FLAGS.output_dir, 'pet_train.txt')
    val_output_path = os.path.join(FLAGS.output_dir, 'pet_val.txt')
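
All of the examples on this page go through dataset_utils.read_examples_list, whose body is not shown here. As a point of reference, a minimal sketch, assuming the list file holds one example per line with whitespace-separated fields (a bare identifier in Examples 1 and 3, an image name plus an angle in Examples 2 and 4), could look like this:

import tensorflow as tf


def read_examples_list(path):
    """Return one entry per non-empty line of a plain-text example list.

    Lines with a single field yield a string; lines with several
    whitespace-separated fields yield a list of strings, which is what
    lets callers unpack pairs such as (image_name, theta).
    """
    with tf.gfile.GFile(path, 'r') as fid:
        lines = fid.readlines()
    examples = []
    for line in lines:
        fields = line.strip().split()
        if not fields:
            continue
        examples.append(fields[0] if len(fields) == 1 else fields)
    return examples
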
Example #2
def main():
    if args.set not in sets:
        raise ValueError('set must be in : {}'.format(sets))

    data_dir = os.path.join(args.data_dir, args.set)
    for label in labels:
        writer = tf.python_io.TFRecordWriter(
            os.path.join(args.output_path, args.set + '_' + label + '.tfrecord'))
        data_info = dataset_utils.read_examples_list(
            os.path.join(data_dir, label, label_file))
        for image_name, theta in data_info:
            image_path = os.path.join(data_dir, label, folder, image_name)
            tf_example = dict_to_tf_example(image_path, args.image_size, label, theta)
            writer.write(tf_example.SerializeToString())
        writer.close()
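
dict_to_tf_example is likewise not defined in the snippet. A rough sketch of what a helper with this signature might do, assuming it stores the resized, JPEG-encoded image together with the label and the rotation angle theta (the feature keys are illustrative, not taken from the original code):

import io

import PIL.Image
import tensorflow as tf


def dict_to_tf_example(image_path, image_size, label, theta):
    """Pack a resized JPEG and its metadata into a tf.train.Example."""
    with tf.gfile.GFile(image_path, 'rb') as fid:
        image = PIL.Image.open(io.BytesIO(fid.read()))
    image = image.convert('RGB').resize((image_size, image_size))
    buffer = io.BytesIO()
    image.save(buffer, format='JPEG')
    feature = {
        'image/encoded': tf.train.Feature(
            bytes_list=tf.train.BytesList(value=[buffer.getvalue()])),
        'image/label': tf.train.Feature(
            bytes_list=tf.train.BytesList(value=[label.encode('utf8')])),
        'image/theta': tf.train.Feature(
            float_list=tf.train.FloatList(value=[float(theta)])),
    }
    return tf.train.Example(features=tf.train.Features(feature=feature))
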
Example #3
def main(_):
    if FLAGS.set not in SETS:
        raise ValueError('set must be in : {}'.format(SETS))
    if FLAGS.year not in YEARS:
        raise ValueError('year must be in : {}'.format(YEARS))

    data_dir = FLAGS.data_dir
    years = ['VOC2007', 'VOC2012']
    if FLAGS.year != 'merged':
        years = [FLAGS.year]

    output_file = os.path.join(data_dir,
                               '{}_{}.txt'.format(FLAGS.set, FLAGS.year))
    with open(output_file, 'w') as txt_file:
        items = 0

        for year in years:
            annotations_dir = os.path.join(data_dir, year,
                                           FLAGS.annotations_dir)
            examples_path = os.path.join(data_dir, year, 'ImageSets', 'Main',
                                         'aeroplane_' + FLAGS.set + '.txt')
            examples_list = read_examples_list(examples_path)

            for idx, example in enumerate(examples_list):
                path = os.path.join(annotations_dir, example + '.xml')
                with tf.gfile.GFile(path, 'r') as fid:
                    xml_str = fid.read()
                xml = etree.fromstring(xml_str)
                data = recursive_parse_xml_to_dict(xml)['annotation']

                img_path = os.path.join(data_dir, year, 'JPEGImages',
                                        data['filename'])
                for obj in data['object']:
                    difficult = bool(int(obj['difficult']))
                    if FLAGS.ignore_difficult_instances and difficult:
                        continue

                    xmin = float(obj['bndbox']['xmin'])
                    ymin = float(obj['bndbox']['ymin'])
                    xmax = float(obj['bndbox']['xmax'])
                    ymax = float(obj['bndbox']['ymax'])
                    classes_text = obj['name']

                    # Write item to text file here
                    items += 1
                    instance = "{},{},{},{},{},{}\n".format(
                        img_path, xmin, ymin, xmax, ymax, classes_text)
                    txt_file.write(instance)

        print('Total instances', items)
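
Here, recursive_parse_xml_to_dict plays the same role as the helper of that name in the TensorFlow Object Detection API's dataset utilities. A minimal sketch that yields the nested dict accessed above (repeated <object> elements are collected into a list) is:

def recursive_parse_xml_to_dict(xml):
    """Convert an lxml element tree into a nested dict keyed by tag names."""
    if not len(xml):
        # Leaf node: return its text content.
        return {xml.tag: xml.text}
    result = {}
    for child in xml:
        child_result = recursive_parse_xml_to_dict(child)
        if child.tag != 'object':
            result[child.tag] = child_result[child.tag]
        else:
            # A VOC annotation may contain several <object> entries.
            result.setdefault(child.tag, []).append(child_result[child.tag])
    return {xml.tag: result}
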
Example #4
def main():
    if args.set not in sets:
        raise ValueError('set must be in : {}'.format(sets))

    data_dir = os.path.join(args.data_dir, args.set)

    hparams = create_domain_adapt_hparams()
    # Feed through a dedicated placeholder so the mean subtraction below is
    # actually applied to the images fed in at inference time.
    images_ph = tf.placeholder(dtype=tf.float32, shape=[None, 224, 224, 3])
    images_t = images_ph - [123.68, 116.779, 103.939]
    with slim.arg_scope(model_arg_scope()):
        net, _ = model(inputs=images_t,
                       num_classes=num_classes,
                       is_training=False,
                       dropout_keep_prob=1.0,
                       reuse=tf.AUTO_REUSE,
                       scope=hparams.scope,
                       adapt_scope='target_adapt_layer',
                       adapt_dims=128)
    min_index = tf.argmin(net, axis=1)
    max_index = tf.argmax(net, axis=1)
    saver = tf.train.Saver()
    session_config = tf.ConfigProto(allow_soft_placement=True,
                                    log_device_placement=False)
    session_config.gpu_options.allow_growth = True
    sess = tf.Session(config=session_config)
    saver.restore(sess, tf.train.latest_checkpoint(args.checkpoint_dir))
    print('Successfully loaded model: {}.'.format(
        tf.train.latest_checkpoint(args.checkpoint_dir)))

    pos_writer = tf.python_io.TFRecordWriter(
        os.path.join(args.output_path,
                     'pseudo_positive' + '_' + args.set + '.tfrecord'))
    neg_writer = tf.python_io.TFRecordWriter(
        os.path.join(args.output_path,
                     'pseudo_negative' + '_' + args.set + '.tfrecord'))
    for label in labels:
        data_info = dataset_utils.read_examples_list(
            os.path.join(data_dir, label, label_file))
        random.shuffle(data_info)
        for image_name, theta in data_info:
            image_path = os.path.join(data_dir, label, folder, image_name)
            with tf.gfile.GFile(image_path, 'rb') as fid:
                encoded_jpg = fid.read()
            encoded_jpg_io = io.BytesIO(encoded_jpg)
            image = PIL.Image.open(encoded_jpg_io)
            if image.format != 'JPEG':
                raise ValueError('Image format not JPEG')
            image = image.resize((224, 224))
            image_n = np.array(image)
            image_n = np.expand_dims(image_n, axis=0).astype(np.float32)
            scores, min_idx, max_idx = sess.run([net, min_index, max_index],
                                                feed_dict={images_ph: image_n})
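            # Pseudo-labelling heuristic: only images whose scores straddle zero
            # are kept; a strongly negative minimum score (< -5.0) is written as
            # a 'positive' pseudo-example together with min_idx[0], and a
            # strongly positive maximum score (> 5.0) as a 'negative' one
            # together with max_idx[0].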
            if np.min(scores) < 0 and np.max(scores) > 0:
                if np.min(scores) < -5.0:
                    tf_example = dict_to_tf_example(encoded_jpg, 'positive',
                                                    min_idx[0])
                    pos_writer.write(tf_example.SerializeToString())
                if np.max(scores) > 5.0:
                    tf_example = dict_to_tf_example(encoded_jpg, 'negative',
                                                    max_idx[0])
                    neg_writer.write(tf_example.SerializeToString())
    pos_writer.close()
    neg_writer.close()
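
The .tfrecord files produced in Examples 2 and 4 can be spot-checked after the fact; a short sketch using the TF 1.x record iterator (the helper name is ours, and the feature keys depend on how dict_to_tf_example names them):

import tensorflow as tf


def inspect_tfrecord(tfrecord_path):
    """Print the feature keys of the first record and the total record count."""
    count = 0
    for serialized in tf.python_io.tf_record_iterator(tfrecord_path):
        example = tf.train.Example.FromString(serialized)
        if count == 0:
            print(sorted(example.features.feature.keys()))
        count += 1
    print('{} records in {}'.format(count, tfrecord_path))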