Example #1
def absolute_path_to_relative_path(files):
    # For each annotation file, strip absolute image paths down to their
    # last two components (parent directory + file name) and save in place.
    for f in files:
        annotations = al.parse(f)
        for img in annotations:
            if img.imageName[0] == '/':
                img.imageName = '/'.join(img.imageName.split('/')[-2:])
        al.save(f, annotations)
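A minimal way to drive this helper, assuming `al` is the annotation-list module (AnnotationLib) used throughout these examples; the import and the glob pattern below are illustrative, not part of the original:

import glob

files = glob.glob('*.al')  # assumed location of the annotation files
absolute_path_to_relative_path(files)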
Example #2
def create_tracking_files():
    # files = glob.glob('/scail/group/deeplearning/driving_data/andriluka/IMAGES/driving_data_q50_data/all_extracted/*.al')
    # files = [f for f in files if f.find('edit') == -1 and f.find('track') == -1]

    files = glob.glob('/scail/group/deeplearning/driving_data/andriluka/IMAGES/driving_data_q50_data/all_extracted/benchmark_seq*_cleanup_trackid_every20.pal')
    absolute_path_to_relative_path(files)
    all_args = []
    for f in files:
        all_args.append(('-a %s -o ./ ' % f).split(' '))

    # Run the tracker entry point (m.main) over every annotation file in
    # parallel, reporting progress as jobs complete.
    pool = multiprocessing.Pool(processes=9)
    for i, result in enumerate(pool.imap_unordered(m.main, all_args)):
        print("done %0.1f%%" % (float(i + 1) * 100 / len(all_args)))
    pool.close()
    pool.join()

    # remove partials
    partials = glob.glob('*partial.pal')
    for p in partials:
        os.remove(p)

    # make image paths in the tracked output files relative as well
    for f in files:
        tracked_file = os.path.basename(f).replace('.pal','-track.pal')
        imgs = al.parse(tracked_file)
        for img in imgs:
            if img.imageName[0] == '/':
                img.imageName = '/'.join(img.imageName.split('/')[-2:])
        al.save(tracked_file, imgs)
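The imap_unordered idiom above is useful whenever you want a progress read-out from a worker pool; here is a self-contained sketch of the same pattern, where the work function and argument list are placeholders rather than anything from the original script:

import multiprocessing

def work(arg):
    return arg * arg  # stand-in for the real per-file job

if __name__ == '__main__':
    all_args = list(range(100))
    pool = multiprocessing.Pool(processes=4)
    for i, result in enumerate(pool.imap_unordered(work, all_args)):
        print("done %0.1f%%" % (float(i + 1) * 100 / len(all_args)))
    pool.close()
    pool.join()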
Example #3
    def eval(self, weights, test_boxes, min_conf, tau, show_suppressed, expname):
        """Run the trained detector over every image listed in test_boxes and
        return the predicted and ground-truth annotation lists."""
        # Integer division keeps the grid dimensions usable as tensor shapes.
        self.H["grid_width"] = self.H["image_width"] // self.H["region_size"]
        self.H["grid_height"] = self.H["image_height"] // self.H["region_size"]
        x_in = tf.placeholder(tf.float32, name='x_in', shape=[self.H['image_height'], self.H['image_width'], 3])
        if self.H['use_rezoom']:
            pred_boxes, pred_logits, pred_confidences, pred_confs_deltas, pred_boxes_deltas = self.build_forward(tf.expand_dims(x_in, 0), 'test', reuse=None)
            grid_area = self.H['grid_height'] * self.H['grid_width']
            pred_confidences = tf.reshape(tf.nn.softmax(tf.reshape(pred_confs_deltas, [grid_area * self.H['rnn_len'], 2])),
                                          [grid_area, self.H['rnn_len'], 2])
            if self.H['reregress']:
                pred_boxes = pred_boxes + pred_boxes_deltas
        else:
            pred_boxes, pred_logits, pred_confidences = self.build_forward(tf.expand_dims(x_in, 0), 'test', reuse=None)
        saver = tf.train.Saver()
        with tf.Session() as sess:
            sess.run(tf.global_variables_initializer())
            saver.restore(sess, weights)

            pred_annolist = al.AnnoList()

            true_annolist = al.parse(test_boxes)
            data_dir = os.path.dirname(test_boxes)
            image_dir = self.get_image_dir(weights, expname, test_boxes)
            subprocess.call('mkdir -p %s' % image_dir, shell=True)
            for i in range(len(true_annolist)):
                true_anno = true_annolist[i]
                # Load the image, drop any alpha channel, and resize it to the
                # network's input resolution.
                orig_img = imread('%s/%s' % (data_dir, true_anno.imageName))[:,:,:3]
                img = imresize(orig_img, (self.H["image_height"], self.H["image_width"]), interp='cubic')
                feed = {x_in: img}
                (np_pred_boxes, np_pred_confidences) = sess.run([pred_boxes, pred_confidences], feed_dict=feed)
                pred_anno = al.Annotation()
                pred_anno.imageName = true_anno.imageName
                new_img, rects = add_rectangles(self.H, [img], np_pred_confidences, np_pred_boxes,
                                                use_stitching=True, rnn_len=self.H['rnn_len'], min_conf=min_conf, tau=tau, show_suppressed=show_suppressed)

                pred_anno.rects = rects
                pred_anno.imagePath = os.path.abspath(data_dir)
                pred_anno = rescale_boxes((self.H["image_height"], self.H["image_width"]), pred_anno, orig_img.shape[0], orig_img.shape[1])
                pred_annolist.append(pred_anno)

                imname = '%s/%s' % (image_dir, os.path.basename(true_anno.imageName))
                misc.imsave(imname, new_img)
                if i % 25 == 0:
                    print(i)
        return pred_annolist, true_annolist
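A possible call site for this method; the instance name, checkpoint and annotation file names, and threshold values are all illustrative, and al.save is used with the (path, annolist) signature seen in Examples #1 and #2:

pred_annolist, true_annolist = model.eval(weights='save.ckpt', test_boxes='test_boxes.al',
                                          min_conf=0.2, tau=0.25,
                                          show_suppressed=True, expname='demo')
al.save('predictions.al', pred_annolist)
al.save('ground_truth.al', true_annolist)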
Example #4
def load_idl_tf(idlfile, H, jitter):
    """Take the idlfile and net configuration and create a generator
    that outputs a jittered version of a random image from the annolist
    that is mean corrected."""

    annolist = al.parse(idlfile)
    annos = []
    for anno in annolist:
        anno.imageName = os.path.join(
            os.path.dirname(os.path.realpath(idlfile)), anno.imageName)
        annos.append(anno)
    random.seed(0)

    if H['data']['truncate_data']:
        annos = annos[:10]
    for epoch in itertools.count():
        # Iterate forever, reshuffling the annotation list once per epoch.
        random.shuffle(annos)
        for origin_anno in annos:
            tiles = preprocess_image(deepcopy(origin_anno), H)
            for I, anno in tiles:
                if jitter:
                    jitter_scale_min = 0.9
                    jitter_scale_max = 1.1
                    jitter_offset = 16
                    I, anno = annotation_jitter(
                        I,
                        anno,
                        target_width=H["image_width"],
                        target_height=H["image_height"],
                        jitter_scale_min=jitter_scale_min,
                        jitter_scale_max=jitter_scale_max,
                        jitter_offset=jitter_offset)

                boxes, flags = annotation_to_h5(H, anno)

                yield {"image": I, "boxes": boxes, "flags": flags}
Example #5
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "hit_dir", help="hit directory for storing mturk related data (eg. hit id's and worker results)"
    )
    parser.add_argument(
        "annolist_filename",
        help="annotation list in idl/al/pal format, object bounding boxes in the annotation list will be pre-loaded to mturk",
    )

    args = parser.parse_args()

    print "hit_dir: ", args.hit_dir
    print "annolist_filename: ", args.annolist_filename

    annolist = al.parse(args.annolist_filename)

    print "generating hits for {0} images".format(len(annolist))

    # load hit-specific parameters
    if not os.path.isdir(args.hit_dir):
        print args.hit_dir, "does not exist, exiting..."
        sys.exit()
    else:
        hit_params_filename = args.hit_dir + "/hit_params.sh"

        print "loading hit parameters from: ", hit_params_filename

        if not os.path.isfile(hit_params_filename):
            print hit_params_filename, "does not exist, exiting..."
            sys.exit()
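The script stops right after validating hit_params.sh. If you needed to consume such a file from Python rather than from the shell, one plausible approach is to parse its simple KEY=value lines directly; this helper is a sketch under that assumption and is not part of the original:

def load_hit_params(filename):
    # Parse KEY=value lines from a shell-style parameter file,
    # skipping blank lines and comments.
    params = {}
    with open(filename) as f:
        for line in f:
            line = line.strip()
            if not line or line.startswith('#') or '=' not in line:
                continue
            key, value = line.split('=', 1)
            params[key] = value.strip('\'"')
    return params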
Example #6
    def __init__(self, al_file):
        self.anns = al.parse(al_file)
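This constructor evidently belongs to a small wrapper class around a parsed annotation list; a minimal sketch of such a wrapper, where the class name and the lookup method are assumptions rather than part of the original:

class AnnoSet(object):
    def __init__(self, al_file):
        self.anns = al.parse(al_file)

    def by_image_name(self, name):
        # Hypothetical helper: all annotations for a given image.
        return [a for a in self.anns if a.imageName == name]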
Example #7
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "hit_dir", help="hit directory for storing mturk related data (eg. hit id's and worker results)"
    )
    parser.add_argument(
        "annolist_filename",
        help="annotation list in idl/al/pal format, object bounding boxes in the annotation list will be pre-loaded to mturk",
    )

    parser.add_argument("--empty_only",
                        action="store_true",
                        help="only add images without annotations")

    args = parser.parse_args()

    print "hit_dir: ", args.hit_dir
    print "annolist_filename: ", args.annolist_filename

    annolist = al.parse(args.annolist_filename)

    print "generating hits for {0} images".format(len(annolist))

    # load hit-specific parameters
    if not os.path.isdir(args.hit_dir):
        print args.hit_dir, "does not exist, exiting..."
        sys.exit()
    else:
        hit_params_filename = args.hit_dir + "/hit_params.sh"

        print "loading hit parameters from: ", hit_params_filename

        if not os.path.isfile(hit_params_filename):
            print hit_params_filename, "does not exist, exiting..."
            sys.exit()