Example #1
0
def triangulate_ransac_pt(Ps, projs, im_shapes, ransac_thresh, max_pt_dist=5.):
    """Triangulate a single 3D point from multiple views with RANSAC.

    Repeatedly samples pairs of cameras, triangulates a candidate point X,
    filters degenerate samples, and scores X by how many cameras agree
    (low reprojection error, X in front of the camera, X within
    max_pt_dist of the camera center).  The point is re-triangulated from
    the best inlier set before returning.

    Ps: per-view camera projection matrices (consumed by center_from_P /
        triangulate_nonlinear_pt -- presumably 3x4; confirm there).
    projs: per-view 2D observations of the point.
    im_shapes: per-view image shapes, forwarded to triangulate_nonlinear_pt.
    ransac_thresh: max reprojection error for a view to count as an inlier.
    max_pt_dist: max allowed camera-center-to-point distance for an inlier.

    Returns (X, best_inliers); best_inliers may be empty if no sample
    passed the filters (X is then triangulated from cameras [0, 1]).
    """
    best_inliers = []
    # Feature toggles for the sample/inlier filters used below.
    # CHOSEN_MIN_ANGLE: reject samples whose two rays subtend a
    # near-degenerate angle (too parallel or too wide).
    CHOSEN_MIN_ANGLE = True
    MIN_ANGLE = np.radians(1)
    MAX_ANGLE = np.radians(120)

    # INLIER_MINDIST: thin inliers by camera-center separation (disabled).
    # INLIER_MINANGLE: thin inliers by viewing-angle separation (enabled).
    INLIER_MINDIST = False
    INLIER_MINANGLE = True

    # MIN_TRI_DIST: require a minimum baseline between the two sampled
    # cameras (disabled).
    MIN_TRI_DIST = False
    TRI_DIST = 1.

    # Camera centers, one row per camera.
    ts = np.array([center_from_P(P) for P in Ps])
    # 300 RANSAC iterations, each drawing a minimal sample of 2 cameras.
    for inds in ransac_sample(len(Ps), 2, 300):
        Ps_s, projs_s, shapes_s = ut.take_inds_each([Ps, projs, im_shapes],
                                                    inds)
        X = triangulate_nonlinear_pt(Ps_s, projs_s, shapes_s)

        # Reject near-degenerate geometry: the angle between the rays
        # from the two sampled camera centers to X must be reasonable.
        if CHOSEN_MIN_ANGLE:
            angle = ut.angle_between(-X + ts[inds[0]], -X + ts[inds[1]])
            if angle <= MIN_ANGLE or angle >= MAX_ANGLE:
                continue

        # Optional minimum-baseline filter between the sampled cameras.
        if MIN_TRI_DIST and pylab.dist(ts[inds[0]], ts[inds[1]]) <= TRI_DIST:
            continue

        # A camera is an inlier if X reprojects close to its observation,
        # lies in front of it, and is not implausibly far from it.
        inliers = []
        for pi, (P, proj) in enumerate(zip(Ps, projs)):
            if reproj_error(P, X, proj) <= ransac_thresh \
                   and in_front_of(P, X) and pylab.dist(center_from_P(P), X) <= max_pt_dist:
                inliers.append(pi)
        inliers = np.array(inliers)

        #[inliers] = np.nonzero(np.array([reproj_error(P, X, proj) <= ransac_thresh for P, proj in zip(Ps, projs)]))
        if INLIER_MINDIST:
            # Greedily drop inliers whose camera centers are within 0.5
            # of an already-kept inlier.
            inliers = greedy_choose_mindist(ts, inliers, 0.5)
            #inliers = greedy_choose_minangle(ts, inliers, 0.5)
            if len(inliers) < 2:
                continue

        if INLIER_MINANGLE:
            # Put the sampled cameras first so they survive the greedy
            # angle-based thinning; the rest follow in random order.
            ordered_cams = [i for i in inds if i in inliers] + ut.shuffled(
                without(inliers, inds))
            inliers = greedy_choose_minangle(Ps, X, ordered_cams, MIN_ANGLE)

        # Keep the largest inlier set seen so far.
        if len(inliers) > len(best_inliers):
            best_inliers = inliers

    # Fall back to the first two cameras if RANSAC found nothing.
    if len(best_inliers) == 0:
        final = [0, 1]
    else:
        final = best_inliers

    # Refine X using every camera in the final set.
    Ps_s, projs_s, shapes_s = ut.take_inds_each([Ps, projs, im_shapes], final)
    X = triangulate_nonlinear_pt(Ps_s, projs_s, shapes_s)
    print 'num inliers', len(best_inliers), 'of', len(
        Ps), 'mean reproj error', np.mean(
            [reproj_error(P, X, x) for P, x in zip(Ps_s, projs_s)])
    return X, best_inliers
Example #2
0
def greedy_choose_mindist(ts, inds, min_dist):
    """Greedily pick a subset of `inds` whose points in `ts` are pairwise
    at least `min_dist` apart; candidates are visited in random order."""
    import scipy.spatial.distance
    pairwise = scipy.spatial.distance.squareform(
        scipy.spatial.distance.pdist(ts, 'euclidean'))
    kept = []
    for pos in ut.shuffled(range(len(inds))):
        cand = inds[pos]
        # Accept cand only if it is far enough from everything kept so
        # far (vacuously true while `kept` is still empty).
        if np.all(pairwise[cand, kept] >= min_dist):
            kept.append(cand)

    # Sanity spot-check: a random kept pair respects the separation.
    if len(kept) > 1:
        a, b = random.sample(kept, 2)
        assert (pylab.dist(ts[a], ts[b]) >= min_dist)
    return kept
Example #3
0
def make_db_reader(path, pr, batch_size, input_types, db_start = None,
                   num_db_files = None, num_threads = 12, one_pass = False):
  print 'one pass ='******'Data path does not exist: %s' % path)

  if pr.dset_seed is not None or one_pass:
    num_threads = 1
  rec_files = rec_files_from_path(path, num_db_files = num_db_files)

  if not one_pass:
    rec_files = ut.shuffled(rec_files)
  
  # if hasattr(pr, 'alt_tf_path') and pr.alt_tf_path is not None:
  #   assert pr.alt_tf_path[0] in rec_files[0]
  #   rec_files = [(x.replace(pr.alt_tf_path[0], pr.alt_tf_path[1]) if i % 2 == 0 else x) for i, x in enumerate(rec_files)]
  #ut.printlns(rec_files)

  file_groups = ut.split_into(rec_files, num_threads)
  num_threads = min(len(file_groups), num_threads)
  queues = [tf.train.string_input_producer(
    group, seed = pr.dset_seed, 
    shuffle = (pr.dset_seed is None),
    num_epochs = (1 if one_pass else None)) for group in file_groups]
  example_list =  [read_example(queue, pr, input_types) for queue in queues]

  if not one_pass and (pr.dset_seed is None):
    # ims, flows, samples, sfs = tf.train.shuffle_batch_join(
    #   example_list, batch_size = batch_size, capacity = 1200,
    #   min_after_dequeue = 50, seed = pr.dset_seed)
    # ims, flows, samples, sfs, labels, ytids = tf.train.shuffle_batch_join(
    #   example_list, batch_size = batch_size, capacity = 250,
    #   min_after_dequeue = 50, seed = pr.dset_seed)
    ims, flows, samples, sfs, labels, ytids = tf.train.shuffle_batch_join(
      example_list, batch_size = batch_size, capacity = 200,
      min_after_dequeue = 20, seed = 0)
  else:
    ims, flows, samples, sfs, labels, ytids = tf.train.batch(example_list[0], batch_size)

  rets = {'im' : ims,
          'flow' : flows,
          'samples' : samples,
          'sfs' : sfs,
          'label' : labels,
          'ytid' : ytids}

  return [rets[k] for k in input_types]