def synth_circle_noise(infiles, outfiles, NOISE):

    SIM_DURATION = 30.0
    TDELTA = 1 / 30.

    t = np.arange(0, SIM_DURATION, TDELTA)

    frames_to_skip = [20, 35, 36, 50, 51, 52, 53]

    env = util.Environmentz((1.5, 2), (240, 320))

    state = simulate.gen_track_circle(t,
                                      np.pi * 2 / 10,
                                      env,
                                      circle_radius=0.5)
    images = simulate.render(env, state)
    new_images = simulate.add_noise_background(images, NOISE, NOISE,
                                               frames_to_skip)
    pickle.dump(
        {
            'state': state,
            'video': new_images,
            'noise': NOISE,
            'frames_skipped': frames_to_skip
        }, open(outfiles[0], 'w'))
    videotools.dump_grey_movie(outfiles[1], new_images)
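
# A minimal usage sketch, assuming the pipeline convention above: infiles is
# unused and outfiles is a (pickle, movie) pair. The output filenames and the
# NOISE level here are hypothetical placeholders.
def example_synth_circle_noise():
    synth_circle_noise([],
                       ['circle_noise_example.pickle',
                        'circle_noise_example.avi'],
                       NOISE=30)
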
def test_likelihood_evaluator3():
    
    tr = template.TemplateRenderCircleBorder()
    tr.set_params(14, 6, 4)

    t1 = tr.render(0, np.pi/2)
    img = np.zeros((240, 320), dtype=np.uint8)

    env = util.Environmentz((1.5, 2.0), (240, 320))
    
    le2 = likelihood.LikelihoodEvaluator3(env, tr)

    img[(120 - t1.shape[0] // 2):(120 + t1.shape[0] // 2),
        (160 - t1.shape[1] // 2):(160 + t1.shape[1] // 2)] += t1 * 255
    pylab.subplot(1, 2, 1)
    pylab.imshow(img, interpolation='nearest', cmap=pylab.cm.gray)

    state = np.zeros(1, dtype=util.DTYPE_STATE)

    xvals = np.linspace(0, 2.,  100)
    yvals = np.linspace(0, 1.5, 100)
    res = np.zeros((len(yvals), len(xvals)), dtype=np.float32)
    for yi, y in enumerate(yvals):
        for xi, x in enumerate(xvals):
            state[0]['x'] = x
            state[0]['y'] = y
            state[0]['theta'] = np.pi / 2. 
            res[yi, xi] = le2.score_state(state, img)
    pylab.subplot(1, 2, 2)
    pylab.imshow(res)
    pylab.colorbar()
    pylab.show()
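
# A small follow-on sketch: locate the grid point where a likelihood surface
# like `res` above peaks and map it back to the (x, y) values it was evaluated
# at. Intended to be called with the arrays built in test_likelihood_evaluator3.
def likelihood_surface_peak(res, xvals, yvals):
    yi_best, xi_best = np.unravel_index(np.argmax(res), res.shape)
    return xvals[xi_best], yvals[yi_best]
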
def picloud_score_frame(dataset_name, x_range, y_range, phi_range, theta_range,
                        state_idx, frame, EO_PARAMS, likelihood_i):
    """
    pi-cloud runner, every instance builds up full state, but
    we only evaluate the states in [state_idx_to_eval[0], state_idx_to_eval[1])
    and return scores
    """
    print "DATSET_NAME=", dataset_name
    dataset_dir = os.path.join(FL_DATA, dataset_name)
    dataset_config_filename = os.path.join(dataset_dir, "config.pickle")
    dataset_region_filename = os.path.join(dataset_dir, "region.pickle")
    frame_hist_filename = os.path.join(dataset_dir, "framehist.npz")

    np.random.seed(0)

    cf = pickle.load(open(dataset_config_filename))
    region = pickle.load(open(dataset_region_filename))

    framehist = np.load(frame_hist_filename)

    env = util.Environmentz(cf['field_dim_m'], cf['frame_dim_pix'])

    tp = template.TemplateRenderCircleBorder()

    tp.set_params(*EO_PARAMS)
    ls = LIKELIHOOD_SETTING[likelihood_i]

    le = likelihood.LikelihoodEvaluator2(env,
                                         tp,
                                         similarity=ls['similarity'],
                                         sim_params=ls['sim_params'])

    frames = organizedata.get_frames(dataset_dir, np.array([frame]))
    frame = frames[0]
    frame[frame < PIX_THRESHOLD] = 0
    # create the state vector

    state = create_state_vect(y_range, x_range, phi_range, theta_range)

    SCORE_N = state_idx[1] - state_idx[0]
    scores = np.zeros(SCORE_N, dtype=np.float32)
    for i, state_i in enumerate(state[state_idx[0]:state_idx[1]]):
        x = state_i['x']
        y = state_i['y']
        if region['x_pos_min'] <= x <= region['x_pos_max'] and \
                region['y_pos_min'] <= y <= region['y_pos_max']:
            score = le.score_state(state_i, frame)
            scores[i] = score
        else:
            # -1e100 overflows float32 to -inf, flagging out-of-region states
            scores[i] = -1e100
    return scores
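
# A sketch of how the chunked evaluation might be driven: split the full state
# grid into contiguous [start, end) index ranges and score each chunk with
# picloud_score_frame (run serially here; on PiCloud each call would be a
# separate job). n_states and chunk_size are assumptions about how the caller
# sizes the grid.
def score_frame_in_chunks(dataset_name, x_range, y_range, phi_range,
                          theta_range, n_states, frame, EO_PARAMS,
                          likelihood_i, chunk_size=10000):
    chunks = []
    for start in range(0, n_states, chunk_size):
        end = min(start + chunk_size, n_states)
        chunks.append(picloud_score_frame(dataset_name, x_range, y_range,
                                          phi_range, theta_range,
                                          (start, end), frame,
                                          EO_PARAMS, likelihood_i))
    return np.concatenate(chunks)
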
def per_frame(basedir, func, config):
    config_file = os.path.join(basedir, "config.pickle")
    cf = pickle.load(open(config_file))
    env = util.Environmentz(cf['field_dim_m'], cf['frame_dim_pix'])
    FRAMEN = cf['end_f'] - cf['start_f'] + 1

    d = np.zeros(FRAMES_TO_ANALYZE, dtype=DTYPE_POS_CONF)
    FRAMES_AT_A_TIME = 10
    frames = np.arange(FRAMES_TO_ANALYZE)
    for frame_subset in util.chunk(frames, FRAMES_AT_A_TIME):
        fs = organizedata.get_frames(basedir, frame_subset)
        for fi, frame_no in enumerate(frame_subset):
            real_x, real_y, conf = func(fs[fi], env, **config)
            d[frame_no]['x'] = real_x
            d[frame_no]['y'] = real_y
            d[frame_no]['confidence'] = conf

    return d
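
# A usage sketch for per_frame with a hypothetical detector. per_frame expects
# func(frame, env, **config) to return (real_x, real_y, confidence); the
# centroid detector below and its pixel-to-metre conversion (dividing by
# env.gc.pix_per_meter and ignoring any origin offset) are illustrative
# assumptions, not part of the original code.
def example_per_frame(basedir):
    def centroid_detector(frame, env, thold=200):
        ys, xs = np.nonzero(frame > thold)
        if len(xs) == 0:
            return 0.0, 0.0, 0.0
        real_x = np.mean(xs) / env.gc.pix_per_meter[0]
        real_y = np.mean(ys) / env.gc.pix_per_meter[0]
        return real_x, real_y, 1.0
    return per_frame(basedir, centroid_detector, {'thold': 200})
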
def pf_run(infile, outfile, posnoise, velnoise, PARTICLEN, FRAMEN, NOISE, log):
    np.random.seed(0)
    print "Loading data..."
    d = pickle.load(open(infile))
    print "done!"
    env = util.Environmentz((1.5, 2), (240, 320))

    eo = likelihood.EvaluateObj(240, 320)
    eo.set_params(10, 4, 2)
    le = likelihood.LikelihoodEvaluator(env, eo, log)

    model_inst = model.LinearModel(env,
                                   le,
                                   POS_NOISE_STD=posnoise,
                                   VELOCITY_NOISE_STD=velnoise)

    y = d['video'][:FRAMEN]

    weights, particles = pf.particle_filter(y, model_inst, len(y), PARTICLEN)
    np.savez_compressed(outfile, weights=weights, particles=particles)
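
# A hypothetical invocation of pf_run: the filenames and parameter values are
# placeholders chosen only to show which argument goes where (e.g. an input
# pickle produced by synth_circle_noise above). Note that np.savez_compressed
# appends ".npz" to outfile if it has no extension.
def example_pf_run():
    pf_run('circle_noise_example.pickle', 'pf_run_example.npz',
           posnoise=0.01, velnoise=0.01,
           PARTICLEN=1000, FRAMEN=100, NOISE=30, log=True)
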
def led_params_to_EO(cf, led_params):
    """ Returns "EO" tuple of (led_dist_in_pix, front_led_radius, back_led_radius"""

    env = util.Environmentz(cf['field_dim_m'], cf['frame_dim_pix'])

    ledimgs_mean = led_params['ledimgs_mean']

    S = ledimgs_mean[0].shape[0]

    # horizontal slice through the center of each LED image
    sigma_front_h, a, a_x = led_measure(ledimgs_mean[0, S / 2 + 1, :])
    sigma_back_h, a, a_x = led_measure(ledimgs_mean[1, S / 2 + 1, :])

    # vertical slice through the center of each LED image
    sigma_front_v, a, a_x = led_measure(ledimgs_mean[0, :, S / 2 + 1])
    sigma_back_v, a, a_x = led_measure(ledimgs_mean[1, :, S / 2 + 1])

    w_front = (sigma_front_h + sigma_front_v) / 2.
    w_back = (sigma_back_h + sigma_back_v) / 2.

    dist_in_m = np.mean(led_params['dist'])
    dist_in_pix = int(dist_in_m * env.gc.pix_per_meter[0])
    return (dist_in_pix, np.ceil(w_front), np.ceil(w_back))
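
# How the EO tuple is consumed elsewhere in this module (see picloud_score_frame
# above): the three values are unpacked straight into
# TemplateRenderCircleBorder.set_params. This wrapper is only an illustrative
# convenience, not part of the original code.
def template_from_led_params(cf, led_params):
    eo_params = led_params_to_EO(cf, led_params)
    tp = template.TemplateRenderCircleBorder()
    tp.set_params(*eo_params)
    return tp
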
def average_diode_sep():
    clust_eps = 0.2
    min_dist = 2.0
    min_samples = 3.0
    thold = 240

    FRAMES = np.arange(4000) * 2

    dataset = "bukowski_02.C"
    cf = pickle.load(open(os.path.join(ddir(dataset), 'config.pickle')))
    region = pickle.load(open(os.path.join(ddir(dataset), 'region.pickle')))

    env = util.Environmentz(cf['field_dim_m'], cf['frame_dim_pix'])
    x_min, y_min = env.gc.real_to_image(region['x_pos_min'],
                                        region['y_pos_min'])
    x_max, y_max = env.gc.real_to_image(region['x_pos_max'],
                                        region['y_pos_max'])
    print x_min, x_max
    print y_min, y_max
    if y_min < 0:
        y_min = 0
    frame_images = organizedata.get_frames(ddir(dataset), FRAMES)
    num_clusters = np.zeros(len(FRAMES))
    dists = []
    for fi, im in enumerate(frame_images):
        im = im[y_min:y_max + 1, x_min:x_max + 1]

        centers = frame_clust_points(im, thold, min_dist, clust_eps, min_samples)

        num_clusters[fi] = len(centers)
        if len(centers) == 2:
            dists.append(distance.pdist(centers)[0])
    dists = np.array(dists)
    pylab.hist(dists[dists < 50], bins=20)

    pylab.savefig("average_diode_sep.%s.png" % dataset, dpi=300)
    scores[np.isinf(scores)] = -1e20

    TOP_R, TOP_C = 3, 4
    TOP_N = TOP_R * TOP_C

    score_idx_sorted = np.argsort(scores)[::-1]

    #get the frame
    frames = organizedata.get_frames(data_p['dataset_dir'],
                                     np.array([data_p['frame']]))

    # config file
    cf = pickle.load(open(os.path.join(data_p['dataset_dir'],
                                       'config.pickle')))
    env = util.Environmentz(cf['field_dim_m'], cf['frame_dim_pix'])

    img = frames[0]
    f = pylab.figure()
    for r in range(TOP_N):
        s_i = score_idx_sorted[r]
        score = scores[s_i]
        ax = f.add_subplot(TOP_R, TOP_C, r + 1)
        ax.imshow(img, interpolation='nearest', cmap=pylab.cm.gray)
        x_pix, y_pix = env.gc.real_to_image(sv[s_i]['x'], sv[s_i]['y'])
        ax.axhline(y_pix, linewidth=1, c='b', alpha=0.5)
        ax.axvline(x_pix, linewidth=1, c='b', alpha=0.5)
        ax.set_xticks([])
        ax.set_yticks([])
    f.subplots_adjust(bottom=0, left=.01)

    cf = pickle.load(open(os.path.join(directory, "config.pickle")))
    start_f = cf['start_f']
    # open the frame tarball
    tf = tarfile.open(os.path.join(directory, "%08d.tar.gz" % start_f), "r:gz")

    positions_interp, missing = measure.interpolate(positions)
    pos_derived = measure.compute_derived(positions_interp)

    N = len(positions)
    state = np.zeros(N, dtype=util.DTYPE_STATE)
    state['x'] = positions_interp['x']
    state['y'] = positions_interp['y']
    state['phi'] = pos_derived['phi']
    state['theta'] = np.pi / 2.0

    env = util.Environmentz((1.5, 2), (240, 320))

    images = simulate.render(env, state[:100])
    NOISE = 0

    new_images = simulate.add_noise_background(images, NOISE, NOISE, [])

    FN = 100
    pylab.figure()
    pylab.subplot(2, 1, 1)
    pylab.plot(state['x'][:FN])
    pylab.plot(state['y'][:FN])
    pylab.subplot(2, 1, 2)
    pylab.scatter(positions['led_front'][:FN, 0],
                  positions['led_front'][:FN, 1],
                  c='g')