Example #1
def find_params():

    FRAMES = np.arange(30)*100

    frame_images = organizedata.get_frames(ddir("bukowski_04.W2"), FRAMES)
    print "DONE READING DATA"

    CLUST_EPS = np.linspace(0, 0.5, 10)
    MIN_SAMPLES = [2, 3, 4, 5]
    MIN_DISTS = [2, 3, 4, 5, 6]
    THOLD = 240

    fracs_2 = np.zeros((len(CLUST_EPS), len(MIN_SAMPLES), len(MIN_DISTS)))

    for cei, CLUST_EP in enumerate(CLUST_EPS):
        for msi, MIN_SAMPLE in enumerate(MIN_SAMPLES):
            for mdi, MIN_DIST in enumerate(MIN_DISTS):
                print cei, msi, mdi
                numclusters = np.zeros(len(FRAMES))
                for fi, im in enumerate(frame_images):
                    centers = frame_clust_points(im, THOLD, MIN_DIST, 
                                                 CLUST_EP, MIN_SAMPLE)
                    # cluster centers
                    numclusters[fi] = len(centers)
                fracs_2[cei, msi, mdi] = float(np.sum(numclusters == 2))/len(numclusters)
    pylab.figure(figsize=(12, 8))
    for mdi, MIN_DIST in enumerate(MIN_DISTS):
        pylab.subplot(len(MIN_DISTS), 1, mdi+1)

        for msi in range(len(MIN_SAMPLES)):
            pylab.plot(CLUST_EPS, fracs_2[:, msi, mdi], label='%d' % MIN_SAMPLES[msi])
        pylab.title("min_dist= %3.2f" % MIN_DIST)
    pylab.legend()
    pylab.savefig('test.png', dpi=300)
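    # Possible follow-up (sketch, not part of the original function): with
    # fracs_2 computed above, the best-scoring parameter combination can be
    # read off directly; all names are exactly those defined in this example.
    cei, msi, mdi = np.unravel_index(np.argmax(fracs_2), fracs_2.shape)
    print "best: eps=%.3f min_samples=%d min_dist=%d (frac of 2-cluster frames=%.2f)" % (
        CLUST_EPS[cei], MIN_SAMPLES[msi], MIN_DISTS[mdi],
        fracs_2[cei, msi, mdi])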
Example #2
def average_diode_sep():
    clust_eps = 0.2
    min_dist = 2.0
    min_samples = 3.0
    thold = 240
    
    FRAMES = np.arange(4000)*2
    
    dataset = "bukowski_02.C"
    cf = pickle.load(open(os.path.join(ddir(dataset), 'config.pickle')))
    region = pickle.load(open(os.path.join(ddir(dataset), 'region.pickle')))
    
    env = util.Environmentz(cf['field_dim_m'], cf['frame_dim_pix'])
    x_min, y_min = env.gc.real_to_image(region['x_pos_min'], region['y_pos_min'])
    x_max, y_max = env.gc.real_to_image(region['x_pos_max'], region['y_pos_max'])
    print x_min, x_max
    print y_min, y_max
    if y_min < 0:
        y_min = 0
    frame_images = organizedata.get_frames(ddir(dataset), FRAMES)
    num_clusters = np.zeros(len(FRAMES))
    dists = []
    for fi, im in enumerate(frame_images):
        im = im[y_min:y_max+1, x_min:x_max+1]

        centers = frame_clust_points(im, 240, min_dist,
                                     clust_eps, min_samples)

        num_clusters[fi] = len(centers)
        if len(centers) == 2:
            dists.append(distance.pdist(centers)[0])
    dists = np.array(dists)
    pylab.hist(dists[dists < 50], bins=20)

    pylab.savefig("average_diode_sep.%s.png" % dataset, dpi=300)
Example #3
def picloud_score_frame(dataset_name, x_range, y_range, phi_range, theta_range,
                        state_idx, frame, EO_PARAMS, likelihood_i):
    """
    PiCloud runner: every instance builds up the full state vector, but
    only the states in [state_idx[0], state_idx[1]) are evaluated,
    and their scores are returned.
    """
    print "DATSET_NAME=", dataset_name
    dataset_dir = os.path.join(FL_DATA, dataset_name)
    dataset_config_filename = os.path.join(dataset_dir, "config.pickle")
    dataset_region_filename = os.path.join(dataset_dir, "region.pickle")
    frame_hist_filename = os.path.join(dataset_dir, "framehist.npz")

    np.random.seed(0)

    cf = pickle.load(open(dataset_config_filename))
    region = pickle.load(open(dataset_region_filename))

    framehist = np.load(frame_hist_filename)

    env = util.Environmentz(cf['field_dim_m'], cf['frame_dim_pix'])

    tp = template.TemplateRenderCircleBorder()

    tp.set_params(*EO_PARAMS)
    ls = LIKELIHOOD_SETTING[likelihood_i]

    le = likelihood.LikelihoodEvaluator2(env,
                                         tp,
                                         similarity=ls['similarity'],
                                         sim_params=ls['sim_params'])

    frames = organizedata.get_frames(dataset_dir, np.array([frame]))
    frame = frames[0]
    frame[frame < PIX_THRESHOLD] = 0
    # create the state vector

    state = create_state_vect(y_range, x_range, phi_range, theta_range)

    SCORE_N = state_idx[1] - state_idx[0]
    scores = np.zeros(SCORE_N, dtype=np.float32)
    for i, state_i in enumerate(state[state_idx[0]:state_idx[1]]):
        x = state_i['x']
        y = state_i['y']
        if region['x_pos_min'] <= x <= region['x_pos_max'] and \
                region['y_pos_min'] <= y <= region['y_pos_max']:
            score = le.score_state(state_i, frame)
            scores[i] = score
        else:
            scores[i] = -1e100
    return scores
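A minimal driver sketch to make the state_idx contract above concrete: split the full state vector into index ranges, score each range with picloud_score_frame, and concatenate the results. The dataset name, parameter ranges, frame index and likelihood index below are illustrative placeholders, eo_params is assumed to be the tuple produced by measure.led_params_to_EO as in the later examples, and the real pipeline presumably dispatches these chunks to PiCloud workers rather than looping locally.

# Sketch only; all concrete values below are placeholders.
x_range = np.linspace(0, 1.5, 16)
y_range = np.linspace(0, 1.5, 16)
phi_range = np.linspace(0, 2 * np.pi, 8)
theta_range = np.array([np.pi / 2.0])
state = create_state_vect(y_range, x_range, phi_range, theta_range)

CHUNK_SIZE = 1000
chunks = []
for lo in range(0, len(state), CHUNK_SIZE):
    hi = min(lo + CHUNK_SIZE, len(state))
    # score states [lo, hi) against frame 0 of a placeholder dataset;
    # eo_params is assumed to come from measure.led_params_to_EO
    chunks.append(picloud_score_frame("bukowski_04.W2",
                                      x_range, y_range,
                                      phi_range, theta_range,
                                      (lo, hi), 0, eo_params, 0))
scores = np.concatenate(chunks)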
Example #4
def picloud_score_frame(dataset_name, x_range, y_range, phi_range, theta_range,
                        state_idx, frame, EO_PARAMS, likelihood_i):
    """
    PiCloud runner: every instance builds up the full state vector, but
    only the states in [state_idx[0], state_idx[1]) are evaluated,
    and their scores are returned.
    """
    print "DATSET_NAME=", dataset_name
    dataset_dir = os.path.join(FL_DATA, dataset_name)
    dataset_config_filename = os.path.join(dataset_dir, "config.pickle")
    dataset_region_filename = os.path.join(dataset_dir, "region.pickle")
    frame_hist_filename = os.path.join(dataset_dir, "framehist.npz")
    
    np.random.seed(0)
    
    cf = pickle.load(open(dataset_config_filename))
    region = pickle.load(open(dataset_region_filename))

    framehist = np.load(frame_hist_filename)
    
    env = util.Environmentz(cf['field_dim_m'], 
                            cf['frame_dim_pix'])

    tp = template.TemplateRenderCircleBorder()
    
    tp.set_params(*EO_PARAMS)
    ls = LIKELIHOOD_SETTING[likelihood_i]
    
    le = likelihood.LikelihoodEvaluator2(env, tp, similarity=ls['similarity'], 
                                         sim_params = ls['sim_params'])

    frames = organizedata.get_frames(dataset_dir, np.array([frame]))
    frame = frames[0]
    frame[frame < PIX_THRESHOLD] = 0
    # create the state vector

    state = create_state_vect(y_range, x_range, phi_range, theta_range)
    
    SCORE_N = state_idx[1] - state_idx[0]
    scores = np.zeros(SCORE_N, dtype=np.float32)
    for i, state_i in enumerate(state[state_idx[0]:state_idx[1]]):
        x = state_i['x']
        y = state_i['y']
        if region['x_pos_min'] <= x <= region['x_pos_max'] and \
                region['y_pos_min'] <= y <= region['y_pos_max']:
            score = le.score_state(state_i, frame)
            scores[i] = score
        else:
            scores[i] = -1e100
    return scores
Example #5
def per_frame(basedir, func, config):
    config_file = os.path.join(basedir, "config.pickle")
    cf = pickle.load(open(config_file))
    env = util.Environmentz(cf['field_dim_m'], cf['frame_dim_pix'])
    FRAMEN = cf['end_f'] - cf['start_f'] + 1

    d = np.zeros(FRAMES_TO_ANALYZE, dtype=DTYPE_POS_CONF)
    FRAMES_AT_A_TIME = 10
    frames = np.arange(FRAMES_TO_ANALYZE)
    for frame_subset in util.chunk(frames, FRAMES_AT_A_TIME):
        fs = organizedata.get_frames(basedir, frame_subset)
        for fi, frame_no in enumerate(frame_subset):
            real_x, real_y, conf = func(fs[fi], env, **config)
            d[frame_no]['x'] = real_x
            d[frame_no]['y'] = real_y
            d[frame_no]['confidence'] = conf

    return d
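A hedged usage sketch for per_frame: brightest_point below is an illustrative stand-in, not a project function, written only to satisfy the contract implied by the loop above (func(frame, env, **config) must return (x, y, confidence)); the dataset path is likewise a placeholder.

# Sketch only: a stand-in detector obeying the per_frame contract.
def brightest_point(frame, env, thold=240):
    ys, xs = np.nonzero(frame > thold)
    if len(xs) == 0:
        return 0.0, 0.0, 0.0
    # a real detector would map the pixel centroid to real-world
    # coordinates via env; this sketch just returns pixel means
    return float(xs.mean()), float(ys.mean()), 1.0

pos_conf = per_frame(ddir("bukowski_04.W2"), brightest_point, {'thold': 240})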
Example #6
def per_frame(basedir, func, config):
    config_file = os.path.join(basedir, "config.pickle")
    cf = pickle.load(open(config_file))
    env = util.Environmentz(cf['field_dim_m'], cf['frame_dim_pix'])
    FRAMEN = cf['end_f'] - cf['start_f'] + 1
    

    d = np.zeros(FRAMES_TO_ANALYZE, dtype=DTYPE_POS_CONF)
    FRAMES_AT_A_TIME = 10
    frames = np.arange(FRAMES_TO_ANALYZE)
    for frame_subset in util.chunk(frames, FRAMES_AT_A_TIME):
        fs = organizedata.get_frames(basedir, frame_subset)
        for fi, frame_no in enumerate(frame_subset):
            real_x, real_y, conf = func(fs[fi], env, **config)
            d[frame_no]['x'] = real_x
            d[frame_no]['y'] = real_y
            d[frame_no]['confidence'] = conf
            
    return d
Example #7
def find_params():

    FRAMES = np.arange(30) * 100

    frame_images = organizedata.get_frames(ddir("bukowski_04.W2"), FRAMES)
    print "DONE READING DATA"

    CLUST_EPS = np.linspace(0, 0.5, 10)
    MIN_SAMPLES = [2, 3, 4, 5]
    MIN_DISTS = [2, 3, 4, 5, 6]
    THOLD = 240

    fracs_2 = np.zeros((len(CLUST_EPS), len(MIN_SAMPLES), len(MIN_DISTS)))

    for cei, CLUST_EP in enumerate(CLUST_EPS):
        for msi, MIN_SAMPLE in enumerate(MIN_SAMPLES):
            for mdi, MIN_DIST in enumerate(MIN_DISTS):
                print cei, msi, mdi
                numclusters = np.zeros(len(FRAMES))
                for fi, im in enumerate(frame_images):
                    centers = frame_clust_points(im, THOLD, MIN_DIST, CLUST_EP,
                                                 MIN_SAMPLE)
                    # cluster centers
                    numclusters[fi] = len(centers)
                fracs_2[cei, msi, mdi] = float(
                    np.sum(numclusters == 2)) / len(numclusters)
    pylab.figure(figsize=(12, 8))
    for mdi, MIN_DIST in enumerate(MIN_DISTS):
        pylab.subplot(len(MIN_DISTS), 1, mdi + 1)

        for msi in range(len(MIN_SAMPLES)):
            pylab.plot(CLUST_EPS,
                       fracs_2[:, msi, mdi],
                       label='%d' % MIN_SAMPLES[msi])
        pylab.title("min_dist= %3.2f" % MIN_DIST)
    pylab.legend()
    pylab.savefig('test.png', dpi=300)
Example #8
def average_diode_sep():
    clust_eps = 0.2
    min_dist = 2.0
    min_samples = 3.0
    thold = 240

    FRAMES = np.arange(4000) * 2

    dataset = "bukowski_02.C"
    cf = pickle.load(open(os.path.join(ddir(dataset), 'config.pickle')))
    region = pickle.load(open(os.path.join(ddir(dataset), 'region.pickle')))

    env = util.Environmentz(cf['field_dim_m'], cf['frame_dim_pix'])
    x_min, y_min = env.gc.real_to_image(region['x_pos_min'],
                                        region['y_pos_min'])
    x_max, y_max = env.gc.real_to_image(region['x_pos_max'],
                                        region['y_pos_max'])
    print x_min, x_max
    print y_min, y_max
    if y_min < 0:
        y_min = 0
    frame_images = organizedata.get_frames(ddir(dataset), FRAMES)
    num_clusters = np.zeros(len(FRAMES))
    dists = []
    for fi, im in enumerate(frame_images):
        im = im[y_min:y_max + 1, x_min:x_max + 1]

        centers = frame_clust_points(im, 240, min_dist, clust_eps, min_samples)

        num_clusters[fi] = len(centers)
        if len(centers) == 2:
            dists.append(distance.pdist(centers)[0])
    dists = np.array(dists)
    pylab.hist(dists[dists < 50], bins=20)

    pylab.savefig("average_diode_sep.%s.png" % dataset, dpi=300)
Example #9
    scores = scores[:len(sv)]

    pylab.figure()
    scores_flat = np.array(scores.flat)
    pylab.hist(scores_flat[np.isfinite(scores_flat)], bins=255)
    pylab.savefig(outfile_hist, dpi=300)

    scores[np.isinf(scores)] = -1e20

    TOP_R, TOP_C = 3, 4
    TOP_N = TOP_R * TOP_C

    score_idx_sorted = np.argsort(scores)[::-1]

    #get the frame
    frames = organizedata.get_frames(data_p['dataset_dir'],
                                     np.array([data_p['frame']]))

    # config file
    cf = pickle.load(open(os.path.join(data_p['dataset_dir'],
                                       'config.pickle')))
    env = util.Environmentz(cf['field_dim_m'], cf['frame_dim_pix'])

    img = frames[0]
    f = pylab.figure()
    for r in range(TOP_N):
        s_i = score_idx_sorted[r]
        score = scores[s_i]
        ax = f.add_subplot(TOP_R, TOP_C, r + 1)
        ax.imshow(img, interpolation='nearest', cmap=pylab.cm.gray)
        x_pix, y_pix = env.gc.real_to_image(sv[s_i]['x'], sv[s_i]['y'])
        ax.axhline(y_pix, linewidth=1, c='b', alpha=0.5)
Example #10
    env = util.Environmentz(cf['field_dim_m'],
                            cf['frame_dim_pix'])
    led_params = pickle.load(open(led_params_filename, 'r'))

    eo_params = measure.led_params_to_EO(cf, led_params)
    if frame_end > cf['end_f']:
        frame_end = cf['end_f']

    truth = np.load(os.path.join(epoch_dir, 'positions.npy'))
    truth_interp, missing = measure.interpolate(truth)
    derived_truth = measure.compute_derived(truth_interp, 
                                            T_DELTA)


    frame_pos = np.arange(frame_start, frame_end)
    # load frames
    frames = organizedata.get_frames(epoch_dir, frame_pos)

    FRAMEN = len(frames)

    coordinates = []
    
    regions = np.zeros((FRAMEN, frames[0].shape[0], frames[0].shape[1]), 
                       dtype=np.uint8)
    point_est_track_data = []

    for fi, frame in enumerate(frames):
        abs_frame_index = frame_pos[fi]

        coordinates.append(skimage.feature.peak_local_max(frame, 
                                                          min_distance=30, 
                                                          threshold_abs=220))
Example #11
                           front_amount = diode_scale, back_amount = diode_scale)

    #print "EO PARAMS ARE", eoparams
    tr = TemplateObj(0.8, 0.4)
    tr.set_params(*eoparams)
    
    le1 = likelihood.LikelihoodEvaluator2(env, tr, similarity='dist', 
                                         likeli_params = config_params)
    
    
    model_inst = model.CustomModel(env, le1, 
                                   POS_NOISE_STD=posnoise,
                                   VELOCITY_NOISE_STD=velnoise)
    frame_pos = np.arange(frame_start, frame_end)
    # load frames
    frames = organizedata.get_frames(epoch_dir, frame_pos)

    y = frames

    prop2 = proposals.HigherIsotropic()
    def img_to_points(img):
        return dettrack.point_est_track2(img, env, eoparams)
        
    prop3 = proposals.MultimodalData(env, img_to_points, prop2)

    mpk = ssm.proposal.MixtureProposalKernel([prop2, prop3], 
                                             [0.5, 0.5])

    unnormed_weights, particles, ancestors = pf.arbitrary_prop(y, model_inst, 
                                                               mpk,
                                                               PARTICLEN)
Example #12
    # now the real velocity outliers: For the top N velocity outliers,
    # plot the image before, during, and after the peak

    env = util.Environmentz(cf['field_dim_m'], cf['frame_dim_pix'])

    def rc(x, y):
        # convenience real-to-image
        return env.gc.real_to_image(x, y)

    top_vel_idx = np.argsort(pos_vel)[-1]
    tgt_frame_idx = np.array([
        top_vel_idx - 2, top_vel_idx - 1, top_vel_idx, top_vel_idx + 1,
        top_vel_idx + 2
    ])
    f = organizedata.get_frames(basedir, tgt_frame_idx)
    pylab.figure()
    for i, fi in enumerate(tgt_frame_idx):
        pylab.subplot(2, len(tgt_frame_idx), i + 1)
        pylab.imshow(f[i], interpolation='nearest', cmap=pylab.cm.gray)
        for l, c in [('led_front', 'g'), ('led_back', 'r')]:
            img_x, img_y = rc(positions_interp[l][fi, 0],
                              positions_interp[l][fi, 1])
            pylab.scatter(img_x, img_y, c=c, s=1, linewidth=0)
    for i, fi in enumerate(tgt_frame_idx[:-1]):
        pylab.subplot(2, len(tgt_frame_idx), len(tgt_frame_idx) + i + 1)
        pylab.imshow(f[i + 1] - f[i], interpolation='nearest')
    pylab.savefig(velocity_file, dpi=1000)
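    # Context sketch (hypothetical, not from the project): pos_vel used
    # above could be a per-frame speed derived from the interpolated LED
    # positions, e.g. the frame-to-frame displacement of the front LED.
    front = positions_interp['led_front']          # (N, 2) interpolated x, y
    step = np.sqrt(np.sum(np.diff(front, axis=0) ** 2, axis=1))
    pos_vel = np.concatenate([[0.0], step])        # pad to length N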


@merge("%s/*/positions.npy" % DATA_DIR, ["velocity.pdf", "phidot.pdf"])
Example #13
    # plot the image before, during, and after the peak
    
    env = util.Environmentz(cf['field_dim_m'], 
                            cf['frame_dim_pix'])

    
    def rc(x, y):
        # convenience real-to-image
        return env.gc.real_to_image(x, y)
    top_vel_idx = np.argsort(pos_vel)[-1]
    tgt_frame_idx = np.array([top_vel_idx - 2,
                              top_vel_idx - 1,
                              top_vel_idx, 
                              top_vel_idx + 1, 
                              top_vel_idx + 2])
    f = organizedata.get_frames(basedir, tgt_frame_idx)
    pylab.figure()
    for i, fi in enumerate(tgt_frame_idx):
        pylab.subplot(2, len(tgt_frame_idx), 
                      i+1)
        pylab.imshow(f[i], interpolation='nearest', cmap=pylab.cm.gray)
        for l, c in [('led_front', 'g'), ('led_back', 'r')]:
            img_x, img_y = rc(positions_interp[l][fi, 0],
                              positions_interp[l][fi, 1])
            pylab.scatter(img_x, img_y, c=c, s=1, linewidth=0)
    for i, fi in enumerate(tgt_frame_idx[:-1]):
        pylab.subplot(2, len(tgt_frame_idx), 
                      len(tgt_frame_idx) + i+1)
        pylab.imshow(f[i+1] - f[i], interpolation='nearest')
    pylab.savefig(velocity_file, dpi=1000)
    
Example #14
    #print "EO PARAMS ARE", eoparams
    tr = TemplateObj(0.8, 0.4)
    tr.set_params(*eoparams)

    le1 = likelihood.LikelihoodEvaluator2(env,
                                          tr,
                                          similarity='dist',
                                          likeli_params=config_params)

    model_inst = model.CustomModel(env,
                                   le1,
                                   POS_NOISE_STD=posnoise,
                                   VELOCITY_NOISE_STD=velnoise)
    frame_pos = np.arange(frame_start, frame_end)
    # load frames
    frames = organizedata.get_frames(epoch_dir, frame_pos)

    y = frames

    prop2 = proposals.HigherIsotropic()

    def img_to_points(img):
        return dettrack.point_est_track2(img, env, eoparams)

    prop3 = proposals.MultimodalData(env, img_to_points, prop2)

    mpk = ssm.proposal.MixtureProposalKernel([prop2, prop3], [0.5, 0.5])

    unnormed_weights, particles, ancestors = pf.arbitrary_prop(
        y, model_inst, mpk, PARTICLEN)
Example #15
    scores = scores[:len(sv)]

    pylab.figure()
    scores_flat = np.array(scores.flat)
    pylab.hist(scores_flat[np.isfinite(scores_flat)], bins=255)
    pylab.savefig(outfile_hist, dpi=300)

    scores[np.isinf(scores)] = -1e20

    TOP_R, TOP_C = 3, 4
    TOP_N = TOP_R * TOP_C

    score_idx_sorted = np.argsort(scores)[::-1]
    
    #get the frame
    frames = organizedata.get_frames(data_p['dataset_dir'], 
                                     np.array([data_p['frame']]))

    # config file
    cf = pickle.load(open(os.path.join(data_p['dataset_dir'], 
                                       'config.pickle')))
    env = util.Environmentz(cf['field_dim_m'], 
                            cf['frame_dim_pix'])

    img = frames[0]
    f = pylab.figure()
    for r in range(TOP_N):
        s_i = score_idx_sorted[r]
        score = scores[s_i]
        ax = f.add_subplot(TOP_R, TOP_C, r+1)
        ax.imshow(img, interpolation='nearest', cmap=pylab.cm.gray)
        x_pix, y_pix = env.gc.real_to_image(sv[s_i]['x'], sv[s_i]['y'])
Example #16
    region = pickle.load(open(region_filename, 'r'))

    env = util.Environmentz(cf['field_dim_m'], cf['frame_dim_pix'])
    led_params = pickle.load(open(led_params_filename, 'r'))

    eo_params = measure.led_params_to_EO(cf, led_params)
    if frame_end > cf['end_f']:
        frame_end = cf['end_f']

    truth = np.load(os.path.join(epoch_dir, 'positions.npy'))
    truth_interp, missing = measure.interpolate(truth)
    derived_truth = measure.compute_derived(truth_interp, T_DELTA)

    frame_pos = np.arange(frame_start, frame_end)
    # load frames
    frames = organizedata.get_frames(epoch_dir, frame_pos)

    FRAMEN = len(frames)

    coordinates = []

    regions = np.zeros((FRAMEN, frames[0].shape[0], frames[0].shape[1]),
                       dtype=np.uint8)
    point_est_track_data = []

    for fi, frame in enumerate(frames):
        abs_frame_index = frame_pos[fi]

        coordinates.append(
            skimage.feature.peak_local_max(frame,
                                           min_distance=30,