# Example #1
def get_data(simple=False, look_back=5):
    """Build (trainX, trainY) windowed training data for the force model.

    Parameters
    ----------
    simple : bool
        When True, generate a synthetic dataset of sinusoidal force labels;
        otherwise load, smooth, and subsample the recorded trajectories.
    look_back : int
        Window length forwarded to ``create_dataset``.

    Returns
    -------
    tuple
        ``(trainX, trainY)`` as produced by ``create_dataset``.
    """
    if simple:
        # Synthetic branch: n_data trajectories over an n_traj-step timeline,
        # each force dimension labeled with a sine of a different frequency.
        n_traj = 20
        n_data = 40
        k = 0.05  # base angular frequency of the synthetic sine labels
        train_data = gen_timeline(n_data, n_traj)
        x = np.linspace(0, n_traj, n_traj)
        train_labels = np.zeros(train_data.shape)
        for dim in range(3):
            # Frequency scales with (1 + dim) so the three dims differ.
            train_labels[:, :, dim] = np.vstack(
                [np.sin(k * (1 + dim) * x) for _ in range(n_data)])
        return create_dataset(train_data, look_back=look_back)
    else:
        # Real branch: only the "good" trajectories feed training; the bad
        # ones were previously processed but never used (num_train_bad was
        # hard-coded to 0), so that dead work is dropped.
        good_trajs, _bad_trajs = get_good_bad_traj()
        good_trajs = [subsample_traj(smooth_traj(traj, n=5), n=5)
                      for traj in good_trajs]
        print("traj shape", np.array(good_trajs).shape)
        if PLOT:
            plot_forces(good_trajs[2])
        # 75/25 train split over the good trajectories.
        num_train_good = int(0.75 * len(good_trajs))
        return create_dataset(good_trajs[:num_train_good],
                              look_back=look_back)
# Example #2
def predict(model, curr_set=None, numsteps=20, upsample=1):
    """Roll *model* forward autoregressively and stack its predictions.

    Parameters
    ----------
    model : object
        Anything with a ``predict(window)`` method (e.g. a Keras model).
    curr_set : numpy.ndarray of shape (1, 3, look_back), optional
        Seed window; it is shifted in place as predictions are fed back,
        so a caller-supplied array IS mutated.  Defaults to zeros with
        look_back = 5 (a fresh array per call -- the previous mutable
        default leaked state between calls).
    numsteps : int
        Total number of output rows to produce.
    upsample : int
        Each model step is repeated ``upsample`` times in the output.

    Returns
    -------
    numpy.ndarray
        The stacked predicted force vectors, one row per output step.
    """
    if curr_set is None:
        curr_set = np.zeros((1, 3, 5))
    full_prediction = None
    for _step in range(int(numsteps / upsample)):
        # Shift the window left by one and append the newest prediction.
        curr_set[:, :, :-1] = curr_set[:, :, 1:]
        curr_set[:, :, -1] = model.predict(curr_set)[0].T
        for _ in range(upsample):
            if full_prediction is None:
                # .copy() matters: a bare slice is a view into curr_set and
                # would be clobbered by the in-place shift next iteration.
                full_prediction = curr_set[:, :, -1].copy()
            else:
                full_prediction = np.vstack(
                    [full_prediction, curr_set[:, :, -1]])
    plot_forces(full_prediction)
    return full_prediction
        forces = predict(model, curr_set=curr_forces, numsteps=100, upsample=5)

        #print("Min", np.min(forces.flatten()))
        #print("Max", np.max(forces.flatten()))
        ipdb.set_trace()
        cortical_response, classification_vector = encoder(
            forces)  #we want the second term to be 1
        dist = distance(cortical_response, good_responses)
        prediction = classification_vector[0][1]
        force_list.append(forces)
        class_list.append(prediction)
        force_to_dist[i] = dist
        dist_list.append(dist)
    #best_i = min(force_to_dist.keys(), key=lambda x: force_to_dist[x])
    best_i = np.argmax(class_list)
    lowest_dist = force_to_dist[best_i]
    print("lowest dist", lowest_dist)
    print("highest class", class_list[best_i])
    if PLOT:
        plt.scatter(dist_list, class_list)
        plt.xlabel("Distance")
        plt.ylabel("Probability successful | theta")
        plt.show()

    return force_list[best_i]


# Script entry point: search for the best force encoding over an
# N-dimensional weight parameterization and plot the winning profile.
N = 30  # number of basis weights in the force parameterization
#plot_forces(gen_parameterized_forces(gen_weights(N=N), 100, N=N))
plot_forces(find_best_encoding(N=N))