Example #1
def make_input(encoders):
    # Trial durations: one block of vector_n trials for the training phase and
    # one block for the testing phase, each of length trial_length
    # (trial_length, vector_n, train_on_hrr and dim are module-level globals).
    times = []
    times.extend([trial_length for i in range(vector_n)])
    times.extend([trial_length for i in range(vector_n)])

    gens = []
    # Training-phase generators: HRR-noised encoders when train_on_hrr is set,
    # otherwise the clean encoder vectors.
    if train_on_hrr:
        gens.extend([nf.output(100, False, pe, False, nf.make_hrr_noise(dim, 1))
                     for pe in encoders])
    else:
        gens.extend([nf.output(100, True, pe, False) for pe in encoders])
    # Testing-phase generators: always HRR-noised versions of the encoders.
    gens.extend([nf.output(100, False, pe, False, nf.make_hrr_noise(dim, 1))
                 for pe in encoders])

    return times, gens
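The (times, gens) pair returned here mirrors the arguments that nf.make_f receives in Example #6 (phase1_input = nf.make_f(gens1, times1)), so a call site presumably looks like the sketch below. The concrete values of the module-level globals and the exact behaviour of nf.make_f are assumptions inferred from the other examples on this page, not a documented API.

# Hypothetical call site for make_input, assuming the globals it reads
# (trial_length, vector_n, train_on_hrr, dim) are defined at module level
# and that nf.make_f(gens, times) combines the generators into a single
# time-varying input function, as in Example #6.
dim = 512
vector_n = 4
trial_length = 1.0          # seconds per trial
train_on_hrr = False

encoders = [hrr.HRR(dim).v for i in range(vector_n)]

times, gens = make_input(encoders)
input_func = nf.make_f(gens, times)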
Example #2
def build_and_run_vectors(seed, dim, DperE, NperD, num_vectors, neurons_per_vector, training_time,
                          testing_time, cleanup_params, ensemble_params, oja_learning_rate,
                          oja_scale, pre_tau, post_tau, pes_learning_rate, **kwargs):

    cleanup_n = neurons_per_vector * num_vectors

    # Draw the training vectors as random HRRs from a fresh vocabulary.
    vocab = hrr.Vocabulary(dim)
    training_vectors = [vocab.parse("x"+str(i)).v for i in range(num_vectors)]

    # Report the pairwise similarities between training vectors; for random
    # high-dimensional HRRs these should all be near zero.
    print "Training Vector Similarities:"
    simils = []

    if num_vectors > 1:
        for a, b in itertools.combinations(training_vectors, 2):
            s = np.dot(a, b)
            simils.append(s)
            print s
        print "Mean"
        print np.mean(simils)
        print "Max"
        print np.max(simils)
        print "Min"
        print np.min(simils)

    # Test on HRR-noised versions of each training vector, plus one novel
    # random HRR.
    noise = nf.make_hrr_noise(dim, 2)
    testing_vectors = [noise(tv) for tv in training_vectors] + [hrr.HRR(dim).v]

    ret = build_and_run(seed, dim, DperE, NperD, cleanup_n, training_vectors, training_vectors,
                        testing_vectors, training_time, testing_time, cleanup_params, ensemble_params,
                        oja_learning_rate, oja_scale, pre_tau, post_tau, pes_learning_rate)

    return ret
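The similarity report above just confirms that independently generated HRR vectors are close to orthogonal in high dimensions. A self-contained numpy sketch of the same check, using plain random unit vectors as stand-ins for the vocabulary entries (an assumption for illustration only; hrr.Vocabulary may additionally constrain pairwise similarity):

import itertools
import numpy as np

rng = np.random.RandomState(0)
dim, num_vectors = 512, 5

# Random unit vectors standing in for vocab.parse("x0"), vocab.parse("x1"), ...
vectors = [rng.randn(dim) for i in range(num_vectors)]
vectors = [v / np.linalg.norm(v) for v in vectors]

simils = [np.dot(a, b) for a, b in itertools.combinations(vectors, 2)]
print(np.mean(simils))   # near zero
print(np.max(simils))    # on the order of 1 / sqrt(dim)
print(np.min(simils))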
Example #3
def make_input(encoders):
    # Same structure as Example #1: vector_n training trials followed by
    # vector_n testing trials, each of length trial_length.
    times = []
    times.extend([trial_length for i in range(vector_n)])
    times.extend([trial_length for i in range(vector_n)])

    gens = []
    # Training inputs: HRR-noised encoders if train_on_hrr, otherwise clean encoders.
    if train_on_hrr:
        gens.extend([
            nf.output(100, False, pe, False, nf.make_hrr_noise(dim, 1))
            for pe in encoders
        ])
    else:
        gens.extend([nf.output(100, True, pe, False) for pe in encoders])
    # Testing inputs: always HRR-noised encoders.
    gens.extend([
        nf.output(100, False, pe, False, nf.make_hrr_noise(dim, 1))
        for pe in encoders
    ])

    return times, gens
Example #4
def build_and_run_vectors(seed, dim, DperE, NperD, num_vectors,
                          neurons_per_vector, training_time, testing_time,
                          cleanup_params, ensemble_params, oja_learning_rate,
                          oja_scale, pre_tau, post_tau, pes_learning_rate,
                          **kwargs):

    cleanup_n = neurons_per_vector * num_vectors

    # Training vectors: random HRRs from a fresh vocabulary (see Example #2).
    vocab = hrr.Vocabulary(dim)
    training_vectors = [
        vocab.parse("x" + str(i)).v for i in range(num_vectors)
    ]

    # Print the pairwise similarities between the training vectors.
    print "Training Vector Similarities:"
    simils = []

    if num_vectors > 1:
        for a, b in itertools.combinations(training_vectors, 2):
            s = np.dot(a, b)
            simils.append(s)
            print s
        print "Mean"
        print np.mean(simils)
        print "Max"
        print np.max(simils)
        print "Min"
        print np.min(simils)

    # Testing vectors: noised training vectors plus one novel random HRR.
    noise = nf.make_hrr_noise(dim, 2)
    testing_vectors = [noise(tv) for tv in training_vectors] + [hrr.HRR(dim).v]

    ret = build_and_run(seed, dim, DperE, NperD, cleanup_n, training_vectors,
                        training_vectors, testing_vectors, training_time,
                        testing_time, cleanup_params, ensemble_params,
                        oja_learning_rate, oja_scale, pre_tau, post_tau,
                        pes_learning_rate)

    return ret
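nf.make_hrr_noise itself is not shown in these examples; from its use here and in Example #6 it returns a callable that degrades a vector while keeping it similar to the original (see the "HRR sim:" print). One common way to produce that kind of noise is to bind and then unbind the vector with random HRRs via circular convolution. The sketch below is a hypothetical illustration of that scheme, not the actual nf implementation.

import numpy as np

def cconv(a, b):
    # Circular convolution via FFT.
    return np.real(np.fft.ifft(np.fft.fft(a) * np.fft.fft(b)))

def involution(a):
    # Approximate inverse of an HRR vector.
    return np.concatenate(([a[0]], a[:0:-1]))

def make_noise(dim, n, rng=np.random):
    # Return a callable that binds and unbinds its argument with n random
    # unit vectors; the result stays correlated with the input but is noisy.
    def noise(x):
        y = np.asarray(x, dtype=float)
        for _ in range(n):
            r = rng.randn(dim)
            r /= np.linalg.norm(r)
            y = cconv(cconv(y, r), involution(r))
        return y / np.linalg.norm(y)
    return noise

noise = make_noise(512, 2)
x = np.random.randn(512)
x /= np.linalg.norm(x)
print(np.dot(noise(x), x))   # well below 1, but far above chance (~1/sqrt(dim))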
Example #5
    print "Loaded"
except:
    print "Couldn't load."
    run_sim = True

if run_sim:

    print "Building..."
    start = time.time()
    model = nengo.Model("Network Array OJA", seed=seed)

    training_vector = np.array(hrr.HRR(dim).v)
    ortho = nf.ortho_vector(training_vector)
    ortho2 = nf.ortho_vector(training_vector)

    hrr_noise = nf.make_hrr_noise(dim, hrr_num)
    noisy_vector = hrr_noise(training_vector)
    print "HRR sim: ", np.dot(noisy_vector, training_vector)

    # --- Build input for phase 1
    gens1 = [
            nf.interpolator(1, ortho, training_vector,
               lambda x: np.true_divide(x, ttms)),
            nf.interpolator(1, training_vector,
               ortho, lambda x: np.true_divide(x, ttms)),
            nf.interpolator(1, ortho, noisy_vector,
               lambda x: np.true_divide(x, ttms)),
            nf.interpolator(1, noisy_vector,
               ortho, lambda x: np.true_divide(x, ttms)),
            nf.interpolator(1, ortho2, training_vector,
               lambda x: np.true_divide(x, ttms)),
Example #6
def _simulate(fname, seed, dim, DperE, NperD, oja_scale, oja_learning_rate, pre_tau, post_tau,
              pes_learning_rate, cleanup_params, ensemble_params, cleanup_n,
              testing_time, training_time, encoder_similarity):

    random.seed(seed)
    hrr_num = 1
    num_ensembles = int(dim / DperE)
    dim = num_ensembles * DperE  # round dim down to a multiple of DperE
    NperE = NperD * DperE
    ttms = testing_time * 1000  # testing time in ms

    print "Building..."
    model = nengo.Model("Network Array OJA", seed=seed)

    # The vector to be learned, two vectors orthogonal to it, and an
    # HRR-noised version of it (its similarity to the original is printed).
    training_vector = np.array(hrr.HRR(dim).v)
    ortho = nf.ortho_vector(training_vector)
    ortho2 = nf.ortho_vector(training_vector)

    hrr_noise = nf.make_hrr_noise(dim, hrr_num)
    noisy_vector = hrr_noise(training_vector)
    print "HRR sim: ", np.dot(noisy_vector, training_vector)

    # --- Build input for phase 1: interpolators between the orthogonal probes,
    # the training vector and its HRR-noised version.
    gens1 = [
            nf.interpolator(1, ortho, training_vector,
               lambda x: np.true_divide(x, ttms)),
            nf.interpolator(1, training_vector,
               ortho, lambda x: np.true_divide(x, ttms)),
            nf.interpolator(1, ortho, noisy_vector,
               lambda x: np.true_divide(x, ttms)),
            nf.interpolator(1, noisy_vector,
               ortho, lambda x: np.true_divide(x, ttms)),
            nf.interpolator(1, ortho2, training_vector,
               lambda x: np.true_divide(x, ttms)),
            nf.interpolator(1, training_vector,
               ortho2, lambda x: np.true_divide(x, ttms)),
            ]
    times1 = [testing_time] * 6
    phase1_input = nf.make_f(gens1, times1)

    # --- Build input for phase 2: present the training vector for 90% of the
    # training time, then zeros, then the same interpolator probes as phase 1.
    gens2 = [
            nf.output(100, True, training_vector, False),
            nf.output(100, True, np.zeros(dim), False),
            nf.interpolator(1, ortho, training_vector,
               lambda x: np.true_divide(x, ttms)),
            nf.interpolator(1, training_vector,
               ortho, lambda x: np.true_divide(x, ttms)),
            nf.interpolator(1, ortho, noisy_vector,
               lambda x: np.true_divide(x, ttms)),
            nf.interpolator(1, noisy_vector,
               ortho, lambda x: np.true_divide(x, ttms)),
            nf.interpolator(1, ortho2, training_vector,
               lambda x: np.true_divide(x, ttms)),
            nf.interpolator(1, training_vector,
               ortho2, lambda x: np.true_divide(x, ttms)),
            ]

    times2 = [0.9 * training_time, 0.1 * training_time] + [testing_time] * 6
    phase2_input = nf.make_f(gens2, times2)

    # Collect cleanup_n vectors orthogonal to the training vector, one per
    # cleanup neuron.
    orthos = [ortho]
    if cleanup_n > 1:
        orthos.append(ortho2)
    if cleanup_n > 2:
        orthos.extend([nf.ortho_vector(training_vector) for i in range(max(cleanup_n - 2, 0))])

    # Each encoder is a normalized mixture of the training vector and one of
    # the orthogonal vectors; encoder_similarity controls how close the
    # encoders start out to the training vector.
    p = encoder_similarity
    encoders = [p * training_vector + (1 - p) * o for o in orthos]
    encoders = np.array([enc / np.linalg.norm(enc) for enc in encoders])
    print [np.dot(enc, training_vector) for enc in encoders]
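Assuming nf.ortho_vector returns, as its name and usage suggest, a unit vector orthogonal to its argument, and given that HRR vectors are approximately unit length, the similarities printed above should come out near p / sqrt(p^2 + (1 - p)^2). A standalone numpy check of that relationship (the Gram-Schmidt construction of the orthogonal vector is an assumption, used only for illustration):

import numpy as np

rng = np.random.RandomState(0)
dim, p = 512, 0.3

t = rng.randn(dim)
t /= np.linalg.norm(t)

# Unit vector orthogonal to t (Gram-Schmidt on a random vector).
o = rng.randn(dim)
o -= np.dot(o, t) * t
o /= np.linalg.norm(o)

enc = p * t + (1 - p) * o
enc /= np.linalg.norm(enc)

print(np.dot(enc, t))                    # measured similarity
print(p / np.sqrt(p**2 + (1 - p)**2))    # predicted similarity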