Example #1
0
import experiments as exp

#%%
# Experiment configuration: random Gaussian input patterns labelled by
# random dichotomies over the classes.
num_class = 8   # number of input classes (conditions)
num_dich = 2    # number of output dichotomies (binary labelings of the classes)
ovlp = 0        # overlap between sampled dichotomies (forwarded to RandomDichotomies)
N = 100         # presumably hidden-layer width -- not used in this view, TODO confirm
N_out = 10      # presumably output dimension -- not used in this view, TODO confirm
N_list = None   # optional explicit list of sizes; None triggers the (commented) directory scan below

# which input/output representation to load/analyse
input_type = 'task_inp'
# output_type = 'factored'
# output_type = 'rotated1.0'
output_type = [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0]

# binary labels: num_dich random dichotomies over num_class classes
task = util.RandomDichotomies(num_class, num_dich, overlap=ovlp)

sample_dichotomies = num_dich
# sample_dichotomies = None

# dataset of random Gaussian class patterns in a 100-d input space
this_exp = exp.random_patterns(task,
                               SAVE_DIR,
                               num_class=num_class,
                               dim=100,
                               var_means=1)

# FOLDERS = 'results/continuous/%d_%d/%s/%s/'%(num_class,num_dich,input_type, output_type)

# if (N_list is None):
#     files = os.listdir(SAVE_DIR+FOLDERS)
#     param_files = [f for f in files if 'parameters' in f]
Example #2
0
    def deriv(self, x):
        """Elementwise derivative of the activation function at ``x``.

        If ``self.linear_grad`` is set, the gradient is taken as if the
        activation were linear (all ones); otherwise the derivative of
        tanh, ``1 - tanh(x)**2``, is returned.

        Args:
            x: pre-activation tensor.

        Returns:
            Tensor of the same shape (and dtype/device) as ``x``.
        """
        if self.linear_grad:
            # ones_like preserves x's dtype and device; torch.ones(x.shape)
            # always allocated a default (float32, CPU) tensor, which breaks
            # for CUDA or float64 inputs.
            return torch.ones_like(x)
        else:
            # torch.tanh avoids constructing a throwaway nn.Tanh module on
            # every call; numerically identical to 1 - nn.Tanh()(x).pow(2).
            return 1 - torch.tanh(x).pow(2)

#%% data
# Synthetic dataset: 3 binary latent variables, each embedded in its own
# dim_inp-dimensional subspace of the input.
num_var = 3
dim_inp = 25 # dimension per variable
ndat = 5000 # total
noise = 0.1   # presumably additive noise std -- not used in this view, TODO confirm

# apply_rotation = False
apply_rotation = True

# input variables: 3 dichotomies over the 8 conditions
input_task = util.RandomDichotomies(d=[(0,1,2,3),(0,2,4,6),(0,3,4,7)])
# output_task = util.RandomDichotomies(d=[(0,1,6,7), (0,2,5,7)]) # xor of first two
output_task = util.RandomDichotomies(d=[(0,1,6,7)]) # 3d xor
# output_task = util.RandomDichotomies(d=[(0,1,6,7), (0,4,5,6)]) # 3d xor
# output_task = util.RandomDichotomies(d=[(0,1,2,3),(0,2,4,6)])
# output_task = util.RandomDichotomies(d=[(0,1,4,5),(0,2,5,7),(0,1,6,7)]) # 3 incompatible dichotomies
# input_task = util.RandomDichotomies(d=[(0,1),(0,2)])
# output_task = util.RandomDichotomies(d=[(0,1)])

# generate inputs
# random condition index in [0, 2**num_var) for each datum
inp_condition = np.random.choice(2**num_var, ndat)
# var_bit = (np.random.rand(num_var, num_data)>0.5).astype(int)
# binary value of each input variable per datum, shape (num_var, ndat)
var_bit = input_task(inp_condition).numpy().T

# per-variable mean pattern: one random dim_inp-d pattern when the bit is 1,
# a different random pattern when the bit is 0
mns = np.random.randn(dim_inp,num_var,1)*var_bit[None,:,:] \
    + np.random.randn(dim_inp,num_var,1)*(1-var_bit[None,:,:])
Example #3
0
    iden = 1

# Tile the label plane num_rep x num_rep times and scatter the (first 1000)
# fake labels coloured by this neuron's activity, offset by (i*d0, j*d1).
for i in range(num_rep):
    for j in range(num_rep):
        # NOTE(review): if iden == 1 the (iden**(j+i)) factor is a no-op;
        # the commented alternatives flipped the sign per tile -- confirm
        # which mirroring is intended.
        plt.scatter((iden**(j + i)) * fake_labels[:1000, 0] + i * d0,
                    fake_labels[:1000, 1] + j * d1,
                    c=activity)
        # plt.scatter(((-1)**i)*fake_labels[:,0],fake_labels[:,1]+i*d1,c=activity)
        # plt.scatter(fake_labels[:,0]+i*d0,fake_labels[:,1]+i*d1,c=activity)

plt.title('Neuron %d' % which_neuron)
plt.axis('equal')

#%% Representation from a simple linear task

# single dichotomy over 8 classes, no overlap
task = util.RandomDichotomies(8, 1, 0)
# task = util.ParityMagnitude()

# this_exp = exp.mnist_multiclass(task, SAVE_DIR, abstracts=abstract_variables)
this_exp = exp.random_patterns(task,
                               SAVE_DIR,
                               num_class=8,
                               dim=100,
                               var_means=1)

# random direction in input space along which a continuous component is added
coding_dir = np.random.randn(100)

# one random scalar per training datum, scaled by 10
cont_comp = np.random.randn(this_exp.train_data[0].shape[0], 1) * 10
# inputs = class patterns + continuous component along coding_dir
input_states = (this_exp.train_data[0].data +
                cont_comp * coding_dir[None, :]).float()
Example #4
0
# Classification w/ structured inputs
# NOTE(review): this elif continues an if-chain on which_data whose head is
# outside this view.
elif which_data == 'struc_class':
    num_var = 2
    dim_inp = 1  # dimension per variable
    noise = 0.0

    ndat = 5000  # total number of data points

    num_cond = 2**num_var  # number of distinct conditions

    apply_rotation = False
    # apply_rotation = True

    # input variables: 2 dichotomies over the 4 conditions
    # input_task = util.RandomDichotomies(d=[(0,1,2,3),(0,2,4,6),(0,1,4,5)])
    input_task = util.RandomDichotomies(d=[(0, 1), (0, 2)])
    # task = util.RandomDichotomies(d=[(0,3,5,6)]) # 3d xor
    # task = util.RandomDichotomies(d=[(0,1,6,7)]) # 2d xor
    # task = util.RandomDichotomies(d=[(0,1,3,5),(0,2,3,6),(0,1,2,4)]) # 3 corners
    # task = util.RandomDichotomies(d=[(0,1,3,5)]) # corner dichotomy
    task = util.RandomDichotomies(d=[(0, 3)])

    # generate inputs
    # random condition index in [0, num_cond) per datum
    inp_condition = np.random.choice(2**num_var, ndat)
    # inp_condition = np.arange(ndat)
    # var_bit = (np.random.rand(num_var, num_data)>0.5).astype(int)
    # binary value of each input variable per datum, shape (num_var, ndat)
    var_bit = input_task(inp_condition).numpy().T

    # one unit-norm random direction per input variable
    means = np.random.randn(num_var, dim_inp)
    means /= la.norm(means, axis=1, keepdims=True)
Example #5
0
# decoder weights for the readout layer; None means the experiment's default
readout_weights = None
# readout_weights = students.BinaryReadout
# readout_weights = students.PositiveReadout

# find experiments 
# Pick the experiment type from which_task (set outside this view).
if which_task == 'mnist': 
    this_exp = exp.mnist_multiclass(task, SAVE_DIR, 
                                    z_prior=latent_dist,
                                    num_layer=num_layer,
                                    weight_decay=decay,
                                    decoder=readout_weights,
                                    nonlinearity=nonlinearity,
                                    good_start=good_start,
                                    init_coding=coding_level)
elif which_task == 'mog':
    # mixture-of-Gaussians inputs: num_var dichotomies over num_cond classes
    task = util.RandomDichotomies(num_cond,num_var)
    this_exp = exp.random_patterns(task, SAVE_DIR, 
                                    num_class=num_cond,
                                    dim=100,
                                    var_means=1,
                                    z_prior=latent_dist,
                                    num_layer=num_layer,
                                    weight_decay=decay,
                                    decoder=readout_weights,
                                    nonlinearity=nonlinearity,
                                    good_start=good_start,
                                    init_coding=coding_level,
                                    rot=rotation)
elif which_task == 'structured':
    # binary-cube inputs embedded in 100 dims (branch continues past this view)
    inp_task = tasks.EmbeddedCube(tasks.StandardBinary(int(np.log2(num_cond))),100,noise_var=0.1)
    # inp_task = tasks.TwistedCube(tasks.StandardBinary(2), 100, f=rotation, noise_var=0.1)
Example #6
0
import util
import experiments as exp
import plotting as dicplt

#%% data
# Synthetic dataset: 3 binary variables; the first num_recoded bits are
# jointly re-coded as random category patterns.
num_var = 3
dim_inp = 1  # dimension per variable
num_data = 5000  # total
noise = 0.05  # presumably additive noise std -- not used in this view, TODO confirm

num_recoded = 2  # number of input bits represented by random patterns

apply_rotation = False
# apply_rotation = True

# input variables: 3 dichotomies over the 8 conditions
input_task = util.RandomDichotomies(d=[(0, 1, 2, 3), (0, 2, 4, 6), (0, 1, 4,
                                                                    5)])
# output_task = util.RandomDichotomies(d=[(0,1,6,7), (0,2,5,7)]) # xor of first two
output_task = util.RandomDichotomies(d=[(0, 3, 5, 6)])  # 3d xor
# output_task = util.RandomDichotomies(d=[(0,1,6,7), (0,4,5,6)]) # 3d xor
# output_task = util.RandomDichotomies(d=[(0,1,2,3),(0,2,4,6)])
# output_task = util.RandomDichotomies(d=[(0,1,4,5),(0,2,5,7),(0,1,6,7)]) # 3 incompatible dichotomies
# input_task = util.RandomDichotomies(d=[(0,1),(0,2)])
# output_task = util.RandomDichotomies(d=[(0,1)])

# random condition index in [0, 2**num_var) per datum
inp_condition = np.random.choice(2**num_var, num_data)
# var_bit = (np.random.rand(num_var, num_data)>0.5).astype(int)
# binary value of each input variable per datum, shape (num_var, num_data)
var_bit = input_task(inp_condition).numpy().T
# joint value of the first num_recoded bits, as a decimal category index
inp_subcondition = util.decimal(var_bit[:num_recoded, :].T).astype(int)

# one mean per variable, plus one random pattern per re-coded category
means = np.random.randn(num_var, dim_inp)
cat_means = np.random.randn(2**num_recoded, dim_inp * num_recoded)
Example #7
0
from matplotlib import animation as anime
from itertools import permutations, combinations
from sklearn import svm, manifold, linear_model
from tqdm import tqdm

# this is my code base, this assumes that you can access it
import students
import assistants
import util
import experiments as exp

#%%
# Delayed-logic task setup: inputs arrive over time with empty delay periods.
num_cond = 8
num_var = 2

# num_var dichotomies over num_cond classes, no overlap
task = util.RandomDichotomies(num_cond,num_var,0)
# task = util.ParityMagnitude()

# this_exp = exp.mnist_multiclass(task, SAVE_DIR, abstracts=abstract_variables)
this_exp = exp.random_patterns(task, SAVE_DIR, 
                               num_class=8,
                               dim=100,
                               var_means=1,
                               var_noise=0.1)

#%% set up the task
p = 2**num_var  # number of stimulus conditions
allowed_actions = [0,1,2]
# allowed_actions = [0]
p_action = [0.8,0.1,0.1]  # probability of each allowed action
# p_action = [1.0]
empty_time = 20  # number of empty time steps between inputs

# nonlinearity = 'ReLU'
nonlinearity = 'Tanh'

# which logical function of the input bits to use as the output
# which_task = 5
which_task = 5

inp_channels = 1

# decoder weights for the readout layer; None means the experiment's default
readout_weights = None
# readout_weights = students.BinaryReadout
# readout_weights = students.PositiveReadout

# input variables: 3 dichotomies over the 8 conditions
input_task = util.RandomDichotomies(d=[(0, 1, 2, 3), (0, 2, 4, 6), (0, 1, 4,
                                                                    5)])
output_task = util.LogicalFunctions(d=[(0, 1, 2, 3), (0, 2, 4, 6),
                                       (0, 1, 4, 5)],
                                    function_class=which_task)  # 3d xor

num_cond = 8

this_exp = exp.delayed_logic(input_channels=inp_channels,
                             task=output_task,
                             input_task=input_task,
                             SAVE_DIR=SAVE_DIR,
                             time_between=empty_time,
                             nonlinearity=nonlinearity)

# directory where this experiment's results are stored
this_folder = SAVE_DIR + this_exp.folder_hierarchy()
Example #9
0
# decoder weights for the readout layer (None above this view means default)
# readout_weights = students.BinaryReadout
# readout_weights = students.PositiveReadout

# prior over the latent representation; None disables the latent prior
latent_dist = None
# latent_dist = students.GausId

H = 100 # number of hidden units

nonlinearity = 'ReLU'
# nonlinearity = 'Tanh'
# nonlinearity = 'Sigmoid'
# nonlinearity = 'LeakyReLU'

print('- - - - - - - - - - - - - - - - - - - - - - - - - - ')        
if this_task == 'mnist': 
    task = util.RandomDichotomies(num_class, num_dich, overlap=ovlp, use_mse=gaus_obs)
    exp = experiments.mnist_multiclass(N=N, 
                                      task=task, 
                                      SAVE_DIR=SAVE_DIR, 
                                      H=H,
                                      nonlinearity=nonlinearity,
                                      num_layer=num_layer,
                                      z_prior=latent_dist,
                                      weight_decay=decay,
                                      decoder=readout_weights,
                                      nepoch=nepoch,
                                      sample_dichotomies=sample_dichotomies,
                                      init=init,
                                      skip_metrics=skip_metrics,
                                      good_start=ols_initialised,
                                      init_coding=coding_level,
Example #10
0
import students
import assistants
import util
import experiments as exp
import plotting as dicplt

#%% data
# Dataset with a context bit that remaps the action/outcome -> stimulus mapping.
dim_inp = 32  # dimension per variable
num_data = 5000  # total
noise = 0.05  # presumably additive noise std -- not used in this view, TODO confirm

switch_fraction = 0.1  # presumably fraction of context switches -- TODO confirm

# 3 standard binary variables over the 8 conditions
input_task = util.StandardBinary(3)
# output_task = util.RandomDichotomies(d=[(0,1,6,7), (0,2,5,7)]) # xor of first two
output_task = util.RandomDichotomies(d=[(0, 3, 5, 6)])  # 3d xor

# random condition index in [0, 8) per datum
inp_condition = np.random.choice(2**3, num_data)
# binary variable values per datum, shape (3, num_data)
var_bit = input_task(inp_condition).numpy().T
action_outcome = var_bit[:2, :]  # first two bits: action and outcome
context = var_bit[2, :]          # third bit: context

# stimulus index in [0, 4) from the (action, outcome) pair
stimulus = util.decimal(action_outcome.T).astype(int)
# in context 1, the stimulus mapping is shifted cyclically by one
stimulus[context == 1] = np.mod(stimulus[context == 1] + 1,
                                4)  # effect of context

# random mean patterns for positive/negative variable values and stimuli
means_pos = np.random.randn(2, dim_inp)
means_neg = np.random.randn(2, dim_inp)
stim_pattern = np.random.randn(4, dim_inp)

mns = (means_pos[:, None, :] *
Example #11
0
                                       num_class=num_cond,
                                       dim=100,
                                       var_means=1,
                                       z_prior=latent_dist,
                                       num_layer=num_layer,
                                       weight_decay=decay,
                                       decoder=readout_weights,
                                       good_start=good_start,
                                       init_coding=coding_level,
                                       rot=rotation)
    # NOTE(review): this elif continues an if-chain on which_task whose
    # earlier branches are outside this view.
    elif which_task == 'structured':
        # Build the standard binary code of the num_cond conditions:
        # bits[i] lists the conditions where bit i is 0.
        bits = np.nonzero(1 - np.mod(
            np.arange(num_cond)[:, None] //
            (2**np.arange(np.log2(num_cond))[None, :]), 2))
        # one dichotomy (set of conditions) per bit of the binary code
        decs = np.split(bits[0][np.argsort(bits[1])], int(np.log2(num_cond)))
        inp_task = util.RandomDichotomies(d=decs)
        # output: logical function (indexed by num_var) of those dichotomies
        task = util.LogicalFunctions(d=decs, function_class=num_var)
        this_exp = exp.structured_inputs(task,
                                         input_task=inp_task,
                                         SAVE_DIR=SAVE_DIR,
                                         dim_inputs=25,
                                         noise_var=0.1,
                                         num_layer=num_layer,
                                         z_prior=latent_dist,
                                         weight_decay=decay,
                                         decoder=readout_weights,
                                         nonlinearity=nonlinearity,
                                         init_coding=coding_level)

    # directory where this experiment's results are stored
    this_folder = SAVE_DIR + this_exp.folder_hierarchy()
Example #12
0
from sklearn import svm, discriminant_analysis, manifold, linear_model
import scipy.stats as sts
import scipy.linalg as la

# import umap
from cycler import cycler
from tqdm import tqdm

from students import *
from assistants import LinearDecoder
import experiments as exp
import util

#%% Model specification -- for loading purposes
# These settings must match the trained model being loaded.
# task = util.ParityMagnitude()
task = util.RandomDichotomies(8, 2, 0)  # 2 dichotomies over 8 classes, no overlap
# task = util.ParityMagnitudeEnumerated()
# task = util.Digits()
# task = util.DigitsBitwise()

# obs_dist = Bernoulli(1)
# prior over the latent representation; None disables the latent prior
latent_dist = None
# latent_dist = GausId
nonlinearity = 'ReLU'
# nonlinearity = 'LeakyReLU'

num_layer = 1  # number of hidden layers

decay = 0.0  # weight decay coefficient

H = 100  # number of hidden units