Code example #1
0

# In[4]:

# Load the learning trajectories (free energy L and model parameters
# mu, W, sigma, pi, one entry per EM iteration) from the first result
# file matching this model's output directory.
# NOTE(review): glob() order is filesystem-dependent — confirm exactly one
# directory matches 'output/' + model_name + '*'.
with h5py.File(glob.glob('output/'+model_name+'*')[0] + '/result.h5','r') as results:
    L,  mu, W, sigma, pi = [results[key][()] for key in ['L',
                                                       'mu',
                                                       'W',
                                                       'sigma',
                                                       'pi']]


# In[15]:
# ## $H\pi$ (sparsity) over iterations

# Rebuild the BSC model with the dimensions used during training:
# D (observed dim), H (latent units), Hprime/gamma (ET truncation sizes).
bsc = BSC_ET(*[model_specifications[key] for key in ['D','H','Hprime','gamma']])

# Wrap the observations for the inference API.
# NOTE(review): this rebinds `data` from an array to a dict — the original
# array is no longer reachable under that name after this cell.
data = {'y':data}
# Use the final (last-iteration) parameter estimates for inference.
model_params = {'W':W[-1],'pi':pi[-1],'sigma':sigma[-1]}

# One-step schedule at temperature 1.0, i.e. plain (un-annealed) inference.
anneal = LinearAnnealing(1)
anneal['T'] = [(0,1.)]
inferred_data = bsc.inference(anneal,model_params,data,no_maps=1)

#test_patches = np.reshape(data['y'],(data['y'].shape[0],)+patchsize)

#inferred_data = {key:np.squeeze(value) for key,value in inferred_data.iteritems()}


# In[10]:
Code example #2
0
File: analyze_BSC.py  Project: mjboos/auditoryBSC
# Increases variance by a multiplicative factor that slowly goes down to 1
anneal['T'] = T      # [(iteration, value),... ]

# Reduces truncation rate so as not to prematurely exclude data
anneal['Ncut_factor'] = Ncut     

# Simulated annealing of parameters (disabled here)
#anneal['W_noise'] = Wnoise

# Include prior parameters in the annealing schedule
anneal['anneal_prior'] = False


# Per-run output directory named after the job.
output_path = create_output_path(basename = job_name)

# BSC model with Expectation-Truncation approximation; learn all four
# parameters (W, sigma, pi, mu).
model = BSC_ET(D, H, Hprime, gamma, to_learn=['W','sigma','pi','mu'])

# Training data: dict with observations under key 'y', as the EM API expects.
data = {'y':patches}


out_fname = output_path + "/data.h5"


#setting up logging/output
# Quantities printed to the console each iteration...
print_list = ('T', 'Q', 'pi', 'sigma', 'N', 'MAE', 'L')
dlog.set_handler(print_list, TextPrinter)
# ...and quantities persisted to result.h5 for later analysis.
h5store_list = ('W', 'pi', 'sigma', 'y', 'MAE', 'N','L','Q','mu')
dlog.set_handler(h5store_list, StoreToH5, output_path +'/result.h5')

###### Initialize model #######
# Initialize (Random) model parameters
Code example #3
0
    # Live visualization of the learned scalars and the receptive fields.
    dlog.set_handler(["pi"], YTPlotter)
    dlog.set_handler(["sigma"], YTPlotter)
    dlog.set_handler("y", RFViewer, rf_shape=(D2, D2))

    # Prepare ground-truth GFs (bars): H generative fields on a D2 x D2 grid,
    # one horizontal bar per row and one vertical bar per column.
    # NOTE(review): assumes H >= 2*D2 and D == D2*D2 — confirm with caller.
    W_gt = np.zeros((H, D2, D2))
    for i in range(D2):  # range, not Py2-only xrange (works on Python 2 and 3)
        W_gt[i, i, :] = bar_value
        W_gt[D2 + i, :, i] = bar_value
    # Flip the sign of a random fraction `neg_bars` of the bars.
    if neg_bars > 0.0:
        # builtin int(): np.int was a deprecated alias removed in NumPy 1.20+
        W_gt[sample(range(H), int(H * neg_bars))] *= -1
    W_gt = W_gt.reshape((H, D))
    W_gt += np.random.normal(size=(H, D), scale=0.5)  # perturb the ground truth

    # Prepare model...
    model = BSC_ET(D, H, Hprime, gamma, to_learn)
    mparams = {"W": W_gt, "pi": pi_gt, "sigma": sigma_gt, "mu": mu_gt}
    mparams = comm.bcast(mparams)  # all MPI ranks share identical parameters

    pprint("Generating Model Parameters:")
    # builtin str(): np.str was a deprecated alias removed in NumPy 1.20+
    pprint("pi = " + str(mparams["pi"]) + "; sigma = " + str(mparams["sigma"]))

    # Generate training data (each rank draws its share of the N samples)
    my_N = N // comm.size
    my_data = model.generate_data(mparams, my_N)
    dlog.append("y", my_data["y"][0:20])  # store a few samples for inspection

    # Choose annealing schedule
    anneal = LinearAnnealing(anneal_steps)
    anneal["T"] = [(15, start_temp), (-10, end_temp)]
    anneal["Ncut_factor"] = [(0, 0.0), (2.0 / 3, 1.0)]
Code example #4
0
File: AuditoryBSC.py  Project: mjboos/auditoryBSC
#patches_flat = patches_flat - intercept


# In[6]:

output_path = create_output_path()

# N data points, each a flattened patch of D observed dimensions.
N = patches_flat.shape[0]
D = patches_flat.shape[1]

# H latent units; Hprime/gamma control the Expectation-Truncation
# approximation (Hprime candidate units, up to gamma active at once).
H = 40

Hprime = 6
gamma = 5

# 'mu' is not learned here — presumably the data is centered; TODO confirm.
model = BSC_ET(D, H, Hprime, gamma, to_learn=['W','sigma','pi'])

# Observations wrapped under key 'y', as the EM API expects.
data = {'y':patches_flat}


out_fname = output_path + "/data.h5"


# In[7]:



#setting up logging/output
# Quantities printed to the console each iteration...
print_list = ('T', 'Q', 'pi', 'sigma', 'N', 'MAE', 'L')
dlog.set_handler(print_list, TextPrinter)
# ...and quantities persisted to HDF5 for later analysis.
h5store_list = ('W', 'pi', 'sigma', 'y', 'MAE', 'N','L','Q')
Code example #5
0
    # Live visualization of the learned scalars and the receptive fields.
    dlog.set_handler(['pi'], YTPlotter)
    dlog.set_handler(['sigma'], YTPlotter)
    dlog.set_handler('y', RFViewer, rf_shape=(D2, D2))

    # Prepare ground-truth GFs (bars): one horizontal bar per row and one
    # vertical bar per column of the D2 x D2 grid.
    # NOTE(review): assumes H >= 2*D2 and D == D2*D2 — confirm with caller.
    W_gt = np.zeros((H, D2, D2))
    for i in range(D2):  # range, not Py2-only xrange (works on Python 2 and 3)
        W_gt[i, i, :] = bar_value
        W_gt[D2 + i, :, i] = bar_value
    # Flip the sign of a random fraction `neg_bars` of the bars.
    if neg_bars > 0.0:
        # builtin int(): np.int was a deprecated alias removed in NumPy 1.20+
        W_gt[sample(range(H), int(H * neg_bars))] *= -1
    W_gt = W_gt.reshape((H, D))
    W_gt += np.random.normal(size=(H, D), scale=0.5)  # perturb the ground truth

    # Prepare model...
    model = BSC_ET(D, H, Hprime, gamma, to_learn)
    mparams = {'W': W_gt, 'pi': pi_gt, 'sigma': sigma_gt, 'mu': mu_gt}
    mparams = comm.bcast(mparams)  # all MPI ranks share identical parameters

    pprint("Generating Model Parameters:")
    # builtin str(): np.str was a deprecated alias removed in NumPy 1.20+
    pprint("pi = " + str(mparams['pi']) + "; sigma = " +
           str(mparams['sigma']))

    # Generate training data (each rank draws its share of the N samples)
    my_N = N // comm.size
    my_data = model.generate_data(mparams, my_N)
    dlog.append('y', my_data['y'][0:20])  # store a few samples for inspection

    # Choose annealing schedule
    anneal = LinearAnnealing(anneal_steps)
    anneal['T'] = [(15, start_temp), (-10, end_temp)]