Example #1
anneal = LinearAnnealing(n_anneal)

# Increases the variance by a multiplicative factor that slowly goes down to 1
anneal['T'] = T      # [(iteration, value),... ]

# Reduces the truncation rate so as not to prematurely exclude data
anneal['Ncut_factor'] = Ncut     

#Simulated annealing of parameters
#anneal['W_noise'] = Wnoise

#Include prior parameters in the annealing schedule
anneal['anneal_prior'] = False


output_path = create_output_path(basename = job_name)

model = BSC_ET(D, H, Hprime, gamma, to_learn=['W','sigma','pi','mu'])

data = {'y':patches}


out_fname = output_path + "/data.h5"


# Set up logging/output: print selected quantities and store results to HDF5
print_list = ('T', 'Q', 'pi', 'sigma', 'N', 'MAE', 'L')
dlog.set_handler(print_list, TextPrinter)
h5store_list = ('W', 'pi', 'sigma', 'y', 'MAE', 'N','L','Q','mu')
dlog.set_handler(h5store_list, StoreToH5, output_path +'/result.h5')
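
The example above only configures the model, the data, and the logging; the training loop itself is not shown. Below is a minimal sketch of how such a setup is typically driven, assuming (as in the pulp/prosper API these names suggest) that EM accepts model and anneal keyword arguments, that the model exposes a standard_init() routine, and that T and Ncut were defined earlier as schedules of the form [(iteration, value), ...] as noted in the comment above:

# Illustrative sketch only -- verify the names against the installed pulp version.
em = EM(model=model, anneal=anneal)
em.data = data                          # {'y': patches}
em.lparams = model.standard_init(data)  # assumed parameter initializer
em.run()
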
Example #2
#test_part = perc_clip(test_part)

test_part = scaler.fit_transform(test_part)
#test_part = test_part / np.sum(test_part,axis=1)[:,np.newaxis]
patchsize = (6,48)
patches_flat = np.reshape(
    extract_patches_2d(test_part, patchsize),
    (test_part.shape[0] - patchsize[0] + 1, -1))

#patches_flat = scaler.fit_transform(patches_flat)

#patches_flat = (patches_flat-np.mean(patches_flat))/np.std(patches_flat)



output_path = create_output_path()

N = patches_flat.shape[0]
D = patches_flat.shape[1]

H = 100

Hprime = 7
gamma = 5

model = BSC_ET(D, H, Hprime, gamma, to_learn=['W','sigma','pi'])

data = {'y':patches_flat}


out_fname = output_path + "/data.h5"
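
As a quick sanity check on the patch extraction above: with patchsize = (6, 48) and an input whose second dimension is exactly 48, extract_patches_2d slides only along the first axis, so it returns test_part.shape[0] - 6 + 1 patches and each flattened patch holds 6 * 48 = 288 values. A standalone sketch (the 100 x 48 demo array is purely illustrative):

import numpy as np
from sklearn.feature_extraction.image import extract_patches_2d

demo = np.random.rand(100, 48)                      # stand-in for test_part
patches = extract_patches_2d(demo, (6, 48))         # shape (95, 6, 48)
flat = patches.reshape(demo.shape[0] - 6 + 1, -1)   # shape (95, 288)
print(patches.shape, flat.shape)
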
Example #3
dlog.progress("Running %d parallel processes" % comm.size) 
dlog.progress("Using accelerted functions: %s" % accel.backend)
dlog.progress("Reading paramfile %s" % paramfile)
    
# Some default parameter values
#TODO: Agree upon default values... 
enable_tracing = True
partial_a = 1.  
partial_b = 1.
partial_c = 1.

# Read paramfile
execfile(paramfile)

#=================== Create output path and files ==============================
output_path = create_output_path(paramfile)
result_path = output_path + 'result.h5'    
if comm.rank == 0: #We keep a copy of this file mainly because of the ET params
    copyfile(paramfile, output_path+"paramfile.py")

dlog.progress("Output directory: %s" % output_path)

#============================= Load data =====================================
data_file = openFile(datafile, 'r')
N_data = data_file.root.patches.shape[0]

if N_data < N:
    dlog.progress("WARNING: N=%d chosen but only %d data points available. " % (N, N_data))
    N = N_data

# Partition the workload
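
The snippet ends at the partitioning step. A generic sketch of how the N data points might be split across the MPI ranks (the names my_N, first_y, last_y, my_y are illustrative and not taken from the original source):

my_N = N // comm.size                   # points per rank
first_y = comm.rank * my_N
last_y = first_y + my_N
if comm.rank == comm.size - 1:          # last rank takes any remainder
    last_y = N
my_y = data_file.root.patches[first_y:last_y]   # PyTables slice -> numpy array
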
Example #4
dlog.progress("Running %d parallel processes" % comm.size)
dlog.progress("Using accelerted functions: %s" % accel.backend)
dlog.progress("Reading paramfile %s" % paramfile)

# Some default parameter values
#TODO: Agree upon default values...
enable_tracing = True
partial_a = 1.
partial_b = 1.
partial_c = 1.

# Read paramfile
execfile(paramfile)

#=================== Create output path and files ==============================
output_path = create_output_path(paramfile)
result_path = output_path + 'result.h5'
if comm.rank == 0:  #We keep a copy of this file mainly because of the ET params
    copyfile(paramfile, output_path + "paramfile.py")

dlog.progress("Output directory: %s" % output_path)

#============================= Load data =====================================
data_file = openFile(datafile, 'r')
N_data = data_file.root.patches.shape[0]

if N_data < N:
    dlog.progress("WARNING: N=%d chosen but only %d data points available. " %
                  (N, N_data))
    N = N_data
Example #5
from random import sample

# Import
from pulp.utils import create_output_path, check_basis
from pulp.utils.parallel import pprint

from pulp.utils.datalog import dlog, StoreToH5, TextPrinter, StoreToTxt
from pulp.visualize.gui import GUI, RFViewer, YTPlotter

from pulp.em import EM
from pulp.em.annealing import LinearAnnealing
from pulp.em.camodels.bsc_et import BSC_ET

# Datalogger presettings:
output_path = create_output_path()

# -------------------------------- Parameters ---------------------------------

# Number of datapoints:
N = 1000

# Number of pixels (will be squared for complete image size):
D2 = 5
D = D2**2

# Number of hidden causes (set automatically to 2*D2 for bars test):
H = 2 * D2

# ET approximation parameters
Hprime = 5
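
The parameters above describe the standard bars test: each of the H = 2*D2 hidden causes is a single horizontal or vertical bar in a D2 x D2 image. A sketch of how such data is commonly generated (pi_gen and sigma_gen are assumed values, not taken from this example):

import numpy as np

pi_gen = 2.0 / H                  # on average two active bars per image (assumed)
sigma_gen = 0.1                   # additive Gaussian noise level (assumed)

W_gen = np.zeros((H, D2, D2))
for i in range(D2):
    W_gen[i, i, :] = 1.0          # horizontal bars
    W_gen[D2 + i, :, i] = 1.0     # vertical bars
W_gen = W_gen.reshape(H, D)

s = (np.random.rand(N, H) < pi_gen).astype(np.float64)    # binary hidden causes
y = np.dot(s, W_gen) + sigma_gen * np.random.randn(N, D)  # superposition + noise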