Example #1
    def load(self, xls_fname):
        """Load every sheet of an Excel workbook into Bunches of DataFrames."""
        xls = pd.ExcelFile(xls_fname)
        obj_dict = {}
        obj_dict['xls_fname'] = xls_fname
        df_tables = {}
        df_names = {}
        for sheet_name in xls.sheet_names:
            df_tables[sheet_name] = xls.parse(sheet_name, index_col=0)
            df_names[sheet_name] = sheet_name
        obj_dict['tables'] = utils.Bunch(**df_tables)
        obj_dict['table_names'] = utils.Bunch(**df_names)

        self.__dict__.update(**obj_dict)
        self.__refresh_components()
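Note: utils.Bunch is never defined in these snippets. In most of these projects it follows the well-known "bunch" pattern (the PyPI bunch package is cited in Example #16): a dict whose keys are also readable and writable as attributes. A minimal sketch of that pattern, as an assumption rather than any project's actual implementation:

class Bunch(dict):
    """Dict with attribute access: b.x is b['x']."""
    def __init__(self, *args, **kwargs):
        super(Bunch, self).__init__(*args, **kwargs)
        self.__dict__ = self  # attributes and keys share one namespace

b = Bunch(lamb=10, avoid_self_connections=True)
assert b.lamb == b['lamb']
b.eta_stdp = 0.001        # an attribute write is a key write
assert 'eta_stdp' in b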
Example #2
    def generate_connection_e(self, N_e):
        W = zeros((N_e, self.N_a))  # excitatory neurons X input neurons

        # Excitatory indices 0 .. N_e-1 still available for sampling
        available = set(range(N_e))
        for a in range(self.N_a):
            # e.g. connections from one input to N_u_e sampled excitatory
            # neurons (say 10 out of 200); sorted() because random.sample
            # no longer accepts sets on Python 3.11+
            temp = random.sample(sorted(available), self.N_u_e)
            # Set weight of sampled connections to 1
            W[temp, a] = 1
            if self.avoid:  # keep the input pools disjoint
                available = available.difference(temp)

        # The underscore '_' is special: it doesn't activate anything,
        # so if it is part of the letter sequence its weights are zeroed.
        if '_' in self.lookup:
            W[:, self.lookup['_']] = 0

        # Instantiate synapses object and add connections to it
        c = utils.Bunch(use_sparse=False,
                        lamb=np.inf,
                        avoid_self_connections=False)
        ans = synapses.create_matrix((N_e, self.N_a), c)
        ans.W = W

        return ans
Example #3
 def __refresh_components(self):
     # list comprehensions (not map) so the names can be iterated twice;
     # under Python 3, zipping two chained map iterators would yield nothing
     components = [str(comp) for comp in
                   self.tables.definitions.component.unique().tolist()]
     keys = [comp.replace(' ', '_').upper() for comp in components]
     self.__dict__['components'] = utils.Bunch(**dict(zip(keys, components)))
Example #4
 def generate_connection_i(self, N_i):
     c = utils.Bunch(
         use_sparse=False,
         lamb=np.inf,  # cannot be 0
         avoid_self_connections=False)
     tmpsyn = synapses.create_matrix((N_i, self.N_u), c)
     tmpsyn.set_synapses(tmpsyn.get_synapses() * 0)
     return tmpsyn
Example #5
def connect_to_proc(proc_mne, log):
    """ Connect to a process """
    null_cons_info = utils.Bunch(sock_h=None,
                                 exe_file=None,
                                 xmlrpc_url=None,
                                 prompt=None)
    cons_port, exe_file, xmlrpc_url = expand_proc_mne(proc_mne)
    if cons_port is None:
        return null_cons_info
    # print('cons_port is %d' % cons_port)
    sock_h = sock_utils.connect_to_server(log, 'localhost', cons_port)
    if sock_h is None:
        print('sock_h was None')
        return null_cons_info
    prompt = ''.join([proc_mne, ' > '])
    return utils.Bunch(sock_h=sock_h,
                       exe_file=exe_file,
                       xmlrpc_url=xmlrpc_url,
                       prompt=prompt)
Example #6
    def run(self, sorn):
        super(Experiment_mcmc_withSTDP, self).run(sorn)
        c = self.params.c

        # If filename was not set, we are in single mode
        if 'file_name' not in c:
            c.file_name = "single"

        # If state does not exist, create it
        if 'state' not in c:
            c.state = utils.Bunch()

        if c.display:
            print('Run self organization:')
        sorn.simulation(c.steps_plastic)

        ## Prepare noplastic training
        sorn.update = False
        # Run with trials
        # self.inputsource.source.N_a: Size of input source (letters, sequences), e.g. 4
        trialsource = TrialSource(self.inputsource.source, c.wait_min_train,
                                  c.wait_var_train,
                                  zeros(self.inputsource.source.N_a), 'reset')

        sorn.source = trialsource
        shuffle(sorn.x)  # {0,1}
        shuffle(sorn.y)  # {0,1}

        if c.display:
            print('\nRun training:')
        sorn.simulation(c.steps_noplastic_train)

        ## Prepare plastic testing

        # Activate stdp again
        sorn.update = True
        # Run with spont (input u is always zero)
        spontsource = NoSource(sorn.source.source.N_a)
        sorn.source = spontsource
        shuffle(sorn.x)
        shuffle(sorn.y)
        # Simulate spontaneous activity
        sorn.c.ff_inhibition_broad = 0
        if not c.always_ip:
            sorn.c.eta_ip = 0

        if c.display:
            print('\nRun testing:')
        sorn.simulation(c.steps_noplastic_test)

        return {
            'source_plastic': self.inputsource,
            'source_train': trialsource,
            'source_test': spontsource
        }
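Because Bunch is dict-backed (see the sketch after Example #1), the two existence guards at the top of run() can also be written with dict.setdefault; an equivalent sketch under that assumption:

c = self.params.c
c.setdefault('file_name', 'single')   # single mode when no file name was given
c.setdefault('state', utils.Bunch())  # make sure a state Bunch exists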
Example #7
def main():
	ap = argparse.ArgumentParser(description='Generate Grid Search configurations')
	ap.add_argument('--bmDir', default='./bm/l20/')
	ap.add_argument('--gsVersion', type=int, default=2)

	args = ap.parse_args()
	globals().update(vars(args))

	gsConfDir = './gsConfigsV{}/'.format(gsVersion)
	gsOutDir = './gsOutV{}/'.format(gsVersion)

	gsParams = utils.Bunch(GridSearchParams.getGSParams(gsVersion))
	print('Total runs needed: {}'.format(gsParams.nRuns))

	utils.mkdir(gsConfDir)

	# Write all combinations of GA parameters to output
	configId = 0
	for cld in gsParams.chromoLenDevs:
		for gps in gsParams.gaPopSizes:
			for gi in gsParams.gaIters:
				for gsr in gsParams.gaSurviveRates:
					for gcr in gsParams.gaCrossRates:
						for (gpmr, glmr) in zip(gsParams.gaPointMutateRates, gsParams.gaLimbMutateRates):
							for bmName in gsParams.bmNames:
								outputName = 'gs_{}_{}'.format(configId, bmName)
								bmOutputDir = './{}/{}/'.format(gsOutDir, outputName)
								utils.mkdir(bmOutputDir)

								configJson = OrderedDict([
									('inputFile', './{}/{}.json'.format(bmDir, bmName)),
									('xDBFile', './res/xDB.json'),
									('outputDir', bmOutputDir),
									('randSeed', '0x600d1337'),

									('chromoLenDev', cld),
									('gaPopSize', gps),
									('gaIters', gi),
									('gaSurviveRate', gsr),
									('gaCrossRate', gcr),
									('gaPointMutateRate', gpmr),
									('gaLimbMutateRate', glmr),
								])

								with open(gsConfDir + '/{}.json'.format(outputName), 'w') as fh:
									json.dump(configJson, fh,
										separators=(',', ':'),
										ensure_ascii=False,
										indent=4)

							configId = configId + 1

	print('Max GS config ID: {}'.format(configId - 1))
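The seven nested loops above enumerate a Cartesian product, which itertools.product expresses more compactly (the zip pairing of the two mutate rates is preserved as one axis). A self-contained sketch with hypothetical small grids standing in for GridSearchParams.getGSParams():

import itertools

chromoLenDevs, gaPopSizes, gaIters = [0.1, 0.2], [50], [100]
gaSurviveRates, gaCrossRates = [0.05], [0.6]
mutateRatePairs = list(zip([0.01, 0.02], [0.05, 0.10]))  # (point, limb)
bmNames = ['l10', 'l20', 'l30']

configId = 0
for cld, gps, gi, gsr, gcr, (gpmr, glmr) in itertools.product(
        chromoLenDevs, gaPopSizes, gaIters,
        gaSurviveRates, gaCrossRates, mutateRatePairs):
    for bmName in bmNames:
        print('gs_{}_{}'.format(configId, bmName))  # one config per benchmark
    configId += 1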
Example #8
 def __init__(self, obj, dlog=None):
     '''The StatsCollection object holds many statistics objects and
     distributes the calls to them. It also simplifies the collection
     of information when report() and cluster_report() are called.'''
     self.obj = obj
     self.c = utils.Bunch()
     self.disable = False
     self.methods = []
     if dlog is None:
         self.dlog = DataLog()
     else:
         self.dlog = dlog
Example #9
 def generate_connection(self, N):
     # One input pool per (X, Y) grid position; flattened to 2-D at the end
     W = np.zeros((N, self.X, self.Y))
     available = set(range(N))
     for a in range(self.X):
         for b in range(self.Y):
             # sorted(): random.sample rejects sets on Python 3.11+
             temp = random.sample(sorted(available), self.symbol)
             W[temp, a, b] = 1
             available = available.difference(temp)
     W.shape = (N, self.X * self.Y)
     c = utils.Bunch(use_sparse=False,
                     lamb=np.inf,
                     avoid_self_connections=False)
     ans = synapses.create_matrix((N, self.X * self.Y), c)
     ans.W = W
     return ans
Example #10
 def generate_connection(self, N_e):
     c = utils.Bunch(
         use_sparse=False,
         lamb=self.density * N_e,
         avoid_self_connections=False,
         #CHANGE should this be different?
         eta_stdp=self.eta_stdp)
     tmp = synapses.create_matrix((N_e, self.N), c)
     # Resample the random mask until at least one synapse exists,
     # i.e. until the realized connection density is nonzero
     noone = True
     while noone:
         tmp.set_synapses((rand(N_e, self.N) < self.density).astype(float))
         if sum(tmp.get_synapses()) > 0:
             noone = False
     return tmp
Example #11
 def generate_connection_i(self, N_i):
     c = utils.Bunch(use_sparse=False,
                     lamb=np.inf,
                     avoid_self_connections=False)
     ans = synapses.create_matrix((N_i, self.N_a), c)
     W = np.zeros((N_i, self.N_a))
     if N_i > 0:
         available = set(range(N_i))
         for a in range(self.N_a):
             # replace=False samples distinct neurons (without replacement)
             temp = np.random.choice(list(available), self.N_u_i, replace=False)
             W[temp, a] = 1
         if '_' in self.lookup:
             W[:, self.lookup['_']] = 0
     ans.W = W
     return ans
Example #12
    def generate_connection_e(self, N_e):
        W = np.zeros((N_e, self.N_a))
        available = set(range(N_e))
        for a in range(self.N_a):
            # replace=False samples distinct neurons (without replacement)
            temp = np.random.choice(list(available), self.N_u_e, replace=False)
            W[temp, a] = 1
            if self.avoid:
                available = available.difference(temp)
        if '_' in self.lookup:
            W[:, self.lookup['_']] = 0

        c = utils.Bunch(use_sparse=False,
                        lamb=np.inf,
                        avoid_self_connections=False)
        ans = synapses.create_matrix((N_e, self.N_a), c)
        ans.W = W
        return ans
Example #13
    def generate_connection_i(self, N_i):
        c = utils.Bunch(use_sparse=False,
                        lamb=np.inf,
                        avoid_self_connections=False)
        ans = synapses.create_matrix((N_i, self.N_a), c)
        W = zeros((N_i, self.N_a))
        if N_i > 0:
            available = set(range(N_i))
            for a in range(self.N_a):
                # sorted(): random.sample rejects sets on Python 3.11+
                temp = random.sample(sorted(available), self.N_u_i)
                W[temp, a] = 1
                #~ if self.avoid: # N_i is smaller -> broad inhibition?
                #~ available = available.difference(temp)
            if '_' in self.lookup:
                W[:, self.lookup['_']] = 0
        ans.W = W

        return ans
Example #14
    def generate_connection_e(self, N_e):
        W = zeros((N_e, self.N_a))

        available = set(range(N_e))
        for a in range(self.N_a):
            # sorted(): random.sample rejects sets on Python 3.11+
            temp = random.sample(sorted(available), self.N_u_e)
            W[temp, a] = 1
            if self.avoid:
                available = available.difference(temp)

        # The underscore has the special property that it doesn't
        # activate anything:
        if '_' in self.lookup:
            W[:, self.lookup['_']] = 0

        c = utils.Bunch(use_sparse=False,
                        lamb=np.inf,
                        avoid_self_connections=False)
        ans = synapses.create_matrix((N_e, self.N_a), c)
        ans.W = W

        return ans
Example #15
import numpy as np
import utils

"""
Default Parameters that are used when no other parameters are specified
"""

c = utils.Bunch()

# Number of units (e: excitatory, i: inhibitory, u: input)
c.N_e = 200
c.N_i = int(0.2*c.N_e)
c.N_u_e = int(0.05*c.N_e)
c.N = c.N_e + c.N_i

# Each submatrix expects a Bunch with some of the following fields:
# c.use_sparse = True
# Number of connections per neuron
# c.lamb = 10 (or inf to get full connectivity)
# c.avoid_self_connections = True
# c.eta_stdp = 0.001 (or 0.0 to disable)
# c.eta_istdp = 0.001 (or 0.0 to disable)
# c.sp_prob = 0.1 (or 0 to disable)
# c.sp_initial = 0.001

c.W_ee = utils.Bunch(lamb=10,
                     avoid_self_connections=True,
                     eta_ip=0.001,
                     eta_stdp=0.001,
                     sp_prob=0.1,
                     sp_initial=0.001)
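The field list in the comments above can be exercised directly. A small sketch (values illustrative only) showing how optional fields read back under the dict-subclass assumption from Example #1:

W_ee = utils.Bunch(use_sparse=True,
                   lamb=10,
                   avoid_self_connections=True,
                   eta_stdp=0.001)
assert W_ee.lamb == W_ee['lamb'] == 10
print(W_ee.get('sp_prob', 0.0))  # absent fields fall back to "disabled"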
Example #16
c.N_e = 200
c.N_i = int(np.floor(0.2 * c.N_e))
c.N = c.N_e + c.N_i
# 10 connections from any input to the excitatory neurons
c.N_u_e = int(np.floor(0.05 * c.N_e))
c.N_u_i = 0

c.double_synapses = False

# used Bunch (https://pypi.python.org/pypi/bunch)
c.W_ee = utils.Bunch(use_sparse=True,
                     lamb=0.1 * c.N_e,
                     avoid_self_connections=True,
                     eta_stdp=0.001,
                     sp_prob=0.0,
                     sp_initial=0.000,
                     no_prune=True,
                     upper_bound=1,
                     weighted_stdp=False,
                     eta_ds=0.1)

c.W_ei = utils.Bunch(use_sparse=False,
                     lamb=np.inf,
                     avoid_self_connections=False,
                     eta_istdp=0.0,
                     h_ip=0.1)

c.W_ie = utils.Bunch(use_sparse=False,
                     lamb=np.inf,
                     avoid_self_connections=False)
Example #17
def main():
	ap = argparse.ArgumentParser(description='Analyse Grid Search output files')
	# ap.add_argument('--gsConfDir', default='./gsConfigs/')
	# ap.add_argument('--gsOutDir', default='./gs_out/')
	ap.add_argument('--gsVersion', type=int, default=2)

	args = ap.parse_args()
	globals().update(vars(args))

	gsConfDir = './gsConfigsV{}/'.format(gsVersion)
	gsOutDir = './gsOutV{}/'.format(gsVersion)
	
	gsParams = utils.Bunch(GridSearchParams.getGSParams(gsVersion))
	
	scoreLineToken = '#0 score '
	aggregate = []
	confIdRange = range(0, gsParams.nRuns // len(gsParams.bmNames))  # configs per benchmark set
	for configId in confIdRange:
		for bmName in gsParams.bmNames:
			gsConfName = 'gs_{}_{}'.format(configId, bmName)
			gsConfFile = utils.normPath('{}/{}.json'.format(gsConfDir, gsConfName))
			gsOutLogFile = utils.normPath('{}/{}/log'.format(gsOutDir, gsConfName))

			conf = utils.readJSON(gsConfFile)

			with open(gsOutLogFile, 'r') as f:  # don't shadow the builtin 'file'
				scoreLine = [l for l in f.read().split('\n') if scoreLineToken in l]

			tokenClearLine = scoreLine[0][scoreLine[0].find(scoreLineToken)+len(scoreLineToken):]
			colonClearLine = tokenClearLine[:tokenClearLine.rfind(':')]
			score = float(colonClearLine)
			aggregate.append({
				'gsConfName': gsConfName,
				'conf': conf,
				'score': score
				})

	# Average the 3 bm scores
	averaged = []
	for configId in confIdRange:
		gsConfHeader = 'gs_{}_'.format(configId)
		gsGroup = [a for a in aggregate if gsConfHeader in a['gsConfName']]

		assert(len(gsGroup) == 3)
		avgScore = sum([a['score'] for a in gsGroup]) / 3.0
		averaged.append({
			'gsConfName': gsGroup[0]['gsConfName'],
			'conf': gsGroup[0]['conf'],
			'score': avgScore
			})

	scores = [a['score'] for a in averaged]
	chromoLenDevs = [a['conf']['chromoLenDev'] for a in averaged]
	gaSurviveRates = [a['conf']['gaSurviveRate'] for a in averaged]
	gaCrossRates = [a['conf']['gaCrossRate'] for a in averaged]

	estimateIndependentBest(confIdRange,
							scores,
							averaged,
							chromoLenDevs,
							gaSurviveRates,
							gaCrossRates,
							gsParams.chromoLenDevs,
							gsParams.gaSurviveRates,
							gsParams.gaCrossRates,
							gsParams.pmRatios)

	estimateBestMedoid(confIdRange,
							scores,
							averaged,
							chromoLenDevs,
							gaSurviveRates,
							gaCrossRates,
							gsParams.chromoLenDevs,
							gsParams.gaSurviveRates,
							gsParams.gaCrossRates,
							gsParams.pmRatios)

	utils.pauseCode()
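The slice-based score extraction above can be written as a single regular expression; a sketch assuming the log line has the shape '... #0 score <float>: ...' (inferred from the parsing code, not from an actual log file):

import re

def parse_score(log_text):
    # first '#0 score' line; capture the float before the following ':'
    m = re.search(r'#0 score\s+([-+0-9.eE]+)\s*:', log_text)
    return float(m.group(1)) if m else None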
Example #18
 def generate_connection_e(self, N_e):
     c = utils.Bunch(lamb=np.inf, avoid_self_connections=False)
     tmpsyn = synapses.create_matrix((N_e, self.N_i), c)
     tmpsyn.set_synapses(tmpsyn.get_synapses() * 0)
     return tmpsyn
Example #19
 def generate_connection_i(self, N_i):
     c = utils.Bunch(lamb=np.inf, avoid_self_connections=False)
     return synapses.create_matrix((N_i, self.N_i), c)
Example #20
        utils.save_model(model_ft, ckpt_file)

    time_elapsed = time.time() - since
    print('Training complete in {:.0f}m {:.0f}s'.format(time_elapsed // 60, time_elapsed % 60))
    print('Best val Acc: {:.4f}'.format(best_acc))

    # load best model weights
    model.load_state_dict(best_model_wts)
    return model, val_acc_history


if __name__ == '__main__':
    data_dir = "/home/etienneperot/workspace/datasets/snakes/"
    args = utils.Bunch()

    args.model_name = sys.argv[1]
    args.num_epochs = 20
    args.progressive_resize = False

    num_classes = 45
    batch_size = 32
    num_epochs = 15
    pretrained = True
    train_file = 'train_and_google.pkl' #change to all.pkl before submission
    resume = True
    checkpoint_file = 'checkpoints/resnet18_finetune_all.pth'

    model_ft = resnet(num_classes=num_classes, pretrained=True, resnet_model='resnet18', add_stn=False)
Example #21
 def start(self, c, obj):
     if 'history' not in c:
         c.history = utils.Bunch()
     c.history[self.counter] = 0
Example #22
def main(opts):
    # Set up model
    model_map = {
        'v3_resnet50': network.deeplabv3_resnet50,
        'v3plus_resnet50': network.deeplabv3plus_resnet50,
        'v3_resnet101': network.deeplabv3_resnet101,
        'v3plus_resnet101': network.deeplabv3plus_resnet101,
        'v3_mobilenet': network.deeplabv3_mobilenet,
        'v3plus_mobilenet': network.deeplabv3plus_mobilenet
    }
    
    best_score = 0.0
    epoch      = 0
    
    if opts.ckpt is not None and os.path.isfile(opts.ckpt):
        checkpoint = torch.load(opts.ckpt, map_location=torch.device('cpu'))
        checkpoint['teacher_opts']['save_val_results'] = opts.save_val_results
        checkpoint['teacher_opts']['ckpt'] = opts.ckpt
        opts = utils.Bunch(checkpoint['teacher_opts'])
    
    model = model_map[opts.model](num_classes=opts.num_classes, output_stride=opts.output_stride, opts=opts)
    teacher = None
    utils.set_bn_momentum(model.backbone, momentum=0.01)
    
    macs, params = utils.count_flops(model, opts)
    if opts.count_flops:
        return
    utils.create_result(opts, macs, params)
    
    # Set up optimizer and criterion
    optimizer = torch.optim.SGD(params=[
        {'params': model.backbone.parameters(), 'lr': 0.1*opts.lr},
        {'params': model.classifier.parameters(), 'lr': opts.lr},
    ], lr=opts.lr, momentum=0.9, weight_decay=opts.weight_decay)
    scheduler = utils.PolyLR(optimizer, opts.total_epochs * len(train_loader), power=0.9)
    criterion = nn.CrossEntropyLoss(ignore_index=255, reduction='mean')

    # Load from checkpoint
    if opts.ckpt is not None and os.path.isfile(opts.ckpt):
        checkpoint = torch.load(opts.ckpt, map_location=torch.device('cpu'))
        model.load_state_dict(checkpoint["model_state"])
        model = nn.DataParallel(model)
        model.to(device)
        optimizer.load_state_dict(checkpoint["optimizer_state"])
        scheduler.load_state_dict(checkpoint["scheduler_state"])
        epoch = checkpoint.get("epoch", 0)
        best_score = checkpoint.get('best_score', 0.0)
        print("Model restored from %s" % opts.ckpt)
        del checkpoint  # free memory 
    else:
        model = nn.DataParallel(model)
        model.to(device)
        
    if opts.save_val_results:
        score = validate(model)
        print(metrics.to_str(score)) 
        return
    
    if opts.mode == "student":
        checkpoint = torch.load(opts.teacher_ckpt, map_location=torch.device('cpu'))
        checkpoint['teacher_opts']['at_type'] = opts.at_type
        
        teacher_opts = utils.Bunch(checkpoint['teacher_opts'])
        
        teacher = model_map[teacher_opts.model](num_classes=opts.num_classes, output_stride=teacher_opts.output_stride, opts=teacher_opts)
        teacher.load_state_dict(checkpoint["model_state"])
        teacher = nn.DataParallel(teacher)
        teacher.to(device)
        for param in teacher.parameters():
            param.requires_grad = False
    
    # =====  Train  =====
    
    for epoch in tqdm(range(epoch, opts.total_epochs)):
        
        if opts.mode == "teacher":
            train_teacher(model, optimizer, criterion, scheduler)
        else:
            train_student(model, teacher, optimizer, criterion, scheduler)
        
        score = validate(model)
        print(metrics.to_str(score))
        utils.save_result(score, opts)
        
        if score['Mean IoU'] > best_score or (opts.max_epochs != opts.total_epochs and epoch+1 == opts.total_epochs):
            best_score = score['Mean IoU']
            utils.save_ckpt(opts.data_root, opts, model, optimizer, scheduler, best_score, epoch+1) 
Example #23
import os
import utils
import shutil

import numpy as np
from scipy.misc import imsave  # removed in SciPy 1.2; imageio.imwrite is the usual replacement
import skimage.measure

cfg = TestOptions().parse()
cfg['in_nc'] = 3
cfg['out_nc'] = 3
cfg['nz'] = 0
cfg['isTrain'] = False
np.random.seed(cfg['seed'])
print(cfg)
opt = utils.Bunch(cfg)

save_dir = os.path.join(opt.model_save, 'test')
opt.save_dir = save_dir
print(save_dir)
if os.path.exists(save_dir):
    print('ERROR: {} already exists'.format(save_dir))
    assert (False)
else:
    os.makedirs(save_dir)

    if opt.frac_save > 0.0:
        img_dir = os.path.join(save_dir, 'output')
        os.makedirs(img_dir)

    if opt.save_gt:
Example #24
import numpy as np
import utils

from common.defaults import *

c.N_e = 200
c.N_o = 10
c.N_i = int(np.floor(0.2 * c.N_e))
c.N = c.N_e + c.N_i
c.N_u_e = int(np.floor(0.05 * c.N_e))
c.N_u_i = 0

c.W_ee = utils.Bunch(lamb=0.05 * c.N_e,
                     avoid_self_connections=True,
                     eta_stdp=0.01,
                     f=1,
                     no_prune=False,
                     sp_prob=c.N_e * (c.N_e - 1) * (0.1 / (200 * 199)),
                     sp_initial=0.001,
                     upper_bound=1)
c.W_ei = utils.Bunch(lamb=1 * c.N_e,
                     avoid_self_connections=True,
                     eta_istdp=0.0,
                     h_ip=0.1)
c.W_ie = utils.Bunch(lamb=1.0 * c.N_i, avoid_self_connections=True)
c.W_oe = utils.Bunch(lamb=1.0 * c.N_o,
                     avoid_self_connections=True,
                     eta_stdp=0.01,
                     f=0,
                     no_prune=True,
                     upper_bound=1)
Example #25
from __future__ import division
import numpy as np
import utils
utils.backup(__file__)

# see this file for parameter descriptions
from common.defaults import *

c.N_e = 200
c.N_i = int(np.floor(0.2 * c.N_e))
c.N_u = 0  # no input is the point of this experiment
c.N = c.N_e + c.N_i

c.W_ee = utils.Bunch(use_sparse=True,
                     lamb=0.1 * c.N_e,
                     avoid_self_connections=True,
                     eta_stdp=0.004,
                     sp_prob=0.1,
                     sp_initial=0.001)

c.W_ei = utils.Bunch(use_sparse=False,
                     lamb=0.2 * c.N_e,
                     avoid_self_connections=False,
                     eta_istdp=0.001,
                     h_ip=0.1)

c.W_ie = utils.Bunch(use_sparse=False,
                     lamb=np.inf,
                     avoid_self_connections=False)

c.steps_plastic = 5000000
c.steps_noplastic_train = 0
Example #26
def model_verbose(phi_0, X, Y, Z, phase_0=0., oparams=default_params):
    """Return loads and loads of info."""

    t, r, u, b = model_basic(phi_0, X, Y, Z, phase_0, oparams)

    Ufunc = oparams['Ufunc']
    f = oparams['f']
    N = oparams['N']

    # Float change in buoyancy with velocity.
    Wf_pvals = oparams['Wf_pvals']

    # Wave parameters
    w_0 = oparams['w_0']
    k = 2 * np.pi / X
    l = 2 * np.pi / Y
    m = 2 * np.pi / Z
    om = gw.omega(N, k, m, l, f)

    u_0 = gw.U_0(phi_0, k, l, om, f)
    v_0 = gw.V_0(phi_0, k, l, om, f)
    w_0 = gw.W_0(phi_0, m, om, N)
    b_0 = gw.B_0(phi_0, m, om, N)

    if oparams['print']:
        print("N = {:1.2E} rad s-1.\n"
              "om = {:1.2E} rad s-1.\n"
              "u_0 = {:1.2E} m s-1.\n"
              "v_0 = {:1.2E} m s-1.\n"
              "w_0 = {:1.2E} m s-1.\n"
              "phi_0 = {:1.2E} m2 s-2.\n"
              "b_0 = {:1.2E} m s-2.\n"
              "X = {:1.0f} m.\n"
              "k = {:1.2E} rad m-1.\n"
              "Y = {:1.0f} m.\n"
              "l = {:1.2E} rad m-1.\n"
              "Z = {:1.0f} m.\n"
              "m = {:1.2E} rad m-1.\n"
              "".format(N, om, u_0, v_0, w_0, phi_0, b_0, X, k, Y, l, Z, m))

    output = utils.Bunch(Ufunc=Ufunc,
                         U=Ufunc(r[:, 2]),
                         f=f,
                         N=N,
                         Wf_pvals=Wf_pvals,
                         w_0=w_0,
                         k=k,
                         l=l,
                         m=m,
                         om=om,
                         phi_0=phi_0,
                         u_0=u_0,
                         v_0=v_0,
                         b_0=b_0,
                         t=t,
                         z_0=oparams['z_0'],
                         r=r,
                         u=u,
                         b=b,
                         oparams=oparams)

    return output
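For reference, gw.omega above evaluates the wave frequency from the wavenumbers. For internal gravity waves the standard dispersion relation is om^2 = (N^2 (k^2 + l^2) + f^2 m^2) / (k^2 + l^2 + m^2); a sketch of that relation, as an assumption about gw's conventions rather than its actual source:

import numpy as np

def omega(N, k, m, l=0.0, f=0.0):
    # N: buoyancy frequency, f: Coriolis frequency, (k, l, m): wavenumbers
    kh2 = k**2 + l**2  # horizontal wavenumber squared
    return np.sqrt((N**2 * kh2 + f**2 * m**2) / (kh2 + m**2))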
Example #27
# see this file for parameter descriptions
from common.defaults import *

c.N_e = 200
c.N_i = int(np.floor(0.2 * c.N_e))
c.N_u = 0
c.N = c.N_e + c.N_i

c.double_synapses = True
c.W_ee = utils.Bunch(
    use_sparse=True,
    lamb=0.1 * c.N_e,
    avoid_self_connections=True,
    eta_stdp=0.004,
    sp_prob=0.1,
    bias=1.0,  # no bias
    p_failure=0.2,  # not used if W_ee_fail_f exists
    eta_ss=1.0,
    upper_bound=1.0,
    sp_initial=0.001)

W_ee_fail_f = lambda x: np.exp(-6 * (x + 0.1))  # weight-dependent failure

c.W_ei = utils.Bunch(use_sparse=False,
                     lamb=0.2 * c.N_e,
                     avoid_self_connections=False,
                     eta_istdp=0.001,
                     h_ip=0.1)

c.W_ie = utils.Bunch(use_sparse=False,
Example #28
            # Increase number of excitatory counter
            k += 1

        # Increase number of transitions matrices counter
        j += 1


# Parameters are read from the second command line argument
param = import_module(utils.param_file())
experiment_module = import_module(param.c.experiment.module)
experiment_name = param.c.experiment.name
experiment = getattr(experiment_module, experiment_name)(param)

# Initialize parameters
c = param.c
c.state = utils.Bunch()

# Store states and remove c.states, otherwise the Bunch will have a problem
states = c.states
del c.states

# Set logfilepath
c.logfilepath = utils.logfilename('') + '/'

# Set values
num_iterations = range(10)
c.source.transitions_array = c.source.transitions
c.N_e_array = c.N_e
c.N_u_e_coverage_array = c.N_u_e_coverage

total = len(num_iterations) * len(c.source.transitions_array) * len(
Example #29
""" Language Task

This script contains the parameters for the Language experiment.
"""

import os

import numpy as np

import utils
par = utils.Bunch()
aux = utils.Bunch()

################################################################################
#                           SORN main parameters                               #
################################################################################
def get_par():
    """ Get main sorn parameters.

    For each sorn simulation, change these parameters manually.
    """
    par.N_e = 1600                                  # excitatory neurons
    par.N_u = int(par.N_e/60)                       # neurons in each input pool

    par.eta_stdp = 0.005                           # STDP learning rate
    par.prune_stdp = False                         # prune very small weights
    par.eta_ip = 0.001                             # IP learning rate
    par.h_ip = 0.1                                 # target firing rate

    par.input_gain = 1                             # input gain factor
Example #30
        yield X, y_list


if __name__ == '__main__':

    import utils

    images = np.zeros((200, 10, 10, 15))
    labels = [
        np.random.randint(0, 3, 200),  # random_integers is deprecated/removed in modern NumPy
        np.random.randint(5, 9, 200)   # randint's upper bound is exclusive
    ]
    exp_config = utils.Bunch(image_size=(10, 10, 15),
                             do_rotations=False,
                             do_scaleaug=False,
                             do_fliplr=False,
                             label_list=(0, 1, 2))

    for batch in iterate_minibatches(images, labels, 3, exp_config):
        x, [y1, y2] = batch
        # print(x)
        print(y1)
        print(y2)
        print('--')

    # for batch in iterate_minibatches_stratified(images, labels, [2,2,3,1], exp_config):
    #     x, y = batch
    #     # print(x)
    #     print(y)
    #     print('--')
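iterate_minibatches itself is not shown above; a minimal sketch consistent with how it is called here (hypothetical, and ignoring the augmentation flags carried in exp_config):

import numpy as np

def iterate_minibatches(images, labels, batch_size, exp_config):
    # yields (X, y_list) batches; labels is a list of per-task label arrays
    n = images.shape[0]
    idx = np.random.permutation(n)
    for start in range(0, n - batch_size + 1, batch_size):
        batch = idx[start:start + batch_size]
        yield images[batch], [y[batch] for y in labels]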