Exemplo n.º 1
0
class GridRateParams:  
  """Parameter presets for rate-based grid-cell simulations.

  Each attribute is a complete parameter map produced by merging a base
  map (input type, arena size, input sigma) with a plasticity-filter map
  (biphasic negative or positive) via ``map_merge``.
  """

  # gaugrid small arena  
  gau_grid_small_arena_biphasic_neg = map_merge(def_gau_grid_small_arena_sigma_large,filter_biphasic_neg)
  gau_grid_small_arena_biphasic_pos = map_merge(def_gau_grid_small_arena_sigma_small,filter_biphasic_pos)
  
  # gaugrid large arena
  gau_grid_large_arena_biphasic_neg = map_merge(def_gaugrid_large_arena_sigma_large,filter_biphasic_neg)
  gau_grid_large_arena_biphasic_pos = map_merge(def_gaugrid_large_arena_sigma_small,filter_biphasic_pos)
  
  # gaumix small arena (mixture-of-Gaussians inputs)
  gau_mix_small_arena_biphasic_neg = map_merge(def_gaumix_small_arena_sigma_large,filter_biphasic_neg)
  gau_mix_small_arena_biphasic_pos = map_merge(def_gaumix_small_arena_sigma_small,filter_biphasic_pos)
Exemplo n.º 2
0
# Batch script: run the rate-average model over many input seeds.
from grid_const import ModelType
import plotlib as pp
from grid_batch import GridBatch
from simlib import run_from_ipython
from grid_inputs import GridInputs

from simlib import ensureDir

# Output directory for generated figures.
figures_path = '../figures'
ensureDir(figures_path)

# When True, simulations are re-run even if cached results exist.
force = False

# Defaults: the gau_mix small-arena biphasic-negative preset with a
# coarser time step and score computation disabled.
batch_default_map = map_merge(GridRateParams.gau_mix_small_arena_biphasic_neg,
                              {
                                  'dt': 10.,
                                  'compute_scores': False,
                              })
p = get_params(batch_default_map)

# Sweep over 100 different input seeds.
batch_override_map = {'inputs_seed': np.arange(100)}

batch = GridBatch(ModelType.MODEL_RATE_AVG,
                  batch_default_map,
                  batch_override_map,
                  force=force)
do_run = batch.post_init()

# Run only outside IPython, and only when results are missing or forced.
if (force or do_run) and not run_from_ipython():
    batch.run()
    batch.post_run()
Exemplo n.º 3
0
# Number of neurons per grid side -- presumably n**2 neurons in total
# (the loop below uses p.n**2); TODO confirm.
n = 60

# Sweep over the number of Gaussians in each mixture input.
num_gau_mix_ran = np.arange(2, 24, 2)

# Accumulators, one entry per num_gau_mix value (filled in the loop below).
all_mean_pw_profs = []
all_mean_pw_profs_norm = []

all_teonum_scale_factors = []
all_num_eigs = []

all_teo_eigs = []

for num_gau_mix in num_gau_mix_ran:
    param_map = map_merge(GridRateParams.gau_mix_small_arena_biphasic_neg, {
        'num_gau_mix': num_gau_mix,
        'n': n,
        'inputs_seed': inputs_seed
    })
    p = get_params(param_map)

    inputs = GridInputs(
        param_map,
        keys_to_load=['in_freqs', 'in_mean_pw_profile', 'random_amps'])
    in_freqs = inputs.in_freqs

    corr = GridCorrSpace(param_map, keys_to_load=['eigs', 'CC_teo'])
    num_eigs = np.real(np.linalg.eigvals(corr.CC_teo -
                                         np.diag([p.a] * p.n**2)))
    all_num_eigs.append(num_eigs)

    eigs_freqs, teo_eigs = compute_teo_eigs(inputs,
Exemplo n.º 4
0
import os
from grid_const import ModelType
from grid_functions import map_merge
from grid_params import GridSpikeParams
from grid_batch import GridBatch
from simlib import ensureDir

# Output directory for generated figures.
figures_path = '../figures'
ensureDir(figures_path)

#%%

######################## MULTIPLE LEARNING RATES ###################################

# Learning rates to compare (used further below -- not visible in this chunk).
learn_rates = [2e-5, 3e-5, 5e-5, 1e-4]
batch_default_map = map_merge(
    GridSpikeParams.gau_grid_small_arena_biphasic_neg, {'a': 1.1})

# Snapshot time vector: num_snaps snapshot times spaced delta_snap steps apart.
num_sim_steps = int(batch_default_map['sim_time'] / batch_default_map['dt'])
delta_snap = int(num_sim_steps / batch_default_map['num_snaps'])
snap_times = np.arange(
    batch_default_map['num_snaps']) * delta_snap * batch_default_map['dt']

# Figure setup: two side-by-side panels.
pl.rc('font', size=13)
pp.set_tick_size(4)

fig = pl.figure(figsize=(8, 3), facecolor='w')
ax1 = pl.subplot(1, 2, 1)
ax2 = pl.subplot(1, 2, 2)
pl.subplots_adjust(bottom=0.2, wspace=0.4, left=0.1, right=0.97)
Exemplo n.º 5
0
    def post_init(self, force_gen_inputs=False):
        """Compute derived quantities and prepare the input description.

        Samples the temporal filter, then either sets up an analytical
        description for Gaussian-grid inputs (when use_theory is on and
        inputs are not tapered) or loads numerical inputs via GridInputs.
        Finally assembles self.paramMap.

        Parameters
        ----------
        force_gen_inputs : bool
            Forwarded to GridInputs as force_gen (numerical branch only).
        """

        # wall-clock bookkeeping
        self.startClock = clock()
        self.startTime = datetime.datetime.fromtimestamp(time.time())
        self.startTimeStr = self.startTime.strftime('%Y-%m-%d %H:%M:%S')

        # regular grid of sample positions covering the arena
        self.dx = self.L / self.nx
        X, Y = np.mgrid[-self.L / 2:self.L / 2:self.dx,
                        -self.L / 2:self.L / 2:self.dx]
        self.pos = np.array([np.ravel(X), np.ravel(Y)]).T

        # inverse filter time constants
        if self.filter_type == FilterType.FILTER_INPUT:
            self.b1 = 1. / self.tau1
            self.b2 = 1. / self.tau2
            self.b3 = 1. / self.tau3
        elif self.filter_type == FilterType.FILTER_OUTPUT:
            self.b_in = 1. / self.tau_in
            self.b_out = 1. / self.tau_out

        # total number of neurons
        self.N = self.n**2

        # number of samples for the filter
        self.tau_samps = 2**8 + 1
        self.tau_ran = np.arange(self.tau_samps) * self.dx

        # sample the filter kernel; spatial lags are converted to time
        # lags by dividing by the running speed
        if self.filter_type == FilterType.FILTER_INPUT:
            self.K_samp = K_t(self.b1, self.b2, self.b3, self.mu1, self.mu2,
                              self.mu3, self.tau_ran / self.speed) / self.speed
        elif self.filter_type == FilterType.FILTER_OUTPUT:
            self.K_samp = K_outeq_t(self.b_in, self.b_out, self.mu_out,
                                    self.tau_ran / self.speed) / self.speed

        tapered = False
        if hasattr(self, 'tap_inputs') and self.tap_inputs is True:
            tapered = True

        # for Gaussian grid inputs (with theory enabled and no tapering)
        # we just compute the input description analytically
        if self.inputs_type == InputType.INPUT_GAU_GRID \
           and self.use_theory is True and tapered is False:

            self.compute_analytically = True
            print 'Analytical estimation for Gaussian receptive fields (%s)' % (
                'periodic' if self.periodic_inputs else 'non periodic')

            # Gaussian centers on a regular n x n lattice over the arena
            ran, step = np.linspace(-self.L / 2.,
                                    self.L / 2.,
                                    self.n,
                                    endpoint=False,
                                    retstep=True)
            SSX, SSY = np.meshgrid(ran, ran)
            self.centers = np.array([np.ravel(SSX), np.ravel(SSY)]).T
            # amplitude chosen so each Gaussian integrates to
            # input_mean * L**2 over the arena
            self.amp = self.input_mean * self.L**2 / (2 * np.pi *
                                                      self.sigma**2)

        else:

            print 'Numerical estimation for general inputs'
            self.compute_analytically = False

            # load (or generate) inputs from disk
            self.inputs = GridInputs(self.__dict__, force_gen=force_gen_inputs)
            self.inputs_flat = self.inputs.inputs_flat
            self.inputs_path = self.inputs.dataPath

        # parameters map (common keys first, filter keys merged below)
        self.paramMap = {
            'id': self.id,
            'L': self.L,
            'n': self.n,
            'speed': self.speed,
            'nx': self.nx,
            'sigma': self.sigma,
            'input_mean': self.input_mean,
            'periodic_inputs': self.periodic_inputs,
            'inputs_type': self.inputs_type
        }

        if self.filter_type == FilterType.FILTER_INPUT:
            self.paramMap = map_merge(
                self.paramMap, {
                    'tau1': self.tau1,
                    'tau2': self.tau2,
                    'tau3': self.tau3,
                    'mu1': self.mu1,
                    'mu2': self.mu2,
                    'mu3': self.mu3,
                    'b1': self.b1,
                    'b2': self.b2,
                    'b3': self.b3
                })
        elif self.filter_type == FilterType.FILTER_OUTPUT:
            self.paramMap = map_merge(
                self.paramMap, {
                    'tau_in': self.tau_in,
                    'tau_out': self.tau_out,
                    'mu_out': self.mu_out,
                    'b_in': self.b_in,
                    'b_out': self.b_out
                })
Exemplo n.º 6
0
class GridSpikeParams:
    """Parameter presets for spiking grid-cell simulations."""

    # Large-sigma spiking defaults merged with the biphasic-negative filter.
    gau_grid_small_arena_biphasic_neg = map_merge(
        def_spikes_sigma_large, filter_biphasic_neg)

    # Small-sigma spiking defaults merged with the biphasic-positive filter.
    gau_grid_small_arena_biphasic_pos = map_merge(
        def_spikes_sigma_small, filter_biphasic_pos)
Exemplo n.º 7
0
import numpy as np
import pylab as pl
import plotlib as pp
import os
from grid_functions import load_data, compute_teo_eigs, compute_gaussian_teo_corr, map_merge
from grid_spikes import GridSpikes
from grid_params import GridSpikeParams
import gridlib as gl
from simlib import ensureDir

# Output directory for generated figures.
figures_path = '../figures'
ensureDir(figures_path)

# Spiking-model parameters: small-arena biphasic-negative preset with
# fixed seed and constant running speed.
par_map = map_merge(GridSpikeParams.gau_grid_small_arena_biphasic_neg, {
    'a': 1.1,
    'seed': 30,
    'variable_speed': False,
})

sim = GridSpikes(par_map)
sim.post_init()

# generate results of the spiking model if not present
if not os.path.exists(sim.dataPath):
    sim.run()
    sim.post_run()

# load cached results (params p and results r) for this parameter hash
p, r = load_data(
    os.path.join(GridSpikes.results_path, '%s_data.npz' % sim.hash_id))
Exemplo n.º 8
0
  'up_bound':1.,  
  'J0_std':1e-4,
  'r0':10.,
  
  # STDP
  'Aplus' : 10.,
  'Aminus' : 10.,
  'tau_plus' : 0.05,
  'tau_minus' : 0.05,
  
  'compute_scores':True,
}   

# Spiking defaults overridden for large-sigma inputs.
def_spikes_sigma_large=map_merge(def_spikes,
                                {
                                  'sigma':0.0625,
                                  'input_mean':0.4,
                                  'a':1.0
                                })

# Spiking defaults overridden for small-sigma inputs (larger 'a').
def_spikes_sigma_small=map_merge(def_spikes,
                                {
                                  'sigma':0.04,
                                  'input_mean':0.2,
                                  'a':5.0
                                })         
      

#### RATE      

def_rate ={
Exemplo n.º 9
0
    def post_init(self, do_print=False):
        """Set up all derived simulation state before running.

        Computes derived constants, loads cached inputs and walk data,
        draws the initial weights, and assembles the parameter maps.

        Parameters
        ----------
        do_print : bool
            When True, print a parameter summary.

        Returns
        -------
        bool
            False when results for this parameter hash already exist and
            no regeneration is forced (nothing is set up); True otherwise.
        """

        if not self.force and not self.force_gen_inputs and os.path.exists(
                self.dataPath):
            if do_print:
                print 'Data hash %s already present' % self.hash_id
            return False

        # seed the RNG so weight initialization is reproducible
        seed(self.seed)

        # input tuning width measured in units of the neuron grid spacing
        self.sigmas_per_dx = self.sigma * self.n / self.L

        self.num_sim_steps = int(self.sim_time / self.dt)
        self.position_dt_scale = int(self.position_dt / self.dt)

        # wall-clock bookkeeping for logging
        self.startClock = clock()
        self.startTime = datetime.datetime.fromtimestamp(time.time())
        self.startTimeStr = self.startTime.strftime('%Y-%m-%d %H:%M:%S')

        if self.filter_type == FilterType.FILTER_INPUT:
            # inverse filter time constants
            self.b1 = 1 / self.tau1
            self.b2 = 1 / self.tau2
            self.b3 = 1 / self.tau3

            # integral of the filter
            self.K_int = self.mu1 + self.mu2 + self.mu3

        else:
            self.b_in = 1. / self.tau_in
            self.b_out = 1. / self.tau_out

            # filter integral from the zero-frequency Fourier component
            self.K_int = np.real(
                K_outeq_ft_k(self.b_in, self.b_out, self.mu_out, 0.))

        # total number of neurons and density
        self.N = self.n**2
        self.rho = self.N / self.L**2

        # mean input and mean correlation
        self.C_av = self.input_mean**2 * self.K_int

        # compute normalization time constant
        self.tau_av = 1. / (self.eta *
                            (self.a - self.N *
                             (self.C_av - self.input_mean * self.gamma)))

        # compute B, alpha, beta from a and J_av_target
        self.B = self.J_av_target / (self.eta * self.tau_av)
        self.alpha = self.a / self.input_mean
        self.beta = self.B / self.input_mean - self.r0

        self.J_av_star = self.B * self.eta * self.tau_av

        # derived quantities
        self.k1 = self.input_mean * (self.r0 + self.beta)
        self.k2 = self.input_mean * self.gamma
        self.k3 = self.input_mean * self.alpha

        # load input data
        self.inputs = GridInputs(self.initParamMap,
                                 do_print=do_print,
                                 force_gen=self.force_gen_inputs)
        self.inputs_path = self.inputs.dataPath
        self.inputs_flat = self.inputs.inputs_flat

        # amplitude is only defined for Gaussian-grid inputs
        if self.inputs_type == InputType.INPUT_GAU_GRID:
            self.amp = self.inputs.amp
        else:
            self.amp = np.NaN

        # compute theoretical eigenvalues of the learning dynamics
        self.freqs, self.raw_eigs = compute_teo_eigs(self.inputs,
                                                     self.__dict__,
                                                     teo_input_pw=False)

        # correct the zero-frequency eigenvalue for the gamma term
        self.raw_eigs[
            0] = self.raw_eigs[0] - self.N * self.gamma * self.input_mean
        self.max_eig_idx = self.raw_eigs.argmax()
        self.max_freq = self.freqs[self.max_eig_idx]

        # eigenvalue gaps towards the neighboring lower/higher frequency
        self.eigs_lf_diff = (self.raw_eigs[self.max_eig_idx] -
                             self.raw_eigs[self.max_eig_idx - 1])
        self.eigs_hf_diff = (self.raw_eigs[self.max_eig_idx] -
                             self.raw_eigs[self.max_eig_idx + 1])

        # shift all eigenvalues by the normalization strength a
        self.eigs = self.raw_eigs - self.a

        self.max_eig = self.eigs.max()
        self.eig0 = self.eigs[0]

        # time constant of the fastest-growing mode
        self.tau_str = 1. / (self.eta * self.max_eig)

        # load walk data
        self.walk = GridWalk(self.__dict__, do_print=do_print)
        self.walk_path = self.walk.dataPath
        self.pos = self.walk.pos
        self.pidx_vect = self.walk.pidx_vect
        self.nx = self.walk.nx
        self.walk_steps = self.walk.walk_steps

        # output rate normalization
        self.r_out_star = (self.input_mean * self.K_int -
                           self.gamma) * self.J_av_star * self.N + self.r0

        # compute boundary input
        if self.add_boundary_input is True:
            self.boundary_input_flat = self.r_out_star - self.get_estimated_output(
                np.ones(self.N) * self.J_av_star, 0)
        else:
            self.boundary_input_flat = np.zeros(self.nx**2)

        # initial weights
        self.J0_mean = self.J_av_star * self.J0_mean_factor

        # exponential distribution
        if self.J0_dist == DistType.J0_EXP:
            self.J0 = exponential(self.J0_mean, self.N)

        # normal distribution (rescaled so the sample mean is exactly J0_mean)
        elif self.J0_dist == DistType.J0_NORM:
            self.J0 = np.ones(self.N) * self.J0_mean + randn(
                self.N) * self.J0_std
            self.J0 = self.J0 / np.mean(self.J0) * self.J0_mean

        # exponential distribution with half of the weights set to zero
        # NOTE(review): self.N / 2 relies on Python 2 integer division --
        # use N // 2 if this is ever ported to Python 3.
        elif self.J0_dist == DistType.J0_HALF_EXP:
            self.J0 = np.zeros(self.N)
            non_zero_idxs = permutation(self.N)[0:self.N / 2]
            self.J0[non_zero_idxs] = exponential(self.J_av_star * 2,
                                                 self.N / 2)
            self.J0 = self.J0 / self.J0.mean() * self.J_av_star

        # clip initial weights into [0, up_bound] in place
        np.clip(self.J0, 0, self.up_bound, out=self.J0)

        # snapshot interval (in simulation steps)
        self.delta_snap = int(
            np.floor(float(self.num_sim_steps) / (self.num_snaps)))
        assert (self.delta_snap > 0)

        # human-readable summary of derived parameters
        self.derived_param_str = 'amp=%.1f max_eig=%.2f max_freq=%.1f tau_av=%.1e  tau_str=%1.e\
    eigs_lf_diff=%.3f eigs_hf_diff=%.3f'    \
        %(self.amp,self.max_eig,self.max_freq,self.tau_av,self.tau_str,self.eigs_lf_diff,self.eigs_hf_diff)

        self.summary_str = """
    
HASH: %s

KEY PARAMS: %s

INPUT PARAMS: %s

WALK PARAMS: %s

DERIVED PARAMS: %s
      
      """ % (self.hash_id, self.key_params_str, self.input_params_str,
             self.walk_params_str, self.derived_param_str)

        # derived parameters map
        self.derivedParamMap = {
            'hash_id': self.hash_id,
            'N': self.N,
            'amp': self.amp,
            'rho': self.rho,
            'sigmas_per_dx': self.sigmas_per_dx,
            'num_sim_steps': self.num_sim_steps,
            'delta_snap': self.delta_snap,
            'k1': self.k1,
            'k2': self.k2,
            'k3': self.k3,
            'K_int': self.K_int,
            'B': self.B,
            'alpha': self.alpha,
            'beta': self.beta,
            'C_av': self.C_av,
            'J_av_star': self.J_av_star,
            'tau_av': self.tau_av,
            'r_out_star': self.r_out_star,
            'J0_mean': self.J0_mean,
            'max_eig': self.max_eig,
            'max_freq': self.max_freq,
            'tau_str': self.tau_str,
            'eig0': self.eig0,
            'eigs_lf_diff': self.eigs_lf_diff,
            'eigs_hf_diff': self.eigs_hf_diff,
            'summary_str': self.summary_str
        }

        # add filter-specific derived parameters
        if self.filter_type == FilterType.FILTER_INPUT:
            self.derivedParamMap = map_merge(self.derivedParamMap, {
                'b1': self.b1,
                'b2': self.b2,
                'b3': self.b3
            })
        else:
            self.derivedParamMap = map_merge(self.derivedParamMap, {
                'b_in': self.b_in,
                'b_out': self.b_out
            })

        self.paramMap = map_merge(self.initParamMap, self.derivedParamMap,
                                  {'filter_type': self.filter_type})

        if do_print is True:
            print self.header_str
            print params_to_str(self.paramMap, to_exclude=['summary_str'])
            print
            print self.summary_str

        return True
Exemplo n.º 10
0
    def __init__(self, paramMap, scale_a_by_density=False, force=False):
        """Build a detailed rate-model simulation from a parameter map.

        Parameters
        ----------
        paramMap : dict
            Parameter name -> value; every entry is set as an attribute
            on self (declared attributes below act as documentation of
            the expected keys).
        scale_a_by_density : bool
            Accepted but not used in this method -- TODO confirm whether
            callers/subclasses rely on it.
        force : bool
            When True, results are regenerated even if present on disk.
        """

        self.header_str = """
    =================================================================
                     GRID DETAILED RATE SIMULATION                   
    ================================================================="""

        self.force_gen_inputs = False
        self.force_gen_corr = False
        self.force = force

        # general parameters (all defaults overwritten from paramMap below)
        self.dt = None
        self.sim_time = None
        self.eta = None
        self.seed = None
        self.num_snaps = None

        # input parameters
        self.n = None
        self.input_mean = None
        self.sigma = None
        self.inputs_type = None
        self.periodic_inputs = None
        self.num_gau_mix = None
        self.centers_std = None
        self.inputs_seed = None
        self.tap_inputs = None
        self.norm_bound_add = None
        self.norm_bound_mul = None

        # walk parameters
        self.L = None
        self.nx = None
        self.periodic_walk = None
        self.speed = None
        self.bounce = None
        self.theta_sigma = None
        self.position_dt = None
        self.walk_time = None
        self.walk_seed = None
        self.variable_speed = None

        self.correct_border_effects = None

        # filter parameters

        self.filter_type = FilterType.FILTER_INPUT

        # declare only the attributes relevant to the selected filter type
        if 'filter_type' not in paramMap.keys(
        ) or paramMap['filter_type'] == FilterType.FILTER_INPUT:
            self.tau1 = None
            self.tau2 = None
            self.tau3 = None
            self.mu1 = None
            self.mu2 = None
            self.mu3 = None
        else:
            self.tau_in = None
            self.tau_out = None
            self.mu_out = None

        # plasticiy params
        self.a = None
        self.J_av_target = None
        self.gamma = None
        self.up_bound = None
        self.J0_std = None
        self.r0 = None
        self.J0_dist = None
        self.J0_mean_factor = None
        self.base_n = None  # base number of neurons to scale a accordingly

        # flags
        self.add_boundary_input = None
        self.clip_weights = None
        self.clip_out_rate = None
        self.compute_scores = None
        self.scale_a_by_n = None

        # set parameter values from input map
        for param, value in paramMap.items():
            setattr(self, param, value)

        # rescale normalization strength with network size if requested
        if self.scale_a_by_n is True:
            self.a = self.a * self.n**2 / self.base_n**2

        # parameters we never change
        self.plastic = True
        self.transient_time = 2.
        self.arena_shape = 'square'
        self.virtual_bound_ratio = 1.0
        self.bounce_theta_sigma = 0.0
        self.debug_vars = False
        self.sigmoid_out_rate = False
        self.r_out_max = 5.
        # NOTE(review): this overrides any 'position_dt' supplied in paramMap.
        self.position_dt = self.L / self.nx

        if self.periodic_inputs is True:
            assert (self.add_boundary_input == False)

        # NOTE(review): mu1/mu2/mu3 are only declared for FILTER_INPUT above;
        # with FILTER_OUTPUT this check works only if paramMap provides them
        # -- confirm.
        if (self.mu1 + self.mu2 + self.mu3) < 0.:
            assert (self.gamma == 0.0)
        else:
            assert (self.gamma > 0.0)

        # init parameters map
        self.initParamMap = {
            'arena_shape': self.arena_shape,
            'L': self.L,
            'n': self.n,
            'nx': self.nx,
            'sigma': self.sigma,
            'input_mean': self.input_mean,
            'speed': self.speed,
            'theta_sigma': self.theta_sigma,
            'seed': self.seed,
            'sim_time': self.sim_time,
            'dt': self.dt,
            'position_dt': self.position_dt,
            'num_snaps': self.num_snaps,
            'eta': self.eta,
            'a': self.a,
            'gamma': self.gamma,
            'J_av_target': self.J_av_target,
            'J0_std': self.J0_std,
            'up_bound': self.up_bound,
            'r0': self.r0,
            'J0_mean_factor': self.J0_mean_factor,
            'clip_weights': self.clip_weights,
            'periodic_inputs': self.periodic_inputs,
            'outside_ratio': self.outside_ratio,
            'clip_out_rate': self.clip_out_rate,
            'inputs_type': self.inputs_type,
            'num_gau_mix': self.num_gau_mix,
            'inputs_seed': self.inputs_seed,
            'walk_seed': self.walk_seed,
            'walk_time': self.walk_time,
            'periodic_walk': self.periodic_walk,
            'bounce': self.bounce,
            'bounce_theta_sigma': self.bounce_theta_sigma,
            'virtual_bound_ratio': self.virtual_bound_ratio,
            'compute_scores': self.compute_scores,
            'add_boundary_input': self.add_boundary_input,
            'J0_dist': self.J0_dist,
            'sigmoid_out_rate': self.sigmoid_out_rate,
            'r_out_max': self.r_out_max,
            'centers_std': self.centers_std
        }
        # add filter-specific parameters
        if self.filter_type == FilterType.FILTER_INPUT:
            self.initParamMap = map_merge(
                self.initParamMap, {
                    'tau1': self.tau1,
                    'tau2': self.tau2,
                    'tau3': self.tau3,
                    'mu1': self.mu1,
                    'mu2': self.mu2,
                    'mu3': self.mu3
                })
        else:
            self.initParamMap = map_merge(
                self.initParamMap, {
                    'tau_in': self.tau_in,
                    'tau_out': self.tau_out,
                    'mu_out': self.mu_out
                })

        # optional parameter groups, included only when enabled
        if self.variable_speed is True:
            self.initParamMap = map_merge(
                self.initParamMap, {
                    'variable_speed': self.variable_speed,
                    'speed_theta': self.speed_theta,
                    'speed_sigma': self.speed_sigma
                })

        if self.correct_border_effects is True:
            self.initParamMap = map_merge(
                self.initParamMap, {
                    'correct_border_effects': self.correct_border_effects,
                    'border_edge_type': self.border_edge_type,
                    'border_size_perc': self.border_size_perc
                })

        if self.tap_inputs is True:
            self.initParamMap = map_merge(
                self.initParamMap, {
                    'tap_inputs': self.tap_inputs,
                    'tap_border_type': self.tap_border_type,
                    'tap_border_size': self.tap_border_size
                })

        if self.norm_bound_add is True:
            self.initParamMap = map_merge(
                self.initParamMap, {'norm_bound_add': self.norm_bound_add})

        if self.norm_bound_mul is True:
            self.initParamMap = map_merge(
                self.initParamMap, {'norm_bound_mul': self.norm_bound_mul})

        # human-readable parameter strings (just for printing)
        key_params=GridRate.key_params_filter_input if self.filter_type == FilterType.FILTER_INPUT \
                                                        else GridRate.key_params_filter_output

        self.key_params_str = params_to_str(self.initParamMap,
                                            keyParams=key_params,
                                            compact=True)
        self.input_params_str = params_to_str(
            self.initParamMap,
            keyParams=GridInputs.get_key_params(self.initParamMap),
            compact=True)
        self.walk_params_str = params_to_str(
            self.initParamMap,
            keyParams=GridWalk.get_key_params(paramMap),
            compact=True)

        # generate id and paths
        self.str_id = gen_string_id(self.initParamMap)
        self.hash_id = gen_hash_id(self.str_id)

        self.paramsPath = os.path.join(GridRate.results_path,
                                       self.hash_id + '_log.txt')
        self.dataPath = os.path.join(GridRate.results_path,
                                     self.hash_id + '_data.npz')
        self.figurePath = os.path.join(GridRate.results_path,
                                       self.hash_id + '_fig.png')

        # skip the run when results for this hash are already cached
        if os.path.exists(self.dataPath):
            print 'Data hash %s already present' % self.hash_id
            self.do_run = False
        else:
            self.do_run = True
Exemplo n.º 11
0
            }
            toSaveMap = dict(toSaveMap.items() + debugVars.items())

        if self.correct_border_effects is True:
            toSaveMap = dict(toSaveMap.items() +
                             {'border_envelope': self.border_envelope}.items())

        # save
        ensureParentDir(self.dataPath)
        np.savez(self.dataPath, **toSaveMap)

        if do_print:
            print 'Result saved in: %s\n' % self.dataPath


if __name__ == '__main__':
    # Example run: rate model with output-rate clipping and a bounded walk.
    par_map = map_merge(GridRateParams.gau_grid_small_arena_biphasic_neg, {
        'r0': 10.,
        'clip_out_rate': True,
        'periodic_walk': False
    })

    sim = GridRate(par_map)

    sim.post_init()
    # Run headless; inside IPython just plot the eigenvalue spectrum.
    if sim.do_run and not run_from_ipython():
        sim.run()
        sim.post_run()
    elif run_from_ipython():
        sim.plot_eigs()
Exemplo n.º 12
0
# Per-simulation input strengths and seeds, paired element-wise with
# sigmas/tau2s (defined outside this chunk -- TODO confirm).
input_means=np.array([ 0.21 ,  0.085,  0.30 ,  0.10 ])
seeds=[0,1,0,0]

final_weight_maps=[]

# create a worker pool sized for this host
host=socket.gethostname()  
num_procs=procs_by_host[host]
pool=Pool(processes=num_procs)
sims=[]
hashes=[]
paramMaps=[]  
  
# large-arena rate-average defaults with a coarse time step and no scoring
rate_avg_params=map_merge(
    GridRateParams.gau_grid_large_arena_biphasic_neg,
    {
    'dt':50.,
    'compute_scores':False,
    })
    
      
    
# one parameter map per (sigma, tau2, input_mean, seed) combination
for sigma,tau2,input_mean,seed in zip(sigmas,tau2s,input_means,seeds):
  
  paramMap=map_merge(rate_avg_params,
    {
    'sigma':sigma,
    'input_mean':input_mean,
    'tau2':tau2,
    'seed':seed,
    })
    
Exemplo n.º 13
0
    def post_init(self):
        """Prepare the batch: build one simulation per parameter combination.

        Creates the worker pool, expands batch_override_map into the
        cartesian product of its value lists, instantiates a simulation
        for each combination, and derives the batch hash and data paths.

        Returns
        -------
        bool
            True when the merged batch data file is missing (or force is
            set) and the batch should be run; False when it exists.
        """

        ##############################################################################
        ###### CREATE POOL
        ##############################################################################

        self.batch_data_folder = batch_data_folder_map[self.model_type]
        ensureDir(self.batch_data_folder)

        # create pool, sized per host with a fallback of 7 workers
        self.host = socket.gethostname()
        if self.host in procs_by_host.keys():
            self.num_procs = procs_by_host[self.host]
        else:
            self.num_procs = 7

        self.pool = Pool(processes=self.num_procs)
        self.sims = []
        self.hashes = []

        # sorted cartesian product of all override-value lists
        self.all_par_values = sorted(
            itertools.product(*self.batch_override_map.values()))
        # compact "key (min-max)" description of the sweep
        self.batch_override_str = ' '.join([
            '%s (%s-%s)' %
            (key, format_val(min(values)), format_val(max(values)))
            for key, values in self.batch_override_map.items()
        ])

        # loop over all different parameter values
        for par_values in self.all_par_values:

            override_param_map = {
                k: v
                for (k, v) in zip(self.batch_override_map.keys(), par_values)
            }

            parMap = map_merge(self.batch_default_map, override_param_map)

            # pick the simulation class for this model type
            if self.model_type == ModelType.MODEL_RATE:
                self.sim_class = GridRate
            elif self.model_type == ModelType.MODEL_RATE_AVG:
                self.sim_class = GridRateAvg
            elif self.model_type == ModelType.MODEL_SPIKING:
                self.sim_class = GridSpikes

            sim = self.sim_class(parMap)
            #print sim.hash_id+' Run: %s'%sim.do_run

            # force regeneration of inputs/correlations and a full re-run
            if self.force:
                sim.force_gen_inputs = True
                sim.force_gen_corr = True
                sim.do_run = True

            if sim.do_run is True:
                self.sims.append(sim)

            self.hashes.append(sim.hash_id)

        # generate batch hash from the concatenated per-sim hashes
        self.batch_hash = gen_hash_id('_'.join(self.hashes))
        self.batch_data_path = os.path.join(self.batch_data_folder,
                                            '%s_data.npz' % self.batch_hash)
        self.batch_params_path = os.path.join(
            self.batch_data_folder, '%s_params.txt' % self.batch_hash)



        self.batch_summary_str=\
        "\n\nBATCH HASH: %s\n\nBATCH PARAMS = %s\n\n"%\
        (self.batch_hash,
         self.batch_override_str
         )

        print self.batch_summary_str

        # metadata saved alongside the merged results
        self.toSaveMap = {
            'hashes': self.hashes,
            'batch_override_map': self.batch_override_map,
            'batch_default_map': self.batch_default_map
        }

        if os.path.exists(self.batch_data_path) and not self.force:
            return False
        else:
            print '\n\n*** BATCH DATA NOT PRESENT!! ***\n\n'
            print self.batch_data_path
            print '%d/%d simulations to be run' % (len(
                self.sims), len(self.all_par_values))
            return True
Exemplo n.º 14
0
    def post_run(self):
        """Merge per-simulation result files into one batch data file.

        For each simulation hash, loads its .npz results, collects
        weight/rate maps and scores keyed by parameter-value tuples,
        computes Fourier profiles of the weight evolution, and saves
        everything (plus the metadata in self.toSaveMap) to
        self.batch_data_path.
        """

        #############################################################################
        ##### MERGE DATA
        #############################################################################

        print
        print 'SIMULATIONS COMPLETED'
        print
        print 'Merging data...'
        sys.stdout.flush()

        # per-parameter-combination accumulators, keyed by par_values tuples
        initial_weights_map = {}

        final_weights_map = {}
        final_weight_score_map = {}
        final_weight_angle_map = {}
        final_weight_spacing_map = {}
        final_weight_phase_map = {}
        final_weight_cx_map = {}
        evo_weight_scores_map = {}

        final_rates_map = {}
        final_rate_score_map = {}
        final_rate_angle_map = {}
        final_rate_spacing_map = {}
        final_rate_phase_map = {}
        final_rate_cx_map = {}

        evo_weight_profiles_map = {}

        start_clock = time.time()

        # load/compute data to show for each combination of parameter values
        idx = -1
        for chash, par_values in zip(self.hashes, self.all_par_values):
            idx += 1
            print_progress(idx,
                           len(self.all_par_values),
                           start_clock=start_clock)
            sys.stdout.flush()

            dataPath = os.path.join(self.sim_class.results_path,
                                    '%s_data.npz' % chash)

            # NOTE(review): on failure this only prints and then continues,
            # so 'data' below is unbound (first iteration) or stale -- confirm
            # whether a skip/continue is intended here.
            try:
                data = np.load(dataPath, mmap_mode='r')
            except Exception:
                print 'This file is corrupted: %s' % dataPath

            initial_weights_map[par_values] = data['J0']
            final_weights_map[par_values] = data['final_weights']
            final_weight_score_map[par_values] = data['final_weight_score']
            final_weight_angle_map[par_values] = data['final_weight_angle']
            final_weight_spacing_map[par_values] = data['final_weight_spacing']
            final_weight_phase_map[par_values] = data['final_weight_phase']
            final_weight_cx_map[par_values] = data['final_weight_cx']
            # scores are optional (only saved when compute_scores was on)
            if 'scores' in data.keys():
                evo_weight_scores_map[par_values] = data['scores']

            final_rates_map[par_values] = data['final_rates']
            final_rate_score_map[par_values] = data['final_rate_score']
            final_rate_angle_map[par_values] = data['final_rate_angle']
            final_rate_spacing_map[par_values] = data['final_rate_spacing']
            final_rate_phase_map[par_values] = data['final_rate_phase']
            final_rate_cx_map[par_values] = data['final_rate_cx']

            # fourier profiles of the weights over time
            import gridlib as gl
            L = data['paramMap'][()]['L']
            n = data['paramMap'][()]['n']
            num_snaps = self.batch_default_map['num_snaps']
            J_mat = data['J_vect'].reshape(n, n, num_snaps)
            weights_dft, weights_freqs, weigths_allfreqs = gl.dft2d_num(
                J_mat, L, n)
            weights_dft_profiles = gl.dft2d_profiles(weights_dft)
            evo_weight_profiles_map[par_values] = weights_dft_profiles

        # NOTE(review): 'weights_freqs' comes from the last loop iteration.
        mergedDataMap = {
            'initial_weights_map': initial_weights_map,
            'final_weights_map': final_weights_map,
            'final_weight_score_map': final_weight_score_map,
            'final_weight_angle_map': final_weight_angle_map,
            'final_weight_spacing_map': final_weight_spacing_map,
            'final_weight_phase_map': final_weight_phase_map,
            'final_weight_cx_map': final_weight_cx_map,
            'evo_weight_scores_map': evo_weight_scores_map,
            'final_rates_map': final_rates_map,
            'final_rate_score_map': final_rate_score_map,
            'final_rate_angle_map': final_rate_angle_map,
            'final_rate_spacing_map': final_rate_spacing_map,
            'final_rate_phase_map': final_rate_phase_map,
            'final_rate_cx_map': final_rate_cx_map,
            'evo_weight_profiles_map': evo_weight_profiles_map,
            'weights_freqs': weights_freqs
        }

        self.toSaveMap = map_merge(self.toSaveMap, mergedDataMap)

        # save merged data and write the batch log
        ensureParentDir(self.batch_data_path)
        logSim(self.batch_hash,
               self.batch_override_str,
               self.startTimeStr,
               self.endTimeStr,
               self.elapsedTime,
               self.batch_default_map,
               self.batch_params_path,
               doPrint=False)

        print
        print 'BATCH HASH: %s' % self.batch_hash
        np.savez(self.batch_data_path, **self.toSaveMap)
        print
        print 'Batch data saved in: %s\n' % self.batch_data_path
        print
Exemplo n.º 15
0
# Number of random seeds per parameter configuration.
num_seeds = 200

# Shared plasticity parameters.
# NOTE(review): J_av_star is not used in this snippet -- confirm.
a = 4.
J_av_star = 0.05
# Two (sigma, tau2, input_mean) configurations to batch over.
sigmas = [0.0625, 0.0625]
tau2s = [0.16, 0.35]
input_means = [0.3, 0.1]
batches = []
for sigma, tau2, input_mean in zip(sigmas, tau2s, input_means):

    param_map = map_merge(
        GridRateParams.gau_grid_large_arena_biphasic_neg, {
            'dt': 50.,
            'r0': 10.,
            'compute_scores': False,
            'sigma': sigma,
            'tau2': tau2,
            'input_mean': input_mean,
            'a': a
        })

    print 'sigma: %.4f tau2: %.3f r_av=%.3f  ' % (sigma, tau2, input_mean)

    # one batch per configuration, sweeping the simulation seed
    batch = GridBatch(ModelType.MODEL_RATE_AVG, param_map,
                      {'seed': np.arange(num_seeds)})

    do_run = batch.post_init()
    batches.append(batch)

    # run only when data is missing and we are not inside IPython
    if do_run and not run_from_ipython():
        batch.run()