Example #1
    def __init__(self, model):
        """ Initialize the filtered stim model
        """
        self.model = model
        N = model['N']
        prms = model['network']['weight']

        self.prior = create_prior(prms['prior'])

        # Implement refractory period by having negative mean on self loops
        if 'refractory_prior' in prms:
            #self.mu[np.diag_indices(N)] = prms['mu_refractory']
            self.refractory_prior = create_prior(prms['refractory_prior'])

            # Get the upper and lower diagonal indices so that we can evaluate
            # the log prob of the refractory weights separately from the
            # log prob of the regular weights
            self.diags = np.ravel_multi_index(np.diag_indices(N), (N, N))
            lower = np.ravel_multi_index(np.tril_indices(N, k=-1), (N, N))
            upper = np.ravel_multi_index(np.triu_indices(N, k=1), (N, N))
            self.nondiags = np.concatenate((lower, upper))

        # Define weight matrix
        self.W_flat = T.dvector(name='W')
        self.W = T.reshape(self.W_flat, (N, N))

        if hasattr(self, 'refractory_prior'):
            self.log_p = self.prior.log_p(self.W.take(self.nondiags)) + \
                         self.refractory_prior.log_p(self.W.take(self.diags))
        else:
            self.log_p = self.prior.log_p(self.W)
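
The index bookkeeping above is easier to see in plain NumPy. A minimal sketch (with a small hypothetical N; W here is a concrete array standing in for the symbolic weight matrix) of how the diagonal and off-diagonal flat indices split the self-loop weights from the cross-coupling weights:

import numpy as np

N = 3
W = np.arange(N * N, dtype=float).reshape(N, N)

# Flat indices into the raveled (N, N) matrix
diags = np.ravel_multi_index(np.diag_indices(N), (N, N))          # [0, 4, 8]
lower = np.ravel_multi_index(np.tril_indices(N, k=-1), (N, N))
upper = np.ravel_multi_index(np.triu_indices(N, k=1), (N, N))
nondiags = np.concatenate((lower, upper))

print(W.take(diags))     # self-loop weights, scored under the refractory prior
print(W.take(nondiags))  # all other weights, scored under the regular prior
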
Example #2
    def __init__(self, model, loc_model):
        self.model = model
        self.N = model['N']

        self.prms = loc_model
        self.name = self.prms['name']
        self.N_dims = self.prms['N_dims']

        # Create a location prior
        self.location_prior = create_prior(self.prms['location_prior'])

        # Make sure the sample is of the correct type
        from pyglm.components.priors import Categorical, JointCategorical
        if isinstance(self.location_prior, (Categorical, JointCategorical)):
            self.Lflat = T.lvector('L')
            self.dtype = int
        else:
            self.Lflat = T.dvector('L')
            self.dtype = float

        # The latent distance model has an N x N_dims matrix of locations, Lmatrix
        self.Lmatrix = T.reshape(self.Lflat, (self.N, self.N_dims))
        self.Lmatrix.name = 'L'

        # Define log probability
        self.log_p = self.location_prior.log_p(self.Lmatrix)
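
For reference, a plain-NumPy sketch of the reshape this constructor performs (the sizes are hypothetical): the flat location vector is laid out row-major, so row n of the resulting matrix is the N_dims-dimensional location of neuron n.

import numpy as np

N, N_dims = 4, 2
L_flat = np.arange(N * N_dims, dtype=float)   # stand-in for the symbolic vector L
L = L_flat.reshape(N, N_dims)                 # row n = location of neuron n
print(L[1])                                   # -> [2. 3.]
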
Example #3
    def __init__(self, model):
        self.model = model
        self.imp_model = model['impulse']
        self.prior = create_prior(self.imp_model['prior'])

        # Number of presynaptic neurons
        self.N = model['N']

        # Get parameters of the prior
        # self.mu = self.prms['mu']
        # self.sigma = self.prms['sigma']

        # Create a basis for the impulse responses
        self.basis = create_basis(self.imp_model['basis'])
        (_, self.B) = self.basis.shape
        # The basis is interpolated once the data is specified
        self.initialize_basis()

        # Initialize memory for the filtered spike train
        self.ir = theano.shared(name='ir',
                                value=np.zeros((1, self.N, self.B)))

        # Define weights
        self.w_ir = T.dvector('w_ir')
        # Repeat them (in a differentiable manner) to create a 3-tensor
        w_ir2 = T.reshape(self.w_ir, [self.N, self.B])
        w_ir3 = T.reshape(self.w_ir, [1, self.N, self.B])

        # Make w_ir3 broadcastable in the 1st dim
        w_ir3 = T.addbroadcast(w_ir3, 0)

        # Take the elementwise product of the filtered spike train and
        # the repeated weights to get the weighted impulse current along each
        # impulse basis dimension. Then sum over bases to get the
        # total coupling current from each presynaptic neuron at
        # all time points
        self.I_imp = T.sum(self.ir * w_ir3, axis=2)
        # self.log_p = T.sum(-0.5/self.sigma**2 * (self.w_ir-self.mu)**2)
        self.log_p = self.prior.log_p(w_ir2)

        # Define a helper variable for the impulse response
        # after projecting onto the basis
        self.impulse = T.dot(w_ir2, T.transpose(self.ibasis))
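
The tensor algebra behind I_imp can be written in plain NumPy. A sketch with hypothetical shapes (T_bins time bins, N presynaptic neurons, B basis functions; the shared variable ir would later hold the basis-filtered spike trains):

import numpy as np

T_bins, N, B = 5, 3, 4
ir = np.random.randn(T_bins, N, B)   # basis-filtered spike trains
w = np.random.randn(N, B)            # impulse weights, one row per presynaptic neuron

# Broadcast the weights over time and sum over the basis axis,
# mirroring T.sum(self.ir * w_ir3, axis=2)
I_imp = np.sum(ir * w[None, :, :], axis=2)                # shape (T_bins, N)
assert np.allclose(I_imp, np.einsum('tnb,nb->tn', ir, w))
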
Example #4
    def __init__(self, model, type_model):
        self.model = model
        self.type_model = type_model
        self.name = self.type_model['name']

        # There are N neurons to assign types to
        self.N = type_model['N']

        # There are R latent types
        self.R = self.type_model['R']
        # Each neuron has a latent type Y
        self.Y = T.lvector('Y')

        # The probability of each type, with a symmetric Dirichlet prior
        self.alpha = T.dvector('alpha')
        self.alpha_prior = create_prior(self.type_model['alpha_prior'])
        # self.alpha0 = self.prms['alpha0']

        # Define log probability
        # log_p_alpha = T.sum((self.alpha0 - 1) * T.log(self.alpha))
        log_p_alpha = self.alpha_prior.log_p(self.alpha)
        log_p_Y = T.sum(T.log(self.alpha[self.Y]))

        self.log_p = log_p_alpha + log_p_Y
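
The type log-likelihood is just a categorical likelihood under the type probabilities alpha. A small NumPy sketch with hypothetical values:

import numpy as np

alpha = np.array([0.5, 0.3, 0.2])     # probabilities of the R = 3 latent types
Y = np.array([0, 2, 1, 0, 2])         # hypothetical type assignments for N = 5 neurons

# Same quantity as T.sum(T.log(self.alpha[self.Y]))
log_p_Y = np.sum(np.log(alpha[Y]))
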