Example 1
0
    def __init__(self, **kwargs):
        self.config = kwargs.pop('config', None)
        self.layer_type = self.class_name
        self.batch_size = self.config.getint('simulation', 'batch_size')
        self.dt = self.config.getfloat('simulation', 'dt')
        self.duration = self.config.getint('simulation', 'duration')
        self.tau_refrac = self.config.getfloat('cell', 'tau_refrac')
        self._v_thresh = self.config.getfloat('cell', 'v_thresh')
        self.v_thresh = None
        self.time = None
        self.mem = self.spiketrain = self.impulse = None
        self.refrac_until = None
        self.last_spiketimes = None

        allowed_kwargs = {
            'input_shape',
            'batch_input_shape',
            'batch_size',
            'dtype',
            'name',
            'trainable',
            'weights',
            'input_dtype',  # legacy
        }
        for kwarg in kwargs.copy():
            if kwarg not in allowed_kwargs:
                kwargs.pop(kwarg)
        Layer.__init__(self, **kwargs)
        self.stateful = True
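
The config object these layers receive behaves like a standard configparser.ConfigParser (it exposes getint, getfloat and getboolean). A minimal sketch of such an object, with section and option names taken from the calls above and purely illustrative values:

import configparser

# Illustrative values only; section/option names mirror the getint/getfloat
# calls in the example above.
config = configparser.ConfigParser()
config.read_dict({
    'simulation': {'batch_size': '32', 'dt': '1.0', 'duration': '50'},
    'cell': {'tau_refrac': '0.0', 'v_thresh': '1.0'},
})

print(config.getint('simulation', 'batch_size'))  # 32
print(config.getfloat('cell', 'v_thresh'))        # 1.0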
Example 2
0
    def __init__(self,
                 shape,
                 my_initializer='RandomNormal',
                 dtype=None,
                 name=None,
                 mult=1.0,
                 **kwargs):

        
        # some input checking
        if not name:
            prefix = 'local_param'
            name = prefix + '_' + str(K.get_uid(prefix))

        if not dtype:
            dtype = K.floatx()
        
        self.shape = [1, *shape]
        self.my_initializer = my_initializer
        self.mult = mult

        Layer.__init__(self, name=name, **kwargs)

        # Create a trainable weight variable for this layer.
        with K.name_scope(self.name):
            self.kernel = self.add_weight(name='kernel',
                                          shape=shape,
                                          initializer=self.my_initializer,
                                          dtype=dtype,
                                          trainable=True)

        # prepare output tensor, which is essentially the kernel.
        output_tensor = K.expand_dims(self.kernel, 0) * self.mult
        output_tensor._keras_shape = self.shape
        output_tensor._uses_learning_phase = False
        output_tensor._keras_history = base_layer.KerasHistory(self, 0, 0)
        output_tensor._batch_input_shape = self.shape

        self.trainable = True
        self.built = True    
        self.is_placeholder = False

        # create new node
        Node(self,
             inbound_layers=[],
             node_indices=[],
             tensor_indices=[],
             input_tensors=[],
             output_tensors=[output_tensor],
             input_masks=[],
             output_masks=[None],
             input_shapes=[],
             output_shapes=[self.shape])
Example 3
    def __init__(self, **kwargs):
        self.config = kwargs.pop('config', None)
        if self.config is None:
            from snntoolbox.bin.utils import load_config
            # Todo: Enable loading config here. Needed when trying to load a
            #       converted SNN from disk. For now we specify a dummy path.
            try:
                self.config = load_config('wdir/log/gui/test/.config')
            except FileNotFoundError:
                raise NotImplementedError
        self.layer_type = self.class_name
        self.dt = self.config.getfloat('simulation', 'dt')
        self.duration = self.config.getint('simulation', 'duration')
        self.tau_refrac = self.config.getfloat('cell', 'tau_refrac')
        self._v_thresh = self.config.getfloat('cell', 'v_thresh')
        self.v_thresh = None
        self.time = None
        self.mem = self.spiketrain = self.impulse = self.spikecounts = None
        self.mem_input = None  # Used in MaxPooling layers
        self.refrac_until = self.max_spikerate = None
        # Note: `clamp_var` is a module-level flag of the simulation backend;
        # it is not defined within this snippet.
        if clamp_var:
            self.spikerate = self.var = None

        import os
        from snntoolbox.utils.utils import get_abs_path
        path, filename = \
            get_abs_path(self.config.get('paths', 'filename_clamp_indices'),
                         self.config)
        if filename != '':
            filepath = os.path.join(path, filename)
            assert os.path.isfile(filepath), \
                "File with clamp indices not found at {}.".format(filepath)
            self.filename_clamp_indices = filepath
            self.clamp_idx = None

        self.payloads = None
        self.payloads_sum = None
        self.online_normalization = self.config.getboolean(
            'normalization', 'online_normalization')

        allowed_kwargs = {
            'input_shape',
            'batch_input_shape',
            'batch_size',
            'dtype',
            'name',
            'trainable',
            'weights',
            'input_dtype',  # legacy
        }
        for kwarg in kwargs.copy():
            if kwarg not in allowed_kwargs:
                kwargs.pop(kwarg)
        Layer.__init__(self, **kwargs)
        self.stateful = True
        self._floatx = tf.keras.backend.floatx()
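
The kwargs-filtering loop that recurs in these examples can be read as the following standalone helper (filter_layer_kwargs is a hypothetical name, shown only to illustrate the pattern):

def filter_layer_kwargs(kwargs,
                        allowed=('input_shape', 'batch_input_shape',
                                 'batch_size', 'dtype', 'name', 'trainable',
                                 'weights', 'input_dtype')):
    """Keep only the keyword arguments that keras.layers.Layer accepts."""
    return {k: v for k, v in kwargs.items() if k in allowed}

The in-place kwargs.pop loop above has the same effect, but mutates the incoming dictionary instead of building a new one.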
Example 4
0
    def __init__(self, units, rbf_units_trainable=False, use_gaussian_kernel=True, **kwargs):
        self.__init_centers = None
        self.__init_radius = None
        self.__bias = None
        self.__rbf_trainable = rbf_units_trainable
        self.__GaussianKernel = use_gaussian_kernel
        self.__input_dimension = 0
        self.__input_batchsize = 0

        if isinstance(units, int):
            self.__rbf_kernel_n = units
        else:
            raise TypeError('units must be an int giving the number of RBF kernels.')

        Layer.__init__(self, **kwargs)
Example 5
    def __init__(self, **kwargs):
        self.config = kwargs.pop('config', None)
        self.layer_type = self.class_name
        self.dt = self.config.getfloat('simulation', 'dt')
        self.duration = self.config.getint('simulation', 'duration')
        self.tau_refrac = self.config.getfloat('cell', 'tau_refrac')
        self._v_thresh = self.config.getfloat('cell', 'v_thresh')
        self.v_thresh = None
        self.time = None
        self.mem = self.spiketrain = self.impulse = self.spikecounts = None
        self.refrac_until = self.max_spikerate = None
        # Note: `clamp_var` is a module-level flag of the simulation backend;
        # it is not defined within this snippet.
        if clamp_var:
            self.spikerate = self.var = None

        import os
        from snntoolbox.utils.utils import get_abs_path
        path, filename = \
            get_abs_path(self.config.get('paths', 'filename_clamp_indices'),
                         self.config)
        if filename != '':
            filepath = os.path.join(path, filename)
            assert os.path.isfile(filepath), \
                "File with clamp indices not found at {}.".format(filepath)
            self.filename_clamp_indices = filepath
            self.clamp_idx = None

        self.payloads = None
        self.payloads_sum = None
        self.online_normalization = self.config.getboolean(
            'normalization', 'online_normalization')

        allowed_kwargs = {
            'input_shape',
            'batch_input_shape',
            'batch_size',
            'dtype',
            'name',
            'trainable',
            'weights',
            'input_dtype',  # legacy
        }
        for kwarg in kwargs.copy():
            if kwarg not in allowed_kwargs:
                kwargs.pop(kwarg)
        Layer.__init__(self, **kwargs)
        self.stateful = True
        self._floatx = tf.keras.backend.floatx()
Example 6
0
    def __init__(self, num_head, 
                 mu_shared_nodes, mu_branched_nodes=None, 
                 sigma_shared_nodes=None, sigma_branched_nodes=None, activation='relu',
                 single_head_multi_out=False):
        Layer.__init__(self)
        mu_shared_layers = []
        for msn in mu_shared_nodes:
            mu_shared_layers.append(Dense(msn, activation=activation))
            
        mu_branched_layers = []
        if mu_branched_nodes is not None:
            for mbn in mu_branched_nodes:
                mu_branched_layers.append([Dense(mbn, activation=activation) for _ in range(num_head)])
                
        if sigma_shared_nodes is None:
            sigma_shared_layers = mu_shared_layers
        else:
            sigma_shared_layers = []
            for ssn in sigma_shared_nodes:
                sigma_shared_layers.append(Dense(ssn, activation=activation))
                
        if sigma_branched_nodes is None:
            sigma_branched_layers = mu_branched_layers
        else:
            sigma_branched_layers = []
            for sbn in sigma_branched_nodes:
                sigma_branched_layers.append([Dense(sbn, activation=activation) for _ in range(num_head)])

        self.num_head = num_head
        self.mu_shared_layers = mu_shared_layers
        self.mu_branched_layers = mu_branched_layers
        self.sigma_shared_layers = sigma_shared_layers
        self.sigma_branched_layers = sigma_branched_layers
        self.single_head_multi_out = single_head_multi_out
        if single_head_multi_out:
            assert mu_branched_nodes is None and sigma_branched_nodes is None
            self.mu_outs = [Dense(num_head)]
            self.sigma_outs = [Dense(num_head)]
        else:
            self.mu_outs = [Dense(1) for _ in range(num_head)]
            self.sigma_outs = [Dense(1) for _ in range(num_head)]
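
A hypothetical instantiation (the enclosing class name MultiHeadGaussian is assumed here, since the class header is not part of the snippet): three heads sharing a two-layer trunk and no branched layers, so each head gets its own Dense(1) mu and sigma output.

heads = MultiHeadGaussian(num_head=3, mu_shared_nodes=[64, 64])
# heads.mu_outs and heads.sigma_outs each contain three Dense(1) layers;
# heads.sigma_shared_layers is the same list object as heads.mu_shared_layers.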
Example 7
0
    def __init__(self, **kwargs):
        self.config = kwargs.pop('config', None)
        self.layer_type = self.class_name
        self.spikerates = None
        self.num_bits = self.config.getint('conversion', 'num_bits')
        self.powers = tf.constant([2**-(i + 1) for i in range(self.num_bits)])
        self._x_binary = None
        self._a = None
        allowed_kwargs = {
            'input_shape',
            'batch_input_shape',
            'batch_size',
            'dtype',
            'name',
            'trainable',
            'weights',
            'input_dtype',  # legacy
        }
        for kwarg in kwargs.copy():
            if kwarg not in allowed_kwargs:
                kwargs.pop(kwarg)
        Layer.__init__(self, **kwargs)
        self.stateful = True
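
The powers constant above holds the place values of a fractional binary encoding, 2**-(i + 1) for bit i. A small illustration, separate from the layer itself, with num_bits fixed to 4:

import tensorflow as tf

num_bits = 4
powers = tf.constant([2**-(i + 1) for i in range(num_bits)])  # [0.5, 0.25, 0.125, 0.0625]
bits = tf.constant([1., 0., 1., 1.])
print(float(tf.reduce_sum(bits * powers)))  # 0.6875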
Example 8
0
    def __init__(self):
        Layer.__init__(self)
        self.frontend_name = 'keras'