def __init__(self, spikes=None, network=None, network_file=None, window_size=1, params=None):
    object.__init__(self)
    Restoreable.__init__(self)

    self._window_size = window_size
    if spikes is not None:
        self._spikes = spikes
        # MPF Hopfield network over all neurons across the window
        self._network = HopfieldNetMPF(self._spikes.N * self._window_size)
    if network is not None:
        self._network = network
    if network_file is not None and network is None:
        self._network = HopfieldNet.load(network_file)
    if params is not None:
        self._params = params
    else:
        self._params = {'Mode': 'default'}
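# Usage sketch for the constructor above (the enclosing class name is not shown here,
# so "SomeLearner" is a placeholder; the spikes object is assumed to expose an N attribute,
# as the code above requires):
#
#     learner = SomeLearner(spikes=spikes, window_size=2)        # builds a HopfieldNetMPF over spikes.N * 2 units
#     restored = SomeLearner(network_file='saved_network_file')  # or restore a previously saved HopfieldNet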
def __init__(self, N=None, J=None, theta=None, name=None, update="asynchronous", symmetric=True):
    object.__init__(self)
    Restoreable.__init__(self)

    self._learn_iterations = 0  # how many learning steps have been taken so far
    self._N = N
    self._symmetric = symmetric
    if J is None and N is not None and N > 0:
        self._J = np.zeros((self._N, self._N))
    else:
        self._J = J
    if theta is None and N is not None and N > 0:
        self._theta = np.zeros(self._N)
    else:
        self._theta = theta
    self._name = name or self.__class__.__name__
    self._update = update
    self._neuron_order = range(self._N) if self._N else None
    self._last_num_iter_for_convergence = 0  # number of Hopfield dynamics steps the previous __call__ took
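# Usage sketch ("HopfieldNet" is the class name suggested by the version-1 loader further
# below; parameter values are illustrative only):
#
#     net = HopfieldNet(N=16)       # J is a 16x16 zero matrix, theta a length-16 zero vector
#     custom = HopfieldNet(N=16, J=my_J, theta=my_theta, update="asynchronous")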
def __init__(self, stimulus_arr=None, npz_file=None, h5_file=None, preprocess=True):
    """
    Container for stimulus data. The data can be passed directly as an array
    or loaded from an .npz or HDF5 file; the first axis of the array is taken
    as the sample (frame) axis.

    Parameters
    ----------
    stimulus_arr : numpy array, optional
        Stimulus data array (default None)
    npz_file : str, optional
        Path to an .npz file whose first array holds the stimulus data (default None)
    h5_file : str, optional
        Path to an HDF5 file whose first dataset holds the stimulus data (default None)
    preprocess : bool, optional
        Whether to run preprocessing on the loaded data (default True)
    """
    object.__init__(self)
    Restoreable.__init__(self)

    # TODO reuse io functionality from data module!
    self.file_name = npz_file or ''
    if npz_file is None and stimulus_arr is None and h5_file is None:
        self._M = 0
        return
    if stimulus_arr is not None:
        self._stimulus_arr = stimulus_arr
    if npz_file is not None:
        if not os.path.isfile(npz_file):
            hdlog.info("File '%s' does not exist!" % npz_file)
            return
        self.file_name = npz_file
        tmp = np.load(npz_file)
        self._stimulus_arr = tmp[list(tmp.keys())[0]]
    if h5_file is not None:
        import h5py
        f = h5py.File(h5_file, 'r')
        self._stimulus_arr = f[list(f.keys())[0]]
    if preprocess:
        self.preprocess()
    self._M = self._stimulus_arr.shape[0]
    self._X = self._stimulus_arr.shape[1:]
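# Usage sketch ("Stimulus" is the class name suggested by the loader further below; the
# array shape and file name are hypothetical):
#
#     stim = Stimulus(stimulus_arr=np.random.rand(100, 32, 32))   # 100 frames of 32x32 pixels
#     stim_from_file = Stimulus(npz_file='stimulus.npz')          # first array in the archive is used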
def __init__(self, spikes=None, stimulus=None, window_size=1, learner=None):
    object.__init__(self)
    Restoreable.__init__(self)

    self._stimulus = stimulus
    self._window_size = window_size
    self._learner = learner or None
    self._original_spikes = spikes
    self._learn_time = None
    self._sample_spikes = None
    self._raw_patterns = None
    self._hopfield_patterns = None
    self._hopfield_spikes = None
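# Usage sketch (the enclosing class name is not shown above, so "SomeModel" is a placeholder;
# spikes, stim and learner stand for objects constructed elsewhere in this package):
#
#     model = SomeModel(spikes=spikes, stimulus=stim, window_size=3, learner=learner)
#     # pattern- and Hopfield-related attributes start as None and are presumably filled in later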
def __init__(self, counter=None, save_sequence=True):
    object.__init__(self)
    Restoreable.__init__(self)

    self._counts = {}
    self._patterns = []
    self._lookup_patterns = {}
    self._sequence = []
    self._save_sequence = save_sequence
    self._skipped_patterns = 0
    self._seen_sequence = []
    if counter is not None:
        self.merge_counts(counter)
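# Usage sketch (the counter keyword and the merge_counts call suggest this class is itself a
# pattern counter; "PatternCounter" is a placeholder name and "other_counter" a hypothetical
# existing instance):
#
#     counter = PatternCounter()                       # empty counts, sequence recording enabled
#     merged = PatternCounter(counter=other_counter)   # seeds the new instance via merge_counts
#     no_seq = PatternCounter(save_sequence=False)     # do not record the pattern sequence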
def __init__(self, spikes=None, bin_size=None, preprocess=True):
    object.__init__(self)
    Restoreable.__init__(self)

    self._spikes = np.atleast_2d(spikes)
    spikes_shape = self._spikes.shape
    if len(spikes_shape) == 2:  # single trial
        self._spikes = self._spikes.reshape((1, spikes_shape[0], spikes_shape[1]))
    self._T = self._spikes.shape[0]
    self._N = self._spikes.shape[1]
    self._M = self._spikes.shape[2]
    self._bin_size = bin_size
    self._restricted = None

    if preprocess:
        self._preprocess()
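# Usage sketch (the enclosing class name is not shown above, so "SpikesContainer" is a
# placeholder; the code above lays spike data out as trials x neurons x time bins):
#
#     raster = np.random.randint(0, 2, size=(10, 200))          # 10 neurons, 200 bins, single trial
#     spikes = SpikesContainer(spikes=raster)                   # reshaped internally to (1, 10, 200)
#     multi = SpikesContainer(spikes=np.zeros((5, 10, 200)))    # 5 trials, 10 neurons, 200 bins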
def _load_v1(self, contents, load_extra=False):
    # internal function to load v1 file format
    hdlog.debug('loading HopfieldNet, format version 1')
    return Restoreable._load_attributes(self, contents, self._SAVE_ATTRIBUTES_V1)
def _load_v1(self, contents, load_extra=False):
    # internal function to load v1 file format
    hdlog.debug('Loading Stimulus, format version 1')
    return Restoreable._load_attributes(self, contents, self._SAVE_ATTRIBUTES_V1)
def _load_v2(self, contents, load_extra=False):
    # internal function to load v2 file format
    hdlog.debug('Loading PatternsHopfield patterns, format version 2')
    return Restoreable._load_attributes(self, contents, self._SAVE_ATTRIBUTES_V2)
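# For context, a minimal sketch of what the Restoreable._load_attributes helper used by the
# three loaders above could look like (an assumption for illustration, not necessarily the
# package's actual implementation): it copies each whitelisted attribute from the loaded
# contents dict onto the instance and returns the instance.
#
#     def _load_attributes(self, contents, attributes):
#         for attr in attributes:
#             setattr(self, attr, contents[attr])   # exact key naming is an assumption
#         return self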