Example #1
 def __init__(self, **kwargs):
   pf.set_attributes_from_dicts(self, kwargs)
   self.batch_idx = 0
   self.batch_grad = np.zeros(self.param.shape)
   self.update_integrator = np.zeros(self.param.shape)
   self.grad_integrator = np.zeros(self.param.shape)
   self.eta = 0
Example #2
 def __init__(self, **kwargs):
   pf.set_attributes_from_dicts(self, PredictionNet.defaults, kwargs)
   self.run_time, self.last_time = 0, time.time()
   self.rho = self.rho_max
   self.n_input = self.X.shape[1]
   self.y = pf.numpy.random_sample_between((self.n_neuron,),
       self.nonlin(np.inf), self.nonlin(-np.inf))
   self.initialize_weights()
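
Every constructor in these examples funnels its configuration through pfacets.set_attributes_from_dicts (imported as pf in some modules): class-level defaults and the caller's kwargs are merged and copied onto the instance as attributes. The real implementation lives in the pfacets package; purely as a sketch of the behavior the examples rely on, assuming that later dicts override earlier ones and that conservative=True leaves already-set attributes alone, it might look like this:

  # Sketch only -- not the actual pfacets implementation.
  def set_attributes_from_dicts(obj, *dicts, **options):
    conservative = options.get('conservative', False)
    merged = {}
    for d in dicts:
      merged.update(d or {})          # later dicts win; None entries are ignored
    for key, value in merged.items():
      if conservative and hasattr(obj, key):
        continue                      # keep a value that was set elsewhere
      setattr(obj, key, value)

With such a helper, a call like pf.set_attributes_from_dicts(self, PredictionNet.defaults, kwargs) makes every default and every keyword argument available as self.<name>.
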
Example #3
 def __init__(self, **kwargs):
   pf.set_attributes_from_dicts(self, CacheTracer.defaults, kwargs, conservative=True)
   super(CacheTracer, self).__init__(**kwargs)
   if self.output_root is not None:
     pf.mkdir_p(self.output_root)
   self.set_capture_defaults()
   self.capture_d = { v['name']: v for v in self.capture }
   self.cache = {}
   self.reset_cache()
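
The capture list consumed here is indexed by name into self.capture_d so that individual capture specifications can be looked up directly. Purely for illustration -- every key other than 'name' below is hypothetical, not taken from the source -- the mapping behaves like this:

  capture = [
    {'name': 'phi', 'interval': 100},   # hypothetical capture spec
    {'name': 'E',   'interval': 10},    # hypothetical capture spec
  ]
  capture_d = { v['name']: v for v in capture }
  capture_d['phi']                      # -> {'name': 'phi', 'interval': 100}
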
Example #4
 def __init__(self, **kwargs):
   pf.set_attributes_from_dicts(self, PredictionNet.defaults, kwargs)
   # from IPython import embed; embed()
   self.run_time, self.last_time = 0, time.time()
   self.n_input = self.X.shape[1]
   # self.y = pf.numpy.random_sample_between((self.n_neuron,),
   #     self.nonlin(np.inf), self.nonlin(-np.inf))
   self.initialize_weights()
   self.updaters = { k : ParamUpdater(param=getattr(self, k), rho=self.rho,
     epsilon=self.epsilon, batch_size=self.batch_size)
     for k in PredictionNet.weights }
Example #5
File: sp.py Project: ursk/sparco
  def __init__(self, **kwargs):
    """Set and validate configuration, initialize output classes."""
    home = os.path.expanduser('~')
    defaults = {
      'sampler': None,
      'batch_size': 10,
      'num_iterations': 100,
      'run_time_limit': float("inf"),
      'dictionary_size': 100,
      'convolution_time_length': 64,
      'phi': None,
      'inference_function': sparco.qn.sparseqn.sparseqn_batch,
      'inference_settings': {
        'lam': 0,
        'maxit': 15,
        'debug': False,
        'positive': False,
        'delta': 0.0001,
        'past': 6
        },
      'eta': .00001,
      'learner_class': sparco.learn.AngleChasingLearner,
      'eta_up_factor': 1.01,
      'eta_down_factor': .99,
      'target_angle': 1.,
      'max_angle': 2.,
      'update_coefficient_statistics_interval': 100,
      'basis_centering_interval': None,
      'basis_centering_max_shift': None,
      'basis_method': 1,  # TODO this is a temporary measure
      }
    pfacets.set_attributes_from_dicts(self, defaults, kwargs)

    # TODO temp for profiling; second line is especially hacky
    self.learn_basis = getattr(self, "learn_basis{0}".format(self.basis_method))
    self.__class__.learn_basis = getattr(self.__class__,
        'learn_basis{0}'.format(self.basis_method))
    self.create_root_buffers = getattr(self,
          "create_root_buffers{0}".format(self.basis_method))

    self.patches_per_node = self.batch_size / mpi.procs
    pfacets.mixin(self, self.learner_class)
    self.a_variance_cumulative = np.zeros(self.dictionary_size)
    self.run_time = 0
    self.last_time = time.time()

    C, N, P = len(self.sampler.channels), self.dictionary_size, self.convolution_time_length
    T = self.sampler.patch_length
    buffer_dimensions = { 'a': (N, P+T-1), 'x': (C, T), 'xhat': (C,T),
        'dx': (C,T), 'dphi': (C,N,P), 'E': (1,), 'a_l0_norm': (N,),
        'a_l1_norm': (N,), 'a_l2_norm': (N,), 'a_variance': (N,) }
    self.create_node_buffers(buffer_dimensions)
    self.create_root_buffers(buffer_dimensions)
    self.initialize_phi(C,N,P)
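
For concreteness, plugging representative numbers into the shape table above makes the buffers easier to read. N and P come from the defaults shown (dictionary_size=100, convolution_time_length=64), T=128 matches the sampler's default patch_length further down, and C=4 channels is an arbitrary illustrative choice:

  C, N, P, T = 4, 100, 64, 128
  # 'a'    -> (N, P+T-1) = (100, 191)   one coefficient row per basis function,
  #                                     spanning the patch plus the convolution overhang
  # 'x'    -> (C, T)     = (4, 128)     one raw data patch
  # 'dphi' -> (C, N, P)  = (4, 100, 64) same shape as the basis phi
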
Example #6
  def __init__(self, **kwargs):
    """Configure, open hdf5 files, and load an initial cache.

    Parameters
    ----------
    cache_size : int (optional)
      Number of patches to load into memory at once.
    resample_cache : int (optional)
      Multiplier for the cache_size to determine the number of patches that
      should be drawn before a new cache is generated.
    hdf5_data_path : list of str (optional)
      Last element must be the name of a dataset in the wrapped hdf5 file(s). Can
      be preceded by group names.
    time_dimension : int (optional)
      Dimension of the data matrix corresponding to time.
    patch_length : int (optional)
      Number of time steps per patch.
    patch_filters : list of functions (optional)
      Used to provide selection criteria for patches. Each filter is a function
      that should take a two-dimensional patch matrix (channels x time) as its
      sole argument and return a Boolean value. A patch is selected only if
      every filter returns False for it.
    channels : list or np.array (optional)
      A list of indices into the channel dimension of the data matrix. Selects a
      subset of channels for analysis. When omitted, all channels are used.
    """
    defaults = {
        'cache_size': 1000,
        'subsample': 1,
        'resample_cache': 1,
        'hdf5_data_path': ['data'],
        'time_dimension': 1,
        'patch_length': 128,
        'patch_filters': map(lambda f: functools.partial(f, self), Sampler.patch_filters),
        'channels': None
        }
    pfacets.set_attributes_from_dicts(self, defaults, kwargs)
    self.superpatch_length = self.patch_length * self.subsample * self.cache_size
    self.channel_dimension = int(not self.time_dimension)
    self.open_files()
    self.update_configuration_from_files()
    self.remove_short_files()
    self.patch_shape = (len(self.channels), self.patch_length)
    self.refresh_cache()
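
The patch_filters option documented above defines a small veto protocol: each filter receives a candidate patch and returns True to reject it, and a patch is kept only when every filter returns False. Since the defaults wrap each filter with functools.partial(f, self), a filter sees the sampler as its first argument and the patch as its second. A minimal sketch of such a filter -- the name and threshold are purely illustrative:

  import numpy as np

  def reject_flat_patch(sampler, patch, threshold=1e-3):
    """Illustrative filter: veto patches whose peak amplitude is below threshold."""
    return np.abs(patch).max() < threshold

  # Wrapped as functools.partial(reject_flat_patch, self), so it is ultimately
  # called as filter(patch) with the sampler already bound.
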
Example #7
 def __init__(self, **kwargs):
   pfacets.set_attributes_from_dicts(self, Plotter.defaults, kwargs)
Example #8
 def __init__(self, **kwargs):
   pfacets.set_attributes_from_dicts(self, Feature.defaults, kwargs)
Example #9
 def __init__(self, **kwargs):
   pf.set_attributes_from_dicts(self, HistoryTracer.defaults, kwargs, conservative=True)
   super(HistoryTracer, self).__init__(**kwargs)
Example #10
 def __init__(self, **kwargs):
   pfacets.set_attributes_from_dicts(self, Run.defaults, kwargs)
   self.output_dir = osp.join(self.output_root, self.get_basename())
   pfacets.mkdir_p(self.output_dir)
Example #11
 def __init__(self, **kwargs):
   pfacets.set_attributes_from_dicts(self, Dictionary.defaults, kwargs)
Example #12
 def __init__(self, **kwargs):
   pf.set_attributes_from_dicts(self, ConcatSampler.defaults, kwargs)
   super(ConcatSampler, self).__init__()
Example #13
 def read_dataset_info(self):
   self.Sdim = self.dims.index('S') if 'S' in self.dims else None
   self.Tdim = self.dims.index('T') if 'T' in self.dims else None
   pf.set_attributes_from_dicts(self, self.sampler.read_patch_shape())
   self.D = self.sampler.read_dataset_size()