Example No. 1
def parameters(self):
    """ Return list of parameters
    [W, b] if fit_intercept=True
    [W] otherwise
    """
    if self.fit_intercept:
        return [K.get_value(self._model.get('W')),
                K.get_value(self._model.get('b'))]
    else:
        return [K.get_value(self._model.get('W'))]
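A minimal usage sketch for this accessor; the `LogisticRegression` class name is hypothetical, only the `parameters()` contract above is taken from the example:

# Hypothetical estimator exposing the parameters() method above.
model = LogisticRegression(fit_intercept=True)
W, b = model.parameters()    # numpy weight matrix and bias vector
model = LogisticRegression(fit_intercept=False)
W, = model.parameters()      # weights only, no bias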
Example No. 2
def set_params(self, **params):
    # mapping: ops.name -> ops
    ops = {i.name: i for i in self._seq_ops.ops}
    for name, p in params.items():
        if name in ops:
            for param_old, param_new in zip(ops[name].parameters, p):
                # accept either numpy arrays or backend variables as new values
                if not isinstance(param_new, np.ndarray):
                    param_new = K.get_value(param_new)
                K.set_value(param_old, param_new)
    return self
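The `K.get_value`/`K.set_value` pair reads and writes the numpy values backing a variable. A minimal sketch, assuming the `odin.backend` import path shown in Example No. 5 below:

import numpy as np
from odin import backend as K

v = K.variable(np.zeros((2, 3)), name='v')  # backend variable
K.set_value(v, np.ones((2, 3)))             # overwrite its values in place
assert K.get_value(v).sum() == 6.0          # read them back as a numpy array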
Example No. 3
def test_load_save1(self):
    K.set_training(True)
    X = K.placeholder((None, 1, 28, 28))
    f = N.Dense(128, activation=K.relu)
    y = f(X)
    # collapse each parameter tensor to a scalar checksum for comparison
    W, b = [K.get_value(p).sum() for p in K.ComputationGraph(y).parameters]
    num_units = f.num_units
    W_init = f.W_init
    b_init = f.b_init
    activation = f.activation

    # round-trip the layer through serialization (cPickle is Python 2's
    # C-accelerated pickle module)
    f = cPickle.loads(cPickle.dumps(f))
    W1, b1 = [K.get_value(p).sum() for p in f.parameters]
    num_units1 = f.num_units
    W_init1 = f.W_init
    b_init1 = f.b_init
    activation1 = f.activation

    # the deserialized layer must match the original
    self.assertEqual(W1, W)
    self.assertEqual(b1, b)
    self.assertEqual(num_units1, num_units)
    self.assertEqual(W_init1.__name__, W_init.__name__)
    self.assertEqual(b_init1.__name__, b_init.__name__)
    self.assertEqual(activation1, activation)
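The test compares scalar checksums rather than full arrays. The same pattern as a reusable helper, a sketch given the `f` layer above (`pickle` stands in for the Python 2 `cPickle`):

import pickle

def param_checksums(params):
    # collapse each parameter tensor to a scalar for a cheap equality check
    return [K.get_value(p).sum() for p in params]

before = param_checksums(f.parameters)
f = pickle.loads(pickle.dumps(f))   # serialize and deserialize
assert param_checksums(f.parameters) == before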
Example No. 4
def get_params(self, deep=False):
    """
    Parameters
    ----------
    deep: boolean
        if True, return the numpy arrays (i.e. the real values of
        each parameter)
    """
    parameters = {}
    for ops in self._seq_ops.ops:
        name = ops.name
        params = ops.parameters
        if deep:
            # materialize backend variables into numpy arrays
            params = [K.get_value(i) for i in params]
        parameters[name] = params
    return parameters
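Together with `set_params` from Example No. 2, this supports a snapshot/restore round trip. A sketch, assuming a `model` exposing both methods:

snapshot = model.get_params(deep=True)  # name -> list of numpy arrays
# ... training mutates the parameters ...
model.set_params(**snapshot)            # write the saved values back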
Example No. 5
def __init__(self, lr, decay_rate=0.5):
    super(LRdecay, self).__init__()
    from odin import backend as K
    self.lr = lr                     # backend variable holding the learning rate
    self.lr_value = K.get_value(lr)  # cached numpy value of the learning rate
    self.decay_rate = decay_rate
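The constructor caches the numeric learning rate alongside the backend variable. A plausible decay step, not shown in the source (the `on_epoch_end` hook name is an assumption), would update both:

def on_epoch_end(self):                  # hypothetical callback hook
    self.lr_value *= self.decay_rate     # decay the cached Python float
    K.set_value(self.lr, self.lr_value)  # write it back into the variable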
Example No. 6
def create_params(self, spec, shape, name, nnops, roles=[], nb_params=1):
    """
    Parameters
    ----------
    spec: variable, numpy.ndarray, function
        specification for initializing the weights
    shape: tuple, list
        expected shape for given variable
    name: str
        name for the variable
    nnops: NNOps
        parent operator of these parameters
    roles: odin.basic.VariableRole
        categories of this variable
    nb_params: int
        number of parameters horizontally stacked into the given
        `shape` (e.g. nb_params=2 creates 2 parameters with the given
        `shape` and horizontally stacks them into 1 parameter)
        * NOT supported when `spec` is a variable.
    """
    if not isinstance(roles, (tuple, list)):
        roles = [roles]
    if not isinstance(nnops, NNOps):
        raise Exception('nnops must be instance of odin.nnet.base.NNOps')

    shape = tuple(shape)  # convert to tuple if needed
    if any(d <= 0 for d in shape):
        raise ValueError(
            ("Cannot create param with a non-positive shape dimension. "
             "Tried to create param with shape=%r, name=%r") %
            (shape, name))

    # ====== create parameters ====== #
    spec = as_tuple(spec, nb_params)
    spec = [_initialize_param(name, s, shape) for s in spec]
    # check the returned shapes: all specs must agree on a single shape
    shape = list(set([i[-1] for i in spec]))
    if len(shape) > 1:
        raise Exception(
            'shapes are inconsistent among all given "spec", the '
            'created shape is: %s' % str(shape))
    shape = shape[0]
    # check spec returned
    spec = [i[0] for i in spec]
    if isinstance(spec[0], np.ndarray):
        # numpy arrays: concatenate and wrap in a new backend variable
        with K.variable_scope(nnops.name):
            spec = np.concatenate(spec, axis=-1)
            shape = spec.shape
            spec = K.variable(spec, name=name)
    elif K.is_trainable_variable(spec[0]):
        if nb_params > 1:
            # multiple trainable variables: pull out their values,
            # concatenate, and wrap them in a single new variable
            with K.variable_scope(nnops.name):
                spec = np.concatenate([K.get_value(i) for i in spec],
                                      axis=-1)
                shape = spec.shape
                spec = K.variable(spec, name=name)
        else:
            spec = spec[0]
    elif K.is_variable(spec[0]):
        # symbolic (non-trainable) variables: concatenate as an expression
        shape = (shape[0] * nb_params,) if len(shape) == 1 \
            else shape[:-1] + (shape[-1] * nb_params,)
        spec = K.concatenate(spec, axis=-1)
    # ====== assign annotations ====== #
    # only add roles for trainable variables
    for i in roles:
        if isinstance(i, VariableRole) and K.is_trainable_variable(spec):
            add_role(spec, i)
    # return actual variable or expression
    # override other parameters with same name
    self._variables[name] = spec
    # set parameter attribute for NNOps
    setattr(nnops, name, spec)
    return spec
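A usage sketch following the docstring above; the surrounding NNOps subclass, the shape, and the initializer are assumptions, not taken from the source:

import numpy as np
# Hypothetical call inside an NNOps subclass: create a (784, 128) weight
# matrix from a concrete numpy initializer and register it on the operator.
W = self.create_params(spec=np.random.randn(784, 128).astype('float32'),
                       shape=(784, 128), name='W', nnops=self)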