Example 1
    def _create_slot_var(self, var, name):
        """Create slot variable for the given Variable

        Typical usage is to create variables that hold the moving average
        of the given Variable.

        Parameters
        ----------
        var : theano.SharedVariable
            Variable whose size and dtype are used to create the slot.

        name : str
            The name of the slot.

        Returns
        -------
        Variable
            Wrapped Variable of the resulting slot variable.
        """
        value = var.get_value(borrow=True)
        var_name = var.name.split(':')[0]

        name = '/'.join([var_name, name])
        var = make_variable(
            name=name, shape=value.shape, dtype=value.dtype,
            initializer=fetch_initializer('ConstantInitializer')(0),
            broadcastable=var.broadcastable)
        self._create_parameter_slot(name, var, train=False, serialize=True)
        return var.unwrap()
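
The slot is named after the parameter it belongs to: the parameter's own name
(with any ':0'-style suffix stripped) is joined with the slot name. A minimal
standalone sketch of that naming step, using a made-up variable name and the
hypothetical slot name 'momentum':

var_name = 'layer1/weight:0'.split(':')[0]     # -> 'layer1/weight'
slot_name = '/'.join([var_name, 'momentum'])   # -> 'layer1/weight/momentum'
print(slot_name)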
Example 2
    def _instantiate_parameters(self, input_shape):
        dim, fmt = len(input_shape), luchador.get_nn_conv_format()
        channel = 1 if dim == 2 or fmt == 'NCHW' else 3

        self._axes = tuple(i for i in range(dim) if not i == channel)
        shape = tuple(input_shape[i] for i in range(dim) if i == channel)

        const_init = fetch_initializer('ConstantInitializer')
        if self.get_parameter_variable('mean') is None:
            mean = wrapper.make_variable(
                name='mean', shape=shape,
                initializer=const_init(0), trainable=False)
            self.set_parameter_variables(mean=mean)

        if self.get_parameter_variable('var') is None:
            var = wrapper.make_variable(
                name='var', shape=shape,
                initializer=const_init(1), trainable=False)
            self.set_parameter_variables(var=var)

        if self.get_parameter_variable('scale') is None:
            scale = wrapper.make_variable(
                name='scale', shape=shape, trainable=True,
                initializer=const_init(self.args['scale']))
            self.set_parameter_variables(scale=scale)

        if self.get_parameter_variable('offset') is None:
            offset = wrapper.make_variable(
                name='offset', shape=shape, trainable=True,
                initializer=const_init(self.args['offset']))
            self.set_parameter_variables(offset=offset)
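
The statistics here are kept per channel: the channel axis is 1 for 2-D input
or the 'NCHW' convolution format and 3 otherwise, and every remaining axis is
reduced over. A standalone sketch of that index arithmetic, assuming a made-up
NHWC input shape and substituting a literal for luchador.get_nn_conv_format():

input_shape, fmt = (32, 84, 84, 4), 'NHWC'   # hypothetical batch of 84x84 images, 4 channels
dim = len(input_shape)
channel = 1 if dim == 2 or fmt == 'NCHW' else 3

axes = tuple(i for i in range(dim) if not i == channel)            # axes reduced over -> (0, 1, 2)
shape = tuple(input_shape[i] for i in range(dim) if i == channel)  # per-channel shape -> (4,)
print(axes, shape)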
Example 3
    def _create_slot_var(self, var, name):
        """Create slot variable for the given Variable

        Typical usage is to create variables that hold the moving average
        of the given Variable.

        Parameters
        ----------
        var : theano.SharedVariable
            Variable whose size and dtype are used to create the slot.

        name : str
            The name of the slot.

        Returns
        -------
        Variable
            Wrapped Variable of the resulting slot variable.
        """
        value = var.get_value(borrow=True)
        var_name = var.name.split(':')[0]

        name = '/'.join([var_name, name])
        var = make_variable(
            name=name,
            shape=value.shape,
            dtype=value.dtype,
            initializer=fetch_initializer('ConstantInitializer')(0),
            broadcastable=var.broadcastable)
        self._create_parameter_slot(name, var, train=False, serialize=True)
        return var.unwrap()
Example 4
    def _get_alpha(self):
        _alpha = self.args['alpha']
        initializer = fetch_initializer('ConstantInitializer')(value=_alpha)
        alpha = make_variable(name='alpha', shape=[], initializer=initializer)
        self._create_parameter_slot(
            'alpha', val=alpha, train=True, serialize=True)
        return alpha.unwrap()
Example 5
    def _create_slot(self, initial_value, name):
        """Create slot variable independant to gradients and parameters

        Example use is the beta parameters in the Adam and Adamax optimizers.
        Only scalar values are supported.

        Parameters
        ----------
        initial_value : number
            Initial value of the resulting slot

        name : str
            The name of the slot.

        Returns
        -------
        Variable
            Wrapped Variable of the resulting slot variable.
        """
        init = fetch_initializer('ConstantInitializer')(initial_value)
        var = make_variable(name=name,
                            shape=[],
                            broadcastable=True,
                            initializer=init)
        self._create_parameter_slot(name, var, train=False, serialize=True)
        return var.unwrap()
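
Scalar slots like this typically carry the running power of a decay rate used
for Adam-style bias correction. A standalone sketch of that bookkeeping (plain
Python, not luchador code; beta1 = 0.9 is an arbitrary choice):

beta1 = 0.9
beta1_power = beta1                      # hypothetical initial_value passed to _create_slot
for step in range(1, 4):
    bias_correction = 1.0 - beta1_power  # approaches 1 as training proceeds
    print(step, round(beta1_power, 4), round(bias_correction, 4))
    beta1_power *= beta1                 # the slot would be updated once per optimization step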
Example 6
def _get_bias_init(config):
    config = config or {
        'typename': 'ConstantInitializer',
        'args': {
            'value': 0.1
        }
    }
    return fetch_initializer(config['typename'])(**config.get('args', {}))
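
The 'config or {...}' idiom falls back to a constant 0.1 bias initializer when
no configuration is supplied, and config.get('args', {}) tolerates configs that
omit the 'args' key. A standalone sketch of that resolution, with plain dicts
in place of the real fetch_initializer call (the second typename below is just
an illustrative string):

def resolve(config):
    config = config or {'typename': 'ConstantInitializer', 'args': {'value': 0.1}}
    return config['typename'], config.get('args', {})

print(resolve(None))                              # -> ('ConstantInitializer', {'value': 0.1})
print(resolve({'typename': 'NormalInitializer'})) # -> ('NormalInitializer', {})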
Example 7
    def _create_slot(self, initial_value, name):
        """Create slot variable independant to gradients and parameters

        Example use is the beta parameter in the Adamax optimizer.
        Currently only scalar values are supported.
        """
        init = fetch_initializer('ConstantInitializer')(initial_value)
        var = make_variable(name=name, shape=[], initializer=init)
        self._create_parameter_slot(
            name=name, val=var, train=False, serialize=True)
        return var.unwrap()
Example 8
    def _create_slot(self, initial_value, name):
        """Create slot variable independant to gradients and parameters

        Example use is the beta parameter in the Adamax optimizer.
        Currently only scalar values are supported.
        """
        init = fetch_initializer('ConstantInitializer')(initial_value)
        var = make_variable(name=name, shape=[], initializer=init)
        self._create_parameter_slot(name=name,
                                    val=var,
                                    train=False,
                                    serialize=True)
        return var.unwrap()
Example 9
    def _create_slot_var(self, var, slot_name):
        """Create slot variable for the given Variable

        Typical usage is to create variables that hold the moving average
        of the given Variable.
        """
        var_name = var.name.split(':')[0]
        name = '/'.join([var_name, slot_name])
        slot_var = make_variable(
            name=name, shape=var.get_shape(), dtype=var.dtype,
            initializer=fetch_initializer('ConstantInitializer')(0))
        self._create_parameter_slot(
            name=name, val=slot_var, train=False, serialize=True)
        return slot_var.unwrap()
Example 10
    def _create_slot_var(self, var, slot_name):
        """Create slot variable for the given Variable

        Typical usage is to create variables that hold the moving average
        of the given Variable.
        """
        var_name = var.name.split(':')[0]
        name = '/'.join([var_name, slot_name])
        slot_var = make_variable(
            name=name,
            shape=var.get_shape(),
            dtype=var.dtype,
            initializer=fetch_initializer('ConstantInitializer')(0))
        self._create_parameter_slot(name=name,
                                    val=slot_var,
                                    train=False,
                                    serialize=True)
        return slot_var.unwrap()
Example 11
    def _instantiate_parameters(self, input_shape, dtype):
        dim = len(input_shape)
        shape = tuple(input_shape[i] for i in range(dim) if i == 1)
        self._axes = tuple(i for i in range(dim) if not i == 1)
        self._pattern = tuple((0 if i == 1 else 'x') for i in range(dim))

        _LG.debug('    Shape: %s', shape)
        _LG.debug('     Axes: %s', self._axes)
        _LG.debug('  Pattern: %s', self._pattern)

        const_init = fetch_initializer('ConstantInitializer')
        if self._parameter_variables['mean'] is None:
            mean = wrapper.make_variable(name='mean',
                                         shape=shape,
                                         trainable=False,
                                         initializer=const_init(0),
                                         dtype=dtype)
            self.set_parameter_variables(mean=mean)

        if self._parameter_variables['var'] is None:
            var = wrapper.make_variable(name='var',
                                        shape=shape,
                                        trainable=False,
                                        initializer=const_init(1),
                                        dtype=dtype)
            self.set_parameter_variables(var=var)

        if self._parameter_variables['scale'] is None:
            scale_val = self.args['scale']
            scale = wrapper.make_variable(name='scale',
                                          shape=shape,
                                          trainable=True,
                                          initializer=const_init(scale_val),
                                          dtype=dtype)
            self.set_parameter_variables(scale=scale)

        if self._parameter_variables['offset'] is None:
            offset_val = self.args['offset']
            offset = wrapper.make_variable(name='offset',
                                           shape=shape,
                                           trainable=True,
                                           initializer=const_init(offset_val),
                                           dtype=dtype)
            self.set_parameter_variables(offset=offset)
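
In this variant the channel axis is fixed to 1 (NCHW layout), and _pattern is a
dimshuffle-style pattern, presumably used to broadcast the per-channel
statistics back over the full tensor. A standalone sketch of the three derived
values, assuming a made-up 4-D NCHW input shape:

input_shape = (32, 4, 84, 84)   # hypothetical NCHW batch
dim = len(input_shape)
shape = tuple(input_shape[i] for i in range(dim) if i == 1)      # -> (4,)
axes = tuple(i for i in range(dim) if not i == 1)                # -> (0, 2, 3)
pattern = tuple((0 if i == 1 else 'x') for i in range(dim))      # -> ('x', 0, 'x', 'x')
print(shape, axes, pattern)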
Example 12
    def _instantiate_parameters(self, input_shape):
        dim, fmt = len(input_shape), luchador.get_nn_conv_format()
        channel = 1 if dim == 2 or fmt == 'NCHW' else 3

        self._axes = tuple(i for i in range(dim) if not i == channel)
        shape = tuple(input_shape[i] for i in range(dim) if i == channel)

        const_init = fetch_initializer('ConstantInitializer')
        if self.get_parameter_variable('mean') is None:
            mean = wrapper.make_variable(name='mean',
                                         shape=shape,
                                         initializer=const_init(0),
                                         trainable=False)
            self.set_parameter_variables(mean=mean)

        if self.get_parameter_variable('var') is None:
            var = wrapper.make_variable(name='var',
                                        shape=shape,
                                        initializer=const_init(1),
                                        trainable=False)
            self.set_parameter_variables(var=var)

        if self.get_parameter_variable('scale') is None:
            scale = wrapper.make_variable(name='scale',
                                          shape=shape,
                                          trainable=True,
                                          initializer=const_init(
                                              self.args['scale']))
            self.set_parameter_variables(scale=scale)

        if self.get_parameter_variable('offset') is None:
            offset = wrapper.make_variable(name='offset',
                                           shape=shape,
                                           trainable=True,
                                           initializer=const_init(
                                               self.args['offset']))
            self.set_parameter_variables(offset=offset)
Example 13
    def _create_slot(self, initial_value, name):
        """Create slot variable independant to gradients and parameters

        Example use is the beta parameters in the Adam and Adamax optimizers.
        Only scalar values are supported.

        Parameters
        ----------
        initial_value : number
            Initial value of the resulting slot

        name : str
            The name of the slot.

        Returns
        -------
        Variable
            Wrapped Variable of the resulting slot variable.
        """
        init = fetch_initializer('ConstantInitializer')(initial_value)
        var = make_variable(
            name=name, shape=[], broadcastable=True, initializer=init)
        self._create_parameter_slot(name, var, train=False, serialize=True)
        return var.unwrap()
Example 14
    def _instantiate_parameters(self, input_shape, dtype):
        dim = len(input_shape)
        shape = tuple(input_shape[i] for i in range(dim) if i == 1)
        self._axes = tuple(i for i in range(dim) if not i == 1)
        self._pattern = tuple((0 if i == 1 else 'x') for i in range(dim))

        _LG.debug('    Shape: %s', shape)
        _LG.debug('     Axes: %s', self._axes)
        _LG.debug('  Pattern: %s', self._pattern)

        const_init = fetch_initializer('ConstantInitializer')
        if self._parameter_variables['mean'] is None:
            mean = wrapper.make_variable(
                name='mean', shape=shape, trainable=False,
                initializer=const_init(0), dtype=dtype)
            self.set_parameter_variables(mean=mean)

        if self._parameter_variables['var'] is None:
            var = wrapper.make_variable(
                name='var', shape=shape, trainable=False,
                initializer=const_init(1), dtype=dtype)
            self.set_parameter_variables(var=var)

        if self._parameter_variables['scale'] is None:
            scale_val = self.args['scale']
            scale = wrapper.make_variable(
                name='scale', shape=shape, trainable=True,
                initializer=const_init(scale_val), dtype=dtype)
            self.set_parameter_variables(scale=scale)

        if self._parameter_variables['offset'] is None:
            offset_val = self.args['offset']
            offset = wrapper.make_variable(
                name='offset', shape=shape, trainable=True,
                initializer=const_init(offset_val), dtype=dtype)
            self.set_parameter_variables(offset=offset)
Example 15
def _get_weight_init(config):
    config = config or {'typename': 'XavierInitializer'}
    return fetch_initializer(config['typename'])(**config.get('args', {}))
Example 16
def _get_filter_init(config):
    """Make filter initializer. Default to Xavier"""
    config = config or {'typename': 'XavierInitializer'}
    return fetch_initializer(
        config['typename'])(**config.get('args', {}))
Example 17
def _get_bias_init(config):
    config = config or {
        'typename': 'ConstantInitializer', 'args': {'value': 0.1}}
    return fetch_initializer(config['typename'])(**config.get('args', {}))
Example 18
def _get_bias_init(config):
    """Make bias initializer. Default to Constant (0.1)"""
    config = config or {
        'typename': 'ConstantInitializer', 'args': {'value': 0.1}}
    return fetch_initializer(
        config['typename'])(**config.get('args', {}))