Example #1
    def create_conv_layers(self, shape, conv_args):
        '''Creates a set of convolutional layers.

        Args:
            shape: Input shape as (dim_x, dim_y, dim_in).
            conv_args: List of tuples of convolutional layer arguments.

        Returns:
            tuple: the nn.Sequential of convolutional layers and the
                output shape as (dim_x, dim_y, dim_out).

        '''

        conv_layers = nn.Sequential()
        conv_args = conv_args or []

        dim_x, dim_y, dim_in = shape

        for i, (dim_out, f, s, p, batch_norm, dropout, nonlinearity,
                pool) in enumerate(conv_args):
            name = '({}/{})_{}'.format(dim_in, dim_out, i + 1)
            conv_block = nn.Sequential()

            if dim_out is not None:
                conv = nn.Conv2d(dim_in,
                                 dim_out,
                                 kernel_size=f,
                                 stride=s,
                                 padding=p,
                                 bias=not batch_norm)
                conv_block.add_module(name + 'conv', conv)
                dim_x, dim_y = self.next_size(dim_x, dim_y, f, s, p)
            else:
                dim_out = dim_in

            if dropout:
                conv_block.add_module(name + 'do', nn.Dropout2d(p=dropout))
            if batch_norm:
                bn = nn.BatchNorm2d(dim_out)
                conv_block.add_module(name + 'bn', bn)

            if nonlinearity:
                nonlinearity = get_nonlinearity(nonlinearity)
                conv_block.add_module(nonlinearity.__class__.__name__,
                                      nonlinearity)

            if pool:
                (pool_type, kernel, stride) = pool
                Pool = getattr(nn, pool_type)
                conv_block.add_module(name + 'pool',
                                      Pool(kernel_size=kernel, stride=stride))
                dim_x, dim_y = self.next_size(dim_x, dim_y, kernel, stride, 0)

            conv_layers.add_module(name, conv_block)

            dim_in = dim_out

        dim_out = dim_in

        return conv_layers, (dim_x, dim_y, dim_out)
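
For reference, a minimal sketch of how this method might be called. The conv_args format follows the tuple unpacking in the loop above, and the model object is an assumption for illustration, not part of the original code:

# Hypothetical usage; each conv_args tuple is
# (dim_out, kernel, stride, padding, batch_norm, dropout, nonlinearity, pool).
conv_args = [
    (64, 4, 2, 1, True, False, 'ReLU', None),
    (128, 4, 2, 1, True, 0.1, 'ReLU', ('MaxPool2d', 2, 2)),
]
conv_layers, (dim_x, dim_y, dim_out) = model.create_conv_layers((32, 32, 3), conv_args)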
Example #2
def test_get_nonlinearity(nonlinearity):
    """

    Args:
        nonlinearity(@pytest.fixture): dict

    Asserts: True if right instance of activation function is returned.

    """

    relu = get_nonlinearity(nonlinearity['relu'])
    tanh = get_nonlinearity(nonlinearity['tanh'])
    leakyrelu = get_nonlinearity(nonlinearity['leakyrelu'])
    sigmoid = get_nonlinearity(nonlinearity['sigmoid'])

    assert callable(sigmoid)
    assert callable(tanh)
    assert isinstance(relu, nn.modules.activation.ReLU)
    assert isinstance(leakyrelu, nn.modules.activation.LeakyReLU)
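
The nonlinearity fixture is not shown in this example. A minimal sketch of what it would need to provide, assuming get_nonlinearity accepts activation names as strings (the exact spec format is an assumption):

import pytest

@pytest.fixture
def nonlinearity():
    # Hypothetical fixture: maps the keys used in the test to activation names.
    return {
        'relu': 'ReLU',
        'tanh': 'Tanh',
        'leakyrelu': 'LeakyReLU',
        'sigmoid': 'Sigmoid',
    }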
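The get_nonlinearity helper under test is also not shown. One way it could be implemented, sketched under the assumption that names are resolved against torch.nn; the real implementation may differ:

import torch.nn as nn

def get_nonlinearity(name):
    # Hypothetical implementation: look the activation class up in torch.nn
    # and return an instance, e.g. 'ReLU' -> nn.ReLU().
    return getattr(nn, name)()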
Example #3
    def finish_block(self, block, shape, bn=False, ln=False, do=False, act=None, pool=None):
        '''Finishes a block.

        Adds batch norm or layer norm, dropout, activation, and pooling.

        Args:
            block (nn.Sequential): Block to add the finishing layers to.
            shape (tuple): Shape of the input.
            bn (bool): Whether to add batch normalization.
            ln (bool): Whether to add layer normalization.
            do (float): Dropout probability.
            act (str): Activation name.
            pool (tuple): Pooling, in the format (pool type, kernel size, stride).

        Returns:
            tuple: The output shape, updated if pooling was applied.

        '''
        if len(shape) == 1:
            BN = nn.BatchNorm1d
            DO = nn.Dropout
        elif len(shape) == 3:
            BN = nn.BatchNorm2d
            DO = nn.Dropout2d
        else:
            raise NotImplementedError('Shape {} not supported'.format(shape))
        LN = nn.LayerNorm

        if ln and bn:
            raise ValueError('Use only one sort of normalization.')

        dim_out = shape[-1]

        if do:
            block.add_module('do', DO(p=do))
        if bn:
            block.add_module('bn', BN(dim_out))
        if ln:
            block.add_module('ln', LN(dim_out))

        if act:
            nonlinearity = get_nonlinearity(act)
            block.add_module(nonlinearity.__class__.__name__, nonlinearity)

        if pool:
            if len(shape) == 1:
                raise ValueError('Cannot pool on 1d tensor.')
            (pool_type, kernel, stride) = pool
            Pool = getattr(nn, pool_type)
            block.add_module('pool', Pool(kernel_size=kernel, stride=stride))
            dim_x, dim_y, dim_out = shape
            dim_x, dim_y = self.next_conv_size(dim_x, dim_y, kernel, stride, 0)
            shape = (dim_x, dim_y, dim_out)

        return shape
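
A minimal sketch of how finish_block might be used when assembling a convolutional block; the model object and the preceding conv layer are assumptions for illustration:

import torch.nn as nn

# Hypothetical usage: finish a 2d conv block with batch norm, dropout,
# a ReLU, and 2x2 max pooling. shape is (dim_x, dim_y, channels).
block = nn.Sequential()
block.add_module('conv', nn.Conv2d(3, 64, kernel_size=4, stride=2, padding=1))
shape = model.finish_block(block, (16, 16, 64), bn=True, do=0.1,
                           act='ReLU', pool=('MaxPool2d', 2, 2))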
Example #4
    def create_linear_layers(self, dim_in, fc_args):
        '''Creates a set of fully-connected layers.

        Args:
            dim_in: Number of input units.
            fc_args: List of tuples of fully-connected layer arguments.

        Returns:
            tuple: the nn.Sequential of linear layers and the output
                dimension.

        '''

        fc_layers = nn.Sequential()
        fc_args = fc_args or []

        for i, (dim_out, batch_norm, dropout,
                nonlinearity) in enumerate(fc_args):
            name = '({}/{})_{}'.format(dim_in, dim_out, i + 1)
            fc_block = nn.Sequential()

            if dim_out is not None:
                fc_block.add_module(name + 'fc', nn.Linear(dim_in, dim_out))
            else:
                dim_out = dim_in

            if dropout:
                fc_block.add_module(name + 'do', nn.Dropout(p=dropout))
            if batch_norm:
                bn = nn.BatchNorm1d(dim_out)
                fc_block.add_module(name + 'bn', bn)

            if nonlinearity:
                nonlinearity = get_nonlinearity(nonlinearity)
                fc_block.add_module(nonlinearity.__class__.__name__,
                                    nonlinearity)

            fc_layers.add_module(name, fc_block)

            dim_in = dim_out

        return fc_layers, dim_in
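
For symmetry with the convolutional version in Example #1, a sketch of calling create_linear_layers; the fc_args format follows the tuple unpacking in the loop, and the model object is an assumption:

# Hypothetical usage; each fc_args tuple is
# (dim_out, batch_norm, dropout, nonlinearity).
fc_args = [
    (1024, True, False, 'ReLU'),
    (10, False, False, None),
]
fc_layers, dim_out = model.create_linear_layers(4096, fc_args)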