Example 1
    def forward(self, input, expand_ratio=None, channel=None):
        self.cur_config = {'expand_ratio': expand_ratio, 'channel': channel}
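        # choose the active output width from expand_ratio / channel, slice weight and bias
        # to that width, then run matmul + optional bias + activation (dygraph only)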
        ### weight: (Cin, Cout)
        in_nc = int(input.shape[-1])
        assert (
            expand_ratio is None or channel is None
        ), "expand_ratio and channel cannot both be specified at the same time."
        if expand_ratio is not None:
            out_nc = int(expand_ratio * self.base_output_dim)
        elif channel is not None:
            out_nc = int(channel)
        else:
            out_nc = self.output_dim

        weight = self.weight[:in_nc, :out_nc]
        if self._bias_attr is not False:
            bias = self.bias[:out_nc]

        pre_bias = _varbase_creator(dtype=input.dtype)
        core.ops.matmul(input, weight, pre_bias, 'transpose_X', False,
                        'transpose_Y', False, "alpha", 1)
        if self._bias_attr is not False:
            pre_act = dygraph_utils._append_bias_in_dygraph(
                pre_bias, bias, axis=len(input.shape) - 1)
        else:
            pre_act = pre_bias

        return dygraph_utils._append_activation_in_dygraph(pre_act, self._act)
Example 2
    def forward(self, input, config):
        in_nc = int(input.shape[1])
        out_nc = int(config['channel'])
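        # the transposed-conv filter is indexed (Cin, Cout, kH, kW) here; keep only the active channels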
        weight = self.weight[:in_nc, :out_nc, :, :]
        if in_dygraph_mode():
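            # dygraph fast path: call the op directly, then append bias / activation in Python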
            op = getattr(core.ops, self._op_type)
            out = op(input, weight, 'output_size', self._output_size,
                     'strides', self._stride, 'paddings', self._padding,
                     'dilations', self._dilation, 'groups', self._groups,
                     'use_cudnn', self._use_cudnn)
            pre_bias = out
            if self.bias is not None:
                bias = self.bias[:out_nc]
                pre_act = dygraph_utils._append_bias_in_dygraph(
                    pre_bias, bias, 1)
            else:
                pre_act = pre_bias

            return dygraph_utils._append_activation_in_dygraph(pre_act,
                                                               act=self._act)

        check_variable_and_dtype(input, 'input',
                                 ['float16', 'float32', 'float64'],
                                 "SuperConv2DTranspose")

        inputs = {'Input': [input], 'Filter': [weight]}
        attrs = {
            'output_size': self._output_size,
            'strides': self._stride,
            'paddings': self._padding,
            'dilations': self._dilation,
            'groups': self._groups,
            'use_cudnn': self._use_cudnn
        }
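        # static-graph path: declare the output variable and append the ops through the LayerHelper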

        pre_bias = self._helper.create_variable_for_type_inference(
            dtype=input.dtype)
        self._helper.append_op(type=self._op_type,
                               inputs=inputs,
                               outputs={'Output': pre_bias},
                               attrs=attrs)

        if self.bias is not None:
            bias = self.bias[:out_nc]
            pre_act = self._helper.create_variable_for_type_inference(
                dtype=self._dtype)
            self._helper.append_op(type='elementwise_add',
                                   inputs={
                                       'X': [pre_bias],
                                       'Y': [bias]
                                   },
                                   outputs={'Out': [pre_act]},
                                   attrs={'axis': 1})
        else:
            pre_act = pre_bias

        out = self._helper.append_activation(pre_act, act=self._act)
        return out
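
The pattern shared by these elastic ("super") layers is to keep one full-size parameter tensor and slice it down to the currently active width on every call. A minimal NumPy sketch of that slicing idea for a linear layer (hypothetical names and shapes, not the Paddle API):

import numpy as np

# full "super" weight for the largest configuration: (Cin_max, Cout_max)
super_weight = np.random.randn(64, 128).astype("float32")

def elastic_linear(x, out_nc):
    """Apply a linear layer that uses only the first out_nc output channels."""
    in_nc = x.shape[-1]                # the active input width comes from the data
    w = super_weight[:in_nc, :out_nc]  # slice the shared parameter tensor
    return x @ w                       # (batch, in_nc) @ (in_nc, out_nc)

y = elastic_linear(np.ones((2, 64), dtype="float32"), out_nc=96)
print(y.shape)  # (2, 96)
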
Example 3
    def forward(self,
                input,
                kernel_size=None,
                expand_ratio=None,
                channel=None):
        self.cur_config = {
            'kernel_size': kernel_size,
            'expand_ratio': expand_ratio,
            'channel': channel
        }
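        # elastic conv: pick the active out_nc and kernel size, crop the filter, then dispatch
        # to conv2d or depthwise_conv2d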
        in_nc = int(input.shape[1])
        assert (
            expand_ratio is None or channel is None
        ), "expand_ratio and channel cannot both be specified at the same time."
        if expand_ratio is not None:
            out_nc = int(expand_ratio * self.base_channel)
        elif channel is not None:
            out_nc = int(channel)
        else:
            out_nc = self._num_filters
        ks = int(self._filter_size[0]) if kernel_size is None else int(kernel_size)

        groups, weight_in_nc, weight_out_nc = self.get_groups_in_out_nc(
            in_nc, out_nc)
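        # build the active sub-filter (channels and kernel size) from the full "super" filter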

        weight = self.get_active_filter(weight_in_nc, weight_out_nc, ks)

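        # when the kernel size is elastic, recompute the padding that keeps the "same" spatial size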
        if kernel_size is not None or 'kernel_size' in self.candidate_config:
            padding = convert_to_list(get_same_padding(ks), 2)
        else:
            padding = self._padding

        if self._l_type == 'conv2d':
            attrs = ('strides', self._stride, 'paddings', padding, 'dilations',
                     self._dilation, 'groups', groups if groups else 1,
                     'use_cudnn', self._use_cudnn)
            out = core.ops.conv2d(input, weight, *attrs)
        elif self._l_type == 'depthwise_conv2d':
            attrs = ('strides', self._stride, 'paddings', padding, 'dilations',
                     self._dilation, 'groups',
                     groups if groups else self._groups, 'use_cudnn',
                     self._use_cudnn)
            out = core.ops.depthwise_conv2d(input, weight, *attrs)
        else:
            raise ValueError("conv type error")

        pre_bias = out
        out_nc = int(pre_bias.shape[1])
        if self.bias is not None:
            bias = self.bias[:out_nc]
            pre_act = dygraph_utils._append_bias_in_dygraph(pre_bias, bias, 1)
        else:
            pre_act = pre_bias

        return dygraph_utils._append_activation_in_dygraph(pre_act, self._act)
Example 4
    def forward(self,
                input,
                kernel_size=None,
                expand_ratio=None,
                channel=None):
        if not in_dygraph_mode():
            _logger.error("static graph mode is not supported")

        self.cur_config = {
            'kernel_size': kernel_size,
            'expand_ratio': expand_ratio,
            'channel': channel
        }
        in_nc = int(input.shape[1])
        assert (
            expand_ratio is None or channel is None
        ), "expand_ratio and channel cannot both be specified at the same time."
        if expand_ratio is not None:
            out_nc = int(expand_ratio * self.base_channel)
        elif channel is not None:
            out_nc = int(channel)
        else:
            out_nc = self._num_filters

        ks = int(self._filter_size[0]) if kernel_size is None else int(kernel_size)

        groups, weight_in_nc, weight_out_nc = self.get_groups_in_out_nc(
            in_nc, out_nc)

        weight = self.get_active_filter(weight_in_nc, weight_out_nc, ks)
        if kernel_size is not None or 'kernel_size' in self.candidate_config:
            padding = convert_to_list(get_same_padding(ks), 2)
        else:
            padding = self._padding

        op = getattr(core.ops, self._op_type)
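        # run the (transposed) conv op with the cropped filter and the recomputed padding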
        out = op(input, weight, 'output_size', self._output_size, 'strides',
                 self._stride, 'paddings', padding, 'dilations',
                 self._dilation, 'groups', groups, 'use_cudnn',
                 self._use_cudnn)
        pre_bias = out
        out_nc = int(pre_bias.shape[1])
        if self.bias is not None:
            bias = self.bias[:out_nc]
            pre_act = dygraph_utils._append_bias_in_dygraph(pre_bias, bias, 1)
        else:
            pre_act = pre_bias

        return dygraph_utils._append_activation_in_dygraph(pre_act,
                                                           act=self._act)
Example 5
    def forward(self, input):
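        # quantize-dequantize ("fake quant") the input and the weight before the real convolution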
        quant_input = self._fake_quant_input(input)
        quant_weight = self._fake_quant_weight(self.weight)

        if in_dygraph_mode() and self._l_type == 'conv2d':
            attrs = ('strides', self._stride, 'paddings', self._padding,
                     'dilations', self._dilation, 'groups',
                     self._groups if self._groups else 1, 'use_cudnn',
                     self._use_cudnn)
            pre_bias = core.ops.conv2d(quant_input, quant_weight, *attrs)

            pre_act = dygraph_utils._append_bias_in_dygraph(
                pre_bias, self.bias, 1)
            return dygraph_utils._append_activation_in_dygraph(
                pre_act, self._act)
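        # static-graph path: append conv, bias-add and activation ops through the LayerHelper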
        check_variable_and_dtype(quant_input, 'input',
                                 ['float16', 'float32', 'float64'],
                                 'QuantizedConv2D')
        attrs = {
            'strides': self._stride,
            'paddings': self._padding,
            'dilations': self._dilation,
            'groups': self._groups if self._groups else 1,
            'use_cudnn': self._use_cudnn,
            'use_mkldnn': False,
        }
        pre_bias = self._helper.create_variable_for_type_inference(
            dtype=self._dtype)

        self._helper.append_op(type=self._l_type,
                               inputs={
                                   'Input': quant_input,
                                   'Filter': quant_weight,
                               },
                               outputs={"Output": pre_bias},
                               attrs=attrs)

        if self.bias is not None:
            pre_act = self._helper.create_variable_for_type_inference(
                dtype=self._dtype)
            self._helper.append_op(type='elementwise_add',
                                   inputs={
                                       'X': [pre_bias],
                                       'Y': [self.bias]
                                   },
                                   outputs={'Out': [pre_act]},
                                   attrs={'axis': 1})
        else:
            pre_act = pre_bias

        return self._helper.append_activation(pre_act, act=self._act)
Example 6
    def forward(self, input):
        quant_input = self._fake_quant_input(input)
        quant_weight = self._fake_quant_weight(self.weight)
        if in_dygraph_mode():
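            # dygraph: matmul on the fake-quantized tensors, then bias and activation in Python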
            pre_bias = _varbase_creator(dtype=input.dtype)
            core.ops.matmul(quant_input, quant_weight, pre_bias, 'transpose_X',
                            False, 'transpose_Y', False, "alpha", 1)
            pre_act = dygraph_utils._append_bias_in_dygraph(
                pre_bias, self.bias, axis=len(input.shape) - 1)

            return dygraph_utils._append_activation_in_dygraph(
                pre_act, self._act)

        check_variable_and_dtype(input, 'input',
                                 ['float16', 'float32', 'float64'],
                                 "QuantizedLinear")
        attrs = {
            "transpose_X": False,
            "transpose_Y": False,
            "alpha": 1,
        }
        inputs = {"X": [quant_input], "Y": [quant_weight]}
        mul_out = self._helper.create_variable_for_type_inference(self._dtype)

        self._helper.append_op(type="matmul",
                               inputs=inputs,
                               outputs={"Out": [mul_out]},
                               attrs=attrs)
        if self.bias is not None:
            pre_activation = self._helper.create_variable_for_type_inference(
                dtype=self._dtype)
            self._helper.append_op(type='elementwise_add',
                                   inputs={
                                       'X': [mul_out],
                                       'Y': [self.bias]
                                   },
                                   outputs={'Out': [pre_activation]},
                                   attrs={'axis': len(input.shape) - 1})
        else:
            pre_activation = mul_out
        return self._helper.append_activation(pre_activation, act=self._act)
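
Both quantized layers (Examples 5 and 6) first pass the input and the weight through a quantize-dequantize ("fake quant") step, so training sees the rounding error of low-bit quantization while every tensor stays in floating point. A minimal NumPy sketch of symmetric per-tensor fake quantization, for illustration only (this is not Paddle's fake-quant op):

import numpy as np

def fake_quant(x, bits=8):
    """Symmetric per-tensor quantize-dequantize: float -> int grid -> float."""
    qmax = 2 ** (bits - 1) - 1                        # e.g. 127 for 8 bits
    scale = max(float(np.abs(x).max()) / qmax, 1e-8)  # map the observed range to the int grid
    q = np.clip(np.round(x / scale), -qmax, qmax)     # round onto the grid
    return (q * scale).astype(x.dtype)                # back to float, now carrying rounding error

x = np.linspace(-1.0, 1.0, 5, dtype="float32")
print(fake_quant(x))
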
Example 7
    def forward(self, input, expand_ratio=None, channel=None):
        self.cur_config = {'expand_ratio': expand_ratio, 'channel': channel}
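        # separable-style block: depthwise conv (conv[0], groups == in_nc), a norm layer (conv[1]),
        # then a conv (conv[2]) mapping in_nc -> out_nc; every weight is sliced to the active widths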
        in_nc = int(input.shape[1])
        assert (
            expand_ratio is None or channel is None
        ), "expand_ratio and channel cannot both be specified at the same time."
        if expand_ratio is not None:
            out_nc = int(expand_ratio * self.base_output_dim)
        elif channel is not None:
            out_nc = int(channel)
        else:
            out_nc = self.conv[0]._num_filters

        weight = self.conv[0].weight[:in_nc]
        ###  conv1
        if self.conv[0]._l_type == 'conv2d':
            attrs = ('strides', self.conv[0]._stride, 'paddings',
                     self.conv[0]._padding, 'dilations', self.conv[0]._dilation,
                     'groups', in_nc, 'use_cudnn', self.conv[0]._use_cudnn)
            out = core.ops.conv2d(input, weight, *attrs)
        elif self.conv[0]._l_type == 'depthwise_conv2d':
            attrs = ('strides', self.conv[0]._stride, 'paddings',
                     self.conv[0]._padding, 'dilations', self.conv[0]._dilation,
                     'groups', in_nc, 'use_cudnn', self.conv[0]._use_cudnn)
            out = core.ops.depthwise_conv2d(input, weight, *attrs)
        else:
            raise ValueError("conv type error")

        pre_bias = out
        if self.conv[0].bias is not None:
            bias = self.conv[0].bias[:in_nc]
            pre_act = dygraph_utils._append_bias_in_dygraph(pre_bias, bias, 1)
        else:
            pre_act = pre_bias

        conv0_out = dygraph_utils._append_activation_in_dygraph(
            pre_act, self.conv[0]._act)

        norm_out = self.conv[1](conv0_out)

        weight = self.conv[2].weight[:out_nc, :in_nc, :, :]

        if self.conv[2]._l_type == 'conv2d':
            attrs = ('strides', self.conv[2]._stride, 'paddings',
                     self.conv[2]._padding, 'dilations', self.conv[2]._dilation,
                     'groups', self.conv[2]._groups if self.conv[2]._groups else
                     1, 'use_cudnn', self.conv[2]._use_cudnn)
            out = core.ops.conv2d(norm_out, weight, *attrs)
        elif self.conv[2]._l_type == 'depthwise_conv2d':
            attrs = ('strides', self.conv[2]._stride, 'paddings',
                     self.conv[2]._padding, 'dilations', self.conv[2]._dilation,
                     'groups', self.conv[2]._groups, 'use_cudnn',
                     self.conv[2]._use_cudnn)
            out = core.ops.depthwise_conv2d(norm_out, weight, *attrs)
        else:
            raise ValueError("conv type error")

        pre_bias = out
        if self.conv[2].bias is not None:
            bias = self.conv[2].bias[:out_nc]
            pre_act = dygraph_utils._append_bias_in_dygraph(pre_bias, bias, 1)
        else:
            pre_act = pre_bias

        conv1_out = dygraph_utils._append_activation_in_dygraph(
            pre_act, self.conv[2]._act)

        return conv1_out
    def forward(self, input, config):
        in_nc = int(input.shape[1])
        out_nc = config['channel']
        weight = self.weight[:out_nc, :in_nc, :, :]
        #print('super conv shape', weight.shape)
        if in_dygraph_mode():
            if self._l_type == 'conv2d':
                attrs = ('strides', self._stride, 'paddings', self._padding,
                         'dilations', self._dilation, 'groups',
                         self._groups if self._groups else 1, 'use_cudnn',
                         self._use_cudnn)
                out = core.ops.conv2d(input, weight, *attrs)
            elif self._l_type == 'depthwise_conv2d':
                attrs = ('strides', self._stride, 'paddings', self._padding,
                         'dilations', self._dilation, 'groups', self._groups,
                         'use_cudnn', self._use_cudnn)
                out = core.ops.depthwise_conv2d(input, weight, *attrs)
            else:
                raise ValueError("conv type error")

            pre_bias = out
            if self.bias is not None:
                bias = self.bias[:out_nc]
                pre_act = dygraph_utils._append_bias_in_dygraph(
                    pre_bias, bias, 1)
            else:
                pre_act = pre_bias

            return dygraph_utils._append_activation_in_dygraph(
                pre_act, self._act)
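        # static-graph path: declare inputs and attrs, then append the conv op through the LayerHelper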

        inputs = {'Input': [input], 'Filter': [weight]}
        attrs = {
            'strides': self._stride,
            'paddings': self._padding,
            'dilations': self._dilation,
            'groups': self._groups if self._groups else 1,
            'use_cudnn': self._use_cudnn,
            'use_mkldnn': False,
        }
        check_variable_and_dtype(input, 'input',
                                 ['float16', 'float32', 'float64'],
                                 'SuperConv2D')
        pre_bias = self._helper.create_variable_for_type_inference(
            dtype=self._dtype)

        self._helper.append_op(type=self._l_type,
                               inputs={
                                   'Input': input,
                                   'Filter': weight,
                               },
                               outputs={"Output": pre_bias},
                               attrs=attrs)

        if self.bias is not None:
            bias = self.bias[:out_nc]
            pre_act = self._helper.create_variable_for_type_inference(
                dtype=self._dtype)
            self._helper.append_op(type='elementwise_add',
                                   inputs={
                                       'X': [pre_bias],
                                       'Y': [bias]
                                   },
                                   outputs={'Out': [pre_act]},
                                   attrs={'axis': 1})
        else:
            pre_act = pre_bias

        # Currently, we don't support inplace in dygraph mode
        return self._helper.append_activation(pre_act, act=self._act)
    def forward(self, input, config):
        in_nc = int(input.shape[1])
        out_nc = int(config['channel'])
        weight = self.conv[0].weight[:in_nc]
        ###  conv1
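        # NOTE: only the dygraph branch is implemented here; conv0_out / conv1_out are defined
        # inside the in_dygraph_mode() blocks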
        if in_dygraph_mode():
            if self.conv[0]._l_type == 'conv2d':
                attrs = ('strides', self.conv[0]._stride, 'paddings',
                         self.conv[0]._padding, 'dilations',
                         self.conv[0]._dilation, 'groups', in_nc, 'use_cudnn',
                         self.conv[0]._use_cudnn)
                out = core.ops.conv2d(input, weight, *attrs)
            elif self.conv[0]._l_type == 'depthwise_conv2d':
                attrs = ('strides', self.conv[0]._stride, 'paddings',
                         self.conv[0]._padding, 'dilations',
                         self.conv[0]._dilation, 'groups', in_nc, 'use_cudnn',
                         self.conv[0]._use_cudnn)
                out = core.ops.depthwise_conv2d(input, weight, *attrs)
            else:
                raise ValueError("conv type error")

            pre_bias = out
            if self.conv[0].bias is not None:
                bias = self.conv[0].bias[:in_nc]
                pre_act = dygraph_utils._append_bias_in_dygraph(
                    pre_bias, bias, 1)
            else:
                pre_act = pre_bias

            conv0_out = dygraph_utils._append_activation_in_dygraph(
                pre_act, self.conv[0]._act)

        norm_out = self.conv[1](conv0_out)

        weight = self.conv[2].weight[:out_nc, :in_nc, :, :]

        if in_dygraph_mode():
            if self.conv[2]._l_type == 'conv2d':
                attrs = ('strides', self.conv[2]._stride, 'paddings',
                         self.conv[2]._padding, 'dilations',
                         self.conv[2]._dilation, 'groups',
                         self.conv[2]._groups if self.conv[2]._groups else 1,
                         'use_cudnn', self.conv[2]._use_cudnn)
                out = core.ops.conv2d(norm_out, weight, *attrs)
            elif self.conv[2]._l_type == 'depthwise_conv2d':
                attrs = ('strides', self.conv[2]._stride, 'paddings',
                         self.conv[2]._padding, 'dilations',
                         self.conv[2]._dilation, 'groups',
                         self.conv[2]._groups, 'use_cudnn',
                         self.conv[2]._use_cudnn)
                out = core.ops.depthwise_conv2d(norm_out, weight, *attrs)
            else:
                raise ValueError("conv type error")

            pre_bias = out
            if self.conv[2].bias is not None:
                bias = self.conv[2].bias[:out_nc]
                pre_act = dygraph_utils._append_bias_in_dygraph(
                    pre_bias, bias, 1)
            else:
                pre_act = pre_bias

            conv1_out = dygraph_utils._append_activation_in_dygraph(
                pre_act, self.conv[2]._act)
        return conv1_out