def forward(self, inputs, drop_connect_rate=None):
    """
    :param inputs: input tensor
    :param drop_connect_rate: drop connect rate (float, between 0 and 1)
    :return: output of block
    """

    # Expansion and Depthwise Convolution
    x = inputs
    if self._block_args.expand_ratio != 1:
        x = self._swish(self._bn0(self._expand_conv(inputs)))
    x = self._swish(self._bn1(self._depthwise_conv(x)))

    # Squeeze and Excitation
    if self.has_se:
        x_squeezed = F.adaptive_avg_pool2d(x, 1)
        x_squeezed = self._se_expand(self._swish(self._se_reduce(x_squeezed)))
        x = torch.sigmoid(x_squeezed) * x

    x = self._bn2(self._project_conv(x))

    # Skip connection and drop connect
    input_filters, output_filters = self._block_args.input_filters, self._block_args.output_filters
    if self.id_skip and self._block_args.stride == 1 and input_filters == output_filters:
        if drop_connect_rate:
            x = drop_connect(x, p=drop_connect_rate, training=self.training)
        x = x + inputs  # skip connection
    return x
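All of these blocks gate the residual branch with drop connect (stochastic depth), but the helper itself is not shown. Below is a minimal PyTorch sketch of what drop_connect typically computes, matching the drop_connect(x, p=..., training=...) signature used above; the exact implementation in the source repository may differ.

import torch

def drop_connect(inputs, p, training):
    """Sketch of the helper called above; the repo's actual version may differ.

    Randomly drops the entire residual branch per sample with probability p.
    Surviving samples are scaled by 1 / keep_prob so the expected activation
    magnitude is unchanged; at inference time the branch is always kept.
    """
    if not training:
        return inputs
    keep_prob = 1.0 - p
    # One Bernoulli draw per sample, broadcast over C, H, W.
    random_tensor = keep_prob + torch.rand(
        (inputs.shape[0], 1, 1, 1), dtype=inputs.dtype, device=inputs.device)
    binary_mask = torch.floor(random_tensor)  # 1.0 with prob keep_prob, else 0.0
    return inputs / keep_prob * binary_mask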
def forward(self, x):
    residual = x

    # Point-wise expansion
    x = self.conv_pw(x)
    x = self.bn1(x)
    x = self.act_fn(x, inplace=True)

    # Depth-wise convolution
    x = self.conv_dw(x)
    x = self.bn2(x)
    x = self.act_fn(x, inplace=True)

    # Squeeze-and-excitation
    x = self.se(x)

    # Point-wise linear projection
    x = self.conv_pwl(x)
    x = self.bn3(x)

    if self.has_residual:
        if self.drop_connect_rate > 0.:
            x = drop_connect(x, self.training, self.drop_connect_rate)
        x += residual
    return x
def forward(self, x):
    residual = x

    # Depth-wise convolution
    x = self.conv_dw(x)
    x = self.bn1(x)
    x = self.act_fn(x, inplace=True)

    # Squeeze-and-excitation
    x = self.se(x)

    # Point-wise linear projection (no expansion in this depthwise-separable block)
    x = self.conv_pw(x)
    x = self.bn2(x)

    if self.has_residual:
        if self.drop_connect_rate > 0.:
            x = drop_connect(x, self.training, self.drop_connect_rate)
        x += residual
    return x
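The two blocks above call self.se as a single module instead of inlining the squeeze-and-excitation math the way the first snippet does. For reference, here is a hypothetical stand-alone SqueezeExcite module consistent with that call; the class name, the se_ratio default, and the ReLU in the bottleneck are assumptions for illustration, not the source repository's exact definition.

import torch
import torch.nn as nn
import torch.nn.functional as F

class SqueezeExcite(nn.Module):
    """Assumed SE module: squeeze (global pool), excite (two 1x1 convs), gate."""

    def __init__(self, channels, se_ratio=0.25):  # se_ratio default is assumed
        super().__init__()
        reduced = max(1, int(channels * se_ratio))
        self.conv_reduce = nn.Conv2d(channels, reduced, kernel_size=1)
        self.conv_expand = nn.Conv2d(reduced, channels, kernel_size=1)

    def forward(self, x):
        x_se = x.mean((2, 3), keepdim=True)      # squeeze: N x C x 1 x 1
        x_se = F.relu(self.conv_reduce(x_se))    # excite: bottleneck as 1x1 convs
        x_se = self.conv_expand(x_se)
        return x * torch.sigmoid(x_se)           # gate each channel of x

Because the module returns a tensor of the same shape as its input, it drops into the residual branch transparently, e.g. SqueezeExcite(24)(torch.randn(2, 24, 56, 56)) keeps the 2 x 24 x 56 x 56 shape.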
def call(self, inputs, training=True, drop_connect_rate=None):
  """Implementation of call().

  Args:
    inputs: the input tensor.
    training: boolean, whether the model is constructed for training.
    drop_connect_rate: float, between 0 and 1, drop connect rate.

  Returns:
    An output tensor.
  """
  tf.logging.info('Block input: %s shape: %s' % (inputs.name, inputs.shape))
  if self._block_args.expand_ratio != 1:
    x = self._relu_fn(
        self._bn0(self._expand_conv(inputs), training=training))
  else:
    x = inputs
  tf.logging.info('Expand: %s shape: %s' % (x.name, x.shape))

  x = self._relu_fn(self._bn1(self._depthwise_conv(x), training=training))
  tf.logging.info('DWConv: %s shape: %s' % (x.name, x.shape))

  if self._has_se:
    with tf.variable_scope('se'):
      x = self._call_se(x)

  self.endpoints = {'expansion_output': x}

  x = self._bn2(self._project_conv(x), training=training)
  if self._block_args.id_skip:
    if all(
        s == 1 for s in self._block_args.strides
    ) and self._block_args.input_filters == self._block_args.output_filters:
      # Only apply drop connect if the skip connection is present.
      if drop_connect_rate:
        x = utils.drop_connect(x, training, drop_connect_rate)
      x = tf.add(x, inputs)
  tf.logging.info('Project: %s shape: %s' % (x.name, x.shape))
  return x
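The TensorFlow block delegates to utils.drop_connect(x, training, drop_connect_rate). A minimal TF1-style sketch of such a helper, mirroring the PyTorch version earlier in this section, might look like the following; it is an assumed implementation written against the same TF1 API the snippet above uses (tf.logging, tf.variable_scope), not the contents of the repository's utils module.

import tensorflow as tf  # TF1-style API, matching the snippet above

def drop_connect(inputs, is_training, drop_connect_rate):
  """Assumed helper; the actual utils.drop_connect may differ.

  Drops the residual branch per sample and rescales survivors by keep_prob.
  """
  if not is_training:
    return inputs
  keep_prob = 1.0 - drop_connect_rate
  batch_size = tf.shape(inputs)[0]
  # One Bernoulli draw per sample, broadcast over the remaining dims.
  random_tensor = keep_prob + tf.random_uniform(
      [batch_size, 1, 1, 1], dtype=inputs.dtype)
  binary_mask = tf.floor(random_tensor)
  return (inputs / keep_prob) * binary_mask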