def call(self, x):
    """Forward function.

    :param x: input tensor
    :type x: tensor
    :return: the output of block
    :rtype: tensor
    """
    # When input channels differ from the growth rate, project first;
    # otherwise feed the input straight into the dense chain.
    if self.InChan != self.G:
        x_InC = self.InConv(x)
        stem = x_InC
    else:
        x_InC = None
        stem = x
    grown = self.Convs[0](stem)
    x_conc = ops.concat((stem, grown))
    x_in = self.ShrinkConv[0](x_conc)
    # Dense growth: each step concatenates new features onto the running
    # map, then shrinks back down.
    for step in range(1, self.C):
        grown = self.Convs[step](x_in)
        x_conc = ops.concat((x_conc, grown))
        x_in = self.ShrinkConv[step](x_conc)
    # Residual connection chosen by channel compatibility.
    if self.OutChan == self.InChan:
        return x + x_in
    if self.OutChan == self.G:
        return x_InC + x_in
    return self.OutConv(x) + x_in
def call(self, s0, s1, weights=None, drop_path_prob=0):
    """Forward function of Cell.

    :param s0: feature map of previous of previous cell
    :type s0: torch tensor
    :param s1: feature map of previous cell
    :type s1: torch tensor
    :param weights: weights of operations in cell
    :type weights: torch tensor, 2 dimension
    :param drop_path_prob: probability of dropping a path; 0 disables drop path
    :type drop_path_prob: float
    :return: cell output
    :rtype: torch tensor
    """
    s0 = self.preprocess0(s0)
    s1 = self.preprocess1(s1)
    states = [s0, s1]
    # Hoisted out of the loops: the original rebuilt
    # list(self.op_list.children()) for every edge, which made the
    # forward pass quadratic in the number of operations.
    op_children = list(self.op_list.children())
    idx = 0
    for i in range(self.steps):
        hlist = []
        for j, inp in enumerate(self.out_inp_list[i]):
            op = op_children[idx + j]
            if weights is None:
                h = op(states[inp])
            else:
                h = op(states[inp], weights[idx + j])
            # Apply drop path only to non-identity ops so skip
            # connections stay intact.
            if drop_path_prob > 0. and not isinstance(
                    list(op.children())[0], ops.Identity):
                h = ops.drop_path(h, drop_path_prob)
            hlist.append(h)
        s = sum(hlist)
        states.append(s)
        idx += len(self.out_inp_list[i])
    states_list = tuple([states[i] for i in self._concat])
    return ops.concat(states_list)
def call(self, x, **kwargs):
    """call."""
    first = x[self.num1]
    second = x[self.num2]
    if self.conv is not None:
        first = self.conv(first)
    # Mutates the input container in place and returns it.
    x[self.num2] = ops.concat([first, second], 1)
    return x
def call(self, inputs):
    """Override compile function, connect models into a seq.

    Feed ``inputs`` to every child model and concatenate their outputs.

    :param inputs: input tensor shared by all child models
    :return: concatenated child outputs, or None when there are no children
    """
    models = list(self.children())
    if not models:
        return None
    # The original enumerated the models but never used the index.
    outputs = [model(inputs) for model in models]
    return ops.concat(outputs)
def call(self, inputs):
    """Calculate the output of the model.

    :param x: input tensor
    :return: output tensor of the model
    """
    x = inputs
    collected = ()
    # Run the blocks sequentially, keeping every intermediate output.
    for block in self.blocks:
        x = block(x)
        collected = collected + (x,)
    return ops.concat(collected)
def call(self, inputs):
    """Calculate the output of the model.

    :param x: input tensor
    :return: output tensor of the model
    """
    features = []
    x = inputs
    # Chain the layers, collecting each layer's output for the final concat.
    for layer in self.layers.children():
        x = layer(x)
        features.append(x)
    return ops.concat(tuple(features))
def call(self, s0, s1, weights=None, drop_path_prob=0, selected_idxs=None):
    """Forward function of Cell.

    :param s0: feature map of previous of previous cell
    :type s0: torch tensor
    :param s1: feature map of previous cell
    :type s1: torch tensor
    :param weights: weights of operations in cell
    :type weights: torch tensor, 2 dimension
    :param drop_path_prob: probability of dropping a path; 0 disables drop path
    :param selected_idxs: per-edge decisions (-1 undecided, 0 zero op,
        otherwise the selected op index), or None during plain search
    :return: cell output
    :rtype: torch tensor
    """
    s0 = self.preprocess0(s0)
    s1 = self.preprocess1(s1)
    states = [s0, s1]
    offset = 0
    for step in range(self.steps):
        partials = []
        for j, inp in enumerate(self.out_inp_list[step]):
            edge = offset + j
            op = self.oplist[edge]
            if selected_idxs is None:
                # Search phase: run the (possibly weighted) mixed op.
                if weights is None:
                    h = op(states[inp])
                else:
                    h = op(states[inp], weights[edge])
                # Drop path only on non-identity ops.
                if drop_path_prob > 0. and not isinstance(
                        list(op.children())[0], ops.Identity):
                    h = ops.drop_path(h, drop_path_prob)
                partials.append(h)
            elif selected_idxs[edge] == -1:
                # Undecided mixed edge: still weighted.
                partials.append(op(states[inp], weights[edge]))
            elif selected_idxs[edge] == 0:
                # Zero operation: contributes nothing.
                continue
            else:
                # Decided edge: run only the selected candidate op.
                partials.append(op(states[inp], None, selected_idxs[edge]))
        # NOTE(review): the original deliberately replaced sum() with this
        # manual in-place accumulation; preserved as-is.
        s = partials[0]
        for extra in partials[1:]:
            s += extra
        states.append(s)
        offset += len(self.out_inp_list[step])
    concat_states = ()
    for i in self._concat:
        concat_states += (states[i],)
    return ops.concat(concat_states)
def call(self, x):
    """Forward function.

    :param x: input tensor
    :type x: tensor
    :return: the output of block
    :rtype: tensor
    """
    if self.data_format == "channels_first":
        out = self.conv(channel_shuffle(x, groups=self.shgroup))
    else:
        # channels_last: shuffle/convolve in NCHW layout, then permute
        # both the passthrough and the conv output back to NHWC.
        nchw = ops.Permute([0, 3, 1, 2])(x)
        out = self.conv(channel_shuffle(nchw, groups=self.shgroup))
        x = ops.Permute([0, 2, 3, 1])(nchw)
        out = ops.Permute([0, 2, 3, 1])(out)
    # NOTE(review): ops.concat is called with its default axis here —
    # confirm that axis is the channel dimension for both data formats.
    return ops.concat((x, out))
def call(self, x):
    """Forward x.

    Aggregate the feature maps selected by ``self.collect_inds``: for
    each pair, bilinearly upsample the spatially smaller map to the
    larger one, merge by concatenation (``self.agg_concat``) or
    addition, and apply ReLU after each merge.

    :param x: indexable collection of feature maps
    :return: aggregated feature map
    """
    out = x[self.collect_inds[0]]
    # Hoisted out of the loop: the original constructed a fresh
    # ops.Relu() on every iteration; the activation is stateless.
    relu = ops.Relu()
    for ind in self.collect_inds[1:]:
        collect = x[ind]
        out_size = ops.get_shape(out)[2]
        collect_size = ops.get_shape(collect)[2]
        if out_size > collect_size:  # upsample collect
            collect = ops.interpolate(collect, size=ops.get_shape(out)[2:],
                                      mode='bilinear', align_corners=True)
        elif collect_size > out_size:  # upsample out
            out = ops.interpolate(out, size=ops.get_shape(collect)[2:],
                                  mode='bilinear', align_corners=True)
        if self.agg_concat:
            out = ops.concat([out, collect])
        else:
            out += collect
        out = relu(out)
    return out
def call(self, inputs):
    """Calculate the output of the model.

    :param x: input tensor
    :type x: tensor
    :return: output tensor of the model
    :rtype: tensor
    """
    x = self.SFENet2(inputs)
    block_outputs = []
    # Collect every intermediate block output for global feature fusion.
    for net in self.ERBD:
        x = net(x)
        block_outputs.append(x)
    x = self.GFF(ops.concat(tuple(block_outputs)))
    # Long residual connection from the block input.
    x += inputs
    return x
def call(self, x1, x2):
    """Do an inference on AggregateCell.

    :param x1: first input
    :param x2: second input
    :return: output
    """
    if self.pre_transform:
        x1 = self.branch_1(x1)
        x2 = self.branch_2(x2)
    size1 = tuple(ops.get_shape(x1)[2:])
    size2 = tuple(ops.get_shape(x2)[2:])
    # NOTE(review): tuple comparison is lexicographic; this assumes one
    # spatial size dominates the other in every dimension — confirm
    # against callers.
    if size1 > size2:
        x2 = ops.interpolate(x2, size=ops.get_shape(x1)[2:],
                             mode='bilinear', align_corners=True)
    elif size1 < size2:
        x1 = ops.interpolate(x1, size=ops.get_shape(x2)[2:],
                             mode='bilinear', align_corners=True)
    if self.concat:
        return self.conv1x1(ops.concat([x1, x2]))
    return x1 + x2
def call(self, x):
    """Do an inference on FactorizedReduce.

    Concatenate conv_1 over the activated input with conv_2 over the
    one-pixel-shifted activated input, then batch-normalize.
    """
    x = self.relu(x)
    branch_a = self.conv_1(x)
    branch_b = self.conv_2(x[:, :, 1:, 1:])
    out = self.bn(ops.concat((branch_a, branch_b)))
    return out