def __init__(self, w_in, w_out, stride, params):
    super(BottleneckTransform, self).__init__()
    w_b = int(round(w_out * params["bot_mul"]))
    w_se = int(round(w_in * params["se_r"]))
    w_se1 = int(round(w_in * params["se1_r"]))
    groups = w_b // params["group_w"]
    self.a = conv2d(w_in, w_b, 1)
    self.a_bn = norm2d(w_b)
    self.a_af = activation()
    self.b = conv2d(w_b, w_b, 3, stride=stride, groups=groups)
    self.b_bn = norm2d(w_b)
    self.b_af = activation()
    self.se = SE(w_b, w_se) if w_se else None
    self.c_se = C_SE(w_b, w_se) if params["c_se"] else None
    self.w_se = W_SE(w_b, w_se1, params["w_se_add"]) if params["w_se"] else None
    # Initialize to None so the attribute always exists, even when the
    # ew_se branch below replaces self.se instead of setting self.ew_se.
    self.ew_se = None
    if params["ew_se"]:
        if params["block_idx"] in params["w_se_idx"]:
            self.se = SE(w_b, w_se1)
        else:
            self.ew_se = EW_SE(w_b, w_se1)
    self.w1_se = W1_SE(w_b, w_se1) if params["w1_se"] else None
    self.w13_se = W13_SE(w_b, w_se1) if params["w13_se"] else None
    self.se_gap = SE_GAP(w_b, w_se1) if params["se_gap"] else None
    self.se_gap1 = SE_GAP1(w_b, w_se1) if params["se_gap1"] else None
    self.se_gap_dw = SE_GAP_DW(w_b) if params["se_gap_dw"] else None
    self.c = conv2d(w_b, w_out, 1)
    self.c_bn = norm2d(w_out)
    self.c_bn.final_bn = True
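# A minimal, hypothetical params dict for the BottleneckTransform above. The
# key names are taken from the reads in __init__; every value is an
# illustrative assumption, not a recommended configuration.
example_params = {
    "bot_mul": 1.0,   # bottleneck width multiplier: w_b = w_out * bot_mul
    "se_r": 0.25,     # SE reduction ratio relative to w_in
    "se1_r": 0.25,    # reduction ratio for the SE-variant modules
    "group_w": 16,    # group width for the 3x3 grouped conv
    "c_se": False, "w_se": False, "w_se_add": False,
    "ew_se": False, "block_idx": 0, "w_se_idx": [],
    "w1_se": False, "w13_se": False,
    "se_gap": False, "se_gap1": False, "se_gap_dw": False,
}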
def __init__(self, w_in, w_out, stride, _params):
    super(BasicTransform, self).__init__()
    self.a = conv2d(w_in, w_out, 3, stride=stride)
    self.a_bn = norm2d(w_out)
    self.a_af = activation()
    self.b = conv2d(w_out, w_out, 3)
    self.b_bn = norm2d(w_out)
    self.b_bn.final_bn = True
def __init__(self, w_in, w_out, stride, _params):
    super(VanillaBlock, self).__init__()
    self.a = conv2d(w_in, w_out, 3, stride=stride)
    self.a_bn = norm2d(w_out)
    self.a_af = activation()
    self.b = conv2d(w_out, w_out, 3)
    self.b_bn = norm2d(w_out)
    self.b_af = activation()
def __init__(self, w_in, w_out, stride, w_b=None, groups=1):
    err_str = "Basic transform does not support w_b and groups options"
    assert w_b is None and groups == 1, err_str
    super(BasicTransform, self).__init__()
    self.a = conv2d(w_in, w_out, 3, stride=stride)
    self.a_bn = norm2d(w_out)
    self.a_af = activation()
    self.b = conv2d(w_out, w_out, 3)
    self.b_bn = norm2d(w_out)
    self.b_bn.final_bn = True
def __init__(self, w_in, w_out, stride, w_b, groups):
    super(BottleneckTransform, self).__init__()
    # MSRA -> stride=2 is on 1x1; TH/C2 -> stride=2 is on 3x3
    (s1, s3) = (stride, 1) if cfg.RESNET.STRIDE_1X1 else (1, stride)
    self.a = conv2d(w_in, w_b, 1, stride=s1)
    self.a_bn = norm2d(w_b)
    self.a_af = activation()
    self.b = conv2d(w_b, w_b, 3, stride=s3, groups=groups)
    self.b_bn = norm2d(w_b)
    self.b_af = activation()
    self.c = conv2d(w_b, w_out, 1)
    self.c_bn = norm2d(w_out)
    self.c_bn.final_bn = True
def __init__(self, w_in, w_out, stride, params):
    super(BottleneckTransform, self).__init__()
    w_b = int(round(w_out * params["bot_mul"]))
    w_se = int(round(w_in * params["se_r"]))
    groups = w_b // params["group_w"]
    self.a = conv2d(w_in, w_b, 1)
    self.a_bn = norm2d(w_b)
    self.a_af = activation()
    self.b = conv2d(w_b, w_b, 3, stride=stride, groups=groups)
    self.b_bn = norm2d(w_b)
    self.b_af = activation()
    self.se = SE(w_b, w_se) if w_se else None
    self.c = conv2d(w_b, w_out, 1)
    self.c_bn = norm2d(w_out)
    self.c_bn.final_bn = True
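# Hedged sketch, not from the source: the transforms above register their
# layers in definition order (a, a_bn, a_af, b, ...), so the natural forward
# pass simply chains the registered child modules. This reconstruction is an
# assumption based on the attribute layout alone.
def forward(self, x):
    for layer in self.children():
        x = layer(x)
    return x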
def __init__(self, w_in, exp_r, k, stride, se_r, w_out):
    # Expansion, kxk dwise, BN, AF, SE, 1x1, BN, skip_connection
    super(MBConv, self).__init__()
    self.exp = None
    w_exp = int(w_in * exp_r)
    if w_exp != w_in:
        self.exp = conv2d(w_in, w_exp, 1)
        self.exp_bn = norm2d(w_exp)
        self.exp_af = activation()
    self.dwise = conv2d(w_exp, w_exp, k, stride=stride, groups=w_exp)
    self.dwise_bn = norm2d(w_exp)
    self.dwise_af = activation()
    self.se = SE(w_exp, int(w_in * se_r))
    self.lin_proj = conv2d(w_exp, w_out, 1)
    self.lin_proj_bn = norm2d(w_out)
    self.has_skip = stride == 1 and w_in == w_out
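# Sketch of the forward pass implied by the comment in MBConv.__init__
# (expansion -> kxk dwise -> BN -> AF -> SE -> 1x1 -> BN -> skip). The exact
# body is an assumption reconstructed from the module names, not source code.
def forward(self, x):
    f_x = self.exp_af(self.exp_bn(self.exp(x))) if self.exp else x
    f_x = self.dwise_af(self.dwise_bn(self.dwise(f_x)))
    f_x = self.se(f_x)
    f_x = self.lin_proj_bn(self.lin_proj(f_x))
    return x + f_x if self.has_skip else f_x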
def __init__(self, w_in, w_out, stride, params):
    super(ResBottleneckBlock, self).__init__()
    self.proj, self.bn = None, None
    if (w_in != w_out) or (stride != 1):
        self.proj = conv2d(w_in, w_out, 1, stride=stride)
        self.bn = norm2d(w_out)
    self.f = BottleneckTransform(w_in, w_out, stride, params)
    self.af = activation()
def __init__(self, w_in, w_out, stride, trans_fun, w_b=None, groups=1):
    super(ResBlock, self).__init__()
    self.proj, self.bn = None, None
    if (w_in != w_out) or (stride != 1):
        self.proj = conv2d(w_in, w_out, 1, stride=stride)
        self.bn = norm2d(w_out)
    self.f = trans_fun(w_in, w_out, stride, w_b, groups)
    self.af = activation()
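# Hedged sketch of the residual forward shared by ResBottleneckBlock and
# ResBlock above: project the input when shapes change, add the transform
# output, then activate. Assumed from the module layout, not copied from the
# source.
def forward(self, x):
    x_p = self.bn(self.proj(x)) if self.proj else x
    return self.af(x_p + self.f(x))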
def __init__(self, w_in, w_out, num_classes):
    super(EffHead, self).__init__()
    dropout_ratio = cfg.EN.DROPOUT_RATIO
    self.conv = conv2d(w_in, w_out, 1)
    self.conv_bn = norm2d(w_out)
    self.conv_af = activation()
    self.avg_pool = gap2d(w_out)
    self.dropout = Dropout(p=dropout_ratio) if dropout_ratio > 0 else None
    self.fc = linear(w_out, num_classes, bias=True)
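# Assumed forward for EffHead: conv -> BN -> AF -> global average pool ->
# optional dropout -> classifier. The flatten step assumes gap2d yields an
# NxCx1x1 tensor; this is a reconstruction, not the source implementation.
def forward(self, x):
    x = self.conv_af(self.conv_bn(self.conv(x)))
    x = self.avg_pool(x)
    x = x.view(x.size(0), -1)  # flatten pooled features for the linear layer
    x = self.dropout(x) if self.dropout else x
    return self.fc(x)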
def __init__(self, w_in, head_width, num_classes):
    super(AnyHead, self).__init__()
    self.head_width = head_width
    if head_width > 0:
        self.conv = conv2d(w_in, head_width, 1)
        self.bn = norm2d(head_width)
        self.af = activation()
        w_in = head_width
    self.avg_pool = gap2d(w_in)
    self.fc = linear(w_in, num_classes, bias=True)
def __init__(self, w_in, ks, ws, ss):
    super(ViTStemConv, self).__init__()
    for i, (k, w_out, stride) in enumerate(zip(ks, ws, ss)):
        if i < len(ks) - 1:
            self.add_module(f"cstem{i}_conv", conv2d(w_in, w_out, 3, stride=stride))
            self.add_module(f"cstem{i}_bn", norm2d(w_out))
            self.add_module(f"cstem{i}_af", activation("relu"))
        else:
            m = conv2d(w_in, w_out, k, stride=stride, bias=True)
            self.add_module("cstem_last_conv", m)
        w_in = w_out
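# Illustrative construction of the convolutional ViT stem above: all entries
# but the last become 3x3 conv/BN/ReLU stages, and the final entry becomes a
# kxk patchifying conv. The kernel sizes, widths, and strides here are
# made-up values, chosen only so that len(ks) == len(ws) == len(ss).
stem = ViTStemConv(w_in=3, ks=[3, 3, 3, 8], ws=[48, 96, 192, 384], ss=[2, 2, 2, 8])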
def __init__(self, w_in, w_out):
    super(StemIN, self).__init__()
    self.conv = conv2d(w_in, w_out, 3, stride=2)
    self.bn = norm2d(w_out)
    self.af = activation()
def __init__(self, w_in, w_out):
    super(ResStem, self).__init__()
    self.conv = conv2d(w_in, w_out, 7, stride=2)
    self.bn = norm2d(w_out)
    self.af = activation()
    self.pool = pool2d(w_out, 3, stride=2)
def __init__(self, w_in, w_out):
    super(ResStemCifar, self).__init__()
    self.conv = conv2d(w_in, w_out, 3)
    self.bn = norm2d(w_out)
    self.af = activation()