def __init__(self, node_name, cname, matrixadd_params, pad_params, act_params, at_ver=3, gen_ctrl=None, force_relu=True):
    """Capture state for a padded matrix-add kernel with an optional fused activation."""
    # Create a fresh control block when none is supplied, otherwise tag
    # the caller's block with this kernel's C name.
    if gen_ctrl is None:
        gen_ctrl = GenCtrl(None, cname=cname)
    else:
        gen_ctrl.cname = cname
    self.gen_ctrl = gen_ctrl
    self.cname = cname
    self.node_name = node_name
    self.at_ver = at_ver
    # Activation op folded into the kernel; NO_ACTIVATION when absent.
    self.at_act_params = (
        gen_activation_op(act_params.activation, force_relu=force_relu)
        if act_params is not None else NO_ACTIVATION)
    self.padtop = pad_params.padding[0][0]
    self.padbot = pad_params.padding[0][1]
    in0 = matrixadd_params.in_dims[0]
    in1 = matrixadd_params.in_dims[1]
    # Selected by relative size: 0 when the first input is the larger one.
    self.padded_idx = 0 if in0.size() > in1.size() else 1
    self.matrixadd_params = matrixadd_params
    dims0 = make_three_dims(in0)
    dims1 = make_three_dims(in1)
    self.feat_dim = max(dims0[0], dims1[0])
    self.width = dims0[1]
    self.height = dims0[2]
def __init__(self, node_name, cname, matmul_params, act_params, at_ver=3, gen_ctrl=None, force_relu=True):
    """Capture matmul node geometry and an optional fused activation."""
    # Ensure a control block exists and carries this kernel's C name.
    if gen_ctrl is None:
        gen_ctrl = GenCtrl(None, cname=cname)
    else:
        gen_ctrl.cname = cname
    self.gen_ctrl = gen_ctrl
    self.cname = cname
    self.node_name = node_name
    self.at_ver = at_ver
    if act_params is None:
        self.at_act_params = NO_ACTIVATION
    else:
        self.at_act_params = gen_activation_op(act_params.activation, force_relu=force_relu)
    self.matmul_params = matmul_params
    dims_a = matmul_params.in_dims[0]
    dims_b = matmul_params.in_dims[1]
    # (lines, columns) of each operand matrix.
    self.lineM1 = dims_a[0]
    self.colM1 = dims_a[1]
    self.lineM2 = dims_b[0]
    self.colM2 = dims_b[1]
def __init__(self, node_name, cname, matrixadd_params, pad_params, act_params, at_ver=3, gen_ctrl=None, force_relu=True):
    """Build generation attributes for a padded matrix-add kernel."""
    # Ensure a control block exists and carries this kernel's C name.
    if gen_ctrl is None:
        gen_ctrl = GenCtrl(None, cname=cname)
    else:
        gen_ctrl.cname = cname
    at_act_params = (NO_ACTIVATION if act_params is None else
                     gen_activation_op(act_params.activation, force_relu=force_relu))
    pad = pad_params.padding[0]
    in0 = matrixadd_params.in_dims[0]
    in1 = matrixadd_params.in_dims[1]
    dims0 = make_three_dims(in0)
    dims1 = make_three_dims(in1)
    attrs = {
        'feat': max(dims0[0], dims1[0]),
        'width': dims0[1],
        'height': dims0[2],
        # 0 when the first input is the larger of the two.
        'padded_idx': 0 if in0.size() > in1.size() else 1,
        'padtop': pad[0],
        'padbot': pad[1],
        'act_oper': at_act_params
    }
    extra_attrs = {'cname': cname, 'node_name': node_name}
    super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
def __init__(self, node_name, cname, matadd_params, act_params=None, force_relu=True, gen_ctrl=None):
    """Build generation attributes for a plain matrix-add kernel."""
    # Ensure a control block exists and carries this kernel's C name.
    if gen_ctrl is None:
        gen_ctrl = GenCtrl(None, cname=cname)
    else:
        gen_ctrl.cname = cname
    if act_params is None:
        act_oper = "KOP_NONE"
    else:
        act_oper = gen_activation_op(act_params.activation, force_relu=force_relu)
    dims = make_three_dims(matadd_params.in_dims[0])
    attrs = {
        'feat': dims[0],
        'width': dims[1],
        'height': dims[2],
        'act_oper': act_oper
    }
    extra_attrs = {'cname': cname, 'node_name': node_name}
    super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
def __init__(self, node_name, cname, pool_params, pool_q, act_params, act_q, force_relu, gen_ctrl=None):
    """Build generation attributes for a global-pool kernel with an
    optional fused activation.

    The extent reduced by the pool is flattened and re-split into two
    balanced height/width factors so the kernel sees a (feat, height,
    width) shape.
    """
    if gen_ctrl is None:
        self.gen_ctrl = gen_ctrl = GenCtrl(None, cname=cname)
    else:
        gen_ctrl.cname = cname
        self.gen_ctrl = gen_ctrl
    at_pool_params = gen_globalpool_at_params(pool_params)
    in_dim = pool_params.in_dims[0]
    out_dim = pool_params.out_dims[0]
    in_q = pool_q.in_qs[0]
    out_q = pool_q.out_qs[0]
    # Product of the dimensions NOT reduced by the pool -> feature count.
    reduce_sz = reduce(lambda x, y: x * y,
                       (sz for idx, sz in enumerate(in_dim.shape)
                        if idx not in pool_params.axis),
                       1)
    feat = reduce_sz
    # Split the pooled extent into two near-equal factors.
    height, width = balanced_divisors(in_dim.size() / reduce_sz)
    if act_params is not None:
        act_op = gen_activation_op(
            act_params.activation, force_relu=force_relu,
            asymmetric=act_q.in_qs[0].zero_point != 0)
        # NOTE(review): out_dim is updated here but not used below —
        # kept for parity with the sibling generators; confirm if needed.
        if out_dim is None:
            out_dim = act_params.out_dims[0].expand_to_chw()
        out_q = act_q.out_qs[0]
    else:
        act_op = "KOP_NONE"
    # Sizes are in bytes; a negative value marks an unsigned container.
    attrs = {
        'in_size': in_q.dtype_bits // 8 if in_q.signed else -in_q.dtype_bits // 8,
        'out_size': out_q.dtype_bits // 8 if out_q.signed else -out_q.dtype_bits // 8,
        'feat': feat,
        'width': width,
        'height': height,
        'kop_pool': at_pool_params.GlobalPoolOper,
        'kop_act': act_op
    }
    extra_attrs = {'cname': cname, 'node_name': node_name}
    super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
def __init__(self, node_name, cname, linear_params, act_params, linear_q, act_q, force_relu, gen_ctrl=None):
    """Build generation attributes for a fully-connected kernel with an
    optional fused activation."""
    # Ensure a control block exists and carries this kernel's C name.
    if gen_ctrl is None:
        gen_ctrl = GenCtrl(None, cname=cname)
    else:
        gen_ctrl.cname = cname
    in_dim = linear_params.in_dims[0]
    out_dim = linear_params.out_dims[0]
    in_qs = linear_q.in_qs
    mulbiases_q = linear_q.cache['mul_biases_q']
    if act_params is None:
        at_act_params = "KOP_NONE"
        out_qs = linear_q.out_qs
    else:
        at_act_params = gen_activation_op(
            act_params.activation, force_relu=force_relu,
            asymmetric=act_q.in_qs[0].zero_point != 0)
        # Fall back to the activation node's dims when the linear node
        # doesn't carry them.
        if in_dim is None:
            in_dim = act_params.in_dims[0]
        if out_dim is None:
            out_dim = act_params.out_dims[0]
        out_qs = act_q.out_qs

    def _container_size(qtype):
        # Byte size of the container; negated for unsigned types.
        return qtype.dtype_bits // 8 if qtype.signed else -qtype.dtype_bits // 8

    attrs = {
        'in_size': _container_size(in_qs[0]),
        'out_size': _container_size(out_qs[0]),
        'bias_size': in_qs[2].dtype_bits // 8,
        'scale_size': mulbiases_q.dtype_bits // 8,
        'filter_bits': in_qs[1].bits,
        'in_feat': in_dim.size(),
        'out_feat': out_dim.size(),
        'act_op': at_act_params
    }
    extra_attrs = {'cname': cname, 'node_name': node_name}
    super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
def __init__(self, node_name, cname, matmul_params, matmul_qrec, act_params, gen_ctrl=None, force_relu=True):
    """Build generation attributes for a matmul kernel, selecting the
    kernel-op variant from bias presence, scalar scaling and transposition."""
    if gen_ctrl is None:
        self.gen_ctrl = gen_ctrl = GenCtrl(None, cname=cname)
    else:
        gen_ctrl.cname = cname
        self.gen_ctrl = gen_ctrl
    act_op = ('KOP_NONE' if act_params is None else
              gen_activation_op(act_params.activation, force_relu=force_relu))
    height_1 = matmul_params.in_dims[0][0]
    width_1 = matmul_params.in_dims[0][1]
    height_2 = matmul_params.in_dims[1][0]
    width_2 = matmul_params.in_dims[1][1]
    # A third input means a bias tensor is present.
    if len(matmul_params.in_dims) == 3:
        bias_datasize = at_bits(matmul_qrec.in_qs[2])
        matmul_op = 'KOP_MATMUL'
    else:
        bias_datasize = 0
        matmul_op = 'KOP_MATMUL_NOBIAS'
    # Single-element scale -> scalar-scale kernel variant.
    if len(matmul_qrec.in_qs[1].scale) == 1:
        matmul_op += '_SCALE_SCALAR'
    if isinstance(matmul_params, MatMulTransposedParameters):
        matmul_op += '_TRANSPOSED'
        # Second operand arrives transposed: swap its dimensions.
        height_2, width_2 = width_2, height_2
    # attributes affecting generation
    attrs = {
        'height_1': height_1,
        'width_1': width_1,
        'height_2': height_2,
        'width_2': width_2,
        'bias_datasize': bias_datasize,
        'matmul_op': matmul_op,
        'act_op': act_op
    }
    # other attributes
    extra_attrs = {'cname': cname, 'node_name': node_name}
    super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
def __init__(self, node_name, cname, matmul_params, matmul_qrec, act_params, gen_ctrl=None, force_relu=True):
    """Build generation attributes for a transposed matmul kernel."""
    if gen_ctrl is None:
        self.gen_ctrl = gen_ctrl = GenCtrl(None, cname=cname)
    else:
        gen_ctrl.cname = cname
        self.gen_ctrl = gen_ctrl
    if act_params is None:
        act_op = 'KOP_NONE'
    else:
        act_op = gen_activation_op(act_params.activation, force_relu=force_relu)
    dims_1 = matmul_params.in_dims[0]
    dims_2 = matmul_params.in_dims[1]
    bias_datasize = at_bits(matmul_qrec.in_qs[2])
    in1_datasize = at_bits(matmul_qrec.in_qs[0])
    in2_datasize_bits = matmul_qrec.in_qs[1].bits
    out_datasize = at_bits(matmul_qrec.out_qs[0])
    # attributes affecting generation
    attrs = {
        'height_1': dims_1[0],
        'width_1': dims_1[1],
        # second operand is transposed, so its dims are read swapped
        'height_2': dims_2[1],
        'width_2': dims_2[0],
        'bias_datasize': bias_datasize,
        'in1_datasize': in1_datasize,
        'in2_datasize_bits': in2_datasize_bits,
        'out_datasize': out_datasize,
        'matmul_op': 'KOP_MATMUL_TRANSPOSED',
        'act_op': act_op
    }
    # other attributes
    extra_attrs = {'cname': cname, 'node_name': node_name}
    super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
def __init__(self, node_name, cname, params, aparams, qrec, gen_ctrl=None):
    """Build generation attributes for a matmul kernel (ReluN-aware variant)."""
    # Ensure a control block exists and carries this kernel's C name.
    if gen_ctrl is None:
        gen_ctrl = GenCtrl(None, cname=cname)
    else:
        gen_ctrl.cname = cname
    # A bounded relu is propagated as the ReluN generator control.
    if isinstance(aparams, ReluActivationParameters) and aparams.upper_bound:
        gen_ctrl.ReluN = aparams.upper_bound
    in_dims = params.in_dims
    transposed = isinstance(params, MatMulTransposedParameters)
    # More than two inputs means a bias tensor is present.
    matop = "KOP_MATMUL" if len(in_dims) > 2 else "KOP_MATMUL_NOBIAS"
    if transposed:
        matop += "_TRANSPOSED"
    attrs = {
        'ColM1': in_dims[0][1],
        'LineM1': in_dims[0][0],
        # Second operand's dims are read swapped when transposed.
        'ColM2': in_dims[1][0] if transposed else in_dims[1][1],
        'LineM2': in_dims[1][1] if transposed else in_dims[1][0],
        'Width': 0,
        'Height': 0,
        'Scx': 1,
        'Scy': 1,
        'kop_matmul': matop,
        'actoper': gen_activation_op(aparams.activation if aparams else "none")
    }
    extra_attrs = {'cname': cname, 'node_name': node_name}
    super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
def __init__(self, node_name, cname, pool_params, pool_q, act_params, act_q, force_relu, gen_ctrl=None):
    """Build generation attributes for a pooling kernel with an optional
    fused activation."""
    if gen_ctrl is None:
        self.gen_ctrl = gen_ctrl = GenCtrl(None, cname=cname)
    else:
        gen_ctrl.cname = cname
        self.gen_ctrl = gen_ctrl
    # Signal HWC layout to the generator when the kernel input ordering
    # says so.  (The dead local `hwc = True` that was never read has
    # been removed — only gen_ctrl.hwc is consumed.)
    if pool_params.ker_in_order and pool_params.ker_in_order[0] == ["h", "w", "c"]:
        gen_ctrl.hwc = 1
    pad_compatibilities = []
    at_pool_params = gen_pool_at_params(pool_params, pad_compatibilities)
    in_dim = pool_params.in_dims[0]
    out_dim = pool_params.out_dims[0]
    in_q = pool_q.in_qs[0]
    out_q = pool_q.out_qs[0]
    if act_params is not None:
        act_op = gen_activation_op(
            act_params.activation, force_relu=force_relu,
            asymmetric=act_q.in_qs[0].zero_point != 0)
        # Take output dim/quantization from the activation when the pool
        # node doesn't carry an output dim.
        if out_dim is None:
            out_dim = act_params.out_dims[0].expand_to_chw()
        out_q = act_q.out_qs[0]
    else:
        act_op = "KOP_NONE"
    if pad_compatibilities:
        reduction = PadDim.pad_compatibility_reduce(
            *pad_compatibilities,
            "convolution padding is not compatible with pool padding")
        if not reduction[2]:  # default is balanced pad left
            at_pad_ctrl = next(i for i, v in enumerate(reduction) if v)
            LOG.debug("%s: generating pad control block", node_name)
            self.gen_ctrl.PadType = at_pad_ctrl
    # Sizes are in bytes; a negative value marks an unsigned container.
    attrs = {
        'in_size': in_q.dtype_bits // 8 if in_q.signed else -in_q.dtype_bits // 8,
        'out_size': out_q.dtype_bits // 8 if out_q.signed else -out_q.dtype_bits // 8,
        'feat': in_dim.c,
        'width': in_dim.w,
        'height': in_dim.h,
        'kop_pool': at_pool_params.PoolOper,
        'fpx': at_pool_params.Fpx,
        'fpy': at_pool_params.Fpy,
        'dpx': at_pool_params.Dpx,
        'dpy': at_pool_params.Dpy,
        'spx': at_pool_params.Spx,
        'spy': at_pool_params.Spy,
        'pool_pad': at_pool_params.PoolPad,
        'kop_act': act_op
    }
    extra_attrs = {'cname': cname, 'node_name': node_name}
    super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
def __init__(self, node_name, cname, conv_params, conv_q, pool_params, pool_q, act_params, act_q, force_relu, gen_ctrl=None):
    """Build generation attributes for a fused conv(+pool)(+activation) kernel."""
    if gen_ctrl is None:
        self.gen_ctrl = gen_ctrl = GenCtrl(None, cname=cname)
    else:
        gen_ctrl.cname = cname
        self.gen_ctrl = gen_ctrl
    # NE16-quantized convolutions bypass the HWC / im2col controls below.
    is_ne16 = conv_q.cache.get('ne16')
    hwc = False
    if not is_ne16 and conv_params.ker_in_order and conv_params.ker_in_order[0] == ["h", "w", "c"]:
        hwc = True
        gen_ctrl.hwc = 1
    # Enable im2col for 1x1 CHW convolutions unless the caller already set it.
    if not is_ne16 and not hwc and conv_params.filter.h == 1 and conv_params.filter.w == 1 and gen_ctrl.enableim2col is None:
        gen_ctrl.enableim2col = 1
    in_q = filter_q = out_q = bias_q = None
    in_dim = out_dim = None
    pad_compatibilities = []
    at_conv_params = gen_conv_at_params(
        conv_params, pad_compatibilities)
    in_dim = conv_params.in_dims[0]
    out_dim = conv_params.out_dims[0]
    # Conv inputs are ordered (activation, filter, bias).
    filter_q = conv_q.in_qs[1]
    in_q = conv_q.in_qs[0]
    out_q = conv_q.out_qs[0]
    bias_q = conv_q.in_qs[2]
    # Padding is filled with the input's zero point.
    pad_val = in_q.zero_point[0]
    if pool_params is not None:
        # Fused pool: its output dim/quantization supersede the conv's.
        at_pool_params = gen_pool_at_params(
            pool_params, pad_compatibilities)
        out_dim = pool_params.out_dims[0]
        out_q = pool_q.out_qs[0]
    else:
        at_pool_params = NO_POOL
    if act_params is not None:
        act_op = gen_activation_op(
            act_params.activation, force_relu=force_relu,
            asymmetric=act_q.in_qs[0].zero_point != 0)
        # Fall back to the activation's dims/quantization when needed.
        if out_dim is None:
            out_dim = act_params.out_dims[0].expand_to_chw()
        out_q = act_q.out_qs[0]
    else:
        act_op = "KOP_NONE"
    if pad_compatibilities:
        reduction = PadDim.pad_compatibility_reduce(*pad_compatibilities,
                                                    "convolution padding is not compatible with pool padding")
        if not reduction[2]:  # default is balanced pad left
            at_pad_ctrl = next(i for i, v in enumerate(reduction) if v)
            LOG.debug("%s: generating pad control block", node_name)
            self.gen_ctrl.PadType = at_pad_ctrl
    # Sizes are in bytes; a negative value marks an unsigned container.
    attrs = {
        'in_size': in_q.dtype_bits//8 if in_q.signed else -in_q.dtype_bits//8,
        'out_size': out_q.dtype_bits//8 if out_q.signed else -out_q.dtype_bits//8,
        'bias_size': bias_q.dtype_bits//8,
        'filter_bits': filter_q.bits,
        'in_feat': in_dim.c,
        'out_feat': out_dim.c,
        'in_width': in_dim.w,
        'in_height': in_dim.h,
        'kop_conv': at_conv_params.ConvOper,
        'fcx': at_conv_params.Fcx,
        'fcy': at_conv_params.Fcy,
        'dcx': at_conv_params.Dcx,
        'dcy': at_conv_params.Dcy,
        'scx': at_conv_params.Scx,
        'scy': at_conv_params.Scy,
        'conv_pad': at_conv_params.ConvPad,
        'pad_value': pad_val,
        'kop_pool': at_pool_params.PoolOper,
        'fpx': at_pool_params.Fpx,
        'fpy': at_pool_params.Fpy,
        'dpx': at_pool_params.Dpx,
        'dpy': at_pool_params.Dpy,
        'spx': at_pool_params.Spx,
        'spy': at_pool_params.Spy,
        'pool_pad': at_pool_params.PoolPad,
        'kop_act': act_op
    }
    extra_attrs = {
        'cname': cname,
        'node_name': node_name
    }
    super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)