    def __init__(self,
                 node_name,
                 cname,
                 params,
                 linear_q,
                 act_params,
                 act_q,
                 at_ver=3,
                 gen_ctrl=None):
        if gen_ctrl is None:
            gen_ctrl = GenCtrl(None, cname=cname)
        else:
            gen_ctrl.cname = cname

        assert params is not None, "linear should always be included"
        in_size = params.in_dims[0].size()
        out_size = params.out_dims[0].size()
        filter_q = linear_q.in_qs[1]
        in_q = linear_q.in_qs[0]
        out_q = linear_q.out_qs[0]
        bias_q = linear_q.in_qs[2]

        if act_params is not None:
            act_op = gen_activation_op(act_params.activation)
            out_q = act_q.out_qs[0]
            if at_ver < 3:
                if act_params.activation == "relu6" and out_q.q != 0:
                    gen_ctrl.ReluN = 6 << out_q.q
                    gen_ctrl.ReluNNoNorm = 1
            else:
                if act_params.activation == "relun":
                    gen_ctrl.ReluN = act_params.activation_params
        else:
            act_op = "KOP_NONE"

        # attributes used to test equality - i.e. whether this kernel can be reused for another node
        attrs = {
            'in_q': in_q,
            'filter_q': filter_q,
            'bias_q': bias_q,
            'out_q': out_q,
            'in_size': in_size,
            'out_size': out_size,
            'act_op': act_op
        }

        # other attributes
        extra_attrs = {'cname': cname, 'node_name': node_name}

        super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
        self.at_ver = at_ver
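
# Illustrative sketch (not part of the generator code above): the attrs dict is what
# makes two kernel instances comparable, so a kernel generated for one node can be
# reused by another node with identical sizes and quantization. A minimal, hypothetical
# stand-in for the base-class behaviour, assuming equality is defined over attrs only:

class _KernelSketch:
    """Hypothetical stand-in showing how attrs-based equality enables kernel reuse."""

    def __init__(self, attrs, extra_attrs):
        self.attrs = attrs              # compared for equality / reuse
        self.extra_attrs = extra_attrs  # per-node info, ignored for reuse

    def __eq__(self, other):
        return isinstance(other, _KernelSketch) and self.attrs == other.attrs

    def __hash__(self):
        return hash(tuple(sorted(self.attrs.items())))


# Two nodes with the same sizes and activation hash/compare equal, so only one
# kernel would need to be generated for both:
_k1 = _KernelSketch({'in_size': 128, 'out_size': 64, 'act_op': 'KOP_RELU'},
                    {'cname': 'Layer_0', 'node_name': 'fc1'})
_k2 = _KernelSketch({'in_size': 128, 'out_size': 64, 'act_op': 'KOP_RELU'},
                    {'cname': 'Layer_1', 'node_name': 'fc2'})
assert _k1 == _k2 and len({_k1, _k2}) == 1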
    def __init__(self, node_name, cname, params, aparams, qrec, gen_ctrl=None):
        if gen_ctrl is None:
            gen_ctrl = GenCtrl(None, cname=cname)
        else:
            gen_ctrl.cname = cname

        if isinstance(aparams,
                      ReluActivationParameters) and aparams.upper_bound:
            gen_ctrl.ReluN = aparams.upper_bound

        in_dims = params.in_dims
        matop = "KOP_MATMUL" if len(in_dims) > 2 else "KOP_MATMUL_NOBIAS"
        if isinstance(params, MatMulTransposedParameters):
            matop += "_TRANSPOSED"

        attrs = {
            'ColM1': in_dims[0][1],
            'LineM1': in_dims[0][0],
            'ColM2': (in_dims[1][0] if isinstance(params, MatMulTransposedParameters)
                      else in_dims[1][1]),
            'LineM2': (in_dims[1][1] if isinstance(params, MatMulTransposedParameters)
                       else in_dims[1][0]),
            'Width': 0,
            'Height': 0,
            'Scx': 1,
            'Scy': 1,
            'kop_matmul': matop,
            'actoper': gen_activation_op(aparams.activation if aparams else "none")
        }

        extra_attrs = {'cname': cname, 'node_name': node_name}
        super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
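
# Illustrative sketch (not from the library): how the ColM1/LineM1/ColM2/LineM2
# selection above behaves for a transposed vs. a plain matmul. Shapes are plain
# (rows, cols) tuples and the helper name is made up for this example.

def matmul_generator_dims(shape_m1, shape_m2, transposed):
    """Return (LineM1, ColM1, LineM2, ColM2) as derived by the generator above."""
    line_m1, col_m1 = shape_m1[0], shape_m1[1]
    if transposed:
        # the second operand is stored transposed, so its lines/columns are swapped
        line_m2, col_m2 = shape_m2[1], shape_m2[0]
    else:
        line_m2, col_m2 = shape_m2[0], shape_m2[1]
    return line_m1, col_m1, line_m2, col_m2


# (4, 8) x (8, 3): the plain matmul reads M2 as 8 lines of 3 columns, while the
# transposed variant expects M2 stored as (3, 8) and recovers the same geometry.
assert matmul_generator_dims((4, 8), (8, 3), transposed=False) == (4, 8, 8, 3)
assert matmul_generator_dims((4, 8), (3, 8), transposed=True) == (4, 8, 8, 3)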
    def __init__(self, node_name, cname, conv_params, conv_q,
                 pool_params, pool_q, act_params, act_q, at_ver=3, gen_ctrl=None):
        if gen_ctrl is None:
            gen_ctrl = GenCtrl(None, cname=cname)
        else:
            gen_ctrl.cname = cname

        in_q = filter_q = out_q = bias_q = mul_biases_q = None
        in_dim = out_dim = None
        pad_compatibilities = []
        if conv_params is not None:
            at_conv_params = gen_conv_at_params(
                conv_params, conv_q, pad_compatibilities)
            in_dim = conv_params.in_dims[0]
            out_dim = conv_params.out_dims[0]
            filter_q = conv_q.in_qs[1]
            in_q = conv_q.in_qs[0]
            out_q = conv_q.out_qs[0]
            bias_q = conv_q.in_qs[2]
            if conv_params.has_mul_bias:
                mul_biases_q = conv_q.mul_biases_q
        else:
            at_conv_params = NO_CONV

        if pool_params is not None:
            at_pool_params = gen_pool_at_params(
                pool_params, pad_compatibilities)
            if in_dim is None:
                in_dim = pool_params.in_dims[0]
            out_dim = pool_params.out_dims[0]
            if in_q is None:
                in_q = pool_q.in_qs[0]
            out_q = pool_q.out_qs[0]
        else:
            at_pool_params = NO_POOL

        if act_params is not None:
            at_act_params = gen_active_at_params(act_params)
            if in_dim is None:
                in_dim = act_params.in_dims[0]
            if out_dim is None:
                out_dim = act_params.out_dims[0]
            if in_q is None:
                in_q = act_q.in_qs[0]
            out_q = act_q.out_qs[0]
            if at_ver < 3:
                if act_params.activation == "relu6" and out_q.q != 0:
                    gen_ctrl.ReluN = 6 << out_q.q
                    gen_ctrl.ReluNNoNorm = 1
            else:
                if act_params.activation == "relun":
                    gen_ctrl.ReluN = act_params.activation_params

        else:
            at_act_params = NO_ACTIVATION

        if pad_compatibilities:
            reduction = PadDim.pad_compatibility_reduce(*pad_compatibilities,
                                                        "convolution padding is not compatible with pool padding")
            if not reduction[2]:  # default is balanced pad left
                at_pad_ctrl = next(i for i, v in enumerate(reduction) if v)
                LOG.debug("%s: generating pad control block", node_name)
                gen_ctrl.PadType = at_pad_ctrl

        attrs = {
            'in_qtype': in_q,
            'out_qtype': out_q,
            'filter_qtype': filter_q,
            'bias_qtype': bias_q,
            'mul_biases_qtype': mul_biases_q,
            'relu_oper': at_act_params.ReLUOper
        }

        if at_pool_params.PoolOper != 'KOP_NONE':
            attrs.update({
                'pool_oper': at_pool_params.PoolOper,
                'pool_w': at_pool_params.Fpx,
                'pool_h': at_pool_params.Fpy,
                'pool_d_w': at_pool_params.Dpx,
                'pool_d_h': at_pool_params.Dpy,
                'pool_s_w': at_pool_params.Spx,
                'pool_s_h': at_pool_params.Spy,
                'pool_pad': at_pool_params.PoolPad
            })
        else:
            attrs.update({
                'pool_oper': 'KOP_NONE',
                'pool_w': 0,
                'pool_h': 0,
                'pool_d_w': 0,
                'pool_d_h': 0,
                'pool_s_w': 0,
                'pool_s_h': 0,
                'pool_pad': 0
            })

        if at_conv_params == NO_CONV:
            if in_q.dtype_bits != out_q.dtype_bits:
                raise NotImplementedError(
                    "only homogenious operations are supported at present")
            LOG.debug("%s: pool relu inq %s outq %s control block",
                      node_name, in_q, out_q)
            if at_pool_params.PoolOper == 'KOP_NONE' and (not in_dim.is_named or not in_dim.has_keys(['c', 'w', 'h'])):
                in_shape = in_dim.shape + ([1] * (3 - len(in_dim.shape)))
                in_c, in_h, in_w = in_shape[0], in_shape[1], in_shape[2]
            else:
                in_c, in_h, in_w = in_dim.c, in_dim.h, in_dim.w
            if out_dim.is_named and out_dim.has_key('c'):
                out_c = out_dim.c
            else:
                out_c = in_c
            attrs.update({
                'in_c': in_c,
                'in_h': in_h,
                'in_w': in_w,
                'out_c': out_c,
                'conv_oper': 'KOP_NONE'
            })
            self.template = 'CALL_TEMPLATE_POOL_RELU'
        else:
            # swap w and h if the input w and the filter w are both 1 so the generator sees a 1D conv
            if in_dim.w == 1 and at_conv_params.Fcx == 1:
                attrs.update({
                    'in_c': in_dim.c,
                    'in_h': 1,
                    'in_w': in_dim.h,
                    'out_c': out_dim.c,
                    'conv_oper': at_conv_params.ConvOper,
                    'conv_w': at_conv_params.Fcy,
                    'conv_h': 1,
                    'conv_d_w': at_conv_params.Dcy,
                    'conv_d_h': at_conv_params.Dcx,
                    'conv_s_w': at_conv_params.Scy,
                    'conv_s_h': at_conv_params.Scx,
                    'conv_pad': at_conv_params.ConvPad
                })
            else:
                attrs.update({
                    'in_c': in_dim.c,
                    'in_h': in_dim.h,
                    'in_w': in_dim.w,
                    'out_c': out_dim.c,
                    'conv_oper': at_conv_params.ConvOper,
                    'conv_w': at_conv_params.Fcx,
                    'conv_h': at_conv_params.Fcy,
                    'conv_d_w': at_conv_params.Dcx,
                    'conv_d_h': at_conv_params.Dcy,
                    'conv_s_w': at_conv_params.Scx,
                    'conv_s_h': at_conv_params.Scy,
                    'conv_pad': at_conv_params.ConvPad
                })
            if isinstance(at_conv_params, ConvATParam):
                if mul_biases_q is not None:
                    LOG.debug("%s: mulconv pool relu inq %s outq %s control block",
                              node_name, in_q, out_q)
                    self.template = 'CALL_TEMPLATE_MULBIAS_CONV_POOL_RELU'
                else:
                    LOG.debug("%s: conv pool relu inq %s outq %s control block",
                              node_name, in_q, out_q)
                    self.template = 'CALL_TEMPLATE_CONV_POOL_RELU'
            elif isinstance(at_conv_params, GroupedConvATParam):
                attrs.update({
                    'group_in': at_conv_params.GroupIn,
                    'group_out': at_conv_params.GroupOut
                })
                if mul_biases_q is not None:
                    LOG.debug("%s: grouped mulconv pool relu inq %s outq %s control block",
                              node_name, in_q, out_q)
                    self.template = 'CALL_TEMPLATE_GROUPED_MULBIAS_CONV_POOL_RELU'
                else:
                    LOG.debug("%s: grouped conv pool relu inq %s outq %s control block",
                              node_name, in_q, out_q)
                    self.template = 'CALL_TEMPLATE_GROUPED_CONV_POOL_RELU'
            else:
                raise ValueError('Internal error')

        # other attributes
        extra_attrs = {
            'cname': cname,
            'node_name': node_name
        }

        super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
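
# Illustrative sketch (not from the generator): the 1D-convolution reshaping performed
# above when the input width and the filter width are both 1. Dims are plain keyword
# arguments here and the helper name is hypothetical.

def as_1d_conv(in_c, in_h, in_w, fcx, fcy, scx, scy, dcx, dcy):
    """Swap w/h so an (h x 1) input with an (fcy x 1) filter is seen as a 1D conv."""
    if in_w == 1 and fcx == 1:
        # present the height axis as width; strides and dilations swap with it
        return dict(in_c=in_c, in_h=1, in_w=in_h,
                    conv_w=fcy, conv_h=1,
                    conv_s_w=scy, conv_s_h=scx,
                    conv_d_w=dcy, conv_d_h=dcx)
    return dict(in_c=in_c, in_h=in_h, in_w=in_w,
                conv_w=fcx, conv_h=fcy,
                conv_s_w=scx, conv_s_h=scy,
                conv_d_w=dcx, conv_d_h=dcy)


# A (c=64, h=128, w=1) input with a 1-wide, 5-tall filter is presented as a
# (c=64, h=1, w=128) input with a 5-wide, 1-tall filter:
print(as_1d_conv(64, 128, 1, fcx=1, fcy=5, scx=1, scy=2, dcx=1, dcy=1))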