Example #1
    def __init__(self, node_name, cname, params, qrec, gen_ctrl=None):
        if gen_ctrl is None:
            gen_ctrl = GenCtrl(None, cname=cname)
        else:
            gen_ctrl.cname = cname

        if params.hard_act:
            gen_ctrl.rnn_use_hardact = 1

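        # map each canonical RNN input name to its index into qrec.in_qs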
        names = {val: idx for idx, val in enumerate(RNNParameters.INPUT_NAMES)}
        in_qs = qrec.in_qs
        if in_qs[names['i_2_i_w']].bits != in_qs[names['r_2_i_w']].bits:
            raise ValueError(f'bit width of gates differs in {params.name}')

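        # attributes used to test equality - i.e. this kernel can be reused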
        attrs = {
            'bias_size': in_qs[names['i_b']].dtype_bits // 8,
            'feat_size': -in_qs[0].dtype_bits // 8,
            'filter_bits': in_qs[names['i_2_i_w']].bits,
            'n_cells': params.n_cells,
            'k0': params.n_input_cells,
            'k1': params.n_output_cells,
            'dim_state': params.n_states,
            'dim_in': params.n_inputs,
            'always_reset': 0,
            'revert': 1 if params.revert else 0,
        }

        extra_attrs = {'cname': cname, 'node_name': node_name}
        super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
    def __init__(self, cname, params, qrec, gen_ctrl=None):
        if gen_ctrl is None:
            gen_ctrl = GenCtrl(None, cname=cname)
        else:
            gen_ctrl.cname = cname

        if qrec.out_qs[0].is_floating:
            gen_ctrl.float_dump = 1

        attrs = {
            'n_frames': params.n_frames,
            'frame_size': params.frame_size,
            'frame_stride': params.frame_step,
            'n_fft': params.n_fft,
            'preemp_factor': params.preemp_factor,
            'skip_preemp': 0,
            'no_window': int(params.win_fn is None),
            'out_fft': 0,
            'magsquared': int(params.magsquared),
            'data_type': DSP_DTYPE[qrec.out_qs[0].dtype],
        }

        # other attributes
        extra_attrs = {'cname': cname, 'node_name': params.name}

        super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
    def __init__(self,
                 node_name,
                 cname,
                 matadd_params,
                 act_params=None,
                 force_relu=True,
                 gen_ctrl=None):
        if gen_ctrl is None:
            gen_ctrl = GenCtrl(None, cname=cname)
        else:
            gen_ctrl.cname = cname

        if act_params is not None:
            at_act_params = gen_activation_op(act_params.activation,
                                              force_relu=force_relu)
        else:
            at_act_params = "KOP_NONE"

        dimensions = make_three_dims(matadd_params.in_dims[0])

        attrs = {
            'feat': dimensions[0],
            'width': dimensions[1],
            'height': dimensions[2],
            'act_oper': at_act_params
        }

        extra_attrs = {'cname': cname, 'node_name': node_name}
        super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
    def __init__(self, node_name, cname, matrixadd_params, pad_params, act_params, at_ver=3, gen_ctrl=None, force_relu=True):
        if gen_ctrl is None:
            gen_ctrl = GenCtrl(None, cname=cname)
        else:
            gen_ctrl.cname = cname

        if act_params is not None:
            at_act_params = gen_activation_op(
                act_params.activation, force_relu=force_relu)
        else:
            at_act_params = NO_ACTIVATION

        padtop = pad_params.padding[0][0]
        padbot = pad_params.padding[0][1]
        padded_idx = (0 if matrixadd_params.in_dims[0].size() >
                      matrixadd_params.in_dims[1].size() else 1)
        dimensions0 = make_three_dims(matrixadd_params.in_dims[0])
        dimensions1 = make_three_dims(matrixadd_params.in_dims[1])

        attrs = {
            'feat': max(dimensions0[0], dimensions1[0]),
            'width': dimensions0[1],
            'height': dimensions0[2],
            'padded_idx': padded_idx,
            'padtop': padtop,
            'padbot': padbot,
            'act_oper': at_act_params
        }

        extra_attrs = {
            'cname': cname,
            'node_name': node_name
        }
        super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
    def __init__(self,
                 node_name,
                 cname,
                 linear_params,
                 act_params,
                 linear_q,
                 act_q,
                 force_relu,
                 gen_ctrl=None):
        if gen_ctrl is None:
            gen_ctrl = GenCtrl(None, cname=cname)
        else:
            gen_ctrl.cname = cname

        in_dim = linear_params.in_dims[0]
        out_dim = linear_params.out_dims[0]
        in_qs = linear_q.in_qs
        mulbiases_q = linear_q.cache['mul_biases_q']

        if act_params is not None:
            at_act_params = gen_activation_op(
                act_params.activation,
                force_relu=force_relu,
                asymmetric=act_q.in_qs[0].zero_point != 0)
            if in_dim is None:
                in_dim = act_params.in_dims[0]
            if out_dim is None:
                out_dim = act_params.out_dims[0]
            out_qs = act_q.out_qs
        else:
            at_act_params = "KOP_NONE"
            out_qs = linear_q.out_qs

        attrs = {
            # negative sizes flag unsigned types
            'in_size': (in_qs[0].dtype_bits // 8 if in_qs[0].signed
                        else -in_qs[0].dtype_bits // 8),
            'out_size': (out_qs[0].dtype_bits // 8 if out_qs[0].signed
                         else -out_qs[0].dtype_bits // 8),
            'bias_size': in_qs[2].dtype_bits // 8,
            'scale_size': mulbiases_q.dtype_bits // 8,
            'filter_bits': in_qs[1].bits,
            'in_feat': in_dim.size(),
            'out_feat': out_dim.size(),
            'act_op': at_act_params
        }

        extra_attrs = {'cname': cname, 'node_name': node_name}
        super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
    def __init__(self,
                 node_name,
                 cname,
                 params,
                 linear_q,
                 act_params,
                 act_q,
                 at_ver=3,
                 gen_ctrl=None):
        if gen_ctrl is None:
            gen_ctrl = GenCtrl(None, cname=cname)
        else:
            gen_ctrl.cname = cname

        assert params is not None, "linear should always be included"
        in_size = params.in_dims[0].size()
        out_size = params.out_dims[0].size()
        filter_q = linear_q.in_qs[1]
        in_q = linear_q.in_qs[0]
        out_q = linear_q.out_qs[0]
        bias_q = linear_q.in_qs[2]

        if act_params is not None:
            act_op = gen_activation_op(act_params.activation)
            out_q = act_q.out_qs[0]
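            # pre-v3 autotiler expresses relu6 as a ReluN bound scaled into
            # the output's Q format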
            if at_ver < 3:
                if act_params.activation == "relu6" and out_q.q != 0:
                    gen_ctrl.ReluN = 6 << out_q.q
                    gen_ctrl.ReluNNoNorm = 1
            else:
                if act_params.activation == "relun":
                    gen_ctrl.ReluN = act_params.activation_params
        else:
            act_op = "KOP_NONE"

        # attributes used to test equality - i.e. this kernel can be reused
        attrs = {
            'in_q': in_q,
            'filter_q': filter_q,
            'bias_q': bias_q,
            'out_q': out_q,
            'in_size': in_size,
            'out_size': out_size,
            'act_op': act_op
        }

        # other attributes
        extra_attrs = {'cname': cname, 'node_name': node_name}

        super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
        self.at_ver = at_ver
Example #7
    def __init__(self, cname, params, qrec, gen_ctrl=None):
        if gen_ctrl is None:
            gen_ctrl = GenCtrl(None, cname=cname)
        else:
            gen_ctrl.cname = cname

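        # total output size in bytes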
        attrs = {
            'size': params.out_dims[0].size() * (qrec.out_qs[0].bits // 8)
        }

        # other attributes
        extra_attrs = {'cname': cname, 'node_name': params.name}

        super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
    def __init__(self, cname, params, qrec, gen_ctrl=None):
        if gen_ctrl is None:
            gen_ctrl = GenCtrl(None, cname=cname)
        else:
            gen_ctrl.cname = cname

        if qrec.out_qs[0].is_floating:
            gen_ctrl.float_dump = 1

        if params.log_type is not None and params.log_offset:
            # the log offset is encoded as Q30 fixed point
            gen_ctrl.mfcc_log_offset = int(np.round(params.log_offset * 2**30))

        attrs = {
            'n_frames': params.n_frames,
            'frame_size': params.frame_size,
            'frame_stride': params.frame_step,
            'n_fft': params.n_fft,
            'n_melbanks': params.n_fbanks,
            'size_mel_coeff': params.get_melfilter_size()[0],
            'n_dct': params.n_dct,
            'preemp_factor': params.preemp_factor,
            'no_window': int(params.win_fn is None),
            'lift_coeff': 0,
            'magsquared': int(params.magsquared),
            'data_type': DSP_DTYPE[qrec.out_qs[0].dtype],
            'log_type': 0 if not params.log_type else (2 if params.log_type == "db" else 1),
            'out_fft': 0,
        }

        # other attributes
        extra_attrs = {'cname': cname, 'node_name': params.name}

        super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
Example #9
    def __init__(self, cname, params, qrec, gen_ctrl=None):
        if gen_ctrl is None:
            gen_ctrl = GenCtrl(None, cname=cname)
        else:
            gen_ctrl.cname = cname

        if qrec.out_qs[0].is_floating:
            gen_ctrl.float_dump = 1

        attrs = {
            'size': params.out_dims[0].size(),
            'feature_size': (qrec.out_qs[0].dtype_bits // 8)
        }

        # other attributes
        extra_attrs = {'cname': cname, 'node_name': params.name}

        super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
Example #10
    def __init__(self, node_name, cname, params, aparams, qrec, gen_ctrl=None):
        if gen_ctrl is None:
            gen_ctrl = GenCtrl(None, cname=cname)
        else:
            gen_ctrl.cname = cname

        if isinstance(aparams,
                      ReluActivationParameters) and aparams.upper_bound:
            gen_ctrl.ReluN = aparams.upper_bound

        in_dims = params.in_dims
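        # a third input dimension carries the bias (cf. the same 3-input
        # check in the fused matmul generator in Example #15)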
        matop = "KOP_MATMUL" if len(in_dims) > 2 else "KOP_MATMUL_NOBIAS"
        transposed = isinstance(params, MatMulTransposedParameters)
        if transposed:
            matop += "_TRANSPOSED"

        attrs = {
            'ColM1': in_dims[0][1],
            'LineM1': in_dims[0][0],
            'ColM2': in_dims[1][0] if transposed else in_dims[1][1],
            'LineM2': in_dims[1][1] if transposed else in_dims[1][0],
            'Width': 0,
            'Height': 0,
            'Scx': 1,
            'Scy': 1,
            'kop_matmul': matop,
            'actoper': gen_activation_op(aparams.activation if aparams else "none")
        }

        extra_attrs = {'cname': cname, 'node_name': node_name}
        super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
Example #11
    def __init__(self, node_name, cname, params, gen_ctrl=None):
        if gen_ctrl is None:
            gen_ctrl = GenCtrl(None, cname=cname)
        else:
            gen_ctrl.cname = cname

        in_dims = params.in_dims
        out_dims = params.out_dims

        attrs = {
            'n_anchors': in_dims[0].shape[0],
            'n_classes': in_dims[1][1],
            'n_outboxes': out_dims[0][0],
            'max_bb_before_nms': params.max_bb_before_nms,
            'DecScoreThr': params.nms_score_threshold,
            'NMSThr': params.nms_iou_threshold
        }

        extra_attrs = {'cname': cname, 'node_name': node_name}
        super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
    def __init__(self, node_name, cname, params, qrec, gen_ctrl=None):
        if gen_ctrl is None:
            gen_ctrl = GenCtrl(None, cname=cname)
        else:
            gen_ctrl.cname = cname

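        # hard (piecewise-linear) activations use the hard-act kernel variant
        # and take their gate pre-normalization from the quantizer cache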
        if params.hard_act:
            gen_ctrl.rnn_use_hardact = 1
            gen_ctrl.gate_prenorm = qrec.cache['i_2_f_q'].pre_normalization

        names = {val: idx for idx, val in enumerate(LSTMParameters.INPUT_NAMES)}
        in_qs = qrec.in_qs

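        # all input ('i') and recurrent ('r') weights across the f/i/c/o
        # gates must share a single bit width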
        w_bits = None
        for gate in ['f', 'i', 'c', 'o']:
            for inp_t in ['r', 'i']:
                if w_bits is None:
                    w_bits = in_qs[names[f'{inp_t}_2_{gate}_w']].bits
                elif w_bits != in_qs[names[f'{inp_t}_2_{gate}_w']].bits:
                    raise ValueError(f'bit width of gates differs in {params.name}')

        attrs = {
            'bias_size': in_qs[names['i_b']].dtype_bits // 8,
            'feat_size': -in_qs[0].dtype_bits // 8,
            'filter_bits': w_bits,
            'n_cells': params.n_cells,
            'k0': params.n_input_cells,
            'k1': params.n_output_cells,
            'dim_state': params.n_states,
            'dim_in': params.n_inputs,
            'always_reset': 0,
            'revert': 1 if params.revert else 0,
        }

        extra_attrs = {'cname': cname, 'node_name': node_name}
        super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
    def __init__(self,
                 node_name,
                 cname,
                 matadd_params,
                 act_params=None,
                 add_q=None,
                 act_q=None,
                 force_relu=True,
                 gen_ctrl=None):
        if gen_ctrl is None:
            gen_ctrl = GenCtrl(None, cname=cname)
        else:
            gen_ctrl.cname = cname

        if not add_q.out_qs[0].signed:
            gen_ctrl.output_datasize = -add_q.out_qs[0].dtype_bits // 8
        if not add_q.in_qs[0].signed:
            gen_ctrl.input_datasize = -add_q.in_qs[0].dtype_bits // 8

        if act_params is not None:
            at_act_params = gen_active_at_params(
                act_params,
                force_relu=force_relu,
                asymmetric=act_q.in_qs[0].zero_point != 0)
        else:
            at_act_params = NO_ACTIVATION

        dimensions = make_three_dims(matadd_params.in_dims[0])

        attrs = {
            'feat': dimensions[0],
            'width': dimensions[1],
            'height': dimensions[2],
            'act_oper': at_act_params.ReLUOper
        }

        extra_attrs = {'cname': cname, 'node_name': node_name}
        super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
    def __init__(self, node_name, cname, conv_params, conv_q,
                 pool_params, pool_q, act_params, act_q, at_ver=3, gen_ctrl=None):
        if gen_ctrl is None:
            gen_ctrl = GenCtrl(None, cname=cname)
        else:
            gen_ctrl.cname = cname

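        # gather dims and qtypes from whichever of conv/pool/act are present;
        # later stages only fill in values earlier stages left as None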
        in_q = filter_q = out_q = bias_q = mul_biases_q = None
        in_dim = out_dim = None
        pad_compatibilities = []
        if conv_params is not None:
            at_conv_params = gen_conv_at_params(
                conv_params, conv_q, pad_compatibilities)
            in_dim = conv_params.in_dims[0]
            out_dim = conv_params.out_dims[0]
            filter_q = conv_q.in_qs[1]
            in_q = conv_q.in_qs[0]
            out_q = conv_q.out_qs[0]
            bias_q = conv_q.in_qs[2]
            if conv_params.has_mul_bias:
                mul_biases_q = conv_q.mul_biases_q
        else:
            at_conv_params = NO_CONV

        if pool_params is not None:
            at_pool_params = gen_pool_at_params(
                pool_params, pad_compatibilities)
            if in_dim is None:
                in_dim = pool_params.in_dims[0]
            out_dim = pool_params.out_dims[0]
            if in_q is None:
                in_q = pool_q.in_qs[0]
            out_q = pool_q.out_qs[0]
        else:
            at_pool_params = NO_POOL

        if act_params is not None:
            at_act_params = gen_active_at_params(act_params)
            if in_dim is None:
                in_dim = act_params.in_dims[0]
            if out_dim is None:
                out_dim = act_params.out_dims[0]
            if in_q is None:
                in_q = act_q.in_qs[0]
            out_q = act_q.out_qs[0]
            if at_ver < 3:
                if act_params.activation == "relu6" and out_q.q != 0:
                    gen_ctrl.ReluN = 6 << out_q.q
                    gen_ctrl.ReluNNoNorm = 1
            else:
                if act_params.activation == "relun":
                    gen_ctrl.ReluN = act_params.activation_params

        else:
            at_act_params = NO_ACTIVATION

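        # reconcile conv and pool padding; anything other than the default
        # balanced pad left scheme is emitted as a PadType control override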
        if pad_compatibilities:
            reduction = PadDim.pad_compatibility_reduce(*pad_compatibilities,
                                                        "convolution padding is not compatible with pool padding")
            if not reduction[2]:  # default is balanced pad left
                at_pad_ctrl = next(i for i, v in enumerate(reduction) if v)
                LOG.debug("%s: generating pad control block", node_name)
                gen_ctrl.PadType = at_pad_ctrl

        attrs = {
            'in_qtype': in_q,
            'out_qtype': out_q,
            'filter_qtype': filter_q,
            'bias_qtype': bias_q,
            'mul_biases_qtype': mul_biases_q,
            'relu_oper': at_act_params.ReLUOper
        }

        if at_pool_params.PoolOper != 'KOP_NONE':
            attrs.update({
                'pool_oper': at_pool_params.PoolOper,
                'pool_w': at_pool_params.Fpx,
                'pool_h': at_pool_params.Fpy,
                'pool_d_w': at_pool_params.Dpx,
                'pool_d_h': at_pool_params.Dpy,
                'pool_s_w': at_pool_params.Spx,
                'pool_s_h': at_pool_params.Spy,
                'pool_pad': at_pool_params.PoolPad
            })
        else:
            attrs.update({
                'pool_oper': 'KOP_NONE',
                'pool_w': 0,
                'pool_h': 0,
                'pool_d_w': 0,
                'pool_d_h': 0,
                'pool_s_w': 0,
                'pool_s_h': 0,
                'pool_pad': 0
            })

        if at_conv_params == NO_CONV:
            if in_q.dtype_bits != out_q.dtype_bits:
                raise NotImplementedError(
                    "only homogenious operations are supported at present")
            LOG.debug("%s: pool relu inq %s outq %s control block",
                      node_name, in_q, out_q)
            if at_pool_params.PoolOper == 'KOP_NONE' and (not in_dim.is_named or not in_dim.has_keys(['c', 'w', 'h'])):
                in_shape = in_dim.shape + ([1] * (3 - len(in_dim.shape)))
                in_c, in_h, in_w = in_shape[0], in_shape[1], in_shape[2]
            else:
                in_c, in_h, in_w = in_dim.c, in_dim.h, in_dim.w
            if out_dim.is_named and out_dim.has_key('c'):
                out_c = out_dim.c
            else:
                out_c = in_c
            attrs.update({
                'in_c': in_c,
                'in_h': in_h,
                'in_w': in_w,
                'out_c': out_c,
                'conv_oper': 'KOP_NONE'
            })
            self.template = 'CALL_TEMPLATE_POOL_RELU'
        else:
            # swap w and h if w and filter w is 1 so generator sees 1D conv
            if in_dim.w == 1 and at_conv_params.Fcx == 1:
                attrs.update({
                    'in_c': in_dim.c,
                    'in_h': 1,
                    'in_w': in_dim.h,
                    'out_c': out_dim.c,
                    'conv_oper': at_conv_params.ConvOper,
                    'conv_w': at_conv_params.Fcy,
                    'conv_h': 1,
                    'conv_d_w': at_conv_params.Dcy,
                    'conv_d_h': at_conv_params.Dcx,
                    'conv_s_w': at_conv_params.Scy,
                    'conv_s_h': at_conv_params.Scx,
                    'conv_pad': at_conv_params.ConvPad
                })
            else:
                attrs.update({
                    'in_c': in_dim.c,
                    'in_h': in_dim.h,
                    'in_w': in_dim.w,
                    'out_c': out_dim.c,
                    'conv_oper': at_conv_params.ConvOper,
                    'conv_w': at_conv_params.Fcx,
                    'conv_h': at_conv_params.Fcy,
                    'conv_d_w': at_conv_params.Dcx,
                    'conv_d_h': at_conv_params.Dcy,
                    'conv_s_w': at_conv_params.Scx,
                    'conv_s_h': at_conv_params.Scy,
                    'conv_pad': at_conv_params.ConvPad
                })
            if isinstance(at_conv_params, ConvATParam):
                if mul_biases_q is not None:
                    LOG.debug("%s: mulconv pool relu inq %s outq %s control block",
                              node_name, in_q, out_q)
                    self.template = 'CALL_TEMPLATE_MULBIAS_CONV_POOL_RELU'
                else:
                    LOG.debug("%s: conv pool relu inq %s outq %s control block",
                              node_name, in_q, out_q)
                    self.template = 'CALL_TEMPLATE_CONV_POOL_RELU'
            elif isinstance(at_conv_params, GroupedConvATParam):
                attrs.update({
                    'group_in': at_conv_params.GroupIn,
                    'group_out': at_conv_params.GroupOut
                })
                if mul_biases_q is not None:
                    LOG.debug("%s: grouped mulconv pool relu inq %s outq %s control block",
                              node_name, in_q, out_q)
                    self.template = 'CALL_TEMPLATE_GROUPED_MULBIAS_CONV_POOL_RELU'
                else:
                    LOG.debug("%s: grouped conv pool relu inq %s outq %s control block",
                              node_name, in_q, out_q)
                    self.template = 'CALL_TEMPLATE_GROUPED_CONV_POOL_RELU'
            else:
                raise ValueError('Internal error')

        # other attributes
        extra_attrs = {
            'cname': cname,
            'node_name': node_name
        }

        super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
Example #15
    def __init__(self, cname, params, matmul_params, matmul_qrec, act_params, act_qrec, gen_ctrl=None, out_qtype=None):
        if gen_ctrl is None:
            gen_ctrl = GenCtrl(None, cname=cname)
        else:
            gen_ctrl.cname = cname

        if len(params.in_dims[0]) != 2 or len(params.in_dims[1]) != 2:
            raise ValueError(f'Matmul {params.name} has inputs of rank {len(params.in_dims[0])} and {len(params.in_dims[1])} '
                             f'which are not supported by the matmul kernel')
        in1_shape = params.in_dims[0].shape
        in2_shape = params.in_dims[1].shape
        height_2 = in2_shape[0]
        width_2 = in2_shape[1]
        out_shape = params.out_dims[0].shape

        in1_qtype = matmul_qrec.in_qs[0]
        in2_qtype = matmul_qrec.in_qs[1]
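        # a third input means the matmul has a bias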
        if len(matmul_params.in_dims) == 3:
            bias_bits = at_bits(matmul_qrec.in_qs[2])
            bias_q = matmul_qrec.in_qs[2].q
            matmul_op = 'KOP_MATMUL'
        else:
            bias_q = 0
            bias_bits = 0
            matmul_op = 'KOP_MATMUL_NOBIAS'

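        # the transposed variant swaps the second operand's height and width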
        if isinstance(matmul_params, MatMulTransposedParameters):
            matmul_op += '_TRANSPOSED'
            height_2 = in2_shape[1]
            width_2 = in2_shape[0]

        if act_params is not None:
            act_op = gen_activation_op(act_params.activation)
            out_qtype = act_qrec.out_qs[0]
            relu_lower = 0
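            # express the relu6 bound in the output's Q format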
            if act_params.activation == "relu6" and out_qtype.q != 0:
                relu_upper = 6 << out_qtype.q
            else:
                relu_upper = 0
        else:
            out_qtype = matmul_qrec.out_qs[0]
            relu_upper = relu_lower = 0
            act_op = "KOP_NONE"

        # attributes used to test equality - i.e. this kernel can be reused
        attrs = {
            'in1_qtype': in1_qtype,
            'in2_qtype': in2_qtype,
            'bias_q': bias_q,
            'bias_bits': bias_bits,
            'out_qtype': out_qtype,
            'in1_shape': in1_shape,
            'height_2': height_2,
            'width_2': width_2,
            'out_shape': out_shape,
            'relu_lower': relu_lower,
            'relu_upper': relu_upper,
            'mult_op': matmul_op,
            'act_op': act_op
        }

        # other attributes
        extra_attrs = {
            'cname': cname,
            'node_name': params.name
        }

        super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)