Example #1
def test_paddim_compat():
    red1 = PadDim.pad_compatibility_reduce([True, True, False, True], [True, True, False, True])
    assert red1 == [True, True, False, True]
    red1 = PadDim.pad_compatibility_reduce([True, True, False, True], [True, True, False, False])
    assert red1 == [True, True, False, False]
    dim1 = PadDim(1)
    dim2 = PadDim(1, 2, 1, 2)
    compat1 = dim1.pad_compatibility
    assert compat1 == [False, False, True, True]    
    compat2 = dim2.pad_compatibility
    assert compat2 == [False, False, False, True]    
    red2 = PadDim.pad_compatibility_reduce(compat1, compat2)
    assert red2 == [False, False, False, True]    
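
Aside: the asserts above pin the reduction down as an elementwise AND across the compatibility vectors. A minimal re-implementation sketch, assuming (as the later call sites suggest) that a trailing string argument is an error message raised when no padding mode survives; the name and signature are illustrative, not the library's actual API:

def pad_compatibility_reduce_sketch(*args):
    # Hypothetical reading of the call sites below, which pass an
    # error-message string after the compatibility vectors.
    msg = args[-1] if args and isinstance(args[-1], str) else None
    vecs = [a for a in args if not isinstance(a, str)]
    result = [all(flags) for flags in zip(*vecs)]  # elementwise AND
    if msg is not None and not any(result):
        raise ValueError(msg)
    return result

assert pad_compatibility_reduce_sketch(
    [True, True, False, True],
    [True, True, False, False]) == [True, True, False, False]
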
Example #2
    def __init__(self, node_name, cname, conv_params, conv_q,
                 pool_params, pool_q, act_params, act_q, at_ver=3, gen_ctrl=None):
        if gen_ctrl is None:
            gen_ctrl = GenCtrl(None, cname=cname)
        else:
            gen_ctrl.cname = cname

        in_q = filter_q = out_q = bias_q = mul_biases_q = None
        in_dim = out_dim = None
        pad_compatibilities = []
        if conv_params is not None:
            at_conv_params = gen_conv_at_params(
                conv_params, conv_q, pad_compatibilities)
            in_dim = conv_params.in_dims[0]
            out_dim = conv_params.out_dims[0]
            filter_q = conv_q.in_qs[1]
            in_q = conv_q.in_qs[0]
            out_q = conv_q.out_qs[0]
            bias_q = conv_q.in_qs[2]
            if conv_params.has_mul_bias:
                mul_biases_q = conv_q.mul_biases_q
        else:
            at_conv_params = NO_CONV

        if pool_params is not None:
            at_pool_params = gen_pool_at_params(
                pool_params, pad_compatibilities)
            if in_dim is None:
                in_dim = pool_params.in_dims[0]
            out_dim = pool_params.out_dims[0]
            if in_q is None:
                in_q = pool_q.in_qs[0]
            out_q = pool_q.out_qs[0]
        else:
            at_pool_params = NO_POOL

        if act_params is not None:
            at_act_params = gen_active_at_params(act_params)
            if in_dim is None:
                in_dim = act_params.in_dims[0]
            if out_dim is None:
                out_dim = act_params.out_dims[0]
            if in_q is None:
                in_q = act_q.in_qs[0]
            out_q = act_q.out_qs[0]
            if at_ver < 3:
                if act_params.activation == "relu6" and out_q.q != 0:
                    gen_ctrl.ReluN = 6 << out_q.q
                    gen_ctrl.ReluNNoNorm = 1
            else:
                if act_params.activation == "relun":
                    gen_ctrl.ReluN = act_params.activation_params

        else:
            at_act_params = NO_ACTIVATION

        if pad_compatibilities:
            reduction = PadDim.pad_compatibility_reduce(*pad_compatibilities,
                                                        "convolution padding is not compatible with pool padding")
            if not reduction[2]:  # default is balanced pad left
                at_pad_ctrl = next(i for i, v in enumerate(reduction) if v)
                LOG.debug("%s: generating pad control block", node_name)
                gen_ctrl.PadType = at_pad_ctrl

        attrs = {
            'in_qtype': in_q,
            'out_qtype': out_q,
            'filter_qtype': filter_q,
            'bias_qtype': bias_q,
            'mul_biases_qtype': mul_biases_q,
            'relu_oper': at_act_params.ReLUOper
        }

        if at_pool_params.PoolOper != 'KOP_NONE':
            attrs.update({
                'pool_oper': at_pool_params.PoolOper,
                'pool_w': at_pool_params.Fpx,
                'pool_h': at_pool_params.Fpy,
                'pool_d_w': at_pool_params.Dpx,
                'pool_d_h': at_pool_params.Dpy,
                'pool_s_w': at_pool_params.Spx,
                'pool_s_h': at_pool_params.Spy,
                'pool_pad': at_pool_params.PoolPad
            })
        else:
            attrs.update({
                'pool_oper': 'KOP_NONE',
                'pool_w': 0,
                'pool_h': 0,
                'pool_d_w': 0,
                'pool_d_h': 0,
                'pool_s_w': 0,
                'pool_s_h': 0,
                'pool_pad': 0
            })

        if at_conv_params == NO_CONV:
            if in_q.dtype_bits != out_q.dtype_bits:
                raise NotImplementedError(
                    "only homogeneous operations are supported at present")
            LOG.debug("%s: pool relu inq %s outq %s control block",
                      node_name, in_q, out_q)
            if at_pool_params.PoolOper == 'KOP_NONE' and (not in_dim.is_named or not in_dim.has_keys(['c', 'w', 'h'])):
                in_shape = in_dim.shape + ([1] * (3 - len(in_dim.shape)))
                in_c, in_h, in_w = in_shape[0], in_shape[1], in_shape[2]
            else:
                in_c, in_h, in_w = in_dim.c, in_dim.h, in_dim.w
            if out_dim.is_named and out_dim.has_key('c'):
                out_c = out_dim.c
            else:
                out_c = in_c
            attrs.update({
                'in_c': in_c,
                'in_h': in_h,
                'in_w': in_w,
                'out_c': out_c,
                'conv_oper': 'KOP_NONE'
            })
            self.template = 'CALL_TEMPLATE_POOL_RELU'
        else:
            # swap w and h if input w and filter w are both 1 so the generator sees a 1D conv
            if in_dim.w == 1 and at_conv_params.Fcx == 1:
                attrs.update({
                    'in_c': in_dim.c,
                    'in_h': 1,
                    'in_w': in_dim.h,
                    'out_c': out_dim.c,
                    'conv_oper': at_conv_params.ConvOper,
                    'conv_w': at_conv_params.Fcy,
                    'conv_h': 1,
                    'conv_d_w': at_conv_params.Dcy,
                    'conv_d_h': at_conv_params.Dcx,
                    'conv_s_w': at_conv_params.Scy,
                    'conv_s_h': at_conv_params.Scx,
                    'conv_pad': at_conv_params.ConvPad
                })
            else:
                attrs.update({
                    'in_c': in_dim.c,
                    'in_h': in_dim.h,
                    'in_w': in_dim.w,
                    'out_c': out_dim.c,
                    'conv_oper': at_conv_params.ConvOper,
                    'conv_w': at_conv_params.Fcx,
                    'conv_h': at_conv_params.Fcy,
                    'conv_d_w': at_conv_params.Dcx,
                    'conv_d_h': at_conv_params.Dcy,
                    'conv_s_w': at_conv_params.Scx,
                    'conv_s_h': at_conv_params.Scy,
                    'conv_pad': at_conv_params.ConvPad
                })
            if isinstance(at_conv_params, ConvATParam):
                if mul_biases_q is not None:
                    LOG.debug("%s: mulconv pool relu inq %s outq %s control block",
                              node_name, in_q, out_q)
                    self.template = 'CALL_TEMPLATE_MULBIAS_CONV_POOL_RELU'
                else:
                    LOG.debug("%s: conv pool relu inq %s outq %s control block",
                              node_name, in_q, out_q)
                    self.template = 'CALL_TEMPLATE_CONV_POOL_RELU'
            elif isinstance(at_conv_params, GroupedConvATParam):
                attrs.update({
                    'group_in': at_conv_params.GroupIn,
                    'group_out': at_conv_params.GroupOut
                })
                if mul_biases_q is not None:
                    LOG.debug("%s: grouped conv pool relu inq %s outq %s control block",
                              node_name, in_q, out_q)
                    self.template = 'CALL_TEMPLATE_GROUPED_MULBIAS_CONV_POOL_RELU'
                else:
                    LOG.debug("%s: grouped mulconv pool relu inq %s outq %s control block",
                              node_name, in_q, out_q)
                    self.template = 'CALL_TEMPLATE_GROUPED_CONV_POOL_RELU'
            else:
                raise ValueError('Internal error')

        # other attributes
        extra_attrs = {
            'cname': cname,
            'node_name': node_name
        }

        super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
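
Aside: every generator in these examples derives the AutoTiler pad-control value the same way, taking the index of the first surviving mode in the reduced vector; index 2 (balanced pad left) is skipped because it is the default, per the in-line comment. In isolation the pattern is just:

reduction = [False, False, False, True]  # hypothetical reduced compatibility vector
if not reduction[2]:                     # index 2 = balanced pad left, the default
    at_pad_ctrl = next(i for i, v in enumerate(reduction) if v)
    assert at_pad_ctrl == 3
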
Example #3
    def __init__(self,
                 node_name,
                 cname,
                 conv_params,
                 conv_q,
                 pool_params,
                 pool_q,
                 act_params,
                 act_q,
                 at_ver=3,
                 gen_ctrl=None):
        self.ne16 = False
        if gen_ctrl is None:
            self.gen_ctrl = gen_ctrl = GenCtrl(None, cname=cname)
        else:
            gen_ctrl.cname = cname
            self.gen_ctrl = gen_ctrl

        in_q = filter_q = out_q = bias_q = mul_biases_q = None
        in_dim = out_dim = None
        pad_compatibilities = []
        if conv_params is not None:
            if conv_params.ker_in_order and conv_params.ker_in_order[0] == [
                    "h", "w", "c"
            ]:
                self.hwc = True
                self.gen_ctrl.hwc = 1
            at_conv_params = gen_conv_at_params(conv_params,
                                                pad_compatibilities)
            in_dim = conv_params.in_dims[0]
            out_dim = conv_params.out_dims[0]
            # Set ENABLEIM2COL on 1x1 filters by default
            if conv_params.filter.h == 1 and conv_params.filter.w == 1 and gen_ctrl.enableim2col is None:
                gen_ctrl.enableim2col = 1
            filter_q = conv_q.in_qs[1]
            in_q = conv_q.in_qs[0]
            out_q = conv_q.out_qs[0]
            bias_q = conv_q.in_qs[2]
            if conv_params.has_mul_bias:
                mul_biases_q = conv_q.mul_biases_q
            self.ne16 = conv_q.cache.get('ne16')
        else:
            at_conv_params = NO_CONV

        if pool_params is not None:
            if pool_params.ker_in_order and pool_params.ker_in_order[0] == [
                    "h", "w", "c"
            ]:
                self.hwc = True
                self.gen_ctrl.hwc = 1
            at_pool_params = gen_pool_at_params(pool_params,
                                                pad_compatibilities)
            if in_dim is None:
                in_dim = pool_params.in_dims[0]
            out_dim = pool_params.out_dims[0]
            if in_q is None:
                in_q = pool_q.in_qs[0]
            out_q = pool_q.out_qs[0]
        else:
            at_pool_params = NO_POOL

        if act_params is not None:
            if act_params.ker_in_order and act_params.ker_in_order[0] == [
                    "h", "w", "c"
            ]:
                self.hwc = True
                self.gen_ctrl.hwc = 1
            if in_q is None:
                in_q = act_q.in_qs[0]
            at_act_params = gen_active_at_params(
                act_params,
                force_relu=False,
                asymmetric=act_q.in_qs[0].zero_point != 0)
            if isinstance(act_params,
                          ReluActivationParameters) and act_params.upper_bound:
                self.gen_ctrl.relun = act_params.upper_bound
            if in_dim is None:
                in_dim = act_params.in_dims[0].expand_to_chw()
            if out_dim is None:
                out_dim = act_params.out_dims[0].expand_to_chw()
            out_q = act_q.out_qs[0]

        else:
            at_act_params = NO_ACTIVATION

        if pad_compatibilities:
            reduction = PadDim.pad_compatibility_reduce(
                *pad_compatibilities,
                "convolution padding is not compatible with pool padding")
            if not reduction[2]:  # default is balanced pad left
                at_pad_ctrl = next(i for i, v in enumerate(reduction) if v)
                LOG.debug("%s: generating pad control block", node_name)
                self.gen_ctrl.PadType = at_pad_ctrl
        self.in_dim = in_dim
        self.out_dim = out_dim
        self.in_q = in_q
        self.bias_q = bias_q
        self.out_q = out_q
        self.filter_q = filter_q
        self.mul_biases_q = mul_biases_q
        self.at_act_params = at_act_params
        self.at_pool_params = at_pool_params
        self.at_conv_params = at_conv_params
        self.cname = cname
        self.node_name = node_name
        self.at_ver = at_ver
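
Aside: this constructor switches the generator to HWC layout whenever the first kernel input order is ["h", "w", "c"]. Stripped of context, the check is a plain list comparison; the value below is illustrative:

ker_in_order = [["h", "w", "c"]]  # hypothetical value of params.ker_in_order
if ker_in_order and ker_in_order[0] == ["h", "w", "c"]:
    hwc = True  # the generator then also sets gen_ctrl.hwc = 1
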
Example #4
    def __init__(self, node_name, cname, pool_params, pool_q,
                 act_params, act_q, code_block=None, at_ver=3, gen_ctrl=None):
        if gen_ctrl is None:
            self.gen_ctrl = GenCtrl(None, cname=cname)
        else:
            gen_ctrl.cname = cname
            self.gen_ctrl = gen_ctrl

        in_q = out_q = None
        in_dim = out_dim = None
        pad_compatibilities = []

        if pool_params is not None:
            at_pool_params = gen_pool_at_params(pool_params, pad_compatibilities)
            if in_dim is None:
                in_dim = pool_params.in_dims[0]
            out_dim = pool_params.out_dims[0]
            if in_q is None:
                in_q = pool_q.in_qs[0]
            out_q = pool_q.out_qs[0]
        else:
            at_pool_params = NO_POOL

        if act_params is not None:
            at_act_params = gen_active_at_params(act_params)
            if in_dim is None:
                in_dim = act_params.in_dims[0]
            if out_dim is None:
                out_dim = act_params.out_dims[0]
            if in_q is None:
                in_q = act_q.in_qs[0]
            out_q = act_q.out_qs[0]
            if at_ver < 3:
                if act_params.activation == "relu6" and out_q.q != 0:
                    self.gen_ctrl.ReluN = 6 << out_q.q
                    self.gen_ctrl.ReluNNoNorm = 1
            else:
                if act_params.activation == "relun":
                    self.gen_ctrl.ReluN = act_params.activation_params
        else:
            at_act_params = NO_ACTIVATION

        if code_block is None:
            code_block = CodeBlock()

        if pad_compatibilities:
            reduction = PadDim.pad_compatibility_reduce(*pad_compatibilities,
                                                        "convolution padding is not compatible with pool padding")
            if not reduction[2]:  # default is balanced pad left
                at_pad_ctrl = next(i for i, v in enumerate(reduction) if v)
                self.gen_ctrl.PadType = at_pad_ctrl

        if in_q.bits != out_q.bits:
            raise NotImplementedError("only homogenious operations are supported at present")
        if at_pool_params == NO_POOL:
            raise NotImplementedError(
                "activation layer on its own should not be matched by this kernel")

        self.at_pool_params = at_pool_params
        self.in_dim = in_dim
        self.out_dim = out_dim
        self.in_q = in_q
        self.out_q = out_q
        self.at_act_params = at_act_params
        self.cname = cname
        self.node_name = node_name
        self.at_ver = at_ver
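
Aside: for at_ver < 3 the ReLU6 upper bound is baked into gen_ctrl as a fixed-point integer: 6 << out_q.q multiplies 6 by 2**q, i.e. it is the value 6.0 expressed with q fractional bits. With an illustrative q of 4:

q = 4            # hypothetical fractional bit count of out_q
relu_n = 6 << q  # 6 * 2**4 = 96, i.e. 6.0 in a Q-format with 4 fractional bits
assert relu_n == 96 and relu_n / (1 << q) == 6.0
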
Example #5
    def __init__(self,
                 node_name,
                 cname,
                 conv_params,
                 conv_q,
                 pool_params,
                 pool_q,
                 act_params,
                 act_q,
                 at_ver=3,
                 gen_ctrl=None,
                 force_relu=True):
        if gen_ctrl is None:
            self.gen_ctrl = gen_ctrl = GenCtrl(None, cname=cname)
        else:
            gen_ctrl.cname = cname
            self.gen_ctrl = gen_ctrl

        in_q = filter_q = out_q = bias_q = mul_biases_q = None
        in_dim = out_dim = None
        pad_compatibilities = []
        if conv_params is not None:
            at_conv_params = gen_conv_at_params(conv_params,
                                                pad_compatibilities)
            in_dim = conv_params.in_dims[0]
            out_dim = conv_params.out_dims[0]
            # Set ENABLEIM2COL on 1x1 filters by default
            if conv_params.filter.h == 1 and conv_params.filter.w == 1 and gen_ctrl.enableim2col is None:
                gen_ctrl.enableim2col = 1
            filter_q = conv_q.in_qs[1]
            in_q = conv_q.in_qs[0]
            out_q = conv_q.out_qs[0]
            bias_q = conv_q.in_qs[2]
            if conv_params.has_mul_bias:
                mul_biases_q = conv_q.mul_biases_q
        else:
            at_conv_params = NO_CONV

        if pool_params is not None:
            at_pool_params = gen_pool_at_params(pool_params,
                                                pad_compatibilities)
            if in_dim is None:
                in_dim = pool_params.in_dims[0]
            out_dim = pool_params.out_dims[0]
            if in_q is None:
                in_q = pool_q.in_qs[0]
            out_q = pool_q.out_qs[0]
        else:
            at_pool_params = NO_POOL

        if act_params is not None:
            at_act_params = gen_active_at_params(act_params,
                                                 force_relu=force_relu)
            if in_dim is None:
                in_dim = act_params.in_dims[0].expand_to_chw()
            if out_dim is None:
                out_dim = act_params.out_dims[0].expand_to_chw()
            if in_q is None:
                in_q = act_q.in_qs[0]
            out_q = act_q.out_qs[0]

        else:
            at_act_params = NO_ACTIVATION

        if pad_compatibilities:
            reduction = PadDim.pad_compatibility_reduce(
                *pad_compatibilities,
                "convolution padding is not compatible with pool padding")
            if not reduction[2]:  # default is balanced pad left
                at_pad_ctrl = next(i for i, v in enumerate(reduction) if v)
                LOG.debug("%s: generating pad control block", node_name)
                self.gen_ctrl.PadType = at_pad_ctrl
        self.in_dim = in_dim
        self.out_dim = out_dim
        self.in_q = in_q
        self.bias_q = bias_q
        self.out_q = out_q
        self.filter_q = filter_q
        self.mul_biases_q = mul_biases_q
        self.at_act_params = at_act_params
        self.at_pool_params = at_pool_params
        self.at_conv_params = at_conv_params
        self.cname = cname
        self.node_name = node_name
        self.at_ver = at_ver
Example #6
    def __init__(self,
                 node_name,
                 cname,
                 conv_params,
                 conv_q,
                 pool_params,
                 pool_q,
                 act_params,
                 act_q,
                 at_ver=3,
                 gen_ctrl=None):
        if gen_ctrl is None:
            self.gen_ctrl = gen_ctrl = GenCtrl(None, cname=cname)
        else:
            gen_ctrl.cname = cname
            self.gen_ctrl = gen_ctrl

        in_q = filter_q = out_q = bias_q = mul_biases_q = None
        in_dim = out_dim = None
        pad_compatibilities = []
        if conv_params is not None:
            at_conv_params = gen_conv_at_params(conv_params, conv_q,
                                                pad_compatibilities)
            in_dim = conv_params.in_dims[0]
            out_dim = conv_params.out_dims[0]
            filter_q = conv_q.weights_q
            in_q = conv_q.in_qs[0]
            out_q = conv_q.out_qs[0]
            bias_q = conv_q.biases_q
            if conv_params.has_mul_bias:
                mul_biases_q = conv_q.mul_biases_q
        else:
            at_conv_params = NO_CONV

        if pool_params is not None:
            at_pool_params = gen_pool_at_params(pool_params,
                                                pad_compatibilities)
            if in_dim is None:
                in_dim = pool_params.in_dims[0]
            out_dim = pool_params.out_dims[0]
            if in_q is None:
                in_q = pool_q.in_qs[0]
            out_q = pool_q.out_qs[0]
        else:
            at_pool_params = NO_POOL

        if act_params is not None:
            at_act_params = gen_active_at_params(act_params)
            if in_dim is None:
                in_dim = act_params.in_dims[0]
            if out_dim is None:
                out_dim = act_params.out_dims[0]
            if in_q is None:
                in_q = act_q.in_qs[0]
            out_q = act_q.out_qs[0]
            if at_ver < 3:
                if act_params.activation == "relu6" and out_q.q != 0:
                    self.gen_ctrl.ReluN = 6 << out_q.q
                    self.gen_ctrl.ReluNNoNorm = 1
            else:
                if act_params.activation == "relun":
                    self.gen_ctrl.ReluN = act_params.activation_params

        else:
            at_act_params = NO_ACTIVATION

        if pad_compatibilities:
            reduction = PadDim.pad_compatibility_reduce(
                *pad_compatibilities,
                "convolution padding is not compatible with pool padding")
            if not reduction[2]:  # default is balanced pad left
                at_pad_ctrl = next(i for i, v in enumerate(reduction) if v)
                LOG.debug("%s: generating pad control block", node_name)
                self.gen_ctrl.PadType = at_pad_ctrl
        self.in_dim = in_dim
        self.out_dim = out_dim
        self.in_q = in_q
        self.bias_q = bias_q
        self.out_q = out_q
        self.filter_q = filter_q
        self.mul_biases_q = mul_biases_q
        self.at_act_params = at_act_params
        self.at_pool_params = at_pool_params
        self.at_conv_params = at_conv_params
        self.cname = cname
        self.node_name = node_name
        self.at_ver = at_ver
Example #7
    def __init__(self,
                 node_name,
                 cname,
                 pool_params,
                 pool_q,
                 act_params,
                 act_q,
                 force_relu,
                 gen_ctrl=None):
        if gen_ctrl is None:
            self.gen_ctrl = gen_ctrl = GenCtrl(None, cname=cname)
        else:
            gen_ctrl.cname = cname
            self.gen_ctrl = gen_ctrl
        if pool_params.ker_in_order and pool_params.ker_in_order[0] == [
                "h", "w", "c"
        ]:
            hwc = True
            gen_ctrl.hwc = 1

        pad_compatibilities = []
        at_pool_params = gen_pool_at_params(pool_params, pad_compatibilities)
        in_dim = pool_params.in_dims[0]
        out_dim = pool_params.out_dims[0]
        in_q = pool_q.in_qs[0]
        out_q = pool_q.out_qs[0]

        if act_params is not None:
            act_op = gen_activation_op(
                act_params.activation,
                force_relu=force_relu,
                asymmetric=act_q.in_qs[0].zero_point != 0)
            if out_dim is None:
                out_dim = act_params.out_dims[0].expand_to_chw()
            out_q = act_q.out_qs[0]
        else:
            act_op = "KOP_NONE"

        if pad_compatibilities:
            reduction = PadDim.pad_compatibility_reduce(
                *pad_compatibilities,
                "convolution padding is not compatible with pool padding")
            if not reduction[2]:  # default is balanced pad left
                at_pad_ctrl = next(i for i, v in enumerate(reduction) if v)
                LOG.debug("%s: generating pad control block", node_name)
                self.gen_ctrl.PadType = at_pad_ctrl

        attrs = {
            'in_size': in_q.dtype_bits // 8 if in_q.signed else -in_q.dtype_bits // 8,
            'out_size': out_q.dtype_bits // 8 if out_q.signed else -out_q.dtype_bits // 8,
            'feat': in_dim.c,
            'width': in_dim.w,
            'height': in_dim.h,
            'kop_pool': at_pool_params.PoolOper,
            'fpx': at_pool_params.Fpx,
            'fpy': at_pool_params.Fpy,
            'dpx': at_pool_params.Dpx,
            'dpy': at_pool_params.Dpy,
            'spx': at_pool_params.Spx,
            'spy': at_pool_params.Spy,
            'pool_pad': at_pool_params.PoolPad,
            'kop_act': act_op
        }

        extra_attrs = {'cname': cname, 'node_name': node_name}
        super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
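
Aside: the 'in_size'/'out_size' attributes above appear to fold signedness into the byte count, negating it for unsigned types. A hedged reading of that convention as a standalone helper (illustrative, not the library's API):

def at_size(dtype_bits, signed):
    size = dtype_bits // 8
    return size if signed else -size  # a negative size marks an unsigned tensor

assert at_size(8, signed=True) == 1    # e.g. int8  -> 1
assert at_size(8, signed=False) == -1  # e.g. uint8 -> -1
assert at_size(16, signed=True) == 2   # e.g. int16 -> 2
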
Example #8
    def __init__(self, node_name, cname, conv_params, conv_q, pool_params, pool_q, act_params, act_q, force_relu, gen_ctrl=None):
        if gen_ctrl is None:
            self.gen_ctrl = gen_ctrl = GenCtrl(None, cname=cname)
        else:
            gen_ctrl.cname = cname
            self.gen_ctrl = gen_ctrl

        is_ne16 = conv_q.cache.get('ne16')
        hwc = False
        if not is_ne16 and conv_params.ker_in_order and conv_params.ker_in_order[0] == ["h", "w", "c"]:
            hwc = True
            gen_ctrl.hwc = 1
        if not is_ne16 and not hwc and conv_params.filter.h == 1 and conv_params.filter.w == 1 and gen_ctrl.enableim2col is None:
            gen_ctrl.enableim2col = 1

        in_q = filter_q = out_q = bias_q = None
        in_dim = out_dim = None
        pad_compatibilities = []
        at_conv_params = gen_conv_at_params(
            conv_params, pad_compatibilities)
        in_dim = conv_params.in_dims[0]
        out_dim = conv_params.out_dims[0]
        filter_q = conv_q.in_qs[1]
        in_q = conv_q.in_qs[0]
        out_q = conv_q.out_qs[0]
        bias_q = conv_q.in_qs[2]
        pad_val = in_q.zero_point[0]

        if pool_params is not None:
            at_pool_params = gen_pool_at_params(
                pool_params, pad_compatibilities)
            out_dim = pool_params.out_dims[0]
            out_q = pool_q.out_qs[0]
        else:
            at_pool_params = NO_POOL

        if act_params is not None:
            act_op = gen_activation_op(
                act_params.activation, force_relu=force_relu, asymmetric=act_q.in_qs[0].zero_point != 0)
            if out_dim is None:
                out_dim = act_params.out_dims[0].expand_to_chw()
            out_q = act_q.out_qs[0]
        else:
            act_op = "KOP_NONE"

        if pad_compatibilities:
            reduction = PadDim.pad_compatibility_reduce(*pad_compatibilities,
                                                        "convolution padding is not compatible with pool padding")
            if not reduction[2]:  # default is balanced pad left
                at_pad_ctrl = next(i for i, v in enumerate(reduction) if v)
                LOG.debug("%s: generating pad control block", node_name)
                self.gen_ctrl.PadType = at_pad_ctrl

        attrs = {
            'in_size': in_q.dtype_bits//8 if in_q.signed else -in_q.dtype_bits//8,
            'out_size': out_q.dtype_bits//8 if out_q.signed else -out_q.dtype_bits//8,
            'bias_size': bias_q.dtype_bits//8,
            'filter_bits': filter_q.bits,
            'in_feat': in_dim.c,
            'out_feat': out_dim.c,
            'in_width': in_dim.w,
            'in_height': in_dim.h,
            'kop_conv': at_conv_params.ConvOper,
            'fcx': at_conv_params.Fcx,
            'fcy': at_conv_params.Fcy,
            'dcx': at_conv_params.Dcx,
            'dcy': at_conv_params.Dcy,
            'scx': at_conv_params.Scx,
            'scy': at_conv_params.Scy,
            'conv_pad': at_conv_params.ConvPad,
            'pad_value': pad_val,
            'kop_pool': at_pool_params.PoolOper,
            'fpx': at_pool_params.Fpx,
            'fpy': at_pool_params.Fpy,
            'dpx': at_pool_params.Dpx,
            'dpy': at_pool_params.Dpy,
            'spx': at_pool_params.Spx,
            'spy': at_pool_params.Spy,
            'pool_pad': at_pool_params.PoolPad,
            'kop_act': act_op
        }

        extra_attrs = {
            'cname': cname,
            'node_name': node_name
        }
        super().__init__(attrs, extra_attrs, gen_ctrl=gen_ctrl)
Example #9
def gen_conv_pool_relu(name,
                       conv_params,
                       conv_q,
                       pool_params,
                       pool_q,
                       act_params,
                       act_q,
                       code_block=None):
    in_q = filter_q = out_q = bias_q = None
    in_dim = out_dim = None
    pad_compatibilities = []
    if conv_params is not None:
        at_conv_params = gen_conv_at_params(conv_params, conv_q,
                                            pad_compatibilities)
        in_dim = conv_params.in_dims[0]
        out_dim = conv_params.out_dims[0]
        filter_q = conv_q.weights_q
        in_q = conv_q.in_qs[0]
        out_q = conv_q.out_qs[0]
        bias_q = conv_q.biases_q
    else:
        at_conv_params = NO_CONV

    if pool_params is not None:
        at_pool_params = gen_pool_at_params(pool_params, pad_compatibilities)
        if in_dim is None:
            in_dim = pool_params.in_dims[0]
        out_dim = pool_params.out_dims[0]
        if in_q is None:
            in_q = pool_q.in_qs[0]
        out_q = pool_q.out_qs[0]
    else:
        at_pool_params = NO_POOL

    if act_params is not None:
        at_act_params = gen_active_at_params(act_params)
        if in_dim is None:
            in_dim = act_params.in_dims[0]
        if out_dim is None:
            out_dim = act_params.out_dims[0]
        if in_q is None:
            in_q = act_q.in_qs[0]
        out_q = act_q.out_qs[0]
    else:
        at_act_params = NO_ACTIVATION

    if code_block is None:
        code_block = CodeBlock()

    if not pad_compatibilities:
        at_pad_ctrl = -1
    else:
        reduction = PadDim.pad_compatibility_reduce(
            *pad_compatibilities,
            "convolution padding is not compatible with pool padding")
        if reduction[2]:  # default is balanced pad left
            at_pad_ctrl = -1
        else:
            at_pad_ctrl = next(i for i, v in enumerate(reduction) if v)

    gen_ctrl = None
    if at_pad_ctrl != -1:
        gen_ctrl = 'gen_ctrl_' + name
        code_block.write(
            'CNN_GenControl_T {} = {{-1, -1, -1, -1, {}, -1, -1, -1}};',
            gen_ctrl, at_pad_ctrl)

    if conv_params is None:
        if in_q.bits != out_q.bits:
            raise NotImplementedError(
                "only homogenious operations are supported at present")
        gen_at_pool_relu(code_block,
                         name,
                         at_bits(in_q),
                         at_bits(out_q),
                         in_dim,
                         out_dim,
                         at_pool_params,
                         at_act_params,
                         gen_ctrl=gen_ctrl)
    else:
        if isinstance(at_conv_params, ConvATParam):
            gen_at_conv_pool_relu(code_block,
                                  name,
                                  at_bits(in_q),
                                  at_bits(out_q),
                                  at_bits(filter_q),
                                  at_bits(bias_q),
                                  in_dim,
                                  out_dim,
                                  at_conv_params,
                                  at_pool_params,
                                  at_act_params,
                                  gen_ctrl=gen_ctrl)
        elif isinstance(at_conv_params, GroupedConvATParam):
            gen_at_grouped_conv_pool_relu(code_block,
                                          name,
                                          at_bits(in_q),
                                          at_bits(out_q),
                                          at_bits(filter_q),
                                          at_bits(bias_q),
                                          in_dim,
                                          out_dim,
                                          at_conv_params,
                                          at_pool_params,
                                          at_act_params,
                                          gen_ctrl=gen_ctrl)
        else:
            raise ValueError('Internal error')

    return code_block
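
Aside: the code_block.write call above renders a C initializer for CNN_GenControl_T with every field defaulted to -1 except the pad-control slot. With a hypothetical layer name and an at_pad_ctrl of 3, the emitted line would be:

name, at_pad_ctrl = 'my_conv', 3  # illustrative values
gen_ctrl = 'gen_ctrl_' + name
line = 'CNN_GenControl_T {} = {{-1, -1, -1, -1, {}, -1, -1, -1}};'.format(gen_ctrl, at_pad_ctrl)
assert line == 'CNN_GenControl_T gen_ctrl_my_conv = {-1, -1, -1, -1, 3, -1, -1, -1};'
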
Example #10
def gen_pool_relu(name,
                  pool_params,
                  pool_q,
                  act_params,
                  act_q,
                  code_block=None,
                  at_ver=2,
                  gen_ctrl=None):
    if gen_ctrl is None:
        gen_ctrl = GenCtrl(None, cname=name)
    else:
        gen_ctrl.cname = name

    if at_ver < 3:
        fsize = at_bits
    else:
        fsize = at_bits_and_q
    in_q = out_q = None
    in_dim = out_dim = None
    pad_compatibilities = []

    if pool_params is not None:
        at_pool_params = gen_pool_at_params(pool_params, pad_compatibilities)
        if in_dim is None:
            in_dim = pool_params.in_dims[0]
        out_dim = pool_params.out_dims[0]
        if in_q is None:
            in_q = pool_q.in_qs[0]
        out_q = pool_q.out_qs[0]
    else:
        at_pool_params = NO_POOL

    if act_params is not None:
        at_act_params = gen_active_at_params(act_params)
        if in_dim is None:
            in_dim = act_params.in_dims[0]
        if out_dim is None:
            out_dim = act_params.out_dims[0]
        if in_q is None:
            in_q = act_q.in_qs[0]
        out_q = act_q.out_qs[0]
        if act_params.activation == "relu6" and out_q.q != 0:
            gen_ctrl.ReluN = 6 << out_q.q
            gen_ctrl.ReluNNoNorm = 1
    else:
        at_act_params = NO_ACTIVATION

    if code_block is None:
        code_block = CodeBlock()

    if pad_compatibilities:
        reduction = PadDim.pad_compatibility_reduce(
            *pad_compatibilities,
            "convolution padding is not compatible with pool padding")
        if not reduction[2]:  # default is balanced pad left
            at_pad_ctrl = next(i for i, v in enumerate(reduction) if v)
            gen_ctrl.PadType = at_pad_ctrl

    if not gen_ctrl.is_unmodified:
        gen_ctrl.gen_ctrl_decl(code_block)

    if in_q.bits != out_q.bits:
        raise NotImplementedError(
            "only homogenious operations are supported at present")
    if pool_params is None:
        raise NotImplementedError(
            "activation layer on its own is not implemented at present")
    gen_at_pool_relu(code_block,
                     name,
                     fsize(in_q),
                     fsize(out_q),
                     in_dim,
                     out_dim,
                     at_pool_params,
                     at_act_params,
                     gen_ctrl=gen_ctrl)
    return code_block
Example #11
def gen_conv_pool_relu(name,
                       conv_params,
                       conv_q,
                       pool_params,
                       pool_q,
                       act_params,
                       act_q,
                       code_block=None,
                       at_ver=2,
                       gen_ctrl=None):

    if gen_ctrl is None:
        gen_ctrl = GenCtrl(None, cname=name)
    else:
        gen_ctrl.cname = name

    if at_ver < 3:
        fsize = at_bits
    else:
        fsize = at_bits_and_q
    in_q = filter_q = out_q = bias_q = None
    in_dim = out_dim = None
    pad_compatibilities = []
    if conv_params is not None:
        at_conv_params = gen_conv_at_params(conv_params, conv_q,
                                            pad_compatibilities)
        in_dim = conv_params.in_dims[0]
        out_dim = conv_params.out_dims[0]
        filter_q = conv_q.weights_q
        in_q = conv_q.in_qs[0]
        out_q = conv_q.out_qs[0]
        bias_q = conv_q.biases_q
    else:
        at_conv_params = NO_CONV

    if pool_params is not None:
        at_pool_params = gen_pool_at_params(pool_params, pad_compatibilities)
        if in_dim is None:
            in_dim = pool_params.in_dims[0]
        out_dim = pool_params.out_dims[0]
        if in_q is None:
            in_q = pool_q.in_qs[0]
        out_q = pool_q.out_qs[0]
    else:
        at_pool_params = NO_POOL

    if act_params is not None:
        at_act_params = gen_active_at_params(act_params)
        if in_dim is None:
            in_dim = act_params.in_dims[0]
        if out_dim is None:
            out_dim = act_params.out_dims[0]
        if in_q is None:
            in_q = act_q.in_qs[0]
        out_q = act_q.out_qs[0]
        if act_params.activation == "relu6" and out_q.q != 0:
            gen_ctrl.ReluN = 6 << out_q.q
            gen_ctrl.ReluNNoNorm = 1
    else:
        at_act_params = NO_ACTIVATION

    if code_block is None:
        code_block = CodeBlock()

    if pad_compatibilities:
        reduction = PadDim.pad_compatibility_reduce(
            *pad_compatibilities,
            "convolution padding is not compatible with pool padding")
        if not reduction[2]:  # default is balanced pad left
            at_pad_ctrl = next(i for i, v in enumerate(reduction) if v)
            LOG.debug("%s: generating pad control block", name)
            gen_ctrl.PadType = at_pad_ctrl

    if not gen_ctrl.is_unmodified:
        gen_ctrl.gen_ctrl_decl(code_block)

    if conv_params is None:
        if in_q.bits != out_q.bits:
            raise NotImplementedError(
                "only homogenious operations are supported at present")
        LOG.debug("%s: pool relu inq %s outq %s control block", name, in_q,
                  out_q)
        gen_at_pool_relu(code_block,
                         name,
                         fsize(in_q),
                         fsize(out_q),
                         in_dim,
                         out_dim,
                         at_pool_params,
                         at_act_params,
                         gen_ctrl=gen_ctrl)
    else:
        if isinstance(at_conv_params, ConvATParam):
            LOG.debug("%s: conv pool relu inq %s outq %s control block", name,
                      in_q, out_q)
            gen_at_conv_pool_relu(code_block,
                                  name,
                                  fsize(in_q),
                                  fsize(out_q),
                                  fsize(filter_q),
                                  fsize(bias_q),
                                  in_dim,
                                  out_dim,
                                  at_conv_params,
                                  at_pool_params,
                                  at_act_params,
                                  gen_ctrl=gen_ctrl)
        elif isinstance(at_conv_params, GroupedConvATParam):
            LOG.debug(
                "%s: grouped conv pool relu inq %s outq %s control block",
                name, in_q, out_q)
            gen_at_grouped_conv_pool_relu(code_block,
                                          name,
                                          fsize(in_q),
                                          fsize(out_q),
                                          fsize(filter_q),
                                          fsize(bias_q),
                                          in_dim,
                                          out_dim,
                                          at_conv_params,
                                          at_pool_params,
                                          at_act_params,
                                          gen_ctrl=gen_ctrl)
        else:
            raise ValueError('Internal error')

    return code_block