Example no. 1
def test_activation():
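    # quantize a small vector, push it through a default ActivationParameters
    # node and check that negative values are clamped to zero (ReLU behaviour)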
    in_q = QType(16, 13, True)
    input_ = in_q.quantize(np.array([-1.2, 0.5, 0.5, -0.6])).reshape([4, 1, 1])
    in_dims = Dim.named(c=4, h=1, w=1).impose_order(['c', 'h', 'w'])
    params = ActivationParameters("test")
    qrec = QuantizationRecord([in_q], [in_q])
    out_dims = params.get_output_size([in_dims])
    output_ = activation(params, in_dims, out_dims[0], input_, qrec=qrec)
    output_ = in_q.dequantize(output_)
    assert np.array_equal(output_, [[[0]], [[0.5]], [[0.5]], [[0]]])
Example no. 2
def fuse_activation(G: NNGraph, tfl_opts, name: str, node: Node):
    # no fused activation specified in the TFLite options: add the node as-is
    if (tfl_opts.FusedActivationFunction() ==
            ActivationFunctionType.ActivationFunctionType.NONE):
        return add_node(G, node)

    # otherwise append the matching activation node after the operator
    activation = TF_ACTIVATIONS[tfl_opts.FusedActivationFunction()]
    anode = ActivationParameters(aname(name), activation)
    return add_node(G, node, anode=anode)
Example no. 3
def add_activation(G,
                   name,
                   subgraph,
                   op_name,
                   op,
                   load_tensors=False,
                   dequantize=False):
    # stand-alone TFLite activation operators map directly onto an ActivationParameters node
    check(op.InputsLength() == 1,
          "Very odd " + str(op.InputsAsNumpy()))
    activation = TF_ACTIVATION_OPERATORS[op_name]
    return add_node(G, ActivationParameters(name, activation))
Example no. 4
    def fuse_activation(cls, tfl_opts, name, params, **kwargs):
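        # if the TFLite operator carries a fused activation, or (for scaled quantization)
        # an output range that looks like an omitted relu/relu6, create the matching
        # ActivationParameters node, wire it after the operator and give it a quantization record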
        G = kwargs['G']
        opts = kwargs['opts']
        ext = hashlib.sha1(name.encode(
            "UTF-8")).hexdigest()[:8] if opts.get('anonymise') else 'activation'
        if opts.get('load_quantization') and NodeId(params) in G.quantization:
            node_qrec = G.quantization[NodeId(params)]
        else:
            node_qrec = None
        # if node_qrec is not None and None in node_qrec.in_qs + node_qrec.out_qs:
        #     # one of the inputs is a constant or strange behaviour -> maybe something fusions will get rid of
        #     return add_node(self.G, node)
        aparams = None
        if tfl_opts.FusedActivationFunction() == ActivationFunctionType.NONE:
            if node_qrec is not None and node_qrec.ktype.startswith('scaled'): # and opts.get('insert_relus'):
                # here we have no activation in an asymmetric qtype -> maybe an omitted relu
                if node_qrec.out_qs[0] is not None and node_qrec.out_qs[0].min_val == 0:
                    if np.all(np.round(node_qrec.out_qs[0].max_val) == 6):
                        aparams = ActivationParameters.get_activation(
                            'relu6', name + f"_{ext}")
                    else:
                        aparams = ActivationParameters.get_activation(
                            'relu', name + f"_{ext}")
        else:
            aparams = ActivationParameters.get_activation(cls.TF_ACTIVATIONS[tfl_opts.FusedActivationFunction()],
                                                          name + f"_{ext}")
        if aparams:
            G.add_edge(NNEdge(from_node=params, to_node=aparams))

            if opts.get('load_quantization'):
                # between the fused operation and the activation the values are
                # carried in int32 representation
                node_qrec = G.quantization[NodeId(params)]
                ina_qtype = deepcopy(node_qrec.out_qs[0])
                outa_qtype = deepcopy(ina_qtype)
                G.quantization[NodeId(aparams)] = QRec.scaled(
                    in_qs=[ina_qtype], out_qs=[outa_qtype])
            params = aparams
        return params
Example no. 5
def add_shortcut(out_graph, routes, idx, l):
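    # darknet 'shortcut' layer: element-wise add of the previous layer's output
    # and the output of the layer referenced by l['from'], optionally followed
    # by an activation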
    node_name = "{}_{}".format(l['type'], idx)
    activation = l.get('activation')
    if activation is None:
        routes['in'][idx], routes['out'][idx] =\
            out_graph.add_operator(node_name, MatrixAddParameters())
    else:
        activation = DARKNET_ACTIVATION_TYPES[activation]
        routes['in'][idx], routes['out'][idx] =\
            out_graph.add_operators(node_name,\
                [MatrixAddParameters(), ActivationParameters(activation)])

    routes['edges'].append([idx - 1, idx])
    routes['edges'].append([int(l['from']) + idx, idx])
    return False
Example no. 6
def add_fully_connected(out_graph, routes, idx, l):
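    # darknet 'connected' (fully connected) layer: an FcParameters node with bias,
    # optionally followed by an activation (the activation defaults to 'logistic')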
    activation = get_str(l, 'activation', default="logistic")
    filter_c = get_int(l, 'output')
    node_name = "{}_{}".format(l['type'], idx)
    if activation is None:
        routes['in'][idx], routes['out'][idx] =\
            out_graph.add_operator(node_name, FcParameters(FcFilterDim(filter_c), has_bias=True))
    else:
        activation = DARKNET_ACTIVATION_TYPES[activation]
        routes['in'][idx], routes['out'][idx] =\
            out_graph.add_operators(
                node_name,
                [FcParameters(FcFilterDim(filter_c), has_bias=True),\
                    ActivationParameters(activation)]
            )

    return True
Example no. 7
def add_convolution(out_graph, routes, idx, l):
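    # darknet 'convolutional' layer: a Conv2DParameters node (bias plus optional
    # batch-norm/flipped flags passed through 'custom'), optionally followed by an activation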
    activation = get_str(l, 'activation', default="logistic")
    node_name = "{}_{}".format(l['type'], idx)
    routes['in'][idx] = node_name
    padding = l.get("padding")
    pad = l.get("pad")
    size = get_int(l, 'size', 1)
    groups = get_int(l, 'groups', 1)
    filters_c = get_int(l, 'filters', 1)
    stride = get_int(l, 'stride', 1)
    batch_normalize = get_int(l, 'batch_normalize', 0)
    flipped = get_int(l, 'flipped', 0)
    custom = {'batch_normalize': batch_normalize == 1, 'flipped': flipped == 1}

    assert 'binary' not in l, "Binary convolutions are not implemented"
    assert 'xnor' not in l, "XNOR convolutions are not implemented"
    assert 'dot' not in l, "dot is not implemented"

    # padding calculation as per Darknet code
    if pad is not None:
        padding = int(size / 2)
    if padding is None:
        padding = 0

    if activation is None:
        routes['in'][idx], routes['out'][idx] = \
            out_graph.add_operator(
                node_name,
                Conv2DParameters(Conv2DFilterDim(size, size, filters_c),
                                 StrideDim(stride), PadDim(padding),
                                 groups=groups, custom=custom, has_bias=True))
    else:
        activation = DARKNET_ACTIVATION_TYPES[activation]
        routes['in'][idx], routes['out'][idx] = \
            out_graph.add_operators(
                node_name,
                [
                    Conv2DParameters(Conv2DFilterDim(size, size, filters_c),
                                     StrideDim(stride), PadDim(padding),
                                     groups=groups, custom=custom, has_bias=True),
                    ActivationParameters(activation)
                ]
            )

    return True