Example #1
    def replace_sub_graph(self, graph: Graph, match: [dict, SubgraphMatch]):

        reduce_max_axis = match['reduce_indices_max'].value
        reduce_sum_axis = match['reduce_indices_sum'].value

        # Normalize scalar axis values to 1-D arrays so they can be compared below
        if reduce_max_axis.ndim == 0:
            reduce_max_axis = reduce_max_axis.reshape([1])

        if reduce_sum_axis.ndim == 0:
            reduce_sum_axis = reduce_sum_axis.reshape([1])

        if len(reduce_max_axis) != 1:
            log.info('The reduction indices contain more than one element. Cannot convert to Softmax.')
            return

        if not np.array_equal(reduce_max_axis, reduce_sum_axis):
            log.info('The reduce indices are not equal: {} vs {}. Cannot convert to Softmax.'.format(
                reduce_max_axis, reduce_sum_axis))
            return

        # Create the fused Softmax node and rewire the matched subgraph's
        # input and output connections to it
        softmax = Softmax(graph, {'name': match['input'].name + '/Softmax', 'axis': reduce_sum_axis[0]}).create_node()
        match['input'].out_port(0).connect(softmax.in_port(0))
        match['div'].out_port(0).get_connection().set_source(softmax.out_port(0))

        log.debug('Successfully created Softmax node')
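
This transform relies on the numerically stable softmax expansion Softmax(x) = exp(x - max(x)) / sum(exp(x - max(x))) computed along a single axis; the matched ReduceMax/ReduceSum/Div chain is exactly that expression. As a minimal NumPy sketch (illustrative only, not part of the Model Optimizer code), the equivalence the replacement depends on can be checked directly:

    import numpy as np

    def expanded_softmax(x, axis):
        # ReduceMax -> subtract -> Exp -> ReduceSum -> Div, as in the matched subgraph
        e = np.exp(x - x.max(axis=axis, keepdims=True))
        return e / e.sum(axis=axis, keepdims=True)

    x = np.random.randn(2, 5).astype(np.float32)
    reference = np.exp(x) / np.exp(x).sum(axis=1, keepdims=True)  # plain softmax
    assert np.allclose(expanded_softmax(x, axis=1), reference, atol=1e-6)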
Example #2
    def replace_op(self, graph: Graph, node: Node):
        node_name = node.soft_get('name', node.id)
        assert node.has_valid('axis'), \
            'The node "{}" does not have mandatory attribute "axis"'.format(node_name)

        flatten_node = FlattenONNX(graph, {'name': node_name + '/FlattenONNX_',
                                           'axis': node.axis}).create_node()
        shape_node = Shape(graph, {'name': node_name + '/ShapeOf_'}).create_node()
        softmax_node = Softmax(graph, {
            'name': node_name + '/Softmax_',
            'axis': 1,
            'framework_node_name': node_name,
            'rename_condition': lambda n: len(n.graph.get_op_nodes(name=node_name)) == 0
        }).create_node()
        reshape_node = Reshape(graph, {}).create_node()

        # The original node is renamed for deletion; the final Reshape takes its name
        rename_nodes([(node, node_name + '/delete'), (reshape_node, node_name)])

        flatten_node.out_port(0).connect(softmax_node.in_port(0))
        softmax_node.out_port(0).connect(reshape_node.in_port(0))
        shape_node.out_port(0).connect(reshape_node.in_port(1))

        # Both the Flatten branch and the ShapeOf branch consume the original input
        source = node.in_port(0).get_source()

        flatten_node.in_port(0).connect(source)
        shape_node.in_port(0).connect(source)

        return [reshape_node.id]
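
Here the replacement reproduces ONNX-style Softmax semantics (before opset 13), where the input is coerced into a 2-D tensor at `axis`, a rank-2 Softmax is applied over axis 1, and the original shape is restored with a Reshape fed by ShapeOf. A rough NumPy sketch of the same data flow (the helper name is illustrative, not Model Optimizer code):

    import numpy as np

    def onnx_style_softmax(x, axis):
        # FlattenONNX: collapse dims before and after `axis` into a 2-D tensor
        flat = x.reshape(int(np.prod(x.shape[:axis], dtype=np.int64)), -1)
        e = np.exp(flat - flat.max(axis=1, keepdims=True))
        out = e / e.sum(axis=1, keepdims=True)  # Softmax over axis 1
        return out.reshape(x.shape)             # Reshape back via the saved shape

    x = np.random.randn(2, 3, 4)
    print(onnx_style_softmax(x, axis=1).shape)  # (2, 3, 4)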
Example #3
    def replace_op(self, graph: Graph, node: Node):
        log = LogOp(graph, {'name': node.name + '/Log_'}).create_node()
        softmax = Softmax(graph, {
            'axis': 1,
            'name': node.name + '/SoftMax_'
        }).create_node()

        # Connect nodes: input -> Softmax -> Log
        node.in_port(0).get_connection().set_destination(softmax.in_port(0))
        log.in_port(0).get_connection().set_source(softmax.out_port(0))

        # The "explicit" version of the return value is: [(out_node.id, 0)])
        return [log.id]
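
This replacement decomposes LogSoftmax into Softmax followed by Log (Example #4 below does the same, but keeps the node's own axis and preserves the original node name). A minimal NumPy check of the identity the decomposition relies on (illustrative only):

    import numpy as np

    x = np.random.randn(3, 4)
    e = np.exp(x - x.max(axis=1, keepdims=True))
    softmax = e / e.sum(axis=1, keepdims=True)
    log_softmax = np.log(softmax)  # input -> Softmax -> Log, as wired above
    # Direct log-softmax form for comparison: (x - max) - log(sum(exp(x - max)))
    direct = x - x.max(axis=1, keepdims=True) - np.log(e.sum(axis=1, keepdims=True))
    assert np.allclose(log_softmax, direct)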
Example #4
    def replace_op(self, graph: Graph, node: Node):
        node_name = node.soft_get('name', node.id)
        assert node.has_valid('axis'), \
            'The node "{}" does not have mandatory attribute "axis"'.format(node_name)

        # The Log node is created without a name; rename_nodes below assigns it
        # the original node's name so the framework output name is preserved
        log = LogOp(graph, {}).create_node()
        softmax = Softmax(graph, {
            'axis': node.axis,
            'name': node_name + '/Softmax'
        }).create_node()
        rename_nodes([(node, node_name + '/delete'), (log, node_name)])

        # Connect nodes: input -> Softmax -> Log
        node.in_port(0).get_connection().set_destination(softmax.in_port(0))
        log.in_port(0).get_connection().set_source(softmax.out_port(0))
        return [log.id]
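
Compared to Example #3, this variant reads the axis from the node instead of hard-coding 1, and uses rename_nodes so that the final Log node takes over the original node's name while the replaced node is marked with a '/delete' suffix; this is the usual way these replacements keep the output tensor name stable after the substitution.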