Example #1
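Applies an NHWC layout transformation to the whole graph (subgraphs_only=False) and then runs ExternalQOptimizer on the transformed graph before returning it.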
def xgraph_dpu_external_quantizer_optimizer(xgraph, target=None, **kwargs):
    layout_transform_pass = XGraphLayoutTransformationPass('NHWC',
                                                           target=target)
    dpu_xgraph = layout_transform_pass.execute(xgraph, subgraphs_only=False)
    optimizer = ExternalQOptimizer(dpu_xgraph)
    optimizer.optimize()
    return dpu_xgraph
Example #2
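Identical to Example #1 except that the layout-transformed graph is optimized with XGraphTfGeneratorOptimizer instead of ExternalQOptimizer.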
def xgraph_dpu_optimizer(xgraph, target=None, **kwargs):
    layout_transform_pass = XGraphLayoutTransformationPass('NHWC',
                                                           target=target)
    dpu_xgraph = layout_transform_pass.execute(xgraph, subgraphs_only=False)
    optimizer = XGraphTfGeneratorOptimizer(dpu_xgraph)
    optimizer.optimize()
    return dpu_xgraph
Example #3
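A unit test that builds a small NCHW network (Input -> Convolution), applies the NHWC layout transformation pass, and checks that Transpose layers with the expected names, shapes, and axes are inserted around the convolution.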
    def test_simple(self):
        net = [
            XLayer(name='in1',
                   type=['Input'],
                   shapes=[1, 1, 4, 4],
                   sizes=[16],
                   bottoms=[],
                   tops=['conv1'],
                   layer=['in1'],
                   targets=[]),
            XLayer(name='conv1',
                   type=['Convolution'],
                   shapes=[1, 2, 3, 3],
                   sizes=[18],
                   bottoms=['in1'],
                   tops=[],
                   layer=['conv1'],
                   data=ConvData(np.array([1, 1]), np.array([0, 0])),
                   attrs={
                       'data_layout': 'NCHW',
                       'padding': [[0, 0], [0, 0], [1, 1], [1, 1]]
                   },
                   targets=[])
        ]
        xgraph = TestLayoutTransformationPass.xgraph_factory\
            .build_from_xlayer(net)

        layout_transform_pass = XGraphLayoutTransformationPass('NHWC')
        new_xgraph = layout_transform_pass.execute(xgraph)

        xlayers = new_xgraph.get_layers()
        # print(xlayers)
        assert len(new_xgraph) == 4
        assert xlayers[0].type[0] == 'Input'
        assert xlayers[1].type[0] == 'Transpose'
        assert xlayers[2].type[0] == 'Convolution'
        assert xlayers[3].type[0] == 'Transpose'

        assert xlayers[0].bottoms == []
        assert xlayers[0].tops == ['conv1_bottom_NCHW>NHWC']
        assert xlayers[0].shapes == [1, 1, 4, 4]
        assert xlayers[1].bottoms == ['in1']
        assert xlayers[1].tops == ['conv1']
        assert xlayers[1].shapes == [1, 4, 4, 1]
        assert xlayers[2].bottoms == ['conv1_bottom_NCHW>NHWC']
        assert xlayers[2].tops == ['conv1_top_NHWC>NCHW']
        assert xlayers[2].shapes == [1, 3, 3, 2]
        assert xlayers[3].bottoms == ['conv1']
        assert xlayers[3].tops == []
        assert xlayers[3].shapes == [1, 2, 3, 3]

        # NCHW -> NHWC
        assert xlayers[1].attrs['axes'] == [0, 2, 3, 1]
        # NHWC -> NCHW
        assert xlayers[3].attrs['axes'] == [0, 3, 1, 2]

        assert xlayers[2].attrs['data_layout'] == 'NHWC'
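
The axes asserted above follow NumPy's transpose convention. A minimal standalone check (assuming only NumPy) shows why [0, 2, 3, 1] maps NCHW to NHWC and [0, 3, 1, 2] maps it back:

import numpy as np

x = np.zeros((1, 1, 4, 4))               # NCHW: batch, channels, height, width
nhwc = np.transpose(x, (0, 2, 3, 1))     # NCHW -> NHWC
assert nhwc.shape == (1, 4, 4, 1)
nchw = np.transpose(nhwc, (0, 3, 1, 2))  # NHWC -> NCHW
assert nchw.shape == (1, 1, 4, 4)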
Example #4
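A small utility that validates the requested layout and transforms the whole model; only 'NCHW' and 'NHWC' are accepted.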
def transform_layout(xgraph: XGraph, layout: str):
    """ Transform the layout of the XGraph model to the given layout """

    if layout not in ['NCHW', 'NHWC']:
        raise ValueError("Unsupported layout for model: {}. The supported"
                         " layouts are: `NCHW` and `NHWC`".format(layout))

    layout_transform_pass = XGraphLayoutTransformationPass(layout)
    xgraph = layout_transform_pass.execute(xgraph, subgraphs_only=False)
    return xgraph
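
A hedged usage sketch; `xgraph` is assumed to be an XGraph built elsewhere (e.g. with XGraphFactory().build_from_xlayer(...) as in the tests above), so the variable name is a placeholder rather than part of the API:

nhwc_xgraph = transform_layout(xgraph, 'NHWC')  # whole-graph NCHW -> NHWC
try:
    transform_layout(xgraph, 'HWCN')            # unsupported layout
except ValueError as err:
    print(err)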
Example #5
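A DPU optimizer variant that first annotates and merges patterns (e.g. mul + max into a leaky ReLU) before applying the layout transformation and the TensorFlow generator optimization.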
def xgraph_dpu_optimizer(xgraph, target=None, **kwargs):
    # Annotate and merge patterns (e.g. mul + max = leaky relu)
    XGraphPatternAnnotator()(xgraph)
    xgraph = XGraphPatternMutator()(xgraph)

    layout_transform_pass = \
        XGraphLayoutTransformationPass('NHWC', target=target)
    dpu_xgraph = layout_transform_pass.execute(xgraph, subgraphs_only=False)

    # optimizer = QOptimizer(dpu_xgraph)
    # optimizer.optimize()
    optimizer = XGraphTfGeneratorOptimizer(dpu_xgraph)
    optimizer.optimize()

    return dpu_xgraph
Example #6
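The subgraph build function: after transforming the graph to the requested layout, it partitions the graph, collapses each partition assigned to `target` into a single layer of type `xtype`, exposes the partition outputs through TupleGetItem layers (merging trailing Transpose layers where possible), rewires tops and bottoms, and returns a topologically sorted XGraph.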
def xgraph_build_func(xgraph: XGraph,
                      target: str,
                      xtype,
                      layout='NCHW',
                      **kwargs) -> XGraph:

    fancy_logger.banner("Subgraph build func, target: {}, layout: {}".format(
        target, layout))

    # Retrieve CompilerOutput if available
    compiler_output = xgraph.get_compiler_output() if xgraph.is_compiled() \
        else None
    compiler_output_keys = list(compiler_output.keys()) \
        if compiler_output else []
    logger.debug("Compiler output keys: {}".format(compiler_output_keys))

    if layout not in ['NCHW', 'NHWC']:
        raise ValueError(
            "Supported layouts are [NCHW, NHWC] but got: {}".format(layout))

    layout_transform_pass = \
        XGraphLayoutTransformationPass(layout, target=target)
    xgraph = layout_transform_pass.execute(xgraph, subgraphs_only=False)

    xgraph_factory = XGraphFactory()
    xgraph_partitioner = XGraphPartitioner()

    subgraphs = {
        xp.name: xp
        for xp in xgraph_partitioner.get_subgraphs(xgraph)
    }

    # Keep track of the visited partitions/subgraphs and the layers
    #   inside the partition
    visited_xps = {}

    # Keep track of the subgraph output tensors and the corresponding
    #   new layers (TupleGetItem or Transpose)
    xp_out_tensors_2_layers = {}

    name_changes = {}
    net_map = {}
    net = []
    for X in xgraph.get_layers():

        if X.subgraph is not None and X.subgraph not in visited_xps:

            Xp = subgraphs[X.subgraph]

            if 'target' in Xp.attrs and Xp.attrs['target'] == target:

                visited_xps[Xp.name] = set([X.name])

                logger.debug("XSHAPES: {}".format(X.shapes))

                bottoms = Xp.bottoms

                # Keep track of subgraph input and output names
                sub_xgraph = xgraph_factory.build_from_xlayer(Xp.subgraph_data)

                input_names = Xp.attrs['input_names'][:]
                output_names = Xp.attrs['output_names'][:]
                input_layers = \
                    [sub_xgraph.get(in_name) for in_name in input_names]
                output_layers = \
                    [sub_xgraph.get(out_name) for out_name in output_names]

                attrs = {
                    'input_names': input_names,
                    'output_names': output_names,
                    'input_layers':
                    {il.name: il.layer[:]
                     for il in input_layers},
                    'output_layers':
                    {ol.name: ol.layer[:]
                     for ol in output_layers}
                }
                for k, v in kwargs.items():
                    if k in attrs:
                        raise ValueError("Provided claimed subgraph layer"
                                         " key: {}".format(k))
                    attrs[k] = v

                if Xp.name in compiler_output_keys:
                    attrs['rt_in_map'] = compiler_output.get_in_map(Xp.name)
                    for in_name in input_names:
                        for merged_layer in attrs['input_layers'][in_name]:
                            attrs['rt_in_map'][merged_layer] = \
                                attrs['rt_in_map'][in_name]
                    attrs['rt_out_map'] = compiler_output.get_out_map(Xp.name)
                    for out_name in output_names:
                        for merged_layer in attrs['output_layers'][out_name]:
                            attrs['rt_out_map'][merged_layer] = \
                                attrs['rt_out_map'][out_name]

                Xp.attrs.update(attrs)

                shapes = Xp.shapes[:]

                subgraph_X = Xp._replace(
                    # name = X.name,
                    type=[xtype],
                    shapes=shapes,
                    bottoms=bottoms,
                    # Fill tops later
                    tops=[],
                    subgraph_data=[])
                net.append(subgraph_X.name)
                net_map[Xp.name] = subgraph_X

                # Subgraph layers have multiple outputs (Tuple) so we
                #   retrieve the different subgraph outputs
                #   (see output_names variable) using a TupleGetItem
                #   layer
                top_tensors = Xp.attrs['__top_tensors']

                for i, output_name in enumerate(output_names):
                    # Handle merged layers
                    out_tensor = Xp.attrs['output_layers'][output_name][-1]
                    tgi_name = out_tensor
                    # tgi_name = subgraph_X.name + '_tgi' + str(i)

                    top_tensor = top_tensors[output_name]

                    shapes = subgraph_X.shapes[i][:]
                    X_tgi = defaultXLayer()
                    X_tgi = X_tgi._replace(name=tgi_name,
                                           type=['TupleGetItem'],
                                           shapes=shapes,
                                           sizes=shapes.get_size(),
                                           layer=[tgi_name],
                                           tops=top_tensor[:],
                                           bottoms=[subgraph_X.name],
                                           internal=1,
                                           attrs={'index': i})
                    net.append(X_tgi.name)
                    # Keep track of TGI layer for both last merged layer and output name
                    net_map[tgi_name] = X_tgi
                    net_map[output_name] = X_tgi

                    subgraph_X.tops.append(tgi_name)

                    xp_out_tensors_2_layers[output_name] = tgi_name

            else:
                net.append(X.name)
                net_map[X.name] = X

        elif X.subgraph is not None and X.subgraph in visited_xps:
            # Remove layer
            visited_xps[X.subgraph].add(X.name)
        elif 'Transpose' in X.type:
            # Possibly merge the transpose into the TupleGetItem layer
            bX = net_map[X.bottoms[0]]
            new_tops = []
            for t in bX.tops:
                if t != X.name:
                    new_tops.append(t)
                elif len(X.tops) > 0:
                    new_tops.append(X.tops[0])
            if 'TupleGetItem' in bX.type:
                new_X = bX._replace(tops=new_tops)
                new_X.attrs['transpose'] = True
                new_X.attrs['axes'] = X.attrs['axes']
                new_X.shapes[:] = TensorShape(X.shapes[:])
                net_map[new_X.name] = new_X
                name_changes[X.name] = bX.name
            else:
                net.append(X.name)
                net_map[X.name] = X
        else:
            net.append(X.name)
            net_map[X.name] = X

        # Reflect possibly merged layers
        new_bottoms = [
            b if b not in name_changes else name_changes[b] for b in X.bottoms
        ]
        if new_bottoms != X.bottoms:
            new_X = X._replace(bottoms=new_bottoms)
            net_map[X.name] = new_X

    # Set tops and bottoms and enforce topological sequence
    for xp in visited_xps.keys():
        Xp = subgraphs[xp]

        for b in Xp.bottoms:
            top_name = Xp.name
            bX = xgraph.get(b)
            bX.tops = [(bXt if bXt not in visited_xps[Xp.name] else top_name)
                       for bXt in bX.tops]

        for t in Xp.tops:
            tX = xgraph.get(t)
            tX.bottoms = [(tXb if tXb not in visited_xps[Xp.name] else
                           xp_out_tensors_2_layers[tXb]) for tXb in tX.bottoms]

    # Topological sorting
    X_net = [net_map[e] for e in net]
    top_net = sort_topologically(X_net)

    sub_xgraph = xgraph_factory.build_from_xlayer(top_net)

    # Merge transposes if they are cancelling out
    # optimizer = XGraphTransposesOptimizer(sub_xgraph)
    # optimizer.optimize()

    return sub_xgraph
Example #7
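A unit test for target-aware layout transformation: only the layers assigned to the 'test' target (conv1 and conv2) are converted to NHWC, and transposes that cancel out, such as the one feeding conv2, are removed.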
    def test_target(self):
        net = [
            XLayer(name='in1',
                   type=['Input'],
                   shapes=[1, 1, 4, 4],
                   sizes=[16],
                   bottoms=[],
                   tops=['conv1'],
                   layer=['in1'],
                   targets=[]),
            XLayer(name='conv1',
                   type=['Convolution'],
                   shapes=[1, 2, 3, 3],
                   sizes=[18],
                   bottoms=['in1'],
                   tops=['pool1'],
                   layer=['conv1'],
                   data=ConvData(np.array([1, 1]), np.array([0, 0])),
                   attrs={
                       'data_layout': 'NCHW',
                       'padding': [[0, 0], [0, 0], [1, 1], [1, 1]]
                   },
                   targets=[],
                   target='test'),
            XLayer(name='pool1',
                   type=['Pooling'],
                   shapes=[1, 2, 2, 2],
                   sizes=[8],
                   bottoms=['conv1'],
                   tops=['concat1'],
                   layer=['pool1'],
                   attrs={
                       'data_layout': 'NCHW',
                       'padding': [[0, 0], [0, 0], [1, 1], [1, 1]]
                   },
                   targets=[]),
            XLayer(name='in2',
                   type=['Input'],
                   shapes=[1, 4, 4, 1],
                   sizes=[16],
                   bottoms=[],
                   tops=['in2_transpose'],
                   layer=['in2'],
                   targets=[]),
            XLayer(name='in2_transpose',
                   type=['Transpose'],
                   shapes=[1, 1, 4, 4],
                   sizes=[16],
                   bottoms=['in2'],
                   tops=['conv2'],
                   layer=['in2_transpose'],
                   attrs={'axes': [0, 3, 1, 2]},
                   targets=[]),
            XLayer(name='conv2',
                   type=['Convolution'],
                   shapes=[1, 2, 2, 2],
                   sizes=[8],
                   bottoms=['in2_transpose'],
                   tops=['concat1'],
                   layer=['conv2'],
                   data=ConvData(np.array([1, 1]), np.array([0, 0])),
                   attrs={
                       'data_layout': 'NCHW',
                       'padding': [[0, 0], [0, 0], [1, 1], [1, 1]]
                   },
                   targets=[],
                   target='test'),
            XLayer(name='concat1',
                   type=['Concat'],
                   shapes=[1, 4, 2, 2],
                   sizes=[16],
                   bottoms=['pool1', 'conv2'],
                   tops=['concat1_transpose'],
                   layer=['concat1'],
                   attrs={'axis': 1},
                   targets=[]),
            XLayer(name='concat1_transpose',
                   type=['Transpose'],
                   shapes=[1, 2, 2, 4],
                   sizes=[16],
                   bottoms=['concat1'],
                   tops=['dense1'],
                   layer=['concat1_transpose'],
                   attrs={'axes': [0, 2, 3, 1]},
                   targets=[]),
            XLayer(name='dense1',
                   type=['Dense'],
                   shapes=[1, 20],
                   sizes=[20],
                   bottoms=['concat1_transpose'],
                   tops=[],
                   data=ConvData(np.array([1, 1]), np.array([0, 0])),
                   layer=['dense1'],
                   targets=[])
        ]
        xgraph = TestLayoutTransformationPass.xgraph_factory\
            .build_from_xlayer(net)

        layout_transform_pass = XGraphLayoutTransformationPass('NHWC',
                                                               target='test')
        new_xgraph = layout_transform_pass.execute(xgraph)

        xlayers = new_xgraph.get_layers()
        # print(xlayers)
        # print(len(xlayers))
        assert len(new_xgraph) == 10

        assert xlayers[0].type[0] == 'Input'
        assert xlayers[0].name == 'in1'
        assert xlayers[0].bottoms == []
        assert xlayers[0].tops == ['conv1_bottom_NCHW>NHWC']
        assert xlayers[0].shapes == [1, 1, 4, 4]

        assert xlayers[1].type[0] == 'Transpose'
        assert xlayers[1].name == 'conv1_bottom_NCHW>NHWC'
        assert xlayers[1].bottoms == ['in1']
        assert xlayers[1].tops == ['conv1']
        assert xlayers[1].shapes == [1, 4, 4, 1]
        assert xlayers[1].attrs['axes'] == [0, 2, 3, 1]

        assert xlayers[2].type[0] == 'Convolution'
        assert xlayers[2].name == 'conv1'
        assert xlayers[2].bottoms == ['conv1_bottom_NCHW>NHWC']
        assert xlayers[2].tops == ['conv1_top_NHWC>NCHW']
        assert xlayers[2].shapes == [1, 3, 3, 2]
        assert xlayers[2].attrs['data_layout'] == 'NHWC'
        assert xlayers[2].attrs['padding'] == [[0, 0], [1, 1], [1, 1], [0, 0]]

        assert xlayers[3].type[0] == 'Transpose'
        assert xlayers[3].name == 'conv1_top_NHWC>NCHW'
        assert xlayers[3].bottoms == ['conv1']
        assert xlayers[3].tops == ['pool1']
        assert xlayers[3].shapes == [1, 2, 3, 3]
        assert xlayers[3].attrs['axes'] == (0, 3, 1, 2)

        assert xlayers[4].type[0] == 'Pooling'
        assert xlayers[4].name == 'pool1'
        assert xlayers[4].bottoms == ['conv1_top_NHWC>NCHW']
        assert xlayers[4].tops == ['0_split_concat1_transpose']
        assert xlayers[4].shapes == [1, 2, 2, 2]
        assert xlayers[4].attrs['data_layout'] == 'NCHW'
        assert xlayers[4].attrs['padding'] == [[0, 0], [0, 0], [1, 1], [1, 1]]

        assert xlayers[5].type[0] == 'Transpose'
        assert xlayers[5].name == '0_split_concat1_transpose'
        assert xlayers[5].bottoms == ['pool1']
        assert xlayers[5].tops == ['concat1']
        assert xlayers[5].shapes == [1, 2, 2, 2]
        assert xlayers[5].attrs['axes'] == [0, 2, 3, 1]

        assert xlayers[6].type[0] == 'Input'
        assert xlayers[6].name == 'in2'
        assert xlayers[6].bottoms == []
        assert xlayers[6].tops == ['conv2']
        assert xlayers[6].shapes == [1, 4, 4, 1]

        assert xlayers[7].type[0] == 'Convolution'
        assert xlayers[7].name == 'conv2'
        assert xlayers[7].bottoms == ['in2']
        assert xlayers[7].tops == ['concat1']
        assert xlayers[7].shapes == [1, 2, 2, 2]
        assert xlayers[7].attrs['data_layout'] == 'NHWC'

        assert xlayers[8].type[0] == 'Concat'
        assert xlayers[8].name == 'concat1'
        assert xlayers[8].bottoms == ['0_split_concat1_transpose', 'conv2']
        assert xlayers[8].tops == ['dense1']
        assert xlayers[8].shapes == [1, 2, 2, 4]
        assert xlayers[8].attrs['axis'] == 3

        assert xlayers[9].type[0] == 'Dense'
        assert xlayers[9].name == 'dense1'
        assert xlayers[9].bottoms == ['concat1']
        assert xlayers[9].tops == []
        assert xlayers[9].shapes == [1, 20]
Example #8
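The DPU compiler entry point: it transforms the graph to NHWC, generates a frozen TensorFlow graph for the partitioned subgraph, invokes the vai_c_tensorflow compiler in a subprocess, and records the resulting input/output maps on the XGraph.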
    def compile(self):
        # type: () -> XGraph
        """Compile the XGraph for the DPU target and return it."""
        layout_transform_pass = \
            XGraphLayoutTransformationPass('NHWC', target=self.target)
        self.xgraph = layout_transform_pass.execute(self.xgraph,
                                                    subgraphs_only=False)

        # netcfg = list(self.netcfgs.values())[0]  # orig pb file
        quant_info_file = list(self.quant_info.values())[0]  # quant info file

        subxg_layers = DPUCompiler.xgraph_partitioner\
            .get_subgraphs(self.xgraph)[0].subgraph_data
        xgraph = DPUCompiler.xgraph_factory.build_from_xlayer(subxg_layers)
        net_name = list(self.netcfgs.keys())[0]
        fs = DPUCompiler.tf_generator.generate(xgraph,
                                               'graph',
                                               subgraphs_only=True,
                                               layout='NHWC',
                                               batch_size=1,
                                               placeholder=True,
                                               out_dir=self.work_dir)
        netcfg = list(fs.values())[0]

        input_names = xgraph.get_input_names()
        input_shapes = [
            xgraph.get(in_name).shapes.tolist()[:] for in_name in input_names
        ]
        output_names = xgraph.get_output_names()
        output_shapes = [
            xgraph.get(out_name).shapes.tolist()[:]
            for out_name in output_names
        ]
        if len(input_names) > 1:
            raise NotImplementedError(
                "DPUCompiler only handles models with"
                " one input at the moment but found: {}".format(
                    len(input_names)))
        opt_input_shapes = {
            in_name: [e if e != -1 else 1 for e in input_shape]
            for in_name, input_shape in zip(input_names, input_shapes)
        }
        opts = self.Getopts(opt_input_shapes)
        if not os.path.isfile(quant_info_file):
            raise ValueError(
                "quant file: {} does not exist".format(quant_info_file))
        opts['quant_cfgfile'] = quant_info_file
        opts = str(opts)
        command = """
            vai_c_tensorflow \
            --frozen_pb {} \
            --arch {} \
            --output_dir {} \
            --net_name {}\
            --options "{}"
        """.format(netcfg, self.arch, self.build_dir, 'compiler', opts)
        logger.info("command: {}".format(command))
        process = subprocess.Popen(command,
                                   shell=True,
                                   cwd=FILE_PATH,
                                   stdout=subprocess.PIPE)
        output, error = process.communicate()
        if output is not None:
            output = output.decode('utf-8')
            if 'SUCCESSFUL COMPILATION' not in output:
                logger.info(output)
                raise ValueError('Compilation failed. Please see the log'
                                 ' for more details')
        if error is not None:
            error = error.decode('utf-8')
            # raise ValueError(error)

        logger.debug("Output: {}".format(output))
        logger.debug("Error: {}".format(error))
        compiler_json_file = self.build_dir + '/compiler.json'
        with open(compiler_json_file) as json_file:
            json_graph = json.load(json_file)
        graph_inputs = json_graph["inputs"]
        graph_outputs = json_graph["outputs"]
        logger.debug("{} {}".format(input_names, graph_inputs))
        logger.debug("{} {}".format(output_names, graph_outputs))

        in_map = {in_name: in_name for in_name in input_names}
        out_node_merged = []
        out_nodes = [
            graph_output['previous_layers'][0]
            for graph_output in graph_outputs
        ]
        for i in range(len(out_nodes)):
            out_node_merged.append([
                layer['merged'][-1] for layer in json_graph['network']
                if layer['name'] == out_nodes[i]
            ][0])
        out_map = {out_name: t for out_name, t in zip(output_names, out_nodes)}
        #out_map = {out_name: out_name for out_name in output_names}

        self.c_output.add(net_name, ['dpuv1lib.so'], in_map, out_map)
        self.xgraph.set_compiler_output(self.c_output)

        # TODO
        self.xgraph.meta_attrs['compiled'] = True
        self.xgraph.meta_attrs['compiler_libs'] = ['dpuv1lib.so']
        self.xgraph.meta_attrs['compiler_in_map'] = in_map
        self.xgraph.meta_attrs['compiler_out_map'] = out_map

        return self.xgraph
Example #9
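The Relay frontend: recursively converts a TVM Relay expression into an XGraph, optionally appends postprocessing layers (currently only Softmax), transforms the result to NCHW layout, merges transposes, and removes unused operations.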
    def from_relay_to_xgraph(
        self, sym, params, output_op=None, postprocessing=None, cvx_preprocessing=None
    ):
        # type: (tvm.relay.expr.Expr, dict, str, list, dict) -> XGraph
        """
        Transform a TVM Relay expression into an xfDNN graph and schedule

        Arguments
        ---------
        sym: tvm.relay.expr.Expr
            the Relay expression
        params: dict
            the parameters of the Relay expression
        input_layouts: List[str] # TODO
            the layouts of the data inputs
        output_op: str
            the output operation (unused)
        postprocessing: List[str]
            list of postprocessing layers to be added
        cvx_preprocessing: Dict
            dictionary mapping input names to their cvx preprocessing
            key

        Returns
        -------
        xgraph: XGraph
            the graph data structure containing all information
        """

        if postprocessing is None:
            postprocessing = []
        if cvx_preprocessing is None:
            cvx_preprocessing = {}

        if output_op is not None:
            raise NotImplementedError("'output_op' should be None for now")

        fancy_logger.banner("RELAY IR TO PYXIR")

        # schedule = []
        net = {}
        schedule = Schedule(net)
        # CONVERT RELAY EXPRESSION TO XLAYER GRAPH
        # This starts a recursive expression-to-graph conversion
        X = Relay2XGraphConverter.RELAY_2_XLAYER[sym.__class__.__name__](
            sym,
            params,
            schedule,
            net,
            {},
            Relay2XGraphConverter.RELAY_2_XLAYER,
            cvx_prep=cvx_preprocessing,
        )

        # For now only softmax layers can be added to a graph output
        OP_2_XLAYER = {
            "Softmax": xlayer_factory.get_xop_factory_func("Softmax", internal=True)
        }

        # Add additional output layers to the network that are not specified
        #   in the network file (usually only used for adding softmax layers)
        for i, output in enumerate(postprocessing):
            if output not in OP_2_XLAYER:
                continue

            op_name = output + str(i)

            # Update tops of current last layer
            X.tops.append(op_name)
            X = OP_2_XLAYER[output](op_name, [X])

            if X.name in net:
                raise ValueError(
                    "This should never happen. Error because the"
                    " generated output name already exists in the"
                    " network dictionary used for setup."
                )

            schedule.append(X.name)
            net[X.name] = X

        # Possibly replace Input layers with CvxInput layers
        xlayers = [net[op_id] for op_id in schedule]
        xgraph = self.xgraph_factory.build_from_xlayer(
            net=xlayers, name="relay_xgraph", blobs=False
        )

        # TODO remove this layout transformer
        layout_transform_pass = XGraphLayoutTransformationPass("NCHW")
        xgraph = layout_transform_pass.execute(xgraph, subgraphs_only=False)

        # Merge transpose layers
        t_optimizer = XGraphTransposesOptimizer(xgraph)
        t_optimizer.optimize()

        # Remove unused ops
        xgraph = RemoveUnusedOps()(xgraph)

        return xgraph
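
A hedged usage sketch for the converter; the tiny Relay expression below and the default construction of Relay2XGraphConverter are assumptions (constructor arguments may differ across pyxir versions), not an official API reference:

from tvm import relay

# Build a minimal Relay expression: a single conv2d call.
data = relay.var("data", shape=(1, 3, 8, 8))
weight = relay.var("weight", shape=(4, 3, 3, 3))
expr = relay.nn.conv2d(data, weight, kernel_size=(3, 3), channels=4)

converter = Relay2XGraphConverter()  # assumed default-constructible
xgraph = converter.from_relay_to_xgraph(
    expr, params={}, postprocessing=["Softmax"])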