Example #1
def test_fake_values_concat(concat_test_graph):
    G = create_graph(concat_test_graph, opts={"load_tensors": True})
    G.add_dimensions()
    G.adjust_order()
    matcher = get_std_match_group()
    matcher.match(G)
    G.add_dimensions()
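    # Use fake constant values so code generation does not need real weight data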
    G.constant_store.fake = True
    stats_collector = ActivationStatsCollector()
    stats_collector.collect_stats(
        G, [np.random.rand(*node.dims.shape) for node in G.inputs()])
    astats = stats_collector.reduce_stats()
    stats_collector = FilterStatsCollector()
    fstats = stats_collector.collect_stats(G)
    quantizer = SimpleQuantizer(astats, fstats, force_width=8)
    qrecs = quantizer.quantize(G)
    G.quantization = qrecs
    with tempfile.TemporaryDirectory() as tempdir:
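        # AutoTiler code generation options: default memory locations and the tensor output directory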
        opts = {
            'default_input_location': 'ARG_LOC_L2',
            'default_output_location': 'ARG_LOC_L2',
            'default_global_location': 'ARG_LOC_L3_HFLASH',
            'default_local_location': '0',
            'at_ver': 3,
            'tensor_directory': tempdir
        }
        code_gen = CodeGenerator(G, DefaultNamingConvension(G), opts)
        print(default_template(G, code_generator=code_gen))
        code_gen.write_constants()
Example #2
def generate_code(args):
    LOG.propagate = False

    nntool_shell = NNToolShell(args, allow_cli_args=False)

    # handler = GeneratorShellLogHandler()
    # formatter = logging.Formatter('%(module)s - %(message)s')
    # handler.setFormatter(formatter)
    # LOG.addHandler(handler)
    # if args.log_level:
    #     LOG.setLevel(args.log_level.upper())
    # else:
    #     LOG.setLevel('WARN')

    LOG.info("Loading %s", args.graph_file)

    nntool_shell.load_state_file(args.graph_file)
    G = nntool_shell.G
    opts = nntool_shell.settings

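    # Command-line arguments override the paths stored in the loaded state file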
    if args.model_file:
        opts['model_file'] = args.model_file
    if args.model_directory:
        opts['model_directory'] = args.model_directory
    if args.tensor_directory:
        opts['tensor_directory'] = args.tensor_directory
    opts['basic_kernel_header_file'] = args.model_file
    opts['basic_kernel_source_file'] = args.model_file

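    # Make sure the model and tensor output directories exist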
    os.makedirs(os.path.abspath(opts['model_directory']),
                mode=0o750,
                exist_ok=True)
    os.makedirs(os.path.abspath(opts['tensor_directory']),
                mode=0o750,
                exist_ok=True)

    code_gen = CodeGenerator(G, DefaultNamingConvension(G), opts)
    if args.template_file:
        code_template = dynamic_template(args.template_file)
    else:
        code_template = default_template
    write_template(G, code_gen, opts['model_directory'], opts['model_file'],
                   code_template, "model")
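    # Graphs containing expression fusions also need generated basic kernel sources and headers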
    if G.has_expressions:
        write_template(G, code_gen, opts['model_directory'],
                       opts['basic_kernel_header_file'],
                       basic_kernel_header_template, "kernel headers")
        write_template(G, code_gen, opts['model_directory'],
                       opts['basic_kernel_source_file'],
                       basic_kernel_source_template, "kernel source")

    if args.header_file:
        with open(os.path.join(opts['model_directory'], args.header_file),
                  "w") as output_fp:
            output_fp.write(header_template(G, code_generator=code_gen))
    if not args.dont_dump_tensors:
        LOG.info("Writing constants to %s", opts['model_directory'])
        code_gen.write_constants()
Example #3
    def do_gen(self, args):
        """
Generate AutoTiler model C code and optionally dump tensors. If no destination file is
given, the generated code is output to the screen. Check the 'set' command for
settings related to code generation."""
        self._check_graph()
        self._check_quantized()
        self._check_adjusted()
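        # Run the quantized graph on the supplied input so checksums can be generated into the model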
        if args.checksums:
            input_args = self._get_input_args(None)
            LOG.info("input file %s", args.checksums)
            data = import_data(args.checksums, **input_args)
            executer = GraphExecuter(self.G, qrecs=self.G.quantization)
            executer.execute([data], qmode=QuantizationMode.all())
            self.settings['checksum_file'] = args.checksums
            self.settings['generate_checksums'] = True

        if args.tensor_directory:
            self.settings['tensor_directory'] = args.tensor_directory
        if args.model_directory:
            self.settings['model_directory'] = args.model_directory
        self.settings['basic_kernel_source_file'] = args.basic_kernel_source_file
        self.settings['basic_kernel_header_file'] = args.basic_kernel_header_file
        code_gen = CodeGenerator(self.G, DefaultNamingConvension(self.G), self.settings)

        if self.settings['template_file']:
            code_template = dynamic_template(self.settings['template_file'])
        else:
            code_template = default_template

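        # Write to files if a model file was given, otherwise page the generated code to the screen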
        if args.model_file:
            with open(os.path.join(self.settings['model_directory'],
                                   args.model_file), "w") as output_fp:
                output_fp.write(code_template(self.G, code_generator=code_gen))
            if self.G.has_expressions:
                with open(os.path.join(self.settings['model_directory'],
                                       args.basic_kernel_source_file), "w") as output_fp:
                    output_fp.write(basic_kernel_source_template(self.G, code_generator=code_gen))
                with open(os.path.join(self.settings['model_directory'],
                                       args.basic_kernel_header_file), "w") as output_fp:
                    output_fp.write(basic_kernel_header_template(self.G, code_generator=code_gen))
        else:
            self.ppaged(code_template(self.G, code_generator=code_gen))
            if self.G.has_expressions:
                self.ppaged(basic_kernel_source_template(self.G, code_generator=code_gen))
                self.ppaged(basic_kernel_header_template(self.G, code_generator=code_gen))
        if args.output_tensors:
            code_gen.write_constants()

        if args.header_file:
            with open(os.path.join(self.settings['model_directory'], args.header_file), "w") as output_fp:
                output_fp.write(header_template(self.G, code_generator=code_gen))
Example #4
def generate_code(args):
    LOG.propagate = False
    handler = GeneratorShellLogHandler()
    formatter = logging.Formatter('%(module)s - %(message)s')
    handler.setFormatter(formatter)
    LOG.addHandler(handler)
    if args.log_level:
        LOG.setLevel(args.log_level.upper())
    else:
        LOG.setLevel('WARN')

    LOG.info("Loading %s", args.graph_file)
    G, opts = load_state(args.graph_file, return_extra=True)
    if args.model_file:
        opts['model_file'] = args.model_file
    if args.model_directory:
        opts['model_directory'] = args.model_directory
    if args.tensor_directory:
        opts['tensor_directory'] = args.tensor_directory

    os.makedirs(os.path.abspath(opts['model_directory']),
                mode=0o750,
                exist_ok=True)
    os.makedirs(os.path.abspath(opts['tensor_directory']),
                mode=0o750,
                exist_ok=True)

    model_path = os.path.join(opts['model_directory'], opts['model_file'])
    LOG.info("Saving model to %s", model_path)
    code_gen = CodeGenerator(G, DefaultNamingConvension(G), opts)
    if args.template_file:
        code_template = dynamic_template(args.template_file)
    else:
        code_template = default_template
    model = code_template(G, code_generator=code_gen)
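    # An empty result means the template failed to render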
    if not model:
        LOG.error("error generating model code")
        sys.exit(1)
    with open(model_path, "w") as output_fp:
        output_fp.write(model)
    if args.header_file:
        with open(os.path.join(opts['model_directory'], args.header_file),
                  "w") as output_fp:
            output_fp.write(header_template(G, code_generator=code_gen))
    if not args.dont_dump_tensors:
        LOG.info("Writing constants to %s", opts['model_directory'])
        code_gen.write_constants()
Example #5
def test_fused_operational(caplog, mnist_fused_8bit_state):
    caplog.set_level(logging.INFO)
    G = load_state(mnist_fused_8bit_state)
    opts = {
        'default_input_location': 'ARG_LOC_L2',
        'default_output_location': 'ARG_LOC_L2',
        'default_global_location': 'ARG_LOC_L3_HFLASH',
        'default_local_location': '0',
    }
    code_gen = CodeGenerator(G, DefaultNamingConvension(G), opts)
    default_template(G, code_generator=code_gen)
Example #6
def test_tensor_dump(mnist_fused_8bit_state):
    with tempfile.TemporaryDirectory() as tempdir:
        G = load_state(mnist_fused_8bit_state)
        opts = {
            'default_input_location': 'ARG_LOC_L2',
            'default_output_location': 'ARG_LOC_L2',
            'default_global_location': 'ARG_LOC_L3_HFLASH',
            'default_local_location': '0',
            'tensor_directory': tempdir
        }
        code_gen = CodeGenerator(G, DefaultNamingConvension(G), opts)
        default_template(G, code_generator=code_gen)
        code_gen.write_constants()
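        # write_constants should produce one .tensor file per weight and bias parameter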
        files_list = [
            f for f in os.listdir(tempdir)
            if os.path.isfile(os.path.join(tempdir, f))
        ]
        assert set(files_list) == set([
            'Step2Weights.tensor', 'Step1Weights.tensor', 'Step1Biases.tensor',
            'Step3Weights.tensor', 'Step2Biases.tensor', 'Step3Biases.tensor'
        ])
Example #7
def gen_project(G,
                settings,
                project_folder,
                script_commands,
                overwrite=False,
                performance=False):
    code_gen = CodeGenerator(G, DefaultNamingConvension(G), settings)

    if not os.path.exists(project_folder):
        os.mkdir(project_folder)
    main = os.path.join(project_folder, f"{code_gen.project_name}")
    main_c = main + '.c'
    main_h = main + '.h'
    common_mk = os.path.join(project_folder, "common.mk")
    nntool_script = os.path.join(project_folder, "nntool_script")
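    # Only write project files that do not already exist unless overwrite is requested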
    if overwrite or not os.path.exists(main_c):
        with open(os.path.join(project_folder, f"{code_gen.project_name}.c"),
                  "w") as output_fp:
            output_fp.write(generate_main_appl_template(G, code_gen))
    if overwrite or not os.path.exists(main_h):
        with open(os.path.join(project_folder, f"{code_gen.project_name}.h"),
                  "w") as output_fp:
            output_fp.write(generate_main_appl_header(G, code_gen))
    if overwrite or not os.path.exists(common_mk):
        with open(os.path.join(project_folder, "common.mk"), "w") as output_fp:
            output_fp.write(generate_main_appl_make(G, code_gen))
    if overwrite or not os.path.exists(nntool_script):
        with open(nntool_script, 'w') as fp:
            # NOTE - gen_template_project is excluded so that tests work. Normally it will not be in the
            # history.
            fp.writelines([
                f"{line}\n" for line in script_commands
                if not (line.startswith('open') or line.startswith(
                    'gen_project') or line.startswith('performance'))
            ])
            if performance:
                for setting in [
                        'set graph_produce_node_names true',
                        'set graph_produce_operinfos true',
                        'set graph_monitor_cycles true'
                ]:
                    fp.write(f'{setting}\n')
            if script_commands[-1] != "save_state":
                fp.write('save_state\n')
    ignore_function = None if overwrite else skip_existing_files(
        project_folder)
    shutil.copytree('generation/project_template',
                    project_folder,
                    dirs_exist_ok=True,
                    ignore=ignore_function)
Example #8
def test_gen_vergesense(caplog):
    caplog.set_level(logging.DEBUG)
    tfi = TfliteImporter()
    G = tfi.create_graph("tests/graph/marco_17_04.tflite", {
        'load_tensors': True,
        'load_quantization': True
    })
    G.add_dimensions()
    G.adjust_order()
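    # Apply the scale8 fusion matchers before generating code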
    matcher = get_scale8_match_group()
    matcher.match(G)
    G.add_dimensions()
    with tempfile.TemporaryDirectory() as tempdir:
        opts = {
            'default_input_location': 'ARG_LOC_L2',
            'default_output_location': 'ARG_LOC_L2',
            'default_global_location': 'ARG_LOC_L3_HFLASH',
            'default_local_location': 'AT_MEM_UNDEF',
            'tensor_directory': tempdir
        }
        code_gen = CodeGenerator(G, DefaultNamingConvension(G), opts)
        default_template(G, code_generator=code_gen)
        code_gen.write_constants()
Example #9
def test_activatiofusion(actfusion_graph):
    G = actfusion_graph
    matcher = get_fusion('scale8_match_group')
    matcher.match(G)
    G.add_dimensions()
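    # Collect activation statistics on constant inputs to drive quantization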
    astat_col = ActivationStatsCollector()
    astat_col.collect_stats(
        G, [np.full([10, 10, 2], 1),
            np.full([10, 10, 2], 1)])
    astats = astat_col.reduce_stats()
    quantizer = MultQuantizer(astats,
                              force_width=8,
                              quantized_dimension="channel")
    G.quantization = quantizer.quantize(G)
    with tempfile.TemporaryDirectory() as tempdir:
        opts = {
            'default_input_location': 'ARG_LOC_L2',
            'default_output_location': 'ARG_LOC_L2',
            'default_global_location': 'ARG_LOC_L3_HFLASH',
            'default_local_location': 'AT_MEM_UNDEF',
            'tensor_directory': tempdir
        }
        code_gen = CodeGenerator(G, DefaultNamingConvension(G), opts)
        ATModel_code = default_template(G, code_generator=code_gen)
Example #10
def gen_project(G,
                settings,
                project_folder,
                script_commands,
                overwrite=False,
                performance=False,
                quantized=False,
                test_results=False,
                save_inputs=False,
                input_file=None,
                input_args=None,
                gen_atproject=False,
                dump_tensors=False,
                input_tensors=None,
                tolerance=0.0):
    settings = deepcopy(settings)
    settings['graph_monitor_cycles'] = True
    settings['graph_produce_node_names'] = True
    settings['graph_produce_operinfos'] = True

    code_gen = CodeGenerator(G, DefaultNamingConvension(G), settings)

    if not os.path.exists(project_folder):
        os.mkdir(project_folder)

    qoutputs = None
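    # Build reference inputs and quantized reference outputs for the generated test code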
    if test_results:
        np.random.seed(12345)
        finput_tensors = []
        input_tensors = []
        for i, node in enumerate(G.input_nodes()):
            out_q = G.quantization[NodeId(node)].out_qs[0]
            if input_file:
                file_per_input = glob_input_files(input_file, G.num_inputs)[0]
                finput = import_data(file_per_input[i], **input_args)
            else:
                min_val = out_q.min if not out_q.is_floating else -1.0
                max_val = out_q.max if not out_q.is_floating else 1.0
                finput = get_rand(node.out_dims[0].shape,
                                  low_high=(min_val, max_val))
            finput_tensors.append(finput)
        executer = GraphExecuter(G, qrecs=G.quantization)
        qoutput_tensors = executer.execute(finput_tensors.copy(),
                                           qmode=QuantizationMode.all())
        qoutputs = []
        for params in G.outputs():
            outp = qoutput_tensors[params.step_idx][0]
            qoutputs.append(outp)
        for i, params in enumerate(G.input_nodes()):
            inp = qoutput_tensors[params.step_idx][0]
            input_tensors.append(inp)
            if save_inputs:
                nodeq = G.quantization[NodeId(params, None)].out_qs[0]
                np.save(os.path.join(project_folder, f"fake_input_{i}.npy"),
                        nodeq.dequantize(inp))

    main = os.path.join(project_folder, f"{code_gen.project_name}")
    main_c = main + '.c'
    main_h = main + '.h'
    common_mk = os.path.join(project_folder, "common.mk")
    nntool_script = os.path.join(project_folder, "nntool_script")
    if overwrite or not os.path.exists(main_c):
        with open(os.path.join(project_folder, f"{code_gen.project_name}.c"),
                  "w") as output_fp:
            output_fp.write(
                generate_main_appl_template(G, code_gen, input_tensors,
                                            qoutputs, tolerance))
    if overwrite or not os.path.exists(main_h):
        with open(os.path.join(project_folder, f"{code_gen.project_name}.h"),
                  "w") as output_fp:
            output_fp.write(generate_main_appl_header(G, code_gen))
    if overwrite or not os.path.exists(common_mk):
        open_args = parse_last_open(script_commands)
        open_args = build_last_open_args(open_args) if open_args else ""
        with open(os.path.join(project_folder, "common.mk"), "w") as output_fp:
            if gen_atproject:
                output_fp.write(
                    generate_main_appl_make_atproject(G, code_gen, quantized,
                                                      'Model.c'))
            else:
                output_fp.write(
                    generate_main_appl_make(G,
                                            code_gen,
                                            quantized,
                                            open_args=open_args))
    if overwrite or not os.path.exists(nntool_script):
        with open(nntool_script, 'w') as fp:
            # NOTE - gen_template_project is excluded so that tests work. Normally it will not be in the
            # history.
            fp.writelines(process_script(script_commands))
            # always add performance since the main template uses it
            for setting in [
                    'set graph_produce_node_names true',
                    'set graph_produce_operinfos true',
                    'set graph_monitor_cycles true'
            ]:
                fp.write(f'{setting}\n')
            if dump_tensors:
                fp.write('set graph_dump_tensor 7\n')

            if script_commands[-1] != "save_state":
                fp.write('save_state\n')
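    # For an AutoTiler project also emit the model source, expression kernels and constants directly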
    if gen_atproject:
        code_gen = CodeGenerator(G, DefaultNamingConvension(G), settings)
        with open(os.path.join(project_folder, 'Model.c'), "w") as output_fp:
            output_fp.write(default_template(G, code_generator=code_gen))
        if G.has_expressions:
            with open(os.path.join(project_folder, "Expression_Kernels.c"),
                      "w") as output_fp:
                output_fp.write(
                    basic_kernel_source_template(G, code_generator=code_gen))
            with open(os.path.join(project_folder, "Expression_Kernels.h"),
                      "w") as output_fp:
                output_fp.write(
                    basic_kernel_header_template(G, code_generator=code_gen))
        code_gen.write_constants(tensor_directory=project_folder)
    ignore_function = None if overwrite else skip_existing_files(
        project_folder)
    shutil.copytree(os.path.join(os.environ.get("NNTOOL_PATH"),
                                 'generation/project_template'),
                    project_folder,
                    dirs_exist_ok=True,
                    ignore=ignore_function)

    if not gen_atproject:
        try:
            shutil.copy(
                G.graph_identity.filename,
                os.path.join(project_folder,
                             os.path.split(G.graph_identity.filename)[1]))
        except shutil.SameFileError:
            pass