Example #1
def compileP4(inputFile, gen_file, isRouter, preprocessor_args):
    h = HLIR(inputFile)

    for parg in preprocessor_args:
        h.add_preprocessor_args(parg)
    if not h.build():
        return CompileResult("HLIR", "Error while building HLIR")

    try:
        basename = os.path.basename(inputFile)
        basename = os.path.splitext(basename)[0]

        config = target.BccConfig()
        e = EbpfProgram(basename, h, isRouter, config)
        serializer = ProgramSerializer()
        e.toC(serializer)
        with open(gen_file, 'w') as f:
            f.write(serializer.toString())
        return CompileResult("OK", "")
    except CompilationException as e:
        prefix = ""
        if e.isBug:
            prefix = "### Compiler bug: "
        return CompileResult("bug", prefix + e.show())
    except NotSupportedException as e:
        return CompileResult("not supported", e.show())
    except:
        return CompileResult("exception", traceback.format_exc())
Example #2
def main():
    parser = get_parser()
    input_args = sys.argv[1:]
    args, unparsed_args = parser.parse_known_args()

    has_remaining_args = False
    preprocessor_args = []
    for a in unparsed_args:
        if a[:2] == "-D":
            input_args.remove(a)
            preprocessor_args.append(a)
        else:
            has_remaining_args = True

    # trigger error
    if has_remaining_args:
        parser.parse_args(input_args)

    gen_dir = os.path.abspath(args.gen_dir)
    if os.path.exists(gen_dir):
        if not os.path.isdir(gen_dir):
            sys.stderr.write(args.gen_dir + " exists but is not a directory\n")
            sys.exit(1)
    else:
        try:
            os.mkdir(gen_dir)
        except:
            sys.stderr.write("Could not create output directory %s\n" %
                             args.gen_dir)
            sys.exit(1) 

    if args.p4_name:
        p4_name = args.p4_name
    else:
        p4_name = _get_p4_basename(args.source)

    if args.p4_prefix:
        p4_prefix = args.p4_prefix
    else:
        p4_prefix = p4_name

    h = HLIR(args.source)
    h.add_preprocessor_args("-D__TARGET_BM__")
    for parg in preprocessor_args:
        h.add_preprocessor_args(parg)
    if not h.build():
        print "Error while building HLIR"
        sys.exit(1)

    print "Generating files in directory", gen_dir

    render_dict = smart.render_dict_create(h, 
                                           p4_name, p4_prefix,
                                           args.meta_config,
                                           args.public_inc_path,
                                           dump_yaml = args.dump_yaml)
    smart.render_all_files(render_dict, gen_dir,
                           with_thrift = args.thrift,
                           with_plugin_list = args.plugin_list)
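The helper _get_p4_basename used above is not shown in this snippet. A plausible minimal implementation, assuming it only derives the program name from the source path:

import os

# Sketch only: assumes the helper strips the directory and the .p4 extension,
# e.g. "switch/router.p4" -> "router".
def _get_p4_basename(p4_source):
    return os.path.splitext(os.path.basename(p4_source))[0]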
Example #3
def main():
    parser = get_parser()
    input_args = sys.argv[1:]
    args, unparsed_args = parser.parse_known_args()

    # parse preprocessor flags
    has_remaining_args = False
    preprocessor_args = []
    for a in unparsed_args:
        if a[:2] == "-D":
            input_args.remove(a)
            preprocessor_args.append(a)
        else:
            has_remaining_args = True

    # trigger error
    if has_remaining_args:
        parser.parse_args(input_args)

    if args.json:
        path_json = _validate_path(args.json)

    from_json = False
    if args.pd:
        path_pd = _validate_dir(args.pd)
        if args.pd_from_json:
            if not os.path.exists(args.source):
                print "Invalid JSON source"
                sys.exit(1)
            from_json = True

    if from_json:
        with open(args.source, 'r') as f:
            json_dict = json.load(f)
    else:
        h = HLIR(args.source)
        h.add_preprocessor_args("-D__TARGET_BMV2__")
        for parg in preprocessor_args:
            h.add_preprocessor_args(parg)
        # in addition to standard P4 primitives
        more_primitives = json.loads(
            resource_string(__name__, 'primitives.json')
        )
        h.add_primitives(more_primitives)
        if not h.build(analyze=False):
            print "Error while building HLIR"
            sys.exit(1)

        json_dict = gen_json.json_dict_create(h)

        if args.json:
            print "Generating json output to", path_json
            with open(path_json, 'w') as fp:
                json.dump(json_dict, fp, indent=4, separators=(',', ': '))

    if args.pd:
        print "Generating PD source files in", path_pd
        gen_pd.generate_pd_source(json_dict, path_pd, args.p4_prefix)
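_validate_path and _validate_dir are also project helpers that do not appear in the snippet. A rough sketch of the checks they presumably perform (assumed behavior; the originals may differ):

import os
import sys

def _validate_path(path):
    # assumed: the JSON output path must not point at a directory
    path = os.path.abspath(path)
    if os.path.isdir(path):
        sys.stderr.write("%s is a directory, not a file\n" % path)
        sys.exit(1)
    return path

def _validate_dir(path):
    # assumed: the PD output directory must already exist
    path = os.path.abspath(path)
    if not os.path.isdir(path):
        sys.stderr.write("%s is not a valid directory\n" % path)
        sys.exit(1)
    return path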
Example #4
def main():
    parser = get_parser()
    input_args = sys.argv[1:]
    args, unparsed_args = parser.parse_known_args()

    # parse preprocessor flags
    has_remaining_args = False
    preprocessor_args = []
    for a in unparsed_args:
        if a[:2] == "-D":
            input_args.remove(a)
            preprocessor_args.append(a)
        else:
            has_remaining_args = True

    # trigger error
    if has_remaining_args:
        parser.parse_args(input_args)

    if args.json:
        path_json = _validate_path(args.json)

    from_json = False
    if args.pd:
        path_pd = _validate_dir(args.pd)
        if args.pd_from_json:
            if not os.path.exists(args.source):
                print "Invalid JSON source"
                sys.exit(1)
            from_json = True

    if from_json:
        with open(args.source, 'r') as f:
            json_dict = json.load(f)
    else:
        h = HLIR(args.source)
        h.add_preprocessor_args("-D__TARGET_BMV2__")
        for parg in preprocessor_args:
            h.add_preprocessor_args(parg)
        # in addition to standard P4 primitives
        more_primitives = json.loads(
            resource_string(__name__, 'primitives.json'))
        h.add_primitives(more_primitives)
        if not h.build(analyze=False):
            print "Error while building HLIR"
            sys.exit(1)

        json_dict = gen_json.json_dict_create(h)

        if args.json:
            print "Generating json output to", path_json
            with open(path_json, 'w') as fp:
                json.dump(json_dict, fp, indent=4, separators=(',', ': '))

    if args.pd:
        print "Generating PD source files in", path_pd
        gen_pd.generate_pd_source(json_dict, path_pd, args.p4_prefix)
Example #5
def main():
    parser = get_parser()
    args = parser.parse_args()

    if args.json:
        path_json = _validate_path(args.json)

    from_json = False
    if args.pd:
        path_pd = _validate_dir(args.pd)
        if args.pd_from_json:
            if not os.path.exists(args.source):
                print "Invalid JSON source"
                sys.exit(1)
            from_json = True

    if from_json:
        with open(args.source, 'r') as f:
            json_dict = json.load(f)
    else:
        h = HLIR(args.source)
        h.add_preprocessor_args("-D__TARGET_BMV2__")
        # in addition to standard P4 primitives
        more_primitives = json.loads(
            resource_string(__name__, 'primitives.json')
        )
        h.add_primitives(more_primitives)
        if not h.build():
            print "Error while building HLIR"
            sys.exit(1)

        json_dict = gen_json.json_dict_create(h)

        if args.json:
            print "Generating json output to", path_json
            with open(path_json, 'w') as fp:
                json.dump(json_dict, fp, indent=4, separators=(',', ': '))

    if args.pd:
        print "Generating PD source files in", path_pd
        gen_pd.generate_pd_source(json_dict, path_pd, args.p4_prefix)
Example #6
def main():
    parser = get_parser()
    args = parser.parse_args()

    if args.json:
        path_json = _validate_path(args.json)

    from_json = False
    if args.pd:
        path_pd = _validate_dir(args.pd)
        if args.pd_from_json:
            if not os.path.exists(args.source):
                print "Invalid JSON source"
                sys.exit(1)
            from_json = True

    if from_json:
        with open(args.source, 'r') as f:
            json_dict = json.load(f)
    else:
        h = HLIR(args.source)
        h.add_preprocessor_args("-D__TARGET_BMV2__")
        # in addition to standard P4 primitives
        more_primitives = json.loads(
            resource_string(__name__, 'primitives.json'))
        h.add_primitives(more_primitives)
        if not h.build():
            print "Error while building HLIR"
            sys.exit(1)

        json_dict = gen_json.json_dict_create(h)

        if args.json:
            print "Generating json output to", path_json
            with open(path_json, 'w') as fp:
                json.dump(json_dict, fp, indent=4, separators=(',', ': '))

    if args.pd:
        print "Generating PD source files in", path_pd
        gen_pd.generate_pd_source(json_dict, path_pd, args.p4_prefix)
Example #7
def main():
    global json_dict, p4
    parser = get_parser()
    args = parser.parse_args()

    if args.json:
        path_json = _validate_path(args.json)

    h = HLIR(args.source)
    h.add_preprocessor_args("-D__TARGET_BMV2__")
    if not h.build():
        print "Error while building HLIR"
        sys.exit(1)

    json_dict = gen_json.json_dict_create(h)
    p4 = DotDict(json_dict)

    if args.json:
        print "Generating json output to", path_json
        with open(path_json, 'w') as fp:
            json.dump(json_dict, fp, indent=4, separators=(',', ': '))

    if args.dump_yaml:
        with open("yaml_dump.yml", 'w') as f:
            dump_render_dict(json_dict, f)

    gen_dir = os.path.abspath(args.gen_dir)
    if os.path.exists(gen_dir):
        if not os.path.isdir(gen_dir):
            sys.stderr.write(args.gen_dir + " exists but is not a directory\n")
            sys.exit(1)
    else:
        try:
            os.mkdir(gen_dir)
        except:
            sys.stderr.write("Could not create output directory %s\n" %
                             args.gen_dir)
            sys.exit(1) 

    render_all_files(json_dict, gen_dir)
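DotDict, used above to wrap json_dict, is not defined in the snippet. A common minimal implementation of the idea, assuming it only adds attribute-style access to dictionary keys:

# Sketch only: the project's own DotDict may behave differently.
class DotDict(dict):
    def __getattr__(self, name):
        try:
            value = self[name]
        except KeyError:
            raise AttributeError(name)
        return DotDict(value) if isinstance(value, dict) else value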
Example #8
def main():
    parser = get_parser()
    args = parser.parse_args()

    if args.json:
        path_json = _validate_path(args.json)

    from_json = False
    if args.pd:
        path_pd = _validate_dir(args.pd)
        if args.pd_from_json:
            if not os.path.exists(args.source):
                print "Invalid JSON source"
                sys.exit(1)
            from_json = True

    if from_json:
        with open(args.source, 'r') as f:
            json_dict = json.load(f)
    else:
        h = HLIR(args.source)
        h.add_preprocessor_args("-D__TARGET_BMV2__")
        if not h.build():
            print "Error while building HLIR"
            sys.exit(1)

        json_dict = gen_json.json_dict_create(h)

        if args.json:
            print "Generating json output to", path_json
            with open(path_json, 'w') as fp:
                json.dump(json_dict, fp, indent=4, separators=(',', ': '))
        if args.dump_yaml:
            with open("yaml_dump.yml", 'w') as f:
                dump_render_dict(json_dict, f)

    if args.pd:
        print "Generating PD source files in", path_pd
        gen_pd.generate_pd_source(json_dict, path_pd, args.p4_prefix)
Example #9
File: p4toEbpf.py Project: AoJ/bcc
def compileP4(inputFile, gen_file, isRouter, preprocessor_args):
    h = HLIR(inputFile)

    for parg in preprocessor_args:
        h.add_preprocessor_args(parg)
    if not h.build():
        return CompileResult("HLIR", "Error while building HLIR")

    try:
        basename = os.path.basename(inputFile)
        basename = os.path.splitext(basename)[0]

        config = target.BccConfig()
        e = EbpfProgram(basename, h, isRouter, config)
        serializer = ProgramSerializer()
        e.toC(serializer)
        with open(gen_file, 'w') as f:
            f.write(serializer.toString())
        return CompileResult("OK", "")
    except CompilationException as e:
        prefix = ""
        if e.isBug:
            prefix = "### Compiler bug: "
        return CompileResult("bug", prefix + e.show())
Example #10
def main():
    parser = get_parser()
    input_args = sys.argv[1:]
    args, unparsed_args = parser.parse_known_args()

    has_remaining_args = False
    preprocessor_args = []
    for a in unparsed_args:
        if a[:2] == "-D":
            input_args.remove(a)
            preprocessor_args.append(a)
        else:
            has_remaining_args = True

    # trigger error
    if has_remaining_args:
        parser.parse_args(input_args)

    gen_dir = os.path.abspath(args.gen_dir)
    if os.path.exists(gen_dir):
        if not os.path.isdir(gen_dir):
            sys.stderr.write(args.gen_dir + " exists but is not a directory\n")
            sys.exit(1)
    else:
        try:
            os.mkdir(gen_dir)
        except:
            sys.stderr.write("Could not create output directory %s\n" %
                             args.gen_dir)
            sys.exit(1) 

    if args.p4_name:
        p4_name = args.p4_name
    else:
        p4_name = _get_p4_basename(args.source)

    if args.p4_prefix:
        p4_prefix = args.p4_prefix
    else:
        p4_prefix = p4_name

    h = HLIR(args.source)
    h.add_preprocessor_args("-D__TARGET_BM__")
    for parg in preprocessor_args:
        h.add_preprocessor_args(parg)
    # in addition to standard P4 primitives
    more_primitives = json.loads(resource_string(__name__, 'primitives.json'))
    h.add_primitives(more_primitives)

    if not h.build():
        print "Error while building HLIR"
        sys.exit(1)

    print "Generating files in directory", gen_dir

    render_dict = smart.render_dict_create(h, 
                                           p4_name, p4_prefix,
                                           args.meta_config,
                                           args.public_inc_path,
                                           dump_yaml = args.dump_yaml)

    # @OVS: dumps the render dict for flow_type_checker.py
    pickle.dump(render_dict, open(gen_dir+"/dict.pickle", "wb"))

    # @OVS: optimizer configurations
    # @Shahbaz: enumerate number of adjustment actions and based on that
    # set this to true or false.
    render_dict["OPT_INLINE_EDITING"] = False

    if args.openflow_mapping_dir and args.openflow_mapping_mod:
        sys.path.append(args.openflow_mapping_dir)
        render_dict['openflow_mapping_mod'] = args.openflow_mapping_mod

    smart.render_all_files(render_dict, gen_dir,
                           with_thrift = args.thrift,
                           with_plugin_list = args.plugin_list)
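The render dict pickled above can be loaded back by a later tool (the comment mentions flow_type_checker.py). A minimal sketch of the reading side, using a placeholder output directory:

import pickle

# "out" is a placeholder for the directory passed as the generation directory.
with open("out/dict.pickle", "rb") as f:
    render_dict = pickle.load(f)
print(render_dict.get("OPT_INLINE_EDITING"))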
Example #11
def main():
    args = get_parser().parse_args()
    if not len(args.sources):
        print "No input file specified"
        sys.exit(1)
    prog_name = os.path.split(args.sources[0])
    prog_name = prog_name[1].replace('.p4', '')
    logger_init(log_dir=args.gen_dir,
                prog_name=prog_name,
                loglevel=args.loglevel,
                floglevel=args.floglevel)

    # TBD - Insert toplevel try-except block
    h = HLIR(*args.sources)
    if args.fe_flags:
        args.fe_flags = args.fe_flags.split(" ")
        # If fe_flags is a space separated string, split it into substrings
        # and add each one as a separate preprocessor arg.
        for fe_flags_arg in args.fe_flags:
            h.add_preprocessor_args(fe_flags_arg)
    if not h.build():
        sys.exit(1)

    if args.asic == 'elba':
        setup_elba_hw_parameters(capri_model)

    if args.p4_plus:
        if args.p4_plus_module == 'sxdma':
            setup_sxdma_hw_parameters(capri_model)
        else:
            setup_p4_plus_hw_parameters(capri_model)

    if args.phv_flits:
        setup_num_phv_flits(capri_model, int(args.phv_flits))
    else:
        # init all parameters based on a single value to avoid errors/inconsistencies
        setup_num_phv_flits(capri_model, capri_model['phv']['num_flits'])

    capri_be = capri_backend(h, capri_model, args)
    set_pdb_on_assert(args.pdb_on_assert)

    capri_be.initialize()

    # run passes from here
    # create initial field ordering based on parser extraction order and table usage
    capri_be.pa.init_field_ordering()
    re_init = False

    # assign ohi slots
    for d in xgress:
        # make sure each state uses max ohi slots allowed by the hw
        capri_be.parsers[d].update_ohi_per_state()
        max_ohi, max_ohi_path = capri_be.parsers[d].assign_ohi_slots()
        ohi_threshold = capri_be.hw_model['parser']['ohi_threshold']
        # This is just to test multiple passes thru' ohi allocation
        # converting more ohis to phv will increase phv requirement and may exceed phv limits
        max_retry = 4
        while max_ohi > ohi_threshold and max_retry:
            # convert N ohis along longest path to phvs and try again
            capri_be.parsers[d].ohi_to_phv(max_ohi_path,
                                           max_ohi - ohi_threshold)
            # check again
            max_ohi, max_ohi_path = capri_be.parsers[d].assign_ohi_slots()
            max_retry -= 1
            re_init = True
        assert max_ohi <= ohi_threshold, "Cannot bring down the ohi count"

    if re_init:
        # re-init the field order after ohi fields have changed
        capri_be.pa.init_field_ordering()

    capri_be.pa.create_flits()

    for d in xgress:
        capri_be.checksum.ProcessAllCsumObjects(d)  #Regular csum, gso
        capri_be.icrc.ProcessIcrcObjects(d)
        capri_be.parsers[d].assign_hv_bits()
        capri_be.parsers[d].assign_rw_phv_hv_bits()
        capri_be.parsers[d].program_capri_states()
        capri_be.deparsers[d].build_field_dictionary()
        capri_be.checksum.AllocateAllCsumResources(d)
        capri_be.icrc.AllocateIcrcObjects(d)
    capri_be.tables.update_table_config()
    capri_be.tables.create_key_makers()
    capri_be.tables.program_tables()

    # Generate various outputs
    for d in xgress:
        if not capri_be.args.p4_plus:
            # Additional validation
            capri_be.parsers[d].parser_check_flit_violation()

            capri_be.parsers[d].generate_output()
            #capri_be.parsers[d].print_long_paths(10)
            #capri_be.parsers[d].print_short_paths(10)
            #capri_be.parsers[d].print_path_histogram(5)
        capri_be.pa.gress_pa[d].update_phc_map()
        capri_be.pa.gress_pa[d].print_field_order_info("PHV order(Final)")
        if not capri_be.args.p4_plus:
            capri_be.deparsers[d].print_deparser_info()
            capri_be.deparsers[d].generate_output()

    capri_be.tables.generate_output()

    #Create logical output of configuration pushed to parser and deparser
    #for checksum verification, computation.
    capri_be.checksum.CsumLogicalOutputCreate()
    capri_be.checksum.ParserCsumUnitAllocationCodeGenerate()

    #Create logical output of configuration pushed to parser and deparser
    #for icrc verification, computation.
    capri_be.icrc.IcrcLogicalOutputCreate()

    if args.asm_out:
        capri_be.pa.capri_asm_output()
        capri_be.tables.capri_asm_output()

    k_plus_d_dict = None
    if args.pd_gen or args.asm_out:
        p4pd = capri_p4pd_generate_info(capri_be)
        p4pd_gen = capri_p4pd_generator(capri_be)
        p4pd_gen.pddict = p4pd.pddict

        gen_dir = args.gen_dir
        cur_path = gen_dir + '/%s' % capri_be.prog_name
        if not os.path.exists(cur_path):
            try:
                os.makedirs(cur_path)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise
        fname = cur_path + '/pddict.api.json'
        pddict_json = open(fname, 'w+')
        json.dump(p4pd.pddict['tables'],
                  pddict_json,
                  indent=4,
                  sort_keys=True,
                  separators=(',', ': '))

        k_plus_d_dict = capri_p4pd_code_generate(p4pd_gen)

    # generate debug information for model
    # output into a JSON file for model debug logs
    capri_be.model_dbg_output(k_plus_d_dict)
Example #12
def main():
    parser = get_parser()
    input_args = sys.argv[1:]
    args, unparsed_args = parser.parse_known_args()

    has_remaining_args = False
    preprocessor_args = []
    for a in unparsed_args:
        if a[:2] == "-D":
            input_args.remove(a)
            preprocessor_args.append(a)
        else:
            has_remaining_args = True

    # trigger error
    if has_remaining_args:
        parser.parse_args(input_args)

    gen_dir = os.path.abspath(args.gen_dir)
    if os.path.exists(gen_dir):
        if not os.path.isdir(gen_dir):
            sys.stderr.write(args.gen_dir + " exists but is not a directory\n")
            sys.exit(1)
    else:
        try:
            os.mkdir(gen_dir)
        except:
            sys.stderr.write("Could not create output directory %s\n" %
                             args.gen_dir)
            sys.exit(1)

    if args.p4_name:
        p4_name = args.p4_name
    else:
        p4_name = _get_p4_basename(args.source)

    if args.p4_prefix:
        p4_prefix = args.p4_prefix
    else:
        p4_prefix = p4_name

    h = HLIR(args.source)
    h.add_preprocessor_args("-D__TARGET_BM__")
    for parg in preprocessor_args:
        h.add_preprocessor_args(parg)
    # in addition to standard P4 primitives
    more_primitives = json.loads(resource_string(__name__, 'primitives.json'))
    h.add_primitives(more_primitives)

    if not h.build():
        print "Error while building HLIR"
        sys.exit(1)

    print "Generating files in directory", gen_dir

    render_dict = smart.render_dict_create(h,
                                           p4_name,
                                           p4_prefix,
                                           args.meta_config,
                                           args.public_inc_path,
                                           dump_yaml=args.dump_yaml)
    render_dict['hlir'] = h

    if args.openflow_mapping_dir and args.openflow_mapping_mod:
        sys.path.append(args.openflow_mapping_dir)
        render_dict['openflow_mapping_mod'] = args.openflow_mapping_mod

    smart.render_all_files(render_dict,
                           gen_dir,
                           with_thrift=args.thrift,
                           with_plugin_list=args.plugin_list,
                           with_plugin_path=args.plugin_path)