Example #1
def test_gen_of_pd(tmpdir):
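    # End-to-end check: build the HLIR from a P4 source, register extra
    # primitives, generate the JSON dict, render PD sources through the "of"
    # plugin, and compare the generated files against those shipped in p4c_bm.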
    input_p4 = os.path.join("tests", "p4_programs", "l2_openflow.p4")
    assert os.path.exists(input_p4)
    p = str(tmpdir)
    h = HLIR(input_p4)
    more_primitives = json.loads(
        resource_string(__name__,
                        os.path.join('..', 'p4c_bm', 'primitives.json')))
    h.add_primitives(more_primitives)
    assert h.build()
    json_dict = gen_json.json_dict_create(h)
    assert json_dict

    # hack the args
    from argparse import Namespace
    args = Namespace(plugin_list=["of"],
                     openflow_mapping_dir=os.path.join("tests", "of_mapping"),
                     openflow_mapping_mod="l2_openflow")

    gen_pd.generate_pd_source(json_dict, p, "pref", args)
    # now we check for all generated files
    of_path = tmpdir.join("plugin", "of")
    inc_path = of_path.join("inc")
    src_path = of_path.join("src")
    assert inc_path.ensure_dir()
    assert src_path.ensure_dir()
    expected_inc_path = "p4c_bm/plugin/of/inc"
    expected_inc = [f for f in os.listdir(expected_inc_path)]
    expected_src_path = "p4c_bm/plugin/of/src/"
    expected_src = [f for f in os.listdir(expected_src_path)]
    assert set(expected_inc) == set([f.basename for f in inc_path.listdir()])
    assert set(expected_src) == set([f.basename for f in src_path.listdir()])
Example #2
def main():
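    # Build the HLIR from the P4 source and export the requested dot graphs
    # (parser, table, and/or table-dependency) into the output directory.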
    parser = get_parser()
    args = parser.parse_args()

    graphs_to_generate = args.graphs
    if not graphs_to_generate:
        graphs_to_generate = {"parser", "table", "deps"}
    else:
        graphs_to_generate = set(graphs_to_generate)

    if args.gen_dir:
        if not os.path.isdir(args.gen_dir):
            print args.gen_dir, "is not a valid directory"
            sys.exit(1)
    gen_dir = os.path.abspath(args.gen_dir)

    h = HLIR(args.source)
    if not h.build():
        print "Error while building HLIR"
        sys.exit(1)

    print "Generating files in directory", gen_dir

    basename = _get_p4_basename(args.source)

    if "parser" in graphs_to_generate:
        dot.export_parse_graph(h, basename, gen_dir)
    if "table" in graphs_to_generate:
        dot.export_table_graph(h, basename, gen_dir, predecessors=args.table_predecessors)
    if "deps" in graphs_to_generate:
        dot.export_table_dependency_graph(h, basename, gen_dir,
                                          show_conds = args.dep_stages_with_conds)
    
Example #3
def compileP4(inputFile, gen_file, isRouter, preprocessor_args):
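    # Build the HLIR, translate the program to C for eBPF/bcc, and report the
    # outcome as a CompileResult rather than raising.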
    h = HLIR(inputFile)

    for parg in preprocessor_args:
        h.add_preprocessor_args(parg)
    if not h.build():
        return CompileResult("HLIR", "Error while building HLIR")

    try:
        basename = os.path.basename(inputFile)
        basename = os.path.splitext(basename)[0]

        config = target.BccConfig()
        e = EbpfProgram(basename, h, isRouter, config)
        serializer = ProgramSerializer()
        e.toC(serializer)
        with open(gen_file, 'w') as f:
            f.write(serializer.toString())
        return CompileResult("OK", "")
    except CompilationException as e:
        prefix = ""
        if e.isBug:
            prefix = "### Compiler bug: "
        return CompileResult("bug", prefix + e.show())
    except NotSupportedException as e:
        return CompileResult("not supported", e.show())
    except:
        return CompileResult("exception", traceback.format_exc())
Example #4
def test_gen_of_pd(tmpdir):
    input_p4 = os.path.join("tests", "p4_programs", "l2_openflow.p4")
    assert os.path.exists(input_p4)
    p = str(tmpdir)
    h = HLIR(input_p4)
    more_primitives = json.loads(
        resource_string(__name__,
                        os.path.join('..', 'p4c_bm', 'primitives.json'))
    )
    h.add_primitives(more_primitives)
    assert h.build()
    json_dict = gen_json.json_dict_create(h)
    assert json_dict

    # hack the args
    from argparse import Namespace
    args = Namespace(plugin_list=["of"],
                     openflow_mapping_dir=os.path.join("tests", "of_mapping"),
                     openflow_mapping_mod="l2_openflow")

    gen_pd.generate_pd_source(json_dict, p, "pref", args)
    # now we check for all generated files
    of_path = tmpdir.join("plugin", "of")
    inc_path = of_path.join("inc")
    src_path = of_path.join("src")
    assert inc_path.ensure_dir()
    assert src_path.ensure_dir()
    expected_inc_path = "p4c_bm/plugin/of/inc"
    expected_inc = [f for f in os.listdir(expected_inc_path)]
    expected_src_path = "p4c_bm/plugin/of/src/"
    expected_src = [f for f in os.listdir(expected_src_path)]
    assert set(expected_inc) == set([f.basename for f in inc_path.listdir()])
    assert set(expected_src) == set([f.basename for f in src_path.listdir()])
Example #5
def main():
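    # Introspection demo: build the HLIR for a hard-coded P4 program and dump
    # its tables, actions, action selectors, headers, and parse states.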
    p4_path = "/root/workspace/ankit/vanilla_p4c/examples/l2_switch_test.p4"
    if not os.path.isfile(p4_path):
        print("FILE NOT FOUND: %s" % p4_path)
        sys.exit(1)

    h = HLIR(p4_path)
    h.build()
    print "\n\n"
    for table_names, table in h.p4_tables.items():
        print table_names, "\t", table.actions
    print "\n\n"

    for actions, action in h.p4_actions.items():
        print actions, "\t", action.flat_call_sequence
        print action.signature
    print "\n\n"

    for actions_s, action_s in h.p4_action_selectors.items():
        print actions_s, "\t", action_s
    print "\n\n"

    for headers, header in h.p4_headers.items():
        print headers, "\t", header.layout, "\t", header.attributes
    print "\n\n"

    for pss, ps in h.p4_parse_states.items():
        print pss, "\t", ps.call_sequence
Example #6
def test_gen_json(input_p4):
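    # Minimal pipeline: build the HLIR (with extra primitives) and assert that
    # a JSON dict can be generated from it.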
    assert os.path.exists(input_p4)
    h = HLIR(input_p4)
    more_primitives = json.loads(
        resource_string(__name__,
                        os.path.join('..', 'p4c_bm', 'primitives.json')))
    h.add_primitives(more_primitives)
    assert h.build()
    json_dict = gen_json.json_dict_create(h)
    assert json_dict
Example #7
def test_gen_json(input_p4):
    assert os.path.exists(input_p4)
    h = HLIR(input_p4)
    more_primitives = json.loads(
        resource_string(__name__,
                        os.path.join('..', 'p4c_bm', 'primitives.json'))
    )
    h.add_primitives(more_primitives)
    assert h.build()
    json_dict = gen_json.json_dict_create(h)
    assert json_dict
Example #8
def main():
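    # Split "-D" preprocessor flags out of the CLI arguments, then either load
    # a pre-generated JSON file or build the HLIR from P4 source before
    # emitting the JSON and/or PD outputs.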
    parser = get_parser()
    input_args = sys.argv[1:]
    args, unparsed_args = parser.parse_known_args()

    # parse preprocessor flags
    has_remaining_args = False
    preprocessor_args = []
    for a in unparsed_args:
        if a[:2] == "-D":
            input_args.remove(a)
            preprocessor_args.append(a)
        else:
            has_remaining_args = True

    # trigger error
    if has_remaining_args:
        parser.parse_args(input_args)

    if args.json:
        path_json = _validate_path(args.json)

    from_json = False
    if args.pd:
        path_pd = _validate_dir(args.pd)
        if args.pd_from_json:
            if not os.path.exists(args.source):
                print "Invalid JSON source"
                sys.exit(1)
            from_json = True

    if from_json:
        with open(args.source, 'r') as f:
            json_dict = json.load(f)
    else:
        h = HLIR(args.source)
        h.add_preprocessor_args("-D__TARGET_BMV2__")
        for parg in preprocessor_args:
            h.add_preprocessor_args(parg)
        # in addition to standard P4 primitives
        more_primitives = json.loads(
            resource_string(__name__, 'primitives.json'))
        h.add_primitives(more_primitives)
        if not h.build(analyze=False):
            print "Error while building HLIR"
            sys.exit(1)

        json_dict = gen_json.json_dict_create(h)

        if args.json:
            print "Generating json output to", path_json
            with open(path_json, 'w') as fp:
                json.dump(json_dict, fp, indent=4, separators=(',', ': '))

    if args.pd:
        print "Generating PD source files in", path_pd
        gen_pd.generate_pd_source(json_dict, path_pd, args.p4_prefix)
Example #9
def test_gen_json(input_p4):
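    # Same JSON-generation pipeline as above, but programs with "negative" in
    # their name are expected to make the compiler exit.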
    assert os.path.exists(input_p4)
    h = HLIR(input_p4)
    more_primitives = json.loads(
        resource_string(__name__,
                        os.path.join('..', 'p4c_bm', 'primitives.json')))
    h.add_primitives(more_primitives)
    assert h.build()
    if "negative" in input_p4:  # negative test => compiler must exit
        with pytest.raises(SystemExit):
            gen_json.json_dict_create(h)
    else:
        # using keep_pragmas == True to maximize coverage
        json_dict = gen_json.json_dict_create(h, keep_pragmas=True)
        assert json_dict
Example #10
def main():
    parser = get_parser()
    input_args = sys.argv[1:]
    args, unparsed_args = parser.parse_known_args()

    # parse preprocessor flags
    has_remaining_args = False
    preprocessor_args = []
    for a in unparsed_args:
        if a[:2] == "-D":
            input_args.remove(a)
            preprocessor_args.append(a)
        else:
            has_remaining_args = True

    # trigger error
    if has_remaining_args:
        parser.parse_args(input_args)

    if args.json:
        path_json = _validate_path(args.json)

    from_json = False
    if args.pd:
        path_pd = _validate_dir(args.pd)
        if args.pd_from_json:
            if not os.path.exists(args.source):
                print "Invalid JSON source"
                sys.exit(1)
            from_json = True

    if from_json:
        with open(args.source, 'r') as f:
            json_dict = json.load(f)
    else:
        h = HLIR(args.source)
        h.add_preprocessor_args("-D__TARGET_BMV2__")
        for parg in preprocessor_args:
            h.add_preprocessor_args(parg)
        # in addition to standard P4 primitives
        more_primitives = json.loads(
            resource_string(__name__, 'primitives.json')
        )
        h.add_primitives(more_primitives)
        if not h.build(analyze=False):
            print "Error while building HLIR"
            sys.exit(1)

        json_dict = gen_json.json_dict_create(h)

        if args.json:
            print "Generating json output to", path_json
            with open(path_json, 'w') as fp:
                json.dump(json_dict, fp, indent=4, separators=(',', ': '))

    if args.pd:
        print "Generating PD source files in", path_pd
        gen_pd.generate_pd_source(json_dict, path_pd, args.p4_prefix)
Example #11
def main():
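    # Dispatch on the input extension: compile .p4 through the HLIR, or load a
    # pre-built representation from .json, then generate all output files.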
    if len(sys.argv) <= 1:
        print("Usage: %s p4_file [compiler_files_dir] [generated_dir]" % (os.path.basename(__file__)))
        sys.exit(1)

    filepath, compiler_files_path, desugared_path, generated_path = setup_paths()

    if not os.path.isfile(filepath):
        print("FILE NOT FOUND: %s" % filepath)
        sys.exit(1)

    _, ext = os.path.splitext(filepath)
    if ext == '.p4':
        hlir = HLIR(filepath)
        success = build_hlir(hlir)
    elif ext == '.json':
        hlir = json2hlir(filepath)
        success = True
    else:
        print("EXTENSION NOT SUPPORTED: %s" % ext)
        sys.exit(1)

    if not success:
        print("P4 compilation failed for file %s" % os.path.basename(filepath))
        sys.exit(1)

    generate_all_in_dir(compiler_files_path, desugared_path, generated_path, hlir)
   
    showErrors()
    showWarnings()

    global errors
    if len(errors) > 0:
        sys.exit(1)
Example #12
def test_gen_json(input_p4):
    assert os.path.exists(input_p4)
    h = HLIR(input_p4)
    more_primitives = json.loads(
        resource_string(__name__,
                        os.path.join('..', 'p4c_bm', 'primitives.json'))
    )
    h.add_primitives(more_primitives)
    assert h.build()
    if "negative" in input_p4:  # negative test => compiler must exit
        with pytest.raises(SystemExit):
            gen_json.json_dict_create(h)
    else:
        # using keep_pragmas == True to maximize coverage
        json_dict = gen_json.json_dict_create(h, keep_pragmas=True)
        assert json_dict
Example #13
def test_gen_json_field_aliases(input_aliases):
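    # Field-alias handling: an "error" alias file must make the program exit,
    # while a valid "sample" file yields a json_dict with a field_aliases key.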
    assert os.path.exists(input_aliases)

    input_p4 = os.path.join("tests", "p4_programs", "triv_eth.p4")
    assert os.path.exists(input_p4)
    h = HLIR(input_p4)
    assert h.build()

    if "error" in input_aliases:
        # make sure that the program exits
        with pytest.raises(SystemExit):
            gen_json.json_dict_create(h, input_aliases)
    else:
        assert "sample" in input_aliases
        json_dict = gen_json.json_dict_create(h, input_aliases)
        assert json_dict
        assert "field_aliases" in json_dict
Example #14
def test_gen_json_field_aliases(input_aliases):
    assert os.path.exists(input_aliases)

    input_p4 = os.path.join("tests", "p4_programs", "triv_eth.p4")
    assert os.path.exists(input_p4)
    h = HLIR(input_p4)
    assert h.build()

    if "error" in input_aliases:
        # make sure that the program exits
        with pytest.raises(SystemExit):
            gen_json.json_dict_create(h, input_aliases)
    else:
        assert "sample" in input_aliases
        json_dict = gen_json.json_dict_create(h, input_aliases)
        assert json_dict
        assert "field_aliases" in json_dict
Example #15
def test_gen_pd(input_p4, tmpdir):
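    # Generate PD sources from the JSON dict and compare the produced pd/ and
    # src/ trees against the templates shipped with p4c_bm.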
    assert os.path.exists(input_p4)
    p = str(tmpdir)
    h = HLIR(input_p4)
    assert h.build()
    json_dict = gen_json.json_dict_create(h)
    assert json_dict
    gen_pd.generate_pd_source(json_dict, p, "pref")
    # now we check for all generated files
    inc_path = tmpdir.join("pd")
    src_path = tmpdir.join("src")
    assert inc_path.ensure_dir()
    assert src_path.ensure_dir()
    expected_inc_path = "p4c_bm/templates/pd/"
    expected_inc = [f for f in os.listdir(expected_inc_path)]
    expected_src_path = "p4c_bm/templates/src/"
    expected_src = [f for f in os.listdir(expected_src_path)]
    assert set(expected_inc) == set([f.basename for f in inc_path.listdir()])
    assert set(expected_src) == set([f.basename for f in src_path.listdir()])
Example #16
def main():
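    # Build the HLIR, optionally dump JSON/YAML, ensure the output directory
    # exists, and render all generated files from the JSON dict.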
    global json_dict, p4
    parser = get_parser()
    args = parser.parse_args()

    if args.json:
        path_json = _validate_path(args.json)

    h = HLIR(args.source)
    h.add_preprocessor_args("-D__TARGET_BMV2__")
    if not h.build():
        print "Error while building HLIR"
        sys.exit(1)

    json_dict = gen_json.json_dict_create(h)
    p4 = DotDict(json_dict)

    if args.json:
        print "Generating json output to", path_json
        with open(path_json, 'w') as fp:
            json.dump(json_dict, fp, indent=4, separators=(',', ': '))

    if args.dump_yaml:
        with open("yaml_dump.yml", 'w') as f:
            dump_render_dict(json_dict, f)

    gen_dir = os.path.abspath(args.gen_dir)
    if os.path.exists(gen_dir):
        if not os.path.isdir(gen_dir):
            sys.stderr.write(args.gen_dir + " exists but is not a directory\n")
            sys.exit(1)
    else:
        try:
            os.mkdir(gen_dir)
        except:
            sys.stderr.write("Could not create output directory %s\n" %
                             args.gen_dir)
            sys.exit(1) 

    render_all_files(json_dict, gen_dir)
Example #17
def main():
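    # Classic p4c-bm flow: collect "-D" flags, prepare the output directory,
    # build the HLIR for the BM target, and render all template files.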
    parser = get_parser()
    input_args = sys.argv[1:]
    args, unparsed_args = parser.parse_known_args()

    has_remaining_args = False
    preprocessor_args = []
    for a in unparsed_args:
        if a[:2] == "-D":
            input_args.remove(a)
            preprocessor_args.append(a)
        else:
            has_remaining_args = True

    # trigger error
    if has_remaining_args:
        parser.parse_args(input_args)

    gen_dir = os.path.abspath(args.gen_dir)
    if os.path.exists(gen_dir):
        if not os.path.isdir(gen_dir):
            sys.stderr.write(args.gen_dir + " exists but is not a directory\n")
            sys.exit(1)
    else:
        try:
            os.mkdir(gen_dir)
        except:
            sys.stderr.write("Could not create output directory %s\n" %
                             args.gen_dir)
            sys.exit(1) 

    if args.p4_name:
        p4_name = args.p4_name
    else:
        p4_name = _get_p4_basename(args.source)

    if args.p4_prefix:
        p4_prefix = args.p4_prefix
    else:
        p4_prefix = p4_name

    h = HLIR(args.source)
    h.add_preprocessor_args("-D__TARGET_BM__")
    for parg in preprocessor_args:
        h.add_preprocessor_args(parg)
    if not h.build():
        print "Error while building HLIR"
        sys.exit(1)

    print "Generating files in directory", gen_dir

    render_dict = smart.render_dict_create(h, 
                                           p4_name, p4_prefix,
                                           args.meta_config,
                                           args.public_inc_path,
                                           dump_yaml = args.dump_yaml)
    smart.render_all_files(render_dict, gen_dir,
                           with_thrift = args.thrift,
                           with_plugin_list = args.plugin_list)
Example #18
def main():
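    # Either reuse a JSON input (pd_from_json) or build the HLIR from P4
    # source, then emit the JSON dict and/or the PD sources.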
    parser = get_parser()
    args = parser.parse_args()

    if args.json:
        path_json = _validate_path(args.json)

    from_json = False
    if args.pd:
        path_pd = _validate_dir(args.pd)
        if args.pd_from_json:
            if not os.path.exists(args.source):
                print "Invalid JSON source"
                sys.exit(1)
            from_json = True

    if from_json:
        with open(args.source, 'r') as f:
            json_dict = json.load(f)
    else:
        h = HLIR(args.source)
        h.add_preprocessor_args("-D__TARGET_BMV2__")
        if not h.build():
            print "Error while building HLIR"
            sys.exit(1)

        json_dict = gen_json.json_dict_create(h)

        if args.json:
            print "Generating json output to", path_json
            with open(path_json, 'w') as fp:
                json.dump(json_dict, fp, indent=4, separators=(',', ': '))
        if args.dump_yaml:
            with open("yaml_dump.yml", 'w') as f:
                dump_render_dict(json_dict, f)

    if args.pd:
        print "Generating PD source files in", path_pd
        gen_pd.generate_pd_source(json_dict, path_pd, args.p4_prefix)
Example #19
def test_gen_pd(input_p4, tmpdir):
    assert os.path.exists(input_p4)
    p = str(tmpdir)
    h = HLIR(input_p4)
    more_primitives = json.loads(
        resource_string(__name__,
                        os.path.join('..', 'p4c_bm', 'primitives.json')))
    h.add_primitives(more_primitives)
    assert h.build()
    json_dict = gen_json.json_dict_create(h)
    assert json_dict
    gen_pd.generate_pd_source(json_dict, p, "pref")
    # now we check for all generated files
    inc_path = tmpdir.join("pd")
    src_path = tmpdir.join("src")
    assert inc_path.ensure_dir()
    assert src_path.ensure_dir()
    expected_inc_path = "p4c_bm/templates/pd/"
    expected_inc = [f for f in os.listdir(expected_inc_path)]
    expected_src_path = "p4c_bm/templates/src/"
    expected_src = [f for f in os.listdir(expected_src_path)]
    assert set(expected_inc) == set([f.basename for f in inc_path.listdir()])
    assert set(expected_src) == set([f.basename for f in src_path.listdir()])
Example #20
File: p4toEbpf.py Project: AoJ/bcc
def compileP4(inputFile, gen_file, isRouter, preprocessor_args):
    h = HLIR(inputFile)

    for parg in preprocessor_args:
        h.add_preprocessor_args(parg)
    if not h.build():
        return CompileResult("HLIR", "Error while building HLIR")

    try:
        basename = os.path.basename(inputFile)
        basename = os.path.splitext(basename)[0]

        config = target.BccConfig()
        e = EbpfProgram(basename, h, isRouter, config)
        serializer = ProgramSerializer()
        e.toC(serializer)
        with open(gen_file, 'w') as f:
            f.write(serializer.toString())
        return CompileResult("OK", "")
    except CompilationException as e:
        prefix = ""
        if e.isBug:
            prefix = "### Compiler bug: "
        return CompileResult("bug", prefix + e.show())
Example #21
def main():
    if len(sys.argv) <= 1:
        print("Usage: %s p4_file [compiler_files_dir] [generated_dir]" %
              (os.path.basename(__file__)))
        sys.exit(1)

    p4_path, compiler_files_path, desugared_path, generated_path = setup_paths()

    if not os.path.isfile(p4_path):
        print("FILE NOT FOUND: %s" % p4_path)
        sys.exit(1)

    hlir = HLIR(p4_path)
    build_hlir(hlir)

    generate_all_in_dir(compiler_files_path, desugared_path, generated_path,
                        hlir)

    showErrors()
    showWarnings()
Example #22
def main():
    parser = get_parser()
    args = parser.parse_args()

    if args.json:
        path_json = _validate_path(args.json)

    from_json = False
    if args.pd:
        path_pd = _validate_dir(args.pd)
        if args.pd_from_json:
            if not os.path.exists(args.source):
                print "Invalid JSON source"
                sys.exit(1)
            from_json = True

    if from_json:
        with open(args.source, 'r') as f:
            json_dict = json.load(f)
    else:
        h = HLIR(args.source)
        h.add_preprocessor_args("-D__TARGET_BMV2__")
        # in addition to standard P4 primitives
        more_primitives = json.loads(
            resource_string(__name__, 'primitives.json'))
        h.add_primitives(more_primitives)
        if not h.build():
            print "Error while building HLIR"
            sys.exit(1)

        json_dict = gen_json.json_dict_create(h)

        if args.json:
            print "Generating json output to", path_json
            with open(path_json, 'w') as fp:
                json.dump(json_dict, fp, indent=4, separators=(',', ': '))

    if args.pd:
        print "Generating PD source files in", path_pd
        gen_pd.generate_pd_source(json_dict, path_pd, args.p4_prefix)
Example #23
    def principal(self, p4_code):

        #print "Transpiler started"

        # if len(sys.argv) <= 1:
        #     print("Usage: %s p4_file [compiler_files_dir] [generated_dir]" % (os.path.basename(__file__)))
        #     sys.exit(1)

        filepath, compiler_files_path, desugared_path, generated_path = setup_paths(
            p4_code)

        # if p4_code is False:
        #     print("FILE NOT FOUND: %s" % filepath)
        #     sys.exit(1)

        _, ext = os.path.splitext(filepath)
        if ext == '.p4':
            hlir = HLIR(filepath)
            success = build_hlir(hlir)
        # elif ext == '.json':
        #     hlir = json2hlir(filepath)
        #     success = True
        else:
            print("EXTENSION NOT SUPPORTED: %s" % ext)
            sys.exit(1)

        if not success:
            print("Transpiler failed for use-case %s" %
                  os.path.basename(filepath))
            sys.exit(1)

        generate_all_in_dir(compiler_files_path, desugared_path,
                            generated_path, hlir)

        showErrors()
        showWarnings()
Example #24
def main():
    parser = get_parser()
    args = parser.parse_args()

    if args.json:
        path_json = _validate_path(args.json)

    from_json = False
    if args.pd:
        path_pd = _validate_dir(args.pd)
        if args.pd_from_json:
            if not os.path.exists(args.source):
                print "Invalid JSON source"
                sys.exit(1)
            from_json = True

    if from_json:
        with open(args.source, 'r') as f:
            json_dict = json.load(f)
    else:
        h = HLIR(args.source)
        h.add_preprocessor_args("-D__TARGET_BMV2__")
        # in addition to standard P4 primitives
        more_primitives = json.loads(
            resource_string(__name__, 'primitives.json')
        )
        h.add_primitives(more_primitives)
        if not h.build():
            print "Error while building HLIR"
            sys.exit(1)

        json_dict = gen_json.json_dict_create(h)

        if args.json:
            print "Generating json output to", path_json
            with open(path_json, 'w') as fp:
                json.dump(json_dict, fp, indent=4, separators=(',', ': '))

    if args.pd:
        print "Generating PD source files in", path_pd
        gen_pd.generate_pd_source(json_dict, path_pd, args.p4_prefix)
Example #25
def main():
    parser = get_parser()
    input_args = sys.argv[1:]
    args, unparsed_args = parser.parse_known_args()

    has_remaining_args = False
    preprocessor_args = []
    for a in unparsed_args:
        if a[:2] == "-D":
            input_args.remove(a)
            preprocessor_args.append(a)
        else:
            has_remaining_args = True

    # trigger error
    if has_remaining_args:
        parser.parse_args(input_args)

    gen_dir = os.path.abspath(args.gen_dir)
    if os.path.exists(gen_dir):
        if not os.path.isdir(gen_dir):
            sys.stderr.write(args.gen_dir + " exists but is not a directory\n")
            sys.exit(1)
    else:
        try:
            os.mkdir(gen_dir)
        except:
            sys.stderr.write("Could not create output directory %s\n" %
                             args.gen_dir)
            sys.exit(1)

    if args.p4_name:
        p4_name = args.p4_name
    else:
        p4_name = _get_p4_basename(args.source)

    if args.p4_prefix:
        p4_prefix = args.p4_prefix
    else:
        p4_prefix = p4_name

    h = HLIR(args.source)
    h.add_preprocessor_args("-D__TARGET_BM__")
    for parg in preprocessor_args:
        h.add_preprocessor_args(parg)
    # in addition to standard P4 primitives
    more_primitives = json.loads(resource_string(__name__, 'primitives.json'))
    h.add_primitives(more_primitives)

    if not h.build():
        print "Error while building HLIR"
        sys.exit(1)

    print "Generating files in directory", gen_dir

    render_dict = smart.render_dict_create(h,
                                           p4_name,
                                           p4_prefix,
                                           args.meta_config,
                                           args.public_inc_path,
                                           dump_yaml=args.dump_yaml)
    render_dict['hlir'] = h

    if args.openflow_mapping_dir and args.openflow_mapping_mod:
        sys.path.append(args.openflow_mapping_dir)
        render_dict['openflow_mapping_mod'] = args.openflow_mapping_mod

    smart.render_all_files(render_dict,
                           gen_dir,
                           with_thrift=args.thrift,
                           with_plugin_list=args.plugin_list,
                           with_plugin_path=args.plugin_path)
Example #26
def parseParser(metadata_list):
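    # Exploration script: build the HLIR for a local l2_switch.p4 and walk its
    # parse states (call_sequence, branch_on, branch_to, prev). The large
    # quoted blocks below are disabled probes of other HLIR collections.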

    #h = HLIR("./stateful.p4")
    h = HLIR("./l2_switch.p4")
    #h = HLIR("../../tutorials-master/SIGCOMM_2015/flowlet_switching/p4src/simple_router.p4")
    #h = HLIR("../../tutorials-master/SIGCOMM_2015/source_routing/p4src/source_routing.p4")

    h.build()

    #for action_name, action in h.p4_actions.items():
    #	print action.name+"============="
    #	print action.call_sequence
    #print action.flat_call_sequence

    #print action.signature
    #print action.signature_widths

    #print action.signature_flags
    '''
		for sigF_name, sigF in action.signature_flags.items():
			#print sigF_name,sigF
					
			for sigF_item_name, sigF_item in sigF.items():
				#print sigF_item_name, sigF_item
				if str(sigF_item_name) == "data_width":
					#print action.signature_flags
					#print sigF
					print type(sigF_item), sigF_item
		'''
    '''
		print a_name.name, type(a_name.name)
		for c in a_name.match_fields:
			print c[0].name, c[1], c[2]
		print "=====actions====="
		for d in a_name.actions:
			print d.name	
		print "=====size====="
		print a_name.min_size, a_name.max_size
		print "=====next====="
		print a_name.next_, type(a_name.next_)
		if type(a_name.next_) == dict:
			print "abc"
			for hit_ness, e in a_name.next_.items():
				if hit_ness == "miss":
					f_miss = e
				else: 
					f_hit = e
			print f_miss.next_
			print f_hit.next_
		print "=====timeOut===="
		if a_name.support_timeout == False:
			print a_name.support_timeout
		'''

    # p4_egress_ptr is only a table node
    #print h.p4_egress_ptr, type(h.p4_egress_ptr), h.p4_egress_ptr.next_
    '''
		for c in b_item:
			print c.name
	'''
    #print h.p4_egress_ptr

    #p4_tables
    """
	for table_name, table in h.p4_tables.items():
		print table_name, table.match_fields
	"""

    #p4_headers
    '''
	for header_name, header in h.p4_headers.items():
		print header.name, type(header.length)
		#print header.layout
		print header.attributes	
		#for field, width in header.layout.items():
		#	print type(field), width
	'''

    #p4_header_instances
    '''
	for header_name, header in h.p4_header_instances.items():
		print header.name + "===================================="	
		print header.virtual	
		#for field, width in header.header_type.layout.items():
		#	print type(field)
	'''
    '''
	#p4_fields
	for field_name, field in h.p4_fields.items():
		print field.name, field.calculation	
		for item in field.calculation:
			print item[0]
			print item[1].name
			print item[2].left, item[2].right, item[2].op
	'''

    #p4_field_lists
    '''
	for field_list_name, field_list in h.p4_field_lists.items():
		print field_list.name
		for field in field_list.fields:
			print field.name, field.offset, field.width, field.calculation
			for item in field.calculation:		
				for i in range(3):			
					print type(item[i])		
				#print item[1].output_width
	'''

    #p4_field_list_calculations
    '''
	for field_list_name, field_list in h.p4_field_list_calculations.items():
		print field_list.name, field_list.input, field_list.output_width, field_list.algorithm
		for a in field_list.input:
			for b in a.fields:		
				print b
	'''

    #p4_parser_state
    #print type(h.p4_parse_states)

    print '==================parser_state'
    for parser_name, parser in h.p4_parse_states.items():
        print parser.name
        #call_sequence
        print 'parser.call_sequence', parser.call_sequence

        for se in parser.call_sequence:
            print se
            if len(se) == 3:
                print str(se[0]) == "set"
                print se[1].name, se[1].instance, se[1].offset

        #branch_on

        #print parser.branch_on, type(parser.branch_on)
        for field in parser.branch_on:
            print field.name

        #branch_to
        for key, dest in parser.branch_to.items():
            print key, dest

        #prev

        #print parser.prev
        for state in parser.prev:
            print state.name

    #p4_action
    '''
	for action_name, action in h.p4_actions.items():
		print action.name+"============="
			
		for sig_name in action.signature:
			print sig_name

		print action.signature_widths
		
		#print action.signature_flags
		
		for sigF_name, sigF in action.signature_flags.items():
			#print sigF_name,sigF
					
			for sigF_item_name, sigF_item in sigF.items():
				#print sigF_item_name, sigF_item
				if str(sigF_item_name) == "data_width":
					#print action.signature_flags
					#print sigF
					print type(sigF_item), sigF_item
			

		
		#call_sequence
		print action.call_sequence
		for call_function in action.call_sequence:
			for i in range(len(call_function)):
				if i ==0:
					print call_function[0].name, call_function[0].signature
				else:
					print call_function[i]
					for item in call_function[1]:
						print item,type(item)
		#print "***************"	
		#print action.flat_call_sequence
	'''

    #p4_node
    '''
	for table_name, table in h.p4_nodes.items():
		print table.name+"============="
		#print table.next_
		#match_fields	
		print table.control_flow_parent
		print table.base_default_next

		for match_field in table.match_fields:
			for field in match_field:
				print field	
		#print table.attached_counters
		print "1"+table.control_flow_parent, table.conditional_barrier
		print table.base_default_next
		print table.dependencies_to
	'''
    '''
	#p4_action_node
	for action_node_name, action_node in h.p4_action_nodes.items():
		print action_node.name
	'''
    #p4_conditional_node

    for action_node_name, action_node in h.p4_conditional_nodes.items():
        print action_node_name, action_node.name
    '''
	for action_node_name, action_node in h.p4_action_profiles.items():
		print action_node.name
	'''

    #p4_counter
    """
	for counter_name, counter in h.p4_counters.items():
		print counter.name, counter.type, counter.min_width, counter.saturating
		print counter.binding, counter.instance_count
	"""

    #p4_register
    """
	for register_name, register in h.p4_registers.items():
		print register.name+"=================="
		print register.layout, register.width, register.instance_count
		print register.binding
	"""

    #p4_parser_exception
    """
    for parser_ex_name, parser_ex in h.p4_parser_exceptions.items():
        print parser_ex
    """
Example #27
def parseControlFLow():
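    # Walk the ingress pipeline from p4_ingress_ptr, linearizing the table
    # sequence (follow miss branches, queue hit branches), then derive each
    # table's match/action widths, dependencies, and the metadata layout.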

    #h = HLIR("./stateful.p4")
    h = HLIR("./l2_switch.p4")
    #h = HLIR("../../tutorials-master/SIGCOMM_2015/flowlet_switching/p4src/simple_router.p4")
    #h = HLIR("../../tutorials-master/SIGCOMM_2015/source_routing/p4src/source_routing.p4")

    h.build()

    print "\n====start"

    # start form p4_ingress_ptr
    for table_node, parser_node_set in h.p4_ingress_ptr.items():
        #sys.exit(0)

        table_p = table_node  # table_p is the current table node
        hit_p = None
        miss_p = None
        table_list = []  # table sequence in the data plane
        hit_list = []  # hit branches not yet analyzed
        i = 0  # loop counter for tracing
        while table_p is not None:
            print "======%d loop======" % i
            #print i, table_p
            i = i + 1
            appendTableList(table_list, table_p)
            '''	# table node info.
			print table_p.name + ".next_ info: ", table_p.next_
			print "==control_flow_parent", table_p.control_flow_parent
			print "==conditional_barrier", table_p.conditional_barrier
			print '==dependencies_to', table_p.dependencies_to
			print '==dependencies_for', table_p.dependencies_for
			print '==base_default_next', table_p.base_default_next
			'''

            miss_p = None
            if isinstance(table_p.next_, dict):  # {"hit": ..., "miss": ...}
                for hit_ness, hit_table_node in table_p.next_.items():
                    if hit_ness == 'hit':
                        if hit_table_node is not None:
                            hit_list.append(hit_table_node)
                    else:
                        miss_p = hit_table_node
                table_p = miss_p  # follow the miss branch (may be None)
            else:  # {actions: **, actions: **}
                #print table_p.next_
                for action_node, action_table_node in table_p.next_.items():
                    table_p = action_table_node
                    #print "abc", action_node, action_table_node
                    break
            if len(hit_list) > 0 and table_p is None:
                table_p = hit_list.pop(0)
            #print "hit_lis:", hit_list
        print table_list
        print "end===="

    p4_field_type = '<class \'p4_hlir.hlir.p4_headers.p4_field\'>'
    p4_signature_ref_type = '<class \'p4_hlir.hlir.p4_imperatives.p4_signature_ref\'>'

    table_matchWidth_list = []  # match-key width of each table
    table_actionWidth_list = []  # total action width per table (parameters plus action bits)
    table_matchType_list = []  # match type of each table
    table_action_matching_list = {}  # table name -> its primitive action sequences
    table_dep_list = []  # per-table dependency on a preceding table, given by table ID
    metadata_list = []  # fields/keys that must be carried in the metadata
    table_match_meta_list = []  # fields/keys used by each table match
    table_action_meta_list = {}  # fields/keys used by each table action

    # add switching_metadata to metadata_list
    for header_instances_name, header_instances in h.p4_header_instances.items():
        print header_instances_name
        if header_instances.header_type.name == 'switching_metadata_t':
            for field_p in header_instances.fields:
                #print '\t', field_p.name, field_p.width
                metadata_list.append(field_p)

    # get table_list...
    for table_p in table_list:
        #print 'match_fields:', table_p.match_fields
        #print table_p, table_p.conditional_barrier #table_p.dependencies_to, table_p.dependencies_for
        match_width = 0
        action_width = 0
        match_type = ''
        premitive_action_list = []
        table_dep_id = 0
        table_dep_hitness = ''
        eachTable_match_meta_list = []
        eachTable_action_meta_list = []

        # add table dependence; just supporting "hit" & "miss" in this version
        if table_p.conditional_barrier is not None:
            table_dep_hitness = table_p.conditional_barrier[1]
            table_dep_id = findTableID(table_p.conditional_barrier[0].name,
                                       table_list)
            #print "============table_dep_id:", table_dep_id
        else:
            table_dep_id = 0
            table_dep_hitness = ''
        table_dep_list.append((table_dep_hitness, table_dep_id))

        # add match_width & match_type
        for match_field_p in table_p.match_fields:
            match_width += match_field_p[0].width
            match_type = str(match_field_p[1])
            appendMetadataList(metadata_list, match_field_p[0])
            #print type(match_field_p[0]), match_field_p[0].name
        table_matchWidth_list.append(match_width)
        table_matchType_list.append(match_type)

        # calculate table_match_meta_list
        for match_field_p in table_p.match_fields:
            match_field_startBit = locateField(metadata_list, match_field_p[0])
            match_field_endBit = match_field_startBit + match_field_p[0].width
            eachTable_match_meta_list.append(
                (match_field_startBit, match_field_endBit))

        # add action_width &action_table_matching list
        for action_p in table_p.actions:
            subAction_list = []
            #print "1", action_p.name, action_p.signature, action_p.signature_widths
            #action_width += action_p.signature_widths
            for signature_width_p in action_p.signature_widths:
                action_width += signature_width_p
            #print "call_sequence:", action_p.call_sequence
            #print "flat_call_sequence:", action_p.flat_call_sequence
            eachSubAction_meta_list = []
            for subAction in action_p.call_sequence:
                #print subAction[0].name, subAction[1]
                subAction_list.append(subAction)
                #appendMetadataList(metadata_list, action_field_p)
                para_meta_list = []
                for action_field_p in subAction[1]:
                    if str(type(action_field_p)) == p4_field_type:
                        appendMetadataList(metadata_list, action_field_p)
                        action_field_startBit = locateField(
                            metadata_list, action_field_p)
                        action_field_endBit = action_field_startBit + action_field_p.width
                    else:
                        action_field_startBit = 0
                        action_field_endBit = 0
                    para_meta_list.append(
                        (action_field_startBit, action_field_endBit))
                eachSubAction_meta_list.append((subAction[0], para_meta_list))
                '''
				if subAction[1] == []:
					print "2"
				for parameter in subAction[1]:
					if str(type(parameter)) == p4_field_type:
						print parameter.width
						print "3"
					elif str(type(parameter)) == p4_signature_ref_type:
						print '4', parameter.idx
				'''
            # each action references 1 bit in actionBit
            action_width += 1
            premitive_action_list.append(subAction_list)
            eachTable_action_meta_list.append(eachSubAction_meta_list)

        table_actionWidth_list.append(action_width)
        table_action_matching_list[str(table_p.name)] = premitive_action_list
        table_match_meta_list.append(eachTable_match_meta_list)
        table_action_meta_list[str(table_p.name)] = eachTable_action_meta_list

    print 'table_matchWidth_list:\t', table_matchWidth_list
    print 'table_actionWidth_list:\t', table_actionWidth_list
    print 'table_matchType_list:\t', table_matchType_list
    print 'table_action_matching_dict:\t', table_action_matching_list
    print 'table_dep_list:\t', table_dep_list
    print 'metadata_list:'
    for field_p in metadata_list:
        print '\t', field_p.name, field_p.instance, field_p.width
    print 'table_match_meta_list:\t', table_match_meta_list
    print 'table_action_matching_dict:\t', table_action_meta_list

    metadata_list_pkt = []
    for field_p in metadata_list:
        if field_p.instance.header_type.name != 'switching_metadata_t':
            metadata_list_pkt.append(field_p)
    return metadata_list_pkt

    #for action_name, action in h.p4_actions.items():
    #	print action.name+"============="
    #	print action.call_sequence
    #print action.flat_call_sequence

    #print action.signature
    #print action.signature_widths

    #print action.signature_flags
    '''
		for sigF_name, sigF in action.signature_flags.items():
			#print sigF_name,sigF
					
			for sigF_item_name, sigF_item in sigF.items():
				#print sigF_item_name, sigF_item
				if str(sigF_item_name) == "data_width":
					#print action.signature_flags
					#print sigF
					print type(sigF_item), sigF_item
		'''
    '''
		print a_name.name, type(a_name.name)
		for c in a_name.match_fields:
			print c[0].name, c[1], c[2]
		print "=====actions====="
		for d in a_name.actions:
			print d.name	
		print "=====size====="
		print a_name.min_size, a_name.max_size
		print "=====next====="
		print a_name.next_, type(a_name.next_)
		if type(a_name.next_) == dict:
			print "abc"
			for hit_ness, e in a_name.next_.items():
				if hit_ness == "miss":
					f_miss = e
				else: 
					f_hit = e
			print f_miss.next_
			print f_hit.next_
		print "=====timeOut===="
		if a_name.support_timeout == False:
			print a_name.support_timeout
		'''

    # p4_egress_ptr is only a table node
    #print h.p4_egress_ptr, type(h.p4_egress_ptr), h.p4_egress_ptr.next_
    '''
		for c in b_item:
			print c.name
	'''
    #print h.p4_egress_ptr

    #p4_tables
    """
	for table_name, table in h.p4_tables.items():
		print table_name, table.match_fields
	"""

    #p4_headers
    '''
	for header_name, header in h.p4_headers.items():
		print header.name, type(header.length)
		#print header.layout
		print header.attributes	
		#for field, width in header.layout.items():
		#	print type(field), width
	'''

    #p4_header_instances
    '''
	for header_name, header in h.p4_header_instances.items():
		print header.name + "===================================="	
		print header.virtual	
		#for field, width in header.header_type.layout.items():
		#	print type(field)
	'''
    '''
	#p4_fields
	for field_name, field in h.p4_fields.items():
		print field.name, field.calculation	
		for item in field.calculation:
			print item[0]
			print item[1].name
			print item[2].left, item[2].right, item[2].op
	'''

    #p4_field_lists
    '''
	for field_list_name, field_list in h.p4_field_lists.items():
		print field_list.name
		for field in field_list.fields:
			print field.name, field.offset, field.width, field.calculation
			for item in field.calculation:		
				for i in range(3):			
					print type(item[i])		
				#print item[1].output_width
	'''

    #p4_field_list_calculations
    '''
	for field_list_name, field_list in h.p4_field_list_calculations.items():
		print field_list.name, field_list.input, field_list.output_width, field_list.algorithm
		for a in field_list.input:
			for b in a.fields:		
				print b
	'''

    #p4_parser_state
    #print type(h.p4_parse_states)
    '''
	for parser_name, parser in h.p4_parse_states.items():
		print parser.name
		#call_sequence
		#print parser.call_sequence
		
		for se in parser.call_sequence:
			print se
			if len(se) == 3:
				print str(se[0]) == "set"
				print se[1].name, se[1].instance, se[1].offset
		
		
		#branch_on
		
		#print parser.branch_on, type(parser.branch_on)
		for field in parser.branch_on:
			print field.name
		
		
		#branch_to
		for key, dest in parser.branch_to.items():	
			print key, dest
		
		#prev
		
		#print parser.prev
		for state in parser.prev:
			print state.name
	'''

    #p4_action
    '''
	for action_name, action in h.p4_actions.items():
		print action.name+"============="
			
		for sig_name in action.signature:
			print sig_name

		print action.signature_widths
		
		#print action.signature_flags
		
		for sigF_name, sigF in action.signature_flags.items():
			#print sigF_name,sigF
					
			for sigF_item_name, sigF_item in sigF.items():
				#print sigF_item_name, sigF_item
				if str(sigF_item_name) == "data_width":
					#print action.signature_flags
					#print sigF
					print type(sigF_item), sigF_item
			

		
		#call_sequence
		print action.call_sequence
		for call_function in action.call_sequence:
			for i in range(len(call_function)):
				if i ==0:
					print call_function[0].name, call_function[0].signature
				else:
					print call_function[i]
					for item in call_function[1]:
						print item,type(item)
		#print "***************"	
		#print action.flat_call_sequence
	'''

    #p4_node
    '''
	for table_name, table in h.p4_nodes.items():
		print table.name+"============="
		#print table.next_
		#match_fields	
		print table.control_flow_parent
		print table.base_default_next

		for match_field in table.match_fields:
			for field in match_field:
				print field	
		#print table.attached_counters
		print "1"+table.control_flow_parent, table.conditional_barrier
		print table.base_default_next
		print table.dependencies_to
	'''
    '''
	#p4_action_node
	for action_node_name, action_node in h.p4_action_nodes.items():
		print action_node.name
	'''
    #p4_conditional_node

    for action_node_name, action_node in h.p4_conditional_nodes.items():
        print action_node_name, action_node.name
    '''
	for action_node_name, action_node in h.p4_action_profiles.items():
		print action_node.name
	'''

    #p4_counter
    """
	for counter_name, counter in h.p4_counters.items():
		print counter.name, counter.type, counter.min_width, counter.saturating
		print counter.binding, counter.instance_count
	"""

    #p4_register
    """
	for register_name, register in h.p4_registers.items():
		print register.name+"=================="
		print register.layout, register.width, register.instance_count
		print register.binding
	"""

    #p4_parser_exception
    """
    for parser_ex_name, parser_ex in h.p4_parser_exceptions.items():
        print parser_ex
    """
Example #28
import os.path as path

import pytest
from p4_hlir.main import HLIR

import p4t.vmrs.simple as svmr
from p4t.vmrs.p4 import P4VMRAction

PROGRAM = HLIR(path.join(path.dirname(path.realpath(__file__)), "test.p4"))
PROGRAM.build()

ACTION = PROGRAM.p4_actions['test_action']

TABLE = PROGRAM.p4_tables['test_table']

ENTRIES = [
    svmr.SimpleVMREntry([True, True, False], [True, True, True], P4VMRAction(ACTION, [1]), 1),
    svmr.SimpleVMREntry([True, False, False], [True, True, False], P4VMRAction(ACTION, [2]), 2),
    svmr.SimpleVMREntry([False, True, False], [True, False, False], P4VMRAction(ACTION, [3]), 3)
    ]


@pytest.fixture
def vmr(vmr_instance):
    for entry in ENTRIES:
        vmr_instance.append(entry)
    return vmr_instance


class TestVMRGeneric(object):
    __test__ = False
Example #29
def main():
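    # Full backend driver: build the HLIR, configure the hardware model, run
    # the allocation passes (OHI slots, PHV flits, checksum/iCRC, tables), and
    # emit the assembler, PD, and model-debug outputs.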
    args = get_parser().parse_args()
    if not args.sources:
        print "No input file specified"
        sys.exit(1)
    prog_name = os.path.split(args.sources[0])
    prog_name = prog_name[1].replace('.p4', '')
    logger_init(log_dir=args.gen_dir,
                prog_name=prog_name,
                loglevel=args.loglevel,
                floglevel=args.floglevel)

    # TBD - Insert toplevel try-except block
    h = HLIR(*args.sources)
    if args.fe_flags:
        args.fe_flags = args.fe_flags.split(" ")
        # If fe_flags is a space separated string, split it into substrings
        # and add each one as a separate preprocessor arg.
        for fe_flags_arg in args.fe_flags:
            h.add_preprocessor_args(fe_flags_arg)
    if not h.build():
        sys.exit(1)

    if args.asic == 'elba':
        setup_elba_hw_parameters(capri_model)

    if args.p4_plus:
        if args.p4_plus_module == 'sxdma':
            setup_sxdma_hw_parameters(capri_model)
        else:
            setup_p4_plus_hw_parameters(capri_model)

    if args.phv_flits:
        setup_num_phv_flits(capri_model, int(args.phv_flits))
    else:
        # init all parameters from a single value to avoid errors/inconsistencies
        setup_num_phv_flits(capri_model, capri_model['phv']['num_flits'])

    capri_be = capri_backend(h, capri_model, args)
    set_pdb_on_assert(args.pdb_on_assert)

    capri_be.initialize()

    # run passes from here
    # create initial field ordering based on parser extraction order and table usage
    capri_be.pa.init_field_ordering()
    re_init = False

    # assign ohi slots
    for d in xgress:
        # make sure each state uses max ohi slots allowed by the hw
        capri_be.parsers[d].update_ohi_per_state()
        max_ohi, max_ohi_path = capri_be.parsers[d].assign_ohi_slots()
        ohi_threshold = capri_be.hw_model['parser']['ohi_threshold']
        # this just exercises multiple passes through OHI allocation;
        # converting more OHIs to PHV increases PHV usage and may exceed PHV limits
        max_retry = 4
        while max_ohi > ohi_threshold and max_retry:
            # convert N ohis along longest path to phvs and try again
            capri_be.parsers[d].ohi_to_phv(max_ohi_path,
                                           max_ohi - ohi_threshold)
            # check again
            max_ohi, max_ohi_path = capri_be.parsers[d].assign_ohi_slots()
            max_retry -= 1
            re_init = True
        assert max_ohi <= ohi_threshold, "Cannot bring down the ohi count"

    if re_init:
        # re-init the field order after ohi fields have changed
        capri_be.pa.init_field_ordering()

    capri_be.pa.create_flits()

    for d in xgress:
        capri_be.checksum.ProcessAllCsumObjects(d)  #Regular csum, gso
        capri_be.icrc.ProcessIcrcObjects(d)
        capri_be.parsers[d].assign_hv_bits()
        capri_be.parsers[d].assign_rw_phv_hv_bits()
        capri_be.parsers[d].program_capri_states()
        capri_be.deparsers[d].build_field_dictionary()
        capri_be.checksum.AllocateAllCsumResources(d)
        capri_be.icrc.AllocateIcrcObjects(d)
    capri_be.tables.update_table_config()
    capri_be.tables.create_key_makers()
    capri_be.tables.program_tables()

    # Generate various outputs
    for d in xgress:
        if not capri_be.args.p4_plus:
            # Additional validation
            capri_be.parsers[d].parser_check_flit_violation()

            capri_be.parsers[d].generate_output()
            #capri_be.parsers[d].print_long_paths(10)
            #capri_be.parsers[d].print_short_paths(10)
            #capri_be.parsers[d].print_path_histogram(5)
        capri_be.pa.gress_pa[d].update_phc_map()
        capri_be.pa.gress_pa[d].print_field_order_info("PHV order(Final)")
        if not capri_be.args.p4_plus:
            capri_be.deparsers[d].print_deparser_info()
            capri_be.deparsers[d].generate_output()

    capri_be.tables.generate_output()

    #Create logical output of configuration pushed to parser and deparser
    #for checksum verification, computation.
    capri_be.checksum.CsumLogicalOutputCreate()
    capri_be.checksum.ParserCsumUnitAllocationCodeGenerate()

    #Create logical output of configuration pushed to parser and deparser
    #for icrc verification, computation.
    capri_be.icrc.IcrcLogicalOutputCreate()

    if args.asm_out:
        capri_be.pa.capri_asm_output()
        capri_be.tables.capri_asm_output()

    k_plus_d_dict = None
    if args.pd_gen or args.asm_out:
        p4pd = capri_p4pd_generate_info(capri_be)
        p4pd_gen = capri_p4pd_generator(capri_be)
        p4pd_gen.pddict = p4pd.pddict

        gen_dir = args.gen_dir
        cur_path = gen_dir + '/%s' % capri_be.prog_name
        if not os.path.exists(cur_path):
            try:
                os.makedirs(cur_path)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise
        fname = cur_path + '/pddict.api.json'
        with open(fname, 'w+') as pddict_json:
            json.dump(p4pd.pddict['tables'],
                      pddict_json,
                      indent=4,
                      sort_keys=True,
                      separators=(',', ': '))

        k_plus_d_dict = capri_p4pd_code_generate(p4pd_gen)

    # generate debug information for model
    # output into a JSON file for model debug logs
    capri_be.model_dbg_output(k_plus_d_dict)
Example #30
def json2hlir(filepath):
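    # Reverse direction: reconstruct P4 objects from a bmv2-style JSON file,
    # semantic-check them against the primitives, and dump the result into a
    # fresh HLIR.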

    # Loading and processing JSON...

    with open(filepath) as data_file:
        data = json.load(data_file)

    # Creating the P4 objects described in JSON...

    all_p4_objects = []

    for ht in data["header_types"]:
        if ht['name'] == 'standard_metadata_t':
            continue
        all_p4_objects.append(header_type(ht))

    for h in data["headers"]:
        all_p4_objects.append(header_instance(h))

    # for x in data["header_stacks"]:
    # for x in data["field_lists"]:

    for p in data["parsers"]:
        for ps in p["parse_states"]:
            all_p4_objects.append(parse_state(ps))

    # for x in data["deparsers"]:
    # for x in data["meter_arrays"]:
    # for x in data["counter_arrays"]:
    # for x in data["register_arrays"]:
    # for x in data["calculations"]:
    # for x in data["learn_lists"]:

    for a in data["actions"]:
        all_p4_objects.append(action(a))

    for p in data["pipelines"]:
        all_p4_objects += control(p)

    # for x in data["checksums"]:
    # for x in data["force_arith"]:

    # Synthesising the P4 AST...

    p4_program = P4Program("", -1, all_p4_objects)

    with open('src/utils/primitives.json') as data_file:
        primitives = json.load(data_file)

    sc = P4SemanticChecker()
    sc.semantic_check(p4_program, primitives)

    # Translating the P4 AST to HLIR...

    h = HLIR()

    d = P4HlirDumper()
    d.dump_to_p4(h, p4_program, primitives)

    p4_validate(h)
    p4_dependencies(h)
    p4_field_access(h)

    return h
Example #31
def test_gen_json(input_p4):
    assert os.path.exists(input_p4)
    h = HLIR(input_p4)
    assert h.build()
    json_dict = gen_json.json_dict_create(h)
    assert json_dict
Example #32
                flag = True
            elif (((write_sets[i][1] & write_sets[j][1]) != Set([]))
                  and (i != j)):
                print >> sys.stderr, "Flagging write/write intersection between action_primitives "
                print >> sys.stderr, "@ location", i, ":", pretty_print_primitive(
                    compound_action.flat_call_sequence[i])
                print >> sys.stderr, "@ location", j, ":", pretty_print_primitive(
                    compound_action.flat_call_sequence[j])
                flag = True
    if not flag:
        print >> sys.stderr, " no read/write intersection",
    print >> sys.stderr


if __name__ == "__main__":
    # Build HLIR
    h = HLIR(sys.argv[1])
    h.build()
    actions = h.p4_actions

    # Accumulate all compound actions (user-defined actions)
    # These are actions where the flat_call_sequence is not empty
    # Otherwise, it would be a primitive.
    compound_actions = []
    for a in actions:
        if (actions[a].flat_call_sequence != []):
            compound_actions += [actions[a]]

    for compound_action in compound_actions:
        analyze_read_write_sets(compound_action)
Example #33
                print >>sys.stderr, "Flagging read/write intersection between action_primitives "
                print >>sys.stderr, "@ location", i, ":", pretty_print_primitive(compound_action.flat_call_sequence[i])
                print >>sys.stderr, "@ location", j, ":", pretty_print_primitive(compound_action.flat_call_sequence[j])
                flag = True
            elif ((write_sets[i][1] & write_sets[j][1]) != Set([])) and (i != j):
                print >>sys.stderr, "Flagging write/write intersection between action_primitives "
                print >>sys.stderr, "@ location", i, ":", pretty_print_primitive(compound_action.flat_call_sequence[i])
                print >>sys.stderr, "@ location", j, ":", pretty_print_primitive(compound_action.flat_call_sequence[j])
                flag = True
    if not flag:
        print >>sys.stderr, " no read/write intersection",
    print >>sys.stderr


if __name__ == "__main__":
    # Build HLIR
    h = HLIR(sys.argv[1])
    h.build()
    actions = h.p4_actions

    # Accumulate all compound actions (user-defined actions)
    # These are actions where the flat_call_sequence is not empty
    # Otherwise, it would be a primitive.
    compound_actions = []
    for a in actions:
        if actions[a].flat_call_sequence != []:
            compound_actions += [actions[a]]

    for compound_action in compound_actions:
        analyze_read_write_sets(compound_action)
Example #34
                    type=str,
                    action="store",
                    required=True)
parser.add_argument('--output',
                    help='path to csv output file',
                    type=str,
                    action="store",
                    default="results_ternmatch.csv")
parser.add_argument('-v',
                    '--verbose',
                    help='increase stdout verbosity',
                    action="store_true")

args = parser.parse_args()

h = HLIR('../p4src/hp4.p4')
h.build(analyze=False)

r = open(args.output, 'w')
writer = csv.writer(r)
writer.writerow(['Packet', 'Table', 'Field', 'Bitwidth'])

n = open(args.nano, 'r')
reader = csv.reader(n)

packetevents = {}

for line in reader:
    packetid = int(line[4].split()[1])
    if packetid not in packetevents:
        packetevents[packetid] = []
Example #35
                    metavar='source',
                    type=str,
                    help='A source file to include in the P4 program.')
parser.add_argument('--ac',
                    help='Where to write annotated commands file',
                    type=str,
                    action="store",
                    required=True)
parser.add_argument('--SEB',
                    help='Number of standard extracted bytes',
                    type=int,
                    action="store",
                    default=20)
args = parser.parse_args()

h = HLIR(args.source)
h.build()

# We need a separate file that has commands like this:
#   print("table_set_default t_norm_SEB a_norm_SEB")
# and all others that are program independent but make
# HP4 go
# Others:
#  mirroring_add <port#> <port#>
# for all ports
#  table_set_default t_prep_deparse_SEB a_prep_deparse_SEB
# etc.

total = 0

for call in h.p4_parse_states['start'].call_sequence:
Example #36
def main():
    parser = get_parser()
    input_args = sys.argv[1:]
    args, unparsed_args = parser.parse_known_args()

    has_remaining_args = False
    preprocessor_args = []
    for a in unparsed_args:
        if a[:2] == "-D":
            input_args.remove(a)
            preprocessor_args.append(a)
        else:
            has_remaining_args = True

    # trigger error
    if has_remaining_args:
        parser.parse_args(input_args)

    gen_dir = os.path.abspath(args.gen_dir)
    if os.path.exists(gen_dir):
        if not os.path.isdir(gen_dir):
            sys.stderr.write(args.gen_dir + " exists but is not a directory\n")
            sys.exit(1)
    else:
        try:
            os.mkdir(gen_dir)
        except:
            sys.stderr.write("Could not create output directory %s\n" %
                             args.gen_dir)
            sys.exit(1) 

    if args.p4_name:
        p4_name = args.p4_name
    else:
        p4_name = _get_p4_basename(args.source)

    if args.p4_prefix:
        p4_prefix = args.p4_prefix
    else:
        p4_prefix = p4_name

    h = HLIR(args.source)
    h.add_preprocessor_args("-D__TARGET_BM__")
    for parg in preprocessor_args:
        h.add_preprocessor_args(parg)
    # in addition to standard P4 primitives
    more_primitives = json.loads(resource_string(__name__, 'primitives.json'))
    h.add_primitives(more_primitives)

    if not h.build():
        print "Error while building HLIR"
        sys.exit(1)

    print "Generating files in directory", gen_dir

    render_dict = smart.render_dict_create(h, 
                                           p4_name, p4_prefix,
                                           args.meta_config,
                                           args.public_inc_path,
                                           dump_yaml = args.dump_yaml)

    # @OVS: dumps the render dict for flow_type_checker.py
    pickle.dump(render_dict, open(gen_dir+"/dict.pickle", "wb"))

    # @OVS: optimizer configurations
    # @Shahbaz: enumerate number of adjustment actions and based on that
    # set this to true or false.
    render_dict["OPT_INLINE_EDITING"] = False

    if args.openflow_mapping_dir and args.openflow_mapping_mod:
        sys.path.append(args.openflow_mapping_dir)
        render_dict['openflow_mapping_mod'] = args.openflow_mapping_mod

    smart.render_all_files(render_dict, gen_dir,
                           with_thrift = args.thrift,
                           with_plugin_list = args.plugin_list)