Example #1
def __copy_files(OutputDir, FileSet):
    include_db = [
        ("declarations",      "$$INCLUDE_TOKEN_CLASS_DEFINITION$$",     Lng.INCLUDE(Setup.output_token_class_file)),
        ("implementations.i", "$$INCLUDE_TOKEN_CLASS_IMPLEMENTATION$$", Lng.INCLUDE(Setup.output_token_class_file_implementation)),
        ("implementations-inline.i", "$$INCLUDE_TOKEN_CLASS_IMPLEMENTATION$$", Lng.INCLUDE(Setup.output_token_class_file_implementation)),
        ("token/TokenQueue",  "$$INCLUDE_TOKEN_CLASS_DEFINITION$$",     Lng.INCLUDE(Setup.output_token_class_file)),
        ("token/TokenQueue",  "$$INCLUDE_LEXER_CLASS_DEFINITION$$",     Lng.INCLUDE(Setup.output_header_file)),
    ]
    # Sanity check: every path referenced in include_db must be known to dir_db.
    for path, dummy, dummy in include_db:
        directory, basename = os.path.split(path)
        assert (not directory and basename in dir_db[""]) \
               or (basename in dir_db["%s/" % directory])

    file_pair_list,   \
    out_directory_set = __get_source_drain_list(OutputDir, FileSet)

    # Make directories
    # Sort according to length => create parent directories before child.
    for directory in sorted(out_directory_set, key=len):
        if os.access(directory, os.F_OK): continue  # already exists
        os.makedirs(directory) # create parents, if necessary

    # Copy
    for source_file, drain_file in file_pair_list:
        content = open_file_or_die(source_file, "rb").read()
        for path, origin, replacement in include_db:
            if not source_file.endswith(path): continue
            content = content.replace(origin, replacement)

        content = adapt.do(content, OutputDir, OriginalPath=source_file)
        write_safely_and_close(drain_file, content)
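The replacement step above is plain marker substitution: each $$...$$ token in a copied skeleton file is swapped for an include statement built by Lng.INCLUDE(). A standalone sketch, with a hypothetical header name standing in for the Lng.INCLUDE() result:

# Sketch only; "Token.h" is an illustrative stand-in, not a quex3 file name.
content = "/* skeleton */\n$$INCLUDE_TOKEN_CLASS_DEFINITION$$\n"
content = content.replace("$$INCLUDE_TOKEN_CLASS_DEFINITION$$",
                          '#include "Token.h"')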
Example #2
def add_engine_stuff(mode_db, FileName, TokenClassImplementationF=False):

    global output_dir
    dummy, \
    member_function_signature_list = analyzer_class.do(mode_db, "")

    # FSM class implementation
    #
    analyzer_class_implementation = "#ifndef QUEX_OPTION_UNIT_TEST_NO_IMPLEMENTATION_IN_HEADER_EXT\n"
    analyzer_class_implementation += analyzer_class.do_implementation(
        mode_db, member_function_signature_list)
    analyzer_class_implementation += "\n"
    # analyzer_class_implementation += templates.get_implementation_header(Setup)
    analyzer_class_implementation += "\n"
    analyzer_class_implementation += "bool UserConstructor_UnitTest_return_value = true;\n"
    analyzer_class_implementation += "bool UserReset_UnitTest_return_value       = true;\n"
    analyzer_class_implementation += "bool UserMementoPack_UnitTest_return_value = true;\n"
    analyzer_class_implementation += "#endif /* QUEX_OPTION_UNIT_TEST_NO_IMPLEMENTATION_IN_HEADER_EXT */\n"

    with open(FileName, "a") as fh:
        fh.write("\n%s\n" %
                 adapt.do(analyzer_class_implementation, output_dir))

    if not TokenClassImplementationF:
        return

    dummy,                     \
    token_class_implementation = token_class.do()

    with open(FileName, "a") as fh:
        fh.write(
            "#ifndef QUEX_OPTION_UNIT_TEST_NO_IMPLEMENTATION_IN_HEADER_EXT\n")
        fh.write("%s\n" % adapt.do(token_class_implementation, output_dir))

        # fh.write("#else  /* QUEX_OPTION_UNIT_TEST_NO_IMPLEMENTATION_IN_HEADER_EXT */\n")
        # fh.write("bool UserConstructor_UnitTest_return_value = true;\n")
        # fh.write("bool UserReset_UnitTest_return_value       = true;\n")
        # fh.write("bool UserMementoPack_UnitTest_return_value = true;\n")
        fh.write(
            "#endif /* QUEX_OPTION_UNIT_TEST_NO_IMPLEMENTATION_IN_HEADER_EXT */\n"
        )

    Lng.straighten_open_line_pragmas(FileName)
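The #ifndef/#endif bracketing assembled by string concatenation above could be factored into a small helper; a sketch (the helper is illustrative, not part of quex3; only the macro name comes from the example):

GUARD = "QUEX_OPTION_UNIT_TEST_NO_IMPLEMENTATION_IN_HEADER_EXT"

def wrap_in_unit_test_guard(body):
    # Emit 'body' only when in-header implementations are not suppressed.
    return "#ifndef %s\n%s#endif /* %s */\n" % (GUARD, body, GUARD)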
Example #3
def append_variable_definitions(FileName):
    global output_dir
    with open(FileName) as fh:
        content = fh.read()
    with open(FileName, "wb") as fh:
        fh.write("%s\n" % adapt.do(content, output_dir))
        fh.write("\n")
        fh.write("bool UserConstructor_UnitTest_return_value = true;\n")
        fh.write("bool UserReset_UnitTest_return_value       = true;\n")
        fh.write("bool UserMementoPack_UnitTest_return_value = true;\n")
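Taken together, these call sites suggest the pattern adapt.do(text, output_directory, ...): the generated text is rewritten for the given output directory and returned. Example #1 additionally passes an OriginalPath keyword, and Example #6 a third positional argument. A minimal usage sketch (the import path is a placeholder, not verified against quex3):

import adapt  # placeholder import; adjust to the module's real location

generated = "/* generated lexer code */\n"
adapted   = adapt.do(generated, "Out")  # rewrite for output directory "Out"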
Example #4
File: core.py  Project: smmckay/quex3
def _write_all(content_table):

    content_table = [
        (adapt.do(x[0], Setup.output_directory), x[1]) for x in content_table
    ]
    content_table = [
        (Lng.straighten_open_line_pragmas_new(x[0], x[1]), x[1]) for x in content_table
    ]

    for content, file_name in content_table:
        if not content: continue
        write_safely_and_close(file_name, content)
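The two passes over content_table can be fused into a single comprehension; a behavior-preserving sketch reusing the names from the example above:

content_table = [
    (Lng.straighten_open_line_pragmas_new(
         adapt.do(content, Setup.output_directory), file_name),
     file_name)
    for content, file_name in content_table
]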
Example #5
File: helper.py  Project: smmckay/quex3
def create_customized_analyzer_function(Language,
                                        TestStr,
                                        EngineSourceCode,
                                        QuexBufferSize,
                                        CommentTestStrF,
                                        ShowPositionF,
                                        EndStr,
                                        SkipUntilMarkerSet,
                                        LocalVariableDB,
                                        IndentationSupportF=False,
                                        ReloadF=False,
                                        OnePassOnlyF=False,
                                        DoorIdOnSkipRangeOpenF=False,
                                        CounterPrintF=True,
                                        BeforeCode=None):

    txt = create_common_declarations(Language,
                                     QuexBufferSize,
                                     IndentationSupportF=IndentationSupportF,
                                     QuexBufferFallbackN=0)

    if BeforeCode is not None:
        txt += BeforeCode

    state_router_txt = do_state_router(dial_db)
    EngineSourceCode.extend(state_router_txt)
    txt += my_own_mr_unit_test_function(EngineSourceCode, EndStr,
                                        LocalVariableDB, ReloadF, OnePassOnlyF,
                                        DoorIdOnSkipRangeOpenF, CounterPrintF)

    if SkipUntilMarkerSet == "behind newline":
        txt += skip_behind_newline()
    elif SkipUntilMarkerSet:
        txt += skip_irrelevant_character_function(SkipUntilMarkerSet)
    else:
        txt += "static bool skip_irrelevant_characters(QUEX_TYPE_ANALYZER* me) { return true; }\n"

    txt += show_next_character_function(ShowPositionF)

    txt += create_main_function(Language, TestStr, QuexBufferSize,
                                CommentTestStrF)

    txt = txt.replace(Lng._SOURCE_REFERENCE_END(), "")

    Setup.analyzer_class_name = "TestAnalyzer"
    Setup.analyzer_name_safe = "TestAnalyzer"
    return adapt.do(txt, test_analyzer_dir(Language))
Example #6
def get_test_application(encoding, ca_map):

    # (*) Setup the buffer encoding ___________________________________________
    #
    if encoding == "utf_32_le": byte_n_per_code_unit = 4
    elif encoding == "ascii": byte_n_per_code_unit = 1
    elif encoding == "utf_8": byte_n_per_code_unit = 1
    elif encoding == "utf_16_le": byte_n_per_code_unit = 2
    elif encoding == "cp737": byte_n_per_code_unit = 1
    else: assert False

    Setup.buffer_setup("", byte_n_per_code_unit,
                       encoding.replace("_le", "").replace("_", ""))

    Setup.analyzer_class_name = "Lexer"
    # (*) Generate Code _______________________________________________________
    #
    counter_str = run_time_counter.get(ca_map, "TEST_MODE")
    counter_str = counter_str.replace("static void", "void")

    # Double check if reference delta counting has been implemented as expected.
    expect_reference_p_f = ca_map.get_column_number_per_code_unit() is not None
    assert_reference_delta_count_implementation(counter_str,
                                                expect_reference_p_f)

    counter_str = adapt.do(counter_str, "data", "")
    open("./data/test.c",
         "wb").write("#include <data/check.h>\n\n" + counter_str)

    # (*) Compile _____________________________________________________________
    #
    counter_function_name = Lng.DEFAULT_COUNTER_FUNCTION_NAME("TEST_MODE")
    os.system("rm -f test")
    compile_str =   "gcc -Wall -Werror -I. -ggdb ./data/check.c ./data/test.c "     \
                  + " -DQUEX_OPTION_COUNTER"                                \
                  + " -DDEF_COUNTER_FUNCTION='%s' " % counter_function_name \
                  + " -DDEF_FILE_NAME='\"data/input.txt\"' "                \
                  + " -DDEF_CHARACTER_TYPE=%s" % Setup.lexatom.type         \
                  + " -o test"
    # + " -DDEF_DEBUG_TRACE "

    print "## %s" % compile_str
    os.system(compile_str)
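The compile step could equally be expressed with subprocess, which bypasses the shell; a sketch under the same flags (not how quex3 itself does it; the single quotes from the shell command are dropped since no shell performs quote removal):

import subprocess

subprocess.check_call([
    "gcc", "-Wall", "-Werror", "-I.", "-ggdb",
    "./data/check.c", "./data/test.c",
    "-DQUEX_OPTION_COUNTER",
    "-DDEF_COUNTER_FUNCTION=%s" % counter_function_name,
    '-DDEF_FILE_NAME="data/input.txt"',
    "-DDEF_CHARACTER_TYPE=%s" % Setup.lexatom.type,
    "-o", "test",
])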
Example #7
    target_dir      = sys.argv[2]
    input_file_list = sys.argv[3:]

    for input_file in input_file_list:
        with open(input_file) as fh:
            txt = fh.read()
        txt = adapt.produce_include_statements(target_dir, txt)
        with open(input_file, "w") as fh:
            fh.write(txt)

elif "--specify" in sys.argv:
    Setup.analyzer_class_name = sys.argv[2]
    target_dir      = sys.argv[3]
    token_name      = "Token"
    input_file_list = sys.argv[4:]  # input files follow the class name and target dir
    for input_file in input_file_list:
        with open(input_file) as fh:
            txt = fh.read()

        txt = adapt.do(txt, target_dir)  # (text, directory) order, matching the other call sites
        with open(input_file, "w") as fh:
            fh.write(txt)

else:
    target_dir    = sys.argv[1]
    code_dir_list = sys.argv[2:]
    if not code_dir_list: code_dir_list = None
    try:
        os.mkdir(target_dir)
    except OSError:
        print("Directory '%s' already exists." % target_dir)
    source_package.do(target_dir, code_dir_list)
Example #8
    if ShowBufferLoadsF:
        state_machine_code = "#define QUEX_OPTION_DEBUG_SHOW_LOADS_EXT\n" + \
                             "#define QUEX_OPTION_UNIT_TEST_EXT\n"                   + \
                             state_machine_code

    source_code =   create_common_declarations(Language, QuexBufferSize,
                                               QuexBufferFallbackN, BufferLimitCode,
                                               ComputedGotoF=Setup.computed_gotos_f) \
                  + state_machine_code \
                  + test_program

    # Verify, that Templates and Pathwalkers are really generated
    __verify_code_generation(FullLanguage, source_code)

    source_code = "%s%s" % (language_defines,
                            adapt.do(source_code, test_analyzer_dir(Language)))

    source_code = source_code.replace("$$TEST_ANALYZER_DIR$$",
                                      test_analyzer_dir(Language))

    source_code = source_code.replace("$$COMPUTED_GOTOS_CHECK$$",
                                      computed_gotos_check_str())

    compile_and_run(Language, source_code, AssertsActionvation_str,
                    CompileOptionStr, test_str_list)


def run_this(Str, filter_result_db=None, FilterFunc=None):
    if True:  #try:
        fh_out = open("tmp.out", "w")
        fh_err = open("tmp.err", "w")
Example #9
File: helper.py  Project: smmckay/quex3
def create_indentation_handler_code(Language, TestStr, ISetup, BufferSize):

    end_str = __prepare(Language)

    class MiniIncidenceDb(dict):
        def __init__(self):
            self[E_IncidenceIDs.INDENTATION_BAD] = ""

        def default_indentation_handler_f(self):
            return True

    mini_incidence_db = MiniIncidenceDb()

    ca_map = LineColumnCount_Default()

    counter_code = run_time_counter.get(ca_map, "M")

    code = []  # [ "%s\n" % Lng.LABEL(DoorID.incidence(E_IncidenceIDs.INDENTATION_HANDLER, dial_db)) ]

    variable_db.init()
    analyzer_list,         \
    terminal_list,         \
    required_register_set, \
    run_time_counter_f     = indentation_counter.do("M", ca_map, ISetup,
                                                    mini_incidence_db, FSM.reload_state,
                                                    dial_db)
    loop_code = generator.do_analyzer_list(analyzer_list)

    loop_code.extend(
        generator.do_terminals(terminal_list,
                               TheAnalyzer=None,
                               dial_db=dial_db))

    if not run_time_counter_f:
        counter_code = None

    code.extend(loop_code)

    __require_variables(required_register_set)
    main_txt = create_customized_analyzer_function(
        Language,
        TestStr,
        code,
        QuexBufferSize=BufferSize,
        CommentTestStrF="",
        ShowPositionF=True,
        EndStr=end_str,
        SkipUntilMarkerSet="behind newline",
        LocalVariableDB=deepcopy(variable_db.get()),
        IndentationSupportF=True,
        ReloadF=True,
        CounterPrintF=False,
        BeforeCode=counter_code)

    on_indentation_txt = indentation_handler.do(
        AuxMode(), ["M", "M2"]).replace("$on_indentation",
                                        "QUEX_NAME(M_on_indentation)")

    Setup.analyzer_class_name = "TestAnalyzer"
    Setup.analyzer_name_safe = "TestAnalyzer"
    result = adapt.do(main_txt + on_indentation_txt,
                      test_analyzer_dir(Language))
    result = language_defines + result
    result = result.replace("$$TEST_ANALYZER_DIR$$",
                            test_analyzer_dir(Language))
    result = result.replace("$$COMPUTED_GOTOS_CHECK$$",
                            computed_gotos_check_str())
    return result
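The post-processing at the end mirrors Example #8: adapt the text, prepend language_defines, then expand the two $$...$$ markers. A sketch of that shared sequence as a helper (the helper name is hypothetical, not part of quex3; all other names come from the examples):

def finalize_source(text, language):
    text = adapt.do(text, test_analyzer_dir(language))
    text = language_defines + text
    text = text.replace("$$TEST_ANALYZER_DIR$$", test_analyzer_dir(language))
    text = text.replace("$$COMPUTED_GOTOS_CHECK$$", computed_gotos_check_str())
    return text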
Example #10
            "    TestAnalyzer_unicode_to_utf8_character(&u32_input_p, &buffer_p);\n"
            "}\n"
        ]
    else:
        txt = ["buffer[0] = unicode_input;\n"]
    return "".join("        %s" % line for line in txt)


iid_map = prepare(tm0)
transition_txt = get_transition_function(iid_map, codec)
txt = get_main_function(tm0, transition_txt, codec)

Lng.REPLACE_INDENT(txt)

fh = open("test.c", "wb")
fh.write("".join(adapt.do(txt, "ut")))
fh.close()
try:
    os.remove("./test")
except OSError:
    pass

if codec == "UTF8": qtc_str = "-DQUEX_TYPE_LEXATOM_EXT=uint8_t"
else: qtc_str = "-DQUEX_TYPE_LEXATOM_EXT=uint32_t"

os.system(
    "gcc -Wall -Werror -I. -I../../../code_base -DQUEX_OPTION_ASSERTS -DQUEX_INLINE=static %s -o test test.c -ggdb -std=c89"
    % qtc_str)
os.system("./test")

if True: