Example #1
def __copy_files(OutputDir, FileSet):
    include_db = [
        ("declarations",      "$$INCLUDE_TOKEN_CLASS_DEFINITION$$",     Lng.INCLUDE(Setup.output_token_class_file)),
        ("implementations.i", "$$INCLUDE_TOKEN_CLASS_IMPLEMENTATION$$", Lng.INCLUDE(Setup.output_token_class_file_implementation)),
        ("implementations-inline.i", "$$INCLUDE_TOKEN_CLASS_IMPLEMENTATION$$", Lng.INCLUDE(Setup.output_token_class_file_implementation)),
        ("token/TokenQueue",  "$$INCLUDE_TOKEN_CLASS_DEFINITION$$",     Lng.INCLUDE(Setup.output_token_class_file)),
        ("token/TokenQueue",  "$$INCLUDE_LEXER_CLASS_DEFINITION$$",     Lng.INCLUDE(Setup.output_header_file)),
    ]
    for path, dummy, dummy in include_db:
        directory, basename = os.path.split(path)
        assert (not directory and basename in dir_db[""]) \
               or (basename in dir_db["%s/" % directory])

    file_pair_list,   \
    out_directory_set = __get_source_drain_list(OutputDir, FileSet)

    # Make directories
    # Sort according to length => create parent directories before child.
    for directory in sorted(out_directory_set, key=len):
        if os.access(directory, os.F_OK): continue
        os.makedirs(directory) # create parents, if necessary

    # Copy
    for source_file, drain_file in file_pair_list:
        content = open_file_or_die(source_file, "rb").read()
        for path, origin, replacement in include_db:
            if not source_file.endswith(path): continue
            content = content.replace(origin, replacement)

        content = adapt.do(content, OutputDir, OriginalPath=source_file)
        write_safely_and_close(drain_file, content)
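
The helpers above (__get_source_drain_list, open_file_or_die, write_safely_and_close, adapt.do) live elsewhere in the quex code base. As a rough, self-contained sketch of the same copy-and-substitute pattern, with hypothetical paths and no quex dependencies:

import os

def copy_with_substitution(source_file, drain_file, replacements):
    # Hypothetical, self-contained sketch of the pattern above (not quex
    # code): read a source file, apply placeholder substitutions, and
    # write the result, creating parent directories as needed.
    os.makedirs(os.path.dirname(drain_file) or ".", exist_ok=True)
    with open(source_file, "r") as fh:
        content = fh.read()
    for origin, replacement in replacements:
        content = content.replace(origin, replacement)
    with open(drain_file, "w") as fh:
        fh.write(content)
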
Example #2
File: core.py  Project: smmckay/quex3
def do_implementation(ModeDB, MemberFunctionSignatureList):

    func_txt = Lng.open_template(Lng.analyzer_template_i_file())

    if blackboard.mode_db:
        map_token_ids_to_names_str = token_id_maker.do_map_id_to_name_cases()
    else:
        map_token_ids_to_names_str = ""

    func_txt = blue_print(func_txt, [
        [
            "$$MEMBER_FUNCTION_ASSIGNMENT$$",
            Lng.MEMBER_FUNCTION_ASSIGNMENT(MemberFunctionSignatureList)
        ],
        [
            "$$CONSTRUCTOR_EXTENSTION$$",
            Lng.SOURCE_REFERENCED(blackboard.class_constructor_extension)
        ],
        [
            "$$DESTRUCTOR_EXTENSTION$$",
            Lng.SOURCE_REFERENCED(blackboard.class_destructor_extension)
        ],
        [
            "$$USER_DEFINED_PRINT$$",
            Lng.SOURCE_REFERENCED(blackboard.class_print_extension)
        ],
        [
            "$$RESET_EXTENSIONS$$",
            Lng.SOURCE_REFERENCED(blackboard.reset_extension)
        ],
        [
            "$$MEMENTO_EXTENSIONS_PACK$$",
            Lng.SOURCE_REFERENCED(blackboard.memento_pack_extension)
        ],
        [
            "$$MEMENTO_EXTENSIONS_UNPACK$$",
            Lng.SOURCE_REFERENCED(blackboard.memento_unpack_extension)
        ],
        [
            "$$INCLUDE_TOKEN_ID_HEADER$$",
            Lng.INCLUDE(Setup.output_token_id_file_ref)
        ],
        ["$$MAP_ID_TO_NAME_CASES$$", map_token_ids_to_names_str],
    ])

    return "\n%s\n" % func_txt
Example #3
def do(setup):
    """________________________________________________________________________
       (1) Error Check 
       
       (2) Generates a file containing:
    
       -- token id definitions (if they are not done in '--foreign-token-id-file').

       -- const string& TokenClass::map_token_id_to_name(), i.e. a function which can 
          convert token ids into strings.
       ________________________________________________________________________
    """
    global file_str
    # At this point, assume that the token type has been generated.
    assert token_db.token_type_definition is not None

    # (1) Error Check
    #
    __warn_implicit_token_definitions()
    if not Setup.token_class_only_f:
        __error_on_no_specific_token_ids()

    if Setup.extern_token_id_file:
        __error_on_mandatory_token_id_missing()
        return None

    __autogenerate_token_id_numbers()
    __warn_on_double_definition()
    # If a mandatory token id is still missing at this point, Quex did not
    # do the implicit token definitions properly. Abort with a program error.
    __error_on_mandatory_token_id_missing(AssertF=True)

    # (2) Generate token id file (if not specified outside)
    #
    if not Setup.extern_token_id_file:
        token_id_txt = __get_token_id_definition_txt()
    else:
        # Content of file = inclusion of 'Setup.extern_token_id_file'.
        token_id_txt = ["%s\n" % Lng.INCLUDE(Setup.extern_token_id_file)]

    return blue_print(file_str, [
        ["$$TOKEN_ID_DEFINITIONS$$", "".join(token_id_txt)],
        ["$$DATE$$", time.asctime()],
        ["$$TOKEN_PREFIX$$", Setup.token_id_prefix],
    ])
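
For orientation, the text substituted into $$TOKEN_ID_DEFINITIONS$$ is typically a list of C preprocessor defines. A hypothetical stand-in for __get_token_id_definition_txt() (the real function is not shown here) might build lines such as '#define QUEX_TKN_NUMBER 1000':

def token_id_definition_txt(token_id_db, prefix):
    # Hypothetical sketch only: emit one '#define <prefix><name> <number>'
    # line per token id; 'token_id_db' is assumed to map names to numbers.
    return ["#define %s%s %i\n" % (prefix, name, number)
            for name, number in sorted(token_id_db.items())]
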
Example #4
def _do(Descr):
    txt, txt_i = _do_core(token_db.token_type_definition)

    if Setup.language.upper() == "C++":
        # C++: declaration and (inline) implementation in header.
        header_txt         = "\n".join([txt, txt_i])
        implementation_txt = ""
    else:
        # C: declaration in header, implementation in source file.
        header_txt         = txt
        implementation_txt = txt_i 

    # The 'lexeme null' definition *must be* in the implementation file,
    # unless the token class comes from outside.
    if not Setup.extern_token_class_file:
        if not implementation_txt:
            implementation_txt = "%s\n" % Lng.INCLUDE(Setup.output_token_class_file)

    implementation_txt += extra_lib_implementations_txt

    return header_txt, implementation_txt
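
A plausible way the two returned strings are consumed, pairing them with the file name attributes seen in Example #1 (this wiring is an assumption, not shown in the source):

# Hypothetical usage sketch:
header_txt, implementation_txt = _do(token_db.token_type_definition)
write_safely_and_close(Setup.output_token_class_file, header_txt)
write_safely_and_close(Setup.output_token_class_file_implementation,
                       implementation_txt)
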
Example #5
def _do_core(Descr):
    # The caller must ensure the following before this function is called:
    assert Descr is not None
    assert isinstance(Descr, TokenTypeDescriptor)

    virtual_destructor_str,      \
    copy_str,                    \
    take_text_str                = _some_standard_stuff(Descr)

    # In plain 'C', the class name must incorporate the namespace (list).
    if Setup.language == "C":
        token_class_name = Setup.token_class_name_safe
    else:
        token_class_name = Descr.class_name

    # ------------
    # TODO: The following should always be placed in front of footer/header:
    # ------------
    if Setup.token_class_only_f: 
        helper_definitions = helper_definitions_common
    elif Setup.output_configuration_file:                        
        helper_definitions = Lng.INCLUDE(Setup.output_configuration_file)
    else:
        helper_definitions = ""

    if not Setup.implement_lib_quex_f:
        quex_lib_dir = "lib/quex"
    else:
        quex_lib_dir = "%s/lib/quex" % Setup.output_directory

    helper_variable_replacements = [
        ["$$HELPER_DEFINITIONS$$", helper_definitions],
        ["$$OUTPUT_DIR$$",         Setup.output_directory],
        ["$$QUEX_LIB_DIR$$",       quex_lib_dir],
        ["$$NAMESPACE_OPEN$$",     Lng.NAMESPACE_OPEN(Descr.name_space)],
        ["$$NAMESPACE_CLOSE$$",    Lng.NAMESPACE_CLOSE(Descr.name_space)],
        ["$$TOKEN_CLASS$$",        token_class_name],
    ]

    template_str = Lng.open_template(Lng.token_template_file())
    txt = blue_print(template_str, [
        ["$$BODY$$",                    Lng.SOURCE_REFERENCED(Descr.body)],
        ["$$CONSTRUCTOR$$",             Lng.SOURCE_REFERENCED(Descr.constructor)],
        ["$$COPY$$",                    copy_str],
        ["$$DESTRUCTOR$$",              Lng.SOURCE_REFERENCED(Descr.destructor)],
        ["$$DISTINCT_MEMBERS$$",        get_distinct_members(Descr)],
        ["$$FOOTER$$",                  Lng.SOURCE_REFERENCED(Descr.footer)],
        ["$$FUNC_TAKE_TEXT$$",          take_text_str],
        ["$$HEADER$$",                  Lng.SOURCE_REFERENCED(Descr.header)],
        ["$$QUICK_SETTERS$$",           get_quick_setters(Descr)],
        ["$$SETTERS_GETTERS$$",         get_setter_getter(Descr)],
        ["$$TOKEN_REPETITION_N_GET$$",  Lng.SOURCE_REFERENCED(Descr.repetition_get)],
        ["$$TOKEN_REPETITION_N_SET$$",  Lng.SOURCE_REFERENCED(Descr.repetition_set)],
        ["$$UNION_MEMBERS$$",           get_union_members(Descr)],
        ["$$VIRTUAL_DESTRUCTOR$$",      virtual_destructor_str],
    ])

    template_i_str = Lng.open_template(Lng.token_template_i_file())
    txt_i = blue_print(template_i_str, [
        ["$$INCLUDE_TOKEN_CLASS_HEADER$$", Lng.INCLUDE(Setup.output_token_class_file)],
        ["$$CONSTRUCTOR$$",                Lng.SOURCE_REFERENCED(Descr.constructor)],
        ["$$COPY$$",                       copy_str],
        ["$$DESTRUCTOR$$",                 Lng.SOURCE_REFERENCED(Descr.destructor)],
        ["$$FOOTER$$",                     Lng.SOURCE_REFERENCED(Descr.footer)],
        ["$$FUNC_TAKE_TEXT$$",             take_text_str],
        ["$$TOKEN_CLASS_HEADER$$",         token_db.token_type_definition.get_file_name()],
        ["$$TOKEN_REPETITION_N_GET$$",     Lng.SOURCE_REFERENCED(Descr.repetition_get)],
        ["$$TOKEN_REPETITION_N_SET$$",     Lng.SOURCE_REFERENCED(Descr.repetition_set)],
    ])

    txt   = blue_print(txt, helper_variable_replacements)
    txt_i = blue_print(txt_i, helper_variable_replacements)

    if Setup.token_class_only_f:
        # All type definitions need to be replaced!
        replacements = Lng.type_replacements(DirectF=True)
        txt   = blue_print(txt, replacements, "QUEX_TYPE_")
        txt_i = blue_print(txt_i, replacements, "QUEX_TYPE_")

    return txt, txt_i
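
The third argument "QUEX_TYPE_" passed to blue_print above is presumably a common placeholder prefix that lets the substitution pass skip pairs outside that family. A sketch of such a prefix-filtered variant, under that assumption:

def blue_print_prefix_filtered(template, replacements, common_start):
    # Hypothetical sketch: only substitute placeholders that share the
    # given prefix, e.g. 'QUEX_TYPE_'; everything else is left untouched.
    for placeholder, text in replacements:
        if placeholder.startswith(common_start):
            template = template.replace(placeholder, text)
    return template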