Code example #1
def resume_logging(log_dir: str = ''):
    """Resumes logging in the current thread with the given log-dir."""
    if not log_dir:
        access_presets()
        log_dir = get_preset_value('log_dir')
        finalize_presets()
    set_config_value('log_dir', log_dir)
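A brief usage sketch (the helper `do_work` is a hypothetical placeholder): with no argument, `resume_logging` falls back to the globally preset log-dir; pairing it with `suspend_logging` (code example #9) restores a previously saved one.

resume_logging()   # falls back to get_preset_value('log_dir') for this thread
do_work()          # hypothetical placeholder for work whose logs should be written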
Code example #2
def setup(self):
    stop_tcp_server('127.0.0.1', TEST_PORT)
    self.p = None
    self.DEBUG = False
    if self.DEBUG:
        from DHParser import log
        log.start_logging('LOGS')
        set_config_value('log_server', True)
Code example #3
@contextmanager  # from contextlib; required to use this generator in a with-statement
def local_log_dir(path: str = './LOGS'):
    """Context manager for temporarily switching to a different log-directory."""
    assert path, "Pathname cannot be empty"
    saved_log_dir = get_config_value('log_dir')
    log_dir(path)
    try:
        yield
    finally:
        set_config_value('log_dir', saved_log_dir)
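Given the `@contextmanager` decorator, the typical use is a with-block; a minimal sketch (`run_tests` is a hypothetical placeholder):

with local_log_dir('./TESTLOGS'):
    run_tests()  # hypothetical: logs written inside the block go to ./TESTLOGS
# on exit, the finally-clause has restored the previous 'log_dir' value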
Code example #4
def start_logging(dirname: str = "LOGS"):
    """Turns logging on an sets the log-directory to `dirname`.
    The log-directory, if it does not already exist, will be created
    lazily, i.e. only when logging actually starts."""
    access_presets()
    log_dir = os.path.abspath(dirname) if dirname else ''
    if log_dir != get_preset_value('log_dir'):
        set_preset_value('log_dir', log_dir)
        set_config_value('log_dir', log_dir)
    finalize_presets()
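A short sketch of the lazy-creation behavior described in the docstring (the directory name is arbitrary):

start_logging('LOGS')   # logging enabled; the directory is not created yet
path = log_dir()        # first actual use: creates 'LOGS' (see code example #5)
                        # and returns its normalized path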
Code example #5
def log_dir(path: str = "") -> str:
    """Creates a directory for log files (if it does not exist) and
    returns its path.

    WARNING: Any files in the log dir will eventually be overwritten.
    Don't use a directory that serves any purpose other than logging.

    ATTENTION: The log-dir is stored thread-locally, which means that neither
    the log-dir nor the information whether logging is turned on or off will
    automatically be transferred to any subprocesses. This needs to be done
    explicitly. (See `testing.grammar_suite()` for an example of how this can
    be done.)

    Parameters:
        path:   The directory path. If empty, the configured value will be
            used: `configuration.get_config_value('log_dir')`.

    Returns:
        str - name of the logging directory or '' if logging is turned off.
    """
    dirname = path if path else get_config_value('log_dir')
    if not dirname:
        return ''
    dirname = os.path.normpath(dirname)
    # `try ... except` rather than `if os.path.exists(...)` for thread-safety
    try:
        os.mkdir(dirname)
        info_file_name = os.path.join(dirname, 'info.txt')
        if not os.path.exists(info_file_name):
            with open(info_file_name, 'w', encoding="utf-8") as f:
                f.write(
                    "This directory has been created by DHParser to store log files from\n"
                    "parsing. ANY FILE IN THIS DIRECTORY CAN BE OVERWRITTEN! Therefore,\n"
                    "do not place any files here and do not bother editing files in this\n"
                    "directory as any changes will get lost.\n")
    except FileExistsError:
        if not os.path.isdir(dirname):
            raise IOError('"' + dirname + '" cannot be used as log directory, '
                          'because it is not a directory!')
    set_config_value('log_dir', dirname)
    return dirname
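The ATTENTION note above matters in practice: because the log-dir is thread-local, it has to be handed to worker processes explicitly. Below is a minimal sketch of that hand-off with a multiprocessing pool, assuming the helpers live in DHParser.log as code examples #2 and #7 suggest; the worker body is a hypothetical stand-in.

from multiprocessing import Pool
from DHParser.log import start_logging, resume_logging, log_dir

def worker(task, log_dir_path):
    # Thread-local values like 'log_dir' do not reach subprocesses by
    # themselves, so the parent passes the path in and the worker re-sets it.
    resume_logging(log_dir_path)
    return len(task)  # stand-in for real, logged work

if __name__ == '__main__':
    start_logging('LOGS')
    ld = log_dir()  # '' if logging is turned off
    with Pool() as pool:
        print(pool.starmap(worker, [(t, ld) for t in ('a', 'bb', 'ccc')]))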
Code example #6
File: test_syntaxtree.py  Project: jecki/DHParser
def test_compact_representation(self):
    tree = parse_sxpr('(A (B (C "D") (E "F")) (G "H"))')
    compact = tree.as_sxpr(compact=True, flatten_threshold=0)
    assert compact == '(A\n  (B\n    (C "D")\n    (E "F"))\n  (G "H"))'
    tree = parse_sxpr('(A (B (C "D\nX") (E "F")) (G " H \n Y "))')
    compact = tree.as_sxpr(compact=True, flatten_threshold=0)
    assert compact == '(A\n  (B\n    (C\n      "D"\n      "X")\n    (E "F"))' \
        '\n  (G\n    " H "\n    " Y "))'
    tree = parse_sxpr('(A (B (C "D") (E "F")) (G "H"))')
    C = tree['B']['C']
    C.attr['attr'] = 'val'
    threshold = get_config_value('flatten_sxpr_threshold')
    set_config_value('flatten_sxpr_threshold', 20)
    compact = tree.serialize('indented')
    # assert compact == 'A\n  B\n    C `(attr "val")\n      "D"\n    E\n      "F"\n  G\n    "H"'
    assert compact == 'A\n  B\n    C `(attr "val") "D"\n    E "F"\n  G "H"', compact
    tree = parse_xml('<note><priority level="high" /><remark></remark></note>')
    assert tree.serialize(how='indented') == \
        'note\n  priority `(level "high")\n  remark'
    set_config_value('flatten_sxpr_threshold', threshold)
Code example #7
File: tst_EBNF_grammar.py  Project: jecki/DHParser
def recompile_grammar(grammar_src, force):
    grammar_tests_dir = os.path.join(scriptpath, 'test_grammar')
    testing.create_test_templates(grammar_src, grammar_tests_dir)
    DHParser.log.start_logging('LOGS')
    # recompiles Grammar only if it has changed
    saved_syntax_variant = get_config_value('syntax_variant')
    set_config_value('syntax_variant', 'heuristic')
    if not dsl.recompile_grammar(
            grammar_src,
            force=force,
            notify=lambda: print('recompiling ' + grammar_src)):
        print('\nErrors while recompiling "%s":' % grammar_src +
              '\n--------------------------------------\n\n')
        if is_filename(grammar_src):
            err_name = grammar_src.replace('.', '_') + '_ERRORS.txt'
        else:
            err_name = 'EBNF_ebnf_ERRORS.txt'
        with open(err_name, encoding='utf-8') as f:
            print(f.read())
        sys.exit(1)
    set_config_value('syntax_variant', saved_syntax_variant)
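Code examples #6 and #7 both use a save/set/restore dance around `set_config_value`. If that pattern recurs, it can be factored into a small context manager; the following is a sketch, not part of DHParser's API (only `get_config_value`/`set_config_value` from DHParser.configuration are assumed):

from contextlib import contextmanager
from DHParser.configuration import get_config_value, set_config_value

@contextmanager
def config_override(key, value):
    # Sketch (not DHParser API): temporarily override one configuration
    # value, restoring the old value even if the body raises or exits.
    saved = get_config_value(key)
    set_config_value(key, value)
    try:
        yield
    finally:
        set_config_value(key, saved)

Because sys.exit raises SystemExit, the finally-clause would also restore 'syntax_variant' on the early-exit path of code example #7.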
Code example #8
File: test_syntaxtree.py  Project: jecki/DHParser
def test_attr_error_reporting_and_fixing(self):
    n = Node('tag', 'content').with_attr(faulty='<&"')
    set_config_value('xml_attribute_error_handling', 'fail')
    try:
        s = n.as_xml()
        assert False, "ValueError expected"
    except ValueError:
        pass
    set_config_value('xml_attribute_error_handling', 'fix')
    assert n.as_xml() == '''<tag faulty='&lt;&amp;"'>content</tag>''', n.as_xml()
    set_config_value('xml_attribute_error_handling', 'ignore')
    assert n.as_xml() == '''<tag faulty='<&"'>content</tag>'''
    n.attr['nonascii'] = 'ἱεραρχικωτάτου'
    set_config_value('xml_attribute_error_handling', 'lxml')
    assert n.as_xml() == \
        '''<tag faulty='&lt;&amp;"' nonascii="??????????????">content</tag>'''
Code example #9
def suspend_logging() -> str:
    """Suspends logging in the current thread. Returns the log-dir
    for resuming logging later."""
    save = get_config_value('log_dir')
    set_config_value('log_dir', '')
    return save
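`suspend_logging` and `resume_logging` (code example #1) form a round-trip; a minimal sketch (`quiet_step` is a hypothetical placeholder):

saved = suspend_logging()   # logging off; the previous log-dir is remembered
quiet_step()                # hypothetical work that should produce no log files
resume_logging(saved)       # logging on again with the remembered log-dir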
Code example #10
class TestTokenParsing:
    ebnf = r"""
        @ tokens     = BEGIN_INDENT, END_INDENT
        @ whitespace = /[ \t]*/ 
        block       = { line | indentBlock }+
        line        = ~/[^\x1b\x1c\x1d\n]*\n/
        indentBlock = BEGIN_INDENT block END_INDENT
        """
    set_config_value('max_parser_dropouts', 3)
    grammar = grammar_provider(ebnf)()
    code = lstrip_docstring("""
        def func(x, y):
            if x > 0:         # a comment
                if y > 0:
                    print(x)  # another comment
                    print(y)
        """)
    tokenized, _ = tokenize_indentation(code)
    srcmap = tokenized_to_original_mapping(tokenized, code)

    def verify_mapping(self, teststr, orig_text, preprocessed_text, mapping):
        mapped_pos = preprocessed_text.find(teststr)
        assert mapped_pos >= 0
        file_name, file_content, original_pos = mapping(mapped_pos)
        # original_pos = source_map(mapped_pos, self.srcmap)
        assert orig_text[original_pos:original_pos + len(teststr)] == teststr, \
            '"%s" (%i) wrongly mapped onto "%s" (%i)' % \
            (teststr, mapped_pos, orig_text[original_pos:original_pos + len(teststr)], original_pos)

    def test_strip_tokens(self):
        assert self.code == strip_tokens(self.tokenized)

    def test_parse_tokenized(self):
        cst = self.grammar(self.tokenized)
        assert not cst.error_flag

    def test_source_mapping_1(self):
        mapping = partial(source_map, srcmap=self.srcmap)
        self.verify_mapping("def func", self.code, self.tokenized, mapping)
        self.verify_mapping("x > 0:", self.code, self.tokenized, mapping)
        self.verify_mapping("if y > 0:", self.code, self.tokenized, mapping)
        self.verify_mapping("print(x)", self.code, self.tokenized, mapping)
        self.verify_mapping("print(y)", self.code, self.tokenized, mapping)

    def test_source_mapping_2(self):
        previous_index = 0
        L = len(self.code)
        for mapped_index in range(len(self.tokenized)):
            _, _, index = source_map(mapped_index, self.srcmap)
            assert previous_index <= index <= L, \
                "%i <= %i <= %i violated" % (previous_index, index, L)
            previous_index = index

    def test_non_token_preprocessor(self):
        _, tokenized, mapping, _ = preprocess_comments(self.code, 'no_uri')
        self.verify_mapping("def func", self.code, tokenized, mapping)
        self.verify_mapping("x > 0:", self.code, tokenized, mapping)
        self.verify_mapping("if y > 0:", self.code, tokenized, mapping)
        self.verify_mapping("print(x)", self.code, tokenized, mapping)
        self.verify_mapping("print(y)", self.code, tokenized, mapping)

    def test_chained_preprocessors(self):
        pchain = chain_preprocessors(preprocess_comments,
                                     preprocess_indentation)
        _, tokenized, mapping, _ = pchain(self.code, 'no_uri')
        self.verify_mapping("def func", self.code, tokenized, mapping)
        self.verify_mapping("x > 0:", self.code, tokenized, mapping)
        self.verify_mapping("if y > 0:", self.code, tokenized, mapping)
        self.verify_mapping("print(x)", self.code, tokenized, mapping)
        self.verify_mapping("print(y)", self.code, tokenized, mapping)

    def test_error_position(self):
        orig_src = self.code.replace('#', '\x1b')
        prepr = chain_preprocessors(preprocess_comments,
                                    preprocess_indentation)
        self.grammar.max_parser_dropouts__ = 3
        result, messages, syntaxtree = compile_source(orig_src, prepr,
                                                      self.grammar,
                                                      lambda i: i, lambda i: i)
        for err in messages:
            if self.code[err.orig_pos] == "#":
                break
        else:
            assert False, "wrong error positions"