예제 #1
0
 def readFile(self, fileName, root):
     """
     Read the file from the cache if possible.

     Return a tuple (s, ok, key) where s is the encoded file contents,
     ok is true on a cache hit, and key is the cache key (or None).
     """
     trace = (False or g.app.debug) and not g.unitTesting
     showHits, showLines, verbose = False, False, True
     sfn = g.shortFileName(fileName)
     # The cache may be disabled globally.
     if not g.enableDB:
         if trace and verbose:
             g.trace('g.enableDB is False', sfn)
         return '', False, None
     s = g.readFileIntoEncodedString(fileName, silent=True)
     if s is None:
         if trace:
             g.trace('empty file contents', sfn)
         return s, False, None
     assert not g.isUnicode(s)
     if trace and showLines:
         for n, aLine in enumerate(g.splitLines(s)):
             print('%3d %s' % (n, repr(aLine)))
     # fileKey requires an encoded (bytes) string.
     key = self.fileKey(root.h, s, requireEncodedString=True)
     ok = self.db and key in self.db
     if not ok:
         if trace:
             g.trace('cache miss', key[-6:], sfn)
         return s, ok, key
     if trace and showHits:
         g.trace('cache hit', key[-6:], sfn)
     # Delete the previous tree, regardless of the @<file> type.
     while root.hasChildren():
         root.firstChild().doDelete()
     # Recreate the file from the cache.
     aList = self.db.get(key)
     self.createOutlineFromCacheList(root.v, aList, fileName=fileName)
     return s, ok, key
예제 #2
0
def readBibTexFileIntoTree(c, fn, p):
    """Import a BibTeX file into a @bibtex tree."""
    root = p.copy()
    g.es('reading:', fn)
    # Read the encoded bytes so g.getEncodingAt can inspect them.
    s = g.readFileIntoEncodedString(fn)
    if not s or not s.strip():
        return
    s = g.toUnicode(s, encoding=g.getEncodingAt(p, s))
    entries, strings = [], []
    # Strip leading whitespace so every entry starts at column 0.
    s = '\n' + ''.join(z.lstrip() for z in g.splitLines(s))
    # Each chunk after '\n@' is one BibTeX record (leading '@' removed).
    for chunk in s.split('\n@')[1:]:
        kind, rest = chunk[:6], chunk[7:].strip()
        if kind == 'string':
            # Collect @string definitions for a single combined node.
            strings.append(rest[:-1] + '\n')
            continue
        # Headline ends at the first ',' or newline.
        i = min(chunk.find(','), chunk.find('\n'))
        h = '@' + chunk[:i]
        h = h.replace('{', ' ').replace('(', ' ').replace('\n', '')
        # Body: everything after the delimiter, minus the closing brace.
        b = chunk[i + 1:].rstrip().lstrip('\n')[:-1]
        entries.append((h, b))
    # Build the (headline, body) list: @string node first, then entries.
    aList = []
    if strings:
        aList.append(('@string', ''.join(strings)))
    aList.extend(entries)
    for h, b in aList:
        child = root.insertAsLastChild()
        child.b, child.h = b, h
    root.expand()
    c.redraw()
예제 #3
0
 def readFile(self, fileName, root):
     '''
     Read the file from the cache if possible.
     Return (s,ok,key)
     '''
     # Tracing is active only when debugging outside unit tests.
     trace = (False or g.app.debug) and not g.unitTesting
     # Extra trace switches: cache hits, file lines, verbose messages.
     showHits, showLines, verbose = False, False, True
     sfn = g.shortFileName(fileName)
     # The cache may be disabled globally.
     if not g.enableDB:
         if trace and verbose: g.trace('g.enableDB is False', sfn)
         return '', False, None
     # Read the raw (encoded) bytes from disk.
     s = g.readFileIntoEncodedString(fileName, silent=True)
     if s is None:
         if trace: g.trace('empty file contents', sfn)
         return s, False, None
     assert not g.isUnicode(s)
     if trace and showLines:
         for i, line in enumerate(g.splitLines(s)):
             print('%3d %s' % (i, repr(line)))
     # There will be a bug if s is not already an encoded string.
     key = self.fileKey(root.h, s, requireEncodedString=True)
     # ok is falsy when self.db is unset; otherwise a membership bool.
     ok = self.db and key in self.db
     if ok:
         if trace and showHits: g.trace('cache hit', key[-6:], sfn)
         # Delete the previous tree, regardless of the @<file> type.
         while root.hasChildren():
             root.firstChild().doDelete()
         # Recreate the file from the cache.
         aList = self.db.get(key)
         self.createOutlineFromCacheList(root.v, aList, fileName=fileName)
     elif trace:
         g.trace('cache miss', key[-6:], sfn)
     return s, ok, key
예제 #4
0
    def check_all(self, log_flag, paths):
        '''Run pyflakes on all files in paths.

        Return the total number of pyflakes errors.
        '''
        from pyflakes import api, reporter
        total_errors = 0

        # Send all output to the log pane.
        # Hoisted out of the loop: one definition suffices for all files.
        class LogStream:
            def write(self, s):
                if s.strip():
                    g.es_print(s)
                    # It *is* useful to send pyflakes errors to the console.

        for fn in sorted(paths):
            # Report the file name.
            sfn = g.shortFileName(fn)
            s = g.readFileIntoEncodedString(fn, silent=False)
            # Guard: g.readFileIntoEncodedString may return None for
            # unreadable files; calling s.strip() would then raise.
            if s and s.strip():
                g.es('Pyflakes: %s' % sfn)
                r = reporter.Reporter(
                    errorStream=LogStream(),
                    warningStream=LogStream(),
                )
                errors = api.check(s, sfn, r)
                total_errors += errors
        return total_errors
예제 #5
0
def get_leo_data(source):
    """Return the root node for the specified .leo file (path or contents)."""
    reader = LeoReader()
    # Accept either a path to a .leo file or the raw file contents.
    if g.os_path_isfile(source):
        source = g.readFileIntoEncodedString(source)
    parseString(source, reader)
    return reader.root
예제 #6
0
    def check_all(self, log_flag, paths, pyflakes_errors_only):
        '''Run pyflakes on all files in paths.

        Return the total number of pyflakes errors.
        '''
        from pyflakes import api, reporter
        total_errors = 0

        # Send all output to the log pane.
        # Hoisted out of the loop: one definition suffices for all files.
        class LogStream:

            def write(self, s):
                if s.strip():
                    g.es_print(s)
                    # It *is* useful to send pyflakes errors to the console.

        for fn in sorted(paths):
            # Report the file name.
            sfn = g.shortFileName(fn)
            s = g.readFileIntoEncodedString(fn)
            # Guard: g.readFileIntoEncodedString may return None for
            # unreadable files; calling s.strip() would then raise.
            if s and s.strip():
                if not pyflakes_errors_only:
                    g.es('Pyflakes: %s' % sfn)
                r = reporter.Reporter(
                    errorStream=LogStream(),
                    warningStream=LogStream(),
                )
                errors = api.check(s, sfn, r)
                total_errors += errors
        return total_errors
예제 #7
0
def readBibTexFileIntoTree(c, fn, p):
    '''Import a BibTeX file into a @bibtex tree.'''
    root = p.copy()
    g.es('reading:', fn)
    s = g.readFileIntoEncodedString(fn)
        # Read the encoded bytes for g.getEncodingAt()
    if not s or not s.strip():
        return
    encoding = g.getEncodingAt(p, s)
    s = g.toUnicode(s, encoding=encoding)
    aList, entries, strings = [], [], []
        # aList is a list of tuples (h,b).
    # Strip leading whitespace so every entry starts at column 0.
    s = '\n' + ''.join([z.lstrip() for z in g.splitLines(s)])
    # Each chunk after '\n@' is one BibTeX record (leading '@' removed).
    for line in s.split('\n@')[1:]:
        kind, rest = line[: 6], line[7:].strip()
        if kind == 'string':
            # @string definitions are gathered into one combined node below.
            strings.append(rest[: -1] + '\n')
        else:
            # Headline ends at the first ',' or newline.
            # NOTE(review): if both are missing, find() returns -1 and min()
            # picks it, silently truncating the entry — confirm inputs are
            # always well-formed BibTeX.
            i = min(line.find(','), line.find('\n'))
            h = '@' + line[: i]
            h = h.replace('{', ' ').replace('(', ' ').replace('\n', '')
            # Body: everything after the delimiter, minus the closing brace.
            b = line[i + 1:].rstrip().lstrip('\n')[: -1]
            entries.append((h, b),)
    if strings:
        # The single '@string' node comes first, before all entries.
        h, b = '@string', ''.join(strings)
        aList.append((h, b),)
    aList.extend(entries)
    for h, b in aList:
        p = root.insertAsLastChild()
        p.b, p.h = b, h
    root.expand()
    c.redraw()
예제 #8
0
 def check_all(self, log_flag, pyflakes_errors_only, roots):
     """Run pyflakes on the external file of every node in roots.

     Return the total error count, or True when pyflakes is not installed.
     """
     try:
         from pyflakes import api, reporter
     except Exception:  # ModuleNotFoundError
         return True  # Pretend all is fine.
     total_errors = 0
     for i, root in enumerate(roots):
         fn = self.finalize(root)
         sfn = g.shortFileName(fn)
         # #1306: honor @nopyflakes directives in the root's body.
         body_lines = g.splitLines(root.b)
         if any([z.strip().startswith('@nopyflakes') for z in body_lines]):
             continue
         s = g.readFileIntoEncodedString(fn)
         if not s or not s.strip():
             continue
         # Report the file name.
         if not pyflakes_errors_only:
             g.es(f"Pyflakes: {sfn}")
         # Send all output to the log pane.
         r = reporter.Reporter(
             errorStream=self.LogStream(i, roots),
             warningStream=self.LogStream(i, roots),
         )
         total_errors += api.check(s, sfn, r)
     return total_errors
예제 #9
0
def get_leo_data(source):
    """Return the root node for the specified .leo file (path or contents)."""
    parser = LeoReader()
    # Accept either a path to a .leo file or the raw file contents.
    if g.os_path_isfile(source):
        source = g.readFileIntoEncodedString(source)
    parseString(source, parser)
    return parser.root
예제 #10
0
 def readFile(self, fileName, root):
     """
     Read the file from the cache if possible.

     Return a tuple (s, ok, key) where s is the encoded file contents,
     ok is true on a cache hit, and key is the cache key (or None).
     """
     if not g.enableDB:
         return '', False, None
     s = g.readFileIntoEncodedString(fileName, silent=True)
     if s is None:
         return s, False, None
     assert not g.isUnicode(s)
     # fileKey requires an encoded (bytes) string.
     # Fix bug #385: use the full fileName, not root.h.
     key = self.fileKey(fileName, s, requireEncodedString=True)
     ok = self.db and key in self.db
     if not ok:
         return s, ok, key
     # Delete the previous tree, regardless of the @<file> type.
     while root.hasChildren():
         root.firstChild().doDelete()
     # Recreate the file from the cache.
     aList = self.db.get(key)
     self.collectChangedNodes(root.v, aList, fileName)
     self.createOutlineFromCacheList2(root.v, aList)
     return s, ok, key
예제 #11
0
def main(files):
    """Run pyflakes on each of the given files and report the elapsed time."""
    t1 = time.time()
    for fn in files:
        # Report the file name.
        assert g.os_path_exists(fn), fn
        sfn = g.shortFileName(fn)
        contents = g.readFileIntoEncodedString(fn)
        if contents and contents.strip():
            rep = reporter.Reporter(errorStream=sys.stderr, warningStream=sys.stderr)
            api.check(contents, sfn, rep)
    t2 = time.time()
    n = len(files)
    print(f"{n} file{g.plural(n)}, time: {t2 - t1:5.2f} sec.")
예제 #12
0
def main(files):
    '''Call run on all tables in tables_table.'''
    t1 = time.time()
    for fn in files:
        # Report the file name.
        assert g.os_path_exists(fn), fn
        sfn = g.shortFileName(fn)
        contents = g.readFileIntoEncodedString(fn)
        if contents and contents.strip():
            rep = reporter.Reporter(
                errorStream=sys.stderr,
                warningStream=sys.stderr,
            )
            api.check(contents, sfn, rep)
    t2 = time.time()
    n = len(files)
    print('%s file%s, time: %5.2f sec.' % (n, g.plural(n), t2 - t1))
예제 #13
0
def main(files):
    '''Call run on all tables in tables_table.'''
    start = time.time()
    for fn in files:
        # Report the file name.
        assert g.os_path_exists(fn), fn
        sfn = g.shortFileName(fn)
        contents = g.readFileIntoEncodedString(fn, silent=False)
        if contents and contents.strip():
            rep = reporter.Reporter(
                errorStream=sys.stderr,
                warningStream=sys.stderr,
            )
            api.check(contents, sfn, rep)
    elapsed = time.time() - start
    n = len(files)
    print('%s file%s, time: %5.2f sec.' % (n, g.plural(n), elapsed))
예제 #14
0
def main(src, dest):
    """Load the .leo file src and write its vnodes into the sqlite db dest."""
    print('src', src)
    print('dest', dest)
    root = get_leo_data(g.readFileIntoEncodedString(src))
    root.gnx = 'hidden-root-vnode-gnx'
    vnodes, sequence = walk_tree(root)
    rows = vnode_data(vnodes, sequence[1:])  # skip hidden root
    with sqlite3.connect(dest) as conn:
        resetdb(conn)
        conn.executemany(sqls('insert-vnode'), rows)
        conn.commit()
    harvested = []
    settings_harvester(root, [], harvested)
    for gnx, kind, name, value, cond in harvested:
        # Truncate raw @data values for display.
        if kind == g.u('data'):
            value = repr(value)[:30]
        print(cond or "always", kind, name, pprint.pformat(value))
예제 #15
0
def main(src, dest):
    """Load the .leo file src and write its vnodes into the sqlite db dest."""
    print('src', src)
    print('dest', dest)
    root = get_leo_data(g.readFileIntoEncodedString(src))
    # Give the hidden root a well-known gnx.
    root.gnx = 'hidden-root-vnode-gnx'
    vns, seq = walk_tree(root)
    data = vnode_data(vns, seq[1:]) # skip hidden root
    with sqlite3.connect(dest) as conn:
        resetdb(conn)
        conn.executemany(sqls('insert-vnode'), data)
        conn.commit()
    acc = []
    settings_harvester(root, [], acc)
    # Report harvested settings, truncating @data values for display.
    for gnx, kind, name, value, cond in acc:
        if kind == g.u('data'):
            value = repr(value)[:30]

        print(cond or "always", kind, name, pprint.pformat(value))
예제 #16
0
 def check_all(self, log_flag, paths, pyflakes_errors_only, roots=None):
     '''Run pyflakes on all files in paths.

     Return the total number of pyflakes errors.
     '''
     from pyflakes import api, reporter
     total_errors = 0
     # pylint: disable=cell-var-from-loop
     for fn_n, fn in enumerate(sorted(paths)):
         # Report the file name.
         sfn = g.shortFileName(fn)
         s = g.readFileIntoEncodedString(fn)
         # Guard: g.readFileIntoEncodedString may return None for
         # unreadable files; calling s.strip() would then raise.
         if s and s.strip():
             if not pyflakes_errors_only:
                 g.es('Pyflakes: %s' % sfn)
             # Send all output to the log pane.
             r = reporter.Reporter(
                 errorStream=self.LogStream(fn_n, roots),
                 warningStream=self.LogStream(fn_n, roots),
             )
             errors = api.check(s, sfn, r)
             total_errors += errors
     return total_errors
예제 #17
0
 def check_all(self, roots):
     """Run pyflakes on the external file of every node in roots.

     Return the total number of pyflakes errors.
     """
     total_errors = 0
     for i, root in enumerate(roots):
         fn = self.finalize(root)
         sfn = g.shortFileName(fn)
         # #1306: honor @nopyflakes directives in the root's body.
         body_lines = g.splitLines(root.b)
         if any(z.strip().startswith('@nopyflakes') for z in body_lines):
             continue
         # Report the file name.
         s = g.readFileIntoEncodedString(fn)
         if not s or not s.strip():
             continue
         # Send all output to the log pane.
         r = reporter.Reporter(
             errorStream=self.LogStream(i, roots),
             warningStream=self.LogStream(i, roots),
         )
         total_errors += api.check(s, sfn, r)
     return total_errors
예제 #18
0
 def check_all(self, log_flag, paths, pyflakes_errors_only, roots=None):
     '''Run pyflakes on all files in paths.

     Return the total number of pyflakes errors, or True when pyflakes
     is not installed.
     '''
     try:
         from pyflakes import api, reporter
     except Exception: # ModuleNotFoundError
         return True # Pretend all is fine.
     total_errors = 0
     # pylint: disable=cell-var-from-loop
     for fn_n, fn in enumerate(sorted(paths)):
         # Report the file name.
         sfn = g.shortFileName(fn)
         s = g.readFileIntoEncodedString(fn)
         # Guard: g.readFileIntoEncodedString may return None for
         # unreadable files; calling s.strip() would then raise.
         if s and s.strip():
             if not pyflakes_errors_only:
                 g.es('Pyflakes: %s' % sfn)
             # Send all output to the log pane.
             r = reporter.Reporter(
                 errorStream=self.LogStream(fn_n, roots),
                 warningStream=self.LogStream(fn_n, roots),
             )
             errors = api.check(s, sfn, r)
             total_errors += errors
     return total_errors