Example #1
0
def copy_help(srcdir, destdir, force=False, zipdirs=None, include=None,
              exclude=None):
    """Copy a help tree from ``srcdir`` to ``destdir``, zipping some subdirs.

    If ``zipdirs`` names a file, each (stripped) line in it is treated as a
    top-level directory name that should be archived to ``<name>.zip`` in the
    destination instead of being copied as a tree. ``force``, ``include`` and
    ``exclude`` are passed through to the archive/copy helpers.
    """
    logger = manager.app.logger
    src = _exp(srcdir)
    dest = _exp(destdir)

    logger.info("Copying help from %s to %s", src, dest)
    started = util.perf_counter()

    # Top-level directory names that should be zipped rather than copied.
    names_to_zip = set()
    if zipdirs:
        with open(_exp(zipdirs)) as listing:
            names_to_zip = {ln.strip() for ln in listing}

    # Walk the top level of the source. Directories are either archived or
    # copied recursively; plain files are copied individually. Hidden entries
    # (leading dot) are skipped entirely.
    for entry in os.listdir(src):
        if entry.startswith("."):
            continue

        source_path = os.path.join(src, entry)
        target_path = os.path.join(dest, entry)
        if not os.path.isdir(source_path):
            _copy_file(source_path, target_path, force, logger)
        elif entry in names_to_zip:
            archive(source_path, target_path + ".zip", force=force,
                    include=include, exclude=exclude)
        else:
            _copy_tree(source_path, target_path, force, include, exclude,
                       logger)

    logger.info("Copied help in %.01f sec", util.perf_counter() - started)
Example #2
0
def debug_wiki(path):
    """Parse the wiki source at *path* block by block, timing each block.

    Prints the total parse time, then (if the whole source parsed cleanly)
    the blocks sorted slowest-first. On a parse miss, prints the line/column
    where parsing failed and stops.
    """
    from bookish.grammars.wiki import blocks
    from bookish.parser import parser, rules

    pages = flaskapp.get_wikipages(manager.app)
    src = wikipages.condition_string(pages.content(path))

    ctx = wikipages.bootstrap_context()
    pos = 0
    timings = []
    started = util.perf_counter()
    missed = False
    # Keep pulling blocks until the stream-end rule matches at pos.
    while rules.streamend.accept(src, pos, ctx)[0] is parser.Miss:
        block_start = util.perf_counter()
        out, newpos = blocks(src, pos, ctx)
        elapsed = util.perf_counter() - block_start

        if not isinstance(out, dict):
            # The block grammar failed to produce a block: report where.
            line, col = parser.Lines(src).line_and_col(pos)
            print("Miss at line", line, "column", col, "(char %s)" % pos)
            print(repr(src[pos:pos + 10]))
            missed = True
            break

        pos = newpos
        timings.append((elapsed, out.get("type"),
                        repr(functions.string(out.get("text"))[:40])))
    total = util.perf_counter() - started
    print("%0.06f" % total)

    if not missed:
        # Slowest blocks first.
        timings.sort(reverse=True)
        for elapsed, typename, text in timings:
            print("%0.06f" % elapsed, typename, text)
Example #3
0
def archive(dirpath, zfile, force=False, include=None, exclude=None):
    """Zip the files under *dirpath* into the zip file *zfile*.

    dirpath: directory whose files (filtered by ``include``/``exclude`` via
        ``util.file_paths``) are added to the archive.
    zfile: path of the zip file to (re)create.
    force: if false, the function is a no-op when *zfile* already exists and
        is newer than every input file.
    """
    import zipfile

    logger = manager.app.logger

    dirpath = _exp(dirpath)
    filepaths = list(util.file_paths(dirpath, include, exclude))

    def _up_to_date():
        # The archive is current if it exists and is newer than every input.
        if not os.path.exists(zfile):
            return False
        ziptime = os.path.getmtime(zfile)
        return not any(os.path.getmtime(p) > ziptime for p in filepaths)

    # Don't bother archiving if zip is up-to-date
    if not force and _up_to_date():
        return

    logger.info("Archiving directory %s to file %s", dirpath, zfile)
    t = util.perf_counter()
    count = 0
    # Use a context manager so the zip file is closed (and its central
    # directory flushed) even if a write raises; the original left the
    # handle open on error and only closed it after the final log call.
    with zipfile.ZipFile(zfile, "w", compression=zipfile.ZIP_DEFLATED) as zf:
        for path in filepaths:
            # Zip member names always use forward slashes, even on Windows.
            rp = os.path.relpath(path, dirpath).replace("\\", "/")
            zf.write(path, arcname=rp)
            logger.debug("Adding %s", path)
            count += 1
    logger.info("Archived %s files in %.01f sec",
                count, util.perf_counter() - t)
Example #4
0
def copy_files(srcdir, destdir, force=False, include=None, exclude=None):
    """Recursively copy files from ``srcdir`` to ``destdir`` and log timing.

    ``force``, ``include`` and ``exclude`` are passed through to
    ``_copy_tree``.
    """
    logger = manager.app.logger
    source = _exp(srcdir)
    target = _exp(destdir)

    logger.info("Copying %s to %s", source, target)
    started = util.perf_counter()
    copied = _copy_tree(source, target, force, include, exclude, logger)
    logger.info("Copied %s files in %.01f sec", copied,
                util.perf_counter() - started)
Example #5
0
def generate(dirpath, prefix="/", vars=None, longest=10, cache=True,
             nocache=False):
    """Render every wiki source page under *prefix* to static HTML files.

    dirpath: root output directory; each page is written to the
        corresponding ``.html`` path under it.
    prefix: only pages under this path prefix are generated.
    vars: optional variable assignments (parsed by ``_parse_vars``) merged
        into the app's ``VARS`` config before generating.
    longest: how many of the slowest pages to report at the end.
    cache: passed through as ``save_to_cache`` to ``pages.html``.
    nocache: if true, empty the page cache before generating.
    """
    pages = flaskapp.get_wikipages(manager.app)
    logger = manager.app.logger
    dirpath = _exp(dirpath)
    indexer = flaskapp.get_indexer(manager.app)
    searcher = indexer.searcher()

    if nocache:
        empty_cache(pages)

    count = 0
    largest = []  # sorted list of (render_time, path), slowest last

    if vars:
        vars = _parse_vars(vars)
        manager.app.config.setdefault("VARS", {}).update(vars)

    t = util.perf_counter()
    for path in get_prefixed_paths(pages, prefix):
        if not pages.is_wiki_source(path):
            continue

        logger.debug("Generating %s", path)
        count += 1

        tt = util.perf_counter()
        html = pages.html(path, save_to_cache=cache, searcher=searcher)
        tt = util.perf_counter() - tt

        htmlpath = paths.basepath(path) + ".html"
        filepath = os.path.join(dirpath, htmlpath[1:])

        # Make sure the destination directory exists, then create the file.
        parentdirpath = os.path.dirname(filepath)
        if not os.path.exists(parentdirpath):
            os.makedirs(parentdirpath)
        # Write the UTF-8 encoded bytes in binary mode; the original opened
        # the file in text mode ("w") and wrote bytes, which raises
        # TypeError on Python 3.
        with open(filepath, "wb") as f:
            f.write(html.encode("utf8"))

        # Keep track of slowest pages
        if len(largest) < longest or tt > largest[0][0]:
            if len(largest) >= longest:
                largest.pop(0)
            bisect.insort(largest, (tt, path))
    totaltime = util.perf_counter() - t

    logger.info("Generated %s files in %s secs", count, totaltime)
    if count:
        # Guard against ZeroDivisionError when no pages matched the prefix.
        logger.info("Average %s sec per page", totaltime / count)
    # The original format string had a %s placeholder with no argument,
    # which raises a logging formatting error.
    logger.info("Top %s longest times:", len(largest))
    for gentime, path in largest:
        logger.info("%s | %03.04f secs ", path, gentime)