Example #1
def main():
    """Main Loop."""
    args = make_arguments_parser()
    log.disable(log.CRITICAL) if args.quiet else log.debug("Max Logging ON")
    log.info(__doc__ + __version__)
    check_folder(os.path.dirname(args.fullpath))
    atexit.register(beep) if args.beep else log.debug("Beep sound at exit OFF")
    if os.path.isfile(args.fullpath) and args.fullpath.endswith(".css"):
        log.info("Target is a CSS File.")  # Work based on if argument is
        list_of_files = str(args.fullpath)  # file or folder, folder is slower.
        process_single_css_file(
            args.fullpath, wrap=args.wrap, timestamp=args.timestamp,
            comments=args.comments, sort=args.sort, overwrite=args.overwrite,
            zipy=args.zipy, prefix=args.prefix, add_hash=args.hash)
    elif os.path.isfile(args.fullpath) and args.fullpath.endswith(
            ".html" if args.overwrite else ".htm"):
        log.info("Target is HTML File.")
        list_of_files = str(args.fullpath)
        process_single_html_file(
            args.fullpath, comments=args.comments,
            overwrite=args.overwrite, prefix=args.prefix, add_hash=args.hash)
    elif os.path.isfile(args.fullpath) and args.fullpath.endswith(".js"):
        log.info("Target is a JS File.")
        list_of_files = str(args.fullpath)
        process_single_js_file(
            args.fullpath, timestamp=args.timestamp,
            overwrite=args.overwrite, zipy=args.zipy)
    elif os.path.isdir(args.fullpath):
        log.info("Target is a Folder with CSS, HTML, JS files !.")
        log.warning("Processing a whole Folder may take some time...")
        list_of_files = walk2list(
            args.fullpath,
            (".css", ".js", ".html" if args.overwrite else ".htm"),
            (".min.css", ".min.js", ".htm" if args.overwrite else ".html"))
        log.info('Total Maximum CPUs used: ~{0} Cores.'.format(cpu_count()))
        pool = Pool(cpu_count())  # Multiprocessing Async
        pool.map_async(partial(
                process_multiple_files, watch=args.watch,
                wrap=args.wrap, timestamp=args.timestamp,
                comments=args.comments, sort=args.sort,
                overwrite=args.overwrite, zipy=args.zipy,
                prefix=args.prefix, add_hash=args.hash),
            list_of_files)
        pool.close()
        pool.join()
    else:
        log.critical("File or folder not found,or cant be read,or I/O Error.")
        sys.exit(1)
    if args.after:
        out = Popen(str(args.after), stdout=PIPE, stderr=STDOUT).communicate()
        log.info(str(out))
    log.info('\n {0} \n Files Processed: {1}.'.format('-' * 80, list_of_files))
    log.info('Number of Files Processed: {0}.'.format(
        len(list_of_files) if isinstance(list_of_files, tuple) else 1))
    set_terminal_title()
    make_post_exec_msg(start_time)
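
The folder branch above fans the work out across CPU cores with multiprocessing.Pool.map_async plus functools.partial, which freezes the keyword options so each worker only receives the varying file path. A minimal standard-library sketch of that pattern (process_files_stub and its two keyword arguments are stand-ins for the snippet's own process_multiple_files helper):

from functools import partial
from multiprocessing import Pool, cpu_count


def process_files_stub(path, overwrite=False, zipy=False):
    """Stand-in worker; the real helper takes many more keyword arguments."""
    return "{0} (overwrite={1}, zipy={2})".format(path, overwrite, zipy)


if __name__ == "__main__":
    list_of_files = ("a.css", "b.js", "c.htm")  # illustrative file list
    pool = Pool(cpu_count())                    # one worker per CPU core
    result = pool.map_async(                    # non-blocking fan-out
        partial(process_files_stub, overwrite=True, zipy=False), list_of_files)
    pool.close()                                # no more tasks accepted
    pool.join()                                 # wait for every worker
    print(result.get())                         # collected return values

In the snippet itself the AsyncResult returned by map_async is discarded; the sketch keeps it only to show the gathered results.
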
Example #2
def main():
    """Main Loop."""
    make_arguments_parser()
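    # NOTE: make_arguments_parser() presumably sets a module-global 'args'
    # that the rest of this function reads (an assumption about the snippet).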
    global log
    log = make_logger("css-html-prettify")  # AutoMagically make a Logger Log
    check_encoding()  # AutoMagically Check Encodings/root
    set_process_name("css-html-prettify")  # set Name
    set_single_instance("css-html-prettify")  # Auto set Single Instance
    set_terminal_title("css-html-prettify")
    log.disable(log.CRITICAL) if args.quiet else log.debug("Max Logging ON.")
    log.info(__doc__ + __version__)
    check_folder(os.path.dirname(args.fullpath))
    atexit.register(beep) if args.beep else log.debug("Beep sound at exit OFF")
    if args.before and getoutput:
        log.info(getoutput(str(args.before)))
    if os.path.isfile(args.fullpath) and args.fullpath.endswith(
        (".css", ".scss")):  # Work based on if argument is file or folder.
        log.info("Target is a CSS / SCSS File.")
        list_of_files = str(args.fullpath)
        process_single_css_file(args.fullpath)
    elif os.path.isfile(args.fullpath) and args.fullpath.endswith(
        (".htm", ".html")):
        log.info("Target is a HTML File.")
        list_of_files = str(args.fullpath)
        process_single_html_file(args.fullpath)
    elif os.path.isdir(args.fullpath):
        log.info("Target is a Folder with CSS / SCSS, HTML, JS.")
        log.warning("Processing a whole Folder may take some time...")
        list_of_files = walk2list(args.fullpath,
                                  (".css", ".scss", ".html", ".htm"),
                                  ".min.css")
        pool = Pool(cpu_count())  # Multiprocessing Async
        pool.map_async(process_multiple_files, list_of_files)
        pool.close()
        pool.join()
    else:
        log.critical("File or folder not found,or cant be read,or I/O Error.")
        sys.exit(1)
    if args.after and getoutput:
        log.info(getoutput(str(args.after)))
    log.info('\n {0} \n Files Processed: {1}.'.format('-' * 80, list_of_files))
    log.info('Number of Files Processed: {0}'.format(
        len(list_of_files) if isinstance(list_of_files, tuple) else 1))
    set_terminal_title()
    make_post_exec_msg(start_time, "css-html-prettify")
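
The --before and --after hooks in this example go through subprocess.getoutput, which runs a command in the shell and returns its combined stdout and stderr as one string. A tiny sketch of that hook pattern (the command string is only an illustrative stand-in for args.before / args.after):

from subprocess import getoutput

hook_command = "echo Running a user-supplied hook"  # stand-in for args.after
output = getoutput(hook_command)  # combined stdout + stderr as a single str
print(output)

Because the string is handed to the shell as-is, such hooks should only come from trusted configuration.
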
Example #3
@threads(4)
def process_job():  # a simple function for testing only
    return time.sleep(1)


process_job()

print("Running anglerfish.ChainableFuture.then()")
future1 = ChainableFuture()
future2 = future1.then(lambda arg: arg + ' using ChainableFuture.then() !!!.')
future1.set_result('This is an anglerfish.ChainableFuture demo')
print(future1.result())  # Future 1 is Chained to Future 2.
print(future2.result())
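
# For comparison only: a rough standard-library equivalent of the .then()
# chaining above, wiring two concurrent.futures.Future objects together by
# hand with add_done_callback (this is not part of the anglerfish API).
from concurrent.futures import Future
plain_future1 = Future()
plain_future2 = Future()
plain_future1.add_done_callback(
    lambda done: plain_future2.set_result(done.result() + " ...chained."))
plain_future1.set_result("This is a plain concurrent.futures demo")
print(plain_future1.result())  # the callback already filled plain_future2
print(plain_future2.result())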

print("Running anglerfish.walkdir2filelist")
print(walk2list(".", ".py", ".pyc"))
print(walk2list(".", ".py", ".pyc", tuply=False))
print(walk2list(".", ".py", ".pyc", namedtuple="my_folder"))

print("Running anglerfish.seconds2human()")
print(seconds2human(0))
print(seconds2human(42))
print(seconds2human(-666))
print(seconds2human(83490890))
__unit_words = {
    "y": " Anios ",
    "d": " Dias ",
    "h": " Horas ",
    "m": " Minutos ",
    "s": " Segundos "
}


def main():
    """Main Loop. https://www.youtube.com/watch?v=552EBvrmeak """
    make_arguments_parser()
    global log
    log = make_logger("dookumentation")
    check_encoding()  # AutoMagically Check Encodings/root
    set_process_name("dookumentation")  # set Name
    set_single_instance("dookumentation")  # Auto set Single Instance
    set_terminal_title("dookumentation")
    log.disable(log.CRITICAL) if args.quiet else log.debug("Max Logging ON.")
    atexit.register(beep) if args.beep else log.debug("Beep sound at exit OFF")
    _info = " ".join((__doc__, __version__, __url__, "by " + __author__))
    log.info(_info)
    log.debug((platform(), python_version(), str(os.environ), str(args)))
    check_folder(os.path.dirname(args.fullpath))
    set_folder_structure(os.path.dirname(args.fullpath))
    if args.before and getoutput:
        log.info(getoutput(str(args.before)))
    files_exts, list_of_files = (".py", ".pyw"), str(args.fullpath)
    if os.path.isfile(args.fullpath) and args.fullpath.endswith(files_exts):
        log.info("Target is single a *.PY or *.PYW Python Source Code File.")
        process_single_python_file(args.fullpath)
    elif os.path.isdir(args.fullpath):
        log.info("Target is Folder with *.PY & *.PYW Python Source Code Files")
        log.warning("Processing a whole Folder may take some time...")
        list_of_files = walk2list(args.fullpath, files_exts,
                                  tuple(args.skip if args.skip else "",))
        pool = Pool(cpu_count())  # Multiprocessing Async
        pool.map_async(process_multiple_files, list_of_files)
        pool.close()
        pool.join()
    else:
        sys.exit("File or folder not found, or cant be read, or I/O Error !.")
    html_folder = os.path.join(os.path.dirname(args.fullpath), "doc", "html")
    if args.zip and make_archive and os.path.isdir(html_folder):  # HTML to ZIP
        log.debug("OUTPUT: Writing ZIP Documentation {0}.".format(html_folder))
        try:
            os.remove(os.path.join(html_folder, "dookumentation.zip"))
        except OSError:
            pass  # no previous ZIP to remove
        make_archive(html_folder, 'zip', html_folder, logger=log)
        _c = "{0}. Documentation of Python source code. Creation: ~{1}".format(
            _info, datetime.now().isoformat()[:-7])
        if os.path.isfile(html_folder + '.zip'):
            set_zip_comment(html_folder + '.zip', _c)
            move(html_folder + '.zip',
                 os.path.join(html_folder, "dookumentation.zip"))
    if args.ebook and os.path.isdir(html_folder):  # HTML to eBook
        log.debug("OUTPUT: Writing EPUB Documentation {0}".format(html_folder))
        htm = walk2list(html_folder, (".html", ".htm", ".css"), IGNORE)
        htm = [_ for _ in htm if "doc/html/bower_components/" not in _.lower()]
        fyle = os.path.join(html_folder, "dookumentation.epub")
        html2ebook(htm, fyle, {"des": __doc__ + __url__})
    json_meta = {}
    json_folder = os.path.join(os.path.dirname(args.fullpath), "doc", "json")
    for jotason in walk2list(json_folder, (".json", ), ("index.json",)):
        log.debug("INPUT: Reading JSON file {0}.".format(jotason))
        with open(jotason, "r", encoding="utf-8") as jaison_file:
            json_meta[jotason.replace(".json", "")] = loads(jaison_file.read())
    new_json_file = os.path.join(json_folder, "index.json")
    log.debug("OUTPUT: Writing JSON Index file {0}.".format(new_json_file))
    with open(new_json_file, "w", encoding="utf-8") as json_file:
        json_file.write(json_pretty(json_meta))
    html = json_meta_to_template(json_meta, HTML_PLUS, False)
    new_html_dir = os.path.join(os.path.dirname(args.fullpath), "doc", "html")
    new_html_file = os.path.join(new_html_dir, "index.html")
    log.debug("OUTPUT: Writing HTML5 Polymer Docs {0}.".format(new_html_file))
    with open(new_html_file, "w", encoding="utf-8") as html_file:
        html_file.write(html)
    unpack_archive(ZIPY, new_html_dir, "zip")  # Extract bower_components.zip
    if args.after and getoutput:
        log.info(getoutput(str(args.after)))
    if args.serve and os.path.isdir(html_folder):  # HTML to HTTP LiveReload
        log.debug("Serving HTML Docs via HTTP server.")
        serve_http(html_folder)
    log.info('\n {0} \n Files Processed: {1}.'.format('-' * 80, list_of_files))
    _l = int(len(list_of_files) if isinstance(list_of_files, tuple) else 1)
    _m = '{0} source code files Documented!'.format(_l)
    make_notification("Dookumentation", _m) if args.notify else log.info(_m)
    set_terminal_title()
    make_post_exec_msg(start_time, """Upload all versioned Docs online for Free
    - Read How: https://pages.github.com/#vanilla-step-1
    - TL;DR:
    0) Create and jump to a new Git branch named 'gh-pages' from master branch.
    1) Move all '{0}' to root dir of git repo, commit and push all files.
    2) 'index.html' and '/bower_components/' must be on root dir of git repo.
    3) Go to https://username.github.io/repository/ and Dookumentation should be there.
    """.format(new_html_dir))