def test_json_pretty():
    """Check json_pretty for both an empty dict and a nested sample dict."""
    sample = {
        "foo": True,
        "bar": 42,
        "links": ["https://github.com", "http://localhost"]
    }
    expected = (
        '\n\n{\n    "bar":       42,\n\n    "foo":       true,\n\n'
        '    "links": [\n        "https://github.com",\n\n'
        '        "http://localhost"\n    ]\n}\n'
    )
    assert json_pretty({}) == "{}"
    assert json_pretty(sample) == expected
 def test_json_pretty(self):
     """Verify json_pretty output for an empty and a populated mapping."""
     sample = {
         "foo": True,
         "bar": 42,
         "links": ["https://github.com", "http://localhost"]
     }
     expected = (
         '\n\n{\n    "bar":       42,\n\n    "foo":       true,\n\n'
         '    "links": [\n        "https://github.com",\n\n'
         '        "http://localhost"\n    ]\n}\n'
     )
     self.assertEqual(json_pretty({}), "{}")
     self.assertEqual(json_pretty(sample), expected)
def process_single_python_file(python_filepath: str):
    """Generate every documentation output for one Python source file.

    Extracts metadata from *python_filepath* and writes JSON, plain HTML5,
    Markdown, RST, TXT and (optionally) ODT/XML documents under the
    ``doc/`` tree next to ``args.fullpath``, then runs any plugins found
    in ``doc/plugins``.

    Relies on the module-level globals ``args`` and ``log``, plus the
    template constants (``HTML_PLAIN``, ``MD``, ``TXT``, ``ODT``, ``XML``).

    :param python_filepath: path to a readable ``.py``/``.pyw`` file.
    """
    log.info("Processing Python file: {0}".format(python_filepath))
    if not (os.path.isfile(python_filepath) and
            os.access(python_filepath, os.R_OK)):
        # Bug fix: the original only bound json_meta inside this guard but
        # used it unconditionally below, raising NameError on unreadable
        # input. Bail out early instead.
        log.warning("Skipping unreadable file: {0}".format(python_filepath))
        return
    json_meta = python_file_to_json_meta(python_filepath)
    base_dir = os.path.dirname(args.fullpath)
    base_name = os.path.basename(python_filepath)

    def _write(subdir, extension, content, description):
        """Write *content* to doc/<subdir>/<base_name><extension>."""
        out_file = os.path.join(base_dir, "doc", subdir,
                                base_name + extension)
        log.debug("OUTPUT: Writing {0} {1}.".format(description, out_file))
        with open(out_file, "w", encoding="utf-8") as output_file:
            output_file.write(content)

    _write("json", ".json", json_pretty(json_meta), "MetaData JSON file")
    html = json_meta_to_template(json_meta, HTML_PLAIN, bool(not pygments))
    _write("html", ".plain.html", html, "Plain Flat HTML5 Doc")
    markdown = json_meta_to_template(json_meta, MD, False)
    _write("md", ".md", markdown, "MD Documentation")
    # The RST output deliberately reuses the Markdown rendering, exactly as
    # the original `md = rst = ...` assignment did.
    _write("rst", ".rst", markdown, "RST Documentation")
    _write("txt", ".txt", json_meta_to_template(json_meta, TXT),
           "TXT Documentation")
    if args.odt:
        log.warning("ODT Support is Pre-Alpha, since Design is unfinished.")
        _write("odt", ".fodt", json_meta_to_template(json_meta, ODT, False),
               "ODT Documentation")
    if args.xml:
        _write("xml", ".xml", json_meta_to_template(json_meta, XML),
               "XML Documentation")
    plugin_dir = os.path.join(base_dir, "doc", "plugins")
    log.debug("Checking for Plugins and Running from {0}.".format(plugin_dir))
    json_meta_to_plugins(plugin_dir, python_filepath, json_meta)
# Example #4
# Deliberately trigger a ZeroDivisionError to demonstrate log_exception().
# The original snippet was garbled (a stray "0" where the `try:` line
# belonged), which is a SyntaxError: `except` without a matching `try`.
try:
    0 / 0
except Exception:
    log_exception()

print("Running anglerfish.bytes2human()")
# Same five demo conversions as before, driven from a table of cases.
for byte_count, unit in ((3284902384, "g"), (0, "m"), (6666, "k"),
                         (-6666, "k"), (1024, "k")):
    print(bytes2human(byte_count, unit))

print("Running anglerfish.json_pretty()")
demo_data = {
    "foo": True,
    "bar": 42,
    "links": ["https://github.com", "http://localhost"]
}
print(json_pretty(demo_data))
print(json_pretty({}))

print("Running anglerfish.multiprocessed()")

def process_job(job):
    """Toy worker used only for testing: wait a second, spin, echo *job*."""
    time.sleep(1)
    for _ in range(100):  # busy-work standing in for real processing
        pass
    return job

def main():
    """Main Loop. https://www.youtube.com/watch?v=552EBvrmeak

    Parses CLI arguments, documents a single Python file or a whole folder
    (the latter via a multiprocessing pool), then optionally zips the HTML
    output, builds an EPUB, writes a JSON/HTML index, and serves the docs
    over HTTP. Operates entirely on module-level globals (``args``,
    ``log``) and project helpers.
    """
    make_arguments_parser()
    global log
    log = make_logger("dookumentation")
    check_encoding()  # AutoMagically Check Encodings/root
    set_process_name("dookumentation")  # set Name
    set_single_instance("dookumentation")  # Auto set Single Instance
    set_terminal_title("dookumentation")
    # Plain if/else instead of conditional expressions used as statements.
    if args.quiet:
        # NOTE(review): assumes the logger object exposes disable()/CRITICAL
        # (logging.disable-style) — confirm against make_logger().
        log.disable(log.CRITICAL)
    else:
        log.debug("Max Logging ON.")
    if args.beep:
        atexit.register(beep)
    else:
        log.debug("Beep sound at exit OFF")
    _info = " ".join((__doc__, __version__, __url__, "by " + __author__))
    log.info(_info)
    log.debug((platform(), python_version(), str(os.environ), str(args)))
    check_folder(os.path.dirname(args.fullpath))
    set_folder_structure(os.path.dirname(args.fullpath))
    if args.before and getoutput:
        log.info(getoutput(str(args.before)))
    files_exts, list_of_files = (".py", ".pyw"), str(args.fullpath)
    if os.path.isfile(args.fullpath) and args.fullpath.endswith(files_exts):
        log.info("Target is single a *.PY or *.PYW Python Source Code File.")
        process_single_python_file(args.fullpath)
    elif os.path.isdir(args.fullpath):
        log.info("Target is Folder with *.PY & *.PYW Python Source Code Files")
        log.warning("Processing a whole Folder may take some time...")
        # tuple("") is () — no skips by default.
        list_of_files = walk2list(args.fullpath, files_exts,
                                  tuple(args.skip if args.skip else "",))
        pool = Pool(cpu_count())  # Multiprocessing Async
        pool.map_async(process_multiple_files, list_of_files)
        pool.close()
        pool.join()
    else:
        sys.exit("File or folder not found, or cant be read, or I/O Error !.")
    html_folder = os.path.join(os.path.dirname(args.fullpath), "doc", "html")
    if args.zip and make_archive and os.path.isdir(html_folder):  # HTML to ZIP
        log.debug("OUTPUT: Writing ZIP Documentation {0}.".format(html_folder))
        try:
            # Remove any stale archive first; it may simply not exist yet.
            os.remove(os.path.join(html_folder, "dookumentation.zip"))
        except OSError:  # was a bare except; os.remove raises OSError family
            pass
        make_archive(html_folder, 'zip', html_folder, logger=log)
        _c = "{0}. Documentation of Python source code. Creation: ~{1}".format(
            _info, datetime.now().isoformat()[:-7])
        if os.path.isfile(html_folder + '.zip'):
            set_zip_comment(html_folder + '.zip', _c)
            move(html_folder + '.zip',
                 os.path.join(html_folder, "dookumentation.zip"))
    if args.ebook and os.path.isdir(html_folder):  # HTML to eBook
        log.debug("OUTPUT: Writing EPUB Documentation {0}".format(html_folder))
        htm = walk2list(html_folder, (".html", ".htm", ".css"), IGNORE)
        htm = [_ for _ in htm if "doc/html/bower_components/" not in _.lower()]
        fyle = os.path.join(html_folder, "dookumentation.epub")
        html2ebook(htm, fyle, {"des": __doc__ + __url__})
    # Aggregate every per-file JSON into one index.json.
    json_meta = {}
    json_folder = os.path.join(os.path.dirname(args.fullpath), "doc", "json")
    for jotason in walk2list(json_folder, (".json", ), ("index.json",)):
        log.debug("INPUT: Reading JSON file {0}.".format(jotason))
        with open(jotason, "r", encoding="utf-8") as jaison_file:
            json_meta[jotason.replace(".json", "")] = loads(jaison_file.read())
    new_json_file = os.path.join(json_folder, "index.json")
    log.debug("OUTPUT: Writing JSON Index file {0}.".format(new_json_file))
    with open(new_json_file, "w", encoding="utf-8") as json_file:
        json_file.write(json_pretty(json_meta))
    html = json_meta_to_template(json_meta, HTML_PLUS, False)
    new_html_dir = os.path.join(os.path.dirname(args.fullpath), "doc", "html")
    new_html_file = os.path.join(new_html_dir, "index.html")
    log.debug("OUTPUT: Writing HTML5 Polymer Docs {0}.".format(new_html_file))
    with open(new_html_file, "w", encoding="utf-8") as html_file:
        html_file.write(html)
    unpack_archive(ZIPY, new_html_dir, "zip")  # Extract bower_components.zip
    if args.after and getoutput:
        log.info(getoutput(str(args.after)))
    if args.serve and os.path.isdir(html_folder):  # HTML to HTTP LiveReload
        log.debug("Serving HTML Docs via HTTP server.")
        serve_http(html_folder)
    log.info('\n {0} \n Files Processed: {1}.'.format('-' * 80, list_of_files))
    _l = int(len(list_of_files) if isinstance(list_of_files, tuple) else 1)
    _m = '{0} source code files Documented!.'.format(_l)
    if args.notify:
        make_notification("Dookumentation", _m)
    else:
        log.info(_m)
    set_terminal_title()
    make_post_exec_msg(start_time, """Upload all versioned Docs online for Free
    - Read How: https://pages.github.com/#vanilla-step-1
    - TL;DR:
    0) Create and jump to a new Git branch named 'gh-pages' from master branch.
    1) Move all '{0}' to root dir of git repo, commit and push all files.
    2) 'index.html' and '/bower_components/' must be on root dir of git repo.
    3) Go https://username.github.io/repository/ Dookumentation should be there
    """.format(new_html_dir))