Example #1
    FILE    The notebook file to run.
"""

from docopt import docopt
import os
from pathlib import Path
import json

import nbformat
from nbconvert.exporters import HTMLExporter

import papermill as pm

from bookgender.logutils import start_script

_log = start_script(__file__)

args = docopt(__doc__)
nbfile = Path(args['FILE'])
tmp_nbf = nbfile.with_suffix('.temp.ipynb')

try:
    pfile = args['--param-json']
    if pfile:
        # load notebook parameters from the JSON file, if one was given
        with open(pfile, 'r') as pf:
            params = json.load(pf)
    else:
        params = {}

    _log.info('executing notebook')
    # papermill injects the parameters and writes the executed copy to the temp file
    pm.execute_notebook(os.fspath(nbfile), os.fspath(tmp_nbf), params)
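
The snippet cuts off inside the try block, but the otherwise-unused nbformat and HTMLExporter imports suggest the script goes on to render the executed notebook as HTML. A minimal sketch of that continuation, reusing the names defined above (the .html output path and the temp-file cleanup are assumptions, not the repository's code):

    # render the executed temp notebook to HTML with nbconvert
    nb = nbformat.read(os.fspath(tmp_nbf), as_version=4)
    body, _resources = HTMLExporter().from_notebook_node(nb)
    nbfile.with_suffix('.html').write_text(body, encoding='utf-8')
    tmp_nbf.unlink()  # drop the temporary executed notebook
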
Example #2
def inspect(opts):
    _log.info('inspecting file %s', opts.path)
    stat = opts.path.stat()
    _log.info('file size: %s (%s)', stat.st_size, binarysize(stat.st_size))

    timer = Stopwatch()
    with opts.path.open('rb') as f:
        model = pickle.load(f)
    timer.stop()
    gc.collect()  # collect garbage so the memory figures reflect the loaded model
    res = resource.getrusage(resource.RUSAGE_SELF)
    _log.info('loaded model in %s', timer)
    # ru_maxrss is reported in kilobytes on Linux, so convert to bytes
    _log.info('max RSS %s', binarysize(res.ru_maxrss * 1024))

    bufs = PBJar()  # collects out-of-band buffers via the protocol-5 buffer_callback
    timer = Stopwatch()
    p_bytes = pickle5.dumps(model, protocol=5, buffer_callback=bufs)
    timer.stop()
    bsize = bufs.total_size()
    _log.info('pickled to %d bytes in %s', len(p_bytes), timer)
    _log.info('with %d bytes of buffers', bsize)
    _log.info('total size: %s', binarysize(len(p_bytes) + bsize))
    _log.info('compresses to: %s', binarysize(len(p_bytes) + bufs.encoded_size()))


if __name__ == '__main__':
    opts = InspectOpts(__doc__)
    _log = start_script(__file__, opts.verbose)
    inspect(opts)
else:
    _log = logging.getLogger(__name__)
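
PBJar, Stopwatch, binarysize, and InspectOpts are helpers from the bookgender repository and are not shown in this snippet. For reference, a buffer-collecting callback for protocol-5 pickling can be as small as the sketch below; the class name BufferJar and the zlib-based compression estimate are illustrative assumptions, not the project's implementation (pickle5 backports this API to Python versions before 3.8; on 3.8+ the standard pickle module behaves the same way):

import pickle
import zlib

class BufferJar:
    """Collect the out-of-band PickleBuffer objects produced by protocol-5 pickling."""
    def __init__(self):
        self.buffers = []

    def __call__(self, buf):
        # pickle invokes the buffer_callback once per out-of-band buffer
        self.buffers.append(buf)

    def total_size(self):
        # raw() exposes each buffer as a contiguous memoryview
        return sum(b.raw().nbytes for b in self.buffers)

    def encoded_size(self):
        # rough compressed-size estimate (the real PBJar may use a different codec)
        return sum(len(zlib.compress(b.raw())) for b in self.buffers)

# usage: route out-of-band buffers to the jar while pickling
jar = BufferJar()
data = pickle.dumps(pickle.PickleBuffer(b'example payload'), protocol=5, buffer_callback=jar)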