Example #1
def export_notebooks():
    import os
    import dharpa_toolbox as dt
    from nbdev.export import notebook2script

    # nbdev resolves notebook and library paths relative to the project
    # root, so change into the installed package's directory first
    project_root = os.path.dirname(dt.__file__)
    os.chdir(project_root)
    notebook2script()  # export all notebooks to the library's .py modules
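A usage sketch under the same assumptions (nbdev v1, where `notebook2script` lives in `nbdev.export`): passing a filename exports just that one notebook instead of the whole project. The notebook name below is hypothetical.

from nbdev.export import notebook2script

# Export a single notebook; with no argument, every notebook listed by
# the project's settings.ini is exported.
notebook2script('00_core.ipynb')  # hypothetical notebook name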
Example #2
def create_scripts(nb_name=None, max_elapsed=60, wait=2):
    import time
    from nbdev.export import notebook2script

    # save_nb, all_last_saved, py_last_saved and beep are helpers
    # defined elsewhere in this snippet's module
    if nb_name is not None: wait = 0
    try:
        save_nb(nb_name)
    except Exception:
        save_nb(wait=wait)
    time.sleep(0.5)  # give the front end a moment to finish writing the file
    notebook2script(nb_name)  # export one notebook, or all if nb_name is None
    if nb_name is None: output = all_last_saved(max_elapsed=max_elapsed)
    else: output = py_last_saved(nb_name=nb_name, max_elapsed=max_elapsed)
    beep(output)  # audible pass/fail signal
    return output
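The `save_nb`, `all_last_saved`, `py_last_saved` and `beep` helpers come from the snippet's own module and are not shown. A minimal sketch of what a `save_nb`-style helper could do in the classic Jupyter front end (an assumption for illustration, not the real helper):

import time
from IPython.display import Javascript, display

def save_nb_sketch(wait=2):
    # Ask the classic-notebook front end to write a checkpoint of the
    # current notebook, then give it a moment to finish saving to disk.
    display(Javascript('IPython.notebook.save_checkpoint();'))
    time.sleep(wait)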
Example #3
            res = func()  # ranks other than 0 call `func` only after rank 0 has finished
    return res


# **`rank0_first(f)`** calls `f()` in the rank-0 process first, then in parallel on the rest of the ranks, in distributed training mode. In single-process, non-distributed training mode, `f()` is called only once, as expected.
#
# One application of `rank0_first()` is to make fresh downloads via `untar_data()` safe in distributed training scripts launched by `python -m fastai.launch <script>`:
#
# > <code>path = untar_data(URLs.IMDB)</code>
#
# becomes:
#
# > <code>path = <b>rank0_first(lambda:</b> untar_data(URLs.IMDB))</code>
#
#
# Some learner factory methods may use `untar_data()` to **download pretrained models** by default:
#
# > <code>learn = text_classifier_learner(dls, AWD_LSTM, drop_mult=0.5, metrics=accuracy)</code>
#
# becomes:
#
# > <code>learn = <b>rank0_first(lambda:</b> text_classifier_learner(dls, AWD_LSTM, drop_mult=0.5, metrics=accuracy))</code>
#
# Otherwise, multiple processes will download at the same time and corrupt the data.
#
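# A minimal sketch of the `rank0_first` idea using plain `torch.distributed`
# primitives (an illustration only; fastai's actual implementation also sets up
# its own distributed context around the calls):

import torch.distributed as dist

def rank0_first_sketch(f):
    rank = dist.get_rank() if dist.is_initialized() else 0
    if rank == 0:
        res = f()          # rank 0 runs first, e.g. performing the download
    if dist.is_initialized():
        dist.barrier()     # the other ranks wait until rank 0 has finished
    if rank != 0:
        res = f()          # remaining ranks run once the data is already cached
    return res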

# ## Export -

# hide
from nbdev.export import notebook2script
notebook2script()
Example #4
def create_scripts(max_elapsed=60):
    from nbdev.export import notebook2script

    # save_nb and last_saved are helpers defined elsewhere in this module
    save_nb()                       # save the current notebook first
    notebook2script()               # export all notebooks to .py modules
    return last_saved(max_elapsed)  # confirm the exported files are fresh
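A hedged sketch of what a `last_saved`-style freshness check might do (the helper's name and behavior here are assumptions; the real one lives in the snippet's module): it confirms the exported `.py` files were modified within the last `max_elapsed` seconds.

import os, time
from pathlib import Path

def last_saved_sketch(max_elapsed=60, lib_dir='my_lib'):  # 'my_lib' is a placeholder
    now = time.time()
    stale = [p.name for p in Path(lib_dir).glob('*.py')
             if now - os.path.getmtime(p) > max_elapsed]
    return not stale  # True when every module was regenerated recently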
Example #5
def main():
    import sys
    from nbdev.export import notebook2script

    notebook2script()  # export all notebooks to .py modules
    sys.exit(0)        # exit cleanly once the export finishes
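A usage sketch, assuming this `main` is wired up as a script or console entry point:

if __name__ == '__main__':
    main()  # export all notebooks, then exit with status 0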