raise IOError(err.message % fname) write_file(rst_file, write_filename) hash_funcs.update_hash_dict(filehash, fname) if __name__ == "__main__": sys.path.insert(0, example_dir) from run_all import filelist sys.path.remove(example_dir) if not os.path.exists(docs_rst_dir): os.makedirs(docs_rst_dir) if len(sys.argv) > 1: # given a file,files to process, no help flag yet for example_file in sys.argv[1:]: restify(example_file) else: # process the whole directory for root, dirnames, filenames in os.walk(example_dir): for example in filenames: example_file = os.path.join(root, example) whole_file = open(example_file, 'r').read() to_write, filehash = hash_funcs.check_hash(whole_file, example) if not to_write: print "Hash has not changed for file %s" % example continue elif (not example.endswith('.py') or example in exclude_list or not check_script(example_file)): continue restify(whole_file, filehash, example)
if __name__ == "__main__":
    # Render one .rst page per dataset from the metadata constants each
    # dataset module exposes, skipping datasets whose rendered docstring
    # hash is unchanged.
    if not os.path.exists(dest_dir):
        os.makedirs(dest_dir)
    for name, mod in datasets.items():
        target_pth = join(dest_dir, name + '.rst')
        title = mod.TITLE
        rendered = doc_template.substitute(TITLE=title,
                                           title_='=' * len(title),
                                           DESCRIPTION=mod.DESCRLONG,
                                           NOTES=mod.NOTE,
                                           SOURCE=mod.SOURCE,
                                           COPYRIGHT=mod.COPYRIGHT)
        # check_hash wants bytes, hence the encode() on both arguments
        to_write, filehash = hash_funcs.check_hash(rendered.encode(),
                                                   mod.__name__.encode())
        if not to_write:
            print("Hash has not changed for docstring of dataset "
                  "{}".format(name))
            continue
        with open(os.path.realpath(target_pth), 'w') as rst_file:
            rst_file.write(rendered)
        if filehash is not None:
            hash_funcs.update_hash_dict(filehash, mod.__name__)
if __name__ == "__main__":
    # Build .rst pages from the example scripts, skipping files whose
    # content hash has not changed since the last run.
    # Make the examples dir importable just long enough to get run_all's
    # file list.
    sys.path.insert(0, example_dir)
    from run_all import filelist
    sys.path.remove(example_dir)
    if not os.path.exists(docs_rst_dir):
        os.makedirs(docs_rst_dir)
    if len(sys.argv) > 1:
        # given a file,files to process, no help flag yet
        for example_file in sys.argv[1:]:
            # FIX: close the handle deterministically instead of leaking
            # it until GC.  None for the hash forces a rewrite for
            # explicitly requested files.
            with open(example_file, 'r') as fh:
                whole_file = fh.read()
            restify(whole_file, None, example_file)
    else:
        # process the whole directory
        for root, dirnames, filenames in os.walk(example_dir):
            # notebooks are handled by a separate converter script
            if 'notebooks' in root:
                continue
            for example in filenames:
                example_file = os.path.join(root, example)
                with open(example_file, 'r') as fh:
                    whole_file = fh.read()
                to_write, filehash = hash_funcs.check_hash(whole_file,
                                                           example)
                if not to_write:
                    # FIX: Python 2 print statement -> print() call,
                    # valid under both 2 and 3 and consistent with the
                    # py3 chunks in this file.
                    print("Hash has not changed for file %s" % example)
                    continue
                elif (not example.endswith('.py') or example in exclude_list
                        or not check_script(example_file)):
                    continue
                restify(whole_file, filehash, example)
'docs/source/examples/notebooks/generated/') if not os.path.exists(rst_target_dir): os.makedirs(rst_target_dir) parser = _get_parser() arg_ns, other_args = parser.parse_known_args() os.chdir(arg_ns.path) # so we execute in notebook dir notebook_runner = NotebookRunner(arg_ns.path, other_args, arg_ns.profile, arg_ns.timeout) try: for fname, nb in notebook_runner: base, ext = os.path.splitext(fname) fname_only = os.path.basename(base) # check if we need to write towrite, filehash = hash_funcs.check_hash(open(fname, "r").read(), fname_only) if not towrite: print "Hash has not changed for file %s" % fname_only continue print "Writing ", fname_only # This edits the notebook cells inplace notebook_runner(nb) # for debugging writes ipynb file with output #new_ipynb = "%s_generated%s" % (base, ".ipynb") #with io.open(new_ipynb, "w", encoding="utf-8") as f: # write(nb, f, "json") # use nbconvert to convert to rst support_file_dir = os.path.join(rst_target_dir, fname_only+"_files")
'docs/source/examples/notebooks/generated/') if not os.path.exists(rst_target_dir): os.makedirs(rst_target_dir) parser = _get_parser() arg_ns, other_args = parser.parse_known_args() os.chdir(arg_ns.path) # so we execute in notebook dir notebook_runner = NotebookRunner(arg_ns.path, other_args, arg_ns.profile, arg_ns.timeout) try: for fname, nb in notebook_runner: base, ext = os.path.splitext(fname) fname_only = os.path.basename(base) # check if we need to write towrite, filehash = hash_funcs.check_hash( open(fname, "r").read(), fname_only) if not towrite: print "Hash has not changed for file %s" % fname_only continue print "Writing ", fname_only # This edits the notebook cells inplace notebook_runner(nb) # for debugging writes ipynb file with output #new_ipynb = "%s_generated%s" % (base, ".ipynb") #with io.open(new_ipynb, "w", encoding="utf-8") as f: # write(nb, f, "json") # use nbconvert to convert to rst support_file_dir = os.path.join(rst_target_dir, fname_only + "_files")
if __name__ == "__main__":
    # Render one .rst page per dataset from the metadata constants each
    # dataset module exposes, skipping datasets whose rendered docstring
    # hash is unchanged.
    if not os.path.exists(dest_dir):
        os.makedirs(dest_dir)
    for dataset in datasets:
        write_pth = join(dest_dir, dataset + '.rst')
        data_mod = datasets[dataset]
        title = getattr(data_mod, 'TITLE')
        descr = getattr(data_mod, 'DESCRLONG')
        copyr = getattr(data_mod, 'COPYRIGHT')
        notes = getattr(data_mod, 'NOTE')
        source = getattr(data_mod, 'SOURCE')
        write_file = doc_template.substitute(TITLE=title,
                                             title_='=' * len(title),
                                             DESCRIPTION=descr,
                                             NOTES=notes,
                                             SOURCE=source,
                                             COPYRIGHT=copyr)
        # FIX: encode both arguments before hashing -- the other variants
        # of this script in this file do so, and under Python 3 (this
        # chunk already uses print()) hashlib digests require bytes, not
        # str.  Assumes check_hash feeds its arguments to hashlib --
        # confirm against hash_funcs.
        to_write, filehash = hash_funcs.check_hash(write_file.encode(),
                                                   data_mod.__name__.encode())
        if not to_write:
            print("Hash has not changed for docstring of dataset "
                  "{}".format(dataset))
            continue
        with open(os.path.realpath(write_pth), 'w') as rst_file:
            rst_file.write(write_file)
        if filehash is not None:
            hash_funcs.update_hash_dict(filehash, data_mod.__name__)
$COPYRIGHT """) if __name__ == "__main__": if not os.path.exists(dest_dir): os.makedirs(dest_dir) for dataset in datasets: write_pth = join(dest_dir, dataset + '.rst') data_mod = datasets[dataset] title = getattr(data_mod, 'TITLE') descr = getattr(data_mod, 'DESCRLONG') copyr = getattr(data_mod, 'COPYRIGHT') notes = getattr(data_mod, 'NOTE') source = getattr(data_mod, 'SOURCE') write_file = doc_template.substitute(TITLE=title, title_='='*len(title), DESCRIPTION=descr, NOTES=notes, SOURCE=source, COPYRIGHT=copyr) to_write, filehash = hash_funcs.check_hash(write_file.encode(), data_mod.__name__.encode()) if not to_write: print("Hash has not changed for docstring of dataset " "{}".format(dataset)) continue with open(os.path.realpath(write_pth), 'w') as rst_file: rst_file.write(write_file) if filehash is not None: hash_funcs.update_hash_dict(filehash, data_mod.__name__)
$COPYRIGHT """) if __name__ == "__main__": if not os.path.exists(dest_dir): os.makedirs(dest_dir) for dataset in datasets: write_pth = join(dest_dir, dataset + '.rst') data_mod = datasets[dataset] title = getattr(data_mod, 'TITLE') descr = getattr(data_mod, 'DESCRLONG') copyr = getattr(data_mod, 'COPYRIGHT') notes = getattr(data_mod, 'NOTE') source = getattr(data_mod, 'SOURCE') write_file = doc_template.substitute(TITLE=title, title_='='*len(title), DESCRIPTION=descr, NOTES=notes, SOURCE=source, COPYRIGHT=copyr) to_write, filehash = hash_funcs.check_hash(write_file, data_mod.__name__) if not to_write: print("Hash has not changed for docstring of dataset " "{}".format(dataset)) continue with open(os.path.realpath(write_pth), 'w') as rst_file: rst_file.write(write_file) if filehash is not None: hash_funcs.update_hash_dict(filehash, data_mod.__name__)