def make_doc(ctx):
    """Build the pymatgen_diffusion API docs and stage the HTML in docs/.

    Runs sphinx-apidoc, strips the auto-generated test-module entries from
    the rst files, builds the HTML, then moves it into the docs/ tree.

    :param ctx: invoke context used to run shell commands.
    """
    with cd("docs_rst"):
        ctx.run("cp ../CHANGES.rst change_log.rst")
        ctx.run("sphinx-apidoc -d 6 -o . -f ../pymatgen_diffusion")
        ctx.run("rm pymatgen_diffusion*.tests.rst")
        for rst_name in glob.glob("*.rst"):
            if not (rst_name.startswith('pymatgen_diffusion')
                    and rst_name.endswith('rst')):
                continue
            kept_lines = []
            sub_lines = []
            in_subpackages = False
            with open(rst_name, 'r') as handle:
                for raw in handle:
                    text = raw.strip()
                    if text == "Subpackages":
                        in_subpackages = True
                    if not in_subpackages and not text.endswith("tests"):
                        kept_lines.append(raw)
                    else:
                        # Buffer subpackage entries, skipping test modules,
                        # and flush the buffer at each module entry.
                        if not text.endswith("tests"):
                            sub_lines.append(raw)
                        if text.startswith("pymatgen") and not text.endswith("tests"):
                            kept_lines.extend(sub_lines)
                            in_subpackages = False
                            sub_lines = []
            with open(rst_name, 'w') as handle:
                handle.write("".join(kept_lines))
        ctx.run("make html")
    with cd("docs"):
        ctx.run("cp -r html/* .")
        ctx.run("rm -r html")
        # Avoid the use of jekyll so that _dir works as intended.
        ctx.run("touch .nojekyll")
def make_doc(ctx):
    """Build the matgendb API docs, pruning test modules, and stage in docs/.

    :param ctx: invoke context used to run shell commands.
    """
    with cd("docs_rst"):
        ctx.run("cp ../CHANGES.rst change_log.rst")
        ctx.run("sphinx-apidoc -d 6 -o . -f ../matgendb")
        ctx.run("rm matgendb*.tests.rst")
        for f in glob.glob("*.rst"):
            if f.startswith('matgendb') and f.endswith('rst'):
                newoutput = []
                suboutput = []
                subpackage = False
                with open(f, 'r') as fid:
                    for line in fid:
                        clean = line.strip()
                        if clean == "Subpackages":
                            subpackage = True
                        if not subpackage and not clean.endswith("tests"):
                            newoutput.append(line)
                        else:
                            if not clean.endswith("tests"):
                                suboutput.append(line)
                            # BUG FIX: the module prefix must match THIS
                            # package ("matgendb"); the original checked
                            # "pymatgen" (copy-paste from pymatgen's task),
                            # so buffered subpackage entries were never
                            # flushed into the output.
                            if clean.startswith("matgendb") and not clean.endswith("tests"):
                                newoutput.extend(suboutput)
                                subpackage = False
                                suboutput = []
                with open(f, 'w') as fid:
                    fid.write("".join(newoutput))
        ctx.run("make html")
        ctx.run("cp _static/* ../docs/html/_static")
    with cd("docs"):
        ctx.run("cp -r html/* .")
        ctx.run("rm -r html")
        # Avoid the use of jekyll so that _dir works as intended.
        ctx.run("touch .nojekyll")
def make_doc(ctx):
    """Generate latest_changes.rst, convert example notebooks, build the
    pymatgen API docs (pruning test modules) and stage everything in docs/.

    :param ctx: invoke context used to run shell commands.
    """
    with open("CHANGES.rst") as f:
        contents = f.read()
    # BUG FIX: raw string — "\-" is an invalid escape sequence in a normal
    # string literal (DeprecationWarning, SyntaxWarning on newer Pythons).
    toks = re.split(r"\-{3,}", contents)
    # Length of the version token, used to rebuild the underline separator.
    n = len(toks[0].split()[-1])
    changes = [toks[0]]
    changes.append("\n" + "\n".join(toks[1].strip().split("\n")[0:-1]))
    changes = ("-" * n).join(changes)
    with open("docs_rst/latest_changes.rst", "w") as f:
        f.write(changes)
    with cd("examples"):
        ctx.run("jupyter nbconvert --to html *.ipynb")
        ctx.run("mv *.html ../docs_rst/_static")
    with cd("docs_rst"):
        ctx.run("cp ../CHANGES.rst change_log.rst")
        ctx.run("sphinx-apidoc --separate -d 6 -o . -f ../pymatgen")
        ctx.run("rm pymatgen*.tests.*rst")
        for f in glob.glob("*.rst"):
            if f.startswith('pymatgen') and f.endswith('rst'):
                newoutput = []
                suboutput = []
                subpackage = False
                with open(f, 'r') as fid:
                    for line in fid:
                        clean = line.strip()
                        if clean == "Subpackages":
                            subpackage = True
                        if not subpackage and not clean.endswith("tests"):
                            newoutput.append(line)
                        else:
                            if not clean.endswith("tests"):
                                suboutput.append(line)
                            if clean.startswith("pymatgen") and not clean.endswith("tests"):
                                newoutput.extend(suboutput)
                                subpackage = False
                                suboutput = []
                with open(f, 'w') as fid:
                    fid.write("".join(newoutput))
        ctx.run("make html")
        ctx.run("cp _static/* ../docs/html/_static")
    with cd("docs"):
        ctx.run("cp -r html/* .")
        ctx.run("rm -r html")
        ctx.run("rm -r doctrees")
        ctx.run("rm -r _sources")
        # This makes sure pymatgen.org works to redirect to the GitHub page
        ctx.run("echo \"pymatgen.org\" > CNAME")
        # Avoid the use of jekyll so that _dir works as intended.
        ctx.run("touch .nojekyll")
def update_doc(ctx):
    """Rebuild the docs and push the result to the gh-pages branch."""
    html_dir = "docs/_build/html/"
    # Sync the checkout first so the subsequent commit applies cleanly.
    with cd(html_dir):
        ctx.run("git pull")
    make_doc(ctx)
    with cd(html_dir):
        ctx.run("git add .")
        ctx.run('git commit -a -m "Update dev docs"')
        ctx.run("git push origin gh-pages")
def update_doc(ctx):
    """Pull the gh-pages checkout, rebuild the docs and push the update."""
    with cd("docs/_build/html/"):
        ctx.run("git pull")
    make_doc(ctx)
    with cd("docs/_build/html/"):
        for git_cmd in ("git add .",
                        'git commit -a -m "Update dev docs"',
                        "git push origin gh-pages"):
            ctx.run(git_cmd)
def make_doc(ctx):
    """Build the amset API docs and copy the HTML output into docs/."""
    with cd("docs_rst"):
        for cmd in ("sphinx-apidoc -o . -f ../amset",
                    "make html",
                    "cp -r build/html/* ../docs"):
            ctx.run(cmd)
    with cd("docs"):
        # Avoid the use of jekyll so that _dir works as intended.
        ctx.run("touch .nojekyll")
def make_doc(ctx):
    """Generate latest_changes.rst, convert example notebooks, build the
    pymatgen API docs (pruning test modules) and stage everything in docs/.

    :param ctx: invoke context used to run shell commands.
    """
    with open("CHANGES.rst") as f:
        contents = f.read()
    # BUG FIX: raw string — "\-" is an invalid escape sequence in a normal
    # string literal (DeprecationWarning, SyntaxWarning on newer Pythons).
    toks = re.split(r"\-{3,}", contents)
    # Length of the version token, used to rebuild the underline separator.
    n = len(toks[0].split()[-1])
    changes = [toks[0]]
    changes.append("\n" + "\n".join(toks[1].strip().split("\n")[0:-1]))
    changes = ("-" * n).join(changes)
    with open("docs_rst/latest_changes.rst", "w") as f:
        f.write(changes)
    with cd("examples"):
        ctx.run("jupyter nbconvert --to html *.ipynb")
        ctx.run("mv *.html ../docs_rst/_static")
    with cd("docs_rst"):
        ctx.run("cp ../CHANGES.rst change_log.rst")
        ctx.run("sphinx-apidoc --separate -d 6 -o . -f ../pymatgen")
        ctx.run("rm pymatgen*.tests.*rst")
        for f in glob.glob("*.rst"):
            if f.startswith('pymatgen') and f.endswith('rst'):
                newoutput = []
                suboutput = []
                subpackage = False
                with open(f, 'r') as fid:
                    for line in fid:
                        clean = line.strip()
                        if clean == "Subpackages":
                            subpackage = True
                        if not subpackage and not clean.endswith("tests"):
                            newoutput.append(line)
                        else:
                            if not clean.endswith("tests"):
                                suboutput.append(line)
                            if clean.startswith("pymatgen") and not clean.endswith("tests"):
                                newoutput.extend(suboutput)
                                subpackage = False
                                suboutput = []
                with open(f, 'w') as fid:
                    fid.write("".join(newoutput))
        ctx.run("make html")
        ctx.run("cp _static/* ../docs/html/_static")
    with cd("docs"):
        ctx.run("cp -r html/* .")
        ctx.run("rm -r html")
        ctx.run("rm -r doctrees")
        ctx.run("rm -r _sources")
        # This makes sure pymatgen.org works to redirect to the GitHub page
        ctx.run("echo \"pymatgen.org\" > CNAME")
        # Avoid the use of jekyll so that _dir works as intended.
        ctx.run("touch .nojekyll")
def make_doc(ctx):
    """Build the robocrys API docs (excluding exclude_paths) into docs/."""
    with cd("docs_rst"):
        # exclude_paths is a module-level string of paths sphinx-apidoc skips.
        ctx.run("sphinx-apidoc -f -o source ../robocrys " + exclude_paths)
        ctx.run("make html")
        ctx.run("cp -r build/html/* ../docs")
    with cd("docs"):
        # Avoid the use of jekyll so that _dir works as intended.
        ctx.run("touch .nojekyll")
def make_doc(ctx):
    """Regenerate the fireworks API docs and publish the HTML into docs/."""
    with cd("docs_rst"):
        ctx.run("sphinx-apidoc -o . -f ../fireworks")
        ctx.run("make html")
    with cd("docs"):
        for cleanup in ("cp -r html/* .", "rm -r html", "rm -r doctrees"):
            ctx.run(cleanup)
        # Avoid the use of jekyll so that _dir works as intended.
        ctx.run("touch .nojekyll")
def make_doc(ctx):
    """Regenerate the turbomoleio API docs and publish the HTML into docs/."""
    with cd("docs_rst"):
        ctx.run("sphinx-apidoc -o api/ -f ../turbomoleio")
        ctx.run("make html")
    with cd("docs"):
        for cleanup in ("cp -r html/* .", "rm -r html", "rm -r doctrees"):
            ctx.run(cleanup)
        # Avoid the use of jekyll so that _dir works as intended.
        ctx.run("touch .nojekyll")
def make_doc(ctx):
    """Regenerate the rocketsled API docs and publish the HTML into docs/."""
    with cd("docs_rst"):
        ctx.run("sphinx-apidoc -o . -f ../rocketsled")
        ctx.run("make html")
        ctx.run("cp _static/* ../docs/html/_static")
    with cd("docs"):
        for cleanup in ("cp -r html/* .", "rm -r html", "rm -r doctrees"):
            ctx.run(cleanup)
        # Avoid the use of jekyll so that _dir works as intended.
        ctx.run("touch .nojekyll")
def make_doc(ctx):
    """Build the matminer docs (featurizer summary + API docs) into docs/."""
    with cd("docs_rst"):
        # Regenerate the featurizer summary table before the API docs.
        ctx.run("python featurizer_summary.py > featurizer_summary.rst")
        ctx.run("sphinx-apidoc -o . -f ../matminer")
        ctx.run("make html")
        ctx.run("cp _static/* ../docs/html/_static")
    with cd("docs"):
        for cleanup in ("cp -r html/* .", "rm -r html", "rm -r doctrees"):
            ctx.run(cleanup)
        # Avoid the use of jekyll so that _dir works as intended.
        ctx.run("touch .nojekyll")
def make_doc(ctx):
    """
    Generate API documentation + run Sphinx.

    :param ctx: invoke context used to run shell commands.
    """
    ctx.run("cp README.rst api-docs-source/index.rst")
    with cd("api-docs-source"):
        ctx.run("rm maml.*.rst", warn=True)
        ctx.run("sphinx-apidoc --separate -P -d 7 -o . -f ../maml")
        ctx.run("rm maml*.tests.*rst", warn=True)
        for f in glob.glob("maml*.rst"):
            newoutput = []
            with open(f, 'r') as fid:
                for line in fid:
                    # Drop references to private modules (maml.x._y).
                    # BUG FIX: raw string — "\." is an invalid escape
                    # sequence in a normal string literal.
                    if not re.search(r"maml.*\._.*", line):
                        newoutput.append(line)
            with open(f, 'w') as fid:
                fid.write("".join(newoutput))
        ctx.run("rm maml*._*.rst")
    ctx.run("rm -r docs", warn=True)
    ctx.run("sphinx-build -b html api-docs-source docs")
    with cd("docs"):
        ctx.run("rm -r .doctrees", warn=True)
        # Avoid the use of jekyll so that _dir works as intended.
        ctx.run("touch .nojekyll")
def make_doc(ctx):
    """Build the matminer docs (featurizer + dataset summaries, API docs)."""
    with cd("docs_rst"):
        # Regenerate the summary tables before running sphinx-apidoc.
        ctx.run("python featurizer_summary.py > featurizer_summary.rst")
        ctx.run("python dataset_summary.py > dataset_summary.rst")
        ctx.run("sphinx-apidoc -o . -f ../matminer")
        ctx.run("make html")
        ctx.run("cp _static/* ../docs/html/_static")
    with cd("docs"):
        for cleanup in ("cp -r html/* .", "rm -r html", "rm -r doctrees"):
            ctx.run(cleanup)
        # Avoid the use of jekyll so that _dir works as intended.
        ctx.run("touch .nojekyll")
def makedoc(c, preview=False, port=8000):
    """Build the maggma docs; optionally serve a local HTML preview.

    :param c: invoke context.
    :param preview: if True, serve the built HTML over HTTP after building.
    :param port: port for the preview server.
    """
    c.run("sphinx-apidoc --separate -d 6 -o build -f maggma")
    c.run("make html")
    if not preview:
        return
    with cd("build/html"):
        print("Serving docs preview at http://localhost:{}".format(port))
        c.run("python -m http.server {}".format(port))
def get_directories_in_queue(self):
    """Return the work directories of this user's jobs currently in the queue.

    Supports PBS (qstat) and SLURM (squeue/scontrol), selected via
    ``self.queue_type``.

    NOTE(review): if ``queue_type`` is neither 'PBS' nor 'SLURM' the command
    variables below are unbound and this raises NameError — confirm callers
    guarantee one of the two values.
    NOTE(review): ``Popen.stdout.read()`` returns bytes on Python 3, so the
    string splits below assume Python 2 (or universal_newlines) — TODO confirm
    the intended interpreter.
    """
    user_name = getpass.getuser()
    if self.queue_type == 'PBS':
        get_job_ids = ['qstat', '-u', user_name]       # list this user's jobs
        check_job_info = ['qstat', '-f']               # full info for one job
        workdir_str = 'PBS_O_WORKDIR'                  # key holding the work dir
        split_str = ','                                # field separator in output
    elif self.queue_type == 'SLURM':
        get_job_ids = ['squeue', '-u', user_name]
        check_job_info = ['scontrol', 'show', 'job']
        workdir_str = 'WorkDir'
        split_str = '\n'
    scan = subprocess.Popen(get_job_ids, stdout=subprocess.PIPE)
    scan.wait()
    # One entry per output line; rows containing the user name are job rows.
    scan_out = scan.stdout.read().split('\n')
    ids = [i.split()[0] for i in scan_out if user_name in i]
    dirs = []
    for id in ids:  # NOTE(review): shadows the builtin id()
        scan1 = subprocess.Popen(check_job_info + [id], stdout=subprocess.PIPE)
        out = scan1.stdout.read().split(split_str)
        # Field containing the work-dir key, e.g. "PBS_O_WORKDIR=/path".
        line = [i for i in out if workdir_str in i][0]
        dir = line.replace('\n\t', '').split('=')[1]
        with cd(dir):
            # cd + getcwd resolves symlinks to the real path.
            dir_orig = os.getcwd()
            dirs.append(dir_orig)
    return dirs
def pytest(ctx):
    """Run the pseudo_dojo test suite (2 workers, coverage, doctests).

    :param ctx: invoke context used to run the shell command.
    """
    # Parallel pytest with coverage; integration tests are excluded because
    # they require external resources.
    pytest_cmd = r"""\
pytest -n 2 --cov-config=.coveragerc --cov=abipy -v --doctest-modules pseudo_dojo \
  --ignore=pseudo_dojo/integration_tests
"""
    with cd(DOJO_ROOTDIR):
        # pty=True so pytest emits colored/interactive-style output.
        ctx.run(pytest_cmd, pty=True)
def gnuplot(self):
    """
    Plot the results with gnuplot.
    Based on the `replot.sh` script provided by the oncvpsp code.
    """
    outfile = self.filepath
    base = os.path.basename(outfile)
    gnufile = base + ".scr"    # gnuplot script extracted from the output file
    plotfile = base + ".plot"  # plotting data section
    temp = base + ".tmp"
    from monty.os import cd
    from subprocess import check_call
    # Work in a scratch directory so the intermediate files don't pollute cwd.
    workdir = tempfile.mkdtemp()
    print("Working in %s" % workdir)
    with cd(workdir):
        # Extract the "DATA FOR PLOTTING" section of the output into plotfile.
        check_call("awk 'BEGIN{out=0};/GNUSCRIPT/{out=0}; {if(out == 1) {print}}; \
/DATA FOR PLOTTING/{out=1}' %s > %s" % (outfile, plotfile), shell=True)
        # Extract the gnuplot script section into temp.
        check_call("awk 'BEGIN{out=0};/END_GNU/{out=0}; {if(out == 1) {print}}; \
/GNUSCRIPT/{out=1}' %s > %s" % (outfile, temp), shell=True)
        # Replace the placeholder data-file name t1 with the extracted file.
        check_call('sed -e 1,1000s/t1/"%s"/ %s > %s' % (plotfile, temp, gnufile), shell=True)
        try:
            check_call(["gnuplot", gnufile])
        except KeyboardInterrupt:
            print("Received KeyboardInterrupt")
    # NOTE(review): os.rmdir only removes EMPTY directories; the extracted
    # files above live in workdir, so this likely raises OSError — consider
    # shutil.rmtree. TODO confirm intended cleanup behaviour.
    os.rmdir(workdir)
def test_continue(self):
    # Exercise VaspJob's auto-continuation behaviour.
    with cd(os.path.join(test_dir, 'postprocess')):
        # Default auto_continue=True: setup writes a continue.json marker,
        # a second setup continues from CONTCAR, and postprocess removes it.
        with ScratchDir('.', copy_from_current_on_enter=True):
            job = VaspJob("hello", auto_continue=True)
            job.setup()
            self.assertTrue(os.path.exists("continue.json"),
                            "continue.json not created")
            job.setup()
            self.assertEqual(Poscar.from_file("CONTCAR").structure,
                             Poscar.from_file("POSCAR").structure)
            self.assertEqual(Incar.from_file('INCAR')['ISTART'], 1)
            job.postprocess()
            self.assertFalse(os.path.exists("continue.json"),
                             "continue.json not deleted after postprocessing")
        # Explicit action list: only the INCAR is touched, so CONTCAR and
        # POSCAR remain different after repeated setups.
        with ScratchDir('.', copy_from_current_on_enter=True):
            job = VaspJob("hello",
                          auto_continue=[{"dict": "INCAR",
                                          "action": {"_set": {"ISTART": 1}}}])
            job.setup()
            job.setup()
            self.assertNotEqual(Poscar.from_file("CONTCAR").structure,
                                Poscar.from_file("POSCAR").structure)
            self.assertEqual(Incar.from_file('INCAR')['ISTART'], 1)
            job.postprocess()
def write_input(temp, params, xkeys, parr=True, func='pbe'):
    '''
    Method to write QE input file in scratch dir from template.

    :param temp (list): Returned from get_template() method.
    :param params (dict): Dict of parameters to pass into template. Keys are
        the strings within the placeholders in the template.
    :param xkeys ([str]): Parameters to distinguish between different input
        files.
    :param parr (bool): Whether use parallel mode (send the calculation to
        computational nodes). Default to True. Note in non-parallel (serial,
        run the calculation on login nodes), you need to cp the psp files
        into scratch dir.
    :param func (str): Functional used. Default to GGA. Refer to the format
        of filename of template files.
    :return: None
    '''
    template, info = temp
    tags = [str(params[key]) for key in xkeys]
    jobname = "_".join(info + tags)
    if not parr:
        # Serial mode: write in place; pseudopotentials must already exist.
        _write_input_from_temp(template, jobname, params)
        return
    # Parallel mode: gather the pseudopotential files for every element,
    # create the job directory and copy everything inside it.
    comp = Composition(info[0])
    psp = {}
    for symbol in (elem.symbol for elem in comp.elements):
        psp[symbol] = os.path.abspath(glob.glob('../%s.%s*.UPF' % (symbol, func))[0])
    os.makedirs(jobname)
    with cd(jobname):
        _write_input_from_temp(template, jobname, params)
        for path in psp.values():
            shutil.copyfile(path, os.path.split(path)[1])
def update_coverage(ctx):
    """Refresh the gh-pages checkout, regenerate the HTML coverage report and
    rebuild the docs.

    :param ctx: invoke context used to run shell commands.
    """
    with cd("docs/_build/html/"):
        ctx.run("git pull")
    ctx.run(
        "nosetests --config=nose.cfg --cover-html --cover-html-dir=docs/_build/html/coverage"
    )
    # BUG FIX: update_doc requires the invoke context; calling it with no
    # arguments raised TypeError.
    update_doc(ctx)
def make_doc(ctx):
    """Build the custodian API docs, pruning test modules from the rst files.

    :param ctx: invoke context used to run shell commands.
    """
    with cd("docs"):
        ctx.run("sphinx-apidoc -o . -f ../custodian")
        ctx.run("rm custodian*.tests.rst")
        # BUG FIX: the cwd is already docs/ here, so the generated rst files
        # match "*.rst" / "custodian*"; the original "docs/*.rst" and
        # "docs/custodian" patterns matched nothing and the test-module
        # cleanup below never ran.
        for f in glob.glob("*.rst"):
            if f.startswith('custodian') and f.endswith('rst'):
                newoutput = []
                suboutput = []
                subpackage = False
                with open(f, 'r') as fid:
                    for line in fid:
                        clean = line.strip()
                        if clean == "Subpackages":
                            subpackage = True
                        if not subpackage and not clean.endswith("tests"):
                            newoutput.append(line)
                        else:
                            if not clean.endswith("tests"):
                                suboutput.append(line)
                            if clean.startswith("custodian") and not clean.endswith("tests"):
                                newoutput.extend(suboutput)
                                subpackage = False
                                suboutput = []
                with open(f, 'w') as fid:
                    fid.write("".join(newoutput))
        ctx.run("make html")
def makedoc(ctx):
    """Build the matgendb API docs, pruning test modules from the rst files.

    :param ctx: invoke context used to run shell commands.
    """
    # Sphinx imports matgendb.webui, which needs Django settings configured.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "matgendb.webui.settings")
    with cd("docs"):
        ctx.run("sphinx-apidoc -o . -f ../matgendb")
        ctx.run("rm matgendb*.tests.rst")
        # BUG FIX: the cwd is already docs/ here, so the generated rst files
        # match "*.rst" / "matgendb*"; the original "docs/*.rst" and
        # "docs/matgendb" patterns matched nothing and the test-module
        # cleanup below never ran.
        for f in glob.glob("*.rst"):
            if f.startswith('matgendb') and f.endswith('rst'):
                newoutput = []
                suboutput = []
                subpackage = False
                with open(f, 'r') as fid:
                    for line in fid:
                        clean = line.strip()
                        if clean == "Subpackages":
                            subpackage = True
                        if not subpackage and not clean.endswith("tests"):
                            newoutput.append(line)
                        else:
                            if not clean.endswith("tests"):
                                suboutput.append(line)
                            if clean.startswith("matgendb") and not clean.endswith("tests"):
                                newoutput.extend(suboutput)
                                subpackage = False
                                suboutput = []
                with open(f, 'w') as fid:
                    fid.write("".join(newoutput))
        ctx.run("make html")
        ctx.run("cp favicon.ico _build/html/_static/favicon.ico")
def write_input(temp, params, xkeys, parr=True, func='pbe'):
    '''
    Method to write QE input file in scratch dir from template.

    :param temp (list): Returned from get_template() method.
    :param params (dict): Dict of parameters to pass into template. Keys are
        the strings within the placeholders in the template.
    :param xkeys ([str]): Parameters to distinguish between different input
        files.
    :param parr (bool): Whether use parallel mode (send the calculation to
        computational nodes). Default to True. Note in non-parallel (serial,
        run the calculation on login nodes), you need to cp the psp files
        into scratch dir.
    :param func (str): Functional used. Default to GGA. Refer to the format
        of filename of template files.
    :return: None
    '''
    tmpl, info = temp
    suffix_parts = [str(params[key]) for key in xkeys]
    jobname = "_".join(info + suffix_parts)
    if parr:
        # Parallel mode: collect one pseudopotential per element, then
        # create the job directory and write/copy everything inside it.
        composition = Composition(info[0])
        psp = {
            elem.symbol: os.path.abspath(
                glob.glob('../%s.%s*.UPF' % (elem.symbol, func))[0])
            for elem in composition.elements
        }
        os.makedirs(jobname)
        with cd(jobname):
            _write_input_from_temp(tmpl, jobname, params)
            for upf_path in psp.values():
                shutil.copyfile(upf_path, os.path.split(upf_path)[1])
    else:
        # Serial mode: write in place; psp files must already be present.
        _write_input_from_temp(tmpl, jobname, params)
def make_doc(ctx):
    """Build the custodian API docs, pruning test modules from the rst files.

    :param ctx: invoke context used to run shell commands.
    """
    with cd("docs"):
        ctx.run("sphinx-apidoc -d 6 -o . -f ../custodian")
        ctx.run("rm custodian*.tests.rst")
        for rst_name in glob.glob("*.rst"):
            if not (rst_name.startswith('custodian') and rst_name.endswith('rst')):
                continue
            kept_lines = []
            sub_lines = []
            in_subpackages = False
            with open(rst_name, 'r') as handle:
                for raw in handle:
                    text = raw.strip()
                    if text == "Subpackages":
                        in_subpackages = True
                    if not in_subpackages and not text.endswith("tests"):
                        kept_lines.append(raw)
                    else:
                        # Buffer subpackage entries (minus tests) and flush
                        # the buffer at each custodian module entry.
                        if not text.endswith("tests"):
                            sub_lines.append(raw)
                        if text.startswith("custodian") and not text.endswith("tests"):
                            kept_lines.extend(sub_lines)
                            in_subpackages = False
                            sub_lines = []
            with open(rst_name, 'w') as handle:
                handle.write("".join(kept_lines))
        ctx.run("make html")
        # ctx.run("cp _static/* _build/html/_static")
        # This makes sure pymatgen.org works to redirect to the GitHub page
        # ctx.run("echo \"pymatgen.org\" > _build/html/CNAME")
        # Avoid the use of jekyll so that _dir works as intended.
        ctx.run("touch _build/html/.nojekyll")
def test_postprocess(self):
    with cd(os.path.join(test_dir, "postprocess")):
        with ScratchDir(".", copy_from_current_on_enter=True):
            # Keep a pristine INCAR so we can restore it afterwards.
            shutil.copy("INCAR", "INCAR.backup")
            job = VaspJob("hello", final=False, suffix=".test", copy_magmom=True)
            job.postprocess()
            incar = Incar.from_file("INCAR")
            incar_prev = Incar.from_file("INCAR.test")
            # Every output file must have been copied to its .test sibling.
            for f in (
                "INCAR",
                "KPOINTS",
                "CONTCAR",
                "OSZICAR",
                "OUTCAR",
                "POSCAR",
                "vasprun.xml",
            ):
                self.assertTrue(os.path.isfile(f"{f}.test"))
                os.remove(f"{f}.test")
            shutil.move("INCAR.backup", "INCAR")
            # MAGMOM must be copied from OUTCAR into the new INCAR.
            self.assertAlmostEqual(incar["MAGMOM"], [3.007, 1.397, -0.189, -0.189])
            self.assertAlmostEqual(incar_prev["MAGMOM"], [5, -5, 0.6, 0.6])
def test_recover_errors(self):
    fw, launch_id = self.lp.reserve_fw(self.fworker, self.launch_dir)
    fw = self.lp.get_fw_by_id(1)
    with cd(self.launch_dir):
        setup_offline_job(self.lp, fw, launch_id)
    # Remove the directory so recover_offline hits an exception.
    shutil.rmtree(self.launch_dir)
    # With ignore_errors the launch stays RESERVED...
    self.assertIsNotNone(
        self.lp.recover_offline(launch_id, ignore_errors=True,
                                print_errors=True))
    fw = self.lp.get_fw_by_id(launch_id)
    self.assertEqual(fw.state, 'RESERVED')
    # ...without it the firework is fizzled.
    self.assertIsNotNone(
        self.lp.recover_offline(launch_id, ignore_errors=False))
    fw = self.lp.get_fw_by_id(launch_id)
    self.assertEqual(fw.state, 'FIZZLED')
def make_doc(ctx):
    """Build the monty API docs, pruning test modules from the rst files.

    :param ctx: invoke context used to run shell commands.
    """
    with cd("docs"):
        ctx.run("sphinx-apidoc -o . -f ../monty")
        # BUG FIX: the cwd is already docs/ here, so the generated rst files
        # match "*.rst" / "monty*"; the original "docs/*.rst" and
        # "docs/monty" patterns matched nothing and the test-module cleanup
        # below never ran.
        for f in glob.glob("*.rst"):
            if f.startswith('monty') and f.endswith('rst'):
                newoutput = []
                suboutput = []
                subpackage = False
                with open(f, 'r') as fid:
                    for line in fid:
                        clean = line.strip()
                        if clean == "Subpackages":
                            subpackage = True
                        if not subpackage and not clean.endswith("tests"):
                            newoutput.append(line)
                        else:
                            if not clean.endswith("tests"):
                                suboutput.append(line)
                            if clean.startswith("monty") and not clean.endswith("tests"):
                                newoutput.extend(suboutput)
                                subpackage = False
                                suboutput = []
                with open(f, 'w') as fid:
                    fid.write("".join(newoutput))
        ctx.run("make html")
        # This makes sure monty.org works to redirect to the Github page
        ctx.run("echo \"monty.org\" > _build/html/CNAME")
        # Avoid the use of jekyll so that _dir works as intended.
        ctx.run("touch _build/html/.nojekyll")
def build_conda(ctx, pkg, nopy27=False):
    """Build a conda package for the matsci channel.

    :param ctx: invoke context used to run shell commands.
    :param pkg: name of the skeleton directory to build.
    :param nopy27: if True, skip the additional Python 2.7 build.
    """
    with cd(os.path.join(module_dir, "conda-skeletons")):
        print("Building %s" % pkg)
        ctx.run("conda build --user matsci %s" % pkg)
        if not nopy27:
            # Py27 versions
            ctx.run("conda build --user matsci --python 2.7 %s" % pkg)
def get_directories_VaspJobNotDone(root_dir):
    """Return directories under root_dir with complete VASP inputs whose runs
    have not finished.

    A directory qualifies when POSCAR/POTCAR/INCAR/KPOINTS all parse and the
    OUTCAR is absent, unreadable, or has incomplete run_stats.

    :param root_dir: root directory to scan (symlinks are resolved).
    :return: list of qualifying directory paths.
    """
    with cd(root_dir):
        # cd + getcwd resolves symlinks to the real path.
        root_dir_real = os.getcwd()
    scan = subprocess.Popen(['find', root_dir_real, '-name', 'POSCAR'],
                            stdout=subprocess.PIPE)
    scan.wait()
    # BUG FIX: decode so the paths are str on Python 3 (stdout is bytes);
    # os.path.join with the 'POSCAR' literal would otherwise raise TypeError.
    pos_coll = scan.stdout.read().decode().split()
    pos_dirs = [os.path.split(i)[0] for i in pos_coll]
    vaspjob_dirs = []
    for job_dir in pos_dirs:
        try:
            # Parse all four input files just to validate them.
            Poscar.from_file(os.path.join(job_dir, 'POSCAR'))
            Potcar.from_file(os.path.join(job_dir, 'POTCAR'))
            Incar.from_file(os.path.join(job_dir, 'INCAR'))
            Kpoints.from_file(os.path.join(job_dir, 'KPOINTS'))
        except Exception:
            # BUG FIX: Python-2 print statement -> print() function.
            print('input files are not ready in %s' % job_dir)
        else:
            try:
                out = Outcar(os.path.join(job_dir, 'OUTCAR'))
                # run_stats has 7 entries only for a completed run.
                if len(out.run_stats) != 7:
                    # BUG FIX: original appended to "vaspjob_dir" (NameError).
                    vaspjob_dirs.append(job_dir)
            except Exception:
                # No/unreadable OUTCAR: the job has not run yet.
                vaspjob_dirs.append(job_dir)
    return vaspjob_dirs
def pytest(ctx):
    """Run the abiflows test suite (2 workers, coverage, doctests).

    :param ctx: invoke context used to run the shell command.
    """
    # Parallel pytest with coverage; integration tests are excluded because
    # they require external resources.
    pytest_cmd = r"""\
pytest -n 2 --cov-config=.coveragerc --cov=abiflows -v --doctest-modules abiflows \
  --ignore=abiflows/fireworks/integration_tests
"""
    with cd(ROOTDIR):
        # pty=True so pytest emits colored/interactive-style output.
        ctx.run(pytest_cmd, pty=True)
def temp_dir(delete, changedir=True):
    """
    Context manager that creates a temporary directory with tempfile.mkdtemp
    and cd to it with monty.os.cd.

    Args:
        delete (bool): if True the directory will be deleted at the end of
            the job, if False it will be preserved.
        changedir (bool): if True inside the context manager will make a cd
            to the temporary directory.

    Yields:
        the path to the temporary directory created.
    """
    tmp_path = tempfile.mkdtemp()
    if not delete:
        # Tell the user where the preserved run folder lives.
        print("Running folder: {}".format(tmp_path))
    try:
        if changedir:
            with cd(tmp_path):
                yield tmp_path
        else:
            yield tmp_path
    finally:
        if delete:
            shutil.rmtree(tmp_path, ignore_errors=True)
def test_postprocess(self):
    # test gzipped and zipping of additional files
    with cd(os.path.join(test_files_lobster3)):
        # gzipped=True: lobster files plus the extra VASP outputs are zipped.
        with ScratchDir('.', copy_from_current_on_enter=True):
            shutil.copy('lobsterin', 'lobsterin.orig')
            job = LobsterJob("hello", gzipped=True,
                             add_files_to_gzip=VASP_OUTPUT_FILES)
            job.postprocess()
            for gz_name in ("WAVECAR.gz", "lobsterin.gz", "lobsterout.gz",
                            "INCAR.gz", "lobsterin.orig.gz"):
                self.assertTrue(os.path.exists(gz_name))
        # gzipped=False: everything stays uncompressed.
        with ScratchDir('.', copy_from_current_on_enter=True):
            shutil.copy('lobsterin', 'lobsterin.orig')
            job = LobsterJob("hello", gzipped=False,
                             add_files_to_gzip=VASP_OUTPUT_FILES)
            job.postprocess()
            for plain_name in ("WAVECAR", "lobsterin", "lobsterout",
                               "INCAR", "lobsterin.orig"):
                self.assertTrue(os.path.exists(plain_name))
def test_postprocess(self):
    # Top-level NEB outputs and the per-image outputs that get suffixed.
    neb_outputs = ['INCAR', 'KPOINTS', 'POTCAR', 'vasprun.xml']
    neb_sub_outputs = [
        'CHG', 'CHGCAR', 'CONTCAR', 'DOSCAR', 'EIGENVAL', 'IBZKPT',
        'PCDAT', 'POSCAR', 'REPORT', 'PROCAR', 'OSZICAR', 'OUTCAR',
        'WAVECAR', 'XDATCAR'
    ]
    with cd(os.path.join(test_dir, 'postprocess_neb')):
        postprocess_neb = os.path.abspath(".")
        job = VaspNEBJob("hello", final=False, suffix=".test")
        job.postprocess()
        for f in neb_outputs:
            self.assertTrue(os.path.isfile('{}.test'.format(f)))
            os.remove('{}.test'.format(f))
        # Image directories are named 00, 01, ...
        for sf in glob.glob("[0-9][0-9]"):
            os.chdir(os.path.join(postprocess_neb, sf))
            for f in neb_sub_outputs:
                if os.path.exists(f):
                    self.assertTrue(os.path.isfile('{}.test'.format(f)))
                    os.remove('{}.test'.format(f))
def test_postprocess(self):
    with cd(test_dir):
        with ScratchDir('.', copy_from_current_on_enter=True):
            job = FeffJob("hello", backup=True, gzipped=True)
            # Each postprocess call creates the next numbered archive.
            job.postprocess()
            self.assertTrue(os.path.exists('feff_out.1.tar.gz'))
            job.postprocess()
            self.assertTrue(os.path.exists('feff_out.2.tar.gz'))
def test_cd_exception(self):
    """cd() must restore the original working directory even when the body
    raises; afterwards empty_file.txt (which lives in test_dir) must no
    longer be visible from the restored cwd."""
    try:
        with cd(test_dir):
            self.assertTrue(os.path.exists("empty_file.txt"))
            raise RuntimeError()
    # BUG FIX: narrowed from a bare "except:", which also swallowed a
    # failing assertTrue above and made the test pass vacuously.
    except RuntimeError:
        pass
    self.assertFalse(os.path.exists("empty_file.txt"))
def pytest(ctx):
    """Run the abipy test suite (2 workers, coverage, doctests).

    :param ctx: invoke context used to run the shell command.
    """
    # Parallel pytest with coverage; integration tests, reference data,
    # scripts, plot/flow examples and the GUI are excluded.
    pytest_cmd = r"""\
pytest -n 2 --cov-config=.coveragerc --cov=abipy -v --doctest-modules abipy \
  --ignore=abipy/integration_tests --ignore=abipy/data/refs --ignore=abipy/scripts/ \
  --ignore=abipy/examples/plot --ignore=abipy/examples/flows --ignore=abipy/gui
"""
    with cd(ABIPY_ROOTDIR):
        # pty=True so pytest emits colored/interactive-style output.
        ctx.run(pytest_cmd, pty=True)
def make_doc(ctx):
    """Build the docs in place inside docs/ and flatten the HTML output."""
    with cd("docs"):
        ctx.run("sphinx-apidoc -o . -f .")
        ctx.run("make html")
        # ctx.run("cp _static/* ../docs/html/_static")
        for cleanup in ("cp -r build/html/* .", "rm -r build", "touch .nojekyll"):
            ctx.run(cleanup)
def make_doc(ctx):
    """Build the veidt API docs (pruning test rst files) into docs/."""
    with cd("docs_rst"):
        ctx.run("sphinx-apidoc --separate -d 6 -o . -f ../veidt")
        ctx.run("rm veidt*.tests.*rst")
        ctx.run("make html")
        ctx.run("cp _static/* ../docs/html/_static", warn=True)
    with cd("docs"):
        for cleanup in ("cp -r html/* .", "rm -r html",
                        "rm -r doctrees", "rm -r _sources"):
            ctx.run(cleanup)
        # This makes sure veidt.org works to redirect to the GitHub page
        # Avoid the use of jekyll so that _dir works as intended.
        ctx.run("touch .nojekyll")
def proc_dic(rootfolder, filename, sub):
    """Rename F.out inside rootfolder to <folder-name>.<extension of filename>.

    :param rootfolder: directory containing F.out; its basename becomes the
        new file stem.
    :param filename: name whose extension is reused for the renamed file.
    :param sub: unused here; kept for interface compatibility with callers.
    """
    filename_pre = rootfolder.rsplit('/', 1)[-1]
    rename = filename_pre + '.' + filename.split('.')[-1]
    command = 'mv F.out {}'.format(rename)
    # print filename
    with cd(rootfolder):
        os.system(command)
        # BUG FIX: Python-2 "print command" statement is a SyntaxError on
        # Python 3; use the print() function (valid on both).
        print(command)
def test_setup(self):
    with cd(test_dir):
        with ScratchDir('.', copy_from_current_on_enter=True):
            job = VaspJob("hello")
            job.setup()
            incar = Incar.from_file("INCAR")
            # NPAR should be auto-raised on multi-core machines.
            if multiprocessing.cpu_count() > 1:
                self.assertGreater(incar["NPAR"], 1)
def test_setup(self):
    with cd(test_dir):
        with ScratchDir('.', copy_from_current_on_enter=True):
            job = VaspJob("hello", auto_npar=True)
            job.setup()
            incar = Incar.from_file("INCAR")
            # Need at least 3 CPUs for NPAR to be greater than 1
            if multiprocessing.cpu_count() > 3:
                self.assertGreater(incar["NPAR"], 1)
def contribute_dash(ctx):
    """Build the Dash docset, copy it into the Dash-User-Contributions
    checkout, bump docset.json to NEW_VER and push the commit."""
    make_dash(ctx)
    ctx.run('cp pymatgen.tgz ../Dash-User-Contributions/docsets/pymatgen/pymatgen.tgz')
    with cd("../Dash-User-Contributions/docsets/pymatgen"):
        with open("docset.json", "rt") as f:
            meta = json.load(f)
        meta["version"] = NEW_VER
        with open("docset.json", "wt") as f:
            json.dump(meta, f, indent=4)
        ctx.run('git commit -a -m "Update to v%s"' % NEW_VER)
        ctx.run('git push')
    # Remove the local archive now that it has been contributed.
    ctx.run("rm pymatgen.tgz")
def make_doc(ctx):
    """Build the monty API docs, pruning test modules, and stage in docs/.

    :param ctx: invoke context used to run shell commands.
    """
    with cd("docs_rst"):
        ctx.run("sphinx-apidoc --separate -d 6 -o . -f ../monty")
        for rst_name in glob.glob("*.rst"):
            if not (rst_name.startswith('monty') and rst_name.endswith('rst')):
                continue
            kept_lines = []
            sub_lines = []
            in_subpackages = False
            with open(rst_name, 'r') as handle:
                for raw in handle:
                    text = raw.strip()
                    if text == "Subpackages":
                        in_subpackages = True
                    if not in_subpackages and not text.endswith("tests"):
                        kept_lines.append(raw)
                    else:
                        # Buffer subpackage entries (minus tests) and flush
                        # the buffer at each monty module entry.
                        if not text.endswith("tests"):
                            sub_lines.append(raw)
                        if text.startswith("monty") and not text.endswith("tests"):
                            kept_lines.extend(sub_lines)
                            in_subpackages = False
                            sub_lines = []
            with open(rst_name, 'w') as handle:
                handle.write("".join(kept_lines))
        ctx.run("make html")
        # ctx.run("cp _static/* ../docs/html/_static")
    with cd("docs"):
        for cleanup in ("cp -r html/* .", "rm -r html",
                        "rm -r doctrees", "rm -r _sources"):
            ctx.run(cleanup)
        # This makes sure pymatgen.org works to redirect to the GitHub page
        # ctx.run("echo \"pymatgen.org\" > CNAME")
        # Avoid the use of jekyll so that _dir works as intended.
        ctx.run("touch .nojekyll")
def test_setup(self):
    with cd(test_dir):
        with ScratchDir('.', copy_from_current_on_enter=True):
            job = FeffJob("hello", backup=True)
            job.setup()
            # Backup must be byte-equivalent to the original input: compare
            # both the parsed tags and the atomic cluster.
            parameter = Tags.from_file('feff.inp')
            parameter_orig = Tags.from_file('feff.inp.orig')
            self.assertEqual(parameter, parameter_orig)
            atom = Atoms.cluster_from_file('feff.inp')
            atom_origin = Atoms.cluster_from_file('feff.inp.orig')
            self.assertEqual(atom, atom_origin)
def submit_dash_pr(ctx):
    """Open a GitHub PR updating the pymatgen Dash docset to NEW_VER."""
    with cd("../Dash-User-Contributions/docsets/pymatgen"):
        message = "Update pymatgen docset to v%s" % NEW_VER
        payload = {
            "title": message,
            "body": message,
            "head": "Dash-User-Contributions:master",
            "base": "master",
        }
        response = requests.post(
            "https://api.github.com/repos/materialsvirtuallab/Dash-User-Contributions/pulls",
            data=json.dumps(payload))
        print(response.text)
def make_doc(ctx):
    """Build the custodian API docs, pruning test modules, into docs/.

    :param ctx: invoke context used to run shell commands.
    """
    with cd("docs_rst"):
        ctx.run("sphinx-apidoc -d 6 -o . -f ../custodian")
        ctx.run("rm custodian*.tests.rst")
        for rst_name in glob.glob("*.rst"):
            if not (rst_name.startswith('custodian') and rst_name.endswith('rst')):
                continue
            kept_lines = []
            sub_lines = []
            in_subpackages = False
            with open(rst_name, 'r') as handle:
                for raw in handle:
                    text = raw.strip()
                    if text == "Subpackages":
                        in_subpackages = True
                    if not in_subpackages and not text.endswith("tests"):
                        kept_lines.append(raw)
                    else:
                        # Buffer subpackage entries (minus tests) and flush
                        # the buffer at each custodian module entry.
                        if not text.endswith("tests"):
                            sub_lines.append(raw)
                        if text.startswith("custodian") and not text.endswith("tests"):
                            kept_lines.extend(sub_lines)
                            in_subpackages = False
                            sub_lines = []
            with open(rst_name, 'w') as handle:
                handle.write("".join(kept_lines))
        ctx.run("make html")
        # ctx.run("cp _static/* _build/html/_static")
    with cd("docs"):
        for cleanup in ("cp -r html/* .", "rm -r html",
                        "rm -r doctrees", "rm -r _sources"):
            ctx.run(cleanup)
        # Avoid the use of jekyll so that _dir works as intended.
        ctx.run("touch .nojekyll")
def test__recover_completed(self):
    fw, launch_id = self.lp.reserve_fw(self.fworker, self.launch_dir)
    fw = self.lp.get_fw_by_id(1)
    with cd(self.launch_dir):
        setup_offline_job(self.lp, fw, launch_id)
        # launch rocket without launchpad to trigger offline mode
        launch_rocket(launchpad=None, fworker=self.fworker, fw_id=1)
    # A successful recovery returns None and completes the firework.
    self.assertIsNone(self.lp.recover_offline(launch_id))
    fw = self.lp.get_fw_by_id(launch_id)
    self.assertEqual(fw.state, 'COMPLETED')
def test_setup(self):
    with cd(os.path.join(test_dir, 'setup_neb')):
        with ScratchDir('.', copy_from_current_on_enter=True):
            job = VaspNEBJob("hello", half_kpts=True)
            job.setup()
            incar = Incar.from_file("INCAR")
            # NPAR only exceeds 1 when enough CPUs are present.
            if multiprocessing.cpu_count() > 3:
                self.assertGreater(incar["NPAR"], 1)
            # half_kpts rewrites the mesh; the backup keeps the original
            # Monkhorst style while the new file is Gamma-centred.
            kpt = Kpoints.from_file("KPOINTS")
            kpt_pre = Kpoints.from_file("KPOINTS.orig")
            self.assertEqual(kpt_pre.style.name, "Monkhorst")
            self.assertEqual(kpt.style.name, "Gamma")
def test_postprocess(self):
    with cd(os.path.join(test_dir, 'postprocess')):
        with ScratchDir('.', copy_from_current_on_enter=True):
            # Keep a pristine INCAR so it can be restored afterwards.
            shutil.copy('INCAR', 'INCAR.backup')
            job = VaspJob("hello", final=False, suffix=".test",
                          copy_magmom=True)
            job.postprocess()
            incar = Incar.from_file("INCAR")
            incar_prev = Incar.from_file("INCAR.test")
            # Every output file must have been copied to its .test sibling.
            for fname in ('INCAR', 'KPOINTS', 'CONTCAR', 'OSZICAR',
                          'OUTCAR', 'POSCAR', 'vasprun.xml'):
                self.assertTrue(os.path.isfile('{}.test'.format(fname)))
                os.remove('{}.test'.format(fname))
            shutil.move('INCAR.backup', 'INCAR')
            # MAGMOM must be copied from OUTCAR into the new INCAR.
            self.assertAlmostEqual(incar['MAGMOM'],
                                   [3.007, 1.397, -0.189, -0.189])
            self.assertAlmostEqual(incar_prev["MAGMOM"], [5, -5, 0.6, 0.6])
def test_recover_errors(self):
    fw, launch_id = self.lp.reserve_fw(self.fworker, self.launch_dir)
    fw = self.lp.get_fw_by_id(1)
    with cd(self.launch_dir):
        setup_offline_job(self.lp, fw, launch_id)
    # Remove the launch directory so recover_offline hits an exception.
    shutil.rmtree(self.launch_dir)
    # ignore_errors keeps the launch RESERVED...
    self.assertIsNotNone(self.lp.recover_offline(launch_id,
                                                 ignore_errors=True,
                                                 print_errors=True))
    fw = self.lp.get_fw_by_id(launch_id)
    self.assertEqual(fw.state, 'RESERVED')
    # ...while the strict recovery fizzles the firework.
    self.assertIsNotNone(self.lp.recover_offline(launch_id,
                                                 ignore_errors=False))
    fw = self.lp.get_fw_by_id(launch_id)
    self.assertEqual(fw.state, 'FIZZLED')
def test_postprocess(self):
    # Top-level NEB outputs and the per-image outputs that get suffixed.
    neb_outputs = ['INCAR', 'KPOINTS', 'POTCAR', 'vasprun.xml']
    neb_sub_outputs = ['CHG', 'CHGCAR', 'CONTCAR', 'DOSCAR', 'EIGENVAL',
                       'IBZKPT', 'PCDAT', 'POSCAR', 'REPORT', 'PROCAR',
                       'OSZICAR', 'OUTCAR', 'WAVECAR', 'XDATCAR']
    with cd(os.path.join(test_dir, 'postprocess_neb')):
        postprocess_neb = os.path.abspath(".")
        job = VaspNEBJob("hello", final=False, suffix=".test")
        job.postprocess()
        for f in neb_outputs:
            self.assertTrue(os.path.isfile('{}.test'.format(f)))
            os.remove('{}.test'.format(f))
        # Image directories are named 00, 01, ...
        for sf in glob.glob("[0-9][0-9]"):
            os.chdir(os.path.join(postprocess_neb, sf))
            for f in neb_sub_outputs:
                if os.path.exists(f):
                    self.assertTrue(os.path.isfile('{}.test'.format(f)))
                    os.remove('{}.test'.format(f))
def check_resub_relax2(args):
    """Scan run directories, archive finished relax2 runs into Complete/ and
    resubmit the remaining ones after restoring their inputs.

    :param args: parsed CLI arguments (uses .debug; forwarded to helpers).
    """
    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
    for entry in parse_entry(args):
        with cd(entry):
            logging.debug('Current dir path is: {}'.format(os.getcwd()))
            if not os.path.exists('Complete'):
                os.makedirs('Complete')
            exclude = {'Complete'}
            # First pass: move runs with a valid relax2 CONTCAR to Complete/.
            for subdir in [name for name in os.listdir('.') if name not in exclude]:
                check_command = 'check_CONTCARRELAX2 {} -st Yes -stdic Complete/'.format(subdir)
                # NOTE(review): shell=True with interpolated names — fine for
                # trusted directory names, unsafe if they can contain shell
                # metacharacters.
                subprocess.call(check_command, shell=True)
            # Second pass: whatever remains has no relax2 result — restore
            # its inputs and resubmit.
            no_relax2_file = [name for name in os.listdir('.') if name not in exclude]
            for dir_ele in no_relax2_file:
                resub_command = submit_command(args, dir_ele)
                restore_command = 'restoretoinital {}'.format(dir_ele)
                subprocess.call(restore_command, shell=True)
                subprocess.call(resub_command, shell=True)
            logging.debug('Folder with no_relax2_file is: {}'.format(no_relax2_file))
def launch_rocket_to_queue(launchpad, fworker, qadapter, launcher_dir='.', reserve=False,
                           strm_lvl='INFO', create_launcher_dir=False, fill_mode=False,
                           fw_id=None):
    """
    Submit a single job to the queue.

    Args:
        launchpad (LaunchPad)
        fworker (FWorker)
        qadapter (QueueAdapterBase)
        launcher_dir (str): The directory where to submit the job
        reserve (bool): Whether to queue in reservation mode
        strm_lvl (str): level at which to stream log messages
        create_launcher_dir (bool): Whether to create a subfolder launcher+timestamp, if needed
        fill_mode (bool): whether to submit jobs even when there is nothing to run
            (only in non-reservation mode)
        fw_id (int): specific fw_id to reserve (reservation mode only)

    Returns:
        the reservation id on success, False on failure, or None when there
        is simply nothing to run (a soft failure for rapidfire()).
    """
    fworker = fworker if fworker else FWorker()
    launcher_dir = os.path.abspath(launcher_dir)
    l_logger = get_fw_logger('queue.launcher', l_dir=launchpad.logdir, stream_level=strm_lvl)
    l_logger.debug('getting queue adapter')
    # make a defensive copy, mainly for reservation mode (qadapter is mutated below)
    qadapter = load_object(qadapter.to_dict())
    fw, launch_id = None, None  # only needed in reservation mode

    # --- validate arguments and option combinations -------------------------
    if not os.path.exists(launcher_dir):
        raise ValueError('Desired launch directory {} does not exist!'.format(launcher_dir))
    if '--offline' in qadapter['rocket_launch'] and not reserve:
        raise ValueError("Must use reservation mode (-r option) of qlaunch "
                         "when using offline option of rlaunch!!")
    if reserve and 'singleshot' not in qadapter.get('rocket_launch', ''):
        raise ValueError('Reservation mode of queue launcher only works for singleshot Rocket Launcher!')
    if fill_mode and reserve:
        raise ValueError("Fill_mode cannot be used in conjunction with reserve mode!")
    if fw_id and not reserve:
        raise ValueError("qlaunch for specific fireworks may only be used in reservation mode.")

    if fill_mode or launchpad.run_exists(fworker):
        launch_id = None
        try:
            if reserve:
                if fw_id:
                    l_logger.debug('finding a FW to reserve...')
                fw, launch_id = launchpad.reserve_fw(fworker, launcher_dir, fw_id=fw_id)
                if not fw:
                    l_logger.info('No jobs exist in the LaunchPad for submission to queue!')
                    return False
                l_logger.info('reserved FW with fw_id: {}'.format(fw.fw_id))

                # update qadapter job_name based on FW name
                job_name = get_slug(fw.name)[0:QUEUE_JOBNAME_MAXLEN]
                qadapter.update({'job_name': job_name})

                if '_queueadapter' in fw.spec:
                    l_logger.debug('updating queue params using Firework spec..')
                    qadapter.update(fw.spec['_queueadapter'])

                # reservation mode includes --fw_id in rocket launch
                qadapter['rocket_launch'] += ' --fw_id {}'.format(fw.fw_id)

                # update launcher_dir if _launch_dir is selected in reserved fw;
                # a relative _launch_dir is resolved against launcher_dir
                if '_launch_dir' in fw.spec:
                    fw_launch_dir = os.path.expandvars(fw.spec['_launch_dir'])
                    if not os.path.isabs(fw_launch_dir):
                        fw_launch_dir = os.path.join(launcher_dir, fw_launch_dir)
                    launcher_dir = fw_launch_dir
                    makedirs_p(launcher_dir)
                    launchpad.change_launch_dir(launch_id, launcher_dir)
                elif create_launcher_dir:
                    # create launcher_dir and record it on the reserved launch
                    launcher_dir = create_datestamp_dir(launcher_dir, l_logger, prefix='launcher_')
                    launchpad.change_launch_dir(launch_id, launcher_dir)
            elif create_launcher_dir:
                # non-reservation mode: create launcher_dir (no launch to update)
                launcher_dir = create_datestamp_dir(launcher_dir, l_logger, prefix='launcher_')

            # move to the launch directory
            l_logger.info('moving to launch_dir {}'.format(launcher_dir))
            with cd(launcher_dir):
                if '--offline' in qadapter['rocket_launch']:
                    setup_offline_job(launchpad, fw, launch_id)
                l_logger.debug('writing queue script')
                with open(SUBMIT_SCRIPT_NAME, 'w') as f:
                    queue_script = qadapter.get_script_str(launcher_dir)
                    f.write(queue_script)
                l_logger.info('submitting queue script')
                reservation_id = qadapter.submit_to_queue(SUBMIT_SCRIPT_NAME)
                if not reservation_id:
                    raise RuntimeError('queue script could not be submitted, check queue '
                                       'script/queue adapter/queue server status!')
                elif reserve:
                    launchpad.set_reservation_id(launch_id, reservation_id)
            return reservation_id

        # NOTE(review): bare except — catches everything (incl. KeyboardInterrupt)
        # so the reservation can be rolled back; presumably intentional, but
        # `except Exception` would be safer. Confirm before changing.
        except:
            log_exception(l_logger, 'Error writing/submitting queue script!')
            if reserve and launch_id is not None:
                try:
                    # best-effort rollback of the reservation
                    l_logger.info('Un-reserving FW with fw_id, launch_id: {}, {}'.format(
                        fw.fw_id, launch_id))
                    launchpad.cancel_reservation(launch_id)
                    launchpad.forget_offline(launch_id)
                except:
                    log_exception(l_logger, 'Error unreserving FW with fw_id {}'.format(fw.fw_id))
            return False
    else:
        l_logger.info('No jobs exist in the LaunchPad for submission to queue!')
        return None  # note: this is a hack (rather than False) to indicate a soft failure to rapidfire()