def abivalidate_work(self, work):
    """Invoke Abinit to test validity of the inputs of a |Work|"""
    from abipy.flowtk import Flow
    # Build a throwaway Flow around the Work so the flow-level validator can run.
    scratch_dir = tempfile.mkdtemp()
    scratch_flow = Flow(workdir=scratch_dir)
    scratch_flow.register_work(work)
    return self.abivalidate_flow(scratch_flow)
def open(cls, obj, nids=None, **kwargs):
    """
    Flexible constructor. obj can be a :class:`Flow` or a string with
    the directory containing the Flow. nids is an optional list of
    :class:`Node` identifiers used to filter the set of :class:`Task` in the Flow.
    """
    has_dirpath = False
    if is_string(obj):
        try:
            obj = Flow.pickle_load(obj)
        # Narrowed from a bare `except:` (which would also swallow
        # KeyboardInterrupt/SystemExit): any failure to unpickle just means
        # obj is a plain directory path, handled below.
        except Exception:
            has_dirpath = True

    if not has_dirpath:
        # We have a Flow. smeth is the name of the Task method used to open the file.
        items = []
        smeth = "open_" + cls.EXT.lower()
        for task in obj.iflat_tasks(nids=nids):  #, status=obj.S_OK):
            open_method = getattr(task, smeth, None)
            if open_method is None: continue
            ncfile = open_method()
            if ncfile is not None: items.append((task.pos_str, ncfile))
        return cls(*items)
    else:
        # directory --> search for files with the appropriate extension and open it with abiopen.
        if nids is not None:
            raise ValueError("nids cannot be used when obj is a directory.")
        return cls.from_dir(obj)
def test_g0w0_with_ppmodel_inputs(self):
    """Testing g0w0_with_ppmodel_input factory."""
    # Deliberately small parameters to keep the test fast.
    scf_kppa, scf_nband, nscf_nband = 10, 10, 10
    ecuteps, ecutsigx = 2, 2
    multi = g0w0_with_ppmodel_inputs(self.si_structure, self.si_pseudo,
                                     scf_kppa, nscf_nband, ecuteps, ecutsigx,
                                     shifts=(0.5, 0.5, 0.5), ecut=2)

    # The factory produces one dataset per GW step: SCF, NSCF, screening, self-energy.
    scf_input, nscf_input, scr_input, sigma_input = multi.split_datasets()
    self.abivalidate_multi(multi)

    # Check that the runlevel tags classify each input correctly.
    self.assertIn('many_body', scr_input.runlevel)
    self.assertIn('screening', scr_input.runlevel)

    self.abivalidate_input(sigma_input)
    self.assertIn('many_body', sigma_input.runlevel)
    self.assertIn('sigma', sigma_input.runlevel)
    self.assertNotIn('hybrid', sigma_input.runlevel)

    # Optionally (re)generate the reference json files used below.
    if write_inputs_to_json:
        with open('g0w0_with_ppmodel_scf_input.json', mode='w') as fp:
            json.dump(scf_input.as_dict(), fp, indent=2)
        with open('g0w0_with_ppmodel_nscf_input.json', mode='w') as fp:
            json.dump(nscf_input.as_dict(), fp, indent=2)
        with open('g0w0_with_ppmodel_scr_input.json', mode='w') as fp:
            json.dump(scr_input.as_dict(), fp, indent=2)
        with open('g0w0_with_ppmodel_sigma_input.json', mode='w') as fp:
            json.dump(sigma_input.as_dict(), fp, indent=2)

    # Compare each generated input against its stored reference.
    self.assert_input_equality('g0w0_with_ppmodel_scf_input.json', scf_input)
    self.assert_input_equality('g0w0_with_ppmodel_nscf_input.json', nscf_input)
    self.assert_input_equality('g0w0_with_ppmodel_scr_input.json', scr_input)
    self.assert_input_equality('g0w0_with_ppmodel_sigma_input.json', sigma_input)

    # The inputs must also build into a valid G0W0 work/flow.
    flow = Flow.temporary_flow()
    flow.register_work(
        G0W0Work(scf_input, nscf_input, scr_input, sigma_input))
    assert flow.build_and_pickle_dump(abivalidate=True) == 0

    # The default value of `shifts` changed in v0.3 from (0.5, 0.5, 0.5) to (0.0, 0.0, 0.0)
    multi = g0w0_with_ppmodel_inputs(self.si_structure, self.si_pseudo,
                                     scf_kppa, nscf_nband, ecuteps, ecutsigx,
                                     ecut=2)
    for inp in multi:
        self.assert_equal(inp["shiftk"].flatten(), (0, 0, 0))
def test_ion_ioncell_relax_input(self):
    """Testing ion_ioncell_relax_input factory."""
    multi = ion_ioncell_relax_input(self.si_structure, self.si_pseudo, kppa=10, ecut=2)

    # The factory yields two datasets: ionic relaxation then ion+cell relaxation.
    ion_inp, ioncell_inp = multi.split_datasets()

    # The ionic-relaxation input must carry all the expected runlevel tags.
    for tag in ('ion_relax', 'relax', 'ground_state'):
        self.assertIn(tag, ion_inp.runlevel)

    # The two inputs must assemble into a valid relaxation flow.
    flow = Flow.temporary_flow()
    flow.register_work(RelaxWork(ion_inp, ioncell_inp))
    assert flow.build_and_pickle_dump(abivalidate=True) == 0
def test_flow(self):
    """Testing flow creation and task registering"""
    # Build the flow with the task manager configured in test_dir.
    manager = TaskManager.from_file(os.path.join(test_dir, "manager.yml"))
    flow = Flow(workdir=test_dir, manager=manager)

    # Registering a task with an empty input should still allocate cleanly.
    flow.register_task(input={})
    flow.allocate()

    self.assertTrue(flow.allocated)
    self.assertIsInstance(flow[0], Work)
    self.assertIsInstance(flow[0][0], AbinitTask)
    self.assertEqual(flow.check_status(), None)
def test_g0w0_with_ppmodel_inputs(self):
    """Testing g0w0_with_ppmodel_input factory."""
    # Small parameters keep the factory call cheap.
    scf_kppa, scf_nband, nscf_nband = 10, 10, 10
    ecuteps, ecutsigx = 2, 2
    multi = g0w0_with_ppmodel_inputs(self.si_structure, self.si_pseudo,
                                     scf_kppa, nscf_nband, ecuteps, ecutsigx,
                                     shifts=(0.5, 0.5, 0.5), ecut=2)

    # One dataset per GW step: SCF, NSCF, screening, self-energy.
    scf_input, nscf_input, scr_input, sigma_input = multi.split_datasets()
    self.abivalidate_multi(multi)

    self.assertIn('many_body', scr_input.runlevel)
    self.assertIn('screening', scr_input.runlevel)

    self.abivalidate_input(sigma_input)
    self.assertIn('many_body', sigma_input.runlevel)
    self.assertIn('sigma', sigma_input.runlevel)
    self.assertNotIn('hybrid', sigma_input.runlevel)

    # Map each step name to its input so the reference files can be
    # (re)written and compared uniformly.
    tagged_inputs = {'scf': scf_input, 'nscf': nscf_input,
                     'scr': scr_input, 'sigma': sigma_input}

    if write_inputs_to_json:
        for tag, inp in tagged_inputs.items():
            with open('g0w0_with_ppmodel_%s_input.json' % tag, mode='w') as fp:
                json.dump(inp.as_dict(), fp, indent=2)

    for tag, inp in tagged_inputs.items():
        self.assert_input_equality('g0w0_with_ppmodel_%s_input.json' % tag, inp)

    # The four inputs must build into a valid G0W0 flow.
    flow = Flow.temporary_flow()
    flow.register_work(G0W0Work(scf_input, nscf_input, scr_input, sigma_input))
    assert flow.build_and_pickle_dump(abivalidate=True) == 0

    # The default value of `shifts` changed in v0.3 from (0.5, 0.5, 0.5) to (0.0, 0.0, 0.0)
    multi = g0w0_with_ppmodel_inputs(self.si_structure, self.si_pseudo,
                                     scf_kppa, nscf_nband, ecuteps, ecutsigx,
                                     ecut=2)
    for inp in multi:
        self.assert_equal(inp["shiftk"].flatten(), (0, 0, 0))
def test_g0w0_with_ppmodel_inputs(self):
    """Testing g0w0_with_ppmodel_input factory."""
    # Cheap parameters: coarse k-mesh, few bands, tiny cutoffs.
    kppa = 10
    nband_scf = nband_nscf = 10
    cutoff_eps = cutoff_sigx = 2
    multi = g0w0_with_ppmodel_inputs(self.si_structure, self.si_pseudo,
                                     kppa, nband_nscf, cutoff_eps, cutoff_sigx,
                                     ecut=2)

    # One dataset per GW step: SCF, NSCF, screening, self-energy.
    scf_input, nscf_input, scr_input, sigma_input = multi.split_datasets()

    # The inputs must assemble into a valid G0W0 flow.
    flow = Flow.temporary_flow()
    flow.register_work(G0W0Work(scf_input, nscf_input, scr_input, sigma_input))
    assert flow.build_and_pickle_dump(abivalidate=True) == 0
def abiopen(filepath):
    """
    Factory function that opens any file supported by abipy.
    File type is detected from the extension

    Args:
        filepath: string with the filename.
    """
    # A pickled flow is handled by the Flow machinery, not by extension lookup.
    if os.path.basename(filepath) == "__AbinitFlow__.pickle":
        return Flow.pickle_load(filepath)

    # Handle old output files produced by Abinit (foo.out123 / foo.abo123).
    import re
    old_output = re.compile(r".+\.(?:out|abo)\d+")
    if old_output.match(filepath):
        return AbinitOutputFile.from_file(filepath)

    # Fall back to extension-based dispatch.
    return abifile_subclass_from_filename(filepath).from_file(filepath)
def abiopen(filepath):
    """
    Factory function that opens any file supported by abipy.
    File type is detected from the extension

    Args:
        filepath: string with the filename.
    """
    # Handle ~ in filepath.
    filepath = os.path.expanduser(filepath)

    # Handle zipped files by creating temporary file with correct extension.
    root, ext = os.path.splitext(filepath)
    if ext in (".bz2", ".gz", ".z"):
        from monty.io import zopen
        with zopen(filepath, "rt") as f:
            import tempfile
            # mkstemp returns an OPEN descriptor: wrap it with os.fdopen so it
            # is closed on exit (the previous code discarded it, leaking one
            # file descriptor per decompressed file).
            fd, tmp_path = tempfile.mkstemp(suffix=os.path.basename(root), text=True)
            cprint("Creating temporary file: %s" % tmp_path, "yellow")
            with os.fdopen(fd, "wt") as t:
                t.write(f.read())
            filepath = tmp_path

    if os.path.basename(filepath) == "__AbinitFlow__.pickle":
        return Flow.pickle_load(filepath)

    # Handle old output files produced by Abinit.
    import re
    outnum = re.compile(r".+\.out[\d]+")
    abonum = re.compile(r".+\.abo[\d]+")
    if outnum.match(filepath) or abonum.match(filepath):
        return AbinitOutputFile.from_file(filepath)

    if os.path.basename(filepath) == "log":
        # Assume Abinit log file.
        return AbinitLogFile.from_file(filepath)

    cls = abifile_subclass_from_filename(filepath)
    return cls.from_file(filepath)