def get_irred_perts(qpt, inp, manager=None, workdir="."):
    """
    Run a fake phonon task to obtain the irreducible perturbations for a q-point.

    Args:
        qpt: q-point in reduced coordinates.
        inp: Abinit input providing the ground-state variables; it is deep-copied,
            never modified in place.
        manager: Optional task manager passed to the fake task.
        workdir: Working directory for the fake task (default: current directory).

    Returns:
        Tuple (irred_perts, mem) where irred_perts is the list of irreducible
        perturbations parsed from the log file and mem is the memory estimate
        (0 if no estimate line was found in the log).
    """
    # paral_rf = -1 makes Abinit print the irreducible perturbations and exit.
    phonon_vars = {
        "rfphon": 1,
        "nqpt": 1,
        "qpt": qpt,
        "paral_rf": -1,
        "rfatpol": [1, len(inp.structure)],
        "rfdir": [1, 1, 1],
    }

    work = _setup_faketask('ph', manager=manager, workdir=workdir)
    ph_input = inp.deepcopy()
    ph_input.set_variables(**phonon_vars)
    task = work.register(ph_input)
    work.allocate()
    work.start(wait=True)

    # Perturbations are reported in YAML inside the log file.
    perts = yaml_read_irred_perts(task.log_file.path)

    # Scan the log for Abinit's memory estimate; keep 0 if absent.
    mem = 0
    marker = "P This job should need less than"
    for line in task.log_file.readlines():
        if marker in line:
            mem = float(line.split()[7])
            break

    work.rmtree()
    return perts, mem
def get_irred_perts(self, ngkpt=None, shiftk=None, kptopt=None, qpoint=None, workdir=None, manager=None):
    """
    Compute the list of irreducible perturbations for DFPT.
    It should be called with an input file that contains all the mandatory
    variables required by ABINIT.

    Args:
        ngkpt: Number of divisions for the k-mesh (default None i.e. use ngkpt from self)
        shiftk: Shiftks (default None i.e. use shiftk from self)
        kptopt: Option for k-point generation (default None i.e. use kptopt from self)
        qpoint: qpoint in reduced coordinates. Used to shift the k-mesh (default None i.e no shift)
        workdir: Working directory of the fake task used to compute the ibz. Use None for temporary dir.
        manager: :class:`TaskManager` of the task. If None, the manager is initialized from the config file.

    .. note::

        ngkpt, shiftk, kptopt and qpoint are currently NOT applied to the
        input in this implementation — TODO confirm whether they should be
        forwarded to the fake task.

    Returns:
        List of dictionaries with the Abinit variables defining the irreducible perturbation
        Example:

            [{'idir': 1, 'ipert': 1, 'qpt': [0.25, 0.0, 0.0]},
             {'idir': 2, 'ipert': 1, 'qpt': [0.25, 0.0, 0.0]}]

    .. warning::

        Multiple datasets are ignored. Only the list of k-points for dataset 1 are returned.
    """
    if self.ndtset != 1:
        raise RuntimeError("get_irred_perts cannot be used if the input contains more than one dataset")

    warnings.warn("get_irred_perts is still under development.")

    # Avoid modifications in self.
    inp = self.split_datasets()[0].deepcopy()

    # Use the magic value paral_rf = -1 to get the list of irreducible perturbations for this q-point.
    d = dict(
        paral_rf=-1,
        rfatpol=[1, len(inp.structure)],  # Set of atoms to displace.
        rfdir=[1, 1, 1],                  # Along this set of reduced coordinate axis.
    )
    inp.set_vars(d)

    # Build a Task to run Abinit in a shell subprocess.
    task = AbinitTask.temp_shell_task(inp, workdir=workdir, manager=manager)
    task.start_and_wait(autoparal=False)

    # Parse the log file to get the perturbations.
    try:
        return yaml_read_irred_perts(task.log_file.path)
    except Exception:
        # Try to understand if it's a problem with the Abinit input.
        report = task.get_event_report()
        if report.errors:
            raise self.Error(str(report))
        # Bare raise re-raises the original exception with its traceback intact.
        raise
def get_irred_perts(self, ngkpt=None, shiftk=None, kptopt=None, qpoint=None, workdir=None, manager=None):
    """
    Compute the list of irreducible perturbations for DFPT.
    It should be called with an input file that contains all the mandatory
    variables required by ABINIT.

    Args:
        ngkpt: Number of divisions for the k-mesh (default None i.e. use ngkpt from self)
        shiftk: Shiftks (default None i.e. use shiftk from self)
        kptopt: Option for k-point generation (default None i.e. use kptopt from self)
        qpoint: qpoint in reduced coordinates. Used to shift the k-mesh (default None i.e no shift)
        workdir: Working directory of the fake task used to compute the ibz. Use None for temporary dir.
        manager: :class:`TaskManager` of the task. If None, the manager is initialized from the config file.

    .. note::

        ngkpt, shiftk, kptopt and qpoint are currently NOT applied to the
        input in this implementation — TODO confirm whether they should be
        forwarded to the fake task.

    Returns:
        List of dictionaries with the Abinit variables defining the irreducible perturbation
        Example:

            [{'idir': 1, 'ipert': 1, 'qpt': [0.25, 0.0, 0.0]},
             {'idir': 2, 'ipert': 1, 'qpt': [0.25, 0.0, 0.0]}]

    .. warning::

        Multiple datasets are ignored. Only the list of k-points for dataset 1 are returned.
    """
    if self.ndtset != 1:
        raise RuntimeError("get_irred_perts cannot be used if the input contains more than one dataset")

    warnings.warn("get_irred_perts is still under development.")

    # Avoid modifications in self.
    inp = self.split_datasets()[0].deepcopy()

    # Use the magic value paral_rf = -1 to get the list of irreducible perturbations for this q-point.
    d = dict(
        paral_rf=-1,
        rfatpol=[1, len(inp.structure)],  # Set of atoms to displace.
        rfdir=[1, 1, 1],                  # Along this set of reduced coordinate axis.
    )
    inp.set_vars(d)

    # Build a Task to run Abinit in a shell subprocess.
    task = AbinitTask.temp_shell_task(inp, workdir=workdir, manager=manager)
    task.start_and_wait(autoparal=False)

    # Parse the log file to get the perturbations.
    try:
        return yaml_read_irred_perts(task.log_file.path)
    except Exception:
        # Try to understand if it's a problem with the Abinit input.
        report = task.get_event_report()
        if report.errors:
            raise self.Error(str(report))
        # Bare raise re-raises the original exception with its traceback intact.
        raise
def phonon_flow(workdir, manager, scf_input, ph_inputs):
    """
    Build an `AbinitFlow` for phonon calculations.

    Args:
        workdir: Working directory.
        manager: `TaskManager` used to submit the jobs.
        scf_input: Input for the GS SCF run.
        ph_inputs: List of Inputs for the phonon runs (a single input is
            also accepted and wrapped in a list).

    Returns:
        `AbinitFlow` (already allocated).
    """
    natom = len(scf_input.structure)

    # Create the container that will manage the different workflows.
    flow = AbinitFlow(workdir, manager)

    # Register the first workflow (GS calculation).
    scf_task = flow.register_task(scf_input, task_class=ScfTask)

    # Build a temporary workflow with a shell manager just to run
    # ABINIT to get the list of irreducible pertubations for this q-point.
    shell_manager = manager.to_shell_manager(mpi_ncpus=1)

    if not isinstance(ph_inputs, (list, tuple)):
        ph_inputs = [ph_inputs]

    for i, ph_input in enumerate(ph_inputs):
        fake_input = ph_input.deepcopy()

        # Run abinit on the front-end to get the list of irreducible pertubations.
        tmp_dir = os.path.join(workdir, "__ph_run" + str(i) + "__")
        w = Workflow(workdir=tmp_dir, manager=shell_manager)
        fake_task = w.register(fake_input)

        # Use the magic value paral_rf = -1
        # to get the list of irreducible perturbations for this q-point.
        # (renamed from `vars` to avoid shadowing the builtin)
        abivars = dict(
            paral_rf=-1,
            rfatpol=[1, natom],  # Set of atoms to displace.
            rfdir=[1, 1, 1],     # Along this set of reduced coordinate axis.
        )
        fake_task.strategy.add_extra_abivars(abivars)

        # BUGFIX: the workflow must be allocated before starting it
        # (cf. the variant of this function that calls w.allocate()).
        w.allocate()
        w.start(wait=True)

        # Parse the log file to get the perturbations.
        irred_perts = yaml_read_irred_perts(fake_task.log_file.path)
        print(irred_perts)
        w.rmtree()

        # Now we can build the final list of workflows:
        # One workflow per q-point, each workflow computes all
        # the irreducible perturbations for a single q-point.
        work_qpt = PhononWorkflow()

        for irred_pert in irred_perts:
            print(irred_pert)
            new_input = ph_input.deepcopy()

            #rfatpol   1 1   # Only the first atom is displaced
            #rfdir   1 0 0   # Along the first reduced coordinate axis
            qpt = irred_pert["qpt"]
            idir = irred_pert["idir"]
            ipert = irred_pert["ipert"]

            # TODO this will work for phonons, but not for the other types of perturbations.
            # Displace atom ipert along the single reduced direction idir.
            rfdir = 3 * [0]
            rfdir[idir - 1] = 1
            rfatpol = [ipert, ipert]

            new_input.set_variables(
                #rfpert=1,
                qpt=qpt,
                rfdir=rfdir,
                rfatpol=rfatpol,
            )

            work_qpt.register(new_input, deps={scf_task: "WFK"}, task_class=PhononTask)

        flow.register_work(work_qpt)

    return flow.allocate()
def phonon_flow(workdir, manager, scf_input, ph_inputs):
    """
    Build an `AbinitFlow` for phonon calculations.

    Args:
        workdir: Working directory.
        manager: `TaskManager` used to submit the jobs.
        scf_input: Input for the GS SCF run.
        ph_inputs: List of Inputs for the phonon runs (a single input is
            also accepted and wrapped in a list).

    Returns:
        `AbinitFlow` (already allocated).
    """
    natom = len(scf_input.structure)

    # Create the container that will manage the different workflows.
    flow = AbinitFlow(workdir, manager)

    # Register the first workflow (GS calculation).
    scf_task = flow.register_task(scf_input, task_class=ScfTask)

    # Build a temporary workflow with a shell manager just to run
    # ABINIT to get the list of irreducible pertubations for this q-point.
    shell_manager = manager.to_shell_manager(mpi_ncpus=1)

    if not isinstance(ph_inputs, (list, tuple)):
        ph_inputs = [ph_inputs]

    for i, ph_input in enumerate(ph_inputs):
        fake_input = ph_input.deepcopy()

        # Run abinit on the front-end to get the list of irreducible pertubations.
        tmp_dir = os.path.join(workdir, "__ph_run" + str(i) + "__")
        w = Workflow(workdir=tmp_dir, manager=shell_manager)
        fake_task = w.register(fake_input)

        # Use the magic value paral_rf = -1 to get the list of irreducible
        # perturbations for this q-point.
        # (renamed from `vars` to avoid shadowing the builtin)
        abivars = dict(
            paral_rf=-1,
            rfatpol=[1, natom],  # Set of atoms to displace.
            rfdir=[1, 1, 1],     # Along this set of reduced coordinate axis.
        )
        fake_task.strategy.add_extra_abivars(abivars)

        w.allocate()
        w.start(wait=True)

        # Parse the log file to get the perturbations.
        irred_perts = yaml_read_irred_perts(fake_task.log_file.path)
        print(irred_perts)
        w.rmtree()

        # Now we can build the final list of workflows:
        # One workflow per q-point, each workflow computes all
        # the irreducible perturbations for a single q-point.
        work_qpt = PhononWorkflow()

        for irred_pert in irred_perts:
            print(irred_pert)
            new_input = ph_input.deepcopy()

            #rfatpol   1 1   # Only the first atom is displaced
            #rfdir   1 0 0   # Along the first reduced coordinate axis
            qpt = irred_pert["qpt"]
            idir = irred_pert["idir"]
            ipert = irred_pert["ipert"]

            # TODO this will work for phonons, but not for the other types of perturbations.
            # Displace atom ipert along the single reduced direction idir.
            rfdir = 3 * [0]
            rfdir[idir - 1] = 1
            rfatpol = [ipert, ipert]

            new_input.set_variables(
                #rfpert=1,
                qpt=qpt,
                rfdir=rfdir,
                rfatpol=rfatpol,
            )

            work_qpt.register(new_input, deps={scf_task: "WFK"}, task_class=PhononTask)

        flow.register_work(work_qpt)

    return flow.allocate()