def execute_forces(self):
    """Submit one pw.relax workchain per structure still missing energy/forces.

    Ensures the CONTROL namelist has ``tprnfor`` enabled so forces are
    printed, then submits a single-iteration relax for every structure in
    ``self.ctx.structures`` that does not yet have both an energy and forces.
    """
    process = WorkflowFactory('quantumespresso.pw.relax')
    # Keep only the force inputs that the relax workchain actually declares.
    valid_keys = process.get_description().get('spec').get('inputs').keys()
    relax_inputs = {k: v for k, v in self.inputs.force.items() if k in valid_keys}
    # Force printing of forces (tprnfor) if not already requested.
    param_dict = relax_inputs.get('parameters').get_dict()
    if not param_dict.get('CONTROL', {}).get('tprnfor', False):
        param_dict.setdefault('CONTROL', {})
        param_dict['CONTROL']['tprnfor'] = True
        relax_inputs['parameters'] = ParameterData(dict=param_dict)
    pending = {}
    for uuid in list(self.ctx.structures.keys()):
        if uuid in self.ctx.energy and uuid in self.ctx.forces:
            continue  # already computed for this structure
        per_structure = copy(relax_inputs)
        per_structure['structure'] = self.ctx.structures.get(uuid)
        pending[uuid] = self.submit(process,
                                    max_iterations=Int(1),
                                    max_meta_convergence_iterations=Int(1),
                                    **per_structure)
    return ToContext(**pending)
def main(pot_family, import_from, queue, code, computer, no_import):
    """Assemble the inputs for a vasp.base workflow on Si and submit it."""
    load_dbenv_if_not_loaded()
    from aiida.orm import WorkflowFactory, Code
    from aiida.work import submit

    # Optionally import the POTCAR family before submitting.
    if not no_import:
        click.echo('importing POTCAR files...')
        with cli_spinner():
            import_pots(import_from, pot_family)

    code = Code.get_from_string('{}@{}'.format(code, computer))
    workflow = WorkflowFactory('vasp.base')

    inputs = AttributeDict()
    inputs.structure = create_structure_Si()
    inputs.kpoints = create_kpoints()
    inputs.incar = create_params_simple()
    inputs.code = code
    inputs.potcar_family = get_data_node('str', pot_family)
    inputs.potcar_mapping = get_data_node('parameter', dict={'Si': 'Si'})

    # Scheduler options are passed to the workchain as one parameter node.
    scheduler_opts = AttributeDict()
    scheduler_opts.queue_name = queue
    scheduler_opts.resources = {'num_machines': 1, 'num_mpiprocs_per_machine': 4}
    inputs.options = get_data_node('parameter', dict=scheduler_opts)

    submit(workflow, **inputs)
def main(pot_family, import_from, queue, code, computer, no_import):
    """Assemble the inputs for a vasp.relax workflow on perturbed Si and submit it."""
    load_dbenv_if_not_loaded()
    from aiida.orm import WorkflowFactory, Code
    from aiida.work import submit

    # Optionally import the POTCAR family before submitting.
    if not no_import:
        click.echo('importing POTCAR files...')
        with cli_spinner():
            import_pots(import_from, pot_family)

    code = Code.get_from_string('{}@{}'.format(code, computer))
    workflow = WorkflowFactory('vasp.relax')

    inputs = AttributeDict()
    inputs.structure = create_structure_perturbed()
    inputs.kpoints = AttributeDict()
    inputs.kpoints.distance = get_data_node('float', 0.2)
    inputs.relax = AttributeDict()
    # Convergence settings: converge shape and positions (0.1 tolerance).
    inputs.convergence = AttributeDict()
    inputs.convergence.shape = AttributeDict()
    inputs.convergence.on = get_data_node('bool', True)
    inputs.convergence.positions = get_data_node('float', 0.1)
    inputs.incar_add = get_data_node('parameter', dict={
        'nsw': 1,
        'ediffg': -0.0001,
        'encut': 240,
        'ismear': 0,
        'sigma': 0.1,
        'system': 'test-case:test_relax_wf',
    })  # yapf: disable
    inputs.restart = AttributeDict()
    inputs.code = code
    inputs.potcar_family = get_data_node('str', pot_family)
    inputs.potcar_mapping = get_data_node('parameter', dict={'Si': 'Si'})

    # Scheduler options are passed to the workchain as one parameter node.
    scheduler_opts = AttributeDict()
    scheduler_opts.queue_name = queue
    scheduler_opts.resources = {'num_machines': 1, 'num_mpiprocs_per_machine': 4}
    inputs.options = get_data_node('parameter', dict=scheduler_opts)

    submit(workflow, **inputs)
def test_existing_workflows(self):
    """Test listing all preinstalled workflows"""
    from aiida.orm import Workflow, WorkflowFactory
    plugin_names = pl.existing_plugins(Workflow, 'aiida.workflows')
    self.assertIsInstance(plugin_names, list)
    # Every listed plugin must resolve to a Workflow subclass.
    for name in plugin_names:
        self.assertTrue(issubclass(WorkflowFactory(name), Workflow))
def test_existing_workflows(self):
    """
    Test listing all preinstalled workflows
    """
    available = all_plugins('workflows')
    self.assertIsInstance(available, list)
    # Every listed plugin must resolve to a Workflow subclass.
    for plugin_name in available:
        self.assertTrue(issubclass(WorkflowFactory(plugin_name), Workflow))
def execute_replicate(self):
    """Submit a replicate workchain for every structure lacking energy or forces."""
    process = WorkflowFactory('zrl.utils.replicate')
    # Keep only the replicate inputs that the workchain actually declares.
    valid_keys = process.get_description().get('spec').get('inputs').keys()
    base_inputs = {k: v for k, v in self.inputs.structure.replicate.items()
                   if k in valid_keys}
    base_inputs.setdefault('verbose', self.inputs.verbose)
    futures = {}
    all_structures = chain(self.ctx.structures.items(),
                           self.ctx.partial_structures.items())
    for uuid, structure in all_structures:
        if uuid in self.ctx.energy and uuid in self.ctx.forces:
            continue  # nothing left to compute for this structure
        futures[uuid] = self.submit(process, structure=structure, **base_inputs)
    return ToContext(**futures)
def test_existing_workflows(self):
    """
    Test listing all preinstalled workflows
    """
    registered = get_entry_points('aiida.workflows')
    self.assertIsInstance(registered, list)
    # Every registered entry point must load as a Workflow or WorkChain.
    for ep in registered:
        plugin_cls = WorkflowFactory(ep.name)
        message = 'Workflow plugin class {} is neither a subclass of {} nor {}'.format(
            plugin_cls, Workflow, WorkChain)
        self.assertTrue(issubclass(plugin_cls, (Workflow, WorkChain)), message)
def test_existing_workflows(self):
    """
    Test listing all preinstalled workflows
    """
    available = all_plugins('workflows')
    self.assertIsInstance(available, list)
    # Every listed plugin must load as a Workflow or WorkChain.
    for plugin_name in available:
        plugin_cls = WorkflowFactory(plugin_name)
        message = 'Workflow plugin class {} is neither a subclass of {} nor {}'.format(
            plugin_cls, Workflow, WorkChain)
        self.assertTrue(issubclass(plugin_cls, (Workflow, WorkChain)), message)
def execute_partial(self):
    """Submit a partial-occupancy workchain for each structure still missing results.

    For every structure in ``self.ctx.partial_structures`` without both an
    energy and forces, submit a ``zrl.utils.partial_occ`` workchain, drawing a
    per-structure seed from the workchain RNG only when the user did not
    supply one.

    Fix: the original used ``inputs_.setdefault('seed', Int(self.ctx.rs.randint(...)))``,
    which evaluates the random draw eagerly — consuming RNG state (and thus
    shifting all later draws) and creating an unused Int node even when a seed
    was provided. The draw now happens only when actually needed.
    """
    futures = {}
    process = WorkflowFactory('zrl.utils.partial_occ')
    # Keep only the inputs that the partial-occupancy workchain declares.
    valid_keys = process.get_description().get('spec').get('inputs').keys()
    inputs = {
        key: value
        for key, value in self.inputs.structure.partial.items()
        if key in valid_keys
    }
    inputs.setdefault('verbose', self.inputs.verbose)
    for uuid in list(self.ctx.partial_structures.keys()):
        if uuid in self.ctx.energy and uuid in self.ctx.forces:
            continue  # already computed for this structure
        inputs_ = copy(inputs)
        inputs_['structure'] = self.ctx.partial_structures.get(uuid)
        if 'seed' not in inputs_:
            # Only consume RNG state when no seed was supplied by the caller.
            inputs_['seed'] = Int(self.ctx.rs.randint(2**16 - 1))
        futures[uuid] = self.submit(process, **inputs_)
    return ToContext(**futures)
def test_base(fresh_aiida_env, vasp_params, potentials, vasp_kpoints, vasp_structure, mock_vasp):
    """Test submitting only, not correctness, with mocked vasp code."""
    from aiida.orm import WorkflowFactory, Code
    from aiida import work

    # A local runner with persistence enabled so the workchain can run here.
    runner = work.Runner(poll_interval=0., rmq_config=None, enable_persistence=True)
    work.set_runner(runner)
    base_workchain = WorkflowFactory('vasp.base')

    mock_vasp.store()
    print(mock_vasp.get_remote_exec_path())
    create_authinfo(computer=mock_vasp.get_computer()).store()

    kpoints, _ = vasp_kpoints

    inputs = AttributeDict()
    inputs.code = Code.get_from_string('mock-vasp@localhost')
    inputs.structure = vasp_structure
    inputs.incar = vasp_params
    inputs.kpoints = kpoints
    inputs.potcar_family = get_data_node('str', POTCAR_FAMILY_NAME)
    inputs.potcar_mapping = get_data_node('parameter', dict=POTCAR_MAP)
    inputs.options = get_data_node('parameter', dict={
        'queue_name': 'None',
        'resources': {
            'num_machines': 1,
            'num_mpiprocs_per_machine': 1
        }
    })
    inputs.max_iterations = get_data_node('int', 1)
    inputs.settings = get_data_node('parameter', dict={
        'parser_settings': {
            'add_structure': False,
            'should_parse_CONTCAR': False
        }
    })

    results = work.run(base_workchain, **inputs)
    assert 'retrieved' in results
    assert 'output_parameters' in results
    assert 'remote_folder' in results
def run_calc(self):
    """Submit a pw.relax energy evaluation for each structure in the current round.

    One submission is made per (key, structure index) pair; the parameters of
    each key's runs share the same SYSTEM namelist tweaks (total magnetization
    and, optionally, smearing removal).
    """
    round_params = self.ctx.calc_round_params[self.ctx.calc_round_index]
    tot_magnetizations = round_params['tot_magnetizations']
    process = WorkflowFactory('quantumespresso.pw.relax')
    energy_inputs = {
        'code': self.inputs.energy.code,
        'options': self.inputs.energy.options,
        'settings': self.inputs.energy.settings,
        'kpoints': self.inputs.energy.kpoints
    }
    futures = {}
    for key, tot_magnetization in tot_magnetizations.items():
        # Fresh copy of the base parameters for every key.
        parameters = copy.deepcopy(self.inputs.energy.parameters.get_dict())
        system = parameters.get('SYSTEM')
        if tot_magnetization is not None:
            system['tot_magnetization'] = tot_magnetization
        if not round_params['use_smearing']:
            # Remove all smearing-related keys for this round.
            del system['occupations']
            del system['smearing']
            del system['degauss']
        energy_inputs['parameters'] = ParameterData(dict=parameters)
        for i, structure in self.ctx.structures_to_calc[key]:
            label = 'energy.%s.%d.%d' % (key, i, self.ctx.calc_round_index)
            futures[label] = self.submit(
                process,
                structure=structure,
                pseudo_family=self.inputs.energy.get('pseudo_family', None),
                max_iterations=Int(1),
                max_meta_convergence_iterations=Int(1),
                **energy_inputs)
    return ToContext(**futures)
def initialize(self):
    """Copy the workchain inputs into the context and set up bookkeeping."""
    inputs = self.inputs
    ctx = self.ctx
    ctx.input_cif_folder = inputs.input_cif_folder.value
    ctx.element_list = inputs.element_list.get_list()
    ctx.oxidation_states = inputs.oxidation_states.get_dict()
    ctx.structures_used = {}
    ctx.structures_match_not_used = {}
    ctx.structures_min_energy = {}
    ctx.stoichiometry_rel_tol = inputs.stoichiometry_rel_tol.value
    ctx.min_cell_volume = inputs.min_cell_volume.value
    ctx.max_cell_volume = inputs.max_cell_volume.value
    # Use the provided seed, or draw one from the global numpy RNG.
    if 'seed' in inputs:
        ctx.seed = inputs.seed
    else:
        ctx.seed = Int(np.random.randint(2**31 - 1))
    self.out('seed', ctx.seed)
    ctx.rs = np.random.RandomState(seed=ctx.seed.value)
    # NOTE(review): these two live on self, not self.ctx — kept as-is.
    self.mc_temp = inputs.mc_temp.value
    self.equilibration = inputs.equilibration.value
    ctx.process = WorkflowFactory('quantumespresso.pw.relax')
    ctx.pseudo_family = inputs.energy.get('pseudo_family', None)
    ctx.energy_inputs = {
        'code': inputs.energy.code,
        'options': inputs.energy.options,
        'settings': inputs.energy.settings,
        'kpoints': inputs.energy.kpoints
    }
    ctx.energy_parameters = inputs.energy.parameters.get_dict()
    ctx.mobile_species = inputs.mobile_species.value
    ctx.main_composition = inputs.main_composition.get_dict()
    ctx.ref_composition = inputs.ref_composition.get_dict()
    ctx.max_num_reactions = inputs.max_num_reactions.value
    ctx.det_thr = inputs.det_thr.value
    ctx.coeff_thr = inputs.coeff_thr.value
    ctx.energy_supercell_error = inputs.energy_supercell_error.value
    if 'error_tol_potential' in inputs:
        ctx.error_tol_potential = inputs.error_tol_potential.value
    else:
        ctx.error_tol_potential = None
def main():
    """Write or read a workflow input file and start a workflow if requested.

    With ``--store-template`` a JSON parameter template is written to the
    given input file; otherwise the input file is read, verified and the
    workflow is started, printing its report.

    Fix: the original used a Python-2-only ``print`` statement; converted to
    the ``print()`` function (valid on both 2 and 3, consistent with the rest
    of the codebase), and removed dead commented-out code.
    """
    parser = get_parser()
    args = parser.parse_args()
    from aiida.orm import WorkflowFactory
    from os.path import expanduser, abspath
    import json
    workflow_cls = WorkflowFactory(args.workflow)
    if args.store_template:
        workflow_cls().get_template(path=args.input_file)
    else:
        path = abspath(expanduser(args.input_file))
        with open(path) as inputf:
            params = json.load(inputf)
        workflow = workflow_cls(params=params)
        workflow.label = params.get('label')
        valid, log = workflow.helper._verify_params(  # pylint: disable=protected-access
            workflow.get_parameters(), silent=True)
        if not valid:
            raise IOError('invalid input:\n' + log)
        workflow.start()
        print('\n'.join(workflow.get_report()))
def test_eos_workchain_entry_point(aiida_env):
    """The 'kkr.eos' entry point must resolve to the kkr_eos_wc class."""
    from aiida.orm import WorkflowFactory
    from aiida_kkr.workflows.eos import kkr_eos_wc
    assert WorkflowFactory('kkr.eos') == kkr_eos_wc
from aiida.work.run import run, submit from aiida.orm import load_node, DataFactory, WorkflowFactory from aiida.orm.data.base import Str, Float, Bool # Should be improved by some kind of WorkChainFactory # For now all workchains should be copied to aiida/workflows ForceConstantsData = DataFactory('phonopy.force_constants') ParameterData = DataFactory('parameter') ArrayData = DataFactory('array') StructureData = DataFactory('structure') BandStructureData = DataFactory('phonopy.band_structure') PhononPhonopy = WorkflowFactory('phonopy.phonon') import numpy as np __testing__ = False def get_phonon(structure, force_constants, ph_settings, nac_data=None): from phonopy import Phonopy from aiida_phonopy.workchains.phonon import phonopy_bulk_from_structure phonon = Phonopy(phonopy_bulk_from_structure(structure), ph_settings.dict.supercell, primitive_matrix=ph_settings.dict.primitive, symprec=ph_settings.dict.symmetry_precision)
def test_conv_workchain_entry_point(aiida_env):
    """The 'kkr.convergence_check' entry point must resolve to kkr_check_para_wc."""
    from aiida.orm import WorkflowFactory
    from aiida_kkr.workflows.check_para_convergence import kkr_check_para_wc
    assert WorkflowFactory('kkr.convergence_check') == kkr_check_para_wc
def test_scf_workchain_entry_point(aiida_env):
    """The 'kkr.scf' entry point must resolve to the kkr_scf_wc class."""
    from aiida.orm import WorkflowFactory
    from aiida_kkr.workflows.kkr_scf import kkr_scf_wc
    assert WorkflowFactory('kkr.scf') == kkr_scf_wc
def test_startpot_workchain_entry_point(aiida_env):
    """The 'kkr.startpot' entry point must resolve to the kkr_startpot_wc class."""
    from aiida.orm import WorkflowFactory
    from aiida_kkr.workflows.voro_start import kkr_startpot_wc
    assert WorkflowFactory('kkr.startpot') == kkr_startpot_wc
def test_maginit_workchain_entry_point(aiida_env):
    """The 'kkr.check_mag' entry point must resolve to the kkr_check_mag_wc class."""
    from aiida.orm import WorkflowFactory
    from aiida_kkr.workflows.check_magnetic_state import kkr_check_mag_wc
    assert WorkflowFactory('kkr.check_mag') == kkr_check_mag_wc
from aiida.orm import load_node, DataFactory, WorkflowFactory, CalculationFactory, Code from aiida.orm.data.base import Str, Float, Bool, Int from aiida.work.workchain import _If, _While from aiida_phonopy.workchains.phono3py_dist import generate_phono3py_params import numpy as np __testing__ = False ForceConstantsData = DataFactory('phonopy.force_constants') ParameterData = DataFactory('parameter') ArrayData = DataFactory('array') StructureData = DataFactory('structure') PhononPhonopy = WorkflowFactory('phonopy.phonon') PhononPhono3py = WorkflowFactory('phonopy.phonon3') Phono3pyDist = WorkflowFactory('phonopy.phono3py_dist') class ThermalPhono3py(WorkChain): @classmethod def define(cls, spec): super(ThermalPhono3py, cls).define(spec) spec.input("structure", valid_type=StructureData) spec.input("ph_settings", valid_type=ParameterData) spec.input("es_settings", valid_type=ParameterData) # Optional arguments spec.input("optimize", valid_type=Bool, required=False,
import re import glob import os Str = DataFactory('str') Float = DataFactory('float') Int = DataFactory('int') Bool = DataFactory('bool') StructureData = DataFactory('structure') ParameterData = DataFactory('parameter') ListData = DataFactory('list') KpointsData = DataFactory('array.kpoints') PartialOccupancyWorkChain = WorkflowFactory('zrl.utils.partial_occ') ReplicateWorkChain = WorkflowFactory('zrl.utils.replicate') class PhaseStabilityWorkChain(WorkChain): @classmethod def define(cls, spec): super(PhaseStabilityWorkChain, cls).define(spec) spec.input('input_cif_folder', valid_type=Str) spec.input('element_list', valid_type=ListData) spec.input('oxidation_states', valid_type=ParameterData) spec.input('stoichiometry_rel_tol', valid_type=Float, default=Float(0.05))
} # Collect workflow input data wf_parameters = { 'structure': structure, 'phonopy_input': { 'parameters': phonopy_parameters }, 'input_force': { 'code': 'vasp541mpi@stern', 'parameters': incar_dict, 'resources': machine_dict, 'pseudo': pseudo_dict, 'kpoints': kpoints_dict }, 'input_optimize': { 'code': 'vasp541mpi@stern', 'parameters': incar_dict, 'resources': machine_dict, 'pseudo': pseudo_dict, 'kpoints': kpoints_dict }, } #Submit workflow WorkflowQHA = WorkflowFactory('wf_qha') wf = WorkflowQHA(params=wf_parameters) wf.label = 'QHA VASP Si ' wf.start() print('pk: {}'.format(wf.pk))
def test_relax_wf(fresh_aiida_env, vasp_params, potentials, mock_vasp):
    """Test submitting only, not correctness, with mocked vasp code."""
    from aiida.orm import WorkflowFactory, Code
    from aiida import work

    # A local runner with persistence enabled so the workchain can run here.
    runner = work.Runner(poll_interval=0., rmq_config=None, enable_persistence=True)
    work.set_runner(runner)
    relax_workchain = WorkflowFactory('vasp.relax')

    mock_vasp.store()
    print(mock_vasp.get_remote_exec_path())
    create_authinfo(computer=mock_vasp.get_computer()).store()

    # Reference inputs are parsed from files in the test data directory.
    structure = PoscarParser(
        file_path=data_path('test_relax_wf', 'inp', 'POSCAR')).get_quantity(
            'poscar-structure', {})['poscar-structure']
    kpoints = KpParser(
        file_path=data_path('test_relax_wf', 'inp', 'KPOINTS')).get_quantity(
            'kpoints-kpoints', {})['kpoints-kpoints']
    incar_add = IncarParser(
        file_path=data_path('test_relax_wf', 'inp', 'INCAR')).get_quantity(
            'incar', {})['incar'].get_dict()
    # isif/ibrion are controlled by the relax workchain itself.
    incar_add = {key: val for key, val in incar_add.items()
                 if key not in ['isif', 'ibrion']}
    incar_add['system'] = 'test-case:test_relax_wf'

    restart_clean_workdir = get_data_node('bool', False)
    restart_clean_workdir.store()

    inputs = AttributeDict()
    inputs.code = Code.get_from_string('mock-vasp@localhost')
    inputs.structure = structure
    inputs.incar_add = get_data_node('parameter', dict=incar_add)
    inputs.kpoints = AttributeDict()
    inputs.kpoints.mesh = kpoints
    inputs.potcar_family = get_data_node('str', POTCAR_FAMILY_NAME)
    inputs.potcar_mapping = get_data_node('parameter', dict=POTCAR_MAP)
    inputs.options = get_data_node('parameter', dict={
        'queue_name': 'None',
        'resources': {
            'num_machines': 1,
            'num_mpiprocs_per_machine': 1
        }
    })
    inputs.max_iterations = get_data_node('int', 1)
    inputs.convergence = AttributeDict()
    inputs.convergence.shape = AttributeDict()
    inputs.convergence.on = get_data_node('bool', True)
    inputs.convergence.positions = get_data_node('float', 0.1)
    inputs.restart = AttributeDict()
    inputs.restart.clean_workdir = restart_clean_workdir
    inputs.relax = AttributeDict()

    results = work.run(relax_workchain, **inputs)
    assert 'relaxed_structure' in results
'structure': structure, 'phonopy_input': { 'parameters': phonopy_parameters }, 'input_force': { 'code': 'vasp541mpi@stern', 'parameters': incar_dict, 'resources': machine_dict, 'pseudo': pseudo_dict, 'kpoints': kpoints_dict }, 'input_optimize': { 'code': 'vasp541mpi@stern', 'parameters': incar_dict, 'resources': machine_dict, 'pseudo': pseudo_dict, 'kpoints': kpoints_dict } } #Define calculation to perform and lauch WorkflowPhonon = WorkflowFactory('wf_phonon') wf = WorkflowPhonon(params=wf_parameters, optimize=True, include_born=True) wf.label = material_name wf.description = 'PHON {}'.format(structure.get_formula()) wf.start() print('pk: {}'.format(wf.pk))
'Ga N N' : '1.0 0.001632 0.000 65.20700 2.82100 -0.518000 1.0 1.0 2.63906 3864.27 2.90 0.20 2.93516 6136.44', 'N Ga Ga': '1.0 0.001632 0.000 65.20700 2.82100 -0.518000 1.0 1.0 2.63906 3864.27 2.90 0.20 2.93516 6136.44', 'N Ga N ': '1.0 0.766120 0.000 0.178493 0.20172 -0.045238 1.0 0.0 0.00000 0.00000 2.20 0.20 0.00000 0.00000', 'N N Ga': '1.0 0.001632 0.000 65.20700 2.82100 -0.518000 1.0 0.0 0.00000 0.00000 2.90 0.20 0.00000 0.00000', 'Ga N Ga': '1.0 0.007874 1.846 1.918000 0.75000 -0.301300 1.0 0.0 0.00000 0.00000 2.87 0.15 0.00000 0.00000'} potential ={'pair_style': 'tersoff', 'data': tersoff_gan} # Collect workflow input data wf_parameters = { 'structure': structure, 'phonopy_input': {'parameters': phonopy_parameters}, 'input_force': {'code': 'lammps_force@boston', 'potential': potential, 'resources': lammps_machine}, 'input_optimize': {'code': 'lammps_optimize@boston', 'potential': potential, 'parameters': parameters_opt, 'resources': lammps_machine}, } #Submit workflow WorkflowGruneisen = WorkflowFactory('wf_gruneisen_pressure') wf = WorkflowGruneisen(params=wf_parameters, pre_optimize=False) # pressure in kb wf.label = 'Gruneisen GaN' wf.start() print ('pk: {}'.format(wf.pk))
'GGA' : 'PS' } settings_dict = {'code': {'optimize': 'vasp@stern', 'forces': 'vasp@stern', 'born_charges': 'vasp@stern'}, 'parameters': incar_dict, 'kpoints_density': 0.5, # k-point density, 'pseudos_family': 'pbe_test_family', 'family_folder': '/Users/abel/VASP/test_paw/', 'machine': machine_dict } es_settings = ParameterData(dict=settings_dict) QHAPhonopy = WorkflowFactory('phonopy.qha') # Chose how to run the calculation run_by_deamon = False if not run_by_deamon: result = run(QHAPhonopy, structure=structure, es_settings=es_settings, ph_settings=ph_settings, # Optional settings num_expansions=Int(10), use_nac=Bool(True), ) print (result) else:
'max_iterations': 500000 } settings_dict = { 'code': { 'optimize': 'lammps_optimize@boston_in', 'forces': 'lammps_force@boston_in' }, 'parameters': parameters, 'potential': potential, 'machine': machine_dict } es_settings = ParameterData(dict=settings_dict) OptimizeStructure = WorkflowFactory('phonopy.optimize') # Chose how to run the calculation run_by_deamon = False if not run_by_deamon: results = run( OptimizeStructure, structure=structure, es_settings=es_settings, # Optional settings pressure=Float(0.0), max_iterations=Int(3), tolerance_forces=Float(1e-5), tolerance_stress=Float(1e-2), standarize_cell=Bool(True)) else:
class WindowsWorkflow(Workflow):
    '''Try different inner and outer windows with wannier90.

    Step chain: start (scf) -> get_win (nscf with use_wannier) ->
    get_projections -> get_tbmodel (one wannier sub-workflow per window) ->
    get_reference_bands (nscf band structure) -> make_results.
    '''
    # Legacy-Workflow helper plus the sub-workflow classes used by each step.
    Helper = WorkflowHelper
    ScfWf = WorkflowFactory('vasp.scf')
    NscfWf = WorkflowFactory('vasp.nscf')
    ProjWf = WorkflowFactory('vasp.projections')
    WannierWf = WorkflowFactory('vasp.wannier')

    def __init__(self, **kwargs):
        # The helper needs a back-reference to this workflow instance.
        self.helper = self.Helper(parent=self)
        super(WindowsWorkflow, self).__init__(**kwargs)

    @Workflow.step
    def start(self):
        # Run a self-consistent calculation on the kpoint mesh.
        self.append_to_report(self.helper._wf_start_msg())
        params = self.get_parameters()
        kp = params['kpoints']
        scfpar = self.get_vasp_params(params)
        scfpar['settings'] = params['settings']
        scfpar['structure'] = params['structure']
        scfpar['kpoints'] = {'mesh': kp['mesh']}
        scfpar['paw_family'] = params['paw_family']
        scfpar['paw_map'] = params['paw_map']
        wf = self.ScfWf(params=scfpar)
        wf.label = params.get('label')
        wf.start()
        self.attach_workflow(wf)
        self.append_to_report(
            self.helper._subwf_start_msg('Scf', wf)
        )
        self.next(self.get_win)

    @Workflow.step
    def get_win(self):
        # Continue from the scf calc with an nscf run, use_wannier=True.
        start_wf = self.get_step(self.start).get_sub_workflows()[0]
        scf_calc = start_wf.get_result('calc')
        params = self.get_parameters()
        winpar = self.get_vasp_params(params)
        winpar['continue_from'] = scf_calc.uuid
        winpar['use_wannier'] = True
        wf = self.NscfWf(params=winpar)
        wf.label = params.get('label')
        wf.start()
        self.attach_workflow(wf)
        self.append_to_report(
            self.helper._subwf_start_msg('Win', wf)
        )
        self.next(self.get_projections)

    @Workflow.step
    def get_projections(self):
        # Compute projections, continuing from the nscf ("win") calc.
        win_wf = self.get_step(self.get_win).get_sub_workflows()[0]
        win_calc = win_wf.get_result('calc')
        params = self.get_parameters()
        kppath = self._kppath_vasp_to_wannier(params['kpoints']['path'])
        projpar = self.get_vasp_params(params)
        projpar['continue_from'] = win_calc.uuid
        projpar['projections'] = params['projections']
        projpar['wannier_settings'] = {
            'num_wann': params['wannier_settings']['num_wann'],
            'use_bloch_phases': False,
            'bands_plot': True,
            'hr_plot': True,
            'kpoint_path': kppath
        }
        wf = self.ProjWf(params=projpar)
        wf.label = params.get('label')
        wf.start()
        self.attach_workflow(wf)
        self.append_to_report(
            self.helper._subwf_start_msg('Proj', wf)
        )
        self.next(self.get_tbmodel)

    @classmethod
    def _kppath_vasp_to_wannier(cls, kppath):
        # Convert a VASP-style kpoint path into the wannier90 list format.
        import itertools
        wannier_kpp = []
        for segment in kppath:
            # flatten the segment list
            wannier_kpp.append(
                list(itertools.chain.from_iterable(segment[:4])))
        return wannier_kpp

    @Workflow.step
    def get_tbmodel(self):
        # Launch one wannier sub-workflow per requested window.
        proj_wf = self.get_step(self.get_projections).get_sub_workflows()[0]
        proj_calc = proj_wf.get_result('calc')
        params = self.get_parameters()
        count = 0
        wpar = self.get_wannier_params(params)
        wpar['continue_from'] = proj_calc.uuid
        wpar['settings']['bands_plot'] = True
        wpar['settings']['hr_plot'] = True
        wfpk = []
        for window in params['windows']:
            # NOTE(review): wpar is mutated in place each iteration; this
            # assumes the sub-workflow copies its params at construction —
            # confirm against the Workflow base class.
            wpar['settings']['dis_win_min'] = window['outer'][0]
            wpar['settings']['dis_win_max'] = window['outer'][1]
            wpar['settings']['dis_froz_min'] = window['inner'][0]
            wpar['settings']['dis_froz_max'] = window['inner'][1]
            wf = self.WannierWf(params=wpar)
            wf.label = params.get('label')
            wf.start()
            self.attach_workflow(wf)
            count += 1
            wfpk.append(wf.pk)
        self.append_to_report('running tbmodels for {} windows'.format(count))
        # Assumes at least one window was given (wfpk[0]/wfpk[-1]).
        self.append_to_report(
            'tbmodels pk-range: {} - {}'.format(wfpk[0], wfpk[-1]))
        self.next(self.get_reference_bands)

    @Workflow.step
    def get_reference_bands(self):
        # Compute reference bands on the same kpoints the wannier run used,
        # continuing from the original scf calculation.
        wannier_wf = self.get_step(self.get_tbmodel).get_sub_workflows()[0]
        wannier_bands = wannier_wf.get_result('bands')
        start_wf = self.get_step(self.start).get_sub_workflows()[0]
        scf_calc = start_wf.get_result('calc')
        kplist = wannier_bands.get_kpoints().tolist()
        kplabels = wannier_bands.labels
        params = self.get_parameters()
        bandpar = self.get_vasp_params(params)
        bandpar['continue_from'] = scf_calc.uuid
        bandpar['kpoints'] = {
            'list': kplist
        }
        bandpar['kpoint_labels'] = kplabels
        bandpar['use_wannier'] = False
        wf = self.NscfWf(params=bandpar)
        wf.label = params.get('label')
        wf.start()
        self.attach_workflow(wf)
        self.append_to_report(
            self.helper._subwf_start_msg('Ref-Bands', wf)
        )
        self.next(self.make_results)

    @Workflow.step
    def make_results(self):
        # Collect the reference bands and every per-window band structure.
        self.append_to_report('retrieving and compiling results')
        wannier_wf_list = self.get_step(
            self.get_tbmodel).get_sub_workflows()
        band_wf = self.get_step(
            self.get_reference_bands).get_sub_workflows()[0]
        self.add_result(
            'reference_bands', band_wf.get_result('calc').out.bands)
        self.add_result('reference_calc', band_wf.get_result('calc'))
        for wf in wannier_wf_list:
            # Best-effort: a failed window is reported, not fatal.
            try:
                calc = wf.get_result('calc')
                bands = wf.get_result('bands')
                self.add_result('bands_{}'.format(calc.pk), bands)
            except Exception as e:
                wset = wf.get_parameters()['settings']
                window = 'inner: {}-{}, outer: {}-{}'.format(
                    wset['dis_froz_min'], wset['dis_froz_max'],
                    wset['dis_win_min'], wset['dis_win_max']
                )
                self.append_to_report(
                    ('workflow {pk} with window {window} '
                     'did not yield the expected results: \n'
                     '{error}').format(
                        pk=wf.pk, window=window, error=repr(e))
                )
        self.next(self.exit)

    @classmethod
    def get_general_params(cls, params):
        # Parameters shared by every sub-workflow (extras, label, description).
        genpar = {}
        genpar['extras'] = params.get('extras', {}).copy()
        # ~ genpar['extras']['wf_uuid'] = unicode(cls.uuid)
        genpar['label'] = params.get('label')
        genpar['description'] = params.get('description')
        return genpar

    @classmethod
    def get_vasp_params(cls, params):
        # General params plus the VASP code / scheduler settings.
        vasppar = cls.get_general_params(params)
        vasppar['vasp_code'] = params['vasp_code']
        vasppar['resources'] = params['resources']
        vasppar['queue'] = params['queue']
        return vasppar

    @classmethod
    def get_wannier_params(cls, params):
        # Wannier-specific params; resources/queue fall back to the VASP ones.
        resources = params.get('wannier_resources',
                               params['resources'].copy())
        kppath = cls._kppath_vasp_to_wannier(params['kpoints']['path'])
        queue = params.get('wannier_queue', params['queue'])
        wpar = cls.get_general_params(params)
        wpar['wannier_code'] = params['wannier_code']
        wpar['settings'] = params['wannier_settings'].copy()
        wpar['settings']['kpoint_path'] = kppath
        wpar['resources'] = resources
        wpar['queue'] = queue
        return wpar

    @classmethod
    def get_template(cls, *args,
                     **kwargs):
        '''returns a JSON formatted string that could be stored in a file,
        edited, loaded and used as parameters to run this workflow.'''
        return cls.Helper.get_template(*args, wf_class=cls, **kwargs)

    @classmethod
    def get_params_template(cls):
        '''returns a dictionary with the necessary keys to run this
        workflow and explanations to each key as values'''
        tmpl = cls.Helper.get_params_template()
        wtpl = cls.WannierWf.get_params_template()
        ptpl = cls.ProjWf.get_params_template()
        tmpl['wannier_settings'] = {'num_wann': 'int', 'hr_plot': True}
        tmpl['wannier_code'] = wtpl['wannier_code']
        tmpl['projections'] = ptpl['projections']
        tmpl['windows'] = [{'inner': ['min', 'max'], 'outer': ['min', 'max']}]
        tmpl['windows'] += [{'inner': ['min', 'max'], 'outer': ['min', 'max']}]
        tmpl['kpoints'] = {'mesh': [], 'path': []}
        tmpl['#kpoints'] = (
            'mesh for everythin up until wannier_setup'
            'path for bands, format: [["A", [...], "B", [...]], [...]]')
        return tmpl

    @classmethod
    def _verify_param_resources(cls, params):
        # nbands must be divisible by the total number of MPI processes.
        valid = True
        log = ''
        nbands = params['settings'].get('nbands')
        if nbands:
            res = params['resources']
            nproc = res['num_machines'] * res['num_mpiprocs_per_machine']
            if (nbands % nproc) != 0:
                valid = False
                log += ('nbands is not divisible by num_machines * '
                        'num_mpiprocs_per_machine')
        return valid, log

    def set_params(self, params):
        # Validate before delegating to the base-class storage.
        self.helper._verify_params(params)
        super(WindowsWorkflow, self).set_params(params)
class AutowindowsWorkflow(Workflow):
    '''Try different inner and outer windows with wannier, compare them
    and choose the best one according to simplistic criteria'''
    Helper = WorkflowHelper
    # Sub-workflow classes resolved through the AiiDA plugin system.
    ScfWf = WorkflowFactory('vasp.scf')
    NscfWf = WorkflowFactory('vasp.nscf')
    ProjWf = WorkflowFactory('vasp.projections')
    WannierWf = WorkflowFactory('vasp.wannier')

    def __init__(self, **kwargs):
        self.helper = self.Helper(parent=self)
        super(AutowindowsWorkflow, self).__init__(**kwargs)

    @Workflow.step
    def start(self):
        """Run the selfconsistent (SCF) VASP sub-workflow."""
        self.append_to_report(self.helper._wf_start_msg())
        params = self.get_parameters()
        kp = params['kpoints']
        scfpar = self.get_vasp_params(params)
        scfpar['settings'] = params['settings']
        scfpar['structure'] = params['structure']
        scfpar['kpoints'] = {'mesh': kp['mesh']}
        scfpar['paw_family'] = params['paw_family']
        scfpar['paw_map'] = params['paw_map']
        wf = self.ScfWf(params=scfpar)
        wf.label = params.get('label')
        wf.start()
        self.attach_workflow(wf)
        self.append_to_report(
            self.helper._subwf_start_msg('Scf', wf)
        )
        self.next(self.get_win)

    @Workflow.step
    def get_win(self):
        """Run an NSCF sub-workflow continuing from the SCF calculation
        to produce the wannier90 input (.win) data."""
        start_wf = self.get_step(self.start).get_sub_workflows()[0]
        scf_calc = start_wf.get_result('calc')
        params = self.get_parameters()
        winpar = self.get_vasp_params(params)
        winpar['continue_from'] = scf_calc.uuid
        winpar['use_wannier'] = True
        wf = self.NscfWf(params=winpar)
        wf.label = params.get('label')
        wf.start()
        self.attach_workflow(wf)
        self.append_to_report(
            self.helper._subwf_start_msg('Win', wf)
        )
        self.next(self.get_projections)

    @Workflow.step
    def get_projections(self):
        """Run the projections sub-workflow continuing from the win
        calculation."""
        win_wf = self.get_step(self.get_win).get_sub_workflows()[0]
        win_calc = win_wf.get_result('calc')
        params = self.get_parameters()
        kppath = self._kppath_vasp_to_wannier(params['kpoints']['path'])
        projpar = self.get_vasp_params(params)
        projpar['continue_from'] = win_calc.uuid
        projpar['projections'] = params['projections']
        projpar['wannier_settings'] = {
            'num_wann': params['wannier_settings']['num_wann'],
            'use_bloch_phases': False,
            'bands_plot': True,
            'hr_plot': True,
            'kpoint_path': kppath
        }
        wf = self.ProjWf(params=projpar)
        wf.label = params.get('label')
        wf.start()
        self.attach_workflow(wf)
        self.append_to_report(
            self.helper._subwf_start_msg('Proj', wf)
        )
        self.next(self.get_bands_preview)

    @classmethod
    def _kppath_vasp_to_wannier(cls, kppath):
        """Convert a VASP-style k-point path into the flat segment
        format expected by wannier90."""
        import itertools
        wannier_kpp = []
        for segment in kppath:
            # flatten the segment list
            wannier_kpp.append(
                list(itertools.chain.from_iterable(segment[:4])))
        return wannier_kpp

    @Workflow.step
    def get_bands_preview(self):
        """Run a band-structure NSCF sub-workflow along the given path;
        its bands are used later to construct the windows."""
        params = self.get_parameters()
        start_wf = self.get_step(self.start).get_sub_workflows()[0]
        scf_calc = start_wf.get_result('calc')
        bandpar = self.get_vasp_params(params)
        bandpar['continue_from'] = scf_calc.uuid
        bandpar['kpoints'] = {
            'path': params['kpoints']['path']
        }
        bandpar['use_wannier'] = False
        wf = self.NscfWf(params=bandpar)
        wf.label = params.get('label')
        wf.start()
        self.attach_workflow(wf)
        self.append_to_report(
            self.helper._subwf_start_msg('Preview-Bands', wf)
        )
        self.next(self.make_windows)

    @Workflow.step
    def make_windows(self):
        """Construct the list of (inner, outer) disentanglement window
        pairs from the preview band structure and store it as the
        'windows' attribute."""
        params = self.get_parameters()
        num_wann = params['wannier_settings']['num_wann']
        bandpr_wf = self.get_step(
            self.get_bands_preview).get_sub_workflows()[0]
        bandpr_calc = bandpr_wf.get_result('calc')
        bandpr_bands = bandpr_calc.out.bands
        b, o = bandpr_bands.get_bands(also_occupations=True)
        b = bcp._firstspin(b)
        o = bcp._firstspin(o)
        ef = bandpr_calc.out.results.get_attr('efermi')
        bandgap = bcp.band_gap(b, o, efermi=ef)
        # Per-band energy extrema across all k-points.
        bmin = [b[:, i].min() for i in range(b.shape[1])]
        bmax = [b[:, i].max() for i in range(b.shape[1])]
        # find minimum inner window by looking at band gap energies
        import math
        iw_min = math.floor(bandgap['vector'][0][1])
        iw_max = math.ceil(bandgap['vector'][1][1])
        # find outer widow by counting nwann / 2 bands down and nwann / 2 up
        # from efermi
        n = int(math.ceil(num_wann / 2))
        lower_max = [i for i in bmax if i < ef]
        lower_max.sort(reverse=True)
        ow_min = math.floor(bmin[bmax.index(lower_max[n-1])])
        upper_min = sorted([i for i in bmin if i > ef])
        ow_max = math.ceil(bmax[bmin.index(upper_min[n-1])])
        # Grow each window outwards in the configured increments.
        windows = []
        for i in range(params['num_owindows']):
            ow_offset = i * params['owindows-increment']
            owindow = [ow_min - ow_offset, ow_max + ow_offset]
            for j in range(params['num_iwindows']):
                iw_offset = j * params['iwindows-increment']
                iwindow = [iw_min - iw_offset, iw_max + iw_offset]
                windows.append({'outer': owindow, 'inner': iwindow})
        self.add_attribute('windows', windows)
        self.next(self.get_tbmodel)

    @Workflow.step
    def get_tbmodel(self):
        """Launch one wannier sub-workflow per window pair, continuing
        from the projections calculation."""
        proj_wf = self.get_step(self.get_projections).get_sub_workflows()[0]
        proj_calc = proj_wf.get_result('calc')
        params = self.get_parameters()
        count = 0
        wpar = self.get_wannier_params(params)
        wpar['continue_from'] = proj_calc.uuid
        wpar['settings']['bands_plot'] = True
        wpar['settings']['hr_plot'] = True
        wfpk = []
        for window in self.get_attribute('windows'):
            wpar['settings']['dis_win_min'] = window['outer'][0]
            wpar['settings']['dis_win_max'] = window['outer'][1]
            wpar['settings']['dis_froz_min'] = window['inner'][0]
            wpar['settings']['dis_froz_max'] = window['inner'][1]
            wf = self.WannierWf(params=wpar)
            wf.label = params.get('label')
            wf.start()
            self.attach_workflow(wf)
            count += 1
            wfpk.append(wf.pk)
        self.append_to_report('running tbmodels for {} windows'.format(count))
        self.append_to_report('tbmodels pk-range: {} - {}'.format(
            wfpk[0], wfpk[-1]))
        self.next(self.get_reference_bands)

    @Workflow.step
    def get_reference_bands(self):
        """Run a reference band-structure NSCF sub-workflow on exactly
        the k-points used by the wannier bands, for later comparison."""
        wannier_wf = self.get_step(self.get_tbmodel).get_sub_workflows()[0]
        wannier_bands = wannier_wf.get_result('bands')
        start_wf = self.get_step(self.start).get_sub_workflows()[0]
        scf_calc = start_wf.get_result('calc')
        kplist = wannier_bands.get_kpoints().tolist()
        kplabels = wannier_bands.labels
        params = self.get_parameters()
        bandpar = self.get_vasp_params(params)
        bandpar['continue_from'] = scf_calc.uuid
        bandpar['kpoints'] = {
            'list': kplist
        }
        bandpar['kpoint_labels'] = kplabels
        bandpar['use_wannier'] = False
        wf = self.NscfWf(params=bandpar)
        wf.label = params.get('label')
        wf.start()
        self.attach_workflow(wf)
        self.append_to_report(
            self.helper._subwf_start_msg('Ref-Bands', wf)
        )
        self.next(self.gather_results)

    @Workflow.step
    def gather_results(self):
        """Gather the reference band structure and every wannier band
        structure into workflow results; failed windows are reported
        instead of aborting the collection."""
        self.append_to_report('retrieving and compiling results')
        wannier_wf_list = self.get_step(
            self.get_tbmodel).get_sub_workflows()
        band_wf = self.get_step(
            self.get_reference_bands).get_sub_workflows()[0]
        self.add_result(
            'reference_bands', band_wf.get_result('calc').out.bands)
        self.add_result('reference_calc', band_wf.get_result('calc'))
        wbands_list = []
        for wf in wannier_wf_list:
            try:
                calc = wf.get_result('calc')
                bands = wf.get_result('bands')
                wbands_list.append(bands.uuid)
                self.add_result('bands_{}'.format(calc.pk), bands)
            except Exception as e:
                # Best effort: a single failed window must not stop the
                # remaining windows from being collected; log it instead.
                wset = wf.get_parameters()['settings']
                window = 'inner: {}-{}, outer: {}-{}'.format(
                    wset['dis_froz_min'], wset['dis_froz_max'],
                    wset['dis_win_min'], wset['dis_win_max']
                )
                self.append_to_report(
                    ('workflow {pk} with window {window} '
                     'did not yield the expected results: \n'
                     '{error}').format(
                         pk=wf.pk, window=window, error=repr(e))
                )
        self.add_attribute('wbands_list', wbands_list)
        # BUG FIX: was `self.next(exit)`, which passed the Python builtin
        # instead of the Workflow.exit step (compare make_results in the
        # sibling workflow, which uses `self.next(self.exit)`).
        self.next(self.exit)

    @classmethod
    def get_general_params(cls, params):
        """Extract the parameters common to all sub-workflows
        (extras, label, description) from ``params``."""
        genpar = {}
        genpar['extras'] = params.get('extras', {}).copy()
        genpar['label'] = params.get('label')
        genpar['description'] = params.get('description')
        return genpar

    @classmethod
    def get_vasp_params(cls, params):
        """Extract the parameters for VASP sub-workflows (code,
        resources, queue) on top of the general ones."""
        vasppar = cls.get_general_params(params)
        vasppar['vasp_code'] = params['vasp_code']
        vasppar['resources'] = params['resources']
        vasppar['queue'] = params['queue']
        return vasppar

    @classmethod
    def get_wannier_params(cls, params):
        """Extract the parameters for the wannier sub-workflow, falling
        back to the VASP resources / queue when no wannier-specific
        ones were given."""
        resources = params.get('wannier_resources',
                               params['resources'].copy())
        kppath = cls._kppath_vasp_to_wannier(params['kpoints']['path'])
        queue = params.get('wannier_queue', params['queue'])
        wpar = cls.get_general_params(params)
        wpar['wannier_code'] = params['wannier_code']
        # Copy so kpoint_path does not leak into the caller's dict.
        wpar['settings'] = params['wannier_settings'].copy()
        wpar['settings']['kpoint_path'] = kppath
        wpar['resources'] = resources
        wpar['queue'] = queue
        return wpar

    @classmethod
    def get_template(cls, *args, **kwargs):
        '''returns a JSON formatted string that could be stored in a
        file, edited, loaded and used as parameters to run this
        workflow.'''
        return cls.Helper.get_template(*args, wf_class=cls, **kwargs)

    @classmethod
    def get_params_template(cls):
        '''returns a dictionary with the necessary keys to run this
        workflow and explanations to each key as values'''
        tmpl = cls.Helper.get_params_template()
        wtpl = cls.WannierWf.get_params_template()
        ptpl = cls.ProjWf.get_params_template()
        tmpl['wannier_settings'] = {'num_wann': 'int', 'hr_plot': True}
        tmpl['wannier_code'] = wtpl['wannier_code']
        tmpl['projections'] = ptpl['projections']
        tmpl['iwindows-increment'] = ('eV increment between '
                                      'different inner windows')
        tmpl['num_iwindows'] = 'number of inner windows to try'
        tmpl['owindows-increment'] = ('eV increment between '
                                      'different outer windows')
        tmpl['num_owindows'] = 'number of outer windows to try'
        tmpl['kpoints'] = {'mesh': [], 'path': []}
        # BUG FIX: added the missing separator between the two implicitly
        # concatenated help strings (the old text rendered as
        # '...wannier_setuppath for bands...').
        tmpl['#kpoints'] = (
            'mesh for everything up until wannier_setup; '
            'path for bands, format: [["A", [...], "B", [...]], [...]]')
        return tmpl

    @classmethod
    def _verify_param_resources(cls, params):
        """Check that nbands divides evenly over the requested number
        of MPI processes; return a (valid, log) tuple."""
        valid = True
        log = ''
        nbands = params['settings'].get('nbands')
        if nbands:
            res = params['resources']
            nproc = res['num_machines'] * res['num_mpiprocs_per_machine']
            if (nbands % nproc) != 0:
                valid = False
                log += ('nbands is not divisible by '
                        'num_machines * num_mpiprocs_per_machine')
        return valid, log

    def set_params(self, params):
        """Validate the given parameters before storing them."""
        self.helper._verify_params(params)
        super(AutowindowsWorkflow, self).set_params(params)
        # NOTE(review): this continues a `wf_parameters = {...}` dict whose
        # opening (and the definitions of lammps_machine, potential,
        # parameters_opt, parameters_md) lies before this chunk.
        'resources': lammps_machine
    },
    # Single-point force calculation inputs.
    'input_force': {
        'code': 'lammps_force@boston',
        'potential': potential,
        'resources': lammps_machine
    },
    # Structure optimization inputs.
    'input_optimize': {
        'code': 'lammps_optimize@boston',
        'potential': potential,
        'parameters': parameters_opt,
        'resources': lammps_machine
    },
    # Molecular dynamics inputs.
    'input_md': {
        'code': 'lammps_comb@boston',
        'supercell': [3, 3, 3],
        'potential': potential,
        'parameters': parameters_md,
        'resources': lammps_machine
    },
    # Temperatures (K) to scan: 300, 400, ..., 1400.
    'scan_temperatures': range(300, 1500, 100)
}

# Resolve the workflow plugin, launch it and print its pk for follow-up.
WorkflowQuasiparticle = WorkflowFactory('wf_quasiparticle_thermo')

wf = WorkflowQuasiparticle(params=wf_parameters, optimize=False)
wf.label = 'quasiparticle scan GaN'
wf.start()
print('pk: {}'.format(wf.pk))