def inner(settings=None, parameters=None):
    """Assemble the inputs for a vasp2w90 run, falling back to fixture defaults.

    ``settings`` is an optional plain dict wrapped into a ``Dict`` node;
    ``parameters`` may be a plain dict, a ready node, or ``None`` (fixture default).
    """
    run_inputs = AttributeDict()
    # Single machine, single MPI process.
    run_inputs.metadata = AttributeDict(
        {'options': {'resources': {'num_machines': 1, 'num_mpiprocs_per_machine': 1}}})
    if settings is not None:
        run_inputs.settings = Dict(dict=settings)
    # Normalize the parameters argument to a node.
    if isinstance(parameters, dict):
        parameters = get_data_class('dict')(dict=parameters)
    elif parameters is None:
        parameters = vasp_params
    run_inputs.parameters = parameters
    run_inputs.code = vasp_code
    run_inputs.structure = vasp_structure
    run_inputs.potential = potentials
    run_inputs.kpoints, _ = vasp_kpoints
    run_inputs.wannier_parameters = wannier_params
    run_inputs.wannier_projections = wannier_projections
    return run_inputs
def main(pot_family, import_from, queue, code, computer, no_import):
    """Optionally import POTCAR files, then submit the base VASP workchain for Si."""
    load_dbenv_if_not_loaded()
    from aiida.orm import WorkflowFactory, Code
    from aiida.work import submit

    if not no_import:
        click.echo('importing POTCAR files...')
        with cli_spinner():
            import_pots(import_from, pot_family)

    vasp_code = Code.get_from_string('{}@{}'.format(code, computer))
    base_workchain = WorkflowFactory('vasp.base')

    inputs = AttributeDict()
    inputs.code = vasp_code
    inputs.structure = create_structure_Si()
    inputs.kpoints = create_kpoints()
    inputs.incar = create_params_simple()
    inputs.potcar_family = get_data_node('str', pot_family)
    inputs.potcar_mapping = get_data_node('parameter', dict={'Si': 'Si'})

    # Scheduler options: one machine, four MPI processes.
    job_options = AttributeDict()
    job_options.queue_name = queue
    job_options.resources = {'num_machines': 1, 'num_mpiprocs_per_machine': 4}
    inputs.options = get_data_node('parameter', dict=job_options)

    submit(base_workchain, **inputs)
def main(code_string, datafiles, parameters):
    """Set up and run the logger ``gc_example`` workchain."""
    # AiiDA datatype used to wrap the plain-dict parameters.
    dict_data = DataFactory('dict')
    # The workchain we would like to call.
    workchain = WorkflowFactory('logger.gc_example')

    inputs = AttributeDict()
    inputs.code = Code.get_from_string(code_string)
    inputs.datafiles = datafiles
    inputs.parameters = dict_data(dict=parameters)
    # Ask the workchain for more explicit report output.
    inputs.verbose = Bool(True)

    run(workchain, **inputs)
def inner(inputs=None, settings=None):
    """Run the mocked VASP calculation and return the ``(results, node)`` pair.

    ``inputs`` may override any of the default input ports assembled here.
    """
    from aiida.plugins import CalculationFactory
    from aiida.engine import run

    calculation = CalculationFactory('vasp.vasp')
    mock_vasp.store()
    create_authinfo(computer=mock_vasp.computer, store=True)

    kpoints, _ = vasp_kpoints
    calc_inputs = AttributeDict()
    calc_inputs.code = Code.get_from_string('mock-vasp@localhost')
    calc_inputs.structure = vasp_structure
    calc_inputs.parameters = vasp_params
    calc_inputs.kpoints = kpoints
    # Resolve the POTCARs directly from the structure and mapping.
    calc_inputs.potential = get_data_class(
        'vasp.potcar').get_potcars_from_structure(
            structure=calc_inputs.structure,
            family_name=POTCAR_FAMILY_NAME,
            mapping=POTCAR_MAP)
    calc_inputs.metadata = {
        'options': {
            'withmpi': False,
            'queue_name': 'None',
            'resources': {
                'num_machines': 1,
                'num_mpiprocs_per_machine': 1
            },
            'max_wallclock_seconds': 3600
        }
    }
    # Caller-supplied overrides take precedence over the defaults above.
    if inputs is not None:
        calc_inputs.update(inputs)
    return run.get_node(calculation, **calc_inputs)
def test_validate_input(test_crystal_code, test_structure_data, crystal_calc_parameters, test_basis_family_predefined):
    """Check that CrystalSerialCalculation rejects incomplete inputs and accepts complete ones."""
    from aiida.common.extendeddicts import AttributeDict
    from aiida_crystal_dft.calculations.serial import CrystalSerialCalculation

    inputs = AttributeDict()
    # No inputs at all: must be rejected.
    with pytest.raises(ValueError):
        CrystalSerialCalculation(inputs)
    inputs.metadata = {
        'options': {
            'resources': {
                'num_machines': 1,
                'num_mpiprocs_per_machine': 1
            }
        }
    }
    inputs.code = test_crystal_code
    # Code alone is still incomplete.
    with pytest.raises(ValueError):
        CrystalSerialCalculation(inputs)
    inputs.structure = test_structure_data
    # Parameters are still missing.
    with pytest.raises(ValueError):
        CrystalSerialCalculation(inputs)
    inputs.parameters = crystal_calc_parameters
    # TODO: write schemas code checking that either basis or basis_family is present!
    # with pytest.raises(ValueError):
    #     CrystalSerialCalculation(inputs)
    # inputs.basis_family = test_basis_family_predefined
    assert CrystalSerialCalculation(inputs)
def main(pot_family, import_from, queue, code, computer, no_import):
    """Optionally import POTCAR files, then submit the VASP relax workchain on a perturbed cell."""
    load_dbenv_if_not_loaded()
    from aiida.orm import WorkflowFactory, Code
    from aiida.work import submit

    if not no_import:
        click.echo('importing POTCAR files...')
        with cli_spinner():
            import_pots(import_from, pot_family)

    inputs = AttributeDict()
    inputs.code = Code.get_from_string('{}@{}'.format(code, computer))
    inputs.structure = create_structure_perturbed()
    # k-point grid given by density rather than an explicit mesh.
    inputs.kpoints = AttributeDict()
    inputs.kpoints.distance = get_data_node('float', 0.2)
    inputs.relax = AttributeDict()
    # Converge on cell shape and ionic positions.
    inputs.convergence = AttributeDict()
    inputs.convergence.shape = AttributeDict()
    inputs.convergence.on = get_data_node('bool', True)
    inputs.convergence.positions = get_data_node('float', 0.1)
    inputs.incar_add = get_data_node('parameter', dict={
        'nsw': 1,
        'ediffg': -0.0001,
        'encut': 240,
        'ismear': 0,
        'sigma': 0.1,
        'system': 'test-case:test_relax_wf',
    })  # yapf: disable
    inputs.restart = AttributeDict()
    inputs.potcar_family = get_data_node('str', pot_family)
    inputs.potcar_mapping = get_data_node('parameter', dict={'Si': 'Si'})

    job_options = AttributeDict()
    job_options.queue_name = queue
    job_options.resources = {'num_machines': 1, 'num_mpiprocs_per_machine': 4}
    inputs.options = get_data_node('parameter', dict=job_options)

    submit(WorkflowFactory('vasp.relax'), **inputs)
def main(code_string, incar, kmesh, structure, potential_family, potential_mapping, options):
    """Main method to setup the calculation."""
    # Frequently used AiiDA data type factories.
    bool_data = DataFactory('bool')
    dict_data = DataFactory('dict')

    # The workchain we would like to call.
    workchain = WorkflowFactory('vasp.relax')

    inputs = AttributeDict()

    # Parser settings left empty: use defaults.
    settings = AttributeDict()
    settings.parser_settings = {}

    inputs.code = Code.get_from_string(code_string)
    inputs.structure = structure

    # Build the k-point mesh from the supplied density.
    kpoints = DataFactory('array.kpoints')()
    kpoints.set_kpoints_mesh(kmesh)
    inputs.kpoints = kpoints

    inputs.parameters = dict_data(dict=incar)
    inputs.potential_family = DataFactory('str')(potential_family)
    inputs.potential_mapping = dict_data(dict=potential_mapping)
    inputs.options = dict_data(dict=options)
    inputs.settings = dict_data(dict=settings)
    # Give more explicit output to report.
    inputs.verbose = bool_data(True)

    # Relaxation control passed on to the relax workchain.
    relax = AttributeDict()
    # Turn relaxation on, using the conjugate-gradient algorithm.
    relax.perform = bool_data(True)
    relax.algo = DataFactory('str')('cg')
    # Force cutoff limit (EDIFFG, but no sign needed).
    relax.force_cutoff = DataFactory('float')(0.01)
    # positions + shape + volume together correspond to the well known ISIF=3 setting
    # (positions strictly not needed as the default is on; shape and volume default to False).
    relax.positions = bool_data(True)
    relax.shape = bool_data(True)
    relax.volume = bool_data(True)
    # Maximum number of ionic steps.
    relax.steps = DataFactory('int')(100)
    inputs.relax = relax

    # Submit the requested workchain with the supplied inputs.
    submit(workchain, **inputs)
def test_relax_wc(fresh_aiida_env, vasp_params, potentials, mock_vasp):
    # def test_relax_wc(fresh_aiida_env, vasp_params, potentials, mock_vasp, mock_relax_wc):
    """Test submitting only, not correctness, with mocked vasp code."""
    from aiida.orm import Code
    from aiida.plugins import WorkflowFactory
    from aiida.engine import run
    workchain = WorkflowFactory('vasp.relax')
    # Store the mocked code and make localhost usable as a computer.
    mock_vasp.store()
    create_authinfo(computer=mock_vasp.computer, store=True)
    # Reference inputs parsed from the test data directory.
    structure = PoscarParser(file_path=data_path('test_relax_wc', 'inp', 'POSCAR')).structure
    kpoints = KpointsParser(file_path=data_path('test_relax_wc', 'inp', 'KPOINTS')).kpoints
    parameters = IncarParser(file_path=data_path('test_relax_wc', 'inp', 'INCAR')).incar
    parameters['system'] = 'test-case:test_relax_wc'
    # Strip the relaxation-related INCAR tags; the workchain sets those itself
    # from the 'relax' namespace below.
    parameters = {'incar': {k: v for k, v in parameters.items() if k not in ['isif', 'ibrion', 'nsw', 'ediffg']}}
    parameters['relax'] = {}
    parameters['relax']['perform'] = True
    parameters['relax']['algo'] = 'cg'
    parameters['relax']['force_cutoff'] = 0.01
    inputs = AttributeDict()
    inputs.code = Code.get_from_string('mock-vasp@localhost')
    inputs.structure = structure
    inputs.kpoints = kpoints
    inputs.parameters = get_data_node('dict', dict=parameters)
    inputs.potential_family = get_data_node('str', POTCAR_FAMILY_NAME)
    inputs.potential_mapping = get_data_node('dict', dict=POTCAR_MAP)
    # Serial, single-core run; the mock code does no real work.
    inputs.options = get_data_node('dict',
                                   dict={
                                       'withmpi': False,
                                       'queue_name': 'None',
                                       'max_wallclock_seconds': 1,
                                       'import_sys_environment': True,
                                       'resources': {
                                           'num_machines': 1,
                                           'num_mpiprocs_per_machine': 1
                                       },
                                   })
    inputs.max_iterations = get_data_node('int', 1)
    inputs.clean_workdir = get_data_node('bool', False)
    inputs.verbose = get_data_node('bool', True)
    results, node = run.get_node(workchain, **inputs)
    assert node.exit_status == 0
    assert 'relax' in results
    relax = results['relax']
    assert 'structure' in relax
    sites = relax['structure'].sites
    # Both sites stay silicon and end up at the expected relaxed positions
    # (reference values from the mocked output data).
    assert sites[0].kind_name == 'Si'
    assert sites[1].kind_name == 'Si'
    np.testing.assert_allclose(sites[0].position, [4.8125, 4.8125, 4.8125])
    np.testing.assert_allclose(sites[1].position, [0.6875, 0.6875, 0.715])
def main(code_string, incar, kmesh, structure, potential_family, potential_mapping, options):
    """Main method to setup the calculation."""
    # AiiDA data type factories used below.
    dict_data = DataFactory('dict')
    kpoints_data = DataFactory('array.kpoints')

    # The master workchain orchestrating convergence, relaxation and bands.
    workchain = WorkflowFactory('vasp.master')

    inputs = AttributeDict()

    # Only parse the quantities we actually inspect.
    settings = AttributeDict()
    settings.parser_settings = {
        'output_params': ['total_energies', 'maximum_force']
    }

    inputs.code = Code.get_from_string(code_string)
    inputs.structure = structure

    # Build the k-point mesh from the requested density.
    kpoints = kpoints_data()
    kpoints.set_kpoints_mesh(kmesh)
    inputs.kpoints = kpoints

    inputs.parameters = dict_data(dict=incar)
    inputs.potential_family = Str(potential_family)
    inputs.potential_mapping = dict_data(dict=potential_mapping)
    inputs.options = dict_data(dict=options)
    inputs.settings = dict_data(dict=settings)
    # Give more explicit output to report.
    inputs.verbose = Bool(True)

    # Turn off relaxation and only extract the electronic band structure.
    relax = AttributeDict()
    relax.perform = Bool(False)
    inputs.relax = relax
    inputs.extract_bands = Bool(True)

    # Submit the requested workchain with the supplied inputs.
    submit(workchain, **inputs)
def test_base(fresh_aiida_env, vasp_params, potentials, vasp_kpoints, vasp_structure, mock_vasp):
    """Test submitting only, not correctness, with mocked vasp code."""
    from aiida.orm import WorkflowFactory, Code
    from aiida import work

    # Use an in-process runner so the workchain completes without a daemon.
    rmq_config = None
    runner = work.Runner(poll_interval=0., rmq_config=rmq_config, enable_persistence=True)
    work.set_runner(runner)

    base_wf_proc = WorkflowFactory('vasp.base')

    mock_vasp.store()
    print(mock_vasp.get_remote_exec_path())
    comp = mock_vasp.get_computer()
    create_authinfo(computer=comp).store()

    # ~ os_env = os.environ.copy()
    # ~ sp.call(['verdi', 'daemon', 'start'], env=os_env)
    # ~ print sp.check_output(['verdi', 'daemon', 'status'], env=os_env)
    # ~ print sp.check_output(['which', 'verdi'], env=os_env)
    kpoints, _ = vasp_kpoints

    inputs = AttributeDict()
    inputs.code = Code.get_from_string('mock-vasp@localhost')
    inputs.structure = vasp_structure
    inputs.incar = vasp_params
    inputs.kpoints = kpoints
    inputs.potcar_family = get_data_node('str', POTCAR_FAMILY_NAME)
    inputs.potcar_mapping = get_data_node('parameter', dict=POTCAR_MAP)
    inputs.options = get_data_node(
        'parameter',
        dict={
            'queue_name': 'None',
            'resources': {
                'num_machines': 1,
                'num_mpiprocs_per_machine': 1
            }
        })
    inputs.max_iterations = get_data_node('int', 1)
    # Skip structure output parsing; the mocked run provides no CONTCAR.
    inputs.settings = get_data_node('parameter', dict={'parser_settings': {'add_structure': False, 'should_parse_CONTCAR': False}})

    # ~ workchain = run(base_wf_proc, **inputs)
    results = work.run(base_wf_proc, **inputs)
    # ~ workchain = load_node(running.pk)
    # ~ timeout = 5
    # ~ waiting_for = 0
    # ~ while not workchain.is_terminated and waiting_for < timeout:
    # ~ time.sleep(1)
    # ~ waiting_for += 1

    # The standard calculation outputs must be present.
    assert 'retrieved' in results
    assert 'output_parameters' in results
    assert 'remote_folder' in results
def test_vasp_wc_chgcar(fresh_aiida_env, vasp_params, potentials, vasp_kpoints, vasp_structure, mock_vasp):
    """Test submitting only, not correctness, with mocked vasp code, test fetching of the CHGCAR."""
    from aiida.orm import Code
    from aiida.plugins import WorkflowFactory
    from aiida.engine import run

    workchain = WorkflowFactory('vasp.vasp')
    mock_vasp.store()
    create_authinfo(computer=mock_vasp.computer, store=True)

    mesh, _ = vasp_kpoints
    wc_inputs = AttributeDict()
    wc_inputs.code = Code.get_from_string('mock-vasp@localhost')
    wc_inputs.structure = vasp_structure
    wc_inputs.kpoints = mesh
    wc_inputs.parameters = get_data_node('dict', dict={'incar': vasp_params.get_dict()})
    wc_inputs.potential_family = get_data_node('str', POTCAR_FAMILY_NAME)
    wc_inputs.potential_mapping = get_data_node('dict', dict=POTCAR_MAP)
    wc_inputs.options = get_data_node('dict', dict={
        'withmpi': False,
        'queue_name': 'None',
        'resources': {
            'num_machines': 1,
            'num_mpiprocs_per_machine': 1
        },
        'max_wallclock_seconds': 3600
    })
    # Explicitly retrieve the CHGCAR and ask the parser to store it.
    wc_inputs.settings = get_data_node('dict', dict={
        'ADDITIONAL_RETRIEVE_LIST': ['CHGCAR'],
        'parser_settings': {
            'add_chgcar': True
        }
    })
    wc_inputs.max_iterations = get_data_node('int', 1)
    wc_inputs.clean_workdir = get_data_node('bool', False)
    wc_inputs.verbose = get_data_node('bool', True)

    results, node = run.get_node(workchain, **wc_inputs)

    # The run succeeds and exposes the CHGCAR content from the mock data.
    assert node.exit_status == 0
    assert 'chgcar' in results
    assert results['chgcar'].get_content() == 'This is a test CHGCAR file.\n'
def main(code_string, incar, kmesh, structure, potential_family, potential_mapping, options):
    """Main method to setup the calculation."""
    # AiiDA data type factories used below.
    dict_data = DataFactory('dict')
    kpoints_data = DataFactory('array.kpoints')

    # The calculation plugin to run (VASP to Wannier90 interface).
    calculation = CalculationFactory('vasp.vasp2w90')

    inputs = AttributeDict()

    # Only parse the quantities we actually need.
    settings = AttributeDict()
    settings.parser_settings = {
        'output_params': ['total_energies', 'maximum_force']
    }

    inputs.code = Code.get_from_string(code_string)
    inputs.structure = structure

    # Build the k-point mesh from the requested density.
    kpoints = kpoints_data()
    kpoints.set_kpoints_mesh(kmesh)
    inputs.kpoints = kpoints

    inputs.parameters = dict_data(dict=incar)
    # Resolve the POTCARs directly from the structure and mapping.
    inputs.potential = DataFactory('vasp.potcar').get_potcars_from_structure(
        structure=inputs.structure,
        family_name=potential_family,
        mapping=potential_mapping)
    inputs.metadata = AttributeDict({'options': options})
    inputs.settings = dict_data(dict=settings)
    # Wannier90 projectors for silicon.
    inputs.wannier_projections = DataFactory('list')(list=['Si: sp3'])

    # Submit the requested calculation with the supplied inputs.
    submit(calculation, **inputs)
def main(code_string, incar, kmesh, structures, potential_family, potential_mapping, options):
    """Main method to setup the calculation."""
    # AiiDA data type factories used below.
    dict_data = DataFactory('dict')
    kpoints_data = DataFactory('array.kpoints')

    # The equation-of-state workchain to call.
    workchain = EosWorkChain

    inputs = AttributeDict()

    # Only parse the quantities we actually inspect.
    settings = AttributeDict()
    settings.parser_settings = {
        'output_params': ['total_energies', 'maximum_force']
    }

    inputs.code = Code.get_from_string(code_string)
    # The set of structures to be scanned.
    inputs.structures = structures

    # Build the k-point mesh from the requested density.
    kpoints = kpoints_data()
    kpoints.set_kpoints_mesh(kmesh)
    inputs.kpoints = kpoints

    inputs.parameters = dict_data(dict=incar)
    inputs.potential_family = Str(potential_family)
    inputs.potential_mapping = dict_data(dict=potential_mapping)
    inputs.options = dict_data(dict=options)
    inputs.settings = dict_data(dict=settings)
    # Give more explicit output to report.
    inputs.verbose = Bool(True)

    # Submit the requested workchain with the supplied inputs.
    submit(workchain, **inputs)
def test_vasp_wc(fresh_aiida_env, vasp_params, potentials, vasp_kpoints, vasp_structure, mock_vasp):
    """Test submitting only, not correctness, with mocked vasp code."""
    from aiida.orm import Code
    from aiida.plugins import WorkflowFactory
    from aiida.engine import run

    workchain = WorkflowFactory('vasp.vasp')
    mock_vasp.store()
    create_authinfo(computer=mock_vasp.computer, store=True)

    mesh, _ = vasp_kpoints
    wc_inputs = AttributeDict()
    wc_inputs.code = Code.get_from_string('mock-vasp@localhost')
    wc_inputs.structure = vasp_structure
    wc_inputs.kpoints = mesh
    wc_inputs.parameters = get_data_node('dict', dict={'incar': vasp_params.get_dict()})
    wc_inputs.potential_family = get_data_node('str', POTCAR_FAMILY_NAME)
    wc_inputs.potential_mapping = get_data_node('dict', dict=POTCAR_MAP)
    wc_inputs.options = get_data_node('dict', dict={
        'withmpi': False,
        'queue_name': 'None',
        'resources': {
            'num_machines': 1,
            'num_mpiprocs_per_machine': 1
        },
        'max_wallclock_seconds': 3600
    })
    wc_inputs.max_iterations = get_data_node('int', 1)
    wc_inputs.clean_workdir = get_data_node('bool', False)
    wc_inputs.verbose = get_data_node('bool', True)

    results, node = run.get_node(workchain, **wc_inputs)

    # The mocked run must succeed and expose the standard outputs.
    assert node.exit_status == 0
    assert 'retrieved' in results
    assert 'misc' in results
    assert 'remote_folder' in results
    # Reference values taken from the mocked output data.
    misc = results['misc'].get_dict()
    assert misc['maximum_stress'] == 22.8499295
    assert misc['total_energies']['energy_extrapolated'] == -14.16209692
def base_calc(fresh_aiida_env, vasp_code):
    """An instance of a VaspCalcBase Process."""
    from aiida_vasp.calcs.base import VaspCalcBase

    runner = get_manager().get_runner()
    calc_inputs = AttributeDict()
    # Minimal single-machine, single-process resource specification.
    calc_inputs.metadata = AttributeDict(
        {'options': {'resources': {'num_machines': 1, 'num_mpiprocs_per_machine': 1}}})
    calc_inputs.code = vasp_code
    return instantiate_process(runner, VaspCalcBase, **calc_inputs)
def test_validate_input(test_properties_code, properties_calc_parameters, test_wavefunction):
    """Check that PropertiesCalculation rejects incomplete inputs and accepts complete ones."""
    from aiida.common.extendeddicts import AttributeDict
    from aiida_crystal_dft.calculations.properties import PropertiesCalculation

    inputs = AttributeDict()
    # No inputs at all: must be rejected.
    with pytest.raises(ValueError):
        PropertiesCalculation(inputs)
    inputs.metadata = {
        'options': {
            'resources': {
                'num_machines': 1,
                'num_mpiprocs_per_machine': 1
            }
        }
    }
    inputs.code = test_properties_code
    # Code alone is still incomplete.
    with pytest.raises(ValueError):
        PropertiesCalculation(inputs)
    inputs.wavefunction = test_wavefunction
    # Parameters are still missing.
    with pytest.raises(ValueError):
        PropertiesCalculation(inputs)
    inputs.parameters = properties_calc_parameters
    assert PropertiesCalculation(inputs)
def test_relax_wf(fresh_aiida_env, vasp_params, potentials, mock_vasp):
    """Test submitting only, not correctness, with mocked vasp code."""
    from aiida.orm import WorkflowFactory, Code
    from aiida import work
    # Use an in-process runner so the workchain completes without a daemon.
    rmq_config = None
    runner = work.Runner(poll_interval=0., rmq_config=rmq_config, enable_persistence=True)
    work.set_runner(runner)
    base_wf_proc = WorkflowFactory('vasp.relax')
    mock_vasp.store()
    print(mock_vasp.get_remote_exec_path())
    comp = mock_vasp.get_computer()
    create_authinfo(computer=comp).store()
    # Reference inputs parsed from the test data directory.
    structure = PoscarParser(
        file_path=data_path('test_relax_wf', 'inp', 'POSCAR')).get_quantity(
            'poscar-structure', {})['poscar-structure']
    kpoints = KpParser(
        file_path=data_path('test_relax_wf', 'inp', 'KPOINTS')).get_quantity(
            'kpoints-kpoints', {})['kpoints-kpoints']
    incar_add = IncarParser(
        file_path=data_path('test_relax_wf', 'inp', 'INCAR')).get_quantity(
            'incar', {})['incar'].get_dict()
    # Strip relaxation tags; the workchain sets those itself.
    incar_add = {
        k: v
        for k, v in incar_add.items() if k not in ['isif', 'ibrion']
    }
    incar_add['system'] = 'test-case:test_relax_wf'
    restart_clean_workdir = get_data_node('bool', False)
    restart_clean_workdir.store()
    inputs = AttributeDict()
    inputs.code = Code.get_from_string('mock-vasp@localhost')
    inputs.structure = structure
    inputs.incar_add = get_data_node('parameter', dict=incar_add)
    inputs.kpoints = AttributeDict()
    inputs.kpoints.mesh = kpoints
    inputs.potcar_family = get_data_node('str', POTCAR_FAMILY_NAME)
    inputs.potcar_mapping = get_data_node('parameter', dict=POTCAR_MAP)
    inputs.options = get_data_node(
        'parameter',
        dict={
            'queue_name': 'None',
            'resources': {
                'num_machines': 1,
                'num_mpiprocs_per_machine': 1
            }
        })
    inputs.max_iterations = get_data_node('int', 1)
    # Converge on cell shape and ionic positions.
    inputs.convergence = AttributeDict()
    inputs.convergence.shape = AttributeDict()
    inputs.convergence.on = get_data_node('bool', True)
    inputs.convergence.positions = get_data_node('float', 0.1)
    inputs.restart = AttributeDict()
    inputs.restart.clean_workdir = restart_clean_workdir
    inputs.relax = AttributeDict()
    results = work.run(base_wf_proc, **inputs)
    # Submission succeeded if the relaxed structure was produced.
    assert 'relaxed_structure' in results
def test_bands_wc(fresh_aiida_env, potentials, mock_vasp):
    """Test with mocked vasp code."""
    from aiida.orm import Code, Log, RemoteData
    from aiida.plugins import WorkflowFactory
    from aiida.engine import run
    workchain = WorkflowFactory('vasp.bands')
    mock_vasp.store()
    create_authinfo(computer=mock_vasp.computer, store=True)
    # Reference inputs parsed from the test data directory.
    structure = PoscarParser(file_path=data_path('test_bands_wc', 'inp', 'POSCAR')).structure
    parameters = IncarParser(file_path=data_path('test_bands_wc', 'inp', 'INCAR')).incar
    parameters['system'] = 'test-case:test_bands_wc'
    # Make sure we replace encut with pwcutoff
    del parameters['encut']
    parameters = {'vasp': parameters}
    parameters['electronic'] = {'pwcutoff': 200}
    inputs = AttributeDict()
    inputs.code = Code.get_from_string('mock-vasp@localhost')
    inputs.structure = structure
    inputs.parameters = get_data_node('dict', dict=parameters)
    inputs.potential_family = get_data_node('str', POTCAR_FAMILY_NAME)
    inputs.potential_mapping = get_data_node('dict', dict=POTCAR_MAP)
    inputs.options = get_data_node('dict',
                                   dict={
                                       'withmpi': False,
                                       'queue_name': 'None',
                                       'resources': {
                                           'num_machines': 1,
                                           'num_mpiprocs_per_machine': 1
                                       },
                                       'max_wallclock_seconds': 3600
                                   })
    inputs.max_iterations = get_data_node('int', 1)
    inputs.clean_workdir = get_data_node('bool', False)
    inputs.verbose = get_data_node('bool', True)
    # Also set the restart folder as we assume a bands data will start from
    # a previous calculation that is sitting in the restart folder
    inputs.restart_folder = RemoteData(computer=inputs.code.computer, remote_path=data_path('test_bands_wc', 'inp'))
    results, node = run.get_node(workchain, **inputs)
    assert node.exit_status == 0
    assert 'bands' in results
    kpoints = results['bands'].get_kpoints()
    # Reference k-point path expected on the produced band structure.
    test_array = np.array([[0., 0., 0.], [0.02272727, 0., 0.02272727],
                           [0.04545454, 0., 0.04545454], [0.06818182, 0., 0.06818182],
                           [0.09090909, 0., 0.09090909], [0.11363636, 0., 0.11363636],
                           [0.13636364, 0., 0.13636364], [0.15909091, 0., 0.15909091],
                           [0.18181818, 0., 0.18181818], [0.20454545, 0., 0.20454545],
                           [0.22727273, 0., 0.22727273], [0.25, 0., 0.25],
                           [0.27272727, 0., 0.27272727], [0.29545455, 0., 0.29545455],
                           [0.31818182, 0., 0.31818182], [0.34090909, 0., 0.34090909],
                           [0.36363636, 0., 0.36363636], [0.38636364, 0., 0.38636364],
                           [0.40909091, 0., 0.40909091], [0.43181818, 0., 0.43181818],
                           [0.45454545, 0., 0.45454545], [0.47727273, 0., 0.47727273],
                           [0.5, 0., 0.5], [0.51785714, 0.03571429, 0.51785714],
                           [0.53571429, 0.07142857, 0.53571429],
                           [0.55357143, 0.10714286, 0.55357143],
                           [0.57142857, 0.14285714, 0.57142857],
                           [0.58928571, 0.17857143, 0.58928571],
                           [0.60714286, 0.21428571, 0.60714286],
                           [0.625, 0.25, 0.625], [0.375, 0.375, 0.75],
                           [0.35869565, 0.35869565, 0.7173913],
                           [0.3423913, 0.3423913, 0.68478261],
                           [0.32608696, 0.32608696, 0.65217391],
                           [0.30978261, 0.30978261, 0.61956522],
                           [0.29347826, 0.29347826, 0.58695652],
                           [0.27717391, 0.27717391, 0.55434783],
                           [0.26086957, 0.26086957, 0.52173913],
                           [0.24456522, 0.24456522, 0.48913043],
                           [0.22826087, 0.22826087, 0.45652174],
                           [0.21195652, 0.21195652, 0.42391304],
                           [0.19565217, 0.19565217, 0.39130435],
                           [0.17934783, 0.17934783, 0.35869565],
                           [0.16304348, 0.16304348, 0.32608696],
                           [0.14673913, 0.14673913, 0.29347826],
                           [0.13043478, 0.13043478, 0.26086957],
                           [0.11413044, 0.11413044, 0.22826087],
                           [0.09782609, 0.09782609, 0.19565217],
                           [0.08152174, 0.08152174, 0.16304348],
                           [0.06521739, 0.06521739, 0.13043478],
                           [0.04891304, 0.04891304, 0.09782609],
                           [0.0326087, 0.0326087, 0.06521739],
                           [0.01630435, 0.01630435, 0.0326087], [0., 0., 0.],
                           [0.02631579, 0.02631579, 0.02631579],
                           [0.05263158, 0.05263158, 0.05263158],
                           [0.07894737, 0.07894737, 0.07894737],
                           [0.10526316, 0.10526316, 0.10526316],
                           [0.13157895, 0.13157895, 0.13157895],
                           [0.15789474, 0.15789474, 0.15789474],
                           [0.18421053, 0.18421053, 0.18421053],
                           [0.21052632, 0.21052632, 0.21052632],
                           [0.2368421, 0.2368421, 0.2368421],
                           [0.26315789, 0.26315789, 0.26315789],
                           [0.28947368, 0.28947368, 0.28947368],
                           [0.31578947, 0.31578947, 0.31578947],
                           [0.34210526, 0.34210526, 0.34210526],
                           [0.36842105, 0.36842105, 0.36842105],
                           [0.39473684, 0.39473684, 0.39473684],
                           [0.42105263, 0.42105263, 0.42105263],
                           [0.44736842, 0.44736842, 0.44736842],
                           [0.47368421, 0.47368421, 0.47368421], [0.5, 0.5, 0.5],
                           [0.5, 0.48333333, 0.51666667], [0.5, 0.46666667, 0.53333333],
                           [0.5, 0.45, 0.55], [0.5, 0.43333333, 0.56666667],
                           [0.5, 0.41666667, 0.58333333], [0.5, 0.4, 0.6],
                           [0.5, 0.38333333, 0.61666667], [0.5, 0.36666667, 0.63333333],
                           [0.5, 0.35, 0.65], [0.5, 0.33333333, 0.66666667],
                           [0.5, 0.31666667, 0.68333333], [0.5, 0.3, 0.7],
                           [0.5, 0.28333333, 0.71666667], [0.5, 0.26666667, 0.73333333],
                           [0.5, 0.25, 0.75], [0.5, 0.225, 0.725], [0.5, 0.2, 0.7],
                           [0.5, 0.175, 0.675], [0.5, 0.15, 0.65], [0.5, 0.125, 0.625],
                           [0.5, 0.1, 0.6], [0.5, 0.075, 0.575], [0.5, 0.05, 0.55],
                           [0.5, 0.025, 0.525], [0.5, 0., 0.5]])
    np.testing.assert_allclose(kpoints, test_array)
    bands = results['bands'].get_bands()
    # One spin channel, 98 k-points, 20 bands.
    assert bands.shape == (1, 98, 20)
    # Spot-check a few eigenvalues against the mocked output data.
    np.testing.assert_allclose(bands[0, 0, 0:3], np.array([-6.0753, 6.0254, 6.0254]))
    np.testing.assert_allclose(bands[0, 2, 0:3], np.array([-6.0386, 5.7955, 5.8737]))
    np.testing.assert_allclose(bands[0, 97, 0:3], np.array([-1.867, -1.867, 3.1102]))
def test_converge_wc_pw(fresh_aiida_env, vasp_params, potentials, mock_vasp):
    """Test convergence workflow using mock code."""
    from aiida.orm import Code
    from aiida.plugins import WorkflowFactory
    from aiida.engine import run
    workchain = WorkflowFactory('vasp.converge')
    mock_vasp.store()
    create_authinfo(computer=mock_vasp.computer).store()
    # Reference inputs parsed from the test data directory.
    structure = PoscarParser(file_path=data_path('test_converge_wc/pw/200', 'inp', 'POSCAR')).structure
    parameters = IncarParser(
        file_path=data_path('test_converge_wc/pw/200', 'inp', 'INCAR')).incar
    parameters['system'] = 'test-case:test_converge_wc'
    # Strip tags the convergence workchain controls itself.
    parameters = {
        k: v
        for k, v in parameters.items()
        if k not in ['isif', 'ibrion', 'encut', 'nsw']
    }
    kpoints = KpointsParser(file_path=data_path('test_converge_wc/pw/200', 'inp', 'KPOINTS')).kpoints
    restart_clean_workdir = get_data_node('bool', False)
    restart_clean_workdir.store()
    inputs = AttributeDict()
    inputs.code = Code.get_from_string('mock-vasp@localhost')
    inputs.structure = structure
    inputs.kpoints = kpoints
    inputs.parameters = get_data_node('dict', dict={'incar': parameters})
    inputs.potential_family = get_data_node('str', POTCAR_FAMILY_NAME)
    inputs.potential_mapping = get_data_node('dict', dict=POTCAR_MAP)
    inputs.options = get_data_node('dict',
                                   dict={
                                       'withmpi': False,
                                       'queue_name': 'None',
                                       'resources': {
                                           'num_machines': 1,
                                           'num_mpiprocs_per_machine': 1
                                       },
                                       'max_wallclock_seconds': 3600
                                   })
    inputs.max_iterations = get_data_node('int', 1)
    inputs.clean_workdir = get_data_node('bool', False)
    # Only run the plane-wave cutoff convergence series (testing mode),
    # with relaxation, compression and displacement disabled.
    relax = AttributeDict()
    converge = AttributeDict()
    relax.perform = get_data_node('bool', False)
    converge.relax = get_data_node('bool', False)
    converge.testing = get_data_node('bool', True)
    converge.compress = get_data_node('bool', False)
    converge.displace = get_data_node('bool', False)
    converge.pwcutoff_samples = get_data_node('int', 3)
    converge.k_samples = get_data_node('int', 3)
    inputs.relax = relax
    inputs.converge = converge
    inputs.verbose = get_data_node('bool', True)
    results, node = run.get_node(workchain, **inputs)
    assert node.exit_status == 0
    assert 'converge' in results
    converge = results['converge']
    assert 'data' in converge
    conv_data = converge['data']
    try:
        conv_data = conv_data.get_array('pw_regular')
    except KeyError:
        pytest.fail('Did not find pw_regular in converge.data')
    # Reference convergence data for the mocked runs:
    # one row per cutoff (200/250/300 eV).
    conv_data_test = np.array([[200.0, -10.77974998, 0.0, 0.0, 0.5984],
                               [250.0, -10.80762044, 0.0, 0.0, 0.5912],
                               [300.0, -10.82261992, 0.0, 0.0, 0.5876]])
    np.testing.assert_allclose(conv_data, conv_data_test)
    assert 'pwcutoff_recommended' in converge
    try:
        # The recommended cutoff should be the converged 300 eV value.
        _encut = converge['pwcutoff_recommended'].value
        np.testing.assert_equal(_encut, 300)
    except AttributeError:
        pytest.fail('pwcutoff_recommended does not have the expected format')
def test_converge_wc(fresh_aiida_env, potentials, mock_vasp):
    """Test submitting only, not correctness, with mocked vasp code."""
    from aiida.orm import Code
    from aiida.plugins import WorkflowFactory
    from aiida.engine import run
    workchain = WorkflowFactory('vasp.converge')
    mock_vasp.store()
    create_authinfo(computer=mock_vasp.computer, store=True)
    # Reference inputs parsed from the test data directory.
    structure = PoscarParser(
        file_path=data_path('test_converge_wc', 'inp', 'POSCAR')).structure
    parameters = IncarParser(
        file_path=data_path('test_converge_wc', 'inp', 'INCAR')).incar
    parameters['system'] = 'test-case:test_converge_wc'
    # Strip tags the convergence workchain controls itself.
    parameters = {
        k: v
        for k, v in parameters.items()
        if k not in ['isif', 'ibrion', 'encut', 'nsw']
    }
    restart_clean_workdir = get_data_node('bool', False)
    restart_clean_workdir.store()
    inputs = AttributeDict()
    inputs.code = Code.get_from_string('mock-vasp@localhost')
    inputs.structure = structure
    inputs.parameters = get_data_node('dict', dict={'incar': parameters})
    inputs.potential_family = get_data_node('str', POTCAR_FAMILY_NAME)
    inputs.potential_mapping = get_data_node('dict', dict=POTCAR_MAP)
    inputs.options = get_data_node('dict',
                                   dict={
                                       'withmpi': False,
                                       'queue_name': 'None',
                                       'resources': {
                                           'num_machines': 1,
                                           'num_mpiprocs_per_machine': 1
                                       },
                                       'max_wallclock_seconds': 3600
                                   })
    inputs.max_iterations = get_data_node('int', 1)
    inputs.clean_workdir = get_data_node('bool', False)
    # Run both plane-wave cutoff and k-point convergence series,
    # with relaxation performed but no compression/displacement tests.
    relax = AttributeDict()
    converge = AttributeDict()
    converge.relax = get_data_node('bool', False)
    converge.compress = get_data_node('bool', False)
    converge.displace = get_data_node('bool', False)
    converge.pwcutoff_samples = get_data_node('int', 3)
    converge.k_samples = get_data_node('int', 3)
    relax.perform = get_data_node('bool', True)
    inputs.relax = relax
    inputs.converge = converge
    inputs.verbose = get_data_node('bool', True)
    results, node = run.get_node(workchain, **inputs)
    assert node.exit_status == 0
    # Both convergence series must be present in the output data.
    converge = results['converge']
    assert 'data' in converge
    conv_data = converge['data']
    try:
        conv_data.get_array('pw_regular')
    except KeyError:
        pytest.fail('Did not find pw_regular in converge.data')
    try:
        conv_data.get_array('kpoints_regular')
    except KeyError:
        pytest.fail('Did not find kpoints_regular in converge.data')
    # Recommended cutoff and k-point mesh must have the expected node types.
    assert 'pwcutoff_recommended' in converge
    try:
        _encut = converge['pwcutoff_recommended'].value
    except AttributeError:
        pytest.fail('pwcutoff_recommended does not have the expected format')
    assert 'kpoints_recommended' in converge
    try:
        _kpoints = converge['kpoints_recommended'].get_kpoints_mesh()
    except AttributeError:
        pytest.fail('kpoints_recommended does not have the expected format')