def test_dict2builder(aiida_profile, sto_calc_inputs):
    """Test that we can use nested dict input for builder"""
    from aiida_castep.calculations.castep import CastepCalculation
    from aiida.engine import run_get_node

    builder = CastepCalculation.get_builder()
    builder._update(sto_calc_inputs)
    run_get_node(builder)

def test_output_validation_error(self):
    """Test that a process is marked as failed if its output namespace validation fails."""

    class TestProcess1(Process):
        """Defining a new TestProcess class for testing."""

        _node_class = orm.WorkflowNode

        @classmethod
        def define(cls, spec):
            super().define(spec)
            spec.input('add_outputs', valid_type=orm.Bool, default=lambda: orm.Bool(False))
            spec.output_namespace('integer.namespace', valid_type=orm.Int, dynamic=True)
            spec.output('required_string', valid_type=orm.Str, required=True)

        def run(self):
            if self.inputs.add_outputs:
                self.out('required_string', orm.Str('testing').store())
                self.out('integer.namespace.two', orm.Int(2).store())

    _, node = run_get_node(TestProcess1)

    # For default inputs, no outputs will be attached, causing the validation to fail at the end,
    # so an internal exit status (a negative integer) will be set
    self.assertTrue(node.is_finished)
    self.assertFalse(node.is_finished_ok)
    self.assertEqual(node.exit_status, TestProcess1.exit_codes.ERROR_MISSING_OUTPUT.status)
    self.assertEqual(node.exit_message, TestProcess1.exit_codes.ERROR_MISSING_OUTPUT.message)

    # When setting `add_outputs` to True, the outputs should be added and validation should pass
    _, node = run_get_node(TestProcess1, add_outputs=orm.Bool(True))
    self.assertTrue(node.is_finished)
    self.assertTrue(node.is_finished_ok)
    self.assertEqual(node.exit_status, 0)

def test_submission(clear_database_before_test, sto_calc_inputs, sto_spectral_inputs):
    """Test submitting a CastepCalculation"""
    from aiida_castep.calculations.castep import CastepCalculation
    from aiida.engine import run_get_node

    _, return_node = run_get_node(CastepCalculation, **sto_calc_inputs)
    assert return_node.exit_status == 106  # No castep output found

    # test with extra kpoints
    _, return_node = run_get_node(CastepCalculation, **sto_spectral_inputs)
    assert return_node.exit_status == 106

def test_vasp_hf_wannier_input(
        configure_with_daemon,  # pylint: disable=unused-argument
        assert_finished,
        get_insb_input  # pylint: disable=redefined-outer-name
):
    """
    Runs the workflow that calculates Wannier90 inputs from VASP + hybrids
    on InSb with a coarse grid.
    """
    from aiida import orm
    from aiida.engine import run_get_node
    from aiida_tbextraction.fp_run.wannier_input import VaspWannierInput

    kpoints_mesh = orm.KpointsData()
    kpoints_mesh.set_kpoints_mesh([2, 2, 2])

    wannier_projections = orm.List()
    wannier_projections.extend(['In : s; px; py; pz', 'Sb : px; py; pz'])

    result, node = run_get_node(
        VaspWannierInput,
        kpoints_mesh=kpoints_mesh,
        wannier_parameters=orm.Dict(dict=dict(num_wann=14, num_bands=36, spinors=True)),
        wannier_projections=wannier_projections,
        **get_insb_input
    )
    assert node.is_finished_ok
    assert all(
        key in result
        for key in ['wannier_input_folder', 'wannier_parameters', 'wannier_bands']
    )
    folder_list = result['wannier_input_folder'].get_folder_list()
    assert all(
        filename in folder_list
        for filename in ['wannier90.amn', 'wannier90.mmn', 'wannier90.eig']
    )

def example_failure(cp2k_code):
    """Run failing calculation."""
    print("Testing CP2K failure...")

    # a broken CP2K input
    parameters = Dict(dict={'GLOBAL': {'FOO_BAR_QUUX': 42}})

    # Construct process builder
    builder = cp2k_code.get_builder()
    builder.parameters = parameters
    builder.code = cp2k_code
    builder.metadata.options.resources = {
        "num_machines": 1,
        "num_mpiprocs_per_machine": 1,
    }
    builder.metadata.options.max_wallclock_seconds = 1 * 2 * 60

    print("Submitted calculation...")
    _, calc_node = run_get_node(builder)

    if calc_node.exit_status == 304:
        print("CP2K failure correctly recognized.")
    else:
        print("ERROR!")
        print("CP2K failure was not recognized.")
        sys.exit(3)

def test_run_single_reaxff(db_test_app, get_structure, pyrite_potential_reaxff_lowtol,
                           data_regression):
    # type: (AiidaTestApp) -> None
    code = db_test_app.get_or_create_code("gulp.single")
    builder = code.get_builder()
    builder._update({
        "metadata": {
            "options": {
                "withmpi": False,
                "resources": {
                    "num_machines": 1,
                    "num_mpiprocs_per_machine": 1
                },
                "max_wallclock_seconds": 30,
            }
        }
    })
    builder.structure = get_structure("pyrite")
    builder.potential = pyrite_potential_reaxff_lowtol

    calc_node = run_get_node(builder).node
    db_test_app.check_calculation(calc_node, ["results"])

    result = recursive_round(calc_node.outputs.results.get_dict(), 6)
    for key in [
            "parser_version",
            "peak_dynamic_memory_mb",
            "opt_time_second",
            "total_time_second",
    ]:
        result.pop(key, None)
    data_regression.check(result)

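# Note: `recursive_round`, used above and in the gulp tests below, is a utility
# defined elsewhere in the plugin's test suite. A minimal sketch of what it is
# assumed to do, based on its call sites (round every float in a nested structure
# so data-regression checks are stable); an illustrative reimplementation, not
# the plugin's actual helper:
def recursive_round(value, precision):
    """Recursively round all floats in a nested dict/list/tuple structure."""
    if isinstance(value, float):
        return round(value, precision)
    if isinstance(value, dict):
        return {key: recursive_round(item, precision) for key, item in value.items()}
    if isinstance(value, (list, tuple)):
        return type(value)(recursive_round(item, precision) for item in value)
    return value
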
def test_optimize_process(
    db_test_app,
    get_potential_data,
    potential_type,
    data_regression,
):
    """Test the functionality of the optimization calculation type"""
    calc_plugin = 'lammps.optimize'
    code = db_test_app.get_or_create_code(calc_plugin)
    pot_data = get_potential_data(potential_type)
    potential = DataFactory('lammps.potential')(
        potential_type=pot_data.type,
        data=pot_data.data,
    )
    parameters = get_calc_parameters(
        get_lammps_version(code),
        calc_plugin,
        potential.default_units,
        potential_type,
    )
    builder = code.get_builder()
    builder._update({  # pylint: disable=protected-access
        'metadata': tests.get_default_metadata(),
        'code': code,
        'structure': pot_data.structure,
        'potential': potential,
        'parameters': parameters,
    })

    output = run_get_node(builder)
    calc_node = output.node

    if not calc_node.is_finished_ok:
        print(calc_node.attributes)
        print(get_calcjob_report(calc_node))
        raise Exception(f'finished with exit message: {calc_node.exit_message}')

    link_labels = calc_node.get_outgoing().all_link_labels()
    assert set(link_labels).issuperset(['results', 'trajectory_data', 'structure'])

    trajectory_data = calc_node.outputs.trajectory_data.attributes
    # optimization steps may differ between lammps versions
    trajectory_data = {k: v for k, v in trajectory_data.items() if k != 'number_steps'}

    data_regression.check({
        'results': sanitize_results(calc_node.outputs.results.get_dict(), 1),
        'trajectory_data': trajectory_data,
        'structure': {
            'kind_names': calc_node.outputs.structure.get_kind_names()
        },
        # "structure": tests.recursive_round(
        #     calc_node.outputs.structure.attributes, 1, apply_lists=True
        # ),
    })

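# Similarly, `sanitize_results`, used in the lammps tests, is a helper from that
# plugin's test utilities. A hedged sketch of the behaviour implied by its call
# sites (strip fields that vary between runs, round energies so regression files
# are reproducible); the key names below are assumptions, not the plugin's actual
# implementation:
def sanitize_results(results, round_energy=None):
    """Drop non-deterministic keys and optionally round energy-like values."""
    results = dict(results)
    for key in ('parser_version', 'warnings', 'steps_per_second', 'total_wall_time'):
        results.pop(key, None)  # assumed non-deterministic keys
    if round_energy is not None:
        for key in ('energy', 'final_energy'):
            if isinstance(results.get(key), float):
                results[key] = round(results[key], round_energy)
    return results
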
def test_submit_HKUST1(clear_database, network_code, basic_options):
    # pylint: disable=unused-argument,invalid-name
    """Test submitting a calculation"""
    from aiida_zeopp.tests import TEST_DIR
    from aiida_zeopp.calculations.network import NetworkCalculation
    from aiida.engine import run_get_node
    from aiida.plugins import DataFactory

    # Prepare input parameters
    parameters = DataFactory('zeopp.parameters')(dict={'cssr': True})
    structure = DataFactory('cif')(
        file=os.path.join(TEST_DIR, 'HKUST-1.cif'), parse_policy='lazy')

    inputs = {
        'code': network_code,
        'parameters': parameters,
        'structure': structure,
        'metadata': {
            'options': basic_options,
            'label': 'aiida_zeopp format conversion',
            'description': 'Test converting .cif to .cssr format',
        },
    }

    _result, node = run_get_node(NetworkCalculation, **inputs)

    with io.open(os.path.join(TEST_DIR, 'HKUST-1.cssr'), 'r', encoding='utf8') as handle:
        cssr = handle.read()
    assert cssr == node.outputs.structure_cssr.get_content()

def test_filename(network_code, basic_options):  # pylint: disable=unused-argument,invalid-name
    """Test submitting a calculation from autogenerated CifData.

    Note: filenames of CifData generated from ASE may miss the .cif extension.
    """
    from ase.build import bulk

    # Prepare input parameters
    parameters = NetworkParameters(dict={'cssr': True})
    atoms = bulk('Mg', 'fcc', a=3.6)
    cif = CifData(ase=atoms)
    atomic_radii = SinglefileData(file=os.path.join(TEST_DIR, 'MgO.rad'))

    # set up calculation
    inputs = {
        'code': network_code,
        'parameters': parameters,
        'structure': cif,
        'atomic_radii': atomic_radii,
        'metadata': {
            'options': basic_options,
        },
    }

    input_filename = cif.filename if cif.filename.endswith('.cif') else cif.filename + '.cif'

    result, node = run_get_node(NetworkCalculation, **inputs)
    assert 'structure_cssr' in result
    assert node.res.Input_structure_filename == input_filename

def test_run_prop_mgo_no_scf(db_test_app, sanitise_calc_attr, data_regression):
    """Test the workchains when a folder is supplied that contains the wavefunction file."""
    clear_spec()

    wc_builder = CryPropertiesWorkChain.get_builder()

    with open_resource_binary("doss", "mgo_sto3g_scf", "fort.9") as handle:
        wc_builder.wf_folder = SinglefileData(handle)
    wc_builder.doss.code = db_test_app.get_or_create_code("crystal17.doss")
    wc_builder.doss.parameters = get_parameters()["doss"]
    wc_builder.doss.metadata = db_test_app.get_default_metadata()
    wc_builder.ech3.code = db_test_app.get_or_create_code("crystal17.ech3")
    wc_builder.ech3.parameters = get_parameters()["ech3"]
    wc_builder.ech3.metadata = db_test_app.get_default_metadata()

    outputs, wc_node = run_get_node(wc_builder)
    sys.stderr.write(get_workchain_report(wc_node, "REPORT"))

    wk_attributes = sanitise_calc_attr(wc_node.attributes)

    data_regression.check({
        "calc_node": wk_attributes,
        "incoming": sorted(wc_node.get_incoming().all_link_labels()),
        "outgoing": sorted(wc_node.get_outgoing().all_link_labels()),
        # "results": outputs["results"].attributes,
    })

def test_run_mgo_scf_folder(db_test_app, sanitise_calc_attr, data_regression):
    # type: (AiidaTestApp) -> None
    """Test running a calculation."""
    # set up calculation
    builder = db_test_app.get_or_create_code("crystal17.newk").get_builder()
    builder.metadata = get_metadata()
    builder.parameters = Dict(dict={"k_points": [18, 36]})

    wf_folder = FolderData()
    with open_resource_binary("newk", "mgo_sto3g_scf", "fort.9") as handle:
        wf_folder.put_object_from_filelike(handle, "fort.9", mode="wb")
    builder.wf_folder = wf_folder

    output = run_get_node(builder)
    calc_node = output.node

    db_test_app.check_calculation(calc_node, ["results"])

    calc_attributes = sanitise_calc_attr(calc_node.attributes)

    results = {
        k: round(i, 7) if isinstance(i, float) else i
        for k, i in calc_node.outputs.results.attributes.items()
        if k not in ["execution_time_seconds"]
    }

    data_regression.check({
        "calc": calc_attributes,
        "results": results,
    })

def test_output_dictionary(self):
    """Verify that a dictionary can be passed as an output for a namespace."""

    class TestProcess1(Process):
        """Defining a new TestProcess class for testing."""

        _node_class = orm.WorkflowNode

        @classmethod
        def define(cls, spec):
            super().define(spec)
            spec.input_namespace('namespace', valid_type=orm.Int, dynamic=True)
            spec.output_namespace('namespace', valid_type=orm.Int, dynamic=True)

        def run(self):
            self.out('namespace', self.inputs.namespace)

    results, node = run_get_node(TestProcess1, namespace={'alpha': orm.Int(1), 'beta': orm.Int(2)})

    self.assertTrue(node.is_finished_ok)
    self.assertEqual(results['namespace']['alpha'], orm.Int(1))
    self.assertEqual(results['namespace']['beta'], orm.Int(2))

def test_without_kinds(cp2k_code, cp2k_basissets, cp2k_pseudos, clear_database):  # pylint: disable=unused-argument
    """Testing CP2K with the Basis Set stored in gaussian.basisset but without a KIND section"""

    # structure
    atoms = ase.build.molecule("H2O")
    atoms.center(vacuum=2.0)
    structure = StructureData(ase=atoms)

    # parameters
    parameters = Dict(
        dict={
            "FORCE_EVAL": {
                "METHOD": "Quickstep",
                "DFT": {
                    "QS": {
                        "EPS_DEFAULT": 1.0e-12,
                        "WF_INTERPOLATION": "ps",
                        "EXTRAPOLATION_ORDER": 3,
                    },
                    "MGRID": {
                        "NGRIDS": 4,
                        "CUTOFF": 280,
                        "REL_CUTOFF": 30,
                    },
                    "XC": {
                        "XC_FUNCTIONAL": {
                            "_": "LDA",
                        },
                    },
                    "POISSON": {
                        "PERIODIC": "none",
                        "PSOLVER": "MT",
                    },
                },
            }
        })

    options = {
        "resources": {
            "num_machines": 1,
            "num_mpiprocs_per_machine": 1,
        },
        "max_wallclock_seconds": 1 * 3 * 60,
    }

    inputs = {
        "structure": structure,
        "parameters": parameters,
        "code": cp2k_code,
        "metadata": {
            "options": options,
        },
        "basissets": cp2k_basissets,
        "pseudos": cp2k_pseudos,
    }

    _, calc_node = run_get_node(CalculationFactory("cp2k"), **inputs)
    assert calc_node.exit_status == 0

def test_FleurJobCalc_full_mock(aiida_profile, mock_code_factory, create_fleurinp, clear_database,
                                hash_code_by_entrypoint):  # pylint: disable=redefined-outer-name
    """
    Tests the fleur input generator with a mock executable if the datafiles are there,
    otherwise runs inpgen itself if an executable was specified
    """
    mock_code = mock_code_factory(
        label='fleur',
        data_dir_abspath=os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data_dir/'),
        entry_point=CALC_ENTRY_POINT,
        ignore_files=[
            '_aiidasubmit.sh', 'cdnc', 'out', 'FleurInputSchema.xsd', 'cdn.hdf', 'usage.json',
            # 'cdn??'
            'cdn00', 'cdn01', 'cdn02', 'cdn03', 'cdn04', 'cdn05',
            'cdn06', 'cdn07', 'cdn08', 'cdn09', 'cdn10', 'cdn11'
        ])
    #mock_code.append_text = 'rm cdn?? broyd* wkf2 inf cdnc stars pot* FleurInputSchema* cdn.hdf'

    inputs = {
        'fleurinpdata': create_fleurinp(TEST_INP_XML_PATH),
        #'parameters': orm.Dict(dict=parameters),
        'metadata': {
            'options': {
                'resources': {
                    'num_machines': 1,
                    'tot_num_mpiprocs': 1
                },
                'max_wallclock_seconds': int(600),
                'withmpi': True
            }
        }
    }

    #calc = CalculationFactory(CALC_ENTRY_POINT, code=mock_code, **inputs)
    res, node = run_get_node(CalculationFactory(CALC_ENTRY_POINT), code=mock_code, **inputs)

    print(res['remote_folder'].list_objects())
    print(res['retrieved'].list_objects())
    assert node.is_finished_ok

def test_exit_codes_invalidate_cache(self):
    """
    Test that returning an exit code with 'invalidates_cache' set to ``True``
    indeed means that the ProcessNode will not be cached from.
    """
    # Sanity check that caching works when the exit code is not returned.
    with enable_caching():
        _, node1 = run_get_node(test_processes.InvalidateCaching, return_exit_code=orm.Bool(False))
        _, node2 = run_get_node(test_processes.InvalidateCaching, return_exit_code=orm.Bool(False))
        self.assertEqual(node1.get_extra('_aiida_hash'), node2.get_extra('_aiida_hash'))
        self.assertIn('_aiida_cached_from', node2.extras)

    with enable_caching():
        _, node3 = run_get_node(test_processes.InvalidateCaching, return_exit_code=orm.Bool(True))
        _, node4 = run_get_node(test_processes.InvalidateCaching, return_exit_code=orm.Bool(True))
        self.assertEqual(node3.get_extra('_aiida_hash'), node4.get_extra('_aiida_hash'))
        self.assertNotIn('_aiida_cached_from', node4.extras)

def test_valid_cache_hook(self):
    """
    Test that the is_valid_cache behavior can be specified from
    the method in the Process sub-class.
    """
    # Sanity check that caching works when the hook returns True.
    with enable_caching():
        _, node1 = run_get_node(test_processes.IsValidCacheHook)
        _, node2 = run_get_node(test_processes.IsValidCacheHook)
        self.assertEqual(node1.get_extra('_aiida_hash'), node2.get_extra('_aiida_hash'))
        self.assertIn('_aiida_cached_from', node2.extras)

    with enable_caching():
        _, node3 = run_get_node(test_processes.IsValidCacheHook, not_valid_cache=orm.Bool(True))
        _, node4 = run_get_node(test_processes.IsValidCacheHook, not_valid_cache=orm.Bool(True))
        self.assertEqual(node3.get_extra('_aiida_hash'), node4.get_extra('_aiida_hash'))
        self.assertNotIn('_aiida_cached_from', node4.extras)

def test_md_process(
    db_test_app,
    get_potential_data,
    potential_type,
    data_regression,
):
    """Test the functionality of the md calculation type"""
    calc_plugin = 'lammps.md'
    code = db_test_app.get_or_create_code(calc_plugin)
    pot_data = get_potential_data(potential_type)
    potential = DataFactory('lammps.potential')(
        potential_type=pot_data.type,
        data=pot_data.data,
    )
    version = get_lammps_version(code)
    version_year = version[-4:]
    parameters = get_calc_parameters(
        version,
        calc_plugin,
        potential.default_units,
        potential_type,
    )
    builder = code.get_builder()
    builder._update({  # pylint: disable=protected-access
        'metadata': tests.get_default_metadata(),
        'code': code,
        'structure': pot_data.structure,
        'potential': potential,
        'parameters': parameters,
    })

    output = run_get_node(builder)
    calc_node = output.node

    if not calc_node.is_finished_ok:
        print(calc_node.attributes)
        print(get_calcjob_report(calc_node))
        raise Exception(f'finished with exit message: {calc_node.exit_message}')

    link_labels = calc_node.get_outgoing().all_link_labels()
    assert set(link_labels).issuperset(['results', 'trajectory_data', 'system_data'])

    data_regression.check(
        {
            'results': sanitize_results(
                calc_node.outputs.results.get_dict(),
                round_energy=1,
            ),
            'system_data': calc_node.outputs.system_data.attributes,
            'trajectory_data': calc_node.outputs.trajectory_data.attributes,
        },
        basename=f'test_md_process-{potential_type}-{version_year}',
    )

def test_bad_settings(db_test_app):
    """Test that a bad settings dict raises a validation error."""
    with pytest.raises(ValidationError):
        run_get_node(
            WorkflowFactory("crystal17.sym3d"),
            settings=DataFactory("dict")(dict={"a": 1}),
        )

def test_run_window_invalid(configure_with_daemon, run_window_builder, window_values):  # pylint:disable=unused-argument,redefined-outer-name
    """
    Runs the run_window workflow with invalid window values.
    """
    result, node = run_get_node(run_window_builder(window_values, slice_=True, symmetries=True))
    assert node.is_finished_ok
    assert result['cost_value'] > 1e10

def test_no_structure(db_test_app):
    """Test that supplying no StructureData or CifData fails the workflow."""
    wflow_cls = WorkflowFactory("crystal17.sym3d")
    results, node = run_get_node(wflow_cls, settings=DataFactory("dict")(dict={"symprec": 0.01}))
    assert node.is_failed, node.exit_status
    assert node.exit_status == wflow_cls.exit_codes.ERROR_INVALID_INPUT_RESOURCES.status

def run_calculation(code, counter, inputval):
    """
    Run a calculation through the Process layer.
    """
    process, inputs, expected_result = create_calculation_process(code=code, inputval=inputval)
    result, calc = run_get_node(process, **inputs)
    print('[{}] ran calculation {}, pk={}'.format(counter, calc.uuid, calc.pk))
    return calc, expected_result

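# `create_calculation_process` is defined elsewhere in this test module. A minimal
# sketch of the shape it is assumed to return, modelled on aiida-core's arithmetic
# add example; the entry point and input names here are assumptions, not the actual
# helper:
def create_calculation_process(code, inputval):
    """Return (process_class, inputs, expected_result) for a simple add calculation."""
    from aiida import orm
    from aiida.plugins import CalculationFactory

    process = CalculationFactory('arithmetic.add')  # assumed entry point
    inputs = {
        'code': code,
        'x': orm.Int(inputval),
        'y': orm.Int(inputval),
        'metadata': {
            'options': {
                'resources': {'num_machines': 1, 'num_mpiprocs_per_machine': 1},
            },
        },
    }
    expected_result = 2 * inputval
    return process, inputs, expected_result
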
def test_run_optimize_lj_with_symm(db_test_app, get_structure, pyrite_potential_lj,
                                   data_regression):
    # type: (AiidaTestApp) -> None
    from aiida.engine import run_get_node

    structure = get_structure("pyrite")
    symmetry = DataFactory("gulp.symmetry")(data=compute_symmetry_dict(structure, 0.01, None))
    parameters = db_test_app.get_data_node(
        "dict",
        dict={
            "minimize": {"style": "cg", "max_iterations": 100},
            "relax": {"type": "conp"},
        },
    )

    code = db_test_app.get_or_create_code("gulp.optimize")
    builder = code.get_builder()
    builder._update({
        "metadata": {
            "options": {
                "withmpi": False,
                "resources": {
                    "num_machines": 1,
                    "num_mpiprocs_per_machine": 1
                },
                "max_wallclock_seconds": 30,
            }
        }
    })
    builder.structure = structure
    builder.potential = pyrite_potential_lj
    builder.parameters = parameters
    builder.symmetry = symmetry

    calc_node = run_get_node(builder).node
    db_test_app.check_calculation(calc_node, ["results", "structure", "retrieved"])

    result = recursive_round(calc_node.outputs.results.get_dict(), 6)
    for key in [
            "parser_version",
            "peak_dynamic_memory_mb",
            "opt_time_second",
            "total_time_second",
    ]:
        result.pop(key, None)
    data_regression.check(result)

def main(network_code_string):
    """Example usage:

    $ verdi run submit.py network@localhost

    Alternative use (creates network@localhost-test code):

    $ verdi run submit.py createcode
    """
    if network_code_string == 'createcode':
        from aiida_zeopp import tests
        code = tests.get_code(entry_point='zeopp.network')
    else:
        from aiida.orm import Code
        code = Code.get_from_string(network_code_string)

    # Prepare input parameters
    NetworkParameters = DataFactory('zeopp.parameters')
    # For allowed keys, print(NetworkParameters.schema)
    parameters = NetworkParameters(
        dict={
            'ha': 'LOW',  # just for speed; use 'DEF' for production!
            'cssr': True,  # converting to cssr
            'sa': [1.86, 1.86, 1000],  # computing surface area
            'vol': [0.0, 0.0, 1000],  # computing geometric pore volume
        })

    CifData = DataFactory('cif')
    this_dir = os.path.dirname(os.path.realpath(__file__))
    structure = CifData(file=os.path.join(this_dir, 'HKUST-1.cif'))

    # set up calculation
    inputs = {
        'code': code,
        'parameters': parameters,
        'structure': structure,
        'metadata': {
            'options': {
                'max_wallclock_seconds': 1 * 60,
            },
            'label': 'aiida_zeopp example calculation',
            'description': 'Converts .cif to .cssr format, computes surface area, and pore volume',
        },
    }

    # or use aiida.engine.submit
    print('Running NetworkCalculation: wait...')
    result, node = run_get_node(NetworkCalculation, **inputs)  # pylint: disable=unused-variable

    print('Nitrogen accessible surface area (m^2/g): {:.3f}'.format(
        node.outputs.output_parameters.get_attribute('ASA_m^2/g')))
    print('Geometric pore volume (cm^3/g): {:.3f}'.format(
        node.outputs.output_parameters.get_attribute('AV_cm^3/g')))
    print('CSSR structure: SinglefileData<{}>'.format(node.outputs.structure_cssr.pk))

def test_calcjob_submit_mgo(db_test_app, input_symmetry, get_structure, data_regression,
                            file_regression):
    """Test submitting a calculation."""
    code = db_test_app.get_or_create_code("crystal17.main")

    # Prepare input parameters
    inparams = CryInputParamsData(data={
        "title": "MgO Bulk",
        "scf": {
            "k_points": (8, 8)
        }
    })
    instruct = get_structure("MgO")
    sym_calc = run_get_node(
        WorkflowFactory("crystal17.sym3d"),
        structure=instruct,
        settings=orm.Dict(dict={
            "symprec": 0.01,
            "compute_primitive": True
        }),
    ).node
    instruct = sym_calc.get_outgoing().get_node_by_label("structure")
    symmetry = sym_calc.get_outgoing().get_node_by_label("symmetry")

    with open_resource_text("basis_sets", "sto3g", "sto3g_Mg.basis") as handle:
        mg_basis, _ = BasisSetData.get_or_create(handle)
    with open_resource_text("basis_sets", "sto3g", "sto3g_O.basis") as handle:
        o_basis, _ = BasisSetData.get_or_create(handle)

    # set up calculation
    builder = code.get_builder()
    builder.metadata = db_test_app.get_default_metadata(dry_run=True)
    builder.parameters = inparams
    builder.structure = instruct
    builder.basissets = {"Mg": mg_basis, "O": o_basis}
    if input_symmetry:
        builder.symmetry = symmetry

    process_options = builder.process_class(inputs=builder).metadata.options

    with db_test_app.sandbox_folder() as folder:
        calc_info = db_test_app.generate_calcinfo("crystal17.main", folder, builder)
        with folder.open(process_options.input_file_name) as f:
            input_content = f.read()
        with folder.open("fort.34") as f:
            gui_content = f.read()  # noqa: F841
            # TODO test fort.34 (but rounded)

    file_regression.check(input_content)
    data_regression.check(sanitize_calc_info(calc_info))

def test_run_window(configure_with_daemon, run_window_builder, slice_, symmetries):  # pylint:disable=unused-argument,redefined-outer-name
    """
    Runs the workflow which evaluates an energy window.
    """
    result, node = run_get_node(
        run_window_builder([-4.5, -4, 6.5, 16], slice_=slice_, symmetries=symmetries))
    assert node.is_finished_ok
    assert all(key in result for key in ['cost_value', 'tb_model', 'plot'])

def test_run_reaxff_fes(db_test_app, get_structure, data_regression):
    """Test submitting a calculation."""
    code = db_test_app.get_or_create_code("gulp.fitting")
    builder = code.get_builder()
    builder.metadata = db_test_app.get_default_metadata()

    potential_cls = DataFactory("gulp.potential")
    pot_data = read_lammps_format(
        read_resource_text("gulp", "potentials", "FeCrOSCH.reaxff").splitlines())
    pot_data = filter_by_species(pot_data, ["Fe core", "S core"])

    builder.settings = {"observables": {"energy": {}}}
    builder.potential = potential_cls(
        "reaxff",
        pot_data,
        fitting_data={
            "species": ["Fe core", "S core"],
            "global": ["reaxff0_boc1", "reaxff0_boc2"],
        },
    )
    builder.structures = {
        "pyrite": get_structure("pyrite"),
        "marcasite": get_structure("marcasite"),
        "zincblende": get_structure("zincblende"),
    }
    builder.observables = {
        "pyrite": Dict(dict={"energy": -1, "energy_units": "eV"}),
        "marcasite": Dict(dict={"energy": -1, "energy_units": "eV"}),
        "zincblende": Dict(dict={"energy": 1, "energy_units": "eV"}),
    }
    builder.metadata["options"]["allow_create_potential_fail"] = True

    calc_node = run_get_node(builder).node
    db_test_app.check_calculation(calc_node, ["results"])

    result = recursive_round(calc_node.outputs.results.get_dict(), 6)
    for key in [
            "parser_version",
            "peak_dynamic_memory_mb",
            "opt_time_second",
            "total_time_second",
    ]:
        result.pop(key, None)
    data_regression.check(result)

def test_md_multi_process(db_test_app, get_potential_data, potential_type, data_regression):
    calc_plugin = "lammps.md.multi"
    code = db_test_app.get_or_create_code(calc_plugin)
    pot_data = get_potential_data(potential_type)
    potential = DataFactory("lammps.potential")(type=pot_data.type, data=pot_data.data)
    parameters = get_calc_parameters(
        get_lammps_version(code), calc_plugin, potential.default_units, potential_type
    )
    builder = code.get_builder()
    builder._update(
        {
            "metadata": tests.get_default_metadata(),
            "code": code,
            "structure": pot_data.structure,
            "potential": potential,
            "parameters": parameters,
        }
    )

    output = run_get_node(builder)
    calc_node = output.node

    if not calc_node.is_finished_ok:
        print(calc_node.attributes)
        print(get_calcjob_report(calc_node))
        raise Exception("finished with exit message: {}".format(calc_node.exit_message))

    link_labels = calc_node.get_outgoing().all_link_labels()
    assert set(link_labels).issuperset(
        [
            "results",
            "retrieved",
            "trajectory__thermalise",
            "trajectory__equilibrate",
            "system__thermalise",
            "system__equilibrate",
        ]
    )

    data_regression.check(
        {
            "retrieved": calc_node.outputs.retrieved.list_object_names(),
            "results": sanitize_results(
                calc_node.outputs.results.get_dict(), round_energy=1
            ),
            "system__thermalise": calc_node.outputs.system__thermalise.attributes,
            "system__equilibrate": calc_node.outputs.system__equilibrate.attributes,
            "trajectory__thermalise": calc_node.outputs.trajectory__thermalise.attributes,
            "trajectory__equilibrate": calc_node.outputs.trajectory__equilibrate.attributes,
        }
    )

def example_dft(gaussian_code):
    """Run a simple gaussian optimization"""

    # structure
    structure = StructureData(pymatgen_molecule=mg.Molecule.from_file('./ch4.xyz'))

    num_cores = 1
    memory_mb = 300

    # Main parameters: geometry optimization
    parameters = Dict(
        dict={
            'link0_parameters': {
                '%chk': 'aiida.chk',
                '%mem': "%dMB" % memory_mb,
                '%nprocshared': num_cores,
            },
            'functional': 'BLYP',
            'basis_set': '6-31g',
            'charge': 0,
            'multiplicity': 1,
            'route_parameters': {
                'scf': {
                    'cdiis': None,
                },
                'nosymm': None,
                'opt': None,
            },
        })

    # Construct process builder
    builder = GaussianCalculation.get_builder()

    builder.structure = structure
    builder.parameters = parameters
    builder.code = gaussian_code

    builder.metadata.options.resources = {
        "num_machines": 1,
        "tot_num_mpiprocs": num_cores,
    }

    # Should ask for 25% extra memory
    builder.metadata.options.max_memory_kb = int(1.25 * memory_mb) * 1024
    builder.metadata.options.max_wallclock_seconds = 5 * 60

    print("Running calculation...")
    res, _node = run_get_node(builder)

    print("Final scf energy: %.4f" % res['output_parameters']['scfenergies'][-1])

def test_launchers(self):
    """Verify that the various launchers are working."""
    result = run(self.function_return_true)
    self.assertTrue(result)

    result, node = run_get_node(self.function_return_true)
    self.assertTrue(result)
    self.assertEqual(result, get_true_node())
    self.assertTrue(isinstance(node, orm.CalcFunctionNode))

    with self.assertRaises(AssertionError):
        submit(self.function_return_true)

def test_fleur_scf_fleurinp_Si(  # run_with_cache,
        with_export_cache, fleur_local_code, create_fleurinp, clear_database, clear_spec):
    """
    Full example using the scf workflow with just a fleurinp data as input.
    Several fleur runs are needed until convergence.
    """
    options = {
        'resources': {
            'num_machines': 1,
            'num_mpiprocs_per_machine': 1
        },
        'max_wallclock_seconds': 5 * 60,
        'withmpi': False,
        'custom_scheduler_commands': ''
    }

    FleurCode = fleur_local_code

    # create process builder to set parameters
    builder = FleurScfWorkChain.get_builder()
    builder.metadata.description = 'Simple Fleur SCF test for Si bulk with fleurinp data given'
    builder.metadata.label = 'FleurSCF_test_Si_bulk'
    builder.fleurinp = create_fleurinp(TEST_INP_XML_PATH).store()
    builder.options = Dict(dict=options).store()
    builder.fleur = FleurCode
    #print(builder)

    # now run calculation
    #run_with_cache(builder)
    data_dir_path = os.path.join(
        aiida_path, '../tests/workflows/caches/fleur_scf_fleurinp_Si.tar.gz')
    with with_export_cache(data_dir_abspath=data_dir_path):
        out, node = run_get_node(builder)
    #print(out)
    #print(node)

    print(get_workchain_report(node, 'REPORT'))
    #assert node.is_finished_ok

    # check output
    n = out['output_scf_wc_para']
    n = n.get_dict()
    print(get_calcjob_report(load_node(n['last_calc_uuid'])))
    #print(n)
    assert abs(n.get('distance_charge') - 9.8993e-06) < 2.0e-6
    assert n.get('errors') == []