def test_run_mgo_scf_folder(db_test_app, sanitise_calc_attr, data_regression):
    # type: (AiidaTestApp) -> None
    """Test running a calculation."""
    # set up calculation
    builder = db_test_app.get_or_create_code("crystal17.newk").get_builder()
    builder.metadata = get_metadata()
    builder.parameters = Dict(dict={"k_points": [18, 36]})
    wf_folder = FolderData()
    with open_resource_binary("newk", "mgo_sto3g_scf", "fort.9") as handle:
        wf_folder.put_object_from_filelike(handle, "fort.9", mode="wb")
    builder.wf_folder = wf_folder

    output = run_get_node(builder)
    calc_node = output.node

    db_test_app.check_calculation(calc_node, ["results"])

    calc_attributes = sanitise_calc_attr(calc_node.attributes)

    results = {
        k: round(i, 7) if isinstance(i, float) else i
        for k, i in calc_node.outputs.results.attributes.items()
        if k not in ["execution_time_seconds"]
    }

    data_regression.check({
        "calc": calc_attributes,
        "results": results,
    })
def test_empty_log(db_test_app, plugin_name):
    """Check that parsing an empty lammps log fails with a log parsing error."""
    retrieved = FolderData()
    for filename in [
            'log.lammps',
            'trajectory.lammpstrj',
            '_scheduler-stdout.txt',
            '_scheduler-stderr.txt',
    ]:
        retrieved.put_object_from_filelike(io.StringIO(''), filename)

    calc_node = db_test_app.generate_calcjob_node(plugin_name, retrieved)
    parser = ParserFactory(plugin_name)
    with db_test_app.sandbox_folder() as temp_path:
        with temp_path.open('x-trajectory.lammpstrj', 'w'):
            pass
        results, calcfunction = parser.parse_from_node(  # pylint: disable=unused-variable
            calc_node,
            retrieved_temporary_folder=temp_path.abspath,
        )

    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_failed, calcfunction.exit_status
    assert (calcfunction.exit_status ==
            calc_node.process_class.exit_codes.ERROR_LOG_PARSING.status)
def test_single(db_test_app):
    # type: (AiidaTestApp) -> None
    retrieved = FolderData()
    with open_resource_binary("gulp", "optimize_reaxff_pyrite", "main.gout") as handle:
        retrieved.put_object_from_filelike(handle, "main.gout", mode="wb")
    node = db_test_app.generate_calcjob_node("gulp.single", retrieved)
    results, calcfunction = db_test_app.parse_from_node("gulp.single", node)
    assert calcfunction.is_finished_ok
    assert "results" in results
def test_optimize_no_cif(db_test_app):
    # type: (AiidaTestApp) -> None
    calc_cls = db_test_app.get_calc_cls("gulp.optimize")
    retrieved = FolderData()
    with open_resource_binary("gulp", "optimize_reaxff_pyrite", "main.gout") as handle:
        retrieved.put_object_from_filelike(handle, "main.gout", mode="wb")
    node = db_test_app.generate_calcjob_node("gulp.optimize", retrieved)
    results, calcfunction = db_test_app.parse_from_node("gulp.optimize", node)
    assert calcfunction.is_finished
    assert not calcfunction.is_finished_ok
    assert calcfunction.exit_status == calc_cls.exit_codes.ERROR_CIF_FILE_MISSING.status
def test_empty_output(db_test_app, plugin_name):
    retrieved = FolderData()
    retrieved.put_object_from_filelike(StringIO(""), "main.out")
    calc_node = db_test_app.generate_calcjob_node(plugin_name, retrieved)
    results, calcfunction = db_test_app.parse_from_node(plugin_name, calc_node)
    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_failed, calcfunction.exit_status
    assert (calcfunction.exit_status ==
            calc_node.process_class.exit_codes.ERROR_PARSING_STDOUT.status)
def test_failed_pbs(db_test_app, plugin_name, fcontent, error_msg):
    retrieved = FolderData()
    retrieved.put_object_from_filelike(StringIO(fcontent), "_scheduler-stderr.txt")
    calc_node = db_test_app.generate_calcjob_node(plugin_name, retrieved)
    results, calcfunction = db_test_app.parse_from_node(plugin_name, calc_node)
    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_failed, calcfunction.exit_status
    assert (
        calcfunction.exit_status
        == calc_node.process_class.exit_codes[error_msg].status
    )
def test_missing_traj(db_test_app, plugin_name):
    retrieved = FolderData()
    with retrieved.open('log.lammps', 'w'):
        pass
    calc_node = db_test_app.generate_calcjob_node(plugin_name, retrieved)
    parser = db_test_app.get_parser_cls(plugin_name)
    results, calcfunction = parser.parse_from_node(calc_node)
    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_failed, calcfunction.exit_status
    assert calcfunction.exit_status == calc_node.process_class.exit_codes.ERROR_TRAJ_FILE_MISSING.status
def test_get_scheduler_stderr(self):
    """Verify that `get_scheduler_stderr` returns the scheduler stderr content only when both the option is set and the file was retrieved."""
    option_key = 'scheduler_stderr'
    option_value = '_scheduler-error.txt'
    stderr = 'some\nstandard error'

    # Note: cannot use pytest.mark.parametrize in unittest classes, so I just do a loop here
    for with_file in [True, False]:
        for with_option in [True, False]:
            node = CalcJobNode(computer=self.computer)
            node.set_option('resources', {'num_machines': 1, 'num_mpiprocs_per_machine': 1})
            retrieved = FolderData()

            if with_file:
                retrieved.put_object_from_filelike(io.StringIO(stderr), option_value)
            if with_option:
                node.set_option(option_key, option_value)

            node.store()
            retrieved.store()
            retrieved.add_incoming(node, link_type=LinkType.CREATE, link_label='retrieved')

            # It should return `None` if no scheduler output is there (file not there, or option not set),
            # while it should return the content if both are set
            self.assertEqual(node.get_scheduler_stderr(), stderr if with_file and with_option else None)
def test_missing_isofile(db_test_app, plugin_name):
    retrieved = FolderData()
    with open_resource_binary(
        "doss", "cubic_rocksalt_orbitals", "cubic-rocksalt_2x1_pdos.doss.out"
    ) as handle:
        retrieved.put_object_from_filelike(handle, "main.out", mode="wb")
    calc_node = db_test_app.generate_calcjob_node(plugin_name, retrieved)
    results, calcfunction = db_test_app.parse_from_node(plugin_name, calc_node)
    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_failed, calcfunction.exit_status
    assert (calcfunction.exit_status ==
            calc_node.process_class.exit_codes.ERROR_ISOVALUE_FILE_MISSING.status)
def test_missing_density(db_test_app, plugin_name):
    retrieved = FolderData()
    with open_resource_binary("ech3", "mgo_sto3g_scf", "main.out") as handle:
        retrieved.put_object_from_filelike(handle, "main.out", mode="wb")
    calc_node = db_test_app.generate_calcjob_node(plugin_name, retrieved)
    with db_test_app.sandbox_folder() as temp_folder:
        results, calcfunction = db_test_app.parse_from_node(
            plugin_name, calc_node, retrieved_temp=temp_folder.abspath)
    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_failed, calcfunction.exit_status
    assert (calcfunction.exit_status ==
            calc_node.process_class.exit_codes.ERROR_DENSITY_FILE_MISSING.status)
def test_failed_scf_convergence(db_test_app, plugin_name):
    retrieved = FolderData()
    with open_resource_binary("crystal", "failed", "FAILED_SCF_bcc_iron.out") as handle:
        retrieved.put_object_from_filelike(handle, "main.out", mode="wb")
    calc_node = db_test_app.generate_calcjob_node(plugin_name, retrieved)
    results, calcfunction = db_test_app.parse_from_node(plugin_name, calc_node)
    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_failed, calcfunction.exit_status
    assert (
        calcfunction.exit_status
        == calc_node.process_class.exit_codes.UNCONVERGED_SCF.status
    )
def _fixture_folderdata(dir_path, replacement_mapping=None):
    """Create a `FolderData` from the files in `dir_path`, renaming each file according to `replacement_mapping`."""
    if replacement_mapping is None:
        replacement_mapping = {}
    dir_path = str(dir_path)  # TODO: Remove cast to 'str' when Python2 support is dropped.
    from aiida.orm import FolderData

    folder = FolderData()
    for file_path in os.listdir(dir_path):
        abs_path = os.path.abspath(os.path.join(dir_path, file_path))
        res_file_path = file_path
        for old, new in replacement_mapping.items():
            res_file_path = res_file_path.replace(old, new)
        folder.put_object_from_file(abs_path, res_file_path)
    return folder
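# Usage sketch for the `_fixture_folderdata` helper above. The directory 'fixtures/example_calc'
# and the 'aiida_' -> 'renamed_' mapping are hypothetical, purely for illustration; the only
# library call assumed is `FolderData.list_object_names` from the aiida-core repository interface.
def _example_fixture_folderdata_usage():
    folder = _fixture_folderdata('fixtures/example_calc', replacement_mapping={'aiida_': 'renamed_'})
    # Every file from the (flat) source directory should now be stored under its renamed key
    assert all(not name.startswith('aiida_') for name in folder.list_object_names())
    return folder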
def recollect_atomic_calculations(**kwargs):
    """
    Collect the perturbation files from the retrieved folders of the individual atomic calculations into a
    single folder, where the index appended to each perturbation file name corresponds to the Hubbard atom
    for which it was computed.

    :param kwargs: keys are the string representation of the hubbard atom index and the value is the
        corresponding retrieved folder object.
    :return: FolderData object containing the perturbation files of the computed HpBaseWorkChain
    """
    import os
    import errno

    output_folder_sub = HpCalculation._OUTPUT_SUBFOLDER
    output_folder_raw = HpCalculation._FOLDER_RAW
    output_prefix = HpCalculation()._PREFIX

    # Initialize the merged folder, by creating the subdirectory for the perturbation files
    merged_folder = FolderData()
    folder_path = os.path.normpath(merged_folder.get_abs_path('.'))
    output_path = os.path.join(folder_path, output_folder_raw)

    try:
        os.makedirs(output_path)
    except OSError as error:
        if error.errno == errno.EEXIST and os.path.isdir(output_path):
            pass
        else:
            raise

    for atomic_site_index, retrieved_folder in kwargs.iteritems():
        filepath = os.path.join(output_folder_raw, '{}.chi.pert_{}.dat'.format(output_prefix, atomic_site_index))
        filepath_src = retrieved_folder.get_abs_path(filepath)
        filepath_dst = filepath
        merged_folder.add_path(filepath_src, filepath_dst)

    # TODO: currently the Hp code requires the .save folder that is written by the original
    # PwCalculation, for the final post-processing matrix collection step. It doesn't really need all
    # the information contained in that folder, and requiring it means copying it from the remote to a
    # local folder and then re-uploading it to the remote folder. This is unnecessarily heavy
    retrieved_folder = kwargs.values()[0]
    dirpath = os.path.join(output_folder_sub, output_prefix + '.save')
    dirpath_src = retrieved_folder.get_abs_path(dirpath)
    dirpath_dst = dirpath
    merged_folder.add_path(dirpath_src, dirpath_dst)

    retrieved_folder = kwargs.values()[0]
    filepath = os.path.join(output_folder_sub, output_prefix + '.occup')
    filepath_src = retrieved_folder.get_abs_path(filepath)
    filepath_dst = filepath
    merged_folder.add_path(filepath_src, filepath_dst)

    return merged_folder
def parse(self, **kwargs):
    """Parse the retrieved folder and, if requested, the retrieved temporary folder of the calculation."""
    retrieved_folders = []

    try:
        retrieved_folder = self.retrieved
        retrieved_folders.append(retrieved_folder)
    except NotExistent:
        return self.exit_codes.ERROR_NO_RETRIEVED_FOLDER

    retrieve_temp_list_input = self.node.get_attribute('retrieve_temporary_list', None)
    # If temporary files were specified, check that we have them
    if retrieve_temp_list_input:
        try:
            retrieved_temp_folder_path = kwargs['retrieved_temporary_folder']
            # Create a FolderData object so the temporary folder can be treated the same way as the retrieved one
            temp_fd = FolderData(tree=retrieved_temp_folder_path)
            retrieved_folders.append(temp_fd)
        except KeyError:
            return self.exit_codes.ERROR_NO_RETRIEVED_TEMPORARY_FOLDER

    if "parser_params" in self.node.inputs:
        parser_params = dict(self.node.inputs.parser_params)
    else:
        parser_params = {}

    self._parse_folders(retrieved_folders, parser_params)

    return ExitCode(0)
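# The `_parse_folders` helper called above is not shown here. Below is a minimal, hypothetical sketch
# of what such a method might look like, written as a standalone function for readability: it assumes
# a flat folder layout and uses only `list_object_names` / `get_object_content` from the aiida-core
# repository interface. It is not the plugin's real implementation.
def _parse_folders(self, retrieved_folders, parser_params):
    """Read every top-level file in the given folders (illustrative sketch only)."""
    contents = {}
    for folder in retrieved_folders:
        for filename in folder.list_object_names():
            # Collect the raw text content of each file for later processing
            contents[filename] = folder.get_object_content(filename)
    return contents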
def test_optimize_1d_molecule(db_test_app, get_structure):
    # type: (AiidaTestApp) -> None
    retrieved = FolderData()
    with open_resource_binary("gulp", "s2_polymer_opt", "main.gout") as handle:
        retrieved.put_object_from_filelike(handle, "main.gout", mode="wb")
    node = db_test_app.generate_calcjob_node(
        "gulp.optimize",
        retrieved,
        input_nodes={"structure": get_structure("s2_molecule")},
    )
    results, calcfunction = db_test_app.parse_from_node("gulp.optimize", node)
    if not calcfunction.is_finished_ok:
        raise AssertionError(calcfunction.attributes)
    assert "results" in results
    assert "structure" in results
def test_success(db_test_app, plugin_name, data_regression):
    retrieved = FolderData()
    with open_resource_binary(
        "doss", "cubic_rocksalt_orbitals", "cubic-rocksalt_2x1_pdos.doss.out"
    ) as handle:
        retrieved.put_object_from_filelike(handle, "main.out", mode="wb")
    calc_node = db_test_app.generate_calcjob_node(plugin_name, retrieved)
    results, calcfunction = db_test_app.parse_from_node(plugin_name, calc_node)
    assert calcfunction.is_finished_ok, calcfunction.exception
    assert "results" in results
    results_attr = {
        k: round(i, 7) if isinstance(i, float) else i
        for k, i in results["results"].attributes.items()
    }
    data_regression.check({"results": results_attr})
def test_single_no_file(db_test_app):
    # type: (AiidaTestApp) -> None
    retrieved = FolderData()
    calc_cls = db_test_app.get_calc_cls("gulp.single")
    node = db_test_app.generate_calcjob_node("gulp.single", retrieved)
    results, calcfunction = db_test_app.parse_from_node("gulp.single", node)
    assert calcfunction.is_finished
    assert not calcfunction.is_finished_ok
    assert (calcfunction.exit_status ==
            calc_cls.exit_codes.ERROR_OUTPUT_FILE_MISSING.status)
def _generate_lua_folder():
    """Return `FolderData` node."""
    from aiida.orm import FolderData

    foldername = os.path.join('tests', 'fixtures', 'lua_scripts', 'neb-data')
    folderpath = os.path.abspath(foldername)
    lua_folder = FolderData(tree=folderpath)
    return lua_folder
def test_success(db_test_app, plugin_name, data_regression):
    retrieved = FolderData()
    with open_resource_binary("ech3", "mgo_sto3g_scf", "main.out") as handle:
        retrieved.put_object_from_filelike(handle, "main.out", mode="wb")
    calc_node = db_test_app.generate_calcjob_node(plugin_name, retrieved)
    with resource_context("ech3", "mgo_sto3g_scf") as path:
        results, calcfunction = db_test_app.parse_from_node(
            plugin_name, calc_node, retrieved_temp=str(path))
    assert calcfunction.is_finished_ok, calcfunction.exception
    assert "results" in results
    assert "charge" in results
    data_regression.check({
        "results": recursive_round(results["results"].attributes, 7),
        "charge": recursive_round(results["charge"].attributes, 7),
    })
def test_missing_stdout(db_test_app, plugin_name):
    retrieved = FolderData()
    calc_node = db_test_app.generate_calcjob_node(plugin_name, retrieved)
    results, calcfunction = db_test_app.parse_from_node(plugin_name, calc_node)
    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_failed, calcfunction.exit_status
    assert (calcfunction.exit_status ==
            calc_node.process_class.exit_codes.ERROR_OUTPUT_FILE_MISSING.status)
def test_fleurinp_modifier_instance_modifications_node(create_fleurinp):
    """Test that set_file and del_file on the fleurinp modifier work with a FolderData node, using the right interface."""
    from aiida.orm import FolderData

    fleurinp_tmp = create_fleurinp(inpxmlfilefolder2)

    n_mmp_mat_folder = os.path.dirname(os.path.abspath(__file__))
    n_mmp_mat_folder = os.path.abspath(os.path.join(n_mmp_mat_folder, '../files/n_mmp_mat'))
    n_mmp_mat_folder = FolderData(tree=n_mmp_mat_folder)
    n_mmp_mat_folder.store()

    fm = FleurinpModifier(fleurinp_tmp)
    fm.set_file('n_mmp_mat_GaAsMultiForceXML', dst_filename='n_mmp_mat', node=n_mmp_mat_folder)
    new_fleurinp = fm.freeze()

    assert 'n_mmp_mat' in new_fleurinp.files

    fm = FleurinpModifier(new_fleurinp)
    fm.del_file('n_mmp_mat')
    new_fleurinp = fm.freeze()

    assert 'n_mmp_mat' not in new_fleurinp.files
def folder_data():
    """Create a `FolderData` instance with basic file and directory structure."""
    node = FolderData()
    node.put_object_from_filelike(io.StringIO(''), 'nested/file.txt')
    node.put_object_from_filelike(io.StringIO(''), 'file.txt')
    return node
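# Usage sketch for the `folder_data` helper above, assuming it is registered as a pytest fixture.
# The test name is hypothetical; the only library call assumed is `list_object_names` from the
# aiida-core repository interface, which lists object names at the given (optional) sub-path.
def test_folder_data_structure(folder_data):
    """Check that both the top-level file and the nested file are present."""
    assert 'file.txt' in folder_data.list_object_names()
    assert 'file.txt' in folder_data.list_object_names('nested')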
def test_failed_optimisation(db_test_app, plugin_name, data_regression):
    """Test that if the optimisation is killed before completion, the trajectory data is still available."""
    retrieved = FolderData()
    with open_resource_binary(
        "crystal", "nio_sto3g_afm_opt_walltime", "main.out"
    ) as handle:
        retrieved.put_object_from_filelike(handle, "main.out", mode="wb")
    with open_resource_binary(
        "crystal", "nio_sto3g_afm_opt_walltime", "_scheduler-stderr.txt"
    ) as handle:
        retrieved.put_object_from_filelike(handle, "_scheduler-stderr.txt", mode="wb")
    calc_node = db_test_app.generate_calcjob_node(plugin_name, retrieved)

    with resource_context("crystal", "nio_sto3g_afm_opt_walltime") as path:
        results, calcfunction = db_test_app.parse_from_node(
            plugin_name, calc_node, retrieved_temp=str(path)
        )
    # print(get_calcjob_report(calc_node))

    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_failed, calcfunction.exit_status
    assert (
        calcfunction.exit_status
        == calc_node.process_class.exit_codes.ERROR_OUT_OF_WALLTIME.status
    )
    assert "optimisation" in results, results
    data_regression.check(results["optimisation"].attributes)
def test_run_error(db_test_app, plugin_name):
    """Check that an error reported in the scheduler output is parsed as a failed LAMMPS run."""
    retrieved = FolderData()
    retrieved.put_object_from_filelike(
        io.StringIO(get_log()),
        'log.lammps',
    )
    retrieved.put_object_from_filelike(
        io.StringIO(get_traj_force()),
        'x-trajectory.lammpstrj',
    )
    retrieved.put_object_from_filelike(
        io.StringIO('ERROR description'),
        '_scheduler-stdout.txt',
    )
    retrieved.put_object_from_filelike(
        io.StringIO(''),
        '_scheduler-stderr.txt',
    )

    calc_node = db_test_app.generate_calcjob_node(plugin_name, retrieved)
    parser = ParserFactory(plugin_name)
    with db_test_app.sandbox_folder() as temp_path:
        with temp_path.open('x-trajectory.lammpstrj', 'w') as handle:
            handle.write(get_traj_force())
        results, calcfunction = parser.parse_from_node(  # pylint: disable=unused-variable
            calc_node,
            retrieved_temporary_folder=temp_path.abspath,
        )

    print(get_calcjob_report(calc_node))

    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_failed, calcfunction.exit_status
    assert (calcfunction.exit_status ==
            calc_node.process_class.exit_codes.ERROR_LAMMPS_RUN.status)
def test_missing_log(db_test_app, plugin_name):
    """Check that the parser fails if no log file was produced by the calculation."""
    retrieved = FolderData()
    calc_node = db_test_app.generate_calcjob_node(plugin_name, retrieved)
    parser = ParserFactory(plugin_name)
    with db_test_app.sandbox_folder() as temp_path:
        results, calcfunction = parser.parse_from_node(  # pylint: disable=unused-variable
            calc_node,
            retrieved_temporary_folder=temp_path.abspath,
        )

    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_failed, calcfunction.exit_status
    assert (calcfunction.exit_status ==
            calc_node.process_class.exit_codes.ERROR_LOG_FILE_MISSING.status)
def test_missing_traj(db_test_app, plugin_name):
    """Check that the parser fails if no trajectory file was produced by the calculation."""
    retrieved = FolderData()
    retrieved.put_object_from_filelike(io.StringIO(get_log()), 'log.lammps')
    retrieved.put_object_from_filelike(io.StringIO(''), '_scheduler-stdout.txt')
    retrieved.put_object_from_filelike(io.StringIO(''), '_scheduler-stderr.txt')

    calc_node = db_test_app.generate_calcjob_node(plugin_name, retrieved)
    parser = ParserFactory(plugin_name)
    with db_test_app.sandbox_folder() as temp_path:
        results, calcfunction = parser.parse_from_node(  # pylint: disable=unused-variable
            calc_node,
            retrieved_temporary_folder=temp_path.abspath,
        )

    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_failed, calcfunction.exit_status
    assert (calcfunction.exit_status ==
            calc_node.process_class.exit_codes.ERROR_TRAJ_FILE_MISSING.status)
def _inner(file_path, input_settings=None):
    # Use the localhost test computer
    computer = localhost
    process_type = 'aiida.calculations:{}'.format('vasp.vasp')

    node = CalcJobNode(computer=computer, process_type=process_type)
    node.set_attribute('input_filename', 'INCAR')
    node.set_attribute('output_filename', 'OUTCAR')
    # node.set_attribute('error_filename', 'aiida.err')
    node.set_attribute('scheduler_stderr', '_scheduler-stderr.txt')
    node.set_attribute('scheduler_stdout', '_scheduler-stdout.txt')
    node.set_option('resources', {'num_machines': 1, 'num_mpiprocs_per_machine': 1})
    node.set_option('max_wallclock_seconds', 1800)

    if input_settings is None:
        input_settings = {}

    settings = Dict(dict=input_settings)
    node.add_incoming(settings, link_type=LinkType.INPUT_CALC, link_label='settings')
    settings.store()
    node.store()

    # Create a `FolderData` that will represent the `retrieved` folder. Store the test
    # output fixture in there and link it.
    retrieved = FolderData()
    retrieved.put_object_from_tree(file_path)
    retrieved.add_incoming(node, link_type=LinkType.CREATE, link_label='retrieved')
    retrieved.store()

    return node
def test_optimize_success(db_test_app):
    # type: (AiidaTestApp) -> None
    retrieved = FolderData()
    with open_resource_binary("gulp", "optimize_reaxff_pyrite", "main.gout") as handle:
        retrieved.put_object_from_filelike(handle, "main.gout", mode="wb")
    with open_resource_binary("gulp", "optimize_reaxff_pyrite", "output.cif") as handle:
        retrieved.put_object_from_filelike(handle, "output.cif", mode="wb")
    node = db_test_app.generate_calcjob_node(
        "gulp.optimize", retrieved, options={"use_input_kinds": False})
    results, calcfunction = db_test_app.parse_from_node("gulp.optimize", node)
    if not calcfunction.is_finished_ok:
        raise AssertionError(calcfunction.attributes)
    assert "results" in results
    assert "structure" in results
def test_get_scheduler_stderr(self):
    """Verify that `get_scheduler_stderr` returns the scheduler stderr content once the option is set and the file is retrieved."""
    option_key = 'scheduler_stderr'
    option_value = '_scheduler-error.txt'
    stderr = 'some\nstandard error'

    node = CalcJobNode(computer=self.computer)
    node.set_option('resources', {'num_machines': 1, 'num_mpiprocs_per_machine': 1})
    retrieved = FolderData()

    # No scheduler error filename option set, so it should return `None`
    self.assertEqual(node.get_scheduler_stderr(), None)

    # No retrieved folder yet, so it should return `None`
    node.set_option(option_key, option_value)
    self.assertEqual(node.get_scheduler_stderr(), None)

    # Now it has a retrieved folder, but the file does not actually exist in it: it should not raise
    # but return `None`
    node.store()
    retrieved.store()
    retrieved.add_incoming(node, link_type=LinkType.CREATE, link_label='retrieved')
    self.assertEqual(node.get_scheduler_stderr(), None)

    # Add the file to the retrieved folder
    with tempfile.NamedTemporaryFile(mode='w+') as handle:
        handle.write(stderr)
        handle.flush()
        handle.seek(0)
        retrieved.put_object_from_filelike(handle, option_value, force=True)
    self.assertEqual(node.get_scheduler_stderr(), stderr)
def _fixture_calc_job_node(entry_point_name, computer, test_name, attributes=None):
    """Fixture to generate a mock `CalcJobNode` for testing parsers.

    :param entry_point_name: entry point name of the calculation class
    :param computer: a `Computer` instance
    :param test_name: relative path of directory with test output files in the `fixtures/{entry_point_name}` folder
    :param attributes: any optional attributes to set on the node
    :return: `CalcJobNode` instance with an attached `FolderData` as the `retrieved` node
    """
    from aiida.common.links import LinkType
    from aiida.orm import CalcJobNode, FolderData
    from aiida.plugins.entry_point import format_entry_point_string

    entry_point = format_entry_point_string('aiida.calculations', entry_point_name)

    node = CalcJobNode(computer=computer, process_type=entry_point)
    node.set_attribute('input_filename', 'aiida.in')
    node.set_attribute('output_filename', 'aiida.out')
    node.set_attribute('error_filename', 'aiida.err')
    node.set_option('resources', {'num_machines': 1, 'num_mpiprocs_per_machine': 1})
    node.set_option('max_wallclock_seconds', 1800)

    if attributes:
        node.set_attribute_many(attributes)

    node.store()

    basepath = os.path.dirname(os.path.abspath(__file__))
    filepath = os.path.join(basepath, 'parsers', 'fixtures', entry_point_name[len('codtools.'):], test_name)

    retrieved = FolderData()
    retrieved.put_object_from_tree(filepath)
    retrieved.add_incoming(node, link_type=LinkType.CREATE, link_label='retrieved')
    retrieved.store()

    return node
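# Usage sketch for `_fixture_calc_job_node` above. The entry point name, test name, and the
# `fixture_localhost` computer fixture are placeholders for illustration only; the aiida-core calls
# used (`get_outgoing` with `link_label_filter`, `get_option`) are standard, but this test itself
# is hypothetical and not part of the original suite.
def test_fixture_calc_job_node_links(fixture_localhost):
    from aiida.orm import FolderData

    node = _fixture_calc_job_node('codtools.cifcellcontents', fixture_localhost, 'default')
    # The fixture stores the node and attaches the retrieved folder via a CREATE link
    retrieved = node.get_outgoing(link_label_filter='retrieved').one().node
    assert isinstance(retrieved, FolderData)
    assert node.get_option('max_wallclock_seconds') == 1800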