def test_failed_optimisation(db_test_app, plugin_name, data_regression):
    """Test that if the optimisation is killed before completion, the trajectory data is still available."""
    retrieved = FolderData()
    # retrieve both the main output and the scheduler stderr from the fixture folder
    for filename in ("main.out", "_scheduler-stderr.txt"):
        with open_resource_binary(
            "crystal", "nio_sto3g_afm_opt_walltime", filename
        ) as handle:
            retrieved.put_object_from_filelike(handle, filename, mode="wb")

    calc_node = db_test_app.generate_calcjob_node(plugin_name, retrieved)
    with resource_context("crystal", "nio_sto3g_afm_opt_walltime") as path:
        results, calcfunction = db_test_app.parse_from_node(
            plugin_name, calc_node, retrieved_temp=str(path)
        )

    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_failed, calcfunction.exit_status
    expected_status = calc_node.process_class.exit_codes.ERROR_OUT_OF_WALLTIME.status
    assert calcfunction.exit_status == expected_status
    # the partial optimisation trajectory must still be exposed as an output
    assert "optimisation" in results, results
    data_regression.check(results["optimisation"].attributes)
def test_get_scheduler_stderr(self):
    """Verify that ``get_scheduler_stderr`` returns the scheduler error file content
    only when both the ``scheduler_stderr`` option is set and the file exists in the
    retrieved folder, and returns ``None`` in every other combination.
    """
    option_key = 'scheduler_stderr'
    option_value = '_scheduler-error.txt'
    stderr = 'some\nstandard error'

    # Note: cannot use pytest.mark.parametrize in unittest classes, so I just do a loop here
    # over the four (file present, option set) combinations
    for with_file in [True, False]:
        for with_option in [True, False]:
            node = CalcJobNode(computer=self.computer, )
            node.set_option('resources', {
                'num_machines': 1,
                'num_mpiprocs_per_machine': 1
            })
            retrieved = FolderData()

            if with_file:
                retrieved.put_object_from_filelike(io.StringIO(stderr), option_value)
            if with_option:
                node.set_option(option_key, option_value)

            node.store()
            retrieved.store()
            retrieved.add_incoming(node, link_type=LinkType.CREATE, link_label='retrieved')

            # It should return `None` if no scheduler output is there (file not there,
            # or option not set), while it should return the content if both are set
            self.assertEqual(
                node.get_scheduler_stderr(),
                stderr if with_file and with_option else None)
def test_empty_log(db_test_app, plugin_name):
    """Check if the lammps log is empty."""
    filenames = (
        'log.lammps',
        'trajectory.lammpstrj',
        '_scheduler-stdout.txt',
        '_scheduler-stderr.txt',
    )
    retrieved = FolderData()
    for name in filenames:
        retrieved.put_object_from_filelike(io.StringIO(''), name)

    calc_node = db_test_app.generate_calcjob_node(plugin_name, retrieved)
    parser = ParserFactory(plugin_name)
    with db_test_app.sandbox_folder() as temp_path:
        # create an (empty) trajectory file in the temporary retrieved folder
        with temp_path.open('x-trajectory.lammpstrj', 'w'):
            pass
        results, calcfunction = parser.parse_from_node(  # pylint: disable=unused-variable
            calc_node,
            retrieved_temporary_folder=temp_path.abspath,
        )

    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_failed, calcfunction.exit_status
    assert calcfunction.exit_status == calc_node.process_class.exit_codes.ERROR_LOG_PARSING.status
def test_run_mgo_scf_folder(db_test_app, sanitise_calc_attr, data_regression):
    """Test running a calculation, supplying the wavefunction via a ``FolderData`` input."""
    # set up calculation
    builder = db_test_app.get_or_create_code("crystal17.newk").get_builder()
    builder.metadata = get_metadata()
    builder.parameters = Dict(dict={"k_points": [18, 36]})

    wf_folder = FolderData()
    with open_resource_binary("newk", "mgo_sto3g_scf", "fort.9") as handle:
        wf_folder.put_object_from_filelike(handle, "fort.9", mode="wb")
    builder.wf_folder = wf_folder

    output = run_get_node(builder)
    calc_node = output.node

    db_test_app.check_calculation(calc_node, ["results"])

    calc_attributes = sanitise_calc_attr(calc_node.attributes)
    # round floats so the regression file is stable, and drop timing attributes
    results = {
        key: round(value, 7) if isinstance(value, float) else value
        for key, value in calc_node.outputs.results.attributes.items()
        if key not in ["execution_time_seconds"]
    }
    data_regression.check({"calc": calc_attributes, "results": results})
def folder_data():
    """Create a `FolderData` instance with basic file and directory structure."""
    node = FolderData()
    # one file at the top level, one inside a nested directory
    for relpath in ('nested/file.txt', 'file.txt'):
        node.put_object_from_filelike(io.StringIO(''), relpath)
    return node
def test_single(db_test_app):
    """Parse a successful ``gulp.single`` calculation output."""
    retrieved = FolderData()
    with open_resource_binary("gulp", "optimize_reaxff_pyrite", "main.gout") as handle:
        retrieved.put_object_from_filelike(handle, "main.gout", mode="wb")

    calc_node = db_test_app.generate_calcjob_node("gulp.single", retrieved)
    results, calcfunction = db_test_app.parse_from_node("gulp.single", calc_node)

    assert calcfunction.is_finished_ok
    assert "results" in results
def test_empty_output(db_test_app, plugin_name):
    """Parsing an empty stdout file should fail with ``ERROR_PARSING_STDOUT``."""
    retrieved = FolderData()
    retrieved.put_object_from_filelike(StringIO(""), "main.out")

    calc_node = db_test_app.generate_calcjob_node(plugin_name, retrieved)
    results, calcfunction = db_test_app.parse_from_node(plugin_name, calc_node)

    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_failed, calcfunction.exit_status
    expected_status = calc_node.process_class.exit_codes.ERROR_PARSING_STDOUT.status
    assert calcfunction.exit_status == expected_status
def test_optimize_no_cif(db_test_app):
    """Parsing a ``gulp.optimize`` run without its output CIF should fail with
    ``ERROR_CIF_FILE_MISSING``."""
    calc_cls = db_test_app.get_calc_cls("gulp.optimize")

    retrieved = FolderData()
    with open_resource_binary("gulp", "optimize_reaxff_pyrite", "main.gout") as handle:
        retrieved.put_object_from_filelike(handle, "main.gout", mode="wb")

    calc_node = db_test_app.generate_calcjob_node("gulp.optimize", retrieved)
    results, calcfunction = db_test_app.parse_from_node("gulp.optimize", calc_node)

    assert calcfunction.is_finished
    assert not calcfunction.is_finished_ok
    assert calcfunction.exit_status == calc_cls.exit_codes.ERROR_CIF_FILE_MISSING.status
def test_failed_pbs(db_test_app, plugin_name, fcontent, error_msg):
    """Scheduler stderr content should be mapped to the expected exit code."""
    retrieved = FolderData()
    retrieved.put_object_from_filelike(StringIO(fcontent), "_scheduler-stderr.txt")

    calc_node = db_test_app.generate_calcjob_node(plugin_name, retrieved)
    results, calcfunction = db_test_app.parse_from_node(plugin_name, calc_node)

    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_failed, calcfunction.exit_status
    expected_status = calc_node.process_class.exit_codes[error_msg].status
    assert calcfunction.exit_status == expected_status
def test_missing_isofile(db_test_app, plugin_name):
    """Parsing without the isovalue file should fail with ``ERROR_ISOVALUE_FILE_MISSING``."""
    retrieved = FolderData()
    with open_resource_binary(
            "doss", "cubic_rocksalt_orbitals",
            "cubic-rocksalt_2x1_pdos.doss.out") as handle:
        retrieved.put_object_from_filelike(handle, "main.out", mode="wb")

    calc_node = db_test_app.generate_calcjob_node(plugin_name, retrieved)
    results, calcfunction = db_test_app.parse_from_node(plugin_name, calc_node)

    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_failed, calcfunction.exit_status
    expected_status = calc_node.process_class.exit_codes.ERROR_ISOVALUE_FILE_MISSING.status
    assert calcfunction.exit_status == expected_status
def test_missing_density(db_test_app, plugin_name):
    """Parsing without the density file should fail with ``ERROR_DENSITY_FILE_MISSING``."""
    retrieved = FolderData()
    with open_resource_binary("ech3", "mgo_sto3g_scf", "main.out") as handle:
        retrieved.put_object_from_filelike(handle, "main.out", mode="wb")

    calc_node = db_test_app.generate_calcjob_node(plugin_name, retrieved)
    # the temporary folder is intentionally left empty (no density file)
    with db_test_app.sandbox_folder() as temp_folder:
        results, calcfunction = db_test_app.parse_from_node(
            plugin_name, calc_node, retrieved_temp=temp_folder.abspath)

    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_failed, calcfunction.exit_status
    expected_status = calc_node.process_class.exit_codes.ERROR_DENSITY_FILE_MISSING.status
    assert calcfunction.exit_status == expected_status
def test_failed_scf_convergence(db_test_app, plugin_name):
    """An unconverged SCF output should fail with ``UNCONVERGED_SCF``."""
    retrieved = FolderData()
    with open_resource_binary("crystal", "failed", "FAILED_SCF_bcc_iron.out") as handle:
        retrieved.put_object_from_filelike(handle, "main.out", mode="wb")

    calc_node = db_test_app.generate_calcjob_node(plugin_name, retrieved)
    results, calcfunction = db_test_app.parse_from_node(plugin_name, calc_node)

    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_failed, calcfunction.exit_status
    assert calcfunction.exit_status == calc_node.process_class.exit_codes.UNCONVERGED_SCF.status
def test_optimize_success(db_test_app):
    """Parse a successful ``gulp.optimize`` calculation, with the output CIF present."""
    retrieved = FolderData()
    # both the main output and the optimised-structure CIF are required
    for filename in ("main.gout", "output.cif"):
        with open_resource_binary("gulp", "optimize_reaxff_pyrite", filename) as handle:
            retrieved.put_object_from_filelike(handle, filename, mode="wb")

    calc_node = db_test_app.generate_calcjob_node(
        "gulp.optimize", retrieved, options={"use_input_kinds": False})
    results, calcfunction = db_test_app.parse_from_node("gulp.optimize", calc_node)

    if not calcfunction.is_finished_ok:
        raise AssertionError(calcfunction.attributes)
    assert "results" in results
    assert "structure" in results
def test_optimize_1d_molecule(db_test_app, get_structure):
    """Parse a ``gulp.optimize`` calculation on a molecular (non-periodic) structure."""
    retrieved = FolderData()
    with open_resource_binary("gulp", "s2_polymer_opt", "main.gout") as handle:
        retrieved.put_object_from_filelike(handle, "main.gout", mode="wb")

    calc_node = db_test_app.generate_calcjob_node(
        "gulp.optimize",
        retrieved,
        input_nodes={"structure": get_structure("s2_molecule")},
    )
    results, calcfunction = db_test_app.parse_from_node("gulp.optimize", calc_node)

    if not calcfunction.is_finished_ok:
        raise AssertionError(calcfunction.attributes)
    assert "results" in results
    assert "structure" in results
def test_success(db_test_app, plugin_name, data_regression):
    """Parse a successful DOSS output and regression-check the (rounded) results."""
    retrieved = FolderData()
    with open_resource_binary(
            "doss", "cubic_rocksalt_orbitals",
            "cubic-rocksalt_2x1_pdos.doss.out") as handle:
        retrieved.put_object_from_filelike(handle, "main.out", mode="wb")

    calc_node = db_test_app.generate_calcjob_node(plugin_name, retrieved)
    results, calcfunction = db_test_app.parse_from_node(plugin_name, calc_node)

    assert calcfunction.is_finished_ok, calcfunction.exception
    assert "results" in results

    # round floats so the regression file is stable across platforms
    results_attr = {
        key: round(value, 7) if isinstance(value, float) else value
        for key, value in results["results"].attributes.items()
    }
    data_regression.check({"results": results_attr})
def test_success(db_test_app, plugin_name, data_regression):
    """Parse a successful ECH3 output and regression-check results and charge."""
    retrieved = FolderData()
    with open_resource_binary("ech3", "mgo_sto3g_scf", "main.out") as handle:
        retrieved.put_object_from_filelike(handle, "main.out", mode="wb")

    calc_node = db_test_app.generate_calcjob_node(plugin_name, retrieved)
    with resource_context("ech3", "mgo_sto3g_scf") as path:
        results, calcfunction = db_test_app.parse_from_node(
            plugin_name, calc_node, retrieved_temp=str(path))

    assert calcfunction.is_finished_ok, calcfunction.exception
    assert "results" in results
    assert "charge" in results

    # round floats so the regression files are stable across platforms
    data_regression.check({
        "results": recursive_round(results["results"].attributes, 7),
        "charge": recursive_round(results["charge"].attributes, 7),
    })
def test_optimize_no_convergence(db_test_app):
    """An optimisation that hits the step limit should fail, but still emit the
    output structure."""
    retrieved = FolderData()
    with open_resource_binary("gulp", "failed", "opt_step_limit.gout") as handle:
        retrieved.put_object_from_filelike(handle, "main.gout", mode="wb")
    with open_resource_binary("gulp", "optimize_reaxff_pyrite", "output.cif") as handle:
        retrieved.put_object_from_filelike(handle, "output.cif", mode="wb")

    calc_cls = db_test_app.get_calc_cls("gulp.optimize")
    calc_node = db_test_app.generate_calcjob_node(
        "gulp.optimize", retrieved, options={"use_input_kinds": False})
    results, calcfunction = db_test_app.parse_from_node("gulp.optimize", calc_node)

    assert calcfunction.is_finished
    assert not calcfunction.is_finished_ok
    assert calcfunction.exit_status == calc_cls.exit_codes.ERROR_OPTIMISE_MAX_ATTEMPTS.status

    # the output structure should still be passed though
    assert "results" in results
    assert "structure" in results
def test_get_scheduler_stderr(self):
    """Verify that ``get_scheduler_stderr`` returns ``None`` until both the
    ``scheduler_stderr`` option is set and the file exists in the retrieved folder,
    and then returns the file content.
    """
    option_key = 'scheduler_stderr'
    option_value = '_scheduler-error.txt'
    stderr = 'some\nstandard error'

    node = CalcJobNode(computer=self.computer, )
    node.set_option('resources', {
        'num_machines': 1,
        'num_mpiprocs_per_machine': 1
    })
    retrieved = FolderData()

    # No scheduler error filename option so should return `None`
    self.assertEqual(node.get_scheduler_stderr(), None)

    # No retrieved folder so should return `None`
    node.set_option(option_key, option_value)
    self.assertEqual(node.get_scheduler_stderr(), None)

    # Now it has a retrieved folder, but the file does not actually exist in it:
    # it should not raise, but return `None`
    node.store()
    retrieved.store()
    retrieved.add_incoming(node, link_type=LinkType.CREATE, link_label='retrieved')
    self.assertEqual(node.get_scheduler_stderr(), None)

    # Add the file to the retrieved folder; now the content should be returned
    with tempfile.NamedTemporaryFile(mode='w+') as handle:
        handle.write(stderr)
        handle.flush()
        handle.seek(0)
        retrieved.put_object_from_filelike(handle, option_value, force=True)
    self.assertEqual(node.get_scheduler_stderr(), stderr)
def test_run_error(db_test_app, plugin_name):
    """Check if the parser runs without producing errors."""
    # retrieved files, in insertion order (dicts preserve it)
    contents = {
        'log.lammps': get_log(),
        'x-trajectory.lammpstrj': get_traj_force(),
        '_scheduler-stdout.txt': 'ERROR description',
        '_scheduler-stderr.txt': '',
    }
    retrieved = FolderData()
    for filename, text in contents.items():
        retrieved.put_object_from_filelike(io.StringIO(text), filename)

    calc_node = db_test_app.generate_calcjob_node(plugin_name, retrieved)
    parser = ParserFactory(plugin_name)
    with db_test_app.sandbox_folder() as temp_path:
        with temp_path.open('x-trajectory.lammpstrj', 'w') as handle:
            handle.write(get_traj_force())
        results, calcfunction = parser.parse_from_node(  # pylint: disable=unused-variable
            calc_node,
            retrieved_temporary_folder=temp_path.abspath,
        )

    print(get_calcjob_report(calc_node))

    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_failed, calcfunction.exit_status
    assert calcfunction.exit_status == calc_node.process_class.exit_codes.ERROR_LAMMPS_RUN.status
def test_missing_traj(db_test_app, plugin_name):
    """Check if the trajectory file is produced during calculation."""
    retrieved = FolderData()
    retrieved.put_object_from_filelike(io.StringIO(get_log()), 'log.lammps')
    for filename in ('_scheduler-stdout.txt', '_scheduler-stderr.txt'):
        retrieved.put_object_from_filelike(io.StringIO(''), filename)

    calc_node = db_test_app.generate_calcjob_node(plugin_name, retrieved)
    parser = ParserFactory(plugin_name)
    # note: no trajectory file is written to the temporary folder
    with db_test_app.sandbox_folder() as temp_path:
        results, calcfunction = parser.parse_from_node(  # pylint: disable=unused-variable
            calc_node,
            retrieved_temporary_folder=temp_path.abspath,
        )

    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_failed, calcfunction.exit_status
    expected_status = calc_node.process_class.exit_codes.ERROR_TRAJ_FILE_MISSING.status
    assert calcfunction.exit_status == expected_status
def _generate_calc_job_node(
    entry_point_name,
    results_folder,
    inputs=None,
    computer=None,
    outputs=None,
    outfile_override=None,
):
    """
    Generate a CalcJob node with fake retrieved node in the tests/data

    :param entry_point_name: calculation entry point, resolved via ``CalculationFactory``
    :param results_folder: folder name under ``tests/data`` whose files populate the
        fake ``retrieved`` node
    :param inputs: optional dict monkey-patched onto ``node.inputs``; its ``Node``
        values are also linked as ``INPUT_CALC`` incoming links
    :param computer: optional computer for the node; defaults to ``db_test_app.localhost``
    :param outputs: optional mapping of link label -> node to attach as CREATE outputs
    :param outfile_override: optional mapping of filename -> content applied on top of
        the copied folder; a value of ``None`` deletes that file instead
    :return: the stored ``CalcJobNode`` with its ``retrieved`` folder attached
    """
    calc_class = CalculationFactory(entry_point_name)
    entry_point = format_entry_point_string('aiida.calculations', entry_point_name)
    builder = calc_class.get_builder()

    if not computer:
        # NOTE(review): ``db_test_app`` is not a parameter of this function —
        # presumably a module-level fixture/handle; confirm it is in scope when called.
        computer = db_test_app.localhost
    node = CalcJobNode(computer=computer, process_type=entry_point)

    # Monkypatch the inputs
    if inputs is not None:
        inputs = AttributeDict(inputs)
        node.__dict__['inputs'] = inputs
        # Add direct inputs, pseudos are omitted
        for k, v in inputs.items():
            if isinstance(v, Node):
                if not v.is_stored:
                    v.store()
                node.add_incoming(v, link_type=LinkType.INPUT_CALC, link_label=k)

        # merge builder defaults with the caller-supplied metadata options, then
        # mirror the relevant filenames onto the node attributes the parser reads
        options = builder.metadata.options
        options.update(inputs.metadata.options)
        node.set_attribute('input_filename', options.input_filename)
        node.set_attribute('seedname', options.seedname)
        node.set_attribute('output_filename', options.output_filename)
        node.set_attribute('error_filename', 'aiida.err')

    node.set_option('resources', {
        'num_machines': 1,
        'num_mpiprocs_per_machine': 1
    })
    node.set_option('max_wallclock_seconds', 1800)
    node.store()

    # copy the fixture folder into a fresh FolderData acting as the retrieved node
    filepath = this_folder.parent / 'data' / results_folder
    retrieved = FolderData()
    retrieved.put_object_from_tree(str(filepath.resolve()))

    # Apply overriding output files
    if outfile_override is not None:
        for key, content in outfile_override.items():
            if content is None:
                retrieved.delete_object(key)
                continue
            buf = BytesIO(content.encode())
            retrieved.put_object_from_filelike(buf, key)

    retrieved.add_incoming(node, link_type=LinkType.CREATE, link_label='retrieved')
    retrieved.store()

    if outputs is not None:
        for label, out_node in outputs.items():
            out_node.add_incoming(node, link_type=LinkType.CREATE, link_label=label)
            if not out_node.is_stored:
                out_node.store()

    return node