def update_jobs():
    """
    Calls an update for each set of pairs (machine, aiidauser)
    """
    from aiida.orm import JobCalculation, Computer, User
    from aiida.backends.utils import get_authinfo

    # I create a unique set of pairs (computer, aiidauser)
    computers_users_to_check = list(
        JobCalculation._get_all_with_state(
            state=calc_states.WITHSCHEDULER,
            only_computer_user_pairs=True,
            only_enabled=True)
    )

    for computer, aiidauser in computers_users_to_check:

        execlogger.debug("({},{}) pair to check".format(
            aiidauser.email, computer.name))

        try:
            authinfo = get_authinfo(computer.dbcomputer, aiidauser._dbuser)
            computed_calcs = update_running_calcs_status(authinfo)
        except Exception as e:
            msg = ("Error while updating calculation status "
                   "for aiidauser={} on computer={}, "
                   "error type is {}, error message: {}".format(
                       aiidauser.email,
                       computer.name,
                       e.__class__.__name__,
                       e.message))
            execlogger.error(msg)
            # Continue with next computer
            continue
def getfile(self, relpath, destpath):
    """
    Connects to the remote folder and retrieves the content of a file,
    copying it to a path on the local computer.

    :param relpath: The relative path of the file to retrieve.
    :param destpath: A path on the local computer where to copy the file.
    """
    from aiida.backends.utils import get_authinfo

    authinfo = get_authinfo(computer=self.get_computer(),
                            aiidauser=self.get_user())
    t = authinfo.get_transport()

    with t:
        try:
            full_path = os.path.join(self.get_remote_path(), relpath)
            t.getfile(full_path, destpath)
        except IOError as e:
            if e.errno == 2:  # file not existing
                raise IOError("The required remote file {} on {} does not "
                              "exist or has been deleted.".format(
                                  full_path, self.get_computer().name))
            else:
                raise
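# Illustrative usage sketch for RemoteData.getfile, assuming a configured
# AiiDA 0.x profile; the pk 1234, the remote file name 'aiida.out' and the
# local destination path are hypothetical.
from aiida import load_dbenv, is_dbenv_loaded
if not is_dbenv_loaded():
    load_dbenv()

from aiida.orm import load_node

remote = load_node(1234)  # hypothetical pk of a RemoteData node
remote.getfile('aiida.out', '/tmp/aiida.out')  # copy the remote file locally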
def retrieve_jobs():
    from aiida.orm import JobCalculation, Computer
    from aiida.backends.utils import get_authinfo

    # I create a unique set of pairs (computer, aiidauser)
    computers_users_to_check = list(
        JobCalculation._get_all_with_state(
            state=calc_states.COMPUTED,
            only_computer_user_pairs=True,
            only_enabled=True)
    )

    for computer, aiidauser in computers_users_to_check:
        execlogger.debug("({},{}) pair to check".format(
            aiidauser.email, computer.name))
        try:
            authinfo = get_authinfo(computer.dbcomputer, aiidauser._dbuser)
            retrieve_computed_for_authinfo(authinfo)
        except Exception as e:
            msg = ("Error while retrieving calculation status for "
                   "aiidauser={} on computer={}, "
                   "error type is {}, error message: {}".format(
                       aiidauser.email,
                       computer.name,
                       e.__class__.__name__,
                       e.message))
            execlogger.error(msg)
            # Continue with next computer
            continue
def submit_jobs():
    """
    Submit all jobs in the TOSUBMIT state.
    """
    from aiida.orm import JobCalculation, Computer, User
    from aiida.utils.logger import get_dblogger_extra
    from aiida.backends.utils import get_authinfo

    computers_users_to_check = list(
        JobCalculation._get_all_with_state(
            state=calc_states.TOSUBMIT,
            only_computer_user_pairs=True,
            only_enabled=True)
    )

    for computer, aiidauser in computers_users_to_check:
        #~ user = User.search_for_users(id=dbuser_id)
        #~ computer = Computer.get(dbcomputer_id)

        execlogger.debug("({},{}) pair to submit".format(
            aiidauser.email, computer.name))

        try:
            try:
                authinfo = get_authinfo(computer.dbcomputer, aiidauser._dbuser)
            except AuthenticationError:
                # TODO!!
                # Put each calculation in the SUBMISSIONFAILED state because
                # I do not have AuthInfo to submit them
                calcs_to_inquire = JobCalculation._get_all_with_state(
                    state=calc_states.TOSUBMIT,
                    computer=computer, user=aiidauser)
                for calc in calcs_to_inquire:
                    try:
                        calc._set_state(calc_states.SUBMISSIONFAILED)
                    except ModificationNotAllowed:
                        # Someone already set it, just skip
                        pass
                    logger_extra = get_dblogger_extra(calc)
                    execlogger.error("Submission of calc {} failed, "
                                     "computer pk= {} ({}) is not configured "
                                     "for aiidauser {}".format(
                                         calc.pk, computer.pk,
                                         computer.get_name(),
                                         aiidauser.email),
                                     extra=logger_extra)
                # Go to the next (dbcomputer,aiidauser) pair
                continue

            submitted_calcs = submit_jobs_with_authinfo(authinfo)
        except Exception as e:
            import traceback
            msg = ("Error while submitting jobs "
                   "for aiidauser={} on computer={}, "
                   "error type is {}, traceback: {}".format(
                       aiidauser.email,
                       computer.name,
                       e.__class__.__name__,
                       traceback.format_exc()))
            print(msg)
            execlogger.error(msg)
            # Continue with next computer
            continue
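# Illustrative only: a rough sketch of one pass that drives the three tasks
# above (submit, update, retrieve) in order. The real AiiDA daemon schedules
# them as separate periodic tasks with their own intervals; the function name
# below is hypothetical.
from aiida import load_dbenv, is_dbenv_loaded


def daemon_tick():
    if not is_dbenv_loaded():
        load_dbenv()
    submit_jobs()    # TOSUBMIT calculations are handed to the scheduler
    update_jobs()    # WITHSCHEDULER calculations are polled for state changes
    retrieve_jobs()  # COMPUTED calculations have their outputs retrieved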
def is_empty(self):
    """
    Check if remote folder is empty
    """
    from aiida.backends.utils import get_authinfo

    authinfo = get_authinfo(computer=self.get_computer(),
                            aiidauser=self.get_user())
    t = authinfo.get_transport()

    with t:
        try:
            t.chdir(self.get_remote_path())
        except IOError as e:
            if e.errno == 2:  # directory not existing
                return True  # is indeed empty, i.e. unusable
        return not t.listdir()
def _clean(self):
    """
    Remove all content of the remote folder on the remote computer
    """
    from aiida.backends.utils import get_authinfo
    import os

    authinfo = get_authinfo(computer=self.get_computer(),
                            aiidauser=self.get_user())
    t = authinfo.get_transport()

    remote_dir = self.get_remote_path()
    pre, post = os.path.split(remote_dir)

    with t:
        try:
            t.chdir(pre)
            t.rmtree(post)
        except IOError as e:
            if e.errno == 2:  # directory not existing
                pass
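# Illustrative usage sketch for RemoteData._clean, assuming a configured
# AiiDA 0.x profile; the pk 1234 is hypothetical and is expected to point to
# a finished JobCalculation with a 'remote_folder' output node.
from aiida import load_dbenv, is_dbenv_loaded
if not is_dbenv_loaded():
    load_dbenv()

from aiida.orm import load_node

calc = load_node(1234)                  # hypothetical JobCalculation pk
remote_folder = calc.out.remote_folder  # its RemoteData output

if not remote_folder.is_empty():
    remote_folder._clean()              # wipe the whole remote work directory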
def test_full(new_database, new_workdir):
    from aiida_crystal17.calculations.cry_main_immigrant import CryMainImmigrantCalculation

    computer = tests.get_computer(workdir=new_workdir, configure=True)
    code = tests.get_code(entry_point='crystal17.main', computer=computer)

    inpath = os.path.join(TEST_DIR, "input_files",
                          'nio_sto3g_afm.crystal.d12')
    outpath = os.path.join(TEST_DIR, "output_files",
                           'nio_sto3g_afm.crystal.out')

    shutil.copy(inpath, new_workdir)
    shutil.copy(outpath, new_workdir)

    resources = {'num_machines': 1, 'num_mpiprocs_per_machine': 16}

    calc = CryMainImmigrantCalculation(
        computer=computer,
        resources=resources,
        remote_workdir=new_workdir,
        input_file_name='nio_sto3g_afm.crystal.d12',
        output_file_name='nio_sto3g_afm.crystal.out')
    calc.use_code(code)

    try:
        # aiida v0.12
        from aiida.backends.utils import get_authinfo, get_automatic_user
        authinfo = get_authinfo(computer=computer,
                                aiidauser=get_automatic_user())
        transport = authinfo.get_transport()
    except ImportError:
        # aiida v1
        transport = computer.get_transport()

    with transport as open_transport:
        calc.create_input_nodes(open_transport)
        calc.prepare_for_retrieval_and_parsing(open_transport)

    assert set(calc.get_inputs_dict().keys()) == set(
        ['basis_O', 'parameters', 'settings', 'basis_Ni', 'code', 'structure'])
def get_computers_work_dir(calculations, user):
    """
    Get a list of computers and their remote working directories.

    `calculations` should be a list of JobCalculation objects.
    """
    from aiida.orm.computer import Computer
    from aiida.backends.utils import get_authinfo

    computers = [Computer.get(c.dbcomputer) for c in calculations]

    remotes = {}
    for computer in computers:
        remotes[computer.name] = {
            'transport': get_authinfo(computer=computer,
                                      aiidauser=user).get_transport(),
            'computer': computer,
        }

    return remotes
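# Illustrative usage sketch for get_computers_work_dir, assuming a configured
# AiiDA 0.x profile; the pks 1234 and 1235 are hypothetical JobCalculations.
from aiida import load_dbenv, is_dbenv_loaded
if not is_dbenv_loaded():
    load_dbenv()

from aiida.orm import load_node
from aiida.backends.utils import get_automatic_user

calcs = [load_node(pk) for pk in (1234, 1235)]
remotes = get_computers_work_dir(calcs, get_automatic_user())

for name, info in remotes.items():
    with info['transport'] as t:
        print("{}: connected as {}".format(name, t.whoami()))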
def listdir(self, relpath="."):
    """
    Connects to the remote folder and lists the directory content.

    :param relpath: If 'relpath' is specified, lists the content of the
        given subfolder.
    :return: a flat list of file/directory names (as strings).
    """
    from aiida.backends.utils import get_authinfo

    authinfo = get_authinfo(computer=self.get_computer(),
                            aiidauser=self.get_user())
    t = authinfo.get_transport()

    with t:
        try:
            full_path = os.path.join(self.get_remote_path(), relpath)
            t.chdir(full_path)
        except IOError as e:
            if e.errno == 2 or e.errno == 20:  # directory not existing or not a directory
                exc = IOError("The required remote folder {} on {} does not exist, is not a "
                              "directory or has been deleted.".format(
                                  full_path, self.get_computer().name))
                exc.errno = e.errno
                raise exc
            else:
                raise

        try:
            return t.listdir()
        except IOError as e:
            if e.errno == 2 or e.errno == 20:  # directory not existing or not a directory
                exc = IOError("The required remote folder {} on {} does not exist, is not a "
                              "directory or has been deleted.".format(
                                  full_path, self.get_computer().name))
                exc.errno = e.errno
                raise exc
            else:
                raise
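# Illustrative usage sketch for RemoteData.listdir, assuming a configured
# AiiDA 0.x profile; the pk 1234 and the 'out' subfolder are hypothetical.
# A missing or already-cleaned remote directory surfaces as the IOError
# raised above.
from aiida import load_dbenv, is_dbenv_loaded
if not is_dbenv_loaded():
    load_dbenv()

from aiida.orm import load_node

remote = load_node(1234)  # hypothetical RemoteData pk
try:
    for name in remote.listdir('out'):
        print(name)
except IOError as exc:
    # e.g. the scratch area was purged or the folder was already cleaned
    print("Remote folder not available: {}".format(exc))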
# Initialize the pw_job2 calculation node.
calc2 = PwimmigrantCalculation(computer=computer,
                               resources=resources,
                               remote_workdir='/scratch/',
                               input_file_name='pw_job2.in',
                               output_file_name='pw_job2.out')

# Link the code that was used to run the calculations.
calc1.use_code(code)
calc2.use_code(code)

# Get the computer's transport and create an instance.
from aiida.backends.utils import get_authinfo, get_automatic_user
authinfo = get_authinfo(computer=computer, aiidauser=get_automatic_user())
transport = authinfo.get_transport()

# Open the transport for the duration of the immigrations, so it's not
# reopened for each one. This is best performed using the transport's
# context guard through the ``with`` statement.
with transport as open_transport:

    # Parse the calculations' input files to automatically generate and link the
    # calculations' input nodes.
    calc1.create_input_nodes(open_transport)
    calc2.create_input_nodes(open_transport)

    # Store the calculations and their input nodes and tell the daemon the output
    # is ready to be retrieved and parsed.
    calc1.prepare_for_retrieval_and_parsing(open_transport)
def calculation_cleanworkdir(self, *args):
    """
    Clean the working directory of calculations by removing all the content
    of the associated RemoteFolder node. Calculations can be identified by
    pk with the -k flag or by specifying limits on the modification times
    with the -p/-o flags.
    """
    import argparse

    parser = argparse.ArgumentParser(
        prog=self.get_full_command_name(),
        description="""
            Clean all content of all output remote folders of calculations,
            passed as a list of pks, or identified by modification time.

            If a list of calculation PKs is not passed with the -k option, one or
            both of the -p and -o options have to be specified. If both are
            specified, a logical AND is done between the two, i.e. the calculations
            that will be cleaned have been modified AFTER [-p option] days from now
            but BEFORE [-o option] days from now.

            Passing the -f option will prevent the confirmation dialog from being
            prompted.
            """
    )
    parser.add_argument(
        '-k', '--pk', metavar='PK', type=int, nargs='+', dest='pk',
        help='The principal key (PK) of the calculations of which to clean the work directory'
    )
    parser.add_argument(
        '-f', '--force', action='store_true',
        help='Force the cleaning (no prompt)'
    )
    parser.add_argument(
        '-p', '--past-days', metavar='N', type=int, action='store', dest='past_days',
        help='Include calculations that have been modified within the last N days',
    )
    parser.add_argument(
        '-o', '--older-than', metavar='N', type=int, action='store', dest='older_than',
        help='Include calculations that have been modified more than N days ago',
    )
    parser.add_argument(
        '-c', '--computers', metavar='label', nargs='+', type=str, action='store', dest='computer',
        help='Include only calculations that were run on these computers'
    )

    if not is_dbenv_loaded():
        load_dbenv()

    from aiida.backends.utils import get_automatic_user
    from aiida.backends.utils import get_authinfo
    from aiida.common.utils import query_yes_no
    from aiida.orm.computer import Computer as OrmComputer
    from aiida.orm.user import User as OrmUser
    from aiida.orm.calculation import Calculation as OrmCalculation
    from aiida.orm.querybuilder import QueryBuilder
    from aiida.utils import timezone
    import datetime

    parsed_args = parser.parse_args(args)

    # If a pk is given then the -o & -p options should not be specified
    if parsed_args.pk is not None:
        if (parsed_args.past_days is not None or
                parsed_args.older_than is not None):
            print("You cannot specify both a list of calculation pks and "
                  "the -p or -o options")
            return
    # If no pk is given then at least one of the -o & -p options should be specified
    else:
        if (parsed_args.past_days is None and
                parsed_args.older_than is None):
            print("You should specify at least a list of calculations or "
                  "the -p, -o options")
            return

    qb_user_filters = dict()
    user = OrmUser(dbuser=get_automatic_user())
    qb_user_filters["email"] = user.email

    qb_computer_filters = dict()
    if parsed_args.computer is not None:
        qb_computer_filters["name"] = {"in": parsed_args.computer}

    qb_calc_filters = dict()
    if parsed_args.past_days is not None:
        pd_ts = timezone.now() - datetime.timedelta(days=parsed_args.past_days)
        qb_calc_filters["mtime"] = {">": pd_ts}
    if parsed_args.older_than is not None:
        ot_ts = timezone.now() - datetime.timedelta(days=parsed_args.older_than)
        qb_calc_filters["mtime"] = {"<": ot_ts}
    if parsed_args.pk is not None:
        print("parsed_args.pk: ", parsed_args.pk)
        qb_calc_filters["id"] = {"in": parsed_args.pk}

    qb = QueryBuilder()
    qb.append(OrmCalculation, tag="calc",
              filters=qb_calc_filters,
              project=["id", "uuid", "attributes.remote_workdir"])
    qb.append(OrmComputer, computer_of="calc", tag="computer",
              project=["*"], filters=qb_computer_filters)
    qb.append(OrmUser, creator_of="calc", tag="user",
              project=["*"], filters=qb_user_filters)

    no_of_calcs = qb.count()
    if no_of_calcs == 0:
        print("No calculations found with the given criteria.")
        return

    print("Found {} calculations with the given criteria.".format(no_of_calcs))
    if not parsed_args.force:
        if not query_yes_no("Are you sure you want to clean the work "
                            "directory?", "no"):
            return

    # get the uuids of all calculations matching the filters
    calc_list_data = qb.dict()

    # get all computers associated to the calc uuids above, and load them
    # we group them by uuid to avoid computer duplicates
    comp_uuid_to_computers = {_["computer"]["*"].uuid: _["computer"]["*"]
                              for _ in calc_list_data}

    # now build a dictionary with the info of folders to delete
    remotes = {}
    for computer in comp_uuid_to_computers.values():
        # initialize a key of info for a given computer
        remotes[computer.name] = {
            'transport': get_authinfo(computer=computer,
                                      aiidauser=user._dbuser).get_transport(),
            'computer': computer,
        }

        # select the calc pks done on this computer
        this_calc_pks = [_["calc"]["id"] for _ in calc_list_data
                         if _["computer"]["*"].id == computer.id]
        this_calc_uuids = [unicode(_["calc"]["uuid"]) for _ in calc_list_data
                           if _["computer"]["*"].id == computer.id]
        remote_workdirs = [_["calc"]["attributes.remote_workdir"]
                           for _ in calc_list_data
                           if _["calc"]["id"] in this_calc_pks
                           if _["calc"]["attributes.remote_workdir"] is not None]
        remotes[computer.name]['remotes'] = remote_workdirs
        remotes[computer.name]['uuids'] = this_calc_uuids

    # now proceed to cleaning
    for computer, dic in remotes.iteritems():
        print("Cleaning the work directory on computer {}.".format(computer))
        counter = 0
        t = dic['transport']
        with t:
            remote_user = t.whoami()
            aiida_workdir = dic['computer'].get_workdir().format(
                username=remote_user)

            t.chdir(aiida_workdir)
            # Hardcoding the sharding equal to 3 parts!
            existing_folders = t.glob('*/*/*')
            folders_to_delete = [i for i in existing_folders
                                 if i.replace("/", "") in dic['uuids']]

            for folder in folders_to_delete:
                t.rmtree(folder)
                counter += 1
                if counter % 20 == 0 and counter > 0:
                    print("Deleted work directories: {}".format(counter))

        print("{} remote folder(s) cleaned.".format(counter))
def calculation_cleanworkdir(self, *args):
    """
    Clean all the content of all the output remote folders of calculations,
    passed as a list of pks, or identified by modification time.

    If a list of calculation PKs is not passed through the -k option, one
    (and only one) of the -p and -o options has to be specified: you clean
    out calculations modified AFTER [-p option] days from now, or BEFORE
    [-o option] days from now. If you also pass the -f option, no
    confirmation will be asked.
    """
    import argparse

    parser = argparse.ArgumentParser(
        prog=self.get_full_command_name(),
        description="Clean work directory (i.e. remote folder) of AiiDA "
                    "calculations.")
    parser.add_argument("-k", "--pk", metavar="PK", type=int, nargs="+",
                        help="The principal key (PK) of the calculations "
                             "to clean the workdir of",
                        dest="pk")
    parser.add_argument("-f", "--force", action="store_true",
                        help="Force the cleaning (no prompt)")
    parser.add_argument("-p", "--past-days", metavar="N",
                        help="Add a filter to clean workdir of "
                             "calculations modified during the past N "
                             "days",
                        type=int, action="store", dest="past_days")
    parser.add_argument("-o", "--older-than", metavar="N",
                        help="Add a filter to clean workdir of "
                             "calculations that have been modified on a "
                             "date before N days ago",
                        type=int, action="store", dest="older_than")
    parser.add_argument("-c", "--computers", metavar="label", nargs="+",
                        help="Add a filter to clean workdir of "
                             "calculations on these computer(s) only",
                        type=str, action="store", dest="computer")

    if not is_dbenv_loaded():
        load_dbenv()

    from aiida.backends.utils import get_automatic_user
    from aiida.backends.utils import get_authinfo
    from aiida.common.utils import query_yes_no
    from aiida.orm.computer import Computer as OrmComputer
    from aiida.orm.user import User as OrmUser
    from aiida.orm.calculation import Calculation as OrmCalculation
    from aiida.orm.querybuilder import QueryBuilder
    from aiida.utils import timezone
    import datetime

    parsed_args = parser.parse_args(args)

    # If a pk is given then the -o & -p options should not be specified
    if parsed_args.pk is not None:
        if ((parsed_args.past_days is not None) or
                (parsed_args.older_than is not None)):
            print("You cannot specify both a list of calculation pks and "
                  "the -p or -o options")
            return
    # If no pk is given then at least one of the -o & -p options should be
    # specified
    else:
        if ((parsed_args.past_days is None) and
                (parsed_args.older_than is None)):
            print("You should specify at least a list of calculations or "
                  "the -p, -o options")
            return

    # At this point we know that either the pk or the -p -o options are
    # specified.
    # We also check that not both -o & -p options are specified
    if ((parsed_args.past_days is not None) and
            (parsed_args.older_than is not None)):
        print("The -p and -o options cannot be specified at the same time")
        return

    qb_user_filters = dict()
    user = OrmUser(dbuser=get_automatic_user())
    qb_user_filters["email"] = user.email

    qb_computer_filters = dict()
    if parsed_args.computer is not None:
        qb_computer_filters["name"] = {"in": parsed_args.computer}

    qb_calc_filters = dict()
    if parsed_args.past_days is not None:
        pd_ts = timezone.now() - datetime.timedelta(
            days=parsed_args.past_days)
        qb_calc_filters["mtime"] = {">": pd_ts}
    if parsed_args.older_than is not None:
        ot_ts = timezone.now() - datetime.timedelta(
            days=parsed_args.older_than)
        qb_calc_filters["mtime"] = {"<": ot_ts}
    if parsed_args.pk is not None:
        print("parsed_args.pk: ", parsed_args.pk)
        qb_calc_filters["id"] = {"in": parsed_args.pk}

    qb = QueryBuilder()
    qb.append(OrmCalculation, tag="calc",
              filters=qb_calc_filters,
              project=["id", "uuid", "attributes.remote_workdir"])
    qb.append(OrmComputer, computer_of="calc", tag="computer",
              project=["*"], filters=qb_computer_filters)
    qb.append(OrmUser, creator_of="calc", tag="user",
              project=["*"], filters=qb_user_filters)

    no_of_calcs = qb.count()
    if no_of_calcs == 0:
        print("No calculations found with the given criteria.")
        return

    print("Found {} calculations with the given criteria.".format(
        no_of_calcs))
    if not parsed_args.force:
        if not query_yes_no("Are you sure you want to clean the work "
                            "directory?", "no"):
            return

    # get the uuids of all calculations matching the filters
    calc_list_data = qb.dict()

    # get all computers associated to the calc uuids above, and load them
    # we group them by uuid to avoid computer duplicates
    comp_uuid_to_computers = {
        _["computer"]["*"].uuid: _["computer"]["*"] for _ in calc_list_data
    }

    # now build a dictionary with the info of folders to delete
    remotes = {}
    for computer in comp_uuid_to_computers.values():
        # initialize a key of info for a given computer
        remotes[computer.name] = {
            'transport': get_authinfo(computer=computer,
                                      aiidauser=user._dbuser).get_transport(),
            'computer': computer,
        }

        # select the calc pks done on this computer
        this_calc_pks = [
            _["calc"]["id"] for _ in calc_list_data
            if _["computer"]["*"].id == computer.id
        ]
        this_calc_uuids = [
            unicode(_["calc"]["uuid"]) for _ in calc_list_data
            if _["computer"]["*"].id == computer.id
        ]
        remote_workdirs = [
            _["calc"]["attributes.remote_workdir"] for _ in calc_list_data
            if _["calc"]["id"] in this_calc_pks
            if _["calc"]["attributes.remote_workdir"] is not None
        ]
        remotes[computer.name]['remotes'] = remote_workdirs
        remotes[computer.name]['uuids'] = this_calc_uuids

    # now proceed to cleaning
    for computer, dic in remotes.iteritems():
        print("Cleaning the work directory on computer {}.".format(computer))
        counter = 0
        t = dic['transport']
        with t:
            remote_user = t.whoami()
            aiida_workdir = dic['computer'].get_workdir().format(
                username=remote_user)

            t.chdir(aiida_workdir)
            # Hardcoding the sharding equal to 3 parts!
            existing_folders = t.glob('*/*/*')
            folders_to_delete = [
                i for i in existing_folders
                if i.replace("/", "") in dic['uuids']
            ]

            for folder in folders_to_delete:
                t.rmtree(folder)
                counter += 1
                if counter % 20 == 0 and counter > 0:
                    print("Deleted work directories: {}".format(counter))

        print("{} remote folder(s) cleaned.".format(counter))
def _prepare_for_submission(self, tempfolder, inputdict):
    """
    Routine which creates the input file(s) and prepares the calculation
    for submission.

    :param tempfolder: an aiida.common.folders.Folder subclass where the
        plugin should put all its files.
    :param inputdict: a dictionary with the input nodes, as they would be
        returned by get_inputdata_dict (without the Code!)
    """
    input_validator = self._get_input_validator(inputdict=inputdict)
    local_input_folder = input_validator(
        name='local_input_folder', valid_types=FolderData, required=False)
    remote_input_folder = input_validator(
        name='remote_input_folder', valid_types=RemoteData, required=False)

    parameters = input_validator(name='parameters', valid_types=ParameterData)
    param_dict = self._get_validated_parameters_dict(parameters)

    projections = input_validator(
        name='projections', valid_types=(OrbitalData, List), required=False)
    kpoints = input_validator(name='kpoints', valid_types=KpointsData)
    kpoint_path = input_validator(
        name='kpoint_path', valid_types=ParameterData, required=False)
    structure = input_validator(name='structure', valid_types=StructureData)

    settings = input_validator(
        name='settings', valid_types=ParameterData, required=False)
    if settings is None:
        settings_dict = {}
    else:
        settings_dict_raw = settings.get_dict()
        settings_dict = {
            key.lower(): val for key, val in settings_dict_raw.items()
        }
        if len(settings_dict_raw) != len(settings_dict):
            raise InputValidationError(
                'Input settings contain duplicate keys.')

    pp_setup = settings_dict.pop('postproc_setup', False)
    if pp_setup:
        param_dict.update({'postproc_setup': True})

    if local_input_folder is None and remote_input_folder is None and pp_setup is False:
        raise InputValidationError(
            'Either local_input_folder or remote_input_folder must be set.')

    code = input_validator(name='code', valid_types=Code)

    ############################################################
    # End basic check on inputs
    ############################################################
    random_projections = settings_dict.pop('random_projections', False)

    write_win(
        filename=tempfolder.get_abs_path(self._INPUT_FILE),
        parameters=param_dict,
        structure=structure,
        kpoints=kpoints,
        kpoint_path=kpoint_path,
        projections=projections,
        random_projections=random_projections,
    )

    if remote_input_folder is not None:
        remote_input_folder_uuid = remote_input_folder.get_computer().uuid
        remote_input_folder_path = remote_input_folder.get_remote_path()

        t_dest = get_authinfo(
            computer=remote_input_folder.get_computer(),
            aiidauser=remote_input_folder.get_user()).get_transport()
        with t_dest:
            remote_folder_content = t_dest.listdir(
                path=remote_input_folder_path)

    if local_input_folder is not None:
        local_folder_content = local_input_folder.get_folder_list()

    if pp_setup:
        required_files = []
    else:
        required_files = [
            self._SEEDNAME + suffix for suffix in ['.mmn', '.amn']
        ]
    optional_files = [
        self._SEEDNAME + suffix for suffix in ['.eig', '.chk', '.spn']
    ]
    input_files = required_files + optional_files
    wavefunctions_files = ['UNK*']

    def files_finder(file_list, exact_patterns, glob_patterns):
        result = [f for f in exact_patterns if (f in file_list)]
        import fnmatch
        for glob_p in glob_patterns:
            result += fnmatch.filter(file_list, glob_p)
        return result

    # Local FolderData has precedence over RemoteData
    if local_input_folder is not None:
        found_in_local = files_finder(local_folder_content, input_files,
                                      wavefunctions_files)
    else:
        found_in_local = []
    if remote_input_folder is not None:
        found_in_remote = files_finder(remote_folder_content, input_files,
                                       wavefunctions_files)
        found_in_remote = [
            f for f in found_in_remote if f not in found_in_local
        ]
    else:
        found_in_remote = []

    not_found = [
        f for f in required_files
        if f not in found_in_remote + found_in_local
    ]
    if len(not_found) != 0:
        raise InputValidationError(
            "{} necessary input files were not found: {} ".format(
                len(not_found), ', '.join(str(nf) for nf in not_found)))

    remote_copy_list = []
    remote_symlink_list = []
    local_copy_list = []
    # Here we enforce that everything except checkpoints are symlinked
    # because in W90 you never modify input files on the run
    ALWAYS_COPY_FILES = [self._CHK_FILE]
    for f in found_in_remote:
        file_info = (remote_input_folder_uuid,
                     os.path.join(remote_input_folder_path, f),
                     os.path.basename(f))
        if f in ALWAYS_COPY_FILES:
            remote_copy_list.append(file_info)
        else:
            remote_symlink_list.append(file_info)
    for f in found_in_local:
        local_copy_list.append(
            (local_input_folder.get_abs_path(f), os.path.basename(f)))

    # Add any custom copy/sym links
    remote_symlink_list += settings_dict.pop(
        "additional_remote_symlink_list", [])
    remote_copy_list += settings_dict.pop("additional_remote_copy_list", [])
    local_copy_list += settings_dict.pop("additional_local_copy_list", [])

    #######################################################################

    calcinfo = CalcInfo()
    calcinfo.uuid = self.uuid
    calcinfo.local_copy_list = local_copy_list
    calcinfo.remote_copy_list = remote_copy_list
    calcinfo.remote_symlink_list = remote_symlink_list

    codeinfo = CodeInfo()
    codeinfo.code_uuid = code.uuid
    # codeinfo.withmpi = True  # Current version of W90 can be run in parallel
    codeinfo.cmdline_params = [self._INPUT_FILE]

    calcinfo.codes_info = [codeinfo]
    calcinfo.codes_run_mode = code_run_modes.SERIAL

    # Retrieve files
    calcinfo.retrieve_list = []
    calcinfo.retrieve_list.append(self._OUTPUT_FILE)
    calcinfo.retrieve_list.append(self._ERROR_FILE)
    if pp_setup:
        calcinfo.retrieve_list.append(self._NNKP_FILE)
        calcinfo.retrieve_singlefile_list = [('output_nnkp', 'singlefile',
                                              self._NNKP_FILE)]

    calcinfo.retrieve_list += [
        '{}_band.dat'.format(self._SEEDNAME),
        '{}_band.kpt'.format(self._SEEDNAME)
    ]

    if settings_dict.pop('retrieve_hoppings', False):
        calcinfo.retrieve_list += [
            '{}_wsvec.dat'.format(self._SEEDNAME),
            '{}_hr.dat'.format(self._SEEDNAME),
            '{}_centres.xyz'.format(self._SEEDNAME),
        ]

    # Retrieves bands automatically, if they are calculated
    calcinfo.retrieve_list += settings_dict.pop("additional_retrieve_list", [])

    # pop input keys not used here
    settings_dict.pop('seedname', None)
    if settings_dict:
        raise InputValidationError(
            "The following keys in settings are unrecognized: {}".format(
                settings_dict.keys()))

    return calcinfo
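# Illustrative sketch of a 'settings' input node that _prepare_for_submission
# above would accept. The keys shown are exactly the ones popped by the
# routine; the extra retrieve entry 'aiida.werr' is hypothetical.
from aiida.orm.data.parameter import ParameterData

settings = ParameterData(dict={
    'postproc_setup': False,       # True: only write the .win file and retrieve the .nnkp
    'random_projections': True,    # forwarded to write_win
    'retrieve_hoppings': True,     # also retrieve _hr.dat, _wsvec.dat, _centres.xyz
    'additional_retrieve_list': ['aiida.werr'],  # hypothetical extra file
})
# Keys are lower-cased by the routine, and any key it does not recognize
# raises InputValidationError.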